diff --git a/.craft.yml b/.craft.yml index 6da0897b36..43bbfdd7bd 100644 --- a/.craft.yml +++ b/.craft.yml @@ -1,16 +1,29 @@ ---- -minVersion: '0.5.1' -github: - owner: getsentry - repo: sentry-python -targets: +minVersion: 0.34.1 +targets: - name: pypi - - name: github + includeNames: /^sentry[_\-]sdk.*$/ - name: gh-pages - name: registry - type: sdk - config: - canonical: pypi:sentry-sdk - -changelog: CHANGES.md -changelogPolicy: simple + sdks: + pypi:sentry-sdk: + - name: github + - name: aws-lambda-layer + includeNames: /^sentry-python-serverless-\d+(\.\d+)*\.zip$/ + layerName: SentryPythonServerlessSDK + compatibleRuntimes: + - name: python + versions: + # The number of versions must be, at most, the maximum number of + # runtimes AWS Lambda permits for a layer. + # On the other hand, AWS Lambda does not support every Python runtime. + # The supported runtimes are available in the following link: + # https://docs.aws.amazon.com/lambda/latest/dg/lambda-python.html + - python3.6 + - python3.7 + - python3.8 + - python3.9 + license: MIT + - name: sentry-pypi + internalPypiRepo: getsentry/pypi +changelog: CHANGELOG.md +changelogPolicy: auto diff --git a/.flake8 b/.flake8 index 9584e3843e..37f5883f00 100644 --- a/.flake8 +++ b/.flake8 @@ -1,18 +1,17 @@ [flake8] -ignore = - E203, // Handled by black (Whitespace before ':' -- handled by black) - E266, // Handled by black (Too many leading '#' for block comment) - E501, // Handled by black (Line too long) - W503, // Handled by black (Line break occured before a binary operator) - E402, // Sometimes not possible due to execution order (Module level import is not at top of file) - E731, // I don't care (Do not assign a lambda expression, use a def) - C901, // I don't care (Function is too complex) - B950, // Handled by black (Line too long by flake8-bugbear) - B011, // I don't care (Do not call assert False) - B014, // does not apply to Python 2 (redundant exception types by flake8-bugbear) - N812, // I don't care (Lowercase imported as non-lowercase by pep8-naming) - N804 // is a worse version of and conflicts with B902 (first argument of a classmethod should be named cls) -max-line-length = 80 -max-complexity = 18 -select = N,B,C,E,F,W,T4,B9 -exclude=checkouts,lol*,.tox +extend-ignore = + # Handled by black (Whitespace before ':' -- handled by black) + E203, + # Handled by black (Line too long) + E501, + # Sometimes not possible due to execution order (Module level import is not at top of file) + E402, + # I don't care (Do not assign a lambda expression, use a def) + E731, + # does not apply to Python 2 (redundant exception types by flake8-bugbear) + B014, + # I don't care (Lowercase imported as non-lowercase by pep8-naming) + N812, + # is a worse version of and conflicts with B902 (first argument of a classmethod should be named cls) + N804, +extend-exclude=checkouts,lol* diff --git a/.github/ISSUE_TEMPLATE/bug.yml b/.github/ISSUE_TEMPLATE/bug.yml new file mode 100644 index 0000000000..f6e47929eb --- /dev/null +++ b/.github/ISSUE_TEMPLATE/bug.yml @@ -0,0 +1,50 @@ +name: 🐞 Bug Report +description: Tell us about something that's not working the way we (probably) intend. +body: + - type: dropdown + id: type + attributes: + label: How do you use Sentry? + options: + - Sentry Saas (sentry.io) + - Self-hosted/on-premise + validations: + required: true + - type: input + id: version + attributes: + label: Version + description: Which SDK version? + placeholder: ex.
1.5.2 + validations: + required: true + - type: textarea + id: repro + attributes: + label: Steps to Reproduce + description: How can we see what you're seeing? Specific is terrific. + placeholder: |- + 1. What + 2. you + 3. did. + validations: + required: true + - type: textarea + id: expected + attributes: + label: Expected Result + validations: + required: true + - type: textarea + id: actual + attributes: + label: Actual Result + description: Logs? Screenshots? Yes, please. + validations: + required: true + - type: markdown + attributes: + value: |- + ## Thanks 🙏 + validations: + required: false diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml new file mode 100644 index 0000000000..7f40ddc56d --- /dev/null +++ b/.github/ISSUE_TEMPLATE/config.yml @@ -0,0 +1,6 @@ +blank_issues_enabled: false +contact_links: + - name: Support Request + url: https://sentry.io/support + about: Use our dedicated support channel for paid accounts. + diff --git a/.github/ISSUE_TEMPLATE/feature.yml b/.github/ISSUE_TEMPLATE/feature.yml new file mode 100644 index 0000000000..e462e3bae7 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/feature.yml @@ -0,0 +1,30 @@ +name: 💡 Feature Request +description: Create a feature request for sentry-python SDK. +labels: 'enhancement' +body: + - type: markdown + attributes: + value: Thanks for taking the time to file a feature request! Please fill out this form as completely as possible. + - type: textarea + id: problem + attributes: + label: Problem Statement + description: A clear and concise description of what you want and what your use case is. + placeholder: |- + I want to make whirled peas, but Sentry doesn't blend. + validations: + required: true + - type: textarea + id: expected + attributes: + label: Solution Brainstorm + description: We know you have bright ideas to share ... share away, friend. + placeholder: |- + Add a blender to Sentry. + validations: + required: true + - type: markdown + attributes: + value: |- + ## Thanks 🙏 + Check our [triage docs](https://open.sentry.io/triage/) for what to expect next.
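A note on the .flake8 hunk above: unlike "ignore", which replaces flake8's built-in default ignore list, "extend-ignore" appends to it, and "extend-exclude" relates to the default exclude list the same way. The defaults already include W503 (line break before binary operator), which is why W503 could be dropped from the new list without re-enabling it. A minimal sketch of the difference (E203 here is just an example code):

    [flake8]
    # "ignore = E203" would replace flake8's defaults, so W503 would fire again.
    # "extend-ignore" adds to the defaults, so W503 stays suppressed:
    extend-ignore = E203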
diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 0000000000..eadcd59879 --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,48 @@ +version: 2 +updates: +- package-ecosystem: pip + directory: "/" + schedule: + interval: weekly + open-pull-requests-limit: 10 + allow: + - dependency-type: direct + - dependency-type: indirect + ignore: + - dependency-name: pytest + versions: + - "> 3.7.3" + - dependency-name: pytest-cov + versions: + - "> 2.8.1" + - dependency-name: pytest-forked + versions: + - "> 1.1.3" + - dependency-name: sphinx + versions: + - ">= 2.4.a, < 2.5" + - dependency-name: tox + versions: + - "> 3.7.0" + - dependency-name: werkzeug + versions: + - "> 0.15.5, < 1" + - dependency-name: werkzeug + versions: + - ">= 1.0.a, < 1.1" + - dependency-name: mypy + versions: + - "0.800" + - dependency-name: sphinx + versions: + - 3.4.3 +- package-ecosystem: gitsubmodule + directory: "/" + schedule: + interval: weekly + open-pull-requests-limit: 10 +- package-ecosystem: "github-actions" + directory: "/" + schedule: + interval: weekly + open-pull-requests-limit: 10 diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 0000000000..45e26fbf21 --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,109 @@ +name: CI + +on: + push: + branches: + - master + - release/** + + pull_request: + +permissions: + contents: read + +env: + BUILD_CACHE_KEY: ${{ github.sha }} + CACHED_BUILD_PATHS: | + ${{ github.workspace }}/dist-serverless + +jobs: + lint: + name: Lint Sources + runs-on: ubuntu-latest + timeout-minutes: 10 + + steps: + - uses: actions/checkout@v3 + - uses: actions/setup-python@v4 + with: + python-version: 3.9 + + - run: | + pip install tox + tox -e linters + + check-ci-config: + name: Check CI config + runs-on: ubuntu-latest + timeout-minutes: 10 + + steps: + - uses: actions/checkout@v3 + - uses: actions/setup-python@v4 + with: + python-version: 3.9 + + - run: | + python scripts/split-tox-gh-actions/split-tox-gh-actions.py --fail-on-changes + + build_lambda_layer: + name: Build Package + runs-on: ubuntu-latest + timeout-minutes: 10 + + steps: + - uses: actions/checkout@v2 + - uses: actions/setup-python@v4 + with: + python-version: 3.9 + - name: Setup build cache + uses: actions/cache@v3 + id: build_cache + with: + path: ${{ env.CACHED_BUILD_PATHS }} + key: ${{ env.BUILD_CACHE_KEY }} + - name: Build Packages + run: | + echo "Creating directory containing Python SDK Lambda Layer" + pip install virtualenv + # This will also trigger "make dist" that creates the Python packages + make aws-lambda-layer + + echo "Saving SDK_VERSION for later" + export SDK_VERSION=$(grep "VERSION = " sentry_sdk/consts.py | cut -f3 -d' ' | tr -d '"') + echo "SDK_VERSION=$SDK_VERSION" + echo "SDK_VERSION=$SDK_VERSION" >> $GITHUB_ENV + - name: Upload Python AWS Lambda Layer + uses: getsentry/action-build-aws-lambda-extension@v1 + with: + artifact_name: ${{ github.sha }} + zip_file_name: sentry-python-serverless-${{ env.SDK_VERSION }}.zip + build_cache_paths: ${{ env.CACHED_BUILD_PATHS }} + build_cache_key: ${{ env.BUILD_CACHE_KEY }} + - name: Upload Python Packages + uses: actions/upload-artifact@v3 + with: + name: ${{ github.sha }} + path: | + dist/* + + docs: + name: Build SDK API Doc + runs-on: ubuntu-latest + timeout-minutes: 10 + + steps: + - uses: actions/checkout@v2 + - uses: actions/setup-python@v4 + with: + python-version: 3.9 + + - run: | + pip install virtualenv + make apidocs + cd docs/_build && zip -r gh-pages ./ + + - uses:
actions/upload-artifact@v3.1.1 + with: + name: ${{ github.sha }} + path: docs/_build/gh-pages.zip diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml new file mode 100644 index 0000000000..1d88a97406 --- /dev/null +++ b/.github/workflows/codeql-analysis.yml @@ -0,0 +1,74 @@ +# For most projects, this workflow file will not need changing; you simply need +# to commit it to your repository. +# +# You may wish to alter this file to override the set of languages analyzed, +# or to provide custom queries or build logic. +# +# ******** NOTE ******** +# We have attempted to detect the languages in your repository. Please check +# the `language` matrix defined below to confirm you have the correct set of +# supported CodeQL languages. +# +name: "CodeQL" + +on: + push: + branches: [ master ] + pull_request: + # The branches below must be a subset of the branches above + branches: [ master ] + schedule: + - cron: '18 18 * * 3' + +permissions: + contents: read + +jobs: + analyze: + permissions: + actions: read # for github/codeql-action/init to get workflow details + contents: read # for actions/checkout to fetch code + security-events: write # for github/codeql-action/autobuild to send a status report + name: Analyze + runs-on: ubuntu-latest + + strategy: + fail-fast: false + matrix: + language: [ 'python' ] + # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python' ] + # Learn more: + # https://docs.github.com/en/free-pro-team@latest/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#changing-the-languages-that-are-analyzed + + steps: + - name: Checkout repository + uses: actions/checkout@v3 + + # Initializes the CodeQL tools for scanning. + - name: Initialize CodeQL + uses: github/codeql-action/init@v2 + with: + languages: ${{ matrix.language }} + # If you wish to specify custom queries, you can do so here or in a config file. + # By default, queries listed here will override any specified in a config file. + # Prefix the list here with "+" to use these queries and those in the config file. + # queries: ./path/to/local/query, your-org/your-repo/queries@main + + # Autobuild attempts to build any compiled languages (C/C++, C#, or Java). + # If this step fails, then you should remove it and run the build manually (see below) + - name: Autobuild + uses: github/codeql-action/autobuild@v2 + + # ℹ️ Command-line programs to run using the OS shell.
+ # 📚 https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions + + # ✏️ If the Autobuild fails above, remove it and uncomment the following three lines + # and modify them (or add more) to build your code if your project + # uses a compiled language + + #- run: | + # make bootstrap + # make release + + - name: Perform CodeQL Analysis + uses: github/codeql-action/analyze@v2 diff --git a/.github/workflows/enforce-license-compliance.yml b/.github/workflows/enforce-license-compliance.yml new file mode 100644 index 0000000000..b331974711 --- /dev/null +++ b/.github/workflows/enforce-license-compliance.yml @@ -0,0 +1,16 @@ +name: Enforce License Compliance + +on: + push: + branches: [master, main, release/*] + pull_request: + branches: [master, main] + +jobs: + enforce-license-compliance: + runs-on: ubuntu-latest + steps: + - name: 'Enforce License Compliance' + uses: getsentry/action-enforce-license-compliance@main + with: + fossa_api_key: ${{ secrets.FOSSA_API_KEY }} diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml new file mode 100644 index 0000000000..139fe29007 --- /dev/null +++ b/.github/workflows/release.yml @@ -0,0 +1,28 @@ +name: Release + +on: + workflow_dispatch: + inputs: + version: + description: Version to release + required: true + force: + description: Force a release even when there are release-blockers (optional) + required: false + +jobs: + release: + runs-on: ubuntu-latest + name: "Release a new version" + steps: + - uses: actions/checkout@v3 + with: + token: ${{ secrets.GH_RELEASE_PAT }} + fetch-depth: 0 + - name: Prepare release + uses: getsentry/action-prepare-release@v1 + env: + GITHUB_TOKEN: ${{ secrets.GH_RELEASE_PAT }} + with: + version: ${{ github.event.inputs.version }} + force: ${{ github.event.inputs.force }} diff --git a/.github/workflows/stale.yml b/.github/workflows/stale.yml new file mode 100644 index 0000000000..b0793b49c3 --- /dev/null +++ b/.github/workflows/stale.yml @@ -0,0 +1,51 @@ +name: 'close stale issues/PRs' +on: + schedule: + - cron: '0 0 * * *' + workflow_dispatch: +permissions: + contents: read + +jobs: + stale: + permissions: + issues: write # for actions/stale to close stale issues + pull-requests: write # for actions/stale to close stale PRs + runs-on: ubuntu-latest + steps: + - uses: actions/stale@v6 + with: + repo-token: ${{ github.token }} + days-before-stale: 21 + days-before-close: 7 + only-labels: "" + operations-per-run: 100 + remove-stale-when-updated: true + debug-only: false + ascending: false + + exempt-issue-labels: "Status: Backlog,Status: In Progress" + stale-issue-label: "Status: Stale" + stale-issue-message: |- + This issue has gone three weeks without activity. In another week, I will close it. + + But! If you comment or otherwise update it, I will reset the clock, and if you label it `Status: Backlog` or `Status: In Progress`, I will leave it alone ... forever! + + ---- + + "A weed is but an unloved flower." ― _Ella Wheeler Wilcox_ 🥀 + close-issue-label: "" + close-issue-message: "" + + exempt-pr-labels: "Status: Backlog,Status: In Progress" + stale-pr-label: "Status: Stale" + stale-pr-message: |- + This pull request has gone three weeks without activity. In another week, I will close it. + + But! If you comment or otherwise update it, I will reset the clock, and if you label it `Status: Backlog` or `Status: In Progress`, I will leave it alone ... forever! + + ---- + + "A weed is but an unloved flower."
― _Ella Wheeler Wilcox_ 🥀 + close-pr-label: + close-pr-message: "" diff --git a/.github/workflows/test-common.yml b/.github/workflows/test-common.yml new file mode 100644 index 0000000000..06a5b1f80f --- /dev/null +++ b/.github/workflows/test-common.yml @@ -0,0 +1,71 @@ +name: Test Common + +on: + push: + branches: + - master + - release/** + + pull_request: + +permissions: + contents: read + +env: + BUILD_CACHE_KEY: ${{ github.sha }} + CACHED_BUILD_PATHS: | + ${{ github.workspace }}/dist-serverless + +jobs: + test: + name: Test Python ${{ matrix.python-version }}, ${{ matrix.os }} + runs-on: ${{ matrix.os }} + timeout-minutes: 45 + continue-on-error: true + strategy: + matrix: + # python3.6 reached EOL and is no longer being supported on + # new versions of hosted runners on Github Actions + # ubuntu-20.04 is the last version that supported python3.6 + # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877 + os: [ubuntu-20.04] + python-version: ["2.7", "3.5", "3.6", "3.7", "3.8", "3.9", "3.10"] + services: + postgres: + image: postgres + env: + POSTGRES_PASSWORD: sentry + # Set health checks to wait until postgres has started + options: >- + --health-cmd pg_isready + --health-interval 10s + --health-timeout 5s + --health-retries 5 + # Maps tcp port 5432 on service container to the host + ports: + - 5432:5432 + env: + SENTRY_PYTHON_TEST_POSTGRES_USER: postgres + SENTRY_PYTHON_TEST_POSTGRES_PASSWORD: sentry + SENTRY_PYTHON_TEST_POSTGRES_NAME: ci_test + steps: + - uses: actions/checkout@v3 + - uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + + - name: Setup Test Env + run: | + pip install codecov tox + + - name: Run Tests + timeout-minutes: 45 + shell: bash + run: | + set -x # print commands that are executed + coverage erase + + ./scripts/runtox.sh "py${{ matrix.python-version }}$" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch --ignore=tests/integrations + coverage combine .coverage* + coverage xml -i + codecov --file coverage.xml diff --git a/.github/workflows/test-integration-aiohttp.yml b/.github/workflows/test-integration-aiohttp.yml new file mode 100644 index 0000000000..7ec01b12db --- /dev/null +++ b/.github/workflows/test-integration-aiohttp.yml @@ -0,0 +1,73 @@ +name: Test aiohttp + +on: + push: + branches: + - master + - release/** + + pull_request: + +# Cancel in progress workflows on pull_requests.
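+# How the group key below achieves that: on pull_request runs, github.head_ref
+# is set, so runs for the same PR branch share one group and the newest run
+# cancels the older ones; on push runs, github.head_ref is empty, the unique
+# github.run_id is used instead, and pushes to master/release are never cancelled.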
+# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value +concurrency: + group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} + cancel-in-progress: true + +permissions: + contents: read + +env: + BUILD_CACHE_KEY: ${{ github.sha }} + CACHED_BUILD_PATHS: | + ${{ github.workspace }}/dist-serverless + +jobs: + test: + name: aiohttp, python ${{ matrix.python-version }}, ${{ matrix.os }} + runs-on: ${{ matrix.os }} + timeout-minutes: 45 + + strategy: + fail-fast: false + matrix: + python-version: ["3.7","3.8","3.9","3.10","3.11"] + # python3.6 reached EOL and is no longer being supported on + # new versions of hosted runners on Github Actions + # ubuntu-20.04 is the last version that supported python3.6 + # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877 + os: [ubuntu-20.04] + + steps: + - uses: actions/checkout@v3 + - uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + + - name: Setup Test Env + run: | + pip install codecov "tox>=3,<4" + + - name: Test aiohttp + timeout-minutes: 45 + shell: bash + run: | + set -x # print commands that are executed + coverage erase + + ./scripts/runtox.sh "${{ matrix.python-version }}-aiohttp" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch + coverage combine .coverage* + coverage xml -i + codecov --file coverage.xml + + check_required_tests: + name: All aiohttp tests passed or skipped + needs: test + # Always run this, even if a dependent job failed + if: always() + runs-on: ubuntu-20.04 + steps: + - name: Check for failures + if: contains(needs.test.result, 'failure') + run: | + echo "One of the dependent jobs have failed. You may need to re-run it." && exit 1 diff --git a/.github/workflows/test-integration-asgi.yml b/.github/workflows/test-integration-asgi.yml new file mode 100644 index 0000000000..39f63d6e89 --- /dev/null +++ b/.github/workflows/test-integration-asgi.yml @@ -0,0 +1,73 @@ +name: Test asgi + +on: + push: + branches: + - master + - release/** + + pull_request: + +# Cancel in progress workflows on pull_requests. 
+# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value +concurrency: + group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} + cancel-in-progress: true + +permissions: + contents: read + +env: + BUILD_CACHE_KEY: ${{ github.sha }} + CACHED_BUILD_PATHS: | + ${{ github.workspace }}/dist-serverless + +jobs: + test: + name: asgi, python ${{ matrix.python-version }}, ${{ matrix.os }} + runs-on: ${{ matrix.os }} + timeout-minutes: 45 + + strategy: + fail-fast: false + matrix: + python-version: ["3.7","3.8","3.9","3.10","3.11"] + # python3.6 reached EOL and is no longer being supported on + # new versions of hosted runners on Github Actions + # ubuntu-20.04 is the last version that supported python3.6 + # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877 + os: [ubuntu-20.04] + + steps: + - uses: actions/checkout@v3 + - uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + + - name: Setup Test Env + run: | + pip install codecov "tox>=3,<4" + + - name: Test asgi + timeout-minutes: 45 + shell: bash + run: | + set -x # print commands that are executed + coverage erase + + ./scripts/runtox.sh "${{ matrix.python-version }}-asgi" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch + coverage combine .coverage* + coverage xml -i + codecov --file coverage.xml + + check_required_tests: + name: All asgi tests passed or skipped + needs: test + # Always run this, even if a dependent job failed + if: always() + runs-on: ubuntu-20.04 + steps: + - name: Check for failures + if: contains(needs.test.result, 'failure') + run: | + echo "One of the dependent jobs have failed. You may need to re-run it." && exit 1 diff --git a/.github/workflows/test-integration-aws_lambda.yml b/.github/workflows/test-integration-aws_lambda.yml new file mode 100644 index 0000000000..22ed7f4945 --- /dev/null +++ b/.github/workflows/test-integration-aws_lambda.yml @@ -0,0 +1,73 @@ +name: Test aws_lambda + +on: + push: + branches: + - master + - release/** + + pull_request: + +# Cancel in progress workflows on pull_requests. 
+# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value +concurrency: + group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} + cancel-in-progress: true + +permissions: + contents: read + +env: + BUILD_CACHE_KEY: ${{ github.sha }} + CACHED_BUILD_PATHS: | + ${{ github.workspace }}/dist-serverless + +jobs: + test: + name: aws_lambda, python ${{ matrix.python-version }}, ${{ matrix.os }} + runs-on: ${{ matrix.os }} + timeout-minutes: 45 + + strategy: + fail-fast: false + matrix: + python-version: ["3.7"] + # python3.6 reached EOL and is no longer being supported on + # new versions of hosted runners on Github Actions + # ubuntu-20.04 is the last version that supported python3.6 + # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877 + os: [ubuntu-20.04] + + steps: + - uses: actions/checkout@v3 + - uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + + - name: Setup Test Env + run: | + pip install codecov "tox>=3,<4" + + - name: Test aws_lambda + timeout-minutes: 45 + shell: bash + run: | + set -x # print commands that are executed + coverage erase + + ./scripts/runtox.sh "${{ matrix.python-version }}-aws_lambda" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch + coverage combine .coverage* + coverage xml -i + codecov --file coverage.xml + + check_required_tests: + name: All aws_lambda tests passed or skipped + needs: test + # Always run this, even if a dependent job failed + if: always() + runs-on: ubuntu-20.04 + steps: + - name: Check for failures + if: contains(needs.test.result, 'failure') + run: | + echo "One of the dependent jobs have failed. You may need to re-run it." && exit 1 diff --git a/.github/workflows/test-integration-beam.yml b/.github/workflows/test-integration-beam.yml new file mode 100644 index 0000000000..03a484537c --- /dev/null +++ b/.github/workflows/test-integration-beam.yml @@ -0,0 +1,73 @@ +name: Test beam + +on: + push: + branches: + - master + - release/** + + pull_request: + +# Cancel in progress workflows on pull_requests. 
+# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value +concurrency: + group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} + cancel-in-progress: true + +permissions: + contents: read + +env: + BUILD_CACHE_KEY: ${{ github.sha }} + CACHED_BUILD_PATHS: | + ${{ github.workspace }}/dist-serverless + +jobs: + test: + name: beam, python ${{ matrix.python-version }}, ${{ matrix.os }} + runs-on: ${{ matrix.os }} + timeout-minutes: 45 + + strategy: + fail-fast: false + matrix: + python-version: ["3.7"] + # python3.6 reached EOL and is no longer being supported on + # new versions of hosted runners on Github Actions + # ubuntu-20.04 is the last version that supported python3.6 + # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877 + os: [ubuntu-20.04] + + steps: + - uses: actions/checkout@v3 + - uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + + - name: Setup Test Env + run: | + pip install codecov "tox>=3,<4" + + - name: Test beam + timeout-minutes: 45 + shell: bash + run: | + set -x # print commands that are executed + coverage erase + + ./scripts/runtox.sh "${{ matrix.python-version }}-beam" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch + coverage combine .coverage* + coverage xml -i + codecov --file coverage.xml + + check_required_tests: + name: All beam tests passed or skipped + needs: test + # Always run this, even if a dependent job failed + if: always() + runs-on: ubuntu-20.04 + steps: + - name: Check for failures + if: contains(needs.test.result, 'failure') + run: | + echo "One of the dependent jobs have failed. You may need to re-run it." && exit 1 diff --git a/.github/workflows/test-integration-boto3.yml b/.github/workflows/test-integration-boto3.yml new file mode 100644 index 0000000000..cbb4ec7db1 --- /dev/null +++ b/.github/workflows/test-integration-boto3.yml @@ -0,0 +1,73 @@ +name: Test boto3 + +on: + push: + branches: + - master + - release/** + + pull_request: + +# Cancel in progress workflows on pull_requests. 
+# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value +concurrency: + group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} + cancel-in-progress: true + +permissions: + contents: read + +env: + BUILD_CACHE_KEY: ${{ github.sha }} + CACHED_BUILD_PATHS: | + ${{ github.workspace }}/dist-serverless + +jobs: + test: + name: boto3, python ${{ matrix.python-version }}, ${{ matrix.os }} + runs-on: ${{ matrix.os }} + timeout-minutes: 45 + + strategy: + fail-fast: false + matrix: + python-version: ["2.7","3.6","3.7","3.8"] + # python3.6 reached EOL and is no longer being supported on + # new versions of hosted runners on Github Actions + # ubuntu-20.04 is the last version that supported python3.6 + # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877 + os: [ubuntu-20.04] + + steps: + - uses: actions/checkout@v3 + - uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + + - name: Setup Test Env + run: | + pip install codecov "tox>=3,<4" + + - name: Test boto3 + timeout-minutes: 45 + shell: bash + run: | + set -x # print commands that are executed + coverage erase + + ./scripts/runtox.sh "${{ matrix.python-version }}-boto3" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch + coverage combine .coverage* + coverage xml -i + codecov --file coverage.xml + + check_required_tests: + name: All boto3 tests passed or skipped + needs: test + # Always run this, even if a dependent job failed + if: always() + runs-on: ubuntu-20.04 + steps: + - name: Check for failures + if: contains(needs.test.result, 'failure') + run: | + echo "One of the dependent jobs have failed. You may need to re-run it." && exit 1 diff --git a/.github/workflows/test-integration-bottle.yml b/.github/workflows/test-integration-bottle.yml new file mode 100644 index 0000000000..60979bf5dd --- /dev/null +++ b/.github/workflows/test-integration-bottle.yml @@ -0,0 +1,73 @@ +name: Test bottle + +on: + push: + branches: + - master + - release/** + + pull_request: + +# Cancel in progress workflows on pull_requests. 
+# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value +concurrency: + group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} + cancel-in-progress: true + +permissions: + contents: read + +env: + BUILD_CACHE_KEY: ${{ github.sha }} + CACHED_BUILD_PATHS: | + ${{ github.workspace }}/dist-serverless + +jobs: + test: + name: bottle, python ${{ matrix.python-version }}, ${{ matrix.os }} + runs-on: ${{ matrix.os }} + timeout-minutes: 45 + + strategy: + fail-fast: false + matrix: + python-version: ["2.7","3.5","3.6","3.7","3.8","3.9","3.10","3.11"] + # python3.6 reached EOL and is no longer being supported on + # new versions of hosted runners on Github Actions + # ubuntu-20.04 is the last version that supported python3.6 + # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877 + os: [ubuntu-20.04] + + steps: + - uses: actions/checkout@v3 + - uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + + - name: Setup Test Env + run: | + pip install codecov "tox>=3,<4" + + - name: Test bottle + timeout-minutes: 45 + shell: bash + run: | + set -x # print commands that are executed + coverage erase + + ./scripts/runtox.sh "${{ matrix.python-version }}-bottle" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch + coverage combine .coverage* + coverage xml -i + codecov --file coverage.xml + + check_required_tests: + name: All bottle tests passed or skipped + needs: test + # Always run this, even if a dependent job failed + if: always() + runs-on: ubuntu-20.04 + steps: + - name: Check for failures + if: contains(needs.test.result, 'failure') + run: | + echo "One of the dependent jobs have failed. You may need to re-run it." && exit 1 diff --git a/.github/workflows/test-integration-celery.yml b/.github/workflows/test-integration-celery.yml new file mode 100644 index 0000000000..7042f8d493 --- /dev/null +++ b/.github/workflows/test-integration-celery.yml @@ -0,0 +1,73 @@ +name: Test celery + +on: + push: + branches: + - master + - release/** + + pull_request: + +# Cancel in progress workflows on pull_requests. 
+# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value +concurrency: + group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} + cancel-in-progress: true + +permissions: + contents: read + +env: + BUILD_CACHE_KEY: ${{ github.sha }} + CACHED_BUILD_PATHS: | + ${{ github.workspace }}/dist-serverless + +jobs: + test: + name: celery, python ${{ matrix.python-version }}, ${{ matrix.os }} + runs-on: ${{ matrix.os }} + timeout-minutes: 45 + + strategy: + fail-fast: false + matrix: + python-version: ["2.7","3.5","3.6","3.7","3.8","3.9","3.10"] + # python3.6 reached EOL and is no longer being supported on + # new versions of hosted runners on Github Actions + # ubuntu-20.04 is the last version that supported python3.6 + # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877 + os: [ubuntu-20.04] + + steps: + - uses: actions/checkout@v3 + - uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + + - name: Setup Test Env + run: | + pip install codecov "tox>=3,<4" + + - name: Test celery + timeout-minutes: 45 + shell: bash + run: | + set -x # print commands that are executed + coverage erase + + ./scripts/runtox.sh "${{ matrix.python-version }}-celery" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch + coverage combine .coverage* + coverage xml -i + codecov --file coverage.xml + + check_required_tests: + name: All celery tests passed or skipped + needs: test + # Always run this, even if a dependent job failed + if: always() + runs-on: ubuntu-20.04 + steps: + - name: Check for failures + if: contains(needs.test.result, 'failure') + run: | + echo "One of the dependent jobs have failed. You may need to re-run it." && exit 1 diff --git a/.github/workflows/test-integration-chalice.yml b/.github/workflows/test-integration-chalice.yml new file mode 100644 index 0000000000..d8240fe024 --- /dev/null +++ b/.github/workflows/test-integration-chalice.yml @@ -0,0 +1,73 @@ +name: Test chalice + +on: + push: + branches: + - master + - release/** + + pull_request: + +# Cancel in progress workflows on pull_requests. 
+# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value +concurrency: + group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} + cancel-in-progress: true + +permissions: + contents: read + +env: + BUILD_CACHE_KEY: ${{ github.sha }} + CACHED_BUILD_PATHS: | + ${{ github.workspace }}/dist-serverless + +jobs: + test: + name: chalice, python ${{ matrix.python-version }}, ${{ matrix.os }} + runs-on: ${{ matrix.os }} + timeout-minutes: 45 + + strategy: + fail-fast: false + matrix: + python-version: ["3.6","3.7","3.8"] + # python3.6 reached EOL and is no longer being supported on + # new versions of hosted runners on Github Actions + # ubuntu-20.04 is the last version that supported python3.6 + # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877 + os: [ubuntu-20.04] + + steps: + - uses: actions/checkout@v3 + - uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + + - name: Setup Test Env + run: | + pip install codecov "tox>=3,<4" + + - name: Test chalice + timeout-minutes: 45 + shell: bash + run: | + set -x # print commands that are executed + coverage erase + + ./scripts/runtox.sh "${{ matrix.python-version }}-chalice" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch + coverage combine .coverage* + coverage xml -i + codecov --file coverage.xml + + check_required_tests: + name: All chalice tests passed or skipped + needs: test + # Always run this, even if a dependent job failed + if: always() + runs-on: ubuntu-20.04 + steps: + - name: Check for failures + if: contains(needs.test.result, 'failure') + run: | + echo "One of the dependent jobs have failed. You may need to re-run it." && exit 1 diff --git a/.github/workflows/test-integration-django.yml b/.github/workflows/test-integration-django.yml new file mode 100644 index 0000000000..2e462a723a --- /dev/null +++ b/.github/workflows/test-integration-django.yml @@ -0,0 +1,91 @@ +name: Test django + +on: + push: + branches: + - master + - release/** + + pull_request: + +# Cancel in progress workflows on pull_requests. 
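+# Unlike most of the integration workflows, this one also provisions the
+# Postgres service container that the django test suite connects to (see the
+# services and env blocks below).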
+# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value +concurrency: + group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} + cancel-in-progress: true + +permissions: + contents: read + +env: + BUILD_CACHE_KEY: ${{ github.sha }} + CACHED_BUILD_PATHS: | + ${{ github.workspace }}/dist-serverless + +jobs: + test: + name: django, python ${{ matrix.python-version }}, ${{ matrix.os }} + runs-on: ${{ matrix.os }} + timeout-minutes: 45 + + strategy: + fail-fast: false + matrix: + python-version: ["2.7","3.5","3.6","3.7","3.8","3.9","3.10","3.11"] + # python3.6 reached EOL and is no longer being supported on + # new versions of hosted runners on Github Actions + # ubuntu-20.04 is the last version that supported python3.6 + # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877 + os: [ubuntu-20.04] + services: + postgres: + image: postgres + env: + POSTGRES_PASSWORD: sentry + # Set health checks to wait until postgres has started + options: >- + --health-cmd pg_isready + --health-interval 10s + --health-timeout 5s + --health-retries 5 + # Maps tcp port 5432 on service container to the host + ports: + - 5432:5432 + env: + SENTRY_PYTHON_TEST_POSTGRES_USER: postgres + SENTRY_PYTHON_TEST_POSTGRES_PASSWORD: sentry + SENTRY_PYTHON_TEST_POSTGRES_NAME: ci_test + + steps: + - uses: actions/checkout@v3 + - uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + + - name: Setup Test Env + run: | + pip install codecov "tox>=3,<4" + + - name: Test django + timeout-minutes: 45 + shell: bash + run: | + set -x # print commands that are executed + coverage erase + + ./scripts/runtox.sh "${{ matrix.python-version }}-django" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch + coverage combine .coverage* + coverage xml -i + codecov --file coverage.xml + + check_required_tests: + name: All django tests passed or skipped + needs: test + # Always run this, even if a dependent job failed + if: always() + runs-on: ubuntu-20.04 + steps: + - name: Check for failures + if: contains(needs.test.result, 'failure') + run: | + echo "One of the dependent jobs have failed. You may need to re-run it." && exit 1 diff --git a/.github/workflows/test-integration-falcon.yml b/.github/workflows/test-integration-falcon.yml new file mode 100644 index 0000000000..f69ac1d9cd --- /dev/null +++ b/.github/workflows/test-integration-falcon.yml @@ -0,0 +1,73 @@ +name: Test falcon + +on: + push: + branches: + - master + - release/** + + pull_request: + +# Cancel in progress workflows on pull_requests. 
+# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value +concurrency: + group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} + cancel-in-progress: true + +permissions: + contents: read + +env: + BUILD_CACHE_KEY: ${{ github.sha }} + CACHED_BUILD_PATHS: | + ${{ github.workspace }}/dist-serverless + +jobs: + test: + name: falcon, python ${{ matrix.python-version }}, ${{ matrix.os }} + runs-on: ${{ matrix.os }} + timeout-minutes: 45 + + strategy: + fail-fast: false + matrix: + python-version: ["2.7","3.5","3.6","3.7","3.8","3.9","3.10","3.11"] + # python3.6 reached EOL and is no longer being supported on + # new versions of hosted runners on Github Actions + # ubuntu-20.04 is the last version that supported python3.6 + # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877 + os: [ubuntu-20.04] + + steps: + - uses: actions/checkout@v3 + - uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + + - name: Setup Test Env + run: | + pip install codecov "tox>=3,<4" + + - name: Test falcon + timeout-minutes: 45 + shell: bash + run: | + set -x # print commands that are executed + coverage erase + + ./scripts/runtox.sh "${{ matrix.python-version }}-falcon" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch + coverage combine .coverage* + coverage xml -i + codecov --file coverage.xml + + check_required_tests: + name: All falcon tests passed or skipped + needs: test + # Always run this, even if a dependent job failed + if: always() + runs-on: ubuntu-20.04 + steps: + - name: Check for failures + if: contains(needs.test.result, 'failure') + run: | + echo "One of the dependent jobs have failed. You may need to re-run it." && exit 1 diff --git a/.github/workflows/test-integration-fastapi.yml b/.github/workflows/test-integration-fastapi.yml new file mode 100644 index 0000000000..1b6e4e24b5 --- /dev/null +++ b/.github/workflows/test-integration-fastapi.yml @@ -0,0 +1,73 @@ +name: Test fastapi + +on: + push: + branches: + - master + - release/** + + pull_request: + +# Cancel in progress workflows on pull_requests. 
+# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value +concurrency: + group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} + cancel-in-progress: true + +permissions: + contents: read + +env: + BUILD_CACHE_KEY: ${{ github.sha }} + CACHED_BUILD_PATHS: | + ${{ github.workspace }}/dist-serverless + +jobs: + test: + name: fastapi, python ${{ matrix.python-version }}, ${{ matrix.os }} + runs-on: ${{ matrix.os }} + timeout-minutes: 45 + + strategy: + fail-fast: false + matrix: + python-version: ["3.7","3.8","3.9","3.10","3.11"] + # python3.6 reached EOL and is no longer being supported on + # new versions of hosted runners on Github Actions + # ubuntu-20.04 is the last version that supported python3.6 + # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877 + os: [ubuntu-20.04] + + steps: + - uses: actions/checkout@v3 + - uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + + - name: Setup Test Env + run: | + pip install codecov "tox>=3,<4" + + - name: Test fastapi + timeout-minutes: 45 + shell: bash + run: | + set -x # print commands that are executed + coverage erase + + ./scripts/runtox.sh "${{ matrix.python-version }}-fastapi" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch + coverage combine .coverage* + coverage xml -i + codecov --file coverage.xml + + check_required_tests: + name: All fastapi tests passed or skipped + needs: test + # Always run this, even if a dependent job failed + if: always() + runs-on: ubuntu-20.04 + steps: + - name: Check for failures + if: contains(needs.test.result, 'failure') + run: | + echo "One of the dependent jobs have failed. You may need to re-run it." && exit 1 diff --git a/.github/workflows/test-integration-flask.yml b/.github/workflows/test-integration-flask.yml new file mode 100644 index 0000000000..91e50a4eac --- /dev/null +++ b/.github/workflows/test-integration-flask.yml @@ -0,0 +1,73 @@ +name: Test flask + +on: + push: + branches: + - master + - release/** + + pull_request: + +# Cancel in progress workflows on pull_requests. 
+# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value +concurrency: + group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} + cancel-in-progress: true + +permissions: + contents: read + +env: + BUILD_CACHE_KEY: ${{ github.sha }} + CACHED_BUILD_PATHS: | + ${{ github.workspace }}/dist-serverless + +jobs: + test: + name: flask, python ${{ matrix.python-version }}, ${{ matrix.os }} + runs-on: ${{ matrix.os }} + timeout-minutes: 45 + + strategy: + fail-fast: false + matrix: + python-version: ["2.7","3.5","3.6","3.7","3.8","3.9","3.10","3.11"] + # python3.6 reached EOL and is no longer being supported on + # new versions of hosted runners on Github Actions + # ubuntu-20.04 is the last version that supported python3.6 + # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877 + os: [ubuntu-20.04] + + steps: + - uses: actions/checkout@v3 + - uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + + - name: Setup Test Env + run: | + pip install codecov "tox>=3,<4" + + - name: Test flask + timeout-minutes: 45 + shell: bash + run: | + set -x # print commands that are executed + coverage erase + + ./scripts/runtox.sh "${{ matrix.python-version }}-flask" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch + coverage combine .coverage* + coverage xml -i + codecov --file coverage.xml + + check_required_tests: + name: All flask tests passed or skipped + needs: test + # Always run this, even if a dependent job failed + if: always() + runs-on: ubuntu-20.04 + steps: + - name: Check for failures + if: contains(needs.test.result, 'failure') + run: | + echo "One of the dependent jobs have failed. You may need to re-run it." && exit 1 diff --git a/.github/workflows/test-integration-gcp.yml b/.github/workflows/test-integration-gcp.yml new file mode 100644 index 0000000000..ca6275a537 --- /dev/null +++ b/.github/workflows/test-integration-gcp.yml @@ -0,0 +1,73 @@ +name: Test gcp + +on: + push: + branches: + - master + - release/** + + pull_request: + +# Cancel in progress workflows on pull_requests. 
+# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value +concurrency: + group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} + cancel-in-progress: true + +permissions: + contents: read + +env: + BUILD_CACHE_KEY: ${{ github.sha }} + CACHED_BUILD_PATHS: | + ${{ github.workspace }}/dist-serverless + +jobs: + test: + name: gcp, python ${{ matrix.python-version }}, ${{ matrix.os }} + runs-on: ${{ matrix.os }} + timeout-minutes: 45 + + strategy: + fail-fast: false + matrix: + python-version: ["3.7"] + # python3.6 reached EOL and is no longer being supported on + # new versions of hosted runners on Github Actions + # ubuntu-20.04 is the last version that supported python3.6 + # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877 + os: [ubuntu-20.04] + + steps: + - uses: actions/checkout@v3 + - uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + + - name: Setup Test Env + run: | + pip install codecov "tox>=3,<4" + + - name: Test gcp + timeout-minutes: 45 + shell: bash + run: | + set -x # print commands that are executed + coverage erase + + ./scripts/runtox.sh "${{ matrix.python-version }}-gcp" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch + coverage combine .coverage* + coverage xml -i + codecov --file coverage.xml + + check_required_tests: + name: All gcp tests passed or skipped + needs: test + # Always run this, even if a dependent job failed + if: always() + runs-on: ubuntu-20.04 + steps: + - name: Check for failures + if: contains(needs.test.result, 'failure') + run: | + echo "One of the dependent jobs have failed. You may need to re-run it." && exit 1 diff --git a/.github/workflows/test-integration-httpx.yml b/.github/workflows/test-integration-httpx.yml new file mode 100644 index 0000000000..d8ac90e7bf --- /dev/null +++ b/.github/workflows/test-integration-httpx.yml @@ -0,0 +1,73 @@ +name: Test httpx + +on: + push: + branches: + - master + - release/** + + pull_request: + +# Cancel in progress workflows on pull_requests. 
+# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value +concurrency: + group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} + cancel-in-progress: true + +permissions: + contents: read + +env: + BUILD_CACHE_KEY: ${{ github.sha }} + CACHED_BUILD_PATHS: | + ${{ github.workspace }}/dist-serverless + +jobs: + test: + name: httpx, python ${{ matrix.python-version }}, ${{ matrix.os }} + runs-on: ${{ matrix.os }} + timeout-minutes: 45 + + strategy: + fail-fast: false + matrix: + python-version: ["3.6","3.7","3.8","3.9","3.10","3.11"] + # python3.6 reached EOL and is no longer being supported on + # new versions of hosted runners on Github Actions + # ubuntu-20.04 is the last version that supported python3.6 + # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877 + os: [ubuntu-20.04] + + steps: + - uses: actions/checkout@v3 + - uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + + - name: Setup Test Env + run: | + pip install codecov "tox>=3,<4" + + - name: Test httpx + timeout-minutes: 45 + shell: bash + run: | + set -x # print commands that are executed + coverage erase + + ./scripts/runtox.sh "${{ matrix.python-version }}-httpx" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch + coverage combine .coverage* + coverage xml -i + codecov --file coverage.xml + + check_required_tests: + name: All httpx tests passed or skipped + needs: test + # Always run this, even if a dependent job failed + if: always() + runs-on: ubuntu-20.04 + steps: + - name: Check for failures + if: contains(needs.test.result, 'failure') + run: | + echo "One of the dependent jobs have failed. You may need to re-run it." && exit 1 diff --git a/.github/workflows/test-integration-opentelemetry.yml b/.github/workflows/test-integration-opentelemetry.yml new file mode 100644 index 0000000000..7c2caa07a5 --- /dev/null +++ b/.github/workflows/test-integration-opentelemetry.yml @@ -0,0 +1,73 @@ +name: Test opentelemetry + +on: + push: + branches: + - master + - release/** + + pull_request: + +# Cancel in progress workflows on pull_requests. 
+# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value +concurrency: + group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} + cancel-in-progress: true + +permissions: + contents: read + +env: + BUILD_CACHE_KEY: ${{ github.sha }} + CACHED_BUILD_PATHS: | + ${{ github.workspace }}/dist-serverless + +jobs: + test: + name: opentelemetry, python ${{ matrix.python-version }}, ${{ matrix.os }} + runs-on: ${{ matrix.os }} + timeout-minutes: 45 + + strategy: + fail-fast: false + matrix: + python-version: ["3.7","3.8","3.9","3.10","3.11"] + # python3.6 reached EOL and is no longer being supported on + # new versions of hosted runners on Github Actions + # ubuntu-20.04 is the last version that supported python3.6 + # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877 + os: [ubuntu-20.04] + + steps: + - uses: actions/checkout@v3 + - uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + + - name: Setup Test Env + run: | + pip install codecov "tox>=3,<4" + + - name: Test opentelemetry + timeout-minutes: 45 + shell: bash + run: | + set -x # print commands that are executed + coverage erase + + ./scripts/runtox.sh "${{ matrix.python-version }}-opentelemetry" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch + coverage combine .coverage* + coverage xml -i + codecov --file coverage.xml + + check_required_tests: + name: All opentelemetry tests passed or skipped + needs: test + # Always run this, even if a dependent job failed + if: always() + runs-on: ubuntu-20.04 + steps: + - name: Check for failures + if: contains(needs.test.result, 'failure') + run: | + echo "One of the dependent jobs have failed. You may need to re-run it." && exit 1 diff --git a/.github/workflows/test-integration-pure_eval.yml b/.github/workflows/test-integration-pure_eval.yml new file mode 100644 index 0000000000..2f72e39bf4 --- /dev/null +++ b/.github/workflows/test-integration-pure_eval.yml @@ -0,0 +1,73 @@ +name: Test pure_eval + +on: + push: + branches: + - master + - release/** + + pull_request: + +# Cancel in progress workflows on pull_requests. 
+# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value +concurrency: + group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} + cancel-in-progress: true + +permissions: + contents: read + +env: + BUILD_CACHE_KEY: ${{ github.sha }} + CACHED_BUILD_PATHS: | + ${{ github.workspace }}/dist-serverless + +jobs: + test: + name: pure_eval, python ${{ matrix.python-version }}, ${{ matrix.os }} + runs-on: ${{ matrix.os }} + timeout-minutes: 45 + + strategy: + fail-fast: false + matrix: + python-version: ["3.5","3.6","3.7","3.8","3.9","3.10","3.11"] + # python3.6 reached EOL and is no longer being supported on + # new versions of hosted runners on Github Actions + # ubuntu-20.04 is the last version that supported python3.6 + # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877 + os: [ubuntu-20.04] + + steps: + - uses: actions/checkout@v3 + - uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + + - name: Setup Test Env + run: | + pip install codecov "tox>=3,<4" + + - name: Test pure_eval + timeout-minutes: 45 + shell: bash + run: | + set -x # print commands that are executed + coverage erase + + ./scripts/runtox.sh "${{ matrix.python-version }}-pure_eval" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch + coverage combine .coverage* + coverage xml -i + codecov --file coverage.xml + + check_required_tests: + name: All pure_eval tests passed or skipped + needs: test + # Always run this, even if a dependent job failed + if: always() + runs-on: ubuntu-20.04 + steps: + - name: Check for failures + if: contains(needs.test.result, 'failure') + run: | + echo "One of the dependent jobs have failed. You may need to re-run it." && exit 1 diff --git a/.github/workflows/test-integration-pymongo.yml b/.github/workflows/test-integration-pymongo.yml new file mode 100644 index 0000000000..b65fe7f74f --- /dev/null +++ b/.github/workflows/test-integration-pymongo.yml @@ -0,0 +1,73 @@ +name: Test pymongo + +on: + push: + branches: + - master + - release/** + + pull_request: + +# Cancel in progress workflows on pull_requests. 
+# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value +concurrency: + group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} + cancel-in-progress: true + +permissions: + contents: read + +env: + BUILD_CACHE_KEY: ${{ github.sha }} + CACHED_BUILD_PATHS: | + ${{ github.workspace }}/dist-serverless + +jobs: + test: + name: pymongo, python ${{ matrix.python-version }}, ${{ matrix.os }} + runs-on: ${{ matrix.os }} + timeout-minutes: 45 + + strategy: + fail-fast: false + matrix: + python-version: ["2.7","3.6","3.7","3.8","3.9","3.10","3.11"] + # python3.6 reached EOL and is no longer being supported on + # new versions of hosted runners on Github Actions + # ubuntu-20.04 is the last version that supported python3.6 + # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877 + os: [ubuntu-20.04] + + steps: + - uses: actions/checkout@v3 + - uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + + - name: Setup Test Env + run: | + pip install codecov "tox>=3,<4" + + - name: Test pymongo + timeout-minutes: 45 + shell: bash + run: | + set -x # print commands that are executed + coverage erase + + ./scripts/runtox.sh "${{ matrix.python-version }}-pymongo" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch + coverage combine .coverage* + coverage xml -i + codecov --file coverage.xml + + check_required_tests: + name: All pymongo tests passed or skipped + needs: test + # Always run this, even if a dependent job failed + if: always() + runs-on: ubuntu-20.04 + steps: + - name: Check for failures + if: contains(needs.test.result, 'failure') + run: | + echo "One of the dependent jobs have failed. You may need to re-run it." && exit 1 diff --git a/.github/workflows/test-integration-pyramid.yml b/.github/workflows/test-integration-pyramid.yml new file mode 100644 index 0000000000..bb8faeab84 --- /dev/null +++ b/.github/workflows/test-integration-pyramid.yml @@ -0,0 +1,73 @@ +name: Test pyramid + +on: + push: + branches: + - master + - release/** + + pull_request: + +# Cancel in progress workflows on pull_requests. 
+# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value
+concurrency:
+  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
+  cancel-in-progress: true
+
+permissions:
+  contents: read
+
+env:
+  BUILD_CACHE_KEY: ${{ github.sha }}
+  CACHED_BUILD_PATHS: |
+    ${{ github.workspace }}/dist-serverless
+
+jobs:
+  test:
+    name: pyramid, python ${{ matrix.python-version }}, ${{ matrix.os }}
+    runs-on: ${{ matrix.os }}
+    timeout-minutes: 45
+
+    strategy:
+      fail-fast: false
+      matrix:
+        python-version: ["2.7","3.5","3.6","3.7","3.8","3.9","3.10","3.11"]
+        # python3.6 reached EOL and is no longer supported on
+        # new versions of hosted runners on GitHub Actions
+        # ubuntu-20.04 is the last version that supported python3.6
+        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
+        os: [ubuntu-20.04]
+
+    steps:
+      - uses: actions/checkout@v3
+      - uses: actions/setup-python@v4
+        with:
+          python-version: ${{ matrix.python-version }}
+
+      - name: Setup Test Env
+        run: |
+          pip install codecov "tox>=3,<4"
+
+      - name: Test pyramid
+        timeout-minutes: 45
+        shell: bash
+        run: |
+          set -x # print commands that are executed
+          coverage erase
+
+          ./scripts/runtox.sh "${{ matrix.python-version }}-pyramid" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+          coverage combine .coverage*
+          coverage xml -i
+          codecov --file coverage.xml
+
+  check_required_tests:
+    name: All pyramid tests passed or skipped
+    needs: test
+    # Always run this, even if a dependent job failed
+    if: always()
+    runs-on: ubuntu-20.04
+    steps:
+      - name: Check for failures
+        if: contains(needs.test.result, 'failure')
+        run: |
+          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
diff --git a/.github/workflows/test-integration-quart.yml b/.github/workflows/test-integration-quart.yml
new file mode 100644
index 0000000000..b6ca340ac6
--- /dev/null
+++ b/.github/workflows/test-integration-quart.yml
@@ -0,0 +1,73 @@
+name: Test quart
+
+on:
+  push:
+    branches:
+      - master
+      - release/**
+
+  pull_request:
+
+# Cancel in-progress workflows on pull_requests.
+# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value
+concurrency:
+  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
+  cancel-in-progress: true
+
+permissions:
+  contents: read
+
+env:
+  BUILD_CACHE_KEY: ${{ github.sha }}
+  CACHED_BUILD_PATHS: |
+    ${{ github.workspace }}/dist-serverless
+
+jobs:
+  test:
+    name: quart, python ${{ matrix.python-version }}, ${{ matrix.os }}
+    runs-on: ${{ matrix.os }}
+    timeout-minutes: 45
+
+    strategy:
+      fail-fast: false
+      matrix:
+        python-version: ["3.7","3.8","3.9","3.10","3.11"]
+        # python3.6 reached EOL and is no longer supported on
+        # new versions of hosted runners on GitHub Actions
+        # ubuntu-20.04 is the last version that supported python3.6
+        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
+        os: [ubuntu-20.04]
+
+    steps:
+      - uses: actions/checkout@v3
+      - uses: actions/setup-python@v4
+        with:
+          python-version: ${{ matrix.python-version }}
+
+      - name: Setup Test Env
+        run: |
+          pip install codecov "tox>=3,<4"
+
+      - name: Test quart
+        timeout-minutes: 45
+        shell: bash
+        run: |
+          set -x # print commands that are executed
+          coverage erase
+
+          ./scripts/runtox.sh "${{ matrix.python-version }}-quart" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+          coverage combine .coverage*
+          coverage xml -i
+          codecov --file coverage.xml
+
+  check_required_tests:
+    name: All quart tests passed or skipped
+    needs: test
+    # Always run this, even if a dependent job failed
+    if: always()
+    runs-on: ubuntu-20.04
+    steps:
+      - name: Check for failures
+        if: contains(needs.test.result, 'failure')
+        run: |
+          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
diff --git a/.github/workflows/test-integration-redis.yml b/.github/workflows/test-integration-redis.yml
new file mode 100644
index 0000000000..7d5eb18fb9
--- /dev/null
+++ b/.github/workflows/test-integration-redis.yml
@@ -0,0 +1,73 @@
+name: Test redis
+
+on:
+  push:
+    branches:
+      - master
+      - release/**
+
+  pull_request:
+
+# Cancel in-progress workflows on pull_requests.
+# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value
+concurrency:
+  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
+  cancel-in-progress: true
+
+permissions:
+  contents: read
+
+env:
+  BUILD_CACHE_KEY: ${{ github.sha }}
+  CACHED_BUILD_PATHS: |
+    ${{ github.workspace }}/dist-serverless
+
+jobs:
+  test:
+    name: redis, python ${{ matrix.python-version }}, ${{ matrix.os }}
+    runs-on: ${{ matrix.os }}
+    timeout-minutes: 45
+
+    strategy:
+      fail-fast: false
+      matrix:
+        python-version: ["2.7","3.7","3.8","3.9"]
+        # python3.6 reached EOL and is no longer supported on
+        # new versions of hosted runners on GitHub Actions
+        # ubuntu-20.04 is the last version that supported python3.6
+        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
+        os: [ubuntu-20.04]
+
+    steps:
+      - uses: actions/checkout@v3
+      - uses: actions/setup-python@v4
+        with:
+          python-version: ${{ matrix.python-version }}
+
+      - name: Setup Test Env
+        run: |
+          pip install codecov "tox>=3,<4"
+
+      - name: Test redis
+        timeout-minutes: 45
+        shell: bash
+        run: |
+          set -x # print commands that are executed
+          coverage erase
+
+          ./scripts/runtox.sh "${{ matrix.python-version }}-redis" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+          coverage combine .coverage*
+          coverage xml -i
+          codecov --file coverage.xml
+
+  check_required_tests:
+    name: All redis tests passed or skipped
+    needs: test
+    # Always run this, even if a dependent job failed
+    if: always()
+    runs-on: ubuntu-20.04
+    steps:
+      - name: Check for failures
+        if: contains(needs.test.result, 'failure')
+        run: |
+          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
diff --git a/.github/workflows/test-integration-rediscluster.yml b/.github/workflows/test-integration-rediscluster.yml
new file mode 100644
index 0000000000..453d4984a9
--- /dev/null
+++ b/.github/workflows/test-integration-rediscluster.yml
@@ -0,0 +1,73 @@
+name: Test rediscluster
+
+on:
+  push:
+    branches:
+      - master
+      - release/**
+
+  pull_request:
+
+# Cancel in-progress workflows on pull_requests.
+# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value
+concurrency:
+  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
+  cancel-in-progress: true
+
+permissions:
+  contents: read
+
+env:
+  BUILD_CACHE_KEY: ${{ github.sha }}
+  CACHED_BUILD_PATHS: |
+    ${{ github.workspace }}/dist-serverless
+
+jobs:
+  test:
+    name: rediscluster, python ${{ matrix.python-version }}, ${{ matrix.os }}
+    runs-on: ${{ matrix.os }}
+    timeout-minutes: 45
+
+    strategy:
+      fail-fast: false
+      matrix:
+        python-version: ["2.7","3.7","3.8","3.9"]
+        # python3.6 reached EOL and is no longer supported on
+        # new versions of hosted runners on GitHub Actions
+        # ubuntu-20.04 is the last version that supported python3.6
+        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
+        os: [ubuntu-20.04]
+
+    steps:
+      - uses: actions/checkout@v3
+      - uses: actions/setup-python@v4
+        with:
+          python-version: ${{ matrix.python-version }}
+
+      - name: Setup Test Env
+        run: |
+          pip install codecov "tox>=3,<4"
+
+      - name: Test rediscluster
+        timeout-minutes: 45
+        shell: bash
+        run: |
+          set -x # print commands that are executed
+          coverage erase
+
+          ./scripts/runtox.sh "${{ matrix.python-version }}-rediscluster" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+          coverage combine .coverage*
+          coverage xml -i
+          codecov --file coverage.xml
+
+  check_required_tests:
+    name: All rediscluster tests passed or skipped
+    needs: test
+    # Always run this, even if a dependent job failed
+    if: always()
+    runs-on: ubuntu-20.04
+    steps:
+      - name: Check for failures
+        if: contains(needs.test.result, 'failure')
+        run: |
+          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
diff --git a/.github/workflows/test-integration-requests.yml b/.github/workflows/test-integration-requests.yml
new file mode 100644
index 0000000000..d07b8a7ec1
--- /dev/null
+++ b/.github/workflows/test-integration-requests.yml
@@ -0,0 +1,73 @@
+name: Test requests
+
+on:
+  push:
+    branches:
+      - master
+      - release/**
+
+  pull_request:
+
+# Cancel in-progress workflows on pull_requests.
+# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value
+concurrency:
+  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
+  cancel-in-progress: true
+
+permissions:
+  contents: read
+
+env:
+  BUILD_CACHE_KEY: ${{ github.sha }}
+  CACHED_BUILD_PATHS: |
+    ${{ github.workspace }}/dist-serverless
+
+jobs:
+  test:
+    name: requests, python ${{ matrix.python-version }}, ${{ matrix.os }}
+    runs-on: ${{ matrix.os }}
+    timeout-minutes: 45
+
+    strategy:
+      fail-fast: false
+      matrix:
+        python-version: ["2.7","3.8","3.9"]
+        # python3.6 reached EOL and is no longer supported on
+        # new versions of hosted runners on GitHub Actions
+        # ubuntu-20.04 is the last version that supported python3.6
+        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
+        os: [ubuntu-20.04]
+
+    steps:
+      - uses: actions/checkout@v3
+      - uses: actions/setup-python@v4
+        with:
+          python-version: ${{ matrix.python-version }}
+
+      - name: Setup Test Env
+        run: |
+          pip install codecov "tox>=3,<4"
+
+      - name: Test requests
+        timeout-minutes: 45
+        shell: bash
+        run: |
+          set -x # print commands that are executed
+          coverage erase
+
+          ./scripts/runtox.sh "${{ matrix.python-version }}-requests" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+          coverage combine .coverage*
+          coverage xml -i
+          codecov --file coverage.xml
+
+  check_required_tests:
+    name: All requests tests passed or skipped
+    needs: test
+    # Always run this, even if a dependent job failed
+    if: always()
+    runs-on: ubuntu-20.04
+    steps:
+      - name: Check for failures
+        if: contains(needs.test.result, 'failure')
+        run: |
+          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
diff --git a/.github/workflows/test-integration-rq.yml b/.github/workflows/test-integration-rq.yml
new file mode 100644
index 0000000000..78b0b44e29
--- /dev/null
+++ b/.github/workflows/test-integration-rq.yml
@@ -0,0 +1,73 @@
+name: Test rq
+
+on:
+  push:
+    branches:
+      - master
+      - release/**
+
+  pull_request:
+
+# Cancel in-progress workflows on pull_requests.
+# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value
+concurrency:
+  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
+  cancel-in-progress: true
+
+permissions:
+  contents: read
+
+env:
+  BUILD_CACHE_KEY: ${{ github.sha }}
+  CACHED_BUILD_PATHS: |
+    ${{ github.workspace }}/dist-serverless
+
+jobs:
+  test:
+    name: rq, python ${{ matrix.python-version }}, ${{ matrix.os }}
+    runs-on: ${{ matrix.os }}
+    timeout-minutes: 45
+
+    strategy:
+      fail-fast: false
+      matrix:
+        python-version: ["2.7","3.5","3.6","3.7","3.8","3.9","3.10","3.11"]
+        # python3.6 reached EOL and is no longer supported on
+        # new versions of hosted runners on GitHub Actions
+        # ubuntu-20.04 is the last version that supported python3.6
+        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
+        os: [ubuntu-20.04]
+
+    steps:
+      - uses: actions/checkout@v3
+      - uses: actions/setup-python@v4
+        with:
+          python-version: ${{ matrix.python-version }}
+
+      - name: Setup Test Env
+        run: |
+          pip install codecov "tox>=3,<4"
+
+      - name: Test rq
+        timeout-minutes: 45
+        shell: bash
+        run: |
+          set -x # print commands that are executed
+          coverage erase
+
+          ./scripts/runtox.sh "${{ matrix.python-version }}-rq" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+          coverage combine .coverage*
+          coverage xml -i
+          codecov --file coverage.xml
+
+  check_required_tests:
+    name: All rq tests passed or skipped
+    needs: test
+    # Always run this, even if a dependent job failed
+    if: always()
+    runs-on: ubuntu-20.04
+    steps:
+      - name: Check for failures
+        if: contains(needs.test.result, 'failure')
+        run: |
+          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
diff --git a/.github/workflows/test-integration-sanic.yml b/.github/workflows/test-integration-sanic.yml
new file mode 100644
index 0000000000..aae23aad58
--- /dev/null
+++ b/.github/workflows/test-integration-sanic.yml
@@ -0,0 +1,73 @@
+name: Test sanic
+
+on:
+  push:
+    branches:
+      - master
+      - release/**
+
+  pull_request:
+
+# Cancel in-progress workflows on pull_requests.
+# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value
+concurrency:
+  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
+  cancel-in-progress: true
+
+permissions:
+  contents: read
+
+env:
+  BUILD_CACHE_KEY: ${{ github.sha }}
+  CACHED_BUILD_PATHS: |
+    ${{ github.workspace }}/dist-serverless
+
+jobs:
+  test:
+    name: sanic, python ${{ matrix.python-version }}, ${{ matrix.os }}
+    runs-on: ${{ matrix.os }}
+    timeout-minutes: 45
+
+    strategy:
+      fail-fast: false
+      matrix:
+        python-version: ["3.5","3.6","3.7","3.8","3.9","3.10","3.11"]
+        # python3.6 reached EOL and is no longer supported on
+        # new versions of hosted runners on GitHub Actions
+        # ubuntu-20.04 is the last version that supported python3.6
+        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
+        os: [ubuntu-20.04]
+
+    steps:
+      - uses: actions/checkout@v3
+      - uses: actions/setup-python@v4
+        with:
+          python-version: ${{ matrix.python-version }}
+
+      - name: Setup Test Env
+        run: |
+          pip install codecov "tox>=3,<4"
+
+      - name: Test sanic
+        timeout-minutes: 45
+        shell: bash
+        run: |
+          set -x # print commands that are executed
+          coverage erase
+
+          ./scripts/runtox.sh "${{ matrix.python-version }}-sanic" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+          coverage combine .coverage*
+          coverage xml -i
+          codecov --file coverage.xml
+
+  check_required_tests:
+    name: All sanic tests passed or skipped
+    needs: test
+    # Always run this, even if a dependent job failed
+    if: always()
+    runs-on: ubuntu-20.04
+    steps:
+      - name: Check for failures
+        if: contains(needs.test.result, 'failure')
+        run: |
+          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
diff --git a/.github/workflows/test-integration-sqlalchemy.yml b/.github/workflows/test-integration-sqlalchemy.yml
new file mode 100644
index 0000000000..9bdb5064ce
--- /dev/null
+++ b/.github/workflows/test-integration-sqlalchemy.yml
@@ -0,0 +1,73 @@
+name: Test sqlalchemy
+
+on:
+  push:
+    branches:
+      - master
+      - release/**
+
+  pull_request:
+
+# Cancel in-progress workflows on pull_requests.
+# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value
+concurrency:
+  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
+  cancel-in-progress: true
+
+permissions:
+  contents: read
+
+env:
+  BUILD_CACHE_KEY: ${{ github.sha }}
+  CACHED_BUILD_PATHS: |
+    ${{ github.workspace }}/dist-serverless
+
+jobs:
+  test:
+    name: sqlalchemy, python ${{ matrix.python-version }}, ${{ matrix.os }}
+    runs-on: ${{ matrix.os }}
+    timeout-minutes: 45
+
+    strategy:
+      fail-fast: false
+      matrix:
+        python-version: ["2.7","3.7","3.8","3.9","3.10","3.11"]
+        # python3.6 reached EOL and is no longer supported on
+        # new versions of hosted runners on GitHub Actions
+        # ubuntu-20.04 is the last version that supported python3.6
+        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
+        os: [ubuntu-20.04]
+
+    steps:
+      - uses: actions/checkout@v3
+      - uses: actions/setup-python@v4
+        with:
+          python-version: ${{ matrix.python-version }}
+
+      - name: Setup Test Env
+        run: |
+          pip install codecov "tox>=3,<4"
+
+      - name: Test sqlalchemy
+        timeout-minutes: 45
+        shell: bash
+        run: |
+          set -x # print commands that are executed
+          coverage erase
+
+          ./scripts/runtox.sh "${{ matrix.python-version }}-sqlalchemy" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+          coverage combine .coverage*
+          coverage xml -i
+          codecov --file coverage.xml
+
+  check_required_tests:
+    name: All sqlalchemy tests passed or skipped
+    needs: test
+    # Always run this, even if a dependent job failed
+    if: always()
+    runs-on: ubuntu-20.04
+    steps:
+      - name: Check for failures
+        if: contains(needs.test.result, 'failure')
+        run: |
+          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
diff --git a/.github/workflows/test-integration-starlette.yml b/.github/workflows/test-integration-starlette.yml
new file mode 100644
index 0000000000..8ebe2442d0
--- /dev/null
+++ b/.github/workflows/test-integration-starlette.yml
@@ -0,0 +1,73 @@
+name: Test starlette
+
+on:
+  push:
+    branches:
+      - master
+      - release/**
+
+  pull_request:
+
+# Cancel in-progress workflows on pull_requests.
+# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value
+concurrency:
+  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
+  cancel-in-progress: true
+
+permissions:
+  contents: read
+
+env:
+  BUILD_CACHE_KEY: ${{ github.sha }}
+  CACHED_BUILD_PATHS: |
+    ${{ github.workspace }}/dist-serverless
+
+jobs:
+  test:
+    name: starlette, python ${{ matrix.python-version }}, ${{ matrix.os }}
+    runs-on: ${{ matrix.os }}
+    timeout-minutes: 45
+
+    strategy:
+      fail-fast: false
+      matrix:
+        python-version: ["3.7","3.8","3.9","3.10","3.11"]
+        # python3.6 reached EOL and is no longer supported on
+        # new versions of hosted runners on GitHub Actions
+        # ubuntu-20.04 is the last version that supported python3.6
+        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
+        os: [ubuntu-20.04]
+
+    steps:
+      - uses: actions/checkout@v3
+      - uses: actions/setup-python@v4
+        with:
+          python-version: ${{ matrix.python-version }}
+
+      - name: Setup Test Env
+        run: |
+          pip install codecov "tox>=3,<4"
+
+      - name: Test starlette
+        timeout-minutes: 45
+        shell: bash
+        run: |
+          set -x # print commands that are executed
+          coverage erase
+
+          ./scripts/runtox.sh "${{ matrix.python-version }}-starlette" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+          coverage combine .coverage*
+          coverage xml -i
+          codecov --file coverage.xml
+
+  check_required_tests:
+    name: All starlette tests passed or skipped
+    needs: test
+    # Always run this, even if a dependent job failed
+    if: always()
+    runs-on: ubuntu-20.04
+    steps:
+      - name: Check for failures
+        if: contains(needs.test.result, 'failure')
+        run: |
+          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
diff --git a/.github/workflows/test-integration-starlite.yml b/.github/workflows/test-integration-starlite.yml
new file mode 100644
index 0000000000..8a40f7d48c
--- /dev/null
+++ b/.github/workflows/test-integration-starlite.yml
@@ -0,0 +1,73 @@
+name: Test starlite
+
+on:
+  push:
+    branches:
+      - master
+      - release/**
+
+  pull_request:
+
+# Cancel in-progress workflows on pull_requests.
+# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value
+concurrency:
+  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
+  cancel-in-progress: true
+
+permissions:
+  contents: read
+
+env:
+  BUILD_CACHE_KEY: ${{ github.sha }}
+  CACHED_BUILD_PATHS: |
+    ${{ github.workspace }}/dist-serverless
+
+jobs:
+  test:
+    name: starlite, python ${{ matrix.python-version }}, ${{ matrix.os }}
+    runs-on: ${{ matrix.os }}
+    timeout-minutes: 45
+
+    strategy:
+      fail-fast: false
+      matrix:
+        python-version: ["3.8","3.9","3.10","3.11"]
+        # python3.6 reached EOL and is no longer supported on
+        # new versions of hosted runners on GitHub Actions
+        # ubuntu-20.04 is the last version that supported python3.6
+        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
+        os: [ubuntu-20.04]
+
+    steps:
+      - uses: actions/checkout@v3
+      - uses: actions/setup-python@v4
+        with:
+          python-version: ${{ matrix.python-version }}
+
+      - name: Setup Test Env
+        run: |
+          pip install codecov "tox>=3,<4"
+
+      - name: Test starlite
+        timeout-minutes: 45
+        shell: bash
+        run: |
+          set -x # print commands that are executed
+          coverage erase
+
+          ./scripts/runtox.sh "${{ matrix.python-version }}-starlite" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+          coverage combine .coverage*
+          coverage xml -i
+          codecov --file coverage.xml
+
+  check_required_tests:
+    name: All starlite tests passed or skipped
+    needs: test
+    # Always run this, even if a dependent job failed
+    if: always()
+    runs-on: ubuntu-20.04
+    steps:
+      - name: Check for failures
+        if: contains(needs.test.result, 'failure')
+        run: |
+          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
diff --git a/.github/workflows/test-integration-tornado.yml b/.github/workflows/test-integration-tornado.yml
new file mode 100644
index 0000000000..05055b1e9d
--- /dev/null
+++ b/.github/workflows/test-integration-tornado.yml
@@ -0,0 +1,73 @@
+name: Test tornado
+
+on:
+  push:
+    branches:
+      - master
+      - release/**
+
+  pull_request:
+
+# Cancel in-progress workflows on pull_requests.
+# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value
+concurrency:
+  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
+  cancel-in-progress: true
+
+permissions:
+  contents: read
+
+env:
+  BUILD_CACHE_KEY: ${{ github.sha }}
+  CACHED_BUILD_PATHS: |
+    ${{ github.workspace }}/dist-serverless
+
+jobs:
+  test:
+    name: tornado, python ${{ matrix.python-version }}, ${{ matrix.os }}
+    runs-on: ${{ matrix.os }}
+    timeout-minutes: 45
+
+    strategy:
+      fail-fast: false
+      matrix:
+        python-version: ["3.7","3.8","3.9","3.10","3.11"]
+        # python3.6 reached EOL and is no longer supported on
+        # new versions of hosted runners on GitHub Actions
+        # ubuntu-20.04 is the last version that supported python3.6
+        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
+        os: [ubuntu-20.04]
+
+    steps:
+      - uses: actions/checkout@v3
+      - uses: actions/setup-python@v4
+        with:
+          python-version: ${{ matrix.python-version }}
+
+      - name: Setup Test Env
+        run: |
+          pip install codecov "tox>=3,<4"
+
+      - name: Test tornado
+        timeout-minutes: 45
+        shell: bash
+        run: |
+          set -x # print commands that are executed
+          coverage erase
+
+          ./scripts/runtox.sh "${{ matrix.python-version }}-tornado" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+          coverage combine .coverage*
+          coverage xml -i
+          codecov --file coverage.xml
+
+  check_required_tests:
+    name: All tornado tests passed or skipped
+    needs: test
+    # Always run this, even if a dependent job failed
+    if: always()
+    runs-on: ubuntu-20.04
+    steps:
+      - name: Check for failures
+        if: contains(needs.test.result, 'failure')
+        run: |
+          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
diff --git a/.github/workflows/test-integration-trytond.yml b/.github/workflows/test-integration-trytond.yml
new file mode 100644
index 0000000000..b8d6497e6d
--- /dev/null
+++ b/.github/workflows/test-integration-trytond.yml
@@ -0,0 +1,73 @@
+name: Test trytond
+
+on:
+  push:
+    branches:
+      - master
+      - release/**
+
+  pull_request:
+
+# Cancel in-progress workflows on pull_requests.
+# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value
+concurrency:
+  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
+  cancel-in-progress: true
+
+permissions:
+  contents: read
+
+env:
+  BUILD_CACHE_KEY: ${{ github.sha }}
+  CACHED_BUILD_PATHS: |
+    ${{ github.workspace }}/dist-serverless
+
+jobs:
+  test:
+    name: trytond, python ${{ matrix.python-version }}, ${{ matrix.os }}
+    runs-on: ${{ matrix.os }}
+    timeout-minutes: 45
+
+    strategy:
+      fail-fast: false
+      matrix:
+        python-version: ["3.5","3.6","3.7","3.8","3.9","3.10","3.11"]
+        # python3.6 reached EOL and is no longer supported on
+        # new versions of hosted runners on GitHub Actions
+        # ubuntu-20.04 is the last version that supported python3.6
+        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
+        os: [ubuntu-20.04]
+
+    steps:
+      - uses: actions/checkout@v3
+      - uses: actions/setup-python@v4
+        with:
+          python-version: ${{ matrix.python-version }}
+
+      - name: Setup Test Env
+        run: |
+          pip install codecov "tox>=3,<4"
+
+      - name: Test trytond
+        timeout-minutes: 45
+        shell: bash
+        run: |
+          set -x # print commands that are executed
+          coverage erase
+
+          ./scripts/runtox.sh "${{ matrix.python-version }}-trytond" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+          coverage combine .coverage*
+          coverage xml -i
+          codecov --file coverage.xml
+
+  check_required_tests:
+    name: All trytond tests passed or skipped
+    needs: test
+    # Always run this, even if a dependent job failed
+    if: always()
+    runs-on: ubuntu-20.04
+    steps:
+      - name: Check for failures
+        if: contains(needs.test.result, 'failure')
+        run: |
+          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
diff --git a/.gitignore b/.gitignore
index 14a355c3c2..bd5df5dddd 100644
--- a/.gitignore
+++ b/.gitignore
@@ -11,6 +11,8 @@ pip-log.txt
 *.egg-info
 /build
 /dist
+/dist-serverless
+sentry-python-serverless*.zip
 .cache
 .idea
 .eggs
diff --git a/.gitmodules b/.gitmodules
new file mode 100644
index 0000000000..ca104a4df1
--- /dev/null
+++ b/.gitmodules
@@ -0,0 +1,3 @@
+[submodule "checkouts/data-schemas"]
+	path = checkouts/data-schemas
+	url = https://github.com/getsentry/sentry-data-schemas
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
new file mode 100644
index 0000000000..cb7882d38f
--- /dev/null
+++ b/.pre-commit-config.yaml
@@ -0,0 +1,24 @@
+# See https://pre-commit.com for more information
+# See https://pre-commit.com/hooks.html for more hooks
+repos:
+- repo: https://github.com/pre-commit/pre-commit-hooks
+  rev: v4.3.0
+  hooks:
+  - id: trailing-whitespace
+  - id: end-of-file-fixer
+
+- repo: https://github.com/psf/black
+  rev: 22.6.0
+  hooks:
+  - id: black
+
+- repo: https://github.com/pycqa/flake8
+  rev: 5.0.4
+  hooks:
+  - id: flake8
+
+# Disabled for now, because it lists a lot of problems.
+#- repo: https://github.com/pre-commit/mirrors-mypy
+#  rev: 'v0.931'
+#  hooks:
+#  - id: mypy
diff --git a/.tool-versions b/.tool-versions
new file mode 100644
index 0000000000..d316e6d5f1
--- /dev/null
+++ b/.tool-versions
@@ -0,0 +1 @@
+python 3.7.12
diff --git a/.travis.yml b/.travis.yml
deleted file mode 100644
index 5d4d894d49..0000000000
--- a/.travis.yml
+++ /dev/null
@@ -1,75 +0,0 @@
-language: python
-
-python:
-  - "2.7"
-  - "pypy"
-  - "3.4"
-  - "3.5"
-  - "3.6"
-
-env:
-  - SENTRY_PYTHON_TEST_POSTGRES_USER=postgres SENTRY_PYTHON_TEST_POSTGRES_NAME=travis_ci_test
-
-cache:
-  pip: true
-  cargo: true
-
-branches:
-  only:
-    - master
-    - /^release\/.+$/
-
-matrix:
-  include:
-    - python: "3.7"
-      dist: xenial
-
-    - python: "3.8"
-      dist: xenial
-
-    - name: Linting
-      python: "3.6"
-      install:
-        - pip install tox
-      script: tox -e linters
-
-    - python: "3.6"
-      name: Distribution packages
-      install: []
-      script: make travis-upload-dist
-
-    - python: "3.6"
-      name: Build documentation
-      install: []
-      script: make travis-upload-docs
-
-before_script:
-  - psql -c 'create database travis_ci_test;' -U postgres
-  - psql -c 'create database test_travis_ci_test;' -U postgres
-
-services:
-  - postgresql
-
-install:
-  - pip install tox
-  - pip install codecov
-  - make install-zeus-cli
-  - bash scripts/download-relay.sh
-
-script:
-  - coverage erase
-  - ./scripts/runtox.sh '' --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
-  - coverage combine .coverage*
-  - coverage xml -i
-  - codecov --file coverage.xml
-  - '[[ -z "$ZEUS_API_TOKEN" ]] || zeus upload -t "application/x-cobertura+xml" coverage.xml'
-
-notifications:
-  webhooks:
-    urls:
-      - https://zeus.ci/hooks/7ebb3060-90d8-11e8-aa04-0a580a282e07/public/provider/travis/webhook
-    on_success: always
-    on_failure: always
-    on_start: always
-    on_cancel: always
-    on_error: always
diff --git a/.vscode/settings.json b/.vscode/settings.json
new file mode 100644
index 0000000000..ba2472c4c9
--- /dev/null
+++ b/.vscode/settings.json
@@ -0,0 +1,6 @@
+{
+    "python.pythonPath": ".venv/bin/python",
+    "python.formatting.provider": "black",
+    "python.testing.unittestEnabled": false,
+    "python.testing.pytestEnabled": true
+}
diff --git a/CHANGELOG.md b/CHANGELOG.md
new file mode 100644
index 0000000000..8dfde55540
--- /dev/null
+++ b/CHANGELOG.md
@@ -0,0 +1,1137 @@
+# Changelog
+
+## 1.14.0
+
+### Various fixes & improvements
+
+- Add `before_send_transaction` (#1840) by @antonpirker
+
+  Adds a hook (similar to `before_send`) that is called for all transaction events (performance related data).
+
+  Usage:
+
+  ```python
+  import sentry_sdk
+
+  def strip_sensitive_data(event, hint):
+      # modify event here (or return `None` if you want to drop the event entirely)
+      return event
+
+  sentry_sdk.init(
+      # ...
+      before_send_transaction=strip_sensitive_data,
+  )
+  ```
+
+  See also: https://docs.sentry.io/platforms/python/configuration/filtering/#using-platformidentifier-namebefore-send-transaction-
+
+- Django: Always remove values of Django session-related cookies. (#1842) by @antonpirker
+- Profiling: Enable profiling for ASGI frameworks (#1824) by @Zylphrex
+- Profiling: Better gevent support (#1822) by @Zylphrex
+- Profiling: Add profile context to transaction (#1860) by @Zylphrex
+- Profiling: Use co_qualname in python 3.11 (#1831) by @Zylphrex
+- OpenTelemetry: fix Use dict for sentry-trace context instead of tuple (#1847) by @AbhiPrasad
+- OpenTelemetry: fix extra dependency (#1825) by @bernardotorres
+- OpenTelemetry: fix NoOpSpan updates scope (#1834) by @Zylphrex
+- OpenTelemetry: Make sure to noop when there is no DSN (#1852) by @antonpirker
+- FastAPI: Fix middleware being patched multiple times (#1841) by @JohnnyDeuss
+- Starlette: Avoid import of pkg_resource with Starlette integration (#1836) by @mgu
+- Removed code coverage target (#1862) by @antonpirker
+
+## 1.13.0
+
+### Various fixes & improvements
+
+- Add Starlite integration (#1748) by @gazorby
+
+  Adding support for the [Starlite](https://starlite-api.github.io/starlite/1.48/) framework. Unhandled errors are captured. Performance spans for Starlite middleware are also captured. Thanks @gazorby for the great work!
+
+  Usage:
+
+  ```python
+  from starlite import Starlite, get
+
+  import sentry_sdk
+  from sentry_sdk.integrations.starlite import StarliteIntegration
+
+  sentry_sdk.init(
+      dsn="...",
+      traces_sample_rate=1.0,
+      integrations=[
+          StarliteIntegration(),
+      ],
+  )
+
+  @get("/")
+  def hello_world() -> dict[str, str]:
+      """Keeping the tradition alive with hello world."""
+      bla = 1/0  # causing an error
+      return {"hello": "world"}
+
+  app = Starlite(route_handlers=[hello_world])
+  ```
+
+- Profiling: Remove sample buffer from profiler (#1791) by @Zylphrex
+- Profiling: Performance tweaks to profile sampler (#1789) by @Zylphrex
+- Add span for Django SimpleTemplateResponse rendering (#1818) by @chdsbd
+- Use @wraps for Django Signal receivers (#1815) by @meanmail
+- Add enqueued_at and started_at to rq job extra (#1024) by @kruvasyan
+- Remove sanic v22 pin (#1819) by @sl0thentr0py
+- Add support for `bytearray` and `memoryview` built-in types (#1833) by @Tarty
+- Handle `"rc"` in SQLAlchemy version. (#1812) by @peterschutt
+- Doc: Use .venv (not .env) as a virtual env location in CONTRIBUTING.md (#1790) by @tonyo
+- Auto publish to internal pypi on release (#1823) by @asottile-sentry
+- Added Python 3.11 to test suite (#1795) by @antonpirker
+- Update test/linting dependencies (#1801) by @antonpirker
+- Deps: bump sphinx from 5.2.3 to 5.3.0 (#1686) by @dependabot
+
+## 1.12.1
+
+### Various fixes & improvements
+
+- Link errors to OTel spans (#1787) by @antonpirker
+
+## 1.12.0
+
+### Basic OTel support
+
+This adds support for automatically integrating OpenTelemetry performance tracing with Sentry.
+
+See the documentation on how to set it up:
+https://docs.sentry.io/platforms/python/performance/instrumentation/opentelemetry/
+
+Give it a try and let us know if you have any feedback or problems with using it.
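+
+For a rough idea of the wiring, here is a minimal sketch (assuming the `opentelemetry-sdk` package is installed; see the linked docs for the authoritative setup):
+
+```python
+import sentry_sdk
+from sentry_sdk.integrations.opentelemetry import SentryPropagator, SentrySpanProcessor
+
+from opentelemetry import trace
+from opentelemetry.propagate import set_global_textmap
+from opentelemetry.sdk.trace import TracerProvider
+
+sentry_sdk.init(
+    dsn="...",
+    instrumenter="otel",  # let OpenTelemetry create the spans
+    traces_sample_rate=1.0,
+)
+
+# Send OTel spans to Sentry and propagate Sentry trace headers.
+provider = TracerProvider()
+provider.add_span_processor(SentrySpanProcessor())
+trace.set_tracer_provider(provider)
+set_global_textmap(SentryPropagator())
+```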
+
+By: @antonpirker (#1772, #1766, #1765)
+
+### Various fixes & improvements
+
+- Tox Cleanup (#1749) by @antonpirker
+- CI: Fix Github action checks (#1780) by @Zylphrex
+- Profiling: Introduce active thread id on scope (#1764) by @Zylphrex
+- Profiling: Eagerly hash stack for profiles (#1755) by @Zylphrex
+- Profiling: Resolve inherited method class names (#1756) by @Zylphrex
+
+## 1.11.1
+
+### Various fixes & improvements
+
+- Move set_transaction_name out of event processor in fastapi/starlette (#1751) by @sl0thentr0py
+- Expose proxy_headers as top level config and use in ProxyManager: https://docs.sentry.io/platforms/python/configuration/options/#proxy-headers (#1746) by @sl0thentr0py
+
+## 1.11.0
+
+### Various fixes & improvements
+
+- Fix signals problem on sentry.io (#1732) by @antonpirker
+- Fix reading FastAPI request body twice. (#1724) by @antonpirker
+- ref(profiling): Do not error if already setup (#1731) by @Zylphrex
+- ref(profiling): Use sleep scheduler by default (#1729) by @Zylphrex
+- feat(profiling): Extract more frame info (#1702) by @Zylphrex
+- Update actions/upload-artifact to v3.1.1 (#1718) by @mattgauntseo-sentry
+- Performance optimizations (#1725) by @antonpirker
+- feat(pymongo): add PyMongo integration (#1590) by @Agalin
+- Move relay to port 5333 to avoid collisions (#1716) by @sl0thentr0py
+- fix(utils): strip_string() checks text length counting bytes not chars (#1711) by @mgaligniana
+- chore: remove jira workflow (#1707) by @vladanpaunovic
+- build(deps): bump checkouts/data-schemas from `a214fbc` to `20ff3b9` (#1703) by @dependabot
+- perf(profiling): Tune the sample profile generation code for performance (#1694) by @Zylphrex
+
+## 1.10.1
+
+### Various fixes & improvements
+
+- Bug fixes for FastAPI and Sentry SDK 1.10.0 (#1699) by @antonpirker
+- The wrapped receive() did not return anything. (#1698) by @antonpirker
+
+## 1.10.0
+
+### Various fixes & improvements
+
+- Unified naming for span ops (#1661) by @antonpirker
+
+  We have unified the strings of our span operations. See https://develop.sentry.dev/sdk/performance/span-operations/
+
+  **WARNING**: If you have Sentry Dashboards or Sentry Discover queries that use `transaction.op` in their fields, conditions, aggregates, or columns, this change could potentially break your Dashboards/Discover setup.
+  Here is a list of the changes we made to the `op`s. Please adjust your dashboards and Discover queries accordingly:
+
+  | Old operation (`op`)     | New Operation (`op`)   |
+  | ------------------------ | ---------------------- |
+  | `asgi.server`            | `http.server`          |
+  | `aws.request`            | `http.client`          |
+  | `aws.request.stream`     | `http.client.stream`   |
+  | `celery.submit`          | `queue.submit.celery`  |
+  | `celery.task`            | `queue.task.celery`    |
+  | `django.middleware`      | `middleware.django`    |
+  | `django.signals`         | `event.django`         |
+  | `django.template.render` | `template.render`      |
+  | `django.view`            | `view.render`          |
+  | `http`                   | `http.client`          |
+  | `redis`                  | `db.redis`             |
+  | `rq.task`                | `queue.task.rq`        |
+  | `serverless.function`    | `function.aws`         |
+  | `serverless.function`    | `function.gcp`         |
+  | `starlette.middleware`   | `middleware.starlette` |
+
+- Include framework in SDK name (#1662) by @antonpirker
+- Asyncio integration (#1671) by @antonpirker
+- Add exception handling to Asyncio Integration (#1695) by @antonpirker
+- Fix asyncio task factory (#1689) by @antonpirker
+- Have instrumentation for ASGI middleware receive/send callbacks. (#1673) by @antonpirker
+- Use Django internal ASGI handling from Channels version 4.0.0. (#1688) by @antonpirker
+- fix(integrations): Fix http putrequest when url is None (#1693) by @MattFlower
+- build(deps): bump checkouts/data-schemas from `f0a57f2` to `a214fbc` (#1627) by @dependabot
+- build(deps): bump flake8-bugbear from 22.9.11 to 22.9.23 (#1637) by @dependabot
+- build(deps): bump sphinx from 5.1.1 to 5.2.3 (#1653) by @dependabot
+- build(deps): bump actions/stale from 5 to 6 (#1638) by @dependabot
+- build(deps): bump black from 22.8.0 to 22.10.0 (#1670) by @dependabot
+- Remove unused node setup from ci. (#1681) by @antonpirker
+- Check for Decimal is in_valid_sample_rate (#1672) by @Arvind2222
+- Add session for aiohttp integration (#1605) by @denys-pidlisnyi
+- feat(profiling): Extract qualified name for each frame (#1669) by @Zylphrex
+- feat(profiling): Attach thread metadata to profiles (#1660) by @Zylphrex
+- ref(profiling): Rename profiling frame keys (#1680) by @Zylphrex
+- fix(profiling): get_frame_name only look at arguments (#1684) by @Zylphrex
+- fix(profiling): Need to sample profile correctly (#1679) by @Zylphrex
+- fix(profiling): Race condition spawning multiple profiling threads (#1676) by @Zylphrex
+- tests(profiling): Add basic profiling tests (#1677) by @Zylphrex
+- tests(profiling): Add tests for thread schedulers (#1683) by @Zylphrex
+
+## 1.9.10
+
+### Various fixes & improvements
+
+- Use content-length header in ASGI instead of reading request body (#1646, #1631, #1595, #1573) (#1649) by @antonpirker
+- Added newer Celery versions to test suite (#1655) by @antonpirker
+- Django 4.x support (#1632) by @antonpirker
+- Cancel old CI runs when new one is started. (#1651) by @antonpirker
+- Increase max string size for desc (#1647) by @k-fish
+- Pin Sanic version for CI (#1650) by @antonpirker
+- Fix for partial signals in old Django and old Python versions. (#1641) by @antonpirker
+- Convert profile output to the sample format (#1611) by @phacops
+- Dynamically adjust profiler sleep time (#1634) by @Zylphrex
+
+## 1.9.9
+
+### Django update (ongoing)
+
+- Instrument Django Signals so they show up in "Performance" view (#1526) by @BeryJu
+- Include other Django enhancements brought up by the community
+
+### Various fixes & improvements
+
+- fix(profiling): Profiler mode type hints (#1633) by @Zylphrex
+- New ASGIMiddleware tests (#1600) by @antonpirker
+- build(deps): bump mypy from 0.961 to 0.971 (#1517) by @dependabot
+- build(deps): bump black from 22.3.0 to 22.8.0 (#1596) by @dependabot
+- build(deps): bump sphinx from 5.0.2 to 5.1.1 (#1524) by @dependabot
+- ref: upgrade linters to flake8 5.x (#1610) by @asottile-sentry
+- feat(profiling): Introduce different profiler schedulers (#1616) by @Zylphrex
+- fix(profiling): Check transaction sampled status before profiling (#1624) by @Zylphrex
+- Wrap Baggage ser/deser in capture_internal_exceptions (#1630) by @sl0thentr0py
+- Faster Tests (DjangoCon) (#1602) by @antonpirker
+- feat(profiling): Add support for profiles_sample_rate (#1613) by @Zylphrex
+- feat(profiling): Support for multithreaded profiles (#1570) by @Zylphrex
+
+## 1.9.8
+
+### Various fixes & improvements
+
+- Baggage creation for head of trace (#1589) by @sl0thentr0py
+  - The SDK now also generates new baggage entries for dynamic sampling when it is the first (head) SDK in the pipeline.
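+
+  For illustration, with tracing enabled, the head SDK now attaches its sampling decision to outgoing requests in the W3C `baggage` header. A rough sketch (the header values below are made up):
+
+  ```python
+  import sentry_sdk
+
+  sentry_sdk.init(dsn="...", traces_sample_rate=1.0)
+
+  # Outgoing HTTP requests made inside a transaction now carry headers like:
+  #   sentry-trace: 771a43a4192642f0b136d5159a501700-1234567890abcdef-1
+  #   baggage: sentry-trace_id=771a43a4192642f0b136d5159a501700,
+  #            sentry-public_key=49d0f7386ad645858ae85020e393bef3,sentry-sample_rate=1.0
+  ```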
+
+## 1.9.7
+
+### Various fixes & improvements
+
+- Let SentryAsgiMiddleware work with Starlette and FastAPI integrations (#1594) by @antonpirker
+
+**Note:** The last version, 1.9.6, introduced a breaking change where projects that used Starlette or FastAPI
+and had manually set up `SentryAsgiMiddleware` could not start. This version fixes that behavior.
+With this version, if you have a manual `SentryAsgiMiddleware` setup and are using Starlette or FastAPI,
+everything just works out of the box.
+
+Sorry for any inconvenience the last version might have brought to you.
+
+We can do better, and in the future we will do our best to not break your code again.
+
+## 1.9.6
+
+### Various fixes & improvements
+
+- Auto-enable Starlette and FastAPI (#1533) by @antonpirker
+- Add more version constraints (#1574) by @isra17
+- Fix typo in starlette attribute check (#1566) by @sl0thentr0py
+
+## 1.9.5
+
+### Various fixes & improvements
+
+- fix(redis): import redis pipeline using full path (#1565) by @olksdr
+- Fix side effects for parallel tests (#1554) by @sl0thentr0py
+
+## 1.9.4
+
+### Various fixes & improvements
+
+- Remove TRANSACTION_SOURCE_UNKNOWN and default to CUSTOM (#1558) by @sl0thentr0py
+- feat(redis): Add instrumentation for redis pipeline (#1543) by @jjbayer
+- Handle no release when uploading profiles (#1548) by @szokeasaurusrex
+
+## 1.9.3
+
+### Various fixes & improvements
+
+- Wrap StarletteRequestExtractor in capture_internal_exceptions (#1551) by @sl0thentr0py
+
+## 1.9.2
+
+### Various fixes & improvements
+
+- chore: remove quotes (#1545) by @vladanpaunovic
+
+## 1.9.1
+
+### Various fixes & improvements
+
+- Fix FastAPI issues (#1532) (#1514) by @antonpirker
+- Add deprecation warning for 3.4, 3.5 (#1541) by @sl0thentr0py
+- Fast tests (#1504) by @antonpirker
+- Replace Travis CI badge with GitHub Actions badge (#1538) by @153957
+- chore(deps): update urllib3 minimum version with environment markers (#1312) by @miketheman
+- Update Flask and Quart integrations (#1520) by @pgjones
+- chore: Remove ancient examples from tracing prototype (#1528) by @sl0thentr0py
+- fix(django): Send correct "url" transaction source if Django resolver fails to resolve (#1525) by @sl0thentr0py
+
+## 1.9.0
+
+### Various fixes & improvements
+
+- feat(profiler): Add experimental profiler under experiments.enable_profiling (#1481) by @szokeasaurusrex
+- Fixed problem with broken response and python-multipart (#1516) by @antonpirker
+
+## 1.8.0
+
+### Various fixes & improvements
+
+- feat(starlette): add Starlette integration (#1441) by @sl0thentr0py
+
+  **Important:** Remove manual usage of `SentryAsgiMiddleware`! This is now done by the Starlette integration.
+
+  Usage:
+
+  ```python
+  import sentry_sdk
+  from starlette.applications import Starlette
+
+  from sentry_sdk.integrations.starlette import StarletteIntegration
+
+  sentry_sdk.init(
+      dsn="...",
+      integrations=[StarletteIntegration()],
+  )
+
+  app = Starlette(debug=True, routes=[...])
+  ```
+
+- feat(fastapi): add FastAPI integration (#829) by @antonpirker
+
+  **Important:** Remove manual usage of `SentryAsgiMiddleware`! This is now done by the FastAPI integration.
+
+  Usage:
+
+  ```python
+  import sentry_sdk
+  from fastapi import FastAPI
+
+  from sentry_sdk.integrations.starlette import StarletteIntegration
+  from sentry_sdk.integrations.fastapi import FastApiIntegration
+
+  sentry_sdk.init(
+      dsn="...",
+      integrations=[StarletteIntegration(), FastApiIntegration()],
+  )
+
+  app = FastAPI()
+  ```
+
+  Yes, you have to add both the `StarletteIntegration` **AND** the `FastApiIntegration`!
+
+- fix: avoid sending empty Baggage header (#1507) by @intgr
+- fix: properly freeze Baggage object (#1508) by @intgr
+- docs: fix simple typo, collecter | collector (#1505) by @timgates42
+
+## 1.7.2
+
+### Various fixes & improvements
+
+- feat(transactions): Transaction Source (#1490) by @antonpirker
+- Removed (unused) sentry_timestamp header (#1494) by @antonpirker
+
+## 1.7.1
+
+### Various fixes & improvements
+
+- Skip malformed baggage items (#1491) by @robyoung
+
+## 1.7.0
+
+### Various fixes & improvements
+
+- feat(tracing): Dynamic Sampling Context / Baggage continuation (#1485) by @sl0thentr0py
+
+  The SDK now propagates the [W3C Baggage Header](https://www.w3.org/TR/baggage/) from
+  incoming transactions to outgoing requests.
+  It also extracts Sentry-specific [sampling information](https://develop.sentry.dev/sdk/performance/dynamic-sampling-context/)
+  and adds it to the transaction headers to enable Dynamic Sampling in the product.
+
+## 1.6.0
+
+### Various fixes & improvements
+
+- Fix Deployment (#1474) by @antonpirker
+- Serverless V2 (#1450) by @antonpirker
+- Use logging levelno instead of levelname. Levelnames can be overridden (#1449) by @rrauenza
+
+## 1.5.12
+
+### Various fixes & improvements
+
+- feat(measurements): Add experimental set_measurement api on transaction (#1359) by @sl0thentr0py
+- fix: Remove incorrect usage from flask helper example (#1434) by @BYK
+
+## 1.5.11
+
+### Various fixes & improvements
+
+- chore: Bump mypy and fix abstract ContextManager typing (#1421) by @sl0thentr0py
+- chore(issues): add link to Sentry support (#1420) by @vladanpaunovic
+- fix: replace git.io links with redirect targets (#1412) by @asottile-sentry
+- ref: Update error verbose for sentry init (#1361) by @targhs
+- fix(sessions): Update session also for non sampled events and change filter order (#1394) by @adinauer
+
+## 1.5.10
+
+### Various fixes & improvements
+
+- Remove Flask version constraint (#1395) by @antonpirker
+- Change ordering of event drop mechanisms (#1390) by @adinauer
+
+## 1.5.9
+
+### Various fixes & improvements
+
+- fix(sqlalchemy): Use context instead of connection in sqlalchemy integration (#1388) by @sl0thentr0py
+- Update correct test command in contributing docs (#1377) by @targhs
+- Update black (#1379) by @antonpirker
+- build(deps): bump sphinx from 4.1.1 to 4.5.0 (#1376) by @dependabot
+- fix: Auto-enabling Redis and Pyramid integration (#737) by @untitaker
+- feat(testing): Add pytest-watch (#853) by @lobsterkatie
+- Treat x-api-key header as sensitive (#1236) by @simonschmidt
+- fix: Remove obsolete MAX_FORMAT_PARAM_LENGTH (#1375) by @blueyed
+
+## 1.5.8
+
+### Various fixes & improvements
+
+- feat(asgi): Add support for setting transaction name to path in FastAPI (#1349) by @tiangolo
+- fix(sqlalchemy): Change context manager type to avoid race in threads (#1368) by @Fofanko
+- fix(perf): Fix transaction setter on scope to use containing_transaction to match with getter (#1366) by @sl0thentr0py
+- chore(ci): Change stale GitHub workflow to run once a day (#1367) by @kamilogorek
+- feat(django): Make django middleware expose more wrapped attributes (#1202) by @MattFisher
+
+## 1.5.7
+
+### Various fixes & improvements
+
+- fix(serializer): Make sentry_repr dunder method to avoid mock problems (#1364) by @sl0thentr0py
+
+## 1.5.6
+
+### Various fixes & improvements
+
+- Create feature.yml (#1350) by @vladanpaunovic
+- Update contribution guide (#1346) by @antonpirker
+- chore: add bug issue template (#1345) by @vladanpaunovic
+- Added default value for auto_session_tracking (#1337) by @antonpirker
+- docs(readme): reordered content (#1343) by @antonpirker
+- fix(tests): Removed unsupported Django 1.6 from tests to avoid confusion (#1338) by @antonpirker
+- Group captured warnings under separate issues (#1324) by @mnito
+- build(changelogs): Use automated changelogs from Craft (#1340) by @BYK
+- fix(aiohttp): AioHttpIntegration sentry_app_handle() now ignores ConnectionResetError (#1331) by @cmalek
+- meta: Remove black GH action (#1339) by @sl0thentr0py
+- feat(flask): Add `sentry_trace()` template helper (#1336) by @BYK
+
+## 1.5.5
+
+- Add session tracking to ASGI integration (#1329)
+- Pinning test requirements versions (#1330)
+- Allow classes to short circuit serializer with `sentry_repr` (#1322)
+- Set default on json.dumps in compute_tracestate_value to ensure string conversion (#1318)
+
+Work in this release contributed by @tomchuk. Thank you for your contribution!
+
+## 1.5.4
+
+- Add Python 3.10 to test suite (#1309)
+- Capture only 5xx HTTP errors in Falcon Integration (#1314)
+- Attempt custom urlconf resolve in `got_request_exception` as well (#1317)
+
+## 1.5.3
+
+- Pick up custom urlconf set by Django middlewares from request if any (#1308)
+
+## 1.5.2
+
+- Record event_processor client reports #1281
+- Add a Quart integration #1248
+- Sanic v21.12 support #1292
+- Support Celery abstract tasks #1287
+
+Work in this release contributed by @johnzeringue, @pgjones and @ahopkins. Thank you for your contribution!
+
+## 1.5.1
+
+- Fix django legacy url resolver regex substitution due to upstream CVE-2021-44420 fix #1272
+- Record lost `sample_rate` events only if tracing is enabled #1268
+- Fix gevent version parsing for non-numeric parts #1243
+- Record span and breadcrumb when Django opens db connection #1250
+
+## 1.5.0
+
+- Also record client outcomes for before send #1211
+- Add support for implicitly sized envelope items #1229
+- Fix integration with Apache Beam 2.32, 2.33 #1233
+- Remove Python 2.7 support for AWS Lambda layers in craft config #1241
+- Refactor Sanic integration for v21.9 support #1212
+- AWS Lambda Python 3.9 runtime support #1239
+- Fix "shutdown_timeout" typing #1256
+
+Work in this release contributed by @galuszkak, @kianmeng, @ahopkins, @razumeiko, @tomscytale, and @seedofjoy. Thank you for your contribution!
+
+## 1.4.3
+
+- Turned client reports on by default.
+
+## 1.4.2
+
+- Made envelope modifications in the HTTP transport non-observable #1206
+
+## 1.4.1
+
+- Fix race condition between `finish` and `start_child` in tracing #1203
+
+## 1.4.0
+
+- No longer set the last event id for transactions #1186
+- Added support for client reports (disabled by default for now) #1181
+- Added `tracestate` header handling #1179
+- Added real ip detection to asgi integration #1199
+
+## 1.3.1
+
+- Fix detection of contextvars compatibility with Gevent versions >=20.9.0 #1157
+
+## 1.3.0
+
+- Add support for Sanic versions 20 and 21 #1146
+
+## 1.2.0
+
+- Fix for `AWSLambda` Integration to handle other path formats for function initial handler #1139
+- Fix for worker to set daemon attribute instead of deprecated setDaemon method #1093
+- Fix for `bottle` Integration that discards `-dev` for version extraction #1085
+- Fix for transport that adds a unified hook for capturing metrics about dropped events #1100
+- Add `Httpx` Integration #1119
+- Add support for china domains in `AWSLambda` Integration #1051
+
+## 1.1.0
+
+- Fix for `AWSLambda` integration returns value of original handler #1106
+- Fix for `RQ` integration that only captures exception if RQ job has failed and ignore retries #1076
+- Feature that supports Tracing for the `Tornado` integration #1060
+- Feature that supports wildcards in `ignore_logger` in the `Logging` Integration #1053
+- Fix for django that deals with template span description names that are either lists or tuples #1054
+
+## 1.0.0
+
+This release contains a breaking change.
+
+- **BREAKING CHANGE**: Feat: Moved `auto_session_tracking` experimental flag to a proper option and removed explicitly setting experimental `session_mode` in favor of auto detecting its value, hence enabling release health by default #994
+- Fixed Django transaction name by setting the name to `request.path_info` rather than `request.path`
+- Fix for tracing by getting HTTP headers from span rather than transaction when possible #1035
+- Fix for Flask transactions missing request body in non errored transactions #1034
+- Fix for honoring the `X-Forwarded-For` header #1037
+- Fix for worker that logs data dropping of events with level error #1032
+
+## 0.20.3
+
+- Added scripts to support auto instrumentation of no code AWS lambda Python functions
+
+## 0.20.2
+
+- Fix incorrect regex in craft to include wheel file in pypi release
+
+## 0.20.1
+
+- Fix for error that occurs with Async Middlewares when the middleware is a function rather than a class
+
+## 0.20.0
+
+- Fix for header extraction for AWS lambda/API extraction
+- Fix multiple \*\*kwargs type hints #967
+- Fix that corrects AWS lambda integration failure to detect the aws-lambda-ric 1.0 bootstrap #976
+- Fix AWSLambda integration: variable "timeout_thread" referenced before assignment #977
+- Use full git sha as release name #960
+- **BREAKING CHANGE**: The default environment is now production, not based on release
+- Django integration now creates transaction spans for template rendering
+- Fix headers not parsed correctly in ASGI middleware, Decode headers before creating transaction #984
+- Restored ability to have tracing disabled #991
+- Fix Django async views not behaving asynchronously
+- Performance improvement: supported pre-aggregated sessions
+
+## 0.19.5
+
+- Fix two regressions added in 0.19.2 with regard to sampling behavior when reading the sampling decision from headers.
+- Increase internal transport queue size and make it configurable.
+
+## 0.19.4
+
+- Fix a bug that would make applications crash if an old version of `boto3` was installed.
+
+## 0.19.3
+
+- Automatically pass integration-relevant data to `traces_sampler` for AWS, AIOHTTP, ASGI, Bottle, Celery, Django, Falcon, Flask, GCP, Pyramid, Tryton, RQ, and WSGI integrations
+- Fix a bug where the AWS integration would crash if event was anything besides a dictionary
+- Fix the Django integration's ASGI handler for Channels 3.0. Thanks Luke Pomfrey!
+
+## 0.19.2
+
+- Add `traces_sampler` option.
+- The SDK now attempts to infer a default release from various environment variables and the current git repo.
+- Fix a crash with async views in Django 3.1.
+- Fix a bug where complex URL patterns in Django would create malformed transaction names.
+- Add options for transaction styling in AIOHTTP.
+- Add basic attachment support (documentation tbd).
+- Fix a crash in the `pure_eval` integration.
+- Integration for creating spans from `boto3`.
+
+## 0.19.1
+
+- Fix dependency check for `blinker` fixes #858
+- Fix incorrect timeout warnings in AWS Lambda and GCP integrations #854
+
+## 0.19.0
+
+- Removed `_experiments.auto_enabling_integrations` in favor of just `auto_enabling_integrations` which is now enabled by default.
+
+## 0.18.0
+
+- **Breaking change**: The `no_proxy` environment variable is now honored when inferring proxy settings from the system. Thanks Xavier Fernandez!
+- Added Performance/Tracing support for AWS and GCP functions.
+- Fix an issue with Django instrumentation where the SDK modified `resolver_match.callback` and broke user code.
+
+## 0.17.8
+
+- Fix yet another bug with disjoint traces in Celery.
+- Added support for Chalice 1.20. Thanks again to the folks at Cuenca MX!
+
+## 0.17.7
+
+- Internal: Change data category for transaction envelopes.
+- Fix a bug under Celery 4.2+ that may have caused disjoint traces or missing transactions.
+
+## 0.17.6
+
+- Support for Flask 0.10 (only relaxing version check)
+
+## 0.17.5
+
+- Work around an issue in the Python stdlib that makes the entire process deadlock during garbage collection if events are sent from a `__del__` implementation.
+- Add possibility to wrap ASGI application twice in middleware to enable split up of request scope data and exception catching.
+
+## 0.17.4
+
+- New integration for the Chalice web framework for AWS Lambda. Thanks to the folks at Cuenca MX!
+
+## 0.17.3
+
+- Fix an issue with the `pure_eval` integration in interaction with trimming where `pure_eval` would create a lot of useless local variables that then drown out the useful ones in trimming.
+
+## 0.17.2
+
+- Fix timezone bugs in GCP integration.
+
+## 0.17.1
+
+- Fix timezone bugs in AWS Lambda integration.
+- Fix crash on GCP integration because of missing parameter `timeout_warning`.
+
+## 0.17.0
+
+- Fix a bug where class-based callables used as Django views (without using Django's regular class-based views) would not have `csrf_exempt` applied.
+- New integration for Google Cloud Functions.
+- Fix a bug where a recently released version of `urllib3` would cause the SDK to enter an infinite loop on networking and SSL errors.
+- **Breaking change**: Remove the `traceparent_v2` option. The option has been ignored since 0.16.3, just remove it from your code.
+
+## 0.16.5
+
+- Fix a bug that caused Django apps to crash if the view didn't have a `__name__` attribute.
+
+## 0.16.4
+
+- Add experiment to avoid truncating span descriptions. Initialize with `init(_experiments={"smart_transaction_trimming": True})`.
+- Add a span around the Django view in transactions to distinguish its operations from middleware operations.
+
+## 0.16.3
+
+- Fix AWS Lambda support for Python 3.8.
+- The AWS Lambda integration now captures initialization/import errors for Python 3.
+- The AWS Lambda integration now supports an option to warn about functions likely to time out.
+- Testing for RQ 1.5
+- Flip default of `traceparent_v2`. This change should have zero impact. The flag will be removed in 0.17.
+- Fix compatibility bug with Django 3.1.
+
+## 0.16.2
+
+- New (optional) integrations for richer stacktraces: `pure_eval` for additional variables, `executing` for better function names.
+
+## 0.16.1
+
+- Flask integration: Fix a bug that prevented custom tags from being attached to transactions.
+
+## 0.16.0
+
+- Redis integration: add tags for more commands
+- Redis integration: Patch rediscluster package if installed.
+- Session tracking: A session is no longer considered crashed if there has been a fatal log message (only unhandled exceptions count).
+- **Breaking change**: Revamping of the tracing API.
+- **Breaking change**: `before_send` is no longer called for transactions.
+
+## 0.15.1
+
+- Fix fatal crash in Pyramid integration on 404.
+
+## 0.15.0
+
+- **Breaking change:** The ASGI middleware will now raise an exception if contextvars are not available, as is already the case for other asyncio integrations.
+- Contextvars are now used in more circumstances following a bugfix release of `gevent`. This will fix a few instances of wrong request data being attached to events while using an asyncio-based web framework.
+- APM: Fix a bug in the SQLAlchemy integration where a span was left open if the database transaction had to be rolled back. This could have led to deeply nested span trees under that db query span.
+- Fix a bug in the Pyramid integration where the transaction name could not be overridden at all.
+- Fix a broken type annotation on `capture_exception`.
+- Basic support for Django 3.1. More work is required for async middlewares to be instrumented properly for APM.
+
+## 0.14.4
+
+- Fix bugs in transport rate limit enforcement for specific data categories. The bug should not have affected anybody because we do not yet emit rate limits for specific event types/data categories.
+- Fix a bug in `capture_event` where it would crash if given additional kwargs. Thanks to Tatiana Vasilevskaya!
+- Fix a bug where contextvars from the request handler were inaccessible in AIOHTTP error handlers.
+- Fix a bug where the Celery integration would crash if newrelic instrumented Celery as well.
+
+## 0.14.3
+
+- Attempt to use a monotonic clock to measure span durations in Performance/APM.
+- Avoid overwriting explicitly set user data in web framework integrations.
+- Allow passing keyword arguments to `capture_event` instead of configuring the scope.
+- Feature development for session tracking.
+
+## 0.14.2
+
+- Fix a crash in Django Channels instrumentation when SDK is reinitialized.
+- More contextual data for AWS Lambda (cloudwatch logs link).
+
+## 0.14.1
+
+- Fix a crash in the Django integration when used in combination with Django Rest Framework's test utilities for request.
+- Fix high memory consumption when sending a lot of errors in the same process. Particularly noticeable in async environments.
+
+## 0.14.0
+
+- Show ASGI request data in Django 3.0
+- New integration for the Trytond ERP framework. Thanks n1ngu!
+
+## 0.13.5
+
+- Fix trace continuation bugs in APM.
+- No longer report `asyncio.CancelledError` as part of AIOHTTP integration.
+
+## 0.13.4
+
+- Fix package classifiers to mark this package as supporting Python 3.8. The SDK supported 3.8 before though.
+- Update schema sent for transaction events (transaction status).
+- Fix a bug where `None` inside request data was skipped/omitted.
+
+## 0.13.3
+
+- Fix an issue with the ASGI middleware that would cause Uvicorn to infer the wrong ASGI versions and call the wrapped application with the wrong argument count.
+- Do not ignore the `tornado.application` logger.
+- The Redis integration now instruments Redis blaster for breadcrumbs and transaction spans.
+
+## 0.13.2
+
+- Fix a bug in APM that would cause wrong durations to be displayed on non-UTC servers.
+
+## 0.13.1
+
+- Add new global functions for setting scope/context data.
+- Fix a bug that would make Django 1.11+ apps crash when using function-based middleware.
+
+## 0.13.0
+
+- Remove an old deprecation warning (the behavior itself already changed a long time ago).
+- The AIOHTTP integration now attaches the request body to crash reports. Thanks to Vitali Rebkavets!
+- Add an experimental PySpark integration.
+- First release to be tested under Python 3.8. No code changes were necessary though, so previous releases also might have worked.
+
+## 0.12.3
+
+- Various performance improvements to event sending.
+- Avoid crashes when scope or hub is racy.
+- Revert a change that broke applications using gevent and channels (in the same virtualenv, but different processes).
+- Fix a bug that made the SDK crash on unicode in SQL.
+
+## 0.12.2
+
+- Fix a crash with ASGI (Django Channels) when the ASGI request type is neither HTTP nor Websockets.
+
+## 0.12.1
+
+- Temporarily remove sending of SQL parameters (as part of breadcrumbs or spans for APM) to Sentry to avoid memory consumption issues.
+
+## 0.12.0
+
+- Sentry now has a [Discord server](https://discord.gg/cWnMQeA)! Join the server to get involved in SDK development and ask questions.
+- Fix a bug where the response object for httplib (or requests) was held onto for an unnecessarily long amount of time.
+- APM: Add spans for more methods on `subprocess.Popen` objects.
+- APM: Add spans for Django middlewares.
+- APM: Add spans for ASGI requests.
+- Automatically inject the ASGI middleware for Django Channels 2.0. This will **break your Channels 2.0 application if it is running on Python 3.5 or 3.6** (while previously it would "only" leak a lot of memory for each ASGI request). **Install `aiocontextvars` from PyPI to make it work again.**
+
+## 0.11.2
+
+- Fix a bug where the SDK would throw an exception on shutdown when running under eventlet.
+- Add missing data to Redis breadcrumbs.
+
+## 0.11.1
+
+- Remove a faulty assertion (observed in environment with Django Channels and ASGI).
+
+## 0.11.0
+
+- Fix type hints for the logging integration. Thanks Steven Dignam!
+- Fix an issue where scope/context data would leak in applications that use `gevent` with its threading monkeypatch. The fix is to avoid usage of contextvars in such environments. Thanks Ran Benita!
+- Fix a reference cycle in the `ThreadingIntegration` that led to exceptions on interpreter shutdown. Thanks Guang Tian Li!
+- Fix a series of bugs in the stdlib integration that broke usage of `subprocess`.
+- More instrumentation for APM.
+- New integration for SQLAlchemy (creates breadcrumbs from queries).
+- New (experimental) integration for Apache Beam.
+- Fix a bug in the `LoggingIntegration` that would send breadcrumb timestamps in the wrong timezone.
+- The `AiohttpIntegration` now sets the event's transaction name.
+- Fix a bug that caused infinite recursion when serializing local variables that logged errors or otherwise created Sentry events.
+
+## 0.10.2
+
+- Fix a bug where a log record with non-strings as `extra` keys would make the SDK crash.
+- Added ASGI integration for better hub propagation, request data for your events and capturing uncaught exceptions. Using this middleware explicitly in your code will also fix a few issues with Django Channels.
+- Fix a bug where `celery-once` was deadlocking when used in combination with the celery integration.
+- Fix a memory leak in the new tracing feature when it is not enabled.
+
+## 0.10.1
+
+- Fix bug where the SDK would yield a deprecation warning about `collections.abc` vs `collections`.
+- Fix bug in stdlib integration that would cause spawned subprocesses to not inherit the environment variables from the parent process.
+
+## 0.10.0
+
+- Massive refactor in preparation for tracing. There are no intentional breaking changes, but there is a risk of breakage (hence the minor version bump). Two new client options `traces_sample_rate` and `traceparent_v2` have been added. Do not change the defaults in production, they will bring your application down or at least fill your Sentry project up with nonsense events.
+
+## 0.9.5
+
+- Do not use `getargspec` on Python 3 to avoid a deprecation warning.
+
+## 0.9.4
+
+- Revert a change in 0.9.3 that prevented passing a `unicode` string as DSN to `init()`.
+
+## 0.9.3
+
+- Add type hints for `init()`.
+- Include user agent header when sending events.
+
+## 0.9.2
+
+- Fix a bug in the Django integration that would prevent the user from initializing the SDK at the top of `settings.py`.
+
+  This bug was introduced in 0.9.1 for all Django versions, but has been there for much longer for Django 1.6 in particular.
+
+## 0.9.1
+
+- Fix a bug on Python 3.7 where gunicorn with gevent would cause the SDK to leak event data between requests.
+- Fix a bug where the GNU backtrace integration would not parse certain frames.
+- Fix a bug where the SDK would not pick up request bodies for Django Rest Framework based apps.
+- Remove a few more headers containing sensitive data by default.
+- Various improvements to type hints. Thanks Ran Benita!
+- Add an event hint to access the log record from `before_send`.
+- Fix a bug that would ignore `__tracebackhide__`. Thanks Matt Millican!
+- Fix distribution information for mypy support (add `py.typed` file). Thanks Ran Benita!
+
+## 0.9.0
+
+- The SDK now captures `SystemExit` and other `BaseException`s when coming from within a WSGI app (Flask, Django, ...).
+- Pyramid: No longer report an exception if there exists an exception view for it.
+
+## 0.8.1
+
+- Fix infinite recursion bug in Celery integration.
+
+## 0.8.0
+
+- Add the `always_run` option to the excepthook integration.
+- Fix performance issues when attaching large data to events. This is not really intended to be a breaking change, but this release does include a rewrite of a larger chunk of code, therefore the minor version bump.
+
+## 0.7.14
+
+- Fix crash when using Celery integration (`TypeError` when using `apply_async`).
+
+## 0.7.13
+
+- Fix a bug where `Ignore` raised in a Celery task would be reported to Sentry.
+- Add experimental support for tracing PoC.
+
+## 0.7.12
+
+- Read from `X-Real-IP` for user IP address.
+- Fix a bug that would not apply in-app rules for attached callstacks.
+- It's now possible to disable automatic proxy support by passing `http_proxy=""`. Thanks Marco Neumann!
+
+## 0.7.11
+
+- Fix a bug that would send `errno` in an invalid format to the server.
+- Fix import-time crash when running Python with `-O` flag.
+- Fix a bug that would prevent the logging integration from attaching `extra` keys called `data`.
+- Fix order in which exception chains are reported to match Raven behavior.
+- New integration for the Falcon web framework. Thanks to Jacob Magnusson!
+
+## 0.7.10
+
+- Add more event trimming.
+- Log Sentry's response body in debug mode.
+- Fix a few bad typehints causing issues in IDEs.
+- Fix a bug in the Bottle integration that would report HTTP exceptions (e.g. redirects) as errors.
+- Fix a bug that would prevent use of `in_app_exclude` without setting `in_app_include`.
+- Fix a bug where request bodies of Django Rest Framework apps were not captured.
+- Suppress errors during SQL breadcrumb capturing in Django integration. Also change order in which formatting strategies are tried.
+
+## 0.7.9
+
+- New integration for the Bottle web framework. Thanks to Stepan Henek!
+- Self-protect against broken mapping implementations and other broken reprs instead of dropping all local vars from a stacktrace. Thanks to Marco Neumann!
+
+## 0.7.8
+
+- Add support for Sanic versions 18 and 19.
+- Fix a bug that causes an SDK crash when using composed SQL from psycopg2.
+
+## 0.7.7
+
+- Fix a bug that would not capture request bodies if they were empty JSON arrays, objects or strings.
+- New GNU backtrace integration parses stacktraces from exception messages and appends them to the existing stacktrace.
+- Capture Tornado formdata.
+- Support Python 3.6 in Sanic and AIOHTTP integration.
+- Clear breadcrumbs before starting a new request.
+- Fix a bug in the Celery integration that would drop pending events during worker shutdown (particularly an issue when running with `max_tasks_per_child = 1`).
+- Fix a bug with `repr`ing locals whose `__repr__` simultaneously changes the WSGI environment or other data that we're also trying to serialize at the same time.
+
+## 0.7.6
+
+- Fix a bug where artificial frames for Django templates would not be marked as in-app and would always appear as the innermost frame. Implement a heuristic to show template frame closer to `render` or `parse` invocation.
+
+## 0.7.5
+
+- Fix a bug in the Tornado integration that would send broken cookies to the server.
+- Fix a bug in the logging integration that would ignore the client option `with_locals`.
+
+## 0.7.4
+
+- Read release and environment from process environment like the Raven SDK does. The keys are called `SENTRY_RELEASE` and `SENTRY_ENVIRONMENT`.
+- Fix a bug in the `serverless` integration where it would not push a new scope for each function call (leaking tags and other things across calls).
+- Experimental support for type hints.
+
+## 0.7.3
+
+- Fix crash in AIOHTTP integration when integration was set up but disabled.
+- Flask integration now adds usernames and email addresses based on the protocol Flask-User defines on top of Flask-Login.
+- New threading integration catches exceptions from crashing threads.
+- New method `flush` on hubs and clients. New global `flush` function (see the sketch below).
+- Add decorator for serverless functions to fix common problems in those environments.
+- Fix a bug in the logging integration where using explicit handlers required enabling the integration.
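+
+A minimal usage sketch of the new global `flush` (the DSN below is a placeholder); `flush` blocks until queued events are delivered or the timeout elapses, which matters for short-lived processes:
+
+```python
+import sentry_sdk
+
+sentry_sdk.init("https://examplePublicKey@o0.ingest.sentry.io/0")  # placeholder DSN
+sentry_sdk.capture_message("about to exit")
+
+# Wait up to 2 seconds for the transport to deliver pending events.
+sentry_sdk.flush(timeout=2)
+```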
+
+## 0.7.2
+
+- Fix `celery.exceptions.Retry` spamming in Celery integration.
+
+## 0.7.1
+
+- Fix `UnboundLocalError` crash in Celery integration.
+
+## 0.7.0
+
+- Properly display chained exceptions (PEP-3134).
+- Rewrite celery integration to monkeypatch instead of using signals due to bugs in Celery 3's signal handling. The Celery scope is also now available in prerun and postrun signals.
+- Fix Tornado integration to work with Tornado 6.
+- Do not evaluate Django `QuerySet` when trying to capture local variables. Also an internal hook was added to overwrite `repr` for local vars.
+
+## 0.6.9
+
+- Second attempt at fixing the bug that was supposed to be fixed in 0.6.8.
+
+  > No longer access arbitrary sequences in local vars due to possible side effects.
+
+## 0.6.8
+
+- No longer access arbitrary sequences in local vars due to possible side effects.
+
+## 0.6.7
+
+- The source code of Django templates is now displayed in stackframes, like Jinja templates in Flask already were.
+- Updates to AWS Lambda integration for changes Amazon did to their Python 3.7 runtime.
+- Fix a bug in the AIOHTTP integration that would report 300s and other HTTP status codes as errors.
+- Fix a bug where a crashing `before_send` would crash the SDK and app.
+- Fix a bug where cyclic references in e.g. local variables or `extra` data would crash the SDK.
+
+## 0.6.6
+
+- Un-break API of internal `Auth` object that we use in Sentry itself.
+
+## 0.6.5
+
+- Capture WSGI request data eagerly to save memory and avoid issues with uWSGI.
+- Ability to use subpaths in DSN.
+- Ignore `django.request` logger.
+
+## 0.6.4
+
+- Fix bug that would lead to an `AssertionError: stack must have at least one layer`, at least in testsuites for Flask apps.
+
+## 0.6.3
+
+- New integration for Tornado.
+- Fix request data in Django, Flask and other WSGI frameworks leaking between events.
+- Fix infinite recursion when sending more events in `before_send`.
+
+## 0.6.2
+
+- Fix crash in AWS Lambda integration when using Zappa. This only silences the error; the underlying bug is still in Zappa.
+
+## 0.6.1
+
+- New integration for aiohttp-server.
+- Fix crash when reading hostname in broken WSGI environments.
+
+## 0.6.0
+
+- Fix bug where a 429 without Retry-After would not be honored.
+- Fix bug where proxy setting would not fall back to `http_proxy` for HTTPS traffic.
+- A WSGI middleware is now available for catching errors and adding context about the current request to them.
+- Using `logging.debug("test", exc_info=True)` will now attach the current stacktrace if no `sys.exc_info` is available.
+- The Python 3.7 runtime for AWS Lambda is now supported.
+- Fix a bug that would drop an event or parts of it when it contained bytes that were not UTF-8 encoded.
+- Logging an exception will no longer add the exception as breadcrumb to the exception's own event.
+
+## 0.5.5
+
+- New client option `ca_certs`.
+- Fix crash with Django and psycopg2.
+
+## 0.5.4
+
+- Fix deprecation warning in relation to the `collections` stdlib module.
+- Fix bug that would crash Django and Flask when streaming responses are failing halfway through.
+
+## 0.5.3
+
+- Fix bug where using `push_scope` with a callback would not pop the scope.
+- Fix crash when initializing the SDK in `push_scope`.
+- Fix bug where IP addresses were sent when `send_default_pii=False`.
+
+## 0.5.2
+
+- Fix bug where events sent through the RQ integration were sometimes lost.
+- Remove a deprecation warning about usage of `logger.warn`.
+- Fix bug where large frame local variables would lead to the event being rejected by Sentry.
+
+## 0.5.1
+
+- Integration for Redis Queue (RQ).
+
+## 0.5.0
+
+- Fix a bug that would omit several debug logs during SDK initialization.
+- Fix issue that sent an event key `""` Sentry wouldn't understand.
+- **Breaking change:** The `level` and `event_level` options in the logging integration now work separately from each other.
+- Fix a bug in the Sanic integration that would report the exception behind any HTTP error code.
+- Fix a bug that would spam breadcrumbs in the Celery integration. Ignore logger `celery.worker.job`.
+- Additional attributes on log records are now put into `extra`.
+- Integration for Pyramid.
+- `sys.argv` is put into extra automatically.
+
+## 0.4.3
+
+- Fix a bug that would leak WSGI responses.
+
+## 0.4.2
+
+- Fix a bug in the Sanic integration that would leak data between requests.
+- Fix a bug that would hide all debug logging happening inside of the built-in transport.
+- Fix a bug that would report errors for typos in Django's shell.
+
+## 0.4.1
+
+- Fix bug that would only show filenames in stacktraces but not the parent directories.
+
+## 0.4.0
+
+- Changed how integrations are initialized. Integrations are now configured and enabled per-client.
+
+## 0.3.11
+
+- Fix issue with certain deployment tools and the AWS Lambda integration.
+
+## 0.3.10
+
+- Set transactions for Django like in Raven. Which transaction behavior is used can be configured.
+- Fix a bug which would omit frame local variables from stacktraces in Celery.
+- New option: `attach_stacktrace`.
+
+## 0.3.9
+
+- Bugfixes for AWS Lambda integration: Using Zappa did not catch any exceptions.
+
+## 0.3.8
+
+- Nicer log level for internal errors.
+
+## 0.3.7
+
+- Remove `repos` configuration option. There was never a way to make use of this feature.
+- Fix a bug in `last_event_id`.
+- Add Django SQL queries to breadcrumbs.
+- Django integration won't set user attributes if they were already set.
+- Report correct SDK version to Sentry.
+
+## 0.3.6
+
+- Integration for Sanic.
+
+## 0.3.5
+
+- Integration for AWS Lambda.
+- Fix mojibake when encoding local variable values.
+
+## 0.3.4
+
+- Performance improvement when storing breadcrumbs.
+
+## 0.3.3
+
+- Fix crash when breadcrumbs had to be truncated.
+
+## 0.3.2
+
+- Fixed an issue where some paths were not properly sent as absolute paths.
diff --git a/CHANGES.md b/CHANGES.md
deleted file mode 100644
index fe1d6b6386..0000000000
--- a/CHANGES.md
+++ /dev/null
@@ -1,499 +0,0 @@
-# Changelog and versioning
-
-## Versioning Policy
-
-This project follows [semver](https://semver.org/), with three additions:
-
-* Semver says that major version `0` can include breaking changes at any time.
-  Still, it is common practice to assume that only `0.x` releases (minor
-  versions) can contain breaking changes while `0.x.y` releases (patch
-  versions) are used for backwards-compatible changes (bugfixes and features).
-  This project also follows that practice.
-
-* All undocumented APIs are considered internal. They are not part of this
-  contract.
-
-* Certain features (e.g. integrations) may be explicitly called out as
-  "experimental" or "unstable" in the documentation. They come with their own
-  versioning policy described in the documentation.
-
-We recommend to pin your version requirements against `0.x.*` or `0.x.y`.
-Either one of the following is fine: - -``` -sentry-sdk>=0.10.0,<0.11.0 -sentry-sdk==0.10.1 -``` - -A major release `N` implies the previous release `N-1` will no longer receive updates. We generally do not backport bugfixes to older versions unless they are security relevant. However, feel free to ask for backports of specific commits on the bugtracker. - -## 0.14.4 - -* Fix bugs in transport rate limit enforcement for specific data categories. - The bug should not have affected anybody because we do not yet emit rate - limits for specific event types/data categories. -* Fix a bug in `capture_event` where it would crash if given additional kwargs. - Thanks to Tatiana Vasilevskaya! -* Fix a bug where contextvars from the request handler were inaccessible in - AIOHTTP error handlers. -* Fix a bug where the Celery integration would crash if newrelic instrumented Celery as well. - - -## 0.14.3 - -* Attempt to use a monotonic clock to measure span durations in Performance/APM. -* Avoid overwriting explicitly set user data in web framework integrations. -* Allow to pass keyword arguments to `capture_event` instead of configuring the scope. -* Feature development for session tracking. - -## 0.14.2 - -* Fix a crash in Django Channels instrumentation when SDK is reinitialized. -* More contextual data for AWS Lambda (cloudwatch logs link). - -## 0.14.1 - -* Fix a crash in the Django integration when used in combination with Django Rest Framework's test utilities for request. -* Fix high memory consumption when sending a lot of errors in the same process. Particularly noticeable in async environments. - -## 0.14.0 - -* Show ASGI request data in Django 3.0 -* New integration for the Trytond ERP framework. Thanks n1ngu! - -## 0.13.5 - -* Fix trace continuation bugs in APM. -* No longer report `asyncio.CancelledError` as part of AIOHTTP integration. - -## 0.13.4 - -* Fix package classifiers to mark this package as supporting Python 3.8. The SDK supported 3.8 before though. -* Update schema sent for transaction events (transaction status). -* Fix a bug where `None` inside request data was skipped/omitted. - -## 0.13.3 - -* Fix an issue with the ASGI middleware that would cause Uvicorn to infer the wrong ASGI versions and call the wrapped application with the wrong argument count. -* Do not ignore the `tornado.application` logger. -* The Redis integration now instruments Redis blaster for breadcrumbs and transaction spans. - -## 0.13.2 - -* Fix a bug in APM that would cause wrong durations to be displayed on non-UTC servers. - -## 0.13.1 - -* Add new global functions for setting scope/context data. -* Fix a bug that would make Django 1.11+ apps crash when using function-based middleware. - -## 0.13.0 - -* Remove an old deprecation warning (behavior itself already changed since a long time). -* The AIOHTTP integration now attaches the request body to crash reports. Thanks to Vitali Rebkavets! -* Add an experimental PySpark integration. -* First release to be tested under Python 3.8. No code changes were necessary though, so previous releases also might have worked. - -## 0.12.3 - -* Various performance improvements to event sending. -* Avoid crashes when scope or hub is racy. -* Revert a change that broke applications using gevent and channels (in the same virtualenv, but different processes). -* Fix a bug that made the SDK crash on unicode in SQL. - -## 0.12.2 - -* Fix a crash with ASGI (Django Channels) when the ASGI request type is neither HTTP nor Websockets. 
- -## 0.12.1 - -* Temporarily remove sending of SQL parameters (as part of breadcrumbs or spans for APM) to Sentry to avoid memory consumption issues. - -## 0.12.0 - -* Sentry now has a [Discord server](https://discord.gg/cWnMQeA)! Join the server to get involved into SDK development and ask questions. -* Fix a bug where the response object for httplib (or requests) was held onto for an unnecessarily long amount of time. -* APM: Add spans for more methods on `subprocess.Popen` objects. -* APM: Add spans for Django middlewares. -* APM: Add spans for ASGI requests. -* Automatically inject the ASGI middleware for Django Channels 2.0. This will **break your Channels 2.0 application if it is running on Python 3.5 or 3.6** (while previously it would "only" leak a lot of memory for each ASGI request). **Install `aiocontextvars` from PyPI to make it work again.** - -## 0.11.2 - -* Fix a bug where the SDK would throw an exception on shutdown when running under eventlet. -* Add missing data to Redis breadcrumbs. - -## 0.11.1 - -* Remove a faulty assertion (observed in environment with Django Channels and ASGI). - -## 0.11.0 - -* Fix type hints for the logging integration. Thansk Steven Dignam! -* Fix an issue where scope/context data would leak in applications that use `gevent` with its threading monkeypatch. The fix is to avoid usage of contextvars in such environments. Thanks Ran Benita! -* Fix a reference cycle in the `ThreadingIntegration` that led to exceptions on interpreter shutdown. Thanks Guang Tian Li! -* Fix a series of bugs in the stdlib integration that broke usage of `subprocess`. -* More instrumentation for APM. -* New integration for SQLAlchemy (creates breadcrumbs from queries). -* New (experimental) integration for Apache Beam. -* Fix a bug in the `LoggingIntegration` that would send breadcrumbs timestamps in the wrong timezone. -* The `AiohttpIntegration` now sets the event's transaction name. -* Fix a bug that caused infinite recursion when serializing local variables that logged errors or otherwise created Sentry events. - -## 0.10.2 - -* Fix a bug where a log record with non-strings as `extra` keys would make the SDK crash. -* Added ASGI integration for better hub propagation, request data for your events and capturing uncaught exceptions. Using this middleware explicitly in your code will also fix a few issues with Django Channels. -* Fix a bug where `celery-once` was deadlocking when used in combination with the celery integration. -* Fix a memory leak in the new tracing feature when it is not enabled. - -## 0.10.1 - -* Fix bug where the SDK would yield a deprecation warning about - `collections.abc` vs `collections`. -* Fix bug in stdlib integration that would cause spawned subprocesses to not - inherit the environment variables from the parent process. - -## 0.10.0 - -* Massive refactor in preparation to tracing. There are no intentional breaking - changes, but there is a risk of breakage (hence the minor version bump). Two - new client options `traces_sample_rate` and `traceparent_v2` have been added. - Do not change the defaults in production, they will bring your application - down or at least fill your Sentry project up with nonsense events. - -## 0.9.5 - -* Do not use ``getargspec`` on Python 3 to evade deprecation - warning. - -## 0.9.4 - -* Revert a change in 0.9.3 that prevented passing a ``unicode`` - string as DSN to ``init()``. - -## 0.9.3 - -* Add type hints for ``init()``. -* Include user agent header when sending events. 
- -## 0.9.2 - -* Fix a bug in the Django integration that would prevent the user - from initializing the SDK at the top of `settings.py`. - - This bug was introduced in 0.9.1 for all Django versions, but has been there - for much longer for Django 1.6 in particular. - -## 0.9.1 - -* Fix a bug on Python 3.7 where gunicorn with gevent would cause the SDK to - leak event data between requests. -* Fix a bug where the GNU backtrace integration would not parse certain frames. -* Fix a bug where the SDK would not pick up request bodies for Django Rest - Framework based apps. -* Remove a few more headers containing sensitive data per default. -* Various improvements to type hints. Thanks Ran Benita! -* Add a event hint to access the log record from `before_send`. -* Fix a bug that would ignore `__tracebackhide__`. Thanks Matt Millican! -* Fix distribution information for mypy support (add `py.typed` file). Thanks - Ran Benita! - -## 0.9.0 - -* The SDK now captures `SystemExit` and other `BaseException`s when coming from - within a WSGI app (Flask, Django, ...) -* Pyramid: No longer report an exception if there exists an exception view for - it. - -## 0.8.1 - -* Fix infinite recursion bug in Celery integration. - -## 0.8.0 - -* Add the always_run option in excepthook integration. -* Fix performance issues when attaching large data to events. This is not - really intended to be a breaking change, but this release does include a - rewrite of a larger chunk of code, therefore the minor version bump. - -## 0.7.14 - -* Fix crash when using Celery integration (`TypeError` when using - `apply_async`). - -## 0.7.13 - -* Fix a bug where `Ignore` raised in a Celery task would be reported to Sentry. -* Add experimental support for tracing PoC. - -## 0.7.12 - -* Read from `X-Real-IP` for user IP address. -* Fix a bug that would not apply in-app rules for attached callstacks. -* It's now possible to disable automatic proxy support by passing - `http_proxy=""`. Thanks Marco Neumann! - -## 0.7.11 - -* Fix a bug that would send `errno` in an invalid format to the server. -* Fix import-time crash when running Python with `-O` flag. -* Fix a bug that would prevent the logging integration from attaching `extra` - keys called `data`. -* Fix order in which exception chains are reported to match Raven behavior. -* New integration for the Falcon web framework. Thanks to Jacob Magnusson! - -## 0.7.10 - -* Add more event trimming. -* Log Sentry's response body in debug mode. -* Fix a few bad typehints causing issues in IDEs. -* Fix a bug in the Bottle integration that would report HTTP exceptions (e.g. - redirects) as errors. -* Fix a bug that would prevent use of `in_app_exclude` without - setting `in_app_include`. -* Fix a bug where request bodies of Django Rest Framework apps were not captured. -* Suppress errors during SQL breadcrumb capturing in Django - integration. Also change order in which formatting strategies - are tried. - -## 0.7.9 - -* New integration for the Bottle web framework. Thanks to Stepan Henek! -* Self-protect against broken mapping implementations and other broken reprs - instead of dropping all local vars from a stacktrace. Thanks to Marco - Neumann! - -## 0.7.8 - -* Add support for Sanic versions 18 and 19. -* Fix a bug that causes an SDK crash when using composed SQL from psycopg2. - -## 0.7.7 - -* Fix a bug that would not capture request bodies if they were empty JSON - arrays, objects or strings. 
-* New GNU backtrace integration parses stacktraces from exception messages and - appends them to existing stacktrace. -* Capture Tornado formdata. -* Support Python 3.6 in Sanic and AIOHTTP integration. -* Clear breadcrumbs before starting a new request. -* Fix a bug in the Celery integration that would drop pending events during - worker shutdown (particularly an issue when running with `max_tasks_per_child - = 1`) -* Fix a bug with `repr`ing locals whose `__repr__` simultaneously changes the - WSGI environment or other data that we're also trying to serialize at the - same time. - -## 0.7.6 - -* Fix a bug where artificial frames for Django templates would not be marked as - in-app and would always appear as the innermost frame. Implement a heuristic - to show template frame closer to `render` or `parse` invocation. - -## 0.7.5 - -* Fix bug into Tornado integration that would send broken cookies to the server. -* Fix a bug in the logging integration that would ignore the client - option `with_locals`. - -## 0.7.4 - -* Read release and environment from process environment like the Raven SDK - does. The keys are called `SENTRY_RELEASE` and `SENTRY_ENVIRONMENT`. -* Fix a bug in the `serverless` integration where it would not push a new scope - for each function call (leaking tags and other things across calls). -* Experimental support for type hints. - -## 0.7.3 - -* Fix crash in AIOHTTP integration when integration was set up but disabled. -* Flask integration now adds usernames, email addresses based on the protocol - Flask-User defines on top of Flask-Login. -* New threading integration catches exceptions from crashing threads. -* New method `flush` on hubs and clients. New global `flush` function. -* Add decorator for serverless functions to fix common problems in those - environments. -* Fix a bug in the logging integration where using explicit handlers required - enabling the integration. - -## 0.7.2 - -* Fix `celery.exceptions.Retry` spamming in Celery integration. - -## 0.7.1 - -* Fix `UnboundLocalError` crash in Celery integration. - -## 0.7.0 - -* Properly display chained exceptions (PEP-3134). -* Rewrite celery integration to monkeypatch instead of using signals due to - bugs in Celery 3's signal handling. The Celery scope is also now available in - prerun and postrun signals. -* Fix Tornado integration to work with Tornado 6. -* Do not evaluate Django `QuerySet` when trying to capture local variables. - Also an internal hook was added to overwrite `repr` for local vars. - -## 0.6.9 - -* Second attempt at fixing the bug that was supposed to be fixed in 0.6.8. - - > No longer access arbitrary sequences in local vars due to possible side effects. - -## 0.6.8 - -* No longer access arbitrary sequences in local vars due to possible side effects. - -## 0.6.7 - -* Sourcecode Django templates is now displayed in stackframes like Jinja templates in Flask already were. -* Updates to AWS Lambda integration for changes Amazon did to their Python 3.7 runtime. -* Fix a bug in the AIOHTTP integration that would report 300s and other HTTP status codes as errors. -* Fix a bug where a crashing `before_send` would crash the SDK and app. -* Fix a bug where cyclic references in e.g. local variables or `extra` data would crash the SDK. - -## 0.6.6 - -* Un-break API of internal `Auth` object that we use in Sentry itself. - -## 0.6.5 - -* Capture WSGI request data eagerly to save memory and avoid issues with uWSGI. -* Ability to use subpaths in DSN. -* Ignore `django.request` logger. 
- -## 0.6.4 - -* Fix bug that would lead to an `AssertionError: stack must have at least one layer`, at least in testsuites for Flask apps. - -## 0.6.3 - -* New integration for Tornado -* Fix request data in Django, Flask and other WSGI frameworks leaking between events. -* Fix infinite recursion when sending more events in `before_send`. - -## 0.6.2 - -* Fix crash in AWS Lambda integration when using Zappa. This only silences the error, the underlying bug is still in Zappa. - -## 0.6.1 - -* New integration for aiohttp-server. -* Fix crash when reading hostname in broken WSGI environments. - -## 0.6.0 - -* Fix bug where a 429 without Retry-After would not be honored. -* Fix bug where proxy setting would not fall back to `http_proxy` for HTTPs traffic. -* A WSGI middleware is now available for catching errors and adding context about the current request to them. -* Using `logging.debug("test", exc_info=True)` will now attach the current stacktrace if no `sys.exc_info` is available. -* The Python 3.7 runtime for AWS Lambda is now supported. -* Fix a bug that would drop an event or parts of it when it contained bytes that were not UTF-8 encoded. -* Logging an exception will no longer add the exception as breadcrumb to the exception's own event. - -## 0.5.5 - -* New client option `ca_certs`. -* Fix crash with Django and psycopg2. - -## 0.5.4 - -* Fix deprecation warning in relation to the `collections` stdlib module. -* Fix bug that would crash Django and Flask when streaming responses are failing halfway through. - -## 0.5.3 - -* Fix bug where using `push_scope` with a callback would not pop the scope. -* Fix crash when initializing the SDK in `push_scope`. -* Fix bug where IP addresses were sent when `send_default_pii=False`. - -## 0.5.2 - -* Fix bug where events sent through the RQ integration were sometimes lost. -* Remove a deprecation warning about usage of `logger.warn`. -* Fix bug where large frame local variables would lead to the event being rejected by Sentry. - -## 0.5.1 - -* Integration for Redis Queue (RQ) - -## 0.5.0 - -* Fix a bug that would omit several debug logs during SDK initialization. -* Fix issue that sent a event key `""` Sentry wouldn't understand. -* **Breaking change:** The `level` and `event_level` options in the logging integration now work separately from each other. -* Fix a bug in the Sanic integration that would report the exception behind any HTTP error code. -* Fix a bug that would spam breadcrumbs in the Celery integration. Ignore logger `celery.worker.job`. -* Additional attributes on log records are now put into `extra`. -* Integration for Pyramid. -* `sys.argv` is put into extra automatically. - -## 0.4.3 - -* Fix a bug that would leak WSGI responses. - -## 0.4.2 - -* Fix a bug in the Sanic integration that would leak data between requests. -* Fix a bug that would hide all debug logging happening inside of the built-in transport. -* Fix a bug that would report errors for typos in Django's shell. - -## 0.4.1 - -* Fix bug that would only show filenames in stacktraces but not the parent - directories. - -## 0.4.0 - -* Changed how integrations are initialized. Integrations are now - configured and enabled per-client. - -## 0.3.11 - -* Fix issue with certain deployment tools and the AWS Lambda integration. - -## 0.3.10 - -* Set transactions for Django like in Raven. Which transaction behavior is used - can be configured. -* Fix a bug which would omit frame local variables from stacktraces in Celery. 
-* New option: `attach_stacktrace`
-
-## 0.3.9
-
-* Bugfixes for AWS Lambda integration: Using Zappa did not catch any exceptions.
-
-## 0.3.8
-
-* Nicer log level for internal errors.
-
-## 0.3.7
-
-* Remove `repos` configuration option. There was never a way to make use of
-  this feature.
-* Fix a bug in `last_event_id`.
-* Add Django SQL queries to breadcrumbs.
-* Django integration won't set user attributes if they were already set.
-* Report correct SDK version to Sentry.
-
-## 0.3.6
-
-* Integration for Sanic
-
-## 0.3.5
-
-* Integration for AWS Lambda
-* Fix mojibake when encoding local variable values
-
-## 0.3.4
-
-* Performance improvement when storing breadcrumbs
-
-## 0.3.3
-
-* Fix crash when breadcrumbs had to be trunchated
-
-## 0.3.2
-
-* Fixed an issue where some paths where not properly sent as absolute paths
diff --git a/CONTRIBUTING-aws-lambda.md b/CONTRIBUTING-aws-lambda.md
new file mode 100644
index 0000000000..7a6a158b45
--- /dev/null
+++ b/CONTRIBUTING-aws-lambda.md
@@ -0,0 +1,21 @@
+# Contributing to Sentry AWS Lambda Layer
+
+All the general terms of the [CONTRIBUTING.md](CONTRIBUTING.md) apply.
+
+## Development environment
+
+You need an AWS account and the AWS CLI installed and set up.
+
+We put together two helper scripts that can help you with development:
+
+- `./scripts/aws-deploy-local-layer.sh`
+
+  This script [scripts/aws-deploy-local-layer.sh](scripts/aws-deploy-local-layer.sh) will take the code you have checked out locally, create a Lambda layer out of it, and deploy it to the `eu-central-1` region of your configured AWS account using the `aws` CLI.
+
+  The Lambda layer will have the name `SentryPythonServerlessSDK-local-dev`.
+
+- `./scripts/aws-attach-layer-to-lambda-function.sh`
+
+  You can use this script [scripts/aws-attach-layer-to-lambda-function.sh](scripts/aws-attach-layer-to-lambda-function.sh) to attach the Lambda layer you just deployed (using the first script) to one of your existing Lambda functions. You will have to pass the name of the Lambda function to attach to as an argument. (See the script for details.)
+
+With these two helper scripts, it should be easy to iterate rapidly on the Lambda layer.
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index ebec137873..e1749587b7 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -1,73 +1,211 @@
-# How to contribute to the Sentry Python SDK
+# Contributing to Sentry SDK for Python
-`sentry-sdk` is an ordinary Python package. You can install it with `pip
-install -e .` into some virtualenv, edit the sourcecode and test out your
-changes manually.
+We welcome community contributions to sentry-python. See the [Contributing to Docs](https://docs.sentry.io/contributing/) page if you want to fix or update the documentation on the website.
-## Community
+## How to report a problem
-The public-facing channels for support and development of Sentry SDKs can be found on [Discord](https://discord.gg/Ww9hbqr).
+Please search the [issue tracker](https://github.com/getsentry/sentry-python/issues) before creating a new issue (a problem or an improvement request). Please also ask in our [Sentry Community on Discord](https://discord.com/invite/Ww9hbqr) before submitting a new issue. There are a ton of great people in our Discord community ready to help you!
-## Running tests and linters
+If you feel that you can fix or implement it yourself, please read the paragraphs below to learn how to submit your changes.
-Make sure you have `virtualenv` installed, and the Python versions you care
-about. You should have Python 2.7 and the latest Python 3 installed.
+## Submitting changes
-You don't need to `workon` or `activate` anything, the `Makefile` will create
-one for you. Run `make` or `make help` to list commands.
+- Set up the development environment.
+- Clone sentry-python and prepare necessary changes.
+- Add tests for your changes to `tests/`.
+- Run tests and make sure all of them pass.
+- Submit a pull request, referencing any issues it addresses.
+
+We will review your pull request as soon as possible.
+Thank you for contributing!
+
+## Development environment
+
+### Clone the repo:
+
+```bash
+git clone git@github.com:getsentry/sentry-python.git
+```
+
+Make sure that you have Python 3 installed. Version 3.7 or higher is required to run style checkers on pre-commit. On macOS, we recommend using brew to install Python. For Windows, we recommend an official python.org release.
+
+### Create a virtual environment:
+
+```bash
+cd sentry-python
+
+python -m venv .venv
+
+source .venv/bin/activate
+```
+
+### Install `sentry-python` in editable mode
+
+```bash
+pip install -e .
+```
+
+**Hint:** Sometimes you need a sample project to run your new changes to sentry-python. In this case, install the sample project in the same virtualenv and you should be good to go, because the `pip install -e .` from above installed your local sentry-python in editable mode.
+
+### Install coding style pre-commit hooks:
+
+This will make sure that your commits have the correct coding style.
+
+```bash
+cd sentry-python
+
+pip install -r linter-requirements.txt
+
+pip install pre-commit
+
+pre-commit install
+```
+
+That's it. You should be ready to make changes, run tests, and make commits! If you experience any problems, please don't hesitate to ping us in our [Discord Community](https://discord.com/invite/Ww9hbqr).
+
+## Running tests
+
+We have a `Makefile` to help people get started with hacking on the SDK
+without having to know or understand the Python ecosystem.
+Run `make` or `make help` to list commands.
+
+The simplest way to run tests is:
+
+```bash
+cd sentry-python
+
+make test
+```
+
+This will use [Tox](https://tox.wiki/en/latest/) to run our test suite
+under Python 3.9 (see the `test` target in the `Makefile`).
+
+Of course you can always run the underlying commands yourself, which is
+particularly useful when wanting to provide arguments to `pytest` to run
+specific tests:
+
+```bash
+cd sentry-python
+
+# create virtual environment
+python -m venv .venv
+
+# activate virtual environment
+source .venv/bin/activate
+
+# install sentry-python
+pip install -e .
+
+# install requirements
+pip install -r test-requirements.txt
+
+# run tests
+pytest tests/
+```
+
+If you want to run the tests for a specific integration, you can do so like this:
+
+```bash
+pytest -rs tests/integrations/flask/
+```
+
+**Hint:** Tests of integrations need additional dependencies. The switch `-rs` will show you why tests were skipped and what dependencies you need to install for the tests to run. (You can also consult the [tox.ini](tox.ini) file to see what dependencies are installed for each integration.)

## Releasing a new version

-We use [craft](https://github.com/getsentry/craft#python-package-index-pypi) to
-release new versions. You need credentials for the `getsentry` PyPI user, and
-must have `twine` installed globally.
+(only relevant for Sentry employees)
+
+Prerequisites:
+
+- All the changes that should be released must be in the `master` branch.
+- Every commit should follow the [Commit Message Format](https://develop.sentry.dev/commit-messages/#commit-message-format) convention.
+- CHANGELOG.md is updated automatically. No human intervention necessary.
+
+Manual Process:
+
+- On GitHub, in the `sentry-python` repository, go to "Actions" and select the "Release" workflow.
+- Click on "Run workflow" on the right side and make sure the `master` branch is selected.
+- Fill in the "Version to release" input field. Here you decide whether it is a major, minor or patch release. (See "Versioning Policy" below.)
+- Click "Run Workflow".
+
+This will trigger [Craft](https://github.com/getsentry/craft) to prepare everything needed for a release. (For more information, see [craft prepare](https://github.com/getsentry/craft#craft-prepare-preparing-a-new-release).) At the end of this process, a release issue is created in the [Publish](https://github.com/getsentry/publish) repository. (Example release issue: https://github.com/getsentry/publish/issues/815)
+
+Now one of the people with release privileges (most probably your engineering manager) will review this issue and then add the `accepted` label to it.

-The usual release process goes like this:
+There are always two people involved in a release.

-1. Go through git log and write new entry into `CHANGES.md`, commit to master
-2. `craft p a.b.c`
-3. `craft pp a.b.c`
+If you are in a hurry and the release should be out immediately, there is a Slack channel called `#proj-release-approval` where you can see your release issue and ping people to have a look immediately.
+
+When the release issue is labeled `accepted`, [Craft](https://github.com/getsentry/craft) is triggered again to publish the release to all the right platforms. (See [craft publish](https://github.com/getsentry/craft#craft-publish-publishing-the-release) for more information.) At the end of this process, the release issue on GitHub will be closed and the release is completed! Congratulations!
+
+There is a sequence diagram visualizing all this in the [README.md](https://github.com/getsentry/publish) of the `Publish` repository.
+
+### Versioning Policy
+
+This project follows [semver](https://semver.org/), with three additions:
+
+- Semver says that major version `0` can include breaking changes at any time. Still, it is common practice to assume that only `0.x` releases (minor versions) can contain breaking changes while `0.x.y` releases (patch versions) are used for backwards-compatible changes (bugfixes and features). This project also follows that practice.
+
+- All undocumented APIs are considered internal. They are not part of this contract.
+
+- Certain features (e.g. integrations) may be explicitly called out as "experimental" or "unstable" in the documentation. They come with their own versioning policy described in the documentation.
+
+We recommend pinning your version requirements against `1.x.*` or `1.x.y`.
+Either one of the following is fine:
+
+```
+sentry-sdk>=1.0.0,<2.0.0
+sentry-sdk==1.5.0
+```
+
+A major release `N` implies the previous release `N-1` will no longer receive updates. We generally do not backport bugfixes to older versions unless they are security relevant. However, feel free to ask for backports of specific commits on the bugtracker.

## Adding a new integration (checklist)

1. Write the integration.

-   * Instrument all application instances by default.
Prefer global signals/patches instead of configuring a specific instance. Don't make the user pass anything to your integration for anything to work. Aim for zero configuration. + - Instrument all application instances by default. Prefer global signals/patches instead of configuring a specific instance. Don't make the user pass anything to your integration for anything to work. Aim for zero configuration. - * Everybody monkeypatches. That means: + - Everybody monkeypatches. That means: - * Make sure to think about conflicts with other monkeypatches when monkeypatching. + - Make sure to think about conflicts with other monkeypatches when monkeypatching. - * You don't need to feel bad about it. + - You don't need to feel bad about it. - * Avoid modifying the hub, registering a new client or the like. The user drives the client, and the client owns integrations. + - Avoid modifying the hub, registering a new client or the like. The user drives the client, and the client owns integrations. - * Allow the user to disable the integration by changing the client. Check `Hub.current.get_integration(MyIntegration)` from within your signal handlers to see if your integration is still active before you do anything impactful (such as sending an event). + - Allow the user to disable the integration by changing the client. Check `Hub.current.get_integration(MyIntegration)` from within your signal handlers to see if your integration is still active before you do anything impactful (such as sending an event). 2. Write tests. - * Think about the minimum versions supported, and test each version in a separate env in `tox.ini`. + - Think about the minimum versions supported, and test each version in a separate env in `tox.ini`. - * Create a new folder in `tests/integrations/`, with an `__init__` file that skips the entire suite if the package is not installed. + - Create a new folder in `tests/integrations/`, with an `__init__` file that skips the entire suite if the package is not installed. 3. Update package metadata. - * We use `extras_require` in `setup.py` to communicate minimum version requirements for integrations. People can use this in combination with tools like Poetry or Pipenv to detect conflicts between our supported versions and their used versions programmatically. + - We use `extras_require` in `setup.py` to communicate minimum version requirements for integrations. People can use this in combination with tools like Poetry or Pipenv to detect conflicts between our supported versions and their used versions programmatically. - Do not set upper-bounds on version requirements as people are often faster in adopting new versions of a web framework than we are in adding them to the test matrix or our package metadata. + Do not set upper-bounds on version requirements as people are often faster in adopting new versions of a web framework than we are in adding them to the test matrix or our package metadata. 4. Write the [docs](https://github.com/getsentry/sentry-docs). Answer the following questions: - * What does your integration do? Split in two sections: Executive summary at top and exact behavior further down. + - What does your integration do? Split in two sections: Executive summary at top and exact behavior further down. - * Which version of the SDK supports which versions of the modules it hooks into? + - Which version of the SDK supports which versions of the modules it hooks into? - * One code example with basic setup. + - One code example with basic setup. 
- * Make sure to add integration page to `python/index.md` (people forget to do that all the time). + - Make sure to add integration page to `python/index.md` (people forget to do that all the time). - Tip: Put most relevant parts wrapped in `..` tags for usage from within the Sentry UI. +Tip: Put most relevant parts wrapped in `..` tags for usage from within the Sentry UI. 5. Merge docs after new version has been released (auto-deploys on merge). 6. (optional) Update data in [`sdk_updates.py`](https://github.com/getsentry/sentry/blob/master/src/sentry/sdk_updates.py) to give users in-app suggestions to use your integration. May not be applicable or doable for all kinds of integrations. + +## Commit message format guidelines + +See the documentation on commit messages here: + +https://develop.sentry.dev/commit-messages/#commit-message-format diff --git a/Makefile b/Makefile index d5dd833951..339a68c069 100644 --- a/Makefile +++ b/Makefile @@ -9,6 +9,7 @@ help: @echo "make test: Run basic tests (not testing most integrations)" @echo "make test-all: Run ALL tests (slow, closest to CI)" @echo "make format: Run code formatters (destructive)" + @echo "make aws-lambda-layer: Build AWS Lambda layer directory for serverless integration" @echo @echo "Also make sure to read ./CONTRIBUTING.md" @false @@ -18,9 +19,8 @@ help: $(VENV_PATH)/bin/pip install tox dist: .venv - rm -rf dist build + rm -rf dist dist-serverless build $(VENV_PATH)/bin/python setup.py sdist bdist_wheel - .PHONY: dist format: .venv @@ -29,7 +29,7 @@ format: .venv .PHONY: format test: .venv - @$(VENV_PATH)/bin/tox -e py2.7,py3.7 + @$(VENV_PATH)/bin/tox -e py3.9 .PHONY: test test-all: .venv @@ -45,7 +45,6 @@ lint: .venv echo "Bad formatting? Run: make format"; \ echo "================================"; \ false) - .PHONY: lint apidocs: .venv @@ -59,17 +58,8 @@ apidocs-hotfix: apidocs @$(VENV_PATH)/bin/ghp-import -pf docs/_build .PHONY: apidocs-hotfix -install-zeus-cli: - npm install -g @zeus-ci/cli -.PHONY: install-zeus-cli - -travis-upload-docs: apidocs install-zeus-cli - cd docs/_build && zip -r gh-pages ./ - zeus upload -t "application/zip+docs" docs/_build/gh-pages.zip \ - || [[ ! "$(TRAVIS_BRANCH)" =~ ^release/ ]] -.PHONY: travis-upload-docs - -travis-upload-dist: dist install-zeus-cli - zeus upload -t "application/zip+wheel" dist/* \ - || [[ ! "$(TRAVIS_BRANCH)" =~ ^release/ ]] -.PHONY: travis-upload-dist +aws-lambda-layer: dist + $(VENV_PATH)/bin/pip install urllib3 + $(VENV_PATH)/bin/pip install certifi + $(VENV_PATH)/bin/python -m scripts.build_aws_lambda_layer +.PHONY: aws-lambda-layer diff --git a/README.md b/README.md index 0c845d601d..597ed852bb 100644 --- a/README.md +++ b/README.md @@ -1,42 +1,107 @@

-(old README header logo images)
+(new centered "Sentry" logo image)

-# sentry-python - Sentry SDK for Python
+_Bad software is everywhere, and we're tired of it. Sentry is on a mission to help developers write better software faster, so we can get back to enjoying technology. If you want to join us, [**check out our open positions**](https://sentry.io/careers/)._

-[![Build Status](https://travis-ci.com/getsentry/sentry-python.svg?branch=master)](https://travis-ci.com/getsentry/sentry-python)
+# Official Sentry SDK for Python
+
+[![Build Status](https://github.com/getsentry/sentry-python/actions/workflows/ci.yml/badge.svg)](https://github.com/getsentry/sentry-python/actions/workflows/ci.yml)
[![PyPi page link -- version](https://img.shields.io/pypi/v/sentry-sdk.svg)](https://pypi.python.org/pypi/sentry-sdk)
[![Discord](https://img.shields.io/discord/621778831602221064)](https://discord.gg/cWnMQeA)

-This is the next line of the Python SDK for [Sentry](http://sentry.io/), intended to replace the `raven` package on PyPI.
+This is the official Python SDK for [Sentry](http://sentry.io/).
+
+---
+
+## Getting Started
+
+### Install
+
+```bash
+pip install --upgrade sentry-sdk
+```
+
+### Configuration

```python
-from sentry_sdk import init, capture_message
+import sentry_sdk
+
+sentry_sdk.init(
+    "https://12927b5f211046b575ee51fd8b1ac34f@o1.ingest.sentry.io/1",
+
+    # Set traces_sample_rate to 1.0 to capture 100%
+    # of transactions for performance monitoring.
+    # We recommend adjusting this value in production.
+    traces_sample_rate=1.0,
+)
+```

-init("https://mydsn@sentry.io/123")
+### Usage

-capture_message("Hello World") # Will create an event.
+```python
+from sentry_sdk import capture_message
+capture_message("Hello World") # Will create an event in Sentry.

-raise ValueError() # Will also create an event.
+raise ValueError() # Will also create an event in Sentry.
+```

-To learn more about how to use the SDK:
+- To learn more about how to use the SDK, [refer to our docs](https://docs.sentry.io/platforms/python/)
+- Are you coming from raven-python? [Use this migration guide](https://docs.sentry.io/platforms/python/migration/)
+- To learn about internals, use the [API Reference](https://getsentry.github.io/sentry-python/)
+
+## Integrations
+
+(If you want to create a new integration, have a look at the [Adding a new integration checklist](CONTRIBUTING.md#adding-a-new-integration-checklist).)
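+
+Many of the integrations listed below enable themselves automatically when the corresponding package is installed; they can also be passed to `init` explicitly. A minimal sketch (Flask integration, placeholder DSN):
+
+```python
+import sentry_sdk
+from sentry_sdk.integrations.flask import FlaskIntegration
+
+sentry_sdk.init(
+    dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder DSN
+    integrations=[FlaskIntegration()],
+)
+```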
+
+- [Django](https://docs.sentry.io/platforms/python/guides/django/)
+- [Flask](https://docs.sentry.io/platforms/python/guides/flask/)
+- [Bottle](https://docs.sentry.io/platforms/python/guides/bottle/)
+- [AWS Lambda](https://docs.sentry.io/platforms/python/guides/aws-lambda/)
+- [Google Cloud Functions](https://docs.sentry.io/platforms/python/guides/gcp-functions/)
+- [WSGI](https://docs.sentry.io/platforms/python/guides/wsgi/)
+- [ASGI](https://docs.sentry.io/platforms/python/guides/asgi/)
+- [Starlette](https://docs.sentry.io/platforms/python/guides/starlette/)
+- [FastAPI](https://docs.sentry.io/platforms/python/guides/fastapi/)
+- [AIOHTTP](https://docs.sentry.io/platforms/python/guides/aiohttp/)
+- [RQ (Redis Queue)](https://docs.sentry.io/platforms/python/guides/rq/)
+- [Celery](https://docs.sentry.io/platforms/python/guides/celery/)
+- [Chalice](https://docs.sentry.io/platforms/python/guides/chalice/)
+- [Falcon](https://docs.sentry.io/platforms/python/guides/falcon/)
+- [Quart](https://docs.sentry.io/platforms/python/guides/quart/)
+- [Sanic](https://docs.sentry.io/platforms/python/guides/sanic/)
+- [Tornado](https://docs.sentry.io/platforms/python/guides/tornado/)
+- [Tryton](https://docs.sentry.io/platforms/python/guides/tryton/)
+- [Pyramid](https://docs.sentry.io/platforms/python/guides/pyramid/)
+- [Logging](https://docs.sentry.io/platforms/python/guides/logging/)
+- [Apache Airflow](https://docs.sentry.io/platforms/python/guides/airflow/)
+- [Apache Beam](https://docs.sentry.io/platforms/python/guides/beam/)
+- [Apache Spark](https://docs.sentry.io/platforms/python/guides/pyspark/)
+
+## Migrating From `raven-python`
+
+The old `raven-python` client has entered maintenance mode and was moved [here](https://github.com/getsentry/raven-python).
+
+If you're using `raven-python`, we recommend migrating to this new SDK. You can find the benefits of migrating and how to do it in our [migration guide](https://docs.sentry.io/platforms/python/migration/).
+
+## Contributing to the SDK

-- [Getting started with the new SDK](https://docs.sentry.io/quickstart/?platform=python)
-- [Configuration options](https://docs.sentry.io/error-reporting/configuration/?platform=python)
-- [Setting context (tags, user, extra information)](https://docs.sentry.io/enriching-error-data/context/?platform=python)
-- [Integrations](https://docs.sentry.io/platforms/python/)
+Please refer to [CONTRIBUTING.md](CONTRIBUTING.md).

-Are you coming from raven-python?
+## Getting help/support

-- [Cheatsheet: Migrating to the new SDK from Raven](https://forum.sentry.io/t/switching-to-sentry-python/4733)
+If you need help setting up or configuring the Python SDK (or anything else in the Sentry universe), please head over to the [Sentry Community on Discord](https://discord.com/invite/Ww9hbqr). There are a ton of great people in our Discord community ready to help you!
-To learn about internals: +## Resources -- [API Reference](https://getsentry.github.io/sentry-python/) +- [![Documentation](https://img.shields.io/badge/documentation-sentry.io-green.svg)](https://docs.sentry.io/quickstart/) +- [![Forum](https://img.shields.io/badge/forum-sentry-green.svg)](https://forum.sentry.io/c/sdks) +- [![Discord](https://img.shields.io/discord/621778831602221064)](https://discord.gg/Ww9hbqr) +- [![Stack Overflow](https://img.shields.io/badge/stack%20overflow-sentry-green.svg)](http://stackoverflow.com/questions/tagged/sentry) +- [![Twitter Follow](https://img.shields.io/twitter/follow/getsentry?label=getsentry&style=social)](https://twitter.com/intent/follow?screen_name=getsentry) -# License +## License -Licensed under the BSD license, see `LICENSE` +Licensed under the BSD license, see [`LICENSE`](LICENSE) diff --git a/checkouts/data-schemas b/checkouts/data-schemas new file mode 160000 index 0000000000..0ed3357a07 --- /dev/null +++ b/checkouts/data-schemas @@ -0,0 +1 @@ +Subproject commit 0ed3357a07083bf762f7878132bb3fa6645d99d1 diff --git a/codecov.yml b/codecov.yml index c153fe0542..5d2dcbd0c7 100644 --- a/codecov.yml +++ b/codecov.yml @@ -4,5 +4,9 @@ coverage: default: false patch: default: false - + python: + target: 65% comment: false +ignore: + - "tests" + - "sentry_sdk/_types.py" diff --git a/docs-requirements.txt b/docs-requirements.txt index d9bb629201..1842226f8b 100644 --- a/docs-requirements.txt +++ b/docs-requirements.txt @@ -1,4 +1,4 @@ -sphinx==3.0.3 +sphinx==5.3.0 sphinx-rtd-theme sphinx-autodoc-typehints[type_comments]>=1.8.0 typing-extensions diff --git a/docs/conf.py b/docs/conf.py index 0b12b616b8..0bb09bffa0 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -5,6 +5,13 @@ import typing +# prevent circular imports +import sphinx.builders.html +import sphinx.builders.latex +import sphinx.builders.texinfo +import sphinx.builders.text +import sphinx.ext.autodoc + typing.TYPE_CHECKING = True # @@ -18,11 +25,11 @@ # -- Project information ----------------------------------------------------- -project = u"sentry-python" -copyright = u"2019, Sentry Team and Contributors" -author = u"Sentry Team and Contributors" +project = "sentry-python" +copyright = "2019, Sentry Team and Contributors" +author = "Sentry Team and Contributors" -release = "0.14.4" +release = "1.14.0" version = ".".join(release.split(".")[:2]) # The short X.Y version. @@ -60,12 +67,12 @@ # # This is also used if you do content translation via gettext catalogs. # Usually you set "language" from the command line for these cases. -language = None +language = "en" # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. # This pattern also affects html_static_path and html_extra_path. -exclude_patterns = [u"_build", "Thumbs.db", ".DS_Store"] +exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"] # The name of the Pygments (syntax highlighting) style to use. pygments_style = None @@ -82,7 +89,7 @@ html_theme = "alabaster" # Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the +# further. For a list of options available for each theme, see the # documentation. # # html_theme_options = {} @@ -96,7 +103,7 @@ # to template names. # # The default sidebars (for documents that don't match any pattern) are -# defined by theme itself. Builtin themes are using these templates by +# defined by theme itself. 
Builtin themes are using these templates by # default: ``['localtoc.html', 'relations.html', 'sourcelink.html', # 'searchbox.html']``. # @@ -133,8 +140,8 @@ ( master_doc, "sentry-python.tex", - u"sentry-python Documentation", - u"Sentry Team and Contributors", + "sentry-python Documentation", + "Sentry Team and Contributors", "manual", ) ] @@ -144,7 +151,7 @@ # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). -man_pages = [(master_doc, "sentry-python", u"sentry-python Documentation", [author], 1)] +man_pages = [(master_doc, "sentry-python", "sentry-python Documentation", [author], 1)] # -- Options for Texinfo output ---------------------------------------------- @@ -156,7 +163,7 @@ ( master_doc, "sentry-python", - u"sentry-python Documentation", + "sentry-python Documentation", author, "sentry-python", "One line description of project.", diff --git a/examples/basic.py b/examples/basic.py deleted file mode 100644 index e6d928bbed..0000000000 --- a/examples/basic.py +++ /dev/null @@ -1,35 +0,0 @@ -import sentry_sdk -from sentry_sdk.integrations.excepthook import ExcepthookIntegration -from sentry_sdk.integrations.atexit import AtexitIntegration -from sentry_sdk.integrations.dedupe import DedupeIntegration -from sentry_sdk.integrations.stdlib import StdlibIntegration - - -sentry_sdk.init( - dsn="https://@sentry.io/", - default_integrations=False, - integrations=[ - ExcepthookIntegration(), - AtexitIntegration(), - DedupeIntegration(), - StdlibIntegration(), - ], - environment="Production", - release="1.0.0", - send_default_pii=False, - max_breadcrumbs=5, -) - -with sentry_sdk.push_scope() as scope: - scope.user = {"email": "john.doe@example.com"} - scope.set_tag("page_locale", "de-at") - scope.set_extra("request", {"id": "d5cf8a0fd85c494b9c6453c4fba8ab17"}) - scope.level = "warning" - sentry_sdk.capture_message("Something went wrong!") - -sentry_sdk.add_breadcrumb(category="auth", message="Authenticated user", level="info") - -try: - 1 / 0 -except Exception as e: - sentry_sdk.capture_exception(e) diff --git a/examples/tracing/README.md b/examples/tracing/README.md deleted file mode 100644 index ae7b79724a..0000000000 --- a/examples/tracing/README.md +++ /dev/null @@ -1,14 +0,0 @@ -To run this app: - -1. Have a Redis on the Redis default port (if you have Sentry running locally, - you probably already have this) -2. `pip install sentry-sdk flask rq` -3. `FLASK_APP=tracing flask run` -4. `FLASK_APP=tracing flask worker` -5. Go to `http://localhost:5000/` and enter a base64-encoded string (one is prefilled) -6. Hit submit, wait for heavy computation to end -7. `cat events | python traceviewer.py | dot -T svg > events.svg` -8. `open events.svg` - -The last two steps are for viewing the traces. Nothing gets sent to Sentry -right now because Sentry does not deal with this data yet. 
diff --git a/examples/tracing/events b/examples/tracing/events deleted file mode 100644 index f68ae2b8c2..0000000000 --- a/examples/tracing/events +++ /dev/null @@ -1,10 +0,0 @@ -{"start_timestamp": "2019-06-14T14:01:38Z", "transaction": "index", "server_name": "apfeltasche.local", "extra": {"sys.argv": ["/Users/untitaker/projects/sentry-python/.venv/bin/flask", "run", "--reload"]}, "contexts": {"trace": {"trace_id": "a0fa8803753e40fd8124b21eeb2986b5", "span_id": "968cff94913ebb07"}}, "timestamp": "2019-06-14T14:01:38Z", "modules": {"more-itertools": "5.0.0", "six": "1.12.0", "funcsigs": "1.0.2", "vine": "1.2.0", "tqdm": "4.31.1", "configparser": "3.7.4", "py-cpuinfo": "5.0.0", "pygments": "2.3.1", "attrs": "19.1.0", "pip": "19.0.3", "blinker": "1.4", "parso": "0.4.0", "django": "1.11.20", "click": "7.0", "requests-toolbelt": "0.9.1", "virtualenv": "16.4.3", "autoflake": "1.3", "tox": "3.7.0", "statistics": "1.0.3.5", "rq": "1.0", "flask": "1.0.2", "pkginfo": "1.5.0.1", "py": "1.8.0", "redis": "3.2.1", "celery": "4.2.1", "docutils": "0.14", "jedi": "0.13.3", "pytest": "4.4.1", "kombu": "4.4.0", "werkzeug": "0.14.1", "webencodings": "0.5.1", "toml": "0.10.0", "itsdangerous": "1.1.0", "certifi": "2019.3.9", "readme-renderer": "24.0", "wheel": "0.33.1", "pathlib2": "2.3.3", "python": "2.7.15", "urllib3": "1.24.1", "sentry-sdk": "0.9.0", "twine": "1.13.0", "pytest-benchmark": "3.2.2", "markupsafe": "1.1.1", "billiard": "3.5.0.5", "jinja2": "2.10", "coverage": "4.5.3", "bleach": "3.1.0", "pluggy": "0.9.0", "atomicwrites": "1.3.0", "filelock": "3.0.10", "pyflakes": "2.1.1", "pytz": "2018.9", "futures": "3.2.0", "pytest-cov": "2.7.1", "backports.functools-lru-cache": "1.5", "wsgiref": "0.1.2", "python-jsonrpc-server": "0.1.2", "python-language-server": "0.26.1", "future": "0.17.1", "chardet": "3.0.4", "amqp": "2.4.2", "setuptools": "40.8.0", "requests": "2.21.0", "idna": "2.8", "scandir": "1.10.0"}, "request": {"url": "http://127.0.0.1:5000/", "query_string": "", "method": "GET", "env": {"SERVER_NAME": "127.0.0.1", "SERVER_PORT": "5000"}, "headers": {"Accept-Language": "en-US,en;q=0.5", "Accept-Encoding": "gzip, deflate", "Host": "127.0.0.1:5000", "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", "Upgrade-Insecure-Requests": "1", "Connection": "keep-alive", "Pragma": "no-cache", "Cache-Control": "no-cache", "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.14; rv:67.0) Gecko/20100101 Firefox/67.0"}}, "event_id": "f9f4b21dd9da4c389426c1ffd2b62410", "platform": "python", "spans": [], "breadcrumbs": [], "type": "transaction", "sdk": {"version": "0.9.0", "name": "sentry.python", "packages": [{"version": "0.9.0", "name": "pypi:sentry-sdk"}], "integrations": ["argv", "atexit", "dedupe", "excepthook", "flask", "logging", "modules", "rq", "stdlib", "threading"]}} -{"start_timestamp": "2019-06-14T14:01:38Z", "transaction": "static", "server_name": "apfeltasche.local", "extra": {"sys.argv": ["/Users/untitaker/projects/sentry-python/.venv/bin/flask", "run", "--reload"]}, "contexts": {"trace": {"trace_id": "8eb30d5ae5f3403ba3a036e696111ec3", "span_id": "97e894108ff7a8cd"}}, "timestamp": "2019-06-14T14:01:38Z", "modules": {"more-itertools": "5.0.0", "six": "1.12.0", "funcsigs": "1.0.2", "vine": "1.2.0", "tqdm": "4.31.1", "configparser": "3.7.4", "py-cpuinfo": "5.0.0", "pygments": "2.3.1", "attrs": "19.1.0", "pip": "19.0.3", "blinker": "1.4", "parso": "0.4.0", "django": "1.11.20", "click": "7.0", "requests-toolbelt": "0.9.1", "virtualenv": "16.4.3", "autoflake": "1.3", "tox": 
"3.7.0", "statistics": "1.0.3.5", "rq": "1.0", "flask": "1.0.2", "pkginfo": "1.5.0.1", "py": "1.8.0", "redis": "3.2.1", "celery": "4.2.1", "docutils": "0.14", "jedi": "0.13.3", "pytest": "4.4.1", "kombu": "4.4.0", "werkzeug": "0.14.1", "webencodings": "0.5.1", "toml": "0.10.0", "itsdangerous": "1.1.0", "certifi": "2019.3.9", "readme-renderer": "24.0", "wheel": "0.33.1", "pathlib2": "2.3.3", "python": "2.7.15", "urllib3": "1.24.1", "sentry-sdk": "0.9.0", "twine": "1.13.0", "pytest-benchmark": "3.2.2", "markupsafe": "1.1.1", "billiard": "3.5.0.5", "jinja2": "2.10", "coverage": "4.5.3", "bleach": "3.1.0", "pluggy": "0.9.0", "atomicwrites": "1.3.0", "filelock": "3.0.10", "pyflakes": "2.1.1", "pytz": "2018.9", "futures": "3.2.0", "pytest-cov": "2.7.1", "backports.functools-lru-cache": "1.5", "wsgiref": "0.1.2", "python-jsonrpc-server": "0.1.2", "python-language-server": "0.26.1", "future": "0.17.1", "chardet": "3.0.4", "amqp": "2.4.2", "setuptools": "40.8.0", "requests": "2.21.0", "idna": "2.8", "scandir": "1.10.0"}, "request": {"url": "http://127.0.0.1:5000/static/tracing.js", "query_string": "", "method": "GET", "env": {"SERVER_NAME": "127.0.0.1", "SERVER_PORT": "5000"}, "headers": {"Accept-Language": "en-US,en;q=0.5", "Accept-Encoding": "gzip, deflate", "Host": "127.0.0.1:5000", "Accept": "*/*", "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.14; rv:67.0) Gecko/20100101 Firefox/67.0", "Connection": "keep-alive", "Referer": "http://127.0.0.1:5000/", "Pragma": "no-cache", "Cache-Control": "no-cache"}}, "event_id": "1c71c7cb32934550bb49f05b6c2d4052", "platform": "python", "spans": [], "breadcrumbs": [], "type": "transaction", "sdk": {"version": "0.9.0", "name": "sentry.python", "packages": [{"version": "0.9.0", "name": "pypi:sentry-sdk"}], "integrations": ["argv", "atexit", "dedupe", "excepthook", "flask", "logging", "modules", "rq", "stdlib", "threading"]}} -{"start_timestamp": "2019-06-14T14:01:38Z", "transaction": "index", "server_name": "apfeltasche.local", "extra": {"sys.argv": ["/Users/untitaker/projects/sentry-python/.venv/bin/flask", "run", "--reload"]}, "contexts": {"trace": {"trace_id": "b7627895a90b41718be82d3ad21ab2f4", "span_id": "9fa95b4ffdcbe177"}}, "timestamp": "2019-06-14T14:01:38Z", "modules": {"more-itertools": "5.0.0", "six": "1.12.0", "funcsigs": "1.0.2", "vine": "1.2.0", "tqdm": "4.31.1", "configparser": "3.7.4", "py-cpuinfo": "5.0.0", "pygments": "2.3.1", "attrs": "19.1.0", "pip": "19.0.3", "blinker": "1.4", "parso": "0.4.0", "django": "1.11.20", "click": "7.0", "requests-toolbelt": "0.9.1", "virtualenv": "16.4.3", "autoflake": "1.3", "tox": "3.7.0", "statistics": "1.0.3.5", "rq": "1.0", "flask": "1.0.2", "pkginfo": "1.5.0.1", "py": "1.8.0", "redis": "3.2.1", "celery": "4.2.1", "docutils": "0.14", "jedi": "0.13.3", "pytest": "4.4.1", "kombu": "4.4.0", "werkzeug": "0.14.1", "webencodings": "0.5.1", "toml": "0.10.0", "itsdangerous": "1.1.0", "certifi": "2019.3.9", "readme-renderer": "24.0", "wheel": "0.33.1", "pathlib2": "2.3.3", "python": "2.7.15", "urllib3": "1.24.1", "sentry-sdk": "0.9.0", "twine": "1.13.0", "pytest-benchmark": "3.2.2", "markupsafe": "1.1.1", "billiard": "3.5.0.5", "jinja2": "2.10", "coverage": "4.5.3", "bleach": "3.1.0", "pluggy": "0.9.0", "atomicwrites": "1.3.0", "filelock": "3.0.10", "pyflakes": "2.1.1", "pytz": "2018.9", "futures": "3.2.0", "pytest-cov": "2.7.1", "backports.functools-lru-cache": "1.5", "wsgiref": "0.1.2", "python-jsonrpc-server": "0.1.2", "python-language-server": "0.26.1", "future": "0.17.1", "chardet": "3.0.4", "amqp": 
"2.4.2", "setuptools": "40.8.0", "requests": "2.21.0", "idna": "2.8", "scandir": "1.10.0"}, "request": {"url": "http://127.0.0.1:5000/", "query_string": "", "method": "GET", "env": {"SERVER_NAME": "127.0.0.1", "SERVER_PORT": "5000"}, "headers": {"Accept-Language": "en-US,en;q=0.5", "Accept-Encoding": "gzip, deflate", "Host": "127.0.0.1:5000", "Accept": "*/*", "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.14; rv:67.0) Gecko/20100101 Firefox/67.0", "Connection": "keep-alive", "Pragma": "no-cache", "Cache-Control": "no-cache"}}, "event_id": "1430ad5b0a0d45dca3f02c10271628f9", "platform": "python", "spans": [], "breadcrumbs": [], "type": "transaction", "sdk": {"version": "0.9.0", "name": "sentry.python", "packages": [{"version": "0.9.0", "name": "pypi:sentry-sdk"}], "integrations": ["argv", "atexit", "dedupe", "excepthook", "flask", "logging", "modules", "rq", "stdlib", "threading"]}} -{"start_timestamp": "2019-06-14T14:01:38Z", "transaction": "static", "server_name": "apfeltasche.local", "extra": {"sys.argv": ["/Users/untitaker/projects/sentry-python/.venv/bin/flask", "run", "--reload"]}, "contexts": {"trace": {"trace_id": "1636fdb33db84e7c9a4e606c1b176971", "span_id": "b682a29ead55075f"}}, "timestamp": "2019-06-14T14:01:38Z", "modules": {"more-itertools": "5.0.0", "six": "1.12.0", "funcsigs": "1.0.2", "vine": "1.2.0", "tqdm": "4.31.1", "configparser": "3.7.4", "py-cpuinfo": "5.0.0", "pygments": "2.3.1", "attrs": "19.1.0", "pip": "19.0.3", "blinker": "1.4", "parso": "0.4.0", "django": "1.11.20", "click": "7.0", "requests-toolbelt": "0.9.1", "virtualenv": "16.4.3", "autoflake": "1.3", "tox": "3.7.0", "statistics": "1.0.3.5", "rq": "1.0", "flask": "1.0.2", "pkginfo": "1.5.0.1", "py": "1.8.0", "redis": "3.2.1", "celery": "4.2.1", "docutils": "0.14", "jedi": "0.13.3", "pytest": "4.4.1", "kombu": "4.4.0", "werkzeug": "0.14.1", "webencodings": "0.5.1", "toml": "0.10.0", "itsdangerous": "1.1.0", "certifi": "2019.3.9", "readme-renderer": "24.0", "wheel": "0.33.1", "pathlib2": "2.3.3", "python": "2.7.15", "urllib3": "1.24.1", "sentry-sdk": "0.9.0", "twine": "1.13.0", "pytest-benchmark": "3.2.2", "markupsafe": "1.1.1", "billiard": "3.5.0.5", "jinja2": "2.10", "coverage": "4.5.3", "bleach": "3.1.0", "pluggy": "0.9.0", "atomicwrites": "1.3.0", "filelock": "3.0.10", "pyflakes": "2.1.1", "pytz": "2018.9", "futures": "3.2.0", "pytest-cov": "2.7.1", "backports.functools-lru-cache": "1.5", "wsgiref": "0.1.2", "python-jsonrpc-server": "0.1.2", "python-language-server": "0.26.1", "future": "0.17.1", "chardet": "3.0.4", "amqp": "2.4.2", "setuptools": "40.8.0", "requests": "2.21.0", "idna": "2.8", "scandir": "1.10.0"}, "request": {"url": "http://127.0.0.1:5000/static/tracing.js.map", "query_string": "", "method": "GET", "env": {"SERVER_NAME": "127.0.0.1", "SERVER_PORT": "5000"}, "headers": {"Accept-Language": "en-US,en;q=0.5", "Accept-Encoding": "gzip, deflate", "Host": "127.0.0.1:5000", "Accept": "*/*", "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.14; rv:67.0) Gecko/20100101 Firefox/67.0", "Connection": "keep-alive"}}, "event_id": "72b1224307294e0fb6d6b1958076c4cc", "platform": "python", "spans": [], "breadcrumbs": [], "type": "transaction", "sdk": {"version": "0.9.0", "name": "sentry.python", "packages": [{"version": "0.9.0", "name": "pypi:sentry-sdk"}], "integrations": ["argv", "atexit", "dedupe", "excepthook", "flask", "logging", "modules", "rq", "stdlib", "threading"]}} -{"start_timestamp": "2019-06-14T14:01:40Z", "transaction": "compute", "server_name": "apfeltasche.local", "extra": 
{"sys.argv": ["/Users/untitaker/projects/sentry-python/.venv/bin/flask", "run", "--reload"]}, "contexts": {"trace": {"parent_span_id": "bce14471e0e9654d", "trace_id": "a0fa8803753e40fd8124b21eeb2986b5", "span_id": "946edde6ee421874"}}, "timestamp": "2019-06-14T14:01:40Z", "modules": {"more-itertools": "5.0.0", "six": "1.12.0", "funcsigs": "1.0.2", "vine": "1.2.0", "tqdm": "4.31.1", "configparser": "3.7.4", "py-cpuinfo": "5.0.0", "pygments": "2.3.1", "attrs": "19.1.0", "pip": "19.0.3", "blinker": "1.4", "parso": "0.4.0", "django": "1.11.20", "click": "7.0", "requests-toolbelt": "0.9.1", "virtualenv": "16.4.3", "autoflake": "1.3", "tox": "3.7.0", "statistics": "1.0.3.5", "rq": "1.0", "flask": "1.0.2", "pkginfo": "1.5.0.1", "py": "1.8.0", "redis": "3.2.1", "celery": "4.2.1", "docutils": "0.14", "jedi": "0.13.3", "pytest": "4.4.1", "kombu": "4.4.0", "werkzeug": "0.14.1", "webencodings": "0.5.1", "toml": "0.10.0", "itsdangerous": "1.1.0", "certifi": "2019.3.9", "readme-renderer": "24.0", "wheel": "0.33.1", "pathlib2": "2.3.3", "python": "2.7.15", "urllib3": "1.24.1", "sentry-sdk": "0.9.0", "twine": "1.13.0", "pytest-benchmark": "3.2.2", "markupsafe": "1.1.1", "billiard": "3.5.0.5", "jinja2": "2.10", "coverage": "4.5.3", "bleach": "3.1.0", "pluggy": "0.9.0", "atomicwrites": "1.3.0", "filelock": "3.0.10", "pyflakes": "2.1.1", "pytz": "2018.9", "futures": "3.2.0", "pytest-cov": "2.7.1", "backports.functools-lru-cache": "1.5", "wsgiref": "0.1.2", "python-jsonrpc-server": "0.1.2", "python-language-server": "0.26.1", "future": "0.17.1", "chardet": "3.0.4", "amqp": "2.4.2", "setuptools": "40.8.0", "requests": "2.21.0", "idna": "2.8", "scandir": "1.10.0"}, "request": {"url": "http://127.0.0.1:5000/compute/aGVsbG8gd29ybGQK", "query_string": "", "method": "GET", "env": {"SERVER_NAME": "127.0.0.1", "SERVER_PORT": "5000"}, "headers": {"Accept-Language": "en-US,en;q=0.5", "Accept-Encoding": "gzip, deflate", "Host": "127.0.0.1:5000", "Accept": "*/*", "Sentry-Trace": "00-a0fa8803753e40fd8124b21eeb2986b5-bce14471e0e9654d-00", "Connection": "keep-alive", "Referer": "http://127.0.0.1:5000/", "Pragma": "no-cache", "Cache-Control": "no-cache", "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.14; rv:67.0) Gecko/20100101 Firefox/67.0"}}, "event_id": "c72fd945c1174140a00bdbf6f6ed8fc5", "platform": "python", "spans": [], "breadcrumbs": [], "type": "transaction", "sdk": {"version": "0.9.0", "name": "sentry.python", "packages": [{"version": "0.9.0", "name": "pypi:sentry-sdk"}], "integrations": ["argv", "atexit", "dedupe", "excepthook", "flask", "logging", "modules", "rq", "stdlib", "threading"]}} -{"start_timestamp": "2019-06-14T14:01:40Z", "transaction": "wait", "server_name": "apfeltasche.local", "extra": {"sys.argv": ["/Users/untitaker/projects/sentry-python/.venv/bin/flask", "run", "--reload"]}, "contexts": {"trace": {"parent_span_id": "bce14471e0e9654d", "trace_id": "a0fa8803753e40fd8124b21eeb2986b5", "span_id": "bf5be759039ede9a"}}, "timestamp": "2019-06-14T14:01:40Z", "modules": {"more-itertools": "5.0.0", "six": "1.12.0", "funcsigs": "1.0.2", "vine": "1.2.0", "tqdm": "4.31.1", "configparser": "3.7.4", "py-cpuinfo": "5.0.0", "pygments": "2.3.1", "attrs": "19.1.0", "pip": "19.0.3", "blinker": "1.4", "parso": "0.4.0", "django": "1.11.20", "click": "7.0", "requests-toolbelt": "0.9.1", "virtualenv": "16.4.3", "autoflake": "1.3", "tox": "3.7.0", "statistics": "1.0.3.5", "rq": "1.0", "flask": "1.0.2", "pkginfo": "1.5.0.1", "py": "1.8.0", "redis": "3.2.1", "celery": "4.2.1", "docutils": "0.14", "jedi": "0.13.3", 
"pytest": "4.4.1", "kombu": "4.4.0", "werkzeug": "0.14.1", "webencodings": "0.5.1", "toml": "0.10.0", "itsdangerous": "1.1.0", "certifi": "2019.3.9", "readme-renderer": "24.0", "wheel": "0.33.1", "pathlib2": "2.3.3", "python": "2.7.15", "urllib3": "1.24.1", "sentry-sdk": "0.9.0", "twine": "1.13.0", "pytest-benchmark": "3.2.2", "markupsafe": "1.1.1", "billiard": "3.5.0.5", "jinja2": "2.10", "coverage": "4.5.3", "bleach": "3.1.0", "pluggy": "0.9.0", "atomicwrites": "1.3.0", "filelock": "3.0.10", "pyflakes": "2.1.1", "pytz": "2018.9", "futures": "3.2.0", "pytest-cov": "2.7.1", "backports.functools-lru-cache": "1.5", "wsgiref": "0.1.2", "python-jsonrpc-server": "0.1.2", "python-language-server": "0.26.1", "future": "0.17.1", "chardet": "3.0.4", "amqp": "2.4.2", "setuptools": "40.8.0", "requests": "2.21.0", "idna": "2.8", "scandir": "1.10.0"}, "request": {"url": "http://127.0.0.1:5000/wait/sentry-python-tracing-example-result:aGVsbG8gd29ybGQK", "query_string": "", "method": "GET", "env": {"SERVER_NAME": "127.0.0.1", "SERVER_PORT": "5000"}, "headers": {"Accept-Language": "en-US,en;q=0.5", "Accept-Encoding": "gzip, deflate", "Host": "127.0.0.1:5000", "Accept": "*/*", "Sentry-Trace": "00-a0fa8803753e40fd8124b21eeb2986b5-bce14471e0e9654d-00", "Connection": "keep-alive", "Referer": "http://127.0.0.1:5000/", "Pragma": "no-cache", "Cache-Control": "no-cache", "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.14; rv:67.0) Gecko/20100101 Firefox/67.0"}}, "event_id": "e8c17b0cbe2045758aaffc2f11672fab", "platform": "python", "spans": [], "breadcrumbs": [], "type": "transaction", "sdk": {"version": "0.9.0", "name": "sentry.python", "packages": [{"version": "0.9.0", "name": "pypi:sentry-sdk"}], "integrations": ["argv", "atexit", "dedupe", "excepthook", "flask", "logging", "modules", "rq", "stdlib", "threading"]}} -{"start_timestamp": "2019-06-14T14:01:40Z", "transaction": "wait", "server_name": "apfeltasche.local", "extra": {"sys.argv": ["/Users/untitaker/projects/sentry-python/.venv/bin/flask", "run", "--reload"]}, "contexts": {"trace": {"parent_span_id": "bce14471e0e9654d", "trace_id": "a0fa8803753e40fd8124b21eeb2986b5", "span_id": "b2d56249f7fdf327"}}, "timestamp": "2019-06-14T14:01:40Z", "modules": {"more-itertools": "5.0.0", "six": "1.12.0", "funcsigs": "1.0.2", "vine": "1.2.0", "tqdm": "4.31.1", "configparser": "3.7.4", "py-cpuinfo": "5.0.0", "pygments": "2.3.1", "attrs": "19.1.0", "pip": "19.0.3", "blinker": "1.4", "parso": "0.4.0", "django": "1.11.20", "click": "7.0", "requests-toolbelt": "0.9.1", "virtualenv": "16.4.3", "autoflake": "1.3", "tox": "3.7.0", "statistics": "1.0.3.5", "rq": "1.0", "flask": "1.0.2", "pkginfo": "1.5.0.1", "py": "1.8.0", "redis": "3.2.1", "celery": "4.2.1", "docutils": "0.14", "jedi": "0.13.3", "pytest": "4.4.1", "kombu": "4.4.0", "werkzeug": "0.14.1", "webencodings": "0.5.1", "toml": "0.10.0", "itsdangerous": "1.1.0", "certifi": "2019.3.9", "readme-renderer": "24.0", "wheel": "0.33.1", "pathlib2": "2.3.3", "python": "2.7.15", "urllib3": "1.24.1", "sentry-sdk": "0.9.0", "twine": "1.13.0", "pytest-benchmark": "3.2.2", "markupsafe": "1.1.1", "billiard": "3.5.0.5", "jinja2": "2.10", "coverage": "4.5.3", "bleach": "3.1.0", "pluggy": "0.9.0", "atomicwrites": "1.3.0", "filelock": "3.0.10", "pyflakes": "2.1.1", "pytz": "2018.9", "futures": "3.2.0", "pytest-cov": "2.7.1", "backports.functools-lru-cache": "1.5", "wsgiref": "0.1.2", "python-jsonrpc-server": "0.1.2", "python-language-server": "0.26.1", "future": "0.17.1", "chardet": "3.0.4", "amqp": "2.4.2", "setuptools": 
"40.8.0", "requests": "2.21.0", "idna": "2.8", "scandir": "1.10.0"}, "request": {"url": "http://127.0.0.1:5000/wait/sentry-python-tracing-example-result:aGVsbG8gd29ybGQK", "query_string": "", "method": "GET", "env": {"SERVER_NAME": "127.0.0.1", "SERVER_PORT": "5000"}, "headers": {"Accept-Language": "en-US,en;q=0.5", "Accept-Encoding": "gzip, deflate", "Host": "127.0.0.1:5000", "Accept": "*/*", "Sentry-Trace": "00-a0fa8803753e40fd8124b21eeb2986b5-bce14471e0e9654d-00", "Connection": "keep-alive", "Referer": "http://127.0.0.1:5000/", "Pragma": "no-cache", "Cache-Control": "no-cache", "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.14; rv:67.0) Gecko/20100101 Firefox/67.0"}}, "event_id": "6577f8056383427d85df5b33bf9ccc2c", "platform": "python", "spans": [], "breadcrumbs": [], "type": "transaction", "sdk": {"version": "0.9.0", "name": "sentry.python", "packages": [{"version": "0.9.0", "name": "pypi:sentry-sdk"}], "integrations": ["argv", "atexit", "dedupe", "excepthook", "flask", "logging", "modules", "rq", "stdlib", "threading"]}} -{"start_timestamp": "2019-06-14T14:01:41Z", "transaction": "wait", "server_name": "apfeltasche.local", "extra": {"sys.argv": ["/Users/untitaker/projects/sentry-python/.venv/bin/flask", "run", "--reload"]}, "contexts": {"trace": {"parent_span_id": "bce14471e0e9654d", "trace_id": "a0fa8803753e40fd8124b21eeb2986b5", "span_id": "ac62ff8ae1b2eda6"}}, "timestamp": "2019-06-14T14:01:41Z", "modules": {"more-itertools": "5.0.0", "six": "1.12.0", "funcsigs": "1.0.2", "vine": "1.2.0", "tqdm": "4.31.1", "configparser": "3.7.4", "py-cpuinfo": "5.0.0", "pygments": "2.3.1", "attrs": "19.1.0", "pip": "19.0.3", "blinker": "1.4", "parso": "0.4.0", "django": "1.11.20", "click": "7.0", "requests-toolbelt": "0.9.1", "virtualenv": "16.4.3", "autoflake": "1.3", "tox": "3.7.0", "statistics": "1.0.3.5", "rq": "1.0", "flask": "1.0.2", "pkginfo": "1.5.0.1", "py": "1.8.0", "redis": "3.2.1", "celery": "4.2.1", "docutils": "0.14", "jedi": "0.13.3", "pytest": "4.4.1", "kombu": "4.4.0", "werkzeug": "0.14.1", "webencodings": "0.5.1", "toml": "0.10.0", "itsdangerous": "1.1.0", "certifi": "2019.3.9", "readme-renderer": "24.0", "wheel": "0.33.1", "pathlib2": "2.3.3", "python": "2.7.15", "urllib3": "1.24.1", "sentry-sdk": "0.9.0", "twine": "1.13.0", "pytest-benchmark": "3.2.2", "markupsafe": "1.1.1", "billiard": "3.5.0.5", "jinja2": "2.10", "coverage": "4.5.3", "bleach": "3.1.0", "pluggy": "0.9.0", "atomicwrites": "1.3.0", "filelock": "3.0.10", "pyflakes": "2.1.1", "pytz": "2018.9", "futures": "3.2.0", "pytest-cov": "2.7.1", "backports.functools-lru-cache": "1.5", "wsgiref": "0.1.2", "python-jsonrpc-server": "0.1.2", "python-language-server": "0.26.1", "future": "0.17.1", "chardet": "3.0.4", "amqp": "2.4.2", "setuptools": "40.8.0", "requests": "2.21.0", "idna": "2.8", "scandir": "1.10.0"}, "request": {"url": "http://127.0.0.1:5000/wait/sentry-python-tracing-example-result:aGVsbG8gd29ybGQK", "query_string": "", "method": "GET", "env": {"SERVER_NAME": "127.0.0.1", "SERVER_PORT": "5000"}, "headers": {"Accept-Language": "en-US,en;q=0.5", "Accept-Encoding": "gzip, deflate", "Host": "127.0.0.1:5000", "Accept": "*/*", "Sentry-Trace": "00-a0fa8803753e40fd8124b21eeb2986b5-bce14471e0e9654d-00", "Connection": "keep-alive", "Referer": "http://127.0.0.1:5000/", "Pragma": "no-cache", "Cache-Control": "no-cache", "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.14; rv:67.0) Gecko/20100101 Firefox/67.0"}}, "event_id": "c03dfbab8a8145eeaa0d1a1adfcfcaa5", "platform": "python", "spans": [], 
"breadcrumbs": [], "type": "transaction", "sdk": {"version": "0.9.0", "name": "sentry.python", "packages": [{"version": "0.9.0", "name": "pypi:sentry-sdk"}], "integrations": ["argv", "atexit", "dedupe", "excepthook", "flask", "logging", "modules", "rq", "stdlib", "threading"]}} -{"start_timestamp": "2019-06-14T14:01:40Z", "transaction": "tracing.decode_base64", "server_name": "apfeltasche.local", "extra": {"sys.argv": ["/Users/untitaker/projects/sentry-python/.venv/bin/flask", "worker"], "rq-job": {"kwargs": {"redis_key": "sentry-python-tracing-example-result:aGVsbG8gd29ybGQK", "encoded": "aGVsbG8gd29ybGQK"}, "args": [], "description": "tracing.decode_base64(encoded=u'aGVsbG8gd29ybGQK', redis_key='sentry-python-tracing-example-result:aGVsbG8gd29ybGQK')", "func": "tracing.decode_base64", "job_id": "fabff810-3dbb-45d3-987e-86395790dfa9"}}, "contexts": {"trace": {"parent_span_id": "946edde6ee421874", "trace_id": "a0fa8803753e40fd8124b21eeb2986b5", "span_id": "9c2a6db8c79068a2"}}, "timestamp": "2019-06-14T14:01:41Z", "modules": {"more-itertools": "5.0.0", "six": "1.12.0", "funcsigs": "1.0.2", "vine": "1.2.0", "tqdm": "4.31.1", "configparser": "3.7.4", "py-cpuinfo": "5.0.0", "pygments": "2.3.1", "attrs": "19.1.0", "pip": "19.0.3", "blinker": "1.4", "parso": "0.4.0", "django": "1.11.20", "click": "7.0", "requests-toolbelt": "0.9.1", "virtualenv": "16.4.3", "autoflake": "1.3", "tox": "3.7.0", "statistics": "1.0.3.5", "rq": "1.0", "flask": "1.0.2", "pkginfo": "1.5.0.1", "py": "1.8.0", "redis": "3.2.1", "celery": "4.2.1", "docutils": "0.14", "jedi": "0.13.3", "pytest": "4.4.1", "kombu": "4.4.0", "werkzeug": "0.14.1", "webencodings": "0.5.1", "toml": "0.10.0", "itsdangerous": "1.1.0", "certifi": "2019.3.9", "readme-renderer": "24.0", "wheel": "0.33.1", "pathlib2": "2.3.3", "python": "2.7.15", "urllib3": "1.24.1", "sentry-sdk": "0.9.0", "twine": "1.13.0", "pytest-benchmark": "3.2.2", "markupsafe": "1.1.1", "billiard": "3.5.0.5", "jinja2": "2.10", "coverage": "4.5.3", "bleach": "3.1.0", "pluggy": "0.9.0", "atomicwrites": "1.3.0", "filelock": "3.0.10", "pyflakes": "2.1.1", "pytz": "2018.9", "futures": "3.2.0", "pytest-cov": "2.7.1", "backports.functools-lru-cache": "1.5", "wsgiref": "0.1.2", "python-jsonrpc-server": "0.1.2", "python-language-server": "0.26.1", "future": "0.17.1", "chardet": "3.0.4", "amqp": "2.4.2", "setuptools": "40.8.0", "requests": "2.21.0", "idna": "2.8", "scandir": "1.10.0"}, "event_id": "2975518984734ef49d2f75db4e928ddc", "platform": "python", "spans": [{"start_timestamp": "2019-06-14T14:01:41Z", "same_process_as_parent": true, "description": "http://httpbin.org/base64/aGVsbG8gd29ybGQK GET", "tags": {"http.status_code": 200, "error": false}, "timestamp": "2019-06-14T14:01:41Z", "parent_span_id": "9c2a6db8c79068a2", "trace_id": "a0fa8803753e40fd8124b21eeb2986b5", "op": "http", "data": {"url": "http://httpbin.org/base64/aGVsbG8gd29ybGQK", "status_code": 200, "reason": "OK", "method": "GET"}, "span_id": "8c931f4740435fb8"}], "breadcrumbs": [{"category": "httplib", "data": {"url": "http://httpbin.org/base64/aGVsbG8gd29ybGQK", "status_code": 200, "reason": "OK", "method": "GET"}, "type": "http", "timestamp": "2019-06-14T12:01:41Z"}, {"category": "rq.worker", "ty": "log", "timestamp": "2019-06-14T14:01:41Z", "level": "info", "data": {"asctime": "14:01:41"}, "message": "\u001b[32mdefault\u001b[39;49;00m: \u001b[34mJob OK\u001b[39;49;00m (fabff810-3dbb-45d3-987e-86395790dfa9)", "type": "default"}, {"category": "rq.worker", "ty": "log", "timestamp": "2019-06-14T14:01:41Z", "level": 
"info", "data": {"asctime": "14:01:41"}, "message": "Result is kept for 500 seconds", "type": "default"}], "type": "transaction", "sdk": {"version": "0.9.0", "name": "sentry.python", "packages": [{"version": "0.9.0", "name": "pypi:sentry-sdk"}], "integrations": ["argv", "atexit", "dedupe", "excepthook", "flask", "logging", "modules", "rq", "stdlib", "threading"]}} -{"start_timestamp": "2019-06-14T14:01:41Z", "transaction": "wait", "server_name": "apfeltasche.local", "extra": {"sys.argv": ["/Users/untitaker/projects/sentry-python/.venv/bin/flask", "run", "--reload"]}, "contexts": {"trace": {"parent_span_id": "bce14471e0e9654d", "trace_id": "a0fa8803753e40fd8124b21eeb2986b5", "span_id": "9d91c6558b2e4c06"}}, "timestamp": "2019-06-14T14:01:41Z", "modules": {"more-itertools": "5.0.0", "six": "1.12.0", "funcsigs": "1.0.2", "vine": "1.2.0", "tqdm": "4.31.1", "configparser": "3.7.4", "py-cpuinfo": "5.0.0", "pygments": "2.3.1", "attrs": "19.1.0", "pip": "19.0.3", "blinker": "1.4", "parso": "0.4.0", "django": "1.11.20", "click": "7.0", "requests-toolbelt": "0.9.1", "virtualenv": "16.4.3", "autoflake": "1.3", "tox": "3.7.0", "statistics": "1.0.3.5", "rq": "1.0", "flask": "1.0.2", "pkginfo": "1.5.0.1", "py": "1.8.0", "redis": "3.2.1", "celery": "4.2.1", "docutils": "0.14", "jedi": "0.13.3", "pytest": "4.4.1", "kombu": "4.4.0", "werkzeug": "0.14.1", "webencodings": "0.5.1", "toml": "0.10.0", "itsdangerous": "1.1.0", "certifi": "2019.3.9", "readme-renderer": "24.0", "wheel": "0.33.1", "pathlib2": "2.3.3", "python": "2.7.15", "urllib3": "1.24.1", "sentry-sdk": "0.9.0", "twine": "1.13.0", "pytest-benchmark": "3.2.2", "markupsafe": "1.1.1", "billiard": "3.5.0.5", "jinja2": "2.10", "coverage": "4.5.3", "bleach": "3.1.0", "pluggy": "0.9.0", "atomicwrites": "1.3.0", "filelock": "3.0.10", "pyflakes": "2.1.1", "pytz": "2018.9", "futures": "3.2.0", "pytest-cov": "2.7.1", "backports.functools-lru-cache": "1.5", "wsgiref": "0.1.2", "python-jsonrpc-server": "0.1.2", "python-language-server": "0.26.1", "future": "0.17.1", "chardet": "3.0.4", "amqp": "2.4.2", "setuptools": "40.8.0", "requests": "2.21.0", "idna": "2.8", "scandir": "1.10.0"}, "request": {"url": "http://127.0.0.1:5000/wait/sentry-python-tracing-example-result:aGVsbG8gd29ybGQK", "query_string": "", "method": "GET", "env": {"SERVER_NAME": "127.0.0.1", "SERVER_PORT": "5000"}, "headers": {"Accept-Language": "en-US,en;q=0.5", "Accept-Encoding": "gzip, deflate", "Host": "127.0.0.1:5000", "Accept": "*/*", "Sentry-Trace": "00-a0fa8803753e40fd8124b21eeb2986b5-bce14471e0e9654d-00", "Connection": "keep-alive", "Referer": "http://127.0.0.1:5000/", "Pragma": "no-cache", "Cache-Control": "no-cache", "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.14; rv:67.0) Gecko/20100101 Firefox/67.0"}}, "event_id": "339cfc84adf0405986514c808afb0f68", "platform": "python", "spans": [], "breadcrumbs": [], "type": "transaction", "sdk": {"version": "0.9.0", "name": "sentry.python", "packages": [{"version": "0.9.0", "name": "pypi:sentry-sdk"}], "integrations": ["argv", "atexit", "dedupe", "excepthook", "flask", "logging", "modules", "rq", "stdlib", "threading"]}} diff --git a/examples/tracing/events.svg b/examples/tracing/events.svg deleted file mode 100644 index 33f9c98f00..0000000000 --- a/examples/tracing/events.svg +++ /dev/null @@ -1,439 +0,0 @@ - - - - - - -mytrace - - - -213977312221895837199412816265326724789 - -trace:index (a0fa8803753e40fd8124b21eeb2986b5) - - - -10848326615985732359 - -span:index (968cff94913ebb07) - - - 
-213977312221895837199412816265326724789->10848326615985732359 - - - - - -10695730148961032308 - -span:compute (946edde6ee421874) - - - -213977312221895837199412816265326724789->10695730148961032308 - - - - - -13788869053623754394 - -span:wait (bf5be759039ede9a) - - - -213977312221895837199412816265326724789->13788869053623754394 - - - - - -12886313978623292199 - -span:wait (b2d56249f7fdf327) - - - -213977312221895837199412816265326724789->12886313978623292199 - - - - - -12421771694198418854 - -span:wait (ac62ff8ae1b2eda6) - - - -213977312221895837199412816265326724789->12421771694198418854 - - - - - -10129474377767673784 - -span:http://httpbin.org/base64/aGVsbG8gd29ybGQK GET (8c931f4740435fb8) - - - -213977312221895837199412816265326724789->10129474377767673784 - - - - - -11252927259328145570 - -span:tracing.decode_base64 (9c2a6db8c79068a2) - - - -213977312221895837199412816265326724789->11252927259328145570 - - - - - -11354074206287318022 - -span:wait (9d91c6558b2e4c06) - - - -213977312221895837199412816265326724789->11354074206287318022 - - - - - -189680067412161401408211119957991300803 - -trace:static (8eb30d5ae5f3403ba3a036e696111ec3) - - - -10946161693179750605 - -span:static (97e894108ff7a8cd) - - - -189680067412161401408211119957991300803->10946161693179750605 - - - - - -243760014067241244567037757667822711540 - -trace:index (b7627895a90b41718be82d3ad21ab2f4) - - - -11504827122213183863 - -span:index (9fa95b4ffdcbe177) - - - -243760014067241244567037757667822711540->11504827122213183863 - - - - - -29528545588201242414770090507008174449 - -trace:static (1636fdb33db84e7c9a4e606c1b176971) - - - -13151252664271832927 - -span:static (b682a29ead55075f) - - - -29528545588201242414770090507008174449->13151252664271832927 - - - - - -10695730148961032308->10848326615985732359 - - - - - -10695730148961032308->10946161693179750605 - - - - - -10695730148961032308->11504827122213183863 - - - - - -10695730148961032308->13151252664271832927 - - - - - -10695730148961032308->11252927259328145570 - - - - - -13610234804785734989 - -13610234804785734989 - - - -13610234804785734989->10695730148961032308 - - - - - -13610234804785734989->13788869053623754394 - - - - - -13610234804785734989->12886313978623292199 - - - - - -13610234804785734989->12421771694198418854 - - - - - -13610234804785734989->11354074206287318022 - - - - - -13788869053623754394->10848326615985732359 - - - - - -13788869053623754394->10946161693179750605 - - - - - -13788869053623754394->11504827122213183863 - - - - - -13788869053623754394->13151252664271832927 - - - - - -12886313978623292199->10848326615985732359 - - - - - -12886313978623292199->10946161693179750605 - - - - - -12886313978623292199->11504827122213183863 - - - - - -12886313978623292199->13151252664271832927 - - - - - -12421771694198418854->10848326615985732359 - - - - - -12421771694198418854->10946161693179750605 - - - - - -12421771694198418854->11504827122213183863 - - - - - -12421771694198418854->13151252664271832927 - - - - - -12421771694198418854->10695730148961032308 - - - - - -12421771694198418854->13788869053623754394 - - - - - -12421771694198418854->12886313978623292199 - - - - - -10129474377767673784->10848326615985732359 - - - - - -10129474377767673784->10946161693179750605 - - - - - -10129474377767673784->11504827122213183863 - - - - - -10129474377767673784->13151252664271832927 - - - - - -10129474377767673784->10695730148961032308 - - - - - -10129474377767673784->13788869053623754394 - - - - - -10129474377767673784->12886313978623292199 - - - - - 
-11252927259328145570->10848326615985732359 - - - - - -11252927259328145570->10946161693179750605 - - - - - -11252927259328145570->11504827122213183863 - - - - - -11252927259328145570->13151252664271832927 - - - - - -11252927259328145570->10129474377767673784 - - - - - -11354074206287318022->10848326615985732359 - - - - - -11354074206287318022->10946161693179750605 - - - - - -11354074206287318022->11504827122213183863 - - - - - -11354074206287318022->13151252664271832927 - - - - - -11354074206287318022->10695730148961032308 - - - - - -11354074206287318022->13788869053623754394 - - - - - -11354074206287318022->12886313978623292199 - - - - - diff --git a/examples/tracing/static/tracing.js b/examples/tracing/static/tracing.js deleted file mode 100644 index ad4dc9a822..0000000000 --- a/examples/tracing/static/tracing.js +++ /dev/null @@ -1,519 +0,0 @@ -(function (__window) { -var exports = {}; -Object.defineProperty(exports, '__esModule', { value: true }); - -/*! ***************************************************************************** -Copyright (c) Microsoft Corporation. All rights reserved. -Licensed under the Apache License, Version 2.0 (the "License"); you may not use -this file except in compliance with the License. You may obtain a copy of the -License at http://www.apache.org/licenses/LICENSE-2.0 - -THIS CODE IS PROVIDED ON AN *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED -WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE, -MERCHANTABLITY OR NON-INFRINGEMENT. - -See the Apache Version 2.0 License for specific language governing permissions -and limitations under the License. -***************************************************************************** */ -/* global Reflect, Promise */ - -var extendStatics = function(d, b) { - extendStatics = Object.setPrototypeOf || - ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || - function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; }; - return extendStatics(d, b); -}; - -function __extends(d, b) { - extendStatics(d, b); - function __() { this.constructor = d; } - d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); -} - -var __assign = function() { - __assign = Object.assign || function __assign(t) { - for (var s, i = 1, n = arguments.length; i < n; i++) { - s = arguments[i]; - for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p]; - } - return t; - }; - return __assign.apply(this, arguments); -}; - -function __read(o, n) { - var m = typeof Symbol === "function" && o[Symbol.iterator]; - if (!m) return o; - var i = m.call(o), r, ar = [], e; - try { - while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value); - } - catch (error) { e = { error: error }; } - finally { - try { - if (r && !r.done && (m = i["return"])) m.call(i); - } - finally { if (e) throw e.error; } - } - return ar; -} - -function __spread() { - for (var ar = [], i = 0; i < arguments.length; i++) - ar = ar.concat(__read(arguments[i])); - return ar; -} - -/** An error emitted by Sentry SDKs and related utilities. 
*/ -var SentryError = /** @class */ (function (_super) { - __extends(SentryError, _super); - function SentryError(message) { - var _newTarget = this.constructor; - var _this = _super.call(this, message) || this; - _this.message = message; - // tslint:disable:no-unsafe-any - _this.name = _newTarget.prototype.constructor.name; - Object.setPrototypeOf(_this, _newTarget.prototype); - return _this; - } - return SentryError; -}(Error)); - -/** - * Checks whether given value's type is one of a few Error or Error-like - * {@link isError}. - * - * @param wat A value to be checked. - * @returns A boolean representing the result. - */ -/** - * Checks whether given value's type is an regexp - * {@link isRegExp}. - * - * @param wat A value to be checked. - * @returns A boolean representing the result. - */ -function isRegExp(wat) { - return Object.prototype.toString.call(wat) === '[object RegExp]'; -} - -/** - * Requires a module which is protected _against bundler minification. - * - * @param request The module path to resolve - */ -/** - * Checks whether we're in the Node.js or Browser environment - * - * @returns Answer to given question - */ -function isNodeEnv() { - // tslint:disable:strict-type-predicates - return Object.prototype.toString.call(typeof process !== 'undefined' ? process : 0) === '[object process]'; -} -var fallbackGlobalObject = {}; -/** - * Safely get global scope object - * - * @returns Global scope object - */ -function getGlobalObject() { - return (isNodeEnv() - ? global - : typeof window !== 'undefined' - ? window - : typeof self !== 'undefined' - ? self - : fallbackGlobalObject); -} -/** JSDoc */ -function consoleSandbox(callback) { - var global = getGlobalObject(); - var levels = ['debug', 'info', 'warn', 'error', 'log', 'assert']; - if (!('console' in global)) { - return callback(); - } - var originalConsole = global.console; - var wrappedLevels = {}; - // Restore all wrapped console methods - levels.forEach(function (level) { - if (level in global.console && originalConsole[level].__sentry__) { - wrappedLevels[level] = originalConsole[level].__sentry_wrapped__; - originalConsole[level] = originalConsole[level].__sentry_original__; - } - }); - // Perform callback manipulations - var result = callback(); - // Revert restoration to wrapped state - Object.keys(wrappedLevels).forEach(function (level) { - originalConsole[level] = wrappedLevels[level]; - }); - return result; -} - -// TODO: Implement different loggers for different environments -var global$1 = getGlobalObject(); -/** Prefix for logging strings */ -var PREFIX = 'Sentry Logger '; -/** JSDoc */ -var Logger = /** @class */ (function () { - /** JSDoc */ - function Logger() { - this._enabled = false; - } - /** JSDoc */ - Logger.prototype.disable = function () { - this._enabled = false; - }; - /** JSDoc */ - Logger.prototype.enable = function () { - this._enabled = true; - }; - /** JSDoc */ - Logger.prototype.log = function () { - var args = []; - for (var _i = 0; _i < arguments.length; _i++) { - args[_i] = arguments[_i]; - } - if (!this._enabled) { - return; - } - consoleSandbox(function () { - global$1.console.log(PREFIX + "[Log]: " + args.join(' ')); // tslint:disable-line:no-console - }); - }; - /** JSDoc */ - Logger.prototype.warn = function () { - var args = []; - for (var _i = 0; _i < arguments.length; _i++) { - args[_i] = arguments[_i]; - } - if (!this._enabled) { - return; - } - consoleSandbox(function () { - global$1.console.warn(PREFIX + "[Warn]: " + args.join(' ')); // tslint:disable-line:no-console - }); - 
}; - /** JSDoc */ - Logger.prototype.error = function () { - var args = []; - for (var _i = 0; _i < arguments.length; _i++) { - args[_i] = arguments[_i]; - } - if (!this._enabled) { - return; - } - consoleSandbox(function () { - global$1.console.error(PREFIX + "[Error]: " + args.join(' ')); // tslint:disable-line:no-console - }); - }; - return Logger; -}()); -// Ensure we only have a single logger instance, even if multiple versions of @sentry/utils are being used -global$1.__SENTRY__ = global$1.__SENTRY__ || {}; -var logger = global$1.__SENTRY__.logger || (global$1.__SENTRY__.logger = new Logger()); - -// tslint:disable:no-unsafe-any - -/** - * Wrap a given object method with a higher-order function - * - * @param source An object that contains a method to be wrapped. - * @param name A name of method to be wrapped. - * @param replacement A function that should be used to wrap a given method. - * @returns void - */ -function fill(source, name, replacement) { - if (!(name in source)) { - return; - } - var original = source[name]; - var wrapped = replacement(original); - // Make sure it's a function first, as we need to attach an empty prototype for `defineProperties` to work - // otherwise it'll throw "TypeError: Object.defineProperties called on non-object" - // tslint:disable-next-line:strict-type-predicates - if (typeof wrapped === 'function') { - try { - wrapped.prototype = wrapped.prototype || {}; - Object.defineProperties(wrapped, { - __sentry__: { - enumerable: false, - value: true, - }, - __sentry_original__: { - enumerable: false, - value: original, - }, - __sentry_wrapped__: { - enumerable: false, - value: wrapped, - }, - }); - } - catch (_Oo) { - // This can throw if multiple fill happens on a global object like XMLHttpRequest - // Fixes https://github.com/getsentry/sentry-javascript/issues/2043 - } - } - source[name] = wrapped; -} - -// Slightly modified (no IE8 support, ES6) and transcribed to TypeScript - -/** - * Checks if the value matches a regex or includes the string - * @param value The string value to be checked against - * @param pattern Either a regex or a string that must be contained in value - */ -function isMatchingPattern(value, pattern) { - if (isRegExp(pattern)) { - return pattern.test(value); - } - if (typeof pattern === 'string') { - return value.includes(pattern); - } - return false; -} - -/** - * Tells whether current environment supports Fetch API - * {@link supportsFetch}. - * - * @returns Answer to the given question. - */ -function supportsFetch() { - if (!('fetch' in getGlobalObject())) { - return false; - } - try { - // tslint:disable-next-line:no-unused-expression - new Headers(); - // tslint:disable-next-line:no-unused-expression - new Request(''); - // tslint:disable-next-line:no-unused-expression - new Response(); - return true; - } - catch (e) { - return false; - } -} -/** - * Tells whether current environment supports Fetch API natively - * {@link supportsNativeFetch}. - * - * @returns Answer to the given question. 
- */ -function supportsNativeFetch() { - if (!supportsFetch()) { - return false; - } - var global = getGlobalObject(); - return global.fetch.toString().indexOf('native') !== -1; -} - -/** SyncPromise internal states */ -var States; -(function (States) { - /** Pending */ - States["PENDING"] = "PENDING"; - /** Resolved / OK */ - States["RESOLVED"] = "RESOLVED"; - /** Rejected / Error */ - States["REJECTED"] = "REJECTED"; -})(States || (States = {})); - -/** - * Tracing Integration - */ -var Tracing = /** @class */ (function () { - /** - * Constructor for Tracing - * - * @param _options TracingOptions - */ - function Tracing(_options) { - if (_options === void 0) { _options = {}; } - this._options = _options; - /** - * @inheritDoc - */ - this.name = Tracing.id; - if (!Array.isArray(_options.tracingOrigins) || _options.tracingOrigins.length === 0) { - consoleSandbox(function () { - var defaultTracingOrigins = ['localhost', /^\//]; - // @ts-ignore - console.warn('Sentry: You need to define `tracingOrigins` in the options. Set an array of urls or patterns to trace.'); - // @ts-ignore - console.warn("Sentry: We added a reasonable default for you: " + defaultTracingOrigins); - _options.tracingOrigins = defaultTracingOrigins; - }); - } - } - /** - * @inheritDoc - */ - Tracing.prototype.setupOnce = function (_, getCurrentHub) { - if (this._options.traceXHR !== false) { - this._traceXHR(getCurrentHub); - } - if (this._options.traceFetch !== false) { - this._traceFetch(getCurrentHub); - } - if (this._options.autoStartOnDomReady !== false) { - getGlobalObject().addEventListener('DOMContentLoaded', function () { - Tracing.startTrace(getCurrentHub(), getGlobalObject().location.href); - }); - getGlobalObject().document.onreadystatechange = function () { - if (document.readyState === 'complete') { - Tracing.startTrace(getCurrentHub(), getGlobalObject().location.href); - } - }; - } - }; - /** - * Starts a new trace - * @param hub The hub to start the trace on - * @param transaction Optional transaction - */ - Tracing.startTrace = function (hub, transaction) { - hub.configureScope(function (scope) { - scope.startSpan(); - scope.setTransaction(transaction); - }); - }; - /** - * JSDoc - */ - Tracing.prototype._traceXHR = function (getCurrentHub) { - if (!('XMLHttpRequest' in getGlobalObject())) { - return; - } - var xhrproto = XMLHttpRequest.prototype; - fill(xhrproto, 'open', function (originalOpen) { - return function () { - var args = []; - for (var _i = 0; _i < arguments.length; _i++) { - args[_i] = arguments[_i]; - } - // @ts-ignore - var self = getCurrentHub().getIntegration(Tracing); - if (self) { - self._xhrUrl = args[1]; - } - // tslint:disable-next-line: no-unsafe-any - return originalOpen.apply(this, args); - }; - }); - fill(xhrproto, 'send', function (originalSend) { - return function () { - var _this = this; - var args = []; - for (var _i = 0; _i < arguments.length; _i++) { - args[_i] = arguments[_i]; - } - // @ts-ignore - var self = getCurrentHub().getIntegration(Tracing); - if (self && self._xhrUrl && self._options.tracingOrigins) { - var url_1 = self._xhrUrl; - var headers_1 = getCurrentHub().traceHeaders(); - // tslint:disable-next-line: prefer-for-of - var isWhitelisted = self._options.tracingOrigins.some(function (origin) { - return isMatchingPattern(url_1, origin); - }); - if (isWhitelisted && this.setRequestHeader) { - Object.keys(headers_1).forEach(function (key) { - _this.setRequestHeader(key, headers_1[key]); - }); - } - } - // tslint:disable-next-line: no-unsafe-any - return 
originalSend.apply(this, args); - }; - }); - }; - /** - * JSDoc - */ - Tracing.prototype._traceFetch = function (getCurrentHub) { - if (!supportsNativeFetch()) { - return; - } - - console.log("PATCHING FETCH"); - - // tslint:disable: only-arrow-functions - fill(getGlobalObject(), 'fetch', function (originalFetch) { - return function () { - var args = []; - for (var _i = 0; _i < arguments.length; _i++) { - args[_i] = arguments[_i]; - } - // @ts-ignore - var self = getCurrentHub().getIntegration(Tracing); - if (self && self._options.tracingOrigins) { - console.log("blafalseq"); - var url_2 = args[0]; - var options = args[1] = args[1] || {}; - var whiteListed_1 = false; - self._options.tracingOrigins.forEach(function (whiteListUrl) { - if (!whiteListed_1) { - whiteListed_1 = isMatchingPattern(url_2, whiteListUrl); - console.log('a', url_2, whiteListUrl); - } - }); - if (whiteListed_1) { - console.log('aaaaaa', options, whiteListed_1); - if (options.headers) { - - if (Array.isArray(options.headers)) { - options.headers = __spread(options.headers, Object.entries(getCurrentHub().traceHeaders())); - } - else { - options.headers = __assign({}, options.headers, getCurrentHub().traceHeaders()); - } - } - else { - options.headers = getCurrentHub().traceHeaders(); - } - - console.log(options.headers); - } - } - - args[1] = options; - // tslint:disable-next-line: no-unsafe-any - return originalFetch.apply(getGlobalObject(), args); - }; - }); - // tslint:enable: only-arrow-functions - }; - /** - * @inheritDoc - */ - Tracing.id = 'Tracing'; - return Tracing; -}()); - -exports.Tracing = Tracing; - - - __window.Sentry = __window.Sentry || {}; - __window.Sentry.Integrations = __window.Sentry.Integrations || {}; - Object.assign(__window.Sentry.Integrations, exports); - - - - - - - - - - - - -}(window)); -//# sourceMappingURL=tracing.js.map diff --git a/examples/tracing/templates/index.html b/examples/tracing/templates/index.html deleted file mode 100644 index 2aa95e789c..0000000000 --- a/examples/tracing/templates/index.html +++ /dev/null @@ -1,57 +0,0 @@ - - - - - - - -

-Decode your base64 string as a service (that calls another service)
-A base64 string
-Output:
diff --git a/examples/tracing/traceviewer.py b/examples/tracing/traceviewer.py
deleted file mode 100644
index 9c1435ff88..0000000000
--- a/examples/tracing/traceviewer.py
+++ /dev/null
@@ -1,61 +0,0 @@
-import json
-import sys
-
-print("digraph mytrace {")
-print("rankdir=LR")
-
-all_spans = []
-
-for line in sys.stdin:
-    event = json.loads(line)
-    if event.get("type") != "transaction":
-        continue
-
-    trace_ctx = event["contexts"]["trace"]
-    trace_span = dict(trace_ctx)  # fake a span entry from transaction event
-    trace_span["description"] = event["transaction"]
-    trace_span["start_timestamp"] = event["start_timestamp"]
-    trace_span["timestamp"] = event["timestamp"]
-
-    if "parent_span_id" not in trace_ctx:
-        print(
-            '{} [label="trace:{} ({})"];'.format(
-                int(trace_ctx["trace_id"], 16),
-                event["transaction"],
-                trace_ctx["trace_id"],
-            )
-        )
-
-    for span in event["spans"] + [trace_span]:
-        print(
-            '{} [label="span:{} ({})"];'.format(
-                int(span["span_id"], 16), span["description"], span["span_id"]
-            )
-        )
-        if "parent_span_id" in span:
-            print(
-                "{} -> {};".format(
-                    int(span["parent_span_id"], 16), int(span["span_id"], 16)
-                )
-            )
-
-        print(
-            "{} -> {} [style=dotted];".format(
-                int(span["trace_id"], 16), int(span["span_id"], 16)
-            )
-        )
-
-        all_spans.append(span)
-
-
-for s1 in all_spans:
-    for s2 in all_spans:
-        if s1["start_timestamp"] > s2["timestamp"]:
-            print(
-                '{} -> {} [color="#efefef"];'.format(
-                    int(s1["span_id"], 16), int(s2["span_id"], 16)
-                )
-            )
-
-
-print("}")
diff --git a/examples/tracing/tracing.py b/examples/tracing/tracing.py
deleted file mode 100644
index 9612d9acf4..0000000000
--- a/examples/tracing/tracing.py
+++ /dev/null
@@ -1,73 +0,0 @@
-import json
-import flask
-import os
-import redis
-import rq
-import sentry_sdk
-import time
-import urllib3
-
-from sentry_sdk.integrations.flask import FlaskIntegration
-from sentry_sdk.integrations.rq import RqIntegration
-
-
-app = flask.Flask(__name__)
-redis_conn = redis.Redis()
-http = urllib3.PoolManager()
-queue = rq.Queue(connection=redis_conn)
-
-
-def write_event(event):
-    with open("events", "a") as f:
-        f.write(json.dumps(event))
-        f.write("\n")
-
-
-sentry_sdk.init(
-    integrations=[FlaskIntegration(), RqIntegration()],
-    traces_sample_rate=1.0,
-    traceparent_v2=True,
-    debug=True,
-    transport=write_event,
-)
-
-
-def decode_base64(encoded, redis_key):
-    time.sleep(1)
-    r = http.request("GET", "http://httpbin.org/base64/{}".format(encoded))
-    redis_conn.set(redis_key, r.data)
-
-
-@app.route("/")
-def index():
-    return flask.render_template(
-        "index.html",
-        sentry_dsn=os.environ["SENTRY_DSN"],
-        traceparent=dict(sentry_sdk.Hub.current.iter_trace_propagation_headers()),
-    )
-
-
-@app.route("/compute/")
-def compute(input):
-    redis_key = "sentry-python-tracing-example-result:{}".format(input)
-    redis_conn.delete(redis_key)
-    queue.enqueue(decode_base64, encoded=input, redis_key=redis_key)
-
-    return redis_key
-
-
-@app.route("/wait/")
-def wait(redis_key):
-    result = redis_conn.get(redis_key)
-    if result is None:
-        return "NONE"
-    else:
-        redis_conn.delete(redis_key)
-        return "RESULT: {}".format(result)
-
-
-@app.cli.command("worker")
-def run_worker():
-    print("WORKING")
-    worker = rq.Worker([queue], connection=queue.connection)
-    worker.work()
diff --git a/linter-requirements.txt b/linter-requirements.txt
index d84ccdbce3..e181f00560 100644
--- a/linter-requirements.txt
+++ b/linter-requirements.txt
@@ -1,6 +1,10 @@
-black==19.10b0
-flake8
-flake8-import-order
-mypy==0.770
-flake8-bugbear>=19.8.0
-pep8-naming
+mypy==0.971
+black==22.12.0
+flake8==5.0.4
+types-certifi
+types-redis
+types-setuptools
+pymongo # There is no separate types module.
+flake8-bugbear==22.12.6
+pep8-naming==0.13.2
+pre-commit # local linting
diff --git a/mypy.ini b/mypy.ini
index 0e25a888a9..2a15e45e49 100644
--- a/mypy.ini
+++ b/mypy.ini
@@ -46,3 +46,20 @@ ignore_missing_imports = True
 ignore_missing_imports = True
 [mypy-pyspark.*]
 ignore_missing_imports = True
+[mypy-asgiref.*]
+ignore_missing_imports = True
+[mypy-executing.*]
+ignore_missing_imports = True
+[mypy-asttokens.*]
+ignore_missing_imports = True
+[mypy-pure_eval.*]
+ignore_missing_imports = True
+[mypy-blinker.*]
+ignore_missing_imports = True
+[mypy-sentry_sdk._queue]
+ignore_missing_imports = True
+disallow_untyped_defs = False
+[mypy-celery.app.trace]
+ignore_missing_imports = True
+[mypy-flask.signals]
+ignore_missing_imports = True
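Note that `[mypy-sentry_sdk._queue]` above is the only override that also relaxes `disallow_untyped_defs`; the rest merely silence missing-stub errors. A minimal sketch of what that relaxation means, using a hypothetical, deliberately unannotated helper:

```python
# Hypothetical module covered by an override like [mypy-sentry_sdk._queue].
# With disallow_untyped_defs = False scoped to this module, mypy accepts an
# unannotated function here, while a module without the override would need
# annotations such as `def drain(queue: list) -> None:`.
def drain(queue):
    while queue:
        queue.pop()
```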
diff --git a/pytest.ini b/pytest.ini
index 19cf3a00e8..f736c30496 100644
--- a/pytest.ini
+++ b/pytest.ini
@@ -1,4 +1,14 @@
 [pytest]
 DJANGO_SETTINGS_MODULE = tests.integrations.django.myapp.settings
 addopts = --tb=short
-markers = tests_internal_exceptions
+markers =
+    tests_internal_exceptions: Handle internal exceptions just as the SDK does, to test it. (Otherwise internal exceptions are recorded and reraised.)
+    only: A temporary marker, to make pytest run only the tests with this mark, similar to jest's `it.only`. To use, run `pytest -v -m only`.
+asyncio_mode = strict
+
+[pytest-watch]
+; Enable this to drop into pdb on errors
+; pdb = True
+
+verbose = True
+nobeep = True
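To make the new `only` marker concrete, here is a minimal sketch of a test module using it (the tests themselves are made up); `pytest -v -m only` then selects just the marked test:

```python
import pytest


@pytest.mark.only  # temporary: `pytest -v -m only` runs just this test
def test_work_in_progress():
    assert 1 + 1 == 2


def test_everything_else():  # deselected when filtering with -m only
    assert True
```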
diff --git a/scripts/aws-attach-layer-to-lambda-function.sh b/scripts/aws-attach-layer-to-lambda-function.sh
new file mode 100755
index 0000000000..71e08c6318
--- /dev/null
+++ b/scripts/aws-attach-layer-to-lambda-function.sh
@@ -0,0 +1,33 @@
+#!/usr/bin/env bash
+#
+# Attaches the layer `SentryPythonServerlessSDK-local-dev` to a given Lambda function.
+#
+
+set -euo pipefail
+
+# Check for argument
+if [ $# -eq 0 ]
+  then
+    SCRIPT_NAME=$(basename "$0")
+    echo "ERROR: No argument supplied. Please give the name of a Lambda function!"
+    echo ""
+    echo "Usage: $SCRIPT_NAME "
+    echo ""
+    exit 1
+fi
+
+FUNCTION_NAME=$1
+
+echo "Getting ARN of newest Sentry lambda layer..."
+LAYER_ARN=$(aws lambda list-layer-versions --layer-name SentryPythonServerlessSDK-local-dev --query "LayerVersions[0].LayerVersionArn" | tr -d '"')
+echo "Done getting ARN of newest Sentry lambda layer $LAYER_ARN."
+
+echo "Attaching Lamba layer to function $FUNCTION_NAME..."
+echo "Warning: This remove all other layers!"
+aws lambda update-function-configuration \
+    --function-name "$FUNCTION_NAME" \
+    --layers "$LAYER_ARN" \
+    --no-cli-pager
+echo "Done attaching Lamba layer to function '$FUNCTION_NAME'."
+
+echo "All done. Have a nice day!"
diff --git a/scripts/aws-delete-lamba-layer-versions.sh b/scripts/aws-delete-lamba-layer-versions.sh
new file mode 100755
index 0000000000..5e1ea38a85
--- /dev/null
+++ b/scripts/aws-delete-lamba-layer-versions.sh
@@ -0,0 +1,18 @@
+#!/usr/bin/env bash
+#
+# Deletes all versions of the layer specified in LAYER_NAME in one region.
+#
+
+set -euo pipefail
+
+# override default AWS region
+export AWS_REGION=eu-central-1
+
+LAYER_NAME=SentryPythonServerlessSDKLocalDev
+VERSION="0"
+
+while [[ $VERSION != "1" ]]
+do
+  VERSION=$(aws lambda list-layer-versions --layer-name $LAYER_NAME | jq '.LayerVersions[0].Version')
+  aws lambda delete-layer-version --layer-name $LAYER_NAME --version-number $VERSION
+done
diff --git a/scripts/aws-deploy-local-layer.sh b/scripts/aws-deploy-local-layer.sh
new file mode 100755
index 0000000000..9e2d7c795e
--- /dev/null
+++ b/scripts/aws-deploy-local-layer.sh
@@ -0,0 +1,65 @@
+#!/usr/bin/env bash
+#
+# Builds and deploys the Sentry AWS Lambda layer (including the Sentry SDK and the Sentry Lambda Extension)
+#
+# The currently checked out version of the SDK in your local directory is used.
+# The latest version of the Lambda Extension is fetched from the Sentry Release Registry.
+#
+
+set -euo pipefail
+
+# Creating Lambda layer
+echo "Creating Lambda layer in ./dist-serverless ..."
+make aws-lambda-layer
+echo "Done creating Lambda layer in ./dist-serverless."
+
+# IMPORTANT:
+# Please make sure that this part does the same as the GitHub action that
+# is building the Lambda layer in production!
+# see: https://github.com/getsentry/action-build-aws-lambda-extension/blob/main/action.yml#L23-L40
+
+echo "Downloading relay..."
+mkdir -p dist-serverless/relay
+curl -0 --silent \
+    --output dist-serverless/relay/relay \
+    "$(curl -s https://release-registry.services.sentry.io/apps/relay/latest | jq -r .files.\"relay-Linux-x86_64\".url)"
+chmod +x dist-serverless/relay/relay
+echo "Done downloading relay."
+
+echo "Creating start script..."
+mkdir -p dist-serverless/extensions
+cat > dist-serverless/extensions/sentry-lambda-extension << EOT
+#!/bin/bash
+set -euo pipefail
+exec /opt/relay/relay run \
+    --mode=proxy \
+    --shutdown-timeout=2 \
+    --upstream-dsn="\$SENTRY_DSN" \
+    --aws-runtime-api="\$AWS_LAMBDA_RUNTIME_API"
+EOT
+chmod +x dist-serverless/extensions/sentry-lambda-extension
+echo "Done creating start script."
+
+# Zip Lambda layer and included Lambda extension
+echo "Zipping Lambda layer and included Lambda extension..."
+cd dist-serverless/
+zip -r ../sentry-python-serverless-x.x.x-dev.zip \
+    . \
+    --exclude \*__pycache__\* --exclude \*.yml
+cd ..
+echo "Done Zipping Lambda layer and included Lambda extension to ./sentry-python-serverless-x.x.x-dev.zip."
+
+
+# Deploying zipped Lambda layer to AWS
+echo "Deploying zipped Lambda layer to AWS..."
+
+aws lambda publish-layer-version \
+    --layer-name "SentryPythonServerlessSDK-local-dev" \
+    --region "eu-central-1" \
+    --zip-file "fileb://sentry-python-serverless-x.x.x-dev.zip" \
+    --description "Local test build of SentryPythonServerlessSDK (can be deleted)" \
+    --no-cli-pager
+
+echo "Done deploying zipped Lambda layer to AWS as 'SentryPythonServerlessSDK-local-dev'."
+
+echo "All done. Have a nice day!"
diff --git a/scripts/build_aws_lambda_layer.py b/scripts/build_aws_lambda_layer.py
new file mode 100644
index 0000000000..d694d15ba7
--- /dev/null
+++ b/scripts/build_aws_lambda_layer.py
@@ -0,0 +1,72 @@
+import os
+import shutil
+import subprocess
+import tempfile
+
+from sentry_sdk.consts import VERSION as SDK_VERSION
+
+DIST_PATH = "dist"  # created by "make dist" that is called by "make aws-lambda-layer"
+PYTHON_SITE_PACKAGES = "python"  # see https://docs.aws.amazon.com/lambda/latest/dg/configuration-layers.html#configuration-layers-path
+
+
+class LayerBuilder:
+    def __init__(
+        self,
+        base_dir,  # type: str
+    ):
+        # type: (...) -> None
+        self.base_dir = base_dir
+        self.python_site_packages = os.path.join(self.base_dir, PYTHON_SITE_PACKAGES)
+
+    def make_directories(self):
+        # type: (...) -> None
+        os.makedirs(self.python_site_packages)
+
+    def install_python_packages(self):
+        # type: (...) -> None
+        sentry_python_sdk = os.path.join(
+            DIST_PATH,
+            f"sentry_sdk-{SDK_VERSION}-py2.py3-none-any.whl",  # this is generated by "make dist" that is called by "make aws-lamber-layer"
+        )
+        subprocess.run(
+            [
+                "pip",
+                "install",
+                "--no-cache-dir",  # always access PyPI
+                "--quiet",
+                sentry_python_sdk,
+                "--target",
+                self.python_site_packages,
+            ],
+            check=True,
+        )
+
+    def create_init_serverless_sdk_package(self):
+        # type: (...) -> None
+        """
+        Method that creates the init_serverless_sdk pkg in the
+        sentry-python-serverless zip
+        """
+        serverless_sdk_path = (
+            f"{self.python_site_packages}/sentry_sdk/"
+            f"integrations/init_serverless_sdk"
+        )
+        if not os.path.exists(serverless_sdk_path):
+            os.makedirs(serverless_sdk_path)
+        shutil.copy(
+            "scripts/init_serverless_sdk.py", f"{serverless_sdk_path}/__init__.py"
+        )
+
+
+def build_layer_dir():
+    with tempfile.TemporaryDirectory() as base_dir:
+        layer_builder = LayerBuilder(base_dir)
+        layer_builder.make_directories()
+        layer_builder.install_python_packages()
+        layer_builder.create_init_serverless_sdk_package()
+
+        shutil.copytree(base_dir, "dist-serverless")
+
+
+if __name__ == "__main__":
+    build_layer_dir()
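For orientation, a hedged sketch of the layout the builder produces (directory names taken from the PYTHON_SITE_PACKAGES constant and create_init_serverless_sdk_package above; the remaining package contents are illustrative):

    dist-serverless/
        python/                      # site-packages root AWS Lambda expects for Python layers
            sentry_sdk/
                integrations/
                    init_serverless_sdk/
                        __init__.py  # copied from scripts/init_serverless_sdk.py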
diff --git a/scripts/bump-version.sh b/scripts/bump-version.sh
index d04836940f..74546f5d9f 100755
--- a/scripts/bump-version.sh
+++ b/scripts/bump-version.sh
@@ -1,6 +1,11 @@
 #!/bin/bash
 set -eux
 
+if [ "$(uname -s)" != "Linux" ]; then
+    echo "Please use the GitHub Action."
+    exit 1
+fi
+
 SCRIPT_DIR="$( dirname "$0" )"
 cd $SCRIPT_DIR/..
 
diff --git a/scripts/download-relay.sh b/scripts/download-relay.sh
deleted file mode 100755
index a2abe75750..0000000000
--- a/scripts/download-relay.sh
+++ /dev/null
@@ -1,31 +0,0 @@
-#!/bin/bash
-set -e
-
-if { [ "$TRAVIS" == "true" ] || [ "$TF_BUILD" == "True" ]; } && [ -z "$GITHUB_API_TOKEN" ]; then
-    echo "Not running on external pull request"
-    exit 0;
-fi
-
-target=relay
-
-# Download the latest relay release for Travis
-
-output="$(
-    curl -s \
-    https://api.github.com/repos/getsentry/relay/releases/latest?access_token=$GITHUB_API_TOKEN
-)"
-
-echo "$output"
-
-output="$(echo "$output" \
-    | grep "$(uname -s)" \
-    | grep -v "\.zip" \
-    | grep "download" \
-    | cut -d : -f 2,3 \
-    | tr -d , \
-    | tr -d \")"
-
-echo "$output"
-echo "$output" | wget -i - -O $target
-[ -s $target ]
-chmod +x $target
diff --git a/scripts/init_serverless_sdk.py b/scripts/init_serverless_sdk.py
new file mode 100644
index 0000000000..7fc7f64d05
--- /dev/null
+++ b/scripts/init_serverless_sdk.py
@@ -0,0 +1,93 @@
+"""
+For manual instrumentation,
+The Handler function string of an aws lambda function should be added as an
+environment variable with a key of 'SENTRY_INITIAL_HANDLER' along with the 'DSN'
+Then the Handler function sstring should be replaced with
+'sentry_sdk.integrations.init_serverless_sdk.sentry_lambda_handler'
+"""
+import os
+import sys
+import re
+
+import sentry_sdk
+from sentry_sdk._types import MYPY
+from sentry_sdk.utils import Dsn
+from sentry_sdk.integrations.aws_lambda import AwsLambdaIntegration
+
+if MYPY:
+    from typing import Any
+
+
+def extension_relay_dsn(original_dsn):
+    dsn = Dsn(original_dsn)
+    dsn.host = "localhost"
+    dsn.port = 5333
+    dsn.scheme = "http"
+    return str(dsn)
+
+
+# Configure Sentry SDK
+sentry_sdk.init(
+    dsn=extension_relay_dsn(os.environ["SENTRY_DSN"]),
+    integrations=[AwsLambdaIntegration(timeout_warning=True)],
+    traces_sample_rate=float(os.environ["SENTRY_TRACES_SAMPLE_RATE"]),
+)
+
+
+class AWSLambdaModuleLoader:
+    DIR_PATH_REGEX = r"^(.+)\/([^\/]+)$"
+
+    def __init__(self, sentry_initial_handler):
+        try:
+            module_path, self.handler_name = sentry_initial_handler.rsplit(".", 1)
+        except ValueError:
+            raise ValueError("Incorrect AWS Handler path (Not a path)")
+
+        self.extract_and_load_lambda_function_module(module_path)
+
+    def extract_and_load_lambda_function_module(self, module_path):
+        """
+        Method that extracts and loads lambda function module from module_path
+        """
+        py_version = sys.version_info
+
+        if re.match(self.DIR_PATH_REGEX, module_path):
+            # With a path like -> `scheduler/scheduler/event`
+            # `module_name` is `event`, and `module_file_path` is `scheduler/scheduler/event.py`
+            module_name = module_path.split(os.path.sep)[-1]
+            module_file_path = module_path + ".py"
+
+            # Supported python versions are 2.7, 3.6, 3.7, 3.8
+            if py_version >= (3, 5):
+                import importlib.util
+
+                spec = importlib.util.spec_from_file_location(
+                    module_name, module_file_path
+                )
+                self.lambda_function_module = importlib.util.module_from_spec(spec)
+                spec.loader.exec_module(self.lambda_function_module)
+            elif py_version[0] < 3:
+                import imp
+
+                self.lambda_function_module = imp.load_source(
+                    module_name, module_file_path
+                )
+            else:
+                raise ValueError("Python version %s is not supported." % py_version)
+        else:
+            import importlib
+
+            self.lambda_function_module = importlib.import_module(module_path)
+
+    def get_lambda_handler(self):
+        return getattr(self.lambda_function_module, self.handler_name)
+
+
+def sentry_lambda_handler(event, context):
+    # type: (Any, Any) -> Any
+    """
+    Handler function that invokes a lambda handler whose path is defined in
+    the environment variable "SENTRY_INITIAL_HANDLER"
+    """
+    module_loader = AWSLambdaModuleLoader(os.environ["SENTRY_INITIAL_HANDLER"])
+    return module_loader.get_lambda_handler()(event, context)
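Putting this together, a hedged sketch of the Lambda configuration the module docstring describes (the env var names come from the code above; the handler path and DSN are hypothetical):

    # Lambda function configuration (illustrative, not a runnable script):
    #   Handler:                    sentry_sdk.integrations.init_serverless_sdk.sentry_lambda_handler
    #   SENTRY_INITIAL_HANDLER:     my_module.handler
    #   SENTRY_DSN:                 https://examplePublicKey@o0.ingest.sentry.io/0
    #   SENTRY_TRACES_SAMPLE_RATE:  1.0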
diff --git a/scripts/runtox.sh b/scripts/runtox.sh
index d1c0ea31a4..8b4c4a1bef 100755
--- a/scripts/runtox.sh
+++ b/scripts/runtox.sh
@@ -1,4 +1,8 @@
 #!/bin/bash
+
+# Usage: sh scripts/runtox.sh py3.7 <pytest-args>
+# Runs all environments with substring py3.7 and the given arguments for pytest
+
 set -ex
 
 if [ -n "$TOXPATH" ]; then
@@ -9,18 +13,7 @@ else
     TOXPATH=./.venv/bin/tox
 fi
 
-# Usage: sh scripts/runtox.sh py3.7 <pytest-args>
-# Runs all environments with substring py3.7 and the given arguments for pytest
-
-if [ -n "$1" ]; then
-    searchstring="$1"
-elif [ -n "$TRAVIS_PYTHON_VERSION" ]; then
-    searchstring="$(echo py$TRAVIS_PYTHON_VERSION | sed -e 's/pypypy/pypy/g' -e 's/-dev//g')"
-elif [ -n "$AZURE_PYTHON_VERSION" ]; then
-    searchstring="$(echo py$AZURE_PYTHON_VERSION | sed -e 's/pypypy/pypy/g' -e 's/-dev//g')"
-    if [ "$searchstring" = pypy2 ]; then
-        searchstring=pypy
-    fi
-fi
+searchstring="$1"
 
-exec $TOXPATH -e $($TOXPATH -l | grep "$searchstring" | tr '\n' ',') -- "${@:2}"
+export TOX_PARALLEL_NO_SPINNER=1
+exec $TOXPATH -p auto -e "$($TOXPATH -l | grep "$searchstring" | tr $'\n' ',')" -- "${@:2}"
diff --git a/scripts/split-tox-gh-actions/ci-yaml-services.txt b/scripts/split-tox-gh-actions/ci-yaml-services.txt
new file mode 100644
index 0000000000..2219e5a4da
--- /dev/null
+++ b/scripts/split-tox-gh-actions/ci-yaml-services.txt
@@ -0,0 +1,18 @@
+    services:
+      postgres:
+        image: postgres
+        env:
+          POSTGRES_PASSWORD: sentry
+        # Set health checks to wait until postgres has started
+        options: >-
+          --health-cmd pg_isready
+          --health-interval 10s
+          --health-timeout 5s
+          --health-retries 5
+        # Maps tcp port 5432 on service container to the host
+        ports:
+          - 5432:5432
+    env:
+      SENTRY_PYTHON_TEST_POSTGRES_USER: postgres
+      SENTRY_PYTHON_TEST_POSTGRES_PASSWORD: sentry
+      SENTRY_PYTHON_TEST_POSTGRES_NAME: ci_test
diff --git a/scripts/split-tox-gh-actions/ci-yaml.txt b/scripts/split-tox-gh-actions/ci-yaml.txt
new file mode 100644
index 0000000000..b9ecdf39e7
--- /dev/null
+++ b/scripts/split-tox-gh-actions/ci-yaml.txt
@@ -0,0 +1,65 @@
+name: Test {{ framework }}
+
+on:
+  push:
+    branches:
+      - master
+      - release/**
+
+  pull_request:
+
+# Cancel in-progress workflows on pull_requests.
+# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value
+concurrency:
+  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
+  cancel-in-progress: true
+
+permissions:
+  contents: read
+
+env:
+  BUILD_CACHE_KEY: ${{ github.sha }}
+  CACHED_BUILD_PATHS: |
+    ${{ github.workspace }}/dist-serverless
+
+jobs:
+  test:
+    name: {{ framework }}, python ${{ matrix.python-version }}, ${{ matrix.os }}
+    runs-on: ${{ matrix.os }}
+    timeout-minutes: 45
+{{ strategy_matrix }}
+{{ services }}
+
+    steps:
+      - uses: actions/checkout@v3
+      - uses: actions/setup-python@v4
+        with:
+          python-version: ${{ matrix.python-version }}
+
+      - name: Setup Test Env
+        run: |
+          pip install codecov "tox>=3,<4"
+
+      - name: Test {{ framework }}
+        timeout-minutes: 45
+        shell: bash
+        run: |
+          set -x # print commands that are executed
+          coverage erase
+
+          ./scripts/runtox.sh "${{ matrix.python-version }}-{{ framework }}" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+          coverage combine .coverage*
+          coverage xml -i
+          codecov --file coverage.xml
+
+  check_required_tests:
+    name: All {{ framework }} tests passed or skipped
+    needs: test
+    # Always run this, even if a dependent job failed
+    if: always()
+    runs-on: ubuntu-20.04
+    steps:
+      - name: Check for failures
+        if: contains(needs.test.result, 'failure')
+        run: |
+          echo "One of the dependent jobs have failed. You may need to re-run it." && exit 1
diff --git a/scripts/split-tox-gh-actions/split-tox-gh-actions.py b/scripts/split-tox-gh-actions/split-tox-gh-actions.py
new file mode 100755
index 0000000000..2458fe06af
--- /dev/null
+++ b/scripts/split-tox-gh-actions/split-tox-gh-actions.py
@@ -0,0 +1,159 @@
+"""Split Tox to GitHub Actions
+
+This is a small script to split a tox.ini config file into multiple GitHub actions configuration files.
+This way each framework defined in tox.ini gets its own GitHub actions configuration file,
+which allows the frameworks to be tested in parallel in GitHub actions.
+
+This will generate/update several configuration files that need to be committed to Git afterwards.
+Whenever tox.ini is changed, this script needs to be run.
+
+Usage:
+    python split-tox-gh-actions.py [--fail-on-changes]
+
+If the parameter `--fail-on-changes` is set, the script will raise a RuntimeError in case the yaml
+files have been changed by the script's execution. This is used in CI to check whether the yaml files
+represent the current tox.ini file. (If not, the CI run fails.)
+"""
+
+import configparser
+import hashlib
+import sys
+from collections import defaultdict
+from glob import glob
+from pathlib import Path
+
+OUT_DIR = Path(__file__).resolve().parent.parent.parent / ".github" / "workflows"
+TOX_FILE = Path(__file__).resolve().parent.parent.parent / "tox.ini"
+TEMPLATE_DIR = Path(__file__).resolve().parent
+TEMPLATE_FILE = TEMPLATE_DIR / "ci-yaml.txt"
+TEMPLATE_FILE_SERVICES = TEMPLATE_DIR / "ci-yaml-services.txt"
+
+FRAMEWORKS_NEEDING_POSTGRES = ["django"]
+
+MATRIX_DEFINITION = """
+    strategy:
+      fail-fast: false
+      matrix:
+        python-version: [{{ python-version }}]
+        # python3.6 reached EOL and is no longer being supported on
+        # new versions of hosted runners on Github Actions
+        # ubuntu-20.04 is the last version that supported python3.6
+        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
+        os: [ubuntu-20.04]
+"""
+
+
+def write_yaml_file(
+    template,
+    current_framework,
+    python_versions,
+):
+    """Write the YAML configuration file for one framework to disk."""
+    # render template for print
+    out = ""
+    for template_line in template:
+        if template_line == "{{ strategy_matrix }}\n":
+            py_versions = [f'"{py.replace("py", "")}"' for py in python_versions]
+
+            m = MATRIX_DEFINITION
+            m = m.replace("{{ framework }}", current_framework).replace(
+                "{{ python-version }}", ",".join(py_versions)
+            )
+            out += m
+
+        elif template_line == "{{ services }}\n":
+            if current_framework in FRAMEWORKS_NEEDING_POSTGRES:
+                f = open(TEMPLATE_FILE_SERVICES, "r")
+                out += "".join(f.readlines())
+                f.close()
+
+        else:
+            out += template_line.replace("{{ framework }}", current_framework)
+
+    # write rendered template
+    outfile_name = OUT_DIR / f"test-integration-{current_framework}.yml"
+    print(f"Writing {outfile_name}")
+    f = open(outfile_name, "w")
+    f.writelines(out)
+    f.close()
+
+
+def get_yaml_files_hash():
+    """Calculate a hash of all the yaml configuration files"""
+
+    hasher = hashlib.md5()
+    path_pattern = (OUT_DIR / "test-integration-*.yml").as_posix()
+    for file in glob(path_pattern):
+        with open(file, "rb") as f:
+            buf = f.read()
+            hasher.update(buf)
+
+    return hasher.hexdigest()
+
+
+def main(fail_on_changes):
+    """Create one CI workflow for each framework defined in tox.ini"""
+    if fail_on_changes:
+        old_hash = get_yaml_files_hash()
+
+    print("Read GitHub actions config file template")
+    f = open(TEMPLATE_FILE, "r")
+    template = f.readlines()
+    f.close()
+
+    print("Read tox.ini")
+    config = configparser.ConfigParser()
+    config.read(TOX_FILE)
+    lines = [x for x in config["tox"]["envlist"].split("\n") if len(x) > 0]
+
+    python_versions = defaultdict(list)
+
+    print("Parse tox.ini nevlist")
+
+    for line in lines:
+        # normalize lines
+        line = line.strip().lower()
+
+        # ignore comments
+        if line.startswith("#"):
+            continue
+
+        try:
+            # parse tox environment definition
+            try:
+                (raw_python_versions, framework, _) = line.split("-")
+            except ValueError:
+                (raw_python_versions, framework) = line.split("-")
+
+            # collect python versions to test the framework in
+            for python_version in (
+                raw_python_versions.replace("{", "").replace("}", "").split(",")
+            ):
+                if python_version not in python_versions[framework]:
+                    python_versions[framework].append(python_version)
+
+        except ValueError:
+            print(f"ERROR reading line {line}")
+
+    for framework in python_versions:
+        write_yaml_file(template, framework, python_versions[framework])
+
+    if fail_on_changes:
+        new_hash = get_yaml_files_hash()
+
+        if old_hash != new_hash:
+            raise RuntimeError(
+                "The yaml configuration files have changed. This means that tox.ini has changed "
+                "but the changes have not been propagated to the GitHub actions config files. "
+                "Please run `python scripts/split-tox-gh-actions/split-tox-gh-actions.py` "
+                "locally and commit the changes of the yaml configuration files to continue. "
+            )
+
+    print("All done. Have a nice day!")
+
+
+if __name__ == "__main__":
+    fail_on_changes = len(sys.argv) == 2 and sys.argv[1] == "--fail-on-changes"
+    main(fail_on_changes)
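To make the envlist parsing above concrete, a hedged sketch with a hypothetical tox environment line (the three-part form carries a framework version suffix that the script discards):

    line = "{py3.7,py3.8}-django-v{3.2}"
    raw_python_versions, framework, _ = line.split("-")  # the three-part form
    versions = raw_python_versions.replace("{", "").replace("}", "").split(",")
    # framework == "django", versions == ["py3.7", "py3.8"]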
diff --git a/sentry_sdk/__init__.py b/sentry_sdk/__init__.py
index b211a6c754..ab5123ec64 100644
--- a/sentry_sdk/__init__.py
+++ b/sentry_sdk/__init__.py
@@ -4,11 +4,10 @@
 from sentry_sdk.client import Client
 
 from sentry_sdk.api import *  # noqa
-from sentry_sdk.api import __all__ as api_all
 
 from sentry_sdk.consts import VERSION  # noqa
 
-__all__ = api_all + [  # noqa
+__all__ = [  # noqa
     "Hub",
     "Scope",
     "Client",
@@ -16,6 +15,22 @@
     "HttpTransport",
     "init",
     "integrations",
+    # From sentry_sdk.api
+    "capture_event",
+    "capture_message",
+    "capture_exception",
+    "add_breadcrumb",
+    "configure_scope",
+    "push_scope",
+    "flush",
+    "last_event_id",
+    "start_span",
+    "start_transaction",
+    "set_tag",
+    "set_context",
+    "set_extra",
+    "set_user",
+    "set_level",
 ]
 
 # Initialize the debug support after everything is loaded
diff --git a/sentry_sdk/_compat.py b/sentry_sdk/_compat.py
index e7933e53da..62abfd1622 100644
--- a/sentry_sdk/_compat.py
+++ b/sentry_sdk/_compat.py
@@ -7,24 +7,27 @@
     from typing import Tuple
     from typing import Any
     from typing import Type
-
     from typing import TypeVar
 
     T = TypeVar("T")
 
 
 PY2 = sys.version_info[0] == 2
+PY33 = sys.version_info[0] == 3 and sys.version_info[1] >= 3
+PY37 = sys.version_info[0] == 3 and sys.version_info[1] >= 7
+PY310 = sys.version_info[0] == 3 and sys.version_info[1] >= 10
+PY311 = sys.version_info[0] == 3 and sys.version_info[1] >= 11
 
 if PY2:
-    import urlparse  # noqa
+    import urlparse
 
     text_type = unicode  # noqa
-    import Queue as queue  # noqa
 
     string_types = (str, text_type)
     number_types = (int, long, float)  # noqa
     int_types = (int, long)  # noqa
     iteritems = lambda x: x.iteritems()  # noqa: B301
+    binary_sequence_types = (bytearray, memoryview)
 
     def implements_str(cls):
         # type: (T) -> T
@@ -37,13 +40,13 @@ def implements_str(cls):
 
 else:
     import urllib.parse as urlparse  # noqa
-    import queue  # noqa
 
     text_type = str
     string_types = (text_type,)  # type: Tuple[type]
     number_types = (int, float)  # type: Tuple[type, type]
-    int_types = (int,)  # noqa
+    int_types = (int,)
     iteritems = lambda x: x.items()
+    binary_sequence_types = (bytes, bytearray, memoryview)
 
     def implements_str(x):
         # type: (T) -> T
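The new version flags gate interpreter-specific code paths elsewhere in the SDK; a hedged sketch of the intended usage (the fallback branch is illustrative):

    from sentry_sdk._compat import PY37

    if PY37:
        from contextvars import ContextVar  # stdlib from Python 3.7 onward
    else:
        ContextVar = None  # an older-interpreter fallback would go here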
diff --git a/sentry_sdk/_functools.py b/sentry_sdk/_functools.py
index a5abeebf52..8dcf79caaa 100644
--- a/sentry_sdk/_functools.py
+++ b/sentry_sdk/_functools.py
@@ -28,14 +28,14 @@ def update_wrapper(
     # type: (Any, Any, Any, Any) -> Any
     """Update a wrapper function to look like the wrapped function
 
-       wrapper is the function to be updated
-       wrapped is the original function
-       assigned is a tuple naming the attributes assigned directly
-       from the wrapped function to the wrapper function (defaults to
-       functools.WRAPPER_ASSIGNMENTS)
-       updated is a tuple naming the attributes of the wrapper that
-       are updated with the corresponding attribute from the wrapped
-       function (defaults to functools.WRAPPER_UPDATES)
+    wrapper is the function to be updated
+    wrapped is the original function
+    assigned is a tuple naming the attributes assigned directly
+    from the wrapped function to the wrapper function (defaults to
+    functools.WRAPPER_ASSIGNMENTS)
+    updated is a tuple naming the attributes of the wrapper that
+    are updated with the corresponding attribute from the wrapped
+    function (defaults to functools.WRAPPER_UPDATES)
     """
     for attr in assigned:
         try:
@@ -57,10 +57,10 @@ def wraps(wrapped, assigned=WRAPPER_ASSIGNMENTS, updated=WRAPPER_UPDATES):
     # type: (Callable[..., Any], Any, Any) -> Callable[[Callable[..., Any]], Callable[..., Any]]
     """Decorator factory to apply update_wrapper() to a wrapper function
 
-       Returns a decorator that invokes update_wrapper() with the decorated
-       function as the wrapper argument and the arguments to wraps() as the
-       remaining arguments. Default arguments are as for update_wrapper().
-       This is a convenience function to simplify applying partial() to
-       update_wrapper().
+    Returns a decorator that invokes update_wrapper() with the decorated
+    function as the wrapper argument and the arguments to wraps() as the
+    remaining arguments. Default arguments are as for update_wrapper().
+    This is a convenience function to simplify applying partial() to
+    update_wrapper().
     """
     return partial(update_wrapper, wrapped=wrapped, assigned=assigned, updated=updated)
diff --git a/sentry_sdk/_queue.py b/sentry_sdk/_queue.py
new file mode 100644
index 0000000000..fc845f70d1
--- /dev/null
+++ b/sentry_sdk/_queue.py
@@ -0,0 +1,227 @@
+"""
+A fork of Python 3.6's stdlib queue with Lock swapped out for RLock to avoid a
+deadlock while garbage collecting.
+
+See
+https://codewithoutrules.com/2017/08/16/concurrency-python/
+https://bugs.python.org/issue14976
+https://github.com/sqlalchemy/sqlalchemy/blob/4eb747b61f0c1b1c25bdee3856d7195d10a0c227/lib/sqlalchemy/queue.py#L1
+
+We also vendor the code to evade eventlet's broken monkeypatching, see
+https://github.com/getsentry/sentry-python/pull/484
+"""
+
+import threading
+
+from collections import deque
+from time import time
+
+from sentry_sdk._types import MYPY
+
+if MYPY:
+    from typing import Any
+
+__all__ = ["EmptyError", "FullError", "Queue"]
+
+
+class EmptyError(Exception):
+    "Exception raised by Queue.get(block=0)/get_nowait()."
+    pass
+
+
+class FullError(Exception):
+    "Exception raised by Queue.put(block=0)/put_nowait()."
+    pass
+
+
+class Queue(object):
+    """Create a queue object with a given maximum size.
+
+    If maxsize is <= 0, the queue size is infinite.
+    """
+
+    def __init__(self, maxsize=0):
+        self.maxsize = maxsize
+        self._init(maxsize)
+
+        # mutex must be held whenever the queue is mutating.  All methods
+        # that acquire mutex must release it before returning.  mutex
+        # is shared between the three conditions, so acquiring and
+        # releasing the conditions also acquires and releases mutex.
+        self.mutex = threading.RLock()
+
+        # Notify not_empty whenever an item is added to the queue; a
+        # thread waiting to get is notified then.
+        self.not_empty = threading.Condition(self.mutex)
+
+        # Notify not_full whenever an item is removed from the queue;
+        # a thread waiting to put is notified then.
+        self.not_full = threading.Condition(self.mutex)
+
+        # Notify all_tasks_done whenever the number of unfinished tasks
+        # drops to zero; thread waiting to join() is notified to resume
+        self.all_tasks_done = threading.Condition(self.mutex)
+        self.unfinished_tasks = 0
+
+    def task_done(self):
+        """Indicate that a formerly enqueued task is complete.
+
+        Used by Queue consumer threads.  For each get() used to fetch a task,
+        a subsequent call to task_done() tells the queue that the processing
+        on the task is complete.
+
+        If a join() is currently blocking, it will resume when all items
+        have been processed (meaning that a task_done() call was received
+        for every item that had been put() into the queue).
+
+        Raises a ValueError if called more times than there were items
+        placed in the queue.
+        """
+        with self.all_tasks_done:
+            unfinished = self.unfinished_tasks - 1
+            if unfinished <= 0:
+                if unfinished < 0:
+                    raise ValueError("task_done() called too many times")
+                self.all_tasks_done.notify_all()
+            self.unfinished_tasks = unfinished
+
+    def join(self):
+        """Blocks until all items in the Queue have been gotten and processed.
+
+        The count of unfinished tasks goes up whenever an item is added to the
+        queue. The count goes down whenever a consumer thread calls task_done()
+        to indicate the item was retrieved and all work on it is complete.
+
+        When the count of unfinished tasks drops to zero, join() unblocks.
+        """
+        with self.all_tasks_done:
+            while self.unfinished_tasks:
+                self.all_tasks_done.wait()
+
+    def qsize(self):
+        """Return the approximate size of the queue (not reliable!)."""
+        with self.mutex:
+            return self._qsize()
+
+    def empty(self):
+        """Return True if the queue is empty, False otherwise (not reliable!).
+
+        This method is likely to be removed at some point.  Use qsize() == 0
+        as a direct substitute, but be aware that either approach risks a race
+        condition where a queue can grow before the result of empty() or
+        qsize() can be used.
+
+        To create code that needs to wait for all queued tasks to be
+        completed, the preferred technique is to use the join() method.
+        """
+        with self.mutex:
+            return not self._qsize()
+
+    def full(self):
+        """Return True if the queue is full, False otherwise (not reliable!).
+
+        This method is likely to be removed at some point.  Use qsize() >= n
+        as a direct substitute, but be aware that either approach risks a race
+        condition where a queue can shrink before the result of full() or
+        qsize() can be used.
+        """
+        with self.mutex:
+            return 0 < self.maxsize <= self._qsize()
+
+    def put(self, item, block=True, timeout=None):
+        """Put an item into the queue.
+
+        If optional args 'block' is true and 'timeout' is None (the default),
+        block if necessary until a free slot is available. If 'timeout' is
+        a non-negative number, it blocks at most 'timeout' seconds and raises
+        the FullError exception if no free slot was available within that time.
+        Otherwise ('block' is false), put an item on the queue if a free slot
+        is immediately available, else raise the FullError exception ('timeout'
+        is ignored in that case).
+        """
+        with self.not_full:
+            if self.maxsize > 0:
+                if not block:
+                    if self._qsize() >= self.maxsize:
+                        raise FullError()
+                elif timeout is None:
+                    while self._qsize() >= self.maxsize:
+                        self.not_full.wait()
+                elif timeout < 0:
+                    raise ValueError("'timeout' must be a non-negative number")
+                else:
+                    endtime = time() + timeout
+                    while self._qsize() >= self.maxsize:
+                        remaining = endtime - time()
+                        if remaining <= 0.0:
+                            raise FullError()
+                        self.not_full.wait(remaining)
+            self._put(item)
+            self.unfinished_tasks += 1
+            self.not_empty.notify()
+
+    def get(self, block=True, timeout=None):
+        """Remove and return an item from the queue.
+
+        If optional args 'block' is true and 'timeout' is None (the default),
+        block if necessary until an item is available. If 'timeout' is
+        a non-negative number, it blocks at most 'timeout' seconds and raises
+        the EmptyError exception if no item was available within that time.
+        Otherwise ('block' is false), return an item if one is immediately
+        available, else raise the EmptyError exception ('timeout' is ignored
+        in that case).
+        """
+        with self.not_empty:
+            if not block:
+                if not self._qsize():
+                    raise EmptyError()
+            elif timeout is None:
+                while not self._qsize():
+                    self.not_empty.wait()
+            elif timeout < 0:
+                raise ValueError("'timeout' must be a non-negative number")
+            else:
+                endtime = time() + timeout
+                while not self._qsize():
+                    remaining = endtime - time()
+                    if remaining <= 0.0:
+                        raise EmptyError()
+                    self.not_empty.wait(remaining)
+            item = self._get()
+            self.not_full.notify()
+            return item
+
+    def put_nowait(self, item):
+        """Put an item into the queue without blocking.
+
+        Only enqueue the item if a free slot is immediately available.
+        Otherwise raise the FullError exception.
+        """
+        return self.put(item, block=False)
+
+    def get_nowait(self):
+        """Remove and return an item from the queue without blocking.
+
+        Only get an item if one is immediately available. Otherwise
+        raise the EmptyError exception.
+        """
+        return self.get(block=False)
+
+    # Override these methods to implement other queue organizations
+    # (e.g. stack or priority queue).
+    # These will only be called with appropriate locks held
+
+    # Initialize the queue representation
+    def _init(self, maxsize):
+        self.queue = deque()  # type: Any
+
+    def _qsize(self):
+        return len(self.queue)
+
+    # Put a new item in the queue
+    def _put(self, item):
+        self.queue.append(item)
+
+    # Get an item from the queue
+    def _get(self):
+        return self.queue.popleft()
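A minimal usage sketch of the vendored queue, exercising the non-blocking variants and the renamed exceptions:

    from sentry_sdk._queue import EmptyError, FullError, Queue

    q = Queue(maxsize=1)
    q.put_nowait("event")
    try:
        q.put_nowait("another event")  # the queue is full, so this raises immediately
    except FullError:
        pass
    assert q.get_nowait() == "event"
    try:
        q.get_nowait()  # the queue is empty again
    except EmptyError:
        pass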
diff --git a/sentry_sdk/_types.py b/sentry_sdk/_types.py
index 74020aea57..7064192977 100644
--- a/sentry_sdk/_types.py
+++ b/sentry_sdk/_types.py
@@ -12,6 +12,7 @@
     from typing import Optional
     from typing import Tuple
     from typing import Type
+    from typing import Union
     from typing_extensions import Literal
 
     ExcInfo = Tuple[
@@ -24,14 +25,59 @@
     Breadcrumb = Dict[str, Any]
     BreadcrumbHint = Dict[str, Any]
 
+    SamplingContext = Dict[str, Any]
+
     EventProcessor = Callable[[Event, Hint], Optional[Event]]
     ErrorProcessor = Callable[[Event, ExcInfo], Optional[Event]]
     BreadcrumbProcessor = Callable[[Breadcrumb, BreadcrumbHint], Optional[Breadcrumb]]
+    TransactionProcessor = Callable[[Event, Hint], Optional[Event]]
+
+    TracesSampler = Callable[[SamplingContext], Union[float, int, bool]]
 
     # https://github.com/python/mypy/issues/5710
     NotImplementedType = Any
 
     EventDataCategory = Literal[
-        "default", "error", "crash", "transaction", "security", "attachment", "session"
+        "default",
+        "error",
+        "crash",
+        "transaction",
+        "security",
+        "attachment",
+        "session",
+        "internal",
+        "profile",
     ]
     SessionStatus = Literal["ok", "exited", "crashed", "abnormal"]
+    EndpointType = Literal["store", "envelope"]
+
+    DurationUnit = Literal[
+        "nanosecond",
+        "microsecond",
+        "millisecond",
+        "second",
+        "minute",
+        "hour",
+        "day",
+        "week",
+    ]
+
+    InformationUnit = Literal[
+        "bit",
+        "byte",
+        "kilobyte",
+        "kibibyte",
+        "megabyte",
+        "mebibyte",
+        "gigabyte",
+        "gibibyte",
+        "terabyte",
+        "tebibyte",
+        "petabyte",
+        "pebibyte",
+        "exabyte",
+        "exbibyte",
+    ]
+
+    FractionUnit = Literal["ratio", "percent"]
+    MeasurementUnit = Union[DurationUnit, InformationUnit, FractionUnit, str]
diff --git a/sentry_sdk/api.py b/sentry_sdk/api.py
index 0f1cdfc741..ffa017cfc1 100644
--- a/sentry_sdk/api.py
+++ b/sentry_sdk/api.py
@@ -1,10 +1,10 @@
 import inspect
-from contextlib import contextmanager
 
 from sentry_sdk.hub import Hub
 from sentry_sdk.scope import Scope
 
 from sentry_sdk._types import MYPY
+from sentry_sdk.tracing import NoOpSpan
 
 if MYPY:
     from typing import Any
@@ -14,9 +14,10 @@
     from typing import Callable
     from typing import TypeVar
     from typing import ContextManager
+    from typing import Union
 
-    from sentry_sdk._types import Event, Hint, Breadcrumb, BreadcrumbHint
-    from sentry_sdk.tracing import Span
+    from sentry_sdk._types import Event, Hint, Breadcrumb, BreadcrumbHint, ExcInfo
+    from sentry_sdk.tracing import Span, Transaction
 
     T = TypeVar("T")
     F = TypeVar("F", bound=Callable[..., Any])
@@ -27,6 +28,7 @@ def overload(x):
         return x
 
 
+# When changing this, update __all__ in __init__.py too
 __all__ = [
     "capture_event",
     "capture_message",
@@ -37,6 +39,7 @@ def overload(x):
     "flush",
     "last_event_id",
     "start_span",
+    "start_transaction",
     "set_tag",
     "set_context",
     "set_extra",
@@ -68,13 +71,10 @@ def capture_event(
     event,  # type: Event
     hint=None,  # type: Optional[Hint]
     scope=None,  # type: Optional[Any]
-    **scope_args  # type: Dict[str, Any]
+    **scope_args  # type: Any
 ):
     # type: (...) -> Optional[str]
-    hub = Hub.current
-    if hub is not None:
-        return hub.capture_event(event, hint, scope=scope, **scope_args)
-    return None
+    return Hub.current.capture_event(event, hint, scope=scope, **scope_args)
 
 
 @hubmethod
@@ -82,26 +82,20 @@ def capture_message(
     message,  # type: str
     level=None,  # type: Optional[str]
     scope=None,  # type: Optional[Any]
-    **scope_args  # type: Dict[str, Any]
+    **scope_args  # type: Any
 ):
     # type: (...) -> Optional[str]
-    hub = Hub.current
-    if hub is not None:
-        return hub.capture_message(message, level, scope=scope, **scope_args)
-    return None
+    return Hub.current.capture_message(message, level, scope=scope, **scope_args)
 
 
 @hubmethod
 def capture_exception(
-    error=None,  # type: Optional[BaseException]
+    error=None,  # type: Optional[Union[BaseException, ExcInfo]]
     scope=None,  # type: Optional[Any]
-    **scope_args  # type: Dict[str, Any]
+    **scope_args  # type: Any
 ):
     # type: (...) -> Optional[str]
-    hub = Hub.current
-    if hub is not None:
-        return hub.capture_exception(error, scope=scope, **scope_args)
-    return None
+    return Hub.current.capture_exception(error, scope=scope, **scope_args)
 
 
 @hubmethod
@@ -111,117 +105,81 @@ def add_breadcrumb(
     **kwargs  # type: Any
 ):
     # type: (...) -> None
-    hub = Hub.current
-    if hub is not None:
-        return hub.add_breadcrumb(crumb, hint, **kwargs)
+    return Hub.current.add_breadcrumb(crumb, hint, **kwargs)
 
 
-@overload  # noqa
+@overload
 def configure_scope():
     # type: () -> ContextManager[Scope]
     pass
 
 
-@overload  # noqa
-def configure_scope(
+@overload
+def configure_scope(  # noqa: F811
     callback,  # type: Callable[[Scope], None]
 ):
     # type: (...) -> None
     pass
 
 
-@hubmethod  # noqa
-def configure_scope(
+@hubmethod
+def configure_scope(  # noqa: F811
     callback=None,  # type: Optional[Callable[[Scope], None]]
 ):
     # type: (...) -> Optional[ContextManager[Scope]]
-    hub = Hub.current
-    if hub is not None:
-        return hub.configure_scope(callback)
-    elif callback is None:
-
-        @contextmanager
-        def inner():
-            yield Scope()
+    return Hub.current.configure_scope(callback)
 
-        return inner()
-    else:
-        # returned if user provided callback
-        return None
 
-
-@overload  # noqa
+@overload
 def push_scope():
     # type: () -> ContextManager[Scope]
     pass
 
 
-@overload  # noqa
-def push_scope(
+@overload
+def push_scope(  # noqa: F811
     callback,  # type: Callable[[Scope], None]
 ):
     # type: (...) -> None
     pass
 
 
-@hubmethod  # noqa
-def push_scope(
+@hubmethod
+def push_scope(  # noqa: F811
     callback=None,  # type: Optional[Callable[[Scope], None]]
 ):
     # type: (...) -> Optional[ContextManager[Scope]]
-    hub = Hub.current
-    if hub is not None:
-        return hub.push_scope(callback)
-    elif callback is None:
+    return Hub.current.push_scope(callback)
 
-        @contextmanager
-        def inner():
-            yield Scope()
 
-        return inner()
-    else:
-        # returned if user provided callback
-        return None
-
-
-@scopemethod  # noqa
+@scopemethod
 def set_tag(key, value):
     # type: (str, Any) -> None
-    hub = Hub.current
-    if hub is not None:
-        hub.scope.set_tag(key, value)
+    return Hub.current.scope.set_tag(key, value)
 
 
-@scopemethod  # noqa
+@scopemethod
 def set_context(key, value):
-    # type: (str, Any) -> None
-    hub = Hub.current
-    if hub is not None:
-        hub.scope.set_context(key, value)
+    # type: (str, Dict[str, Any]) -> None
+    return Hub.current.scope.set_context(key, value)
 
 
-@scopemethod  # noqa
+@scopemethod
 def set_extra(key, value):
     # type: (str, Any) -> None
-    hub = Hub.current
-    if hub is not None:
-        hub.scope.set_extra(key, value)
+    return Hub.current.scope.set_extra(key, value)
 
 
-@scopemethod  # noqa
+@scopemethod
 def set_user(value):
-    # type: (Dict[str, Any]) -> None
-    hub = Hub.current
-    if hub is not None:
-        hub.scope.set_user(value)
+    # type: (Optional[Dict[str, Any]]) -> None
+    return Hub.current.scope.set_user(value)
 
 
-@scopemethod  # noqa
+@scopemethod
 def set_level(value):
     # type: (str) -> None
-    hub = Hub.current
-    if hub is not None:
-        hub.scope.set_level(value)
+    return Hub.current.scope.set_level(value)
 
 
 @hubmethod
@@ -230,18 +188,13 @@ def flush(
     callback=None,  # type: Optional[Callable[[int, float], None]]
 ):
     # type: (...) -> None
-    hub = Hub.current
-    if hub is not None:
-        return hub.flush(timeout=timeout, callback=callback)
+    return Hub.current.flush(timeout=timeout, callback=callback)
 
 
 @hubmethod
 def last_event_id():
     # type: () -> Optional[str]
-    hub = Hub.current
-    if hub is not None:
-        return hub.last_event_id()
-    return None
+    return Hub.current.last_event_id()
 
 
 @hubmethod
@@ -250,7 +203,13 @@ def start_span(
     **kwargs  # type: Any
 ):
     # type: (...) -> Span
-
-    # TODO: All other functions in this module check for
-    # `Hub.current is None`. That actually should never happen?
     return Hub.current.start_span(span=span, **kwargs)
+
+
+@hubmethod
+def start_transaction(
+    transaction=None,  # type: Optional[Transaction]
+    **kwargs  # type: Any
+):
+    # type: (...) -> Union[Transaction, NoOpSpan]
+    return Hub.current.start_transaction(transaction, **kwargs)
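The new top-level helper mirrors Hub.start_transaction; a hedged usage sketch (the operation and transaction names are illustrative):

    import sentry_sdk

    with sentry_sdk.start_transaction(op="task", name="process-invoice") as transaction:
        with transaction.start_child(op="db", description="fetch invoice"):
            ...  # the work to be timed goes here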
diff --git a/sentry_sdk/attachments.py b/sentry_sdk/attachments.py
new file mode 100644
index 0000000000..b7b6b0b45b
--- /dev/null
+++ b/sentry_sdk/attachments.py
@@ -0,0 +1,55 @@
+import os
+import mimetypes
+
+from sentry_sdk._types import MYPY
+from sentry_sdk.envelope import Item, PayloadRef
+
+if MYPY:
+    from typing import Optional, Union, Callable
+
+
+class Attachment(object):
+    def __init__(
+        self,
+        bytes=None,  # type: Union[None, bytes, Callable[[], bytes]]
+        filename=None,  # type: Optional[str]
+        path=None,  # type: Optional[str]
+        content_type=None,  # type: Optional[str]
+        add_to_transactions=False,  # type: bool
+    ):
+        # type: (...) -> None
+        if bytes is None and path is None:
+            raise TypeError("path or raw bytes required for attachment")
+        if filename is None and path is not None:
+            filename = os.path.basename(path)
+        if filename is None:
+            raise TypeError("filename is required for attachment")
+        if content_type is None:
+            content_type = mimetypes.guess_type(filename)[0]
+        self.bytes = bytes
+        self.filename = filename
+        self.path = path
+        self.content_type = content_type
+        self.add_to_transactions = add_to_transactions
+
+    def to_envelope_item(self):
+        # type: () -> Item
+        """Returns an envelope item for this attachment."""
+        payload = None  # type: Union[None, PayloadRef, bytes]
+        if self.bytes is not None:
+            if callable(self.bytes):
+                payload = self.bytes()
+            else:
+                payload = self.bytes
+        else:
+            payload = PayloadRef(path=self.path)
+        return Item(
+            payload=payload,
+            type="attachment",
+            content_type=self.content_type,
+            filename=self.filename,
+        )
+
+    def __repr__(self):
+        # type: () -> str
+        return "" % (self.filename,)
diff --git a/sentry_sdk/client.py b/sentry_sdk/client.py
index 036fc48340..e5df64fbfb 100644
--- a/sentry_sdk/client.py
+++ b/sentry_sdk/client.py
@@ -2,25 +2,34 @@
 import uuid
 import random
 from datetime import datetime
-from itertools import islice
 import socket
 
 from sentry_sdk._compat import string_types, text_type, iteritems
 from sentry_sdk.utils import (
-    handle_in_app,
-    get_type_name,
     capture_internal_exceptions,
     current_stacktrace,
     disable_capture_event,
+    format_timestamp,
+    get_sdk_name,
+    get_type_name,
+    get_default_release,
+    handle_in_app,
     logger,
 )
 from sentry_sdk.serializer import serialize
 from sentry_sdk.transport import make_transport
-from sentry_sdk.consts import DEFAULT_OPTIONS, SDK_INFO, ClientConstructor
+from sentry_sdk.consts import (
+    DEFAULT_OPTIONS,
+    INSTRUMENTER,
+    VERSION,
+    ClientConstructor,
+)
 from sentry_sdk.integrations import setup_integrations
 from sentry_sdk.utils import ContextVar
 from sentry_sdk.sessions import SessionFlusher
 from sentry_sdk.envelope import Envelope
+from sentry_sdk.profiler import setup_profiler
+from sentry_sdk.tracing_utils import has_tracestate_enabled, reinflate_tracestate
 
 from sentry_sdk._types import MYPY
 
@@ -28,17 +37,23 @@
     from typing import Any
     from typing import Callable
     from typing import Dict
-    from typing import List
     from typing import Optional
 
     from sentry_sdk.scope import Scope
     from sentry_sdk._types import Event, Hint
-    from sentry_sdk.sessions import Session
+    from sentry_sdk.session import Session
 
 
 _client_init_debug = ContextVar("client_init_debug")
 
 
+SDK_INFO = {
+    "name": "sentry.python",  # SDK name will be overridden after integrations have been loaded with sentry_sdk.integrations.setup_integrations()
+    "version": VERSION,
+    "packages": [{"name": "pypi:sentry-sdk", "version": VERSION}],
+}
+
+
 def _get_options(*args, **kwargs):
     # type: (*Optional[str], **Any) -> Dict[str, Any]
     if args and (isinstance(args[0], (text_type, bytes, str)) or args[0] is None):
@@ -47,6 +62,9 @@ def _get_options(*args, **kwargs):
     else:
         dsn = None
 
+    if len(args) > 1:
+        raise TypeError("Only a single positional argument is expected")
+
     rv = dict(DEFAULT_OPTIONS)
     options = dict(*args, **kwargs)
     if dsn is not None and options.get("dsn") is None:
@@ -61,14 +79,17 @@ def _get_options(*args, **kwargs):
         rv["dsn"] = os.environ.get("SENTRY_DSN")
 
     if rv["release"] is None:
-        rv["release"] = os.environ.get("SENTRY_RELEASE")
+        rv["release"] = get_default_release()
 
     if rv["environment"] is None:
-        rv["environment"] = os.environ.get("SENTRY_ENVIRONMENT")
+        rv["environment"] = os.environ.get("SENTRY_ENVIRONMENT") or "production"
 
     if rv["server_name"] is None and hasattr(socket, "gethostname"):
         rv["server_name"] = socket.gethostname()
 
+    if rv["instrumenter"] is None:
+        rv["instrumenter"] = INSTRUMENTER.SENTRY
+
     return rv
 
 
@@ -97,24 +118,16 @@ def _init_impl(self):
         # type: () -> None
         old_debug = _client_init_debug.get(False)
 
-        def _send_sessions(sessions):
-            # type: (List[Any]) -> None
-            transport = self.transport
-            if not transport or not sessions:
-                return
-            sessions_iter = iter(sessions)
-            while True:
-                envelope = Envelope()
-                for session in islice(sessions_iter, 100):
-                    envelope.add_session(session)
-                if not envelope.items:
-                    break
-                transport.capture_envelope(envelope)
+        def _capture_envelope(envelope):
+            # type: (Envelope) -> None
+            if self.transport is not None:
+                self.transport.capture_envelope(envelope)
 
         try:
             _client_init_debug.set(self.options["debug"])
             self.transport = make_transport(self.options)
-            self.session_flusher = SessionFlusher(flush_func=_send_sessions)
+
+            self.session_flusher = SessionFlusher(capture_func=_capture_envelope)
 
             request_bodies = ("always", "never", "small", "medium")
             if self.options["request_bodies"] not in request_bodies:
@@ -127,13 +140,25 @@ def _send_sessions(sessions):
             self.integrations = setup_integrations(
                 self.options["integrations"],
                 with_defaults=self.options["default_integrations"],
-                with_auto_enabling_integrations=self.options["_experiments"].get(
-                    "auto_enabling_integrations", False
-                ),
+                with_auto_enabling_integrations=self.options[
+                    "auto_enabling_integrations"
+                ],
             )
+
+            sdk_name = get_sdk_name(list(self.integrations.keys()))
+            SDK_INFO["name"] = sdk_name
+            logger.debug("Setting SDK name to '%s'", sdk_name)
+
         finally:
             _client_init_debug.set(old_debug)
 
+        profiles_sample_rate = self.options["_experiments"].get("profiles_sample_rate")
+        if profiles_sample_rate is not None and profiles_sample_rate > 0:
+            try:
+                setup_profiler(self.options)
+            except ValueError as e:
+                logger.debug(str(e))
+
     @property
     def dsn(self):
         # type: () -> Optional[str]
@@ -143,7 +168,7 @@ def dsn(self):
     def _prepare_event(
         self,
         event,  # type: Event
-        hint,  # type: Optional[Hint]
+        hint,  # type: Hint
         scope,  # type: Optional[Scope]
     ):
         # type: (...) -> Optional[Event]
@@ -151,12 +176,19 @@ def _prepare_event(
         if event.get("timestamp") is None:
             event["timestamp"] = datetime.utcnow()
 
-        hint = dict(hint or ())  # type: Hint
-
         if scope is not None:
+            is_transaction = event.get("type") == "transaction"
             event_ = scope.apply_to_event(event, hint)
+
+            # one of the event/error processors returned None
             if event_ is None:
+                if self.transport:
+                    self.transport.record_lost_event(
+                        "event_processor",
+                        data_category=("transaction" if is_transaction else "error"),
+                    )
                 return None
+
             event = event_
 
         if (
@@ -196,15 +228,37 @@ def _prepare_event(
         # Postprocess the event here so that annotated types do
         # generally not surface in before_send
         if event is not None:
-            event = serialize(event)
+            event = serialize(
+                event,
+                smart_transaction_trimming=self.options["_experiments"].get(
+                    "smart_transaction_trimming"
+                ),
+            )
 
         before_send = self.options["before_send"]
-        if before_send is not None:
+        if before_send is not None and event.get("type") != "transaction":
             new_event = None
             with capture_internal_exceptions():
                 new_event = before_send(event, hint or {})
             if new_event is None:
                 logger.info("before send dropped event (%s)", event)
+                if self.transport:
+                    self.transport.record_lost_event(
+                        "before_send", data_category="error"
+                    )
+            event = new_event  # type: ignore
+
+        before_send_transaction = self.options["before_send_transaction"]
+        if before_send_transaction is not None and event.get("type") == "transaction":
+            new_event = None
+            with capture_internal_exceptions():
+                new_event = before_send_transaction(event, hint or {})
+            if new_event is None:
+                logger.info("before send transaction dropped event (%s)", event)
+                if self.transport:
+                    self.transport.record_lost_event(
+                        "before_send", data_category="transaction"
+                    )
             event = new_event  # type: ignore
 
         return event
@@ -215,17 +269,18 @@ def _is_ignored_error(self, event, hint):
         if exc_info is None:
             return False
 
-        type_name = get_type_name(exc_info[0])
-        full_name = "%s.%s" % (exc_info[0].__module__, type_name)
+        error = exc_info[0]
+        error_type_name = get_type_name(exc_info[0])
+        error_full_name = "%s.%s" % (exc_info[0].__module__, error_type_name)
 
-        for errcls in self.options["ignore_errors"]:
+        for ignored_error in self.options["ignore_errors"]:
             # String types are matched against the type name in the
             # exception only
-            if isinstance(errcls, string_types):
-                if errcls == full_name or errcls == type_name:
+            if isinstance(ignored_error, string_types):
+                if ignored_error == error_full_name or ignored_error == error_type_name:
                     return True
             else:
-                if issubclass(exc_info[0], errcls):
+                if issubclass(error, ignored_error):
                     return True
 
         return False
@@ -237,16 +292,35 @@ def _should_capture(
         scope=None,  # type: Optional[Scope]
     ):
         # type: (...) -> bool
-        if scope is not None and not scope._should_capture:
+        # Transactions are sampled independent of error events.
+        is_transaction = event.get("type") == "transaction"
+        if is_transaction:
+            return True
+
+        ignoring_prevents_recursion = scope is not None and not scope._should_capture
+        if ignoring_prevents_recursion:
             return False
 
-        if (
+        ignored_by_config_option = self._is_ignored_error(event, hint)
+        if ignored_by_config_option:
+            return False
+
+        return True
+
+    def _should_sample_error(
+        self,
+        event,  # type: Event
+    ):
+        # type: (...) -> bool
+        not_in_sample_rate = (
             self.options["sample_rate"] < 1.0
             and random.random() >= self.options["sample_rate"]
-        ):
-            return False
+        )
+        if not_in_sample_rate:
+            # because we will not sample this event, record a "lost event".
+            if self.transport:
+                self.transport.record_lost_event("sample_rate", data_category="error")
 
-        if self._is_ignored_error(event, hint):
             return False
 
         return True
@@ -262,20 +336,14 @@ def _update_session_from_event(
         errored = False
         user_agent = None
 
-        # Figure out if this counts as an error and if we should mark the
-        # session as crashed.
-        level = event.get("level")
-        if level == "fatal":
-            crashed = True
-        if not crashed:
-            exceptions = (event.get("exception") or {}).get("values")
-            if exceptions:
-                errored = True
-                for error in exceptions:
-                    mechanism = error.get("mechanism")
-                    if mechanism and mechanism.get("handled") is False:
-                        crashed = True
-                        break
+        exceptions = (event.get("exception") or {}).get("values")
+        if exceptions:
+            errored = True
+            for error in exceptions:
+                mechanism = error.get("mechanism")
+                if mechanism and mechanism.get("handled") is False:
+                    crashed = True
+                    break
 
         user = event.get("user")
 
@@ -316,10 +384,15 @@ def capture_event(
         if hint is None:
             hint = {}
         event_id = event.get("event_id")
+        hint = dict(hint or ())  # type: Hint
+
         if event_id is None:
             event["event_id"] = event_id = uuid.uuid4().hex
         if not self._should_capture(event, hint, scope):
             return None
+
+        profile = event.pop("profile", None)
+
         event_opt = self._prepare_event(event, hint, scope)
         if event_opt is None:
             return None
@@ -330,7 +403,60 @@ def capture_event(
         if session:
             self._update_session_from_event(session, event)
 
-        self.transport.capture_event(event_opt)
+        is_transaction = event_opt.get("type") == "transaction"
+
+        if not is_transaction and not self._should_sample_error(event):
+            return None
+
+        attachments = hint.get("attachments")
+
+        # this is outside of the `if` immediately below because even if we don't
+        # use the value, we want to make sure we remove it before the event is
+        # sent
+        raw_tracestate = (
+            event_opt.get("contexts", {}).get("trace", {}).pop("tracestate", "")
+        )
+
+        dynamic_sampling_context = (
+            event_opt.get("contexts", {})
+            .get("trace", {})
+            .pop("dynamic_sampling_context", {})
+        )
+
+        # Transactions or events with attachments should go to the /envelope/
+        # endpoint.
+        if is_transaction or attachments:
+
+            headers = {
+                "event_id": event_opt["event_id"],
+                "sent_at": format_timestamp(datetime.utcnow()),
+            }
+
+            if has_tracestate_enabled():
+                tracestate_data = raw_tracestate and reinflate_tracestate(
+                    raw_tracestate.replace("sentry=", "")
+                )
+
+                if tracestate_data:
+                    headers["trace"] = tracestate_data
+            elif dynamic_sampling_context:
+                headers["trace"] = dynamic_sampling_context
+
+            envelope = Envelope(headers=headers)
+
+            if is_transaction:
+                if profile is not None:
+                    envelope.add_profile(profile.to_json(event_opt, self.options))
+                envelope.add_transaction(event_opt)
+            else:
+                envelope.add_event(event_opt)
+
+            for attachment in attachments or ():
+                envelope.add_item(attachment.to_envelope_item())
+            self.transport.capture_envelope(envelope)
+        else:
+            # All other events go to the /store/ endpoint.
+            self.transport.capture_event(event_opt)
         return event_id
 
     def capture_session(
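
To make the routing above concrete, a minimal sketch (placeholder DSN): plain error events still go through `transport.capture_event()` to the /store/ endpoint, while transactions, and any event carrying attachments in its hint, are wrapped in an `Envelope` and sent via `transport.capture_envelope()`:

    import sentry_sdk

    sentry_sdk.init(
        dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder DSN
        traces_sample_rate=1.0,
    )

    # Error path: captured via transport.capture_event() (/store/).
    sentry_sdk.capture_message("plain error event")

    # Transaction path: wrapped in an Envelope, sent via capture_envelope().
    with sentry_sdk.start_transaction(op="task", name="envelope-routing demo"):
        pass
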
@@ -401,7 +527,6 @@ class get_options(ClientConstructor, Dict[str, Any]):  # noqa: N801
     class Client(ClientConstructor, _Client):
         pass
 
-
 else:
     # Alias `get_options` for actual usage. Go through the lambda indirection
     # to throw PyCharm off of the weakly typed signature (it would otherwise
diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py
index 27a078aae5..1e309837a3 100644
--- a/sentry_sdk/consts.py
+++ b/sentry_sdk/consts.py
@@ -1,6 +1,8 @@
 from sentry_sdk._types import MYPY
 
 if MYPY:
+    import sentry_sdk
+
     from typing import Optional
     from typing import Callable
     from typing import Union
@@ -11,10 +13,15 @@
     from typing import Sequence
     from typing_extensions import TypedDict
 
-    from sentry_sdk.transport import Transport
     from sentry_sdk.integrations import Integration
 
-    from sentry_sdk._types import Event, EventProcessor, BreadcrumbProcessor
+    from sentry_sdk._types import (
+        BreadcrumbProcessor,
+        Event,
+        EventProcessor,
+        TracesSampler,
+        TransactionProcessor,
+    )
 
     # Experiments are feature flags to enable and disable certain unstable SDK
     # functionality. Changing them from the defaults (`None`) in production
@@ -25,12 +32,54 @@
         {
             "max_spans": Optional[int],
             "record_sql_params": Optional[bool],
-            "auto_enabling_integrations": Optional[bool],
-            "auto_session_tracking": Optional[bool],
+            "smart_transaction_trimming": Optional[bool],
+            "propagate_tracestate": Optional[bool],
+            "custom_measurements": Optional[bool],
+            "profiles_sample_rate": Optional[float],
+            "profiler_mode": Optional[str],
         },
         total=False,
     )
 
+DEFAULT_QUEUE_SIZE = 100
+DEFAULT_MAX_BREADCRUMBS = 100
+
+SENSITIVE_DATA_SUBSTITUTE = "[Filtered]"
+
+
+class INSTRUMENTER:
+    SENTRY = "sentry"
+    OTEL = "otel"
+
+
+class OP:
+    DB = "db"
+    DB_REDIS = "db.redis"
+    EVENT_DJANGO = "event.django"
+    FUNCTION = "function"
+    FUNCTION_AWS = "function.aws"
+    FUNCTION_GCP = "function.gcp"
+    HTTP_CLIENT = "http.client"
+    HTTP_CLIENT_STREAM = "http.client.stream"
+    HTTP_SERVER = "http.server"
+    MIDDLEWARE_DJANGO = "middleware.django"
+    MIDDLEWARE_STARLETTE = "middleware.starlette"
+    MIDDLEWARE_STARLETTE_RECEIVE = "middleware.starlette.receive"
+    MIDDLEWARE_STARLETTE_SEND = "middleware.starlette.send"
+    MIDDLEWARE_STARLITE = "middleware.starlite"
+    MIDDLEWARE_STARLITE_RECEIVE = "middleware.starlite.receive"
+    MIDDLEWARE_STARLITE_SEND = "middleware.starlite.send"
+    QUEUE_SUBMIT_CELERY = "queue.submit.celery"
+    QUEUE_TASK_CELERY = "queue.task.celery"
+    QUEUE_TASK_RQ = "queue.task.rq"
+    SUBPROCESS = "subprocess"
+    SUBPROCESS_WAIT = "subprocess.wait"
+    SUBPROCESS_COMMUNICATE = "subprocess.communicate"
+    TEMPLATE_RENDER = "template.render"
+    VIEW_RENDER = "view.render"
+    VIEW_RESPONSE_RENDER = "view.response.render"
+    WEBSOCKET_SERVER = "websocket.server"
+
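
The new `OP` constants are plain strings, so they can be passed anywhere an `op` is accepted. A minimal usage sketch (placeholder DSN):

    import sentry_sdk
    from sentry_sdk.consts import OP

    sentry_sdk.init(
        dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder DSN
        traces_sample_rate=1.0,
    )

    with sentry_sdk.start_transaction(op=OP.FUNCTION, name="op-constants demo"):
        with sentry_sdk.start_span(op=OP.DB, description="SELECT 1"):
            pass
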
 
 # This type exists to trick mypy and PyCharm into thinking `init` and `Client`
 # take these arguments (even though they take opaque **kwargs)
@@ -39,17 +88,18 @@ def __init__(
         self,
         dsn=None,  # type: Optional[str]
         with_locals=True,  # type: bool
-        max_breadcrumbs=100,  # type: int
+        max_breadcrumbs=DEFAULT_MAX_BREADCRUMBS,  # type: int
         release=None,  # type: Optional[str]
         environment=None,  # type: Optional[str]
         server_name=None,  # type: Optional[str]
-        shutdown_timeout=2,  # type: int
+        shutdown_timeout=2,  # type: float
         integrations=[],  # type: Sequence[Integration]  # noqa: B006
         in_app_include=[],  # type: List[str]  # noqa: B006
         in_app_exclude=[],  # type: List[str]  # noqa: B006
         default_integrations=True,  # type: bool
         dist=None,  # type: Optional[str]
-        transport=None,  # type: Optional[Union[Transport, Type[Transport], Callable[[Event], None]]]
+        transport=None,  # type: Optional[Union[sentry_sdk.transport.Transport, Type[sentry_sdk.transport.Transport], Callable[[Event], None]]]
+        transport_queue_size=DEFAULT_QUEUE_SIZE,  # type: int
         sample_rate=1.0,  # type: float
         send_default_pii=False,  # type: bool
         http_proxy=None,  # type: Optional[str]
@@ -62,10 +112,15 @@ def __init__(
         attach_stacktrace=False,  # type: bool
         ca_certs=None,  # type: Optional[str]
         propagate_traces=True,  # type: bool
-        # DO NOT ENABLE THIS RIGHT NOW UNLESS YOU WANT TO EXCEED YOUR EVENT QUOTA IMMEDIATELY
-        traces_sample_rate=0.0,  # type: float
-        traceparent_v2=False,  # type: bool
+        traces_sample_rate=None,  # type: Optional[float]
+        traces_sampler=None,  # type: Optional[TracesSampler]
+        auto_enabling_integrations=True,  # type: bool
+        auto_session_tracking=True,  # type: bool
+        send_client_reports=True,  # type: bool
         _experiments={},  # type: Experiments  # noqa: B006
+        proxy_headers=None,  # type: Optional[Dict[str, str]]
+        instrumenter=INSTRUMENTER.SENTRY,  # type: Optional[str]
+        before_send_transaction=None,  # type: Optional[TransactionProcessor]
     ):
         # type: (...) -> None
         pass
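
A hedged sketch of the new `traces_sampler` option: the callable receives the `sampling_context` built in `Hub.start_transaction` (see hub.py below), i.e. `transaction_context`, `parent_sampled`, and any `custom_sampling_context`, and returns a sample rate or a boolean. The DSN and the `/health` route prefix are placeholders:

    import sentry_sdk

    def traces_sampler(sampling_context):
        # Inherit the parent's decision when there is one.
        parent_sampled = sampling_context.get("parent_sampled")
        if parent_sampled is not None:
            return parent_sampled
        # Drop health checks, sample everything else at 10%.
        name = sampling_context["transaction_context"].get("name") or ""
        return 0.0 if name.startswith("/health") else 0.1

    sentry_sdk.init(
        dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder DSN
        traces_sampler=traces_sampler,
    )
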
@@ -89,9 +144,4 @@ def _get_default_options():
 del _get_default_options
 
 
-VERSION = "0.14.4"
-SDK_INFO = {
-    "name": "sentry.python",
-    "version": VERSION,
-    "packages": [{"name": "pypi:sentry-sdk", "version": VERSION}],
-}
+VERSION = "1.14.0"
diff --git a/sentry_sdk/envelope.py b/sentry_sdk/envelope.py
index 701b84a649..24eb87b91f 100644
--- a/sentry_sdk/envelope.py
+++ b/sentry_sdk/envelope.py
@@ -1,15 +1,14 @@
 import io
 import json
-import shutil
 import mimetypes
 
-from sentry_sdk._compat import text_type
+from sentry_sdk._compat import text_type, PY2
 from sentry_sdk._types import MYPY
-from sentry_sdk.sessions import Session
+from sentry_sdk.session import Session
+from sentry_sdk.utils import json_dumps, capture_internal_exceptions
 
 if MYPY:
     from typing import Any
-    from typing import Tuple
     from typing import Optional
     from typing import Union
     from typing import Dict
@@ -19,17 +18,18 @@
     from sentry_sdk._types import Event, EventDataCategory
 
 
-def get_event_data_category(event):
-    # type: (Event) -> EventDataCategory
-    if event.get("type") == "transaction":
-        return "transaction"
-    return "error"
+def parse_json(data):
+    # type: (Union[bytes, text_type]) -> Any
+    # on some Python 3 versions json.loads does not accept bytes, so decode first
+    if not PY2 and isinstance(data, bytes):
+        data = data.decode("utf-8", "replace")
+    return json.loads(data)
 
 
 class Envelope(object):
     def __init__(
         self,
-        headers=None,  # type: Optional[Dict[str, str]]
+        headers=None,  # type: Optional[Dict[str, Any]]
         items=None,  # type: Optional[List[Item]]
     ):
         # type: (...) -> None
@@ -56,6 +56,18 @@ def add_event(
         # type: (...) -> None
         self.add_item(Item(payload=PayloadRef(json=event), type="event"))
 
+    def add_transaction(
+        self, transaction  # type: Event
+    ):
+        # type: (...) -> None
+        self.add_item(Item(payload=PayloadRef(json=transaction), type="transaction"))
+
+    def add_profile(
+        self, profile  # type: Any
+    ):
+        # type: (...) -> None
+        self.add_item(Item(payload=PayloadRef(json=profile), type="profile"))
+
     def add_session(
         self, session  # type: Union[Session, Any]
     ):
@@ -64,6 +76,12 @@ def add_session(
             session = session.to_json()
         self.add_item(Item(payload=PayloadRef(json=session), type="session"))
 
+    def add_sessions(
+        self, sessions  # type: Any
+    ):
+        # type: (...) -> None
+        self.add_item(Item(payload=PayloadRef(json=sessions), type="sessions"))
+
     def add_item(
         self, item  # type: Item
     ):
@@ -78,6 +96,14 @@ def get_event(self):
                 return event
         return None
 
+    def get_transaction_event(self):
+        # type: (...) -> Optional[Event]
+        for item in self.items:
+            event = item.get_transaction_event()
+            if event is not None:
+                return event
+        return None
+
     def __iter__(self):
         # type: (...) -> Iterator[Item]
         return iter(self.items)
@@ -86,7 +112,7 @@ def serialize_into(
         self, f  # type: Any
     ):
         # type: (...) -> None
-        f.write(json.dumps(self.headers, allow_nan=False).encode("utf-8"))
+        f.write(json_dumps(self.headers))
         f.write(b"\n")
         for item in self.items:
             item.serialize_into(f)
@@ -102,7 +128,7 @@ def deserialize_from(
         cls, f  # type: Any
     ):
         # type: (...) -> Envelope
-        headers = json.loads(f.readline())
+        headers = parse_json(f.readline())
         items = []
         while 1:
             item = Item.deserialize_from(f)
@@ -139,34 +165,15 @@ def get_bytes(self):
         # type: (...) -> bytes
         if self.bytes is None:
             if self.path is not None:
-                with open(self.path, "rb") as f:
-                    self.bytes = f.read()
+                with capture_internal_exceptions():
+                    with open(self.path, "rb") as f:
+                        self.bytes = f.read()
             elif self.json is not None:
-                self.bytes = json.dumps(self.json, allow_nan=False).encode("utf-8")
+                self.bytes = json_dumps(self.json)
             else:
                 self.bytes = b""
         return self.bytes
 
-    def _prepare_serialize(self):
-        # type: (...) -> Tuple[Any, Any]
-        if self.path is not None and self.bytes is None:
-            f = open(self.path, "rb")
-            f.seek(0, 2)
-            length = f.tell()
-            f.seek(0, 0)
-
-            def writer(out):
-                # type: (Any) -> None
-                try:
-                    shutil.copyfileobj(f, out)
-                finally:
-                    f.close()
-
-            return length, writer
-
-        bytes = self.get_bytes()
-        return len(bytes), lambda f: f.write(bytes)
-
     @property
     def inferred_content_type(self):
         # type: (...) -> str
@@ -190,7 +197,7 @@ class Item(object):
     def __init__(
         self,
         payload,  # type: Union[bytes, text_type, PayloadRef]
-        headers=None,  # type: Optional[Dict[str, str]]
+        headers=None,  # type: Optional[Dict[str, Any]]
         type=None,  # type: Optional[str]
         content_type=None,  # type: Optional[str]
         filename=None,  # type: Optional[str]
@@ -226,18 +233,29 @@ def __repr__(self):
             self.data_category,
         )
 
+    @property
+    def type(self):
+        # type: (...) -> Optional[str]
+        return self.headers.get("type")
+
     @property
     def data_category(self):
         # type: (...) -> EventDataCategory
-        rv = "default"  # type: Any
-        event = self.get_event()
-        if event is not None:
-            rv = get_event_data_category(event)
+        ty = self.headers.get("type")
+        if ty == "session":
+            return "session"
+        elif ty == "attachment":
+            return "attachment"
+        elif ty == "transaction":
+            return "transaction"
+        elif ty == "event":
+            return "error"
+        elif ty == "client_report":
+            return "internal"
+        elif ty == "profile":
+            return "profile"
         else:
-            ty = self.headers.get("type")
-            if ty in ("session", "attachment"):
-                rv = ty
-        return rv
+            return "default"
 
     def get_bytes(self):
         # type: (...) -> bytes
@@ -245,7 +263,16 @@ def get_bytes(self):
 
     def get_event(self):
         # type: (...) -> Optional[Event]
-        if self.headers.get("type") == "event" and self.payload.json is not None:
+        """
+        Returns an error event if there is one.
+        """
+        if self.type == "event" and self.payload.json is not None:
+            return self.payload.json
+        return None
+
+    def get_transaction_event(self):
+        # type: (...) -> Optional[Event]
+        if self.type == "transaction" and self.payload.json is not None:
             return self.payload.json
         return None
 
@@ -254,11 +281,11 @@ def serialize_into(
     ):
         # type: (...) -> None
         headers = dict(self.headers)
-        length, writer = self.payload._prepare_serialize()
-        headers["length"] = length
-        f.write(json.dumps(headers, allow_nan=False).encode("utf-8"))
+        bytes = self.get_bytes()
+        headers["length"] = len(bytes)
+        f.write(json_dumps(headers))
         f.write(b"\n")
-        writer(f)
+        f.write(bytes)
         f.write(b"\n")
 
     def serialize(self):
@@ -275,14 +302,19 @@ def deserialize_from(
         line = f.readline().rstrip()
         if not line:
             return None
-        headers = json.loads(line)
-        length = headers["length"]
-        payload = f.read(length)
-        if headers.get("type") == "event":
-            rv = cls(headers=headers, payload=PayloadRef(json=json.loads(payload)))
+        headers = parse_json(line)
+        length = headers.get("length")
+        if length is not None:
+            payload = f.read(length)
+            f.readline()
+        else:
+            # if no length was specified we need to read up to the end of the line
+            # and strip the trailing newline (if present, i.e. if it is not the very last char in an EOF-terminated envelope)
+            payload = f.readline().rstrip(b"\n")
+        if headers.get("type") in ("event", "transaction", "metric_buckets"):
+            rv = cls(headers=headers, payload=PayloadRef(json=parse_json(payload)))
         else:
             rv = cls(headers=headers, payload=payload)
-        f.readline()
         return rv
 
     @classmethod
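
A minimal round-trip sketch of the serialization format handled above: one JSON headers line for the envelope, then, per item, a JSON headers line (with an optional `length`) followed by the payload and a newline:

    import io
    from sentry_sdk.envelope import Envelope

    envelope = Envelope(headers={"event_id": "0" * 32})
    envelope.add_event({"event_id": "0" * 32, "message": "hello"})

    buf = io.BytesIO()
    envelope.serialize_into(buf)
    buf.seek(0)

    parsed = Envelope.deserialize_from(buf)
    assert parsed.get_event()["message"] == "hello"
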
diff --git a/sentry_sdk/hub.py b/sentry_sdk/hub.py
index 18558761cf..df9de10fe4 100644
--- a/sentry_sdk/hub.py
+++ b/sentry_sdk/hub.py
@@ -1,15 +1,15 @@
 import copy
-import random
 import sys
 
 from datetime import datetime
 from contextlib import contextmanager
 
 from sentry_sdk._compat import with_metaclass
+from sentry_sdk.consts import INSTRUMENTER
 from sentry_sdk.scope import Scope
 from sentry_sdk.client import Client
-from sentry_sdk.tracing import Span
-from sentry_sdk.sessions import Session
+from sentry_sdk.tracing import NoOpSpan, Span, Transaction
+from sentry_sdk.session import Session
 from sentry_sdk.utils import (
     exc_info_from_error,
     event_from_exception,
@@ -97,6 +97,20 @@ def __exit__(self, exc_type, exc_value, tb):
             c.close()
 
 
+def _check_python_deprecations():
+    # type: () -> None
+    version = sys.version_info[:2]
+
+    if version == (3, 4) or version == (3, 5):
+        logger.warning(
+            "sentry-sdk 2.0.0 will drop support for Python %s.",
+            "{}.{}".format(*version),
+        )
+        logger.warning(
+            "Please upgrade to the latest version to continue receiving upgrades and bugfixes."
+        )
+
+
 def _init(*args, **kwargs):
     # type: (*Optional[str], **Any) -> ContextManager[Any]
     """Initializes the SDK and optionally integrations.
@@ -105,6 +119,7 @@ def _init(*args, **kwargs):
     """
     client = Client(*args, **kwargs)  # type: ignore
     Hub.current.bind_client(client)
+    _check_python_deprecations()
     rv = _InitGuard(client)
     return rv
 
@@ -118,10 +133,9 @@ def _init(*args, **kwargs):
     # Use `ClientConstructor` to define the argument types of `init` and
     # `ContextManager[Any]` to tell static analyzers about the return type.
 
-    class init(ClientConstructor, ContextManager[Any]):  # noqa: N801
+    class init(ClientConstructor, _InitGuard):  # noqa: N801
         pass
 
-
 else:
     # Alias `init` for actual usage. Go through the lambda indirection to throw
     # PyCharm off of the weakly typed signature (it would otherwise discover
@@ -276,7 +290,7 @@ def get_integration(
         else:
             raise ValueError("Integration has no name")
 
-        client = self._stack[-1][0]
+        client = self.client
         if client is not None:
             rv = client.integrations.get(integration_name)
             if rv is not None:
@@ -312,16 +326,16 @@ def capture_event(
         event,  # type: Event
         hint=None,  # type: Optional[Hint]
         scope=None,  # type: Optional[Any]
-        **scope_args  # type: Dict[str, Any]
+        **scope_args  # type: Any
     ):
         # type: (...) -> Optional[str]
-        """Captures an event. Alias of :py:meth:`sentry_sdk.Client.capture_event`.
-        """
+        """Captures an event. Alias of :py:meth:`sentry_sdk.Client.capture_event`."""
         client, top_scope = self._stack[-1]
         scope = _update_scope(top_scope, scope, scope_args)
         if client is not None:
+            is_transaction = event.get("type") == "transaction"
             rv = client.capture_event(event, hint, scope)
-            if rv is not None:
+            if rv is not None and not is_transaction:
                 self._last_event_id = rv
             return rv
         return None
@@ -331,7 +345,7 @@ def capture_message(
         message,  # type: str
         level=None,  # type: Optional[str]
         scope=None,  # type: Optional[Any]
-        **scope_args  # type: Dict[str, Any]
+        **scope_args  # type: Any
     ):
         # type: (...) -> Optional[str]
         """Captures a message.  The message is just a string.  If no level
@@ -351,7 +365,7 @@ def capture_exception(
         self,
         error=None,  # type: Optional[Union[BaseException, ExcInfo]]
         scope=None,  # type: Optional[Any]
-        **scope_args  # type: Dict[str, Any]
+        **scope_args  # type: Any
     ):
         # type: (...) -> Optional[str]
         """Captures an exception.
@@ -437,52 +451,122 @@ def add_breadcrumb(
     def start_span(
         self,
         span=None,  # type: Optional[Span]
+        instrumenter=INSTRUMENTER.SENTRY,  # type: str
         **kwargs  # type: Any
     ):
         # type: (...) -> Span
         """
-        Create a new span whose parent span is the currently active
-        span, if any. The return value is the span object that can
-        be used as a context manager to start and stop timing.
-
-        Note that you will not see any span that is not contained
-        within a transaction. Create a transaction with
-        ``start_span(transaction="my transaction")`` if an
-        integration doesn't already do this for you.
+        Create and start timing a new span whose parent is the currently active
+        span or transaction, if any. The return value is a span instance,
+        typically used as a context manager to start and stop timing in a `with`
+        block.
+
+        Only spans contained in a transaction are sent to Sentry. Most
+        integrations start a transaction at the appropriate time, for example
+        for every incoming HTTP request. Use `start_transaction` to start a new
+        transaction when one is not already in progress.
         """
+        configuration_instrumenter = self.client and self.client.options["instrumenter"]
+
+        if instrumenter != configuration_instrumenter:
+            return NoOpSpan()
+
+        # TODO: consider removing this in a future release.
+        # This is for backwards compatibility with releases before
+        # start_transaction existed, to allow for a smoother transition.
+        if isinstance(span, Transaction) or "transaction" in kwargs:
+            deprecation_msg = (
+                "Deprecated: use start_transaction to start transactions and "
+                "Transaction.start_child to start spans."
+            )
+            if isinstance(span, Transaction):
+                logger.warning(deprecation_msg)
+                return self.start_transaction(span)
+            if "transaction" in kwargs:
+                logger.warning(deprecation_msg)
+                name = kwargs.pop("transaction")
+                return self.start_transaction(name=name, **kwargs)
 
-        client, scope = self._stack[-1]
+        if span is not None:
+            return span
 
         kwargs.setdefault("hub", self)
 
-        if span is None:
-            span = scope.span
-            if span is not None:
-                span = span.new_span(**kwargs)
-            else:
-                span = Span(**kwargs)
+        span = self.scope.span
+        if span is not None:
+            return span.start_child(**kwargs)
+
+        return Span(**kwargs)
+
+    def start_transaction(
+        self,
+        transaction=None,  # type: Optional[Transaction]
+        instrumenter=INSTRUMENTER.SENTRY,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> Union[Transaction, NoOpSpan]
+        """
+        Start and return a transaction.
+
+        Start an existing transaction if given, otherwise create and start a new
+        transaction with kwargs.
+
+        This is the entry point to manual tracing instrumentation.
 
-        if span.sampled is None and span.transaction is not None:
-            sample_rate = client and client.options["traces_sample_rate"] or 0
-            span.sampled = random.random() < sample_rate
+        A tree structure can be built by adding child spans to the transaction,
+        and child spans to other spans. To start a new child span within the
+        transaction or any span, call the respective `.start_child()` method.
 
-        if span.sampled:
+        Every child span must be finished before the transaction is finished,
+        otherwise the unfinished spans are discarded.
+
+        When used as context managers, spans and transactions are automatically
+        finished at the end of the `with` block. If not using context managers,
+        call the `.finish()` method.
+
+        When the transaction is finished, it will be sent to Sentry with all its
+        finished child spans.
+        """
+        configuration_instrumenter = self.client and self.client.options["instrumenter"]
+
+        if instrumenter != configuration_instrumenter:
+            return NoOpSpan()
+
+        custom_sampling_context = kwargs.pop("custom_sampling_context", {})
+
+        # if we haven't been given a transaction, make one
+        if transaction is None:
+            kwargs.setdefault("hub", self)
+            transaction = Transaction(**kwargs)
+
+        # use traces_sample_rate, traces_sampler, and/or inheritance to make a
+        # sampling decision
+        sampling_context = {
+            "transaction_context": transaction.to_json(),
+            "parent_sampled": transaction.parent_sampled,
+        }
+        sampling_context.update(custom_sampling_context)
+        transaction._set_initial_sampling_decision(sampling_context=sampling_context)
+
+        # we don't bother to keep spans if we already know we're not going to
+        # send the transaction
+        if transaction.sampled:
             max_spans = (
-                client and client.options["_experiments"].get("max_spans") or 1000
-            )
-            span.init_finished_spans(maxlen=max_spans)
+                self.client and self.client.options["_experiments"].get("max_spans")
+            ) or 1000
+            transaction.init_span_recorder(maxlen=max_spans)
 
-        return span
+        return transaction
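
A usage sketch for the manual-tracing entry point described in the docstring above (placeholder DSN); both the transaction and its child span are finished automatically by their `with` blocks:

    import sentry_sdk

    sentry_sdk.init(
        dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder DSN
        traces_sample_rate=1.0,
    )

    with sentry_sdk.start_transaction(op="task", name="process-batch") as transaction:
        with transaction.start_child(op="db", description="load rows"):
            pass  # timed work goes here
    # the finished transaction, with its child span, is sent on exit
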
 
-    @overload  # noqa
+    @overload
     def push_scope(
         self, callback=None  # type: Optional[None]
     ):
         # type: (...) -> ContextManager[Scope]
         pass
 
-    @overload  # noqa
-    def push_scope(
+    @overload
+    def push_scope(  # noqa: F811
         self, callback  # type: Callable[[Scope], None]
     ):
         # type: (...) -> None
@@ -523,15 +607,15 @@ def pop_scope_unsafe(self):
         assert self._stack, "stack must have at least one layer"
         return rv
 
-    @overload  # noqa
+    @overload
     def configure_scope(
         self, callback=None  # type: Optional[None]
     ):
         # type: (...) -> ContextManager[Scope]
         pass
 
-    @overload  # noqa
-    def configure_scope(
+    @overload
+    def configure_scope(  # noqa: F811
         self, callback  # type: Callable[[Scope], None]
     ):
         # type: (...) -> None
@@ -539,7 +623,7 @@ def configure_scope(
 
     def configure_scope(  # noqa
         self, callback=None  # type: Optional[Callable[[Scope], None]]
-    ):  # noqa
+    ):
         # type: (...) -> Optional[ContextManager[Scope]]
 
         """
@@ -567,7 +651,9 @@ def inner():
 
         return inner()
 
-    def start_session(self):
+    def start_session(
+        self, session_mode="application"  # type: str
+    ):
         # type: (...) -> None
         """Starts a new session."""
         self.end_session()
@@ -576,6 +662,7 @@ def start_session(self):
             release=client.options["release"] if client else None,
             environment=client.options["environment"] if client else None,
             user=scope._user,
+            session_mode=session_mode,
         )
 
     def end_session(self):
@@ -583,11 +670,12 @@ def end_session(self):
         """Ends the current session if there is one."""
         client, scope = self._stack[-1]
         session = scope._session
+        self.scope._session = None
+
         if session is not None:
             session.close()
             if client is not None:
                 client.capture_session(session)
-        self._stack[-1][1]._session = None
 
     def stop_auto_session_tracking(self):
         # type: (...) -> None
@@ -622,25 +710,38 @@ def flush(
         if client is not None:
             return client.flush(timeout=timeout, callback=callback)
 
-    def iter_trace_propagation_headers(self):
-        # type: () -> Generator[Tuple[str, str], None, None]
-        # TODO: Document
-        client, scope = self._stack[-1]
-        span = scope.span
-
-        if span is None:
+    def iter_trace_propagation_headers(self, span=None):
+        # type: (Optional[Span]) -> Generator[Tuple[str, str], None, None]
+        """
+        Return HTTP headers which allow propagation of trace data. Data taken
+        from the span representing the request, if available, or the current
+        span on the scope if not.
+        """
+        span = span or self.scope.span
+        if not span:
             return
 
+        client = self._stack[-1][0]
+
         propagate_traces = client and client.options["propagate_traces"]
         if not propagate_traces:
             return
 
-        if client and client.options["traceparent_v2"]:
-            traceparent = span.to_traceparent()
-        else:
-            traceparent = span.to_legacy_traceparent()
+        for header in span.iter_headers():
+            yield header
+
+    def trace_propagation_meta(self, span=None):
+        # type: (Optional[Span]) -> str
+        """
+        Return meta tags which should be injected into the HTML template
+        to allow propagation of trace data.
+        """
+        meta = ""
+
+        for name, content in self.iter_trace_propagation_headers(span):
+            meta += '<meta name="%s" content="%s">' % (name, content)
 
-        yield "sentry-trace", traceparent
+        return meta
 
 
 GLOBAL_HUB = Hub()
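
A hedged sketch of the new `trace_propagation_meta` helper above: inside an active transaction it returns the propagation headers rendered as HTML meta tags, ready to be injected into a server-rendered template (placeholder DSN):

    import sentry_sdk

    sentry_sdk.init(
        dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder DSN
        traces_sample_rate=1.0,
    )

    with sentry_sdk.start_transaction(op="http.server", name="render-page"):
        meta = sentry_sdk.Hub.current.trace_propagation_meta()
        html = "<html><head>%s</head><body>ok</body></html>" % meta
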
diff --git a/sentry_sdk/integrations/__init__.py b/sentry_sdk/integrations/__init__.py
index f264bc4855..8d32741542 100644
--- a/sentry_sdk/integrations/__init__.py
+++ b/sentry_sdk/integrations/__init__.py
@@ -27,8 +27,7 @@ def _generate_default_integrations_iterator(integrations, auto_enabling_integrat
 
     def iter_default_integrations(with_auto_enabling_integrations):
         # type: (bool) -> Iterator[Type[Integration]]
-        """Returns an iterator of the default integration classes:
-        """
+        """Returns an iterator of the default integration classes:"""
         from importlib import import_module
 
         if with_auto_enabling_integrations:
@@ -55,6 +54,8 @@ def iter_default_integrations(with_auto_enabling_integrations):
 _AUTO_ENABLING_INTEGRATIONS = (
     "sentry_sdk.integrations.django.DjangoIntegration",
     "sentry_sdk.integrations.flask.FlaskIntegration",
+    "sentry_sdk.integrations.starlette.StarletteIntegration",
+    "sentry_sdk.integrations.fastapi.FastApiIntegration",
     "sentry_sdk.integrations.bottle.BottleIntegration",
     "sentry_sdk.integrations.falcon.FalconIntegration",
     "sentry_sdk.integrations.sanic.SanicIntegration",
@@ -63,6 +64,9 @@ def iter_default_integrations(with_auto_enabling_integrations):
     "sentry_sdk.integrations.aiohttp.AioHttpIntegration",
     "sentry_sdk.integrations.tornado.TornadoIntegration",
     "sentry_sdk.integrations.sqlalchemy.SqlalchemyIntegration",
+    "sentry_sdk.integrations.redis.RedisIntegration",
+    "sentry_sdk.integrations.pyramid.PyramidIntegration",
+    "sentry_sdk.integrations.boto3.Boto3Integration",
 )
 
 
@@ -144,7 +148,7 @@ def setup_integrations(
     return integrations
 
 
-class DidNotEnable(Exception):
+class DidNotEnable(Exception):  # noqa: N818
     """
     The integration could not be enabled due to a trivial user error like
     `flask` not being installed for the `FlaskIntegration`.
diff --git a/sentry_sdk/integrations/_wsgi_common.py b/sentry_sdk/integrations/_wsgi_common.py
index f874663883..1b7b222f18 100644
--- a/sentry_sdk/integrations/_wsgi_common.py
+++ b/sentry_sdk/integrations/_wsgi_common.py
@@ -21,6 +21,7 @@
     "HTTP_SET_COOKIE",
     "HTTP_COOKIE",
     "HTTP_AUTHORIZATION",
+    "HTTP_X_API_KEY",
     "HTTP_X_FORWARDED_FOR",
     "HTTP_X_REAL_IP",
 )
@@ -38,8 +39,8 @@ def request_body_within_bounds(client, content_length):
     bodies = client.options["request_bodies"]
     return not (
         bodies == "never"
-        or (bodies == "small" and content_length > 10 ** 3)
-        or (bodies == "medium" and content_length > 10 ** 4)
+        or (bodies == "small" and content_length > 10**3)
+        or (bodies == "medium" and content_length > 10**4)
     )
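
For reference, the thresholds above are driven by the `request_bodies` option: `"small"` keeps bodies up to 10**3 bytes, `"medium"` up to 10**4, `"always"` keeps everything, and `"never"` drops all bodies. A minimal sketch (placeholder DSN):

    import sentry_sdk

    sentry_sdk.init(
        dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder DSN
        request_bodies="medium",  # bodies above 10**4 bytes become AnnotatedValue placeholders
    )
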
 
 
@@ -63,19 +64,13 @@ def extract_into_event(self, event):
             request_info["cookies"] = dict(self.cookies())
 
         if not request_body_within_bounds(client, content_length):
-            data = AnnotatedValue(
-                "",
-                {"rem": [["!config", "x", 0, content_length]], "len": content_length},
-            )
+            data = AnnotatedValue.removed_because_over_size_limit()
         else:
             parsed_body = self.parsed_body()
             if parsed_body is not None:
                 data = parsed_body
             elif self.raw_data():
-                data = AnnotatedValue(
-                    "",
-                    {"rem": [["!raw", "x", 0, content_length]], "len": content_length},
-                )
+                data = AnnotatedValue.removed_because_raw_data()
             else:
                 data = None
 
@@ -109,11 +104,8 @@ def parsed_body(self):
         files = self.files()
         if form or files:
             data = dict(iteritems(form))
-            for k, v in iteritems(files):
-                size = self.size_of_file(v)
-                data[k] = AnnotatedValue(
-                    "", {"len": size, "rem": [["!raw", "x", 0, size]]}
-                )
+            for key, _ in iteritems(files):
+                data[key] = AnnotatedValue.removed_because_raw_data()
 
             return data
 
@@ -174,7 +166,7 @@ def _filter_headers(headers):
         k: (
             v
             if k.upper().replace("-", "_") not in SENSITIVE_HEADERS
-            else AnnotatedValue("", {"rem": [["!config", "x", 0, len(v)]]})
+            else AnnotatedValue.removed_because_over_size_limit()
         )
         for k, v in iteritems(headers)
     }
diff --git a/sentry_sdk/integrations/aiohttp.py b/sentry_sdk/integrations/aiohttp.py
index c00a07d2b2..d1728f6edb 100644
--- a/sentry_sdk/integrations/aiohttp.py
+++ b/sentry_sdk/integrations/aiohttp.py
@@ -2,19 +2,22 @@
 import weakref
 
 from sentry_sdk._compat import reraise
+from sentry_sdk.consts import OP
 from sentry_sdk.hub import Hub
 from sentry_sdk.integrations import Integration, DidNotEnable
 from sentry_sdk.integrations.logging import ignore_logger
+from sentry_sdk.sessions import auto_session_tracking
 from sentry_sdk.integrations._wsgi_common import (
     _filter_headers,
     request_body_within_bounds,
 )
-from sentry_sdk.tracing import Span
+from sentry_sdk.tracing import SOURCE_FOR_STYLE, Transaction, TRANSACTION_SOURCE_ROUTE
 from sentry_sdk.utils import (
     capture_internal_exceptions,
     event_from_exception,
     transaction_from_function,
     HAS_REAL_CONTEXTVARS,
+    CONTEXTVARS_ERROR_MESSAGE,
     AnnotatedValue,
 )
 
@@ -42,17 +45,29 @@
     from sentry_sdk._types import EventProcessor
 
 
+TRANSACTION_STYLE_VALUES = ("handler_name", "method_and_path_pattern")
+
+
 class AioHttpIntegration(Integration):
     identifier = "aiohttp"
 
+    def __init__(self, transaction_style="handler_name"):
+        # type: (str) -> None
+        if transaction_style not in TRANSACTION_STYLE_VALUES:
+            raise ValueError(
+                "Invalid value for transaction_style: %s (must be in %s)"
+                % (transaction_style, TRANSACTION_STYLE_VALUES)
+            )
+        self.transaction_style = transaction_style
+
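
A usage sketch for the new `transaction_style` option (placeholder DSN): with `"method_and_path_pattern"`, transactions are named from the route, e.g. "GET /users/{id}", instead of the handler function's dotted path:

    import sentry_sdk
    from sentry_sdk.integrations.aiohttp import AioHttpIntegration

    sentry_sdk.init(
        dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder DSN
        traces_sample_rate=1.0,
        integrations=[AioHttpIntegration(transaction_style="method_and_path_pattern")],
    )
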
     @staticmethod
     def setup_once():
         # type: () -> None
 
         try:
-            version = tuple(map(int, AIOHTTP_VERSION.split(".")))
+            version = tuple(map(int, AIOHTTP_VERSION.split(".")[:2]))
         except (TypeError, ValueError):
-            raise DidNotEnable("AIOHTTP version unparseable: {}".format(version))
+            raise DidNotEnable("AIOHTTP version unparsable: {}".format(AIOHTTP_VERSION))
 
         if version < (3, 4):
             raise DidNotEnable("AIOHTTP 3.4 or newer required.")
@@ -60,9 +75,9 @@ def setup_once():
         if not HAS_REAL_CONTEXTVARS:
             # We better have contextvars or we're going to leak state between
             # requests.
-            raise RuntimeError(
+            raise DidNotEnable(
                 "The aiohttp integration for Sentry requires Python 3.7+ "
-                " or aiocontextvars package"
+                " or aiocontextvars package." + CONTEXTVARS_ERROR_MESSAGE
             )
 
         ignore_logger("aiohttp.server")
@@ -77,35 +92,41 @@ async def sentry_app_handle(self, request, *args, **kwargs):
 
             weak_request = weakref.ref(request)
 
-            with Hub(Hub.current) as hub:
-                # Scope data will not leak between requests because aiohttp
-                # create a task to wrap each request.
-                with hub.configure_scope() as scope:
-                    scope.clear_breadcrumbs()
-                    scope.add_event_processor(_make_request_processor(weak_request))
-
-                span = Span.continue_from_headers(request.headers)
-                span.op = "http.server"
-                # If this transaction name makes it to the UI, AIOHTTP's
-                # URL resolver did not find a route or died trying.
-                span.transaction = "generic AIOHTTP request"
-
-                with hub.start_span(span):
-                    try:
-                        response = await old_handle(self, request)
-                    except HTTPException as e:
-                        span.set_http_status(e.status_code)
-                        raise
-                    except asyncio.CancelledError:
-                        span.set_status("cancelled")
-                        raise
-                    except Exception:
-                        # This will probably map to a 500 but seems like we
-                        # have no way to tell. Do not set span status.
-                        reraise(*_capture_exception(hub))
-
-                    span.set_http_status(response.status)
-                    return response
+            with Hub(hub) as hub:
+                with auto_session_tracking(hub, session_mode="request"):
+                    # Scope data will not leak between requests because aiohttp
+                    # creates a task to wrap each request.
+                    with hub.configure_scope() as scope:
+                        scope.clear_breadcrumbs()
+                        scope.add_event_processor(_make_request_processor(weak_request))
+
+                    transaction = Transaction.continue_from_headers(
+                        request.headers,
+                        op=OP.HTTP_SERVER,
+                        # If this transaction name makes it to the UI, AIOHTTP's
+                        # URL resolver did not find a route or died trying.
+                        name="generic AIOHTTP request",
+                        source=TRANSACTION_SOURCE_ROUTE,
+                    )
+                    with hub.start_transaction(
+                        transaction,
+                        custom_sampling_context={"aiohttp_request": request},
+                    ):
+                        try:
+                            response = await old_handle(self, request)
+                        except HTTPException as e:
+                            transaction.set_http_status(e.status_code)
+                            raise
+                        except (asyncio.CancelledError, ConnectionResetError):
+                            transaction.set_status("cancelled")
+                            raise
+                        except Exception:
+                            # This will probably map to a 500 but seems like we
+                            # have no way to tell. Do not set span status.
+                            reraise(*_capture_exception(hub))
+
+                        transaction.set_http_status(response.status)
+                        return response
 
         Application._handle = sentry_app_handle
 
@@ -115,16 +136,27 @@ async def sentry_urldispatcher_resolve(self, request):
             # type: (UrlDispatcher, Request) -> AbstractMatchInfo
             rv = await old_urldispatcher_resolve(self, request)
 
+            hub = Hub.current
+            integration = hub.get_integration(AioHttpIntegration)
+
             name = None
 
             try:
-                name = transaction_from_function(rv.handler)
+                if integration.transaction_style == "handler_name":
+                    name = transaction_from_function(rv.handler)
+                elif integration.transaction_style == "method_and_path_pattern":
+                    route_info = rv.get_info()
+                    pattern = route_info.get("path") or route_info.get("formatter")
+                    name = "{} {}".format(request.method, pattern)
             except Exception:
                 pass
 
             if name is not None:
                 with Hub.current.configure_scope() as scope:
-                    scope.transaction = name
+                    scope.set_transaction_name(
+                        name,
+                        source=SOURCE_FOR_STYLE[integration.transaction_style],
+                    )
 
             return rv
 
@@ -190,11 +222,8 @@ def get_aiohttp_request_data(hub, request):
     if bytes_body is not None:
         # we have body to show
         if not request_body_within_bounds(hub.client, len(bytes_body)):
+            return AnnotatedValue.removed_because_over_size_limit()
 
-            return AnnotatedValue(
-                "",
-                {"rem": [["!config", "x", 0, len(bytes_body)]], "len": len(bytes_body)},
-            )
         encoding = request.charset or "utf-8"
         return bytes_body.decode(encoding, "replace")
 
diff --git a/sentry_sdk/integrations/asgi.py b/sentry_sdk/integrations/asgi.py
index 25201ccf31..c84e5ba454 100644
--- a/sentry_sdk/integrations/asgi.py
+++ b/sentry_sdk/integrations/asgi.py
@@ -1,7 +1,7 @@
 """
 An ASGI middleware.
 
-Based on Tom Christie's `sentry-asgi <https://github.com/encode/sentry-asgi>`_.
+Based on Tom Christie's `sentry-asgi <https://github.com/encode/sentry-asgi>`.
 """
 
 import asyncio
@@ -10,10 +10,25 @@
 
 from sentry_sdk._functools import partial
 from sentry_sdk._types import MYPY
+from sentry_sdk.consts import OP
 from sentry_sdk.hub import Hub, _should_send_default_pii
 from sentry_sdk.integrations._wsgi_common import _filter_headers
-from sentry_sdk.utils import ContextVar, event_from_exception, transaction_from_function
-from sentry_sdk.tracing import Span
+from sentry_sdk.integrations.modules import _get_installed_modules
+from sentry_sdk.profiler import start_profiling
+from sentry_sdk.sessions import auto_session_tracking
+from sentry_sdk.tracing import (
+    SOURCE_FOR_STYLE,
+    TRANSACTION_SOURCE_ROUTE,
+)
+from sentry_sdk.utils import (
+    ContextVar,
+    event_from_exception,
+    HAS_REAL_CONTEXTVARS,
+    CONTEXTVARS_ERROR_MESSAGE,
+    logger,
+    transaction_from_function,
+)
+from sentry_sdk.tracing import Transaction
 
 if MYPY:
     from typing import Dict
@@ -21,21 +36,27 @@
     from typing import Optional
     from typing import Callable
 
+    from typing_extensions import Literal
+
     from sentry_sdk._types import Event, Hint
 
 
 _asgi_middleware_applied = ContextVar("sentry_asgi_middleware_applied")
 
+_DEFAULT_TRANSACTION_NAME = "generic ASGI request"
+
+TRANSACTION_STYLE_VALUES = ("endpoint", "url")
 
-def _capture_exception(hub, exc):
-    # type: (Hub, Any) -> None
+
+def _capture_exception(hub, exc, mechanism_type="asgi"):
+    # type: (Hub, Any, str) -> None
 
     # Check client here as it might have been unset while streaming response
     if hub.client is not None:
         event, hint = event_from_exception(
             exc,
             client_options=hub.client.options,
-            mechanism={"type": "asgi", "handled": False},
+            mechanism={"type": mechanism_type, "handled": False},
         )
         hub.capture_event(event, hint=hint)
 
@@ -57,10 +78,48 @@ def _looks_like_asgi3(app):
 
 
 class SentryAsgiMiddleware:
-    __slots__ = ("app", "__call__")
+    __slots__ = ("app", "__call__", "transaction_style", "mechanism_type")
+
+    def __init__(
+        self,
+        app,
+        unsafe_context_data=False,
+        transaction_style="endpoint",
+        mechanism_type="asgi",
+    ):
+        # type: (Any, bool, str, str) -> None
+        """
+        Instrument an ASGI application with Sentry. Provides HTTP/websocket
+        data to sent events and basic handling for exceptions bubbling up
+        through the middleware.
 
-    def __init__(self, app):
-        # type: (Any) -> None
+        :param unsafe_context_data: Disable errors when a proper contextvars installation could not be found. We do not recommend changing this from the default.
+        """
+        if not unsafe_context_data and not HAS_REAL_CONTEXTVARS:
+            # We better have contextvars or we're going to leak state between
+            # requests.
+            raise RuntimeError(
+                "The ASGI middleware for Sentry requires Python 3.7+ "
+                "or the aiocontextvars package." + CONTEXTVARS_ERROR_MESSAGE
+            )
+        if transaction_style not in TRANSACTION_STYLE_VALUES:
+            raise ValueError(
+                "Invalid value for transaction_style: %s (must be in %s)"
+                % (transaction_style, TRANSACTION_STYLE_VALUES)
+            )
+
+        asgi_middleware_while_using_starlette_or_fastapi = (
+            mechanism_type == "asgi" and "starlette" in _get_installed_modules()
+        )
+        if asgi_middleware_while_using_starlette_or_fastapi:
+            logger.warning(
+                "The Sentry Python SDK can now automatically support ASGI frameworks like Starlette and FastAPI. "
+                "Please remove 'SentryAsgiMiddleware' from your project. "
+                "See https://docs.sentry.io/platforms/python/guides/asgi/ for more information."
+            )
+
+        self.transaction_style = transaction_style
+        self.mechanism_type = mechanism_type
         self.app = app
 
         if _looks_like_asgi3(app):
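
A minimal wrapping sketch for the constructor above (placeholder DSN; the bare ASGI 3 app is hypothetical):

    import sentry_sdk
    from sentry_sdk.integrations.asgi import SentryAsgiMiddleware

    sentry_sdk.init(dsn="https://examplePublicKey@o0.ingest.sentry.io/0")  # placeholder DSN

    async def app(scope, receive, send):
        # hypothetical bare ASGI 3 application
        await send({"type": "http.response.start", "status": 200, "headers": []})
        await send({"type": "http.response.body", "body": b"ok"})

    app = SentryAsgiMiddleware(app, transaction_style="url")
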
@@ -82,38 +141,52 @@ async def _run_asgi3(self, scope, receive, send):
 
     async def _run_app(self, scope, callback):
         # type: (Any, Any) -> Any
-        if _asgi_middleware_applied.get(False):
-            return await callback()
+        is_recursive_asgi_middleware = _asgi_middleware_applied.get(False)
+        if is_recursive_asgi_middleware:
+            try:
+                return await callback()
+            except Exception as exc:
+                _capture_exception(Hub.current, exc, mechanism_type=self.mechanism_type)
+                raise exc from None
 
         _asgi_middleware_applied.set(True)
         try:
             hub = Hub(Hub.current)
-            with hub:
-                with hub.configure_scope() as sentry_scope:
-                    sentry_scope.clear_breadcrumbs()
-                    sentry_scope._name = "asgi"
-                    processor = partial(self.event_processor, asgi_scope=scope)
-                    sentry_scope.add_event_processor(processor)
-
-                if scope["type"] in ("http", "websocket"):
-                    span = Span.continue_from_headers(dict(scope["headers"]))
-                    span.op = "{}.server".format(scope["type"])
-                else:
-                    span = Span()
-                    span.op = "asgi.server"
-
-                span.set_tag("asgi.type", scope["type"])
-                span.transaction = "generic ASGI request"
-
-                with hub.start_span(span) as span:
-                    # XXX: Would be cool to have correct span status, but we
-                    # would have to wrap send(). That is a bit hard to do with
-                    # the current abstraction over ASGI 2/3.
-                    try:
-                        return await callback()
-                    except Exception as exc:
-                        _capture_exception(hub, exc)
-                        raise exc from None
+            with auto_session_tracking(hub, session_mode="request"):
+                with hub:
+                    with hub.configure_scope() as sentry_scope:
+                        sentry_scope.clear_breadcrumbs()
+                        sentry_scope._name = "asgi"
+                        processor = partial(self.event_processor, asgi_scope=scope)
+                        sentry_scope.add_event_processor(processor)
+
+                    ty = scope["type"]
+
+                    if ty in ("http", "websocket"):
+                        transaction = Transaction.continue_from_headers(
+                            self._get_headers(scope),
+                            op="{}.server".format(ty),
+                        )
+                    else:
+                        transaction = Transaction(op=OP.HTTP_SERVER)
+
+                    transaction.name = _DEFAULT_TRANSACTION_NAME
+                    transaction.source = TRANSACTION_SOURCE_ROUTE
+                    transaction.set_tag("asgi.type", ty)
+
+                    with hub.start_transaction(
+                        transaction, custom_sampling_context={"asgi_scope": scope}
+                    ), start_profiling(transaction, hub):
+                        # XXX: Would be cool to have correct span status, but we
+                        # would have to wrap send(). That is a bit hard to do with
+                        # the current abstraction over ASGI 2/3.
+                        try:
+                            return await callback()
+                        except Exception as exc:
+                            _capture_exception(
+                                hub, exc, mechanism_type=self.mechanism_type
+                            )
+                            raise exc from None
         finally:
             _asgi_middleware_applied.set(False)
 
@@ -121,38 +194,82 @@ def event_processor(self, event, hint, asgi_scope):
         # type: (Event, Hint, Any) -> Optional[Event]
         request_info = event.get("request", {})
 
-        if asgi_scope["type"] in ("http", "websocket"):
-            request_info["url"] = self.get_url(asgi_scope)
-            request_info["method"] = asgi_scope["method"]
-            request_info["headers"] = _filter_headers(self.get_headers(asgi_scope))
-            request_info["query_string"] = self.get_query(asgi_scope)
+        ty = asgi_scope["type"]
+        if ty in ("http", "websocket"):
+            request_info["method"] = asgi_scope.get("method")
+            request_info["headers"] = headers = _filter_headers(
+                self._get_headers(asgi_scope)
+            )
+            request_info["query_string"] = self._get_query(asgi_scope)
 
-        if asgi_scope.get("client") and _should_send_default_pii():
-            request_info["env"] = {"REMOTE_ADDR": asgi_scope["client"][0]}
+            request_info["url"] = self._get_url(
+                asgi_scope, "http" if ty == "http" else "ws", headers.get("host")
+            )
 
-        if asgi_scope.get("endpoint"):
-            # Webframeworks like Starlette mutate the ASGI env once routing is
-            # done, which is sometime after the request has started. If we have
-            # an endpoint, overwrite our path-based transaction name.
-            event["transaction"] = self.get_transaction(asgi_scope)
+        client = asgi_scope.get("client")
+        if client and _should_send_default_pii():
+            request_info["env"] = {"REMOTE_ADDR": self._get_ip(asgi_scope)}
+
+        self._set_transaction_name_and_source(event, self.transaction_style, asgi_scope)
 
         event["request"] = request_info
 
         return event
 
-    def get_url(self, scope):
-        # type: (Any) -> str
+    # Helper functions for extracting request data.
+    #
+    # Note: These functions are not public API. If you want to mutate request
+    # data to your liking, it's recommended to use the `before_send` callback
+    # for that.
+
+    def _set_transaction_name_and_source(self, event, transaction_style, asgi_scope):
+        # type: (Event, str, Any) -> None
+        transaction_name_already_set = (
+            event.get("transaction", _DEFAULT_TRANSACTION_NAME)
+            != _DEFAULT_TRANSACTION_NAME
+        )
+        if transaction_name_already_set:
+            return
+
+        name = ""
+
+        if transaction_style == "endpoint":
+            endpoint = asgi_scope.get("endpoint")
+            # Webframeworks like Starlette mutate the ASGI env once routing is
+            # done, which is sometime after the request has started. If we have
+            # an endpoint, overwrite our generic transaction name.
+            if endpoint:
+                name = transaction_from_function(endpoint) or ""
+
+        elif transaction_style == "url":
+            # FastAPI includes the route object in the scope to let Sentry extract the
+            # path from it for the transaction name
+            route = asgi_scope.get("route")
+            if route:
+                path = getattr(route, "path", None)
+                if path is not None:
+                    name = path
+
+        if not name:
+            event["transaction"] = _DEFAULT_TRANSACTION_NAME
+            event["transaction_info"] = {"source": TRANSACTION_SOURCE_ROUTE}
+            return
+
+        event["transaction"] = name
+        event["transaction_info"] = {"source": SOURCE_FOR_STYLE[transaction_style]}
+
+    def _get_url(self, scope, default_scheme, host):
+        # type: (Dict[str, Any], Literal["ws", "http"], Optional[str]) -> str
         """
         Extract URL from the ASGI scope, without also including the querystring.
         """
-        scheme = scope.get("scheme", "http")
+        scheme = scope.get("scheme", default_scheme)
+
         server = scope.get("server", None)
-        path = scope.get("root_path", "") + scope["path"]
+        path = scope.get("root_path", "") + scope.get("path", "")
 
-        for key, value in scope["headers"]:
-            if key == b"host":
-                host_header = value.decode("latin-1")
-                return "%s://%s%s" % (scheme, host_header, path)
+        if host:
+            return "%s://%s%s" % (scheme, host, path)
 
         if server is not None:
             host, port = server
@@ -162,15 +279,36 @@ def get_url(self, scope):
             return "%s://%s%s" % (scheme, host, path)
         return path
 
-    def get_query(self, scope):
+    def _get_query(self, scope):
         # type: (Any) -> Any
         """
         Extract querystring from the ASGI scope, in the format that the Sentry protocol expects.
         """
-        return urllib.parse.unquote(scope["query_string"].decode("latin-1"))
+        qs = scope.get("query_string")
+        if not qs:
+            return None
+        return urllib.parse.unquote(qs.decode("latin-1"))
+
+    def _get_ip(self, scope):
+        # type: (Any) -> str
+        """
+        Extract the IP address from the ASGI scope, based on request headers, with a fallback to the scope's client.
+        """
+        headers = self._get_headers(scope)
+        try:
+            return headers["x-forwarded-for"].split(",")[0].strip()
+        except (KeyError, IndexError):
+            pass
+
+        try:
+            return headers["x-real-ip"]
+        except KeyError:
+            pass
+
+        return scope.get("client")[0]
 
-    def get_headers(self, scope):
-        # type: (Any) -> Dict[str, Any]
+    def _get_headers(self, scope):
+        # type: (Any) -> Dict[str, str]
         """
         Extract headers from the ASGI scope, in the format that the Sentry protocol expects.
         """
@@ -183,10 +321,3 @@ def get_headers(self, scope):
             else:
                 headers[key] = value
         return headers
-
-    def get_transaction(self, scope):
-        # type: (Any) -> Optional[str]
-        """
-        Return a transaction string to identify the routed endpoint.
-        """
-        return transaction_from_function(scope["endpoint"])
diff --git a/sentry_sdk/integrations/asyncio.py b/sentry_sdk/integrations/asyncio.py
new file mode 100644
index 0000000000..2c61b85962
--- /dev/null
+++ b/sentry_sdk/integrations/asyncio.py
@@ -0,0 +1,92 @@
+from __future__ import absolute_import
+import sys
+
+from sentry_sdk._compat import reraise
+from sentry_sdk.consts import OP
+from sentry_sdk.hub import Hub
+from sentry_sdk.integrations import Integration, DidNotEnable
+from sentry_sdk._types import MYPY
+from sentry_sdk.utils import event_from_exception
+
+try:
+    import asyncio
+    from asyncio.tasks import Task
+except ImportError:
+    raise DidNotEnable("asyncio not available")
+
+
+if MYPY:
+    from typing import Any
+
+    from sentry_sdk._types import ExcInfo
+
+
+def patch_asyncio():
+    # type: () -> None
+    orig_task_factory = None
+    try:
+        loop = asyncio.get_running_loop()
+        orig_task_factory = loop.get_task_factory()
+
+        def _sentry_task_factory(loop, coro):
+            # type: (Any, Any) -> Any
+
+            async def _coro_creating_hub_and_span():
+                # type: () -> None
+                hub = Hub(Hub.current)
+                with hub:
+                    with hub.start_span(op=OP.FUNCTION, description=coro.__qualname__):
+                        try:
+                            await coro
+                        except Exception:
+                            reraise(*_capture_exception(hub))
+
+            # Try to use the user-set task factory (if there is one)
+            if orig_task_factory:
+                return orig_task_factory(loop, _coro_creating_hub_and_span())  # type: ignore
+
+            # The default task factory in `asyncio` does not have its own
+            # function but is just a couple of lines in
+            # `asyncio.base_events.create_task()`. Those lines are copied here.
+
+            # WARNING:
+            # If the default behavior of the task creation in asyncio changes,
+            # this will break!
+            task = Task(_coro_creating_hub_and_span(), loop=loop)
+            if task._source_traceback:  # type: ignore
+                del task._source_traceback[-1]  # type: ignore
+
+            return task
+
+        loop.set_task_factory(_sentry_task_factory)
+    except RuntimeError:
+        # When there is no running loop, we have nothing to patch.
+        pass
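+
+# Usage sketch (hypothetical application code): `patch_asyncio` needs a
+# *running* event loop to install its task factory, so `sentry_sdk.init(...)`
+# with `AsyncioIntegration()` must itself be called inside the loop, e.g.:
+#
+#     import asyncio, sentry_sdk
+#     from sentry_sdk.integrations.asyncio import AsyncioIntegration
+#
+#     async def main():
+#         sentry_sdk.init(dsn="...", integrations=[AsyncioIntegration()])
+#         asyncio.create_task(some_coroutine())  # now wrapped in a Hub + span
+#
+#     asyncio.run(main())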
+
+
+def _capture_exception(hub):
+    # type: (Hub) -> ExcInfo
+    exc_info = sys.exc_info()
+
+    integration = hub.get_integration(AsyncioIntegration)
+    if integration is not None:
+        # If an integration is there, a client has to be there.
+        client = hub.client  # type: Any
+
+        event, hint = event_from_exception(
+            exc_info,
+            client_options=client.options,
+            mechanism={"type": "asyncio", "handled": False},
+        )
+        hub.capture_event(event, hint=hint)
+
+    return exc_info
+
+
+class AsyncioIntegration(Integration):
+    identifier = "asyncio"
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+        patch_asyncio()
diff --git a/sentry_sdk/integrations/aws_lambda.py b/sentry_sdk/integrations/aws_lambda.py
index 3a08d998db..6017adfa7b 100644
--- a/sentry_sdk/integrations/aws_lambda.py
+++ b/sentry_sdk/integrations/aws_lambda.py
@@ -1,14 +1,17 @@
 from datetime import datetime, timedelta
 from os import environ
 import sys
+from sentry_sdk.consts import OP
 
 from sentry_sdk.hub import Hub, _should_send_default_pii
+from sentry_sdk.tracing import TRANSACTION_SOURCE_COMPONENT, Transaction
 from sentry_sdk._compat import reraise
 from sentry_sdk.utils import (
     AnnotatedValue,
     capture_internal_exceptions,
     event_from_exception,
     logger,
+    TimeoutThread,
 )
 from sentry_sdk.integrations import Integration
 from sentry_sdk.integrations._wsgi_common import _filter_headers
@@ -25,36 +28,144 @@
 
     F = TypeVar("F", bound=Callable[..., Any])
 
+# Constants
+TIMEOUT_WARNING_BUFFER = 1500  # Buffer time (in milliseconds) required to send the timeout warning to Sentry
+MILLIS_TO_SECONDS = 1000.0
+
+
+def _wrap_init_error(init_error):
+    # type: (F) -> F
+    def sentry_init_error(*args, **kwargs):
+        # type: (*Any, **Any) -> Any
+
+        hub = Hub.current
+        integration = hub.get_integration(AwsLambdaIntegration)
+        if integration is None:
+            return init_error(*args, **kwargs)
+
+        # If an integration is there, a client has to be there.
+        client = hub.client  # type: Any
+
+        with capture_internal_exceptions():
+            with hub.configure_scope() as scope:
+                scope.clear_breadcrumbs()
+
+            exc_info = sys.exc_info()
+            if exc_info and all(exc_info):
+                sentry_event, hint = event_from_exception(
+                    exc_info,
+                    client_options=client.options,
+                    mechanism={"type": "aws_lambda", "handled": False},
+                )
+                hub.capture_event(sentry_event, hint=hint)
+
+        return init_error(*args, **kwargs)
+
+    return sentry_init_error  # type: ignore
+
 
 def _wrap_handler(handler):
     # type: (F) -> F
-    def sentry_handler(event, context, *args, **kwargs):
+    def sentry_handler(aws_event, aws_context, *args, **kwargs):
         # type: (Any, Any, *Any, **Any) -> Any
+
+        # Per https://docs.aws.amazon.com/lambda/latest/dg/python-handler.html,
+        # `event` here is *likely* a dictionary, but also might be a number of
+        # other types (str, int, float, None).
+        #
+        # In some cases, it is a list (if the user is batch-invoking their
+        # function, for example), in which case we'll use the first entry as a
+        # representative from which to try pulling request data. (Presumably it
+        # will be the same for all events in the list, since they're all hitting
+        # the lambda in the same request.)
+
+        if isinstance(aws_event, list):
+            request_data = aws_event[0]
+            batch_size = len(aws_event)
+        else:
+            request_data = aws_event
+            batch_size = 1
+
+        if not isinstance(request_data, dict):
+            # If we're not dealing with a dictionary, we won't be able to get
+            # headers, path, HTTP method, etc. in any case, so it's fine that
+            # this is empty
+            request_data = {}
+
         hub = Hub.current
         integration = hub.get_integration(AwsLambdaIntegration)
         if integration is None:
-            return handler(event, context, *args, **kwargs)
+            return handler(aws_event, aws_context, *args, **kwargs)
 
         # If an integration is there, a client has to be there.
         client = hub.client  # type: Any
+        configured_time = aws_context.get_remaining_time_in_millis()
 
         with hub.push_scope() as scope:
+            timeout_thread = None
             with capture_internal_exceptions():
                 scope.clear_breadcrumbs()
-                scope.transaction = context.function_name
-                scope.add_event_processor(_make_request_event_processor(event, context))
-
-            try:
-                return handler(event, context, *args, **kwargs)
-            except Exception:
-                exc_info = sys.exc_info()
-                event, hint = event_from_exception(
-                    exc_info,
-                    client_options=client.options,
-                    mechanism={"type": "aws_lambda", "handled": False},
+                scope.add_event_processor(
+                    _make_request_event_processor(
+                        request_data, aws_context, configured_time
+                    )
                 )
-                hub.capture_event(event, hint=hint)
-                reraise(*exc_info)
+                scope.set_tag(
+                    "aws_region", aws_context.invoked_function_arn.split(":")[3]
+                )
+                if batch_size > 1:
+                    scope.set_tag("batch_request", True)
+                    scope.set_tag("batch_size", batch_size)
+
+                # Start the timeout warning thread only if the configured time
+                # is greater than the timeout warning buffer and the
+                # timeout_warning parameter is set to True.
+                if (
+                    integration.timeout_warning
+                    and configured_time > TIMEOUT_WARNING_BUFFER
+                ):
+                    waiting_time = (
+                        configured_time - TIMEOUT_WARNING_BUFFER
+                    ) / MILLIS_TO_SECONDS
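+                    # e.g. a 10000 ms budget yields (10000 - 1500) / 1000.0
+                    # = 8.5 s, so the warning fires 1.5 s before the deadline.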
+
+                    timeout_thread = TimeoutThread(
+                        waiting_time,
+                        configured_time / MILLIS_TO_SECONDS,
+                    )
+
+                    # Start the thread that raises the timeout warning exception
+                    timeout_thread.start()
+
+            headers = request_data.get("headers")
+            # AWS services may set an explicit `{"headers": None}`, so we can't rely on `.get()`'s default.
+            if headers is None:
+                headers = {}
+            transaction = Transaction.continue_from_headers(
+                headers,
+                op=OP.FUNCTION_AWS,
+                name=aws_context.function_name,
+                source=TRANSACTION_SOURCE_COMPONENT,
+            )
+            with hub.start_transaction(
+                transaction,
+                custom_sampling_context={
+                    "aws_event": aws_event,
+                    "aws_context": aws_context,
+                },
+            ):
+                try:
+                    return handler(aws_event, aws_context, *args, **kwargs)
+                except Exception:
+                    exc_info = sys.exc_info()
+                    sentry_event, hint = event_from_exception(
+                        exc_info,
+                        client_options=client.options,
+                        mechanism={"type": "aws_lambda", "handled": False},
+                    )
+                    hub.capture_event(sentry_event, hint=hint)
+                    reraise(*exc_info)
+                finally:
+                    if timeout_thread:
+                        timeout_thread.stop()
 
     return sentry_handler  # type: ignore
 
@@ -73,28 +184,31 @@ def _drain_queue():
 class AwsLambdaIntegration(Integration):
     identifier = "aws_lambda"
 
+    def __init__(self, timeout_warning=False):
+        # type: (bool) -> None
+        self.timeout_warning = timeout_warning
+
     @staticmethod
     def setup_once():
         # type: () -> None
-        import __main__ as lambda_bootstrap  # type: ignore
-
-        pre_37 = True  # Python 3.6 or 2.7
 
-        if not hasattr(lambda_bootstrap, "handle_http_request"):
-            try:
-                import bootstrap as lambda_bootstrap  # type: ignore
-
-                pre_37 = False  # Python 3.7
-            except ImportError:
-                pass
+        lambda_bootstrap = get_lambda_bootstrap()
+        if not lambda_bootstrap:
+            logger.warning(
+                "Not running in AWS Lambda environment, "
+                "AwsLambdaIntegration disabled (could not find bootstrap module)"
+            )
+            return
 
         if not hasattr(lambda_bootstrap, "handle_event_request"):
             logger.warning(
                 "Not running in AWS Lambda environment, "
-                "AwsLambdaIntegration disabled"
+                "AwsLambdaIntegration disabled (could not find handle_event_request)"
             )
             return
 
+        pre_37 = hasattr(lambda_bootstrap, "handle_http_request")  # Python 3.6 or 2.7
+
         if pre_37:
             old_handle_event_request = lambda_bootstrap.handle_event_request
 
@@ -126,6 +240,10 @@ def sentry_to_json(*args, **kwargs):
 
             lambda_bootstrap.to_json = sentry_to_json
         else:
+            lambda_bootstrap.LambdaRuntimeClient.post_init_error = _wrap_init_error(
+                lambda_bootstrap.LambdaRuntimeClient.post_init_error
+            )
+
             old_handle_event_request = lambda_bootstrap.handle_event_request
 
             def sentry_handle_event_request(  # type: ignore
@@ -150,27 +268,74 @@ def inner(*args, **kwargs):
 
                 return inner  # type: ignore
 
-            lambda_bootstrap.LambdaRuntimeClient.post_invocation_result = _wrap_post_function(
-                lambda_bootstrap.LambdaRuntimeClient.post_invocation_result
+            lambda_bootstrap.LambdaRuntimeClient.post_invocation_result = (
+                _wrap_post_function(
+                    lambda_bootstrap.LambdaRuntimeClient.post_invocation_result
+                )
             )
-            lambda_bootstrap.LambdaRuntimeClient.post_invocation_error = _wrap_post_function(
-                lambda_bootstrap.LambdaRuntimeClient.post_invocation_error
+            lambda_bootstrap.LambdaRuntimeClient.post_invocation_error = (
+                _wrap_post_function(
+                    lambda_bootstrap.LambdaRuntimeClient.post_invocation_error
+                )
             )
 
 
-def _make_request_event_processor(aws_event, aws_context):
-    # type: (Any, Any) -> EventProcessor
-    start_time = datetime.now()
-
-    def event_processor(event, hint, start_time=start_time):
+def get_lambda_bootstrap():
+    # type: () -> Optional[Any]
+
+    # Python 2.7: Everything is in `__main__`.
+    #
+    # Python 3.7: If the bootstrap module is *already imported*, it is the
+    # one we actually want to use (no idea what's in __main__)
+    #
+    # Python 3.8: bootstrap is also importable, but will be the same file
+    # as __main__ imported under a different name:
+    #
+    #     sys.modules['__main__'].__file__ == sys.modules['bootstrap'].__file__
+    #     sys.modules['__main__'] is not sys.modules['bootstrap']
+    #
+    # Python 3.9: bootstrap is in __main__.awslambdaricmain
+    #
+    # On container builds using the `aws-lambda-python-runtime-interface-client`
+    # (awslambdaric) module, bootstrap is located in sys.modules['__main__'].bootstrap
+    #
+    # Such a setup would then make all monkeypatches useless.
+    if "bootstrap" in sys.modules:
+        return sys.modules["bootstrap"]
+    elif "__main__" in sys.modules:
+        module = sys.modules["__main__"]
+        # python3.9 runtime
+        if hasattr(module, "awslambdaricmain") and hasattr(
+            module.awslambdaricmain, "bootstrap"
+        ):
+            return module.awslambdaricmain.bootstrap
+        elif hasattr(module, "bootstrap"):
+            # awslambdaric python module in container builds
+            return module.bootstrap
+
+        # python3.8 runtime
+        return module
+    else:
+        return None
+
+
+def _make_request_event_processor(aws_event, aws_context, configured_timeout):
+    # type: (Any, Any, Any) -> EventProcessor
+    start_time = datetime.utcnow()
+
+    def event_processor(sentry_event, hint, start_time=start_time):
         # type: (Event, Hint, datetime) -> Optional[Event]
-        extra = event.setdefault("extra", {})
+        remaining_time_in_millis = aws_context.get_remaining_time_in_millis()
+        exec_duration = configured_timeout - remaining_time_in_millis
+
+        extra = sentry_event.setdefault("extra", {})
         extra["lambda"] = {
             "function_name": aws_context.function_name,
             "function_version": aws_context.function_version,
             "invoked_function_arn": aws_context.invoked_function_arn,
-            "remaining_time_in_millis": aws_context.get_remaining_time_in_millis(),
             "aws_request_id": aws_context.aws_request_id,
+            "execution_duration_in_millis": exec_duration,
+            "remaining_time_in_millis": remaining_time_in_milis,
         }
 
         extra["cloudwatch logs"] = {
@@ -179,7 +344,7 @@ def event_processor(event, hint, start_time=start_time):
             "log_stream": aws_context.log_stream_name,
         }
 
-        request = event.get("request", {})
+        request = sentry_event.get("request", {})
 
         if "httpMethod" in aws_event:
             request["method"] = aws_event["httpMethod"]
@@ -192,63 +357,76 @@ def event_processor(event, hint, start_time=start_time):
         if "headers" in aws_event:
             request["headers"] = _filter_headers(aws_event["headers"])
 
-        if aws_event.get("body", None):
-            # Unfortunately couldn't find a way to get structured body from AWS
-            # event. Meaning every body is unstructured to us.
-            request["data"] = AnnotatedValue("", {"rem": [["!raw", "x", 0, 0]]})
-
         if _should_send_default_pii():
-            user_info = event.setdefault("user", {})
+            user_info = sentry_event.setdefault("user", {})
+
+            identity = aws_event.get("identity")
+            if identity is None:
+                identity = {}
 
-            id = aws_event.get("identity", {}).get("userArn")
+            id = identity.get("userArn")
             if id is not None:
                 user_info.setdefault("id", id)
 
-            ip = aws_event.get("identity", {}).get("sourceIp")
+            ip = identity.get("sourceIp")
             if ip is not None:
                 user_info.setdefault("ip_address", ip)
 
-        event["request"] = request
+            if "body" in aws_event:
+                request["data"] = aws_event.get("body", "")
+        else:
+            if aws_event.get("body", None):
+                # Unfortunately, we couldn't find a way to get a structured
+                # body from the AWS event, meaning every body is unstructured to us.
+                request["data"] = AnnotatedValue.removed_because_raw_data()
+
+        sentry_event["request"] = request
 
-        return event
+        return sentry_event
 
     return event_processor
 
 
-def _get_url(event, context):
+def _get_url(aws_event, aws_context):
     # type: (Any, Any) -> str
-    path = event.get("path", None)
-    headers = event.get("headers", {})
+    path = aws_event.get("path", None)
+
+    headers = aws_event.get("headers")
+    if headers is None:
+        headers = {}
+
     host = headers.get("Host", None)
     proto = headers.get("X-Forwarded-Proto", None)
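+    # e.g. behind API Gateway this yields something like
+    # "https://abc123.execute-api.us-east-1.amazonaws.com/prod/hello";
+    # without HTTP metadata we fall back to "awslambda:///<function_name>".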
     if proto and host and path:
         return "{}://{}{}".format(proto, host, path)
-    return "awslambda:///{}".format(context.function_name)
+    return "awslambda:///{}".format(aws_context.function_name)
 
 
-def _get_cloudwatch_logs_url(context, start_time):
+def _get_cloudwatch_logs_url(aws_context, start_time):
     # type: (Any, datetime) -> str
     """
     Generates a CloudWatchLogs console URL based on the context object
 
     Arguments:
-        context {Any} -- context from lambda handler
+        aws_context {Any} -- context from lambda handler
 
     Returns:
         str -- AWS Console URL to logs.
     """
-    formatstring = "%Y-%m-%dT%H:%M:%S"
+    formatstring = "%Y-%m-%dT%H:%M:%SZ"
+    region = environ.get("AWS_REGION", "")
 
     url = (
-        "https://console.aws.amazon.com/cloudwatch/home?region={region}"
+        "https://console.{domain}/cloudwatch/home?region={region}"
         "#logEventViewer:group={log_group};stream={log_stream}"
         ";start={start_time};end={end_time}"
     ).format(
-        region=environ.get("AWS_REGION"),
-        log_group=context.log_group_name,
-        log_stream=context.log_stream_name,
+        domain="amazonaws.cn" if region.startswith("cn-") else "aws.amazon.com",
+        region=region,
+        log_group=aws_context.log_group_name,
+        log_stream=aws_context.log_stream_name,
         start_time=(start_time - timedelta(seconds=1)).strftime(formatstring),
-        end_time=(datetime.now() + timedelta(seconds=2)).strftime(formatstring),
+        end_time=(datetime.utcnow() + timedelta(seconds=2)).strftime(formatstring),
     )
 
     return url
diff --git a/sentry_sdk/integrations/beam.py b/sentry_sdk/integrations/beam.py
index be1615dc4b..30faa3814f 100644
--- a/sentry_sdk/integrations/beam.py
+++ b/sentry_sdk/integrations/beam.py
@@ -80,7 +80,6 @@ def sentry_init_pardo(self, fn, *args, **kwargs):
 
 def _wrap_inspect_call(cls, func_name):
     # type: (Any, Any) -> Any
-    from apache_beam.typehints.decorators import getfullargspec  # type: ignore
 
     if not hasattr(cls, func_name):
         return None
@@ -105,6 +104,8 @@ def _inspect(self):
 
             return get_function_args_defaults(process_func)
         except ImportError:
+            from apache_beam.typehints.decorators import getfullargspec  # type: ignore
+
             return getfullargspec(process_func)
 
     setattr(_inspect, USED_FUNC, True)
diff --git a/sentry_sdk/integrations/boto3.py b/sentry_sdk/integrations/boto3.py
new file mode 100644
index 0000000000..2f2f6bbea9
--- /dev/null
+++ b/sentry_sdk/integrations/boto3.py
@@ -0,0 +1,131 @@
+from __future__ import absolute_import
+
+from sentry_sdk import Hub
+from sentry_sdk.consts import OP
+from sentry_sdk.integrations import Integration, DidNotEnable
+from sentry_sdk.tracing import Span
+
+from sentry_sdk._functools import partial
+from sentry_sdk._types import MYPY
+
+if MYPY:
+    from typing import Any
+    from typing import Dict
+    from typing import Optional
+    from typing import Type
+
+try:
+    from botocore import __version__ as BOTOCORE_VERSION  # type: ignore
+    from botocore.client import BaseClient  # type: ignore
+    from botocore.response import StreamingBody  # type: ignore
+    from botocore.awsrequest import AWSRequest  # type: ignore
+except ImportError:
+    raise DidNotEnable("botocore is not installed")
+
+
+class Boto3Integration(Integration):
+    identifier = "boto3"
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+        try:
+            version = tuple(map(int, BOTOCORE_VERSION.split(".")[:3]))
+        except (ValueError, TypeError):
+            raise DidNotEnable(
+                "Unparsable botocore version: {}".format(BOTOCORE_VERSION)
+            )
+        if version < (1, 12):
+            raise DidNotEnable("Botocore 1.12 or newer is required.")
+        orig_init = BaseClient.__init__
+
+        def sentry_patched_init(self, *args, **kwargs):
+            # type: (Type[BaseClient], *Any, **Any) -> None
+            orig_init(self, *args, **kwargs)
+            meta = self.meta
+            service_id = meta.service_model.service_id.hyphenize()
+            meta.events.register(
+                "request-created",
+                partial(_sentry_request_created, service_id=service_id),
+            )
+            meta.events.register("after-call", _sentry_after_call)
+            meta.events.register("after-call-error", _sentry_after_call_error)
+
+        BaseClient.__init__ = sentry_patched_init
+
+
+def _sentry_request_created(service_id, request, operation_name, **kwargs):
+    # type: (str, AWSRequest, str, **Any) -> None
+    hub = Hub.current
+    if hub.get_integration(Boto3Integration) is None:
+        return
+
+    description = "aws.%s.%s" % (service_id, operation_name)
+    span = hub.start_span(
+        hub=hub,
+        op=OP.HTTP_CLIENT,
+        description=description,
+    )
+    span.set_tag("aws.service_id", service_id)
+    span.set_tag("aws.operation_name", operation_name)
+    span.set_data("aws.request.url", request.url)
+
+    # We do this so that subsequent HTTP calls/retries are
+    # attached to this span.
+    span.__enter__()
+
+    # request.context is an open-ended data structure
+    # to which we can add anything useful during the request life cycle.
+    request.context["_sentrysdk_span"] = span
+
+
+def _sentry_after_call(context, parsed, **kwargs):
+    # type: (Dict[str, Any], Dict[str, Any], **Any) -> None
+    span = context.pop("_sentrysdk_span", None)  # type: Optional[Span]
+
+    # Span could be absent if the integration is disabled.
+    if span is None:
+        return
+    span.__exit__(None, None, None)
+
+    body = parsed.get("Body")
+    if not isinstance(body, StreamingBody):
+        return
+
+    streaming_span = span.start_child(
+        op=OP.HTTP_CLIENT_STREAM,
+        description=span.description,
+    )
+
+    orig_read = body.read
+    orig_close = body.close
+
+    def sentry_streaming_body_read(*args, **kwargs):
+        # type: (*Any, **Any) -> bytes
+        try:
+            ret = orig_read(*args, **kwargs)
+            if not ret:
+                streaming_span.finish()
+            return ret
+        except Exception:
+            streaming_span.finish()
+            raise
+
+    body.read = sentry_streaming_body_read
+
+    def sentry_streaming_body_close(*args, **kwargs):
+        # type: (*Any, **Any) -> None
+        streaming_span.finish()
+        orig_close(*args, **kwargs)
+
+    body.close = sentry_streaming_body_close
+
+
+def _sentry_after_call_error(context, exception, **kwargs):
+    # type: (Dict[str, Any], Type[BaseException], **Any) -> None
+    span = context.pop("_sentrysdk_span", None)  # type: Optional[Span]
+
+    # Span could be absent if the integration is disabled.
+    if span is None:
+        return
+    span.__exit__(type(exception), exception, None)
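+
+# Lifecycle sketch: "request-created" opens a span, "after-call" closes it and,
+# for streaming responses, starts a child span that is finished once the body
+# is fully read or closed; "after-call-error" closes the span with the
+# exception attached. Hypothetical usage (any boto3 client behaves the same):
+#
+#     import boto3
+#     s3 = boto3.client("s3")                    # patched __init__ registers hooks
+#     obj = s3.get_object(Bucket="b", Key="k")   # wrapped in an http.client span
+#     obj["Body"].read()                         # tracked by a streaming child span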
diff --git a/sentry_sdk/integrations/bottle.py b/sentry_sdk/integrations/bottle.py
index 80224e4dc4..271fc150b1 100644
--- a/sentry_sdk/integrations/bottle.py
+++ b/sentry_sdk/integrations/bottle.py
@@ -1,6 +1,7 @@
 from __future__ import absolute_import
 
 from sentry_sdk.hub import Hub
+from sentry_sdk.tracing import SOURCE_FOR_STYLE
 from sentry_sdk.utils import (
     capture_internal_exceptions,
     event_from_exception,
@@ -20,7 +21,7 @@
     from typing import Optional
     from bottle import FileUpload, FormsDict, LocalRequest  # type: ignore
 
-    from sentry_sdk._types import EventProcessor
+    from sentry_sdk._types import EventProcessor, Event
 
 try:
     from bottle import (
@@ -40,7 +41,7 @@
 class BottleIntegration(Integration):
     identifier = "bottle"
 
-    transaction_style = None
+    transaction_style = ""
 
     def __init__(self, transaction_style="endpoint"):
         # type: (str) -> None
@@ -57,9 +58,9 @@ def setup_once():
         # type: () -> None
 
         try:
-            version = tuple(map(int, BOTTLE_VERSION.split(".")))
+            version = tuple(map(int, BOTTLE_VERSION.replace("-dev", "").split(".")))
         except (TypeError, ValueError):
-            raise DidNotEnable("Unparseable Bottle version: {}".format(version))
+            raise DidNotEnable("Unparsable Bottle version: {}".format(version))
 
         if version < (0, 12):
             raise DidNotEnable("Bottle 0.12 or newer required.")
@@ -176,24 +177,34 @@ def size_of_file(self, file):
         return file.content_length
 
 
+def _set_transaction_name_and_source(event, transaction_style, request):
+    # type: (Event, str, Any) -> None
+    name = ""
+
+    if transaction_style == "url":
+        name = request.route.rule or ""
+
+    elif transaction_style == "endpoint":
+        name = (
+            request.route.name
+            or transaction_from_function(request.route.callback)
+            or ""
+        )
+
+    event["transaction"] = name
+    event["transaction_info"] = {"source": SOURCE_FOR_STYLE[transaction_style]}
+
+
 def _make_request_event_processor(app, request, integration):
     # type: (Bottle, LocalRequest, BottleIntegration) -> EventProcessor
-    def inner(event, hint):
-        # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any]
 
-        try:
-            if integration.transaction_style == "endpoint":
-                event["transaction"] = request.route.name or transaction_from_function(
-                    request.route.callback
-                )
-            elif integration.transaction_style == "url":
-                event["transaction"] = request.route.rule
-        except Exception:
-            pass
+    def event_processor(event, hint):
+        # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any]
+        _set_transaction_name_and_source(event, integration.transaction_style, request)
 
         with capture_internal_exceptions():
             BottleRequestExtractor(request).extract_into_event(event)
 
         return event
 
-    return inner
+    return event_processor
diff --git a/sentry_sdk/integrations/celery.py b/sentry_sdk/integrations/celery.py
index 5ac0d32f40..ea865b35a4 100644
--- a/sentry_sdk/integrations/celery.py
+++ b/sentry_sdk/integrations/celery.py
@@ -1,10 +1,15 @@
 from __future__ import absolute_import
 
 import sys
+from sentry_sdk.consts import OP
 
 from sentry_sdk.hub import Hub
-from sentry_sdk.utils import capture_internal_exceptions, event_from_exception
-from sentry_sdk.tracing import Span
+from sentry_sdk.tracing import TRANSACTION_SOURCE_TASK
+from sentry_sdk.utils import (
+    capture_internal_exceptions,
+    event_from_exception,
+)
+from sentry_sdk.tracing import Transaction
 from sentry_sdk._compat import reraise
 from sentry_sdk.integrations import Integration, DidNotEnable
 from sentry_sdk.integrations.logging import ignore_logger
@@ -23,13 +28,14 @@
 
 
 try:
-    from celery import VERSION as CELERY_VERSION  # type: ignore
+    from celery import VERSION as CELERY_VERSION
     from celery.exceptions import (  # type: ignore
         SoftTimeLimitExceeded,
         Retry,
         Ignore,
         Reject,
     )
+    from celery.app.trace import task_has_custom
 except ImportError:
     raise DidNotEnable("Celery not installed")
 
@@ -57,11 +63,12 @@ def setup_once():
         def sentry_build_tracer(name, task, *args, **kwargs):
             # type: (Any, Any, *Any, **Any) -> Any
             if not getattr(task, "_sentry_is_patched", False):
-                # Need to patch both methods because older celery sometimes
-                # short-circuits to task.run if it thinks it's safe.
-                task.__call__ = _wrap_task_call(task, task.__call__)
-                task.run = _wrap_task_call(task, task.run)
-                task.apply_async = _wrap_apply_async(task, task.apply_async)
+                # determine whether Celery will use __call__ or run and patch
+                # accordingly
+                if task_has_custom(task, "__call__"):
+                    type(task).__call__ = _wrap_task_call(task, type(task).__call__)
+                else:
+                    task.run = _wrap_task_call(task, task.run)
 
                 # `build_tracer` is apparently called for every task
                 # invocation. Can't wrap every celery task for every invocation
@@ -72,6 +79,10 @@ def sentry_build_tracer(name, task, *args, **kwargs):
 
         trace.build_tracer = sentry_build_tracer
 
+        from celery.app.task import Task  # type: ignore
+
+        Task.apply_async = _wrap_apply_async(Task.apply_async)
+
         _patch_worker_exit()
 
         # This logger logs every status of every task that ran on the worker.
@@ -85,23 +96,34 @@ def sentry_build_tracer(name, task, *args, **kwargs):
         ignore_logger("celery.redirected")
 
 
-def _wrap_apply_async(task, f):
-    # type: (Any, F) -> F
+def _wrap_apply_async(f):
+    # type: (F) -> F
     @wraps(f)
     def apply_async(*args, **kwargs):
         # type: (*Any, **Any) -> Any
         hub = Hub.current
         integration = hub.get_integration(CeleryIntegration)
         if integration is not None and integration.propagate_traces:
-            headers = None
-            for key, value in hub.iter_trace_propagation_headers():
-                if headers is None:
-                    headers = dict(kwargs.get("headers") or {})
-                headers[key] = value
-            if headers is not None:
-                kwargs["headers"] = headers
-
-            with hub.start_span(op="celery.submit", description=task.name):
+            with hub.start_span(
+                op=OP.QUEUE_SUBMIT_CELERY, description=args[0].name
+            ) as span:
+                with capture_internal_exceptions():
+                    headers = dict(hub.iter_trace_propagation_headers(span))
+
+                    if headers:
+                        # Note: kwargs can contain headers=None, so we cannot use setdefault!
+                        # (It is unclear which broker/backend produces this.)
+                        kwarg_headers = kwargs.get("headers") or {}
+                        kwarg_headers.update(headers)
+
+                        # https://github.com/celery/celery/issues/4875
+                        #
+                        # Need to setdefault the inner headers too since other
+                        # tracing tools (dd-trace-py) also employ this exact
+                        # workaround and we don't want to break them.
+                        kwarg_headers.setdefault("headers", {}).update(headers)
+                        kwargs["headers"] = kwarg_headers
+
                 return f(*args, **kwargs)
         else:
             return f(*args, **kwargs)
@@ -130,19 +152,35 @@ def _inner(*args, **kwargs):
             scope.clear_breadcrumbs()
             scope.add_event_processor(_make_event_processor(task, *args, **kwargs))
 
-            span = Span.continue_from_headers(args[3].get("headers") or {})
-            span.op = "celery.task"
-            span.transaction = "unknown celery task"
-
-            # Could possibly use a better hook than this one
-            span.set_status("ok")
+            transaction = None
 
+            # Celery task objects are not a thing to be trusted. Even
+            # something such as attribute access can fail.
             with capture_internal_exceptions():
-                # Celery task objects are not a thing to be trusted. Even
-                # something such as attribute access can fail.
-                span.transaction = task.name
+                transaction = Transaction.continue_from_headers(
+                    args[3].get("headers") or {},
+                    op=OP.QUEUE_TASK_CELERY,
+                    name="unknown celery task",
+                    source=TRANSACTION_SOURCE_TASK,
+                )
+                transaction.name = task.name
+                transaction.set_status("ok")
+
+            if transaction is None:
+                return f(*args, **kwargs)
 
-            with hub.start_span(span):
+            with hub.start_transaction(
+                transaction,
+                custom_sampling_context={
+                    "celery_job": {
+                        "task": task.name,
+                        # for some reason, args[1] is a list if non-empty but a
+                        # tuple if empty
+                        "args": list(args[1]),
+                        "kwargs": args[2],
+                    }
+                },
+            ):
                 return f(*args, **kwargs)
 
     return _inner  # type: ignore
diff --git a/sentry_sdk/integrations/chalice.py b/sentry_sdk/integrations/chalice.py
new file mode 100644
index 0000000000..80069b2951
--- /dev/null
+++ b/sentry_sdk/integrations/chalice.py
@@ -0,0 +1,133 @@
+import sys
+
+from sentry_sdk._compat import reraise
+from sentry_sdk.hub import Hub
+from sentry_sdk.integrations import Integration, DidNotEnable
+from sentry_sdk.integrations.aws_lambda import _make_request_event_processor
+from sentry_sdk.tracing import TRANSACTION_SOURCE_COMPONENT
+from sentry_sdk.utils import (
+    capture_internal_exceptions,
+    event_from_exception,
+)
+from sentry_sdk._types import MYPY
+from sentry_sdk._functools import wraps
+
+import chalice  # type: ignore
+from chalice import Chalice, ChaliceViewError
+from chalice.app import EventSourceHandler as ChaliceEventSourceHandler  # type: ignore
+
+if MYPY:
+    from typing import Any
+    from typing import Dict
+    from typing import TypeVar
+    from typing import Callable
+
+    F = TypeVar("F", bound=Callable[..., Any])
+
+try:
+    from chalice import __version__ as CHALICE_VERSION
+except ImportError:
+    raise DidNotEnable("Chalice is not installed")
+
+
+class EventSourceHandler(ChaliceEventSourceHandler):  # type: ignore
+    def __call__(self, event, context):
+        # type: (Any, Any) -> Any
+        hub = Hub.current
+        client = hub.client  # type: Any
+
+        with hub.push_scope() as scope:
+            with capture_internal_exceptions():
+                configured_time = context.get_remaining_time_in_millis()
+                scope.add_event_processor(
+                    _make_request_event_processor(event, context, configured_time)
+                )
+            try:
+                return ChaliceEventSourceHandler.__call__(self, event, context)
+            except Exception:
+                exc_info = sys.exc_info()
+                event, hint = event_from_exception(
+                    exc_info,
+                    client_options=client.options,
+                    mechanism={"type": "chalice", "handled": False},
+                )
+                hub.capture_event(event, hint=hint)
+                hub.flush()
+                reraise(*exc_info)
+
+
+def _get_view_function_response(app, view_function, function_args):
+    # type: (Any, F, Any) -> F
+    @wraps(view_function)
+    def wrapped_view_function(**function_args):
+        # type: (**Any) -> Any
+        hub = Hub.current
+        client = hub.client  # type: Any
+        with hub.push_scope() as scope:
+            with capture_internal_exceptions():
+                configured_time = app.lambda_context.get_remaining_time_in_millis()
+                scope.set_transaction_name(
+                    app.lambda_context.function_name,
+                    source=TRANSACTION_SOURCE_COMPONENT,
+                )
+
+                scope.add_event_processor(
+                    _make_request_event_processor(
+                        app.current_request.to_dict(),
+                        app.lambda_context,
+                        configured_time,
+                    )
+                )
+            try:
+                return view_function(**function_args)
+            except Exception as exc:
+                if isinstance(exc, ChaliceViewError):
+                    raise
+                exc_info = sys.exc_info()
+                event, hint = event_from_exception(
+                    exc_info,
+                    client_options=client.options,
+                    mechanism={"type": "chalice", "handled": False},
+                )
+                hub.capture_event(event, hint=hint)
+                hub.flush()
+                raise
+
+    return wrapped_view_function  # type: ignore
+
+
+class ChaliceIntegration(Integration):
+    identifier = "chalice"
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+        try:
+            version = tuple(map(int, CHALICE_VERSION.split(".")[:3]))
+        except (ValueError, TypeError):
+            raise DidNotEnable("Unparsable Chalice version: {}".format(CHALICE_VERSION))
+        if version < (1, 20):
+            old_get_view_function_response = Chalice._get_view_function_response
+        else:
+            from chalice.app import RestAPIEventHandler
+
+            old_get_view_function_response = (
+                RestAPIEventHandler._get_view_function_response
+            )
+
+        def sentry_event_response(app, view_function, function_args):
+            # type: (Any, F, Dict[str, Any]) -> Any
+            wrapped_view_function = _get_view_function_response(
+                app, view_function, function_args
+            )
+
+            return old_get_view_function_response(
+                app, wrapped_view_function, function_args
+            )
+
+        if version < (1, 20):
+            Chalice._get_view_function_response = sentry_event_response
+        else:
+            RestAPIEventHandler._get_view_function_response = sentry_event_response
+        # for everything else (like events)
+        chalice.app.EventSourceHandler = EventSourceHandler
diff --git a/sentry_sdk/integrations/django/__init__.py b/sentry_sdk/integrations/django/__init__.py
index 4e62fe3b74..697ab484e3 100644
--- a/sentry_sdk/integrations/django/__init__.py
+++ b/sentry_sdk/integrations/django/__init__.py
@@ -6,12 +6,16 @@
 import weakref
 
 from sentry_sdk._types import MYPY
+from sentry_sdk.consts import OP, SENSITIVE_DATA_SUBSTITUTE
 from sentry_sdk.hub import Hub, _should_send_default_pii
 from sentry_sdk.scope import add_global_event_processor
 from sentry_sdk.serializer import add_global_repr_processor
-from sentry_sdk.tracing import record_sql_queries
+from sentry_sdk.tracing import SOURCE_FOR_STYLE, TRANSACTION_SOURCE_URL
+from sentry_sdk.tracing_utils import record_sql_queries
 from sentry_sdk.utils import (
+    AnnotatedValue,
     HAS_REAL_CONTEXTVARS,
+    CONTEXTVARS_ERROR_MESSAGE,
     logger,
     capture_internal_exceptions,
     event_from_exception,
@@ -25,6 +29,7 @@
 
 try:
     from django import VERSION as DJANGO_VERSION
+    from django.conf import settings as django_settings
     from django.core import signals
 
     try:
@@ -36,8 +41,13 @@
 
 
 from sentry_sdk.integrations.django.transactions import LEGACY_RESOLVER
-from sentry_sdk.integrations.django.templates import get_template_frame_from_exception
+from sentry_sdk.integrations.django.templates import (
+    get_template_frame_from_exception,
+    patch_templates,
+)
 from sentry_sdk.integrations.django.middleware import patch_django_middlewares
+from sentry_sdk.integrations.django.signals_handlers import patch_signals
+from sentry_sdk.integrations.django.views import patch_views
 
 
 if MYPY:
@@ -53,6 +63,7 @@
     from django.http.request import QueryDict
     from django.utils.datastructures import MultiValueDict
 
+    from sentry_sdk.scope import Scope
     from sentry_sdk.integrations.wsgi import _ScopedResponse
     from sentry_sdk._types import Event, Hint, EventProcessor, NotImplementedType
 
@@ -63,7 +74,6 @@ def is_authenticated(request_user):
         # type: (Any) -> bool
         return request_user.is_authenticated()
 
-
 else:
 
     def is_authenticated(request_user):
@@ -77,7 +87,7 @@ def is_authenticated(request_user):
 class DjangoIntegration(Integration):
     identifier = "django"
 
-    transaction_style = None
+    transaction_style = ""
     middleware_spans = None
 
     def __init__(self, transaction_style="url", middleware_spans=True):
@@ -94,8 +104,8 @@ def __init__(self, transaction_style="url", middleware_spans=True):
     def setup_once():
         # type: () -> None
 
-        if DJANGO_VERSION < (1, 6):
-            raise DidNotEnable("Django 1.6 or newer is required.")
+        if DJANGO_VERSION < (1, 8):
+            raise DidNotEnable("Django 1.8 or newer is required.")
 
         install_sql_hook()
         # Patch in our custom middleware.
@@ -115,43 +125,19 @@ def sentry_patched_wsgi_handler(self, environ, start_response):
 
             bound_old_app = old_app.__get__(self, WSGIHandler)
 
-            return SentryWsgiMiddleware(bound_old_app)(environ, start_response)
+            from django.conf import settings
 
-        WSGIHandler.__call__ = sentry_patched_wsgi_handler
+            use_x_forwarded_for = settings.USE_X_FORWARDED_HOST
 
-        _patch_django_asgi_handler()
+            return SentryWsgiMiddleware(bound_old_app, use_x_forwarded_for)(
+                environ, start_response
+            )
 
-        # patch get_response, because at that point we have the Django request
-        # object
-        from django.core.handlers.base import BaseHandler
-
-        old_get_response = BaseHandler.get_response
-
-        def sentry_patched_get_response(self, request):
-            # type: (Any, WSGIRequest) -> Union[HttpResponse, BaseException]
-            hub = Hub.current
-            integration = hub.get_integration(DjangoIntegration)
-            if integration is not None:
-                _patch_drf()
-
-                with hub.configure_scope() as scope:
-                    # Rely on WSGI middleware to start a trace
-                    try:
-                        if integration.transaction_style == "function_name":
-                            scope.transaction = transaction_from_function(
-                                resolve(request.path).func
-                            )
-                        elif integration.transaction_style == "url":
-                            scope.transaction = LEGACY_RESOLVER.resolve(request.path)
-                    except Exception:
-                        pass
+        WSGIHandler.__call__ = sentry_patched_wsgi_handler
 
-                    scope.add_event_processor(
-                        _make_event_processor(weakref.ref(request), integration)
-                    )
-            return old_get_response(self, request)
+        _patch_get_response()
 
-        BaseHandler.get_response = sentry_patched_get_response
+        _patch_django_asgi_handler()
 
         signals.got_request_exception.connect(_got_request_exception)
 
@@ -186,7 +172,7 @@ def process_django_templates(event, hint):
                     for i in reversed(range(len(frames))):
                         f = frames[i]
                         if (
-                            f.get("function") in ("parse", "render")
+                            f.get("function") in ("Parser.parse", "parse", "render")
                             and f.get("module") == "django.template.base"
                         ):
                             i += 1
@@ -220,7 +206,7 @@ def _django_queryset_repr(value, hint):
             # querysets. This might be surprising to the user but it's likely
             # less annoying.
 
-            return u"<%s from %s at 0x%x>" % (
+            return "<%s from %s at 0x%x>" % (
                 value.__class__.__name__,
                 value.__module__,
                 id(value),
@@ -228,6 +214,9 @@ def _django_queryset_repr(value, hint):
 
         _patch_channels()
         patch_django_middlewares()
+        patch_views()
+        patch_templates()
+        patch_signals()
 
 
 _DRF_PATCHED = False
@@ -301,11 +290,12 @@ def _patch_channels():
         # requests.
         #
         # We cannot hard-raise here because channels may not be used at all in
-        # the current process.
+        # the current process. That is the case when running traditional WSGI
+        # workers in gunicorn+gevent and the websocket stuff in a separate
+        # process.
         logger.warning(
-            "We detected that you are using Django channels 2.0. To get proper "
-            "instrumentation for ASGI requests, the Sentry SDK requires "
-            "Python 3.7+ or the aiocontextvars package from PyPI."
+            "We detected that you are using Django channels 2.0."
+            + CONTEXTVARS_ERROR_MESSAGE
         )
 
     from sentry_sdk.integrations.django.asgi import patch_channels_asgi_handler_impl
@@ -324,12 +314,10 @@ def _patch_django_asgi_handler():
         # We better have contextvars or we're going to leak state between
         # requests.
         #
-        # We cannot hard-raise here because Django may not be used at all in
-        # the current process.
+        # We cannot hard-raise here because Django's ASGI stuff may not be used
+        # at all.
         logger.warning(
-            "We detected that you are using Django 3. To get proper "
-            "instrumentation for ASGI requests, the Sentry SDK requires "
-            "Python 3.7+ or the aiocontextvars package from PyPI."
+            "We detected that you are using Django 3." + CONTEXTVARS_ERROR_MESSAGE
         )
 
     from sentry_sdk.integrations.django.asgi import patch_django_asgi_handler_impl
@@ -337,6 +325,102 @@ def _patch_django_asgi_handler():
     patch_django_asgi_handler_impl(ASGIHandler)
 
 
+def _set_transaction_name_and_source(scope, transaction_style, request):
+    # type: (Scope, str, WSGIRequest) -> None
+    try:
+        transaction_name = None
+        if transaction_style == "function_name":
+            fn = resolve(request.path).func
+            transaction_name = transaction_from_function(getattr(fn, "view_class", fn))
+
+        elif transaction_style == "url":
+            if hasattr(request, "urlconf"):
+                transaction_name = LEGACY_RESOLVER.resolve(
+                    request.path_info, urlconf=request.urlconf
+                )
+            else:
+                transaction_name = LEGACY_RESOLVER.resolve(request.path_info)
+
+        if transaction_name is None:
+            transaction_name = request.path_info
+            source = TRANSACTION_SOURCE_URL
+        else:
+            source = SOURCE_FOR_STYLE[transaction_style]
+
+        scope.set_transaction_name(
+            transaction_name,
+            source=source,
+        )
+    except Exception:
+        pass
+
+
+def _before_get_response(request):
+    # type: (WSGIRequest) -> None
+    hub = Hub.current
+    integration = hub.get_integration(DjangoIntegration)
+    if integration is None:
+        return
+
+    _patch_drf()
+
+    with hub.configure_scope() as scope:
+        # Rely on WSGI middleware to start a trace
+        _set_transaction_name_and_source(scope, integration.transaction_style, request)
+
+        scope.add_event_processor(
+            _make_event_processor(weakref.ref(request), integration)
+        )
+
+
+def _attempt_resolve_again(request, scope, transaction_style):
+    # type: (WSGIRequest, Scope, str) -> None
+    """
+    Some Django middlewares overwrite request.urlconf, so we need to
+    respect that contract and try to resolve the URL again.
+    """
+    if not hasattr(request, "urlconf"):
+        return
+
+    _set_transaction_name_and_source(scope, transaction_style, request)
+
+
+def _after_get_response(request):
+    # type: (WSGIRequest) -> None
+    hub = Hub.current
+    integration = hub.get_integration(DjangoIntegration)
+    if integration is None or integration.transaction_style != "url":
+        return
+
+    with hub.configure_scope() as scope:
+        _attempt_resolve_again(request, scope, integration.transaction_style)
+
+
+def _patch_get_response():
+    # type: () -> None
+    """
+    patch get_response, because at that point we have the Django request object
+    """
+    from django.core.handlers.base import BaseHandler
+
+    old_get_response = BaseHandler.get_response
+
+    def sentry_patched_get_response(self, request):
+        # type: (Any, WSGIRequest) -> Union[HttpResponse, BaseException]
+        _before_get_response(request)
+        rv = old_get_response(self, request)
+        _after_get_response(request)
+        return rv
+
+    BaseHandler.get_response = sentry_patched_get_response
+
+    if hasattr(BaseHandler, "get_response_async"):
+        from sentry_sdk.integrations.django.asgi import patch_get_response_async
+
+        patch_get_response_async(BaseHandler, _before_get_response)
+
+
 def _make_event_processor(weak_request, integration):
     # type: (Callable[[], WSGIRequest], DjangoIntegration) -> EventProcessor
     def event_processor(event, hint):
@@ -373,6 +457,10 @@ def _got_request_exception(request=None, **kwargs):
     integration = hub.get_integration(DjangoIntegration)
     if integration is not None:
 
+        if request is not None and integration.transaction_style == "url":
+            with hub.configure_scope() as scope:
+                _attempt_resolve_again(request, scope, integration.transaction_style)
+
         # If an integration is there, a client has to be there.
         client = hub.client  # type: Any
 
@@ -390,8 +478,20 @@ def env(self):
         return self.request.META
 
     def cookies(self):
-        # type: () -> Dict[str, str]
-        return self.request.COOKIES
+        # type: () -> Dict[str, Union[str, AnnotatedValue]]
+        privacy_cookies = [
+            django_settings.CSRF_COOKIE_NAME,
+            django_settings.SESSION_COOKIE_NAME,
+        ]
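+        # e.g. {"csrftoken": "...", "sessionid": "...", "theme": "dark"} becomes
+        # {"csrftoken": "[Filtered]", "sessionid": "[Filtered]", "theme": "dark"}
+        # (assuming the default SENSITIVE_DATA_SUBSTITUTE and cookie names).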
+
+        clean_cookies = {}  # type: Dict[str, Union[str, AnnotatedValue]]
+        for (key, val) in self.request.COOKIES.items():
+            if key in privacy_cookies:
+                clean_cookies[key] = SENSITIVE_DATA_SUBSTITUTE
+            else:
+                clean_cookies[key] = val
+
+        return clean_cookies
 
     def raw_data(self):
         # type: () -> bytes
@@ -450,9 +550,17 @@ def install_sql_hook():
     except ImportError:
         from django.db.backends.util import CursorWrapper
 
+    try:
+        # Django 1.6 and 1.7 compatibility
+        from django.db.backends import BaseDatabaseWrapper
+    except ImportError:
+        # django 1.8 or later
+        from django.db.backends.base.base import BaseDatabaseWrapper
+
     try:
         real_execute = CursorWrapper.execute
         real_executemany = CursorWrapper.executemany
+        real_connect = BaseDatabaseWrapper.connect
     except AttributeError:
         # This won't work on Django versions < 1.6
         return
@@ -479,6 +587,19 @@ def executemany(self, sql, param_list):
         ):
             return real_executemany(self, sql, param_list)
 
+    def connect(self):
+        # type: (BaseDatabaseWrapper) -> None
+        hub = Hub.current
+        if hub.get_integration(DjangoIntegration) is None:
+            return real_connect(self)
+
+        with capture_internal_exceptions():
+            hub.add_breadcrumb(message="connect", category="query")
+
+        with hub.start_span(op=OP.DB, description="connect"):
+            return real_connect(self)
+
     CursorWrapper.execute = execute
     CursorWrapper.executemany = executemany
+    BaseDatabaseWrapper.connect = connect
     ignore_logger("django.db.backends")
diff --git a/sentry_sdk/integrations/django/asgi.py b/sentry_sdk/integrations/django/asgi.py
index 96ae3e0809..955d8d19e8 100644
--- a/sentry_sdk/integrations/django/asgi.py
+++ b/sentry_sdk/integrations/django/asgi.py
@@ -6,18 +6,28 @@
 `django.core.handlers.asgi`.
 """
 
-from sentry_sdk import Hub
+import asyncio
+import threading
+
+from sentry_sdk import Hub, _functools
 from sentry_sdk._types import MYPY
+from sentry_sdk.consts import OP
 
-from sentry_sdk.integrations.django import DjangoIntegration
 from sentry_sdk.integrations.asgi import SentryAsgiMiddleware
 
 if MYPY:
     from typing import Any
+    from typing import Union
+    from typing import Callable
+
+    from django.http.response import HttpResponse
 
 
 def patch_django_asgi_handler_impl(cls):
     # type: (Any) -> None
+
+    from sentry_sdk.integrations.django import DjangoIntegration
+
     old_app = cls.__call__
 
     async def sentry_patched_asgi_handler(self, scope, receive, send):
@@ -25,23 +35,123 @@ async def sentry_patched_asgi_handler(self, scope, receive, send):
         if Hub.current.get_integration(DjangoIntegration) is None:
             return await old_app(self, scope, receive, send)
 
-        middleware = SentryAsgiMiddleware(old_app.__get__(self, cls))._run_asgi3
+        middleware = SentryAsgiMiddleware(
+            old_app.__get__(self, cls), unsafe_context_data=True
+        )._run_asgi3
         return await middleware(scope, receive, send)
 
     cls.__call__ = sentry_patched_asgi_handler
 
 
+def patch_get_response_async(cls, _before_get_response):
+    # type: (Any, Any) -> None
+    old_get_response_async = cls.get_response_async
+
+    async def sentry_patched_get_response_async(self, request):
+        # type: (Any, Any) -> Union[HttpResponse, BaseException]
+        _before_get_response(request)
+        return await old_get_response_async(self, request)
+
+    cls.get_response_async = sentry_patched_get_response_async
+
+
 def patch_channels_asgi_handler_impl(cls):
     # type: (Any) -> None
-    old_app = cls.__call__
 
-    async def sentry_patched_asgi_handler(self, receive, send):
-        # type: (Any, Any, Any) -> Any
-        if Hub.current.get_integration(DjangoIntegration) is None:
-            return await old_app(self, receive, send)
+    import channels  # type: ignore
+    from sentry_sdk.integrations.django import DjangoIntegration
 
-        middleware = SentryAsgiMiddleware(lambda _scope: old_app.__get__(self, cls))
+    if channels.__version__ < "3.0.0":
 
-        return await middleware(self.scope)(receive, send)
+        old_app = cls.__call__
 
-    cls.__call__ = sentry_patched_asgi_handler
+        async def sentry_patched_asgi_handler(self, receive, send):
+            # type: (Any, Any, Any) -> Any
+            if Hub.current.get_integration(DjangoIntegration) is None:
+                return await old_app(self, receive, send)
+
+            middleware = SentryAsgiMiddleware(
+                lambda _scope: old_app.__get__(self, cls), unsafe_context_data=True
+            )
+
+            return await middleware(self.scope)(receive, send)
+
+        cls.__call__ = sentry_patched_asgi_handler
+
+    else:
+        # The ASGI handler in Channels >= 3 has the same signature as
+        # the Django handler.
+        patch_django_asgi_handler_impl(cls)
+
+
+def wrap_async_view(hub, callback):
+    # type: (Hub, Any) -> Any
+    @_functools.wraps(callback)
+    async def sentry_wrapped_callback(request, *args, **kwargs):
+        # type: (Any, *Any, **Any) -> Any
+
+        with hub.configure_scope() as sentry_scope:
+            if sentry_scope.profile is not None:
+                sentry_scope.profile.active_thread_id = threading.current_thread().ident
+
+            with hub.start_span(
+                op=OP.VIEW_RENDER, description=request.resolver_match.view_name
+            ):
+                return await callback(request, *args, **kwargs)
+
+    return sentry_wrapped_callback
+
+
+def _asgi_middleware_mixin_factory(_check_middleware_span):
+    # type: (Callable[..., Any]) -> Any
+    """
+    Mixin class factory that generates a middleware mixin for handling requests
+    in async mode.
+    """
+
+    class SentryASGIMixin:
+        if MYPY:
+            _inner = None
+
+        def __init__(self, get_response):
+            # type: (Callable[..., Any]) -> None
+            self.get_response = get_response
+            self._acall_method = None
+            self._async_check()
+
+        def _async_check(self):
+            # type: () -> None
+            """
+            If get_response is a coroutine function, switch to async mode so
+            that a thread is not consumed for the duration of the request.
+            Taken from django.utils.deprecation::MiddlewareMixin._async_check
+            """
+            if asyncio.iscoroutinefunction(self.get_response):
+                self._is_coroutine = asyncio.coroutines._is_coroutine  # type: ignore
+
+        def async_route_check(self):
+            # type: () -> bool
+            """
+            Check whether we are in async mode and, if so, forward the
+            handling of requests to __acall__.
+            """
+            return asyncio.iscoroutinefunction(self.get_response)
+
+        async def __acall__(self, *args, **kwargs):
+            # type: (*Any, **Any) -> Any
+            f = self._acall_method
+            if f is None:
+                if hasattr(self._inner, "__acall__"):
+                    self._acall_method = f = self._inner.__acall__  # type: ignore
+                else:
+                    self._acall_method = f = self._inner
+
+            middleware_span = _check_middleware_span(old_method=f)
+
+            if middleware_span is None:
+                return await f(*args, **kwargs)
+
+            with middleware_span:
+                return await f(*args, **kwargs)
+
+    return SentryASGIMixin
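
Note: the mixin factory above reuses Django's own MiddlewareMixin trick — whether
a middleware instance behaves as sync or async hinges entirely on
asyncio.iscoroutinefunction(get_response). A minimal standalone sketch of that
routing (illustrative names, not part of the SDK):

    import asyncio

    class DualModeMiddleware:
        def __init__(self, get_response):
            self.get_response = get_response
            if asyncio.iscoroutinefunction(get_response):
                # Mark the instance as a coroutine callable for Django,
                # mirroring _async_check() above (private asyncio API).
                self._is_coroutine = asyncio.coroutines._is_coroutine

        def __call__(self, request):
            if asyncio.iscoroutinefunction(self.get_response):
                return self.__acall__(request)  # awaited by Django
            return self.get_response(request)

        async def __acall__(self, request):
            return await self.get_response(request)
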
diff --git a/sentry_sdk/integrations/django/middleware.py b/sentry_sdk/integrations/django/middleware.py
index 501f2f4c7c..35680e10b1 100644
--- a/sentry_sdk/integrations/django/middleware.py
+++ b/sentry_sdk/integrations/django/middleware.py
@@ -7,6 +7,7 @@
 from sentry_sdk import Hub
 from sentry_sdk._functools import wraps
 from sentry_sdk._types import MYPY
+from sentry_sdk.consts import OP
 from sentry_sdk.utils import (
     ContextVar,
     transaction_from_function,
@@ -16,8 +17,11 @@
 if MYPY:
     from typing import Any
     from typing import Callable
+    from typing import Optional
     from typing import TypeVar
 
+    from sentry_sdk.tracing import Span
+
     F = TypeVar("F", bound=Callable[..., Any])
 
 _import_string_should_wrap_middleware = ContextVar(
@@ -30,6 +34,12 @@
     import_string_name = "import_string"
 
 
+if DJANGO_VERSION < (3, 1):
+    _asgi_middleware_mixin_factory = lambda _: object
+else:
+    from .asgi import _asgi_middleware_mixin_factory
+
+
 def patch_django_middlewares():
     # type: () -> None
     from django.core.handlers import base
@@ -49,11 +59,11 @@ def sentry_patched_import_string(dotted_path):
 
     old_load_middleware = base.BaseHandler.load_middleware
 
-    def sentry_patched_load_middleware(self):
-        # type: (base.BaseHandler) -> Any
+    def sentry_patched_load_middleware(*args, **kwargs):
+        # type: (*Any, **Any) -> Any
         _import_string_should_wrap_middleware.set(True)
         try:
-            return old_load_middleware(self)
+            return old_load_middleware(*args, **kwargs)
         finally:
             _import_string_should_wrap_middleware.set(False)
 
@@ -64,44 +74,71 @@ def _wrap_middleware(middleware, middleware_name):
     # type: (Any, str) -> Any
     from sentry_sdk.integrations.django import DjangoIntegration
 
+    def _check_middleware_span(old_method):
+        # type: (Callable[..., Any]) -> Optional[Span]
+        hub = Hub.current
+        integration = hub.get_integration(DjangoIntegration)
+        if integration is None or not integration.middleware_spans:
+            return None
+
+        function_name = transaction_from_function(old_method)
+
+        description = middleware_name
+        function_basename = getattr(old_method, "__name__", None)
+        if function_basename:
+            description = "{}.{}".format(description, function_basename)
+
+        middleware_span = hub.start_span(
+            op=OP.MIDDLEWARE_DJANGO, description=description
+        )
+        middleware_span.set_tag("django.function_name", function_name)
+        middleware_span.set_tag("django.middleware_name", middleware_name)
+
+        return middleware_span
+
     def _get_wrapped_method(old_method):
         # type: (F) -> F
         with capture_internal_exceptions():
 
             def sentry_wrapped_method(*args, **kwargs):
                 # type: (*Any, **Any) -> Any
-                hub = Hub.current
-                integration = hub.get_integration(DjangoIntegration)
-                if integration is None or not integration.middleware_spans:
-                    return old_method(*args, **kwargs)
+                middleware_span = _check_middleware_span(old_method)
 
-                function_name = transaction_from_function(old_method)
-
-                description = middleware_name
-                function_basename = getattr(old_method, "__name__", None)
-                if function_basename:
-                    description = "{}.{}".format(description, function_basename)
+                if middleware_span is None:
+                    return old_method(*args, **kwargs)
 
-                with hub.start_span(
-                    op="django.middleware", description=description
-                ) as span:
-                    span.set_tag("django.function_name", function_name)
-                    span.set_tag("django.middleware_name", middleware_name)
+                with middleware_span:
                     return old_method(*args, **kwargs)
 
             try:
                 # fails for __call__ of function on Python 2 (see py2.7-django-1.11)
-                return wraps(old_method)(sentry_wrapped_method)  # type: ignore
+                sentry_wrapped_method = wraps(old_method)(sentry_wrapped_method)
+
+                # Necessary for Django 3.1
+                sentry_wrapped_method.__self__ = old_method.__self__  # type: ignore
             except Exception:
-                return sentry_wrapped_method  # type: ignore
+                pass
+
+            return sentry_wrapped_method  # type: ignore
 
         return old_method
 
-    class SentryWrappingMiddleware(object):
-        def __init__(self, *args, **kwargs):
-            # type: (*Any, **Any) -> None
-            self._inner = middleware(*args, **kwargs)
+    class SentryWrappingMiddleware(
+        _asgi_middleware_mixin_factory(_check_middleware_span)  # type: ignore
+    ):
+
+        async_capable = getattr(middleware, "async_capable", False)
+
+        def __init__(self, get_response=None, *args, **kwargs):
+            # type: (Optional[Callable[..., Any]], *Any, **Any) -> None
+            if get_response:
+                self._inner = middleware(get_response, *args, **kwargs)
+            else:
+                self._inner = middleware(*args, **kwargs)
+            self.get_response = get_response
             self._call_method = None
+            if self.async_capable:
+                super(SentryWrappingMiddleware, self).__init__(get_response)
 
         # We need correct behavior for `hasattr()`, which we can only determine
         # when we have an instance of the middleware we're wrapping.
@@ -123,12 +160,27 @@ def __getattr__(self, method_name):
 
         def __call__(self, *args, **kwargs):
             # type: (*Any, **Any) -> Any
+            if hasattr(self, "async_route_check") and self.async_route_check():
+                return self.__acall__(*args, **kwargs)
+
             f = self._call_method
             if f is None:
-                self._call_method = f = _get_wrapped_method(self._inner.__call__)
-            return f(*args, **kwargs)
+                self._call_method = f = self._inner.__call__
+
+            middleware_span = _check_middleware_span(old_method=f)
+
+            if middleware_span is None:
+                return f(*args, **kwargs)
+
+            with middleware_span:
+                return f(*args, **kwargs)
 
-    if hasattr(middleware, "__name__"):
-        SentryWrappingMiddleware.__name__ = middleware.__name__
+    for attr in (
+        "__name__",
+        "__module__",
+        "__qualname__",
+    ):
+        if hasattr(middleware, attr):
+            setattr(SentryWrappingMiddleware, attr, getattr(middleware, attr))
 
     return SentryWrappingMiddleware
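
Note: with this change each middleware invocation becomes a
`middleware.django` span (OP.MIDDLEWARE_DJANGO), tagged with
django.function_name and django.middleware_name, on both the sync and async
paths. A hedged sketch of opting out, using the existing middleware_spans
option checked in _check_middleware_span (the DSN is a placeholder):

    import sentry_sdk
    from sentry_sdk.integrations.django import DjangoIntegration

    sentry_sdk.init(
        dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder
        integrations=[DjangoIntegration(middleware_spans=False)],
    )
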
diff --git a/sentry_sdk/integrations/django/signals_handlers.py b/sentry_sdk/integrations/django/signals_handlers.py
new file mode 100644
index 0000000000..a5687c897d
--- /dev/null
+++ b/sentry_sdk/integrations/django/signals_handlers.py
@@ -0,0 +1,74 @@
+# -*- coding: utf-8 -*-
+from __future__ import absolute_import
+
+from django.dispatch import Signal
+
+from sentry_sdk import Hub
+from sentry_sdk._functools import wraps
+from sentry_sdk._types import MYPY
+from sentry_sdk.consts import OP
+
+
+if MYPY:
+    from typing import Any
+    from typing import Callable
+    from typing import List
+
+
+def _get_receiver_name(receiver):
+    # type: (Callable[..., Any]) -> str
+    name = ""
+
+    if hasattr(receiver, "__qualname__"):
+        name = receiver.__qualname__
+    elif hasattr(receiver, "__name__"):  # Python 2.7 has no __qualname__
+        name = receiver.__name__
+    elif hasattr(
+        receiver, "func"
+    ):  # certain functions (like partials) don't have a name
+        if hasattr(receiver.func, "__name__"):  # type: ignore
+            name = "partial()"  # type: ignore
+
+    if (
+        name == ""
+    ):  # In case nothing was found, return the string representation (this is the slowest case)
+        return str(receiver)
+
+    if hasattr(receiver, "__module__"):  # prepend with module, if there is one
+        name = receiver.__module__ + "." + name
+
+    return name
+
+
+def patch_signals():
+    # type: () -> None
+    """Patch django signal receivers to create a span"""
+
+    old_live_receivers = Signal._live_receivers
+
+    def _sentry_live_receivers(self, sender):
+        # type: (Signal, Any) -> List[Callable[..., Any]]
+        hub = Hub.current
+        receivers = old_live_receivers(self, sender)
+
+        def sentry_receiver_wrapper(receiver):
+            # type: (Callable[..., Any]) -> Callable[..., Any]
+            @wraps(receiver)
+            def wrapper(*args, **kwargs):
+                # type: (*Any, **Any) -> Any
+                signal_name = _get_receiver_name(receiver)
+                with hub.start_span(
+                    op=OP.EVENT_DJANGO,
+                    description=signal_name,
+                ) as span:
+                    span.set_data("signal", signal_name)
+                    return receiver(*args, **kwargs)
+
+            return wrapper
+
+        for idx, receiver in enumerate(receivers):
+            receivers[idx] = sentry_receiver_wrapper(receiver)
+
+        return receivers
+
+    Signal._live_receivers = _sentry_live_receivers
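
Note: illustrative expectations for _get_receiver_name, derived from the
branches above (assume the module is importable as myapp.signals; these are
not SDK tests):

    from functools import partial

    def handler(sender, **kwargs):
        pass

    # _get_receiver_name(handler)          -> "myapp.signals.handler"
    # _get_receiver_name(partial(handler)) -> "functools.partial()"
    #   (partials lack __qualname__/__name__, so the `func` branch fires
    #    and the class-level __module__ "functools" gets prepended)
    # _get_receiver_name(object())         -> str(receiver) fallback
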
diff --git a/sentry_sdk/integrations/django/templates.py b/sentry_sdk/integrations/django/templates.py
index 2285644909..39279be4ce 100644
--- a/sentry_sdk/integrations/django/templates.py
+++ b/sentry_sdk/integrations/django/templates.py
@@ -1,6 +1,9 @@
 from django.template import TemplateSyntaxError
+from django import VERSION as DJANGO_VERSION
 
+from sentry_sdk import _functools, Hub
 from sentry_sdk._types import MYPY
+from sentry_sdk.consts import OP
 
 if MYPY:
     from typing import Any
@@ -40,6 +43,61 @@ def get_template_frame_from_exception(exc_value):
     return None
 
 
+def _get_template_name_description(template_name):
+    # type: (str) -> str
+    if isinstance(template_name, (list, tuple)):
+        if template_name:
+            return "[{}, ...]".format(template_name[0])
+    else:
+        return template_name
+
+
+def patch_templates():
+    # type: () -> None
+    from django.template.response import SimpleTemplateResponse
+    from sentry_sdk.integrations.django import DjangoIntegration
+
+    real_rendered_content = SimpleTemplateResponse.rendered_content
+
+    @property  # type: ignore
+    def rendered_content(self):
+        # type: (SimpleTemplateResponse) -> str
+        hub = Hub.current
+        if hub.get_integration(DjangoIntegration) is None:
+            return real_rendered_content.fget(self)
+
+        with hub.start_span(
+            op=OP.TEMPLATE_RENDER,
+            description=_get_template_name_description(self.template_name),
+        ) as span:
+            span.set_data("context", self.context_data)
+            return real_rendered_content.fget(self)
+
+    SimpleTemplateResponse.rendered_content = rendered_content
+
+    if DJANGO_VERSION < (1, 7):
+        return
+    import django.shortcuts
+
+    real_render = django.shortcuts.render
+
+    @_functools.wraps(real_render)
+    def render(request, template_name, context=None, *args, **kwargs):
+        # type: (django.http.HttpRequest, str, Optional[Dict[str, Any]], *Any, **Any) -> django.http.HttpResponse
+        hub = Hub.current
+        if hub.get_integration(DjangoIntegration) is None:
+            return real_render(request, template_name, context, *args, **kwargs)
+
+        with hub.start_span(
+            op=OP.TEMPLATE_RENDER,
+            description=_get_template_name_description(template_name),
+        ) as span:
+            span.set_data("context", context)
+            return real_render(request, template_name, context, *args, **kwargs)
+
+    django.shortcuts.render = render
+
+
 def _get_template_frame_from_debug(debug):
     # type: (Dict[str, Any]) -> Dict[str, Any]
     if debug is None:
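
Note: _get_template_name_description abbreviates a list of candidate
templates to its first entry, so span descriptions stay short. A hedged
sanity check against the helper defined in this diff:

    from sentry_sdk.integrations.django.templates import (
        _get_template_name_description,
    )

    assert _get_template_name_description("home.html") == "home.html"
    assert _get_template_name_description(["a.html", "b.html"]) == "[a.html, ...]"
    # An empty list falls through both branches and yields None, leaving
    # the span description empty in that (unlikely) case.
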
diff --git a/sentry_sdk/integrations/django/transactions.py b/sentry_sdk/integrations/django/transactions.py
index f20866ef95..8b6fc95f99 100644
--- a/sentry_sdk/integrations/django/transactions.py
+++ b/sentry_sdk/integrations/django/transactions.py
@@ -37,7 +37,7 @@ def get_regex(resolver_or_pattern):
 
 class RavenResolver(object):
     _optional_group_matcher = re.compile(r"\(\?\:([^\)]+)\)")
-    _named_group_matcher = re.compile(r"\(\?P<(\w+)>[^\)]+\)")
+    _named_group_matcher = re.compile(r"\(\?P<(\w+)>[^\)]+\)+")
     _non_named_group_matcher = re.compile(r"\([^\)]+\)")
     # [foo|bar|baz]
     _either_option_matcher = re.compile(r"\[([^\]]+)\|([^\]]+)\]")
@@ -76,6 +76,8 @@ def _simplify(self, pattern):
             result.replace("^", "")
             .replace("$", "")
             .replace("?", "")
+            .replace("\\A", "")
+            .replace("\\Z", "")
             .replace("//", "/")
             .replace("\\", "")
         )
@@ -125,10 +127,10 @@ def resolve(
         path,  # type: str
         urlconf=None,  # type: Union[None, Tuple[URLPattern, URLPattern, URLResolver], Tuple[URLPattern]]
     ):
-        # type: (...) -> str
+        # type: (...) -> Optional[str]
         resolver = get_resolver(urlconf)
         match = self._resolve(resolver, path)
-        return match or path
+        return match
 
 
 LEGACY_RESOLVER = RavenResolver()
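
Note: the widened _named_group_matcher (trailing `+`) lets the resolver
consume the repeated closing parens produced by nested converter patterns,
and the new "\A"/"\Z" replacements strip the anchors newer Django versions
emit. Illustrative simplifications, assuming _simplify substitutes named
groups with "{name}" before the replace chain shown above:

    from sentry_sdk.integrations.django.transactions import LEGACY_RESOLVER

    # Expected (illustrative) outputs of the private _simplify helper:
    print(LEGACY_RESOLVER._simplify(r"^api/(?P<project_id>[^/.]+)/store/$"))
    # -> "api/{project_id}/store/"
    print(LEGACY_RESOLVER._simplify(r"\Aarticles/(?P<slug>[\w-]+)/\Z"))
    # -> "articles/{slug}/"
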
diff --git a/sentry_sdk/integrations/django/views.py b/sentry_sdk/integrations/django/views.py
new file mode 100644
index 0000000000..735822aa72
--- /dev/null
+++ b/sentry_sdk/integrations/django/views.py
@@ -0,0 +1,89 @@
+import threading
+
+from sentry_sdk.consts import OP
+from sentry_sdk.hub import Hub
+from sentry_sdk._types import MYPY
+from sentry_sdk import _functools
+
+if MYPY:
+    from typing import Any
+
+
+try:
+    from asyncio import iscoroutinefunction
+except ImportError:
+    iscoroutinefunction = None  # type: ignore
+
+
+try:
+    from sentry_sdk.integrations.django.asgi import wrap_async_view
+except (ImportError, SyntaxError):
+    wrap_async_view = None  # type: ignore
+
+
+def patch_views():
+    # type: () -> None
+
+    from django.core.handlers.base import BaseHandler
+    from django.template.response import SimpleTemplateResponse
+    from sentry_sdk.integrations.django import DjangoIntegration
+
+    old_make_view_atomic = BaseHandler.make_view_atomic
+    old_render = SimpleTemplateResponse.render
+
+    def sentry_patched_render(self):
+        # type: (SimpleTemplateResponse) -> Any
+        hub = Hub.current
+        with hub.start_span(
+            op=OP.VIEW_RESPONSE_RENDER, description="serialize response"
+        ):
+            return old_render(self)
+
+    @_functools.wraps(old_make_view_atomic)
+    def sentry_patched_make_view_atomic(self, *args, **kwargs):
+        # type: (Any, *Any, **Any) -> Any
+        callback = old_make_view_atomic(self, *args, **kwargs)
+
+        # XXX: The wrapper function is created for every request. Find a more
+        # efficient way to wrap views (or build a cache?)
+
+        hub = Hub.current
+        integration = hub.get_integration(DjangoIntegration)
+
+        if integration is not None and integration.middleware_spans:
+
+            if (
+                iscoroutinefunction is not None
+                and wrap_async_view is not None
+                and iscoroutinefunction(callback)
+            ):
+                sentry_wrapped_callback = wrap_async_view(hub, callback)
+            else:
+                sentry_wrapped_callback = _wrap_sync_view(hub, callback)
+
+        else:
+            sentry_wrapped_callback = callback
+
+        return sentry_wrapped_callback
+
+    SimpleTemplateResponse.render = sentry_patched_render
+    BaseHandler.make_view_atomic = sentry_patched_make_view_atomic
+
+
+def _wrap_sync_view(hub, callback):
+    # type: (Hub, Any) -> Any
+    @_functools.wraps(callback)
+    def sentry_wrapped_callback(request, *args, **kwargs):
+        # type: (Any, *Any, **Any) -> Any
+        with hub.configure_scope() as sentry_scope:
+            # Set the active thread id to the handler thread for sync views.
+            # This isn't necessary for async views, since those run on the main thread.
+            if sentry_scope.profile is not None:
+                sentry_scope.profile.active_thread_id = threading.current_thread().ident
+
+            with hub.start_span(
+                op=OP.VIEW_RENDER, description=request.resolver_match.view_name
+            ):
+                return callback(request, *args, **kwargs)
+
+    return sentry_wrapped_callback
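
Note: a paraphrase (illustrative, not the SDK's exact code) of the dispatch
in sentry_patched_make_view_atomic — a coroutine view only gets the async
wrapper when asyncio and the asgi module imported cleanly; everything else
takes the sync path, which additionally pins the profiler's active thread id:

    import asyncio

    def choose_wrapper(hub, callback, wrap_async_view, wrap_sync_view):
        # wrap_async_view is None on Python 2 or when the asgi module
        # failed to import (see the try/except at the top of views.py).
        if wrap_async_view is not None and asyncio.iscoroutinefunction(callback):
            return wrap_async_view(hub, callback)
        return wrap_sync_view(hub, callback)
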
diff --git a/sentry_sdk/integrations/excepthook.py b/sentry_sdk/integrations/excepthook.py
index d8aead097a..1f16ff0b06 100644
--- a/sentry_sdk/integrations/excepthook.py
+++ b/sentry_sdk/integrations/excepthook.py
@@ -10,11 +10,13 @@
     from typing import Callable
     from typing import Any
     from typing import Type
+    from typing import Optional
 
     from types import TracebackType
 
     Excepthook = Callable[
-        [Type[BaseException], BaseException, TracebackType], Any,
+        [Type[BaseException], BaseException, Optional[TracebackType]],
+        Any,
     ]
 
 
@@ -42,7 +44,7 @@ def setup_once():
 def _make_excepthook(old_excepthook):
     # type: (Excepthook) -> Excepthook
     def sentry_sdk_excepthook(type_, value, traceback):
-        # type: (Type[BaseException], BaseException, TracebackType) -> None
+        # type: (Type[BaseException], BaseException, Optional[TracebackType]) -> None
         hub = Hub.current
         integration = hub.get_integration(ExcepthookIntegration)
 
diff --git a/sentry_sdk/integrations/executing.py b/sentry_sdk/integrations/executing.py
new file mode 100644
index 0000000000..4fbf729bb1
--- /dev/null
+++ b/sentry_sdk/integrations/executing.py
@@ -0,0 +1,68 @@
+from __future__ import absolute_import
+
+from sentry_sdk import Hub
+from sentry_sdk._types import MYPY
+from sentry_sdk.integrations import Integration, DidNotEnable
+from sentry_sdk.scope import add_global_event_processor
+from sentry_sdk.utils import walk_exception_chain, iter_stacks
+
+if MYPY:
+    from typing import Optional
+
+    from sentry_sdk._types import Event, Hint
+
+try:
+    import executing
+except ImportError:
+    raise DidNotEnable("executing is not installed")
+
+
+class ExecutingIntegration(Integration):
+    identifier = "executing"
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+
+        @add_global_event_processor
+        def add_executing_info(event, hint):
+            # type: (Event, Optional[Hint]) -> Optional[Event]
+            if Hub.current.get_integration(ExecutingIntegration) is None:
+                return event
+
+            if hint is None:
+                return event
+
+            exc_info = hint.get("exc_info", None)
+
+            if exc_info is None:
+                return event
+
+            exception = event.get("exception", None)
+
+            if exception is None:
+                return event
+
+            values = exception.get("values", None)
+
+            if values is None:
+                return event
+
+            for exception, (_exc_type, _exc_value, exc_tb) in zip(
+                reversed(values), walk_exception_chain(exc_info)
+            ):
+                sentry_frames = [
+                    frame
+                    for frame in exception.get("stacktrace", {}).get("frames", [])
+                    if frame.get("function")
+                ]
+                tbs = list(iter_stacks(exc_tb))
+                if len(sentry_frames) != len(tbs):
+                    continue
+
+                for sentry_frame, tb in zip(sentry_frames, tbs):
+                    frame = tb.tb_frame
+                    source = executing.Source.for_frame(frame)
+                    sentry_frame["function"] = source.code_qualname(frame.f_code)
+
+            return event
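
Note: what the executing integration buys — frames that plain introspection
reports by bare function name get their qualified name instead. A hedged
standalone sketch of the underlying library call:

    import sys
    import executing  # the third-party dependency this integration requires

    class Calculator:
        def divide(self, a, b):
            return a / b

    try:
        Calculator().divide(1, 0)
    except ZeroDivisionError:
        tb = sys.exc_info()[2]
        frame = tb.tb_next.tb_frame  # the frame of divide()
        source = executing.Source.for_frame(frame)
        # Prints "Calculator.divide" rather than the bare "divide".
        print(source.code_qualname(frame.f_code))
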
diff --git a/sentry_sdk/integrations/falcon.py b/sentry_sdk/integrations/falcon.py
index b24aac41c6..b38e4bd5b4 100644
--- a/sentry_sdk/integrations/falcon.py
+++ b/sentry_sdk/integrations/falcon.py
@@ -4,7 +4,11 @@
 from sentry_sdk.integrations import Integration, DidNotEnable
 from sentry_sdk.integrations._wsgi_common import RequestExtractor
 from sentry_sdk.integrations.wsgi import SentryWsgiMiddleware
-from sentry_sdk.utils import capture_internal_exceptions, event_from_exception
+from sentry_sdk.tracing import SOURCE_FOR_STYLE
+from sentry_sdk.utils import (
+    capture_internal_exceptions,
+    event_from_exception,
+)
 
 from sentry_sdk._types import MYPY
 
@@ -87,7 +91,7 @@ def process_request(self, req, resp, *args, **kwargs):
 class FalconIntegration(Integration):
     identifier = "falcon"
 
-    transaction_style = None
+    transaction_style = ""
 
     def __init__(self, transaction_style="uri_template"):
         # type: (str) -> None
@@ -104,7 +108,7 @@ def setup_once():
         try:
             version = tuple(map(int, FALCON_VERSION.split(".")))
         except (ValueError, TypeError):
-            raise DidNotEnable("Unparseable Falcon version: {}".format(FALCON_VERSION))
+            raise DidNotEnable("Unparsable Falcon version: {}".format(FALCON_VERSION))
 
         if version < (1, 4):
             raise DidNotEnable("Falcon 1.4 or newer required.")
@@ -153,7 +157,7 @@ def sentry_patched_handle_exception(self, *args):
         hub = Hub.current
         integration = hub.get_integration(FalconIntegration)
 
-        if integration is not None and not _is_falcon_http_error(ex):
+        if integration is not None and _exception_leads_to_http_5xx(ex):
             # If an integration is there, a client has to be there.
             client = hub.client  # type: Any
 
@@ -186,24 +190,37 @@ def sentry_patched_prepare_middleware(
     falcon.api_helpers.prepare_middleware = sentry_patched_prepare_middleware
 
 
-def _is_falcon_http_error(ex):
-    # type: (BaseException) -> bool
-    return isinstance(ex, (falcon.HTTPError, falcon.http_status.HTTPStatus))
+def _exception_leads_to_http_5xx(ex):
+    # type: (Exception) -> bool
+    is_server_error = isinstance(ex, falcon.HTTPError) and (ex.status or "").startswith(
+        "5"
+    )
+    is_unhandled_error = not isinstance(
+        ex, (falcon.HTTPError, falcon.http_status.HTTPStatus)
+    )
+    return is_server_error or is_unhandled_error
+
+
+def _set_transaction_name_and_source(event, transaction_style, request):
+    # type: (Dict[str, Any], str, falcon.Request) -> None
+    name_for_style = {
+        "uri_template": request.uri_template,
+        "path": request.path,
+    }
+    event["transaction"] = name_for_style[transaction_style]
+    event["transaction_info"] = {"source": SOURCE_FOR_STYLE[transaction_style]}
 
 
 def _make_request_event_processor(req, integration):
     # type: (falcon.Request, FalconIntegration) -> EventProcessor
 
-    def inner(event, hint):
+    def event_processor(event, hint):
         # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any]
-        if integration.transaction_style == "uri_template":
-            event["transaction"] = req.uri_template
-        elif integration.transaction_style == "path":
-            event["transaction"] = req.path
+        _set_transaction_name_and_source(event, integration.transaction_style, req)
 
         with capture_internal_exceptions():
             FalconRequestExtractor(req).extract_into_event(event)
 
         return event
 
-    return inner
+    return event_processor
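
Note: the new reporting rule in _exception_leads_to_http_5xx can be
summarized with a few cases (illustrative; relies on the private helper from
this diff and assumes Falcon reports status as a string such as
"500 Internal Server Error"):

    import falcon
    from sentry_sdk.integrations.falcon import _exception_leads_to_http_5xx

    assert _exception_leads_to_http_5xx(falcon.HTTPInternalServerError())  # 5xx
    assert not _exception_leads_to_http_5xx(falcon.HTTPNotFound())  # 4xx: skip
    assert _exception_leads_to_http_5xx(ValueError("boom"))  # unhandled: report
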
diff --git a/sentry_sdk/integrations/fastapi.py b/sentry_sdk/integrations/fastapi.py
new file mode 100644
index 0000000000..8bbf32eeff
--- /dev/null
+++ b/sentry_sdk/integrations/fastapi.py
@@ -0,0 +1,134 @@
+import asyncio
+import threading
+
+from sentry_sdk._types import MYPY
+from sentry_sdk.hub import Hub, _should_send_default_pii
+from sentry_sdk.integrations import DidNotEnable
+from sentry_sdk.integrations.starlette import (
+    StarletteIntegration,
+    StarletteRequestExtractor,
+)
+from sentry_sdk.tracing import SOURCE_FOR_STYLE, TRANSACTION_SOURCE_ROUTE
+from sentry_sdk.utils import transaction_from_function
+
+if MYPY:
+    from typing import Any, Callable, Dict
+
+    from sentry_sdk.scope import Scope
+
+try:
+    import fastapi  # type: ignore
+except ImportError:
+    raise DidNotEnable("FastAPI is not installed")
+
+
+_DEFAULT_TRANSACTION_NAME = "generic FastAPI request"
+
+
+class FastApiIntegration(StarletteIntegration):
+    identifier = "fastapi"
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+        patch_get_request_handler()
+
+
+def _set_transaction_name_and_source(scope, transaction_style, request):
+    # type: (Scope, str, Any) -> None
+    name = ""
+
+    if transaction_style == "endpoint":
+        endpoint = request.scope.get("endpoint")
+        if endpoint:
+            name = transaction_from_function(endpoint) or ""
+
+    elif transaction_style == "url":
+        route = request.scope.get("route")
+        if route:
+            path = getattr(route, "path", None)
+            if path is not None:
+                name = path
+
+    if not name:
+        name = _DEFAULT_TRANSACTION_NAME
+        source = TRANSACTION_SOURCE_ROUTE
+    else:
+        source = SOURCE_FOR_STYLE[transaction_style]
+
+    scope.set_transaction_name(name, source=source)
+
+
+def patch_get_request_handler():
+    # type: () -> None
+    old_get_request_handler = fastapi.routing.get_request_handler
+
+    def _sentry_get_request_handler(*args, **kwargs):
+        # type: (*Any, **Any) -> Any
+        dependant = kwargs.get("dependant")
+        if (
+            dependant
+            and dependant.call is not None
+            and not asyncio.iscoroutinefunction(dependant.call)
+        ):
+            old_call = dependant.call
+
+            def _sentry_call(*args, **kwargs):
+                # type: (*Any, **Any) -> Any
+                hub = Hub.current
+                with hub.configure_scope() as sentry_scope:
+                    if sentry_scope.profile is not None:
+                        sentry_scope.profile.active_thread_id = (
+                            threading.current_thread().ident
+                        )
+                    return old_call(*args, **kwargs)
+
+            dependant.call = _sentry_call
+
+        old_app = old_get_request_handler(*args, **kwargs)
+
+        async def _sentry_app(*args, **kwargs):
+            # type: (*Any, **Any) -> Any
+            hub = Hub.current
+            integration = hub.get_integration(FastApiIntegration)
+            if integration is None:
+                return await old_app(*args, **kwargs)
+
+            with hub.configure_scope() as sentry_scope:
+                request = args[0]
+
+                _set_transaction_name_and_source(
+                    sentry_scope, integration.transaction_style, request
+                )
+
+                extractor = StarletteRequestExtractor(request)
+                info = await extractor.extract_request_info()
+
+                def _make_request_event_processor(req, integration):
+                    # type: (Any, Any) -> Callable[[Dict[str, Any], Dict[str, Any]], Dict[str, Any]]
+                    def event_processor(event, hint):
+                        # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any]
+
+                        # Extract information from request
+                        request_info = event.get("request", {})
+                        if info:
+                            if "cookies" in info and _should_send_default_pii():
+                                request_info["cookies"] = info["cookies"]
+                            if "data" in info:
+                                request_info["data"] = info["data"]
+                        event["request"] = request_info
+
+                        return event
+
+                    return event_processor
+
+                sentry_scope._name = FastApiIntegration.identifier
+                sentry_scope.add_event_processor(
+                    _make_request_event_processor(request, integration)
+                )
+
+            return await old_app(*args, **kwargs)
+
+        return _sentry_app
+
+    fastapi.routing.get_request_handler = _sentry_get_request_handler
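
Note: a hedged setup sketch for the new integration. FastApiIntegration
subclasses StarletteIntegration, so the transaction_style option ("url" or
"endpoint") comes from the parent; the DSN below is a placeholder:

    import sentry_sdk
    from fastapi import FastAPI
    from sentry_sdk.integrations.starlette import StarletteIntegration
    from sentry_sdk.integrations.fastapi import FastApiIntegration

    sentry_sdk.init(
        dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder
        traces_sample_rate=1.0,
        integrations=[
            StarletteIntegration(transaction_style="endpoint"),
            FastApiIntegration(transaction_style="endpoint"),
        ],
    )

    app = FastAPI()

    @app.get("/items/{item_id}")
    async def read_item(item_id: int):
        # With "endpoint" style, the transaction is named after this function.
        return {"item_id": item_id}
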
diff --git a/sentry_sdk/integrations/flask.py b/sentry_sdk/integrations/flask.py
index ef6ae0e4f0..67c87b64f6 100644
--- a/sentry_sdk/integrations/flask.py
+++ b/sentry_sdk/integrations/flask.py
@@ -1,26 +1,23 @@
 from __future__ import absolute_import
 
-import weakref
-
+from sentry_sdk._types import MYPY
 from sentry_sdk.hub import Hub, _should_send_default_pii
-from sentry_sdk.utils import capture_internal_exceptions, event_from_exception
-from sentry_sdk.integrations import Integration, DidNotEnable
-from sentry_sdk.integrations.wsgi import SentryWsgiMiddleware
+from sentry_sdk.integrations import DidNotEnable, Integration
 from sentry_sdk.integrations._wsgi_common import RequestExtractor
-
-from sentry_sdk._types import MYPY
+from sentry_sdk.integrations.wsgi import SentryWsgiMiddleware
+from sentry_sdk.scope import Scope
+from sentry_sdk.tracing import SENTRY_TRACE_HEADER_NAME, SOURCE_FOR_STYLE
+from sentry_sdk.utils import (
+    capture_internal_exceptions,
+    event_from_exception,
+)
 
 if MYPY:
-    from sentry_sdk.integrations.wsgi import _ScopedResponse
-    from typing import Any
-    from typing import Dict
-    from werkzeug.datastructures import ImmutableTypeConversionDict
-    from werkzeug.datastructures import ImmutableMultiDict
-    from werkzeug.datastructures import FileStorage
-    from typing import Union
-    from typing import Callable
+    from typing import Any, Callable, Dict, Union
 
     from sentry_sdk._types import EventProcessor
+    from sentry_sdk.integrations.wsgi import _ScopedResponse
+    from werkzeug.datastructures import FileStorage, ImmutableMultiDict
 
 
 try:
@@ -29,22 +26,21 @@
     flask_login = None
 
 try:
-    from flask import (  # type: ignore
-        Request,
-        Flask,
-        _request_ctx_stack,
-        _app_ctx_stack,
-        __version__ as FLASK_VERSION,
-    )
+    from flask import Flask, Markup, Request  # type: ignore
+    from flask import __version__ as FLASK_VERSION
+    from flask import request as flask_request
     from flask.signals import (
-        appcontext_pushed,
-        appcontext_tearing_down,
+        before_render_template,
         got_request_exception,
         request_started,
     )
 except ImportError:
     raise DidNotEnable("Flask is not installed")
 
+try:
+    import blinker  # noqa
+except ImportError:
+    raise DidNotEnable("blinker is not installed")
 
 TRANSACTION_STYLE_VALUES = ("endpoint", "url")
 
@@ -52,7 +48,7 @@
 class FlaskIntegration(Integration):
     identifier = "flask"
 
-    transaction_style = None
+    transaction_style = ""
 
     def __init__(self, transaction_style="endpoint"):
         # type: (str) -> None
@@ -66,16 +62,19 @@ def __init__(self, transaction_style="endpoint"):
     @staticmethod
     def setup_once():
         # type: () -> None
+
+        # This version parsing is absolutely naive but the alternative is to
+        # import pkg_resources which slows down the SDK a lot.
         try:
             version = tuple(map(int, FLASK_VERSION.split(".")[:3]))
         except (ValueError, TypeError):
-            raise DidNotEnable("Unparseable Flask version: {}".format(FLASK_VERSION))
-
-        if version < (0, 11):
-            raise DidNotEnable("Flask 0.11 or newer is required.")
+            # It's probably a release candidate; we assume it's fine.
+            pass
+        else:
+            if version < (0, 10):
+                raise DidNotEnable("Flask 0.10 or newer is required.")
 
-        appcontext_pushed.connect(_push_appctx)
-        appcontext_tearing_down.connect(_pop_appctx)
+        before_render_template.connect(_add_sentry_trace)
         request_started.connect(_request_started)
         got_request_exception.connect(_capture_exception)
 
@@ -90,53 +89,57 @@ def sentry_patched_wsgi_app(self, environ, start_response):
                 environ, start_response
             )
 
-        Flask.__call__ = sentry_patched_wsgi_app  # type: ignore
+        Flask.__call__ = sentry_patched_wsgi_app
 
 
-def _push_appctx(*args, **kwargs):
-    # type: (*Flask, **Any) -> None
-    hub = Hub.current
-    if hub.get_integration(FlaskIntegration) is not None:
-        # always want to push scope regardless of whether WSGI app might already
-        # have (not the case for CLI for example)
-        scope_manager = hub.push_scope()
-        scope_manager.__enter__()
-        _app_ctx_stack.top.sentry_sdk_scope_manager = scope_manager
-        with hub.configure_scope() as scope:
-            scope._name = "flask"
+def _add_sentry_trace(sender, template, context, **extra):
+    # type: (Flask, Any, Dict[str, Any], **Any) -> None
 
+    if "sentry_trace" in context:
+        return
 
-def _pop_appctx(*args, **kwargs):
-    # type: (*Flask, **Any) -> None
-    scope_manager = getattr(_app_ctx_stack.top, "sentry_sdk_scope_manager", None)
-    if scope_manager is not None:
-        scope_manager.__exit__(None, None, None)
+    sentry_span = Hub.current.scope.span
+    context["sentry_trace"] = (
+        Markup(
+            '<meta name="%s" content="%s" />'
+            % (
+                SENTRY_TRACE_HEADER_NAME,
+                sentry_span.to_traceparent(),
+            )
+        )
+        if sentry_span
+        else ""
+    )
+
+
+def _set_transaction_name_and_source(scope, transaction_style, request):
+    # type: (Scope, str, Request) -> None
+    try:
+        name_for_style = {
+            "url": request.url_rule.rule,
+            "endpoint": request.url_rule.endpoint,
+        }
+        scope.set_transaction_name(
+            name_for_style[transaction_style],
+            source=SOURCE_FOR_STYLE[transaction_style],
+        )
+    except Exception:
+        pass
 
 
-def _request_started(sender, **kwargs):
+def _request_started(app, **kwargs):
     # type: (Flask, **Any) -> None
     hub = Hub.current
     integration = hub.get_integration(FlaskIntegration)
     if integration is None:
         return
 
-    app = _app_ctx_stack.top.app
     with hub.configure_scope() as scope:
-        request = _request_ctx_stack.top.request
-
-        # Rely on WSGI middleware to start a trace
-        try:
-            if integration.transaction_style == "endpoint":
-                scope.transaction = request.url_rule.endpoint
-            elif integration.transaction_style == "url":
-                scope.transaction = request.url_rule.rule
-        except Exception:
-            pass
-
-        weak_request = weakref.ref(request)
-        evt_processor = _make_request_event_processor(
-            app, weak_request, integration  # type: ignore
-        )
+        # Set the transaction name and source here,
+        # but rely on WSGI middleware to actually start the transaction
+        request = flask_request._get_current_object()
+        _set_transaction_name_and_source(scope, integration.transaction_style, request)
+        evt_processor = _make_request_event_processor(app, request, integration)
         scope.add_event_processor(evt_processor)
 
 
@@ -146,8 +149,11 @@ def env(self):
         return self.request.environ
 
     def cookies(self):
-        # type: () -> ImmutableTypeConversionDict[Any, Any]
-        return self.request.cookies
+        # type: () -> Dict[Any, Any]
+        return {
+            k: v[0] if isinstance(v, list) and len(v) == 1 else v
+            for k, v in self.request.cookies.items()
+        }
 
     def raw_data(self):
         # type: () -> bytes
@@ -174,11 +180,11 @@ def size_of_file(self, file):
         return file.content_length
 
 
-def _make_request_event_processor(app, weak_request, integration):
+def _make_request_event_processor(app, request, integration):
     # type: (Flask, Callable[[], Request], FlaskIntegration) -> EventProcessor
+
     def inner(event, hint):
         # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any]
-        request = weak_request()
 
         # if the request is gone we are fine not logging the data from
         # it.  This might happen if the processor is pushed away to
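
Note: thanks to the new before_render_template hook, any Jinja template
rendered through Flask can emit the tracing <meta> tag by referencing the
injected variable, letting a browser-side SDK pick up the trace. A hedged
sketch (template inlined as a Python string to keep one language):

    from flask import Flask, render_template_string

    app = Flask(__name__)

    # `sentry_trace` is injected by _add_sentry_trace above; rendering it
    # emits a <meta> tag carrying the current traceparent (or "" when no
    # span is active).
    PAGE = """<html><head>{{ sentry_trace }}</head><body>hi</body></html>"""

    @app.route("/")
    def index():
        return render_template_string(PAGE)
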
diff --git a/sentry_sdk/integrations/gcp.py b/sentry_sdk/integrations/gcp.py
new file mode 100644
index 0000000000..a69637a409
--- /dev/null
+++ b/sentry_sdk/integrations/gcp.py
@@ -0,0 +1,229 @@
+from datetime import datetime, timedelta
+from os import environ
+import sys
+from sentry_sdk.consts import OP
+
+from sentry_sdk.hub import Hub, _should_send_default_pii
+from sentry_sdk.tracing import TRANSACTION_SOURCE_COMPONENT, Transaction
+from sentry_sdk._compat import reraise
+from sentry_sdk.utils import (
+    AnnotatedValue,
+    capture_internal_exceptions,
+    event_from_exception,
+    logger,
+    TimeoutThread,
+)
+from sentry_sdk.integrations import Integration
+from sentry_sdk.integrations._wsgi_common import _filter_headers
+
+from sentry_sdk._types import MYPY
+
+# Constants
+TIMEOUT_WARNING_BUFFER = 1.5  # Buffer time required to send timeout warning to Sentry
+MILLIS_TO_SECONDS = 1000.0
+
+if MYPY:
+    from typing import Any
+    from typing import TypeVar
+    from typing import Callable
+    from typing import Optional
+
+    from sentry_sdk._types import EventProcessor, Event, Hint
+
+    F = TypeVar("F", bound=Callable[..., Any])
+
+
+def _wrap_func(func):
+    # type: (F) -> F
+    def sentry_func(functionhandler, gcp_event, *args, **kwargs):
+        # type: (Any, Any, *Any, **Any) -> Any
+
+        hub = Hub.current
+        integration = hub.get_integration(GcpIntegration)
+        if integration is None:
+            return func(functionhandler, gcp_event, *args, **kwargs)
+
+        # If an integration is there, a client has to be there.
+        client = hub.client  # type: Any
+
+        configured_time = environ.get("FUNCTION_TIMEOUT_SEC")
+        if not configured_time:
+            logger.debug(
+                "The configured timeout could not be fetched from Cloud Functions configuration."
+            )
+            return func(functionhandler, gcp_event, *args, **kwargs)
+
+        configured_time = int(configured_time)
+
+        initial_time = datetime.utcnow()
+
+        with hub.push_scope() as scope:
+            with capture_internal_exceptions():
+                scope.clear_breadcrumbs()
+                scope.add_event_processor(
+                    _make_request_event_processor(
+                        gcp_event, configured_time, initial_time
+                    )
+                )
+                scope.set_tag("gcp_region", environ.get("FUNCTION_REGION"))
+                timeout_thread = None
+                if (
+                    integration.timeout_warning
+                    and configured_time > TIMEOUT_WARNING_BUFFER
+                ):
+                    waiting_time = configured_time - TIMEOUT_WARNING_BUFFER
+
+                    timeout_thread = TimeoutThread(waiting_time, configured_time)
+
+                    # Starting the thread to raise timeout warning exception
+                    timeout_thread.start()
+
+            headers = {}
+            if hasattr(gcp_event, "headers"):
+                headers = gcp_event.headers
+            transaction = Transaction.continue_from_headers(
+                headers,
+                op=OP.FUNCTION_GCP,
+                name=environ.get("FUNCTION_NAME", ""),
+                source=TRANSACTION_SOURCE_COMPONENT,
+            )
+            sampling_context = {
+                "gcp_env": {
+                    "function_name": environ.get("FUNCTION_NAME"),
+                    "function_entry_point": environ.get("ENTRY_POINT"),
+                    "function_identity": environ.get("FUNCTION_IDENTITY"),
+                    "function_region": environ.get("FUNCTION_REGION"),
+                    "function_project": environ.get("GCP_PROJECT"),
+                },
+                "gcp_event": gcp_event,
+            }
+            with hub.start_transaction(
+                transaction, custom_sampling_context=sampling_context
+            ):
+                try:
+                    return func(functionhandler, gcp_event, *args, **kwargs)
+                except Exception:
+                    exc_info = sys.exc_info()
+                    sentry_event, hint = event_from_exception(
+                        exc_info,
+                        client_options=client.options,
+                        mechanism={"type": "gcp", "handled": False},
+                    )
+                    hub.capture_event(sentry_event, hint=hint)
+                    reraise(*exc_info)
+                finally:
+                    if timeout_thread:
+                        timeout_thread.stop()
+                    # Flush out the event queue
+                    hub.flush()
+
+    return sentry_func  # type: ignore
+
+
+class GcpIntegration(Integration):
+    identifier = "gcp"
+
+    def __init__(self, timeout_warning=False):
+        # type: (bool) -> None
+        self.timeout_warning = timeout_warning
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+        import __main__ as gcp_functions
+
+        if not hasattr(gcp_functions, "worker_v1"):
+            logger.warning(
+                "GcpIntegration currently supports only Python 3.7 runtime environment."
+            )
+            return
+
+        worker1 = gcp_functions.worker_v1
+
+        worker1.FunctionHandler.invoke_user_function = _wrap_func(
+            worker1.FunctionHandler.invoke_user_function
+        )
+
+
+def _make_request_event_processor(gcp_event, configured_timeout, initial_time):
+    # type: (Any, Any, Any) -> EventProcessor
+
+    def event_processor(event, hint):
+        # type: (Event, Hint) -> Optional[Event]
+
+        final_time = datetime.utcnow()
+        time_diff = final_time - initial_time
+
+        execution_duration_in_millis = time_diff.microseconds / MILLIS_TO_SECONDS
+
+        extra = event.setdefault("extra", {})
+        extra["google cloud functions"] = {
+            "function_name": environ.get("FUNCTION_NAME"),
+            "function_entry_point": environ.get("ENTRY_POINT"),
+            "function_identity": environ.get("FUNCTION_IDENTITY"),
+            "function_region": environ.get("FUNCTION_REGION"),
+            "function_project": environ.get("GCP_PROJECT"),
+            "execution_duration_in_millis": execution_duration_in_millis,
+            "configured_timeout_in_seconds": configured_timeout,
+        }
+
+        extra["google cloud logs"] = {
+            "url": _get_google_cloud_logs_url(final_time),
+        }
+
+        request = event.get("request", {})
+
+        request["url"] = "gcp:///{}".format(environ.get("FUNCTION_NAME"))
+
+        if hasattr(gcp_event, "method"):
+            request["method"] = gcp_event.method
+
+        if hasattr(gcp_event, "query_string"):
+            request["query_string"] = gcp_event.query_string.decode("utf-8")
+
+        if hasattr(gcp_event, "headers"):
+            request["headers"] = _filter_headers(gcp_event.headers)
+
+        if _should_send_default_pii():
+            if hasattr(gcp_event, "data"):
+                request["data"] = gcp_event.data
+        else:
+            if hasattr(gcp_event, "data"):
+                # Unfortunately we couldn't find a way to get a structured
+                # body from the GCP event, so every body is unstructured to us.
+                request["data"] = AnnotatedValue.removed_because_raw_data()
+
+        event["request"] = request
+
+        return event
+
+    return event_processor
+
+
+def _get_google_cloud_logs_url(final_time):
+    # type: (datetime) -> str
+    """
+    Generates a Google Cloud Logs console URL based on the environment variables
+    Arguments:
+        final_time {datetime} -- Final time
+    Returns:
+        str -- Google Cloud Logs Console URL to logs.
+    """
+    hour_ago = final_time - timedelta(hours=1)
+    formatstring = "%Y-%m-%dT%H:%M:%SZ"
+
+    url = (
+        "https://console.cloud.google.com/logs/viewer?project={project}&resource=cloud_function"
+        "%2Ffunction_name%2F{function_name}%2Fregion%2F{region}&minLogLevel=0&expandAll=false"
+        "×tamp={timestamp_end}&customFacets=&limitCustomFacetWidth=true"
+        "&dateRangeStart={timestamp_start}&dateRangeEnd={timestamp_end}"
+        "&interval=PT1H&scrollTimestamp={timestamp_end}"
+    ).format(
+        project=environ.get("GCP_PROJECT"),
+        function_name=environ.get("FUNCTION_NAME"),
+        region=environ.get("FUNCTION_REGION"),
+        timestamp_end=final_time.strftime(formatstring),
+        timestamp_start=hour_ago.strftime(formatstring),
+    )
+
+    return url
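
Note: a hedged sketch of wiring the integration into an HTTP Cloud Function
(Python 3.7 runtime, per the setup_once() check above); the DSN is a
placeholder:

    import sentry_sdk
    from sentry_sdk.integrations.gcp import GcpIntegration

    sentry_sdk.init(
        dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder
        integrations=[GcpIntegration(timeout_warning=True)],
        traces_sample_rate=1.0,
    )

    def hello_gcp(request):
        # Runs inside the patched invoke_user_function; unhandled errors are
        # reported, and a timeout warning fires FUNCTION_TIMEOUT_SEC minus
        # the 1.5 s buffer into execution.
        return "ok"
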
diff --git a/sentry_sdk/integrations/httpx.py b/sentry_sdk/integrations/httpx.py
new file mode 100644
index 0000000000..2e9142d2b8
--- /dev/null
+++ b/sentry_sdk/integrations/httpx.py
@@ -0,0 +1,95 @@
+from sentry_sdk import Hub
+from sentry_sdk.consts import OP
+from sentry_sdk.integrations import Integration, DidNotEnable
+from sentry_sdk.utils import logger
+
+from sentry_sdk._types import MYPY
+
+if MYPY:
+    from typing import Any
+
+
+try:
+    from httpx import AsyncClient, Client, Request, Response  # type: ignore
+except ImportError:
+    raise DidNotEnable("httpx is not installed")
+
+__all__ = ["HttpxIntegration"]
+
+
+class HttpxIntegration(Integration):
+    identifier = "httpx"
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+        """
+        httpx has its own transport layer and can be customized when needed,
+        so patch Client.send and AsyncClient.send to support both synchronous and async interfaces.
+        """
+        _install_httpx_client()
+        _install_httpx_async_client()
+
+
+def _install_httpx_client():
+    # type: () -> None
+    real_send = Client.send
+
+    def send(self, request, **kwargs):
+        # type: (Client, Request, **Any) -> Response
+        hub = Hub.current
+        if hub.get_integration(HttpxIntegration) is None:
+            return real_send(self, request, **kwargs)
+
+        with hub.start_span(
+            op=OP.HTTP_CLIENT, description="%s %s" % (request.method, request.url)
+        ) as span:
+            span.set_data("method", request.method)
+            span.set_data("url", str(request.url))
+            for key, value in hub.iter_trace_propagation_headers():
+                logger.debug(
+                    "[Tracing] Adding `{key}` header {value} to outgoing request to {url}.".format(
+                        key=key, value=value, url=request.url
+                    )
+                )
+                request.headers[key] = value
+            rv = real_send(self, request, **kwargs)
+
+            span.set_data("status_code", rv.status_code)
+            span.set_http_status(rv.status_code)
+            span.set_data("reason", rv.reason_phrase)
+            return rv
+
+    Client.send = send
+
+
+def _install_httpx_async_client():
+    # type: () -> None
+    real_send = AsyncClient.send
+
+    async def send(self, request, **kwargs):
+        # type: (AsyncClient, Request, **Any) -> Response
+        hub = Hub.current
+        if hub.get_integration(HttpxIntegration) is None:
+            return await real_send(self, request, **kwargs)
+
+        with hub.start_span(
+            op=OP.HTTP_CLIENT, description="%s %s" % (request.method, request.url)
+        ) as span:
+            span.set_data("method", request.method)
+            span.set_data("url", str(request.url))
+            for key, value in hub.iter_trace_propagation_headers():
+                logger.debug(
+                    "[Tracing] Adding `{key}` header {value} to outgoing request to {url}.".format(
+                        key=key, value=value, url=request.url
+                    )
+                )
+                request.headers[key] = value
+            rv = await real_send(self, request, **kwargs)
+
+            span.set_data("status_code", rv.status_code)
+            span.set_http_status(rv.status_code)
+            span.set_data("reason", rv.reason_phrase)
+            return rv
+
+    AsyncClient.send = send
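
Note: a hedged usage sketch — once enabled, every Client/AsyncClient.send
gets an `http.client` span plus the outgoing Sentry trace headers shown
above; the DSN is a placeholder:

    import httpx
    import sentry_sdk
    from sentry_sdk.integrations.httpx import HttpxIntegration

    sentry_sdk.init(
        dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder
        integrations=[HttpxIntegration()],
        traces_sample_rate=1.0,
    )

    with sentry_sdk.start_transaction(op="task", name="fetch"):
        # Recorded as a child span; sentry-trace/baggage headers attached.
        httpx.Client().get("https://example.com")
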
diff --git a/sentry_sdk/integrations/logging.py b/sentry_sdk/integrations/logging.py
index c25aef4c09..86cea09bd8 100644
--- a/sentry_sdk/integrations/logging.py
+++ b/sentry_sdk/integrations/logging.py
@@ -2,6 +2,7 @@
 
 import logging
 import datetime
+from fnmatch import fnmatch
 
 from sentry_sdk.hub import Hub
 from sentry_sdk.utils import (
@@ -23,6 +24,16 @@
 
 DEFAULT_LEVEL = logging.INFO
 DEFAULT_EVENT_LEVEL = logging.ERROR
+LOGGING_TO_EVENT_LEVEL = {
+    logging.NOTSET: "notset",
+    logging.DEBUG: "debug",
+    logging.INFO: "info",
+    logging.WARN: "warning",  # WARN is the same as WARNING
+    logging.WARNING: "warning",
+    logging.ERROR: "error",
+    logging.FATAL: "fatal",
+    logging.CRITICAL: "fatal",  # CRITICAL is the same as FATAL
+}
 
 # Capturing events from those loggers causes recursion errors. We cannot allow
 # the user to unconditionally create events from those loggers under any
@@ -30,7 +41,9 @@
 #
 # Note: Ignoring by logger name here is better than mucking with thread-locals.
 # We do not necessarily know whether thread-locals work 100% correctly in the user's environment.
-_IGNORED_LOGGERS = set(["sentry_sdk.errors", "urllib3.connectionpool"])
+_IGNORED_LOGGERS = set(
+    ["sentry_sdk.errors", "urllib3.connectionpool", "urllib3.connection"]
+)
 
 
 def ignore_logger(
@@ -75,7 +88,7 @@ def _handle_record(self, record):
     @staticmethod
     def setup_once():
         # type: () -> None
-        old_callhandlers = logging.Logger.callHandlers  # type: ignore
+        old_callhandlers = logging.Logger.callHandlers
 
         def sentry_patched_callhandlers(self, record):
             # type: (Any, LogRecord) -> Any
@@ -96,14 +109,18 @@ def sentry_patched_callhandlers(self, record):
 
 def _can_record(record):
     # type: (LogRecord) -> bool
-    return record.name not in _IGNORED_LOGGERS
+    """Prevents ignored loggers from recording"""
+    for logger in _IGNORED_LOGGERS:
+        if fnmatch(record.name, logger):
+            return False
+    return True
 
 
 def _breadcrumb_from_record(record):
     # type: (LogRecord) -> Dict[str, Any]
     return {
-        "ty": "log",
-        "level": _logging_to_event_level(record.levelname),
+        "type": "log",
+        "level": _logging_to_event_level(record),
         "category": record.name,
         "message": record.message,
         "timestamp": datetime.datetime.utcfromtimestamp(record.created),
@@ -111,9 +128,11 @@ def _breadcrumb_from_record(record):
     }
 
 
-def _logging_to_event_level(levelname):
-    # type: (str) -> str
-    return {"critical": "fatal"}.get(levelname.lower(), levelname.lower())
+def _logging_to_event_level(record):
+    # type: (LogRecord) -> str
+    return LOGGING_TO_EVENT_LEVEL.get(
+        record.levelno, record.levelname.lower() if record.levelname else ""
+    )
 
 
 COMMON_RECORD_ATTRS = frozenset(
@@ -181,7 +200,12 @@ def _emit(self, record):
         client_options = hub.client.options
 
         # exc_info might be None or (None, None, None)
-        if record.exc_info is not None and record.exc_info[0] is not None:
+        #
+        # exc_info may also be any falsy value due to Python stdlib being
+        # liberal with what it receives and Celery's billiard being "liberal"
+        # with what it sends. See
+        # https://github.com/getsentry/sentry-python/issues/904
+        if record.exc_info and record.exc_info[0] is not None:
             event, hint = event_from_exception(
                 record.exc_info,
                 client_options=client_options,
@@ -208,9 +232,29 @@ def _emit(self, record):
 
         hint["log_record"] = record
 
-        event["level"] = _logging_to_event_level(record.levelname)
+        event["level"] = _logging_to_event_level(record)
         event["logger"] = record.name
-        event["logentry"] = {"message": to_string(record.msg), "params": record.args}
+
+        # Log records from the `warnings` module are captured as separate issues
+        record_captured_from_warnings_module = (
+            record.name == "py.warnings" and record.msg == "%s"
+        )
+        if record_captured_from_warnings_module:
+            # use the actual message and not "%s" as the message
+            # this prevents grouping all warnings under one "%s" issue
+            msg = record.args[0]  # type: ignore
+
+            event["logentry"] = {
+                "message": msg,
+                "params": (),
+            }
+
+        else:
+            event["logentry"] = {
+                "message": to_string(record.msg),
+                "params": record.args,
+            }
+
         event["extra"] = _extra_from_record(record)
 
         hub.capture_event(event, hint=hint)
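
Note: because _can_record now matches logger names with fnmatch,
ignore_logger accepts glob patterns in addition to exact names. A hedged
sketch:

    from sentry_sdk.integrations.logging import ignore_logger

    ignore_logger("myapp.noisy")     # exact name, as before
    ignore_logger("myapp.vendor.*")  # glob, matched per record via fnmatch
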
diff --git a/sentry_sdk/integrations/opentelemetry/__init__.py b/sentry_sdk/integrations/opentelemetry/__init__.py
new file mode 100644
index 0000000000..e0020204d5
--- /dev/null
+++ b/sentry_sdk/integrations/opentelemetry/__init__.py
@@ -0,0 +1,7 @@
+from sentry_sdk.integrations.opentelemetry.span_processor import (  # noqa: F401
+    SentrySpanProcessor,
+)
+
+from sentry_sdk.integrations.opentelemetry.propagator import (  # noqa: F401
+    SentryPropagator,
+)
diff --git a/sentry_sdk/integrations/opentelemetry/consts.py b/sentry_sdk/integrations/opentelemetry/consts.py
new file mode 100644
index 0000000000..79663dd670
--- /dev/null
+++ b/sentry_sdk/integrations/opentelemetry/consts.py
@@ -0,0 +1,6 @@
+from opentelemetry.context import (  # type: ignore
+    create_key,
+)
+
+SENTRY_TRACE_KEY = create_key("sentry-trace")
+SENTRY_BAGGAGE_KEY = create_key("sentry-baggage")
diff --git a/sentry_sdk/integrations/opentelemetry/propagator.py b/sentry_sdk/integrations/opentelemetry/propagator.py
new file mode 100644
index 0000000000..7b2a88e347
--- /dev/null
+++ b/sentry_sdk/integrations/opentelemetry/propagator.py
@@ -0,0 +1,113 @@
+from opentelemetry import trace  # type: ignore
+from opentelemetry.context import (  # type: ignore
+    Context,
+    get_current,
+    set_value,
+)
+from opentelemetry.propagators.textmap import (  # type: ignore
+    CarrierT,
+    Getter,
+    Setter,
+    TextMapPropagator,
+    default_getter,
+    default_setter,
+)
+from opentelemetry.trace import (  # type: ignore
+    TraceFlags,
+    NonRecordingSpan,
+    SpanContext,
+)
+from sentry_sdk.integrations.opentelemetry.consts import (
+    SENTRY_BAGGAGE_KEY,
+    SENTRY_TRACE_KEY,
+)
+from sentry_sdk.integrations.opentelemetry.span_processor import (
+    SentrySpanProcessor,
+)
+
+from sentry_sdk.tracing import (
+    BAGGAGE_HEADER_NAME,
+    SENTRY_TRACE_HEADER_NAME,
+)
+from sentry_sdk.tracing_utils import Baggage, extract_sentrytrace_data
+from sentry_sdk._types import MYPY
+
+if MYPY:
+    from typing import Optional
+    from typing import Set
+
+
+class SentryPropagator(TextMapPropagator):  # type: ignore
+    """
+    Propagates tracing headers for Sentry's tracing system in a way OTel understands.
+    """
+
+    def extract(self, carrier, context=None, getter=default_getter):
+        # type: (CarrierT, Optional[Context], Getter) -> Context
+        if context is None:
+            context = get_current()
+
+        sentry_trace = getter.get(carrier, SENTRY_TRACE_HEADER_NAME)
+        if not sentry_trace:
+            return context
+
+        sentrytrace = extract_sentrytrace_data(sentry_trace[0])
+        if not sentrytrace:
+            return context
+
+        context = set_value(SENTRY_TRACE_KEY, sentrytrace, context)
+
+        trace_id, span_id = sentrytrace["trace_id"], sentrytrace["parent_span_id"]
+
+        span_context = SpanContext(
+            trace_id=int(trace_id, 16),  # type: ignore
+            span_id=int(span_id, 16),  # type: ignore
+            # we simulate a sampled trace on the otel side and leave the sampling to sentry
+            trace_flags=TraceFlags(TraceFlags.SAMPLED),
+            is_remote=True,
+        )
+
+        baggage_header = getter.get(carrier, BAGGAGE_HEADER_NAME)
+
+        if baggage_header:
+            baggage = Baggage.from_incoming_header(baggage_header[0])
+        else:
+            # If there's an incoming sentry-trace but no incoming baggage header,
+            # for instance in traces coming from older SDKs,
+            # baggage will be empty and frozen and won't be populated as head SDK.
+            baggage = Baggage(sentry_items={})
+
+        baggage.freeze()
+        context = set_value(SENTRY_BAGGAGE_KEY, baggage, context)
+
+        span = NonRecordingSpan(span_context)
+        modified_context = trace.set_span_in_context(span, context)
+        return modified_context
+
+    def inject(self, carrier, context=None, setter=default_setter):
+        # type: (CarrierT, Optional[Context], Setter) -> None
+        if context is None:
+            context = get_current()
+
+        current_span = trace.get_current_span(context)
+
+        if not current_span.context.is_valid:
+            return
+
+        span_id = trace.format_span_id(current_span.context.span_id)
+
+        span_map = SentrySpanProcessor().otel_span_map
+        sentry_span = span_map.get(span_id, None)
+        if not sentry_span:
+            return
+
+        setter.set(carrier, SENTRY_TRACE_HEADER_NAME, sentry_span.to_traceparent())
+
+        baggage = sentry_span.containing_transaction.get_baggage()
+        if baggage:
+            setter.set(carrier, BAGGAGE_HEADER_NAME, baggage.serialize())
+
+    @property
+    def fields(self):
+        # type: () -> Set[str]
+        return {SENTRY_TRACE_HEADER_NAME, BAGGAGE_HEADER_NAME}
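
The propagator only defines how `sentry-trace` and `baggage` move in and out of carriers; OTel still has to be told to use it. A minimal sketch of the registration, assuming the `opentelemetry` API package is installed:

    from opentelemetry.propagate import set_global_textmap
    from sentry_sdk.integrations.opentelemetry import SentryPropagator

    # Incoming requests: extract() builds a remote, sampled span context.
    # Outgoing requests: inject() writes sentry-trace and baggage headers.
    set_global_textmap(SentryPropagator())
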
diff --git a/sentry_sdk/integrations/opentelemetry/span_processor.py b/sentry_sdk/integrations/opentelemetry/span_processor.py
new file mode 100644
index 0000000000..0017708a97
--- /dev/null
+++ b/sentry_sdk/integrations/opentelemetry/span_processor.py
@@ -0,0 +1,291 @@
+from datetime import datetime
+
+from opentelemetry.context import get_value  # type: ignore
+from opentelemetry.sdk.trace import SpanProcessor  # type: ignore
+from opentelemetry.semconv.trace import SpanAttributes  # type: ignore
+from opentelemetry.trace import (  # type: ignore
+    format_span_id,
+    format_trace_id,
+    get_current_span,
+    SpanContext,
+    Span as OTelSpan,
+    SpanKind,
+)
+from opentelemetry.trace.span import (  # type: ignore
+    INVALID_SPAN_ID,
+    INVALID_TRACE_ID,
+)
+from sentry_sdk.consts import INSTRUMENTER
+from sentry_sdk.hub import Hub
+from sentry_sdk.integrations.opentelemetry.consts import (
+    SENTRY_BAGGAGE_KEY,
+    SENTRY_TRACE_KEY,
+)
+from sentry_sdk.scope import add_global_event_processor
+from sentry_sdk.tracing import Transaction, Span as SentrySpan
+from sentry_sdk.utils import Dsn
+from sentry_sdk._types import MYPY
+
+from urllib3.util import parse_url as urlparse  # type: ignore
+
+if MYPY:
+    from typing import Any
+    from typing import Dict
+    from typing import Union
+    from sentry_sdk._types import Event, Hint
+
+OPEN_TELEMETRY_CONTEXT = "otel"
+
+
+def link_trace_context_to_error_event(event, otel_span_map):
+    # type: (Event, Dict[str, Union[Transaction, OTelSpan]]) -> Event
+    hub = Hub.current
+    if not hub:
+        return event
+
+    if hub.client and hub.client.options["instrumenter"] != INSTRUMENTER.OTEL:
+        return event
+
+    # `event` is a dict, so `hasattr(event, "type")` would always be False
+    if event.get("type") == "transaction":
+        return event
+
+    otel_span = get_current_span()
+    if not otel_span:
+        return event
+
+    ctx = otel_span.get_span_context()
+    trace_id = format_trace_id(ctx.trace_id)
+    span_id = format_span_id(ctx.span_id)
+
+    if trace_id == INVALID_TRACE_ID or span_id == INVALID_SPAN_ID:
+        return event
+
+    sentry_span = otel_span_map.get(span_id, None)
+    if not sentry_span:
+        return event
+
+    contexts = event.setdefault("contexts", {})
+    contexts.setdefault("trace", {}).update(sentry_span.get_trace_context())
+
+    return event
+
+
+class SentrySpanProcessor(SpanProcessor):  # type: ignore
+    """
+    Converts OTel spans into Sentry spans so they can be sent to the Sentry backend.
+    """
+
+    # The mapping from otel span ids to sentry spans
+    otel_span_map = {}  # type: Dict[str, Union[Transaction, OTelSpan]]
+
+    def __new__(cls):
+        # type: () -> SentrySpanProcessor
+        if not hasattr(cls, "instance"):
+            cls.instance = super(SentrySpanProcessor, cls).__new__(cls)
+
+        return cls.instance
+
+    def __init__(self):
+        # type: () -> None
+        @add_global_event_processor
+        def global_event_processor(event, hint):
+            # type: (Event, Hint) -> Event
+            return link_trace_context_to_error_event(event, self.otel_span_map)
+
+    def on_start(self, otel_span, parent_context=None):
+        # type: (OTelSpan, SpanContext) -> None
+        hub = Hub.current
+        if not hub:
+            return
+
+        if not hub.client or (hub.client and not hub.client.dsn):
+            return
+
+        try:
+            _ = Dsn(hub.client.dsn or "")
+        except Exception:
+            return
+
+        if hub.client and hub.client.options["instrumenter"] != INSTRUMENTER.OTEL:
+            return
+
+        if not otel_span.context.is_valid:
+            return
+
+        if self._is_sentry_span(hub, otel_span):
+            return
+
+        trace_data = self._get_trace_data(otel_span, parent_context)
+
+        parent_span_id = trace_data["parent_span_id"]
+        sentry_parent_span = (
+            self.otel_span_map.get(parent_span_id, None) if parent_span_id else None
+        )
+
+        sentry_span = None
+        if sentry_parent_span:
+            sentry_span = sentry_parent_span.start_child(
+                span_id=trace_data["span_id"],
+                description=otel_span.name,
+                start_timestamp=datetime.fromtimestamp(otel_span.start_time / 1e9),
+                instrumenter=INSTRUMENTER.OTEL,
+            )
+        else:
+            sentry_span = hub.start_transaction(
+                name=otel_span.name,
+                span_id=trace_data["span_id"],
+                parent_span_id=parent_span_id,
+                trace_id=trace_data["trace_id"],
+                baggage=trace_data["baggage"],
+                start_timestamp=datetime.fromtimestamp(otel_span.start_time / 1e9),
+                instrumenter=INSTRUMENTER.OTEL,
+            )
+
+        self.otel_span_map[trace_data["span_id"]] = sentry_span
+
+    def on_end(self, otel_span):
+        # type: (OTelSpan) -> None
+        hub = Hub.current
+        if not hub:
+            return
+
+        if hub.client and hub.client.options["instrumenter"] != INSTRUMENTER.OTEL:
+            return
+
+        if not otel_span.context.is_valid:
+            return
+
+        span_id = format_span_id(otel_span.context.span_id)
+        sentry_span = self.otel_span_map.pop(span_id, None)
+        if not sentry_span:
+            return
+
+        sentry_span.op = otel_span.name
+
+        if isinstance(sentry_span, Transaction):
+            sentry_span.name = otel_span.name
+            sentry_span.set_context(
+                OPEN_TELEMETRY_CONTEXT, self._get_otel_context(otel_span)
+            )
+
+        else:
+            self._update_span_with_otel_data(sentry_span, otel_span)
+
+        sentry_span.finish(
+            end_timestamp=datetime.fromtimestamp(otel_span.end_time / 1e9)
+        )
+
+    def _is_sentry_span(self, hub, otel_span):
+        # type: (Hub, OTelSpan) -> bool
+        """
+        Break an infinite loop:
+        HTTP requests to Sentry are caught by OTel and sent to Sentry again.
+        """
+        otel_span_url = otel_span.attributes.get(SpanAttributes.HTTP_URL, None)
+        dsn_url = hub.client and Dsn(hub.client.dsn or "").netloc
+
+        if otel_span_url and dsn_url in otel_span_url:
+            return True
+
+        return False
+
+    def _get_otel_context(self, otel_span):
+        # type: (OTelSpan) -> Dict[str, Any]
+        """
+        Returns the OTel context for Sentry.
+        See: https://develop.sentry.dev/sdk/performance/opentelemetry/#step-5-add-opentelemetry-context
+        """
+        ctx = {}
+
+        if otel_span.attributes:
+            ctx["attributes"] = dict(otel_span.attributes)
+
+        if otel_span.resource.attributes:
+            ctx["resource"] = dict(otel_span.resource.attributes)
+
+        return ctx
+
+    def _get_trace_data(self, otel_span, parent_context):
+        # type: (OTelSpan, SpanContext) -> Dict[str, Any]
+        """
+        Extracts tracing information from one OTel span and its parent OTel context.
+        """
+        trace_data = {}
+
+        span_id = format_span_id(otel_span.context.span_id)
+        trace_data["span_id"] = span_id
+
+        trace_id = format_trace_id(otel_span.context.trace_id)
+        trace_data["trace_id"] = trace_id
+
+        parent_span_id = (
+            format_span_id(otel_span.parent.span_id) if otel_span.parent else None
+        )
+        trace_data["parent_span_id"] = parent_span_id
+
+        sentry_trace_data = get_value(SENTRY_TRACE_KEY, parent_context)
+        trace_data["parent_sampled"] = (
+            sentry_trace_data["parent_sampled"] if sentry_trace_data else None
+        )
+
+        baggage = get_value(SENTRY_BAGGAGE_KEY, parent_context)
+        trace_data["baggage"] = baggage
+
+        return trace_data
+
+    def _update_span_with_otel_data(self, sentry_span, otel_span):
+        # type: (SentrySpan, OTelSpan) -> None
+        """
+        Convert OTel span data and update the Sentry span with it.
+        This should eventually happen on the server when ingesting the spans.
+        """
+        for key, val in otel_span.attributes.items():
+            sentry_span.set_data(key, val)
+
+        sentry_span.set_data("otel.kind", otel_span.kind)
+
+        op = otel_span.name
+        description = otel_span.name
+
+        http_method = otel_span.attributes.get(SpanAttributes.HTTP_METHOD, None)
+        db_query = otel_span.attributes.get(SpanAttributes.DB_SYSTEM, None)
+
+        if http_method:
+            op = "http"
+
+            if otel_span.kind == SpanKind.SERVER:
+                op += ".server"
+            elif otel_span.kind == SpanKind.CLIENT:
+                op += ".client"
+
+            description = http_method
+
+            peer_name = otel_span.attributes.get(SpanAttributes.NET_PEER_NAME, None)
+            if peer_name:
+                description += " {}".format(peer_name)
+
+            target = otel_span.attributes.get(SpanAttributes.HTTP_TARGET, None)
+            if target:
+                description += " {}".format(target)
+
+            if not peer_name and not target:
+                url = otel_span.attributes.get(SpanAttributes.HTTP_URL, None)
+                if url:
+                    parsed_url = urlparse(url)
+                    url = f"{parsed_url.scheme}://{parsed_url.netloc}{parsed_url.path}"
+                    description += " {}".format(url)
+
+            status_code = otel_span.attributes.get(
+                SpanAttributes.HTTP_STATUS_CODE, None
+            )
+            if status_code:
+                sentry_span.set_http_status(status_code)
+
+        elif db_query:
+            op = "db"
+            statement = otel_span.attributes.get(SpanAttributes.DB_STATEMENT, None)
+            if statement:
+                description = statement
+
+        sentry_span.op = op
+        sentry_span.description = description
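
Putting the two pieces together: the processor mirrors OTel spans into Sentry transactions/spans, and the propagator handles the headers. A minimal end-to-end sketch (the DSN is a placeholder; `instrumenter="otel"` disables Sentry's own span creation so OTel is the single source of spans):

    import sentry_sdk
    from opentelemetry import trace
    from opentelemetry.propagate import set_global_textmap
    from opentelemetry.sdk.trace import TracerProvider
    from sentry_sdk.integrations.opentelemetry import (
        SentryPropagator,
        SentrySpanProcessor,
    )

    sentry_sdk.init(
        dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder
        instrumenter="otel",
        traces_sample_rate=1.0,
    )

    provider = TracerProvider()
    # Every OTel span start/end is converted by on_start/on_end above.
    provider.add_span_processor(SentrySpanProcessor())
    trace.set_tracer_provider(provider)
    set_global_textmap(SentryPropagator())
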
diff --git a/sentry_sdk/integrations/pure_eval.py b/sentry_sdk/integrations/pure_eval.py
new file mode 100644
index 0000000000..c804447796
--- /dev/null
+++ b/sentry_sdk/integrations/pure_eval.py
@@ -0,0 +1,138 @@
+from __future__ import absolute_import
+
+import ast
+
+from sentry_sdk import Hub, serializer
+from sentry_sdk._types import MYPY
+from sentry_sdk.integrations import Integration, DidNotEnable
+from sentry_sdk.scope import add_global_event_processor
+from sentry_sdk.utils import walk_exception_chain, iter_stacks
+
+if MYPY:
+    from typing import Optional, Dict, Any, Tuple, List
+    from types import FrameType
+
+    from sentry_sdk._types import Event, Hint
+
+try:
+    import executing
+except ImportError:
+    raise DidNotEnable("executing is not installed")
+
+try:
+    import pure_eval
+except ImportError:
+    raise DidNotEnable("pure_eval is not installed")
+
+try:
+    # Used implicitly, just testing it's available
+    import asttokens  # noqa
+except ImportError:
+    raise DidNotEnable("asttokens is not installed")
+
+
+class PureEvalIntegration(Integration):
+    identifier = "pure_eval"
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+
+        @add_global_event_processor
+        def add_executing_info(event, hint):
+            # type: (Event, Optional[Hint]) -> Optional[Event]
+            if Hub.current.get_integration(PureEvalIntegration) is None:
+                return event
+
+            if hint is None:
+                return event
+
+            exc_info = hint.get("exc_info", None)
+
+            if exc_info is None:
+                return event
+
+            exception = event.get("exception", None)
+
+            if exception is None:
+                return event
+
+            values = exception.get("values", None)
+
+            if values is None:
+                return event
+
+            for exception, (_exc_type, _exc_value, exc_tb) in zip(
+                reversed(values), walk_exception_chain(exc_info)
+            ):
+                sentry_frames = [
+                    frame
+                    for frame in exception.get("stacktrace", {}).get("frames", [])
+                    if frame.get("function")
+                ]
+                tbs = list(iter_stacks(exc_tb))
+                if len(sentry_frames) != len(tbs):
+                    continue
+
+                for sentry_frame, tb in zip(sentry_frames, tbs):
+                    sentry_frame["vars"] = (
+                        pure_eval_frame(tb.tb_frame) or sentry_frame["vars"]
+                    )
+            return event
+
+
+def pure_eval_frame(frame):
+    # type: (FrameType) -> Dict[str, Any]
+    source = executing.Source.for_frame(frame)
+    if not source.tree:
+        return {}
+
+    statements = source.statements_at_line(frame.f_lineno)
+    if not statements:
+        return {}
+
+    scope = stmt = list(statements)[0]
+    while True:
+        # Get the parent first in case the original statement is already
+        # a function definition, e.g. if we're calling a decorator
+        # In that case we still want the surrounding scope, not that function
+        scope = scope.parent
+        if isinstance(scope, (ast.FunctionDef, ast.ClassDef, ast.Module)):
+            break
+
+    evaluator = pure_eval.Evaluator.from_frame(frame)
+    expressions = evaluator.interesting_expressions_grouped(scope)
+
+    def closeness(expression):
+        # type: (Tuple[List[Any], Any]) -> Tuple[int, int]
+        # Prioritise expressions with a node closer to the statement executed
+        # without being after that statement
+        # A higher return value is better - the expression will appear
+        # earlier in the list of values and is less likely to be trimmed
+        nodes, _value = expression
+
+        def start(n):
+            # type: (ast.expr) -> Tuple[int, int]
+            return (n.lineno, n.col_offset)
+
+        nodes_before_stmt = [
+            node for node in nodes if start(node) < stmt.last_token.end  # type: ignore
+        ]
+        if nodes_before_stmt:
+            # The position of the last node before or in the statement
+            return max(start(node) for node in nodes_before_stmt)
+        else:
+            # The position of the first node after the statement
+            # Negative means it's always lower priority than nodes that come before
+            # Less negative means closer to the statement and higher priority
+            lineno, col_offset = min(start(node) for node in nodes)
+            return (-lineno, -col_offset)
+
+    # This adds the first_token and last_token attributes to nodes
+    atok = source.asttokens()
+
+    expressions.sort(key=closeness, reverse=True)
+    return {
+        atok.get_text(nodes[0]): value
+        for nodes, value in expressions[: serializer.MAX_DATABAG_BREADTH]
+    }
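
`PureEvalIntegration` is not enabled by default; when active, `add_executing_info` replaces each frame's `vars` with the expressions `pure_eval` could evaluate safely. A minimal sketch (placeholder DSN):

    import sentry_sdk
    from sentry_sdk.integrations.pure_eval import PureEvalIntegration

    sentry_sdk.init(
        dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder
        integrations=[PureEvalIntegration()],
    )
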
diff --git a/sentry_sdk/integrations/pymongo.py b/sentry_sdk/integrations/pymongo.py
new file mode 100644
index 0000000000..ca4669ec9e
--- /dev/null
+++ b/sentry_sdk/integrations/pymongo.py
@@ -0,0 +1,183 @@
+from __future__ import absolute_import
+import copy
+
+from sentry_sdk import Hub
+from sentry_sdk.hub import _should_send_default_pii
+from sentry_sdk.integrations import DidNotEnable, Integration
+from sentry_sdk.tracing import Span
+from sentry_sdk.utils import capture_internal_exceptions
+
+from sentry_sdk._types import MYPY
+
+try:
+    from pymongo import monitoring
+except ImportError:
+    raise DidNotEnable("Pymongo not installed")
+
+if MYPY:
+    from typing import Any, Dict, Union
+
+    from pymongo.monitoring import (
+        CommandFailedEvent,
+        CommandStartedEvent,
+        CommandSucceededEvent,
+    )
+
+
+SAFE_COMMAND_ATTRIBUTES = [
+    "insert",
+    "ordered",
+    "find",
+    "limit",
+    "singleBatch",
+    "aggregate",
+    "createIndexes",
+    "indexes",
+    "delete",
+    "findAndModify",
+    "renameCollection",
+    "to",
+    "drop",
+]
+
+
+def _strip_pii(command):
+    # type: (Dict[str, Any]) -> Dict[str, Any]
+    for key in command:
+        is_safe_field = key in SAFE_COMMAND_ATTRIBUTES
+        if is_safe_field:
+            # Skip if safe key
+            continue
+
+        update_db_command = key == "update" and "findAndModify" not in command
+        if update_db_command:
+            # Also skip "update" db command because it is save.
+            # There is also an "update" key in the "findAndModify" command, which is NOT safe!
+            continue
+
+        # Special stripping for documents
+        is_document = key == "documents"
+        if is_document:
+            for doc in command[key]:
+                for doc_key in doc:
+                    doc[doc_key] = "%s"
+            continue
+
+        # Special stripping for dict style fields
+        is_dict_field = key in ["filter", "query", "update"]
+        if is_dict_field:
+            for item_key in command[key]:
+                command[key][item_key] = "%s"
+            continue
+
+        # For pipeline fields strip the `$match` dict
+        is_pipeline_field = key == "pipeline"
+        if is_pipeline_field:
+            for pipeline in command[key]:
+                for match_key in pipeline["$match"] if "$match" in pipeline else []:
+                    pipeline["$match"][match_key] = "%s"
+            continue
+
+        # Default stripping
+        command[key] = "%s"
+
+    return command
+
+
+class CommandTracer(monitoring.CommandListener):
+    def __init__(self):
+        # type: () -> None
+        self._ongoing_operations = {}  # type: Dict[int, Span]
+
+    def _operation_key(self, event):
+        # type: (Union[CommandFailedEvent, CommandStartedEvent, CommandSucceededEvent]) -> int
+        return event.request_id
+
+    def started(self, event):
+        # type: (CommandStartedEvent) -> None
+        hub = Hub.current
+        if hub.get_integration(PyMongoIntegration) is None:
+            return
+        with capture_internal_exceptions():
+            command = dict(copy.deepcopy(event.command))
+
+            command.pop("$db", None)
+            command.pop("$clusterTime", None)
+            command.pop("$signature", None)
+
+            op = "db.query"
+
+            tags = {
+                "db.name": event.database_name,
+                "db.system": "mongodb",
+                "db.operation": event.command_name,
+            }
+
+            try:
+                tags["net.peer.name"] = event.connection_id[0]
+                tags["net.peer.port"] = str(event.connection_id[1])
+            except TypeError:
+                pass
+
+            data = {"operation_ids": {}}  # type: Dict[str, Dict[str, Any]]
+
+            data["operation_ids"]["operation"] = event.operation_id
+            data["operation_ids"]["request"] = event.request_id
+
+            try:
+                lsid = command.pop("lsid")["id"]
+                data["operation_ids"]["session"] = str(lsid)
+            except KeyError:
+                pass
+
+            if not _should_send_default_pii():
+                command = _strip_pii(command)
+
+            query = "{} {}".format(event.command_name, command)
+            span = hub.start_span(op=op, description=query)
+
+            for tag, value in tags.items():
+                span.set_tag(tag, value)
+
+            for key, value in data.items():
+                span.set_data(key, value)
+
+            with capture_internal_exceptions():
+                hub.add_breadcrumb(message=query, category="query", type=op, data=tags)
+
+            self._ongoing_operations[self._operation_key(event)] = span.__enter__()
+
+    def failed(self, event):
+        # type: (CommandFailedEvent) -> None
+        hub = Hub.current
+        if hub.get_integration(PyMongoIntegration) is None:
+            return
+
+        try:
+            span = self._ongoing_operations.pop(self._operation_key(event))
+            span.set_status("internal_error")
+            span.__exit__(None, None, None)
+        except KeyError:
+            return
+
+    def succeeded(self, event):
+        # type: (CommandSucceededEvent) -> None
+        hub = Hub.current
+        if hub.get_integration(PyMongoIntegration) is None:
+            return
+
+        try:
+            span = self._ongoing_operations.pop(self._operation_key(event))
+            span.set_status("ok")
+            span.__exit__(None, None, None)
+        except KeyError:
+            pass
+
+
+class PyMongoIntegration(Integration):
+    identifier = "pymongo"
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+        monitoring.register(CommandTracer())
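
The integration registers a global pymongo `CommandListener`, so it only needs to be passed to `init` before any MongoDB clients are created. A minimal sketch (placeholder DSN):

    import sentry_sdk
    from sentry_sdk.integrations.pymongo import PyMongoIntegration

    sentry_sdk.init(
        dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder
        integrations=[PyMongoIntegration()],
        traces_sample_rate=1.0,
        # With the default send_default_pii=False, _strip_pii() above replaces
        # document/filter values in span descriptions with "%s".
    )
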
diff --git a/sentry_sdk/integrations/pyramid.py b/sentry_sdk/integrations/pyramid.py
index ee9682343a..1e234fcffd 100644
--- a/sentry_sdk/integrations/pyramid.py
+++ b/sentry_sdk/integrations/pyramid.py
@@ -4,17 +4,25 @@
 import sys
 import weakref
 
-from pyramid.httpexceptions import HTTPException
-from pyramid.request import Request
-
 from sentry_sdk.hub import Hub, _should_send_default_pii
-from sentry_sdk.utils import capture_internal_exceptions, event_from_exception
+from sentry_sdk.scope import Scope
+from sentry_sdk.tracing import SOURCE_FOR_STYLE
+from sentry_sdk.utils import (
+    capture_internal_exceptions,
+    event_from_exception,
+)
 from sentry_sdk._compat import reraise, iteritems
 
-from sentry_sdk.integrations import Integration
+from sentry_sdk.integrations import Integration, DidNotEnable
 from sentry_sdk.integrations._wsgi_common import RequestExtractor
 from sentry_sdk.integrations.wsgi import SentryWsgiMiddleware
 
+try:
+    from pyramid.httpexceptions import HTTPException
+    from pyramid.request import Request
+except ImportError:
+    raise DidNotEnable("Pyramid not installed")
+
 from sentry_sdk._types import MYPY
 
 if MYPY:
@@ -37,7 +45,6 @@ def authenticated_userid(request):
         # type: (Request) -> Optional[Any]
         return request.authenticated_userid
 
-
 else:
     # bw-compat for pyramid < 1.5
     from pyramid.security import authenticated_userid  # type: ignore
@@ -49,7 +56,7 @@ def authenticated_userid(request):
 class PyramidIntegration(Integration):
     identifier = "pyramid"
 
-    transaction_style = None
+    transaction_style = ""
 
     def __init__(self, transaction_style="route_name"):
         # type: (str) -> None
@@ -63,24 +70,27 @@ def __init__(self, transaction_style="route_name"):
     @staticmethod
     def setup_once():
         # type: () -> None
-        from pyramid.router import Router
-        from pyramid.request import Request
+        from pyramid import router
 
-        old_handle_request = Router.handle_request
+        old_call_view = router._call_view
 
-        def sentry_patched_handle_request(self, request, *args, **kwargs):
+        def sentry_patched_call_view(registry, request, *args, **kwargs):
             # type: (Any, Request, *Any, **Any) -> Response
             hub = Hub.current
             integration = hub.get_integration(PyramidIntegration)
+
             if integration is not None:
                 with hub.configure_scope() as scope:
+                    _set_transaction_name_and_source(
+                        scope, integration.transaction_style, request
+                    )
                     scope.add_event_processor(
                         _make_event_processor(weakref.ref(request), integration)
                     )
 
-            return old_handle_request(self, request, *args, **kwargs)
+            return old_call_view(registry, request, *args, **kwargs)
 
-        Router.handle_request = sentry_patched_handle_request
+        router._call_view = sentry_patched_call_view
 
         if hasattr(Request, "invoke_exception_view"):
             old_invoke_exception_view = Request.invoke_exception_view
@@ -101,7 +111,7 @@ def sentry_patched_invoke_exception_view(self, *args, **kwargs):
 
             Request.invoke_exception_view = sentry_patched_invoke_exception_view
 
-        old_wsgi_call = Router.__call__
+        old_wsgi_call = router.Router.__call__
 
         def sentry_patched_wsgi_call(self, environ, start_response):
             # type: (Any, Dict[str, str], Callable[..., Any]) -> _ScopedResponse
@@ -123,7 +133,7 @@ def sentry_patched_inner_wsgi_call(environ, start_response):
                 environ, start_response
             )
 
-        Router.__call__ = sentry_patched_wsgi_call
+        router.Router.__call__ = sentry_patched_wsgi_call
 
 
 def _capture_exception(exc_info):
@@ -146,6 +156,21 @@ def _capture_exception(exc_info):
     hub.capture_event(event, hint=hint)
 
 
+def _set_transaction_name_and_source(scope, transaction_style, request):
+    # type: (Scope, str, Request) -> None
+    try:
+        name_for_style = {
+            "route_name": request.matched_route.name,
+            "route_pattern": request.matched_route.pattern,
+        }
+        scope.set_transaction_name(
+            name_for_style[transaction_style],
+            source=SOURCE_FOR_STYLE[transaction_style],
+        )
+    except Exception:
+        pass
+
+
 class PyramidRequestExtractor(RequestExtractor):
     def url(self):
         # type: () -> str
@@ -196,14 +221,6 @@ def event_processor(event, hint):
         if request is None:
             return event
 
-        try:
-            if integration.transaction_style == "route_name":
-                event["transaction"] = request.matched_route.name
-            elif integration.transaction_style == "route_pattern":
-                event["transaction"] = request.matched_route.pattern
-        except Exception:
-            pass
-
         with capture_internal_exceptions():
             PyramidRequestExtractor(request).extract_into_event(event)
 
diff --git a/sentry_sdk/integrations/quart.py b/sentry_sdk/integrations/quart.py
new file mode 100644
index 0000000000..e1d4228651
--- /dev/null
+++ b/sentry_sdk/integrations/quart.py
@@ -0,0 +1,188 @@
+from __future__ import absolute_import
+
+from sentry_sdk.hub import _should_send_default_pii, Hub
+from sentry_sdk.integrations import DidNotEnable, Integration
+from sentry_sdk.integrations._wsgi_common import _filter_headers
+from sentry_sdk.integrations.asgi import SentryAsgiMiddleware
+from sentry_sdk.scope import Scope
+from sentry_sdk.tracing import SOURCE_FOR_STYLE
+from sentry_sdk.utils import (
+    capture_internal_exceptions,
+    event_from_exception,
+)
+
+from sentry_sdk._types import MYPY
+
+if MYPY:
+    from typing import Any
+    from typing import Dict
+    from typing import Union
+
+    from sentry_sdk._types import EventProcessor
+
+try:
+    import quart_auth  # type: ignore
+except ImportError:
+    quart_auth = None
+
+try:
+    from quart import (  # type: ignore
+        has_request_context,
+        has_websocket_context,
+        Request,
+        Quart,
+        request,
+        websocket,
+    )
+    from quart.signals import (  # type: ignore
+        got_background_exception,
+        got_request_exception,
+        got_websocket_exception,
+        request_started,
+        websocket_started,
+    )
+except ImportError:
+    raise DidNotEnable("Quart is not installed")
+
+TRANSACTION_STYLE_VALUES = ("endpoint", "url")
+
+
+class QuartIntegration(Integration):
+    identifier = "quart"
+
+    transaction_style = ""
+
+    def __init__(self, transaction_style="endpoint"):
+        # type: (str) -> None
+        if transaction_style not in TRANSACTION_STYLE_VALUES:
+            raise ValueError(
+                "Invalid value for transaction_style: %s (must be in %s)"
+                % (transaction_style, TRANSACTION_STYLE_VALUES)
+            )
+        self.transaction_style = transaction_style
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+
+        request_started.connect(_request_websocket_started)
+        websocket_started.connect(_request_websocket_started)
+        got_background_exception.connect(_capture_exception)
+        got_request_exception.connect(_capture_exception)
+        got_websocket_exception.connect(_capture_exception)
+
+        old_app = Quart.__call__
+
+        async def sentry_patched_asgi_app(self, scope, receive, send):
+            # type: (Any, Any, Any, Any) -> Any
+            if Hub.current.get_integration(QuartIntegration) is None:
+                return await old_app(self, scope, receive, send)
+
+            middleware = SentryAsgiMiddleware(lambda *a, **kw: old_app(self, *a, **kw))
+            middleware.__call__ = middleware._run_asgi3
+            return await middleware(scope, receive, send)
+
+        Quart.__call__ = sentry_patched_asgi_app
+
+
+def _set_transaction_name_and_source(scope, transaction_style, request):
+    # type: (Scope, str, Request) -> None
+
+    try:
+        name_for_style = {
+            "url": request.url_rule.rule,
+            "endpoint": request.url_rule.endpoint,
+        }
+        scope.set_transaction_name(
+            name_for_style[transaction_style],
+            source=SOURCE_FOR_STYLE[transaction_style],
+        )
+    except Exception:
+        pass
+
+
+def _request_websocket_started(app, **kwargs):
+    # type: (Quart, **Any) -> None
+    hub = Hub.current
+    integration = hub.get_integration(QuartIntegration)
+    if integration is None:
+        return
+
+    with hub.configure_scope() as scope:
+        if has_request_context():
+            request_websocket = request._get_current_object()
+        if has_websocket_context():
+            request_websocket = websocket._get_current_object()
+
+        # Set the transaction name here, but rely on ASGI middleware
+        # to actually start the transaction
+        _set_transaction_name_and_source(
+            scope, integration.transaction_style, request_websocket
+        )
+
+        evt_processor = _make_request_event_processor(
+            app, request_websocket, integration
+        )
+        scope.add_event_processor(evt_processor)
+
+
+def _make_request_event_processor(app, request, integration):
+    # type: (Quart, Request, QuartIntegration) -> EventProcessor
+    def inner(event, hint):
+        # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any]
+        # if the request is gone we are fine not logging the data from
+        # it.  This might happen if the processor is pushed away to
+        # another thread.
+        if request is None:
+            return event
+
+        with capture_internal_exceptions():
+            # TODO: Figure out what to do with request body. Methods on request
+            # are async, but event processors are not.
+
+            request_info = event.setdefault("request", {})
+            request_info["url"] = request.url
+            request_info["query_string"] = request.query_string
+            request_info["method"] = request.method
+            request_info["headers"] = _filter_headers(dict(request.headers))
+
+            if _should_send_default_pii():
+                request_info["env"] = {"REMOTE_ADDR": request.access_route[0]}
+                _add_user_to_event(event)
+
+        return event
+
+    return inner
+
+
+def _capture_exception(sender, exception, **kwargs):
+    # type: (Quart, Union[ValueError, BaseException], **Any) -> None
+    hub = Hub.current
+    if hub.get_integration(QuartIntegration) is None:
+        return
+
+    # If an integration is there, a client has to be there.
+    client = hub.client  # type: Any
+
+    event, hint = event_from_exception(
+        exception,
+        client_options=client.options,
+        mechanism={"type": "quart", "handled": False},
+    )
+
+    hub.capture_event(event, hint=hint)
+
+
+def _add_user_to_event(event):
+    # type: (Dict[str, Any]) -> None
+    if quart_auth is None:
+        return
+
+    user = quart_auth.current_user
+    if user is None:
+        return
+
+    with capture_internal_exceptions():
+        user_info = event.setdefault("user", {})
+
+        user_info["id"] = quart_auth.current_user._auth_id
diff --git a/sentry_sdk/integrations/redis.py b/sentry_sdk/integrations/redis.py
index 510fdbb22c..aae5647f3d 100644
--- a/sentry_sdk/integrations/redis.py
+++ b/sentry_sdk/integrations/redis.py
@@ -1,13 +1,96 @@
 from __future__ import absolute_import
 
 from sentry_sdk import Hub
-from sentry_sdk.utils import capture_internal_exceptions
-from sentry_sdk.integrations import Integration
+from sentry_sdk.consts import OP
+from sentry_sdk.utils import capture_internal_exceptions, logger
+from sentry_sdk.integrations import Integration, DidNotEnable
 
 from sentry_sdk._types import MYPY
 
 if MYPY:
-    from typing import Any
+    from typing import Any, Sequence
+
+_SINGLE_KEY_COMMANDS = frozenset(
+    ["decr", "decrby", "get", "incr", "incrby", "pttl", "set", "setex", "setnx", "ttl"]
+)
+_MULTI_KEY_COMMANDS = frozenset(["del", "touch", "unlink"])
+
+#: Trim argument lists to this many values
+_MAX_NUM_ARGS = 10
+
+
+def patch_redis_pipeline(pipeline_cls, is_cluster, get_command_args_fn):
+    # type: (Any, bool, Any) -> None
+    old_execute = pipeline_cls.execute
+
+    def sentry_patched_execute(self, *args, **kwargs):
+        # type: (Any, *Any, **Any) -> Any
+        hub = Hub.current
+
+        if hub.get_integration(RedisIntegration) is None:
+            return old_execute(self, *args, **kwargs)
+
+        with hub.start_span(
+            op=OP.DB_REDIS, description="redis.pipeline.execute"
+        ) as span:
+            with capture_internal_exceptions():
+                span.set_tag("redis.is_cluster", is_cluster)
+                transaction = self.transaction if not is_cluster else False
+                span.set_tag("redis.transaction", transaction)
+
+                commands = []
+                for i, arg in enumerate(self.command_stack):
+                    if i > _MAX_NUM_ARGS:
+                        break
+                    command_args = []
+                    for j, command_arg in enumerate(get_command_args_fn(arg)):
+                        if j > 0:
+                            command_arg = repr(command_arg)
+                        command_args.append(command_arg)
+                    commands.append(" ".join(command_args))
+
+                span.set_data(
+                    "redis.commands",
+                    {"count": len(self.command_stack), "first_ten": commands},
+                )
+
+            return old_execute(self, *args, **kwargs)
+
+    pipeline_cls.execute = sentry_patched_execute
+
+
+def _get_redis_command_args(command):
+    # type: (Any) -> Sequence[Any]
+    return command[0]
+
+
+def _parse_rediscluster_command(command):
+    # type: (Any) -> Sequence[Any]
+    return command.args
+
+
+def _patch_rediscluster():
+    # type: () -> None
+    try:
+        import rediscluster  # type: ignore
+    except ImportError:
+        return
+
+    patch_redis_client(rediscluster.RedisCluster, is_cluster=True)
+
+    # up to v1.3.6, __version__ attribute is a tuple
+    # from v2.0.0, __version__ is a string and VERSION a tuple
+    version = getattr(rediscluster, "VERSION", rediscluster.__version__)
+
+    # StrictRedisCluster was introduced in v0.2.0 and removed in v2.0.0
+    # https://github.com/Grokzen/redis-py-cluster/blob/master/docs/release-notes.rst
+    if (0, 2, 0) < version < (2, 0, 0):
+        pipeline_cls = rediscluster.pipeline.StrictClusterPipeline
+        patch_redis_client(rediscluster.StrictRedisCluster, is_cluster=True)
+    else:
+        pipeline_cls = rediscluster.pipeline.ClusterPipeline
+
+    patch_redis_pipeline(pipeline_cls, True, _parse_rediscluster_command)
 
 
 class RedisIntegration(Integration):
@@ -16,27 +99,41 @@ class RedisIntegration(Integration):
     @staticmethod
     def setup_once():
         # type: () -> None
-        import redis
+        try:
+            import redis
+        except ImportError:
+            raise DidNotEnable("Redis client not installed")
 
-        patch_redis_client(redis.StrictRedis)
+        patch_redis_client(redis.StrictRedis, is_cluster=False)
+        patch_redis_pipeline(redis.client.Pipeline, False, _get_redis_command_args)
+        try:
+            strict_pipeline = redis.client.StrictPipeline  # type: ignore
+        except AttributeError:
+            pass
+        else:
+            patch_redis_pipeline(strict_pipeline, False, _get_redis_command_args)
 
         try:
             import rb.clients  # type: ignore
         except ImportError:
             pass
         else:
-            patch_redis_client(rb.clients.FanoutClient)
-            patch_redis_client(rb.clients.MappingClient)
-            patch_redis_client(rb.clients.RoutingClient)
+            patch_redis_client(rb.clients.FanoutClient, is_cluster=False)
+            patch_redis_client(rb.clients.MappingClient, is_cluster=False)
+            patch_redis_client(rb.clients.RoutingClient, is_cluster=False)
+
+        try:
+            _patch_rediscluster()
+        except Exception:
+            logger.exception("Error occurred while patching `rediscluster` library")
 
 
-def patch_redis_client(cls):
-    # type: (Any) -> None
+def patch_redis_client(cls, is_cluster):
+    # type: (Any, bool) -> None
     """
     This function can be used to instrument custom redis client classes or
     subclasses.
     """
-
     old_execute_command = cls.execute_command
 
     def sentry_patched_execute_command(self, name, *args, **kwargs):
@@ -51,19 +148,24 @@ def sentry_patched_execute_command(self, name, *args, **kwargs):
         with capture_internal_exceptions():
             description_parts = [name]
             for i, arg in enumerate(args):
-                if i > 10:
+                if i > _MAX_NUM_ARGS:
                     break
 
                 description_parts.append(repr(arg))
 
             description = " ".join(description_parts)
 
-        with hub.start_span(op="redis", description=description) as span:
+        with hub.start_span(op=OP.DB_REDIS, description=description) as span:
+            span.set_tag("redis.is_cluster", is_cluster)
             if name:
                 span.set_tag("redis.command", name)
 
-            if name and args and name.lower() in ("get", "set", "setex", "setnx"):
-                span.set_tag("redis.key", args[0])
+            if name and args:
+                name_low = name.lower()
+                if (name_low in _SINGLE_KEY_COMMANDS) or (
+                    name_low in _MULTI_KEY_COMMANDS and len(args) == 1
+                ):
+                    span.set_tag("redis.key", args[0])
 
             return old_execute_command(self, name, *args, **kwargs)
 
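`patch_redis_client` stays public for instrumenting custom client classes; the new `is_cluster` argument only controls the `redis.is_cluster` tag on emitted spans. A minimal sketch (`CustomRedisClient` is a hypothetical class not already covered by `setup_once()`):

    import redis
    from sentry_sdk.integrations.redis import patch_redis_client

    class CustomRedisClient(redis.Redis):
        """Hypothetical redis-like client not patched by setup_once()."""

    patch_redis_client(CustomRedisClient, is_cluster=False)
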
diff --git a/sentry_sdk/integrations/rq.py b/sentry_sdk/integrations/rq.py
index fbe8cdda3d..3b74d8f9be 100644
--- a/sentry_sdk/integrations/rq.py
+++ b/sentry_sdk/integrations/rq.py
@@ -1,32 +1,35 @@
 from __future__ import absolute_import
 
 import weakref
+from sentry_sdk.consts import OP
 
 from sentry_sdk.hub import Hub
-from sentry_sdk.integrations import Integration, DidNotEnable
-from sentry_sdk.tracing import Span
-from sentry_sdk.utils import capture_internal_exceptions, event_from_exception
-
+from sentry_sdk.integrations import DidNotEnable, Integration
+from sentry_sdk.integrations.logging import ignore_logger
+from sentry_sdk.tracing import Transaction, TRANSACTION_SOURCE_TASK
+from sentry_sdk.utils import (
+    capture_internal_exceptions,
+    event_from_exception,
+    format_timestamp,
+)
 
 try:
-    from rq.version import VERSION as RQ_VERSION
+    from rq.queue import Queue
     from rq.timeouts import JobTimeoutException
+    from rq.version import VERSION as RQ_VERSION
     from rq.worker import Worker
-    from rq.queue import Queue
 except ImportError:
     raise DidNotEnable("RQ not installed")
 
 from sentry_sdk._types import MYPY
 
 if MYPY:
-    from typing import Any
-    from typing import Dict
-    from typing import Callable
-
-    from rq.job import Job
+    from typing import Any, Callable, Dict
 
-    from sentry_sdk.utils import ExcInfo
     from sentry_sdk._types import EventProcessor
+    from sentry_sdk.utils import ExcInfo
+
+    from rq.job import Job
 
 
 class RqIntegration(Integration):
@@ -39,7 +42,7 @@ def setup_once():
         try:
             version = tuple(map(int, RQ_VERSION.split(".")[:3]))
         except (ValueError, TypeError):
-            raise DidNotEnable("Unparseable RQ version: {}".format(RQ_VERSION))
+            raise DidNotEnable("Unparsable RQ version: {}".format(RQ_VERSION))
 
         if version < (0, 6):
             raise DidNotEnable("RQ 0.6 or newer is required.")
@@ -61,15 +64,19 @@ def sentry_patched_perform_job(self, job, *args, **kwargs):
                 scope.clear_breadcrumbs()
                 scope.add_event_processor(_make_event_processor(weakref.ref(job)))
 
-                span = Span.continue_from_headers(
-                    job.meta.get("_sentry_trace_headers") or {}
+                transaction = Transaction.continue_from_headers(
+                    job.meta.get("_sentry_trace_headers") or {},
+                    op=OP.QUEUE_TASK_RQ,
+                    name="unknown RQ task",
+                    source=TRANSACTION_SOURCE_TASK,
                 )
-                span.op = "rq.task"
 
                 with capture_internal_exceptions():
-                    span.transaction = job.func_name
+                    transaction.name = job.func_name
 
-                with hub.start_span(span):
+                with hub.start_transaction(
+                    transaction, custom_sampling_context={"rq_job": job}
+                ):
                     rv = old_perform_job(self, job, *args, **kwargs)
 
             if self.is_horse:
@@ -86,7 +93,9 @@ def sentry_patched_perform_job(self, job, *args, **kwargs):
 
         def sentry_patched_handle_exception(self, job, *exc_info, **kwargs):
             # type: (Worker, Any, *Any, **Any) -> Any
-            _capture_exception(exc_info)  # type: ignore
+            if job.is_failed:
+                _capture_exception(exc_info)  # type: ignore
+
             return old_handle_exception(self, job, *exc_info, **kwargs)
 
         Worker.handle_exception = sentry_patched_handle_exception
@@ -105,6 +114,8 @@ def sentry_patched_enqueue_job(self, job, **kwargs):
 
         Queue.enqueue_job = sentry_patched_enqueue_job
 
+        ignore_logger("rq.worker")
+
 
 def _make_event_processor(weak_job):
     # type: (Callable[[], Job]) -> EventProcessor
@@ -122,6 +133,11 @@ def event_processor(event, hint):
                     "description": job.description,
                 }
 
+                if job.enqueued_at:
+                    extra["rq-job"]["enqueued_at"] = format_timestamp(job.enqueued_at)
+                if job.started_at:
+                    extra["rq-job"]["started_at"] = format_timestamp(job.started_at)
+
         if "exc_info" in hint:
             with capture_internal_exceptions():
                 if issubclass(hint["exc_info"][0], JobTimeoutException):
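
Because `enqueue_job` stores the current trace headers in `job.meta["_sentry_trace_headers"]` and `perform_job` continues from them, enqueuing inside an active transaction links the web and worker sides of the trace. A minimal sketch (placeholder DSN):

    import sentry_sdk
    from redis import Redis
    from rq import Queue

    sentry_sdk.init(
        dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder
        traces_sample_rate=1.0,
    )

    queue = Queue(connection=Redis())

    with sentry_sdk.start_transaction(op="http.server", name="enqueue-demo"):
        # The worker resumes this trace as a "queue.task.rq" transaction.
        queue.enqueue(print, "hello")
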
diff --git a/sentry_sdk/integrations/sanic.py b/sentry_sdk/integrations/sanic.py
index e8fdca422a..8892f93ed7 100644
--- a/sentry_sdk/integrations/sanic.py
+++ b/sentry_sdk/integrations/sanic.py
@@ -4,10 +4,12 @@
 
 from sentry_sdk._compat import urlparse, reraise
 from sentry_sdk.hub import Hub
+from sentry_sdk.tracing import TRANSACTION_SOURCE_COMPONENT
 from sentry_sdk.utils import (
     capture_internal_exceptions,
     event_from_exception,
     HAS_REAL_CONTEXTVARS,
+    CONTEXTVARS_ERROR_MESSAGE,
 )
 from sentry_sdk.integrations import Integration, DidNotEnable
 from sentry_sdk.integrations._wsgi_common import RequestExtractor, _filter_headers
@@ -26,6 +28,7 @@
     from sanic.request import Request, RequestParameters
 
     from sentry_sdk._types import Event, EventProcessor, Hint
+    from sanic.router import Route
 
 try:
     from sanic import Sanic, __version__ as SANIC_VERSION
@@ -35,19 +38,31 @@
 except ImportError:
     raise DidNotEnable("Sanic not installed")
 
+old_error_handler_lookup = ErrorHandler.lookup
+old_handle_request = Sanic.handle_request
+old_router_get = Router.get
+
+try:
+    # This method was introduced in Sanic v21.9
+    old_startup = Sanic._startup
+except AttributeError:
+    pass
+
 
 class SanicIntegration(Integration):
     identifier = "sanic"
+    version = (0, 0)  # type: Tuple[int, ...]
 
     @staticmethod
     def setup_once():
         # type: () -> None
+
         try:
-            version = tuple(map(int, SANIC_VERSION.split(".")))
+            SanicIntegration.version = tuple(map(int, SANIC_VERSION.split(".")))
         except (TypeError, ValueError):
-            raise DidNotEnable("Unparseable Sanic version: {}".format(SANIC_VERSION))
+            raise DidNotEnable("Unparsable Sanic version: {}".format(SANIC_VERSION))
 
-        if version < (0, 8):
+        if SanicIntegration.version < (0, 8):
             raise DidNotEnable("Sanic 0.8 or newer required.")
 
         if not HAS_REAL_CONTEXTVARS:
@@ -55,7 +70,7 @@ def setup_once():
             # requests.
             raise DidNotEnable(
                 "The sanic integration for Sentry requires Python 3.7+ "
-                " or aiocontextvars package"
+                " or the aiocontextvars package." + CONTEXTVARS_ERROR_MESSAGE
             )
 
         if SANIC_VERSION.startswith("0.8."):
@@ -70,74 +85,201 @@ def setup_once():
             # https://github.com/huge-success/sanic/issues/1332
             ignore_logger("root")
 
-        old_handle_request = Sanic.handle_request
+        if SanicIntegration.version < (21, 9):
+            _setup_legacy_sanic()
+            return
+
+        _setup_sanic()
+
+
+class SanicRequestExtractor(RequestExtractor):
+    def content_length(self):
+        # type: () -> int
+        if self.request.body is None:
+            return 0
+        return len(self.request.body)
+
+    def cookies(self):
+        # type: () -> Dict[str, str]
+        return dict(self.request.cookies)
+
+    def raw_data(self):
+        # type: () -> bytes
+        return self.request.body
+
+    def form(self):
+        # type: () -> RequestParameters
+        return self.request.form
+
+    def is_json(self):
+        # type: () -> bool
+        raise NotImplementedError()
 
-        async def sentry_handle_request(self, request, *args, **kwargs):
-            # type: (Any, Request, *Any, **Any) -> Any
-            hub = Hub.current
-            if hub.get_integration(SanicIntegration) is None:
-                return old_handle_request(self, request, *args, **kwargs)
+    def json(self):
+        # type: () -> Optional[Any]
+        return self.request.json
 
-            weak_request = weakref.ref(request)
+    def files(self):
+        # type: () -> RequestParameters
+        return self.request.files
 
-            with Hub(hub) as hub:
-                with hub.configure_scope() as scope:
-                    scope.clear_breadcrumbs()
-                    scope.add_event_processor(_make_request_processor(weak_request))
+    def size_of_file(self, file):
+        # type: (Any) -> int
+        return len(file.body or ())
 
-                response = old_handle_request(self, request, *args, **kwargs)
-                if isawaitable(response):
-                    response = await response
 
-                return response
+def _setup_sanic():
+    # type: () -> None
+    Sanic._startup = _startup
+    ErrorHandler.lookup = _sentry_error_handler_lookup
 
-        Sanic.handle_request = sentry_handle_request
 
-        old_router_get = Router.get
+def _setup_legacy_sanic():
+    # type: () -> None
+    Sanic.handle_request = _legacy_handle_request
+    Router.get = _legacy_router_get
+    ErrorHandler.lookup = _sentry_error_handler_lookup
 
-        def sentry_router_get(self, request):
-            # type: (Any, Request) -> Any
-            rv = old_router_get(self, request)
-            hub = Hub.current
-            if hub.get_integration(SanicIntegration) is not None:
-                with capture_internal_exceptions():
-                    with hub.configure_scope() as scope:
-                        scope.transaction = rv[0].__name__
-            return rv
 
-        Router.get = sentry_router_get
+async def _startup(self):
+    # type: (Sanic) -> None
+    # This happens about as early in the lifecycle as possible, just after the
+    # Request object is created. The body has not yet been consumed.
+    self.signal("http.lifecycle.request")(_hub_enter)
 
-        old_error_handler_lookup = ErrorHandler.lookup
+    # This happens after the handler is complete. In v21.9 this signal is not
+    # dispatched when there is an exception. Therefore we need to close out
+    # and call _hub_exit from the custom exception handler as well.
+    # See https://github.com/sanic-org/sanic/issues/2297
+    self.signal("http.lifecycle.response")(_hub_exit)
 
-        def sentry_error_handler_lookup(self, exception):
-            # type: (Any, Exception) -> Optional[object]
-            _capture_exception(exception)
-            old_error_handler = old_error_handler_lookup(self, exception)
+    # This happens inside of request handling immediately after the route
+    # has been identified by the router.
+    self.signal("http.routing.after")(_set_transaction)
 
-            if old_error_handler is None:
-                return None
+    # The above signals need to be declared before this can be called.
+    await old_startup(self)
+
+
+async def _hub_enter(request):
+    # type: (Request) -> None
+    hub = Hub.current
+    request.ctx._sentry_do_integration = (
+        hub.get_integration(SanicIntegration) is not None
+    )
+
+    if not request.ctx._sentry_do_integration:
+        return
 
-            if Hub.current.get_integration(SanicIntegration) is None:
-                return old_error_handler
+    weak_request = weakref.ref(request)
+    request.ctx._sentry_hub = Hub(hub)
+    request.ctx._sentry_hub.__enter__()
 
-            async def sentry_wrapped_error_handler(request, exception):
-                # type: (Request, Exception) -> Any
-                try:
-                    response = old_error_handler(request, exception)
-                    if isawaitable(response):
-                        response = await response
-                    return response
-                except Exception:
-                    # Report errors that occur in Sanic error handler. These
-                    # exceptions will not even show up in Sanic's
-                    # `sanic.exceptions` logger.
-                    exc_info = sys.exc_info()
-                    _capture_exception(exc_info)
-                    reraise(*exc_info)
+    with request.ctx._sentry_hub.configure_scope() as scope:
+        scope.clear_breadcrumbs()
+        scope.add_event_processor(_make_request_processor(weak_request))
+
+
+async def _hub_exit(request, **_):
+    # type: (Request, **Any) -> None
+    request.ctx._sentry_hub.__exit__(None, None, None)
+
+
+async def _set_transaction(request, route, **kwargs):
+    # type: (Request, Route, **Any) -> None
+    hub = Hub.current
+    if hub.get_integration(SanicIntegration) is not None:
+        with capture_internal_exceptions():
+            with hub.configure_scope() as scope:
+                route_name = route.name.replace(request.app.name, "").strip(".")
+                scope.set_transaction_name(
+                    route_name, source=TRANSACTION_SOURCE_COMPONENT
+                )
 
-            return sentry_wrapped_error_handler
 
-        ErrorHandler.lookup = sentry_error_handler_lookup
+def _sentry_error_handler_lookup(self, exception, *args, **kwargs):
+    # type: (Any, Exception, *Any, **Any) -> Optional[object]
+    _capture_exception(exception)
+    old_error_handler = old_error_handler_lookup(self, exception, *args, **kwargs)
+
+    if old_error_handler is None:
+        return None
+
+    if Hub.current.get_integration(SanicIntegration) is None:
+        return old_error_handler
+
+    async def sentry_wrapped_error_handler(request, exception):
+        # type: (Request, Exception) -> Any
+        try:
+            response = old_error_handler(request, exception)
+            if isawaitable(response):
+                response = await response
+            return response
+        except Exception:
+            # Report errors that occur in Sanic error handler. These
+            # exceptions will not even show up in Sanic's
+            # `sanic.exceptions` logger.
+            exc_info = sys.exc_info()
+            _capture_exception(exc_info)
+            reraise(*exc_info)
+        finally:
+            # As mentioned in previous comment in _startup, this can be removed
+            # after https://github.com/sanic-org/sanic/issues/2297 is resolved
+            if SanicIntegration.version == (21, 9):
+                await _hub_exit(request)
+
+    return sentry_wrapped_error_handler
+
+
+async def _legacy_handle_request(self, request, *args, **kwargs):
+    # type: (Any, Request, *Any, **Any) -> Any
+    hub = Hub.current
+    if hub.get_integration(SanicIntegration) is None:
+        return old_handle_request(self, request, *args, **kwargs)
+
+    weak_request = weakref.ref(request)
+
+    with Hub(hub) as hub:
+        with hub.configure_scope() as scope:
+            scope.clear_breadcrumbs()
+            scope.add_event_processor(_make_request_processor(weak_request))
+
+        response = old_handle_request(self, request, *args, **kwargs)
+        if isawaitable(response):
+            response = await response
+
+        return response
+
+
+def _legacy_router_get(self, *args):
+    # type: (Any, Union[Any, Request]) -> Any
+    rv = old_router_get(self, *args)
+    hub = Hub.current
+    if hub.get_integration(SanicIntegration) is not None:
+        with capture_internal_exceptions():
+            with hub.configure_scope() as scope:
+                if SanicIntegration.version and SanicIntegration.version >= (21, 3):
+                    # Sanic 21.3 and above append the app name to the route name,
+                    # so we need to strip it from the route name to keep the
+                    # transaction name consistent across all versions.
+                    sanic_app_name = self.ctx.app.name
+                    sanic_route = rv[0].name
+
+                    if sanic_route.startswith("%s." % sanic_app_name):
+                        # We add 1 to the length of sanic_app_name to also skip
+                        # the dot that joins the app name and the route name.
+                        # Format: app_name.route_name
+                        sanic_route = sanic_route[len(sanic_app_name) + 1 :]
+
+                    scope.set_transaction_name(
+                        sanic_route, source=TRANSACTION_SOURCE_COMPONENT
+                    )
+                else:
+                    scope.set_transaction_name(
+                        rv[0].__name__, source=TRANSACTION_SOURCE_COMPONENT
+                    )
+
+    return rv
 
 
 def _capture_exception(exception):
@@ -195,39 +337,3 @@ def sanic_processor(event, hint):
         return event
 
     return sanic_processor
-
-
-class SanicRequestExtractor(RequestExtractor):
-    def content_length(self):
-        # type: () -> int
-        if self.request.body is None:
-            return 0
-        return len(self.request.body)
-
-    def cookies(self):
-        # type: () -> Dict[str, str]
-        return dict(self.request.cookies)
-
-    def raw_data(self):
-        # type: () -> bytes
-        return self.request.body
-
-    def form(self):
-        # type: () -> RequestParameters
-        return self.request.form
-
-    def is_json(self):
-        # type: () -> bool
-        raise NotImplementedError()
-
-    def json(self):
-        # type: () -> Optional[Any]
-        return self.request.json
-
-    def files(self):
-        # type: () -> RequestParameters
-        return self.request.files
-
-    def size_of_file(self, file):
-        # type: (Any) -> int
-        return len(file.body or ())
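The app-name stripping in `_legacy_router_get` above is easier to follow with concrete values; a minimal sketch (names are made up):

    sanic_app_name = "myapp"
    sanic_route = "myapp.hello_route"

    if sanic_route.startswith("%s." % sanic_app_name):
        # len(sanic_app_name) + 1 also skips the dot joining app name and route name
        sanic_route = sanic_route[len(sanic_app_name) + 1 :]

    assert sanic_route == "hello_route"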
diff --git a/sentry_sdk/integrations/serverless.py b/sentry_sdk/integrations/serverless.py
index c6ad3a2f68..c22fbfd37f 100644
--- a/sentry_sdk/integrations/serverless.py
+++ b/sentry_sdk/integrations/serverless.py
@@ -32,8 +32,8 @@ def serverless_function(f, flush=True):
     pass
 
 
-@overload  # noqa
-def serverless_function(f=None, flush=True):
+@overload
+def serverless_function(f=None, flush=True):  # noqa: F811
     # type: (None, bool) -> Callable[[F], F]
     pass
 
@@ -69,7 +69,7 @@ def _capture_and_reraise():
     # type: () -> None
     exc_info = sys.exc_info()
     hub = Hub.current
-    if hub is not None and hub.client is not None:
+    if hub.client is not None:
         event, hint = event_from_exception(
             exc_info,
             client_options=hub.client.options,
@@ -82,6 +82,4 @@ def _capture_and_reraise():
 
 def _flush_client():
     # type: () -> None
-    hub = Hub.current
-    if hub is not None:
-        hub.flush()
+    return Hub.current.flush()
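For context, `serverless_function` is applied as a decorator; a usage sketch (the handler name and body are made up):

    from sentry_sdk.integrations.serverless import serverless_function

    @serverless_function
    def my_handler(event, context):
        # Exceptions raised here are captured and re-raised; with the default
        # flush=True, pending events are flushed before the function returns.
        return {"statusCode": 200}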
diff --git a/sentry_sdk/integrations/spark/spark_worker.py b/sentry_sdk/integrations/spark/spark_worker.py
index bae4413d11..2c27647dab 100644
--- a/sentry_sdk/integrations/spark/spark_worker.py
+++ b/sentry_sdk/integrations/spark/spark_worker.py
@@ -82,11 +82,15 @@ def process_event(event, hint):
                     return event
 
                 event.setdefault("tags", {}).setdefault(
-                    "stageId", task_context.stageId()
+                    "stageId", str(task_context.stageId())
+                )
+                event["tags"].setdefault("partitionId", str(task_context.partitionId()))
+                event["tags"].setdefault(
+                    "attemptNumber", str(task_context.attemptNumber())
+                )
+                event["tags"].setdefault(
+                    "taskAttemptId", str(task_context.taskAttemptId())
                 )
-                event["tags"].setdefault("partitionId", task_context.partitionId())
-                event["tags"].setdefault("attemptNumber", task_context.attemptNumber())
-                event["tags"].setdefault("taskAttemptId", task_context.taskAttemptId())
 
                 if task_context._localProperties:
                     if "sentry_app_name" in task_context._localProperties:
diff --git a/sentry_sdk/integrations/sqlalchemy.py b/sentry_sdk/integrations/sqlalchemy.py
index f24d2f20bf..68e671cd92 100644
--- a/sentry_sdk/integrations/sqlalchemy.py
+++ b/sentry_sdk/integrations/sqlalchemy.py
@@ -1,9 +1,11 @@
 from __future__ import absolute_import
 
+import re
+
 from sentry_sdk._types import MYPY
 from sentry_sdk.hub import Hub
 from sentry_sdk.integrations import Integration, DidNotEnable
-from sentry_sdk.tracing import record_sql_queries
+from sentry_sdk.tracing_utils import record_sql_queries
 
 try:
     from sqlalchemy.engine import Engine  # type: ignore
@@ -28,10 +30,12 @@ def setup_once():
         # type: () -> None
 
         try:
-            version = tuple(map(int, SQLALCHEMY_VERSION.split("b")[0].split(".")))
+            version = tuple(
+                map(int, re.split("b|rc", SQLALCHEMY_VERSION)[0].split("."))
+            )
         except (TypeError, ValueError):
             raise DidNotEnable(
-                "Unparseable SQLAlchemy version: {}".format(SQLALCHEMY_VERSION)
+                "Unparsable SQLAlchemy version: {}".format(SQLALCHEMY_VERSION)
             )
 
         if version < (1, 2):
@@ -58,29 +62,43 @@ def _before_cursor_execute(
         paramstyle=context and context.dialect and context.dialect.paramstyle or None,
         executemany=executemany,
     )
-    conn._sentry_sql_span_manager = ctx_mgr
+    context._sentry_sql_span_manager = ctx_mgr
 
     span = ctx_mgr.__enter__()
 
     if span is not None:
-        conn._sentry_sql_span = span
+        context._sentry_sql_span = span
 
 
-def _after_cursor_execute(conn, cursor, statement, *args):
-    # type: (Any, Any, Any, *Any) -> None
+def _after_cursor_execute(conn, cursor, statement, parameters, context, *args):
+    # type: (Any, Any, Any, Any, Any, *Any) -> None
     ctx_mgr = getattr(
-        conn, "_sentry_sql_span_manager", None
-    )  # type: ContextManager[Any]
+        context, "_sentry_sql_span_manager", None
+    )  # type: Optional[ContextManager[Any]]
 
     if ctx_mgr is not None:
-        conn._sentry_sql_span_manager = None
+        context._sentry_sql_span_manager = None
         ctx_mgr.__exit__(None, None, None)
 
 
 def _handle_error(context, *args):
     # type: (Any, *Any) -> None
-    conn = context.connection
-    span = getattr(conn, "_sentry_sql_span", None)  # type: Optional[Span]
+    execution_context = context.execution_context
+    if execution_context is None:
+        return
+
+    span = getattr(execution_context, "_sentry_sql_span", None)  # type: Optional[Span]
 
     if span is not None:
         span.set_status("internal_error")
+
+    # _after_cursor_execute does not get called for crashing SQL statements.
+    # Judging from the SQLAlchemy codebase, any error coming into this
+    # handler is going to be fatal.
+    ctx_mgr = getattr(
+        execution_context, "_sentry_sql_span_manager", None
+    )  # type: Optional[ContextManager[Any]]
+
+    if ctx_mgr is not None:
+        execution_context._sentry_sql_span_manager = None
+        ctx_mgr.__exit__(None, None, None)
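The switch from `split("b")` to `re.split("b|rc", ...)` in `setup_once` above makes release-candidate version strings parseable as well; a worked example:

    import re

    for raw in ("1.3.24", "1.4.0b1", "1.4.0rc1"):
        version = tuple(map(int, re.split("b|rc", raw)[0].split(".")))
        print(raw, "->", version)
    # 1.3.24   -> (1, 3, 24)
    # 1.4.0b1  -> (1, 4, 0)
    # 1.4.0rc1 -> (1, 4, 0)  (the old code raised ValueError on "0rc1")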
diff --git a/sentry_sdk/integrations/starlette.py b/sentry_sdk/integrations/starlette.py
new file mode 100644
index 0000000000..aec194a779
--- /dev/null
+++ b/sentry_sdk/integrations/starlette.py
@@ -0,0 +1,597 @@
+from __future__ import absolute_import
+
+import asyncio
+import functools
+import threading
+
+from sentry_sdk._compat import iteritems
+from sentry_sdk._types import MYPY
+from sentry_sdk.consts import OP
+from sentry_sdk.hub import Hub, _should_send_default_pii
+from sentry_sdk.integrations import DidNotEnable, Integration
+from sentry_sdk.integrations._wsgi_common import (
+    _is_json_content_type,
+    request_body_within_bounds,
+)
+from sentry_sdk.integrations.asgi import SentryAsgiMiddleware
+from sentry_sdk.tracing import SOURCE_FOR_STYLE, TRANSACTION_SOURCE_ROUTE
+from sentry_sdk.utils import (
+    AnnotatedValue,
+    capture_internal_exceptions,
+    event_from_exception,
+    transaction_from_function,
+)
+
+if MYPY:
+    from typing import Any, Awaitable, Callable, Dict, Optional
+
+    from sentry_sdk.scope import Scope as SentryScope
+
+try:
+    import starlette  # type: ignore
+    from starlette.applications import Starlette  # type: ignore
+    from starlette.datastructures import UploadFile  # type: ignore
+    from starlette.middleware import Middleware  # type: ignore
+    from starlette.middleware.authentication import (  # type: ignore
+        AuthenticationMiddleware,
+    )
+    from starlette.requests import Request  # type: ignore
+    from starlette.routing import Match  # type: ignore
+    from starlette.types import ASGIApp, Receive, Scope as StarletteScope, Send  # type: ignore
+except ImportError:
+    raise DidNotEnable("Starlette is not installed")
+
+try:
+    # Starlette 0.20
+    from starlette.middleware.exceptions import ExceptionMiddleware  # type: ignore
+except ImportError:
+    # Starlette 0.19.1
+    from starlette.exceptions import ExceptionMiddleware  # type: ignore
+
+try:
+    # Optional dependency of Starlette to parse form data.
+    import multipart  # type: ignore
+except ImportError:
+    multipart = None
+
+
+_DEFAULT_TRANSACTION_NAME = "generic Starlette request"
+
+TRANSACTION_STYLE_VALUES = ("endpoint", "url")
+
+
+class StarletteIntegration(Integration):
+    identifier = "starlette"
+
+    transaction_style = ""
+
+    def __init__(self, transaction_style="url"):
+        # type: (str) -> None
+        if transaction_style not in TRANSACTION_STYLE_VALUES:
+            raise ValueError(
+                "Invalid value for transaction_style: %s (must be in %s)"
+                % (transaction_style, TRANSACTION_STYLE_VALUES)
+            )
+        self.transaction_style = transaction_style
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+        patch_middlewares()
+        patch_asgi_app()
+        patch_request_response()
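A usage sketch for the integration (the DSN is a placeholder): `transaction_style="url"` names transactions after the matched route path (e.g. `/users/{id}`), while `"endpoint"` names them after the handler function, as implemented in `_set_transaction_name_and_source` further down.

    import sentry_sdk
    from sentry_sdk.integrations.starlette import StarletteIntegration

    sentry_sdk.init(
        dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder
        integrations=[StarletteIntegration(transaction_style="endpoint")],
    )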
+
+
+def _enable_span_for_middleware(middleware_class):
+    # type: (Any) -> type
+    old_call = middleware_class.__call__
+
+    async def _create_span_call(app, scope, receive, send, **kwargs):
+        # type: (Any, Dict[str, Any], Callable[[], Awaitable[Dict[str, Any]]], Callable[[Dict[str, Any]], Awaitable[None]], Any) -> None
+        hub = Hub.current
+        integration = hub.get_integration(StarletteIntegration)
+        if integration is not None:
+            middleware_name = app.__class__.__name__
+
+            with hub.start_span(
+                op=OP.MIDDLEWARE_STARLETTE, description=middleware_name
+            ) as middleware_span:
+                middleware_span.set_tag("starlette.middleware_name", middleware_name)
+
+                # Creating spans for the "receive" callback
+                async def _sentry_receive(*args, **kwargs):
+                    # type: (*Any, **Any) -> Any
+                    hub = Hub.current
+                    with hub.start_span(
+                        op=OP.MIDDLEWARE_STARLETTE_RECEIVE,
+                        description=getattr(receive, "__qualname__", str(receive)),
+                    ) as span:
+                        span.set_tag("starlette.middleware_name", middleware_name)
+                        return await receive(*args, **kwargs)
+
+                receive_name = getattr(receive, "__name__", str(receive))
+                receive_patched = receive_name == "_sentry_receive"
+                new_receive = _sentry_receive if not receive_patched else receive
+
+                # Creating spans for the "send" callback
+                async def _sentry_send(*args, **kwargs):
+                    # type: (*Any, **Any) -> Any
+                    hub = Hub.current
+                    with hub.start_span(
+                        op=OP.MIDDLEWARE_STARLETTE_SEND,
+                        description=getattr(send, "__qualname__", str(send)),
+                    ) as span:
+                        span.set_tag("starlette.middleware_name", middleware_name)
+                        return await send(*args, **kwargs)
+
+                send_name = getattr(send, "__name__", str(send))
+                send_patched = send_name == "_sentry_send"
+                new_send = _sentry_send if not send_patched else send
+
+                return await old_call(app, scope, new_receive, new_send, **kwargs)
+
+        else:
+            return await old_call(app, scope, receive, send, **kwargs)
+
+    not_yet_patched = old_call.__name__ not in [
+        "_create_span_call",
+        "_sentry_authenticationmiddleware_call",
+        "_sentry_exceptionmiddleware_call",
+    ]
+
+    if not_yet_patched:
+        middleware_class.__call__ = _create_span_call
+
+    return middleware_class
+
+
+def _capture_exception(exception, handled=False):
+    # type: (BaseException, bool) -> None
+    hub = Hub.current
+    if hub.get_integration(StarletteIntegration) is None:
+        return
+
+    event, hint = event_from_exception(
+        exception,
+        client_options=hub.client.options if hub.client else None,
+        mechanism={"type": StarletteIntegration.identifier, "handled": handled},
+    )
+
+    hub.capture_event(event, hint=hint)
+
+
+def patch_exception_middleware(middleware_class):
+    # type: (Any) -> None
+    """
+    Capture all exceptions in Starlette app and
+    also extract user information.
+    """
+    old_middleware_init = middleware_class.__init__
+
+    not_yet_patched = "_sentry_middleware_init" not in str(old_middleware_init)
+
+    if not_yet_patched:
+
+        def _sentry_middleware_init(self, *args, **kwargs):
+            # type: (Any, *Any, **Any) -> None
+            old_middleware_init(self, *args, **kwargs)
+
+            # Patch existing exception handlers
+            old_handlers = self._exception_handlers.copy()
+
+            async def _sentry_patched_exception_handler(self, *args, **kwargs):
+                # type: (Any, *Any, **Any) -> None
+                exp = args[0]
+
+                is_http_server_error = (
+                    hasattr(exp, "status_code") and exp.status_code >= 500
+                )
+                if is_http_server_error:
+                    _capture_exception(exp, handled=True)
+
+                # Find a matching handler
+                old_handler = None
+                for cls in type(exp).__mro__:
+                    if cls in old_handlers:
+                        old_handler = old_handlers[cls]
+                        break
+
+                if old_handler is None:
+                    return
+
+                if _is_async_callable(old_handler):
+                    return await old_handler(self, *args, **kwargs)
+                else:
+                    return old_handler(self, *args, **kwargs)
+
+            for key in self._exception_handlers.keys():
+                self._exception_handlers[key] = _sentry_patched_exception_handler
+
+        middleware_class.__init__ = _sentry_middleware_init
+
+        old_call = middleware_class.__call__
+
+        async def _sentry_exceptionmiddleware_call(self, scope, receive, send):
+            # type: (Any, Dict[str, Any], Callable[[], Awaitable[Dict[str, Any]]], Callable[[Dict[str, Any]], Awaitable[None]]) -> None
+            # Also add the user that was set by the authentication middleware
+            # that ran before this middleware. This is necessary because the
+            # authentication middleware sets the user in the ASGI scope and then
+            # (in the same function) calls this exception middleware. If there
+            # is no exception (or no handler for the occurring exception type),
+            # the exception bubbles up, the user information is written to the
+            # Sentry scope by the authentication middleware, and the ASGI
+            # middleware then sends everything to Sentry; that case is fine.
+            # But if an exception occurs that this exception middleware has a
+            # handler for, the exception is sent to Sentry directly, so we need
+            # the user information right now. This is why we set it here.
+            _add_user_to_sentry_scope(scope)
+            await old_call(self, scope, receive, send)
+
+        middleware_class.__call__ = _sentry_exceptionmiddleware_call
+
+
+def _add_user_to_sentry_scope(scope):
+    # type: (Dict[str, Any]) -> None
+    """
+    Extracts user information from the ASGI scope and
+    adds it to Sentry's scope.
+    """
+    if "user" not in scope:
+        return
+
+    if not _should_send_default_pii():
+        return
+
+    hub = Hub.current
+    if hub.get_integration(StarletteIntegration) is None:
+        return
+
+    with hub.configure_scope() as sentry_scope:
+        user_info = {}  # type: Dict[str, Any]
+        starlette_user = scope["user"]
+
+        username = getattr(starlette_user, "username", None)
+        if username:
+            user_info.setdefault("username", starlette_user.username)
+
+        user_id = getattr(starlette_user, "id", None)
+        if user_id:
+            user_info.setdefault("id", starlette_user.id)
+
+        email = getattr(starlette_user, "email", None)
+        if email:
+            user_info.setdefault("email", starlette_user.email)
+
+        sentry_scope.user = user_info
+
+
+def patch_authentication_middleware(middleware_class):
+    # type: (Any) -> None
+    """
+    Add user information to Sentry scope.
+    """
+    old_call = middleware_class.__call__
+
+    not_yet_patched = "_sentry_authenticationmiddleware_call" not in str(old_call)
+
+    if not_yet_patched:
+
+        async def _sentry_authenticationmiddleware_call(self, scope, receive, send):
+            # type: (Any, Dict[str, Any], Callable[[], Awaitable[Dict[str, Any]]], Callable[[Dict[str, Any]], Awaitable[None]]) -> None
+            await old_call(self, scope, receive, send)
+            _add_user_to_sentry_scope(scope)
+
+        middleware_class.__call__ = _sentry_authenticationmiddleware_call
+
+
+def patch_middlewares():
+    # type: () -> None
+    """
+    Patches Starlette's `Middleware` class to record
+    spans for every middleware invoked.
+    """
+    old_middleware_init = Middleware.__init__
+
+    not_yet_patched = "_sentry_middleware_init" not in str(old_middleware_init)
+
+    if not_yet_patched:
+
+        def _sentry_middleware_init(self, cls, **options):
+            # type: (Any, Any, **Any) -> None
+            if cls == SentryAsgiMiddleware:
+                return old_middleware_init(self, cls, **options)
+
+            span_enabled_cls = _enable_span_for_middleware(cls)
+            old_middleware_init(self, span_enabled_cls, **options)
+
+            if cls == AuthenticationMiddleware:
+                patch_authentication_middleware(cls)
+
+            if cls == ExceptionMiddleware:
+                patch_exception_middleware(cls)
+
+        Middleware.__init__ = _sentry_middleware_init
+
+
+def patch_asgi_app():
+    # type: () -> None
+    """
+    Instrument Starlette ASGI app using the SentryAsgiMiddleware.
+    """
+    old_app = Starlette.__call__
+
+    async def _sentry_patched_asgi_app(self, scope, receive, send):
+        # type: (Starlette, StarletteScope, Receive, Send) -> None
+        if Hub.current.get_integration(StarletteIntegration) is None:
+            return await old_app(self, scope, receive, send)
+
+        middleware = SentryAsgiMiddleware(
+            lambda *a, **kw: old_app(self, *a, **kw),
+            mechanism_type=StarletteIntegration.identifier,
+        )
+
+        middleware.__call__ = middleware._run_asgi3
+        return await middleware(scope, receive, send)
+
+    Starlette.__call__ = _sentry_patched_asgi_app
+
+
+# This was vendored from Starlette to support Starlette 0.19.1, because
+# this function was only introduced in 0.20.x.
+def _is_async_callable(obj):
+    # type: (Any) -> bool
+    while isinstance(obj, functools.partial):
+        obj = obj.func
+
+    return asyncio.iscoroutinefunction(obj) or (
+        callable(obj) and asyncio.iscoroutinefunction(obj.__call__)
+    )
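A quick illustration of what the vendored helper considers async (illustrative only; uses `_is_async_callable` as defined above):

    import functools

    async def handler(request):
        return "ok"

    def sync_handler(request):
        return "ok"

    assert _is_async_callable(handler)
    assert _is_async_callable(functools.partial(handler, None))  # partials are unwrapped
    assert not _is_async_callable(sync_handler)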
+
+
+def patch_request_response():
+    # type: () -> None
+    old_request_response = starlette.routing.request_response
+
+    def _sentry_request_response(func):
+        # type: (Callable[[Any], Any]) -> ASGIApp
+        old_func = func
+
+        is_coroutine = _is_async_callable(old_func)
+        if is_coroutine:
+
+            async def _sentry_async_func(*args, **kwargs):
+                # type: (*Any, **Any) -> Any
+                hub = Hub.current
+                integration = hub.get_integration(StarletteIntegration)
+                if integration is None:
+                    return await old_func(*args, **kwargs)
+
+                with hub.configure_scope() as sentry_scope:
+                    request = args[0]
+
+                    _set_transaction_name_and_source(
+                        sentry_scope, integration.transaction_style, request
+                    )
+
+                    extractor = StarletteRequestExtractor(request)
+                    info = await extractor.extract_request_info()
+
+                    def _make_request_event_processor(req, integration):
+                        # type: (Any, Any) -> Callable[[Dict[str, Any], Dict[str, Any]], Dict[str, Any]]
+                        def event_processor(event, hint):
+                            # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any]
+
+                            # Add info from request to event
+                            request_info = event.get("request", {})
+                            if info:
+                                if "cookies" in info:
+                                    request_info["cookies"] = info["cookies"]
+                                if "data" in info:
+                                    request_info["data"] = info["data"]
+                            event["request"] = request_info
+
+                            return event
+
+                        return event_processor
+
+                    sentry_scope._name = StarletteIntegration.identifier
+                    sentry_scope.add_event_processor(
+                        _make_request_event_processor(request, integration)
+                    )
+
+                return await old_func(*args, **kwargs)
+
+            func = _sentry_async_func
+        else:
+
+            def _sentry_sync_func(*args, **kwargs):
+                # type: (*Any, **Any) -> Any
+                hub = Hub.current
+                integration = hub.get_integration(StarletteIntegration)
+                if integration is None:
+                    return old_func(*args, **kwargs)
+
+                with hub.configure_scope() as sentry_scope:
+                    if sentry_scope.profile is not None:
+                        sentry_scope.profile.active_thread_id = (
+                            threading.current_thread().ident
+                        )
+
+                    request = args[0]
+
+                    _set_transaction_name_and_source(
+                        sentry_scope, integration.transaction_style, request
+                    )
+
+                    extractor = StarletteRequestExtractor(request)
+                    cookies = extractor.extract_cookies_from_request()
+
+                    def _make_request_event_processor(req, integration):
+                        # type: (Any, Any) -> Callable[[Dict[str, Any], Dict[str, Any]], Dict[str, Any]]
+                        def event_processor(event, hint):
+                            # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any]
+
+                            # Extract information from request
+                            request_info = event.get("request", {})
+                            if cookies:
+                                request_info["cookies"] = cookies
+
+                            event["request"] = request_info
+
+                            return event
+
+                        return event_processor
+
+                    sentry_scope._name = StarletteIntegration.identifier
+                    sentry_scope.add_event_processor(
+                        _make_request_event_processor(request, integration)
+                    )
+
+                return old_func(*args, **kwargs)
+
+            func = _sentry_sync_func
+
+        return old_request_response(func)
+
+    starlette.routing.request_response = _sentry_request_response
+
+
+class StarletteRequestExtractor:
+    """
+    Extracts useful information from the Starlette request
+    (like form data or cookies) and adds it to the Sentry event.
+    """
+
+    request = None  # type: Request
+
+    def __init__(self, request):
+        # type: (StarletteRequestExtractor, Request) -> None
+        self.request = request
+
+    def extract_cookies_from_request(self):
+        # type: (StarletteRequestExtractor) -> Optional[Dict[str, Any]]
+        client = Hub.current.client
+        if client is None:
+            return None
+
+        cookies = None  # type: Optional[Dict[str, Any]]
+        if _should_send_default_pii():
+            cookies = self.cookies()
+
+        return cookies
+
+    async def extract_request_info(self):
+        # type: (StarletteRequestExtractor) -> Optional[Dict[str, Any]]
+        client = Hub.current.client
+        if client is None:
+            return None
+
+        request_info = {}  # type: Dict[str, Any]
+
+        with capture_internal_exceptions():
+            # Add cookies
+            if _should_send_default_pii():
+                request_info["cookies"] = self.cookies()
+
+            # If there is no body, just return the cookies
+            content_length = await self.content_length()
+            if not content_length:
+                return request_info
+
+            # Add annotation if body is too big
+            if content_length and not request_body_within_bounds(
+                client, content_length
+            ):
+                request_info["data"] = AnnotatedValue.removed_because_over_size_limit()
+                return request_info
+
+            # Add JSON body, if it is a JSON request
+            json = await self.json()
+            if json:
+                request_info["data"] = json
+                return request_info
+
+            # Add form as key/value pairs, if request has form data
+            form = await self.form()
+            if form:
+                form_data = {}
+                for key, val in iteritems(form):
+                    is_file = isinstance(val, UploadFile)
+                    form_data[key] = (
+                        val
+                        if not is_file
+                        else AnnotatedValue.removed_because_raw_data()
+                    )
+
+                request_info["data"] = form_data
+                return request_info
+
+            # Raw data, do not add body just an annotation
+            request_info["data"] = AnnotatedValue.removed_because_raw_data()
+            return request_info
+
+    async def content_length(self):
+        # type: (StarletteRequestExtractor) -> Optional[int]
+        if "content-length" in self.request.headers:
+            return int(self.request.headers["content-length"])
+
+        return None
+
+    def cookies(self):
+        # type: (StarletteRequestExtractor) -> Dict[str, Any]
+        return self.request.cookies
+
+    async def form(self):
+        # type: (StarletteRequestExtractor) -> Any
+        if multipart is None:
+            return None
+
+        # Parse the body first to get it cached, as Starlette does not cache form() the
+        # way it does body() and json(): https://github.com/encode/starlette/discussions/1933
+        # Calling `.form()` without calling `.body()` first can
+        # potentially break the user's project.
+        await self.request.body()
+
+        return await self.request.form()
+
+    def is_json(self):
+        # type: (StarletteRequestExtractor) -> bool
+        return _is_json_content_type(self.request.headers.get("content-type"))
+
+    async def json(self):
+        # type: (StarletteRequestExtractor) -> Optional[Dict[str, Any]]
+        if not self.is_json():
+            return None
+
+        return await self.request.json()
+
+
+def _set_transaction_name_and_source(scope, transaction_style, request):
+    # type: (SentryScope, str, Any) -> None
+    name = ""
+
+    if transaction_style == "endpoint":
+        endpoint = request.scope.get("endpoint")
+        if endpoint:
+            name = transaction_from_function(endpoint) or ""
+
+    elif transaction_style == "url":
+        router = request.scope["router"]
+        for route in router.routes:
+            match = route.matches(request.scope)
+
+            if match[0] == Match.FULL:
+                # transaction_style is "url" in this branch, so the matched
+                # route's path is the transaction name.
+                name = route.path
+                break
+
+    if not name:
+        name = _DEFAULT_TRANSACTION_NAME
+        source = TRANSACTION_SOURCE_ROUTE
+    else:
+        source = SOURCE_FOR_STYLE[transaction_style]
+
+    scope.set_transaction_name(name, source=source)
diff --git a/sentry_sdk/integrations/starlite.py b/sentry_sdk/integrations/starlite.py
new file mode 100644
index 0000000000..2a5a6150bb
--- /dev/null
+++ b/sentry_sdk/integrations/starlite.py
@@ -0,0 +1,271 @@
+from typing import TYPE_CHECKING
+
+from pydantic import BaseModel  # type: ignore
+from sentry_sdk.consts import OP
+from sentry_sdk.hub import Hub, _should_send_default_pii
+from sentry_sdk.integrations import DidNotEnable, Integration
+from sentry_sdk.integrations.asgi import SentryAsgiMiddleware
+from sentry_sdk.tracing import SOURCE_FOR_STYLE, TRANSACTION_SOURCE_ROUTE
+from sentry_sdk.utils import event_from_exception, transaction_from_function
+
+try:
+    from starlite import Request, Starlite, State  # type: ignore
+    from starlite.handlers.base import BaseRouteHandler  # type: ignore
+    from starlite.middleware import DefineMiddleware  # type: ignore
+    from starlite.plugins.base import get_plugin_for_value  # type: ignore
+    from starlite.routes.http import HTTPRoute  # type: ignore
+    from starlite.utils import ConnectionDataExtractor, is_async_callable, Ref  # type: ignore
+
+    if TYPE_CHECKING:
+        from typing import Any, Dict, List, Optional, Union
+        from starlite.types import (  # type: ignore
+            ASGIApp,
+            HTTPReceiveMessage,
+            HTTPScope,
+            Message,
+            Middleware,
+            Receive,
+            Scope,
+            Send,
+            WebSocketReceiveMessage,
+        )
+        from starlite import MiddlewareProtocol
+        from sentry_sdk._types import Event
+except ImportError:
+    raise DidNotEnable("Starlite is not installed")
+
+
+_DEFAULT_TRANSACTION_NAME = "generic Starlite request"
+
+
+class SentryStarliteASGIMiddleware(SentryAsgiMiddleware):
+    def __init__(self, app: "ASGIApp"):
+        super().__init__(
+            app=app,
+            unsafe_context_data=False,
+            transaction_style="endpoint",
+            mechanism_type="asgi",
+        )
+
+
+class StarliteIntegration(Integration):
+    identifier = "starlite"
+
+    @staticmethod
+    def setup_once() -> None:
+        patch_app_init()
+        patch_middlewares()
+        patch_http_route_handle()
+
+
+def patch_app_init() -> None:
+    """
+    Replaces the Starlite class's `__init__` function in order to inject `after_exception` handlers and set the
+    `SentryStarliteASGIMiddleware` as the outermost middleware in the stack.
+    See:
+    - https://starlite-api.github.io/starlite/usage/0-the-starlite-app/5-application-hooks/#after-exception
+    - https://starlite-api.github.io/starlite/usage/7-middleware/0-middleware-intro/
+    """
+    old__init__ = Starlite.__init__
+
+    def injection_wrapper(self: "Starlite", *args: "Any", **kwargs: "Any") -> None:
+
+        after_exception = kwargs.pop("after_exception", [])
+        kwargs.update(
+            after_exception=[
+                exception_handler,
+                *(
+                    after_exception
+                    if isinstance(after_exception, list)
+                    else [after_exception]
+                ),
+            ]
+        )
+
+        SentryStarliteASGIMiddleware.__call__ = SentryStarliteASGIMiddleware._run_asgi3
+        middleware = kwargs.pop("middleware", None) or []
+        kwargs["middleware"] = [SentryStarliteASGIMiddleware, *middleware]
+        old__init__(self, *args, **kwargs)
+
+    Starlite.__init__ = injection_wrapper
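With the injection above, enabling the integration is enough to get the middleware and exception hook wired in; a hypothetical app setup (the handler and DSN are made up):

    import sentry_sdk
    from sentry_sdk.integrations.starlite import StarliteIntegration
    from starlite import Starlite, get

    sentry_sdk.init(
        dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder
        integrations=[StarliteIntegration()],
    )

    @get("/")
    def index() -> str:
        return "hello"

    app = Starlite(route_handlers=[index])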
+
+
+def patch_middlewares() -> None:
+    old__resolve_middleware_stack = BaseRouteHandler.resolve_middleware
+
+    def resolve_middleware_wrapper(self: "Any") -> "List[Middleware]":
+        return [
+            enable_span_for_middleware(middleware)
+            for middleware in old__resolve_middleware_stack(self)
+        ]
+
+    BaseRouteHandler.resolve_middleware = resolve_middleware_wrapper
+
+
+def enable_span_for_middleware(middleware: "Middleware") -> "Middleware":
+    if (
+        not hasattr(middleware, "__call__")  # noqa: B004
+        or middleware is SentryStarliteASGIMiddleware
+    ):
+        return middleware
+
+    if isinstance(middleware, DefineMiddleware):
+        old_call: "ASGIApp" = middleware.middleware.__call__
+    else:
+        old_call = middleware.__call__
+
+    async def _create_span_call(
+        self: "MiddlewareProtocol", scope: "Scope", receive: "Receive", send: "Send"
+    ) -> None:
+        hub = Hub.current
+        integration = hub.get_integration(StarliteIntegration)
+        if integration is not None:
+            middleware_name = self.__class__.__name__
+            with hub.start_span(
+                op=OP.MIDDLEWARE_STARLITE, description=middleware_name
+            ) as middleware_span:
+                middleware_span.set_tag("starlite.middleware_name", middleware_name)
+
+                # Creating spans for the "receive" callback
+                async def _sentry_receive(
+                    *args: "Any", **kwargs: "Any"
+                ) -> "Union[HTTPReceiveMessage, WebSocketReceiveMessage]":
+                    hub = Hub.current
+                    with hub.start_span(
+                        op=OP.MIDDLEWARE_STARLITE_RECEIVE,
+                        description=getattr(receive, "__qualname__", str(receive)),
+                    ) as span:
+                        span.set_tag("starlite.middleware_name", middleware_name)
+                        return await receive(*args, **kwargs)
+
+                receive_name = getattr(receive, "__name__", str(receive))
+                receive_patched = receive_name == "_sentry_receive"
+                new_receive = _sentry_receive if not receive_patched else receive
+
+                # Creating spans for the "send" callback
+                async def _sentry_send(message: "Message") -> None:
+                    hub = Hub.current
+                    with hub.start_span(
+                        op=OP.MIDDLEWARE_STARLITE_SEND,
+                        description=getattr(send, "__qualname__", str(send)),
+                    ) as span:
+                        span.set_tag("starlite.middleware_name", middleware_name)
+                        return await send(message)
+
+                send_name = getattr(send, "__name__", str(send))
+                send_patched = send_name == "_sentry_send"
+                new_send = _sentry_send if not send_patched else send
+
+                return await old_call(self, scope, new_receive, new_send)
+        else:
+            return await old_call(self, scope, receive, send)
+
+    not_yet_patched = old_call.__name__ not in ["_create_span_call"]
+
+    if not_yet_patched:
+        if isinstance(middleware, DefineMiddleware):
+            middleware.middleware.__call__ = _create_span_call
+        else:
+            middleware.__call__ = _create_span_call
+
+    return middleware
+
+
+def patch_http_route_handle() -> None:
+    old_handle = HTTPRoute.handle
+
+    async def handle_wrapper(
+        self: "HTTPRoute", scope: "HTTPScope", receive: "Receive", send: "Send"
+    ) -> None:
+        hub = Hub.current
+        integration: StarliteIntegration = hub.get_integration(StarliteIntegration)
+        if integration is None:
+            return await old_handle(self, scope, receive, send)
+
+        with hub.configure_scope() as sentry_scope:
+            request: "Request[Any, Any]" = scope["app"].request_class(
+                scope=scope, receive=receive, send=send
+            )
+            extracted_request_data = ConnectionDataExtractor(
+                parse_body=True, parse_query=True
+            )(request)
+            body = extracted_request_data.pop("body")
+
+            request_data = await body
+
+            def event_processor(event: "Event", _: "Dict[str, Any]") -> "Event":
+                route_handler = scope.get("route_handler")
+
+                request_info = event.get("request", {})
+                request_info["content_length"] = len(scope.get("_body", b""))
+                if _should_send_default_pii():
+                    request_info["cookies"] = extracted_request_data["cookies"]
+                if request_data is not None:
+                    request_info["data"] = request_data
+
+                func = None
+                if route_handler.name is not None:
+                    tx_name = route_handler.name
+                elif isinstance(route_handler.fn, Ref):
+                    func = route_handler.fn.value
+                else:
+                    func = route_handler.fn
+                if func is not None:
+                    tx_name = transaction_from_function(func)
+
+                tx_info = {"source": SOURCE_FOR_STYLE["endpoint"]}
+
+                if not tx_name:
+                    tx_name = _DEFAULT_TRANSACTION_NAME
+                    tx_info = {"source": TRANSACTION_SOURCE_ROUTE}
+
+                event.update(
+                    request=request_info, transaction=tx_name, transaction_info=tx_info
+                )
+                return event
+
+            sentry_scope._name = StarliteIntegration.identifier
+            sentry_scope.add_event_processor(event_processor)
+
+            return await old_handle(self, scope, receive, send)
+
+    HTTPRoute.handle = handle_wrapper
+
+
+def retrieve_user_from_scope(scope: "Scope") -> "Optional[Dict[str, Any]]":
+    scope_user = scope.get("user", {})
+    if not scope_user:
+        return None
+    if isinstance(scope_user, dict):
+        return scope_user
+    if isinstance(scope_user, BaseModel):
+        return scope_user.dict()
+    if hasattr(scope_user, "asdict"):  # dataclasses
+        return scope_user.asdict()
+
+    plugin = get_plugin_for_value(scope_user)
+    if plugin and not is_async_callable(plugin.to_dict):
+        return plugin.to_dict(scope_user)
+
+    return None
+
+
+def exception_handler(exc: Exception, scope: "Scope", _: "State") -> None:
+    hub = Hub.current
+    if hub.get_integration(StarliteIntegration) is None:
+        return
+
+    user_info: "Optional[Dict[str, Any]]" = None
+    if _should_send_default_pii():
+        user_info = retrieve_user_from_scope(scope)
+    if user_info and isinstance(user_info, dict):
+        with hub.configure_scope() as sentry_scope:
+            sentry_scope.set_user(user_info)
+
+    event, hint = event_from_exception(
+        exc,
+        client_options=hub.client.options if hub.client else None,
+        mechanism={"type": StarliteIntegration.identifier, "handled": False},
+    )
+
+    hub.capture_event(event, hint=hint)
diff --git a/sentry_sdk/integrations/stdlib.py b/sentry_sdk/integrations/stdlib.py
index 56cece70ac..687d9dd2c1 100644
--- a/sentry_sdk/integrations/stdlib.py
+++ b/sentry_sdk/integrations/stdlib.py
@@ -2,12 +2,13 @@
 import subprocess
 import sys
 import platform
+from sentry_sdk.consts import OP
 
 from sentry_sdk.hub import Hub
 from sentry_sdk.integrations import Integration
 from sentry_sdk.scope import add_global_event_processor
-from sentry_sdk.tracing import EnvironHeaders
-from sentry_sdk.utils import capture_internal_exceptions, safe_repr
+from sentry_sdk.tracing_utils import EnvironHeaders
+from sentry_sdk.utils import capture_internal_exceptions, logger, safe_repr
 
 from sentry_sdk._types import MYPY
 
@@ -70,7 +71,7 @@ def putrequest(self, method, url, *args, **kwargs):
         default_port = self.default_port
 
         real_url = url
-        if not real_url.startswith(("http://", "https://")):
+        if real_url is None or not real_url.startswith(("http://", "https://")):
             real_url = "%s://%s%s%s" % (
                 default_port == 443 and "https" or "http",
                 host,
@@ -78,14 +79,21 @@ def putrequest(self, method, url, *args, **kwargs):
                 url,
             )
 
-        span = hub.start_span(op="http", description="%s %s" % (method, real_url))
+        span = hub.start_span(
+            op=OP.HTTP_CLIENT, description="%s %s" % (method, real_url)
+        )
 
         span.set_data("method", method)
         span.set_data("url", real_url)
 
         rv = real_putrequest(self, method, url, *args, **kwargs)
 
-        for key, value in hub.iter_trace_propagation_headers():
+        for key, value in hub.iter_trace_propagation_headers(span):
+            logger.debug(
+                "[Tracing] Adding `{key}` header {value} to outgoing request to {real_url}.".format(
+                    key=key, value=value, real_url=real_url
+                )
+            )
             self.putheader(key, value)
 
         self._sentrysdk_span = span
@@ -152,7 +160,7 @@ def sentry_patched_popen_init(self, *a, **kw):
 
         hub = Hub.current
         if hub.get_integration(StdlibIntegration) is None:
-            return old_popen_init(self, *a, **kw)  # type: ignore
+            return old_popen_init(self, *a, **kw)
 
         # Convert from tuple to list to be able to set values.
         a = list(a)
@@ -178,16 +186,18 @@ def sentry_patched_popen_init(self, *a, **kw):
 
         env = None
 
-        for k, v in hub.iter_trace_propagation_headers():
-            if env is None:
-                env = _init_argument(a, kw, "env", 10, lambda x: dict(x or os.environ))
-            env["SUBPROCESS_" + k.upper().replace("-", "_")] = v
+        with hub.start_span(op=OP.SUBPROCESS, description=description) as span:
+            for k, v in hub.iter_trace_propagation_headers(span):
+                if env is None:
+                    env = _init_argument(
+                        a, kw, "env", 10, lambda x: dict(x or os.environ)
+                    )
+                env["SUBPROCESS_" + k.upper().replace("-", "_")] = v
 
-        with hub.start_span(op="subprocess", description=description) as span:
             if cwd:
                 span.set_data("subprocess.cwd", cwd)
 
-            rv = old_popen_init(self, *a, **kw)  # type: ignore
+            rv = old_popen_init(self, *a, **kw)
 
             span.set_tag("subprocess.pid", self.pid)
             return rv
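The header-to-environment-variable mapping above is mechanical; for example (the header value is made up):

    k, v = "sentry-trace", "771a43a4192642f0b136d5159a501700-1234567890abcdef-1"
    env_key = "SUBPROCESS_" + k.upper().replace("-", "_")
    assert env_key == "SUBPROCESS_SENTRY_TRACE"
    # The child process can then pick the trace context back up from its env.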
@@ -203,7 +213,7 @@ def sentry_patched_popen_wait(self, *a, **kw):
         if hub.get_integration(StdlibIntegration) is None:
             return old_popen_wait(self, *a, **kw)
 
-        with hub.start_span(op="subprocess.wait") as span:
+        with hub.start_span(op=OP.SUBPROCESS_WAIT) as span:
             span.set_tag("subprocess.pid", self.pid)
             return old_popen_wait(self, *a, **kw)
 
@@ -218,7 +228,7 @@ def sentry_patched_popen_communicate(self, *a, **kw):
         if hub.get_integration(StdlibIntegration) is None:
             return old_popen_communicate(self, *a, **kw)
 
-        with hub.start_span(op="subprocess.communicate") as span:
+        with hub.start_span(op=OP.SUBPROCESS_COMMUNICATE) as span:
             span.set_tag("subprocess.pid", self.pid)
             return old_popen_communicate(self, *a, **kw)
 
diff --git a/sentry_sdk/integrations/threading.py b/sentry_sdk/integrations/threading.py
index b750257e2a..f29e5e8797 100644
--- a/sentry_sdk/integrations/threading.py
+++ b/sentry_sdk/integrations/threading.py
@@ -51,7 +51,7 @@ def sentry_start(self, *a, **kw):
                     new_run = _wrap_run(hub_, getattr(self.run, "__func__", self.run))
                     self.run = new_run  # type: ignore
 
-            return old_start(self, *a, **kw)  # type: ignore
+            return old_start(self, *a, **kw)
 
         Thread.start = sentry_start  # type: ignore
 
diff --git a/sentry_sdk/integrations/tornado.py b/sentry_sdk/integrations/tornado.py
index d3ae065690..a64f4f5b11 100644
--- a/sentry_sdk/integrations/tornado.py
+++ b/sentry_sdk/integrations/tornado.py
@@ -1,9 +1,17 @@
 import weakref
+import contextlib
 from inspect import iscoroutinefunction
+from sentry_sdk.consts import OP
 
 from sentry_sdk.hub import Hub, _should_send_default_pii
+from sentry_sdk.tracing import (
+    TRANSACTION_SOURCE_COMPONENT,
+    TRANSACTION_SOURCE_ROUTE,
+    Transaction,
+)
 from sentry_sdk.utils import (
     HAS_REAL_CONTEXTVARS,
+    CONTEXTVARS_ERROR_MESSAGE,
     event_from_exception,
     capture_internal_exceptions,
     transaction_from_function,
@@ -18,7 +26,7 @@
 from sentry_sdk._compat import iteritems
 
 try:
-    from tornado import version_info as TORNADO_VERSION  # type: ignore
+    from tornado import version_info as TORNADO_VERSION
     from tornado.web import RequestHandler, HTTPError
     from tornado.gen import coroutine
 except ImportError:
@@ -31,6 +39,7 @@
     from typing import Optional
     from typing import Dict
     from typing import Callable
+    from typing import Generator
 
     from sentry_sdk._types import EventProcessor
 
@@ -48,12 +57,13 @@ def setup_once():
             # Tornado is async. We better have contextvars or we're going to leak
             # state between requests.
             raise DidNotEnable(
-                "The tornado integration for Sentry requires Python 3.6+ or the aiocontextvars package"
+                "The tornado integration for Sentry requires Python 3.7+ or the aiocontextvars package"
+                + CONTEXTVARS_ERROR_MESSAGE
             )
 
         ignore_logger("tornado.access")
 
-        old_execute = RequestHandler._execute  # type: ignore
+        old_execute = RequestHandler._execute
 
         awaitable = iscoroutinefunction(old_execute)
 
@@ -61,51 +71,63 @@ def setup_once():
             # Starting Tornado 6 RequestHandler._execute method is a standard Python coroutine (async/await)
             # In that case our method should be a coroutine function too
             async def sentry_execute_request_handler(self, *args, **kwargs):
-                # type: (Any, *Any, **Any) -> Any
-                hub = Hub.current
-                integration = hub.get_integration(TornadoIntegration)
-                if integration is None:
-                    return await old_execute(self, *args, **kwargs)
-
-                weak_handler = weakref.ref(self)
-
-                with Hub(hub) as hub:
-                    with hub.configure_scope() as scope:
-                        scope.clear_breadcrumbs()
-                        processor = _make_event_processor(weak_handler)  # type: ignore
-                        scope.add_event_processor(processor)
+                # type: (RequestHandler, *Any, **Any) -> Any
+                with _handle_request_impl(self):
                     return await old_execute(self, *args, **kwargs)
 
         else:
 
             @coroutine  # type: ignore
-            def sentry_execute_request_handler(self, *args, **kwargs):
+            def sentry_execute_request_handler(self, *args, **kwargs):  # type: ignore
                 # type: (RequestHandler, *Any, **Any) -> Any
-                hub = Hub.current
-                integration = hub.get_integration(TornadoIntegration)
-                if integration is None:
-                    return old_execute(self, *args, **kwargs)
-
-                weak_handler = weakref.ref(self)
-
-                with Hub(hub) as hub:
-                    with hub.configure_scope() as scope:
-                        scope.clear_breadcrumbs()
-                        processor = _make_event_processor(weak_handler)  # type: ignore
-                        scope.add_event_processor(processor)
+                with _handle_request_impl(self):
                     result = yield from old_execute(self, *args, **kwargs)
                     return result
 
-        RequestHandler._execute = sentry_execute_request_handler  # type: ignore
+        RequestHandler._execute = sentry_execute_request_handler
 
         old_log_exception = RequestHandler.log_exception
 
         def sentry_log_exception(self, ty, value, tb, *args, **kwargs):
             # type: (Any, type, BaseException, Any, *Any, **Any) -> Optional[Any]
             _capture_exception(ty, value, tb)
-            return old_log_exception(self, ty, value, tb, *args, **kwargs)  # type: ignore
+            return old_log_exception(self, ty, value, tb, *args, **kwargs)
+
+        RequestHandler.log_exception = sentry_log_exception
+
+
+@contextlib.contextmanager
+def _handle_request_impl(self):
+    # type: (RequestHandler) -> Generator[None, None, None]
+    hub = Hub.current
+    integration = hub.get_integration(TornadoIntegration)
+
+    if integration is None:
+        yield
+        return
+
+    weak_handler = weakref.ref(self)
+
+    with Hub(hub) as hub:
+        with hub.configure_scope() as scope:
+            scope.clear_breadcrumbs()
+            processor = _make_event_processor(weak_handler)
+            scope.add_event_processor(processor)
+
+        transaction = Transaction.continue_from_headers(
+            self.request.headers,
+            op=OP.HTTP_SERVER,
+            # Like with all other integrations, this is our
+            # fallback transaction in case there is no route.
+            # The tornado_processor event processor below is
+            # responsible for setting a transaction name later.
+            name="generic Tornado request",
+            source=TRANSACTION_SOURCE_ROUTE,
+        )
 
-        RequestHandler.log_exception = sentry_log_exception  # type: ignore
+        with hub.start_transaction(
+            transaction, custom_sampling_context={"tornado_request": self.request}
+        ):
+            yield
 
 
 def _capture_exception(ty, value, tb):
@@ -141,6 +163,7 @@ def tornado_processor(event, hint):
         with capture_internal_exceptions():
             method = getattr(handler, handler.request.method.lower())
             event["transaction"] = transaction_from_function(method)
+            event["transaction_info"] = {"source": TRANSACTION_SOURCE_COMPONENT}
 
         with capture_internal_exceptions():
             extractor = TornadoRequestExtractor(request)
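A sketch of the header-based trace continuation used in `_handle_request_impl` above (the header value is made up):

    from sentry_sdk.tracing import Transaction

    headers = {"sentry-trace": "771a43a4192642f0b136d5159a501700-1234567890abcdef-1"}
    transaction = Transaction.continue_from_headers(
        headers, op="http.server", name="generic Tornado request"
    )
    # The new transaction inherits trace_id and parent_span_id from the header,
    # so the server-side transaction joins the caller's trace.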
diff --git a/sentry_sdk/integrations/wsgi.py b/sentry_sdk/integrations/wsgi.py
index bd87663896..03ce665489 100644
--- a/sentry_sdk/integrations/wsgi.py
+++ b/sentry_sdk/integrations/wsgi.py
@@ -1,6 +1,7 @@
 import sys
 
 from sentry_sdk._functools import partial
+from sentry_sdk.consts import OP
 from sentry_sdk.hub import Hub, _should_send_default_pii
 from sentry_sdk.utils import (
     ContextVar,
@@ -8,9 +9,10 @@
     event_from_exception,
 )
 from sentry_sdk._compat import PY2, reraise, iteritems
-from sentry_sdk.tracing import Span
+from sentry_sdk.tracing import Transaction, TRANSACTION_SOURCE_ROUTE
 from sentry_sdk.sessions import auto_session_tracking
 from sentry_sdk.integrations._wsgi_common import _filter_headers
+from sentry_sdk.profiler import start_profiling
 
 from sentry_sdk._types import MYPY
 
@@ -46,7 +48,6 @@ def wsgi_decoding_dance(s, charset="utf-8", errors="replace"):
         # type: (str, str, str) -> str
         return s.decode(charset, errors)
 
-
 else:
 
     def wsgi_decoding_dance(s, charset="utf-8", errors="replace"):
@@ -54,10 +55,16 @@ def wsgi_decoding_dance(s, charset="utf-8", errors="replace"):
         return s.encode("latin1").decode(charset, errors)
 
 
-def get_host(environ):
-    # type: (Dict[str, str]) -> str
+def get_host(environ, use_x_forwarded_for=False):
+    # type: (Dict[str, str], bool) -> str
     """Return the host for the given WSGI environment. Yanked from Werkzeug."""
-    if environ.get("HTTP_HOST"):
+    if use_x_forwarded_for and "HTTP_X_FORWARDED_HOST" in environ:
+        rv = environ["HTTP_X_FORWARDED_HOST"]
+        if environ["wsgi.url_scheme"] == "http" and rv.endswith(":80"):
+            rv = rv[:-3]
+        elif environ["wsgi.url_scheme"] == "https" and rv.endswith(":443"):
+            rv = rv[:-4]
+    elif environ.get("HTTP_HOST"):
         rv = environ["HTTP_HOST"]
         if environ["wsgi.url_scheme"] == "http" and rv.endswith(":80"):
             rv = rv[:-3]
@@ -77,23 +84,24 @@ def get_host(environ):
     return rv
 
 
-def get_request_url(environ):
-    # type: (Dict[str, str]) -> str
+def get_request_url(environ, use_x_forwarded_for=False):
+    # type: (Dict[str, str], bool) -> str
     """Return the absolute URL without query string for the given WSGI
     environment."""
     return "%s://%s/%s" % (
         environ.get("wsgi.url_scheme"),
-        get_host(environ),
+        get_host(environ, use_x_forwarded_for),
         wsgi_decoding_dance(environ.get("PATH_INFO") or "").lstrip("/"),
     )
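The effect of the new `use_x_forwarded_for` flag on `get_host` (a sketch using the functions defined above; hosts are made up):

    environ = {
        "wsgi.url_scheme": "https",
        "HTTP_HOST": "internal-lb:8080",
        "HTTP_X_FORWARDED_HOST": "example.com:443",
    }
    assert get_host(environ) == "internal-lb:8080"
    # With the flag set, the proxy host wins and the default port is stripped:
    assert get_host(environ, use_x_forwarded_for=True) == "example.com"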
 
 
 class SentryWsgiMiddleware(object):
-    __slots__ = ("app",)
+    __slots__ = ("app", "use_x_forwarded_for")
 
-    def __init__(self, app):
-        # type: (Callable[[Dict[str, str], Callable[..., Any]], Any]) -> None
+    def __init__(self, app, use_x_forwarded_for=False):
+        # type: (Callable[[Dict[str, str], Callable[..., Any]], Any], bool) -> None
         self.app = app
+        self.use_x_forwarded_for = use_x_forwarded_for
 
     def __call__(self, environ, start_response):
         # type: (Dict[str, str], Callable[..., Any]) -> _ScopedResponse
@@ -103,25 +111,34 @@ def __call__(self, environ, start_response):
         _wsgi_middleware_applied.set(True)
         try:
             hub = Hub(Hub.current)
-            with auto_session_tracking(hub):
+            with auto_session_tracking(hub, session_mode="request"):
                 with hub:
                     with capture_internal_exceptions():
                         with hub.configure_scope() as scope:
                             scope.clear_breadcrumbs()
                             scope._name = "wsgi"
                             scope.add_event_processor(
-                                _make_wsgi_event_processor(environ)
+                                _make_wsgi_event_processor(
+                                    environ, self.use_x_forwarded_for
+                                )
                             )
 
-                    span = Span.continue_from_environ(environ)
-                    span.op = "http.server"
-                    span.transaction = "generic WSGI request"
+                    transaction = Transaction.continue_from_environ(
+                        environ,
+                        op=OP.HTTP_SERVER,
+                        name="generic WSGI request",
+                        source=TRANSACTION_SOURCE_ROUTE,
+                    )
 
-                    with hub.start_span(span) as span:
+                    with hub.start_transaction(
+                        transaction, custom_sampling_context={"wsgi_environ": environ}
+                    ), start_profiling(transaction, hub):
                         try:
                             rv = self.app(
                                 environ,
-                                partial(_sentry_start_response, start_response, span),
+                                partial(
+                                    _sentry_start_response, start_response, transaction
+                                ),
                             )
                         except BaseException:
                             reraise(*_capture_exception(hub))
@@ -133,7 +150,7 @@ def __call__(self, environ, start_response):
 
 def _sentry_start_response(
     old_start_response,  # type: StartResponse
-    span,  # type: Span
+    transaction,  # type: Transaction
     status,  # type: str
     response_headers,  # type: WsgiResponseHeaders
     exc_info=None,  # type: Optional[WsgiExcInfo]
@@ -141,7 +158,7 @@ def _sentry_start_response(
     # type: (...) -> WsgiResponseIter
     with capture_internal_exceptions():
         status_int = int(status.split(" ", 1)[0])
-        span.set_http_status(status_int)
+        transaction.set_http_status(status_int)
 
     if exc_info is None:
         # The Django Rest Framework WSGI test client, and likely other
@@ -155,7 +172,8 @@ def _sentry_start_response(
 def _get_environ(environ):
     # type: (Dict[str, str]) -> Iterator[Tuple[str, str]]
     """
-    Returns our whitelisted environment variables.
+    Returns the environment variables we explicitly want to capture
+    (server name, port, and remote addr if PII is enabled).
     """
     keys = ["SERVER_NAME", "SERVER_PORT"]
     if _should_send_default_pii():
@@ -264,8 +282,8 @@ def close(self):
                 reraise(*_capture_exception(self._hub))
 
 
-def _make_wsgi_event_processor(environ):
-    # type: (Dict[str, str]) -> EventProcessor
+def _make_wsgi_event_processor(environ, use_x_forwarded_for):
+    # type: (Dict[str, str], bool) -> EventProcessor
     # It's a bit unfortunate that we have to extract and parse the request data
     # from the environ so eagerly, but there are a few good reasons for this.
     #
@@ -279,7 +297,7 @@ def _make_wsgi_event_processor(environ):
     # https://github.com/unbit/uwsgi/issues/1950
 
     client_ip = get_client_ip(environ)
-    request_url = get_request_url(environ)
+    request_url = get_request_url(environ, use_x_forwarded_for)
     query_string = environ.get("QUERY_STRING")
     method = environ.get("REQUEST_METHOD")
     env = dict(_get_environ(environ))
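
    For reference, wiring the middleware up with the new flag looks like this (a minimal sketch; `application` stands in for your existing WSGI app):

        from sentry_sdk.integrations.wsgi import SentryWsgiMiddleware

        application = SentryWsgiMiddleware(application, use_x_forwarded_for=True)
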
diff --git a/sentry_sdk/profiler.py b/sentry_sdk/profiler.py
new file mode 100644
index 0000000000..94080aed89
--- /dev/null
+++ b/sentry_sdk/profiler.py
@@ -0,0 +1,771 @@
+"""
+This file is originally based on code from https://github.com/nylas/nylas-perftools, which is published under the following license:
+
+The MIT License (MIT)
+
+Copyright (c) 2014 Nylas
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+"""
+
+import atexit
+import os
+import platform
+import random
+import sys
+import threading
+import time
+import uuid
+from collections import deque
+from contextlib import contextmanager
+
+import sentry_sdk
+from sentry_sdk._compat import PY33, PY311
+from sentry_sdk._types import MYPY
+from sentry_sdk.utils import (
+    filename_for_module,
+    handle_in_app_impl,
+    logger,
+    nanosecond_time,
+)
+
+if MYPY:
+    from types import FrameType
+    from typing import Any
+    from typing import Callable
+    from typing import Deque
+    from typing import Dict
+    from typing import Generator
+    from typing import List
+    from typing import Optional
+    from typing import Set
+    from typing import Sequence
+    from typing import Tuple
+    from typing_extensions import TypedDict
+    import sentry_sdk.tracing
+
+    ThreadId = str
+
+    # The exact value of this id is not very meaningful. The purpose
+    # of this id is to give us a compact and unique identifier for a
+    # raw stack that can be used as a key to a dictionary so that it
+    # can be used during the sampled format generation.
+    RawStackId = Tuple[int, int]
+
+    RawFrame = Tuple[
+        str,  # abs_path
+        Optional[str],  # module
+        Optional[str],  # filename
+        str,  # function
+        int,  # lineno
+    ]
+    RawStack = Tuple[RawFrame, ...]
+    RawSample = Sequence[Tuple[str, Tuple[RawStackId, RawStack]]]
+
+    ProcessedSample = TypedDict(
+        "ProcessedSample",
+        {
+            "elapsed_since_start_ns": str,
+            "thread_id": ThreadId,
+            "stack_id": int,
+        },
+    )
+
+    ProcessedStack = List[int]
+
+    ProcessedFrame = TypedDict(
+        "ProcessedFrame",
+        {
+            "abs_path": str,
+            "filename": Optional[str],
+            "function": str,
+            "lineno": int,
+            "module": Optional[str],
+        },
+    )
+
+    ProcessedThreadMetadata = TypedDict(
+        "ProcessedThreadMetadata",
+        {"name": str},
+    )
+
+    ProcessedProfile = TypedDict(
+        "ProcessedProfile",
+        {
+            "frames": List[ProcessedFrame],
+            "stacks": List[ProcessedStack],
+            "samples": List[ProcessedSample],
+            "thread_metadata": Dict[ThreadId, ProcessedThreadMetadata],
+        },
+    )
+
+    ProfileContext = TypedDict(
+        "ProfileContext",
+        {"profile_id": str},
+    )
+
+try:
+    from gevent.monkey import is_module_patched  # type: ignore
+except ImportError:
+
+    def is_module_patched(*args, **kwargs):
+        # type: (*Any, **Any) -> bool
+        # if gevent cannot be imported, then no modules have been patched
+        return False
+
+
+_scheduler = None  # type: Optional[Scheduler]
+
+
+def setup_profiler(options):
+    # type: (Dict[str, Any]) -> None
+
+    """
+    `buffer_secs` determines the max time a sample will be buffered for
+    `frequency` determines the number of samples to take per second (Hz)
+    """
+
+    global _scheduler
+
+    if _scheduler is not None:
+        logger.debug("profiling is already setup")
+        return
+
+    if not PY33:
+        logger.warning("profiling is only supported on Python >= 3.3")
+        return
+
+    frequency = 101
+
+    if is_module_patched("threading") or is_module_patched("_thread"):
+        # If gevent has patched the threading modules then we cannot rely on
+        # them to spawn a native thread for sampling.
+        # Instead we default to the GeventScheduler which is capable of
+        # spawning native threads within gevent.
+        default_profiler_mode = GeventScheduler.mode
+    else:
+        default_profiler_mode = ThreadScheduler.mode
+
+    profiler_mode = options["_experiments"].get("profiler_mode", default_profiler_mode)
+
+    if (
+        profiler_mode == ThreadScheduler.mode
+        # for legacy reasons, we'll keep supporting sleep mode for this scheduler
+        or profiler_mode == "sleep"
+    ):
+        _scheduler = ThreadScheduler(frequency=frequency)
+    elif profiler_mode == GeventScheduler.mode:
+        try:
+            _scheduler = GeventScheduler(frequency=frequency)
+        except ImportError:
+            raise ValueError("Profiler mode: {} is not available".format(profiler_mode))
+    else:
+        raise ValueError("Unknown profiler mode: {}".format(profiler_mode))
+
+    _scheduler.setup()
+
+    atexit.register(teardown_profiler)
+
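
    `setup_profiler` is driven by the client options; enabling it end to end looks roughly like this (a sketch — both keys live under `_experiments` while profiling is experimental, and the DSN is a placeholder):

        import sentry_sdk

        sentry_sdk.init(
            dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder DSN
            traces_sample_rate=1.0,
            _experiments={
                # fraction of sampled transactions that also get a profile
                "profiles_sample_rate": 1.0,
                # optional; defaults to "thread", or "gevent" under a patched runtime
                "profiler_mode": "thread",
            },
        )
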
+
+def teardown_profiler():
+    # type: () -> None
+
+    global _scheduler
+
+    if _scheduler is not None:
+        _scheduler.teardown()
+
+    _scheduler = None
+
+
+# We want to impose a stack depth limit so that samples aren't too large.
+MAX_STACK_DEPTH = 128
+
+
+def extract_stack(
+    frame,  # type: Optional[FrameType]
+    cwd,  # type: str
+    prev_cache=None,  # type: Optional[Tuple[RawStackId, RawStack, Deque[FrameType]]]
+    max_stack_depth=MAX_STACK_DEPTH,  # type: int
+):
+    # type: (...) -> Tuple[RawStackId, RawStack, Deque[FrameType]]
+    """
+    Extracts the stack starting at the specified frame. The extracted stack
+    assumes the specified frame is the top of the stack, and works back
+    to the bottom of the stack.
+
+    In the event that the stack is more than `MAX_STACK_DEPTH` frames deep,
+    only the first `MAX_STACK_DEPTH` frames will be returned.
+    """
+
+    frames = deque(maxlen=max_stack_depth)  # type: Deque[FrameType]
+
+    while frame is not None:
+        frames.append(frame)
+        frame = frame.f_back
+
+    if prev_cache is None:
+        stack = tuple(extract_frame(frame, cwd) for frame in frames)
+    else:
+        _, prev_stack, prev_frames = prev_cache
+        prev_depth = len(prev_frames)
+        depth = len(frames)
+
+        # We want to match the frame found in this sample to the frames found in the
+        # previous sample. If they are the same (using the `is` operator), we can
+        # skip the expensive work of extracting the frame information and reuse what
+        # we extracted during the last sample.
+        #
+        # Keep in mind that the stack is ordered from the innermost frame
+        # to the outermost frame, so be careful with the indexing.
+        stack = tuple(
+            prev_stack[i]
+            if i >= 0 and frame is prev_frames[i]
+            else extract_frame(frame, cwd)
+            for i, frame in zip(range(prev_depth - depth, prev_depth), frames)
+        )
+
+    # Instead of mapping the stack into frame ids and hashing
+    # that as a tuple, we can directly hash the stack.
+    # This saves us from having to generate yet another list.
+    # Additionally, using the stack as the key directly is
+    # costly because the stack can be large, so we pre-hash
+    # the stack, and use the hash as the key as this will be
+    # needed a few times to improve performance.
+    #
+    # To reduce the likelihood of hash collisions, we include
+    # the stack depth. This means that only stacks of the same
+    # depth can suffer from hash collisions.
+    stack_id = len(stack), hash(stack)
+
+    return stack_id, stack, frames
+
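
    The `prev_cache` round-trip is the important part: feeding the previous result back in lets identical frames (compared with `is`) skip re-extraction. A minimal sketch, assuming this module is importable:

        import os
        import sys

        from sentry_sdk.profiler import extract_stack

        cwd = os.getcwd()
        frame = sys._getframe()

        # First sample: every frame is extracted from scratch.
        cache = extract_stack(frame, cwd)

        # Subsequent samples reuse entries for frames that are still alive.
        stack_id, stack, frames = extract_stack(frame, cwd, prev_cache=cache)
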
+
+def extract_frame(frame, cwd):
+    # type: (FrameType, str) -> RawFrame
+    abs_path = frame.f_code.co_filename
+
+    try:
+        module = frame.f_globals["__name__"]
+    except Exception:
+        module = None
+
+    # namedtuples can be many times slower when initializing
+    # and accessing attributes, so we opt to use a plain tuple here instead
+    return (
+        # This originally was `os.path.abspath(abs_path)` but that had
+        # a large performance overhead.
+        #
+        # According to docs, this is equivalent to
+        # `os.path.normpath(os.path.join(os.getcwd(), path))`.
+        # The `os.getcwd()` call is slow here, so we precompute it.
+        #
+        # Additionally, since we are using normalized path already,
+        # we skip calling `os.path.normpath` entirely.
+        os.path.join(cwd, abs_path),
+        module,
+        filename_for_module(module, abs_path) or None,
+        get_frame_name(frame),
+        frame.f_lineno,
+    )
+
+
+if PY311:
+
+    def get_frame_name(frame):
+        # type: (FrameType) -> str
+        return frame.f_code.co_qualname  # type: ignore
+
+else:
+
+    def get_frame_name(frame):
+        # type: (FrameType) -> str
+
+        f_code = frame.f_code
+        co_varnames = f_code.co_varnames
+
+        # co_name only contains the frame name.  If the frame was a method,
+        # the class name will NOT be included.
+        name = f_code.co_name
+
+        # if it was a method, we can get the class name by inspecting
+        # the f_locals for the `self` argument
+        try:
+            if (
+                # the co_varnames start with the frame's positional arguments
+                # and we expect the first to be `self` if it's an instance method
+                co_varnames
+                and co_varnames[0] == "self"
+                and "self" in frame.f_locals
+            ):
+                for cls in frame.f_locals["self"].__class__.__mro__:
+                    if name in cls.__dict__:
+                        return "{}.{}".format(cls.__name__, name)
+        except AttributeError:
+            pass
+
+        # if it was a class method, (decorated with `@classmethod`)
+        # we can get the class name by inspecting the f_locals for the `cls` argument
+        try:
+            if (
+                # the co_varnames start with the frame's positional arguments
+                # and we expect the first to be `cls` if it's a class method
+                co_varnames
+                and co_varnames[0] == "cls"
+                and "cls" in frame.f_locals
+            ):
+                for cls in frame.f_locals["cls"].__mro__:
+                    if name in cls.__dict__:
+                        return "{}.{}".format(cls.__name__, name)
+        except AttributeError:
+            pass
+
+        # nothing we can do if it is a staticmethod (decorated with @staticmethod)
+
+        # we've done all we can, time to give up and return what we have
+        return name
+
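
    A small example of what the fallback heuristics recover on pre-3.11 interpreters (on 3.11+, `co_qualname` produces the same qualified name directly):

        import sys

        from sentry_sdk.profiler import get_frame_name

        class Greeter:
            def hello(self):
                # `self` is the first positional argument, so the class is recovered
                return get_frame_name(sys._getframe())

            @classmethod
            def greet_all(cls):
                # same idea via the `cls` argument
                return get_frame_name(sys._getframe())

        print(Greeter().hello())    # "Greeter.hello"
        print(Greeter.greet_all())  # "Greeter.greet_all"
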
+
+MAX_PROFILE_DURATION_NS = int(3e10)  # 30 seconds
+
+
+class Profile(object):
+    def __init__(
+        self,
+        scheduler,  # type: Scheduler
+        transaction,  # type: sentry_sdk.tracing.Transaction
+        hub=None,  # type: Optional[sentry_sdk.Hub]
+    ):
+        # type: (...) -> None
+        self.scheduler = scheduler
+        self.transaction = transaction
+        self.hub = hub
+        self.active_thread_id = None  # type: Optional[int]
+        self.start_ns = 0  # type: int
+        self.stop_ns = 0  # type: int
+        self.active = False  # type: bool
+        self.event_id = uuid.uuid4().hex  # type: str
+
+        self.indexed_frames = {}  # type: Dict[RawFrame, int]
+        self.indexed_stacks = {}  # type: Dict[RawStackId, int]
+        self.frames = []  # type: List[ProcessedFrame]
+        self.stacks = []  # type: List[ProcessedStack]
+        self.samples = []  # type: List[ProcessedSample]
+
+        transaction._profile = self
+
+    def get_profile_context(self):
+        # type: () -> ProfileContext
+        return {"profile_id": self.event_id}
+
+    def __enter__(self):
+        # type: () -> None
+        hub = self.hub or sentry_sdk.Hub.current
+
+        _, scope = hub._stack[-1]
+        old_profile = scope.profile
+        scope.profile = self
+
+        self._context_manager_state = (hub, scope, old_profile)
+
+        self.start_ns = nanosecond_time()
+        self.scheduler.start_profiling(self)
+
+    def __exit__(self, ty, value, tb):
+        # type: (Optional[Any], Optional[Any], Optional[Any]) -> None
+        self.scheduler.stop_profiling(self)
+        self.stop_ns = nanosecond_time()
+
+        _, scope, old_profile = self._context_manager_state
+        del self._context_manager_state
+
+        scope.profile = old_profile
+
+    def write(self, ts, sample):
+        # type: (int, RawSample) -> None
+        if ts < self.start_ns:
+            return
+
+        offset = ts - self.start_ns
+        if offset > MAX_PROFILE_DURATION_NS:
+            return
+
+        elapsed_since_start_ns = str(offset)
+
+        for tid, (stack_id, stack) in sample:
+            # Check if the stack is indexed first, this lets us skip
+            # indexing frames if it's not necessary
+            if stack_id not in self.indexed_stacks:
+                for frame in stack:
+                    if frame not in self.indexed_frames:
+                        self.indexed_frames[frame] = len(self.indexed_frames)
+                        self.frames.append(
+                            {
+                                "abs_path": frame[0],
+                                "module": frame[1],
+                                "filename": frame[2],
+                                "function": frame[3],
+                                "lineno": frame[4],
+                            }
+                        )
+
+                self.indexed_stacks[stack_id] = len(self.indexed_stacks)
+                self.stacks.append([self.indexed_frames[frame] for frame in stack])
+
+            self.samples.append(
+                {
+                    "elapsed_since_start_ns": elapsed_since_start_ns,
+                    "thread_id": tid,
+                    "stack_id": self.indexed_stacks[stack_id],
+                }
+            )
+
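
    The indexing here is plain interning: each distinct frame and stack is stored once, and samples only carry integer indices into the flat lists. The same pattern in isolation (illustrative only):

        indexed_frames = {}  # frame -> index into a flat frames list

        def intern(frame):
            # repeated frames map to the same index, so storage stays flat
            if frame not in indexed_frames:
                indexed_frames[frame] = len(indexed_frames)
            return indexed_frames[frame]

        assert intern(("a.py", None, "a.py", "main", 1)) == 0
        assert intern(("b.py", None, "b.py", "work", 9)) == 1
        assert intern(("a.py", None, "a.py", "main", 1)) == 0  # deduplicated
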
+    def process(self):
+        # type: () -> ProcessedProfile
+
+        # This collects the thread metadata at the end of a profile. Doing it
+        # this way means that any threads that terminate before the profile ends
+        # will not have any metadata associated with them.
+        thread_metadata = {
+            str(thread.ident): {
+                "name": str(thread.name),
+            }
+            for thread in threading.enumerate()
+        }  # type: Dict[str, ProcessedThreadMetadata]
+
+        return {
+            "frames": self.frames,
+            "stacks": self.stacks,
+            "samples": self.samples,
+            "thread_metadata": thread_metadata,
+        }
+
+    def to_json(self, event_opt, options):
+        # type: (Any, Dict[str, Any]) -> Dict[str, Any]
+        profile = self.process()
+
+        handle_in_app_impl(
+            profile["frames"], options["in_app_exclude"], options["in_app_include"]
+        )
+
+        return {
+            "environment": event_opt.get("environment"),
+            "event_id": self.event_id,
+            "platform": "python",
+            "profile": profile,
+            "release": event_opt.get("release", ""),
+            "timestamp": event_opt["timestamp"],
+            "version": "1",
+            "device": {
+                "architecture": platform.machine(),
+            },
+            "os": {
+                "name": platform.system(),
+                "version": platform.release(),
+            },
+            "runtime": {
+                "name": platform.python_implementation(),
+                "version": platform.python_version(),
+            },
+            "transactions": [
+                {
+                    "id": event_opt["event_id"],
+                    "name": self.transaction.name,
+                    # The transaction is started before the profile, and this
+                    # is the transaction's start time relative to the profile,
+                    # so we hardcode it to 0 until the profile can be started
+                    # before the transaction.
+                    "relative_start_ns": "0",
+                    # use the duration of the profile instead of the transaction
+                    # because we end the transaction after the profile
+                    "relative_end_ns": str(self.stop_ns - self.start_ns),
+                    "trace_id": self.transaction.trace_id,
+                    "active_thread_id": str(
+                        self.transaction._active_thread_id
+                        if self.active_thread_id is None
+                        else self.active_thread_id
+                    ),
+                }
+            ],
+        }
+
+
+class Scheduler(object):
+    mode = "unknown"
+
+    def __init__(self, frequency):
+        # type: (int) -> None
+        self.interval = 1.0 / frequency
+
+        self.sampler = self.make_sampler()
+
+        self.new_profiles = deque()  # type: Deque[Profile]
+        self.active_profiles = set()  # type: Set[Profile]
+
+    def __enter__(self):
+        # type: () -> Scheduler
+        self.setup()
+        return self
+
+    def __exit__(self, ty, value, tb):
+        # type: (Optional[Any], Optional[Any], Optional[Any]) -> None
+        self.teardown()
+
+    def setup(self):
+        # type: () -> None
+        raise NotImplementedError
+
+    def teardown(self):
+        # type: () -> None
+        raise NotImplementedError
+
+    def start_profiling(self, profile):
+        # type: (Profile) -> None
+        profile.active = True
+        self.new_profiles.append(profile)
+
+    def stop_profiling(self, profile):
+        # type: (Profile) -> None
+        profile.active = False
+
+    def make_sampler(self):
+        # type: () -> Callable[..., None]
+        cwd = os.getcwd()
+
+        # In Python 3 we can use the `nonlocal` keyword to rebind the value,
+        # but this is not possible in Python2. To get around this, we wrap
+        # the value in a list to allow updating this value each sample.
+        last_sample = [
+            {}
+        ]  # type: List[Dict[int, Tuple[RawStackId, RawStack, Deque[FrameType]]]]
+
+        def _sample_stack(*args, **kwargs):
+            # type: (*Any, **Any) -> None
+            """
+            Take a sample of the stack on all the threads in the process.
+            This should be called at a regular interval to collect samples.
+            """
+            # no profiles taking place, so we can stop early
+            if not self.new_profiles and not self.active_profiles:
+                # make sure to clear the cache if we're not profiling so we don't
+                # keep a reference to the last stack of frames around
+                last_sample[0] = {}
+                return
+
+            # This is the number of profiles we want to pop off.
+            # It's possible another thread adds a new profile to
+            # the list and we spend longer than we want inside
+            # the loop below.
+            #
+            # Also make sure to set this value before extracting
+            # frames so we do not write to any new profiles that
+            # were started after this point.
+            new_profiles = len(self.new_profiles)
+
+            now = nanosecond_time()
+
+            raw_sample = {
+                tid: extract_stack(frame, cwd, last_sample[0].get(tid))
+                for tid, frame in sys._current_frames().items()
+            }
+
+            # make sure to update the last sample so the cache has
+            # the most recent stack for better cache hits
+            last_sample[0] = raw_sample
+
+            sample = [
+                (str(tid), (stack_id, stack))
+                for tid, (stack_id, stack, _) in raw_sample.items()
+            ]
+
+            # Move the new profiles into the active_profiles set.
+            #
+            # We cannot add directly to the active_profiles set
+            # in `start_profiling` because it is called from other
+            # threads, which can cause a RuntimeError when the
+            # set size changes during iteration without a lock.
+            #
+            # We also want to avoid using a lock here so threads
+            # that are starting profiles are not blocked until it
+            # can acquire the lock.
+            for _ in range(new_profiles):
+                self.active_profiles.add(self.new_profiles.popleft())
+
+            inactive_profiles = []
+
+            for profile in self.active_profiles:
+                if profile.active:
+                    profile.write(now, sample)
+                else:
+                    # If a profile is marked inactive, we buffer it
+                    # to `inactive_profiles` so it can be removed.
+                    # We cannot remove it here as it would result
+                    # in a RuntimeError.
+                    inactive_profiles.append(profile)
+
+            for profile in inactive_profiles:
+                self.active_profiles.remove(profile)
+
+        return _sample_stack
+
+
+class ThreadScheduler(Scheduler):
+    """
+    This scheduler is based on running a daemon thread that will call
+    the sampler at a regular interval.
+    """
+
+    mode = "thread"
+    name = "sentry.profiler.ThreadScheduler"
+
+    def __init__(self, frequency):
+        # type: (int) -> None
+        super(ThreadScheduler, self).__init__(frequency=frequency)
+
+        # used to signal to the thread that it should stop
+        self.event = threading.Event()
+
+        # make sure the thread is a daemon here otherwise this
+        # can keep the application running after other threads
+        # have exited
+        self.thread = threading.Thread(name=self.name, target=self.run, daemon=True)
+
+    def setup(self):
+        # type: () -> None
+        self.thread.start()
+
+    def teardown(self):
+        # type: () -> None
+        self.event.set()
+        self.thread.join()
+
+    def run(self):
+        # type: () -> None
+        last = time.perf_counter()
+
+        while True:
+            if self.event.is_set():
+                break
+
+            self.sampler()
+
+            # some time may have elapsed since the last time
+            # we sampled, so we need to account for that and
+            # not sleep for too long
+            elapsed = time.perf_counter() - last
+            if elapsed < self.interval:
+                time.sleep(self.interval - elapsed)
+
+            # after sleeping, make sure to take the current
+            # timestamp so we can use it next iteration
+            last = time.perf_counter()
+
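
    Since the base class provides `__enter__`/`__exit__`, the scheduler can be exercised standalone (a sketch; in practice `setup_profiler` owns the instance):

        from sentry_sdk.profiler import ThreadScheduler

        # samples ~101 times per second on a daemon thread until teardown
        with ThreadScheduler(frequency=101) as scheduler:
            pass  # profiles registered via start_profiling() would be sampled here
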
+
+class GeventScheduler(Scheduler):
+    """
+    This scheduler is based on the thread scheduler but adapted to work with
+    gevent. When using gevent, it may monkey patch the threading modules
+    (`threading` and `_thread`). This results in the use of greenlets instead
+    of native threads.
+
+    This is an issue because the sampler CANNOT run in a greenlet because
+    1. Other greenlets doing sync work will prevent the sampler from running
+    2. The greenlet runs in the same thread as other greenlets so when taking
+       a sample, other greenlets will have been evicted from the thread. This
+       results in a sample containing only the sampler's code.
+    """
+
+    mode = "gevent"
+    name = "sentry.profiler.GeventScheduler"
+
+    def __init__(self, frequency):
+        # type: (int) -> None
+
+        # This can throw an ImportError that must be caught if `gevent` is
+        # not installed.
+        from gevent.threadpool import ThreadPool  # type: ignore
+
+        super(GeventScheduler, self).__init__(frequency=frequency)
+
+        # used to signal to the thread that it should stop
+        self.event = threading.Event()
+
+        # Using gevent's ThreadPool allows us to bypass greenlets and spawn
+        # native threads.
+        self.pool = ThreadPool(1)
+
+    def setup(self):
+        # type: () -> None
+        self.pool.spawn(self.run)
+
+    def teardown(self):
+        # type: () -> None
+        self.event.set()
+        self.pool.join()
+
+    def run(self):
+        # type: () -> None
+        last = time.perf_counter()
+
+        while True:
+            if self.event.is_set():
+                break
+
+            self.sampler()
+
+            # some time may have elapsed since the last time
+            # we sampled, so we need to account for that and
+            # not sleep for too long
+            elapsed = time.perf_counter() - last
+            if elapsed < self.interval:
+                time.sleep(self.interval - elapsed)
+
+            # after sleeping, make sure to take the current
+            # timestamp so we can use it next iteration
+            last = time.perf_counter()
+
+
+def _should_profile(transaction, hub):
+    # type: (sentry_sdk.tracing.Transaction, sentry_sdk.Hub) -> bool
+
+    # The corresponding transaction was not sampled,
+    # so don't generate a profile for it.
+    if not transaction.sampled:
+        return False
+
+    # The profiler hasn't been properly initialized.
+    if _scheduler is None:
+        return False
+
+    client = hub.client
+
+    # The client is None, so we can't get the sample rate.
+    if client is None:
+        return False
+
+    options = client.options
+    profiles_sample_rate = options["_experiments"].get("profiles_sample_rate")
+
+    # The profiles_sample_rate option was not set, so profiling
+    # was never enabled.
+    if profiles_sample_rate is None:
+        return False
+
+    return random.random() < float(profiles_sample_rate)
+
+
+@contextmanager
+def start_profiling(transaction, hub=None):
+    # type: (sentry_sdk.tracing.Transaction, Optional[sentry_sdk.Hub]) -> Generator[None, None, None]
+    hub = hub or sentry_sdk.Hub.current
+
+    # if profiling was not enabled, this should be a noop
+    if _should_profile(transaction, hub):
+        assert _scheduler is not None
+        with Profile(_scheduler, transaction, hub):
+            yield
+    else:
+        yield
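
    Putting it together, the context manager degrades to a no-op unless the transaction was sampled and profiling is configured. A usage sketch (`run_batch_job` is a hypothetical workload):

        import sentry_sdk
        from sentry_sdk.profiler import start_profiling

        with sentry_sdk.start_transaction(op="task", name="batch-job") as transaction:
            with start_profiling(transaction):
                run_batch_job()  # hypothetical workload being profiled
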
diff --git a/sentry_sdk/scope.py b/sentry_sdk/scope.py
index c721b56505..7d9b4f5177 100644
--- a/sentry_sdk/scope.py
+++ b/sentry_sdk/scope.py
@@ -5,6 +5,8 @@
 from sentry_sdk._functools import wraps
 from sentry_sdk._types import MYPY
 from sentry_sdk.utils import logger, capture_internal_exceptions
+from sentry_sdk.tracing import Transaction
+from sentry_sdk.attachments import Attachment
 
 if MYPY:
     from typing import Any
@@ -25,8 +27,9 @@
         Type,
     )
 
+    from sentry_sdk.profiler import Profile
     from sentry_sdk.tracing import Span
-    from sentry_sdk.sessions import Session
+    from sentry_sdk.session import Session
 
     F = TypeVar("F", bound=Callable[..., Any])
     T = TypeVar("T")
@@ -76,7 +79,10 @@ class Scope(object):
         "_level",
         "_name",
         "_fingerprint",
+        # note that for legacy reasons, _transaction is the transaction *name*,
+        # not a Transaction object (the object is stored in _span)
         "_transaction",
+        "_transaction_info",
         "_user",
         "_tags",
         "_contexts",
@@ -87,7 +93,9 @@ class Scope(object):
         "_should_capture",
         "_span",
         "_session",
+        "_attachments",
         "_force_auto_session_tracking",
+        "_profile",
     )
 
     def __init__(self):
@@ -104,11 +112,13 @@ def clear(self):
         self._level = None  # type: Optional[str]
         self._fingerprint = None  # type: Optional[List[str]]
         self._transaction = None  # type: Optional[str]
+        self._transaction_info = {}  # type: Dict[str, str]
         self._user = None  # type: Optional[Dict[str, Any]]
 
         self._tags = {}  # type: Dict[str, Any]
         self._contexts = {}  # type: Dict[str, Dict[str, Any]]
         self._extras = {}  # type: Dict[str, Any]
+        self._attachments = []  # type: List[Attachment]
 
         self.clear_breadcrumbs()
         self._should_capture = True
@@ -117,6 +127,8 @@ def clear(self):
         self._session = None  # type: Optional[Session]
         self._force_auto_session_tracking = None  # type: Optional[bool]
 
+        self._profile = None  # type: Optional[Profile]
+
     @_attr_setter
     def level(self, value):
         # type: (Optional[str]) -> None
@@ -134,23 +146,70 @@ def fingerprint(self, value):
         """When set this overrides the default fingerprint."""
         self._fingerprint = value
 
-    @_attr_setter
+    @property
+    def transaction(self):
+        # type: () -> Any
+        # would be type: () -> Optional[Transaction], see https://github.com/python/mypy/issues/3004
+        """Return the transaction (root span) in the scope, if any."""
+
+        # there is no span/transaction on the scope
+        if self._span is None:
+            return None
+
+        # there is an orphan span on the scope
+        if self._span.containing_transaction is None:
+            return None
+
+        # there is either a transaction (which is its own containing
+        # transaction) or a non-orphan span on the scope
+        return self._span.containing_transaction
+
+    @transaction.setter
     def transaction(self, value):
-        # type: (Optional[str]) -> None
-        """When set this forces a specific transaction name to be set."""
+        # type: (Any) -> None
+        # would be type: (Optional[str]) -> None, see https://github.com/python/mypy/issues/3004
+        """When set this forces a specific transaction name to be set.
+
+        Deprecated: use set_transaction_name instead."""
+
+        # XXX: the docstring above is misleading. The implementation of
+        # apply_to_event prefers an existing value of event.transaction over
+        # anything set in the scope.
+        # XXX: note that with the introduction of the Scope.transaction getter,
+        # there is a semantic and type mismatch between getter and setter. The
+        # getter returns a Transaction, the setter sets a transaction name.
+        # Without breaking version compatibility, we could make the setter set a
+        # transaction name or transaction (self._span) depending on the type of
+        # the value argument.
+
+        logger.warning(
+            "Assigning to scope.transaction directly is deprecated: use scope.set_transaction_name() instead."
+        )
         self._transaction = value
-        span = self._span
-        if span:
-            span.transaction = value
+        if self._span and self._span.containing_transaction:
+            self._span.containing_transaction.name = value
+
+    def set_transaction_name(self, name, source=None):
+        # type: (str, Optional[str]) -> None
+        """Set the transaction name and optionally the transaction source."""
+        self._transaction = name
+
+        if self._span and self._span.containing_transaction:
+            self._span.containing_transaction.name = name
+            if source:
+                self._span.containing_transaction.source = source
+
+        if source:
+            self._transaction_info["source"] = source
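
    The new method replaces direct assignment to `scope.transaction`; a usage sketch:

        import sentry_sdk

        with sentry_sdk.configure_scope() as scope:
            # preferred over `scope.transaction = ...`, which now logs a deprecation warning
            scope.set_transaction_name("GET /users/{user_id}", source="route")
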
 
     @_attr_setter
     def user(self, value):
-        # type: (Dict[str, Any]) -> None
+        # type: (Optional[Dict[str, Any]]) -> None
         """When set a specific user is bound to the scope. Deprecated in favor of set_user."""
         self.set_user(value)
 
     def set_user(self, value):
-        # type: (Dict[str, Any]) -> None
+        # type: (Optional[Dict[str, Any]]) -> None
         """Sets a user for the scope."""
         self._user = value
         if self._session is not None:
@@ -159,17 +218,30 @@ def set_user(self, value):
     @property
     def span(self):
         # type: () -> Optional[Span]
-        """Get/set current tracing span."""
+        """Get/set current tracing span or transaction."""
         return self._span
 
     @span.setter
     def span(self, span):
         # type: (Optional[Span]) -> None
         self._span = span
-        if span is not None:
-            span_transaction = span.transaction
-            if span_transaction:
-                self._transaction = span_transaction
+        # XXX: this differs from the implementation in JS; there, Scope.setSpan
+        # does not set Scope._transactionName.
+        if isinstance(span, Transaction):
+            transaction = span
+            if transaction.name:
+                self._transaction = transaction.name
+
+    @property
+    def profile(self):
+        # type: () -> Optional[Profile]
+        return self._profile
+
+    @profile.setter
+    def profile(self, profile):
+        # type: (Optional[Profile]) -> None
+
+        self._profile = profile
 
     def set_tag(
         self,
@@ -190,7 +262,7 @@ def remove_tag(
     def set_context(
         self,
         key,  # type: str
-        value,  # type: Any
+        value,  # type: Dict[str, Any]
     ):
         # type: (...) -> None
         """Binds a context at a certain key to a specific value."""
@@ -224,6 +296,26 @@ def clear_breadcrumbs(self):
         """Clears breadcrumb buffer."""
         self._breadcrumbs = deque()  # type: Deque[Breadcrumb]
 
+    def add_attachment(
+        self,
+        bytes=None,  # type: Optional[bytes]
+        filename=None,  # type: Optional[str]
+        path=None,  # type: Optional[str]
+        content_type=None,  # type: Optional[str]
+        add_to_transactions=False,  # type: bool
+    ):
+        # type: (...) -> None
+        """Adds an attachment to future events sent."""
+        self._attachments.append(
+            Attachment(
+                bytes=bytes,
+                path=path,
+                filename=filename,
+                content_type=content_type,
+                add_to_transactions=add_to_transactions,
+            )
+        )
+
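
    A usage sketch (the file path is hypothetical; attachments are kept off transactions unless `add_to_transactions=True`):

        import sentry_sdk

        with sentry_sdk.configure_scope() as scope:
            scope.add_attachment(bytes=b"debug dump", filename="dump.txt")
            scope.add_attachment(path="/tmp/app.log")  # hypothetical file path
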
     def add_event_processor(
         self, func  # type: EventProcessor
     ):
@@ -283,11 +375,24 @@ def _drop(event, cause, ty):
             logger.info("%s (%s) dropped event (%s)", ty, cause, event)
             return None
 
+        is_transaction = event.get("type") == "transaction"
+
+        # put all attachments into the hint. This lets callbacks play around
+        # with attachments. We also later pull this out of the hint when we
+        # create the envelope.
+        attachments_to_send = hint.get("attachments") or []
+        for attachment in self._attachments:
+            if not is_transaction or attachment.add_to_transactions:
+                attachments_to_send.append(attachment)
+        hint["attachments"] = attachments_to_send
+
         if self._level is not None:
             event["level"] = self._level
 
-        if event.get("type") != "transaction":
-            event.setdefault("breadcrumbs", []).extend(self._breadcrumbs)
+        if not is_transaction:
+            event.setdefault("breadcrumbs", {}).setdefault("values", []).extend(
+                self._breadcrumbs
+            )
 
         if event.get("user") is None and self._user is not None:
             event["user"] = self._user
@@ -295,6 +400,9 @@ def _drop(event, cause, ty):
         if event.get("transaction") is None and self._transaction is not None:
             event["transaction"] = self._transaction
 
+        if event.get("transaction_info") is None and self._transaction_info is not None:
+            event["transaction_info"] = self._transaction_info
+
         if event.get("fingerprint") is None and self._fingerprint is not None:
             event["fingerprint"] = self._fingerprint
 
@@ -338,6 +446,8 @@ def update_from_scope(self, scope):
             self._fingerprint = scope._fingerprint
         if scope._transaction is not None:
             self._transaction = scope._transaction
+        if scope._transaction_info is not None:
+            self._transaction_info.update(scope._transaction_info)
         if scope._user is not None:
             self._user = scope._user
         if scope._tags:
@@ -350,6 +460,10 @@ def update_from_scope(self, scope):
             self._breadcrumbs.extend(scope._breadcrumbs)
         if scope._span:
             self._span = scope._span
+        if scope._attachments:
+            self._attachments.extend(scope._attachments)
+        if scope._profile:
+            self._profile = scope._profile
 
     def update_from_kwargs(
         self,
@@ -382,6 +496,7 @@ def __copy__(self):
         rv._name = self._name
         rv._fingerprint = self._fingerprint
         rv._transaction = self._transaction
+        rv._transaction_info = dict(self._transaction_info)
         rv._user = self._user
 
         rv._tags = dict(self._tags)
@@ -396,6 +511,9 @@ def __copy__(self):
         rv._span = self._span
         rv._session = self._session
         rv._force_auto_session_tracking = self._force_auto_session_tracking
+        rv._attachments = list(self._attachments)
+
+        rv._profile = self._profile
 
         return rv
 
diff --git a/sentry_sdk/serializer.py b/sentry_sdk/serializer.py
index 3940947553..c1631e47f4 100644
--- a/sentry_sdk/serializer.py
+++ b/sentry_sdk/serializer.py
@@ -1,4 +1,5 @@
 import sys
+import math
 
 from datetime import datetime
 
@@ -6,29 +7,44 @@
     AnnotatedValue,
     capture_internal_exception,
     disable_capture_event,
+    format_timestamp,
+    json_dumps,
     safe_repr,
     strip_string,
-    format_timestamp,
 )
 
-from sentry_sdk._compat import text_type, PY2, string_types, number_types, iteritems
+import sentry_sdk.utils
+
+from sentry_sdk._compat import (
+    text_type,
+    PY2,
+    string_types,
+    number_types,
+    iteritems,
+    binary_sequence_types,
+)
 
 from sentry_sdk._types import MYPY
 
 if MYPY:
+    from datetime import timedelta
+
     from types import TracebackType
 
     from typing import Any
+    from typing import Callable
+    from typing import ContextManager
     from typing import Dict
     from typing import List
     from typing import Optional
-    from typing import Callable
-    from typing import Union
-    from typing import ContextManager
+    from typing import Tuple
     from typing import Type
+    from typing import Union
 
     from sentry_sdk._types import NotImplementedType, Event
 
+    Span = Dict[str, Any]
+
     ReprProcessor = Callable[[Any, Dict[str, Any]], Union[NotImplementedType, str]]
     Segment = Union[str, int]
 
@@ -36,21 +52,32 @@
 if PY2:
     # Importing ABCs from collections is deprecated, and will stop working in 3.8
     # https://github.com/python/cpython/blob/master/Lib/collections/__init__.py#L49
-    from collections import Mapping, Sequence
+    from collections import Mapping, Sequence, Set
 
-    serializable_str_types = string_types
+    serializable_str_types = string_types + binary_sequence_types
 
 else:
     # New in 3.3
     # https://docs.python.org/3/library/collections.abc.html
-    from collections.abc import Mapping, Sequence
+    from collections.abc import Mapping, Sequence, Set
 
     # Bytes are technically not strings in Python 3, but we can serialize them
-    serializable_str_types = (str, bytes)
+    serializable_str_types = string_types + binary_sequence_types
+
+
+# Maximum length of JSON-serialized event payloads that can be safely sent
+# before the server may reject the event due to its size. This is not intended
+# to reflect actual values defined server-side, but rather only be an upper
+# bound for events sent by the SDK.
+#
+# Can be overwritten if you want to send more bytes, e.g. with a custom server.
+# When changing this, keep in mind that events may be a little bit larger than
+# this value due to attached metadata, so keep the number conservative.
+MAX_EVENT_BYTES = 10**6
 
 MAX_DATABAG_DEPTH = 5
 MAX_DATABAG_BREADTH = 10
-CYCLE_MARKER = u""
+CYCLE_MARKER = ""
 
 
 global_repr_processors = []  # type: List[ReprProcessor]
@@ -93,11 +120,12 @@ def __exit__(
         self._ids.pop(id(self._objs.pop()), None)
 
 
-def serialize(event, **kwargs):
-    # type: (Event, **Any) -> Event
+def serialize(event, smart_transaction_trimming=False, **kwargs):
+    # type: (Event, bool, **Any) -> Event
     memo = Memo()
     path = []  # type: List[Segment]
     meta_stack = []  # type: List[Dict[str, Any]]
+    span_description_bytes = []  # type: List[int]
 
     def _annotate(**meta):
         # type: (**Any) -> None
@@ -167,8 +195,8 @@ def _is_databag():
             if p0 == "request" and path[1] == "data":
                 return True
 
-            if p0 == "breadcrumbs":
-                path[1]
+            if p0 == "breadcrumbs" and path[1] == "values":
+                path[2]
                 return True
 
             if p0 == "extra":
@@ -207,7 +235,7 @@ def _serialize_node(
             capture_internal_exception(sys.exc_info())
 
             if is_databag:
-                return u""
+                return ""
 
             return None
         finally:
@@ -252,8 +280,18 @@ def _serialize_node_impl(
                 if result is not NotImplemented:
                     return _flatten_annotated(result)
 
+        sentry_repr = getattr(type(obj), "__sentry_repr__", None)
+
         if obj is None or isinstance(obj, (bool, number_types)):
-            return obj if not should_repr_strings else safe_repr(obj)
+            if should_repr_strings or (
+                isinstance(obj, float) and (math.isinf(obj) or math.isnan(obj))
+            ):
+                return safe_repr(obj)
+            else:
+                return obj
+
+        elif callable(sentry_repr):
+            return sentry_repr(obj)
 
         elif isinstance(obj, datetime):
             return (
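
    The `__sentry_repr__` hook is looked up on the type and called with the instance, so a plain method works. A sketch of opting a class into a custom serialized form:

        class ApiToken:
            def __init__(self, owner, secret):
                self.owner = owner
                self.secret = secret

            def __sentry_repr__(self):
                # takes precedence over repr-based serialization; the secret
                # never reaches the event payload
                return "ApiToken(owner=%r)" % self.owner
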
@@ -291,7 +329,9 @@ def _serialize_node_impl(
 
             return rv_dict
 
-        elif not isinstance(obj, serializable_str_types) and isinstance(obj, Sequence):
+        elif not isinstance(obj, serializable_str_types) and isinstance(
+            obj, (Set, Sequence)
+        ):
             rv_list = []
 
             for i, v in enumerate(obj):
@@ -317,20 +357,119 @@ def _serialize_node_impl(
         if should_repr_strings:
             obj = safe_repr(obj)
         else:
-            if isinstance(obj, bytes):
+            if isinstance(obj, bytes) or isinstance(obj, bytearray):
                 obj = obj.decode("utf-8", "replace")
 
             if not isinstance(obj, string_types):
                 obj = safe_repr(obj)
 
+        # Allow span descriptions to be longer than other strings.
+        #
+        # For database auto-instrumented spans, the description contains
+        # potentially long SQL queries that are most useful when not truncated.
+        # Because arbitrarily large events may be discarded by the server as a
+        # protection mechanism, we dynamically limit the description length
+        # later in _truncate_span_descriptions.
+        if (
+            smart_transaction_trimming
+            and len(path) == 3
+            and path[0] == "spans"
+            and path[-1] == "description"
+        ):
+            span_description_bytes.append(len(obj))
+            return obj
         return _flatten_annotated(strip_string(obj))
 
+    def _truncate_span_descriptions(serialized_event, event, excess_bytes):
+        # type: (Event, Event, int) -> None
+        """
+        Modifies serialized_event in place, trying to remove excess_bytes from
+        span descriptions. The original event is used read-only to access the
+        span timestamps (represented as RFC 3339-formatted strings in
+        serialized_event).
+
+        It uses heuristics to prioritize preserving the description of spans
+        that might be the most interesting ones in terms of understanding and
+        optimizing performance.
+        """
+        # When truncating a description, preserve a small prefix.
+        min_length = 10
+
+        def shortest_duration_longest_description_first(args):
+            # type: (Tuple[int, Span]) -> Tuple[timedelta, int]
+            i, serialized_span = args
+            span = event["spans"][i]
+            now = datetime.utcnow()
+            start = span.get("start_timestamp") or now
+            end = span.get("timestamp") or now
+            duration = end - start
+            description = serialized_span.get("description") or ""
+            return (duration, -len(description))
+
+        # Note: for simplicity we sort spans by exact duration and description
+        # length. If ever needed, we could have a more involved heuristic, e.g.
+        # replacing exact durations with "buckets" and/or looking at other span
+        # properties.
+        path.append("spans")
+        for i, span in sorted(
+            enumerate(serialized_event.get("spans") or []),
+            key=shortest_duration_longest_description_first,
+        ):
+            description = span.get("description") or ""
+            if len(description) <= min_length:
+                continue
+            excess_bytes -= len(description) - min_length
+            path.extend([i, "description"])
+            # Note: the last time we call strip_string we could preserve a few
+            # more bytes up to a total length of MAX_EVENT_BYTES. Since that's
+            # not strictly required, we leave it out for now for simplicity.
+            span["description"] = _flatten_annotated(
+                strip_string(description, max_length=min_length)
+            )
+            del path[-2:]
+            del meta_stack[len(path) + 1 :]
+
+            if excess_bytes <= 0:
+                break
+        path.pop()
+        del meta_stack[len(path) + 1 :]
+
     disable_capture_event.set(True)
     try:
         rv = _serialize_node(event, **kwargs)
         if meta_stack and isinstance(rv, dict):
             rv["_meta"] = meta_stack[0]
 
+        sum_span_description_bytes = sum(span_description_bytes)
+        if smart_transaction_trimming and sum_span_description_bytes > 0:
+            span_count = len(event.get("spans") or [])
+            # This is an upper bound of how many bytes all descriptions would
+            # consume if the usual string truncation in _serialize_node_impl
+            # would have taken place, not accounting for the metadata attached
+            # as event["_meta"].
+            descriptions_budget_bytes = span_count * sentry_sdk.utils.MAX_STRING_LENGTH
+
+            # If by not truncating descriptions we ended up with more bytes than
+            # per the usual string truncation, check if the event is too large
+            # and we need to truncate some descriptions.
+            #
+            # This is guarded with an if statement to avoid JSON-encoding the
+            # event unnecessarily.
+            if sum_span_description_bytes > descriptions_budget_bytes:
+                original_bytes = len(json_dumps(rv))
+                excess_bytes = original_bytes - MAX_EVENT_BYTES
+                if excess_bytes > 0:
+                    # Event is too large, will likely be discarded by the
+                    # server. Trim it down before sending.
+                    _truncate_span_descriptions(rv, event, excess_bytes)
+
+                    # Span descriptions truncated, set or reset _meta.
+                    #
+                    # We run the same code earlier because we want to account
+                    # for _meta when calculating original_bytes, the number of
+                    # bytes in the JSON-encoded event.
+                    if meta_stack and isinstance(rv, dict):
+                        rv["_meta"] = meta_stack[0]
         return rv
     finally:
         disable_capture_event.set(False)
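
    The flag is opt-in at the `serialize` call site; a minimal sketch with a toy transaction event:

        from sentry_sdk.serializer import serialize

        event = {
            "type": "transaction",
            "spans": [{"description": "SELECT * FROM users WHERE ..."}],
        }

        # span descriptions bypass the usual string truncation and are only
        # trimmed later if the serialized event would exceed MAX_EVENT_BYTES
        serialized = serialize(event, smart_transaction_trimming=True)
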
diff --git a/sentry_sdk/session.py b/sentry_sdk/session.py
new file mode 100644
index 0000000000..98a8c72cbb
--- /dev/null
+++ b/sentry_sdk/session.py
@@ -0,0 +1,174 @@
+import uuid
+from datetime import datetime
+
+from sentry_sdk._types import MYPY
+from sentry_sdk.utils import format_timestamp
+
+if MYPY:
+    from typing import Optional
+    from typing import Union
+    from typing import Any
+    from typing import Dict
+
+    from sentry_sdk._types import SessionStatus
+
+
+def _minute_trunc(ts):
+    # type: (datetime) -> datetime
+    return ts.replace(second=0, microsecond=0)
+
+
+def _make_uuid(
+    val,  # type: Union[str, uuid.UUID]
+):
+    # type: (...) -> uuid.UUID
+    if isinstance(val, uuid.UUID):
+        return val
+    return uuid.UUID(val)
+
+
+class Session(object):
+    def __init__(
+        self,
+        sid=None,  # type: Optional[Union[str, uuid.UUID]]
+        did=None,  # type: Optional[str]
+        timestamp=None,  # type: Optional[datetime]
+        started=None,  # type: Optional[datetime]
+        duration=None,  # type: Optional[float]
+        status=None,  # type: Optional[SessionStatus]
+        release=None,  # type: Optional[str]
+        environment=None,  # type: Optional[str]
+        user_agent=None,  # type: Optional[str]
+        ip_address=None,  # type: Optional[str]
+        errors=None,  # type: Optional[int]
+        user=None,  # type: Optional[Any]
+        session_mode="application",  # type: str
+    ):
+        # type: (...) -> None
+        if sid is None:
+            sid = uuid.uuid4()
+        if started is None:
+            started = datetime.utcnow()
+        if status is None:
+            status = "ok"
+        self.status = status
+        self.did = None  # type: Optional[str]
+        self.started = started
+        self.release = None  # type: Optional[str]
+        self.environment = None  # type: Optional[str]
+        self.duration = None  # type: Optional[float]
+        self.user_agent = None  # type: Optional[str]
+        self.ip_address = None  # type: Optional[str]
+        self.session_mode = session_mode  # type: str
+        self.errors = 0
+
+        self.update(
+            sid=sid,
+            did=did,
+            timestamp=timestamp,
+            duration=duration,
+            release=release,
+            environment=environment,
+            user_agent=user_agent,
+            ip_address=ip_address,
+            errors=errors,
+            user=user,
+        )
+
+    @property
+    def truncated_started(self):
+        # type: (...) -> datetime
+        return _minute_trunc(self.started)
+
+    def update(
+        self,
+        sid=None,  # type: Optional[Union[str, uuid.UUID]]
+        did=None,  # type: Optional[str]
+        timestamp=None,  # type: Optional[datetime]
+        started=None,  # type: Optional[datetime]
+        duration=None,  # type: Optional[float]
+        status=None,  # type: Optional[SessionStatus]
+        release=None,  # type: Optional[str]
+        environment=None,  # type: Optional[str]
+        user_agent=None,  # type: Optional[str]
+        ip_address=None,  # type: Optional[str]
+        errors=None,  # type: Optional[int]
+        user=None,  # type: Optional[Any]
+    ):
+        # type: (...) -> None
+        # If a user is supplied we pull some data from it
+        if user:
+            if ip_address is None:
+                ip_address = user.get("ip_address")
+            if did is None:
+                did = user.get("id") or user.get("email") or user.get("username")
+
+        if sid is not None:
+            self.sid = _make_uuid(sid)
+        if did is not None:
+            self.did = str(did)
+        if timestamp is None:
+            timestamp = datetime.utcnow()
+        self.timestamp = timestamp
+        if started is not None:
+            self.started = started
+        if duration is not None:
+            self.duration = duration
+        if release is not None:
+            self.release = release
+        if environment is not None:
+            self.environment = environment
+        if ip_address is not None:
+            self.ip_address = ip_address
+        if user_agent is not None:
+            self.user_agent = user_agent
+        if errors is not None:
+            self.errors = errors
+
+        if status is not None:
+            self.status = status
+
+    def close(
+        self, status=None  # type: Optional[SessionStatus]
+    ):
+        # type: (...) -> Any
+        if status is None and self.status == "ok":
+            status = "exited"
+        if status is not None:
+            self.update(status=status)
+
+    def get_json_attrs(
+        self, with_user_info=True  # type: Optional[bool]
+    ):
+        # type: (...) -> Any
+        attrs = {}
+        if self.release is not None:
+            attrs["release"] = self.release
+        if self.environment is not None:
+            attrs["environment"] = self.environment
+        if with_user_info:
+            if self.ip_address is not None:
+                attrs["ip_address"] = self.ip_address
+            if self.user_agent is not None:
+                attrs["user_agent"] = self.user_agent
+        return attrs
+
+    def to_json(self):
+        # type: (...) -> Any
+        rv = {
+            "sid": str(self.sid),
+            "init": True,
+            "started": format_timestamp(self.started),
+            "timestamp": format_timestamp(self.timestamp),
+            "status": self.status,
+        }  # type: Dict[str, Any]
+        if self.errors:
+            rv["errors"] = self.errors
+        if self.did is not None:
+            rv["did"] = self.did
+        if self.duration is not None:
+            rv["duration"] = self.duration
+        attrs = self.get_json_attrs()
+        if attrs:
+            rv["attrs"] = attrs
+        return rv
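
A minimal sketch of how the Session class above might be driven by hand, assuming the new sentry_sdk.session module from this diff is importable (values are invented):

    from sentry_sdk.session import Session

    # sid, started and status default to a fresh UUID, "now" and "ok".
    session = Session(release="myapp@1.0.0", environment="production")

    # Record an error that happened while the session was alive.
    session.update(errors=session.errors + 1)

    # close() promotes a still-"ok" session to "exited" unless an explicit
    # terminal status is passed.
    session.close()

    payload = session.to_json()
    # payload now carries sid, init, started, timestamp, status, errors and attrs.
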
diff --git a/sentry_sdk/sessions.py b/sentry_sdk/sessions.py
index b8ef201e2a..4e4d21b89c 100644
--- a/sentry_sdk/sessions.py
+++ b/sentry_sdk/sessions.py
@@ -1,46 +1,48 @@
 import os
-import uuid
 import time
-from datetime import datetime
 from threading import Thread, Lock
 from contextlib import contextmanager
 
+import sentry_sdk
+from sentry_sdk.envelope import Envelope
+from sentry_sdk.session import Session
 from sentry_sdk._types import MYPY
 from sentry_sdk.utils import format_timestamp
 
 if MYPY:
-    import sentry_sdk
-
-    from typing import Optional
-    from typing import Union
     from typing import Any
+    from typing import Callable
     from typing import Dict
     from typing import Generator
-
-    from sentry_sdk._types import SessionStatus
+    from typing import List
+    from typing import Optional
+    from typing import Union
 
 
 def is_auto_session_tracking_enabled(hub=None):
-    # type: (Optional[sentry_sdk.Hub]) -> bool
+    # type: (Optional[sentry_sdk.Hub]) -> Union[Any, bool, None]
     """Utility function to find out if session tracking is enabled."""
     if hub is None:
         hub = sentry_sdk.Hub.current
+
     should_track = hub.scope._force_auto_session_tracking
+
     if should_track is None:
-        exp = hub.client.options["_experiments"] if hub.client else {}
-        should_track = exp.get("auto_session_tracking")
+        client_options = hub.client.options if hub.client else {}
+        should_track = client_options.get("auto_session_tracking", False)
+
     return should_track
 
 
 @contextmanager
-def auto_session_tracking(hub=None):
-    # type: (Optional[sentry_sdk.Hub]) -> Generator[None, None, None]
+def auto_session_tracking(hub=None, session_mode="application"):
+    # type: (Optional[sentry_sdk.Hub], str) -> Generator[None, None, None]
     """Starts and stops a session automatically around a block."""
     if hub is None:
         hub = sentry_sdk.Hub.current
     should_track = is_auto_session_tracking_enabled(hub)
     if should_track:
-        hub.start_session()
+        hub.start_session(session_mode=session_mode)
     try:
         yield
     finally:
@@ -48,38 +50,58 @@ def auto_session_tracking(hub=None):
             hub.end_session()
 
 
-def _make_uuid(
-    val,  # type: Union[str, uuid.UUID]
-):
-    # type: (...) -> uuid.UUID
-    if isinstance(val, uuid.UUID):
-        return val
-    return uuid.UUID(val)
+TERMINAL_SESSION_STATES = ("exited", "abnormal", "crashed")
+MAX_ENVELOPE_ITEMS = 100
 
 
-TERMINAL_SESSION_STATES = ("exited", "abnormal", "crashed")
+def make_aggregate_envelope(aggregate_states, attrs):
+    # type: (Any, Any) -> Any
+    return {"attrs": dict(attrs), "aggregates": list(aggregate_states.values())}
 
 
 class SessionFlusher(object):
     def __init__(
         self,
-        flush_func,  # type: Any
-        flush_interval=10,  # type: int
+        capture_func,  # type: Callable[[Envelope], None]
+        flush_interval=60,  # type: int
     ):
         # type: (...) -> None
-        self.flush_func = flush_func
+        self.capture_func = capture_func
         self.flush_interval = flush_interval
-        self.pending = {}  # type: Dict[str, Any]
+        self.pending_sessions = []  # type: List[Any]
+        self.pending_aggregates = {}  # type: Dict[Any, Any]
         self._thread = None  # type: Optional[Thread]
         self._thread_lock = Lock()
+        self._aggregate_lock = Lock()
         self._thread_for_pid = None  # type: Optional[int]
         self._running = True
 
     def flush(self):
         # type: (...) -> None
-        pending = self.pending
-        self.pending = {}
-        self.flush_func(list(pending.values()))
+        pending_sessions = self.pending_sessions
+        self.pending_sessions = []
+
+        with self._aggregate_lock:
+            pending_aggregates = self.pending_aggregates
+            self.pending_aggregates = {}
+
+        envelope = Envelope()
+        for session in pending_sessions:
+            if len(envelope.items) == MAX_ENVELOPE_ITEMS:
+                self.capture_func(envelope)
+                envelope = Envelope()
+
+            envelope.add_session(session)
+
+        for (attrs, states) in pending_aggregates.items():
+            if len(envelope.items) == MAX_ENVELOPE_ITEMS:
+                self.capture_func(envelope)
+                envelope = Envelope()
+
+            envelope.add_sessions(make_aggregate_envelope(states, attrs))
+
+        if len(envelope.items) > 0:
+            self.capture_func(envelope)
 
     def _ensure_running(self):
         # type: (...) -> None
@@ -93,7 +115,7 @@ def _thread():
                 # type: (...) -> None
                 while self._running:
                     time.sleep(self.flush_interval)
-                    if self.pending and self._running:
+                    if self._running:
                         self.flush()
 
             thread = Thread(target=_thread)
@@ -103,11 +125,45 @@ def _thread():
             self._thread_for_pid = os.getpid()
         return None
 
+    def add_aggregate_session(
+        self, session  # type: Session
+    ):
+        # type: (...) -> None
+        # NOTE on `session.did`:
+        # the protocol can deal with buckets that have a distinct-id, however
+        # in practice we expect `did` values from the python SDK to have extremely
+        # high cardinality, effectively making aggregation useless; therefore we
+        # do not aggregate per-did.
+
+        # For this part we can get away with using the global interpreter lock
+        with self._aggregate_lock:
+            attrs = session.get_json_attrs(with_user_info=False)
+            primary_key = tuple(sorted(attrs.items()))
+            secondary_key = session.truncated_started  # (, session.did)
+            states = self.pending_aggregates.setdefault(primary_key, {})
+            state = states.setdefault(secondary_key, {})
+
+            if "started" not in state:
+                state["started"] = format_timestamp(session.truncated_started)
+            # if session.did is not None:
+            #     state["did"] = session.did
+            if session.status == "crashed":
+                state["crashed"] = state.get("crashed", 0) + 1
+            elif session.status == "abnormal":
+                state["abnormal"] = state.get("abnormal", 0) + 1
+            elif session.errors > 0:
+                state["errored"] = state.get("errored", 0) + 1
+            else:
+                state["exited"] = state.get("exited", 0) + 1
+
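
The counting rule above, restated as a standalone sketch on a bare dict (the statuses and error counts here are invented):

    def fold(state, status, errors):
        # Same bucketing as add_aggregate_session: crashed/abnormal win,
        # otherwise any error makes the session "errored", else "exited".
        if status == "crashed":
            state["crashed"] = state.get("crashed", 0) + 1
        elif status == "abnormal":
            state["abnormal"] = state.get("abnormal", 0) + 1
        elif errors > 0:
            state["errored"] = state.get("errored", 0) + 1
        else:
            state["exited"] = state.get("exited", 0) + 1

    bucket = {"started": "2023-01-01T10:00:00Z"}
    for status, errors in [("exited", 0), ("exited", 1), ("crashed", 0)]:
        fold(bucket, status, errors)
    assert bucket == {"started": "2023-01-01T10:00:00Z",
                      "exited": 1, "errored": 1, "crashed": 1}
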
     def add_session(
         self, session  # type: Session
     ):
         # type: (...) -> None
-        self.pending[session.sid.hex] = session.to_json()
+        if session.session_mode == "request":
+            self.add_aggregate_session(session)
+        else:
+            self.pending_sessions.append(session.to_json())
         self._ensure_running()
 
     def kill(self):
@@ -117,136 +173,3 @@ def kill(self):
     def __del__(self):
         # type: (...) -> None
         self.kill()
-
-
-class Session(object):
-    def __init__(
-        self,
-        sid=None,  # type: Optional[Union[str, uuid.UUID]]
-        did=None,  # type: Optional[str]
-        timestamp=None,  # type: Optional[datetime]
-        started=None,  # type: Optional[datetime]
-        duration=None,  # type: Optional[float]
-        status=None,  # type: Optional[SessionStatus]
-        release=None,  # type: Optional[str]
-        environment=None,  # type: Optional[str]
-        user_agent=None,  # type: Optional[str]
-        ip_address=None,  # type: Optional[str]
-        errors=None,  # type: Optional[int]
-        user=None,  # type: Optional[Any]
-    ):
-        # type: (...) -> None
-        if sid is None:
-            sid = uuid.uuid4()
-        if started is None:
-            started = datetime.utcnow()
-        if status is None:
-            status = "ok"
-        self.status = status
-        self.did = None  # type: Optional[str]
-        self.started = started
-        self.release = None  # type: Optional[str]
-        self.environment = None  # type: Optional[str]
-        self.duration = None  # type: Optional[float]
-        self.user_agent = None  # type: Optional[str]
-        self.ip_address = None  # type: Optional[str]
-        self.errors = 0
-
-        self.update(
-            sid=sid,
-            did=did,
-            timestamp=timestamp,
-            duration=duration,
-            release=release,
-            environment=environment,
-            user_agent=user_agent,
-            ip_address=ip_address,
-            errors=errors,
-            user=user,
-        )
-
-    def update(
-        self,
-        sid=None,  # type: Optional[Union[str, uuid.UUID]]
-        did=None,  # type: Optional[str]
-        timestamp=None,  # type: Optional[datetime]
-        started=None,  # type: Optional[datetime]
-        duration=None,  # type: Optional[float]
-        status=None,  # type: Optional[SessionStatus]
-        release=None,  # type: Optional[str]
-        environment=None,  # type: Optional[str]
-        user_agent=None,  # type: Optional[str]
-        ip_address=None,  # type: Optional[str]
-        errors=None,  # type: Optional[int]
-        user=None,  # type: Optional[Any]
-    ):
-        # type: (...) -> None
-        # If a user is supplied we pull some data form it
-        if user:
-            if ip_address is None:
-                ip_address = user.get("ip_address")
-            if did is None:
-                did = user.get("id") or user.get("email") or user.get("username")
-
-        if sid is not None:
-            self.sid = _make_uuid(sid)
-        if did is not None:
-            self.did = str(did)
-        if timestamp is None:
-            timestamp = datetime.utcnow()
-        self.timestamp = timestamp
-        if started is not None:
-            self.started = started
-        if duration is not None:
-            self.duration = duration
-        if release is not None:
-            self.release = release
-        if environment is not None:
-            self.environment = environment
-        if ip_address is not None:
-            self.ip_address = ip_address
-        if user_agent is not None:
-            self.user_agent = user_agent
-        if errors is not None:
-            self.errors = errors
-
-        if status is not None:
-            self.status = status
-
-    def close(
-        self, status=None  # type: Optional[SessionStatus]
-    ):
-        # type: (...) -> Any
-        if status is None and self.status == "ok":
-            status = "exited"
-        if status is not None:
-            self.update(status=status)
-
-    def to_json(self):
-        # type: (...) -> Any
-        rv = {
-            "sid": str(self.sid),
-            "init": True,
-            "started": format_timestamp(self.started),
-            "timestamp": format_timestamp(self.timestamp),
-            "status": self.status,
-        }  # type: Dict[str, Any]
-        if self.errors:
-            rv["errors"] = self.errors
-        if self.did is not None:
-            rv["did"] = self.did
-        if self.duration is not None:
-            rv["duration"] = self.duration
-
-        attrs = {}
-        if self.release is not None:
-            attrs["release"] = self.release
-        if self.environment is not None:
-            attrs["environment"] = self.environment
-        if self.ip_address is not None:
-            attrs["ip_address"] = self.ip_address
-        if self.user_agent is not None:
-            attrs["user_agent"] = self.user_agent
-        if attrs:
-            rv["attrs"] = attrs
-        return rv
diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py
index 9293365b83..61c6a7190b 100644
--- a/sentry_sdk/tracing.py
+++ b/sentry_sdk/tracing.py
@@ -1,94 +1,81 @@
-import re
 import uuid
-import contextlib
+import random
+import threading
 import time
 
 from datetime import datetime, timedelta
 
 import sentry_sdk
-
-from sentry_sdk.utils import capture_internal_exceptions, logger, to_string
-from sentry_sdk._compat import PY2
+from sentry_sdk.consts import INSTRUMENTER
+from sentry_sdk.utils import logger
 from sentry_sdk._types import MYPY
 
-if PY2:
-    from collections import Mapping
-else:
-    from collections.abc import Mapping
 
 if MYPY:
     import typing
 
-    from typing import Generator
     from typing import Optional
     from typing import Any
     from typing import Dict
     from typing import List
     from typing import Tuple
-
-_traceparent_header_format_re = re.compile(
-    "^[ \t]*"  # whitespace
-    "([0-9a-f]{32})?"  # trace_id
-    "-?([0-9a-f]{16})?"  # span_id
-    "-?([01])?"  # sampled
-    "[ \t]*$"  # whitespace
-)
-
-
-class EnvironHeaders(Mapping):  # type: ignore
-    def __init__(
-        self,
-        environ,  # type: typing.Mapping[str, str]
-        prefix="HTTP_",  # type: str
-    ):
-        # type: (...) -> None
-        self.environ = environ
-        self.prefix = prefix
-
-    def __getitem__(self, key):
-        # type: (str) -> Optional[Any]
-        return self.environ[self.prefix + key.replace("-", "_").upper()]
-
-    def __len__(self):
-        # type: () -> int
-        return sum(1 for _ in iter(self))
-
-    def __iter__(self):
-        # type: () -> Generator[str, None, None]
-        for k in self.environ:
-            if not isinstance(k, str):
-                continue
-
-            k = k.replace("-", "_").upper()
-            if not k.startswith(self.prefix):
-                continue
-
-            yield k[len(self.prefix) :]
+    from typing import Iterator
+
+    import sentry_sdk.profiler
+    from sentry_sdk._types import Event, SamplingContext, MeasurementUnit
+
+BAGGAGE_HEADER_NAME = "baggage"
+SENTRY_TRACE_HEADER_NAME = "sentry-trace"
+
+
+# Transaction source
+# see https://develop.sentry.dev/sdk/event-payloads/transaction/#transaction-annotations
+TRANSACTION_SOURCE_CUSTOM = "custom"
+TRANSACTION_SOURCE_URL = "url"
+TRANSACTION_SOURCE_ROUTE = "route"
+TRANSACTION_SOURCE_VIEW = "view"
+TRANSACTION_SOURCE_COMPONENT = "component"
+TRANSACTION_SOURCE_TASK = "task"
+
+# These are typically high cardinality and the server hates them
+LOW_QUALITY_TRANSACTION_SOURCES = [
+    TRANSACTION_SOURCE_URL,
+]
+
+SOURCE_FOR_STYLE = {
+    "endpoint": TRANSACTION_SOURCE_COMPONENT,
+    "function_name": TRANSACTION_SOURCE_COMPONENT,
+    "handler_name": TRANSACTION_SOURCE_COMPONENT,
+    "method_and_path_pattern": TRANSACTION_SOURCE_ROUTE,
+    "path": TRANSACTION_SOURCE_URL,
+    "route_name": TRANSACTION_SOURCE_COMPONENT,
+    "route_pattern": TRANSACTION_SOURCE_ROUTE,
+    "uri_template": TRANSACTION_SOURCE_ROUTE,
+    "url": TRANSACTION_SOURCE_ROUTE,
+}
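
A quick example of how an integration's transaction_style setting maps onto a source annotation via the table above ("endpoint" and "path" are two of the listed keys):

    source = SOURCE_FOR_STYLE["endpoint"]
    assert source == TRANSACTION_SOURCE_COMPONENT

    # URL-style names are high cardinality, so they are flagged as low quality:
    assert SOURCE_FOR_STYLE["path"] in LOW_QUALITY_TRANSACTION_SOURCES
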
 
 
 class _SpanRecorder(object):
-    __slots__ = ("maxlen", "finished_spans", "open_span_count")
+    """Limits the number of spans recorded in a transaction."""
+
+    __slots__ = ("maxlen", "spans")
 
     def __init__(self, maxlen):
         # type: (int) -> None
-        self.maxlen = maxlen
-        self.open_span_count = 0  # type: int
-        self.finished_spans = []  # type: List[Span]
-
-    def start_span(self, span):
+        # FIXME: this is `maxlen - 1` only to preserve historical behavior
+        # enforced by tests.
+        # Either this should be changed to `maxlen` or the JS SDK implementation
+        # should be changed to match a consistent interpretation of what maxlen
+        # limits: either transaction+spans or only child spans.
+        self.maxlen = maxlen - 1
+        self.spans = []  # type: List[Span]
+
+    def add(self, span):
         # type: (Span) -> None
-
-        # This is just so that we don't run out of memory while recording a lot
-        # of spans. At some point we just stop and flush out the start of the
-        # trace tree (i.e. the first n spans with the smallest
-        # start_timestamp).
-        self.open_span_count += 1
-        if self.open_span_count > self.maxlen:
+        if len(self.spans) > self.maxlen:
             span._span_recorder = None
-
-    def finish_span(self, span):
-        # type: (Span) -> None
-        self.finished_spans.append(span)
+        else:
+            self.spans.append(span)
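
A small sketch of the recorder's capacity behavior; note the length check runs before appending, so with maxlen=3 the first three spans are kept (_SpanRecorder is private, imported here only for illustration):

    from sentry_sdk.tracing import Span, _SpanRecorder

    recorder = _SpanRecorder(maxlen=3)  # stores maxlen - 1 == 2 internally
    spans = [Span(op="db") for _ in range(5)]
    for span in spans:
        recorder.add(span)

    assert len(recorder.spans) == 3
    assert spans[-1]._span_recorder is None  # dropped spans are detached
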
 
 
 class Span(object):
@@ -98,7 +85,6 @@ class Span(object):
         "parent_span_id",
         "same_process_as_parent",
         "sampled",
-        "transaction",
         "op",
         "description",
         "start_timestamp",
@@ -110,8 +96,23 @@ class Span(object):
         "_span_recorder",
         "hub",
         "_context_manager_state",
+        "_containing_transaction",
     )
 
+    def __new__(cls, **kwargs):
+        # type: (**Any) -> Any
+        """
+        Backwards-compatible implementation of Span and Transaction
+        creation.
+        """
+
+        # TODO: consider removing this in a future release.
+        # This is for backwards compatibility with releases before Transaction
+        # existed, to allow for a smoother transition.
+        if "transaction" in kwargs:
+            return object.__new__(Transaction)
+        return object.__new__(cls)
+
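
The effect of the backwards-compatibility shim above, as a short snippet:

    from sentry_sdk.tracing import Span, Transaction

    # The deprecated `transaction` kwarg transparently builds a Transaction.
    span = Span(transaction="checkout")
    assert isinstance(span, Transaction)
    assert span.name == "checkout"  # a deprecation warning is logged
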
     def __init__(
         self,
         trace_id=None,  # type: Optional[str]
@@ -119,11 +120,13 @@ def __init__(
         parent_span_id=None,  # type: Optional[str]
         same_process_as_parent=True,  # type: bool
         sampled=None,  # type: Optional[bool]
-        transaction=None,  # type: Optional[str]
         op=None,  # type: Optional[str]
         description=None,  # type: Optional[str]
         hub=None,  # type: Optional[sentry_sdk.Hub]
         status=None,  # type: Optional[str]
+        transaction=None,  # type: Optional[str] # deprecated
+        containing_transaction=None,  # type: Optional[Transaction]
+        start_timestamp=None,  # type: Optional[datetime]
     ):
         # type: (...) -> None
         self.trace_id = trace_id or uuid.uuid4().hex
@@ -131,14 +134,14 @@ def __init__(
         self.parent_span_id = parent_span_id
         self.same_process_as_parent = same_process_as_parent
         self.sampled = sampled
-        self.transaction = transaction
         self.op = op
         self.description = description
         self.status = status
         self.hub = hub
         self._tags = {}  # type: Dict[str, str]
         self._data = {}  # type: Dict[str, Any]
-        self.start_timestamp = datetime.utcnow()
+        self._containing_transaction = containing_transaction
+        self.start_timestamp = start_timestamp or datetime.utcnow()
         try:
             # TODO: For Python 3.7+, we could use a clock with ns resolution:
             # self._start_timestamp_monotonic = time.perf_counter_ns()
@@ -153,19 +156,21 @@ def __init__(
 
         self._span_recorder = None  # type: Optional[_SpanRecorder]
 
-    def init_finished_spans(self, maxlen):
+    # TODO this should really live on the Transaction class rather than the Span
+    # class
+    def init_span_recorder(self, maxlen):
         # type: (int) -> None
         if self._span_recorder is None:
             self._span_recorder = _SpanRecorder(maxlen)
-        self._span_recorder.start_span(self)
 
     def __repr__(self):
         # type: () -> str
         return (
-            "<%s(transaction=%r, trace_id=%r, span_id=%r, parent_span_id=%r, sampled=%r)>"
+            "<%s(op=%r, description:%r, trace_id=%r, span_id=%r, parent_span_id=%r, sampled=%r)>"
             % (
                 self.__class__.__name__,
-                self.transaction,
+                self.op,
+                self.description,
                 self.trace_id,
                 self.span_id,
                 self.parent_span_id,
@@ -194,63 +199,167 @@ def __exit__(self, ty, value, tb):
         self.finish(hub)
         scope.span = old_span
 
-    def new_span(self, **kwargs):
-        # type: (**Any) -> Span
-        rv = type(self)(
+    @property
+    def containing_transaction(self):
+        # type: () -> Optional[Transaction]
+
+        # this is a getter rather than a regular attribute so that transactions
+        # can return `self` here instead (as a way to prevent them circularly
+        # referencing themselves)
+        return self._containing_transaction
+
+    def start_child(self, instrumenter=INSTRUMENTER.SENTRY, **kwargs):
+        # type: (str, **Any) -> Span
+        """
+        Start a sub-span from the current span or transaction.
+
+        Takes the same arguments as the initializer of :py:class:`Span`. The
+        trace id, sampling decision, transaction pointer, and span recorder are
+        inherited from the current span/transaction.
+        """
+        hub = self.hub or sentry_sdk.Hub.current
+        client = hub.client
+        configuration_instrumenter = client and client.options["instrumenter"]
+
+        if instrumenter != configuration_instrumenter:
+            return NoOpSpan()
+
+        kwargs.setdefault("sampled", self.sampled)
+
+        child = Span(
             trace_id=self.trace_id,
-            span_id=None,
             parent_span_id=self.span_id,
-            sampled=self.sampled,
+            containing_transaction=self.containing_transaction,
             **kwargs
         )
 
-        rv._span_recorder = self._span_recorder
-        return rv
+        span_recorder = (
+            self.containing_transaction and self.containing_transaction._span_recorder
+        )
+        if span_recorder:
+            span_recorder.add(child)
+        return child
+
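
Typical use of start_child, assuming the top-level sentry_sdk.start_transaction helper is available in this version of the SDK (names and values invented):

    import sentry_sdk

    with sentry_sdk.start_transaction(name="process-order", op="task") as transaction:
        # The child inherits trace_id and the sampling decision, points back at
        # the containing transaction, and registers with its span recorder.
        with transaction.start_child(op="db", description="SELECT 1") as span:
            span.set_tag("db.system", "postgresql")
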
+    def new_span(self, **kwargs):
+        # type: (**Any) -> Span
+        """Deprecated: use start_child instead."""
+        logger.warning("Deprecated: use Span.start_child instead of Span.new_span.")
+        return self.start_child(**kwargs)
 
     @classmethod
-    def continue_from_environ(cls, environ):
-        # type: (typing.Mapping[str, str]) -> Span
-        return cls.continue_from_headers(EnvironHeaders(environ))
+    def continue_from_environ(
+        cls,
+        environ,  # type: typing.Mapping[str, str]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> Transaction
+        """
+        Create a Transaction with the given params, then add in data pulled from
+        the 'sentry-trace', 'baggage' and 'tracestate' headers from the environ (if any)
+        before returning the Transaction.
+
+        This is different from `continue_from_headers` in that it assumes header
+        names in the form "HTTP_HEADER_NAME" - such as you would get from a wsgi
+        environ - rather than the form "header-name".
+        """
+        if cls is Span:
+            logger.warning(
+                "Deprecated: use Transaction.continue_from_environ "
+                "instead of Span.continue_from_environ."
+            )
+        return Transaction.continue_from_headers(EnvironHeaders(environ), **kwargs)
 
     @classmethod
-    def continue_from_headers(cls, headers):
-        # type: (typing.Mapping[str, str]) -> Span
-        parent = cls.from_traceparent(headers.get("sentry-trace"))
-        if parent is None:
-            return cls()
-        parent.same_process_as_parent = False
-        return parent
+    def continue_from_headers(
+        cls,
+        headers,  # type: typing.Mapping[str, str]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> Transaction
+        """
+        Create a transaction with the given params (including any data pulled from
+        the 'sentry-trace', 'baggage' and 'tracestate' headers).
+        """
+        # TODO move this to the Transaction class
+        if cls is Span:
+            logger.warning(
+                "Deprecated: use Transaction.continue_from_headers "
+                "instead of Span.continue_from_headers."
+            )
 
-    def iter_headers(self):
-        # type: () -> Generator[Tuple[str, str], None, None]
-        yield "sentry-trace", self.to_traceparent()
+        # TODO-neel move away from this kwargs stuff; it's confusing and
+        # opaque. Make the arguments explicit instead.
+        baggage = Baggage.from_incoming_header(headers.get(BAGGAGE_HEADER_NAME))
+        kwargs.update({BAGGAGE_HEADER_NAME: baggage})
 
-    @classmethod
-    def from_traceparent(cls, traceparent):
-        # type: (Optional[str]) -> Optional[Span]
-        if not traceparent:
-            return None
+        sentrytrace_kwargs = extract_sentrytrace_data(
+            headers.get(SENTRY_TRACE_HEADER_NAME)
+        )
 
-        if traceparent.startswith("00-") and traceparent.endswith("-00"):
-            traceparent = traceparent[3:-3]
+        if sentrytrace_kwargs is not None:
+            kwargs.update(sentrytrace_kwargs)
 
-        match = _traceparent_header_format_re.match(str(traceparent))
-        if match is None:
-            return None
+            # If there's an incoming sentry-trace but no incoming baggage header
+            # (for instance in traces coming from older SDKs), baggage will be
+            # empty and immutable, and this SDK won't populate it as the head SDK.
+            baggage.freeze()
 
-        trace_id, span_id, sampled_str = match.groups()
+        kwargs.update(extract_tracestate_data(headers.get("tracestate")))
 
-        if trace_id is not None:
-            trace_id = "{:032x}".format(int(trace_id, 16))
-        if span_id is not None:
-            span_id = "{:016x}".format(int(span_id, 16))
+        transaction = Transaction(**kwargs)
+        transaction.same_process_as_parent = False
 
-        if sampled_str:
-            sampled = sampled_str != "0"  # type: Optional[bool]
-        else:
-            sampled = None
+        return transaction
 
-        return cls(trace_id=trace_id, parent_span_id=span_id, sampled=sampled)
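
A hedged sketch of continuing a trace from incoming headers; the header values are invented:

    from sentry_sdk.tracing import Transaction

    headers = {
        "sentry-trace": "771a43a4192642f0b136d5159a501700-1234567890abcdef-1",
        "baggage": "other-vendor=value",
    }
    transaction = Transaction.continue_from_headers(
        headers, name="handle-request", op="http.server"
    )
    assert transaction.trace_id == "771a43a4192642f0b136d5159a501700"
    assert transaction.parent_sampled is True
    assert transaction.same_process_as_parent is False
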
+    def iter_headers(self):
+        # type: () -> Iterator[Tuple[str, str]]
+        """
+        Creates a generator which yields the span's `sentry-trace`, `baggage` and
+        `tracestate` headers.
+
+        If the span's containing transaction doesn't yet have a
+        `sentry_tracestate` value, this will cause one to be generated and
+        stored.
+        """
+        yield SENTRY_TRACE_HEADER_NAME, self.to_traceparent()
+
+        tracestate = self.to_tracestate() if has_tracestate_enabled(self) else None
+        # `tracestate` will only be `None` if there's no client or no DSN
+        # TODO (kmclb) the above will be true once the feature is no longer
+        # behind a flag
+        if tracestate:
+            yield "tracestate", tracestate
+
+        if self.containing_transaction:
+            baggage = self.containing_transaction.get_baggage().serialize()
+            if baggage:
+                yield BAGGAGE_HEADER_NAME, baggage
+
+    @classmethod
+    def from_traceparent(
+        cls,
+        traceparent,  # type: Optional[str]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> Optional[Transaction]
+        """
+        DEPRECATED: Use Transaction.continue_from_headers(headers, **kwargs)
+
+        Create a Transaction with the given params, then add in data pulled from
+        the given 'sentry-trace' header value before returning the Transaction.
+
+        """
+        logger.warning(
+            "Deprecated: Use Transaction.continue_from_headers(headers, **kwargs) "
+            "instead of from_traceparent(traceparent, **kwargs)"
+        )
+
+        if not traceparent:
+            return None
+
+        return cls.continue_from_headers(
+            {SENTRY_TRACE_HEADER_NAME: traceparent}, **kwargs
+        )
 
     def to_traceparent(self):
         # type: () -> str
@@ -261,9 +370,56 @@ def to_traceparent(self):
             sampled = "0"
         return "%s-%s-%s" % (self.trace_id, self.span_id, sampled)
 
-    def to_legacy_traceparent(self):
-        # type: () -> str
-        return "00-%s-%s-00" % (self.trace_id, self.span_id)
+    def to_tracestate(self):
+        # type: () -> Optional[str]
+        """
+        Computes the `tracestate` header value using data from the containing
+        transaction.
+
+        If the containing transaction doesn't yet have a `sentry_tracestate`
+        value, this will cause one to be generated and stored.
+
+        If there is no containing transaction, a value will be generated but not
+        stored.
+
+        Returns None if there's no client and/or no DSN.
+        """
+
+        sentry_tracestate = self.get_or_set_sentry_tracestate()
+        third_party_tracestate = (
+            self.containing_transaction._third_party_tracestate
+            if self.containing_transaction
+            else None
+        )
+
+        if not sentry_tracestate:
+            return None
+
+        header_value = sentry_tracestate
+
+        if third_party_tracestate:
+            header_value = header_value + "," + third_party_tracestate
+
+        return header_value
+
+    def get_or_set_sentry_tracestate(self):
+        # type: (Span) -> Optional[str]
+        """
+        Read sentry tracestate off of the span's containing transaction.
+
+        If the transaction doesn't yet have a `_sentry_tracestate` value,
+        compute one and store it.
+        """
+        transaction = self.containing_transaction
+
+        if transaction:
+            if not transaction._sentry_tracestate:
+                transaction._sentry_tracestate = compute_tracestate_entry(self)
+
+            return transaction._sentry_tracestate
+
+        # orphan span - nowhere to store the value, so just return it
+        return compute_tracestate_entry(self)
 
     def set_tag(self, key, value):
         # type: (str, Any) -> None
@@ -279,7 +435,7 @@ def set_status(self, value):
 
     def set_http_status(self, http_status):
         # type: (int) -> None
-        self.set_tag("http.status_code", http_status)
+        self.set_tag("http.status_code", str(http_status))
 
         if http_status < 400:
             self.set_status("ok")
@@ -314,67 +470,32 @@ def is_success(self):
         # type: () -> bool
         return self.status == "ok"
 
-    def finish(self, hub=None):
-        # type: (Optional[sentry_sdk.Hub]) -> Optional[str]
-        hub = hub or self.hub or sentry_sdk.Hub.current
-
+    def finish(self, hub=None, end_timestamp=None):
+        # type: (Optional[sentry_sdk.Hub], Optional[datetime]) -> Optional[str]
+        # XXX: would be type: (Optional[sentry_sdk.Hub]) -> None, but that leads
+        # to incompatible return types for Span.finish and Transaction.finish.
         if self.timestamp is not None:
-            # This transaction is already finished, so we should not flush it again.
+            # This span is already finished, ignore.
             return None
 
+        hub = hub or self.hub or sentry_sdk.Hub.current
+
         try:
-            duration_seconds = time.perf_counter() - self._start_timestamp_monotonic
-            self.timestamp = self.start_timestamp + timedelta(seconds=duration_seconds)
+            if end_timestamp:
+                self.timestamp = end_timestamp
+            else:
+                duration_seconds = time.perf_counter() - self._start_timestamp_monotonic
+                self.timestamp = self.start_timestamp + timedelta(
+                    seconds=duration_seconds
+                )
         except AttributeError:
             self.timestamp = datetime.utcnow()
 
-        _maybe_create_breadcrumbs_from_span(hub, self)
+        maybe_create_breadcrumbs_from_span(hub, self)
+        return None
 
-        if self._span_recorder is None:
-            return None
-
-        self._span_recorder.finish_span(self)
-
-        if self.transaction is None:
-            # If this has no transaction set we assume there's a parent
-            # transaction for this span that would be flushed out eventually.
-            return None
-
-        client = hub.client
-
-        if client is None:
-            # We have no client and therefore nowhere to send this transaction
-            # event.
-            return None
-
-        if not self.sampled:
-            # At this point a `sampled = None` should have already been
-            # resolved to a concrete decision. If `sampled` is `None`, it's
-            # likely that somebody used `with sentry_sdk.Hub.start_span(..)` on a
-            # non-transaction span and later decided to make it a transaction.
-            if self.sampled is None:
-                logger.warning("Discarding transaction Span without sampling decision")
-
-            return None
-
-        return hub.capture_event(
-            {
-                "type": "transaction",
-                "transaction": self.transaction,
-                "contexts": {"trace": self.get_trace_context()},
-                "tags": self._tags,
-                "timestamp": self.timestamp,
-                "start_timestamp": self.start_timestamp,
-                "spans": [
-                    s.to_json(client)
-                    for s in self._span_recorder.finished_spans
-                    if s is not self
-                ],
-            }
-        )
-
-    def to_json(self, client):
-        # type: (Optional[sentry_sdk.Client]) -> Dict[str, Any]
+    def to_json(self):
+        # type: () -> Dict[str, Any]
         rv = {
             "trace_id": self.trace_id,
             "span_id": self.span_id,
@@ -386,10 +507,6 @@ def to_json(self, client):
             "timestamp": self.timestamp,
         }  # type: Dict[str, Any]
 
-        transaction = self.transaction
-        if transaction:
-            rv["transaction"] = transaction
-
         if self.status:
             self._tags["status"] = self.status
 
@@ -411,88 +528,378 @@ def get_trace_context(self):
             "parent_span_id": self.parent_span_id,
             "op": self.op,
             "description": self.description,
-        }
+        }  # type: Dict[str, Any]
         if self.status:
             rv["status"] = self.status
 
+        # if the transaction didn't inherit a tracestate value, and no outgoing
+        # requests - whose need for headers would have caused a tracestate value
+        # to be created - were made as part of the transaction, the transaction
+        # still won't have a tracestate value, so compute one now
+        sentry_tracestate = self.get_or_set_sentry_tracestate()
+
+        if sentry_tracestate:
+            rv["tracestate"] = sentry_tracestate
+
+        if self.containing_transaction:
+            rv[
+                "dynamic_sampling_context"
+            ] = self.containing_transaction.get_baggage().dynamic_sampling_context()
+
         return rv
 
 
-def _format_sql(cursor, sql):
-    # type: (Any, str) -> Optional[str]
-
-    real_sql = None
-
-    # If we're using psycopg2, it could be that we're
-    # looking at a query that uses Composed objects. Use psycopg2's mogrify
-    # function to format the query. We lose per-parameter trimming but gain
-    # accuracy in formatting.
-    try:
-        if hasattr(cursor, "mogrify"):
-            real_sql = cursor.mogrify(sql)
-            if isinstance(real_sql, bytes):
-                real_sql = real_sql.decode(cursor.connection.encoding)
-    except Exception:
-        real_sql = None
-
-    return real_sql or to_string(sql)
-
-
-@contextlib.contextmanager
-def record_sql_queries(
-    hub,  # type: sentry_sdk.Hub
-    cursor,  # type: Any
-    query,  # type: Any
-    params_list,  # type:  Any
-    paramstyle,  # type: Optional[str]
-    executemany,  # type: bool
-):
-    # type: (...) -> Generator[Span, None, None]
-
-    # TODO: Bring back capturing of params by default
-    if hub.client and hub.client.options["_experiments"].get(
-        "record_sql_params", False
+class Transaction(Span):
+    __slots__ = (
+        "name",
+        "source",
+        "parent_sampled",
+        # used to create baggage value for head SDKs in dynamic sampling
+        "sample_rate",
+        # the sentry portion of the `tracestate` header used to transmit
+        # correlation context for server-side dynamic sampling, of the form
+        # `sentry=xxxxx`, where `xxxxx` is the base64-encoded json of the
+        # correlation context data, minus any trailing `=`
+        "_sentry_tracestate",
+        # tracestate data from other vendors, of the form `dogs=yes,cats=maybe`
+        "_third_party_tracestate",
+        "_measurements",
+        "_contexts",
+        "_profile",
+        "_baggage",
+        "_active_thread_id",
+    )
+
+    def __init__(
+        self,
+        name="",  # type: str
+        parent_sampled=None,  # type: Optional[bool]
+        sentry_tracestate=None,  # type: Optional[str]
+        third_party_tracestate=None,  # type: Optional[str]
+        baggage=None,  # type: Optional[Baggage]
+        source=TRANSACTION_SOURCE_CUSTOM,  # type: str
+        **kwargs  # type: Any
     ):
-        if not params_list or params_list == [None]:
-            params_list = None
-
-        if paramstyle == "pyformat":
-            paramstyle = "format"
-    else:
-        params_list = None
-        paramstyle = None
-
-    query = _format_sql(cursor, query)
-
-    data = {}
-    if params_list is not None:
-        data["db.params"] = params_list
-    if paramstyle is not None:
-        data["db.paramstyle"] = paramstyle
-    if executemany:
-        data["db.executemany"] = True
-
-    with capture_internal_exceptions():
-        hub.add_breadcrumb(message=query, category="query", data=data)
-
-    with hub.start_span(op="db", description=query) as span:
-        for k, v in data.items():
-            span.set_data(k, v)
-        yield span
-
-
-def _maybe_create_breadcrumbs_from_span(hub, span):
-    # type: (sentry_sdk.Hub, Span) -> None
-    if span.op == "redis":
-        hub.add_breadcrumb(
-            message=span.description, type="redis", category="redis", data=span._tags
+        # type: (...) -> None
+        # TODO: consider removing this in a future release.
+        # This is for backwards compatibility with releases before Transaction
+        # existed, to allow for a smoother transition.
+        if not name and "transaction" in kwargs:
+            logger.warning(
+                "Deprecated: use Transaction(name=...) to create transactions "
+                "instead of Span(transaction=...)."
+            )
+            name = kwargs.pop("transaction")
+
+        Span.__init__(self, **kwargs)
+
+        self.name = name
+        self.source = source
+        self.sample_rate = None  # type: Optional[float]
+        self.parent_sampled = parent_sampled
+        # if tracestate isn't inherited and set here, it will get set lazily,
+        # either the first time an outgoing request needs it for a header or the
+        # first time an event needs it for inclusion in the captured data
+        self._sentry_tracestate = sentry_tracestate
+        self._third_party_tracestate = third_party_tracestate
+        self._measurements = {}  # type: Dict[str, Any]
+        self._contexts = {}  # type: Dict[str, Any]
+        self._profile = None  # type: Optional[sentry_sdk.profiler.Profile]
+        self._baggage = baggage
+        # for profiling, we want to know on which thread a transaction is started
+        # to accurately show the active thread in the UI
+        self._active_thread_id = (
+            threading.current_thread().ident
+        )  # used by profiling.py
+
+    def __repr__(self):
+        # type: () -> str
+        return (
+            "<%s(name=%r, op=%r, trace_id=%r, span_id=%r, parent_span_id=%r, sampled=%r, source=%r)>"
+            % (
+                self.__class__.__name__,
+                self.name,
+                self.op,
+                self.trace_id,
+                self.span_id,
+                self.parent_span_id,
+                self.sampled,
+                self.source,
+            )
+        )
+
+    @property
+    def containing_transaction(self):
+        # type: () -> Transaction
+
+        # Transactions (as spans) belong to themselves (as transactions). This
+        # is a getter rather than a regular attribute to avoid having a circular
+        # reference.
+        return self
+
+    def finish(self, hub=None, end_timestamp=None):
+        # type: (Optional[sentry_sdk.Hub], Optional[datetime]) -> Optional[str]
+        if self.timestamp is not None:
+            # This transaction is already finished, ignore.
+            return None
+
+        hub = hub or self.hub or sentry_sdk.Hub.current
+        client = hub.client
+
+        if client is None:
+            # We have no client and therefore nowhere to send this transaction.
+            return None
+
+        # This is a de facto proxy for checking if sampled = False
+        if self._span_recorder is None:
+            logger.debug("Discarding transaction because sampled = False")
+
+            # This is not entirely accurate: discards here are not based
+            # exclusively on the sample rate (the traces sampler can also
+            # cause them), but we record both the same way.
+            if client.transport and has_tracing_enabled(client.options):
+                client.transport.record_lost_event(
+                    "sample_rate", data_category="transaction"
+                )
+
+            return None
+
+        if not self.name:
+            logger.warning(
+                "Transaction has no name, falling back to `<unlabeled transaction>`."
+            )
+            self.name = "<unlabeled transaction>"
+
+        Span.finish(self, hub, end_timestamp)
+
+        if not self.sampled:
+            # At this point a `sampled = None` should have already been resolved
+            # to a concrete decision.
+            if self.sampled is None:
+                logger.warning("Discarding transaction without sampling decision.")
+
+            return None
+
+        finished_spans = [
+            span.to_json()
+            for span in self._span_recorder.spans
+            if span.timestamp is not None
+        ]
+
+        # we do this to break the circular reference of transaction -> span
+        # recorder -> span -> containing transaction (which is where we started)
+        # before either the spans or the transaction goes out of scope and has
+        # to be garbage collected
+        self._span_recorder = None
+
+        contexts = {}
+        contexts.update(self._contexts)
+        contexts.update({"trace": self.get_trace_context()})
+
+        event = {
+            "type": "transaction",
+            "transaction": self.name,
+            "transaction_info": {"source": self.source},
+            "contexts": contexts,
+            "tags": self._tags,
+            "timestamp": self.timestamp,
+            "start_timestamp": self.start_timestamp,
+            "spans": finished_spans,
+        }  # type: Event
+
+        if hub.client is not None and self._profile is not None:
+            event["profile"] = self._profile
+            contexts.update({"profile": self._profile.get_profile_context()})
+
+        if has_custom_measurements_enabled():
+            event["measurements"] = self._measurements
+
+        return hub.capture_event(event)
+
+    def set_measurement(self, name, value, unit=""):
+        # type: (str, float, MeasurementUnit) -> None
+        if not has_custom_measurements_enabled():
+            logger.debug(
+                "[Tracing] Experimental custom_measurements feature is disabled"
+            )
+            return
+
+        self._measurements[name] = {"value": value, "unit": unit}
+
+    def set_context(self, key, value):
+        # type: (str, Any) -> None
+        self._contexts[key] = value
+
+    def to_json(self):
+        # type: () -> Dict[str, Any]
+        rv = super(Transaction, self).to_json()
+
+        rv["name"] = self.name
+        rv["source"] = self.source
+        rv["sampled"] = self.sampled
+
+        return rv
+
+    def get_baggage(self):
+        # type: () -> Baggage
+        """
+        The first time a new baggage with sentry items is made,
+        it will be frozen.
+        """
+        if not self._baggage or self._baggage.mutable:
+            self._baggage = Baggage.populate_from_transaction(self)
+
+        return self._baggage
+
+    def _set_initial_sampling_decision(self, sampling_context):
+        # type: (SamplingContext) -> None
+        """
+        Sets the transaction's sampling decision, according to the following
+        precedence rules:
+
+        1. If a sampling decision is passed to `start_transaction`
+        (`start_transaction(name: "my transaction", sampled: True)`), that
+        decision will be used, regardless of anything else
+
+        2. If `traces_sampler` is defined, its decision will be used. It can
+        choose to keep or ignore any parent sampling decision, or use the
+        sampling context data to make its own decision or to choose a sample
+        rate for the transaction.
+
+        3. If `traces_sampler` is not defined, but there's a parent sampling
+        decision, the parent sampling decision will be used.
+
+        4. If `traces_sampler` is not defined and there's no parent sampling
+        decision, `traces_sample_rate` will be used.
+        """
+
+        hub = self.hub or sentry_sdk.Hub.current
+        client = hub.client
+        options = (client and client.options) or {}
+        transaction_description = "{op}transaction <{name}>".format(
+            op=("<" + self.op + "> " if self.op else ""), name=self.name
         )
-    elif span.op == "http":
-        hub.add_breadcrumb(type="http", category="httplib", data=span._data)
-    elif span.op == "subprocess":
-        hub.add_breadcrumb(
-            type="subprocess",
-            category="subprocess",
-            message=span.description,
-            data=span._data,
+
+        # nothing to do if there's no client or if tracing is disabled
+        if not client or not has_tracing_enabled(options):
+            self.sampled = False
+            return
+
+        # if the user has forced a sampling decision by passing a `sampled`
+        # value when starting the transaction, go with that
+        if self.sampled is not None:
+            self.sample_rate = float(self.sampled)
+            return
+
+        # we would have bailed already if neither `traces_sampler` nor
+        # `traces_sample_rate` were defined, so one of them should work; prefer
+        # the sampler hook when both are set
+        sample_rate = (
+            options["traces_sampler"](sampling_context)
+            if callable(options.get("traces_sampler"))
+            else (
+                # default inheritance behavior
+                sampling_context["parent_sampled"]
+                if sampling_context["parent_sampled"] is not None
+                else options["traces_sample_rate"]
+            )
         )
+
+        # Since this is coming from the user (or from a function provided by the
+        # user), who knows what we might get. (The only valid values are
+        # booleans or numbers between 0 and 1.)
+        if not is_valid_sample_rate(sample_rate):
+            logger.warning(
+                "[Tracing] Discarding {transaction_description} because of invalid sample rate.".format(
+                    transaction_description=transaction_description,
+                )
+            )
+            self.sampled = False
+            return
+
+        self.sample_rate = float(sample_rate)
+
+        # if the function returned 0 (or false), or if `traces_sample_rate` is
+        # 0, it's a sign the transaction should be dropped
+        if not sample_rate:
+            logger.debug(
+                "[Tracing] Discarding {transaction_description} because {reason}".format(
+                    transaction_description=transaction_description,
+                    reason=(
+                        "traces_sampler returned 0 or False"
+                        if callable(options.get("traces_sampler"))
+                        else "traces_sample_rate is set to 0"
+                    ),
+                )
+            )
+            self.sampled = False
+            return
+
+        # Now we roll the dice. random.random is inclusive of 0, but not of 1,
+        # so strict < is safe here. In case sample_rate is a boolean, cast it
+        # to a float (True becomes 1.0 and False becomes 0.0)
+        self.sampled = random.random() < float(sample_rate)
+
+        if self.sampled:
+            logger.debug(
+                "[Tracing] Starting {transaction_description}".format(
+                    transaction_description=transaction_description,
+                )
+            )
+        else:
+            logger.debug(
+                "[Tracing] Discarding {transaction_description} because it's not included in the random sample (sampling rate = {sample_rate})".format(
+                    transaction_description=transaction_description,
+                    sample_rate=float(sample_rate),
+                )
+            )
+
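
An example traces_sampler honoring the precedence rules documented above. The sampling_context keys used here ("parent_sampled", "transaction_context") are the ones this code passes, but treat the snippet as a sketch rather than canonical configuration:

    import sentry_sdk

    def traces_sampler(sampling_context):
        # Rule 2: the sampler may defer to the parent decision or override it.
        if sampling_context["parent_sampled"] is not None:
            return sampling_context["parent_sampled"]
        if sampling_context["transaction_context"].get("name") == "checkout":
            return 1.0  # always keep checkout transactions
        return 0.1  # sample everything else at 10%

    sentry_sdk.init(
        dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder DSN
        traces_sampler=traces_sampler,
    )
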
+
+class NoOpSpan(Span):
+    def __repr__(self):
+        # type: () -> str
+        return self.__class__.__name__
+
+    def start_child(self, instrumenter=INSTRUMENTER.SENTRY, **kwargs):
+        # type: (str, **Any) -> NoOpSpan
+        return NoOpSpan()
+
+    def new_span(self, **kwargs):
+        # type: (**Any) -> NoOpSpan
+        return self.start_child(**kwargs)
+
+    def set_tag(self, key, value):
+        # type: (str, Any) -> None
+        pass
+
+    def set_data(self, key, value):
+        # type: (str, Any) -> None
+        pass
+
+    def set_status(self, value):
+        # type: (str) -> None
+        pass
+
+    def set_http_status(self, http_status):
+        # type: (int) -> None
+        pass
+
+    def finish(self, hub=None, end_timestamp=None):
+        # type: (Optional[sentry_sdk.Hub], Optional[datetime]) -> Optional[str]
+        pass
+
+
+# Circular imports
+
+from sentry_sdk.tracing_utils import (
+    Baggage,
+    EnvironHeaders,
+    compute_tracestate_entry,
+    extract_sentrytrace_data,
+    extract_tracestate_data,
+    has_tracestate_enabled,
+    has_tracing_enabled,
+    is_valid_sample_rate,
+    maybe_create_breadcrumbs_from_span,
+    has_custom_measurements_enabled,
+)
diff --git a/sentry_sdk/tracing_utils.py b/sentry_sdk/tracing_utils.py
new file mode 100644
index 0000000000..cc1851ff46
--- /dev/null
+++ b/sentry_sdk/tracing_utils.py
@@ -0,0 +1,559 @@
+import re
+import contextlib
+import json
+import math
+
+from numbers import Real
+from decimal import Decimal
+
+import sentry_sdk
+from sentry_sdk.consts import OP
+
+from sentry_sdk.utils import (
+    capture_internal_exceptions,
+    Dsn,
+    logger,
+    safe_str,
+    to_base64,
+    to_string,
+    from_base64,
+)
+from sentry_sdk._compat import PY2, iteritems
+from sentry_sdk._types import MYPY
+
+if PY2:
+    from collections import Mapping
+    from urllib import quote, unquote
+else:
+    from collections.abc import Mapping
+    from urllib.parse import quote, unquote
+
+if MYPY:
+    import typing
+
+    from typing import Generator
+    from typing import Optional
+    from typing import Any
+    from typing import Dict
+    from typing import Union
+
+
+SENTRY_TRACE_REGEX = re.compile(
+    "^[ \t]*"  # whitespace
+    "([0-9a-f]{32})?"  # trace_id
+    "-?([0-9a-f]{16})?"  # span_id
+    "-?([01])?"  # sampled
+    "[ \t]*$"  # whitespace
+)
+
+# This is a normal base64 regex, modified to reflect the fact that we strip the
+# trailing = or == off
+base64_stripped = (
+    # any of the characters in the base64 "alphabet", in multiples of 4
+    "([a-zA-Z0-9+/]{4})*"
+    # either nothing or 2 or 3 base64-alphabet characters (see
+    # https://en.wikipedia.org/wiki/Base64#Decoding_Base64_without_padding for
+    # why there's never only 1 extra character)
+    "([a-zA-Z0-9+/]{2,3})?"
+)
+
+# comma-delimited list of entries of the form `xxx=yyy`
+tracestate_entry = "[^=]+=[^=]+"
+TRACESTATE_ENTRIES_REGEX = re.compile(
+    # one or more xxxxx=yyyy entries
+    "^({te})+"
+    # each entry except the last must be followed by a comma
+    "(,|$)".format(te=tracestate_entry)
+)
+
+# this doesn't check that the value is valid, just that there's something there
+# of the form `sentry=xxxx`
+SENTRY_TRACESTATE_ENTRY_REGEX = re.compile(
+    # either sentry is the first entry or there's stuff immediately before it,
+    # ending in a comma (this prevents matching something like `coolsentry=xxx`)
+    "(?:^|.+,)"
+    # sentry's part, not including the potential comma
+    "(sentry=[^,]*)"
+    # either there's a comma and another vendor's entry or we end
+    "(?:,.+|$)"
+)
+
+
+class EnvironHeaders(Mapping):  # type: ignore
+    def __init__(
+        self,
+        environ,  # type: typing.Mapping[str, str]
+        prefix="HTTP_",  # type: str
+    ):
+        # type: (...) -> None
+        self.environ = environ
+        self.prefix = prefix
+
+    def __getitem__(self, key):
+        # type: (str) -> Optional[Any]
+        return self.environ[self.prefix + key.replace("-", "_").upper()]
+
+    def __len__(self):
+        # type: () -> int
+        return sum(1 for _ in iter(self))
+
+    def __iter__(self):
+        # type: () -> Generator[str, None, None]
+        for k in self.environ:
+            if not isinstance(k, str):
+                continue
+
+            k = k.replace("-", "_").upper()
+            if not k.startswith(self.prefix):
+                continue
+
+            yield k[len(self.prefix) :]
+
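
EnvironHeaders simply adapts WSGI-style HTTP_FOO_BAR keys to header-style lookups (the header value below is invented):

    environ = {
        "HTTP_SENTRY_TRACE": "771a43a4192642f0b136d5159a501700-1234567890abcdef-1",
        "wsgi.url_scheme": "https",
    }
    headers = EnvironHeaders(environ)

    # "header-name" lookups translate to the "HTTP_HEADER_NAME" form...
    assert headers["sentry-trace"] == environ["HTTP_SENTRY_TRACE"]
    # ...and iteration yields only HTTP_-prefixed keys, prefix stripped.
    assert list(headers) == ["SENTRY_TRACE"]
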
+
+def has_tracing_enabled(options):
+    # type: (Dict[str, Any]) -> bool
+    """
+    Returns True if either traces_sample_rate or traces_sampler is
+    defined, False otherwise.
+    """
+
+    return bool(
+        options.get("traces_sample_rate") is not None
+        or options.get("traces_sampler") is not None
+    )
+
+
+def is_valid_sample_rate(rate):
+    # type: (Any) -> bool
+    """
+    Checks the given sample rate to make sure it is valid type and value (a
+    boolean or a number between 0 and 1, inclusive).
+    """
+
+    # both booleans and NaN are instances of Real, so a) checking for Real
+    # also covers booleans, and b) we still have to check for NaN separately.
+    # Decimal does not derive from Real, so we check for it explicitly too.
+    if not isinstance(rate, (Real, Decimal)) or math.isnan(rate):
+        logger.warning(
+            "[Tracing] Given sample rate is invalid. Sample rate must be a boolean or a number between 0 and 1. Got {rate} of type {type}.".format(
+                rate=rate, type=type(rate)
+            )
+        )
+        return False
+
+    # in case rate is a boolean, it will get cast to 1 if it's True and 0 if it's False
+    rate = float(rate)
+    if rate < 0 or rate > 1:
+        logger.warning(
+            "[Tracing] Given sample rate is invalid. Sample rate must be between 0 and 1. Got {rate}.".format(
+                rate=rate
+            )
+        )
+        return False
+
+    return True
+
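
Spot checks of the validation above:

    assert is_valid_sample_rate(0.25) is True
    assert is_valid_sample_rate(True) is True          # bools are Real
    assert is_valid_sample_rate(float("nan")) is False
    assert is_valid_sample_rate("0.5") is False        # strings are rejected
    assert is_valid_sample_rate(1.5) is False          # out of range
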
+
+@contextlib.contextmanager
+def record_sql_queries(
+    hub,  # type: sentry_sdk.Hub
+    cursor,  # type: Any
+    query,  # type: Any
+    params_list,  # type:  Any
+    paramstyle,  # type: Optional[str]
+    executemany,  # type: bool
+):
+    # type: (...) -> Generator[Span, None, None]
+
+    # TODO: Bring back capturing of params by default
+    if hub.client and hub.client.options["_experiments"].get(
+        "record_sql_params", False
+    ):
+        if not params_list or params_list == [None]:
+            params_list = None
+
+        if paramstyle == "pyformat":
+            paramstyle = "format"
+    else:
+        params_list = None
+        paramstyle = None
+
+    query = _format_sql(cursor, query)
+
+    data = {}
+    if params_list is not None:
+        data["db.params"] = params_list
+    if paramstyle is not None:
+        data["db.paramstyle"] = paramstyle
+    if executemany:
+        data["db.executemany"] = True
+
+    with capture_internal_exceptions():
+        hub.add_breadcrumb(message=query, category="query", data=data)
+
+    with hub.start_span(op=OP.DB, description=query) as span:
+        for k, v in data.items():
+            span.set_data(k, v)
+        yield span
+
+
+def maybe_create_breadcrumbs_from_span(hub, span):
+    # type: (sentry_sdk.Hub, Span) -> None
+    if span.op == OP.DB_REDIS:
+        hub.add_breadcrumb(
+            message=span.description, type="redis", category="redis", data=span._tags
+        )
+    elif span.op == OP.HTTP_CLIENT:
+        hub.add_breadcrumb(type="http", category="httplib", data=span._data)
+    elif span.op == "subprocess":
+        hub.add_breadcrumb(
+            type="subprocess",
+            category="subprocess",
+            message=span.description,
+            data=span._data,
+        )
+
+
+def extract_sentrytrace_data(header):
+    # type: (Optional[str]) -> Optional[typing.Mapping[str, Union[str, bool, None]]]
+    """
+    Given a `sentry-trace` header string, return a dictionary of data.
+    """
+    if not header:
+        return None
+
+    if header.startswith("00-") and header.endswith("-00"):
+        header = header[3:-3]
+
+    match = SENTRY_TRACE_REGEX.match(header)
+    if not match:
+        return None
+
+    trace_id, parent_span_id, sampled_str = match.groups()
+    parent_sampled = None
+
+    if trace_id:
+        trace_id = "{:032x}".format(int(trace_id, 16))
+    if parent_span_id:
+        parent_span_id = "{:016x}".format(int(parent_span_id, 16))
+    if sampled_str:
+        parent_sampled = sampled_str != "0"
+
+    return {
+        "trace_id": trace_id,
+        "parent_span_id": parent_span_id,
+        "parent_sampled": parent_sampled,
+    }
+
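+# Example (illustrative) for a typical sentry-trace header:
+#
+#     extract_sentrytrace_data(
+#         "771a43a4192642f0b136d5159a501700-1234567890123456-1"
+#     )
+#     # => {"trace_id": "771a43a4192642f0b136d5159a501700",
+#     #     "parent_span_id": "1234567890123456",
+#     #     "parent_sampled": True}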
+
+def extract_tracestate_data(header):
+    # type: (Optional[str]) -> typing.Mapping[str, Optional[str]]
+    """
+    Extracts the sentry tracestate value and any third-party data from the given
+    tracestate header, returning a dictionary of data.
+    """
+    sentry_entry = third_party_entry = None
+    before = after = ""
+
+    if header:
+        # find sentry's entry, if any
+        sentry_match = SENTRY_TRACESTATE_ENTRY_REGEX.search(header)
+
+        if sentry_match:
+            sentry_entry = sentry_match.group(1)
+
+            # remove the commas after the split so we don't end up with
+            # `xxx=yyy,,zzz=qqq` (double commas) when we put them back together
+            before, after = map(lambda s: s.strip(","), header.split(sentry_entry))
+
+            # extract sentry's value from its entry and test to make sure it's
+            # valid; if it isn't, discard the entire entry so that a new one
+            # will be created
+            sentry_value = sentry_entry.replace("sentry=", "")
+            if not re.search("^{b64}$".format(b64=base64_stripped), sentry_value):
+                sentry_entry = None
+        else:
+            after = header
+
+        # if either part is invalid or empty, remove it before gluing them together
+        third_party_entry = (
+            ",".join(filter(TRACESTATE_ENTRIES_REGEX.search, [before, after])) or None
+        )
+
+    return {
+        "sentry_tracestate": sentry_entry,
+        "third_party_tracestate": third_party_entry,
+    }
+
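+# Example (illustrative): sentry's entry is extracted whole (including the
+# "sentry=" prefix) while third-party entries are preserved verbatim:
+#
+#     extract_tracestate_data("other=vendor,sentry=aGVsbG8")
+#     # => {"sentry_tracestate": "sentry=aGVsbG8",
+#     #     "third_party_tracestate": "other=vendor"}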
+
+def compute_tracestate_value(data):
+    # type: (typing.Mapping[str, str]) -> str
+    """
+    Computes a new tracestate value using the given data.
+
+    Note: Returns just the base64-encoded data, NOT the full `sentry=...`
+    tracestate entry.
+    """
+
+    tracestate_json = json.dumps(data, default=safe_str)
+
+    # Base64-encoded strings always come out with a length which is a multiple
+    # of 4. In order to achieve this, the end is padded with one or more `=`
+    # signs. Because the tracestate standard calls for using `=` signs between
+    # vendor name and value (`sentry=xxx,dogsaregreat=yyy`), to avoid confusion
+    # we strip the trailing `=` padding.
+    return (to_base64(tracestate_json) or "").rstrip("=")
+
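+# Example (illustrative): the padding is stripped from the encoded JSON:
+#
+#     compute_tracestate_value({"trace_id": "abc"})
+#     # => "eyJ0cmFjZV9pZCI6ICJhYmMifQ"  (no trailing "=")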
+
+def compute_tracestate_entry(span):
+    # type: (Span) -> Optional[str]
+    """
+    Computes a new sentry tracestate for the span. Includes the `sentry=`.
+
+    Will return `None` if there's no client and/or no DSN.
+    """
+    data = {}
+
+    hub = span.hub or sentry_sdk.Hub.current
+
+    client = hub.client
+    scope = hub.scope
+
+    if client and client.options.get("dsn"):
+        options = client.options
+        user = scope._user
+
+        data = {
+            "trace_id": span.trace_id,
+            "environment": options["environment"],
+            "release": options.get("release"),
+            "public_key": Dsn(options["dsn"]).public_key,
+        }
+
+        if user and (user.get("id") or user.get("segment")):
+            user_data = {}
+
+            if user.get("id"):
+                user_data["id"] = user["id"]
+
+            if user.get("segment"):
+                user_data["segment"] = user["segment"]
+
+            data["user"] = user_data
+
+        if span.containing_transaction:
+            data["transaction"] = span.containing_transaction.name
+
+        return "sentry=" + compute_tracestate_value(data)
+
+    return None
+
+
+def reinflate_tracestate(encoded_tracestate):
+    # type: (str) -> typing.Optional[Mapping[str, str]]
+    """
+    Given a sentry tracestate value in its encoded form, translate it back into
+    a dictionary of data.
+    """
+    inflated_tracestate = None
+
+    if encoded_tracestate:
+        # Base64-encoded strings always come out with a length which is a
+        # multiple of 4. In order to achieve this, the end is padded with one or
+        # more `=` signs. Because the tracestate standard calls for using `=`
+        # signs between vendor name and value (`sentry=xxx,dogsaregreat=yyy`),
+        # to avoid confusion we strip the `=` when the data is initially
+        # encoded. Python's decoding function requires they be put back.
+        # Fortunately, it doesn't complain if there are too many, so we always
+        # attach two `=` (there will never be more than two missing; see
+        # https://en.wikipedia.org/wiki/Base64#Decoding_Base64_without_padding).
+        tracestate_json = from_base64(encoded_tracestate + "==")
+
+        try:
+            assert tracestate_json is not None
+            inflated_tracestate = json.loads(tracestate_json)
+        except Exception as err:
+            logger.warning(
+                (
+                    "Unable to attach tracestate data to envelope header: {err}"
+                    + "\nTracestate value is {encoded_tracestate}"
+                ).format(err=err, encoded_tracestate=encoded_tracestate),
+            )
+
+    return inflated_tracestate
+
+
+def _format_sql(cursor, sql):
+    # type: (Any, str) -> Optional[str]
+
+    real_sql = None
+
+    # If we're using psycopg2, it could be that we're
+    # looking at a query that uses Composed objects. Use psycopg2's mogrify
+    # function to format the query. We lose per-parameter trimming but gain
+    # accuracy in formatting.
+    try:
+        if hasattr(cursor, "mogrify"):
+            real_sql = cursor.mogrify(sql)
+            if isinstance(real_sql, bytes):
+                real_sql = real_sql.decode(cursor.connection.encoding)
+    except Exception:
+        real_sql = None
+
+    return real_sql or to_string(sql)
+
+
+def has_tracestate_enabled(span=None):
+    # type: (Optional[Span]) -> bool
+
+    client = ((span and span.hub) or sentry_sdk.Hub.current).client
+    options = client and client.options
+
+    return bool(options and options["_experiments"].get("propagate_tracestate"))
+
+
+def has_custom_measurements_enabled():
+    # type: () -> bool
+    client = sentry_sdk.Hub.current.client
+    options = client and client.options
+    return bool(options and options["_experiments"].get("custom_measurements"))
+
+
+class Baggage(object):
+    __slots__ = ("sentry_items", "third_party_items", "mutable")
+
+    SENTRY_PREFIX = "sentry-"
+    SENTRY_PREFIX_REGEX = re.compile("^sentry-")
+
+    # DynamicSamplingContext
+    DSC_KEYS = [
+        "trace_id",
+        "public_key",
+        "sample_rate",
+        "release",
+        "environment",
+        "transaction",
+        "user_id",
+        "user_segment",
+    ]
+
+    def __init__(
+        self,
+        sentry_items,  # type: Dict[str, str]
+        third_party_items="",  # type: str
+        mutable=True,  # type: bool
+    ):
+        self.sentry_items = sentry_items
+        self.third_party_items = third_party_items
+        self.mutable = mutable
+
+    @classmethod
+    def from_incoming_header(cls, header):
+        # type: (Optional[str]) -> Baggage
+        """
+        Freeze the baggage if the incoming header already contains sentry items.
+        """
+        sentry_items = {}
+        third_party_items = ""
+        mutable = True
+
+        if header:
+            for item in header.split(","):
+                if "=" not in item:
+                    continue
+
+                with capture_internal_exceptions():
+                    item = item.strip()
+                    key, val = item.split("=")
+                    if Baggage.SENTRY_PREFIX_REGEX.match(key):
+                        baggage_key = unquote(key.split("-")[1])
+                        sentry_items[baggage_key] = unquote(val)
+                        mutable = False
+                    else:
+                        third_party_items += ("," if third_party_items else "") + item
+
+        return Baggage(sentry_items, third_party_items, mutable)
+
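+    # Example (illustrative): sentry items freeze the baggage; third-party
+    # items are carried along verbatim:
+    #
+    #     Baggage.from_incoming_header("sentry-trace_id=abc,other=vendor")
+    #     # => sentry_items={"trace_id": "abc"},
+    #     #    third_party_items="other=vendor", mutable=False
+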
+    @classmethod
+    def populate_from_transaction(cls, transaction):
+        # type: (Transaction) -> Baggage
+        """
+        Populate a fresh baggage entry with sentry_items and make it immutable
+        if this is the head SDK, which originates traces.
+        """
+        hub = transaction.hub or sentry_sdk.Hub.current
+        client = hub.client
+        sentry_items = {}  # type: Dict[str, str]
+
+        if not client:
+            return Baggage(sentry_items)
+
+        options = client.options or {}
+        user = (hub.scope and hub.scope._user) or {}
+
+        sentry_items["trace_id"] = transaction.trace_id
+
+        if options.get("environment"):
+            sentry_items["environment"] = options["environment"]
+
+        if options.get("release"):
+            sentry_items["release"] = options["release"]
+
+        if options.get("dsn"):
+            sentry_items["public_key"] = Dsn(options["dsn"]).public_key
+
+        if (
+            transaction.name
+            and transaction.source not in LOW_QUALITY_TRANSACTION_SOURCES
+        ):
+            sentry_items["transaction"] = transaction.name
+
+        if user.get("segment"):
+            sentry_items["user_segment"] = user["segment"]
+
+        if transaction.sample_rate is not None:
+            sentry_items["sample_rate"] = str(transaction.sample_rate)
+
+        # If there is an existing baggage, it was mutable (which is why we are
+        # creating this new one). If the user happened to put sentry items in
+        # it, give those precedence.
+        if transaction._baggage and transaction._baggage.sentry_items:
+            sentry_items.update(transaction._baggage.sentry_items)
+
+        return Baggage(sentry_items, mutable=False)
+
+    def freeze(self):
+        # type: () -> None
+        self.mutable = False
+
+    def dynamic_sampling_context(self):
+        # type: () -> Dict[str, str]
+        header = {}
+
+        for key in Baggage.DSC_KEYS:
+            item = self.sentry_items.get(key)
+            if item:
+                header[key] = item
+
+        return header
+
+    def serialize(self, include_third_party=False):
+        # type: (bool) -> str
+        items = []
+
+        for key, val in iteritems(self.sentry_items):
+            with capture_internal_exceptions():
+                item = Baggage.SENTRY_PREFIX + quote(key) + "=" + quote(str(val))
+                items.append(item)
+
+        if include_third_party:
+            items.append(self.third_party_items)
+
+        return ",".join(items)
+
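+# Example (illustrative): serialize() re-adds the "sentry-" prefix and
+# percent-quotes keys and values (item order follows dict insertion order):
+#
+#     Baggage({"trace_id": "abc", "sample_rate": "0.5"}).serialize()
+#     # => "sentry-trace_id=abc,sentry-sample_rate=0.5"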
+
+# Circular imports
+from sentry_sdk.tracing import LOW_QUALITY_TRANSACTION_SOURCES
+
+if MYPY:
+    from sentry_sdk.tracing import Span, Transaction
diff --git a/sentry_sdk/transport.py b/sentry_sdk/transport.py
index c6f926a353..4937668cc7 100644
--- a/sentry_sdk/transport.py
+++ b/sentry_sdk/transport.py
@@ -1,16 +1,17 @@
 from __future__ import print_function
 
-import json
 import io
 import urllib3  # type: ignore
 import certifi
 import gzip
+import time
 
 from datetime import datetime, timedelta
+from collections import defaultdict
 
-from sentry_sdk.utils import Dsn, logger, capture_internal_exceptions
+from sentry_sdk.utils import Dsn, logger, capture_internal_exceptions, json_dumps
 from sentry_sdk.worker import BackgroundWorker
-from sentry_sdk.envelope import Envelope, get_event_data_category
+from sentry_sdk.envelope import Envelope, Item, PayloadRef
 
 from sentry_sdk._types import MYPY
 
@@ -23,11 +24,12 @@
     from typing import Tuple
     from typing import Type
     from typing import Union
+    from typing import DefaultDict
 
     from urllib3.poolmanager import PoolManager  # type: ignore
     from urllib3.poolmanager import ProxyManager
 
-    from sentry_sdk._types import Event
+    from sentry_sdk._types import Event, EndpointType
 
     DataCategory = Optional[str]
 
@@ -59,7 +61,8 @@ def capture_event(
         self, event  # type: Event
     ):
         # type: (...) -> None
-        """This gets invoked with the event dictionary when an event should
+        """
+        This gets invoked with the event dictionary when an event should
         be sent to sentry.
         """
         raise NotImplementedError()
@@ -68,14 +71,15 @@ def capture_envelope(
         self, envelope  # type: Envelope
     ):
         # type: (...) -> None
-        """This gets invoked with an envelope when an event should
-        be sent to sentry.  The default implementation invokes `capture_event`
-        if the envelope contains an event and ignores all other envelopes.
         """
-        event = envelope.get_event()
-        if event is not None:
-            self.capture_event(event)
-        return None
+        Send an envelope to Sentry.
+
+        Envelopes are a data container format that can hold any type of data
+        submitted to Sentry. We use it for transactions and sessions, but
+        regular "error" events should go through `capture_event` for backwards
+        compat.
+        """
+        raise NotImplementedError()
 
     def flush(
         self,
@@ -91,6 +95,18 @@ def kill(self):
         """Forcefully kills the transport."""
         pass
 
+    def record_lost_event(
+        self,
+        reason,  # type: str
+        data_category=None,  # type: Optional[str]
+        item=None,  # type: Optional[Item]
+    ):
+        # type: (...) -> None
+        """This increments a counter for event loss by reason and
+        data category.
+        """
+        return None
+
     def __del__(self):
         # type: () -> None
         try:
@@ -125,23 +141,50 @@ def __init__(
 
         Transport.__init__(self, options)
         assert self.parsed_dsn is not None
-        self._worker = BackgroundWorker()
+        self.options = options  # type: Dict[str, Any]
+        self._worker = BackgroundWorker(queue_size=options["transport_queue_size"])
         self._auth = self.parsed_dsn.to_auth("sentry.python/%s" % VERSION)
         self._disabled_until = {}  # type: Dict[DataCategory, datetime]
         self._retry = urllib3.util.Retry()
-        self.options = options
+        self._discarded_events = defaultdict(
+            int
+        )  # type: DefaultDict[Tuple[str, str], int]
+        self._last_client_report_sent = time.time()
 
         self._pool = self._make_pool(
             self.parsed_dsn,
             http_proxy=options["http_proxy"],
             https_proxy=options["https_proxy"],
             ca_certs=options["ca_certs"],
+            proxy_headers=options["proxy_headers"],
         )
 
         from sentry_sdk import Hub
 
         self.hub_cls = Hub
 
+    def record_lost_event(
+        self,
+        reason,  # type: str
+        data_category=None,  # type: Optional[str]
+        item=None,  # type: Optional[Item]
+    ):
+        # type: (...) -> None
+        if not self.options["send_client_reports"]:
+            return
+
+        quantity = 1
+        if item is not None:
+            data_category = item.data_category
+            if data_category == "attachment":
+                # An empty attachment still counts as one lost attachment, so
+                # report a minimum quantity of 1.
+                quantity = len(item.get_bytes()) or 1
+        elif data_category is None:
+            raise TypeError("data category not provided")
+
+        self._discarded_events[data_category, reason] += quantity
+
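+    # Example (illustrative): record_lost_event("ratelimit_backoff",
+    # data_category="error") bumps the counter under the key
+    # ("error", "ratelimit_backoff"); the counters are later drained by
+    # _fetch_pending_client_report into a client report item.
+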
     def _update_rate_limits(self, response):
         # type: (urllib3.HTTPResponse) -> None
 
@@ -149,12 +192,14 @@ def _update_rate_limits(self, response):
         # no matter of the status code to update our internal rate limits.
         header = response.headers.get("x-sentry-rate-limits")
         if header:
+            logger.warning("Rate-limited via x-sentry-rate-limits")
             self._disabled_until.update(_parse_rate_limits(header))
 
         # old sentries only communicate global rate limit hits via the
         # retry-after header on 429.  This header can also be emitted on new
         # sentries if a proxy in front wants to globally slow things down.
         elif response.status == 429:
+            logger.warning("Rate-limited via 429")
             self._disabled_until[None] = datetime.utcnow() + timedelta(
                 seconds=self._retry.get_retry_after(response) or 60
             )
@@ -163,24 +208,46 @@ def _send_request(
         self,
         body,  # type: bytes
         headers,  # type: Dict[str, str]
+        endpoint_type="store",  # type: EndpointType
+        envelope=None,  # type: Optional[Envelope]
     ):
         # type: (...) -> None
+
+        def record_loss(reason):
+            # type: (str) -> None
+            if envelope is None:
+                self.record_lost_event(reason, data_category="error")
+            else:
+                for item in envelope.items:
+                    self.record_lost_event(reason, item=item)
+
         headers.update(
             {
                 "User-Agent": str(self._auth.client),
                 "X-Sentry-Auth": str(self._auth.to_header()),
             }
         )
-        response = self._pool.request(
-            "POST", str(self._auth.store_api_url), body=body, headers=headers
-        )
+        try:
+            response = self._pool.request(
+                "POST",
+                str(self._auth.get_api_url(endpoint_type)),
+                body=body,
+                headers=headers,
+            )
+        except Exception:
+            self.on_dropped_event("network")
+            record_loss("network_error")
+            raise
 
         try:
             self._update_rate_limits(response)
 
             if response.status == 429:
                 # if we hit a 429.  Something was rate limited but we already
-                # acted on this in `self._update_rate_limits`.
+                # acted on this in `self._update_rate_limits`.  Note that we
+                # do not want to record event loss here, as Relay will already
+                # have recorded an outcome for it.
+                self.on_dropped_event("status_429")
                 pass
 
             elif response.status >= 300 or response.status < 200:
@@ -189,9 +256,52 @@ def _send_request(
                     response.status,
                     response.data,
                 )
+                self.on_dropped_event("status_{}".format(response.status))
+                record_loss("network_error")
         finally:
             response.close()
 
+    def on_dropped_event(self, reason):
+        # type: (str) -> None
+        return None
+
+    def _fetch_pending_client_report(self, force=False, interval=60):
+        # type: (bool, int) -> Optional[Item]
+        if not self.options["send_client_reports"]:
+            return None
+
+        if not (force or self._last_client_report_sent < time.time() - interval):
+            return None
+
+        discarded_events = self._discarded_events
+        self._discarded_events = defaultdict(int)
+        self._last_client_report_sent = time.time()
+
+        if not discarded_events:
+            return None
+
+        return Item(
+            PayloadRef(
+                json={
+                    "timestamp": time.time(),
+                    "discarded_events": [
+                        {"reason": reason, "category": category, "quantity": quantity}
+                        for (
+                            (category, reason),
+                            quantity,
+                        ) in discarded_events.items()
+                    ],
+                }
+            ),
+            type="client_report",
+        )
+
+    def _flush_client_reports(self, force=False):
+        # type: (bool) -> None
+        client_report = self._fetch_pending_client_report(force=force, interval=60)
+        if client_report is not None:
+            self.capture_envelope(Envelope(items=[client_report]))
+
     def _check_disabled(self, category):
         # type: (str) -> bool
         def _disabled(bucket):
@@ -205,12 +315,15 @@ def _send_event(
         self, event  # type: Event
     ):
         # type: (...) -> None
-        if self._check_disabled(get_event_data_category(event)):
+
+        if self._check_disabled("error"):
+            self.on_dropped_event("self_rate_limits")
+            self.record_lost_event("ratelimit_backoff", data_category="error")
             return None
 
         body = io.BytesIO()
         with gzip.GzipFile(fileobj=body, mode="w") as f:
-            f.write(json.dumps(event, allow_nan=False).encode("utf-8"))
+            f.write(json_dumps(event))
 
         assert self.parsed_dsn is not None
         logger.debug(
@@ -235,12 +348,31 @@ def _send_envelope(
         # type: (...) -> None
 
         # remove all items from the envelope which are over quota
-        envelope.items[:] = [
-            x for x in envelope.items if not self._check_disabled(x.data_category)
-        ]
+        new_items = []
+        for item in envelope.items:
+            if self._check_disabled(item.data_category):
+                if item.data_category in ("transaction", "error", "default"):
+                    self.on_dropped_event("self_rate_limits")
+                self.record_lost_event("ratelimit_backoff", item=item)
+            else:
+                new_items.append(item)
+
+        # Since we're modifying the envelope here make a copy so that others
+        # that hold references do not see their envelope modified.
+        envelope = Envelope(headers=envelope.headers, items=new_items)
+
         if not envelope.items:
             return None
 
+        # Since we're already in the business of sending out an envelope here,
+        # check whether a client report is pending and, if so, attach it to
+        # the envelope scheduled for sending.  In practice this attaches the
+        # client report to the most recent session update.
+        client_report_item = self._fetch_pending_client_report(interval=30)
+        if client_report_item is not None:
+            envelope.items.append(client_report_item)
+
         body = io.BytesIO()
         with gzip.GzipFile(fileobj=body, mode="w") as f:
             envelope.serialize_into(f)
@@ -252,12 +384,15 @@ def _send_envelope(
             self.parsed_dsn.project_id,
             self.parsed_dsn.host,
         )
+
         self._send_request(
             body.getvalue(),
             headers={
                 "Content-Type": "application/x-sentry-envelope",
                 "Content-Encoding": "gzip",
             },
+            endpoint_type="envelope",
+            envelope=envelope,
         )
         return None
 
@@ -269,27 +404,43 @@ def _get_pool_options(self, ca_certs):
             "ca_certs": ca_certs or certifi.where(),
         }
 
+    def _in_no_proxy(self, parsed_dsn):
+        # type: (Dsn) -> bool
+        no_proxy = getproxies().get("no")
+        if not no_proxy:
+            return False
+        for host in no_proxy.split(","):
+            host = host.strip()
+            if parsed_dsn.host.endswith(host) or parsed_dsn.netloc.endswith(host):
+                return True
+        return False
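+
+    # Example (illustrative): with NO_PROXY="sentry.io" in the environment, a
+    # DSN host such as "o0.ingest.sentry.io" ends with "sentry.io", so any
+    # HTTPS_PROXY/HTTP_PROXY settings are ignored for this transport.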
+
     def _make_pool(
         self,
         parsed_dsn,  # type: Dsn
         http_proxy,  # type: Optional[str]
         https_proxy,  # type: Optional[str]
         ca_certs,  # type: Optional[Any]
+        proxy_headers,  # type: Optional[Dict[str, str]]
     ):
         # type: (...) -> Union[PoolManager, ProxyManager]
         proxy = None
+        no_proxy = self._in_no_proxy(parsed_dsn)
 
         # try HTTPS first
         if parsed_dsn.scheme == "https" and (https_proxy != ""):
-            proxy = https_proxy or getproxies().get("https")
+            proxy = https_proxy or (not no_proxy and getproxies().get("https"))
 
         # maybe fallback to HTTP proxy
         if not proxy and (http_proxy != ""):
-            proxy = http_proxy or getproxies().get("http")
+            proxy = http_proxy or (not no_proxy and getproxies().get("http"))
 
         opts = self._get_pool_options(ca_certs)
 
         if proxy:
+            if proxy_headers:
+                opts["proxy_headers"] = proxy_headers
+
             return urllib3.ProxyManager(proxy, **opts)
         else:
             return urllib3.PoolManager(**opts)
@@ -305,8 +456,11 @@ def send_event_wrapper():
             with hub:
                 with capture_internal_exceptions():
                     self._send_event(event)
+                    self._flush_client_reports()
 
-        self._worker.submit(send_event_wrapper)
+        if not self._worker.submit(send_event_wrapper):
+            self.on_dropped_event("full_queue")
+            self.record_lost_event("queue_overflow", data_category="error")
 
     def capture_envelope(
         self, envelope  # type: Envelope
@@ -319,8 +473,12 @@ def send_envelope_wrapper():
             with hub:
                 with capture_internal_exceptions():
                     self._send_envelope(envelope)
+                    self._flush_client_reports()
 
-        self._worker.submit(send_envelope_wrapper)
+        if not self._worker.submit(send_envelope_wrapper):
+            self.on_dropped_event("full_queue")
+            for item in envelope.items:
+                self.record_lost_event("queue_overflow", item=item)
 
     def flush(
         self,
@@ -329,7 +487,9 @@ def flush(
     ):
         # type: (...) -> None
         logger.debug("Flushing HTTP transport")
+
         if timeout > 0:
+            self._worker.submit(lambda: self._flush_client_reports(force=True))
             self._worker.flush(timeout, callback)
 
     def kill(self):
@@ -368,7 +528,7 @@ def make_transport(options):
     elif callable(ref_transport):
         return _FunctionTransport(ref_transport)  # type: ignore
 
-    # if a transport class is given only instanciate it if the dsn is not
+    # if a transport class is given only instantiate it if the dsn is not
     # empty or None
     if options["dsn"]:
         return transport_cls(options)
diff --git a/sentry_sdk/utils.py b/sentry_sdk/utils.py
index d92309c5f7..3f573171a6 100644
--- a/sentry_sdk/utils.py
+++ b/sentry_sdk/utils.py
@@ -1,31 +1,45 @@
-import os
-import sys
+import base64
+import json
 import linecache
 import logging
-
+import os
+import re
+import subprocess
+import sys
+import threading
+import time
 from datetime import datetime
+from functools import partial
 
-import sentry_sdk
-from sentry_sdk._compat import urlparse, text_type, implements_str, PY2
+try:
+    from functools import partialmethod
 
+    _PARTIALMETHOD_AVAILABLE = True
+except ImportError:
+    _PARTIALMETHOD_AVAILABLE = False
+
+import sentry_sdk
+from sentry_sdk._compat import PY2, PY33, PY37, implements_str, text_type, urlparse
 from sentry_sdk._types import MYPY
 
 if MYPY:
-    from types import FrameType
-    from types import TracebackType
-    from typing import Any
-    from typing import Callable
-    from typing import Dict
-    from typing import ContextManager
-    from typing import Iterator
-    from typing import List
-    from typing import Optional
-    from typing import Set
-    from typing import Tuple
-    from typing import Union
-    from typing import Type
-
-    from sentry_sdk._types import ExcInfo
+    from types import FrameType, TracebackType
+    from typing import (
+        Any,
+        Callable,
+        ContextManager,
+        Dict,
+        Iterator,
+        List,
+        Optional,
+        Set,
+        Tuple,
+        Type,
+        Union,
+    )
+
+    from sentry_sdk._types import EndpointType, ExcInfo
+
 
 epoch = datetime(1970, 1, 1)
 
@@ -33,8 +47,14 @@
 # The logger is created here but initialized in the debug support module
 logger = logging.getLogger("sentry_sdk.errors")
 
-MAX_STRING_LENGTH = 512
-MAX_FORMAT_PARAM_LENGTH = 128
+MAX_STRING_LENGTH = 1024
+BASE64_ALPHABET = re.compile(r"^[a-zA-Z0-9/+=]*$")
+
+
+def json_dumps(data):
+    # type: (Any) -> bytes
+    """Serialize data into a compact JSON representation encoded as UTF-8."""
+    return json.dumps(data, allow_nan=False, separators=(",", ":")).encode("utf-8")
 
 
 def _get_debug_hub():
@@ -43,6 +63,79 @@ def _get_debug_hub():
     pass
 
 
+def get_default_release():
+    # type: () -> Optional[str]
+    """Try to guess a default release."""
+    release = os.environ.get("SENTRY_RELEASE")
+    if release:
+        return release
+
+    with open(os.path.devnull, "w+") as null:
+        try:
+            release = (
+                subprocess.Popen(
+                    ["git", "rev-parse", "HEAD"],
+                    stdout=subprocess.PIPE,
+                    stderr=null,
+                    stdin=null,
+                )
+                .communicate()[0]
+                .strip()
+                .decode("utf-8")
+            )
+        except (OSError, IOError):
+            pass
+
+        if release:
+            return release
+
+    for var in (
+        "HEROKU_SLUG_COMMIT",
+        "SOURCE_VERSION",
+        "CODEBUILD_RESOLVED_SOURCE_VERSION",
+        "CIRCLE_SHA1",
+        "GAE_DEPLOYMENT_ID",
+    ):
+        release = os.environ.get(var)
+        if release:
+            return release
+    return None
+
+
+def get_sdk_name(installed_integrations):
+    # type: (List[str]) -> str
+    """Return the SDK name including the name of the used web framework."""
+
+    # Note: We cannot use, for example,
+    # sentry_sdk.integrations.django.DjangoIntegration.identifier here, because
+    # if Django is not installed the integration module cannot be imported.
+    framework_integrations = [
+        "django",
+        "flask",
+        "fastapi",
+        "bottle",
+        "falcon",
+        "quart",
+        "sanic",
+        "starlette",
+        "chalice",
+        "serverless",
+        "pyramid",
+        "tornado",
+        "aiohttp",
+        "aws_lambda",
+        "gcp",
+        "beam",
+        "asgi",
+        "wsgi",
+    ]
+
+    for integration in framework_integrations:
+        if integration in installed_integrations:
+            return "sentry.python.{}".format(integration)
+
+    return "sentry.python"
+
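+# Example (illustrative): the first framework found in the list wins:
+#
+#     get_sdk_name(["django", "celery"])  # => "sentry.python.django"
+#     get_sdk_name(["celery"])            # => "sentry.python"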
+
 class CaptureInternalException(object):
     __slots__ = ()
 
@@ -110,7 +203,7 @@ def __init__(self, value):
             return
         parts = urlparse.urlsplit(text_type(value))
 
-        if parts.scheme not in (u"http", u"https"):
+        if parts.scheme not in ("http", "https"):
             raise BadDsn("Unsupported scheme %r" % parts.scheme)
         self.scheme = parts.scheme
 
@@ -120,7 +213,7 @@ def __init__(self, value):
         self.host = parts.hostname
 
         if parts.port is None:
-            self.port = self.scheme == "https" and 443 or 80
+            self.port = self.scheme == "https" and 443 or 80  # type: int
         else:
             self.port = parts.port
 
@@ -200,28 +293,44 @@ def __init__(
     @property
     def store_api_url(self):
         # type: () -> str
+        """Returns the API url for storing events.
+
+        Deprecated: use get_api_url instead.
+        """
+        return self.get_api_url(type="store")
+
+    def get_api_url(
+        self, type="store"  # type: EndpointType
+    ):
+        # type: (...) -> str
         """Returns the API url for storing events."""
-        return "%s://%s%sapi/%s/store/" % (
+        return "%s://%s%sapi/%s/%s/" % (
             self.scheme,
             self.host,
             self.path,
             self.project_id,
+            type,
         )
 
-    def to_header(self, timestamp=None):
-        # type: (Optional[datetime]) -> str
+    def to_header(self):
+        # type: () -> str
         """Returns the auth header a string."""
         rv = [("sentry_key", self.public_key), ("sentry_version", self.version)]
-        if timestamp is not None:
-            rv.append(("sentry_timestamp", str(to_timestamp(timestamp))))
         if self.client is not None:
             rv.append(("sentry_client", self.client))
         if self.secret_key is not None:
             rv.append(("sentry_secret", self.secret_key))
-        return u"Sentry " + u", ".join("%s=%s" % (key, value) for key, value in rv)
+        return "Sentry " + ", ".join("%s=%s" % (key, value) for key, value in rv)
 
 
 class AnnotatedValue(object):
+    """
+    Meta information for a data field in the event payload.
+    This is to tell Relay that we have tampered with the field's value.
+    See:
+    https://github.com/getsentry/relay/blob/be12cd49a0f06ea932ed9b9f93a655de5d6ad6d1/relay-general/src/types/meta.rs#L407-L423
+    """
+
     __slots__ = ("value", "metadata")
 
     def __init__(self, value, metadata):
@@ -229,6 +338,56 @@ def __init__(self, value, metadata):
         self.value = value
         self.metadata = metadata
 
+    @classmethod
+    def removed_because_raw_data(cls):
+        # type: () -> AnnotatedValue
+        """The value was removed because it could not be parsed. This is done for request body values that are not json nor a form."""
+        return AnnotatedValue(
+            value="",
+            metadata={
+                "rem": [  # Remark
+                    [
+                        "!raw",  # Unparsable raw data
+                        "x",  # The fields original value was removed
+                    ]
+                ]
+            },
+        )
+
+    @classmethod
+    def removed_because_over_size_limit(cls):
+        # type: () -> AnnotatedValue
+        """The actual value was removed because the size of the field exceeded the configured maximum size (specified with the request_bodies sdk option)"""
+        return AnnotatedValue(
+            value="",
+            metadata={
+                "rem": [  # Remark
+                    [
+                        "!config",  # Because of configured maximum size
+                        "x",  # The fields original value was removed
+                    ]
+                ]
+            },
+        )
+
+    @classmethod
+    def substituted_because_contains_sensitive_data(cls):
+        # type: () -> AnnotatedValue
+        """The actual value was removed because it contained sensitive information."""
+        from sentry_sdk.consts import SENSITIVE_DATA_SUBSTITUTE
+
+        return AnnotatedValue(
+            value=SENSITIVE_DATA_SUBSTITUTE,
+            metadata={
+                "rem": [  # Remark
+                    [
+                        "!config",  # Because of SDK configuration (in this case the config is the hard coded removal of certain django cookies)
+                        "s",  # The fields original value was substituted
+                    ]
+                ]
+            },
+        )
+
 
 if MYPY:
     from typing import TypeVar
@@ -378,8 +537,7 @@ def safe_repr(value):
                 return rv
         except Exception:
             # If e.g. the call to `repr` already fails
-            return u""
-
+            return ""
 
 else:
 
@@ -405,6 +563,9 @@ def filename_for_module(module, abs_path):
             return os.path.basename(abs_path)
 
         base_module_path = sys.modules[base_module].__file__
+        if not base_module_path:
+            return abs_path
+
         return abs_path.split(base_module_path.rsplit(os.sep, 2)[0], 1)[-1].lstrip(
             os.sep
         )
@@ -447,18 +608,6 @@ def serialize_frame(frame, tb_lineno=None, with_locals=True):
     return rv
 
 
-def stacktrace_from_traceback(tb=None, with_locals=True):
-    # type: (Optional[TracebackType], bool) -> Dict[str, List[Dict[str, Any]]]
-    return {
-        "frames": [
-            serialize_frame(
-                tb.tb_frame, tb_lineno=tb.tb_lineno, with_locals=with_locals
-            )
-            for tb in iter_stacks(tb)
-        ]
-    }
-
-
 def current_stacktrace(with_locals=True):
     # type: (bool) -> Any
     __tracebackhide__ = True
@@ -494,7 +643,7 @@ def single_exception_from_error_tuple(
         errno = None
 
     if errno is not None:
-        mechanism = mechanism or {}
+        mechanism = mechanism or {"type": "generic"}
         mechanism.setdefault("meta", {}).setdefault("errno", {}).setdefault(
             "number", errno
         )
@@ -504,14 +653,23 @@ def single_exception_from_error_tuple(
     else:
         with_locals = client_options["with_locals"]
 
-    return {
+    frames = [
+        serialize_frame(tb.tb_frame, tb_lineno=tb.tb_lineno, with_locals=with_locals)
+        for tb in iter_stacks(tb)
+    ]
+
+    rv = {
         "module": get_type_module(exc_type),
         "type": get_type_name(exc_type),
         "value": safe_str(exc_value),
         "mechanism": mechanism,
-        "stacktrace": stacktrace_from_traceback(tb, with_locals),
     }
 
+    if frames:
+        rv["stacktrace"] = {"frames": frames}
+
+    return rv
+
 
 HAS_CHAINED_EXCEPTIONS = hasattr(Exception, "__suppress_context__")
 
@@ -547,7 +705,6 @@ def walk_exception_chain(exc_info):
             exc_value = cause
             tb = getattr(cause, "__traceback__", None)
 
-
 else:
 
     def walk_exception_chain(exc_info):
@@ -709,11 +866,11 @@ def strip_string(value, max_length=None):
         # This is intentionally not just the default such that one can patch `MAX_STRING_LENGTH` and affect `strip_string`.
         max_length = MAX_STRING_LENGTH
 
-    length = len(value)
+    length = len(value.encode("utf-8"))
 
     if length > max_length:
         return AnnotatedValue(
-            value=value[: max_length - 3] + u"...",
+            value=value[: max_length - 3] + "...",
             metadata={
                 "len": length,
                 "rem": [["!limit", "x", max_length - 3, max_length]],
@@ -722,12 +879,34 @@ def strip_string(value, max_length=None):
     return value
 
 
-def _is_threading_local_monkey_patched():
+def _is_contextvars_broken():
     # type: () -> bool
+    """
+    Returns whether gevent/eventlet have patched the stdlib in a way where thread locals are now more "correct" than contextvars.
+    """
     try:
+        import gevent  # type: ignore
         from gevent.monkey import is_object_patched  # type: ignore
 
+        # Get the MAJOR and MINOR version numbers of Gevent
+        version_tuple = tuple(
+            [int(part) for part in re.split(r"a|b|rc|\.", gevent.__version__)[:2]]
+        )
         if is_object_patched("threading", "local"):
+            # Gevent 20.9.0 depends on Greenlet 0.4.17, which natively switches
+            # context vars when greenlets are switched, so Gevent 20.9.0+ is fine.
+            # Ref: https://github.com/gevent/gevent/blob/83c9e2ae5b0834b8f84233760aabe82c3ba065b4/src/gevent/monkey.py#L604-L609
+            # Gevent 20.5, which does not depend on Greenlet 0.4.17 (the version
+            # with native contextvars support), can patch both thread locals and
+            # contextvars; in that case, check whether contextvars are
+            # effectively patched.
+            if (
+                # Gevent 20.9.0+
+                (sys.version_info >= (3, 7) and version_tuple >= (20, 9))
+                # Gevent 20.5.0+ or Python < 3.7
+                or (is_object_patched("contextvars", "ContextVar"))
+            ):
+                return False
+
             return True
     except ImportError:
         pass
@@ -743,62 +922,83 @@ def _is_threading_local_monkey_patched():
     return False
 
 
+def _make_threadlocal_contextvars(local):
+    # type: (type) -> type
+    class ContextVar(object):
+        # Super-limited impl of ContextVar
+
+        def __init__(self, name):
+            # type: (str) -> None
+            self._name = name
+            self._local = local()
+
+        def get(self, default):
+            # type: (Any) -> Any
+            return getattr(self._local, "value", default)
+
+        def set(self, value):
+            # type: (Any) -> None
+            self._local.value = value
+
+    return ContextVar
+
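+# Example (illustrative): the fallback behaves like a per-thread variable:
+#
+#     var = _make_threadlocal_contextvars(threading.local)("name")
+#     var.set(42)
+#     var.get(None)  # => 42 in this thread, None in any other thread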
+
 def _get_contextvars():
     # type: () -> Tuple[bool, type]
     """
-    Try to import contextvars and use it if it's deemed safe. We should not use
-    contextvars if gevent or eventlet have patched thread locals, as
-    contextvars are unaffected by that patch.
+    Figure out the "right" contextvars installation to use. Returns a
+    `contextvars.ContextVar`-like class with a limited API.
 
-    https://github.com/gevent/gevent/issues/1407
+    See https://docs.sentry.io/platforms/python/contextvars/ for more information.
     """
-    if not _is_threading_local_monkey_patched():
+    if not _is_contextvars_broken():
         # aiocontextvars is a PyPI package that ensures that the contextvars
         # backport (also a PyPI package) works with asyncio under Python 3.6
         #
         # Import it if available.
-        if not PY2 and sys.version_info < (3, 7):
+        if sys.version_info < (3, 7):
+            # `aiocontextvars` is absolutely required for functional
+            # contextvars on Python 3.6.
             try:
-                from aiocontextvars import ContextVar  # noqa
+                from aiocontextvars import ContextVar
 
                 return True, ContextVar
             except ImportError:
                 pass
+        else:
+            # On Python 3.7 contextvars are functional.
+            try:
+                from contextvars import ContextVar
 
-        try:
-            from contextvars import ContextVar
+                return True, ContextVar
+            except ImportError:
+                pass
 
-            return True, ContextVar
-        except ImportError:
-            pass
+    # Fall back to basic thread-local usage.
 
     from threading import local
 
-    class ContextVar(object):
-        # Super-limited impl of ContextVar
-
-        def __init__(self, name):
-            # type: (str) -> None
-            self._name = name
-            self._local = local()
+    return False, _make_threadlocal_contextvars(local)
 
-        def get(self, default):
-            # type: (Any) -> Any
-            return getattr(self._local, "value", default)
 
-        def set(self, value):
-            # type: (Any) -> None
-            self._local.value = value
+HAS_REAL_CONTEXTVARS, ContextVar = _get_contextvars()
 
-    return False, ContextVar
+CONTEXTVARS_ERROR_MESSAGE = """
 
+With asyncio/ASGI applications, the Sentry SDK requires a functional
+installation of `contextvars` to avoid leaking scope/context data across
+requests.
 
-HAS_REAL_CONTEXTVARS, ContextVar = _get_contextvars()
+Please refer to https://docs.sentry.io/platforms/python/contextvars/ for more information.
+"""
 
 
-def transaction_from_function(func):
+def qualname_from_function(func):
     # type: (Callable[..., Any]) -> Optional[str]
-    # Methods in Python 2
+    """Return the qualified name of func. Works with regular function, lambda, partial and partialmethod."""
+    func_qualname = None  # type: Optional[str]
+
+    # Python 2
     try:
         return "%s.%s.%s" % (
             func.im_class.__module__,  # type: ignore
@@ -808,24 +1008,140 @@ def transaction_from_function(func):
     except Exception:
         pass
 
-    func_qualname = (
-        getattr(func, "__qualname__", None) or getattr(func, "__name__", None) or None
-    )  # type: Optional[str]
+    prefix, suffix = "", ""
 
-    if not func_qualname:
-        # No idea what it is
-        return None
-
-    # Methods in Python 3
-    # Functions
-    # Classes
-    try:
-        return "%s.%s" % (func.__module__, func_qualname)
-    except Exception:
-        pass
+    if (
+        _PARTIALMETHOD_AVAILABLE
+        and hasattr(func, "_partialmethod")
+        and isinstance(func._partialmethod, partialmethod)  # type: ignore
+    ):
+        prefix, suffix = "partialmethod()"
+        func = func._partialmethod.func  # type: ignore
+    elif isinstance(func, partial) and hasattr(func.func, "__name__"):
+        prefix, suffix = "partial()"
+        func = func.func
+
+    if hasattr(func, "__qualname__"):
+        func_qualname = func.__qualname__
+    elif hasattr(func, "__name__"):  # Python 2.7 has no __qualname__
+        func_qualname = func.__name__
+
+    # Python 3: methods, functions, classes
+    if func_qualname is not None:
+        if hasattr(func, "__module__"):
+            func_qualname = func.__module__ + "." + func_qualname
+        func_qualname = prefix + func_qualname + suffix
 
-    # Possibly a lambda
     return func_qualname
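+
+# Example (illustrative; the exact module prefix depends on the runtime): for
+# `functools.partial(json.dumps)`, qualname_from_function returns
+# "partial(<function json.dumps>)".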
 
 
+def transaction_from_function(func):
+    # type: (Callable[..., Any]) -> Optional[str]
+    return qualname_from_function(func)
+
+
 disable_capture_event = ContextVar("disable_capture_event")
+
+
+class ServerlessTimeoutWarning(Exception):  # noqa: N818
+    """Raised when a serverless method is about to reach its timeout."""
+
+    pass
+
+
+class TimeoutThread(threading.Thread):
+    """Creates a Thread which runs (sleeps) for a time duration equal to
+    waiting_time and raises a custom ServerlessTimeout exception.
+    """
+
+    def __init__(self, waiting_time, configured_timeout):
+        # type: (float, int) -> None
+        threading.Thread.__init__(self)
+        self.waiting_time = waiting_time
+        self.configured_timeout = configured_timeout
+        self._stop_event = threading.Event()
+
+    def stop(self):
+        # type: () -> None
+        self._stop_event.set()
+
+    def run(self):
+        # type: () -> None
+
+        self._stop_event.wait(self.waiting_time)
+
+        if self._stop_event.is_set():
+            return
+
+        integer_configured_timeout = int(self.configured_timeout)
+
+        # Round the configured timeout (in seconds) up to the next whole integer
+        if integer_configured_timeout < self.configured_timeout:
+            integer_configured_timeout = integer_configured_timeout + 1
+
+        # Raise the exception once the timeout duration has been reached
+        raise ServerlessTimeoutWarning(
+            "WARNING: Function is expected to time out. Configured timeout duration = {} seconds.".format(
+                integer_configured_timeout
+            )
+        )
+
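+# Usage sketch (illustrative; `handler`, `event` and `context` are
+# hypothetical): the serverless integrations start this watchdog next to the
+# user's function and cancel it on normal completion:
+#
+#     timeout_thread = TimeoutThread(waiting_time=9.5, configured_timeout=10)
+#     timeout_thread.start()
+#     try:
+#         handler(event, context)
+#     finally:
+#         timeout_thread.stop()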
+
+def to_base64(original):
+    # type: (str) -> Optional[str]
+    """
+    Convert a string to base64, via UTF-8. Returns None on invalid input.
+    """
+    base64_string = None
+
+    try:
+        utf8_bytes = original.encode("UTF-8")
+        base64_bytes = base64.b64encode(utf8_bytes)
+        base64_string = base64_bytes.decode("UTF-8")
+    except Exception as err:
+        logger.warning(
+            "Unable to encode {orig} to base64: {err}".format(orig=original, err=err)
+        )
+
+    return base64_string
+
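+# Example (illustrative): a round trip through both helpers:
+#
+#     to_base64("dogs are great")          # => "ZG9ncyBhcmUgZ3JlYXQ="
+#     from_base64("ZG9ncyBhcmUgZ3JlYXQ=")  # => "dogs are great"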
+
+def from_base64(base64_string):
+    # type: (str) -> Optional[str]
+    """
+    Convert a string from base64, via UTF-8. Returns None on invalid input.
+    """
+    utf8_string = None
+
+    try:
+        only_valid_chars = BASE64_ALPHABET.match(base64_string)
+        assert only_valid_chars
+
+        base64_bytes = base64_string.encode("UTF-8")
+        utf8_bytes = base64.b64decode(base64_bytes)
+        utf8_string = utf8_bytes.decode("UTF-8")
+    except Exception as err:
+        logger.warning(
+            "Unable to decode {b64} from base64: {err}".format(b64=base64_string, err=err)
+        )
+
+    return utf8_string
+
+
+if PY37:
+
+    def nanosecond_time():
+        # type: () -> int
+        return time.perf_counter_ns()
+
+elif PY33:
+
+    def nanosecond_time():
+        # type: () -> int
+
+        return int(time.perf_counter() * 1e9)
+
+else:
+
+    def nanosecond_time():
+        # type: () -> int
+
+        raise AttributeError
diff --git a/sentry_sdk/worker.py b/sentry_sdk/worker.py
index b5f2ea8ae6..310ba3bfb4 100644
--- a/sentry_sdk/worker.py
+++ b/sentry_sdk/worker.py
@@ -1,14 +1,15 @@
 import os
+import threading
 
-from threading import Thread, Lock
 from time import sleep, time
-from sentry_sdk._compat import queue, check_thread_support
+from sentry_sdk._compat import check_thread_support
+from sentry_sdk._queue import Queue, FullError
 from sentry_sdk.utils import logger
+from sentry_sdk.consts import DEFAULT_QUEUE_SIZE
 
 from sentry_sdk._types import MYPY
 
 if MYPY:
-    from queue import Queue
     from typing import Any
     from typing import Optional
     from typing import Callable
@@ -18,12 +19,12 @@
 
 
 class BackgroundWorker(object):
-    def __init__(self):
-        # type: () -> None
+    def __init__(self, queue_size=DEFAULT_QUEUE_SIZE):
+        # type: (int) -> None
         check_thread_support()
-        self._queue = queue.Queue(30)  # type: Queue[Any]
-        self._lock = Lock()
-        self._thread = None  # type: Optional[Thread]
+        self._queue = Queue(queue_size)  # type: Queue
+        self._lock = threading.Lock()
+        self._thread = None  # type: Optional[threading.Thread]
         self._thread_for_pid = None  # type: Optional[int]
 
     @property
@@ -45,41 +46,27 @@ def _timed_queue_join(self, timeout):
         deadline = time() + timeout
         queue = self._queue
 
-        real_all_tasks_done = getattr(
-            queue, "all_tasks_done", None
-        )  # type: Optional[Any]
-        if real_all_tasks_done is not None:
-            real_all_tasks_done.acquire()
-            all_tasks_done = real_all_tasks_done  # type: Optional[Any]
-        elif queue.__module__.startswith("eventlet."):
-            all_tasks_done = getattr(queue, "_cond", None)
-        else:
-            all_tasks_done = None
+        queue.all_tasks_done.acquire()
 
         try:
             while queue.unfinished_tasks:
                 delay = deadline - time()
                 if delay <= 0:
                     return False
-                if all_tasks_done is not None:
-                    all_tasks_done.wait(timeout=delay)
-                else:
-                    # worst case, we just poll the number of remaining tasks
-                    sleep(0.1)
+                queue.all_tasks_done.wait(timeout=delay)
 
             return True
         finally:
-            if real_all_tasks_done is not None:
-                real_all_tasks_done.release()
+            queue.all_tasks_done.release()
 
     def start(self):
         # type: () -> None
         with self._lock:
             if not self.is_alive:
-                self._thread = Thread(
+                self._thread = threading.Thread(
                     target=self._target, name="raven-sentry.BackgroundWorker"
                 )
-                self._thread.setDaemon(True)
+                self._thread.daemon = True
                 self._thread.start()
                 self._thread_for_pid = os.getpid()
 
@@ -94,7 +81,7 @@ def kill(self):
             if self._thread:
                 try:
                     self._queue.put_nowait(_TERMINATOR)
-                except queue.Full:
+                except FullError:
                     logger.debug("background worker queue full, kill failed")
 
                 self._thread = None
@@ -112,19 +99,23 @@ def _wait_flush(self, timeout, callback):
         # type: (float, Optional[Any]) -> None
         initial_timeout = min(0.1, timeout)
         if not self._timed_queue_join(initial_timeout):
-            pending = self._queue.qsize()
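+            # + 1 for the task that is currently being processed and is no
+            # longer counted by qsize()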
+            pending = self._queue.qsize() + 1
             logger.debug("%d event(s) pending on flush", pending)
             if callback is not None:
                 callback(pending, timeout)
-            self._timed_queue_join(timeout - initial_timeout)
+
+            if not self._timed_queue_join(timeout - initial_timeout):
+                pending = self._queue.qsize() + 1
+                logger.error("flush timed out, dropped %s events", pending)
 
     def submit(self, callback):
-        # type: (Callable[[], None]) -> None
+        # type: (Callable[[], None]) -> bool
         self._ensure_thread()
         try:
             self._queue.put_nowait(callback)
-        except queue.Full:
-            logger.debug("background worker queue full, dropping event")
+            return True
+        except FullError:
+            return False
 
     def _target(self):
         # type: () -> None
diff --git a/setup.cfg b/setup.cfg
deleted file mode 100644
index 2a9acf13da..0000000000
--- a/setup.cfg
+++ /dev/null
@@ -1,2 +0,0 @@
-[bdist_wheel]
-universal = 1
diff --git a/setup.py b/setup.py
index 456239d09b..34810fba4b 100644
--- a/setup.py
+++ b/setup.py
@@ -8,35 +8,63 @@
 <https://github.com/getsentry/sentry-python>`_ to find out more.
 """
 
+import os
 from setuptools import setup, find_packages
 
+here = os.path.abspath(os.path.dirname(__file__))
+
+
+def get_file_text(file_name):
+    with open(os.path.join(here, file_name)) as in_file:
+        return in_file.read()
+
+
 setup(
     name="sentry-sdk",
-    version="0.14.4",
+    version="1.14.0",
     author="Sentry Team and Contributors",
-    author_email="hello@getsentry.com",
+    author_email="hello@sentry.io",
     url="https://github.com/getsentry/sentry-python",
-    description="Python client for Sentry (https://getsentry.com)",
-    long_description=__doc__,
+    project_urls={
+        "Documentation": "https://docs.sentry.io/platforms/python/",
+        "Changelog": "https://github.com/getsentry/sentry-python/blob/master/CHANGELOG.md",
+    },
+    description="Python client for Sentry (https://sentry.io)",
+    long_description=get_file_text("README.md"),
+    long_description_content_type="text/markdown",
     packages=find_packages(exclude=("tests", "tests.*")),
     # PEP 561
     package_data={"sentry_sdk": ["py.typed"]},
     zip_safe=False,
     license="BSD",
-    install_requires=["urllib3>=1.10.0", "certifi"],
+    install_requires=[
+        'urllib3>=1.25.7; python_version<="3.4"',
+        'urllib3>=1.26.9; python_version=="3.5"',
+        'urllib3>=1.26.11; python_version>="3.6"',
+        "certifi",
+    ],
     extras_require={
         "flask": ["flask>=0.11", "blinker>=1.1"],
+        "quart": ["quart>=0.16.1", "blinker>=1.1"],
         "bottle": ["bottle>=0.12.13"],
         "falcon": ["falcon>=1.4"],
         "django": ["django>=1.8"],
         "sanic": ["sanic>=0.8"],
         "celery": ["celery>=3"],
-        "beam": ["beam>=2.12"],
+        "beam": ["apache-beam>=2.12"],
         "rq": ["rq>=0.6"],
         "aiohttp": ["aiohttp>=3.5"],
         "tornado": ["tornado>=5"],
         "sqlalchemy": ["sqlalchemy>=1.2"],
         "pyspark": ["pyspark>=2.4.4"],
+        "pure_eval": ["pure_eval", "executing", "asttokens"],
+        "chalice": ["chalice>=1.16.0"],
+        "httpx": ["httpx>=0.16.0"],
+        "starlette": ["starlette>=0.19.1"],
+        "starlite": ["starlite>=1.48"],
+        "fastapi": ["fastapi>=0.79.0"],
+        "pymongo": ["pymongo>=3.1"],
+        "opentelemetry": ["opentelemetry-distro>=0.35b0"],
     },
     classifiers=[
         "Development Status :: 5 - Production/Stable",
@@ -53,6 +81,9 @@
         "Programming Language :: Python :: 3.6",
         "Programming Language :: Python :: 3.7",
         "Programming Language :: Python :: 3.8",
+        "Programming Language :: Python :: 3.9",
+        "Programming Language :: Python :: 3.10",
         "Topic :: Software Development :: Libraries :: Python Modules",
     ],
+    options={"bdist_wheel": {"universal": "1"}},
 )
diff --git a/test-requirements.txt b/test-requirements.txt
index be051169ad..4c40e801bf 100644
--- a/test-requirements.txt
+++ b/test-requirements.txt
@@ -1,9 +1,14 @@
-pytest==3.7.3
-pytest-forked==1.1.3
-tox==3.7.0
-Werkzeug==0.15.5
-pytest-localserver==0.5.0
+pip  # always use newest pip
+mock  # for testing under python < 3.3
+pytest<7
 pytest-cov==2.8.1
-gevent
-eventlet
-newrelic
+pytest-forked<=1.4.0
+pytest-localserver==0.5.0
+pytest-watch==4.2.0
+tox==3.7.0
+Werkzeug<2.1.0
+jsonschema==3.2.0
+pyrsistent==0.16.0 # TODO(py3): 0.17.0 requires python3, see https://github.com/tobgu/pyrsistent/issues/205
+executing
+asttokens
+ipdb
diff --git a/tests/conftest.py b/tests/conftest.py
index 7687b580d8..cb1fedb4c6 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -1,26 +1,37 @@
 import os
-import subprocess
 import json
-import uuid
 
 import pytest
+import jsonschema
 
-import gevent
-import eventlet
+try:
+    import gevent
+except ImportError:
+    gevent = None
+
+try:
+    import eventlet
+except ImportError:
+    eventlet = None
 
 import sentry_sdk
-from sentry_sdk._compat import reraise, string_types, iteritems
-from sentry_sdk.transport import Transport
+from sentry_sdk._compat import iteritems, reraise, string_types
 from sentry_sdk.envelope import Envelope
+from sentry_sdk.integrations import _installed_integrations  # noqa: F401
+from sentry_sdk.profiler import teardown_profiler
+from sentry_sdk.transport import Transport
 from sentry_sdk.utils import capture_internal_exceptions
 
 from tests import _warning_recorder, _warning_recorder_mgr
 
-SENTRY_RELAY = "./relay"
 
-if not os.path.isfile(SENTRY_RELAY):
-    SENTRY_RELAY = None
+SENTRY_EVENT_SCHEMA = "./checkouts/data-schemas/relay/event.schema.json"
 
+if not os.path.isfile(SENTRY_EVENT_SCHEMA):
+    SENTRY_EVENT_SCHEMA = None
+else:
+    with open(SENTRY_EVENT_SCHEMA) as f:
+        SENTRY_EVENT_SCHEMA = json.load(f)
 
 try:
     import pytest_benchmark
@@ -30,7 +41,6 @@
     def benchmark():
         return lambda x: x()
 
-
 else:
     del pytest_benchmark
 
@@ -46,6 +56,8 @@ def _capture_internal_exception(self, exc_info):
 
     @request.addfinalizer
     def _():
+        # reraise the errors so that this just acts as a pass-through (that
+        # happens to keep track of the errors which pass through it)
         for e in errors:
             reraise(*e)
 
@@ -118,7 +130,7 @@ def _capture_internal_warnings():
 
 
 @pytest.fixture
-def monkeypatch_test_transport(monkeypatch, relay_normalize):
+def monkeypatch_test_transport(monkeypatch, validate_event_schema):
     def check_event(event):
         def check_string_keys(map):
             for key, value in iteritems(map):
@@ -128,65 +140,52 @@ def check_string_keys(map):
 
         with capture_internal_exceptions():
             check_string_keys(event)
-            relay_normalize(event)
+            validate_event_schema(event)
+
+    def check_envelope(envelope):
+        with capture_internal_exceptions():
+            # Assert that error events are sent to the server without an envelope, for compat.
+            # This does not apply if any item in the envelope is an attachment.
+            if not any(x.type == "attachment" for x in envelope.items):
+                assert not any(item.data_category == "error" for item in envelope.items)
+                assert not any(item.get_event() is not None for item in envelope.items)
 
     def inner(client):
-        monkeypatch.setattr(client, "transport", TestTransport(check_event))
+        monkeypatch.setattr(
+            client, "transport", TestTransport(check_event, check_envelope)
+        )
 
     return inner
 
 
-def _no_errors_in_relay_response(obj):
-    """Assert that relay didn't throw any errors when processing the
-    event."""
-
-    def inner(obj):
-        if not isinstance(obj, dict):
-            return
-
-        assert "err" not in obj
-
-        for value in obj.values():
-            inner(value)
-
-    try:
-        inner(obj.get("_meta"))
-        inner(obj.get(""))
-    except AssertionError:
-        raise AssertionError(obj)
-
-
 @pytest.fixture
-def relay_normalize(tmpdir):
+def validate_event_schema(tmpdir):
     def inner(event):
-        if not SENTRY_RELAY:
-            return
-
-        # Disable subprocess integration
-        with sentry_sdk.Hub(None):
-            # not dealing with the subprocess API right now
-            file = tmpdir.join("event-{}".format(uuid.uuid4().hex))
-            file.write(json.dumps(dict(event)))
-            with file.open() as f:
-                output = json.loads(
-                    subprocess.check_output(
-                        [SENTRY_RELAY, "process-event"], stdin=f
-                    ).decode("utf-8")
-                )
-            _no_errors_in_relay_response(output)
-            output.pop("_meta", None)
-            return output
+        if SENTRY_EVENT_SCHEMA:
+            jsonschema.validate(instance=event, schema=SENTRY_EVENT_SCHEMA)
 
     return inner
 
 
+@pytest.fixture
+def reset_integrations():
+    """
+    Use with caution, sometimes we really need to start
+    with a clean slate to ensure monkeypatching works well,
+    but this also means some other stuff will be monkeypatched twice.
+    """
+    global _installed_integrations
+    _installed_integrations.clear()
+
+
 @pytest.fixture
 def sentry_init(monkeypatch_test_transport, request):
     def inner(*a, **kw):
         hub = sentry_sdk.Hub.current
         client = sentry_sdk.Client(*a, **kw)
         hub.bind_client(client)
-        monkeypatch_test_transport(sentry_sdk.Hub.current.client)
+        if "transport" not in kw:
+            monkeypatch_test_transport(sentry_sdk.Hub.current.client)
 
     if request.node.get_closest_marker("forked"):
         # Do not run isolation if the test is already running in
@@ -199,9 +198,10 @@ def inner(*a, **kw):
 
 
 class TestTransport(Transport):
-    def __init__(self, capture_event_callback):
+    def __init__(self, capture_event_callback, capture_envelope_callback):
         Transport.__init__(self)
         self.capture_event = capture_event_callback
+        self.capture_envelope = capture_envelope_callback
         self._queue = None
 
 
@@ -211,12 +211,20 @@ def inner():
         events = []
         test_client = sentry_sdk.Hub.current.client
         old_capture_event = test_client.transport.capture_event
+        old_capture_envelope = test_client.transport.capture_envelope
 
-        def append(event):
+        def append_event(event):
             events.append(event)
             return old_capture_event(event)
 
-        monkeypatch.setattr(test_client.transport, "capture_event", append)
+        def append_envelope(envelope):
+            for item in envelope:
+                if item.headers.get("type") in ("event", "transaction"):
+                    test_client.transport.capture_event(item.payload.json)
+            return old_capture_envelope(envelope)
+
+        monkeypatch.setattr(test_client.transport, "capture_event", append_event)
+        monkeypatch.setattr(test_client.transport, "capture_envelope", append_envelope)
         return events
 
     return inner
@@ -248,8 +256,29 @@ def append_envelope(envelope):
 
 
 @pytest.fixture
-def capture_events_forksafe(monkeypatch):
+def capture_client_reports(monkeypatch):
+    def inner():
+        reports = []
+        test_client = sentry_sdk.Hub.current.client
+
+        def record_lost_event(reason, data_category=None, item=None):
+            if data_category is None:
+                data_category = item.data_category
+            return reports.append((reason, data_category))
+
+        monkeypatch.setattr(
+            test_client.transport, "record_lost_event", record_lost_event
+        )
+        return reports
+
+    return inner
+
+
+@pytest.fixture
+def capture_events_forksafe(monkeypatch, capture_events, request):
     def inner():
+        capture_events()
+
         events_r, events_w = os.pipe()
         events_r = os.fdopen(events_r, "rb", 0)
         events_w = os.fdopen(events_w, "wb", 0)
@@ -293,6 +322,9 @@ def read_flush(self):
 )
 def maybe_monkeypatched_threading(request):
     if request.param == "eventlet":
+        if eventlet is None:
+            pytest.skip("no eventlet installed")
+
         try:
             eventlet.monkey_patch()
         except AttributeError as e:
@@ -302,6 +334,8 @@ def maybe_monkeypatched_threading(request):
             else:
                 raise
     elif request.param == "gevent":
+        if gevent is None:
+            pytest.skip("no gevent installed")
         try:
             gevent.monkey.patch_all()
         except Exception as e:
@@ -313,3 +347,217 @@ def maybe_monkeypatched_threading(request):
         assert request.param is None
 
     return request.param
+
+
+@pytest.fixture
+def render_span_tree():
+    def inner(event):
+        assert event["type"] == "transaction"
+
+        by_parent = {}
+        for span in event["spans"]:
+            by_parent.setdefault(span["parent_span_id"], []).append(span)
+
+        def render_span(span):
+            yield "- op={}: description={}".format(
+                json.dumps(span.get("op")), json.dumps(span.get("description"))
+            )
+            for subspan in by_parent.get(span["span_id"]) or ():
+                for line in render_span(subspan):
+                    yield "  {}".format(line)
+
+        root_span = event["contexts"]["trace"]
+
+        # Join the rendered lines into a single multiline string for easy comparison in tests
+        return "\n".join(render_span(root_span))
+
+    return inner
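+
+# For illustration only (hypothetical span values, not produced by this
+# fixture itself): a root http.server span with one nested db span renders as
+#
+#   - op="http.server": description=null
+#     - op="db": description="SELECT 1"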
+
+
+@pytest.fixture(name="StringContaining")
+def string_containing_matcher():
+    """
+    An object which matches any string containing the substring passed to the
+    object at instantiation time.
+
+    Useful for assert_called_with, assert_any_call, etc.
+
+    Used like this:
+
+    >>> f = mock.Mock()
+    >>> f("dogs are great")
+    >>> f.assert_any_call("dogs") # will raise AssertionError
+    Traceback (most recent call last):
+        ...
+    AssertionError: mock('dogs') call not found
+    >>> f.assert_any_call(StringContaining("dogs")) # no AssertionError
+
+    """
+
+    class StringContaining(object):
+        def __init__(self, substring):
+            self.substring = substring
+
+            try:
+                # the `unicode` type only exists in python 2, so if this blows up,
+                # we must be in py3 and have the `bytes` type
+                self.valid_types = (str, unicode)  # noqa
+            except NameError:
+                self.valid_types = (str, bytes)
+
+        def __eq__(self, test_string):
+            if not isinstance(test_string, self.valid_types):
+                return False
+
+            # this is safe even in py2 because as of 2.6, `bytes` exists in py2
+            # as an alias for `str`
+            if isinstance(test_string, bytes):
+                test_string = test_string.decode()
+
+            if len(self.substring) > len(test_string):
+                return False
+
+            return self.substring in test_string
+
+        def __ne__(self, test_string):
+            return not self.__eq__(test_string)
+
+    return StringContaining
+
+
+def _safe_is_equal(x, y):
+    """
+    Compares two values, preferring to use the first's __eq__ method if it
+    exists and is implemented.
+
+    Accounts for py2/py3 differences (like ints in py2 not having a __eq__
+    method), as well as the incomparability of certain types exposed by using
+    raw __eq__() rather than ==.
+    """
+
+    # Prefer using __eq__ directly to ensure that examples like
+    #
+    #   maisey = Dog()
+    #   maisey.name = "Maisey the Dog"
+    #   maisey == ObjectDescribedBy(attrs={"name": StringContaining("Maisey")})
+    #
+    # evaluate to True (in other words, examples where the values in self.attrs
+    # might also have custom __eq__ methods; this makes sure those methods get
+    # used if possible)
+    try:
+        is_equal = x.__eq__(y)
+    except AttributeError:
+        is_equal = NotImplemented
+
+    # this can happen on its own, too (i.e. without an AttributeError being
+    # thrown), which is why this is separate from the except block above
+    if is_equal == NotImplemented:
+        # using == smoothes out weird variations exposed by raw __eq__
+        return x == y
+
+    return is_equal
+
+
+@pytest.fixture(name="DictionaryContaining")
+def dictionary_containing_matcher():
+    """
+    An object which matches any dictionary containing all key-value pairs from
+    the dictionary passed to the object at instantiation time.
+
+    Useful for assert_called_with, assert_any_call, etc.
+
+    Used like this:
+
+    >>> f = mock.Mock()
+    >>> f({"dogs": "yes", "cats": "maybe"})
+    >>> f.assert_any_call({"dogs": "yes"}) # will raise AssertionError
+    Traceback (most recent call last):
+        ...
+    AssertionError: mock({'dogs': 'yes'}) call not found
+    >>> f.assert_any_call(DictionaryContaining({"dogs": "yes"})) # no AssertionError
+    """
+
+    class DictionaryContaining(object):
+        def __init__(self, subdict):
+            self.subdict = subdict
+
+        def __eq__(self, test_dict):
+            if not isinstance(test_dict, dict):
+                return False
+
+            if len(self.subdict) > len(test_dict):
+                return False
+
+            for key, value in self.subdict.items():
+                try:
+                    test_value = test_dict[key]
+                except KeyError:  # missing key
+                    return False
+
+                if not _safe_is_equal(value, test_value):
+                    return False
+
+            return True
+
+        def __ne__(self, test_dict):
+            return not self.__eq__(test_dict)
+
+    return DictionaryContaining
+
+
+@pytest.fixture(name="ObjectDescribedBy")
+def object_described_by_matcher():
+    """
+    An object which matches any other object with the given properties.
+
+    Available properties currently are "type" (a type object) and "attrs" (a
+    dictionary).
+
+    Useful for assert_called_with, assert_any_call, etc.
+
+    Used like this:
+
+    >>> class Dog(object):
+    ...     pass
+    ...
+    >>> maisey = Dog()
+    >>> maisey.name = "Maisey"
+    >>> maisey.age = 7
+    >>> f = mock.Mock()
+    >>> f(maisey)
+    >>> f.assert_any_call(ObjectDescribedBy(type=Dog)) # no AssertionError
+    >>> f.assert_any_call(ObjectDescribedBy(attrs={"name": "Maisey"})) # no AssertionError
+    """
+
+    class ObjectDescribedBy(object):
+        def __init__(self, type=None, attrs=None):
+            self.type = type
+            self.attrs = attrs
+
+        def __eq__(self, test_obj):
+            if self.type:
+                if not isinstance(test_obj, self.type):
+                    return False
+
+            if self.attrs:
+                for attr_name, attr_value in self.attrs.items():
+                    try:
+                        test_value = getattr(test_obj, attr_name)
+                    except AttributeError:  # missing attribute
+                        return False
+
+                    if not _safe_is_equal(attr_value, test_value):
+                        return False
+
+            return True
+
+        def __ne__(self, test_obj):
+            return not self.__eq__(test_obj)
+
+    return ObjectDescribedBy
+
+
+@pytest.fixture
+def teardown_profiling():
+    yield
+    teardown_profiler()
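
Taken together, these fixtures give the tests in this suite a common shape. A
minimal sketch (a hypothetical test, mirroring the pattern used throughout the
integration tests below):

    import sentry_sdk

    def test_capture_message(sentry_init, capture_events):
        sentry_init()                     # bind a client backed by TestTransport
        events = capture_events()         # start recording captured events
        sentry_sdk.capture_message("hi")
        (event,) = events                 # exactly one event was captured
        assert event["message"] == "hi"
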
diff --git a/tests/integrations/aiohttp/test_aiohttp.py b/tests/integrations/aiohttp/test_aiohttp.py
index 0b2819f2cc..7e49a285c3 100644
--- a/tests/integrations/aiohttp/test_aiohttp.py
+++ b/tests/integrations/aiohttp/test_aiohttp.py
@@ -2,11 +2,18 @@
 import json
 from contextlib import suppress
 
+import pytest
 from aiohttp import web
 from aiohttp.client import ServerDisconnectedError
+from aiohttp.web_request import Request
 
 from sentry_sdk.integrations.aiohttp import AioHttpIntegration
 
+try:
+    from unittest import mock  # python 3.3 and above
+except ImportError:
+    import mock  # python < 3.3
+
 
 async def test_basic(sentry_init, aiohttp_client, loop, capture_events):
     sentry_init(integrations=[AioHttpIntegration()])
@@ -186,3 +193,85 @@ async def hello(request):
         event["transaction"]
         == "tests.integrations.aiohttp.test_aiohttp.test_tracing..hello"
     )
+
+
+@pytest.mark.parametrize(
+    "url,transaction_style,expected_transaction,expected_source",
+    [
+        (
+            "/message",
+            "handler_name",
+            "tests.integrations.aiohttp.test_aiohttp.test_transaction_style..hello",
+            "component",
+        ),
+        (
+            "/message",
+            "method_and_path_pattern",
+            "GET /{var}",
+            "route",
+        ),
+    ],
+)
+async def test_transaction_style(
+    sentry_init,
+    aiohttp_client,
+    capture_events,
+    url,
+    transaction_style,
+    expected_transaction,
+    expected_source,
+):
+    sentry_init(
+        integrations=[AioHttpIntegration(transaction_style=transaction_style)],
+        traces_sample_rate=1.0,
+    )
+
+    async def hello(request):
+        return web.Response(text="hello")
+
+    app = web.Application()
+    app.router.add_get(r"/{var}", hello)
+
+    events = capture_events()
+
+    client = await aiohttp_client(app)
+    resp = await client.get(url)
+    assert resp.status == 200
+
+    (event,) = events
+
+    assert event["type"] == "transaction"
+    assert event["transaction"] == expected_transaction
+    assert event["transaction_info"] == {"source": expected_source}
+
+
+async def test_traces_sampler_gets_request_object_in_sampling_context(
+    sentry_init,
+    aiohttp_client,
+    DictionaryContaining,  # noqa:N803
+    ObjectDescribedBy,
+):
+    traces_sampler = mock.Mock()
+    sentry_init(
+        integrations=[AioHttpIntegration()],
+        traces_sampler=traces_sampler,
+    )
+
+    async def kangaroo_handler(request):
+        return web.Response(text="dogs are great")
+
+    app = web.Application()
+    app.router.add_get("/tricks/kangaroo", kangaroo_handler)
+
+    client = await aiohttp_client(app)
+    await client.get("/tricks/kangaroo")
+
+    traces_sampler.assert_any_call(
+        DictionaryContaining(
+            {
+                "aiohttp_request": ObjectDescribedBy(
+                    type=Request, attrs={"method": "GET", "path": "/tricks/kangaroo"}
+                )
+            }
+        )
+    )
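
The `transaction_style` option exercised above is set when the integration is
created; a minimal sketch of the corresponding application setup (the DSN is a
placeholder):

    import sentry_sdk
    from sentry_sdk.integrations.aiohttp import AioHttpIntegration

    sentry_sdk.init(
        dsn="https://examplePublicKey@o0.ingest.sentry.io/0",
        traces_sample_rate=1.0,
        integrations=[AioHttpIntegration(transaction_style="method_and_path_pattern")],
    )
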
diff --git a/tests/integrations/asgi/__init__.py b/tests/integrations/asgi/__init__.py
index c89ddf99a8..1fb057c1fc 100644
--- a/tests/integrations/asgi/__init__.py
+++ b/tests/integrations/asgi/__init__.py
@@ -1,3 +1,4 @@
 import pytest
 
-pytest.importorskip("starlette")
+asyncio = pytest.importorskip("asyncio")
+pytest_asyncio = pytest.importorskip("pytest_asyncio")
diff --git a/tests/integrations/asgi/test_asgi.py b/tests/integrations/asgi/test_asgi.py
index 9da20199ca..ce28b1e8b9 100644
--- a/tests/integrations/asgi/test_asgi.py
+++ b/tests/integrations/asgi/test_asgi.py
@@ -1,121 +1,444 @@
 import sys
 
+from collections import Counter
+
 import pytest
+import sentry_sdk
 from sentry_sdk import capture_message
-from sentry_sdk.integrations.asgi import SentryAsgiMiddleware
-from starlette.applications import Starlette
-from starlette.responses import PlainTextResponse
-from starlette.testclient import TestClient
+from sentry_sdk.integrations.asgi import SentryAsgiMiddleware, _looks_like_asgi3
+
+async_asgi_testclient = pytest.importorskip("async_asgi_testclient")
+from async_asgi_testclient import TestClient
+
+
+minimum_python_36 = pytest.mark.skipif(
+    sys.version_info < (3, 6), reason="ASGI is only supported in Python >= 3.6"
+)
 
 
 @pytest.fixture
-def app():
-    app = Starlette()
+def asgi3_app():
+    async def app(scope, receive, send):
+        if (
+            scope["type"] == "http"
+            and "route" in scope
+            and scope["route"] == "/trigger/error"
+        ):
+            division_by_zero = 1 / 0  # noqa
+
+        await send(
+            {
+                "type": "http.response.start",
+                "status": 200,
+                "headers": [
+                    [b"content-type", b"text/plain"],
+                ],
+            }
+        )
 
-    @app.route("/sync-message")
-    def hi(request):
-        capture_message("hi", level="error")
-        return PlainTextResponse("ok")
+        await send(
+            {
+                "type": "http.response.body",
+                "body": b"Hello, world!",
+            }
+        )
+
+    return app
+
+
+@pytest.fixture
+def asgi3_app_with_error():
+    async def app(scope, receive, send):
+        await send(
+            {
+                "type": "http.response.start",
+                "status": 200,
+                "headers": [
+                    [b"content-type", b"text/plain"],
+                ],
+            }
+        )
 
-    @app.route("/async-message")
-    async def hi2(request):
-        capture_message("hi", level="error")
-        return PlainTextResponse("ok")
+        division_by_zero = 1 / 0  # noqa
 
-    app.add_middleware(SentryAsgiMiddleware)
+        await send(
+            {
+                "type": "http.response.body",
+                "body": b"Hello, world!",
+            }
+        )
 
     return app
 
 
-@pytest.mark.skipif(sys.version_info < (3, 7), reason="requires python3.7 or higher")
-def test_sync_request_data(sentry_init, app, capture_events):
-    sentry_init(send_default_pii=True)
-    events = capture_events()
+@pytest.fixture
+def asgi3_ws_app():
+    def message():
+        capture_message("Some message to the world!")
+        raise ValueError("Oh no")
 
-    client = TestClient(app)
-    response = client.get("/sync-message?foo=bar", headers={"Foo": u"ä"})
-
-    assert response.status_code == 200
-
-    (event,) = events
-    assert event["transaction"] == "tests.integrations.asgi.test_asgi.app..hi"
-    assert event["request"]["env"] == {"REMOTE_ADDR": "testclient"}
-    assert set(event["request"]["headers"]) == {
-        "accept",
-        "accept-encoding",
-        "connection",
-        "host",
-        "user-agent",
-        "foo",
-    }
-    assert event["request"]["query_string"] == "foo=bar"
-    assert event["request"]["url"].endswith("/sync-message")
-    assert event["request"]["method"] == "GET"
+    async def app(scope, receive, send):
+        await send(
+            {
+                "type": "websocket.send",
+                "text": message(),
+            }
+        )
 
-    # Assert that state is not leaked
-    events.clear()
-    capture_message("foo")
-    (event,) = events
+    return app
 
-    assert "request" not in event
-    assert "transaction" not in event
 
+@minimum_python_36
+def test_invalid_transaction_style(asgi3_app):
+    with pytest.raises(ValueError) as exp:
+        SentryAsgiMiddleware(asgi3_app, transaction_style="URL")
+
+    assert (
+        str(exp.value)
+        == "Invalid value for transaction_style: URL (must be in ('endpoint', 'url'))"
+    )
 
-def test_async_request_data(sentry_init, app, capture_events):
-    sentry_init(send_default_pii=True)
-    events = capture_events()
 
-    client = TestClient(app)
-    response = client.get("/async-message?foo=bar")
+@minimum_python_36
+@pytest.mark.asyncio
+async def test_capture_transaction(
+    sentry_init,
+    asgi3_app,
+    capture_events,
+):
+    sentry_init(send_default_pii=True, traces_sample_rate=1.0)
+    app = SentryAsgiMiddleware(asgi3_app)
 
-    assert response.status_code == 200
+    async with TestClient(app) as client:
+        events = capture_events()
+        await client.get("/?somevalue=123")
 
-    (event,) = events
-    assert event["transaction"] == "tests.integrations.asgi.test_asgi.app..hi2"
-    assert event["request"]["env"] == {"REMOTE_ADDR": "testclient"}
-    assert set(event["request"]["headers"]) == {
-        "accept",
-        "accept-encoding",
-        "connection",
-        "host",
-        "user-agent",
+    (transaction_event,) = events
+
+    assert transaction_event["type"] == "transaction"
+    assert transaction_event["transaction"] == "generic ASGI request"
+    assert transaction_event["contexts"]["trace"]["op"] == "http.server"
+    assert transaction_event["request"] == {
+        "headers": {
+            "host": "localhost",
+            "remote-addr": "127.0.0.1",
+            "user-agent": "ASGI-Test-Client",
+        },
+        "method": "GET",
+        "query_string": "somevalue=123",
+        "url": "http://localhost/",
     }
-    assert event["request"]["query_string"] == "foo=bar"
-    assert event["request"]["url"].endswith("/async-message")
-    assert event["request"]["method"] == "GET"
 
-    # Assert that state is not leaked
-    events.clear()
-    capture_message("foo")
-    (event,) = events
 
-    assert "request" not in event
-    assert "transaction" not in event
+@minimum_python_36
+@pytest.mark.asyncio
+async def test_capture_transaction_with_error(
+    sentry_init,
+    asgi3_app_with_error,
+    capture_events,
+    DictionaryContaining,  # noqa: N803
+):
+    sentry_init(send_default_pii=True, traces_sample_rate=1.0)
+    app = SentryAsgiMiddleware(asgi3_app_with_error)
+
+    with pytest.raises(ZeroDivisionError):
+        async with TestClient(app) as client:
+            events = capture_events()
+            await client.get("/")
 
+    (error_event, transaction_event) = events
+
+    assert error_event["transaction"] == "generic ASGI request"
+    assert error_event["contexts"]["trace"]["op"] == "http.server"
+    assert error_event["exception"]["values"][0]["type"] == "ZeroDivisionError"
+    assert error_event["exception"]["values"][0]["value"] == "division by zero"
+    assert error_event["exception"]["values"][0]["mechanism"]["handled"] is False
+    assert error_event["exception"]["values"][0]["mechanism"]["type"] == "asgi"
+
+    assert transaction_event["type"] == "transaction"
+    assert transaction_event["contexts"]["trace"] == DictionaryContaining(
+        error_event["contexts"]["trace"]
+    )
+    assert transaction_event["contexts"]["trace"]["status"] == "internal_error"
+    assert transaction_event["transaction"] == error_event["transaction"]
+    assert transaction_event["request"] == error_event["request"]
+
+
+@minimum_python_36
+@pytest.mark.asyncio
+async def test_websocket(sentry_init, asgi3_ws_app, capture_events, request):
+    sentry_init(debug=True, send_default_pii=True)
 
-def test_errors(sentry_init, app, capture_events):
-    sentry_init(send_default_pii=True)
     events = capture_events()
 
-    @app.route("/error")
-    def myerror(request):
-        raise ValueError("oh no")
+    asgi3_ws_app = SentryAsgiMiddleware(asgi3_ws_app)
 
-    client = TestClient(app, raise_server_exceptions=False)
-    response = client.get("/error")
+    scope = {
+        "type": "websocket",
+        "endpoint": asgi3_app,
+        "client": ("127.0.0.1", 60457),
+        "route": "some_url",
+        "headers": [
+            ("accept", "*/*"),
+        ],
+    }
 
-    assert response.status_code == 500
+    with pytest.raises(ValueError):
+        async with TestClient(asgi3_ws_app, scope=scope) as client:
+            async with client.websocket_connect("/ws") as ws:
+                await ws.receive_text()
+
+    msg_event, error_event = events
+
+    assert msg_event["message"] == "Some message to the world!"
+
+    (exc,) = error_event["exception"]["values"]
+    assert exc["type"] == "ValueError"
+    assert exc["value"] == "Oh no"
+
+
+@minimum_python_36
+@pytest.mark.asyncio
+async def test_auto_session_tracking_with_aggregates(
+    sentry_init, asgi3_app, capture_envelopes
+):
+    sentry_init(send_default_pii=True, traces_sample_rate=1.0)
+    app = SentryAsgiMiddleware(asgi3_app)
+
+    scope = {
+        "endpoint": asgi3_app,
+        "client": ("127.0.0.1", 60457),
+    }
+    with pytest.raises(ZeroDivisionError):
+        envelopes = capture_envelopes()
+        async with TestClient(app, scope=scope) as client:
+            scope["route"] = "/some/fine/url"
+            await client.get("/some/fine/url")
+            scope["route"] = "/some/fine/url"
+            await client.get("/some/fine/url")
+            scope["route"] = "/trigger/error"
+            await client.get("/trigger/error")
+
+    sentry_sdk.flush()
+
+    count_item_types = Counter()
+    for envelope in envelopes:
+        count_item_types[envelope.items[0].type] += 1
+
+    assert count_item_types["transaction"] == 4
+    assert count_item_types["event"] == 1
+    assert count_item_types["sessions"] == 1
+    assert len(envelopes) == 6
+
+    session_aggregates = envelopes[-1].items[0].payload.json["aggregates"]
+    assert session_aggregates[0]["exited"] == 3
+    assert session_aggregates[0]["crashed"] == 1
+    assert len(session_aggregates) == 1
 
-    (event,) = events
-    assert (
-        event["transaction"]
-        == "tests.integrations.asgi.test_asgi.test_errors..myerror"
-    )
-    (exception,) = event["exception"]["values"]
 
-    assert exception["type"] == "ValueError"
-    assert exception["value"] == "oh no"
-    assert any(
-        frame["filename"].endswith("tests/integrations/asgi/test_asgi.py")
-        for frame in exception["stacktrace"]["frames"]
+@minimum_python_36
+@pytest.mark.parametrize(
+    "url,transaction_style,expected_transaction,expected_source",
+    [
+        (
+            "/message",
+            "url",
+            "generic ASGI request",
+            "route",
+        ),
+        (
+            "/message",
+            "endpoint",
+            "tests.integrations.asgi.test_asgi.asgi3_app_with_error..app",
+            "component",
+        ),
+    ],
+)
+@pytest.mark.asyncio
+async def test_transaction_style(
+    sentry_init,
+    asgi3_app_with_error,
+    capture_events,
+    url,
+    transaction_style,
+    expected_transaction,
+    expected_source,
+):
+    sentry_init(send_default_pii=True, traces_sample_rate=1.0)
+    app = SentryAsgiMiddleware(
+        asgi3_app_with_error, transaction_style=transaction_style
     )
+
+    scope = {
+        "endpoint": asgi3_app_with_error,
+        "route": url,
+        "client": ("127.0.0.1", 60457),
+    }
+
+    with pytest.raises(ZeroDivisionError):
+        async with TestClient(app, scope=scope) as client:
+            events = capture_events()
+            await client.get(url)
+
+    (_, transaction_event) = events
+
+    assert transaction_event["transaction"] == expected_transaction
+    assert transaction_event["transaction_info"] == {"source": expected_source}
+
+
+def mock_asgi2_app():
+    pass
+
+
+class MockAsgi2App:
+    def __call__():
+        pass
+
+
+class MockAsgi3App(MockAsgi2App):
+    def __await__():
+        pass
+
+    async def __call__():
+        pass
+
+
+@minimum_python_36
+def test_looks_like_asgi3(asgi3_app):
+    # branch: inspect.isclass(app)
+    assert _looks_like_asgi3(MockAsgi3App)
+    assert not _looks_like_asgi3(MockAsgi2App)
+
+    # branch: inspect.isfunction(app)
+    assert _looks_like_asgi3(asgi3_app)
+    assert not _looks_like_asgi3(mock_asgi2_app)
+
+    # branch: else
+    asgi3 = MockAsgi3App()
+    assert _looks_like_asgi3(asgi3)
+    asgi2 = MockAsgi2App()
+    assert not _looks_like_asgi3(asgi2)
+
+
+@minimum_python_36
+def test_get_ip_x_forwarded_for():
+    headers = [
+        (b"x-forwarded-for", b"8.8.8.8"),
+    ]
+    scope = {
+        "client": ("127.0.0.1", 60457),
+        "headers": headers,
+    }
+    middleware = SentryAsgiMiddleware({})
+    ip = middleware._get_ip(scope)
+    assert ip == "8.8.8.8"
+
+    # x-forwarded-for overrides x-real-ip
+    headers = [
+        (b"x-forwarded-for", b"8.8.8.8"),
+        (b"x-real-ip", b"10.10.10.10"),
+    ]
+    scope = {
+        "client": ("127.0.0.1", 60457),
+        "headers": headers,
+    }
+    middleware = SentryAsgiMiddleware({})
+    ip = middleware._get_ip(scope)
+    assert ip == "8.8.8.8"
+
+    # when multiple x-forwarded-for headers are present, the first one is taken
+    headers = [
+        (b"x-forwarded-for", b"5.5.5.5"),
+        (b"x-forwarded-for", b"6.6.6.6"),
+        (b"x-forwarded-for", b"7.7.7.7"),
+    ]
+    scope = {
+        "client": ("127.0.0.1", 60457),
+        "headers": headers,
+    }
+    middleware = SentryAsgiMiddleware({})
+    ip = middleware._get_ip(scope)
+    assert ip == "5.5.5.5"
+
+
+@minimum_python_36
+def test_get_ip_x_real_ip():
+    headers = [
+        (b"x-real-ip", b"10.10.10.10"),
+    ]
+    scope = {
+        "client": ("127.0.0.1", 60457),
+        "headers": headers,
+    }
+    middleware = SentryAsgiMiddleware({})
+    ip = middleware._get_ip(scope)
+    assert ip == "10.10.10.10"
+
+    # x-forwarded-for overrides x-real-ip
+    headers = [
+        (b"x-forwarded-for", b"8.8.8.8"),
+        (b"x-real-ip", b"10.10.10.10"),
+    ]
+    scope = {
+        "client": ("127.0.0.1", 60457),
+        "headers": headers,
+    }
+    middleware = SentryAsgiMiddleware({})
+    ip = middleware._get_ip(scope)
+    assert ip == "8.8.8.8"
+
+
+@minimum_python_36
+def test_get_ip():
+    # if no headers are provided, the ip is taken from the client.
+    headers = []
+    scope = {
+        "client": ("127.0.0.1", 60457),
+        "headers": headers,
+    }
+    middleware = SentryAsgiMiddleware({})
+    ip = middleware._get_ip(scope)
+    assert ip == "127.0.0.1"
+
+    # x-forwarded-for header overrides the ip from the client
+    headers = [
+        (b"x-forwarded-for", b"8.8.8.8"),
+    ]
+    scope = {
+        "client": ("127.0.0.1", 60457),
+        "headers": headers,
+    }
+    middleware = SentryAsgiMiddleware({})
+    ip = middleware._get_ip(scope)
+    assert ip == "8.8.8.8"
+
+    # x-real-ip header overrides the ip from the client
+    headers = [
+        (b"x-real-ip", b"10.10.10.10"),
+    ]
+    scope = {
+        "client": ("127.0.0.1", 60457),
+        "headers": headers,
+    }
+    middleware = SentryAsgiMiddleware({})
+    ip = middleware._get_ip(scope)
+    assert ip == "10.10.10.10"
+
+
+@minimum_python_36
+def test_get_headers():
+    headers = [
+        (b"x-real-ip", b"10.10.10.10"),
+        (b"some_header", b"123"),
+        (b"some_header", b"abc"),
+    ]
+    scope = {
+        "client": ("127.0.0.1", 60457),
+        "headers": headers,
+    }
+    middleware = SentryAsgiMiddleware({})
+    headers = middleware._get_headers(scope)
+    assert headers == {
+        "x-real-ip": "10.10.10.10",
+        "some_header": "123, abc",
+    }
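
Outside of the tests, the middleware is applied the same way the fixtures
above apply it: by wrapping a bare ASGI callable. A minimal sketch (the
handler body is a placeholder):

    from sentry_sdk.integrations.asgi import SentryAsgiMiddleware

    async def app(scope, receive, send):
        await send({"type": "http.response.start", "status": 200, "headers": []})
        await send({"type": "http.response.body", "body": b"ok"})

    app = SentryAsgiMiddleware(app, transaction_style="endpoint")
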
diff --git a/tests/integrations/asyncio/__init__.py b/tests/integrations/asyncio/__init__.py
new file mode 100644
index 0000000000..1b887a03fe
--- /dev/null
+++ b/tests/integrations/asyncio/__init__.py
@@ -0,0 +1,3 @@
+import pytest
+
+pytest.importorskip("pytest_asyncio")
diff --git a/tests/integrations/asyncio/test_asyncio.py b/tests/integrations/asyncio/test_asyncio.py
new file mode 100644
index 0000000000..380c614f65
--- /dev/null
+++ b/tests/integrations/asyncio/test_asyncio.py
@@ -0,0 +1,157 @@
+import asyncio
+import sys
+
+import pytest
+import pytest_asyncio
+
+import sentry_sdk
+from sentry_sdk.consts import OP
+from sentry_sdk.integrations.asyncio import AsyncioIntegration
+
+
+minimum_python_36 = pytest.mark.skipif(
+    sys.version_info < (3, 6), reason="the asyncio integration is only supported in Python >= 3.6"
+)
+
+
+async def foo():
+    await asyncio.sleep(0.01)
+
+
+async def bar():
+    await asyncio.sleep(0.01)
+
+
+async def boom():
+    1 / 0
+
+
+@pytest_asyncio.fixture(scope="session")
+def event_loop(request):
+    """Create an instance of the default event loop for each test case."""
+    loop = asyncio.get_event_loop_policy().new_event_loop()
+    yield loop
+    loop.close()
+
+
+@minimum_python_36
+@pytest.mark.asyncio
+async def test_create_task(
+    sentry_init,
+    capture_events,
+    event_loop,
+):
+    sentry_init(
+        traces_sample_rate=1.0,
+        send_default_pii=True,
+        debug=True,
+        integrations=[
+            AsyncioIntegration(),
+        ],
+    )
+
+    events = capture_events()
+
+    with sentry_sdk.start_transaction(name="test_transaction_for_create_task"):
+        with sentry_sdk.start_span(op="root", description="not so important"):
+            tasks = [event_loop.create_task(foo()), event_loop.create_task(bar())]
+            await asyncio.wait(tasks, return_when=asyncio.FIRST_EXCEPTION)
+
+            sentry_sdk.flush()
+
+    (transaction_event,) = events
+
+    assert transaction_event["spans"][0]["op"] == "root"
+    assert transaction_event["spans"][0]["description"] == "not so important"
+
+    assert transaction_event["spans"][1]["op"] == OP.FUNCTION
+    assert transaction_event["spans"][1]["description"] == "foo"
+    assert (
+        transaction_event["spans"][1]["parent_span_id"]
+        == transaction_event["spans"][0]["span_id"]
+    )
+
+    assert transaction_event["spans"][2]["op"] == OP.FUNCTION
+    assert transaction_event["spans"][2]["description"] == "bar"
+    assert (
+        transaction_event["spans"][2]["parent_span_id"]
+        == transaction_event["spans"][0]["span_id"]
+    )
+
+
+@minimum_python_36
+@pytest.mark.asyncio
+async def test_gather(
+    sentry_init,
+    capture_events,
+):
+    sentry_init(
+        traces_sample_rate=1.0,
+        send_default_pii=True,
+        debug=True,
+        integrations=[
+            AsyncioIntegration(),
+        ],
+    )
+
+    events = capture_events()
+
+    with sentry_sdk.start_transaction(name="test_transaction_for_gather"):
+        with sentry_sdk.start_span(op="root", description="not so important"):
+            await asyncio.gather(foo(), bar(), return_exceptions=True)
+
+        sentry_sdk.flush()
+
+    (transaction_event,) = events
+
+    assert transaction_event["spans"][0]["op"] == "root"
+    assert transaction_event["spans"][0]["description"] == "not so important"
+
+    assert transaction_event["spans"][1]["op"] == OP.FUNCTION
+    assert transaction_event["spans"][1]["description"] == "foo"
+    assert (
+        transaction_event["spans"][1]["parent_span_id"]
+        == transaction_event["spans"][0]["span_id"]
+    )
+
+    assert transaction_event["spans"][2]["op"] == OP.FUNCTION
+    assert transaction_event["spans"][2]["description"] == "bar"
+    assert (
+        transaction_event["spans"][2]["parent_span_id"]
+        == transaction_event["spans"][0]["span_id"]
+    )
+
+
+@minimum_python_36
+@pytest.mark.asyncio
+async def test_exception(
+    sentry_init,
+    capture_events,
+    event_loop,
+):
+    sentry_init(
+        traces_sample_rate=1.0,
+        send_default_pii=True,
+        debug=True,
+        integrations=[
+            AsyncioIntegration(),
+        ],
+    )
+
+    events = capture_events()
+
+    with sentry_sdk.start_transaction(name="test_exception"):
+        with sentry_sdk.start_span(op="root", description="not so important"):
+            tasks = [event_loop.create_task(boom()), event_loop.create_task(bar())]
+            await asyncio.wait(tasks, return_when=asyncio.FIRST_EXCEPTION)
+
+            sentry_sdk.flush()
+
+    (error_event, _) = events
+
+    assert error_event["transaction"] == "test_exception"
+    assert error_event["contexts"]["trace"]["op"] == "function"
+    assert error_event["exception"]["values"][0]["type"] == "ZeroDivisionError"
+    assert error_event["exception"]["values"][0]["value"] == "division by zero"
+    assert error_event["exception"]["values"][0]["mechanism"]["handled"] is False
+    assert error_event["exception"]["values"][0]["mechanism"]["type"] == "asyncio"
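
The integration under test is enabled exactly as in the `sentry_init` calls
above; a minimal sketch for a real application (the DSN is a placeholder):

    import sentry_sdk
    from sentry_sdk.integrations.asyncio import AsyncioIntegration

    sentry_sdk.init(
        dsn="https://examplePublicKey@o0.ingest.sentry.io/0",
        traces_sample_rate=1.0,
        integrations=[AsyncioIntegration()],
    )
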
diff --git a/tests/integrations/aws_lambda/client.py b/tests/integrations/aws_lambda/client.py
new file mode 100644
index 0000000000..d8e430f3d7
--- /dev/null
+++ b/tests/integrations/aws_lambda/client.py
@@ -0,0 +1,239 @@
+import sys
+import os
+import shutil
+import tempfile
+import subprocess
+import boto3
+import uuid
+import base64
+
+
+def get_boto_client():
+    return boto3.client(
+        "lambda",
+        aws_access_key_id=os.environ["SENTRY_PYTHON_TEST_AWS_ACCESS_KEY_ID"],
+        aws_secret_access_key=os.environ["SENTRY_PYTHON_TEST_AWS_SECRET_ACCESS_KEY"],
+        region_name="us-east-1",
+    )
+
+
+def build_no_code_serverless_function_and_layer(
+    client, tmpdir, fn_name, runtime, timeout, initial_handler
+):
+    """
+    Utility function that exercises the SDK's no-code instrumentation by
+    publishing a Lambda layer containing the Python SDK and then creating a
+    function that uses that layer.
+    """
+    from scripts.build_aws_lambda_layer import build_layer_dir
+
+    build_layer_dir(dest_abs_path=tmpdir)
+
+    with open(os.path.join(tmpdir, "serverless-ball.zip"), "rb") as serverless_zip:
+        response = client.publish_layer_version(
+            LayerName="python-serverless-sdk-test",
+            Description="Created as part of testsuite for getsentry/sentry-python",
+            Content={"ZipFile": serverless_zip.read()},
+        )
+
+    with open(os.path.join(tmpdir, "ball.zip"), "rb") as zip:
+        client.create_function(
+            FunctionName=fn_name,
+            Runtime=runtime,
+            Timeout=timeout,
+            Environment={
+                "Variables": {
+                    "SENTRY_INITIAL_HANDLER": initial_handler,
+                    "SENTRY_DSN": "https://123abc@example.com/123",
+                    "SENTRY_TRACES_SAMPLE_RATE": "1.0",
+                }
+            },
+            Role=os.environ["SENTRY_PYTHON_TEST_AWS_IAM_ROLE"],
+            Handler="sentry_sdk.integrations.init_serverless_sdk.sentry_lambda_handler",
+            Layers=[response["LayerVersionArn"]],
+            Code={"ZipFile": zip.read()},
+            Description="Created as part of testsuite for getsentry/sentry-python",
+        )
+
+
+def run_lambda_function(
+    client,
+    runtime,
+    code,
+    payload,
+    add_finalizer,
+    syntax_check=True,
+    timeout=30,
+    layer=None,
+    initial_handler=None,
+    subprocess_kwargs=(),
+):
+    subprocess_kwargs = dict(subprocess_kwargs)
+
+    with tempfile.TemporaryDirectory() as tmpdir:
+        if initial_handler:
+            # If an initial handler value is provided (i.e. it is not the default
+            # `test_lambda.test_handler`), create another directory level so that
+            # the handler path becomes test_dir.test_lambda.test_handler
+            test_dir_path = os.path.join(tmpdir, "test_dir")
+            python_init_file = os.path.join(test_dir_path, "__init__.py")
+            os.makedirs(test_dir_path)
+            with open(python_init_file, "w"):
+                # Create __init__ file to make it a python package
+                pass
+
+            test_lambda_py = os.path.join(tmpdir, "test_dir", "test_lambda.py")
+        else:
+            test_lambda_py = os.path.join(tmpdir, "test_lambda.py")
+
+        with open(test_lambda_py, "w") as f:
+            f.write(code)
+
+        if syntax_check:
+            # Check file for valid syntax first, and that the integration does not
+            # crash when not running in Lambda (but rather a local deployment tool
+            # such as chalice's)
+            subprocess.check_call([sys.executable, test_lambda_py])
+
+        fn_name = "test_function_{}".format(uuid.uuid4())
+
+        if layer is None:
+            setup_cfg = os.path.join(tmpdir, "setup.cfg")
+            with open(setup_cfg, "w") as f:
+                f.write("[install]\nprefix=")
+
+            subprocess.check_call(
+                [sys.executable, "setup.py", "sdist", "-d", os.path.join(tmpdir, "..")],
+                **subprocess_kwargs
+            )
+
+            subprocess.check_call(
+                "pip install mock==3.0.0 funcsigs -t .",
+                cwd=tmpdir,
+                shell=True,
+                **subprocess_kwargs
+            )
+
+            # https://docs.aws.amazon.com/lambda/latest/dg/lambda-python-how-to-create-deployment-package.html
+            subprocess.check_call(
+                "pip install ../*.tar.gz -t .",
+                cwd=tmpdir,
+                shell=True,
+                **subprocess_kwargs
+            )
+
+            shutil.make_archive(os.path.join(tmpdir, "ball"), "zip", tmpdir)
+
+            with open(os.path.join(tmpdir, "ball.zip"), "rb") as zip:
+                client.create_function(
+                    FunctionName=fn_name,
+                    Runtime=runtime,
+                    Timeout=timeout,
+                    Role=os.environ["SENTRY_PYTHON_TEST_AWS_IAM_ROLE"],
+                    Handler="test_lambda.test_handler",
+                    Code={"ZipFile": zip.read()},
+                    Description="Created as part of testsuite for getsentry/sentry-python",
+                )
+        else:
+            subprocess.run(
+                ["zip", "-q", "-x", "**/__pycache__/*", "-r", "ball.zip", "./"],
+                cwd=tmpdir,
+                check=True,
+            )
+
+            # Default initial handler
+            if not initial_handler:
+                initial_handler = "test_lambda.test_handler"
+
+            build_no_code_serverless_function_and_layer(
+                client, tmpdir, fn_name, runtime, timeout, initial_handler
+            )
+
+        @add_finalizer
+        def clean_up():
+            client.delete_function(FunctionName=fn_name)
+
+            # this closes the web socket so we don't get a
+            #   ResourceWarning: unclosed <ssl.SSLSocket ...>
+            # warning on every test
+            # based on https://github.com/boto/botocore/pull/1810
+            # (if that's ever merged, this can just become client.close())
+            session = client._endpoint.http_session
+            managers = [session._manager] + list(session._proxy_managers.values())
+            for manager in managers:
+                manager.clear()
+
+        response = client.invoke(
+            FunctionName=fn_name,
+            InvocationType="RequestResponse",
+            LogType="Tail",
+            Payload=payload,
+        )
+
+        assert 200 <= response["StatusCode"] < 300, response
+        return response
+
+
+_REPL_CODE = """
+import os
+
+def test_handler(event, context):
+    line = {line!r}
+    if line.startswith(">>> "):
+        exec(line[4:])
+    elif line.startswith("$ "):
+        os.system(line[2:])
+    else:
+        print("Start a line with $ or >>>")
+
+    return b""
+"""
+
+try:
+    import click
+except ImportError:
+    pass
+else:
+
+    @click.command()
+    @click.option(
+        "--runtime", required=True, help="name of the runtime to use, eg python3.8"
+    )
+    @click.option("--verbose", is_flag=True, default=False)
+    def repl(runtime, verbose):
+        """
+        Launch a "REPL" against AWS Lambda to inspect their runtime.
+        """
+
+        cleanup = []
+        client = get_boto_client()
+
+        print("Start a line with `$ ` to run shell commands, or `>>> ` to run Python")
+
+        while True:
+            line = input()
+
+            response = run_lambda_function(
+                client,
+                runtime,
+                _REPL_CODE.format(line=line),
+                b"",
+                cleanup.append,
+                subprocess_kwargs={
+                    "stdout": subprocess.DEVNULL,
+                    "stderr": subprocess.DEVNULL,
+                }
+                if not verbose
+                else {},
+            )
+
+            for line in base64.b64decode(response["LogResult"]).splitlines():
+                print(line.decode("utf8"))
+
+            for f in cleanup:
+                f()
+
+            cleanup = []
+
+    if __name__ == "__main__":
+        repl()
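
A sketch of how this helper is driven directly (mirroring the
`run_lambda_function` fixture in test_aws.py below; it assumes the
SENTRY_PYTHON_TEST_AWS_* environment variables are set and that it runs from
the repository root so the sdist build works):

    from tests.integrations.aws_lambda.client import get_boto_client, run_lambda_function

    cleanup = []
    response = run_lambda_function(
        client=get_boto_client(),
        runtime="python3.9",
        code='def test_handler(event, context):\n    return "ok"\n',
        payload=b'{"foo": "bar"}',
        add_finalizer=cleanup.append,
    )
    for finalizer in cleanup:
        finalizer()  # deletes the uploaded Lambda function again
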
diff --git a/tests/integrations/aws_lambda/test_aws.py b/tests/integrations/aws_lambda/test_aws.py
index 9ce0b56b20..78c9770317 100644
--- a/tests/integrations/aws_lambda/test_aws.py
+++ b/tests/integrations/aws_lambda/test_aws.py
@@ -1,11 +1,23 @@
+"""
+# AWS Lambda system tests
+
+This testsuite uses boto3 to upload actual lambda functions to AWS, execute
+them and assert some things about the externally observed behavior. What that
+means for you is that those tests won't run without AWS access keys:
+
+    export SENTRY_PYTHON_TEST_AWS_ACCESS_KEY_ID=..
+    export SENTRY_PYTHON_TEST_AWS_SECRET_ACCESS_KEY=...
+    export SENTRY_PYTHON_TEST_AWS_IAM_ROLE="arn:aws:iam::920901907255:role/service-role/lambda"
+
+If you need to debug a new runtime, use this REPL to figure things out:
+
+    pip3 install click
+    python3 tests/integrations/aws_lambda/client.py --runtime=python4.0
+"""
 import base64
 import json
 import os
 import re
-import shutil
-import subprocess
-import sys
-import uuid
 from textwrap import dedent
 
 import pytest
@@ -15,27 +27,68 @@
 LAMBDA_PRELUDE = """
 from __future__ import print_function
 
-import time
-
-from sentry_sdk.integrations.aws_lambda import AwsLambdaIntegration
+from sentry_sdk.integrations.aws_lambda import AwsLambdaIntegration, get_lambda_bootstrap
 import sentry_sdk
 import json
+import time
+
 from sentry_sdk.transport import HttpTransport
 
+def event_processor(event):
+    # AWS Lambda truncates the log output to 4kb, which is small enough to miss
+    # parts of even a single error-event/transaction-envelope pair if considered
+    # in full, so only grab the data we need.
+
+    event_data = {}
+    event_data["contexts"] = {}
+    event_data["contexts"]["trace"] = event.get("contexts", {}).get("trace")
+    event_data["exception"] = event.get("exception")
+    event_data["extra"] = event.get("extra")
+    event_data["level"] = event.get("level")
+    event_data["request"] = event.get("request")
+    event_data["tags"] = event.get("tags")
+    event_data["transaction"] = event.get("transaction")
+
+    return event_data
+
+def envelope_processor(envelope):
+    # AWS Lambda truncates the log output to 4kb, which is small enough to miss
+    # parts of even a single error-event/transaction-envelope pair if considered
+    # in full, so only grab the data we need.
+
+    (item,) = envelope.items
+    envelope_json = json.loads(item.get_bytes())
+
+    envelope_data = {}
+    envelope_data["contexts"] = {}
+    envelope_data["type"] = envelope_json["type"]
+    envelope_data["transaction"] = envelope_json["transaction"]
+    envelope_data["contexts"]["trace"] = envelope_json["contexts"]["trace"]
+    envelope_data["request"] = envelope_json["request"]
+    envelope_data["tags"] = envelope_json["tags"]
+
+    return envelope_data
+
+
 class TestTransport(HttpTransport):
     def _send_event(self, event):
-        # Delay event output like this to test proper shutdown
-        # Note that AWS Lambda trunchates the log output to 4kb, so you better
-        # pray that your events are smaller than that or else tests start
-        # failing.
-        time.sleep(1)
-        print("\\nEVENT:", json.dumps(event))
-
-def init_sdk(**extra_init_args):
+        event = event_processor(event)
+        # Writing a single string to stdout appears to hold the GIL and
+        # therefore cannot be interleaved with output from other threads. This
+        # is why we explicitly add a newline at the end even though `print`
+        # would provide one itself.
+        print("\\nEVENT: {}\\n".format(json.dumps(event)))
+
+    def _send_envelope(self, envelope):
+        envelope = envelope_processor(envelope)
+        print("\\nENVELOPE: {}\\n".format(json.dumps(envelope)))
+
+
+def init_sdk(timeout_warning=False, **extra_init_args):
     sentry_sdk.init(
         dsn="https://123abc@example.com/123",
         transport=TestTransport,
-        integrations=[AwsLambdaIntegration()],
+        integrations=[AwsLambdaIntegration(timeout_warning=timeout_warning)],
         shutdown_timeout=10,
         **extra_init_args
     )
@@ -47,80 +100,73 @@ def lambda_client():
     if "SENTRY_PYTHON_TEST_AWS_ACCESS_KEY_ID" not in os.environ:
         pytest.skip("AWS environ vars not set")
 
-    return boto3.client(
-        "lambda",
-        aws_access_key_id=os.environ["SENTRY_PYTHON_TEST_AWS_ACCESS_KEY_ID"],
-        aws_secret_access_key=os.environ["SENTRY_PYTHON_TEST_AWS_SECRET_ACCESS_KEY"],
-        region_name="us-east-1",
-    )
-
-
-@pytest.fixture(params=["python3.6", "python3.7", "python3.8", "python2.7"])
-def run_lambda_function(tmpdir, lambda_client, request, relay_normalize):
-    def inner(code, payload):
-        runtime = request.param
-        tmpdir.ensure_dir("lambda_tmp").remove()
-        tmp = tmpdir.ensure_dir("lambda_tmp")
+    from tests.integrations.aws_lambda.client import get_boto_client
 
-        tmp.join("test_lambda.py").write(code)
+    return get_boto_client()
 
-        # Check file for valid syntax first, and that the integration does not
-        # crash when not running in Lambda (but rather a local deployment tool
-        # such as chalice's)
-        subprocess.check_call([sys.executable, str(tmp.join("test_lambda.py"))])
 
-        tmp.join("setup.cfg").write("[install]\nprefix=")
-        subprocess.check_call([sys.executable, "setup.py", "sdist", "-d", str(tmpdir)])
-
-        # https://docs.aws.amazon.com/lambda/latest/dg/lambda-python-how-to-create-deployment-package.html
-        subprocess.check_call("pip install ../*.tar.gz -t .", cwd=str(tmp), shell=True)
-        shutil.make_archive(tmpdir.join("ball"), "zip", str(tmp))
-
-        fn_name = "test_function_{}".format(uuid.uuid4())
-
-        lambda_client.create_function(
-            FunctionName=fn_name,
-            Runtime=runtime,
-            Role=os.environ["SENTRY_PYTHON_TEST_AWS_IAM_ROLE"],
-            Handler="test_lambda.test_handler",
-            Code={"ZipFile": tmpdir.join("ball.zip").read(mode="rb")},
-            Description="Created as part of testsuite for getsentry/sentry-python",
-        )
+@pytest.fixture(
+    params=["python3.6", "python3.7", "python3.8", "python3.9", "python2.7"]
+)
+def lambda_runtime(request):
+    return request.param
 
-        @request.addfinalizer
-        def delete_function():
-            lambda_client.delete_function(FunctionName=fn_name)
 
-        response = lambda_client.invoke(
-            FunctionName=fn_name,
-            InvocationType="RequestResponse",
-            LogType="Tail",
-            Payload=payload,
+@pytest.fixture
+def run_lambda_function(request, lambda_client, lambda_runtime):
+    def inner(
+        code, payload, timeout=30, syntax_check=True, layer=None, initial_handler=None
+    ):
+        from tests.integrations.aws_lambda.client import run_lambda_function
+
+        response = run_lambda_function(
+            client=lambda_client,
+            runtime=lambda_runtime,
+            code=code,
+            payload=payload,
+            add_finalizer=request.addfinalizer,
+            timeout=timeout,
+            syntax_check=syntax_check,
+            layer=layer,
+            initial_handler=initial_handler,
         )
 
-        assert 200 <= response["StatusCode"] < 300, response
+        # for better debugging
+        response["LogResult"] = base64.b64decode(response["LogResult"]).splitlines()
+        response["Payload"] = json.loads(response["Payload"].read().decode("utf-8"))
+        del response["ResponseMetadata"]
 
         events = []
+        envelopes = []
 
-        for line in base64.b64decode(response["LogResult"]).splitlines():
+        for line in response["LogResult"]:
             print("AWS:", line)
-            if not line.startswith(b"EVENT: "):
+            if line.startswith(b"EVENT: "):
+                line = line[len(b"EVENT: ") :]
+                events.append(json.loads(line.decode("utf-8")))
+            elif line.startswith(b"ENVELOPE: "):
+                line = line[len(b"ENVELOPE: ") :]
+                envelopes.append(json.loads(line.decode("utf-8")))
+            else:
                 continue
-            line = line[len(b"EVENT: ") :]
-            events.append(json.loads(line.decode("utf-8")))
-            relay_normalize(events[-1])
 
-        return events, response
+        return envelopes, events, response
 
     return inner
 
 
 def test_basic(run_lambda_function):
-    events, response = run_lambda_function(
+    envelopes, events, response = run_lambda_function(
         LAMBDA_PRELUDE
         + dedent(
             """
         init_sdk()
+
+        def event_processor(event):
+            # Delay event output like this to test proper shutdown
+            time.sleep(1)
+            return event
+
         def test_handler(event, context):
             raise Exception("something went wrong")
         """
@@ -165,7 +211,7 @@ def test_initialization_order(run_lambda_function):
     as seen by AWS already runs. At this point at least draining the queue
     should work."""
 
-    events, _response = run_lambda_function(
+    envelopes, events, _response = run_lambda_function(
         LAMBDA_PRELUDE
         + dedent(
             """
@@ -185,7 +231,7 @@ def test_handler(event, context):
 
 
 def test_request_data(run_lambda_function):
-    events, _response = run_lambda_function(
+    envelopes, events, _response = run_lambda_function(
         LAMBDA_PRELUDE
         + dedent(
             """
@@ -234,3 +280,387 @@ def test_handler(event, context):
         "query_string": {"bonkers": "true"},
         "url": "https://iwsz2c7uwi.execute-api.us-east-1.amazonaws.com/asd",
     }
+
+
+def test_init_error(run_lambda_function, lambda_runtime):
+    if lambda_runtime == "python2.7":
+        pytest.skip("initialization error not supported on Python 2.7")
+
+    envelopes, events, response = run_lambda_function(
+        LAMBDA_PRELUDE
+        + (
+            "def event_processor(event):\n"
+            '    return event["exception"]["values"][0]["value"]\n'
+            "init_sdk()\n"
+            "func()"
+        ),
+        b'{"foo": "bar"}',
+        syntax_check=False,
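+        # skip the local syntax check; the code above is meant to fail at init time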
+    )
+
+    (event,) = events
+    assert "name 'func' is not defined" in event
+
+
+def test_timeout_error(run_lambda_function):
+    envelopes, events, response = run_lambda_function(
+        LAMBDA_PRELUDE
+        + dedent(
+            """
+        init_sdk(timeout_warning=True)
+
+        def test_handler(event, context):
+            time.sleep(10)
+            return 0
+        """
+        ),
+        b'{"foo": "bar"}',
+        timeout=3,
+    )
+
+    (event,) = events
+    assert event["level"] == "error"
+    (exception,) = event["exception"]["values"]
+    assert exception["type"] == "ServerlessTimeoutWarning"
+    assert exception["value"] in (
+        "WARNING : Function is expected to get timed out. Configured timeout duration = 4 seconds.",
+        "WARNING : Function is expected to get timed out. Configured timeout duration = 3 seconds.",
+    )
+
+    assert exception["mechanism"] == {"type": "threading", "handled": False}
+
+    assert event["extra"]["lambda"]["function_name"].startswith("test_function_")
+
+    logs_url = event["extra"]["cloudwatch logs"]["url"]
+    assert logs_url.startswith("https://console.aws.amazon.com/cloudwatch/home?region=")
+    assert not re.search("(=;|=$)", logs_url)
+    assert event["extra"]["cloudwatch logs"]["log_group"].startswith(
+        "/aws/lambda/test_function_"
+    )
+
+    log_stream_re = "^[0-9]{4}/[0-9]{2}/[0-9]{2}/\\[[^\\]]+][a-f0-9]+$"
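+    # matches CloudWatch log stream names, e.g. "2020/01/31/[$LATEST]0123456789abcdef"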
+    log_stream = event["extra"]["cloudwatch logs"]["log_stream"]
+
+    assert re.match(log_stream_re, log_stream)
+
+
+def test_performance_no_error(run_lambda_function):
+    envelopes, events, response = run_lambda_function(
+        LAMBDA_PRELUDE
+        + dedent(
+            """
+        init_sdk(traces_sample_rate=1.0)
+
+        def test_handler(event, context):
+            return "test_string"
+        """
+        ),
+        b'{"foo": "bar"}',
+    )
+
+    (envelope,) = envelopes
+    assert envelope["type"] == "transaction"
+    assert envelope["contexts"]["trace"]["op"] == "function.aws.lambda"
+    assert envelope["transaction"].startswith("test_function_")
+    assert envelope["transaction_info"] == {"source": "component"}
+    assert envelope["transaction"] in envelope["request"]["url"]
+
+
+def test_performance_error(run_lambda_function):
+    envelopes, events, response = run_lambda_function(
+        LAMBDA_PRELUDE
+        + dedent(
+            """
+        init_sdk(traces_sample_rate=1.0)
+
+        def test_handler(event, context):
+            raise Exception("something went wrong")
+        """
+        ),
+        b'{"foo": "bar"}',
+    )
+
+    (event,) = events
+    assert event["level"] == "error"
+    (exception,) = event["exception"]["values"]
+    assert exception["type"] == "Exception"
+    assert exception["value"] == "something went wrong"
+
+    (envelope,) = envelopes
+
+    assert envelope["type"] == "transaction"
+    assert envelope["contexts"]["trace"]["op"] == "function.aws.lambda"
+    assert envelope["transaction"].startswith("test_function_")
+    assert envelope["transaction_info"] == {"source": "component"}
+    assert envelope["transaction"] in envelope["request"]["url"]
+
+
+@pytest.mark.parametrize(
+    "aws_event, has_request_data, batch_size",
+    [
+        (b"1231", False, 1),
+        (b"11.21", False, 1),
+        (b'"Good dog!"', False, 1),
+        (b"true", False, 1),
+        (
+            b"""
+            [
+                {"good dog": "Maisey"},
+                {"good dog": "Charlie"},
+                {"good dog": "Cory"},
+                {"good dog": "Bodhi"}
+            ]
+            """,
+            False,
+            4,
+        ),
+        (
+            b"""
+            [
+                {
+                    "headers": {
+                        "Host": "dogs.are.great",
+                        "X-Forwarded-Proto": "http"
+                    },
+                    "httpMethod": "GET",
+                    "path": "/tricks/kangaroo",
+                    "queryStringParameters": {
+                        "completed_successfully": "true",
+                        "treat_provided": "true",
+                        "treat_type": "cheese"
+                    },
+                    "dog": "Maisey"
+                },
+                {
+                    "headers": {
+                        "Host": "dogs.are.great",
+                        "X-Forwarded-Proto": "http"
+                    },
+                    "httpMethod": "GET",
+                    "path": "/tricks/kangaroo",
+                    "queryStringParameters": {
+                        "completed_successfully": "true",
+                        "treat_provided": "true",
+                        "treat_type": "cheese"
+                    },
+                    "dog": "Charlie"
+                }
+            ]
+            """,
+            True,
+            2,
+        ),
+    ],
+)
+def test_non_dict_event(
+    run_lambda_function,
+    aws_event,
+    has_request_data,
+    batch_size,
+    DictionaryContaining,  # noqa:N803
+):
+    envelopes, events, response = run_lambda_function(
+        LAMBDA_PRELUDE
+        + dedent(
+            """
+        init_sdk(traces_sample_rate=1.0)
+
+        def test_handler(event, context):
+            raise Exception("More treats, please!")
+        """
+        ),
+        aws_event,
+    )
+
+    assert response["FunctionError"] == "Unhandled"
+
+    error_event = events[0]
+    assert error_event["level"] == "error"
+    assert error_event["contexts"]["trace"]["op"] == "function.aws.lambda"
+
+    function_name = error_event["extra"]["lambda"]["function_name"]
+    assert function_name.startswith("test_function_")
+    assert error_event["transaction"] == function_name
+
+    exception = error_event["exception"]["values"][0]
+    assert exception["type"] == "Exception"
+    assert exception["value"] == "More treats, please!"
+    assert exception["mechanism"]["type"] == "aws_lambda"
+
+    envelope = envelopes[0]
+    assert envelope["type"] == "transaction"
+    assert envelope["contexts"]["trace"] == DictionaryContaining(
+        error_event["contexts"]["trace"]
+    )
+    assert envelope["contexts"]["trace"]["status"] == "internal_error"
+    assert envelope["transaction"] == error_event["transaction"]
+    assert envelope["request"]["url"] == error_event["request"]["url"]
+
+    if has_request_data:
+        request_data = {
+            "headers": {"Host": "dogs.are.great", "X-Forwarded-Proto": "http"},
+            "method": "GET",
+            "url": "http://dogs.are.great/tricks/kangaroo",
+            "query_string": {
+                "completed_successfully": "true",
+                "treat_provided": "true",
+                "treat_type": "cheese",
+            },
+        }
+    else:
+        request_data = {"url": "awslambda:///{}".format(function_name)}
+
+    assert error_event["request"] == request_data
+    assert envelope["request"] == request_data
+
+    if batch_size > 1:
+        assert error_event["tags"]["batch_size"] == batch_size
+        assert error_event["tags"]["batch_request"] is True
+        assert envelope["tags"]["batch_size"] == batch_size
+        assert envelope["tags"]["batch_request"] is True
+
+
+def test_traces_sampler_gets_correct_values_in_sampling_context(
+    run_lambda_function,
+    DictionaryContaining,  # noqa:N803
+    ObjectDescribedBy,
+    StringContaining,
+):
+    # TODO: This whole thing is a little hacky, specifically around the need to
+    # get `conftest.py` code into the AWS runtime, which is why there's both
+    # `inspect.getsource` and a copy of `_safe_is_equal` included directly in
+    # the code below. Ideas which have been discussed to fix this:
+
+    # - Include the test suite as a module installed in the package which is
+    #   shot up to AWS
+    # - In client.py, copy `conftest.py` (or wherever the necessary code lives)
+    #   from the test suite into the main SDK directory so it gets included as
+    #   "part of the SDK"
+
+    # It's also worth noting why it's necessary to run the assertions in the AWS
+    # runtime rather than asserting on side effects the way we do with events
+    # and envelopes. The reasons are two-fold:
+
+    # - We're testing against the `LambdaContext` class, which only exists in
+    #   the AWS runtime
+    # - If we were to transmit call args data the way we transmit event and
+    #   envelope data (through JSON), we'd quickly run into the problem that all
+    #   sorts of stuff isn't serializable by `json.dumps` out of the box, up to
+    #   and including `datetime` objects (so anything with a timestamp is
+    #   automatically out)
+
+    # Perhaps these challenges can be solved in a cleaner and more systematic
+    # way if we ever decide to refactor the entire AWS testing apparatus.
+
+    import inspect
+
+    envelopes, events, response = run_lambda_function(
+        LAMBDA_PRELUDE
+        + dedent(inspect.getsource(StringContaining))
+        + dedent(inspect.getsource(DictionaryContaining))
+        + dedent(inspect.getsource(ObjectDescribedBy))
+        + dedent(
+            """
+            try:
+                from unittest import mock  # python 3.3 and above
+            except ImportError:
+                import mock  # python < 3.3
+
+            def _safe_is_equal(x, y):
+                # copied from conftest.py - see docstring and comments there
+                try:
+                    is_equal = x.__eq__(y)
+                except AttributeError:
+                    is_equal = NotImplemented
+
+                if is_equal == NotImplemented:
+                    # using == smoothes out weird variations exposed by raw __eq__
+                    return x == y
+
+                return is_equal
+
+            def test_handler(event, context):
+                # this runs after the transaction has started, which means we
+                # can make assertions about traces_sampler
+                try:
+                    traces_sampler.assert_any_call(
+                        DictionaryContaining(
+                            {
+                                "aws_event": DictionaryContaining({
+                                    "httpMethod": "GET",
+                                    "path": "/sit/stay/rollover",
+                                    "headers": {"Host": "dogs.are.great", "X-Forwarded-Proto": "http"},
+                                }),
+                                "aws_context": ObjectDescribedBy(
+                                    type=get_lambda_bootstrap().LambdaContext,
+                                    attrs={
+                                        'function_name': StringContaining("test_function"),
+                                        'function_version': '$LATEST',
+                                    }
+                                )
+                            }
+                        )
+                    )
+                except AssertionError:
+                    # catch the error and return it because the error itself will
+                    # get swallowed by the SDK as an "internal exception"
+                    return {"AssertionError raised": True,}
+
+                return {"AssertionError raised": False,}
+
+
+            traces_sampler = mock.Mock(return_value=True)
+
+            init_sdk(
+                traces_sampler=traces_sampler,
+            )
+        """
+        ),
+        b'{"httpMethod": "GET", "path": "/sit/stay/rollover", "headers": {"Host": "dogs.are.great", "X-Forwarded-Proto": "http"}}',
+    )
+
+    assert response["Payload"]["AssertionError raised"] is False
+
+
+def test_serverless_no_code_instrumentation(run_lambda_function):
+    """
+    Test that ensures that just by adding a lambda layer containing the
+    python sdk, with no code changes sentry is able to capture errors
+    """
+
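+    # Cover the default handler as well as slash- and dot-separated handler paths.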
+    for initial_handler in [
+        None,
+        "test_dir/test_lambda.test_handler",
+        "test_dir.test_lambda.test_handler",
+    ]:
+        print("Testing Initial Handler ", initial_handler)
+        _, _, response = run_lambda_function(
+            dedent(
+                """
+            import sentry_sdk
+
+            def test_handler(event, context):
+                current_client = sentry_sdk.Hub.current.client
+
+                assert current_client is not None
+
+                assert len(current_client.options['integrations']) == 1
+                assert isinstance(current_client.options['integrations'][0],
+                                  sentry_sdk.integrations.aws_lambda.AwsLambdaIntegration)
+
+                raise Exception("something went wrong")
+            """
+            ),
+            b'{"foo": "bar"}',
+            layer=True,
+            initial_handler=initial_handler,
+        )
+        assert response["FunctionError"] == "Unhandled"
+        assert response["StatusCode"] == 200
+
+        assert response["Payload"]["errorType"] != "AssertionError"
+
+        assert response["Payload"]["errorType"] == "Exception"
+        assert response["Payload"]["errorMessage"] == "something went wrong"
+
+        assert "sentry_handler" in response["LogResult"][3].decode("utf-8")
diff --git a/tests/integrations/beam/test_beam.py b/tests/integrations/beam/test_beam.py
index 8beb9b80a1..7aeb617e3c 100644
--- a/tests/integrations/beam/test_beam.py
+++ b/tests/integrations/beam/test_beam.py
@@ -152,7 +152,9 @@ def test_monkey_patch_signature(f, args, kwargs):
 
 
 class _OutputProcessor(OutputProcessor):
-    def process_outputs(self, windowed_input_element, results):
+    def process_outputs(
+        self, windowed_input_element, results, watermark_estimator=None
+    ):
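+        # watermark_estimator is accepted (and ignored) for compatibility with newer Beam versions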
         print(windowed_input_element)
         try:
             for result in results:
diff --git a/tests/integrations/boto3/__init__.py b/tests/integrations/boto3/__init__.py
new file mode 100644
index 0000000000..09738c40c7
--- /dev/null
+++ b/tests/integrations/boto3/__init__.py
@@ -0,0 +1,10 @@
+import pytest
+import os
+
+pytest.importorskip("boto3")
+xml_fixture_path = os.path.dirname(os.path.abspath(__file__))
+
+
+def read_fixture(name):
+    with open(os.path.join(xml_fixture_path, name), "rb") as f:
+        return f.read()
diff --git a/tests/integrations/boto3/aws_mock.py b/tests/integrations/boto3/aws_mock.py
new file mode 100644
index 0000000000..84ff23f466
--- /dev/null
+++ b/tests/integrations/boto3/aws_mock.py
@@ -0,0 +1,33 @@
+from io import BytesIO
+from botocore.awsrequest import AWSResponse
+
+
+class Body(BytesIO):
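+    # Minimal stand-in for a streaming response body: yields chunks until exhausted.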
+    def stream(self, **kwargs):
+        contents = self.read()
+        while contents:
+            yield contents
+            contents = self.read()
+
+
+class MockResponse(object):
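+    # Context manager that hooks botocore's "before-send" event so the client
+    # receives this canned AWSResponse instead of hitting the network.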
+    def __init__(self, client, status_code, headers, body):
+        self._client = client
+        self._status_code = status_code
+        self._headers = headers
+        self._body = body
+
+    def __enter__(self):
+        self._client.meta.events.register("before-send", self)
+        return self
+
+    def __exit__(self, exc_type, exc_value, traceback):
+        self._client.meta.events.unregister("before-send", self)
+
+    def __call__(self, request, **kwargs):
+        return AWSResponse(
+            request.url,
+            self._status_code,
+            self._headers,
+            Body(self._body),
+        )
diff --git a/tests/integrations/boto3/s3_list.xml b/tests/integrations/boto3/s3_list.xml
new file mode 100644
index 0000000000..10d5b16340
--- /dev/null
+++ b/tests/integrations/boto3/s3_list.xml
@@ -0,0 +1,2 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<ListBucketResult xmlns="http://s3.amazonaws.com/doc/2006-03-01/"><Name>marshalls-furious-bucket</Name><Prefix></Prefix><Marker></Marker><MaxKeys>1000</MaxKeys><EncodingType>url</EncodingType><IsTruncated>false</IsTruncated><Contents><Key>foo.txt</Key><LastModified>2020-10-24T00:13:39.000Z</LastModified><ETag>&quot;a895ba674b4abd01b5d67cfd7074b827&quot;</ETag><Size>2064537</Size><Owner><ID>bef397f7e536914d1ff1bbdb105ed90bcfd06269456bf4a06c6e2e54564daf7</ID></Owner><StorageClass>STANDARD</StorageClass></Contents><Contents><Key>bar.txt</Key><LastModified>2020-10-02T15:15:20.000Z</LastModified><ETag>&quot;a895ba674b4abd01b5d67cfd7074b827&quot;</ETag><Size>2064537</Size><Owner><ID>bef397f7e536914d1ff1bbdb105ed90bcfd06269456bf4a06c6e2e54564daf7</ID></Owner><StorageClass>STANDARD</StorageClass></Contents></ListBucketResult>
diff --git a/tests/integrations/boto3/test_s3.py b/tests/integrations/boto3/test_s3.py
new file mode 100644
index 0000000000..7f02d422a0
--- /dev/null
+++ b/tests/integrations/boto3/test_s3.py
@@ -0,0 +1,85 @@
+from sentry_sdk import Hub
+from sentry_sdk.integrations.boto3 import Boto3Integration
+from tests.integrations.boto3.aws_mock import MockResponse
+from tests.integrations.boto3 import read_fixture
+
+import boto3
+
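+# Dummy credentials; requests are intercepted by MockResponse and never sent to AWS.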
+session = boto3.Session(
+    aws_access_key_id="-",
+    aws_secret_access_key="-",
+)
+
+
+def test_basic(sentry_init, capture_events):
+    sentry_init(traces_sample_rate=1.0, integrations=[Boto3Integration()])
+    events = capture_events()
+
+    s3 = session.resource("s3")
+    with Hub.current.start_transaction() as transaction, MockResponse(
+        s3.meta.client, 200, {}, read_fixture("s3_list.xml")
+    ):
+        bucket = s3.Bucket("bucket")
+        items = [obj for obj in bucket.objects.all()]
+        assert len(items) == 2
+        assert items[0].key == "foo.txt"
+        assert items[1].key == "bar.txt"
+        transaction.finish()
+
+    (event,) = events
+    assert event["type"] == "transaction"
+    assert len(event["spans"]) == 1
+    (span,) = event["spans"]
+    assert span["op"] == "http.client"
+    assert span["description"] == "aws.s3.ListObjects"
+
+
+def test_streaming(sentry_init, capture_events):
+    sentry_init(traces_sample_rate=1.0, integrations=[Boto3Integration()])
+    events = capture_events()
+
+    s3 = session.resource("s3")
+    with Hub.current.start_transaction() as transaction, MockResponse(
+        s3.meta.client, 200, {}, b"hello"
+    ):
+        obj = s3.Bucket("bucket").Object("foo.pdf")
+        body = obj.get()["Body"]
+        assert body.read(1) == b"h"
+        assert body.read(2) == b"el"
+        assert body.read(3) == b"lo"
+        assert body.read(1) == b""
+        transaction.finish()
+
+    (event,) = events
+    assert event["type"] == "transaction"
+    assert len(event["spans"]) == 2
+    span1 = event["spans"][0]
+    assert span1["op"] == "http.client"
+    assert span1["description"] == "aws.s3.GetObject"
+    span2 = event["spans"][1]
+    assert span2["op"] == "http.client.stream"
+    assert span2["description"] == "aws.s3.GetObject"
+    assert span2["parent_span_id"] == span1["span_id"]
+
+
+def test_streaming_close(sentry_init, capture_events):
+    sentry_init(traces_sample_rate=1.0, integrations=[Boto3Integration()])
+    events = capture_events()
+
+    s3 = session.resource("s3")
+    with Hub.current.start_transaction() as transaction, MockResponse(
+        s3.meta.client, 200, {}, b"hello"
+    ):
+        obj = s3.Bucket("bucket").Object("foo.pdf")
+        body = obj.get()["Body"]
+        assert body.read(1) == b"h"
+        body.close()  # close partially-read stream
+        transaction.finish()
+
+    (event,) = events
+    assert event["type"] == "transaction"
+    assert len(event["spans"]) == 2
+    span1 = event["spans"][0]
+    assert span1["op"] == "http.client"
+    span2 = event["spans"][1]
+    assert span2["op"] == "http.client.stream"
diff --git a/tests/integrations/bottle/test_bottle.py b/tests/integrations/bottle/test_bottle.py
index 16aacb55c5..dfd6e52f80 100644
--- a/tests/integrations/bottle/test_bottle.py
+++ b/tests/integrations/bottle/test_bottle.py
@@ -24,6 +24,11 @@ def hi():
         capture_message("hi")
         return "ok"
 
+    @app.route("/message/")
+    def hi_with_id(message_id):
+        capture_message("hi")
+        return "ok"
+
     @app.route("/message-named-route", name="hi")
     def named_hi():
         capture_message("hi")
@@ -55,20 +60,21 @@ def test_has_context(sentry_init, app, capture_events, get_client):
 
 
 @pytest.mark.parametrize(
-    "url,transaction_style,expected_transaction",
+    "url,transaction_style,expected_transaction,expected_source",
     [
-        ("/message", "endpoint", "hi"),
-        ("/message", "url", "/message"),
-        ("/message-named-route", "endpoint", "hi"),
+        ("/message", "endpoint", "hi", "component"),
+        ("/message", "url", "/message", "route"),
+        ("/message/123456", "url", "/message/", "route"),
+        ("/message-named-route", "endpoint", "hi", "component"),
     ],
 )
 def test_transaction_style(
     sentry_init,
-    app,
-    capture_events,
+    url,
     transaction_style,
     expected_transaction,
-    url,
+    expected_source,
+    capture_events,
     get_client,
 ):
     sentry_init(
@@ -79,11 +85,14 @@ def test_transaction_style(
     events = capture_events()
 
     client = get_client()
-    response = client.get("/message")
+    response = client.get(url)
     assert response[1] == "200 OK"
 
     (event,) = events
+    # We use endswith() because in Python 2.7 it is "test_bottle.hi"
+    # and in later Pythons "test_bottle.app.<locals>.hi"
     assert event["transaction"].endswith(expected_transaction)
+    assert event["transaction_info"] == {"source": expected_source}
 
 
 @pytest.mark.parametrize("debug", (True, False), ids=["debug", "nodebug"])
@@ -141,9 +150,9 @@ def index():
 
     (event,) = events
     assert event["_meta"]["request"]["data"]["foo"]["bar"] == {
-        "": {"len": 2000, "rem": [["!limit", "x", 509, 512]]}
+        "": {"len": 2000, "rem": [["!limit", "x", 1021, 1024]]}
     }
-    assert len(event["request"]["data"]["foo"]["bar"]) == 512
+    assert len(event["request"]["data"]["foo"]["bar"]) == 1024
 
 
 @pytest.mark.parametrize("data", [{}, []], ids=["empty-dict", "empty-list"])
@@ -191,12 +200,12 @@ def index():
 
     (event,) = events
     assert event["_meta"]["request"]["data"]["foo"] == {
-        "": {"len": 2000, "rem": [["!limit", "x", 509, 512]]}
+        "": {"len": 2000, "rem": [["!limit", "x", 1021, 1024]]}
     }
-    assert len(event["request"]["data"]["foo"]) == 512
+    assert len(event["request"]["data"]["foo"]) == 1024
 
 
-@pytest.mark.parametrize("input_char", [u"a", b"a"])
+@pytest.mark.parametrize("input_char", ["a", b"a"])
 def test_too_large_raw_request(
     sentry_init, input_char, capture_events, app, get_client
 ):
@@ -225,9 +234,7 @@ def index():
     assert response[1] == "200 OK"
 
     (event,) = events
-    assert event["_meta"]["request"]["data"] == {
-        "": {"len": 2000, "rem": [["!config", "x", 0, 2000]]}
-    }
+    assert event["_meta"]["request"]["data"] == {"": {"rem": [["!config", "x"]]}}
     assert not event["request"]["data"]
 
 
@@ -256,15 +263,14 @@ def index():
 
     (event,) = events
     assert event["_meta"]["request"]["data"]["foo"] == {
-        "": {"len": 2000, "rem": [["!limit", "x", 509, 512]]}
+        "": {"len": 2000, "rem": [["!limit", "x", 1021, 1024]]}
     }
-    assert len(event["request"]["data"]["foo"]) == 512
+    assert len(event["request"]["data"]["foo"]) == 1024
 
     assert event["_meta"]["request"]["data"]["file"] == {
         "": {
-            "len": -1,
-            "rem": [["!raw", "x", 0, -1]],
-        }  # bottle default content-length is -1
+            "rem": [["!raw", "x"]],
+        }
     }
     assert not event["request"]["data"]["file"]
 
diff --git a/tests/integrations/celery/test_celery.py b/tests/integrations/celery/test_celery.py
index ea475f309a..a2c8fa1594 100644
--- a/tests/integrations/celery/test_celery.py
+++ b/tests/integrations/celery/test_celery.py
@@ -4,13 +4,18 @@
 
 pytest.importorskip("celery")
 
-from sentry_sdk import Hub, configure_scope
+from sentry_sdk import Hub, configure_scope, start_transaction
 from sentry_sdk.integrations.celery import CeleryIntegration
 from sentry_sdk._compat import text_type
 
 from celery import Celery, VERSION
 from celery.bin import worker
 
+try:
+    from unittest import mock  # python 3.3 and above
+except ImportError:
+    import mock  # python < 3.3
+
 
 @pytest.fixture
 def connect_signal(request):
@@ -22,17 +27,51 @@ def inner(signal, f):
 
 
 @pytest.fixture
-def init_celery(sentry_init):
-    def inner(propagate_traces=True, **kwargs):
+def init_celery(sentry_init, request):
+    def inner(propagate_traces=True, backend="always_eager", **kwargs):
         sentry_init(
             integrations=[CeleryIntegration(propagate_traces=propagate_traces)],
             **kwargs
         )
         celery = Celery(__name__)
-        if VERSION < (4,):
-            celery.conf.CELERY_ALWAYS_EAGER = True
+
+        if backend == "always_eager":
+            if VERSION < (4,):
+                celery.conf.CELERY_ALWAYS_EAGER = True
+            else:
+                celery.conf.task_always_eager = True
+        elif backend == "redis":
+            # broken on celery 3
+            if VERSION < (4,):
+                pytest.skip("Redis backend broken for some reason")
+
+            # this backend requires capture_events_forksafe
+            celery.conf.worker_max_tasks_per_child = 1
+            celery.conf.worker_concurrency = 1
+            celery.conf.broker_url = "redis://127.0.0.1:6379"
+            celery.conf.result_backend = "redis://127.0.0.1:6379"
+            celery.conf.task_always_eager = False
+
+            Hub.main.bind_client(Hub.current.client)
+            request.addfinalizer(lambda: Hub.main.bind_client(None))
+
+            # Once we drop celery 3 we can use the celery_worker fixture
+            if VERSION < (5,):
+                worker_fn = worker.worker(app=celery).run
+            else:
+                from celery.bin.base import CLIContext
+
+                worker_fn = lambda: worker.worker(
+                    obj=CLIContext(app=celery, no_color=True, workdir=".", quiet=False),
+                    args=[],
+                )
+
+            worker_thread = threading.Thread(target=worker_fn)
+            worker_thread.daemon = True
+            worker_thread.start()
         else:
-            celery.conf.task_always_eager = True
+            raise ValueError(backend)
+
         return celery
 
     return inner
@@ -74,14 +113,14 @@ def dummy_task(x, y):
         foo = 42  # noqa
         return x / y
 
-    with Hub.current.start_span() as span:
+    with start_transaction() as transaction:
         celery_invocation(dummy_task, 1, 2)
         _, expected_context = celery_invocation(dummy_task, 1, 0)
 
     (event,) = events
 
-    assert event["contexts"]["trace"]["trace_id"] == span.trace_id
-    assert event["contexts"]["trace"]["span_id"] != span.span_id
+    assert event["contexts"]["trace"]["trace_id"] == transaction.trace_id
+    assert event["contexts"]["trace"]["span_id"] != transaction.span_id
     assert event["transaction"] == "dummy_task"
     assert "celery_task_id" in event["tags"]
     assert event["extra"]["celery-job"] == dict(
@@ -107,22 +146,24 @@ def dummy_task(x, y):
 
     events = capture_events()
 
-    with Hub.current.start_span(transaction="submission") as span:
+    with start_transaction(name="submission") as transaction:
         celery_invocation(dummy_task, 1, 0 if task_fails else 1)
 
     if task_fails:
         error_event = events.pop(0)
-        assert error_event["contexts"]["trace"]["trace_id"] == span.trace_id
+        assert error_event["contexts"]["trace"]["trace_id"] == transaction.trace_id
         assert error_event["exception"]["values"][0]["type"] == "ZeroDivisionError"
 
     execution_event, submission_event = events
-
     assert execution_event["transaction"] == "dummy_task"
+    assert execution_event["transaction_info"] == {"source": "task"}
+
     assert submission_event["transaction"] == "submission"
+    assert submission_event["transaction_info"] == {"source": "custom"}
 
     assert execution_event["type"] == submission_event["type"] == "transaction"
-    assert execution_event["contexts"]["trace"]["trace_id"] == span.trace_id
-    assert submission_event["contexts"]["trace"]["trace_id"] == span.trace_id
+    assert execution_event["contexts"]["trace"]["trace_id"] == transaction.trace_id
+    assert submission_event["contexts"]["trace"]["trace_id"] == transaction.trace_id
 
     if task_fails:
         assert execution_event["contexts"]["trace"]["status"] == "internal_error"
@@ -132,14 +173,14 @@ def dummy_task(x, y):
     assert execution_event["spans"] == []
     assert submission_event["spans"] == [
         {
-            u"description": u"dummy_task",
-            u"op": "celery.submit",
-            u"parent_span_id": submission_event["contexts"]["trace"]["span_id"],
-            u"same_process_as_parent": True,
-            u"span_id": submission_event["spans"][0]["span_id"],
-            u"start_timestamp": submission_event["spans"][0]["start_timestamp"],
-            u"timestamp": submission_event["spans"][0]["timestamp"],
-            u"trace_id": text_type(span.trace_id),
+            "description": "dummy_task",
+            "op": "queue.submit.celery",
+            "parent_span_id": submission_event["contexts"]["trace"]["span_id"],
+            "same_process_as_parent": True,
+            "span_id": submission_event["spans"][0]["span_id"],
+            "start_timestamp": submission_event["spans"][0]["start_timestamp"],
+            "timestamp": submission_event["spans"][0]["timestamp"],
+            "trace_id": text_type(transaction.trace_id),
         }
     ]
 
@@ -177,11 +218,11 @@ def test_simple_no_propagation(capture_events, init_celery):
     def dummy_task():
         1 / 0
 
-    with Hub.current.start_span() as span:
+    with start_transaction() as transaction:
         dummy_task.delay()
 
     (event,) = events
-    assert event["contexts"]["trace"]["trace_id"] != span.trace_id
+    assert event["contexts"]["trace"]["trace_id"] != transaction.trace_id
     assert event["transaction"] == "dummy_task"
     (exception,) = event["exception"]["values"]
     assert exception["type"] == "ZeroDivisionError"
@@ -236,7 +277,7 @@ def dummy_task(x, y):
 
 
 @pytest.mark.xfail(
-    (4, 2, 0) <= VERSION,
+    (4, 2, 0) <= VERSION < (4, 4, 3),
     strict=True,
     reason="https://github.com/celery/celery/issues/4661",
 )
@@ -272,16 +313,13 @@ def dummy_task(self):
         assert e["type"] == "ZeroDivisionError"
 
 
+# TODO: This test hangs when the suite runs with `tox --parallel auto`. Find out why and fix it!
+@pytest.mark.skip
 @pytest.mark.forked
-@pytest.mark.skipif(VERSION < (4,), reason="in-memory backend broken")
-def test_transport_shutdown(request, celery, capture_events_forksafe, tmpdir):
-    events = capture_events_forksafe()
+def test_redis_backend_trace_propagation(init_celery, capture_events_forksafe, tmpdir):
+    celery = init_celery(traces_sample_rate=1.0, backend="redis", debug=True)
 
-    celery.conf.worker_max_tasks_per_child = 1
-    celery.conf.broker_url = "memory://localhost/"
-    celery.conf.broker_backend = "memory"
-    celery.conf.result_backend = "file://{}".format(tmpdir.mkdir("celery-results"))
-    celery.conf.task_always_eager = False
+    events = capture_events_forksafe()
 
     runs = []
 
@@ -290,21 +328,39 @@ def dummy_task(self):
         runs.append(1)
         1 / 0
 
-    res = dummy_task.delay()
-
-    w = worker.worker(app=celery)
-    t = threading.Thread(target=w.run)
-    t.daemon = True
-    t.start()
+    with start_transaction(name="submit_celery"):
+        # Curious: Cannot use delay() here or py2.7-celery-4.2 crashes
+        res = dummy_task.apply_async()
 
     with pytest.raises(Exception):
         # Celery 4.1 raises a gibberish exception
         res.wait()
 
+    # if this is nonempty, the worker never really forked
+    assert not runs
+
+    submit_transaction = events.read_event()
+    assert submit_transaction["type"] == "transaction"
+    assert submit_transaction["transaction"] == "submit_celery"
+
+    # Expect 4 spans, because the Redis integration was auto-enabled.
+    assert len(submit_transaction["spans"]) == 4
+    span = submit_transaction["spans"][0]
+    assert span["op"] == "queue.submit.celery"
+    assert span["description"] == "dummy_task"
+
     event = events.read_event()
     (exception,) = event["exception"]["values"]
     assert exception["type"] == "ZeroDivisionError"
 
+    transaction = events.read_event()
+    assert (
+        transaction["contexts"]["trace"]["trace_id"]
+        == event["contexts"]["trace"]["trace_id"]
+        == submit_transaction["contexts"]["trace"]["trace_id"]
+    )
+
     events.read_flush()
 
     # if this is nonempty, the worker never really forked
@@ -336,3 +392,48 @@ def dummy_task(self, x, y):
 
     assert dummy_task.apply(kwargs={"x": 1, "y": 1}).wait() == 1
     assert celery_invocation(dummy_task, 1, 1)[0].wait() == 1
+
+
+def test_traces_sampler_gets_task_info_in_sampling_context(
+    init_celery, celery_invocation, DictionaryContaining  # noqa:N803
+):
+    traces_sampler = mock.Mock()
+    celery = init_celery(traces_sampler=traces_sampler)
+
+    @celery.task(name="dog_walk")
+    def walk_dogs(x, y):
+        dogs, route = x
+        num_loops = y
+        return dogs, route, num_loops
+
+    _, args_kwargs = celery_invocation(
+        walk_dogs, [["Maisey", "Charlie", "Bodhi", "Cory"], "Dog park round trip"], 1
+    )
+
+    traces_sampler.assert_any_call(
+        # depending on the iteration of celery_invocation, the data might be
+        # passed as args or as kwargs, so make this generic
+        DictionaryContaining({"celery_job": dict(task="dog_walk", **args_kwargs)})
+    )
+
+
+def test_abstract_task(capture_events, celery, celery_invocation):
+    events = capture_events()
+
+    class AbstractTask(celery.Task):
+        abstract = True
+
+        def __call__(self, *args, **kwargs):
+            try:
+                return self.run(*args, **kwargs)
+            except ZeroDivisionError:
+                return None
+
+    @celery.task(name="dummy_task", base=AbstractTask)
+    def dummy_task(x, y):
+        return x / y
+
+    with start_transaction():
+        celery_invocation(dummy_task, 1, 0)
+
+    assert not events
diff --git a/tests/integrations/chalice/__init__.py b/tests/integrations/chalice/__init__.py
new file mode 100644
index 0000000000..9f8680b4b2
--- /dev/null
+++ b/tests/integrations/chalice/__init__.py
@@ -0,0 +1,3 @@
+import pytest
+
+pytest.importorskip("chalice")
diff --git a/tests/integrations/chalice/test_chalice.py b/tests/integrations/chalice/test_chalice.py
new file mode 100644
index 0000000000..4162a55623
--- /dev/null
+++ b/tests/integrations/chalice/test_chalice.py
@@ -0,0 +1,147 @@
+import pytest
+import time
+from chalice import Chalice, BadRequestError
+from chalice.local import LambdaContext, LocalGateway
+
+from sentry_sdk.integrations.chalice import ChaliceIntegration
+from sentry_sdk import capture_message
+
+from pytest_chalice.handlers import RequestHandler
+
+
+def _generate_lambda_context(self):
+    # Monkeypatch of LocalGateway._generate_lambda_context
+    # so tests can mock the Lambda timeout.
+    # type: () -> LambdaContext
+    if self._config.lambda_timeout is None:
+        timeout = 10 * 1000
+    else:
+        timeout = self._config.lambda_timeout * 1000
+    return LambdaContext(
+        function_name=self._config.function_name,
+        memory_size=self._config.lambda_memory_size,
+        max_runtime_ms=timeout,
+    )
+
+
+@pytest.fixture
+def app(sentry_init):
+    sentry_init(integrations=[ChaliceIntegration()])
+    app = Chalice(app_name="sentry_chalice")
+
+    @app.route("/boom")
+    def boom():
+        raise Exception("boom goes the dynamite!")
+
+    @app.route("/context")
+    def has_request():
+        raise Exception("boom goes the dynamite!")
+
+    @app.route("/badrequest")
+    def badrequest():
+        raise BadRequestError("bad-request")
+
+    @app.route("/message")
+    def hi():
+        capture_message("hi")
+        return {"status": "ok"}
+
+    @app.route("/message/{message_id}")
+    def hi_with_id(message_id):
+        capture_message("hi again")
+        return {"status": "ok"}
+
+    LocalGateway._generate_lambda_context = _generate_lambda_context
+
+    return app
+
+
+@pytest.fixture
+def lambda_context_args():
+    return ["lambda_name", 256]
+
+
+def test_exception_boom(app, client: RequestHandler) -> None:
+    response = client.get("/boom")
+    assert response.status_code == 500
+    assert response.json == dict(
+        [
+            ("Code", "InternalServerError"),
+            ("Message", "An internal server error occurred."),
+        ]
+    )
+
+
+def test_has_request(app, capture_events, client: RequestHandler):
+    events = capture_events()
+
+    response = client.get("/context")
+    assert response.status_code == 500
+
+    (event,) = events
+    assert event["level"] == "error"
+    (exception,) = event["exception"]["values"]
+    assert exception["type"] == "Exception"
+
+
+def test_scheduled_event(app, lambda_context_args):
+    @app.schedule("rate(1 minutes)")
+    def every_hour(event):
+        raise Exception("schedule event!")
+
+    context = LambdaContext(
+        *lambda_context_args, max_runtime_ms=10000, time_source=time
+    )
+
+    lambda_event = {
+        "version": "0",
+        "account": "120987654312",
+        "region": "us-west-1",
+        "detail": {},
+        "detail-type": "Scheduled Event",
+        "source": "aws.events",
+        "time": "1970-01-01T00:00:00Z",
+        "id": "event-id",
+        "resources": ["arn:aws:events:us-west-1:120987654312:rule/my-schedule"],
+    }
+    with pytest.raises(Exception) as exc_info:
+        every_hour(lambda_event, context=context)
+    assert str(exc_info.value) == "schedule event!"
+
+
+def test_bad_request(client: RequestHandler) -> None:
+    response = client.get("/badrequest")
+
+    assert response.status_code == 400
+    assert response.json == dict(
+        [
+            ("Code", "BadRequestError"),
+            ("Message", "BadRequestError: bad-request"),
+        ]
+    )
+
+
+@pytest.mark.parametrize(
+    "url,expected_transaction,expected_source",
+    [
+        ("/message", "api_handler", "component"),
+        ("/message/123456", "api_handler", "component"),
+    ],
+)
+def test_transaction(
+    app,
+    client: RequestHandler,
+    capture_events,
+    url,
+    expected_transaction,
+    expected_source,
+):
+    events = capture_events()
+
+    response = client.get(url)
+    assert response.status_code == 200
+
+    (event,) = events
+    assert event["transaction"] == expected_transaction
+    assert event["transaction_info"] == {"source": expected_source}
diff --git a/tests/integrations/django/asgi/test_asgi.py b/tests/integrations/django/asgi/test_asgi.py
index da493b8328..0652a5fdcb 100644
--- a/tests/integrations/django/asgi/test_asgi.py
+++ b/tests/integrations/django/asgi/test_asgi.py
@@ -1,12 +1,10 @@
-import pytest
+import json
 
 import django
-
+import pytest
 from channels.testing import HttpCommunicator
-
 from sentry_sdk import capture_message
 from sentry_sdk.integrations.django import DjangoIntegration
-
 from tests.integrations.django.myapp.asgi import channels_application
 
 APPS = [channels_application]
@@ -20,6 +18,7 @@
 @pytest.mark.asyncio
 async def test_basic(sentry_init, capture_events, application):
     sentry_init(integrations=[DjangoIntegration()], send_default_pii=True)
+
     events = capture_events()
 
     comm = HttpCommunicator(application, "GET", "/view-exc?test=query")
@@ -45,3 +44,183 @@ async def test_basic(sentry_init, capture_events, application):
     capture_message("hi")
     event = events[-1]
     assert "request" not in event
+
+
+@pytest.mark.parametrize("application", APPS)
+@pytest.mark.asyncio
+@pytest.mark.skipif(
+    django.VERSION < (3, 1), reason="async views have been introduced in Django 3.1"
+)
+async def test_async_views(sentry_init, capture_events, application):
+    sentry_init(integrations=[DjangoIntegration()], send_default_pii=True)
+
+    events = capture_events()
+
+    comm = HttpCommunicator(application, "GET", "/async_message")
+    response = await comm.get_response()
+    assert response["status"] == 200
+
+    (event,) = events
+
+    assert event["transaction"] == "/async_message"
+    assert event["request"] == {
+        "cookies": {},
+        "headers": {},
+        "method": "GET",
+        "query_string": None,
+        "url": "/async_message",
+    }
+
+
+@pytest.mark.parametrize("application", APPS)
+@pytest.mark.parametrize("endpoint", ["/sync/thread_ids", "/async/thread_ids"])
+@pytest.mark.asyncio
+@pytest.mark.skipif(
+    django.VERSION < (3, 1), reason="async views have been introduced in Django 3.1"
+)
+async def test_active_thread_id(sentry_init, capture_envelopes, endpoint, application):
+    sentry_init(
+        integrations=[DjangoIntegration()],
+        traces_sample_rate=1.0,
+        _experiments={"profiles_sample_rate": 1.0},
+    )
+
+    envelopes = capture_envelopes()
+
+    comm = HttpCommunicator(application, "GET", endpoint)
+    response = await comm.get_response()
+    assert response["status"] == 200, response["body"]
+
+    await comm.wait()
+
+    data = json.loads(response["body"])
+
+    envelopes = [envelope for envelope in envelopes]
+    assert len(envelopes) == 1
+
+    profiles = [item for item in envelopes[0].items if item.type == "profile"]
+    assert len(profiles) == 1
+
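+    # The thread id reported by the view must match the profiler's active_thread_id.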
+    for profile in profiles:
+        transactions = profile.payload.json["transactions"]
+        assert len(transactions) == 1
+        assert str(data["active"]) == transactions[0]["active_thread_id"]
+
+
+@pytest.mark.asyncio
+@pytest.mark.skipif(
+    django.VERSION < (3, 1), reason="async views have been introduced in Django 3.1"
+)
+async def test_async_views_concurrent_execution(sentry_init, capture_events, settings):
+    import asyncio
+    import time
+
+    settings.MIDDLEWARE = []
+    asgi_application.load_middleware(is_async=True)
+
+    sentry_init(integrations=[DjangoIntegration()], send_default_pii=True)
+
+    comm = HttpCommunicator(asgi_application, "GET", "/my_async_view")
+    comm2 = HttpCommunicator(asgi_application, "GET", "/my_async_view")
+
+    loop = asyncio.get_event_loop()
+
+    start = time.time()
+
+    r1 = loop.create_task(comm.get_response(timeout=5))
+    r2 = loop.create_task(comm2.get_response(timeout=5))
+
+    (resp1, resp2), _ = await asyncio.wait({r1, r2})
+
+    end = time.time()
+
+    assert resp1.result()["status"] == 200
+    assert resp2.result()["status"] == 200
+
+    assert end - start < 1.5
+
+
+@pytest.mark.asyncio
+@pytest.mark.skipif(
+    django.VERSION < (3, 1), reason="async views have been introduced in Django 3.1"
+)
+async def test_async_middleware_that_is_function_concurrent_execution(
+    sentry_init, capture_events, settings
+):
+    import asyncio
+    import time
+
+    settings.MIDDLEWARE = [
+        "tests.integrations.django.myapp.middleware.simple_middleware"
+    ]
+    asgi_application.load_middleware(is_async=True)
+
+    sentry_init(integrations=[DjangoIntegration()], send_default_pii=True)
+
+    comm = HttpCommunicator(asgi_application, "GET", "/my_async_view")
+    comm2 = HttpCommunicator(asgi_application, "GET", "/my_async_view")
+
+    loop = asyncio.get_event_loop()
+
+    start = time.time()
+
+    r1 = loop.create_task(comm.get_response(timeout=5))
+    r2 = loop.create_task(comm2.get_response(timeout=5))
+
+    (resp1, resp2), _ = await asyncio.wait({r1, r2})
+
+    end = time.time()
+
+    assert resp1.result()["status"] == 200
+    assert resp2.result()["status"] == 200
+
+    assert end - start < 1.5
+
+
+@pytest.mark.asyncio
+@pytest.mark.skipif(
+    django.VERSION < (3, 1), reason="async views have been introduced in Django 3.1"
+)
+async def test_async_middleware_spans(
+    sentry_init, render_span_tree, capture_events, settings
+):
+    settings.MIDDLEWARE = [
+        "django.contrib.sessions.middleware.SessionMiddleware",
+        "django.contrib.auth.middleware.AuthenticationMiddleware",
+        "django.middleware.csrf.CsrfViewMiddleware",
+        "tests.integrations.django.myapp.settings.TestMiddleware",
+    ]
+    asgi_application.load_middleware(is_async=True)
+
+    sentry_init(
+        integrations=[DjangoIntegration(middleware_spans=True)],
+        traces_sample_rate=1.0,
+        _experiments={"record_sql_params": True},
+    )
+
+    events = capture_events()
+
+    comm = HttpCommunicator(asgi_application, "GET", "/async_message")
+    response = await comm.get_response()
+    assert response["status"] == 200
+
+    await comm.wait()
+
+    message, transaction = events
+
+    assert (
+        render_span_tree(transaction)
+        == """\
+- op="http.server": description=null
+  - op="event.django": description="django.db.reset_queries"
+  - op="event.django": description="django.db.close_old_connections"
+  - op="middleware.django": description="django.contrib.sessions.middleware.SessionMiddleware.__acall__"
+    - op="middleware.django": description="django.contrib.auth.middleware.AuthenticationMiddleware.__acall__"
+      - op="middleware.django": description="django.middleware.csrf.CsrfViewMiddleware.__acall__"
+        - op="middleware.django": description="tests.integrations.django.myapp.settings.TestMiddleware.__acall__"
+          - op="middleware.django": description="django.middleware.csrf.CsrfViewMiddleware.process_view"
+          - op="view.render": description="async_message"
+  - op="event.django": description="django.db.close_old_connections"
+  - op="event.django": description="django.core.cache.close_caches"
+  - op="event.django": description="django.core.handlers.base.reset_urlconf\""""
+    )
diff --git a/tests/integrations/django/myapp/custom_urls.py b/tests/integrations/django/myapp/custom_urls.py
new file mode 100644
index 0000000000..6dfa2ed2f1
--- /dev/null
+++ b/tests/integrations/django/myapp/custom_urls.py
@@ -0,0 +1,32 @@
+"""myapp URL Configuration
+
+The `urlpatterns` list routes URLs to views. For more information please see:
+    https://docs.djangoproject.com/en/2.0/topics/http/urls/
+Examples:
+Function views
+    1. Add an import:  from my_app import views
+    2. Add a URL to urlpatterns:  path('', views.home, name='home')
+Class-based views
+    1. Add an import:  from other_app.views import Home
+    2. Add a URL to urlpatterns:  path('', Home.as_view(), name='home')
+Including another URLconf
+    1. Import the include() function: from django.urls import include, path
+    2. Add a URL to urlpatterns:  path('blog/', include('blog.urls'))
+"""
+from __future__ import absolute_import
+
+try:
+    from django.urls import path
+except ImportError:
+    from django.conf.urls import url
+
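+    # Django < 2.0 has no django.urls.path; emulate it with a regex-based url().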
+    def path(path, *args, **kwargs):
+        return url("^{}$".format(path), *args, **kwargs)
+
+
+from . import views
+
+urlpatterns = [
+    path("custom/ok", views.custom_ok, name="custom_ok"),
+    path("custom/exc", views.custom_exc, name="custom_exc"),
+]
diff --git a/tests/integrations/django/myapp/middleware.py b/tests/integrations/django/myapp/middleware.py
new file mode 100644
index 0000000000..a6c847deba
--- /dev/null
+++ b/tests/integrations/django/myapp/middleware.py
@@ -0,0 +1,30 @@
+import django
+
+if django.VERSION >= (3, 1):
+    import asyncio
+    from django.utils.decorators import sync_and_async_middleware
+
+    @sync_and_async_middleware
+    def simple_middleware(get_response):
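+        # Pass-through middleware used to exercise the SDK's sync/async middleware wrapping.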
+        if asyncio.iscoroutinefunction(get_response):
+
+            async def middleware(request):
+                response = await get_response(request)
+                return response
+
+        else:
+
+            def middleware(request):
+                response = get_response(request)
+                return response
+
+        return middleware
+
+
+def custom_urlconf_middleware(get_response):
+    def middleware(request):
+        request.urlconf = "tests.integrations.django.myapp.custom_urls"
+        response = get_response(request)
+        return response
+
+    return middleware
diff --git a/tests/integrations/django/myapp/routing.py b/tests/integrations/django/myapp/routing.py
index 796d3d7d56..30cab968ad 100644
--- a/tests/integrations/django/myapp/routing.py
+++ b/tests/integrations/django/myapp/routing.py
@@ -1,4 +1,18 @@
-from channels.http import AsgiHandler
+import channels
 from channels.routing import ProtocolTypeRouter
 
-application = ProtocolTypeRouter({"http": AsgiHandler})
+try:
+    from channels.http import AsgiHandler
+
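+    # channels 2.x routes to the AsgiHandler class itself; 3.x expects an instance.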
+    if channels.__version__ < "3.0.0":
+        django_asgi_app = AsgiHandler
+    else:
+        django_asgi_app = AsgiHandler()
+
+except ModuleNotFoundError:
+    # Since channels 4.0 ASGI handling is done by Django itself
+    from django.core.asgi import get_asgi_application
+
+    django_asgi_app = get_asgi_application()
+
+application = ProtocolTypeRouter({"http": django_asgi_app})
diff --git a/tests/integrations/django/myapp/settings.py b/tests/integrations/django/myapp/settings.py
index d46928bb9b..cc4d249082 100644
--- a/tests/integrations/django/myapp/settings.py
+++ b/tests/integrations/django/myapp/settings.py
@@ -59,6 +59,11 @@
 
 class TestMiddleware(MiddlewareMixin):
     def process_request(self, request):
+        # https://github.com/getsentry/sentry-python/issues/837 -- We should
+        # not touch the resolver_match because apparently people rely on it.
+        if request.resolver_match:
+            assert not getattr(request.resolver_match.callback, "__wrapped__", None)
+
         if "middleware-exc" in request.path:
             1 / 0
 
@@ -76,6 +81,7 @@ def middleware(request):
 MIDDLEWARE_CLASSES = [
     "django.contrib.sessions.middleware.SessionMiddleware",
     "django.contrib.auth.middleware.AuthenticationMiddleware",
+    "django.middleware.csrf.CsrfViewMiddleware",
     "tests.integrations.django.myapp.settings.TestMiddleware",
 ]
 
@@ -119,6 +125,7 @@ def middleware(request):
         "ENGINE": "django.db.backends.postgresql_psycopg2",
         "NAME": os.environ["SENTRY_PYTHON_TEST_POSTGRES_NAME"],
         "USER": os.environ["SENTRY_PYTHON_TEST_POSTGRES_USER"],
+        "PASSWORD": os.environ["SENTRY_PYTHON_TEST_POSTGRES_PASSWORD"],
         "HOST": "localhost",
         "PORT": 5432,
     }
@@ -150,7 +157,7 @@ def middleware(request):
 
 USE_L10N = True
 
-USE_TZ = True
+USE_TZ = False
 
 TEMPLATE_DEBUG = True
 
diff --git a/tests/integrations/django/myapp/templates/user_name.html b/tests/integrations/django/myapp/templates/user_name.html
new file mode 100644
index 0000000000..970107349f
--- /dev/null
+++ b/tests/integrations/django/myapp/templates/user_name.html
@@ -0,0 +1 @@
+{{ request.user }}: {{ user_age }}
diff --git a/tests/integrations/django/myapp/urls.py b/tests/integrations/django/myapp/urls.py
index 482d194dd6..ee357c843b 100644
--- a/tests/integrations/django/myapp/urls.py
+++ b/tests/integrations/django/myapp/urls.py
@@ -18,7 +18,11 @@
 try:
     from django.urls import path
 except ImportError:
-    from django.conf.urls import url as path
+    from django.conf.urls import url
+
+    def path(path, *args, **kwargs):
+        return url("^{}$".format(path), *args, **kwargs)
+
 
 from . import views
 
@@ -33,16 +37,43 @@
     path("message", views.message, name="message"),
     path("mylogin", views.mylogin, name="mylogin"),
     path("classbased", views.ClassBasedView.as_view(), name="classbased"),
+    path("sentryclass", views.SentryClassBasedView(), name="sentryclass"),
+    path(
+        "sentryclass-csrf",
+        views.SentryClassBasedViewWithCsrf(),
+        name="sentryclass_csrf",
+    ),
     path("post-echo", views.post_echo, name="post_echo"),
     path("template-exc", views.template_exc, name="template_exc"),
+    path("template-test", views.template_test, name="template_test"),
+    path("template-test2", views.template_test2, name="template_test2"),
+    path("postgres-select", views.postgres_select, name="postgres_select"),
     path(
         "permission-denied-exc",
         views.permission_denied_exc,
         name="permission_denied_exc",
     ),
+    path(
+        "csrf-hello-not-exempt",
+        views.csrf_hello_not_exempt,
+        name="csrf_hello_not_exempt",
+    ),
+    path("sync/thread_ids", views.thread_ids_sync, name="thread_ids_sync"),
 ]
 
+# async views
+if views.async_message is not None:
+    urlpatterns.append(path("async_message", views.async_message, name="async_message"))
 
+if views.my_async_view is not None:
+    urlpatterns.append(path("my_async_view", views.my_async_view, name="my_async_view"))
+
+if views.thread_ids_async is not None:
+    urlpatterns.append(
+        path("async/thread_ids", views.thread_ids_async, name="thread_ids_async")
+    )
+
+# rest framework
 try:
     urlpatterns.append(
         path("rest-framework-exc", views.rest_framework_exc, name="rest_framework_exc")
@@ -55,6 +86,9 @@
         )
     )
     urlpatterns.append(path("rest-hello", views.rest_hello, name="rest_hello"))
+    urlpatterns.append(
+        path("rest-json-response", views.rest_json_response, name="rest_json_response")
+    )
     urlpatterns.append(
         path(
             "rest-permission-denied-exc",
diff --git a/tests/integrations/django/myapp/views.py b/tests/integrations/django/myapp/views.py
index ebe667c6e6..dbf266e1ab 100644
--- a/tests/integrations/django/myapp/views.py
+++ b/tests/integrations/django/myapp/views.py
@@ -1,12 +1,20 @@
+import json
+import threading
+
+from django import VERSION
 from django.contrib.auth import login
 from django.contrib.auth.models import User
 from django.core.exceptions import PermissionDenied
-from django.http import HttpResponse, HttpResponseServerError, HttpResponseNotFound
+from django.http import HttpResponse, HttpResponseNotFound, HttpResponseServerError
 from django.shortcuts import render
+from django.template.response import TemplateResponse
+from django.utils.decorators import method_decorator
+from django.views.decorators.csrf import csrf_exempt
 from django.views.generic import ListView
 
 try:
     from rest_framework.decorators import api_view
+    from rest_framework.response import Response
 
     @api_view(["POST"])
     def rest_framework_exc(request):
@@ -25,6 +33,9 @@ def rest_hello(request):
     def rest_permission_denied_exc(request):
         raise PermissionDenied("bye")
 
+    @api_view(["GET"])
+    def rest_json_response(request):
+        return Response(dict(ok=True))
 
 except ImportError:
     pass
@@ -33,20 +44,40 @@ def rest_permission_denied_exc(request):
 import sentry_sdk
 
 
+@csrf_exempt
 def view_exc(request):
     1 / 0
 
 
+# This is a "class based view" as previously found in the sentry codebase. The
+# interesting property of this one is that csrf_exempt, as a class attribute,
+# is not in __dict__, so regular use of functools.wraps will not forward the
+# attribute.
+class SentryClassBasedView(object):
+    csrf_exempt = True
+
+    def __call__(self, request):
+        return HttpResponse("ok")
+
+
+class SentryClassBasedViewWithCsrf(object):
+    def __call__(self, request):
+        return HttpResponse("ok")
+
+
+@csrf_exempt
 def read_body_and_view_exc(request):
     request.read()
     1 / 0
 
 
+@csrf_exempt
 def message(request):
     sentry_sdk.capture_message("hi")
     return HttpResponse("ok")
 
 
+@csrf_exempt
 def mylogin(request):
     user = User.objects.create_user("john", "lennon@thebeatles.com", "johnpassword")
     user.backend = "django.contrib.auth.backends.ModelBackend"
@@ -54,6 +85,7 @@ def mylogin(request):
     return HttpResponse("ok")
 
 
+@csrf_exempt
 def handler500(request):
     return HttpResponseServerError("Sentry error: %s" % sentry_sdk.last_event_id())
 
@@ -61,24 +93,109 @@ def handler500(request):
 class ClassBasedView(ListView):
     model = None
 
+    @method_decorator(csrf_exempt)
+    def dispatch(self, request, *args, **kwargs):
+        return super(ClassBasedView, self).dispatch(request, *args, **kwargs)
+
     def head(self, *args, **kwargs):
         sentry_sdk.capture_message("hi")
         return HttpResponse("")
 
+    def post(self, *args, **kwargs):
+        return HttpResponse("ok")
+
 
+@csrf_exempt
 def post_echo(request):
     sentry_sdk.capture_message("hi")
     return HttpResponse(request.body)
 
 
+@csrf_exempt
 def handler404(*args, **kwargs):
     sentry_sdk.capture_message("not found", level="error")
     return HttpResponseNotFound("404")
 
 
+@csrf_exempt
 def template_exc(request, *args, **kwargs):
     return render(request, "error.html")
 
 
+@csrf_exempt
+def template_test(request, *args, **kwargs):
+    return render(request, "user_name.html", {"user_age": 20})
+
+
+@csrf_exempt
+def custom_ok(request, *args, **kwargs):
+    return HttpResponse("custom ok")
+
+
+@csrf_exempt
+def custom_exc(request, *args, **kwargs):
+    1 / 0
+
+
+@csrf_exempt
+def template_test2(request, *args, **kwargs):
+    return TemplateResponse(
+        request, ("user_name.html", "another_template.html"), {"user_age": 25}
+    )
+
+
+@csrf_exempt
+def postgres_select(request, *args, **kwargs):
+    from django.db import connections
+
+    cursor = connections["postgres"].cursor()
+    cursor.execute("SELECT 1;")
+    return HttpResponse("ok")
+
+
+@csrf_exempt
 def permission_denied_exc(*args, **kwargs):
     raise PermissionDenied("bye")
+
+
+def csrf_hello_not_exempt(*args, **kwargs):
+    return HttpResponse("ok")
+
+
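+# Returns the idents of the main thread and of the thread handling the
+# request, so tests can assert which thread actually served the view.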
+def thread_ids_sync(*args, **kwargs):
+    response = json.dumps(
+        {
+            "main": threading.main_thread().ident,
+            "active": threading.current_thread().ident,
+        }
+    )
+    return HttpResponse(response)
+
+
+if VERSION >= (3, 1):
+    # Wrap the async views in exec() so this module still parses under
+    # Python 2, where async/await is a syntax error.
+    exec(
+        """async def async_message(request):
+    sentry_sdk.capture_message("hi")
+    return HttpResponse("ok")"""
+    )
+
+    exec(
+        """async def my_async_view(request):
+    import asyncio
+    await asyncio.sleep(1)
+    return HttpResponse('Hello World')"""
+    )
+
+    exec(
+        """async def thread_ids_async(request):
+    response = json.dumps({
+        "main": threading.main_thread().ident,
+        "active": threading.current_thread().ident,
+    })
+    return HttpResponse(response)"""
+    )
+else:
+    async_message = None
+    my_async_view = None
+    thread_ids_async = None
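+# Assumption: myapp/urls.py only registers routes for these views when they
+# are not None, so older Django versions simply skip the async endpoints.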
diff --git a/tests/integrations/django/test_basic.py b/tests/integrations/django/test_basic.py
index b3a08f5c50..fee2b34afc 100644
--- a/tests/integrations/django/test_basic.py
+++ b/tests/integrations/django/test_basic.py
@@ -1,7 +1,9 @@
 from __future__ import absolute_import
 
-import pytest
 import json
+import pytest
+import pytest_django
+from functools import partial
 
 from werkzeug.test import Client
 from django import VERSION as DJANGO_VERSION
@@ -9,17 +11,36 @@
 from django.core.management import execute_from_command_line
 from django.db.utils import OperationalError, ProgrammingError, DataError
 
-
 try:
     from django.urls import reverse
 except ImportError:
     from django.core.urlresolvers import reverse
 
-from sentry_sdk import capture_message, capture_exception
+from sentry_sdk._compat import PY2, PY310
+from sentry_sdk import capture_message, capture_exception, configure_scope
 from sentry_sdk.integrations.django import DjangoIntegration
+from sentry_sdk.integrations.django.signals_handlers import _get_receiver_name
+from sentry_sdk.integrations.executing import ExecutingIntegration
 
 from tests.integrations.django.myapp.wsgi import application
 
+# Hack around the experimental feature introduced in pytest-django 4.3.0 that
+# requires tests to explicitly declare which databases they touch; without
+# `databases="__all__"` these tests would fail on newer versions.
+pytest_mark_django_db_decorator = partial(pytest.mark.django_db)
+try:
+    pytest_version = tuple(map(int, pytest_django.__version__.split(".")))
+    if pytest_version > (4, 2, 0):
+        pytest_mark_django_db_decorator = partial(
+            pytest.mark.django_db, databases="__all__"
+        )
+except ValueError:
+    if "dev" in pytest_django.__version__:
+        pytest_mark_django_db_decorator = partial(
+            pytest.mark.django_db, databases="__all__"
+        )
+except AttributeError:
+    pass
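+# Use as `@pytest_mark_django_db_decorator()` (optionally with
+# `transaction=True`) wherever a bare `@pytest.mark.django_db` used to go.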
+
 
 @pytest.fixture
 def client():
@@ -39,6 +60,46 @@ def test_view_exceptions(sentry_init, client, capture_exceptions, capture_events
     assert event["exception"]["values"][0]["mechanism"]["type"] == "django"
 
 
+def test_ensures_x_forwarded_header_is_honored_in_sdk_when_enabled_in_django(
+    sentry_init, client, capture_exceptions, capture_events, settings
+):
+    """
+    Test that ensures if django settings.USE_X_FORWARDED_HOST is set to True
+    then the SDK sets the request url to the `HTTP_X_FORWARDED_FOR`
+    """
+    settings.USE_X_FORWARDED_HOST = True
+
+    sentry_init(integrations=[DjangoIntegration()], send_default_pii=True)
+    exceptions = capture_exceptions()
+    events = capture_events()
+    client.get(reverse("view_exc"), headers={"X_FORWARDED_HOST": "example.com"})
+
+    (error,) = exceptions
+    assert isinstance(error, ZeroDivisionError)
+
+    (event,) = events
+    assert event["request"]["url"] == "http://example.com/view-exc"
+
+
+def test_ensures_x_forwarded_header_is_not_honored_when_unenabled_in_django(
+    sentry_init, client, capture_exceptions, capture_events
+):
+    """
+    Test that ensures if django settings.USE_X_FORWARDED_HOST is set to False
+    then the SDK sets the request url to the `HTTP_POST`
+    """
+    sentry_init(integrations=[DjangoIntegration()], send_default_pii=True)
+    exceptions = capture_exceptions()
+    events = capture_events()
+    client.get(reverse("view_exc"), headers={"X_FORWARDED_HOST": "example.com"})
+
+    (error,) = exceptions
+    assert isinstance(error, ZeroDivisionError)
+
+    (event,) = events
+    assert event["request"]["url"] == "http://localhost/view-exc"
+
+
 def test_middleware_exceptions(sentry_init, client, capture_exceptions):
     sentry_init(integrations=[DjangoIntegration()], send_default_pii=True)
     exceptions = capture_exceptions()
@@ -181,16 +242,13 @@ def test_sql_queries(sentry_init, capture_events, with_integration):
 
     from django.db import connection
 
-    sentry_init(
-        integrations=[DjangoIntegration()],
-        send_default_pii=True,
-        _experiments={"record_sql_params": True},
-    )
-
     events = capture_events()
 
     sql = connection.cursor()
 
+    with configure_scope() as scope:
+        scope.clear_breadcrumbs()
+
     with pytest.raises(OperationalError):
         # table doesn't even exist
         sql.execute("""SELECT count(*) FROM people_person WHERE foo = %s""", [123])
@@ -200,14 +258,14 @@ def test_sql_queries(sentry_init, capture_events, with_integration):
     (event,) = events
 
     if with_integration:
-        crumb = event["breadcrumbs"][-1]
+        crumb = event["breadcrumbs"]["values"][-1]
 
         assert crumb["message"] == "SELECT count(*) FROM people_person WHERE foo = %s"
         assert crumb["data"]["db.params"] == [123]
 
 
 @pytest.mark.forked
-@pytest.mark.django_db
+@pytest_mark_django_db_decorator()
 def test_sql_dict_query_params(sentry_init, capture_events):
     sentry_init(
         integrations=[DjangoIntegration()],
@@ -223,6 +281,9 @@ def test_sql_dict_query_params(sentry_init, capture_events):
     sql = connections["postgres"].cursor()
 
     events = capture_events()
+    with configure_scope() as scope:
+        scope.clear_breadcrumbs()
+
     with pytest.raises(ProgrammingError):
         sql.execute(
             """SELECT count(*) FROM people_person WHERE foo = %(my_foo)s""",
@@ -232,13 +293,32 @@ def test_sql_dict_query_params(sentry_init, capture_events):
     capture_message("HI")
     (event,) = events
 
-    crumb = event["breadcrumbs"][-1]
+    crumb = event["breadcrumbs"]["values"][-1]
     assert crumb["message"] == (
         "SELECT count(*) FROM people_person WHERE foo = %(my_foo)s"
     )
     assert crumb["data"]["db.params"] == {"my_foo": 10}
 
 
+@pytest.mark.forked
+@pytest_mark_django_db_decorator()
+def test_response_trace(sentry_init, client, capture_events, render_span_tree):
+    pytest.importorskip("rest_framework")
+    sentry_init(
+        integrations=[DjangoIntegration()],
+        traces_sample_rate=1.0,
+    )
+
+    events = capture_events()
+    content, status, headers = client.get(reverse("rest_json_response"))
+    assert status == "200 OK"
+
+    assert (
+        '- op="view.response.render": description="serialize response"'
+        in render_span_tree(events[0])
+    )
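+    # render_span_tree is a fixture (defined outside this file) that flattens
+    # the transaction's spans into the indented `- op=...: description=...`
+    # lines matched above.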
+
+
 @pytest.mark.parametrize(
     "query",
     [
@@ -249,7 +329,7 @@ def test_sql_dict_query_params(sentry_init, capture_events):
     ],
 )
 @pytest.mark.forked
-@pytest.mark.django_db
+@pytest_mark_django_db_decorator()
 def test_sql_psycopg2_string_composition(sentry_init, capture_events, query):
     sentry_init(
         integrations=[DjangoIntegration()],
@@ -265,20 +345,24 @@ def test_sql_psycopg2_string_composition(sentry_init, capture_events, query):
 
     sql = connections["postgres"].cursor()
 
+    with configure_scope() as scope:
+        scope.clear_breadcrumbs()
+
     events = capture_events()
+
     with pytest.raises(ProgrammingError):
         sql.execute(query(psycopg2.sql), {"my_param": 10})
 
     capture_message("HI")
 
     (event,) = events
-    crumb = event["breadcrumbs"][-1]
+    crumb = event["breadcrumbs"]["values"][-1]
     assert crumb["message"] == ('SELECT %(my_param)s FROM "foobar"')
     assert crumb["data"]["db.params"] == {"my_param": 10}
 
 
 @pytest.mark.forked
-@pytest.mark.django_db
+@pytest_mark_django_db_decorator()
 def test_sql_psycopg2_placeholders(sentry_init, capture_events):
     sentry_init(
         integrations=[DjangoIntegration()],
@@ -295,6 +379,9 @@ def test_sql_psycopg2_placeholders(sentry_init, capture_events):
     sql = connections["postgres"].cursor()
 
     events = capture_events()
+    with configure_scope() as scope:
+        scope.clear_breadcrumbs()
+
     with pytest.raises(DataError):
         names = ["foo", "bar"]
         identifiers = [psycopg2.sql.Identifier(name) for name in names]
@@ -312,10 +399,10 @@ def test_sql_psycopg2_placeholders(sentry_init, capture_events):
     capture_message("HI")
 
     (event,) = events
-    for crumb in event["breadcrumbs"]:
+    for crumb in event["breadcrumbs"]["values"]:
         del crumb["timestamp"]
 
-    assert event["breadcrumbs"][-2:] == [
+    assert event["breadcrumbs"]["values"][-2:] == [
         {
             "category": "query",
             "data": {"db.paramstyle": "format"},
@@ -335,26 +422,107 @@ def test_sql_psycopg2_placeholders(sentry_init, capture_events):
     ]
 
 
+@pytest.mark.forked
+@pytest_mark_django_db_decorator(transaction=True)
+def test_django_connect_trace(sentry_init, client, capture_events, render_span_tree):
+    """
+    Verify we record a span when opening a new database.
+    """
+    sentry_init(
+        integrations=[DjangoIntegration()],
+        send_default_pii=True,
+        traces_sample_rate=1.0,
+    )
+
+    from django.db import connections
+
+    if "postgres" not in connections:
+        pytest.skip("postgres tests disabled")
+
+    # trigger Django to open a new connection by marking the existing one as None.
+    connections["postgres"].connection = None
+
+    events = capture_events()
+
+    content, status, headers = client.get(reverse("postgres_select"))
+    assert status == "200 OK"
+
+    assert '- op="db": description="connect"' in render_span_tree(events[0])
+
+
+@pytest.mark.forked
+@pytest_mark_django_db_decorator(transaction=True)
+def test_django_connect_breadcrumbs(
+    sentry_init, client, capture_events, render_span_tree
+):
+    """
+    Verify we record a breadcrumb when opening a new database.
+    """
+    sentry_init(
+        integrations=[DjangoIntegration()],
+        send_default_pii=True,
+    )
+
+    from django.db import connections
+
+    if "postgres" not in connections:
+        pytest.skip("postgres tests disabled")
+
+    # trigger Django to open a new connection by marking the existing one as None.
+    connections["postgres"].connection = None
+
+    events = capture_events()
+
+    cursor = connections["postgres"].cursor()
+    cursor.execute("select 1")
+
+    # trigger recording of event.
+    capture_message("HI")
+    (event,) = events
+    for crumb in event["breadcrumbs"]["values"]:
+        del crumb["timestamp"]
+
+    assert event["breadcrumbs"]["values"][-2:] == [
+        {"message": "connect", "category": "query", "type": "default"},
+        {"message": "select 1", "category": "query", "data": {}, "type": "default"},
+    ]
+
+
 @pytest.mark.parametrize(
-    "transaction_style,expected_transaction",
+    "transaction_style,client_url,expected_transaction,expected_source,expected_response",
     [
-        ("function_name", "tests.integrations.django.myapp.views.message"),
-        ("url", "/message"),
+        (
+            "function_name",
+            "/message",
+            "tests.integrations.django.myapp.views.message",
+            "component",
+            b"ok",
+        ),
+        ("url", "/message", "/message", "route", b"ok"),
+        ("url", "/404", "/404", "url", b"404"),
     ],
 )
 def test_transaction_style(
-    sentry_init, client, capture_events, transaction_style, expected_transaction
+    sentry_init,
+    client,
+    capture_events,
+    transaction_style,
+    client_url,
+    expected_transaction,
+    expected_source,
+    expected_response,
 ):
     sentry_init(
         integrations=[DjangoIntegration(transaction_style=transaction_style)],
         send_default_pii=True,
     )
     events = capture_events()
-    content, status, headers = client.get(reverse("message"))
-    assert b"".join(content) == b"ok"
+    content, status, headers = client.get(client_url)
+    assert b"".join(content) == expected_response
 
     (event,) = events
     assert event["transaction"] == expected_transaction
+    assert event["transaction_info"] == {"source": expected_source}
 
 
 def test_request_body(sentry_init, client, capture_events):
@@ -371,8 +539,7 @@ def test_request_body(sentry_init, client, capture_events):
     assert event["message"] == "hi"
     assert event["request"]["data"] == ""
     assert event["_meta"]["request"]["data"][""] == {
-        "len": 6,
-        "rem": [["!raw", "x", 0, 6]],
+        "rem": [["!raw", "x"]],
     }
 
     del events[:]
@@ -408,8 +575,11 @@ def test_read_request(sentry_init, client, capture_events):
     assert "data" not in event["request"]
 
 
-def test_template_exception(sentry_init, client, capture_events):
-    sentry_init(integrations=[DjangoIntegration()])
+@pytest.mark.parametrize("with_executing_integration", [[], [ExecutingIntegration()]])
+def test_template_exception(
+    sentry_init, client, capture_events, with_executing_integration
+):
+    sentry_init(integrations=[DjangoIntegration()] + with_executing_integration)
     events = capture_events()
 
     content, status, headers = client.get(reverse("template_exc"))
@@ -437,11 +607,19 @@ def test_template_exception(sentry_init, client, capture_events):
     filenames = [
         (f.get("function"), f.get("module")) for f in exception["stacktrace"]["frames"]
     ]
-    assert filenames[-3:] == [
-        (u"parse", u"django.template.base"),
-        (None, None),
-        (u"invalid_block_tag", u"django.template.base"),
-    ]
+
+    if with_executing_integration:
+        assert filenames[-3:] == [
+            ("Parser.parse", "django.template.base"),
+            (None, None),
+            ("Parser.invalid_block_tag", "django.template.base"),
+        ]
+    else:
+        assert filenames[-3:] == [
+            ("parse", "django.template.base"),
+            (None, None),
+            ("invalid_block_tag", "django.template.base"),
+        ]
 
 
 @pytest.mark.parametrize(
@@ -471,7 +649,7 @@ def test_rest_framework_basic(
     elif ct == "application/x-www-form-urlencoded":
         client.post(reverse(route), data=body)
     else:
-        assert False
+        raise AssertionError("unreachable")
 
     (error,) = exceptions
     assert isinstance(error, ZeroDivisionError)
@@ -499,7 +677,33 @@ def test_does_not_capture_403(sentry_init, client, capture_events, endpoint):
     assert not events
 
 
-def test_middleware_spans(sentry_init, client, capture_events):
+def test_render_spans(sentry_init, client, capture_events, render_span_tree):
+    sentry_init(
+        integrations=[DjangoIntegration()],
+        traces_sample_rate=1.0,
+    )
+    views_tests = [
+        (
+            reverse("template_test2"),
+            '- op="template.render": description="[user_name.html, ...]"',
+        ),
+    ]
+    if DJANGO_VERSION >= (1, 7):
+        views_tests.append(
+            (
+                reverse("template_test"),
+                '- op="template.render": description="user_name.html"',
+            ),
+        )
+
+    for url, expected_line in views_tests:
+        events = capture_events()
+        _content, status, _headers = client.get(url)
+        transaction = events[0]
+        assert expected_line in render_span_tree(transaction)
+
+
+def test_middleware_spans(sentry_init, client, capture_events, render_span_tree):
     sentry_init(
         integrations=[DjangoIntegration()],
         traces_sample_rate=1.0,
@@ -513,26 +717,40 @@ def test_middleware_spans(sentry_init, client, capture_events):
 
     assert message["message"] == "hi"
 
-    for middleware in transaction["spans"]:
-        assert middleware["op"] == "django.middleware"
-
     if DJANGO_VERSION >= (1, 10):
-        reference_value = [
-            "tests.integrations.django.myapp.settings.TestFunctionMiddleware.__call__",
-            "tests.integrations.django.myapp.settings.TestMiddleware.__call__",
-            "django.contrib.auth.middleware.AuthenticationMiddleware.__call__",
-            "django.contrib.sessions.middleware.SessionMiddleware.__call__",
-        ]
-    else:
-        reference_value = [
-            "django.contrib.sessions.middleware.SessionMiddleware.process_request",
-            "django.contrib.auth.middleware.AuthenticationMiddleware.process_request",
-            "tests.integrations.django.myapp.settings.TestMiddleware.process_request",
-            "tests.integrations.django.myapp.settings.TestMiddleware.process_response",
-            "django.contrib.sessions.middleware.SessionMiddleware.process_response",
-        ]
+        assert (
+            render_span_tree(transaction)
+            == """\
+- op="http.server": description=null
+  - op="event.django": description="django.db.reset_queries"
+  - op="event.django": description="django.db.close_old_connections"
+  - op="middleware.django": description="django.contrib.sessions.middleware.SessionMiddleware.__call__"
+    - op="middleware.django": description="django.contrib.auth.middleware.AuthenticationMiddleware.__call__"
+      - op="middleware.django": description="django.middleware.csrf.CsrfViewMiddleware.__call__"
+        - op="middleware.django": description="tests.integrations.django.myapp.settings.TestMiddleware.__call__"
+          - op="middleware.django": description="tests.integrations.django.myapp.settings.TestFunctionMiddleware.__call__"
+            - op="middleware.django": description="django.middleware.csrf.CsrfViewMiddleware.process_view"
+            - op="view.render": description="message"\
+"""
+        )
 
-    assert [t["description"] for t in transaction["spans"]] == reference_value
+    else:
+        assert (
+            render_span_tree(transaction)
+            == """\
+- op="http.server": description=null
+  - op="event.django": description="django.db.reset_queries"
+  - op="event.django": description="django.db.close_old_connections"
+  - op="middleware.django": description="django.contrib.sessions.middleware.SessionMiddleware.process_request"
+  - op="middleware.django": description="django.contrib.auth.middleware.AuthenticationMiddleware.process_request"
+  - op="middleware.django": description="tests.integrations.django.myapp.settings.TestMiddleware.process_request"
+  - op="middleware.django": description="django.middleware.csrf.CsrfViewMiddleware.process_view"
+  - op="view.render": description="message"
+  - op="middleware.django": description="tests.integrations.django.myapp.settings.TestMiddleware.process_response"
+  - op="middleware.django": description="django.middleware.csrf.CsrfViewMiddleware.process_response"
+  - op="middleware.django": description="django.contrib.sessions.middleware.SessionMiddleware.process_response"\
+"""
+        )
 
 
 def test_middleware_spans_disabled(sentry_init, client, capture_events):
@@ -547,4 +765,95 @@ def test_middleware_spans_disabled(sentry_init, client, capture_events):
 
     assert message["message"] == "hi"
 
-    assert not transaction["spans"]
+    assert len(transaction["spans"]) == 2
+
+    assert transaction["spans"][0]["op"] == "event.django"
+    assert transaction["spans"][0]["description"] == "django.db.reset_queries"
+
+    assert transaction["spans"][1]["op"] == "event.django"
+    assert transaction["spans"][1]["description"] == "django.db.close_old_connections"
+
+
+def test_csrf(sentry_init, client):
+    """
+    Assert that CSRF view decorator works even with the view wrapped in our own
+    callable.
+    """
+
+    sentry_init(integrations=[DjangoIntegration()])
+
+    content, status, _headers = client.post(reverse("csrf_hello_not_exempt"))
+    assert status.lower() == "403 forbidden"
+
+    content, status, _headers = client.post(reverse("sentryclass_csrf"))
+    assert status.lower() == "403 forbidden"
+
+    content, status, _headers = client.post(reverse("sentryclass"))
+    assert status.lower() == "200 ok"
+    assert b"".join(content) == b"ok"
+
+    content, status, _headers = client.post(reverse("classbased"))
+    assert status.lower() == "200 ok"
+    assert b"".join(content) == b"ok"
+
+    content, status, _headers = client.post(reverse("message"))
+    assert status.lower() == "200 ok"
+    assert b"".join(content) == b"ok"
+
+
+@pytest.mark.skipif(DJANGO_VERSION < (2, 0), reason="Requires Django >= 2.0")
+def test_custom_urlconf_middleware(
+    settings, sentry_init, client, capture_events, render_span_tree
+):
+    """
+    Some middlewares (for instance in django-tenants) overwrite request.urlconf.
+    Test that the resolver picks up the correct urlconf for transaction naming.
+    """
+    urlconf = "tests.integrations.django.myapp.middleware.custom_urlconf_middleware"
+    settings.ROOT_URLCONF = ""
+    settings.MIDDLEWARE.insert(0, urlconf)
+    client.application.load_middleware()
+
+    sentry_init(integrations=[DjangoIntegration()], traces_sample_rate=1.0)
+    events = capture_events()
+
+    content, status, _headers = client.get("/custom/ok")
+    assert status.lower() == "200 ok"
+    assert b"".join(content) == b"custom ok"
+
+    event = events.pop(0)
+    assert event["transaction"] == "/custom/ok"
+    assert "custom_urlconf_middleware" in render_span_tree(event)
+
+    _content, status, _headers = client.get("/custom/exc")
+    assert status.lower() == "500 internal server error"
+
+    error_event, transaction_event = events
+    assert error_event["transaction"] == "/custom/exc"
+    assert error_event["exception"]["values"][-1]["mechanism"]["type"] == "django"
+    assert transaction_event["transaction"] == "/custom/exc"
+    assert "custom_urlconf_middleware" in render_span_tree(transaction_event)
+
+    settings.MIDDLEWARE.pop(0)
+
+
+def test_get_receiver_name():
+    def dummy(a, b):
+        return a + b
+
+    name = _get_receiver_name(dummy)
+
+    if PY2:
+        assert name == "tests.integrations.django.test_basic.dummy"
+    else:
+        assert (
+            name
+            == "tests.integrations.django.test_basic.test_get_receiver_name..dummy"
+        )
+
+    a_partial = partial(dummy)
+    name = _get_receiver_name(a_partial)
+    if PY310:
+        assert name == "functools.partial()"
+    else:
+        assert name == "partial()"
diff --git a/tests/integrations/django/test_data_scrubbing.py b/tests/integrations/django/test_data_scrubbing.py
new file mode 100644
index 0000000000..c0ab14ae63
--- /dev/null
+++ b/tests/integrations/django/test_data_scrubbing.py
@@ -0,0 +1,103 @@
+from functools import partial
+import pytest
+import pytest_django
+
+from werkzeug.test import Client
+
+from sentry_sdk.integrations.django import DjangoIntegration
+
+from tests.integrations.django.myapp.wsgi import application
+
+try:
+    from django.urls import reverse
+except ImportError:
+    from django.core.urlresolvers import reverse
+
+
+# Hack around the experimental feature introduced in pytest-django 4.3.0 that
+# requires tests to explicitly declare which databases they touch; without
+# `databases="__all__"` these tests would fail on newer versions.
+pytest_mark_django_db_decorator = partial(pytest.mark.django_db)
+try:
+    pytest_version = tuple(map(int, pytest_django.__version__.split(".")))
+    if pytest_version > (4, 2, 0):
+        pytest_mark_django_db_decorator = partial(
+            pytest.mark.django_db, databases="__all__"
+        )
+except ValueError:
+    if "dev" in pytest_django.__version__:
+        pytest_mark_django_db_decorator = partial(
+            pytest.mark.django_db, databases="__all__"
+        )
+except AttributeError:
+    pass
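+# Same pytest-django multi-database workaround as in test_basic.py.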
+
+
+@pytest.fixture
+def client():
+    return Client(application)
+
+
+@pytest.mark.forked
+@pytest_mark_django_db_decorator()
+def test_scrub_django_session_cookies_removed(
+    sentry_init,
+    client,
+    capture_events,
+):
+    sentry_init(integrations=[DjangoIntegration()], send_default_pii=False)
+    events = capture_events()
+    client.set_cookie("localhost", "sessionid", "123")
+    client.set_cookie("localhost", "csrftoken", "456")
+    client.set_cookie("localhost", "foo", "bar")
+    client.get(reverse("view_exc"))
+
+    (event,) = events
+    assert "cookies" not in event["request"]
+
+
+@pytest.mark.forked
+@pytest_mark_django_db_decorator()
+def test_scrub_django_session_cookies_filtered(
+    sentry_init,
+    client,
+    capture_events,
+):
+    sentry_init(integrations=[DjangoIntegration()], send_default_pii=True)
+    events = capture_events()
+    client.set_cookie("localhost", "sessionid", "123")
+    client.set_cookie("localhost", "csrftoken", "456")
+    client.set_cookie("localhost", "foo", "bar")
+    client.get(reverse("view_exc"))
+
+    (event,) = events
+    assert event["request"]["cookies"] == {
+        "sessionid": "[Filtered]",
+        "csrftoken": "[Filtered]",
+        "foo": "bar",
+    }
+
+
+@pytest.mark.forked
+@pytest_mark_django_db_decorator()
+def test_scrub_django_custom_session_cookies_filtered(
+    sentry_init,
+    client,
+    capture_events,
+    settings,
+):
+    settings.SESSION_COOKIE_NAME = "my_sess"
+    settings.CSRF_COOKIE_NAME = "csrf_secret"
+
+    sentry_init(integrations=[DjangoIntegration()], send_default_pii=True)
+    events = capture_events()
+    client.set_cookie("localhost", "my_sess", "123")
+    client.set_cookie("localhost", "csrf_secret", "456")
+    client.set_cookie("localhost", "foo", "bar")
+    client.get(reverse("view_exc"))
+
+    (event,) = events
+    assert event["request"]["cookies"] == {
+        "my_sess": "[Filtered]",
+        "csrf_secret": "[Filtered]",
+        "foo": "bar",
+    }
diff --git a/tests/integrations/django/test_transactions.py b/tests/integrations/django/test_transactions.py
index 5cf3f17c32..6f16d88cec 100644
--- a/tests/integrations/django/test_transactions.py
+++ b/tests/integrations/django/test_transactions.py
@@ -3,22 +3,25 @@
 import pytest
 import django
 
-try:
+if django.VERSION >= (2, 0):
+    # TODO: once we stop supporting django < 2, use the real name of this
+    # function (re_path)
+    from django.urls import re_path as url
+    from django.conf.urls import include
+else:
     from django.conf.urls import url, include
-except ImportError:
-    # for Django version less than 1.4
-    from django.conf.urls.defaults import url, include  # NOQA
-
-from sentry_sdk.integrations.django.transactions import RavenResolver
-
 
 if django.VERSION < (1, 9):
     included_url_conf = (url(r"^foo/bar/(?P<param>[\w]+)", lambda x: ""),), "", ""
 else:
     included_url_conf = ((url(r"^foo/bar/(?P<param>[\w]+)", lambda x: ""),), "")
 
+from sentry_sdk.integrations.django.transactions import RavenResolver
+
+
 example_url_conf = (
     url(r"^api/(?P[\w_-]+)/store/$", lambda x: ""),
+    url(r"^api/(?P(v1|v2))/author/$", lambda x: ""),
     url(r"^report/", lambda x: ""),
     url(r"^example/", include(included_url_conf)),
 )
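+# For example, RavenResolver().resolve("/api/42/store/", example_url_conf)
+# returns "/api/{project_id}/store/" (see the tests below).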
@@ -27,7 +30,7 @@
 def test_legacy_resolver_no_match():
     resolver = RavenResolver()
     result = resolver.resolve("/foo/bar", example_url_conf)
-    assert result == "/foo/bar"
+    assert result is None
 
 
 def test_legacy_resolver_complex_match():
@@ -36,6 +39,14 @@ def test_legacy_resolver_complex_match():
     assert result == "/api/{project_id}/store/"
 
 
+def test_legacy_resolver_complex_either_match():
+    resolver = RavenResolver()
+    result = resolver.resolve("/api/v1/author/", example_url_conf)
+    assert result == "/api/{version}/author/"
+    result = resolver.resolve("/api/v2/author/", example_url_conf)
+    assert result == "/api/{version}/author/"
+
+
 def test_legacy_resolver_included_match():
     resolver = RavenResolver()
     result = resolver.resolve("/example/foo/bar/baz", example_url_conf)
diff --git a/tests/integrations/falcon/test_falcon.py b/tests/integrations/falcon/test_falcon.py
index a810da33c5..dd7aa80dfe 100644
--- a/tests/integrations/falcon/test_falcon.py
+++ b/tests/integrations/falcon/test_falcon.py
@@ -21,8 +21,14 @@ def on_get(self, req, resp):
                 sentry_sdk.capture_message("hi")
                 resp.media = "hi"
 
+        class MessageByIdResource:
+            def on_get(self, req, resp, message_id):
+                sentry_sdk.capture_message("hi")
+                resp.media = "hi"
+
         app = falcon.API()
         app.add_route("/message", MessageResource())
+        app.add_route("/message/{message_id:int}", MessageByIdResource())
 
         return app
 
@@ -53,33 +59,45 @@ def test_has_context(sentry_init, capture_events, make_client):
 
 
 @pytest.mark.parametrize(
-    "transaction_style,expected_transaction",
-    [("uri_template", "/message"), ("path", "/message")],
+    "url,transaction_style,expected_transaction,expected_source",
+    [
+        ("/message", "uri_template", "/message", "route"),
+        ("/message", "path", "/message", "url"),
+        ("/message/123456", "uri_template", "/message/{message_id:int}", "route"),
+        ("/message/123456", "path", "/message/123456", "url"),
+    ],
 )
 def test_transaction_style(
-    sentry_init, make_client, capture_events, transaction_style, expected_transaction
+    sentry_init,
+    make_client,
+    capture_events,
+    url,
+    transaction_style,
+    expected_transaction,
+    expected_source,
 ):
     integration = FalconIntegration(transaction_style=transaction_style)
     sentry_init(integrations=[integration])
     events = capture_events()
 
     client = make_client()
-    response = client.simulate_get("/message")
+    response = client.simulate_get(url)
     assert response.status == falcon.HTTP_200
 
     (event,) = events
     assert event["transaction"] == expected_transaction
+    assert event["transaction_info"] == {"source": expected_source}
 
 
-def test_errors(sentry_init, capture_exceptions, capture_events):
+def test_unhandled_errors(sentry_init, capture_exceptions, capture_events):
     sentry_init(integrations=[FalconIntegration()], debug=True)
 
-    class ZeroDivisionErrorResource:
+    class Resource:
         def on_get(self, req, resp):
             1 / 0
 
     app = falcon.API()
-    app.add_route("/", ZeroDivisionErrorResource())
+    app.add_route("/", Resource())
 
     exceptions = capture_exceptions()
     events = capture_events()
@@ -96,6 +114,75 @@ def on_get(self, req, resp):
 
     (event,) = events
     assert event["exception"]["values"][0]["mechanism"]["type"] == "falcon"
+    assert " by zero" in event["exception"]["values"][0]["value"]
+
+
+def test_raised_5xx_errors(sentry_init, capture_exceptions, capture_events):
+    sentry_init(integrations=[FalconIntegration()], debug=True)
+
+    class Resource:
+        def on_get(self, req, resp):
+            raise falcon.HTTPError(falcon.HTTP_502)
+
+    app = falcon.API()
+    app.add_route("/", Resource())
+
+    exceptions = capture_exceptions()
+    events = capture_events()
+
+    client = falcon.testing.TestClient(app)
+    client.simulate_get("/")
+
+    (exc,) = exceptions
+    assert isinstance(exc, falcon.HTTPError)
+
+    (event,) = events
+    assert event["exception"]["values"][0]["mechanism"]["type"] == "falcon"
+    assert event["exception"]["values"][0]["type"] == "HTTPError"
+
+
+def test_raised_4xx_errors(sentry_init, capture_exceptions, capture_events):
+    sentry_init(integrations=[FalconIntegration()], debug=True)
+
+    class Resource:
+        def on_get(self, req, resp):
+            raise falcon.HTTPError(falcon.HTTP_400)
+
+    app = falcon.API()
+    app.add_route("/", Resource())
+
+    exceptions = capture_exceptions()
+    events = capture_events()
+
+    client = falcon.testing.TestClient(app)
+    client.simulate_get("/")
+
+    assert len(exceptions) == 0
+    assert len(events) == 0
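+    # Unlike the 502 case above, a 4xx HTTPError is treated as an expected
+    # client error and nothing is captured.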
+
+
+def test_http_status(sentry_init, capture_exceptions, capture_events):
+    """
+    This just demonstrates, that if Falcon raises a HTTPStatus with code 500
+    (instead of a HTTPError with code 500) Sentry will not capture it.
+    """
+    sentry_init(integrations=[FalconIntegration()], debug=True)
+
+    class Resource:
+        def on_get(self, req, resp):
+            raise falcon.http_status.HTTPStatus(falcon.HTTP_508)
+
+    app = falcon.API()
+    app.add_route("/", Resource())
+
+    exceptions = capture_exceptions()
+    events = capture_events()
+
+    client = falcon.testing.TestClient(app)
+    client.simulate_get("/")
+
+    assert len(exceptions) == 0
+    assert len(events) == 0
 
 
 def test_falcon_large_json_request(sentry_init, capture_events):
@@ -120,9 +207,9 @@ def on_post(self, req, resp):
 
     (event,) = events
     assert event["_meta"]["request"]["data"]["foo"]["bar"] == {
-        "": {"len": 2000, "rem": [["!limit", "x", 509, 512]]}
+        "": {"len": 2000, "rem": [["!limit", "x", 1021, 1024]]}
     }
-    assert len(event["request"]["data"]["foo"]["bar"]) == 512
+    assert len(event["request"]["data"]["foo"]["bar"]) == 1024
 
 
 @pytest.mark.parametrize("data", [{}, []], ids=["empty-dict", "empty-list"])
diff --git a/tests/integrations/fastapi/__init__.py b/tests/integrations/fastapi/__init__.py
new file mode 100644
index 0000000000..7f667e6f75
--- /dev/null
+++ b/tests/integrations/fastapi/__init__.py
@@ -0,0 +1,3 @@
+import pytest
+
+pytest.importorskip("fastapi")
diff --git a/tests/integrations/fastapi/test_fastapi.py b/tests/integrations/fastapi/test_fastapi.py
new file mode 100644
index 0000000000..9c24ce2e44
--- /dev/null
+++ b/tests/integrations/fastapi/test_fastapi.py
@@ -0,0 +1,183 @@
+import json
+import threading
+
+import pytest
+from sentry_sdk.integrations.fastapi import FastApiIntegration
+
+fastapi = pytest.importorskip("fastapi")
+
+from fastapi import FastAPI
+from fastapi.testclient import TestClient
+from sentry_sdk import capture_message
+from sentry_sdk.integrations.starlette import StarletteIntegration
+from sentry_sdk.integrations.asgi import SentryAsgiMiddleware
+
+
+def fastapi_app_factory():
+    app = FastAPI()
+
+    @app.get("/message")
+    async def _message():
+        capture_message("Hi")
+        return {"message": "Hi"}
+
+    @app.get("/message/{message_id}")
+    async def _message_with_id(message_id):
+        capture_message("Hi")
+        return {"message": "Hi"}
+
+    @app.get("/sync/thread_ids")
+    def _thread_ids_sync():
+        return {
+            "main": str(threading.main_thread().ident),
+            "active": str(threading.current_thread().ident),
+        }
+
+    @app.get("/async/thread_ids")
+    async def _thread_ids_async():
+        return {
+            "main": str(threading.main_thread().ident),
+            "active": str(threading.current_thread().ident),
+        }
+
+    return app
+
+
+@pytest.mark.asyncio
+async def test_response(sentry_init, capture_events):
+    # FastAPI is heavily based on Starlette so we also need
+    # to enable StarletteIntegration.
+    # In the future this will be auto enabled.
+    sentry_init(
+        integrations=[StarletteIntegration(), FastApiIntegration()],
+        traces_sample_rate=1.0,
+        send_default_pii=True,
+        debug=True,
+    )
+
+    app = fastapi_app_factory()
+
+    events = capture_events()
+
+    client = TestClient(app)
+    response = client.get("/message")
+
+    assert response.json() == {"message": "Hi"}
+
+    assert len(events) == 2
+
+    (message_event, transaction_event) = events
+    assert message_event["message"] == "Hi"
+    assert transaction_event["transaction"] == "/message"
+
+
+@pytest.mark.parametrize(
+    "url,transaction_style,expected_transaction,expected_source",
+    [
+        (
+            "/message",
+            "url",
+            "/message",
+            "route",
+        ),
+        (
+            "/message",
+            "endpoint",
+            "tests.integrations.fastapi.test_fastapi.fastapi_app_factory.._message",
+            "component",
+        ),
+        (
+            "/message/123456",
+            "url",
+            "/message/{message_id}",
+            "route",
+        ),
+        (
+            "/message/123456",
+            "endpoint",
+            "tests.integrations.fastapi.test_fastapi.fastapi_app_factory.._message_with_id",
+            "component",
+        ),
+    ],
+)
+def test_transaction_style(
+    sentry_init,
+    capture_events,
+    url,
+    transaction_style,
+    expected_transaction,
+    expected_source,
+):
+    sentry_init(
+        integrations=[
+            StarletteIntegration(transaction_style=transaction_style),
+            FastApiIntegration(transaction_style=transaction_style),
+        ],
+    )
+    app = fastapi_app_factory()
+
+    events = capture_events()
+
+    client = TestClient(app)
+    client.get(url)
+
+    (event,) = events
+    assert event["transaction"] == expected_transaction
+    assert event["transaction_info"] == {"source": expected_source}
+
+    # Assert that state is not leaked
+    events.clear()
+    capture_message("foo")
+    (event,) = events
+
+    assert "request" not in event
+    assert "transaction" not in event
+
+
+def test_legacy_setup(
+    sentry_init,
+    capture_events,
+):
+    # Check that behaviour does not change
+    # if the user just adds the new Integrations
+    # and forgets to remove SentryAsgiMiddleware
+    sentry_init()
+    app = fastapi_app_factory()
+    asgi_app = SentryAsgiMiddleware(app)
+
+    events = capture_events()
+
+    client = TestClient(asgi_app)
+    client.get("/message/123456")
+
+    (event,) = events
+    assert event["transaction"] == "/message/{message_id}"
+
+
+@pytest.mark.parametrize("endpoint", ["/sync/thread_ids", "/async/thread_ids"])
+def test_active_thread_id(sentry_init, capture_envelopes, endpoint):
+    sentry_init(
+        traces_sample_rate=1.0,
+        _experiments={"profiles_sample_rate": 1.0},
+    )
+    app = fastapi_app_factory()
+    asgi_app = SentryAsgiMiddleware(app)
+
+    envelopes = capture_envelopes()
+
+    client = TestClient(asgi_app)
+    response = client.get(endpoint)
+    assert response.status_code == 200
+
+    data = json.loads(response.content)
+
+    envelopes = [envelope for envelope in envelopes]
+    assert len(envelopes) == 1
+
+    profiles = [item for item in envelopes[0].items if item.type == "profile"]
+    assert len(profiles) == 1
+
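+    # The endpoint echoed its handling thread's ident; the profile must have
+    # recorded that same thread as the active one for this transaction.
+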
+    for profile in profiles:
+        transactions = profile.payload.json["transactions"]
+        assert len(transactions) == 1
+        assert str(data["active"]) == transactions[0]["active_thread_id"]
diff --git a/tests/integrations/flask/test_flask.py b/tests/integrations/flask/test_flask.py
index 96d45af6a3..8983c4e5ff 100644
--- a/tests/integrations/flask/test_flask.py
+++ b/tests/integrations/flask/test_flask.py
@@ -6,12 +6,20 @@
 
 flask = pytest.importorskip("flask")
 
-from flask import Flask, Response, request, abort, stream_with_context
+from flask import (
+    Flask,
+    Response,
+    request,
+    abort,
+    stream_with_context,
+    render_template_string,
+)
 from flask.views import View
 
 from flask_login import LoginManager, login_user
 
 from sentry_sdk import (
+    set_tag,
     configure_scope,
     capture_message,
     capture_exception,
@@ -38,13 +46,18 @@ def hi():
         capture_message("hi")
         return "ok"
 
+    @app.route("/message/")
+    def hi_with_id(message_id):
+        capture_message("hi again")
+        return "ok"
+
     return app
 
 
 @pytest.fixture(params=("auto", "manual"))
 def integration_enabled_params(request):
     if request.param == "auto":
-        return {"_experiments": {"auto_enabling_integrations": True}}
+        return {"auto_enabling_integrations": True}
     elif request.param == "manual":
         return {"integrations": [flask_sentry.FlaskIntegration()]}
     else:
@@ -66,10 +79,22 @@ def test_has_context(sentry_init, app, capture_events):
 
 
 @pytest.mark.parametrize(
-    "transaction_style,expected_transaction", [("endpoint", "hi"), ("url", "/message")]
+    "url,transaction_style,expected_transaction,expected_source",
+    [
+        ("/message", "endpoint", "hi", "component"),
+        ("/message", "url", "/message", "route"),
+        ("/message/123456", "endpoint", "hi_with_id", "component"),
+        ("/message/123456", "url", "/message/", "route"),
+    ],
 )
 def test_transaction_style(
-    sentry_init, app, capture_events, transaction_style, expected_transaction
+    sentry_init,
+    app,
+    capture_events,
+    url,
+    transaction_style,
+    expected_transaction,
+    expected_source,
 ):
     sentry_init(
         integrations=[
@@ -79,11 +104,12 @@ def test_transaction_style(
     events = capture_events()
 
     client = app.test_client()
-    response = client.get("/message")
+    response = client.get(url)
     assert response.status_code == 200
 
     (event,) = events
     assert event["transaction"] == expected_transaction
+    assert event["transaction_info"] == {"source": expected_source}
 
 
 @pytest.mark.parametrize("debug", (True, False))
@@ -237,22 +263,21 @@ def index():
 
     (event,) = events
     assert event["_meta"]["request"]["data"]["foo"]["bar"] == {
-        "": {"len": 2000, "rem": [["!limit", "x", 509, 512]]}
+        "": {"len": 2000, "rem": [["!limit", "x", 1021, 1024]]}
     }
-    assert len(event["request"]["data"]["foo"]["bar"]) == 512
+    assert len(event["request"]["data"]["foo"]["bar"]) == 1024
 
 
 def test_flask_session_tracking(sentry_init, capture_envelopes, app):
     sentry_init(
         integrations=[flask_sentry.FlaskIntegration()],
         release="demo-release",
-        _experiments=dict(auto_session_tracking=True,),
     )
 
     @app.route("/")
     def index():
         with configure_scope() as scope:
-            scope.set_user({"ip_address": "1.2.3.4", "id": 42})
+            scope.set_user({"ip_address": "1.2.3.4", "id": "42"})
         try:
             raise ValueError("stuff")
         except Exception:
@@ -273,16 +298,15 @@ def index():
     first_event = first_event.get_event()
     error_event = error_event.get_event()
     session = session.items[0].payload.json
+    aggregates = session["aggregates"]
 
     assert first_event["exception"]["values"][0]["type"] == "ValueError"
     assert error_event["exception"]["values"][0]["type"] == "ZeroDivisionError"
-    assert session["status"] == "crashed"
-    assert session["did"] == "42"
-    assert session["errors"] == 2
-    assert session["init"]
+
+    assert len(aggregates) == 1
+    assert aggregates[0]["crashed"] == 1
+    assert aggregates[0]["started"]
     assert session["attrs"]["release"] == "demo-release"
-    assert session["attrs"]["ip_address"] == "1.2.3.4"
-    assert session["attrs"]["user_agent"] == "blafasel/1.0"
 
 
 @pytest.mark.parametrize("data", [{}, []], ids=["empty-dict", "empty-list"])
@@ -328,12 +352,45 @@ def index():
 
     (event,) = events
     assert event["_meta"]["request"]["data"]["foo"] == {
-        "": {"len": 2000, "rem": [["!limit", "x", 509, 512]]}
+        "": {"len": 2000, "rem": [["!limit", "x", 1021, 1024]]}
     }
-    assert len(event["request"]["data"]["foo"]) == 512
+    assert len(event["request"]["data"]["foo"]) == 1024
+
+
+def test_flask_formdata_request_appear_transaction_body(
+    sentry_init, capture_events, app
+):
+    """
+    Test that ensures that transaction request data contains body, even if no exception was raised
+    """
+    sentry_init(integrations=[flask_sentry.FlaskIntegration()], traces_sample_rate=1.0)
+
+    data = {"username": "sentry-user", "age": "26"}
+
+    @app.route("/", methods=["POST"])
+    def index():
+        assert request.form["username"] == data["username"]
+        assert request.form["age"] == data["age"]
+        assert not request.get_data()
+        assert not request.get_json()
+        set_tag("view", "yes")
+        capture_message("hi")
+        return "ok"
+
+    events = capture_events()
+
+    client = app.test_client()
+    response = client.post("/", data=data)
+    assert response.status_code == 200
+
+    event, transaction_event = events
 
+    assert "request" in transaction_event
+    assert "data" in transaction_event["request"]
+    assert transaction_event["request"]["data"] == data
 
-@pytest.mark.parametrize("input_char", [u"a", b"a"])
+
+@pytest.mark.parametrize("input_char", ["a", b"a"])
 def test_flask_too_large_raw_request(sentry_init, input_char, capture_events, app):
     sentry_init(integrations=[flask_sentry.FlaskIntegration()], request_bodies="small")
 
@@ -357,9 +414,7 @@ def index():
     assert response.status_code == 200
 
     (event,) = events
-    assert event["_meta"]["request"]["data"] == {
-        "": {"len": 2000, "rem": [["!config", "x", 0, 2000]]}
-    }
+    assert event["_meta"]["request"]["data"] == {"": {"rem": [["!config", "x"]]}}
     assert not event["request"]["data"]
 
 
@@ -384,13 +439,11 @@ def index():
 
     (event,) = events
     assert event["_meta"]["request"]["data"]["foo"] == {
-        "": {"len": 2000, "rem": [["!limit", "x", 509, 512]]}
+        "": {"len": 2000, "rem": [["!limit", "x", 1021, 1024]]}
     }
-    assert len(event["request"]["data"]["foo"]) == 512
+    assert len(event["request"]["data"]["foo"]) == 1024
 
-    assert event["_meta"]["request"]["data"]["file"] == {
-        "": {"len": 0, "rem": [["!raw", "x", 0, 0]]}
-    }
+    assert event["_meta"]["request"]["data"]["file"] == {"": {"rem": [["!raw", "x"]]}}
     assert not event["request"]["data"]["file"]
 
 
@@ -630,20 +683,34 @@ def zerodivision(e):
 def test_tracing_success(sentry_init, capture_events, app):
     sentry_init(traces_sample_rate=1.0, integrations=[flask_sentry.FlaskIntegration()])
 
+    @app.before_request
+    def _():
+        set_tag("before_request", "yes")
+
+    @app.route("/message_tx")
+    def hi_tx():
+        set_tag("view", "yes")
+        capture_message("hi")
+        return "ok"
+
     events = capture_events()
 
     with app.test_client() as client:
-        response = client.get("/message")
+        response = client.get("/message_tx")
         assert response.status_code == 200
 
     message_event, transaction_event = events
 
     assert transaction_event["type"] == "transaction"
-    assert transaction_event["transaction"] == "hi"
+    assert transaction_event["transaction"] == "hi_tx"
     assert transaction_event["contexts"]["trace"]["status"] == "ok"
+    assert transaction_event["tags"]["view"] == "yes"
+    assert transaction_event["tags"]["before_request"] == "yes"
 
     assert message_event["message"] == "hi"
-    assert message_event["transaction"] == "hi"
+    assert message_event["transaction"] == "hi_tx"
+    assert message_event["tags"]["view"] == "yes"
+    assert message_event["tags"]["before_request"] == "yes"
 
 
 def test_tracing_error(sentry_init, capture_events, app):
@@ -691,3 +758,34 @@ def dispatch_request(self):
 
     assert event["message"] == "hi"
     assert event["transaction"] == "hello_class"
+
+
+def test_sentry_trace_context(sentry_init, app, capture_events):
+    sentry_init(integrations=[flask_sentry.FlaskIntegration()])
+    events = capture_events()
+
+    @app.route("/")
+    def index():
+        sentry_span = Hub.current.scope.span
+        capture_message(sentry_span.to_traceparent())
+        return render_template_string("{{ sentry_trace }}")
+
+    with app.test_client() as client:
+        response = client.get("/")
+        assert response.status_code == 200
+        assert response.data.decode(
+            "utf-8"
+        ) == '<meta name="sentry-trace" content="%s" />' % (events[0]["message"],)
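+        # The integration itself supplies the `sentry_trace` template value;
+        # the next test verifies a caller-supplied value is left untouched.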
+
+
+def test_dont_override_sentry_trace_context(sentry_init, app):
+    sentry_init(integrations=[flask_sentry.FlaskIntegration()])
+
+    @app.route("/")
+    def index():
+        return render_template_string("{{ sentry_trace }}", sentry_trace="hi")
+
+    with app.test_client() as client:
+        response = client.get("/")
+        assert response.status_code == 200
+        assert response.data == b"hi"
diff --git a/tests/integrations/gcp/test_gcp.py b/tests/integrations/gcp/test_gcp.py
new file mode 100644
index 0000000000..3ccdbd752a
--- /dev/null
+++ b/tests/integrations/gcp/test_gcp.py
@@ -0,0 +1,369 @@
+"""
+# GCP Cloud Functions unit tests
+
+"""
+import json
+from textwrap import dedent
+import tempfile
+import sys
+import subprocess
+
+import pytest
+import os.path
+import os
+
+pytestmark = pytest.mark.skipif(
+    not hasattr(tempfile, "TemporaryDirectory"), reason="need Python 3.2+"
+)
+
+
+FUNCTIONS_PRELUDE = """
+from unittest.mock import Mock
+import __main__ as gcp_functions
+import os
+
+# Initializing all the necessary environment variables
+os.environ["FUNCTION_TIMEOUT_SEC"] = "3"
+os.environ["FUNCTION_NAME"] = "Google Cloud function"
+os.environ["ENTRY_POINT"] = "cloud_function"
+os.environ["FUNCTION_IDENTITY"] = "func_ID"
+os.environ["FUNCTION_REGION"] = "us-central1"
+os.environ["GCP_PROJECT"] = "serverless_project"
+
+def log_return_value(func):
+    def inner(*args, **kwargs):
+        rv = func(*args, **kwargs)
+
+        print("\\nRETURN VALUE: {}\\n".format(json.dumps(rv)))
+
+        return rv
+
+    return inner
+
+gcp_functions.worker_v1 = Mock()
+gcp_functions.worker_v1.FunctionHandler = Mock()
+gcp_functions.worker_v1.FunctionHandler.invoke_user_function = log_return_value(cloud_function)
+
+
+import sentry_sdk
+from sentry_sdk.integrations.gcp import GcpIntegration
+import json
+import time
+
+from sentry_sdk.transport import HttpTransport
+
+def event_processor(event):
+    # Adding delay which would allow us to capture events.
+    time.sleep(1)
+    return event
+
+def envelope_processor(envelope):
+    (item,) = envelope.items
+    return item.get_bytes()
+
+class TestTransport(HttpTransport):
+    def _send_event(self, event):
+        event = event_processor(event)
+        # Writing a single string to stdout holds the GIL (seems like) and
+        # therefore cannot be interleaved with other threads. This is why we
+        # explicitly add a newline at the end even though `print` would provide
+        # us one.
+        print("\\nEVENT: {}\\n".format(json.dumps(event)))
+
+    def _send_envelope(self, envelope):
+        envelope = envelope_processor(envelope)
+        print("\\nENVELOPE: {}\\n".format(envelope.decode(\"utf-8\")))
+
+
+def init_sdk(timeout_warning=False, **extra_init_args):
+    sentry_sdk.init(
+        dsn="https://123abc@example.com/123",
+        transport=TestTransport,
+        integrations=[GcpIntegration(timeout_warning=timeout_warning)],
+        shutdown_timeout=10,
+        # excepthook -> dedupe -> event_processor client report gets added
+        # which we don't really care about for these tests
+        send_client_reports=False,
+        **extra_init_args
+    )
+
+"""
+
+
+@pytest.fixture
+def run_cloud_function():
+    def inner(code, subprocess_kwargs=()):
+
+        event = []
+        envelope = []
+        return_value = None
+
+        # STEP : bundle the cloud function code together with the SDK
+        # (build an sdist of this repo and pip-install it next to main.py)
+
+        subprocess_kwargs = dict(subprocess_kwargs)
+
+        with tempfile.TemporaryDirectory() as tmpdir:
+            main_py = os.path.join(tmpdir, "main.py")
+            with open(main_py, "w") as f:
+                f.write(code)
+
+            setup_cfg = os.path.join(tmpdir, "setup.cfg")
+
+            with open(setup_cfg, "w") as f:
+                f.write("[install]\nprefix=")
+
+            subprocess.check_call(
+                [sys.executable, "setup.py", "sdist", "-d", os.path.join(tmpdir, "..")],
+                **subprocess_kwargs
+            )
+
+            subprocess.check_call(
+                "pip install ../*.tar.gz -t .",
+                cwd=tmpdir,
+                shell=True,
+                **subprocess_kwargs
+            )
+
+            stream = os.popen("python {}/main.py".format(tmpdir))
+            stream_data = stream.read()
+
+            stream.close()
+
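+            # The markers below are printed by the prelude: EVENT:/ENVELOPE:
+            # come from TestTransport, RETURN VALUE: from log_return_value.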
+            for line in stream_data.splitlines():
+                print("GCP:", line)
+                if line.startswith("EVENT: "):
+                    line = line[len("EVENT: ") :]
+                    event = json.loads(line)
+                elif line.startswith("ENVELOPE: "):
+                    line = line[len("ENVELOPE: ") :]
+                    envelope = json.loads(line)
+                elif line.startswith("RETURN VALUE: "):
+                    line = line[len("RETURN VALUE: ") :]
+                    return_value = json.loads(line)
+                else:
+                    continue
+
+
+        return envelope, event, return_value
+
+    return inner
+
+
+def test_handled_exception(run_cloud_function):
+    envelope, event, return_value = run_cloud_function(
+        dedent(
+            """
+        functionhandler = None
+        event = {}
+        def cloud_function(functionhandler, event):
+            raise Exception("something went wrong")
+        """
+        )
+        + FUNCTIONS_PRELUDE
+        + dedent(
+            """
+        init_sdk(timeout_warning=False)
+        gcp_functions.worker_v1.FunctionHandler.invoke_user_function(functionhandler, event)
+        """
+        )
+    )
+    assert event["level"] == "error"
+    (exception,) = event["exception"]["values"]
+
+    assert exception["type"] == "Exception"
+    assert exception["value"] == "something went wrong"
+    assert exception["mechanism"] == {"type": "gcp", "handled": False}
+
+
+def test_unhandled_exception(run_cloud_function):
+    envelope, event, return_value = run_cloud_function(
+        dedent(
+            """
+        functionhandler = None
+        event = {}
+        def cloud_function(functionhandler, event):
+            x = 3/0
+            return "3"
+        """
+        )
+        + FUNCTIONS_PRELUDE
+        + dedent(
+            """
+        init_sdk(timeout_warning=False)
+        gcp_functions.worker_v1.FunctionHandler.invoke_user_function(functionhandler, event)
+        """
+        )
+    )
+    assert event["level"] == "error"
+    (exception,) = event["exception"]["values"]
+
+    assert exception["type"] == "ZeroDivisionError"
+    assert exception["value"] == "division by zero"
+    assert exception["mechanism"] == {"type": "gcp", "handled": False}
+
+
+def test_timeout_error(run_cloud_function):
+    envelope, event, return_value = run_cloud_function(
+        dedent(
+            """
+        functionhandler = None
+        event = {}
+        def cloud_function(functionhandler, event):
+            time.sleep(10)
+            return "3"
+        """
+        )
+        + FUNCTIONS_PRELUDE
+        + dedent(
+            """
+        init_sdk(timeout_warning=True)
+        gcp_functions.worker_v1.FunctionHandler.invoke_user_function(functionhandler, event)
+        """
+        )
+    )
+    assert event["level"] == "error"
+    (exception,) = event["exception"]["values"]
+
+    assert exception["type"] == "ServerlessTimeoutWarning"
+    assert (
+        exception["value"]
+        == "WARNING : Function is expected to get timed out. Configured timeout duration = 3 seconds."
+    )
+    assert exception["mechanism"] == {"type": "threading", "handled": False}
+
+
+def test_performance_no_error(run_cloud_function):
+    envelope, event, return_value = run_cloud_function(
+        dedent(
+            """
+        functionhandler = None
+        event = {}
+        def cloud_function(functionhandler, event):
+            return "test_string"
+        """
+        )
+        + FUNCTIONS_PRELUDE
+        + dedent(
+            """
+        init_sdk(traces_sample_rate=1.0)
+        gcp_functions.worker_v1.FunctionHandler.invoke_user_function(functionhandler, event)
+        """
+        )
+    )
+
+    assert envelope["type"] == "transaction"
+    assert envelope["contexts"]["trace"]["op"] == "function.gcp"
+    assert envelope["transaction"].startswith("Google Cloud function")
+    assert envelope["transaction_info"] == {"source": "component"}
+    assert envelope["transaction"] in envelope["request"]["url"]
+
+
+def test_performance_error(run_cloud_function):
+    envelope, event, return_value = run_cloud_function(
+        dedent(
+            """
+        functionhandler = None
+        event = {}
+        def cloud_function(functionhandler, event):
+            raise Exception("something went wrong")
+        """
+        )
+        + FUNCTIONS_PRELUDE
+        + dedent(
+            """
+        init_sdk(traces_sample_rate=1.0)
+        gcp_functions.worker_v1.FunctionHandler.invoke_user_function(functionhandler, event)
+        """
+        )
+    )
+
+    assert envelope["type"] == "transaction"
+    assert envelope["contexts"]["trace"]["op"] == "function.gcp"
+    assert envelope["transaction"].startswith("Google Cloud function")
+    assert envelope["transaction"] in envelope["request"]["url"]
+    assert event["level"] == "error"
+    (exception,) = event["exception"]["values"]
+
+    assert exception["type"] == "Exception"
+    assert exception["value"] == "something went wrong"
+    assert exception["mechanism"] == {"type": "gcp", "handled": False}
+
+
+def test_traces_sampler_gets_correct_values_in_sampling_context(
+    run_cloud_function, DictionaryContaining  # noqa:N803
+):
+    # TODO: There are some decent sized hacks below. For more context, see the
+    # long comment in the test of the same name in the AWS integration. The
+    # situations there and here aren't identical, but they're similar enough
+    # that solving one would probably solve both.
+
+    import inspect
+
+    envelopes, events, return_value = run_cloud_function(
+        dedent(
+            """
+            functionhandler = None
+            event = {
+                "type": "chase",
+                "chasers": ["Maisey", "Charlie"],
+                "num_squirrels": 2,
+            }
+            def cloud_function(functionhandler, event):
+                # this runs after the transaction has started, which means we
+                # can make assertions about traces_sampler
+                try:
+                    traces_sampler.assert_any_call(
+                        DictionaryContaining({
+                            "gcp_env": DictionaryContaining({
+                                "function_name": "chase_into_tree",
+                                "function_region": "dogpark",
+                                "function_project": "SquirrelChasing",
+                            }),
+                            "gcp_event": {
+                                "type": "chase",
+                                "chasers": ["Maisey", "Charlie"],
+                                "num_squirrels": 2,
+                            },
+                        })
+                    )
+                except AssertionError:
+                    # catch the error and return it because the error itself will
+                    # get swallowed by the SDK as an "internal exception"
+                    return {"AssertionError raised": True,}
+
+                return {"AssertionError raised": False,}
+            """
+        )
+        + FUNCTIONS_PRELUDE
+        + dedent(inspect.getsource(DictionaryContaining))
+        + dedent(
+            """
+            os.environ["FUNCTION_NAME"] = "chase_into_tree"
+            os.environ["FUNCTION_REGION"] = "dogpark"
+            os.environ["GCP_PROJECT"] = "SquirrelChasing"
+
+            def _safe_is_equal(x, y):
+                # copied from conftest.py - see docstring and comments there
+                try:
+                    is_equal = x.__eq__(y)
+                except AttributeError:
+                    is_equal = NotImplemented
+
+                if is_equal == NotImplemented:
+                    return x == y
+
+                return is_equal
+
+            traces_sampler = Mock(return_value=True)
+
+            init_sdk(
+                traces_sampler=traces_sampler,
+            )
+
+            gcp_functions.worker_v1.FunctionHandler.invoke_user_function(functionhandler, event)
+            """
+        )
+    )
+
+    assert return_value["AssertionError raised"] is False
diff --git a/tests/integrations/httpx/__init__.py b/tests/integrations/httpx/__init__.py
new file mode 100644
index 0000000000..1afd90ea3a
--- /dev/null
+++ b/tests/integrations/httpx/__init__.py
@@ -0,0 +1,3 @@
+import pytest
+
+pytest.importorskip("httpx")
diff --git a/tests/integrations/httpx/test_httpx.py b/tests/integrations/httpx/test_httpx.py
new file mode 100644
index 0000000000..4623f13348
--- /dev/null
+++ b/tests/integrations/httpx/test_httpx.py
@@ -0,0 +1,66 @@
+import asyncio
+
+import httpx
+
+from sentry_sdk import capture_message, start_transaction
+from sentry_sdk.integrations.httpx import HttpxIntegration
+
+
+def test_crumb_capture_and_hint(sentry_init, capture_events):
+    def before_breadcrumb(crumb, hint):
+        crumb["data"]["extra"] = "foo"
+        return crumb
+
+    sentry_init(integrations=[HttpxIntegration()], before_breadcrumb=before_breadcrumb)
+    clients = (httpx.Client(), httpx.AsyncClient())
+    for i, c in enumerate(clients):
+        with start_transaction():
+            events = capture_events()
+
+            url = "https://httpbin.org/status/200"
+            if not asyncio.iscoroutinefunction(c.get):
+                response = c.get(url)
+            else:
+                response = asyncio.get_event_loop().run_until_complete(c.get(url))
+
+            assert response.status_code == 200
+            capture_message("Testing!")
+
+            (event,) = events
+            # the request is sent twice (once per client), so get the breadcrumb by index
+            crumb = event["breadcrumbs"]["values"][i]
+            assert crumb["type"] == "http"
+            assert crumb["category"] == "httplib"
+            assert crumb["data"] == {
+                "url": url,
+                "method": "GET",
+                "status_code": 200,
+                "reason": "OK",
+                "extra": "foo",
+            }
+
+
+def test_outgoing_trace_headers(sentry_init):
+    sentry_init(traces_sample_rate=1.0, integrations=[HttpxIntegration()])
+    clients = (httpx.Client(), httpx.AsyncClient())
+    for i, c in enumerate(clients):
+        with start_transaction(
+            name="/interactions/other-dogs/new-dog",
+            op="greeting.sniff",
+            # make the trace_id differ between transactions
+            trace_id=f"012345678901234567890123456789{i}",
+        ) as transaction:
+            url = "https://httpbin.org/status/200"
+            if not asyncio.iscoroutinefunction(c.get):
+                response = c.get(url)
+            else:
+                response = asyncio.get_event_loop().run_until_complete(c.get(url))
+
+            request_span = transaction._span_recorder.spans[-1]
+            assert response.request.headers[
+                "sentry-trace"
+            ] == "{trace_id}-{parent_span_id}-{sampled}".format(
+                trace_id=transaction.trace_id,
+                parent_span_id=request_span.span_id,
+                sampled=1,
+            )
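+
+
+# For reference, the propagated "sentry-trace" header asserted above has the
+# shape "<trace_id>-<span_id>-<sampled>". A minimal parser sketch (the helper
+# name is illustrative, not SDK API):
+def parse_sentry_trace(header):
+    trace_id, span_id, sampled = header.split("-")
+    return {"trace_id": trace_id, "span_id": span_id, "sampled": sampled == "1"}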
diff --git a/tests/integrations/logging/test_logging.py b/tests/integrations/logging/test_logging.py
index 222906e7e2..de1c55e26f 100644
--- a/tests/integrations/logging/test_logging.py
+++ b/tests/integrations/logging/test_logging.py
@@ -1,9 +1,11 @@
+# coding: utf-8
 import sys
 
 import pytest
 import logging
+import warnings
 
-from sentry_sdk.integrations.logging import LoggingIntegration
+from sentry_sdk.integrations.logging import LoggingIntegration, ignore_logger
 
 other_logger = logging.getLogger("testfoo")
 logger = logging.getLogger(__name__)
@@ -26,21 +28,26 @@ def test_logging_works_with_many_loggers(sentry_init, capture_events, logger):
     assert event["level"] == "fatal"
     assert not event["logentry"]["params"]
     assert event["logentry"]["message"] == "LOL"
-    assert any(crumb["message"] == "bread" for crumb in event["breadcrumbs"])
+    assert any(crumb["message"] == "bread" for crumb in event["breadcrumbs"]["values"])
 
 
 @pytest.mark.parametrize("integrations", [None, [], [LoggingIntegration()]])
-def test_logging_defaults(integrations, sentry_init, capture_events):
+@pytest.mark.parametrize(
+    "kwargs", [{"exc_info": None}, {}, {"exc_info": 0}, {"exc_info": False}]
+)
+def test_logging_defaults(integrations, sentry_init, capture_events, kwargs):
     sentry_init(integrations=integrations)
     events = capture_events()
 
     logger.info("bread")
-    logger.critical("LOL")
+    logger.critical("LOL", **kwargs)
     (event,) = events
 
     assert event["level"] == "fatal"
-    assert any(crumb["message"] == "bread" for crumb in event["breadcrumbs"])
-    assert not any(crumb["message"] == "LOL" for crumb in event["breadcrumbs"])
+    assert any(crumb["message"] == "bread" for crumb in event["breadcrumbs"]["values"])
+    assert not any(
+        crumb["message"] == "LOL" for crumb in event["breadcrumbs"]["values"]
+    )
     assert "threads" not in event
 
 
@@ -57,7 +64,7 @@ def test_logging_extra_data(sentry_init, capture_events):
     assert event["extra"] == {"bar": 69}
     assert any(
         crumb["message"] == "bread" and crumb["data"] == {"foo": 42}
-        for crumb in event["breadcrumbs"]
+        for crumb in event["breadcrumbs"]["values"]
     )
 
 
@@ -80,7 +87,10 @@ def test_logging_stack(sentry_init, capture_events):
     logger.error("first", exc_info=True)
     logger.error("second")
 
-    event_with, event_without, = events
+    (
+        event_with,
+        event_without,
+    ) = events
 
     assert event_with["level"] == "error"
     assert event_with["threads"]["values"][0]["stacktrace"]["frames"]
@@ -106,6 +116,45 @@ def test_logging_level(sentry_init, capture_events):
     assert not events
 
 
+def test_custom_log_level_names(sentry_init, capture_events):
+    levels = {
+        logging.DEBUG: "debug",
+        logging.INFO: "info",
+        logging.WARN: "warning",
+        logging.WARNING: "warning",
+        logging.ERROR: "error",
+        logging.CRITICAL: "fatal",
+        logging.FATAL: "fatal",
+    }
+
+    # set custom log level names
+    # fmt: off
+    logging.addLevelName(logging.DEBUG, u"custom level debüg: ")
+    # fmt: on
+    logging.addLevelName(logging.INFO, "")
+    logging.addLevelName(logging.WARN, "custom level warn: ")
+    logging.addLevelName(logging.WARNING, "custom level warning: ")
+    logging.addLevelName(logging.ERROR, None)
+    logging.addLevelName(logging.CRITICAL, "custom level critical: ")
+    logging.addLevelName(logging.FATAL, "custom level 🔥: ")
+
+    for logging_level, sentry_level in levels.items():
+        logger.setLevel(logging_level)
+        sentry_init(
+            integrations=[LoggingIntegration(event_level=logging_level)],
+            default_integrations=False,
+        )
+        events = capture_events()
+
+        logger.log(logging_level, "Trying level %s", logging_level)
+        assert events
+        assert events[0]["level"] == sentry_level
+        assert events[0]["logentry"]["message"] == "Trying level %s"
+        assert events[0]["logentry"]["params"] == [logging_level]
+
+        del events[:]
+
+
 def test_logging_filters(sentry_init, capture_events):
     sentry_init(integrations=[LoggingIntegration()], default_integrations=False)
     events = capture_events()
@@ -126,3 +175,60 @@ def filter(self, record):
 
     (event,) = events
     assert event["logentry"]["message"] == "hi"
+
+
+def test_logging_captured_warnings(sentry_init, capture_events, recwarn):
+    sentry_init(
+        integrations=[LoggingIntegration(event_level="WARNING")],
+        default_integrations=False,
+    )
+    events = capture_events()
+
+    logging.captureWarnings(True)
+    warnings.warn("first")
+    warnings.warn("second")
+    logging.captureWarnings(False)
+
+    warnings.warn("third")
+
+    assert len(events) == 2
+
+    assert events[0]["level"] == "warning"
+    # Captured warnings start with the path where the warning was raised
+    assert "UserWarning: first" in events[0]["logentry"]["message"]
+    assert events[0]["logentry"]["params"] == []
+
+    assert events[1]["level"] == "warning"
+    assert "UserWarning: second" in events[1]["logentry"]["message"]
+    assert events[1]["logentry"]["params"] == []
+
+    # Using recwarn suppresses the "third" warning in the test output
+    assert len(recwarn) == 1
+    assert str(recwarn[0].message) == "third"
+
+
+def test_ignore_logger(sentry_init, capture_events):
+    sentry_init(integrations=[LoggingIntegration()], default_integrations=False)
+    events = capture_events()
+
+    ignore_logger("testfoo")
+
+    other_logger.error("hi")
+
+    assert not events
+
+
+def test_ignore_logger_wildcard(sentry_init, capture_events):
+    sentry_init(integrations=[LoggingIntegration()], default_integrations=False)
+    events = capture_events()
+
+    ignore_logger("testfoo.*")
+
+    nested_logger = logging.getLogger("testfoo.submodule")
+
+    logger.error("hi")
+
+    nested_logger.error("bye")
+
+    (event,) = events
+    assert event["logentry"]["message"] == "hi"
diff --git a/tests/integrations/opentelemetry/__init__.py b/tests/integrations/opentelemetry/__init__.py
new file mode 100644
index 0000000000..39ecc610d5
--- /dev/null
+++ b/tests/integrations/opentelemetry/__init__.py
@@ -0,0 +1,3 @@
+import pytest
+
+pytest.importorskip("opentelemetry")
diff --git a/tests/integrations/opentelemetry/test_propagator.py b/tests/integrations/opentelemetry/test_propagator.py
new file mode 100644
index 0000000000..529aa99c09
--- /dev/null
+++ b/tests/integrations/opentelemetry/test_propagator.py
@@ -0,0 +1,248 @@
+from mock import MagicMock
+import mock
+
+from opentelemetry.context import get_current
+from opentelemetry.trace.propagation import get_current_span
+from opentelemetry.trace import (
+    set_span_in_context,
+    TraceFlags,
+    SpanContext,
+)
+from sentry_sdk.integrations.opentelemetry.consts import (
+    SENTRY_BAGGAGE_KEY,
+    SENTRY_TRACE_KEY,
+)
+
+from sentry_sdk.integrations.opentelemetry.propagator import SentryPropagator
+from sentry_sdk.integrations.opentelemetry.span_processor import SentrySpanProcessor
+from sentry_sdk.tracing_utils import Baggage
+
+
+def test_extract_no_context_no_sentry_trace_header():
+    """
+    No context and NO Sentry trace data in getter.
+    Extract should return empty context.
+    """
+    carrier = None
+    context = None
+    getter = MagicMock()
+    getter.get.return_value = None
+
+    modified_context = SentryPropagator().extract(carrier, context, getter)
+
+    assert modified_context == {}
+
+
+def test_extract_context_no_sentry_trace_header():
+    """
+    Context but NO Sentry trace data in getter.
+    Extract should return context as is.
+    """
+    carrier = None
+    context = {"some": "value"}
+    getter = MagicMock()
+    getter.get.return_value = None
+
+    modified_context = SentryPropagator().extract(carrier, context, getter)
+
+    assert modified_context == context
+
+
+def test_extract_empty_context_sentry_trace_header_no_baggage():
+    """
+    Empty context, Sentry trace data present, but NO Baggage in getter.
+    Extract should return context that has empty baggage in it and also a NoopSpan with span_id and trace_id.
+    """
+    carrier = None
+    context = {}
+    getter = MagicMock()
+    getter.get.side_effect = [
+        ["1234567890abcdef1234567890abcdef-1234567890abcdef-1"],
+        None,
+    ]
+
+    modified_context = SentryPropagator().extract(carrier, context, getter)
+
+    assert len(modified_context.keys()) == 3
+
+    assert modified_context[SENTRY_TRACE_KEY] == {
+        "trace_id": "1234567890abcdef1234567890abcdef",
+        "parent_span_id": "1234567890abcdef",
+        "parent_sampled": True,
+    }
+    assert modified_context[SENTRY_BAGGAGE_KEY].serialize() == ""
+
+    span_context = get_current_span(modified_context).get_span_context()
+    assert span_context.span_id == int("1234567890abcdef", 16)
+    assert span_context.trace_id == int("1234567890abcdef1234567890abcdef", 16)
+
+
+def test_extract_context_sentry_trace_header_baggage():
+    """
+    Non-empty context with Sentry trace data and Baggage in getter.
+    Extract should return context that has baggage in it and also a NoopSpan with span_id and trace_id.
+    """
+    baggage_header = (
+        "other-vendor-value-1=foo;bar;baz, sentry-trace_id=771a43a4192642f0b136d5159a501700, "
+        "sentry-public_key=49d0f7386ad645858ae85020e393bef3, sentry-sample_rate=0.01337, "
+        "sentry-user_id=Am%C3%A9lie, other-vendor-value-2=foo;bar;"
+    )
+
+    carrier = None
+    context = {"some": "value"}
+    getter = MagicMock()
+    getter.get.side_effect = [
+        ["1234567890abcdef1234567890abcdef-1234567890abcdef-1"],
+        [baggage_header],
+    ]
+
+    modified_context = SentryPropagator().extract(carrier, context, getter)
+
+    assert len(modified_context.keys()) == 4
+
+    assert modified_context[SENTRY_TRACE_KEY] == {
+        "trace_id": "1234567890abcdef1234567890abcdef",
+        "parent_span_id": "1234567890abcdef",
+        "parent_sampled": True,
+    }
+
+    assert modified_context[SENTRY_BAGGAGE_KEY].serialize() == (
+        "sentry-trace_id=771a43a4192642f0b136d5159a501700,"
+        "sentry-public_key=49d0f7386ad645858ae85020e393bef3,"
+        "sentry-sample_rate=0.01337,sentry-user_id=Am%C3%A9lie"
+    )
+
+    span_context = get_current_span(modified_context).get_span_context()
+    assert span_context.span_id == int("1234567890abcdef", 16)
+    assert span_context.trace_id == int("1234567890abcdef1234567890abcdef", 16)
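+
+
+# Only "sentry-"-prefixed entries survive Baggage serialization in the
+# assertion above; third-party baggage items are dropped. An illustrative
+# filter (hypothetical helper, not the SDK's Baggage parser):
+def _sentry_baggage_items(baggage_header):
+    items = (item.strip() for item in baggage_header.split(","))
+    return [item for item in items if item.startswith("sentry-")]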
+
+
+def test_inject_empty_otel_span_map():
+    """
+    Empty otel_span_map.
+    inject() therefore finds no sentry_span, returns early,
+    and no setters are called.
+    """
+    carrier = None
+    context = get_current()
+    setter = MagicMock()
+    setter.set = MagicMock()
+
+    span_context = SpanContext(
+        trace_id=int("1234567890abcdef1234567890abcdef", 16),
+        span_id=int("1234567890abcdef", 16),
+        trace_flags=TraceFlags(TraceFlags.SAMPLED),
+        is_remote=True,
+    )
+    span = MagicMock()
+    span.context = span_context
+
+    with mock.patch(
+        "sentry_sdk.integrations.opentelemetry.propagator.trace.get_current_span",
+        return_value=span,
+    ):
+        full_context = set_span_in_context(span, context)
+        SentryPropagator().inject(carrier, full_context, setter)
+
+        setter.set.assert_not_called()
+
+
+def test_inject_sentry_span_no_baggage():
+    """
+    Inject a sentry span with no baggage.
+    """
+    carrier = None
+    context = get_current()
+    setter = MagicMock()
+    setter.set = MagicMock()
+
+    trace_id = "1234567890abcdef1234567890abcdef"
+    span_id = "1234567890abcdef"
+
+    span_context = SpanContext(
+        trace_id=int(trace_id, 16),
+        span_id=int(span_id, 16),
+        trace_flags=TraceFlags(TraceFlags.SAMPLED),
+        is_remote=True,
+    )
+    span = MagicMock()
+    span.context = span_context
+
+    sentry_span = MagicMock()
+    sentry_span.to_traceparent = mock.Mock(
+        return_value="1234567890abcdef1234567890abcdef-1234567890abcdef-1"
+    )
+    sentry_span.containing_transaction.get_baggage = mock.Mock(return_value=None)
+
+    span_processor = SentrySpanProcessor()
+    span_processor.otel_span_map[span_id] = sentry_span
+
+    with mock.patch(
+        "sentry_sdk.integrations.opentelemetry.propagator.trace.get_current_span",
+        return_value=span,
+    ):
+        full_context = set_span_in_context(span, context)
+        SentryPropagator().inject(carrier, full_context, setter)
+
+        setter.set.assert_called_once_with(
+            carrier,
+            "sentry-trace",
+            "1234567890abcdef1234567890abcdef-1234567890abcdef-1",
+        )
+
+
+def test_inject_sentry_span_baggage():
+    """
+    Inject a sentry span with baggage.
+    """
+    carrier = None
+    context = get_current()
+    setter = MagicMock()
+    setter.set = MagicMock()
+
+    trace_id = "1234567890abcdef1234567890abcdef"
+    span_id = "1234567890abcdef"
+
+    span_context = SpanContext(
+        trace_id=int(trace_id, 16),
+        span_id=int(span_id, 16),
+        trace_flags=TraceFlags(TraceFlags.SAMPLED),
+        is_remote=True,
+    )
+    span = MagicMock()
+    span.context = span_context
+
+    sentry_span = MagicMock()
+    sentry_span.to_traceparent = mock.Mock(
+        return_value="1234567890abcdef1234567890abcdef-1234567890abcdef-1"
+    )
+    sentry_items = {
+        "sentry-trace_id": "771a43a4192642f0b136d5159a501700",
+        "sentry-public_key": "49d0f7386ad645858ae85020e393bef3",
+        "sentry-sample_rate": 0.01337,
+        "sentry-user_id": "Amélie",
+    }
+    baggage = Baggage(sentry_items=sentry_items)
+    sentry_span.containing_transaction.get_baggage = MagicMock(return_value=baggage)
+
+    span_processor = SentrySpanProcessor()
+    span_processor.otel_span_map[span_id] = sentry_span
+
+    with mock.patch(
+        "sentry_sdk.integrations.opentelemetry.propagator.trace.get_current_span",
+        return_value=span,
+    ):
+        full_context = set_span_in_context(span, context)
+        SentryPropagator().inject(carrier, full_context, setter)
+
+        setter.set.assert_any_call(
+            carrier,
+            "sentry-trace",
+            "1234567890abcdef1234567890abcdef-1234567890abcdef-1",
+        )
+
+        setter.set.assert_any_call(
+            carrier,
+            "baggage",
+            baggage.serialize(),
+        )
diff --git a/tests/integrations/opentelemetry/test_span_processor.py b/tests/integrations/opentelemetry/test_span_processor.py
new file mode 100644
index 0000000000..d7dc6b66df
--- /dev/null
+++ b/tests/integrations/opentelemetry/test_span_processor.py
@@ -0,0 +1,468 @@
+from datetime import datetime
+from mock import MagicMock
+import mock
+import time
+from sentry_sdk.integrations.opentelemetry.span_processor import (
+    SentrySpanProcessor,
+    link_trace_context_to_error_event,
+)
+from sentry_sdk.tracing import Span, Transaction
+
+from opentelemetry.trace import SpanKind, SpanContext
+from sentry_sdk.tracing_utils import extract_sentrytrace_data
+
+
+def test_is_sentry_span():
+    otel_span = MagicMock()
+
+    hub = MagicMock()
+    hub.client = None
+
+    span_processor = SentrySpanProcessor()
+    assert not span_processor._is_sentry_span(hub, otel_span)
+
+    client = MagicMock()
+    client.options = {"instrumenter": "otel"}
+    client.dsn = "https://1234567890abcdef@o123456.ingest.sentry.io/123456"
+
+    hub.client = client
+    assert not span_processor._is_sentry_span(hub, otel_span)
+
+    otel_span.attributes = {
+        "http.url": "https://example.com",
+    }
+    assert not span_processor._is_sentry_span(hub, otel_span)
+
+    otel_span.attributes = {
+        "http.url": "https://o123456.ingest.sentry.io/api/123/envelope",
+    }
+    assert span_processor._is_sentry_span(hub, otel_span)
+
+
+def test_get_otel_context():
+    otel_span = MagicMock()
+    otel_span.attributes = {"foo": "bar"}
+    otel_span.resource = MagicMock()
+    otel_span.resource.attributes = {"baz": "qux"}
+
+    span_processor = SentrySpanProcessor()
+    otel_context = span_processor._get_otel_context(otel_span)
+
+    assert otel_context == {
+        "attributes": {"foo": "bar"},
+        "resource": {"baz": "qux"},
+    }
+
+
+def test_get_trace_data_with_span_and_trace():
+    otel_span = MagicMock()
+    otel_span.context = MagicMock()
+    otel_span.context.trace_id = int("1234567890abcdef1234567890abcdef", 16)
+    otel_span.context.span_id = int("1234567890abcdef", 16)
+    otel_span.parent = None
+
+    parent_context = {}
+
+    span_processor = SentrySpanProcessor()
+    sentry_trace_data = span_processor._get_trace_data(otel_span, parent_context)
+    assert sentry_trace_data["trace_id"] == "1234567890abcdef1234567890abcdef"
+    assert sentry_trace_data["span_id"] == "1234567890abcdef"
+    assert sentry_trace_data["parent_span_id"] is None
+    assert sentry_trace_data["parent_sampled"] is None
+    assert sentry_trace_data["baggage"] is None
+
+
+def test_get_trace_data_with_span_and_trace_and_parent():
+    otel_span = MagicMock()
+    otel_span.context = MagicMock()
+    otel_span.context.trace_id = int("1234567890abcdef1234567890abcdef", 16)
+    otel_span.context.span_id = int("1234567890abcdef", 16)
+    otel_span.parent = MagicMock()
+    otel_span.parent.span_id = int("abcdef1234567890", 16)
+
+    parent_context = {}
+
+    span_processor = SentrySpanProcessor()
+    sentry_trace_data = span_processor._get_trace_data(otel_span, parent_context)
+    assert sentry_trace_data["trace_id"] == "1234567890abcdef1234567890abcdef"
+    assert sentry_trace_data["span_id"] == "1234567890abcdef"
+    assert sentry_trace_data["parent_span_id"] == "abcdef1234567890"
+    assert sentry_trace_data["parent_sampled"] is None
+    assert sentry_trace_data["baggage"] is None
+
+
+def test_get_trace_data_with_sentry_trace():
+    otel_span = MagicMock()
+    otel_span.context = MagicMock()
+    otel_span.context.trace_id = int("1234567890abcdef1234567890abcdef", 16)
+    otel_span.context.span_id = int("1234567890abcdef", 16)
+    otel_span.parent = MagicMock()
+    otel_span.parent.span_id = int("abcdef1234567890", 16)
+
+    parent_context = {}
+
+    with mock.patch(
+        "sentry_sdk.integrations.opentelemetry.span_processor.get_value",
+        side_effect=[
+            extract_sentrytrace_data(
+                "1234567890abcdef1234567890abcdef-1234567890abcdef-1"
+            ),
+            None,
+        ],
+    ):
+        span_processor = SentrySpanProcessor()
+        sentry_trace_data = span_processor._get_trace_data(otel_span, parent_context)
+        assert sentry_trace_data["trace_id"] == "1234567890abcdef1234567890abcdef"
+        assert sentry_trace_data["span_id"] == "1234567890abcdef"
+        assert sentry_trace_data["parent_span_id"] == "abcdef1234567890"
+        assert sentry_trace_data["parent_sampled"] is True
+        assert sentry_trace_data["baggage"] is None
+
+    with mock.patch(
+        "sentry_sdk.integrations.opentelemetry.span_processor.get_value",
+        side_effect=[
+            extract_sentrytrace_data(
+                "1234567890abcdef1234567890abcdef-1234567890abcdef-0"
+            ),
+            None,
+        ],
+    ):
+        span_processor = SentrySpanProcessor()
+        sentry_trace_data = span_processor._get_trace_data(otel_span, parent_context)
+        assert sentry_trace_data["trace_id"] == "1234567890abcdef1234567890abcdef"
+        assert sentry_trace_data["span_id"] == "1234567890abcdef"
+        assert sentry_trace_data["parent_span_id"] == "abcdef1234567890"
+        assert sentry_trace_data["parent_sampled"] is False
+        assert sentry_trace_data["baggage"] is None
+
+
+def test_get_trace_data_with_sentry_trace_and_baggage():
+    otel_span = MagicMock()
+    otel_span.context = MagicMock()
+    otel_span.context.trace_id = int("1234567890abcdef1234567890abcdef", 16)
+    otel_span.context.span_id = int("1234567890abcdef", 16)
+    otel_span.parent = MagicMock()
+    otel_span.parent.span_id = int("abcdef1234567890", 16)
+
+    parent_context = {}
+
+    baggage = (
+        "sentry-trace_id=771a43a4192642f0b136d5159a501700,"
+        "sentry-public_key=49d0f7386ad645858ae85020e393bef3,"
+        "sentry-sample_rate=0.01337,sentry-user_id=Am%C3%A9lie"
+    )
+
+    with mock.patch(
+        "sentry_sdk.integrations.opentelemetry.span_processor.get_value",
+        side_effect=[
+            extract_sentrytrace_data(
+                "1234567890abcdef1234567890abcdef-1234567890abcdef-1"
+            ),
+            baggage,
+        ],
+    ):
+        span_processor = SentrySpanProcessor()
+        sentry_trace_data = span_processor._get_trace_data(otel_span, parent_context)
+        assert sentry_trace_data["trace_id"] == "1234567890abcdef1234567890abcdef"
+        assert sentry_trace_data["span_id"] == "1234567890abcdef"
+        assert sentry_trace_data["parent_span_id"] == "abcdef1234567890"
+        assert sentry_trace_data["parent_sampled"]
+        assert sentry_trace_data["baggage"] == baggage
+
+
+def test_update_span_with_otel_data_http_method():
+    sentry_span = Span()
+
+    otel_span = MagicMock()
+    otel_span.name = "Test OTel Span"
+    otel_span.kind = SpanKind.CLIENT
+    otel_span.attributes = {
+        "http.method": "GET",
+        "http.status_code": 429,
+        "http.status_text": "xxx",
+        "http.user_agent": "curl/7.64.1",
+        "net.peer.name": "example.com",
+        "http.target": "/",
+    }
+
+    span_processor = SentrySpanProcessor()
+    span_processor._update_span_with_otel_data(sentry_span, otel_span)
+
+    assert sentry_span.op == "http.client"
+    assert sentry_span.description == "GET example.com /"
+    assert sentry_span._tags["http.status_code"] == "429"
+    assert sentry_span.status == "resource_exhausted"
+
+    assert sentry_span._data["http.method"] == "GET"
+    assert sentry_span._data["http.status_code"] == 429
+    assert sentry_span._data["http.status_text"] == "xxx"
+    assert sentry_span._data["http.user_agent"] == "curl/7.64.1"
+    assert sentry_span._data["net.peer.name"] == "example.com"
+    assert sentry_span._data["http.target"] == "/"
+
+
+def test_update_span_with_otel_data_http_method2():
+    sentry_span = Span()
+
+    otel_span = MagicMock()
+    otel_span.name = "Test OTel Span"
+    otel_span.kind = SpanKind.SERVER
+    otel_span.attributes = {
+        "http.method": "GET",
+        "http.status_code": 429,
+        "http.status_text": "xxx",
+        "http.user_agent": "curl/7.64.1",
+        "http.url": "https://httpbin.org/status/403?password=123&username=test@example.com&author=User123&auth=1234567890abcdef",
+    }
+
+    span_processor = SentrySpanProcessor()
+    span_processor._update_span_with_otel_data(sentry_span, otel_span)
+
+    assert sentry_span.op == "http.server"
+    assert sentry_span.description == "GET https://httpbin.org/status/403"
+    assert sentry_span._tags["http.status_code"] == "429"
+    assert sentry_span.status == "resource_exhausted"
+
+    assert sentry_span._data["http.method"] == "GET"
+    assert sentry_span._data["http.status_code"] == 429
+    assert sentry_span._data["http.status_text"] == "xxx"
+    assert sentry_span._data["http.user_agent"] == "curl/7.64.1"
+    assert (
+        sentry_span._data["http.url"]
+        == "https://httpbin.org/status/403?password=123&username=test@example.com&author=User123&auth=1234567890abcdef"
+    )
+
+
+def test_update_span_with_otel_data_db_query():
+    sentry_span = Span()
+
+    otel_span = MagicMock()
+    otel_span.name = "Test OTel Span"
+    otel_span.attributes = {
+        "db.system": "postgresql",
+        "db.statement": "SELECT * FROM table where pwd = '123456'",
+    }
+
+    span_processor = SentrySpanProcessor()
+    span_processor._update_span_with_otel_data(sentry_span, otel_span)
+
+    assert sentry_span.op == "db"
+    assert sentry_span.description == "SELECT * FROM table where pwd = '123456'"
+
+    assert sentry_span._data["db.system"] == "postgresql"
+    assert (
+        sentry_span._data["db.statement"] == "SELECT * FROM table where pwd = '123456'"
+    )
+
+
+def test_on_start_transaction():
+    otel_span = MagicMock()
+    otel_span.name = "Sample OTel Span"
+    otel_span.start_time = time.time_ns()
+    otel_span.context = MagicMock()
+    otel_span.context.trace_id = int("1234567890abcdef1234567890abcdef", 16)
+    otel_span.context.span_id = int("1234567890abcdef", 16)
+    otel_span.parent = MagicMock()
+    otel_span.parent.span_id = int("abcdef1234567890", 16)
+
+    parent_context = {}
+
+    fake_client = MagicMock()
+    fake_client.options = {"instrumenter": "otel"}
+    fake_client.dsn = "https://1234567890abcdef@o123456.ingest.sentry.io/123456"
+
+    current_hub = MagicMock()
+    current_hub.client = fake_client
+
+    fake_hub = MagicMock()
+    fake_hub.current = current_hub
+
+    with mock.patch(
+        "sentry_sdk.integrations.opentelemetry.span_processor.Hub", fake_hub
+    ):
+        span_processor = SentrySpanProcessor()
+        span_processor.on_start(otel_span, parent_context)
+
+        fake_hub.current.start_transaction.assert_called_once_with(
+            name="Sample OTel Span",
+            span_id="1234567890abcdef",
+            parent_span_id="abcdef1234567890",
+            trace_id="1234567890abcdef1234567890abcdef",
+            baggage=None,
+            start_timestamp=datetime.fromtimestamp(otel_span.start_time / 1e9),
+            instrumenter="otel",
+        )
+
+        assert len(span_processor.otel_span_map.keys()) == 1
+        assert list(span_processor.otel_span_map.keys())[0] == "1234567890abcdef"
+
+
+def test_on_start_child():
+    otel_span = MagicMock()
+    otel_span.name = "Sample OTel Span"
+    otel_span.start_time = time.time_ns()
+    otel_span.context = MagicMock()
+    otel_span.context.trace_id = int("1234567890abcdef1234567890abcdef", 16)
+    otel_span.context.span_id = int("1234567890abcdef", 16)
+    otel_span.parent = MagicMock()
+    otel_span.parent.span_id = int("abcdef1234567890", 16)
+
+    parent_context = {}
+
+    fake_client = MagicMock()
+    fake_client.options = {"instrumenter": "otel"}
+    fake_client.dsn = "https://1234567890abcdef@o123456.ingest.sentry.io/123456"
+
+    current_hub = MagicMock()
+    current_hub.client = fake_client
+
+    fake_hub = MagicMock()
+    fake_hub.current = current_hub
+
+    with mock.patch(
+        "sentry_sdk.integrations.opentelemetry.span_processor.Hub", fake_hub
+    ):
+        fake_span = MagicMock()
+
+        span_processor = SentrySpanProcessor()
+        span_processor.otel_span_map["abcdef1234567890"] = fake_span
+        span_processor.on_start(otel_span, parent_context)
+
+        fake_span.start_child.assert_called_once_with(
+            span_id="1234567890abcdef",
+            description="Sample OTel Span",
+            start_timestamp=datetime.fromtimestamp(otel_span.start_time / 1e9),
+            instrumenter="otel",
+        )
+
+        assert len(span_processor.otel_span_map.keys()) == 2
+        assert "abcdef1234567890" in span_processor.otel_span_map.keys()
+        assert "1234567890abcdef" in span_processor.otel_span_map.keys()
+
+
+def test_on_end_no_sentry_span():
+    """
+    If on_end is called on a span that is not in the otel_span_map, it should be a no-op.
+    """
+    otel_span = MagicMock()
+    otel_span.name = "Sample OTel Span"
+    otel_span.end_time = time.time_ns()
+    otel_span.context = MagicMock()
+    otel_span.context.span_id = int("1234567890abcdef", 16)
+
+    span_processor = SentrySpanProcessor()
+    span_processor.otel_span_map = {}
+    span_processor._get_otel_context = MagicMock()
+    span_processor._update_span_with_otel_data = MagicMock()
+
+    span_processor.on_end(otel_span)
+
+    span_processor._get_otel_context.assert_not_called()
+    span_processor._update_span_with_otel_data.assert_not_called()
+
+
+def test_on_end_sentry_transaction():
+    """
+    Test on_end for a sentry Transaction.
+    """
+    otel_span = MagicMock()
+    otel_span.name = "Sample OTel Span"
+    otel_span.end_time = time.time_ns()
+    otel_span.context = MagicMock()
+    otel_span.context.span_id = int("1234567890abcdef", 16)
+
+    fake_sentry_span = MagicMock(spec=Transaction)
+    fake_sentry_span.set_context = MagicMock()
+    fake_sentry_span.finish = MagicMock()
+
+    span_processor = SentrySpanProcessor()
+    span_processor._get_otel_context = MagicMock()
+    span_processor._update_span_with_otel_data = MagicMock()
+    span_processor.otel_span_map["1234567890abcdef"] = fake_sentry_span
+
+    span_processor.on_end(otel_span)
+
+    fake_sentry_span.set_context.assert_called_once()
+    span_processor._update_span_with_otel_data.assert_not_called()
+    fake_sentry_span.finish.assert_called_once()
+
+
+def test_on_end_sentry_span():
+    """
+    Test on_end for a sentry Span.
+    """
+    otel_span = MagicMock()
+    otel_span.name = "Sample OTel Span"
+    otel_span.end_time = time.time_ns()
+    otel_span.context = MagicMock()
+    otel_span.context.span_id = int("1234567890abcdef", 16)
+
+    fake_sentry_span = MagicMock(spec=Span)
+    fake_sentry_span.set_context = MagicMock()
+    fake_sentry_span.finish = MagicMock()
+
+    span_processor = SentrySpanProcessor()
+    span_processor._get_otel_context = MagicMock()
+    span_processor._update_span_with_otel_data = MagicMock()
+    span_processor.otel_span_map["1234567890abcdef"] = fake_sentry_span
+
+    span_processor.on_end(otel_span)
+
+    fake_sentry_span.set_context.assert_not_called()
+    span_processor._update_span_with_otel_data.assert_called_once_with(
+        fake_sentry_span, otel_span
+    )
+    fake_sentry_span.finish.assert_called_once()
+
+
+def test_link_trace_context_to_error_event():
+    """
+    Test that the trace context is added to the error event.
+    """
+    fake_client = MagicMock()
+    fake_client.options = {"instrumenter": "otel"}
+
+    current_hub = MagicMock()
+    current_hub.client = fake_client
+
+    fake_hub = MagicMock()
+    fake_hub.current = current_hub
+
+    span_id = "1234567890abcdef"
+    trace_id = "1234567890abcdef1234567890abcdef"
+
+    fake_trace_context = {
+        "bla": "blub",
+        "foo": "bar",
+        "baz": 123,
+    }
+
+    sentry_span = MagicMock()
+    sentry_span.get_trace_context = MagicMock(return_value=fake_trace_context)
+
+    otel_span_map = {
+        span_id: sentry_span,
+    }
+
+    span_context = SpanContext(
+        trace_id=int(trace_id, 16),
+        span_id=int(span_id, 16),
+        is_remote=True,
+    )
+    otel_span = MagicMock()
+    otel_span.get_span_context = MagicMock(return_value=span_context)
+
+    fake_event = {"event_id": "1234567890abcdef1234567890abcdef"}
+
+    with mock.patch(
+        "sentry_sdk.integrations.opentelemetry.span_processor.get_current_span",
+        return_value=otel_span,
+    ):
+        event = link_trace_context_to_error_event(fake_event, otel_span_map)
+
+        assert event
+        assert event == fake_event  # the event is changed in place inside the function
+        assert "contexts" in event
+        assert "trace" in event["contexts"]
+        assert event["contexts"]["trace"] == fake_trace_context
diff --git a/tests/integrations/pure_eval/__init__.py b/tests/integrations/pure_eval/__init__.py
new file mode 100644
index 0000000000..3f645e75f6
--- /dev/null
+++ b/tests/integrations/pure_eval/__init__.py
@@ -0,0 +1,3 @@
+import pytest
+
+pure_eval = pytest.importorskip("pure_eval")
diff --git a/tests/integrations/pure_eval/test_pure_eval.py b/tests/integrations/pure_eval/test_pure_eval.py
new file mode 100644
index 0000000000..e7da025144
--- /dev/null
+++ b/tests/integrations/pure_eval/test_pure_eval.py
@@ -0,0 +1,92 @@
+import sys
+from types import SimpleNamespace
+
+import pytest
+
+from sentry_sdk import capture_exception, serializer
+from sentry_sdk.integrations.pure_eval import PureEvalIntegration
+
+
+@pytest.mark.parametrize("integrations", [[], [PureEvalIntegration()]])
+def test_with_locals_enabled(sentry_init, capture_events, integrations):
+    sentry_init(with_locals=True, integrations=integrations)
+    events = capture_events()
+
+    def foo():
+        namespace = SimpleNamespace()
+        q = 1
+        w = 2
+        e = 3
+        r = 4
+        t = 5
+        y = 6
+        u = 7
+        i = 8
+        o = 9
+        p = 10
+        a = 11
+        s = 12
+        str((q, w, e, r, t, y, u, i, o, p, a, s))  # use variables for linter
+        namespace.d = {1: 2}
+        print(namespace.d[1] / 0)
+
+        # Appearances of variables after the main statement don't affect order
+        print(q)
+        print(s)
+        print(events)
+
+    try:
+        foo()
+    except Exception:
+        capture_exception()
+
+    (event,) = events
+
+    assert all(
+        frame["vars"]
+        for frame in event["exception"]["values"][0]["stacktrace"]["frames"]
+    )
+
+    frame_vars = event["exception"]["values"][0]["stacktrace"]["frames"][-1]["vars"]
+
+    if integrations:
+        # Values closest to the exception line appear first
+        # Test this order if possible given the Python version and dict order
+        expected_keys = [
+            "namespace",
+            "namespace.d",
+            "namespace.d[1]",
+            "s",
+            "a",
+            "p",
+            "o",
+            "i",
+            "u",
+            "y",
+        ]
+        if sys.version_info[:2] == (3, 5):
+            assert frame_vars.keys() == set(expected_keys)
+        else:
+            assert list(frame_vars.keys()) == expected_keys
+        assert frame_vars["namespace.d"] == {"1": "2"}
+        assert frame_vars["namespace.d[1]"] == "2"
+    else:
+        # Without pure_eval, the set of captured variables is unpredictable.
+        # On newer Python versions (with ordered dicts), the variables defined
+        # first appear first and are thus the ones included
+        assert frame_vars.keys() <= {
+            "namespace",
+            "q",
+            "w",
+            "e",
+            "r",
+            "t",
+            "y",
+            "u",
+            "i",
+            "o",
+            "p",
+            "a",
+            "s",
+            "events",
+        }
+        assert len(frame_vars) == serializer.MAX_DATABAG_BREADTH
diff --git a/tests/integrations/pymongo/__init__.py b/tests/integrations/pymongo/__init__.py
new file mode 100644
index 0000000000..91223b0630
--- /dev/null
+++ b/tests/integrations/pymongo/__init__.py
@@ -0,0 +1,3 @@
+import pytest
+
+pytest.importorskip("pymongo")
diff --git a/tests/integrations/pymongo/test_pymongo.py b/tests/integrations/pymongo/test_pymongo.py
new file mode 100644
index 0000000000..16438ac971
--- /dev/null
+++ b/tests/integrations/pymongo/test_pymongo.py
@@ -0,0 +1,419 @@
+from sentry_sdk import capture_message, start_transaction
+from sentry_sdk.integrations.pymongo import PyMongoIntegration, _strip_pii
+
+from mockupdb import MockupDB, OpQuery
+from pymongo import MongoClient
+import pytest
+
+
+@pytest.fixture(scope="session")
+def mongo_server():
+    server = MockupDB(verbose=True)
+    server.autoresponds("ismaster", maxWireVersion=6)
+    server.run()
+    server.autoresponds(
+        {"find": "test_collection"}, cursor={"id": 123, "firstBatch": []}
+    )
+    # The "find" wire command changed somewhere between PyMongo 3.1 and 3.12.
+    # This responder answers "find" queries from older PyMongo versions the
+    # same way as the responder above.
+    server.autoresponds(OpQuery({"foobar": 1}), cursor={"id": 123, "firstBatch": []})
+    server.autoresponds({"insert": "test_collection"}, ok=1)
+    server.autoresponds({"insert": "erroneous"}, ok=0, errmsg="test error")
+    yield server
+    server.stop()
+
+
+@pytest.mark.parametrize("with_pii", [False, True])
+def test_transactions(sentry_init, capture_events, mongo_server, with_pii):
+    sentry_init(
+        integrations=[PyMongoIntegration()],
+        traces_sample_rate=1.0,
+        send_default_pii=with_pii,
+    )
+    events = capture_events()
+
+    connection = MongoClient(mongo_server.uri)
+
+    with start_transaction():
+        list(
+            connection["test_db"]["test_collection"].find({"foobar": 1})
+        )  # force query execution
+        connection["test_db"]["test_collection"].insert_one({"foo": 2})
+        try:
+            connection["test_db"]["erroneous"].insert_many([{"bar": 3}, {"baz": 4}])
+            pytest.fail("Request should raise")
+        except Exception:
+            pass
+
+    (event,) = events
+    (find, insert_success, insert_fail) = event["spans"]
+
+    common_tags = {
+        "db.name": "test_db",
+        "db.system": "mongodb",
+        "net.peer.name": mongo_server.host,
+        "net.peer.port": str(mongo_server.port),
+    }
+    for span in find, insert_success, insert_fail:
+        for field, value in common_tags.items():
+            assert span["tags"][field] == value
+
+    assert find["op"] == "db.query"
+    assert insert_success["op"] == "db.query"
+    assert insert_fail["op"] == "db.query"
+
+    assert find["tags"]["db.operation"] == "find"
+    assert insert_success["tags"]["db.operation"] == "insert"
+    assert insert_fail["tags"]["db.operation"] == "insert"
+
+    assert find["description"].startswith("find {")
+    assert insert_success["description"].startswith("insert {")
+    assert insert_fail["description"].startswith("insert {")
+    if with_pii:
+        assert "1" in find["description"]
+        assert "2" in insert_success["description"]
+        assert "3" in insert_fail["description"] and "4" in insert_fail["description"]
+    else:
+        # All values in filter replaced by "%s"
+        assert "1" not in find["description"]
+        # All keys below top level replaced by "%s"
+        assert "2" not in insert_success["description"]
+        assert (
+            "3" not in insert_fail["description"]
+            and "4" not in insert_fail["description"]
+        )
+
+    assert find["tags"]["status"] == "ok"
+    assert insert_success["tags"]["status"] == "ok"
+    assert insert_fail["tags"]["status"] == "internal_error"
+
+
+@pytest.mark.parametrize("with_pii", [False, True])
+def test_breadcrumbs(sentry_init, capture_events, mongo_server, with_pii):
+    sentry_init(
+        integrations=[PyMongoIntegration()],
+        traces_sample_rate=1.0,
+        send_default_pii=with_pii,
+    )
+    events = capture_events()
+
+    connection = MongoClient(mongo_server.uri)
+
+    list(
+        connection["test_db"]["test_collection"].find({"foobar": 1})
+    )  # force query execution
+    capture_message("hi")
+
+    (event,) = events
+    (crumb,) = event["breadcrumbs"]["values"]
+
+    assert crumb["category"] == "query"
+    assert crumb["message"].startswith("find {")
+    if with_pii:
+        assert "1" in crumb["message"]
+    else:
+        assert "1" not in crumb["message"]
+    assert crumb["type"] == "db.query"
+    assert crumb["data"] == {
+        "db.name": "test_db",
+        "db.system": "mongodb",
+        "db.operation": "find",
+        "net.peer.name": mongo_server.host,
+        "net.peer.port": str(mongo_server.port),
+    }
+
+
+@pytest.mark.parametrize(
+    "testcase",
+    [
+        {
+            "command": {
+                "insert": "my_collection",
+                "ordered": True,
+                "documents": [
+                    {
+                        "username": "anton2",
+                        "email": "anton@somewhere.io",
+                        "password": "c4e86722fb56d946f7ddeecdae47e1c4458bf98a0a3ee5d5113111adf7bf0175",
+                        "_id": "635bc7403cb4f8a736f61cf2",
+                    }
+                ],
+            },
+            "command_stripped": {
+                "insert": "my_collection",
+                "ordered": True,
+                "documents": [
+                    {"username": "%s", "email": "%s", "password": "%s", "_id": "%s"}
+                ],
+            },
+        },
+        {
+            "command": {
+                "insert": "my_collection",
+                "ordered": True,
+                "documents": [
+                    {
+                        "username": "indiana4",
+                        "email": "indy@jones.org",
+                        "password": "63e86722fb56d946f7ddeecdae47e1c4458bf98a0a3ee5d5113111adf7bf016b",
+                        "_id": "635bc7403cb4f8a736f61cf3",
+                    }
+                ],
+            },
+            "command_stripped": {
+                "insert": "my_collection",
+                "ordered": True,
+                "documents": [
+                    {"username": "%s", "email": "%s", "password": "%s", "_id": "%s"}
+                ],
+            },
+        },
+        {
+            "command": {
+                "find": "my_collection",
+                "filter": {},
+                "limit": 1,
+                "singleBatch": True,
+            },
+            "command_stripped": {
+                "find": "my_collection",
+                "filter": {},
+                "limit": 1,
+                "singleBatch": True,
+            },
+        },
+        {
+            "command": {
+                "find": "my_collection",
+                "filter": {"username": "notthere"},
+                "limit": 1,
+                "singleBatch": True,
+            },
+            "command_stripped": {
+                "find": "my_collection",
+                "filter": {"username": "%s"},
+                "limit": 1,
+                "singleBatch": True,
+            },
+        },
+        {
+            "command": {
+                "insert": "my_collection",
+                "ordered": True,
+                "documents": [
+                    {
+                        "username": "userx1",
+                        "email": "x@somewhere.io",
+                        "password": "ccc86722fb56d946f7ddeecdae47e1c4458bf98a0a3ee5d5113111adf7bf0175",
+                        "_id": "635bc7403cb4f8a736f61cf4",
+                    },
+                    {
+                        "username": "userx2",
+                        "email": "x@somewhere.io",
+                        "password": "xxx86722fb56d946f7ddeecdae47e1c4458bf98a0a3ee5d5113111adf7bf0175",
+                        "_id": "635bc7403cb4f8a736f61cf5",
+                    },
+                ],
+            },
+            "command_stripped": {
+                "insert": "my_collection",
+                "ordered": True,
+                "documents": [
+                    {"username": "%s", "email": "%s", "password": "%s", "_id": "%s"},
+                    {"username": "%s", "email": "%s", "password": "%s", "_id": "%s"},
+                ],
+            },
+        },
+        {
+            "command": {
+                "find": "my_collection",
+                "filter": {"email": "ada@lovelace.com"},
+            },
+            "command_stripped": {"find": "my_collection", "filter": {"email": "%s"}},
+        },
+        {
+            "command": {
+                "aggregate": "my_collection",
+                "pipeline": [{"$match": {}}, {"$group": {"_id": 1, "n": {"$sum": 1}}}],
+                "cursor": {},
+            },
+            "command_stripped": {
+                "aggregate": "my_collection",
+                "pipeline": [{"$match": {}}, {"$group": {"_id": 1, "n": {"$sum": 1}}}],
+                "cursor": "%s",
+            },
+        },
+        {
+            "command": {
+                "aggregate": "my_collection",
+                "pipeline": [
+                    {"$match": {"email": "x@somewhere.io"}},
+                    {"$group": {"_id": 1, "n": {"$sum": 1}}},
+                ],
+                "cursor": {},
+            },
+            "command_stripped": {
+                "aggregate": "my_collection",
+                "pipeline": [
+                    {"$match": {"email": "%s"}},
+                    {"$group": {"_id": 1, "n": {"$sum": 1}}},
+                ],
+                "cursor": "%s",
+            },
+        },
+        {
+            "command": {
+                "createIndexes": "my_collection",
+                "indexes": [{"name": "username_1", "key": [("username", 1)]}],
+            },
+            "command_stripped": {
+                "createIndexes": "my_collection",
+                "indexes": [{"name": "username_1", "key": [("username", 1)]}],
+            },
+        },
+        {
+            "command": {
+                "update": "my_collection",
+                "ordered": True,
+                "updates": [
+                    ("q", {"email": "anton@somewhere.io"}),
+                    (
+                        "u",
+                        {
+                            "email": "anton2@somwehre.io",
+                            "extra_field": "extra_content",
+                            "new": "bla",
+                        },
+                    ),
+                    ("multi", False),
+                    ("upsert", False),
+                ],
+            },
+            "command_stripped": {
+                "update": "my_collection",
+                "ordered": True,
+                "updates": "%s",
+            },
+        },
+        {
+            "command": {
+                "update": "my_collection",
+                "ordered": True,
+                "updates": [
+                    ("q", {"email": "anton2@somwehre.io"}),
+                    ("u", {"$rename": {"new": "new_field"}}),
+                    ("multi", False),
+                    ("upsert", False),
+                ],
+            },
+            "command_stripped": {
+                "update": "my_collection",
+                "ordered": True,
+                "updates": "%s",
+            },
+        },
+        {
+            "command": {
+                "update": "my_collection",
+                "ordered": True,
+                "updates": [
+                    ("q", {"email": "x@somewhere.io"}),
+                    ("u", {"$rename": {"password": "pwd"}}),
+                    ("multi", True),
+                    ("upsert", False),
+                ],
+            },
+            "command_stripped": {
+                "update": "my_collection",
+                "ordered": True,
+                "updates": "%s",
+            },
+        },
+        {
+            "command": {
+                "delete": "my_collection",
+                "ordered": True,
+                "deletes": [("q", {"username": "userx2"}), ("limit", 1)],
+            },
+            "command_stripped": {
+                "delete": "my_collection",
+                "ordered": True,
+                "deletes": "%s",
+            },
+        },
+        {
+            "command": {
+                "delete": "my_collection",
+                "ordered": True,
+                "deletes": [("q", {"email": "xplus@somewhere.io"}), ("limit", 0)],
+            },
+            "command_stripped": {
+                "delete": "my_collection",
+                "ordered": True,
+                "deletes": "%s",
+            },
+        },
+        {
+            "command": {
+                "findAndModify": "my_collection",
+                "query": {"email": "ada@lovelace.com"},
+                "new": False,
+                "remove": True,
+            },
+            "command_stripped": {
+                "findAndModify": "my_collection",
+                "query": {"email": "%s"},
+                "new": "%s",
+                "remove": "%s",
+            },
+        },
+        {
+            "command": {
+                "findAndModify": "my_collection",
+                "query": {"email": "anton2@somewhere.io"},
+                "new": False,
+                "update": {"email": "anton3@somwehre.io", "extra_field": "xxx"},
+                "upsert": False,
+            },
+            "command_stripped": {
+                "findAndModify": "my_collection",
+                "query": {"email": "%s"},
+                "new": "%s",
+                "update": {"email": "%s", "extra_field": "%s"},
+                "upsert": "%s",
+            },
+        },
+        {
+            "command": {
+                "findAndModify": "my_collection",
+                "query": {"email": "anton3@somewhere.io"},
+                "new": False,
+                "update": {"$rename": {"extra_field": "extra_field2"}},
+                "upsert": False,
+            },
+            "command_stripped": {
+                "findAndModify": "my_collection",
+                "query": {"email": "%s"},
+                "new": "%s",
+                "update": {"$rename": "%s"},
+                "upsert": "%s",
+            },
+        },
+        {
+            "command": {
+                "renameCollection": "test.my_collection",
+                "to": "test.new_collection",
+            },
+            "command_stripped": {
+                "renameCollection": "test.my_collection",
+                "to": "test.new_collection",
+            },
+        },
+        {
+            "command": {"drop": "new_collection"},
+            "command_stripped": {"drop": "new_collection"},
+        },
+    ],
+)
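+# Each case pairs a raw MongoDB command with the form the integration is
+# expected to produce; "%s" marks values stripped as potential PII.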
+def test_strip_pii(testcase):
+    assert _strip_pii(testcase["command"]) == testcase["command_stripped"]
diff --git a/tests/integrations/pyramid/test_pyramid.py b/tests/integrations/pyramid/test_pyramid.py
index bc74fd8a80..0f8755ac6b 100644
--- a/tests/integrations/pyramid/test_pyramid.py
+++ b/tests/integrations/pyramid/test_pyramid.py
@@ -26,12 +26,19 @@ def hi(request):
     return Response("hi")
 
 
+def hi_with_id(request):
+    capture_message("hi with id")
+    return Response("hi with id")
+
+
 @pytest.fixture
 def pyramid_config():
     config = pyramid.testing.setUp()
     try:
         config.add_route("hi", "/message")
         config.add_view(hi, route_name="hi")
+        config.add_route("hi_with_id", "/message/{message_id}")
+        config.add_view(hi_with_id, route_name="hi_with_id")
         yield config
     finally:
         pyramid.testing.tearDown()
@@ -80,7 +87,7 @@ def errors(request):
     assert isinstance(error, ZeroDivisionError)
 
     (event,) = events
-    (breadcrumb,) = event["breadcrumbs"]
+    (breadcrumb,) = event["breadcrumbs"]["values"]
     assert breadcrumb["message"] == "hi2"
     assert event["exception"]["values"][0]["mechanism"]["type"] == "pyramid"
 
@@ -89,13 +96,13 @@ def test_has_context(route, get_client, sentry_init, capture_events):
     sentry_init(integrations=[PyramidIntegration()])
     events = capture_events()
 
-    @route("/message/{msg}")
+    @route("/context_message/{msg}")
     def hi2(request):
         capture_message(request.matchdict["msg"])
         return Response("hi")
 
     client = get_client()
-    client.get("/message/yoo")
+    client.get("/context_message/yoo")
 
     (event,) = events
     assert event["message"] == "yoo"
@@ -104,26 +111,38 @@ def hi2(request):
         "headers": {"Host": "localhost"},
         "method": "GET",
         "query_string": "",
-        "url": "http://localhost/message/yoo",
+        "url": "http://localhost/context_message/yoo",
     }
     assert event["transaction"] == "hi2"
 
 
 @pytest.mark.parametrize(
-    "transaction_style,expected_transaction",
-    [("route_name", "hi"), ("route_pattern", "/message")],
+    "url,transaction_style,expected_transaction,expected_source",
+    [
+        ("/message", "route_name", "hi", "component"),
+        ("/message", "route_pattern", "/message", "route"),
+        ("/message/123456", "route_name", "hi_with_id", "component"),
+        ("/message/123456", "route_pattern", "/message/{message_id}", "route"),
+    ],
 )
 def test_transaction_style(
-    sentry_init, get_client, capture_events, transaction_style, expected_transaction
+    sentry_init,
+    get_client,
+    capture_events,
+    url,
+    transaction_style,
+    expected_transaction,
+    expected_source,
 ):
     sentry_init(integrations=[PyramidIntegration(transaction_style=transaction_style)])
 
     events = capture_events()
     client = get_client()
-    client.get("/message")
+    client.get(url)
 
     (event,) = events
     assert event["transaction"] == expected_transaction
+    assert event["transaction_info"] == {"source": expected_source}
 
 
 def test_large_json_request(sentry_init, capture_events, route, get_client):
@@ -146,9 +165,9 @@ def index(request):
 
     (event,) = events
     assert event["_meta"]["request"]["data"]["foo"]["bar"] == {
-        "": {"len": 2000, "rem": [["!limit", "x", 509, 512]]}
+        "": {"len": 2000, "rem": [["!limit", "x", 1021, 1024]]}
     }
-    assert len(event["request"]["data"]["foo"]["bar"]) == 512
+    assert len(event["request"]["data"]["foo"]["bar"]) == 1024
 
 
 @pytest.mark.parametrize("data", [{}, []], ids=["empty-dict", "empty-list"])
@@ -190,13 +209,11 @@ def index(request):
 
     (event,) = events
     assert event["_meta"]["request"]["data"]["foo"] == {
-        "": {"len": 2000, "rem": [["!limit", "x", 509, 512]]}
+        "": {"len": 2000, "rem": [["!limit", "x", 1021, 1024]]}
     }
-    assert len(event["request"]["data"]["foo"]) == 512
+    assert len(event["request"]["data"]["foo"]) == 1024
 
-    assert event["_meta"]["request"]["data"]["file"] == {
-        "": {"len": 0, "rem": [["!raw", "x", 0, 0]]}
-    }
+    assert event["_meta"]["request"]["data"]["file"] == {"": {"rem": [["!raw", "x"]]}}
     assert not event["request"]["data"]["file"]
 
 
diff --git a/tests/integrations/quart/__init__.py b/tests/integrations/quart/__init__.py
new file mode 100644
index 0000000000..ea02dfb3a6
--- /dev/null
+++ b/tests/integrations/quart/__init__.py
@@ -0,0 +1,3 @@
+import pytest
+
+quart = pytest.importorskip("quart")
diff --git a/tests/integrations/quart/test_quart.py b/tests/integrations/quart/test_quart.py
new file mode 100644
index 0000000000..6d2c590a53
--- /dev/null
+++ b/tests/integrations/quart/test_quart.py
@@ -0,0 +1,525 @@
+import pytest
+import pytest_asyncio
+
+quart = pytest.importorskip("quart")
+
+from quart import Quart, Response, abort, stream_with_context
+from quart.views import View
+
+from quart_auth import AuthManager, AuthUser, login_user
+
+from sentry_sdk import (
+    set_tag,
+    configure_scope,
+    capture_message,
+    capture_exception,
+    last_event_id,
+)
+from sentry_sdk.integrations.logging import LoggingIntegration
+import sentry_sdk.integrations.quart as quart_sentry
+
+
+auth_manager = AuthManager()
+
+
+@pytest_asyncio.fixture
+async def app():
+    app = Quart(__name__)
+    app.debug = True
+    app.config["TESTING"] = True
+    app.secret_key = "haha"
+
+    auth_manager.init_app(app)
+
+    @app.route("/message")
+    async def hi():
+        capture_message("hi")
+        return "ok"
+
+    @app.route("/message/")
+    async def hi_with_id(message_id):
+        capture_message("hi with id")
+        return "ok with id"
+
+    return app
+
+
+@pytest.fixture(params=("manual",))
+def integration_enabled_params(request):
+    if request.param == "manual":
+        return {"integrations": [quart_sentry.QuartIntegration()]}
+    else:
+        raise ValueError(request.param)
+
+
+@pytest.mark.asyncio
+async def test_has_context(sentry_init, app, capture_events):
+    sentry_init(integrations=[quart_sentry.QuartIntegration()])
+    events = capture_events()
+
+    client = app.test_client()
+    response = await client.get("/message")
+    assert response.status_code == 200
+
+    (event,) = events
+    assert event["transaction"] == "hi"
+    assert "data" not in event["request"]
+    assert event["request"]["url"] == "http://localhost/message"
+
+
+@pytest.mark.asyncio
+@pytest.mark.parametrize(
+    "url,transaction_style,expected_transaction,expected_source",
+    [
+        ("/message", "endpoint", "hi", "component"),
+        ("/message", "url", "/message", "route"),
+        ("/message/123456", "endpoint", "hi_with_id", "component"),
+        ("/message/123456", "url", "/message/", "route"),
+    ],
+)
+async def test_transaction_style(
+    sentry_init,
+    app,
+    capture_events,
+    url,
+    transaction_style,
+    expected_transaction,
+    expected_source,
+):
+    sentry_init(
+        integrations=[
+            quart_sentry.QuartIntegration(transaction_style=transaction_style)
+        ]
+    )
+    events = capture_events()
+
+    client = app.test_client()
+    response = await client.get(url)
+    assert response.status_code == 200
+
+    (event,) = events
+    assert event["transaction"] == expected_transaction
+
+
+@pytest.mark.asyncio
+@pytest.mark.parametrize("debug", (True, False))
+@pytest.mark.parametrize("testing", (True, False))
+async def test_errors(
+    sentry_init,
+    capture_exceptions,
+    capture_events,
+    app,
+    debug,
+    testing,
+    integration_enabled_params,
+):
+    sentry_init(debug=True, **integration_enabled_params)
+
+    app.debug = debug
+    app.testing = testing
+
+    @app.route("/")
+    async def index():
+        1 / 0
+
+    exceptions = capture_exceptions()
+    events = capture_events()
+
+    client = app.test_client()
+    try:
+        await client.get("/")
+    except ZeroDivisionError:
+        pass
+
+    (exc,) = exceptions
+    assert isinstance(exc, ZeroDivisionError)
+
+    (event,) = events
+    assert event["exception"]["values"][0]["mechanism"]["type"] == "quart"
+
+
+@pytest.mark.asyncio
+async def test_quart_auth_not_installed(
+    sentry_init, app, capture_events, monkeypatch, integration_enabled_params
+):
+    sentry_init(**integration_enabled_params)
+
+    monkeypatch.setattr(quart_sentry, "quart_auth", None)
+
+    events = capture_events()
+
+    client = app.test_client()
+    await client.get("/message")
+
+    (event,) = events
+    assert event.get("user", {}).get("id") is None
+
+
+@pytest.mark.asyncio
+async def test_quart_auth_not_configured(
+    sentry_init, app, capture_events, monkeypatch, integration_enabled_params
+):
+    sentry_init(**integration_enabled_params)
+
+    assert quart_sentry.quart_auth
+
+    events = capture_events()
+    client = app.test_client()
+    await client.get("/message")
+
+    (event,) = events
+    assert event.get("user", {}).get("id") is None
+
+
+@pytest.mark.asyncio
+async def test_quart_auth_partially_configured(
+    sentry_init, app, capture_events, monkeypatch, integration_enabled_params
+):
+    sentry_init(**integration_enabled_params)
+
+    events = capture_events()
+
+    client = app.test_client()
+    await client.get("/message")
+
+    (event,) = events
+    assert event.get("user", {}).get("id") is None
+
+
+@pytest.mark.asyncio
+@pytest.mark.parametrize("send_default_pii", [True, False])
+@pytest.mark.parametrize("user_id", [None, "42", "3"])
+async def test_quart_auth_configured(
+    send_default_pii,
+    sentry_init,
+    app,
+    user_id,
+    capture_events,
+    monkeypatch,
+    integration_enabled_params,
+):
+    sentry_init(send_default_pii=send_default_pii, **integration_enabled_params)
+
+    @app.route("/login")
+    async def login():
+        if user_id is not None:
+            login_user(AuthUser(user_id))
+        return "ok"
+
+    events = capture_events()
+
+    client = app.test_client()
+    assert (await client.get("/login")).status_code == 200
+    assert not events
+
+    assert (await client.get("/message")).status_code == 200
+
+    (event,) = events
+    if user_id is None or not send_default_pii:
+        assert event.get("user", {}).get("id") is None
+    else:
+        assert event["user"]["id"] == str(user_id)
+
+
+@pytest.mark.asyncio
+@pytest.mark.parametrize(
+    "integrations",
+    [
+        [quart_sentry.QuartIntegration()],
+        [quart_sentry.QuartIntegration(), LoggingIntegration(event_level="ERROR")],
+    ],
+)
+async def test_errors_not_reported_twice(
+    sentry_init, integrations, capture_events, app
+):
+    sentry_init(integrations=integrations)
+
+    @app.route("/")
+    async def index():
+        try:
+            1 / 0
+        except Exception as e:
+            app.logger.exception(e)
+            raise e
+
+    events = capture_events()
+
+    client = app.test_client()
+    # with pytest.raises(ZeroDivisionError):
+    await client.get("/")
+
+    assert len(events) == 1
+
+
+@pytest.mark.asyncio
+async def test_logging(sentry_init, capture_events, app):
+    # ensure that Quart's logger magic doesn't break ours
+    sentry_init(
+        integrations=[
+            quart_sentry.QuartIntegration(),
+            LoggingIntegration(event_level="ERROR"),
+        ]
+    )
+
+    @app.route("/")
+    async def index():
+        app.logger.error("hi")
+        return "ok"
+
+    events = capture_events()
+
+    client = app.test_client()
+    await client.get("/")
+
+    (event,) = events
+    assert event["level"] == "error"
+
+
+@pytest.mark.asyncio
+async def test_no_errors_without_request(app, sentry_init):
+    sentry_init(integrations=[quart_sentry.QuartIntegration()])
+    async with app.app_context():
+        capture_exception(ValueError())
+
+
+def test_cli_commands_raise(app):
+    if not hasattr(app, "cli"):
+        pytest.skip("Too old quart version")
+
+    from quart.cli import ScriptInfo
+
+    @app.cli.command()
+    def foo():
+        1 / 0
+
+    with pytest.raises(ZeroDivisionError):
+        app.cli.main(
+            args=["foo"], prog_name="myapp", obj=ScriptInfo(create_app=lambda _: app)
+        )
+
+
+@pytest.mark.asyncio
+async def test_500(sentry_init, capture_events, app):
+    sentry_init(integrations=[quart_sentry.QuartIntegration()])
+
+    app.debug = False
+    app.testing = False
+
+    @app.route("/")
+    async def index():
+        1 / 0
+
+    @app.errorhandler(500)
+    async def error_handler(err):
+        return "Sentry error: %s" % last_event_id()
+
+    events = capture_events()
+
+    client = app.test_client()
+    response = await client.get("/")
+
+    (event,) = events
+    assert (await response.get_data(as_text=True)) == "Sentry error: %s" % event[
+        "event_id"
+    ]
+
+
+@pytest.mark.asyncio
+async def test_error_in_errorhandler(sentry_init, capture_events, app):
+    sentry_init(integrations=[quart_sentry.QuartIntegration()])
+
+    app.debug = False
+    app.testing = False
+
+    @app.route("/")
+    async def index():
+        raise ValueError()
+
+    @app.errorhandler(500)
+    async def error_handler(err):
+        1 / 0
+
+    events = capture_events()
+
+    client = app.test_client()
+
+    with pytest.raises(ZeroDivisionError):
+        await client.get("/")
+
+    event1, event2 = events
+
+    (exception,) = event1["exception"]["values"]
+    assert exception["type"] == "ValueError"
+
+    exception = event2["exception"]["values"][-1]
+    assert exception["type"] == "ZeroDivisionError"
+
+
+@pytest.mark.asyncio
+async def test_bad_request_not_captured(sentry_init, capture_events, app):
+    sentry_init(integrations=[quart_sentry.QuartIntegration()])
+    events = capture_events()
+
+    @app.route("/")
+    async def index():
+        abort(400)
+
+    client = app.test_client()
+
+    await client.get("/")
+
+    assert not events
+
+
+@pytest.mark.asyncio
+async def test_does_not_leak_scope(sentry_init, capture_events, app):
+    sentry_init(integrations=[quart_sentry.QuartIntegration()])
+    events = capture_events()
+
+    with configure_scope() as scope:
+        scope.set_tag("request_data", False)
+
+    @app.route("/")
+    async def index():
+        with configure_scope() as scope:
+            scope.set_tag("request_data", True)
+
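+        # The tag set above must be visible inside the streaming generator,
+        # but must not leak into the outer scope once the request is done.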
+        async def generate():
+            for row in range(1000):
+                with configure_scope() as scope:
+                    assert scope._tags["request_data"]
+
+                yield str(row) + "\n"
+
+        return Response(stream_with_context(generate)(), mimetype="text/csv")
+
+    client = app.test_client()
+    response = await client.get("/")
+    assert (await response.get_data(as_text=True)) == "".join(
+        str(row) + "\n" for row in range(1000)
+    )
+    assert not events
+
+    with configure_scope() as scope:
+        assert not scope._tags["request_data"]
+
+
+@pytest.mark.asyncio
+async def test_scoped_test_client(sentry_init, app):
+    sentry_init(integrations=[quart_sentry.QuartIntegration()])
+
+    @app.route("/")
+    async def index():
+        return "ok"
+
+    async with app.test_client() as client:
+        response = await client.get("/")
+        assert response.status_code == 200
+
+
+@pytest.mark.asyncio
+@pytest.mark.parametrize("exc_cls", [ZeroDivisionError, Exception])
+async def test_errorhandler_for_exception_swallows_exception(
+    sentry_init, app, capture_events, exc_cls
+):
+    # In contrast to error handlers for a status code, error
+    # handlers for exceptions can swallow the exception (this is
+    # just how the Quart signal works)
+    sentry_init(integrations=[quart_sentry.QuartIntegration()])
+    events = capture_events()
+
+    @app.route("/")
+    async def index():
+        1 / 0
+
+    @app.errorhandler(exc_cls)
+    async def zerodivision(e):
+        return "ok"
+
+    async with app.test_client() as client:
+        response = await client.get("/")
+        assert response.status_code == 200
+
+    assert not events
+
+
+@pytest.mark.asyncio
+async def test_tracing_success(sentry_init, capture_events, app):
+    sentry_init(traces_sample_rate=1.0, integrations=[quart_sentry.QuartIntegration()])
+
+    @app.before_request
+    async def _():
+        set_tag("before_request", "yes")
+
+    @app.route("/message_tx")
+    async def hi_tx():
+        set_tag("view", "yes")
+        capture_message("hi")
+        return "ok"
+
+    events = capture_events()
+
+    async with app.test_client() as client:
+        response = await client.get("/message_tx")
+        assert response.status_code == 200
+
+    message_event, transaction_event = events
+
+    assert transaction_event["type"] == "transaction"
+    assert transaction_event["transaction"] == "hi_tx"
+    assert transaction_event["tags"]["view"] == "yes"
+    assert transaction_event["tags"]["before_request"] == "yes"
+
+    assert message_event["message"] == "hi"
+    assert message_event["transaction"] == "hi_tx"
+    assert message_event["tags"]["view"] == "yes"
+    assert message_event["tags"]["before_request"] == "yes"
+
+
+@pytest.mark.asyncio
+async def test_tracing_error(sentry_init, capture_events, app):
+    sentry_init(traces_sample_rate=1.0, integrations=[quart_sentry.QuartIntegration()])
+
+    events = capture_events()
+
+    @app.route("/error")
+    async def error():
+        1 / 0
+
+    async with app.test_client() as client:
+        response = await client.get("/error")
+        assert response.status_code == 500
+
+    error_event, transaction_event = events
+
+    assert transaction_event["type"] == "transaction"
+    assert transaction_event["transaction"] == "error"
+
+    assert error_event["transaction"] == "error"
+    (exception,) = error_event["exception"]["values"]
+    assert exception["type"] == "ZeroDivisionError"
+
+
+@pytest.mark.asyncio
+async def test_class_based_views(sentry_init, app, capture_events):
+    sentry_init(integrations=[quart_sentry.QuartIntegration()])
+    events = capture_events()
+
+    @app.route("/")
+    class HelloClass(View):
+        methods = ["GET"]
+
+        async def dispatch_request(self):
+            capture_message("hi")
+            return "ok"
+
+    app.add_url_rule("/hello-class/", view_func=HelloClass.as_view("hello_class"))
+
+    async with app.test_client() as client:
+        response = await client.get("/hello-class/")
+        assert response.status_code == 200
+
+    (event,) = events
+
+    assert event["message"] == "hi"
+    assert event["transaction"] == "hello_class"
diff --git a/tests/integrations/redis/test_redis.py b/tests/integrations/redis/test_redis.py
index f3ea410a53..9a6d066e03 100644
--- a/tests/integrations/redis/test_redis.py
+++ b/tests/integrations/redis/test_redis.py
@@ -1,7 +1,8 @@
-from sentry_sdk import capture_message
+from sentry_sdk import capture_message, start_transaction
 from sentry_sdk.integrations.redis import RedisIntegration
 
 from fakeredis import FakeStrictRedis
+import pytest
 
 
 def test_basic(sentry_init, capture_events):
@@ -14,12 +15,46 @@ def test_basic(sentry_init, capture_events):
     capture_message("hi")
 
     (event,) = events
-    (crumb,) = event["breadcrumbs"]
+    (crumb,) = event["breadcrumbs"]["values"]
 
     assert crumb == {
         "category": "redis",
         "message": "GET 'foobar'",
-        "data": {"redis.key": "foobar", "redis.command": "GET"},
+        "data": {
+            "redis.key": "foobar",
+            "redis.command": "GET",
+            "redis.is_cluster": False,
+        },
         "timestamp": crumb["timestamp"],
         "type": "redis",
     }
+
+
+@pytest.mark.parametrize("is_transaction", [False, True])
+def test_redis_pipeline(sentry_init, capture_events, is_transaction):
+    sentry_init(integrations=[RedisIntegration()], traces_sample_rate=1.0)
+    events = capture_events()
+
+    connection = FakeStrictRedis()
+    with start_transaction():
+
+        pipeline = connection.pipeline(transaction=is_transaction)
+        pipeline.get("foo")
+        pipeline.set("bar", 1)
+        pipeline.set("baz", 2)
+        pipeline.execute()
+
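+    # The whole pipeline shows up as a single db.redis span; the individual
+    # commands are summarized under span["data"]["redis.commands"].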
+    (event,) = events
+    (span,) = event["spans"]
+    assert span["op"] == "db.redis"
+    assert span["description"] == "redis.pipeline.execute"
+    assert span["data"] == {
+        "redis.commands": {
+            "count": 3,
+            "first_ten": ["GET 'foo'", "SET 'bar' 1", "SET 'baz' 2"],
+        }
+    }
+    assert span["tags"] == {
+        "redis.transaction": is_transaction,
+        "redis.is_cluster": False,
+    }
diff --git a/tests/integrations/rediscluster/__init__.py b/tests/integrations/rediscluster/__init__.py
new file mode 100644
index 0000000000..b292f63ec8
--- /dev/null
+++ b/tests/integrations/rediscluster/__init__.py
@@ -0,0 +1,3 @@
+import pytest
+
+pytest.importorskip("rediscluster")
diff --git a/tests/integrations/rediscluster/test_rediscluster.py b/tests/integrations/rediscluster/test_rediscluster.py
new file mode 100644
index 0000000000..6c7e5f90a4
--- /dev/null
+++ b/tests/integrations/rediscluster/test_rediscluster.py
@@ -0,0 +1,79 @@
+import pytest
+from sentry_sdk import capture_message
+from sentry_sdk.api import start_transaction
+from sentry_sdk.integrations.redis import RedisIntegration
+
+import rediscluster
+
+rediscluster_classes = [rediscluster.RedisCluster]
+
+if hasattr(rediscluster, "StrictRedisCluster"):
+    rediscluster_classes.append(rediscluster.StrictRedisCluster)
+
+
+@pytest.fixture(autouse=True)
+def monkeypatch_rediscluster_classes(reset_integrations):
+
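+    # Stub out pipeline creation and command execution so these tests run
+    # without a real Redis cluster behind them.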
+    try:
+        pipeline_cls = rediscluster.pipeline.ClusterPipeline
+    except AttributeError:
+        pipeline_cls = rediscluster.StrictClusterPipeline
+    rediscluster.RedisCluster.pipeline = lambda *_, **__: pipeline_cls(
+        connection_pool=True
+    )
+    pipeline_cls.execute = lambda *_, **__: None
+    for cls in rediscluster_classes:
+        cls.execute_command = lambda *_, **__: None
+
+
+@pytest.mark.parametrize("rediscluster_cls", rediscluster_classes)
+def test_rediscluster_basic(rediscluster_cls, sentry_init, capture_events):
+    sentry_init(integrations=[RedisIntegration()])
+    events = capture_events()
+
+    rc = rediscluster_cls(connection_pool=True)
+    rc.get("foobar")
+    capture_message("hi")
+
+    (event,) = events
+    (crumb,) = event["breadcrumbs"]["values"]
+
+    assert crumb == {
+        "category": "redis",
+        "message": "GET 'foobar'",
+        "data": {
+            "redis.key": "foobar",
+            "redis.command": "GET",
+            "redis.is_cluster": True,
+        },
+        "timestamp": crumb["timestamp"],
+        "type": "redis",
+    }
+
+
+def test_rediscluster_pipeline(sentry_init, capture_events):
+    sentry_init(integrations=[RedisIntegration()], traces_sample_rate=1.0)
+    events = capture_events()
+
+    rc = rediscluster.RedisCluster(connection_pool=True)
+    with start_transaction():
+        pipeline = rc.pipeline()
+        pipeline.get("foo")
+        pipeline.set("bar", 1)
+        pipeline.set("baz", 2)
+        pipeline.execute()
+
+    (event,) = events
+    (span,) = event["spans"]
+    assert span["op"] == "db.redis"
+    assert span["description"] == "redis.pipeline.execute"
+    assert span["data"] == {
+        "redis.commands": {
+            "count": 3,
+            "first_ten": ["GET 'foo'", "SET 'bar' 1", "SET 'baz' 2"],
+        }
+    }
+    assert span["tags"] == {
+        "redis.transaction": False,  # For Cluster, this is always False
+        "redis.is_cluster": True,
+    }
diff --git a/tests/integrations/requests/test_requests.py b/tests/integrations/requests/test_requests.py
index 55b8a37962..02c6636853 100644
--- a/tests/integrations/requests/test_requests.py
+++ b/tests/integrations/requests/test_requests.py
@@ -11,16 +11,15 @@ def test_crumb_capture(sentry_init, capture_events):
     events = capture_events()
 
     response = requests.get("https://httpbin.org/status/418")
-    assert response.status_code == 418
     capture_message("Testing!")
 
     (event,) = events
-    (crumb,) = event["breadcrumbs"]
+    (crumb,) = event["breadcrumbs"]["values"]
     assert crumb["type"] == "http"
     assert crumb["category"] == "httplib"
     assert crumb["data"] == {
         "url": "https://httpbin.org/status/418",
         "method": "GET",
-        "status_code": 418,
-        "reason": "I'M A TEAPOT",
+        "status_code": response.status_code,
+        "reason": response.reason,
     }
diff --git a/tests/integrations/rq/test_rq.py b/tests/integrations/rq/test_rq.py
index 35832ffedf..fb25b65a03 100644
--- a/tests/integrations/rq/test_rq.py
+++ b/tests/integrations/rq/test_rq.py
@@ -1,13 +1,45 @@
+import pytest
+from fakeredis import FakeStrictRedis
 from sentry_sdk.integrations.rq import RqIntegration
 
-from fakeredis import FakeStrictRedis
 import rq
 
+try:
+    from unittest import mock  # python 3.3 and above
+except ImportError:
+    import mock  # python < 3.3
+
+
+@pytest.fixture(autouse=True)
+def _patch_rq_get_server_version(monkeypatch):
+    """
+    Patch up RQ 1.5 to work with fakeredis.
+
+    https://github.com/jamesls/fakeredis/issues/273
+    """
+
+    from distutils.version import StrictVersion
+
+    if tuple(map(int, rq.VERSION.split("."))) >= (1, 5):
+        for k in (
+            "rq.job.Job.get_redis_server_version",
+            "rq.worker.Worker.get_redis_server_version",
+        ):
+            monkeypatch.setattr(k, lambda _: StrictVersion("4.0.0"))
+
 
 def crashing_job(foo):
     1 / 0
 
 
+def chew_up_shoes(dog, human, shoes):
+    raise Exception("{}!! Why did you eat {}'s {}??".format(dog, human, shoes))
+
+
+def do_trick(dog, trick):
+    return "{}, can you {}? Good dog!".format(dog, trick)
+
+
 def test_basic(sentry_init, capture_events):
     sentry_init(integrations=[RqIntegration()])
     events = capture_events()
@@ -26,13 +58,18 @@ def test_basic(sentry_init, capture_events):
     assert exception["stacktrace"]["frames"][-1]["vars"]["foo"] == "42"
 
     assert event["transaction"] == "tests.integrations.rq.test_rq.crashing_job"
-    assert event["extra"]["rq-job"] == {
-        "args": [],
-        "description": "tests.integrations.rq.test_rq.crashing_job(foo=42)",
-        "func": "tests.integrations.rq.test_rq.crashing_job",
-        "job_id": event["extra"]["rq-job"]["job_id"],
-        "kwargs": {"foo": 42},
-    }
+
+    extra = event["extra"]["rq-job"]
+    assert extra["args"] == []
+    assert extra["kwargs"] == {"foo": 42}
+    assert extra["description"] == "tests.integrations.rq.test_rq.crashing_job(foo=42)"
+    assert extra["func"] == "tests.integrations.rq.test_rq.crashing_job"
+    assert "job_id" in extra
+    assert "enqueued_at" in extra
+
+    # older versions don't persist started_at correctly
+    if tuple(map(int, rq.VERSION.split("."))) >= (0, 9):
+        assert "started_at" in extra
 
 
 def test_transport_shutdown(sentry_init, capture_events_forksafe):
@@ -51,3 +88,112 @@ def test_transport_shutdown(sentry_init, capture_events_forksafe):
 
     (exception,) = event["exception"]["values"]
     assert exception["type"] == "ZeroDivisionError"
+
+
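+# With traces_sample_rate=1.0, a single enqueued job yields both an error
+# event and a transaction envelope.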
+def test_transaction_with_error(
+    sentry_init, capture_events, DictionaryContaining  # noqa:N803
+):
+
+    sentry_init(integrations=[RqIntegration()], traces_sample_rate=1.0)
+    events = capture_events()
+
+    queue = rq.Queue(connection=FakeStrictRedis())
+    worker = rq.SimpleWorker([queue], connection=queue.connection)
+
+    queue.enqueue(chew_up_shoes, "Charlie", "Katie", shoes="flip-flops")
+    worker.work(burst=True)
+
+    error_event, envelope = events
+
+    assert error_event["transaction"] == "tests.integrations.rq.test_rq.chew_up_shoes"
+    assert error_event["contexts"]["trace"]["op"] == "queue.task.rq"
+    assert error_event["exception"]["values"][0]["type"] == "Exception"
+    assert (
+        error_event["exception"]["values"][0]["value"]
+        == "Charlie!! Why did you eat Katie's flip-flops??"
+    )
+
+    assert envelope["type"] == "transaction"
+    assert envelope["contexts"]["trace"] == error_event["contexts"]["trace"]
+    assert envelope["transaction"] == error_event["transaction"]
+    assert envelope["extra"]["rq-job"] == DictionaryContaining(
+        {
+            "args": ["Charlie", "Katie"],
+            "kwargs": {"shoes": "flip-flops"},
+            "func": "tests.integrations.rq.test_rq.chew_up_shoes",
+            "description": "tests.integrations.rq.test_rq.chew_up_shoes('Charlie', 'Katie', shoes='flip-flops')",
+        }
+    )
+
+
+def test_transaction_no_error(
+    sentry_init, capture_events, DictionaryContaining  # noqa:N803
+):
+    sentry_init(integrations=[RqIntegration()], traces_sample_rate=1.0)
+    events = capture_events()
+
+    queue = rq.Queue(connection=FakeStrictRedis())
+    worker = rq.SimpleWorker([queue], connection=queue.connection)
+
+    queue.enqueue(do_trick, "Maisey", trick="kangaroo")
+    worker.work(burst=True)
+
+    envelope = events[0]
+
+    assert envelope["type"] == "transaction"
+    assert envelope["contexts"]["trace"]["op"] == "queue.task.rq"
+    assert envelope["transaction"] == "tests.integrations.rq.test_rq.do_trick"
+    assert envelope["extra"]["rq-job"] == DictionaryContaining(
+        {
+            "args": ["Maisey"],
+            "kwargs": {"trick": "kangaroo"},
+            "func": "tests.integrations.rq.test_rq.do_trick",
+            "description": "tests.integrations.rq.test_rq.do_trick('Maisey', trick='kangaroo')",
+        }
+    )
+
+
+def test_traces_sampler_gets_correct_values_in_sampling_context(
+    sentry_init, DictionaryContaining, ObjectDescribedBy  # noqa:N803
+):
+    traces_sampler = mock.Mock(return_value=True)
+    sentry_init(integrations=[RqIntegration()], traces_sampler=traces_sampler)
+
+    queue = rq.Queue(connection=FakeStrictRedis())
+    worker = rq.SimpleWorker([queue], connection=queue.connection)
+
+    queue.enqueue(do_trick, "Bodhi", trick="roll over")
+    worker.work(burst=True)
+
+    traces_sampler.assert_any_call(
+        DictionaryContaining(
+            {
+                "rq_job": ObjectDescribedBy(
+                    type=rq.job.Job,
+                    attrs={
+                        "description": "tests.integrations.rq.test_rq.do_trick('Bodhi', trick='roll over')",
+                        "result": "Bodhi, can you roll over? Good dog!",
+                        "func_name": "tests.integrations.rq.test_rq.do_trick",
+                        "args": ("Bodhi",),
+                        "kwargs": {"trick": "roll over"},
+                    },
+                ),
+            }
+        )
+    )
+
+
+@pytest.mark.skipif(
+    rq.__version__.split(".") < ["1", "5"], reason="At least rq-1.5 required"
+)
+def test_job_with_retries(sentry_init, capture_events):
+    sentry_init(integrations=[RqIntegration()])
+    events = capture_events()
+
+    queue = rq.Queue(connection=FakeStrictRedis())
+    worker = rq.SimpleWorker([queue], connection=queue.connection)
+
+    queue.enqueue(crashing_job, foo=42, retry=rq.Retry(max=1))
+    worker.work(burst=True)
+
+    assert len(events) == 1
diff --git a/tests/integrations/sanic/test_sanic.py b/tests/integrations/sanic/test_sanic.py
index 72425abbcb..de84845cf4 100644
--- a/tests/integrations/sanic/test_sanic.py
+++ b/tests/integrations/sanic/test_sanic.py
@@ -1,7 +1,8 @@
+import os
 import sys
-
 import random
 import asyncio
+from unittest.mock import Mock
 
 import pytest
 
@@ -9,20 +10,45 @@
 from sentry_sdk.integrations.sanic import SanicIntegration
 
 from sanic import Sanic, request, response, __version__ as SANIC_VERSION_RAW
-from sanic.exceptions import abort
+from sanic.response import HTTPResponse
+from sanic.exceptions import SanicException
 
 SANIC_VERSION = tuple(map(int, SANIC_VERSION_RAW.split(".")))
 
 
 @pytest.fixture
 def app():
-    app = Sanic(__name__)
+    if SANIC_VERSION < (19,):
+        """
+        Older Sanic versions (0.8 and 18) bind to the same fixed port, which
+        creates problems when we run tests concurrently.
+        """
+        old_test_client = Sanic.test_client.__get__
+
+        def new_test_client(self):
+            client = old_test_client(self, Sanic)
+            client.port += os.getpid() % 100
+            return client
+
+        Sanic.test_client = property(new_test_client)
+
+    if SANIC_VERSION >= (20, 12) and SANIC_VERSION < (22, 6):
+        # Some versions (introduced in 20.12.0, removed again in 22.6.0) store
+        # the app instance in an internal class registry for later retrieval,
+        # so we pass register=False to disable that.
+        app = Sanic("Test", register=False)
+    else:
+        app = Sanic("Test")
 
     @app.route("/message")
     def hi(request):
         capture_message("hi")
         return response.text("ok")
 
+    @app.route("/message/")
+    def hi_with_id(request, message_id):
+        capture_message("hi with id")
+        return response.text("ok with id")
+
     return app
 
 
@@ -55,6 +81,27 @@ def test_request_data(sentry_init, app, capture_events):
     assert "transaction" not in event
 
 
+@pytest.mark.parametrize(
+    "url,expected_transaction,expected_source",
+    [
+        ("/message", "hi", "component"),
+        ("/message/123456", "hi_with_id", "component"),
+    ],
+)
+def test_transaction(
+    sentry_init, app, capture_events, url, expected_transaction, expected_source
+):
+    sentry_init(integrations=[SanicIntegration()])
+    events = capture_events()
+
+    request, response = app.test_client.get(url)
+    assert response.status == 200
+
+    (event,) = events
+    assert event["transaction"] == expected_transaction
+    assert event["transaction_info"] == {"source": expected_source}
+
+
 def test_errors(sentry_init, app, capture_events):
     sentry_init(integrations=[SanicIntegration()])
     events = capture_events()
@@ -84,7 +131,7 @@ def test_bad_request_not_captured(sentry_init, app, capture_events):
 
     @app.route("/")
     def index(request):
-        abort(400)
+        raise SanicException("...", status_code=400)
 
     request, response = app.test_client.get("/")
     assert response.status == 400
@@ -166,16 +213,66 @@ async def task(i):
         if SANIC_VERSION >= (19,):
             kwargs["app"] = app
 
-        await app.handle_request(
-            request.Request(**kwargs),
-            write_callback=responses.append,
-            stream_callback=responses.append,
-        )
+        if SANIC_VERSION >= (21, 3):
+
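+            # Sanic 21.3+ pulls the request body from request.stream and
+            # delivers the response through its respond() hook, so the mock
+            # below provides both interfaces.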
+            class MockAsyncStreamer:
+                def __init__(self, request_body):
+                    self.request_body = request_body
+                    self.iter = iter(self.request_body)
+
+                    if SANIC_VERSION >= (21, 12):
+                        self.response = None
+                        self.stage = Mock()
+                    else:
+                        self.response = b"success"
+
+                def respond(self, response):
+                    responses.append(response)
+                    patched_response = HTTPResponse()
+                    return patched_response
+
+                def __aiter__(self):
+                    return self
+
+                async def __anext__(self):
+                    try:
+                        return next(self.iter)
+                    except StopIteration:
+                        raise StopAsyncIteration
+
+            patched_request = request.Request(**kwargs)
+            patched_request.stream = MockAsyncStreamer([b"hello", b"foo"])
+
+            if SANIC_VERSION >= (21, 9):
+                await app.dispatch(
+                    "http.lifecycle.request",
+                    context={"request": patched_request},
+                    inline=True,
+                )
+
+            await app.handle_request(
+                patched_request,
+            )
+        else:
+            await app.handle_request(
+                request.Request(**kwargs),
+                write_callback=responses.append,
+                stream_callback=responses.append,
+            )
 
         (r,) = responses
         assert r.status == 200
 
     async def runner():
+        if SANIC_VERSION >= (21, 3):
+            if SANIC_VERSION >= (21, 9):
+                await app._startup()
+            else:
+                try:
+                    app.router.reset()
+                    app.router.finalize()
+                except AttributeError:
+                    ...
         await asyncio.gather(*(task(i) for i in range(1000)))
 
     if sys.version_info < (3, 7):
diff --git a/tests/integrations/spark/test_spark.py b/tests/integrations/spark/test_spark.py
index c1dfcc1195..00c0055f12 100644
--- a/tests/integrations/spark/test_spark.py
+++ b/tests/integrations/spark/test_spark.py
@@ -235,8 +235,8 @@ def mock_main():
     assert events[0]["exception"]["values"][0]["type"] == "ZeroDivisionError"
 
     assert events[0]["tags"] == {
-        "stageId": 0,
-        "attemptNumber": 1,
-        "partitionId": 2,
-        "taskAttemptId": 3,
+        "stageId": "0",
+        "attemptNumber": "1",
+        "partitionId": "2",
+        "taskAttemptId": "3",
     }
diff --git a/tests/integrations/sqlalchemy/test_sqlalchemy.py b/tests/integrations/sqlalchemy/test_sqlalchemy.py
index e931b97189..e9d8c4e849 100644
--- a/tests/integrations/sqlalchemy/test_sqlalchemy.py
+++ b/tests/integrations/sqlalchemy/test_sqlalchemy.py
@@ -1,10 +1,15 @@
-from sqlalchemy import Column, ForeignKey, Integer, String
+import sys
+import pytest
+
+from sqlalchemy import Column, ForeignKey, Integer, String, create_engine
+from sqlalchemy.exc import IntegrityError
 from sqlalchemy.ext.declarative import declarative_base
 from sqlalchemy.orm import relationship, sessionmaker
-from sqlalchemy import create_engine
 
-from sentry_sdk import capture_message
+from sentry_sdk import capture_message, start_transaction, configure_scope
 from sentry_sdk.integrations.sqlalchemy import SqlalchemyIntegration
+from sentry_sdk.utils import json_dumps, MAX_STRING_LENGTH
+from sentry_sdk.serializer import MAX_EVENT_BYTES
 
 
 def test_orm_queries(sentry_init, capture_events):
@@ -44,10 +49,10 @@ class Address(Base):
 
     (event,) = events
 
-    for crumb in event["breadcrumbs"]:
+    for crumb in event["breadcrumbs"]["values"]:
         del crumb["timestamp"]
 
-    assert event["breadcrumbs"][-2:] == [
+    assert event["breadcrumbs"]["values"][-2:] == [
         {
             "category": "query",
             "data": {"db.params": ["Bob"], "db.paramstyle": "qmark"},
@@ -63,3 +68,151 @@ class Address(Base):
             "type": "default",
         },
     ]
+
+
+@pytest.mark.skipif(
+    sys.version_info < (3,), reason="This sqla usage seems to be broken on Py2"
+)
+def test_transactions(sentry_init, capture_events, render_span_tree):
+
+    sentry_init(
+        integrations=[SqlalchemyIntegration()],
+        _experiments={"record_sql_params": True},
+        traces_sample_rate=1.0,
+    )
+    events = capture_events()
+
+    Base = declarative_base()  # noqa: N806
+
+    class Person(Base):
+        __tablename__ = "person"
+        id = Column(Integer, primary_key=True)
+        name = Column(String(250), nullable=False)
+
+    class Address(Base):
+        __tablename__ = "address"
+        id = Column(Integer, primary_key=True)
+        street_name = Column(String(250))
+        street_number = Column(String(250))
+        post_code = Column(String(250), nullable=False)
+        person_id = Column(Integer, ForeignKey("person.id"))
+        person = relationship(Person)
+
+    engine = create_engine("sqlite:///:memory:")
+    Base.metadata.create_all(engine)
+
+    Session = sessionmaker(bind=engine)  # noqa: N806
+    session = Session()
+
+    with start_transaction(name="test_transaction", sampled=True):
+        with session.begin_nested():
+            session.query(Person).first()
+
+        for _ in range(2):
+            with pytest.raises(IntegrityError):
+                with session.begin_nested():
+                    session.add(Person(id=1, name="bob"))
+                    session.add(Person(id=1, name="bob"))
+
+        with session.begin_nested():
+            session.query(Person).first()
+
+    (event,) = events
+
+    assert (
+        render_span_tree(event)
+        == """\
+- op=null: description=null
+  - op="db": description="SAVEPOINT sa_savepoint_1"
+  - op="db": description="SELECT person.id AS person_id, person.name AS person_name \\nFROM person\\n LIMIT ? OFFSET ?"
+  - op="db": description="RELEASE SAVEPOINT sa_savepoint_1"
+  - op="db": description="SAVEPOINT sa_savepoint_2"
+  - op="db": description="INSERT INTO person (id, name) VALUES (?, ?)"
+  - op="db": description="ROLLBACK TO SAVEPOINT sa_savepoint_2"
+  - op="db": description="SAVEPOINT sa_savepoint_3"
+  - op="db": description="INSERT INTO person (id, name) VALUES (?, ?)"
+  - op="db": description="ROLLBACK TO SAVEPOINT sa_savepoint_3"
+  - op="db": description="SAVEPOINT sa_savepoint_4"
+  - op="db": description="SELECT person.id AS person_id, person.name AS person_name \\nFROM person\\n LIMIT ? OFFSET ?"
+  - op="db": description="RELEASE SAVEPOINT sa_savepoint_4"\
+"""
+    )
+
+
+def test_long_sql_query_preserved(sentry_init, capture_events):
+    sentry_init(
+        traces_sample_rate=1,
+        integrations=[SqlalchemyIntegration()],
+        _experiments={"smart_transaction_trimming": True},
+    )
+    events = capture_events()
+
+    engine = create_engine("sqlite:///:memory:")
+    with start_transaction(name="test"):
+        with engine.connect() as con:
+            con.execute(" UNION ".join("SELECT {}".format(i) for i in range(100)))
+
+    (event,) = events
+    description = event["spans"][0]["description"]
+    assert description.startswith("SELECT 0 UNION SELECT 1")
+    assert description.endswith("SELECT 98 UNION SELECT 99")
+
+
+def test_too_large_event_truncated(sentry_init, capture_events):
+    sentry_init(
+        traces_sample_rate=1,
+        integrations=[SqlalchemyIntegration()],
+        _experiments={"smart_transaction_trimming": True},
+    )
+    events = capture_events()
+
+    long_str = "x" * (MAX_STRING_LENGTH + 10)
+
+    with configure_scope() as scope:
+
+        @scope.add_event_processor
+        def processor(event, hint):
+            event["message"] = long_str
+            return event
+
+    engine = create_engine("sqlite:///:memory:")
+    with start_transaction(name="test"):
+        with engine.connect() as con:
+            for _ in range(2000):
+                con.execute(" UNION ".join("SELECT {}".format(i) for i in range(100)))
+
+    (event,) = events
+
+    # Because of attached metadata in the "_meta" key, we may send out a little
+    # bit more than MAX_EVENT_BYTES.
+    max_bytes = 1.2 * MAX_EVENT_BYTES
+    assert len(json_dumps(event)) < max_bytes
+
+    # Some spans are discarded.
+    assert len(event["spans"]) == 1000
+
+    for i, span in enumerate(event["spans"]):
+        description = span["description"]
+
+        assert description.startswith("SELECT ")
+        if str(i) in event["_meta"]["spans"]:
+            # Description must have been truncated
+            assert len(description) == 10
+            assert description.endswith("...")
+        else:
+            # Description was not truncated, check for original length
+            assert len(description) == 1583
+            assert description.endswith("SELECT 98 UNION SELECT 99")
+
+    # Smoke check the meta info for one of the spans.
+    assert next(iter(event["_meta"]["spans"].values())) == {
+        "description": {"": {"len": 1583, "rem": [["!limit", "x", 7, 10]]}}
+    }
+
+    # Smoke check that truncation of other fields has not changed.
+    assert len(event["message"]) == MAX_STRING_LENGTH
+
+    # The _meta for other truncated fields should be there as well.
+    assert event["_meta"]["message"] == {
+        "": {"len": 1034, "rem": [["!limit", "x", 1021, 1024]]}
+    }
diff --git a/tests/integrations/starlette/__init__.py b/tests/integrations/starlette/__init__.py
new file mode 100644
index 0000000000..c89ddf99a8
--- /dev/null
+++ b/tests/integrations/starlette/__init__.py
@@ -0,0 +1,3 @@
+import pytest
+
+pytest.importorskip("starlette")
diff --git a/tests/integrations/starlette/photo.jpg b/tests/integrations/starlette/photo.jpg
new file mode 100644
index 0000000000..52fbeef721
Binary files /dev/null and b/tests/integrations/starlette/photo.jpg differ
diff --git a/tests/integrations/starlette/test_starlette.py b/tests/integrations/starlette/test_starlette.py
new file mode 100644
index 0000000000..a279142995
--- /dev/null
+++ b/tests/integrations/starlette/test_starlette.py
@@ -0,0 +1,874 @@
+import asyncio
+import base64
+import functools
+import json
+import os
+import threading
+
+import pytest
+
+from sentry_sdk import last_event_id, capture_exception
+from sentry_sdk.integrations.asgi import SentryAsgiMiddleware
+
+try:
+    from unittest import mock  # python 3.3 and above
+except ImportError:
+    import mock  # python < 3.3
+
+from sentry_sdk import capture_message
+from sentry_sdk.integrations.starlette import (
+    StarletteIntegration,
+    StarletteRequestExtractor,
+)
+
+starlette = pytest.importorskip("starlette")
+from starlette.authentication import (
+    AuthCredentials,
+    AuthenticationBackend,
+    AuthenticationError,
+    SimpleUser,
+)
+from starlette.middleware import Middleware
+from starlette.middleware.authentication import AuthenticationMiddleware
+from starlette.testclient import TestClient
+
+STARLETTE_VERSION = tuple([int(x) for x in starlette.__version__.split(".")])
+
+PICTURE = os.path.join(os.path.dirname(os.path.abspath(__file__)), "photo.jpg")
+
+BODY_JSON = {"some": "json", "for": "testing", "nested": {"numbers": 123}}
+
+BODY_FORM = """--fd721ef49ea403a6\r\nContent-Disposition: form-data; name="username"\r\n\r\nJane\r\n--fd721ef49ea403a6\r\nContent-Disposition: form-data; name="password"\r\n\r\nhello123\r\n--fd721ef49ea403a6\r\nContent-Disposition: form-data; name="photo"; filename="photo.jpg"\r\nContent-Type: image/jpg\r\nContent-Transfer-Encoding: base64\r\n\r\n{{image_data}}\r\n--fd721ef49ea403a6--\r\n""".replace(
+    "{{image_data}}", str(base64.b64encode(open(PICTURE, "rb").read()))
+)
+
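+# Canned ASGI receive() payloads: the request body in one chunk, followed by
+# a disconnect message.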
+FORM_RECEIVE_MESSAGES = [
+    {"type": "http.request", "body": BODY_FORM.encode("utf-8")},
+    {"type": "http.disconnect"},
+]
+
+JSON_RECEIVE_MESSAGES = [
+    {"type": "http.request", "body": json.dumps(BODY_JSON).encode("utf-8")},
+    {"type": "http.disconnect"},
+]
+
+PARSED_FORM = starlette.datastructures.FormData(
+    [
+        ("username", "Jane"),
+        ("password", "hello123"),
+        (
+            "photo",
+            starlette.datastructures.UploadFile(
+                filename="photo.jpg",
+                file=open(PICTURE, "rb"),
+                content_type="image/jpeg",
+            ),
+        ),
+    ]
+)
+
+# Dummy ASGI scope for creating mock Starlette requests
+SCOPE = {
+    "client": ("172.29.0.10", 34784),
+    "headers": [
+        [b"host", b"example.com"],
+        [b"user-agent", b"Mozilla/5.0 Gecko/20100101 Firefox/60.0"],
+        [b"content-type", b"application/json"],
+        [b"accept-language", b"en-US,en;q=0.5"],
+        [b"accept-encoding", b"gzip, deflate, br"],
+        [b"upgrade-insecure-requests", b"1"],
+        [b"cookie", b"yummy_cookie=choco; tasty_cookie=strawberry"],
+    ],
+    "http_version": "0.0",
+    "method": "GET",
+    "path": "/path",
+    "query_string": b"qs=hello",
+    "scheme": "http",
+    "server": ("172.28.0.10", 8000),
+    "type": "http",
+}
+
+
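+# Wraps a canned message in a coroutine so mock.Mock(side_effect=...) can
+# mimic Starlette's async `_receive()` (each call returns an awaitable).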
+async def _mock_receive(msg):
+    return msg
+
+
+def starlette_app_factory(middleware=None, debug=True):
+    async def _homepage(request):
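+        # Deliberately raises ZeroDivisionError; the return below is unreachable.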
+        1 / 0
+        return starlette.responses.JSONResponse({"status": "ok"})
+
+    async def _custom_error(request):
+        raise Exception("Too Hot")
+
+    async def _message(request):
+        capture_message("hi")
+        return starlette.responses.JSONResponse({"status": "ok"})
+
+    async def _message_with_id(request):
+        capture_message("hi")
+        return starlette.responses.JSONResponse({"status": "ok"})
+
+    def _thread_ids_sync(request):
+        return starlette.responses.JSONResponse(
+            {
+                "main": threading.main_thread().ident,
+                "active": threading.current_thread().ident,
+            }
+        )
+
+    async def _thread_ids_async(request):
+        return starlette.responses.JSONResponse(
+            {
+                "main": threading.main_thread().ident,
+                "active": threading.current_thread().ident,
+            }
+        )
+
+    app = starlette.applications.Starlette(
+        debug=debug,
+        routes=[
+            starlette.routing.Route("/some_url", _homepage),
+            starlette.routing.Route("/custom_error", _custom_error),
+            starlette.routing.Route("/message", _message),
+            starlette.routing.Route("/message/{message_id}", _message_with_id),
+            starlette.routing.Route("/sync/thread_ids", _thread_ids_sync),
+            starlette.routing.Route("/async/thread_ids", _thread_ids_async),
+        ],
+        middleware=middleware,
+    )
+
+    return app
+
+
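+# Returns an already-resolved Future so plain values can be awaited in mocks.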
+def async_return(result):
+    f = asyncio.Future()
+    f.set_result(result)
+    return f
+
+
+class BasicAuthBackend(AuthenticationBackend):
+    async def authenticate(self, conn):
+        if "Authorization" not in conn.headers:
+            return
+
+        auth = conn.headers["Authorization"]
+        try:
+            scheme, credentials = auth.split()
+            if scheme.lower() != "basic":
+                return
+            decoded = base64.b64decode(credentials).decode("ascii")
+        except (ValueError, UnicodeDecodeError):
+            raise AuthenticationError("Invalid basic auth credentials")
+
+        username, _, password = decoded.partition(":")
+
+        # TODO: You'd want to verify the username and password here.
+
+        return AuthCredentials(["authenticated"]), SimpleUser(username)
+
+
+class AsyncIterator:
+    def __init__(self, data):
+        self.iter = iter(bytes(data, "utf-8"))
+
+    def __aiter__(self):
+        return self
+
+    async def __anext__(self):
+        try:
+            return bytes([next(self.iter)])
+        except StopIteration:
+            raise StopAsyncIteration
+
+
+class SampleMiddleware:
+    def __init__(self, app):
+        self.app = app
+
+    async def __call__(self, scope, receive, send):
+        # only handle http requests
+        if scope["type"] != "http":
+            await self.app(scope, receive, send)
+            return
+
+        async def do_stuff(message):
+            if message["type"] == "http.response.start":
+                # do something here.
+                pass
+
+            await send(message)
+
+        await self.app(scope, receive, do_stuff)
+
+
+class SampleReceiveSendMiddleware:
+    def __init__(self, app):
+        self.app = app
+
+    async def __call__(self, scope, receive, send):
+        message = await receive()
+        assert message
+        assert message["type"] == "http.request"
+
+        send_output = await send({"type": "something-unimportant"})
+        assert send_output is None
+
+        await self.app(scope, receive, send)
+
+
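+# Like SampleReceiveSendMiddleware, but hands functools.partial-wrapped
+# receive/send callables downstream, so the integration is exercised with
+# callables that lack regular function attributes.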
+class SamplePartialReceiveSendMiddleware:
+    def __init__(self, app):
+        self.app = app
+
+    async def __call__(self, scope, receive, send):
+        message = await receive()
+        assert message
+        assert message["type"] == "http.request"
+
+        send_output = await send({"type": "something-unimportant"})
+        assert send_output is None
+
+        async def my_receive(*args, **kwargs):
+            pass
+
+        async def my_send(*args, **kwargs):
+            pass
+
+        partial_receive = functools.partial(my_receive)
+        partial_send = functools.partial(my_send)
+
+        await self.app(scope, partial_receive, partial_send)
+
+
+@pytest.mark.asyncio
+async def test_starlettrequestextractor_content_length(sentry_init):
+    scope = SCOPE.copy()
+    scope["headers"] = [
+        [b"content-length", str(len(json.dumps(BODY_JSON))).encode()],
+    ]
+    starlette_request = starlette.requests.Request(scope)
+    extractor = StarletteRequestExtractor(starlette_request)
+
+    assert await extractor.content_length() == len(json.dumps(BODY_JSON))
+
+
+@pytest.mark.asyncio
+async def test_starlettrequestextractor_cookies(sentry_init):
+    starlette_request = starlette.requests.Request(SCOPE)
+    extractor = StarletteRequestExtractor(starlette_request)
+
+    assert extractor.cookies() == {
+        "tasty_cookie": "strawberry",
+        "yummy_cookie": "choco",
+    }
+
+
+@pytest.mark.asyncio
+async def test_starlettrequestextractor_json(sentry_init):
+    starlette_request = starlette.requests.Request(SCOPE)
+
+    # Mocking async `_receive()` that works in Python 3.7+
+    side_effect = [_mock_receive(msg) for msg in JSON_RECEIVE_MESSAGES]
+    starlette_request._receive = mock.Mock(side_effect=side_effect)
+
+    extractor = StarletteRequestExtractor(starlette_request)
+
+    assert extractor.is_json()
+    assert await extractor.json() == BODY_JSON
+
+
+@pytest.mark.asyncio
+async def test_starlettrequestextractor_form(sentry_init):
+    scope = SCOPE.copy()
+    scope["headers"] = [
+        [b"content-type", b"multipart/form-data; boundary=fd721ef49ea403a6"],
+    ]
+    # TODO add test for content-type: "application/x-www-form-urlencoded"
+
+    starlette_request = starlette.requests.Request(scope)
+
+    # Mocking async `_receive()` that works in Python 3.7+
+    side_effect = [_mock_receive(msg) for msg in FORM_RECEIVE_MESSAGES]
+    starlette_request._receive = mock.Mock(side_effect=side_effect)
+
+    extractor = StarletteRequestExtractor(starlette_request)
+
+    form_data = await extractor.form()
+    assert form_data.keys() == PARSED_FORM.keys()
+    assert form_data["username"] == PARSED_FORM["username"]
+    assert form_data["password"] == PARSED_FORM["password"]
+    assert form_data["photo"].filename == PARSED_FORM["photo"].filename
+
+    # Make sure we can still read the body
+    # after already reading it with extractor.form() above.
+    body = await extractor.request.body()
+    assert body
+
+
+@pytest.mark.asyncio
+async def test_starlettrequestextractor_body_consumed_twice(
+    sentry_init, capture_events
+):
+    """
+    Starlette caches the request body when you read it via `request.json()`
+    or `request.body()`, but it does NOT cache it when you use `request.form()`.
+    This creates an edge case: the Sentry Starlette integration reads the body
+    using `.form()` and the user then wants to read it using `.body()`, but the
+    underlying stream cannot be consumed twice and is not cached.
+
+    We have fixed this in `StarletteRequestExtractor.form()` by consuming the body
+    first with `.body()` (to put it into the `_body` cache) and then consuming it
+    with `.form()`.
+
+    If this behavior changes in Starlette and `request.form()` also starts caching
+    the body, this test will fail.
+
+    See also https://github.com/encode/starlette/discussions/1933
+    """
+    scope = SCOPE.copy()
+    scope["headers"] = [
+        [b"content-type", b"multipart/form-data; boundary=fd721ef49ea403a6"],
+    ]
+
+    starlette_request = starlette.requests.Request(scope)
+
+    # Mocking async `_receive()` that works in Python 3.7+
+    side_effect = [_mock_receive(msg) for msg in FORM_RECEIVE_MESSAGES]
+    starlette_request._receive = mock.Mock(side_effect=side_effect)
+
+    extractor = StarletteRequestExtractor(starlette_request)
+
+    await extractor.request.form()
+
+    with pytest.raises(RuntimeError):
+        await extractor.request.body()
+
+
+@pytest.mark.asyncio
+async def test_starlettrequestextractor_extract_request_info_too_big(sentry_init):
+    sentry_init(
+        send_default_pii=True,
+        integrations=[StarletteIntegration()],
+    )
+    scope = SCOPE.copy()
+    scope["headers"] = [
+        [b"content-type", b"multipart/form-data; boundary=fd721ef49ea403a6"],
+        [b"content-length", str(len(BODY_FORM)).encode()],
+        [b"cookie", b"yummy_cookie=choco; tasty_cookie=strawberry"],
+    ]
+    starlette_request = starlette.requests.Request(scope)
+
+    # Mocking async `_receive()` that works in Python 3.7+
+    side_effect = [_mock_receive(msg) for msg in FORM_RECEIVE_MESSAGES]
+    starlette_request._receive = mock.Mock(side_effect=side_effect)
+
+    extractor = StarletteRequestExtractor(starlette_request)
+
+    request_info = await extractor.extract_request_info()
+
+    assert request_info
+    assert request_info["cookies"] == {
+        "tasty_cookie": "strawberry",
+        "yummy_cookie": "choco",
+    }
+    # Because the request body is too big, only an AnnotatedValue is extracted.
+    # (In Sentry's meta format, "rem" records removed values: "!config" means
+    # the configured size limit was exceeded, "x" means the whole value was
+    # removed.)
+    assert request_info["data"].metadata == {"rem": [["!config", "x"]]}
+
+
+@pytest.mark.asyncio
+async def test_starlettrequestextractor_extract_request_info(sentry_init):
+    sentry_init(
+        send_default_pii=True,
+        integrations=[StarletteIntegration()],
+    )
+    scope = SCOPE.copy()
+    scope["headers"] = [
+        [b"content-type", b"application/json"],
+        [b"content-length", str(len(json.dumps(BODY_JSON))).encode()],
+        [b"cookie", b"yummy_cookie=choco; tasty_cookie=strawberry"],
+    ]
+
+    starlette_request = starlette.requests.Request(scope)
+
+    # Mocking async `_receive()` that works in Python 3.7+
+    side_effect = [_mock_receive(msg) for msg in JSON_RECEIVE_MESSAGES]
+    starlette_request._receive = mock.Mock(side_effect=side_effect)
+
+    extractor = StarletteRequestExtractor(starlette_request)
+
+    request_info = await extractor.extract_request_info()
+
+    assert request_info
+    assert request_info["cookies"] == {
+        "tasty_cookie": "strawberry",
+        "yummy_cookie": "choco",
+    }
+    assert request_info["data"] == BODY_JSON
+
+
+@pytest.mark.asyncio
+async def test_starlettrequestextractor_extract_request_info_no_pii(sentry_init):
+    sentry_init(
+        send_default_pii=False,
+        integrations=[StarletteIntegration()],
+    )
+    scope = SCOPE.copy()
+    scope["headers"] = [
+        [b"content-type", b"application/json"],
+        [b"content-length", str(len(json.dumps(BODY_JSON))).encode()],
+        [b"cookie", b"yummy_cookie=choco; tasty_cookie=strawberry"],
+    ]
+
+    starlette_request = starlette.requests.Request(scope)
+
+    # Mocking async `_receive()` that works in Python 3.7+
+    side_effect = [_mock_receive(msg) for msg in JSON_RECEIVE_MESSAGES]
+    starlette_request._receive = mock.Mock(side_effect=side_effect)
+
+    extractor = StarletteRequestExtractor(starlette_request)
+
+    request_info = await extractor.extract_request_info()
+
+    assert request_info
+    assert "cookies" not in request_info
+    assert request_info["data"] == BODY_JSON
+
+
+@pytest.mark.parametrize(
+    "url,transaction_style,expected_transaction,expected_source",
+    [
+        (
+            "/message",
+            "url",
+            "/message",
+            "route",
+        ),
+        (
+            "/message",
+            "endpoint",
+            "tests.integrations.starlette.test_starlette.starlette_app_factory.._message",
+            "component",
+        ),
+        (
+            "/message/123456",
+            "url",
+            "/message/{message_id}",
+            "route",
+        ),
+        (
+            "/message/123456",
+            "endpoint",
+            "tests.integrations.starlette.test_starlette.starlette_app_factory.._message_with_id",
+            "component",
+        ),
+    ],
+)
+def test_transaction_style(
+    sentry_init,
+    capture_events,
+    url,
+    transaction_style,
+    expected_transaction,
+    expected_source,
+):
+    sentry_init(
+        integrations=[StarletteIntegration(transaction_style=transaction_style)],
+    )
+    starlette_app = starlette_app_factory()
+
+    events = capture_events()
+
+    client = TestClient(starlette_app)
+    client.get(url)
+
+    (event,) = events
+    assert event["transaction"] == expected_transaction
+    assert event["transaction_info"] == {"source": expected_source}
+
+
+@pytest.mark.parametrize(
+    "test_url,expected_error,expected_message",
+    [
+        ("/some_url", ZeroDivisionError, "division by zero"),
+        ("/custom_error", Exception, "Too Hot"),
+    ],
+)
+def test_catch_exceptions(
+    sentry_init,
+    capture_exceptions,
+    capture_events,
+    test_url,
+    expected_error,
+    expected_message,
+):
+    sentry_init(integrations=[StarletteIntegration()])
+    starlette_app = starlette_app_factory()
+    exceptions = capture_exceptions()
+    events = capture_events()
+
+    client = TestClient(starlette_app)
+    try:
+        client.get(test_url)
+    except Exception:
+        pass
+
+    (exc,) = exceptions
+    assert isinstance(exc, expected_error)
+    assert str(exc) == expected_message
+
+    (event,) = events
+    assert event["exception"]["values"][0]["mechanism"]["type"] == "starlette"
+
+
+def test_user_information_error(sentry_init, capture_events):
+    sentry_init(
+        send_default_pii=True,
+        integrations=[StarletteIntegration()],
+    )
+    starlette_app = starlette_app_factory(
+        middleware=[Middleware(AuthenticationMiddleware, backend=BasicAuthBackend())]
+    )
+    events = capture_events()
+
+    client = TestClient(starlette_app, raise_server_exceptions=False)
+    try:
+        client.get("/custom_error", auth=("Gabriela", "hello123"))
+    except Exception:
+        pass
+
+    (event,) = events
+    user = event.get("user", None)
+    assert user
+    assert "username" in user
+    assert user["username"] == "Gabriela"
+
+
+def test_user_information_error_no_pii(sentry_init, capture_events):
+    sentry_init(
+        send_default_pii=False,
+        integrations=[StarletteIntegration()],
+    )
+    starlette_app = starlette_app_factory(
+        middleware=[Middleware(AuthenticationMiddleware, backend=BasicAuthBackend())]
+    )
+    events = capture_events()
+
+    client = TestClient(starlette_app, raise_server_exceptions=False)
+    try:
+        client.get("/custom_error", auth=("Gabriela", "hello123"))
+    except Exception:
+        pass
+
+    (event,) = events
+    assert "user" not in event
+
+
+def test_user_information_transaction(sentry_init, capture_events):
+    sentry_init(
+        traces_sample_rate=1.0,
+        send_default_pii=True,
+        integrations=[StarletteIntegration()],
+    )
+    starlette_app = starlette_app_factory(
+        middleware=[Middleware(AuthenticationMiddleware, backend=BasicAuthBackend())]
+    )
+    events = capture_events()
+
+    client = TestClient(starlette_app, raise_server_exceptions=False)
+    client.get("/message", auth=("Gabriela", "hello123"))
+
+    (_, transaction_event) = events
+    user = transaction_event.get("user", None)
+    assert user
+    assert "username" in user
+    assert user["username"] == "Gabriela"
+
+
+def test_user_information_transaction_no_pii(sentry_init, capture_events):
+    sentry_init(
+        traces_sample_rate=1.0,
+        send_default_pii=False,
+        integrations=[StarletteIntegration()],
+    )
+    starlette_app = starlette_app_factory(
+        middleware=[Middleware(AuthenticationMiddleware, backend=BasicAuthBackend())]
+    )
+    events = capture_events()
+
+    client = TestClient(starlette_app, raise_server_exceptions=False)
+    client.get("/message", auth=("Gabriela", "hello123"))
+
+    (_, transaction_event) = events
+    assert "user" not in transaction_event
+
+
+def test_middleware_spans(sentry_init, capture_events):
+    sentry_init(
+        traces_sample_rate=1.0,
+        integrations=[StarletteIntegration()],
+    )
+    starlette_app = starlette_app_factory(
+        middleware=[Middleware(AuthenticationMiddleware, backend=BasicAuthBackend())]
+    )
+    events = capture_events()
+
+    client = TestClient(starlette_app, raise_server_exceptions=False)
+    try:
+        client.get("/message", auth=("Gabriela", "hello123"))
+    except Exception:
+        pass
+
+    (_, transaction_event) = events
+
+    expected = [
+        "ServerErrorMiddleware",
+        "AuthenticationMiddleware",
+        "ExceptionMiddleware",
+    ]
+
+    idx = 0
+    for span in transaction_event["spans"]:
+        if span["op"] == "middleware.starlette":
+            assert span["description"] == expected[idx]
+            assert span["tags"]["starlette.middleware_name"] == expected[idx]
+            idx += 1
+
+
+def test_middleware_callback_spans(sentry_init, capture_events):
+    sentry_init(
+        traces_sample_rate=1.0,
+        integrations=[StarletteIntegration()],
+    )
+    starlette_app = starlette_app_factory(middleware=[Middleware(SampleMiddleware)])
+    events = capture_events()
+
+    client = TestClient(starlette_app, raise_server_exceptions=False)
+    try:
+        client.get("/message", auth=("Gabriela", "hello123"))
+    except Exception:
+        pass
+
+    (_, transaction_event) = events
+
+    expected = [
+        {
+            "op": "middleware.starlette",
+            "description": "ServerErrorMiddleware",
+            "tags": {"starlette.middleware_name": "ServerErrorMiddleware"},
+        },
+        {
+            "op": "middleware.starlette",
+            "description": "SampleMiddleware",
+            "tags": {"starlette.middleware_name": "SampleMiddleware"},
+        },
+        {
+            "op": "middleware.starlette",
+            "description": "ExceptionMiddleware",
+            "tags": {"starlette.middleware_name": "ExceptionMiddleware"},
+        },
+        {
+            "op": "middleware.starlette.send",
+            "description": "SampleMiddleware.__call__..do_stuff",
+            "tags": {"starlette.middleware_name": "ExceptionMiddleware"},
+        },
+        {
+            "op": "middleware.starlette.send",
+            "description": "ServerErrorMiddleware.__call__.._send",
+            "tags": {"starlette.middleware_name": "SampleMiddleware"},
+        },
+        {
+            "op": "middleware.starlette.send",
+            "description": "_ASGIAdapter.send..send"
+            if STARLETTE_VERSION < (0, 21)
+            else "_TestClientTransport.handle_request..send",
+            "tags": {"starlette.middleware_name": "ServerErrorMiddleware"},
+        },
+        {
+            "op": "middleware.starlette.send",
+            "description": "SampleMiddleware.__call__..do_stuff",
+            "tags": {"starlette.middleware_name": "ExceptionMiddleware"},
+        },
+        {
+            "op": "middleware.starlette.send",
+            "description": "ServerErrorMiddleware.__call__.._send",
+            "tags": {"starlette.middleware_name": "SampleMiddleware"},
+        },
+        {
+            "op": "middleware.starlette.send",
+            "description": "_ASGIAdapter.send..send"
+            if STARLETTE_VERSION < (0, 21)
+            else "_TestClientTransport.handle_request..send",
+            "tags": {"starlette.middleware_name": "ServerErrorMiddleware"},
+        },
+    ]
+
+    idx = 0
+    for span in transaction_event["spans"]:
+        assert span["op"] == expected[idx]["op"]
+        assert span["description"] == expected[idx]["description"]
+        assert span["tags"] == expected[idx]["tags"]
+        idx += 1
+
+
+def test_middleware_receive_send(sentry_init, capture_events):
+    sentry_init(
+        traces_sample_rate=1.0,
+        integrations=[StarletteIntegration()],
+    )
+    starlette_app = starlette_app_factory(
+        middleware=[Middleware(SampleReceiveSendMiddleware)]
+    )
+
+    client = TestClient(starlette_app, raise_server_exceptions=False)
+    try:
+        # NOTE: the assert statements checking
+        # for correct behaviour are in `SampleReceiveSendMiddleware`!
+        client.get("/message", auth=("Gabriela", "hello123"))
+    except Exception:
+        pass
+
+
+def test_middleware_partial_receive_send(sentry_init, capture_events):
+    sentry_init(
+        traces_sample_rate=1.0,
+        integrations=[StarletteIntegration()],
+    )
+    starlette_app = starlette_app_factory(
+        middleware=[Middleware(SamplePartialReceiveSendMiddleware)]
+    )
+    events = capture_events()
+
+    client = TestClient(starlette_app, raise_server_exceptions=False)
+    try:
+        client.get("/message", auth=("Gabriela", "hello123"))
+    except Exception:
+        pass
+
+    (_, transaction_event) = events
+
+    expected = [
+        {
+            "op": "middleware.starlette",
+            "description": "ServerErrorMiddleware",
+            "tags": {"starlette.middleware_name": "ServerErrorMiddleware"},
+        },
+        {
+            "op": "middleware.starlette",
+            "description": "SamplePartialReceiveSendMiddleware",
+            "tags": {"starlette.middleware_name": "SamplePartialReceiveSendMiddleware"},
+        },
+        {
+            "op": "middleware.starlette.receive",
+            "description": "_ASGIAdapter.send..receive"
+            if STARLETTE_VERSION < (0, 21)
+            else "_TestClientTransport.handle_request..receive",
+            "tags": {"starlette.middleware_name": "ServerErrorMiddleware"},
+        },
+        {
+            "op": "middleware.starlette.send",
+            "description": "ServerErrorMiddleware.__call__.._send",
+            "tags": {"starlette.middleware_name": "SamplePartialReceiveSendMiddleware"},
+        },
+        {
+            "op": "middleware.starlette.send",
+            "description": "_ASGIAdapter.send..send"
+            if STARLETTE_VERSION < (0, 21)
+            else "_TestClientTransport.handle_request..send",
+            "tags": {"starlette.middleware_name": "ServerErrorMiddleware"},
+        },
+        {
+            "op": "middleware.starlette",
+            "description": "ExceptionMiddleware",
+            "tags": {"starlette.middleware_name": "ExceptionMiddleware"},
+        },
+        {
+            "op": "middleware.starlette.send",
+            "description": "functools.partial(.my_send at ",
+            "tags": {"starlette.middleware_name": "ExceptionMiddleware"},
+        },
+        {
+            "op": "middleware.starlette.send",
+            "description": "functools.partial(.my_send at ",
+            "tags": {"starlette.middleware_name": "ExceptionMiddleware"},
+        },
+    ]
+
+    idx = 0
+    for span in transaction_event["spans"]:
+        assert span["op"] == expected[idx]["op"]
+        assert span["description"].startswith(expected[idx]["description"])
+        assert span["tags"] == expected[idx]["tags"]
+        idx += 1
+
+
+def test_last_event_id(sentry_init, capture_events):
+    sentry_init(
+        integrations=[StarletteIntegration()],
+    )
+    events = capture_events()
+
+    def handler(request, exc):
+        capture_exception(exc)
+        return starlette.responses.PlainTextResponse(last_event_id(), status_code=500)
+
+    app = starlette_app_factory(debug=False)
+    app.add_exception_handler(500, handler)
+
+    client = TestClient(SentryAsgiMiddleware(app), raise_server_exceptions=False)
+    response = client.get("/custom_error")
+    assert response.status_code == 500
+
+    event = events[0]
+    assert response.content.strip().decode("ascii") == event["event_id"]
+    (exception,) = event["exception"]["values"]
+    assert exception["type"] == "Exception"
+    assert exception["value"] == "Too Hot"
+
+
+def test_legacy_setup(
+    sentry_init,
+    capture_events,
+):
+    # Check that behaviour does not change
+    # if the user just adds the new Integration
+    # and forgets to remove SentryAsgiMiddleware
+    sentry_init()
+    app = starlette_app_factory()
+    asgi_app = SentryAsgiMiddleware(app)
+
+    events = capture_events()
+
+    client = TestClient(asgi_app)
+    client.get("/message/123456")
+
+    (event,) = events
+    assert event["transaction"] == "/message/{message_id}"
+
+
+@pytest.mark.parametrize("endpoint", ["/sync/thread_ids", "/async/thread_ids"])
+def test_active_thread_id(sentry_init, capture_envelopes, endpoint):
+    sentry_init(
+        traces_sample_rate=1.0,
+        _experiments={"profiles_sample_rate": 1.0},
+    )
+    app = starlette_app_factory()
+    asgi_app = SentryAsgiMiddleware(app)
+
+    envelopes = capture_envelopes()
+
+    client = TestClient(asgi_app)
+    response = client.get(endpoint)
+    assert response.status_code == 200
+
+    data = json.loads(response.content)
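+    # The endpoint reports the thread id it handled the request on; this must
+    # match the active_thread_id recorded in the profile.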
+
+    envelopes = list(envelopes)
+    assert len(envelopes) == 1
+
+    profiles = [item for item in envelopes[0].items if item.type == "profile"]
+    assert len(profiles) == 1
+
+    for profile in profiles:
+        transactions = profile.payload.json["transactions"]
+        assert len(transactions) == 1
+        assert str(data["active"]) == transactions[0]["active_thread_id"]
diff --git a/tests/integrations/starlite/__init__.py b/tests/integrations/starlite/__init__.py
new file mode 100644
index 0000000000..4c1037671d
--- /dev/null
+++ b/tests/integrations/starlite/__init__.py
@@ -0,0 +1,3 @@
+import pytest
+
+pytest.importorskip("starlite")
diff --git a/tests/integrations/starlite/test_starlite.py b/tests/integrations/starlite/test_starlite.py
new file mode 100644
index 0000000000..603697ce8b
--- /dev/null
+++ b/tests/integrations/starlite/test_starlite.py
@@ -0,0 +1,325 @@
+import functools
+
+import pytest
+
+from sentry_sdk import capture_exception, capture_message, last_event_id
+from sentry_sdk.integrations.starlite import StarliteIntegration
+
+starlite = pytest.importorskip("starlite")
+
+from typing import Any, Dict
+
+from starlite import AbstractMiddleware, LoggingConfig, Starlite, get, Controller
+from starlite.middleware import LoggingMiddlewareConfig, RateLimitConfig
+from starlite.middleware.session.memory_backend import MemoryBackendConfig
+from starlite.status_codes import HTTP_500_INTERNAL_SERVER_ERROR
+from starlite.testing import TestClient
+
+
+class SampleMiddleware(AbstractMiddleware):
+    async def __call__(self, scope, receive, send) -> None:
+        async def do_stuff(message):
+            if message["type"] == "http.response.start":
+                # do something here.
+                pass
+            await send(message)
+
+        await self.app(scope, receive, do_stuff)
+
+
+class SampleReceiveSendMiddleware(AbstractMiddleware):
+    async def __call__(self, scope, receive, send):
+        message = await receive()
+        assert message
+        assert message["type"] == "http.request"
+
+        send_output = await send({"type": "something-unimportant"})
+        assert send_output is None
+
+        await self.app(scope, receive, send)
+
+
+class SamplePartialReceiveSendMiddleware(AbstractMiddleware):
+    async def __call__(self, scope, receive, send):
+        message = await receive()
+        assert message
+        assert message["type"] == "http.request"
+
+        send_output = await send({"type": "something-unimportant"})
+        assert send_output is None
+
+        async def my_receive(*args, **kwargs):
+            pass
+
+        async def my_send(*args, **kwargs):
+            pass
+
+        partial_receive = functools.partial(my_receive)
+        partial_send = functools.partial(my_send)
+
+        await self.app(scope, partial_receive, partial_send)
+
+
+def starlite_app_factory(middleware=None, debug=True, exception_handlers=None):
+    class MyController(Controller):
+        path = "/controller"
+
+        @get("/error")
+        async def controller_error(self) -> None:
+            raise Exception("Whoa")
+
+    @get("/some_url")
+    async def homepage_handler() -> Dict[str, Any]:
+        1 / 0  # deliberately raise ZeroDivisionError
+        return {"status": "ok"}
+
+    @get("/custom_error", name="custom_name")
+    async def custom_error() -> Any:
+        raise Exception("Too Hot")
+
+    @get("/message")
+    async def message() -> Dict[str, Any]:
+        capture_message("hi")
+        return {"status": "ok"}
+
+    @get("/message/{message_id:str}")
+    async def message_with_id() -> Dict[str, Any]:
+        capture_message("hi")
+        return {"status": "ok"}
+
+    logging_config = LoggingConfig()
+
+    app = Starlite(
+        route_handlers=[
+            homepage_handler,
+            custom_error,
+            message,
+            message_with_id,
+            MyController,
+        ],
+        debug=debug,
+        middleware=middleware,
+        logging_config=logging_config,
+        exception_handlers=exception_handlers,
+    )
+
+    return app
+
+
+@pytest.mark.parametrize(
+    "test_url,expected_error,expected_message,expected_tx_name",
+    [
+        (
+            "/some_url",
+            ZeroDivisionError,
+            "division by zero",
+            "tests.integrations.starlite.test_starlite.starlite_app_factory..homepage_handler",
+        ),
+        (
+            "/custom_error",
+            Exception,
+            "Too Hot",
+            "custom_name",
+        ),
+        (
+            "/controller/error",
+            Exception,
+            "Whoa",
+            "partial(.MyController.controller_error>)",
+        ),
+    ],
+)
+def test_catch_exceptions(
+    sentry_init,
+    capture_exceptions,
+    capture_events,
+    test_url,
+    expected_error,
+    expected_message,
+    expected_tx_name,
+):
+    sentry_init(integrations=[StarliteIntegration()])
+    starlite_app = starlite_app_factory()
+    exceptions = capture_exceptions()
+    events = capture_events()
+
+    client = TestClient(starlite_app)
+    try:
+        client.get(test_url)
+    except Exception:
+        pass
+
+    (exc,) = exceptions
+    assert isinstance(exc, expected_error)
+    assert str(exc) == expected_message
+
+    (event,) = events
+    assert event["exception"]["values"][0]["mechanism"]["type"] == "starlite"
+    assert event["transaction"] == expected_tx_name
+
+
+def test_middleware_spans(sentry_init, capture_events):
+    sentry_init(
+        traces_sample_rate=1.0,
+        integrations=[StarliteIntegration()],
+    )
+
+    logging_config = LoggingMiddlewareConfig()
+    session_config = MemoryBackendConfig()
+    rate_limit_config = RateLimitConfig(rate_limit=("hour", 5))
+
+    starlite_app = starlite_app_factory(
+        middleware=[
+            session_config.middleware,
+            logging_config.middleware,
+            rate_limit_config.middleware,
+        ]
+    )
+    events = capture_events()
+
+    client = TestClient(
+        starlite_app, raise_server_exceptions=False, base_url="http://testserver.local"
+    )
+    try:
+        client.get("/message")
+    except Exception:
+        pass
+
+    (_, transaction_event) = events
+
+    expected = ["SessionMiddleware", "LoggingMiddleware", "RateLimitMiddleware"]
+
+    idx = 0
+    for span in transaction_event["spans"]:
+        if span["op"] == "middleware.starlite":
+            assert span["description"] == expected[idx]
+            assert span["tags"]["starlite.middleware_name"] == expected[idx]
+            idx += 1
+
+
+def test_middleware_callback_spans(sentry_init, capture_events):
+    sentry_init(
+        traces_sample_rate=1.0,
+        integrations=[StarliteIntegration()],
+    )
+    starlette_app = starlite_app_factory(middleware=[SampleMiddleware])
+    events = capture_events()
+
+    client = TestClient(starlette_app, raise_server_exceptions=False)
+    try:
+        client.get("/message")
+    except Exception:
+        pass
+
+    (_, transaction_event) = events
+
+    expected = [
+        {
+            "op": "middleware.starlite",
+            "description": "SampleMiddleware",
+            "tags": {"starlite.middleware_name": "SampleMiddleware"},
+        },
+        {
+            "op": "middleware.starlite.send",
+            "description": "TestClientTransport.create_send..send",
+            "tags": {"starlite.middleware_name": "SampleMiddleware"},
+        },
+        {
+            "op": "middleware.starlite.send",
+            "description": "TestClientTransport.create_send..send",
+            "tags": {"starlite.middleware_name": "SampleMiddleware"},
+        },
+    ]
+
+    idx = 0
+    for span in transaction_event["spans"]:
+        assert span["op"] == expected[idx]["op"]
+        assert span["description"] == expected[idx]["description"]
+        assert span["tags"] == expected[idx]["tags"]
+        idx += 1
+
+
+def test_middleware_receive_send(sentry_init, capture_events):
+    sentry_init(
+        traces_sample_rate=1.0,
+        integrations=[StarliteIntegration()],
+    )
+    starlette_app = starlite_app_factory(middleware=[SampleReceiveSendMiddleware])
+
+    client = TestClient(starlette_app, raise_server_exceptions=False)
+    try:
+        # NOTE: the assert statements checking
+        # for correct behaviour are in `SampleReceiveSendMiddleware`!
+        client.get("/message")
+    except Exception:
+        pass
+
+
+def test_middleware_partial_receive_send(sentry_init, capture_events):
+    sentry_init(
+        traces_sample_rate=1.0,
+        integrations=[StarliteIntegration()],
+    )
+    starlette_app = starlite_app_factory(
+        middleware=[SamplePartialReceiveSendMiddleware]
+    )
+    events = capture_events()
+
+    client = TestClient(starlette_app, raise_server_exceptions=False)
+    try:
+        client.get("/message")
+    except Exception:
+        pass
+
+    (_, transaction_event) = events
+
+    expected = [
+        {
+            "op": "middleware.starlite",
+            "description": "SamplePartialReceiveSendMiddleware",
+            "tags": {"starlite.middleware_name": "SamplePartialReceiveSendMiddleware"},
+        },
+        {
+            "op": "middleware.starlite.receive",
+            "description": "TestClientTransport.create_receive..receive",
+            "tags": {"starlite.middleware_name": "SamplePartialReceiveSendMiddleware"},
+        },
+        {
+            "op": "middleware.starlite.send",
+            "description": "TestClientTransport.create_send..send",
+            "tags": {"starlite.middleware_name": "SamplePartialReceiveSendMiddleware"},
+        },
+    ]
+
+    idx = 0
+    for span in transaction_event["spans"]:
+        assert span["op"] == expected[idx]["op"]
+        assert span["description"].startswith(expected[idx]["description"])
+        assert span["tags"] == expected[idx]["tags"]
+        idx += 1
+
+
+def test_last_event_id(sentry_init, capture_events):
+    sentry_init(
+        integrations=[StarliteIntegration()],
+    )
+    events = capture_events()
+
+    def handler(request, exc):
+        capture_exception(exc)
+        return starlite.response.Response(last_event_id(), status_code=500)
+
+    app = starlite_app_factory(
+        debug=False, exception_handlers={HTTP_500_INTERNAL_SERVER_ERROR: handler}
+    )
+
+    client = TestClient(app, raise_server_exceptions=False)
+    response = client.get("/custom_error")
+    assert response.status_code == 500
+    event = events[-1]
+    assert response.content.strip().decode("ascii").strip('"') == event["event_id"]
+    (exception,) = event["exception"]["values"]
+    assert exception["type"] == "Exception"
+    assert exception["value"] == "Too Hot"
diff --git a/tests/integrations/stdlib/test_httplib.py b/tests/integrations/stdlib/test_httplib.py
index be3d85e008..952bcca371 100644
--- a/tests/integrations/stdlib/test_httplib.py
+++ b/tests/integrations/stdlib/test_httplib.py
@@ -1,19 +1,29 @@
 import platform
 import sys
-
+import random
 import pytest
 
 try:
+    # py3
     from urllib.request import urlopen
 except ImportError:
+    # py2
     from urllib import urlopen
 
 try:
-    from httplib import HTTPSConnection
+    # py2
+    from httplib import HTTPConnection, HTTPSConnection
+except ImportError:
+    # py3
+    from http.client import HTTPConnection, HTTPSConnection
+
+try:
+    from unittest import mock  # python 3.3 and above
 except ImportError:
-    from http.client import HTTPSConnection
+    import mock  # python < 3.3
 
-from sentry_sdk import capture_message
+from sentry_sdk import capture_message, start_transaction
+from sentry_sdk.tracing import Transaction
 from sentry_sdk.integrations.stdlib import StdlibIntegration
 
 
@@ -27,7 +37,7 @@ def test_crumb_capture(sentry_init, capture_events):
     capture_message("Testing!")
 
     (event,) = events
-    (crumb,) = event["breadcrumbs"]
+    (crumb,) = event["breadcrumbs"]["values"]
     assert crumb["type"] == "http"
     assert crumb["category"] == "httplib"
     assert crumb["data"] == {
@@ -52,7 +62,7 @@ def before_breadcrumb(crumb, hint):
     capture_message("Testing!")
 
     (event,) = events
-    (crumb,) = event["breadcrumbs"]
+    (crumb,) = event["breadcrumbs"]["values"]
     assert crumb["type"] == "http"
     assert crumb["category"] == "httplib"
     assert crumb["data"] == {
@@ -67,7 +77,17 @@ def before_breadcrumb(crumb, hint):
         assert sys.getrefcount(response) == 2
 
 
-def test_httplib_misuse(sentry_init, capture_events):
+def test_empty_realurl(sentry_init, capture_events):
+    """
+    Ensure that after calling sentry_sdk.init you can still call
+    `putrequest` with a `None` url.
+    """
+
+    sentry_init(dsn="")
+    HTTPConnection("httpbin.org", port=443).putrequest("POST", None)
+
+
+def test_httplib_misuse(sentry_init, capture_events, request):
     """HTTPConnection.getresponse must be called after every call to
     HTTPConnection.request. However, if somebody does not abide by
     this contract, we still should handle this gracefully and not
@@ -81,6 +101,10 @@ def test_httplib_misuse(sentry_init, capture_events):
     events = capture_events()
 
     conn = HTTPSConnection("httpbin.org", 443)
+
+    # make sure we release the resource, even if the test fails
+    request.addfinalizer(conn.close)
+
     conn.request("GET", "/anything/foo")
 
     with pytest.raises(Exception):
@@ -96,7 +120,7 @@ def test_httplib_misuse(sentry_init, capture_events):
     capture_message("Testing!")
 
     (event,) = events
-    (crumb,) = event["breadcrumbs"]
+    (crumb,) = event["breadcrumbs"]["values"]
 
     assert crumb["type"] == "http"
     assert crumb["category"] == "httplib"
@@ -106,3 +130,100 @@ def test_httplib_misuse(sentry_init, capture_events):
         "status_code": 200,
         "reason": "OK",
     }
+
+
+def test_outgoing_trace_headers(sentry_init, monkeypatch):
+    # HTTPSConnection.send is passed a string containing (among other things)
+    # the headers on the request. Mock it so we can check the headers, and also
+    # so it doesn't try to actually talk to the internet.
+    mock_send = mock.Mock()
+    monkeypatch.setattr(HTTPSConnection, "send", mock_send)
+
+    sentry_init(traces_sample_rate=1.0)
+
+    headers = {}
+    headers["baggage"] = (
+        "other-vendor-value-1=foo;bar;baz, sentry-trace_id=771a43a4192642f0b136d5159a501700, "
+        "sentry-public_key=49d0f7386ad645858ae85020e393bef3, sentry-sample_rate=0.01337, "
+        "sentry-user_id=Am%C3%A9lie, other-vendor-value-2=foo;bar;"
+    )
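+    # As asserted below, only the incoming `sentry-*` baggage entries are
+    # expected on the outgoing request; the `other-vendor-*` entries are
+    # not forwarded.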
+
+    transaction = Transaction.continue_from_headers(headers)
+
+    with start_transaction(
+        transaction=transaction,
+        name="/interactions/other-dogs/new-dog",
+        op="greeting.sniff",
+        trace_id="12312012123120121231201212312012",
+    ) as transaction:
+
+        HTTPSConnection("www.squirrelchasers.com").request("GET", "/top-chasers")
+
+        (request_str,) = mock_send.call_args[0]
+        request_headers = {}
+        for line in request_str.decode("utf-8").split("\r\n")[1:]:
+            if line:
+                key, val = line.split(": ")
+                request_headers[key] = val
+
+        request_span = transaction._span_recorder.spans[-1]
+        expected_sentry_trace = "{trace_id}-{parent_span_id}-{sampled}".format(
+            trace_id=transaction.trace_id,
+            parent_span_id=request_span.span_id,
+            sampled=1,
+        )
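+        # i.e. the outgoing header is "sentry-trace: <trace_id>-<span_id>-<sampled>"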
+        assert request_headers["sentry-trace"] == expected_sentry_trace
+
+        expected_outgoing_baggage_items = [
+            "sentry-trace_id=771a43a4192642f0b136d5159a501700",
+            "sentry-public_key=49d0f7386ad645858ae85020e393bef3",
+            "sentry-sample_rate=0.01337",
+            "sentry-user_id=Am%C3%A9lie",
+        ]
+
+        assert sorted(request_headers["baggage"].split(",")) == sorted(
+            expected_outgoing_baggage_items
+        )
+
+
+def test_outgoing_trace_headers_head_sdk(sentry_init, monkeypatch):
+    # HTTPSConnection.send is passed a string containing (among other things)
+    # the headers on the request. Mock it so we can check the headers, and also
+    # so it doesn't try to actually talk to the internet.
+    mock_send = mock.Mock()
+    monkeypatch.setattr(HTTPSConnection, "send", mock_send)
+
+    # make sure transaction is always sampled
+    monkeypatch.setattr(random, "random", lambda: 0.1)
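+    # (random() returns 0.1, which is below traces_sample_rate=0.5, so the
+    # transaction is kept)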
+
+    sentry_init(traces_sample_rate=0.5, release="foo")
+    transaction = Transaction.continue_from_headers({})
+
+    with start_transaction(transaction=transaction, name="Head SDK tx") as transaction:
+        HTTPSConnection("www.squirrelchasers.com").request("GET", "/top-chasers")
+
+        (request_str,) = mock_send.call_args[0]
+        request_headers = {}
+        for line in request_str.decode("utf-8").split("\r\n")[1:]:
+            if line:
+                key, val = line.split(": ")
+                request_headers[key] = val
+
+        request_span = transaction._span_recorder.spans[-1]
+        expected_sentry_trace = "{trace_id}-{parent_span_id}-{sampled}".format(
+            trace_id=transaction.trace_id,
+            parent_span_id=request_span.span_id,
+            sampled=1,
+        )
+        assert request_headers["sentry-trace"] == expected_sentry_trace
+
+        expected_outgoing_baggage_items = [
+            "sentry-trace_id=%s" % transaction.trace_id,
+            "sentry-sample_rate=0.5",
+            "sentry-release=foo",
+            "sentry-environment=production",
+        ]
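+        # ("production" is the SDK default when no environment is configured)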
+
+        assert sorted(request_headers["baggage"].split(",")) == sorted(
+            expected_outgoing_baggage_items
+        )
diff --git a/tests/integrations/stdlib/test_subprocess.py b/tests/integrations/stdlib/test_subprocess.py
index ee6e7c8c60..31da043ac3 100644
--- a/tests/integrations/stdlib/test_subprocess.py
+++ b/tests/integrations/stdlib/test_subprocess.py
@@ -5,7 +5,7 @@
 
 import pytest
 
-from sentry_sdk import Hub, capture_message
+from sentry_sdk import capture_message, start_transaction
 from sentry_sdk._compat import PY2
 from sentry_sdk.integrations.stdlib import StdlibIntegration
 
@@ -63,7 +63,7 @@ def test_subprocess_basic(
     sentry_init(integrations=[StdlibIntegration()], traces_sample_rate=1.0)
     events = capture_events()
 
-    with Hub.current.start_span(transaction="foo", op="foo") as span:
+    with start_transaction(name="foo") as transaction:
         args = [
             sys.executable,
             "-c",
@@ -114,17 +114,20 @@ def test_subprocess_basic(
 
     assert os.environ == old_environ
 
-    assert span.trace_id in str(output)
+    assert transaction.trace_id in str(output)
 
     capture_message("hi")
 
-    transaction_event, message_event, = events
+    (
+        transaction_event,
+        message_event,
+    ) = events
 
     assert message_event["message"] == "hi"
 
     data = {"subprocess.cwd": os.getcwd()} if with_cwd else {}
 
-    (crumb,) = message_event["breadcrumbs"]
+    (crumb,) = message_event["breadcrumbs"]["values"]
     assert crumb == {
         "category": "subprocess",
         "data": data,
@@ -140,13 +143,15 @@ def test_subprocess_basic(
 
     (
         subprocess_init_span,
-        subprocess_wait_span,
         subprocess_communicate_span,
+        subprocess_wait_span,
     ) = transaction_event["spans"]
 
-    assert subprocess_init_span["op"] == "subprocess"
-    assert subprocess_communicate_span["op"] == "subprocess.communicate"
-    assert subprocess_wait_span["op"] == "subprocess.wait"
+    assert (
+        subprocess_init_span["op"],
+        subprocess_communicate_span["op"],
+        subprocess_wait_span["op"],
+    ) == ("subprocess", "subprocess.communicate", "subprocess.wait")
 
     # span hierarchy
     assert (
@@ -178,9 +183,6 @@ def test_subprocess_invalid_args(sentry_init):
     sentry_init(integrations=[StdlibIntegration()])
 
     with pytest.raises(TypeError) as excinfo:
-        subprocess.Popen()
+        subprocess.Popen(1)
 
-    if PY2:
-        assert "__init__() takes at least 2 arguments (1 given)" in str(excinfo.value)
-    else:
-        assert "missing 1 required positional argument: 'args" in str(excinfo.value)
+    assert "'int' object is not iterable" in str(excinfo.value)
diff --git a/tests/integrations/threading/test_threading.py b/tests/integrations/threading/test_threading.py
index 015d2b8221..67b79e2080 100644
--- a/tests/integrations/threading/test_threading.py
+++ b/tests/integrations/threading/test_threading.py
@@ -42,7 +42,7 @@ def test_propagates_hub(sentry_init, capture_events, propagate_hub):
 
     def stage1():
         with configure_scope() as scope:
-            scope.set_tag("stage1", True)
+            scope.set_tag("stage1", "true")
 
         t = Thread(target=stage2)
         t.start()
@@ -63,7 +63,7 @@ def stage2():
     assert exception["mechanism"] == {"type": "threading", "handled": False}
 
     if propagate_hub:
-        assert event["tags"]["stage1"] is True
+        assert event["tags"]["stage1"] == "true"
     else:
         assert "stage1" not in event.get("tags", {})
 
diff --git a/tests/integrations/tornado/test_tornado.py b/tests/integrations/tornado/test_tornado.py
index 76a8689d69..c0dac2d93f 100644
--- a/tests/integrations/tornado/test_tornado.py
+++ b/tests/integrations/tornado/test_tornado.py
@@ -2,7 +2,7 @@
 
 import pytest
 
-from sentry_sdk import configure_scope
+from sentry_sdk import configure_scope, start_transaction
 from sentry_sdk.integrations.tornado import TornadoIntegration
 
 from tornado.web import RequestHandler, Application, HTTPError
@@ -37,9 +37,28 @@ def bogustest(self):
 class CrashingHandler(RequestHandler):
     def get(self):
         with configure_scope() as scope:
-            scope.set_tag("foo", 42)
+            scope.set_tag("foo", "42")
         1 / 0
 
+    def post(self):
+        with configure_scope() as scope:
+            scope.set_tag("foo", "43")
+        1 / 0
+
+
+class HelloHandler(RequestHandler):
+    async def get(self):
+        with configure_scope() as scope:
+            scope.set_tag("foo", "42")
+
+        return b"hello"
+
+    async def post(self):
+        with configure_scope() as scope:
+            scope.set_tag("foo", "43")
+
+        return b"hello"
+
 
 def test_basic(tornado_testcase, sentry_init, capture_events):
     sentry_init(integrations=[TornadoIntegration()], send_default_pii=True)
@@ -63,8 +82,8 @@ def test_basic(tornado_testcase, sentry_init, capture_events):
         "headers": {
             "Accept-Encoding": "gzip",
             "Connection": "close",
-            "Host": host,
             "Cookie": "name=value; name2=value2; name3=value3",
+            **request["headers"],
         },
         "cookies": {"name": "value", "name2": "value2", "name3": "value3"},
         "method": "GET",
@@ -72,16 +91,98 @@ def test_basic(tornado_testcase, sentry_init, capture_events):
         "url": "http://{host}/hi".format(host=host),
     }
 
-    assert event["tags"] == {"foo": 42}
+    assert event["tags"] == {"foo": "42"}
     assert (
         event["transaction"]
         == "tests.integrations.tornado.test_tornado.CrashingHandler.get"
     )
+    assert event["transaction_info"] == {"source": "component"}
 
     with configure_scope() as scope:
         assert not scope._tags
 
 
+@pytest.mark.parametrize(
+    "handler,code",
+    [
+        (CrashingHandler, 500),
+        (HelloHandler, 200),
+    ],
+)
+def test_transactions(tornado_testcase, sentry_init, capture_events, handler, code):
+    sentry_init(integrations=[TornadoIntegration()], traces_sample_rate=1.0, debug=True)
+    events = capture_events()
+    client = tornado_testcase(Application([(r"/hi", handler)]))
+
+    with start_transaction(name="client") as span:
+        pass
+
+    response = client.fetch(
+        "/hi", method="POST", body=b"heyoo", headers=dict(span.iter_headers())
+    )
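+    # Passing the client transaction's tracing headers ties the server-side
+    # transaction into the same trace (trace_ids are compared below).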
+    assert response.code == code
+
+    if code == 200:
+        client_tx, server_tx = events
+        server_error = None
+    else:
+        client_tx, server_error, server_tx = events
+
+    assert client_tx["type"] == "transaction"
+    assert client_tx["transaction"] == "client"
+    assert client_tx["transaction_info"] == {
+        "source": "custom"
+    }  # because this is just the start_transaction() above.
+
+    if server_error is not None:
+        assert server_error["exception"]["values"][0]["type"] == "ZeroDivisionError"
+        assert (
+            server_error["transaction"]
+            == "tests.integrations.tornado.test_tornado.CrashingHandler.post"
+        )
+        assert server_error["transaction_info"] == {"source": "component"}
+
+    if code == 200:
+        assert (
+            server_tx["transaction"]
+            == "tests.integrations.tornado.test_tornado.HelloHandler.post"
+        )
+    else:
+        assert (
+            server_tx["transaction"]
+            == "tests.integrations.tornado.test_tornado.CrashingHandler.post"
+        )
+
+    assert server_tx["transaction_info"] == {"source": "component"}
+    assert server_tx["type"] == "transaction"
+
+    request = server_tx["request"]
+    host = request["headers"]["Host"]
+    assert server_tx["request"] == {
+        "env": {"REMOTE_ADDR": "127.0.0.1"},
+        "headers": {
+            "Accept-Encoding": "gzip",
+            "Connection": "close",
+            **request["headers"],
+        },
+        "method": "POST",
+        "query_string": "",
+        "data": {"heyoo": [""]},
+        "url": "http://{host}/hi".format(host=host),
+    }
+
+    assert (
+        client_tx["contexts"]["trace"]["trace_id"]
+        == server_tx["contexts"]["trace"]["trace_id"]
+    )
+
+    if server_error is not None:
+        assert (
+            server_error["contexts"]["trace"]["trace_id"]
+            == server_tx["contexts"]["trace"]["trace_id"]
+        )
+
+
 def test_400_not_logged(tornado_testcase, sentry_init, capture_events):
     sentry_init(integrations=[TornadoIntegration()])
     events = capture_events()
diff --git a/tests/integrations/wsgi/test_wsgi.py b/tests/integrations/wsgi/test_wsgi.py
index 67bfe055d1..dae9b26c13 100644
--- a/tests/integrations/wsgi/test_wsgi.py
+++ b/tests/integrations/wsgi/test_wsgi.py
@@ -1,7 +1,17 @@
+import sys
+
 from werkzeug.test import Client
+
 import pytest
 
+import sentry_sdk
 from sentry_sdk.integrations.wsgi import SentryWsgiMiddleware
+from collections import Counter
+
+try:
+    from unittest import mock  # python 3.3 and above
+except ImportError:
+    import mock  # python < 3.3
 
 
 @pytest.fixture
@@ -109,3 +119,240 @@ def test_keyboard_interrupt_is_captured(sentry_init, capture_events):
     assert exc["type"] == "KeyboardInterrupt"
     assert exc["value"] == ""
     assert event["level"] == "error"
+
+
+def test_transaction_with_error(
+    sentry_init, crashing_app, capture_events, DictionaryContaining  # noqa:N803
+):
+    def dogpark(environ, start_response):
+        raise Exception("Fetch aborted. The ball was not returned.")
+
+    sentry_init(send_default_pii=True, traces_sample_rate=1.0)
+    app = SentryWsgiMiddleware(dogpark)
+    client = Client(app)
+    events = capture_events()
+
+    with pytest.raises(Exception):
+        client.get("http://dogs.are.great/sit/stay/rollover/")
+
+    error_event, envelope = events
+
+    assert error_event["transaction"] == "generic WSGI request"
+    assert error_event["contexts"]["trace"]["op"] == "http.server"
+    assert error_event["exception"]["values"][0]["type"] == "Exception"
+    assert (
+        error_event["exception"]["values"][0]["value"]
+        == "Fetch aborted. The ball was not returned."
+    )
+
+    assert envelope["type"] == "transaction"
+
+    # event trace context is a subset of envelope trace context
+    assert envelope["contexts"]["trace"] == DictionaryContaining(
+        error_event["contexts"]["trace"]
+    )
+    assert envelope["contexts"]["trace"]["status"] == "internal_error"
+    assert envelope["transaction"] == error_event["transaction"]
+    assert envelope["request"] == error_event["request"]
+
+
+def test_transaction_no_error(
+    sentry_init, capture_events, DictionaryContaining  # noqa:N803
+):
+    def dogpark(environ, start_response):
+        start_response("200 OK", [])
+        return ["Go get the ball! Good dog!"]
+
+    sentry_init(send_default_pii=True, traces_sample_rate=1.0)
+    app = SentryWsgiMiddleware(dogpark)
+    client = Client(app)
+    events = capture_events()
+
+    client.get("/dogs/are/great/")
+
+    envelope = events[0]
+
+    assert envelope["type"] == "transaction"
+    assert envelope["transaction"] == "generic WSGI request"
+    assert envelope["contexts"]["trace"]["op"] == "http.server"
+    assert envelope["request"] == DictionaryContaining(
+        {"method": "GET", "url": "http://localhost/dogs/are/great/"}
+    )
+
+
+def test_traces_sampler_gets_correct_values_in_sampling_context(
+    sentry_init, DictionaryContaining, ObjectDescribedBy  # noqa:N803
+):
+    def app(environ, start_response):
+        start_response("200 OK", [])
+        return ["Go get the ball! Good dog!"]
+
+    traces_sampler = mock.Mock(return_value=True)
+    sentry_init(send_default_pii=True, traces_sampler=traces_sampler)
+    app = SentryWsgiMiddleware(app)
+    client = Client(app)
+
+    client.get("/dogs/are/great/")
+
+    traces_sampler.assert_any_call(
+        DictionaryContaining(
+            {
+                "wsgi_environ": DictionaryContaining(
+                    {
+                        "PATH_INFO": "/dogs/are/great/",
+                        "REQUEST_METHOD": "GET",
+                    },
+                ),
+            }
+        )
+    )
+
+
+def test_session_mode_defaults_to_request_mode_in_wsgi_handler(
+    capture_envelopes, sentry_init
+):
+    """
+    Ensure that even though the default `session_mode` for
+    auto_session_tracking is `application`, it flips to `request` when we
+    are in the WSGI handler.
+    """
+
+    def app(environ, start_response):
+        start_response("200 OK", [])
+        return ["Go get the ball! Good dog!"]
+
+    traces_sampler = mock.Mock(return_value=True)
+    sentry_init(send_default_pii=True, traces_sampler=traces_sampler)
+    app = SentryWsgiMiddleware(app)
+    envelopes = capture_envelopes()
+
+    client = Client(app)
+
+    client.get("/dogs/are/great/")
+
+    sentry_sdk.flush()
+
+    sess = envelopes[1]
+    assert len(sess.items) == 1
+    sess_event = sess.items[0].payload.json
+
+    aggregates = sess_event["aggregates"]
+    assert len(aggregates) == 1
+    assert aggregates[0]["exited"] == 1
+
+
+def test_auto_session_tracking_with_aggregates(sentry_init, capture_envelopes):
+    """
+    Test for correct session aggregates in auto session tracking.
+    """
+
+    def sample_app(environ, start_response):
+        if environ["REQUEST_URI"] != "/dogs/are/great/":
+            1 / 0
+
+        start_response("200 OK", [])
+        return ["Go get the ball! Good dog!"]
+
+    traces_sampler = mock.Mock(return_value=True)
+    sentry_init(send_default_pii=True, traces_sampler=traces_sampler)
+    app = SentryWsgiMiddleware(sample_app)
+    envelopes = capture_envelopes()
+    assert len(envelopes) == 0
+
+    client = Client(app)
+    client.get("/dogs/are/great/")
+    client.get("/dogs/are/great/")
+    try:
+        client.get("/trigger/an/error/")
+    except ZeroDivisionError:
+        pass
+
+    sentry_sdk.flush()
+
+    count_item_types = Counter()
+    for envelope in envelopes:
+        count_item_types[envelope.items[0].type] += 1
+
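+    # Expected: 3 transaction envelopes, 1 error event, and 1 session
+    # aggregate, for 5 envelopes in total.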
+    assert count_item_types["transaction"] == 3
+    assert count_item_types["event"] == 1
+    assert count_item_types["sessions"] == 1
+    assert len(envelopes) == 5
+
+    session_aggregates = envelopes[-1].items[0].payload.json["aggregates"]
+    assert session_aggregates[0]["exited"] == 2
+    assert session_aggregates[0]["crashed"] == 1
+    assert len(session_aggregates) == 1
+
+
+@pytest.mark.skipif(
+    sys.version_info < (3, 3), reason="Profiling is only supported in Python >= 3.3"
+)
+@pytest.mark.parametrize(
+    "profiles_sample_rate,profile_count",
+    [
+        pytest.param(1.0, 1, id="profiler sampled at 1.0"),
+        pytest.param(0.75, 1, id="profiler sampled at 0.75"),
+        pytest.param(0.25, 0, id="profiler not sampled at 0.25"),
+        pytest.param(None, 0, id="profiler not enabled"),
+    ],
+)
+def test_profile_sent(
+    sentry_init,
+    capture_envelopes,
+    teardown_profiling,
+    profiles_sample_rate,
+    profile_count,
+):
+    def test_app(environ, start_response):
+        start_response("200 OK", [])
+        return ["Go get the ball! Good dog!"]
+
+    sentry_init(
+        traces_sample_rate=1.0,
+        _experiments={"profiles_sample_rate": profiles_sample_rate},
+    )
+    app = SentryWsgiMiddleware(test_app)
+    envelopes = capture_envelopes()
+
+    with mock.patch("sentry_sdk.profiler.random.random", return_value=0.5):
+        client = Client(app)
+        client.get("/")
+
+    count_item_types = Counter()
+    for envelope in envelopes:
+        for item in envelope.items:
+            count_item_types[item.type] += 1
+    assert count_item_types["profile"] == profile_count
+
+
+def test_profile_context_sent(sentry_init, capture_envelopes, teardown_profiling):
+    def test_app(environ, start_response):
+        start_response("200 OK", [])
+        return ["Go get the ball! Good dog!"]
+
+    sentry_init(
+        traces_sample_rate=1.0,
+        _experiments={"profiles_sample_rate": 1.0},
+    )
+    app = SentryWsgiMiddleware(test_app)
+    envelopes = capture_envelopes()
+
+    client = Client(app)
+    client.get("/")
+
+    transaction = None
+    profile = None
+    for envelope in envelopes:
+        for item in envelope.items:
+            if item.type == "profile":
+                assert profile is None  # should only have 1 profile
+                profile = item
+            elif item.type == "transaction":
+                assert transaction is None  # should only have 1 transaction
+                transaction = item
+
+    assert transaction is not None
+    assert profile is not None
+    assert transaction.payload.json["contexts"]["profile"] == {
+        "profile_id": profile.payload.json["event_id"],
+    }
diff --git a/tests/test_basics.py b/tests/test_basics.py
index 3e5bbf0fc6..0d87e049eb 100644
--- a/tests/test_basics.py
+++ b/tests/test_basics.py
@@ -1,3 +1,5 @@
+import os
+import sys
 import logging
 
 import pytest
@@ -9,13 +11,20 @@
     capture_event,
     capture_exception,
     capture_message,
+    start_transaction,
     add_breadcrumb,
     last_event_id,
     Hub,
 )
 
+from sentry_sdk._compat import reraise
 from sentry_sdk.integrations import _AUTO_ENABLING_INTEGRATIONS
 from sentry_sdk.integrations.logging import LoggingIntegration
+from sentry_sdk.scope import (  # noqa: F401
+    add_global_event_processor,
+    global_event_processors,
+)
+from sentry_sdk.utils import get_sdk_name
 
 
 def test_processors(sentry_init, capture_events):
@@ -42,16 +51,22 @@ def error_processor(event, exc_info):
 
 def test_auto_enabling_integrations_catches_import_error(sentry_init, caplog):
     caplog.set_level(logging.DEBUG)
+    REDIS = 12  # index of the redis integration in _AUTO_ENABLING_INTEGRATIONS  # noqa: N806
 
-    sentry_init(_experiments={"auto_enabling_integrations": True}, debug=True)
+    sentry_init(auto_enabling_integrations=True, debug=True)
 
     for import_string in _AUTO_ENABLING_INTEGRATIONS:
+        # Ignore redis in the test case, because it is installed as a
+        # dependency for running tests, and therefore always enabled.
+        if _AUTO_ENABLING_INTEGRATIONS[REDIS] == import_string:
+            continue
+
         assert any(
             record.message.startswith(
                 "Did not import default integration {}:".format(import_string)
             )
             for record in caplog.records
-        )
+        ), "Problem with checking auto enabling {}".format(import_string)
 
 
 def test_event_id(sentry_init, capture_events):
@@ -70,10 +85,91 @@ def test_event_id(sentry_init, capture_events):
     assert last_event_id() == event_id
     assert Hub.current.last_event_id() == event_id
 
+    new_event_id = Hub.current.capture_event({"type": "transaction"})
+    assert new_event_id is not None
+    assert new_event_id != event_id
+    assert Hub.current.last_event_id() == event_id
+
+
+def test_option_before_send(sentry_init, capture_events):
+    def before_send(event, hint):
+        event["extra"] = {"before_send_called": True}
+        return event
+
+    def do_this():
+        try:
+            raise ValueError("aha!")
+        except Exception:
+            capture_exception()
+
+    sentry_init(before_send=before_send)
+    events = capture_events()
+
+    do_this()
+
+    (event,) = events
+    assert event["extra"] == {"before_send_called": True}
+
+
+def test_option_before_send_discard(sentry_init, capture_events):
+    def before_send_discard(event, hint):
+        return None
+
+    def do_this():
+        try:
+            raise ValueError("aha!")
+        except Exception:
+            capture_exception()
+
+    sentry_init(before_send=before_send_discard)
+    events = capture_events()
+
+    do_this()
+
+    assert len(events) == 0
+
+
+def test_option_before_send_transaction(sentry_init, capture_events):
+    def before_send_transaction(event, hint):
+        assert event["type"] == "transaction"
+        event["extra"] = {"before_send_transaction_called": True}
+        return event
+
+    sentry_init(
+        before_send_transaction=before_send_transaction,
+        traces_sample_rate=1.0,
+    )
+    events = capture_events()
+    transaction = start_transaction(name="foo")
+    transaction.finish()
+
+    (event,) = events
+    assert event["transaction"] == "foo"
+    assert event["extra"] == {"before_send_transaction_called": True}
+
+
+def test_option_before_send_transaction_discard(sentry_init, capture_events):
+    def before_send_transaction_discard(event, hint):
+        return None
+
+    sentry_init(
+        before_send_transaction=before_send_transaction_discard,
+        traces_sample_rate=1.0,
+    )
+    events = capture_events()
+    transaction = start_transaction(name="foo")
+    transaction.finish()
 
-def test_option_callback(sentry_init, capture_events):
+    assert len(events) == 0
+
+
+def test_option_before_breadcrumb(sentry_init, capture_events, monkeypatch):
     drop_events = False
     drop_breadcrumbs = False
+    reports = []
+
+    def record_lost_event(reason, data_category=None, item=None):
+        reports.append((reason, data_category))
 
     def before_send(event, hint):
         assert isinstance(hint["exc_info"][1], ValueError)
@@ -90,6 +186,10 @@ def before_breadcrumb(crumb, hint):
     sentry_init(before_send=before_send, before_breadcrumb=before_breadcrumb)
     events = capture_events()
 
+    monkeypatch.setattr(
+        Hub.current.client.transport, "record_lost_event", record_lost_event
+    )
+
     def do_this():
         add_breadcrumb(message="Hello", hint={"foo": 42})
         try:
@@ -100,13 +200,15 @@ def do_this():
     do_this()
     drop_breadcrumbs = True
     do_this()
+    assert not reports
     drop_events = True
     do_this()
+    assert reports == [("before_send", "error")]
 
     normal, no_crumbs = events
 
     assert normal["exception"]["values"][0]["type"] == "ValueError"
-    (crumb,) = normal["breadcrumbs"]
+    (crumb,) = normal["breadcrumbs"]["values"]
     assert "timestamp" in crumb
     assert crumb["message"] == "Hello"
     assert crumb["data"] == {"foo": "bar"}
@@ -172,13 +274,13 @@ def test_push_scope_callback(sentry_init, null_client, capture_events):
     if null_client:
         Hub.current.bind_client(None)
 
-    outer_scope = Hub.current._stack[-1][1]
+    outer_scope = Hub.current.scope
 
     calls = []
 
     @push_scope
     def _(scope):
-        assert scope is Hub.current._stack[-1][1]
+        assert scope is Hub.current.scope
         assert scope is not outer_scope
         calls.append(1)
 
@@ -188,7 +290,7 @@ def _(scope):
     assert calls == [1]
 
     # Assert scope gets popped correctly
-    assert Hub.current._stack[-1][1] is outer_scope
+    assert Hub.current.scope is outer_scope
 
 
 def test_breadcrumbs(sentry_init, capture_events):
@@ -203,9 +305,9 @@ def test_breadcrumbs(sentry_init, capture_events):
     capture_exception(ValueError())
     (event,) = events
 
-    assert len(event["breadcrumbs"]) == 10
-    assert "user 10" in event["breadcrumbs"][0]["message"]
-    assert "user 19" in event["breadcrumbs"][-1]["message"]
+    assert len(event["breadcrumbs"]["values"]) == 10
+    assert "user 10" in event["breadcrumbs"]["values"][0]["message"]
+    assert "user 19" in event["breadcrumbs"]["values"][-1]["message"]
 
     del events[:]
 
@@ -219,7 +321,40 @@ def test_breadcrumbs(sentry_init, capture_events):
 
     capture_exception(ValueError())
     (event,) = events
-    assert len(event["breadcrumbs"]) == 0
+    assert len(event["breadcrumbs"]["values"]) == 0
+
+
+def test_attachments(sentry_init, capture_envelopes):
+    sentry_init()
+    envelopes = capture_envelopes()
+
+    this_file = os.path.abspath(__file__.rstrip("c"))
+
+    with configure_scope() as scope:
+        scope.add_attachment(bytes=b"Hello World!", filename="message.txt")
+        scope.add_attachment(path=this_file)
+
+    capture_exception(ValueError())
+
+    (envelope,) = envelopes
+
+    assert len(envelope.items) == 3
+    assert envelope.get_event()["exception"] is not None
+
+    attachments = [x for x in envelope.items if x.type == "attachment"]
+    (message, pyfile) = attachments
+
+    assert message.headers["filename"] == "message.txt"
+    assert message.headers["type"] == "attachment"
+    assert message.headers["content_type"] == "text/plain"
+    assert message.payload.bytes == message.payload.get_bytes() == b"Hello World!"
+
+    assert pyfile.headers["filename"] == os.path.basename(this_file)
+    assert pyfile.headers["type"] == "attachment"
+    assert pyfile.headers["content_type"].startswith("text/")
+    assert pyfile.payload.bytes is None
+    with open(this_file, "rb") as f:
+        assert pyfile.payload.get_bytes() == f.read()
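+
+# Note the envelope layout asserted above: one event item plus one item per
+# attachment; bytes= payloads are carried inline, while path= payloads are
+# read lazily via get_bytes().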
 
 
 def test_integration_scoping(sentry_init, capture_events):
@@ -322,3 +457,122 @@ def test_capture_event_with_scope_kwargs(sentry_init, capture_events):
     (event,) = events
     assert event["level"] == "info"
     assert event["extra"]["foo"] == "bar"
+
+
+def test_dedupe_event_processor_drop_records_client_report(
+    sentry_init, capture_events, capture_client_reports
+):
+    """
+    DedupeIntegration internally has an event_processor that filters duplicate exceptions.
+    We want a duplicate exception to be captured only once, with the drop
+    recorded as a client report.
+    """
+    sentry_init()
+    events = capture_events()
+    reports = capture_client_reports()
+
+    try:
+        raise ValueError("aha!")
+    except Exception:
+        try:
+            capture_exception()
+            reraise(*sys.exc_info())
+        except Exception:
+            capture_exception()
+
+    (event,) = events
+    (report,) = reports
+
+    assert event["level"] == "error"
+    assert "exception" in event
+    assert report == ("event_processor", "error")
+
+
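+# Context for the dedupe test above: conceptually, DedupeIntegration's event
+# processor remembers the most recent exception and drops any event carrying
+# the same one. A minimal sketch of the idea, with hypothetical names (this
+# is illustrative, not the SDK's implementation):
+_last_seen_exception = [None]
+
+
+def dedupe_processor_sketch(event, hint):
+    exc_info = hint.get("exc_info")
+    if exc_info is not None:
+        if exc_info[1] is _last_seen_exception[0]:
+            # duplicate: drop the event; the client records a client report
+            return None
+        _last_seen_exception[0] = exc_info[1]
+    return event
+
+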
+def test_event_processor_drop_records_client_report(
+    sentry_init, capture_events, capture_client_reports
+):
+    sentry_init(traces_sample_rate=1.0)
+    events = capture_events()
+    reports = capture_client_reports()
+
+    global global_event_processors
+
+    @add_global_event_processor
+    def foo(event, hint):
+        return None
+
+    capture_message("dropped")
+
+    with start_transaction(name="dropped"):
+        pass
+
+    assert len(events) == 0
+    assert reports == [("event_processor", "error"), ("event_processor", "transaction")]
+
+    global_event_processors.pop()
+
+
+@pytest.mark.parametrize(
+    "installed_integrations, expected_name",
+    [
+        # integrations with own name
+        (["django"], "sentry.python.django"),
+        (["flask"], "sentry.python.flask"),
+        (["fastapi"], "sentry.python.fastapi"),
+        (["bottle"], "sentry.python.bottle"),
+        (["falcon"], "sentry.python.falcon"),
+        (["quart"], "sentry.python.quart"),
+        (["sanic"], "sentry.python.sanic"),
+        (["starlette"], "sentry.python.starlette"),
+        (["chalice"], "sentry.python.chalice"),
+        (["serverless"], "sentry.python.serverless"),
+        (["pyramid"], "sentry.python.pyramid"),
+        (["tornado"], "sentry.python.tornado"),
+        (["aiohttp"], "sentry.python.aiohttp"),
+        (["aws_lambda"], "sentry.python.aws_lambda"),
+        (["gcp"], "sentry.python.gcp"),
+        (["beam"], "sentry.python.beam"),
+        (["asgi"], "sentry.python.asgi"),
+        (["wsgi"], "sentry.python.wsgi"),
+        # integrations without name
+        (["argv"], "sentry.python"),
+        (["atexit"], "sentry.python"),
+        (["boto3"], "sentry.python"),
+        (["celery"], "sentry.python"),
+        (["dedupe"], "sentry.python"),
+        (["excepthook"], "sentry.python"),
+        (["executing"], "sentry.python"),
+        (["modules"], "sentry.python"),
+        (["pure_eval"], "sentry.python"),
+        (["redis"], "sentry.python"),
+        (["rq"], "sentry.python"),
+        (["sqlalchemy"], "sentry.python"),
+        (["stdlib"], "sentry.python"),
+        (["threading"], "sentry.python"),
+        (["trytond"], "sentry.python"),
+        (["logging"], "sentry.python"),
+        (["gnu_backtrace"], "sentry.python"),
+        (["httpx"], "sentry.python"),
+        # precedence of frameworks
+        (["flask", "django", "celery"], "sentry.python.django"),
+        (["fastapi", "flask", "redis"], "sentry.python.flask"),
+        (["bottle", "fastapi", "httpx"], "sentry.python.fastapi"),
+        (["falcon", "bottle", "logging"], "sentry.python.bottle"),
+        (["quart", "falcon", "gnu_backtrace"], "sentry.python.falcon"),
+        (["sanic", "quart", "sqlalchemy"], "sentry.python.quart"),
+        (["starlette", "sanic", "rq"], "sentry.python.sanic"),
+        (["chalice", "starlette", "modules"], "sentry.python.starlette"),
+        (["serverless", "chalice", "pure_eval"], "sentry.python.chalice"),
+        (["pyramid", "serverless", "modules"], "sentry.python.serverless"),
+        (["tornado", "pyramid", "executing"], "sentry.python.pyramid"),
+        (["aiohttp", "tornado", "dedupe"], "sentry.python.tornado"),
+        (["aws_lambda", "aiohttp", "boto3"], "sentry.python.aiohttp"),
+        (["gcp", "aws_lambda", "atexit"], "sentry.python.aws_lambda"),
+        (["beam", "gcp", "argv"], "sentry.python.gcp"),
+        (["asgi", "beam", "stdtlib"], "sentry.python.beam"),
+        (["wsgi", "asgi", "boto3"], "sentry.python.asgi"),
+        (["wsgi", "celery", "redis"], "sentry.python.wsgi"),
+    ],
+)
+def test_get_sdk_name(installed_integrations, expected_name):
+    assert get_sdk_name(installed_integrations) == expected_name
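+
+
+# A hedged reading of the precedence cases above: get_sdk_name appears to walk
+# a fixed, ordered list of framework integrations and return the first one
+# installed, falling back to plain "sentry.python". Sketch (illustrative, not
+# the SDK source):
+FRAMEWORK_PRECEDENCE = [
+    "django", "flask", "fastapi", "bottle", "falcon", "quart", "sanic",
+    "starlette", "chalice", "serverless", "pyramid", "tornado", "aiohttp",
+    "aws_lambda", "gcp", "beam", "asgi", "wsgi",
+]
+
+
+def get_sdk_name_sketch(installed_integrations):
+    for framework in FRAMEWORK_PRECEDENCE:
+        if framework in installed_integrations:
+            return "sentry.python." + framework
+    return "sentry.python"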
diff --git a/tests/test_client.py b/tests/test_client.py
index ff5623e8b5..c0f380d770 100644
--- a/tests/test_client.py
+++ b/tests/test_client.py
@@ -7,10 +7,23 @@
 import time
 
 from textwrap import dedent
-from sentry_sdk import Hub, Client, configure_scope, capture_message, capture_exception
+from sentry_sdk import (
+    Hub,
+    Client,
+    add_breadcrumb,
+    configure_scope,
+    capture_message,
+    capture_exception,
+    capture_event,
+    start_transaction,
+    set_tag,
+)
+from sentry_sdk.integrations.executing import ExecutingIntegration
 from sentry_sdk.transport import Transport
 from sentry_sdk._compat import reraise, text_type, PY2
 from sentry_sdk.utils import HAS_CHAINED_EXCEPTIONS
+from sentry_sdk.serializer import MAX_DATABAG_BREADTH
+from sentry_sdk.consts import DEFAULT_MAX_BREADCRUMBS
 
 if PY2:
     # Importing ABCs from collections is deprecated, and will stop working in 3.8
@@ -22,13 +35,13 @@
     from collections.abc import Mapping
 
 
-class EventCaptured(Exception):
+class EventCapturedError(Exception):
     pass
 
 
 class _TestTransport(Transport):
     def capture_event(self, event):
-        raise EventCaptured(event)
+        raise EventCapturedError(event)
 
 
 def test_transport_option(monkeypatch):
@@ -46,144 +59,245 @@ def test_transport_option(monkeypatch):
     assert str(Client(transport=transport).dsn) == dsn
 
 
-def test_proxy_http_use(monkeypatch):
-    client = Client("http://foo@sentry.io/123", http_proxy="http://localhost/123")
-    assert client.transport._pool.proxy.scheme == "http"
-
-
-def test_proxy_https_use(monkeypatch):
-    client = Client("https://foo@sentry.io/123", http_proxy="https://localhost/123")
-    assert client.transport._pool.proxy.scheme == "https"
-
-
-def test_proxy_both_select_http(monkeypatch):
-    client = Client(
-        "http://foo@sentry.io/123",
-        https_proxy="https://localhost/123",
-        http_proxy="http://localhost/123",
-    )
-    assert client.transport._pool.proxy.scheme == "http"
-
-
-def test_proxy_both_select_https(monkeypatch):
-    client = Client(
-        "https://foo@sentry.io/123",
-        https_proxy="https://localhost/123",
-        http_proxy="http://localhost/123",
-    )
-    assert client.transport._pool.proxy.scheme == "https"
-
-
-def test_proxy_http_fallback_http(monkeypatch):
-    client = Client("https://foo@sentry.io/123", http_proxy="http://localhost/123")
-    assert client.transport._pool.proxy.scheme == "http"
-
-
-def test_proxy_none_noenv(monkeypatch):
-    client = Client("http://foo@sentry.io/123")
-    assert client.transport._pool.proxy is None
-
-
-def test_proxy_none_httpenv_select(monkeypatch):
-    monkeypatch.setenv("HTTP_PROXY", "http://localhost/123")
-    client = Client("http://foo@sentry.io/123")
-    assert client.transport._pool.proxy.scheme == "http"
-
-
-def test_proxy_none_httpsenv_select(monkeypatch):
-    monkeypatch.setenv("HTTPS_PROXY", "https://localhost/123")
-    client = Client("https://foo@sentry.io/123")
-    assert client.transport._pool.proxy.scheme == "https"
-
-
-def test_proxy_none_httpenv_fallback(monkeypatch):
-    monkeypatch.setenv("HTTP_PROXY", "http://localhost/123")
-    client = Client("https://foo@sentry.io/123")
-    assert client.transport._pool.proxy.scheme == "http"
-
-
-def test_proxy_bothselect_bothen(monkeypatch):
-    monkeypatch.setenv("HTTP_PROXY", "http://localhost/123")
-    monkeypatch.setenv("HTTPS_PROXY", "https://localhost/123")
-    client = Client("https://foo@sentry.io/123", http_proxy="", https_proxy="")
-    assert client.transport._pool.proxy is None
-
-
-def test_proxy_bothavoid_bothenv(monkeypatch):
-    monkeypatch.setenv("HTTP_PROXY", "http://localhost/123")
-    monkeypatch.setenv("HTTPS_PROXY", "https://localhost/123")
-    client = Client("https://foo@sentry.io/123", http_proxy=None, https_proxy=None)
-    assert client.transport._pool.proxy.scheme == "https"
-
-
-def test_proxy_bothselect_httpenv(monkeypatch):
-    monkeypatch.setenv("HTTP_PROXY", "http://localhost/123")
-    client = Client("https://foo@sentry.io/123", http_proxy=None, https_proxy=None)
-    assert client.transport._pool.proxy.scheme == "http"
-
-
-def test_proxy_httpselect_bothenv(monkeypatch):
-    monkeypatch.setenv("HTTP_PROXY", "http://localhost/123")
-    monkeypatch.setenv("HTTPS_PROXY", "https://localhost/123")
-    client = Client("https://foo@sentry.io/123", http_proxy=None, https_proxy="")
-    assert client.transport._pool.proxy.scheme == "http"
-
-
-def test_proxy_httpsselect_bothenv(monkeypatch):
-    monkeypatch.setenv("HTTP_PROXY", "http://localhost/123")
-    monkeypatch.setenv("HTTPS_PROXY", "https://localhost/123")
-    client = Client("https://foo@sentry.io/123", http_proxy="", https_proxy=None)
-    assert client.transport._pool.proxy.scheme == "https"
-
-
-def test_proxy_httpselect_httpsenv(monkeypatch):
-    monkeypatch.setenv("HTTPS_PROXY", "https://localhost/123")
-    client = Client("https://foo@sentry.io/123", http_proxy=None, https_proxy="")
-    assert client.transport._pool.proxy is None
-
-
-def test_proxy_httpsselect_bothenv_http(monkeypatch):
-    monkeypatch.setenv("HTTP_PROXY", "http://localhost/123")
-    monkeypatch.setenv("HTTPS_PROXY", "https://localhost/123")
-    client = Client("http://foo@sentry.io/123", http_proxy=None, https_proxy=None)
-    assert client.transport._pool.proxy.scheme == "http"
-
-
-def test_simple_transport():
+@pytest.mark.parametrize(
+    "testcase",
+    [
+        {
+            "dsn": "http://foo@sentry.io/123",
+            "env_http_proxy": None,
+            "env_https_proxy": None,
+            "arg_http_proxy": "http://localhost/123",
+            "arg_https_proxy": None,
+            "expected_proxy_scheme": "http",
+        },
+        {
+            "dsn": "https://foo@sentry.io/123",
+            "env_http_proxy": None,
+            "env_https_proxy": None,
+            "arg_http_proxy": "https://localhost/123",
+            "arg_https_proxy": None,
+            "expected_proxy_scheme": "https",
+        },
+        {
+            "dsn": "http://foo@sentry.io/123",
+            "env_http_proxy": None,
+            "env_https_proxy": None,
+            "arg_http_proxy": "http://localhost/123",
+            "arg_https_proxy": "https://localhost/123",
+            "expected_proxy_scheme": "http",
+        },
+        {
+            "dsn": "https://foo@sentry.io/123",
+            "env_http_proxy": None,
+            "env_https_proxy": None,
+            "arg_http_proxy": "http://localhost/123",
+            "arg_https_proxy": "https://localhost/123",
+            "expected_proxy_scheme": "https",
+        },
+        {
+            "dsn": "https://foo@sentry.io/123",
+            "env_http_proxy": None,
+            "env_https_proxy": None,
+            "arg_http_proxy": "http://localhost/123",
+            "arg_https_proxy": None,
+            "expected_proxy_scheme": "http",
+        },
+        {
+            "dsn": "http://foo@sentry.io/123",
+            "env_http_proxy": None,
+            "env_https_proxy": None,
+            "arg_http_proxy": None,
+            "arg_https_proxy": None,
+            "expected_proxy_scheme": None,
+        },
+        {
+            "dsn": "http://foo@sentry.io/123",
+            "env_http_proxy": "http://localhost/123",
+            "env_https_proxy": None,
+            "arg_http_proxy": None,
+            "arg_https_proxy": None,
+            "expected_proxy_scheme": "http",
+        },
+        {
+            "dsn": "https://foo@sentry.io/123",
+            "env_http_proxy": None,
+            "env_https_proxy": "https://localhost/123",
+            "arg_http_proxy": None,
+            "arg_https_proxy": None,
+            "expected_proxy_scheme": "https",
+        },
+        {
+            "dsn": "https://foo@sentry.io/123",
+            "env_http_proxy": "http://localhost/123",
+            "env_https_proxy": None,
+            "arg_http_proxy": None,
+            "arg_https_proxy": None,
+            "expected_proxy_scheme": "http",
+        },
+        {
+            "dsn": "https://foo@sentry.io/123",
+            "env_http_proxy": "http://localhost/123",
+            "env_https_proxy": "https://localhost/123",
+            "arg_http_proxy": "",
+            "arg_https_proxy": "",
+            "expected_proxy_scheme": None,
+        },
+        {
+            "dsn": "https://foo@sentry.io/123",
+            "env_http_proxy": "http://localhost/123",
+            "env_https_proxy": "https://localhost/123",
+            "arg_http_proxy": None,
+            "arg_https_proxy": None,
+            "expected_proxy_scheme": "https",
+        },
+        {
+            "dsn": "https://foo@sentry.io/123",
+            "env_http_proxy": "http://localhost/123",
+            "env_https_proxy": None,
+            "arg_http_proxy": None,
+            "arg_https_proxy": None,
+            "expected_proxy_scheme": "http",
+        },
+        {
+            "dsn": "https://foo@sentry.io/123",
+            "env_http_proxy": "http://localhost/123",
+            "env_https_proxy": "https://localhost/123",
+            "arg_http_proxy": None,
+            "arg_https_proxy": "",
+            "expected_proxy_scheme": "http",
+        },
+        {
+            "dsn": "https://foo@sentry.io/123",
+            "env_http_proxy": "http://localhost/123",
+            "env_https_proxy": "https://localhost/123",
+            "arg_http_proxy": "",
+            "arg_https_proxy": None,
+            "expected_proxy_scheme": "https",
+        },
+        {
+            "dsn": "https://foo@sentry.io/123",
+            "env_http_proxy": None,
+            "env_https_proxy": "https://localhost/123",
+            "arg_http_proxy": None,
+            "arg_https_proxy": "",
+            "expected_proxy_scheme": None,
+        },
+        {
+            "dsn": "http://foo@sentry.io/123",
+            "env_http_proxy": "http://localhost/123",
+            "env_https_proxy": "https://localhost/123",
+            "arg_http_proxy": None,
+            "arg_https_proxy": None,
+            "expected_proxy_scheme": "http",
+        },
+        # NO_PROXY testcases
+        {
+            "dsn": "http://foo@sentry.io/123",
+            "env_http_proxy": "http://localhost/123",
+            "env_https_proxy": None,
+            "env_no_proxy": "sentry.io,example.com",
+            "arg_http_proxy": None,
+            "arg_https_proxy": None,
+            "expected_proxy_scheme": None,
+        },
+        {
+            "dsn": "https://foo@sentry.io/123",
+            "env_http_proxy": None,
+            "env_https_proxy": "https://localhost/123",
+            "env_no_proxy": "example.com,sentry.io",
+            "arg_http_proxy": None,
+            "arg_https_proxy": None,
+            "expected_proxy_scheme": None,
+        },
+        {
+            "dsn": "http://foo@sentry.io/123",
+            "env_http_proxy": None,
+            "env_https_proxy": None,
+            "env_no_proxy": "sentry.io,example.com",
+            "arg_http_proxy": "http://localhost/123",
+            "arg_https_proxy": None,
+            "expected_proxy_scheme": "http",
+        },
+        {
+            "dsn": "https://foo@sentry.io/123",
+            "env_http_proxy": None,
+            "env_https_proxy": None,
+            "env_no_proxy": "sentry.io,example.com",
+            "arg_http_proxy": None,
+            "arg_https_proxy": "https://localhost/123",
+            "expected_proxy_scheme": "https",
+        },
+        {
+            "dsn": "https://foo@sentry.io/123",
+            "env_http_proxy": None,
+            "env_https_proxy": None,
+            "env_no_proxy": "sentry.io,example.com",
+            "arg_http_proxy": None,
+            "arg_https_proxy": "https://localhost/123",
+            "expected_proxy_scheme": "https",
+            "arg_proxy_headers": {"Test-Header": "foo-bar"},
+        },
+    ],
+)
+def test_proxy(monkeypatch, testcase):
+    if testcase["env_http_proxy"] is not None:
+        monkeypatch.setenv("HTTP_PROXY", testcase["env_http_proxy"])
+    if testcase["env_https_proxy"] is not None:
+        monkeypatch.setenv("HTTPS_PROXY", testcase["env_https_proxy"])
+    if testcase.get("env_no_proxy") is not None:
+        monkeypatch.setenv("NO_PROXY", testcase["env_no_proxy"])
+    kwargs = {}
+    if testcase["arg_http_proxy"] is not None:
+        kwargs["http_proxy"] = testcase["arg_http_proxy"]
+    if testcase["arg_https_proxy"] is not None:
+        kwargs["https_proxy"] = testcase["arg_https_proxy"]
+    if testcase.get("arg_proxy_headers") is not None:
+        kwargs["proxy_headers"] = testcase["arg_proxy_headers"]
+    client = Client(testcase["dsn"], **kwargs)
+    if testcase["expected_proxy_scheme"] is None:
+        assert client.transport._pool.proxy is None
+    else:
+        assert client.transport._pool.proxy.scheme == testcase["expected_proxy_scheme"]
+
+        if testcase.get("arg_proxy_headers") is not None:
+            assert client.transport._pool.proxy_headers == testcase["arg_proxy_headers"]
+
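+
+# The cases above encode the proxy-selection rules: explicit http_proxy /
+# https_proxy arguments win over the HTTP_PROXY / HTTPS_PROXY environment
+# variables; an empty string explicitly disables that proxy type, while None
+# defers to the environment; an https DSN prefers an https proxy but falls
+# back to an http one; and NO_PROXY suppresses environment-derived proxies
+# for matching hosts, but not explicitly passed ones.
+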
+
+def test_simple_transport(sentry_init):
     events = []
-    with Hub(Client(transport=events.append)):
-        capture_message("Hello World!")
+    sentry_init(transport=events.append)
+    capture_message("Hello World!")
     assert events[0]["message"] == "Hello World!"
 
 
-def test_ignore_errors():
+def test_ignore_errors(sentry_init, capture_events):
     class MyDivisionError(ZeroDivisionError):
         pass
 
     def raise_it(exc_info):
         reraise(*exc_info)
 
-    hub = Hub(Client(ignore_errors=[ZeroDivisionError], transport=_TestTransport()))
-    hub._capture_internal_exception = raise_it
+    sentry_init(ignore_errors=[ZeroDivisionError], transport=_TestTransport())
+    Hub.current._capture_internal_exception = raise_it
 
     def e(exc):
         try:
             raise exc
         except Exception:
-            hub.capture_exception()
+            capture_exception()
 
     e(ZeroDivisionError())
     e(MyDivisionError())
-    pytest.raises(EventCaptured, lambda: e(ValueError()))
+    pytest.raises(EventCapturedError, lambda: e(ValueError()))
 
 
-def test_with_locals_enabled():
-    events = []
-    hub = Hub(Client(with_locals=True, transport=events.append))
+def test_with_locals_enabled(sentry_init, capture_events):
+    sentry_init(with_locals=True)
+    events = capture_events()
     try:
         1 / 0
     except Exception:
-        hub.capture_exception()
+        capture_exception()
 
     (event,) = events
 
@@ -193,13 +307,13 @@ def test_with_locals_enabled():
     )
 
 
-def test_with_locals_disabled():
-    events = []
-    hub = Hub(Client(with_locals=False, transport=events.append))
+def test_with_locals_disabled(sentry_init, capture_events):
+    sentry_init(with_locals=False)
+    events = capture_events()
     try:
         1 / 0
     except Exception:
-        hub.capture_exception()
+        capture_exception()
 
     (event,) = events
 
@@ -209,35 +323,63 @@ def test_with_locals_disabled():
     )
 
 
-def test_attach_stacktrace_enabled():
-    events = []
-    hub = Hub(Client(attach_stacktrace=True, transport=events.append))
+@pytest.mark.parametrize("integrations", [[], [ExecutingIntegration()]])
+def test_function_names(sentry_init, capture_events, integrations):
+    sentry_init(integrations=integrations)
+    events = capture_events()
+
+    def foo():
+        try:
+            bar()
+        except Exception:
+            capture_exception()
+
+    def bar():
+        1 / 0
+
+    foo()
+
+    (event,) = events
+    (thread,) = event["exception"]["values"]
+    functions = [x["function"] for x in thread["stacktrace"]["frames"]]
+
+    if integrations:
+        assert functions == [
+            "test_function_names..foo",
+            "test_function_names..bar",
+        ]
+    else:
+        assert functions == ["foo", "bar"]
+
+
+def test_attach_stacktrace_enabled(sentry_init, capture_events):
+    sentry_init(attach_stacktrace=True)
+    events = capture_events()
 
     def foo():
         bar()
 
     def bar():
-        hub.capture_message("HI")
+        capture_message("HI")
 
     foo()
 
     (event,) = events
     (thread,) = event["threads"]["values"]
     functions = [x["function"] for x in thread["stacktrace"]["frames"]]
+
     assert functions[-2:] == ["foo", "bar"]
 
 
-def test_attach_stacktrace_enabled_no_locals():
-    events = []
-    hub = Hub(
-        Client(attach_stacktrace=True, with_locals=False, transport=events.append)
-    )
+def test_attach_stacktrace_enabled_no_locals(sentry_init, capture_events):
+    sentry_init(attach_stacktrace=True, with_locals=False)
+    events = capture_events()
 
     def foo():
         bar()
 
     def bar():
-        hub.capture_message("HI")
+        capture_message("HI")
 
     foo()
 
@@ -262,19 +404,19 @@ def test_attach_stacktrace_in_app(sentry_init, capture_events):
     assert any(f["in_app"] for f in frames)
 
 
-def test_attach_stacktrace_disabled():
-    events = []
-    hub = Hub(Client(attach_stacktrace=False, transport=events.append))
-    hub.capture_message("HI")
+def test_attach_stacktrace_disabled(sentry_init, capture_events):
+    sentry_init(attach_stacktrace=False)
+    events = capture_events()
+    capture_message("HI")
 
     (event,) = events
     assert "threads" not in event
 
 
-def test_capture_event_works():
-    c = Client(transport=_TestTransport())
-    pytest.raises(EventCaptured, lambda: c.capture_event({}))
-    pytest.raises(EventCaptured, lambda: c.capture_event({}))
+def test_capture_event_works(sentry_init):
+    sentry_init(transport=_TestTransport())
+    pytest.raises(EventCapturedError, lambda: capture_event({}))
+    pytest.raises(EventCapturedError, lambda: capture_event({}))
 
 
 @pytest.mark.parametrize("num_messages", [10, 20])
@@ -316,7 +458,7 @@ def test_configure_scope_available(sentry_init, request, monkeypatch):
     sentry_init()
 
     with configure_scope() as scope:
-        assert scope is Hub.current._stack[-1][1]
+        assert scope is Hub.current.scope
         scope.set_tag("foo", "bar")
 
     calls = []
@@ -327,7 +469,7 @@ def callback(scope):
 
     assert configure_scope(callback) is None
     assert len(calls) == 1
-    assert calls[0] is Hub.current._stack[-1][1]
+    assert calls[0] is Hub.current.scope
 
 
 @pytest.mark.tests_internal_exceptions
@@ -369,7 +511,9 @@ def test_scope_initialized_before_client(sentry_init, capture_events):
 def test_weird_chars(sentry_init, capture_events):
     sentry_init()
     events = capture_events()
+    # fmt: off
     capture_message(u"föö".encode("latin1"))
+    # fmt: on
     (event,) = events
     assert json.loads(json.dumps(event)) == event
 
@@ -379,6 +523,10 @@ def test_nan(sentry_init, capture_events):
     events = capture_events()
 
     try:
+        # should_repr_strings=False
+        set_tag("mynan", float("nan"))
+
+        # should_repr_strings=True
         nan = float("nan")  # noqa
         1 / 0
     except Exception:
@@ -388,6 +536,7 @@ def test_nan(sentry_init, capture_events):
     frames = event["exception"]["values"][0]["stacktrace"]["frames"]
     (frame,) = frames
     assert frame["vars"]["nan"] == "nan"
+    assert event["tags"]["mynan"] == "nan"
 
 
 def test_cyclic_frame_vars(sentry_init, capture_events):
@@ -482,6 +631,10 @@ def inner():
 
         (event,) = events
 
+        assert (
+            len(event["exception"]["values"][0]["stacktrace"]["frames"][0]["vars"]["a"])
+            == MAX_DATABAG_BREADTH
+        )
         assert len(json.dumps(event)) < 10000
 
 
@@ -606,10 +759,10 @@ def test_errno_errors(sentry_init, capture_events):
     sentry_init()
     events = capture_events()
 
-    class Foo(Exception):
+    class FooError(Exception):
         errno = 69
 
-    capture_exception(Foo())
+    capture_exception(FooError())
 
     (event,) = events
 
@@ -676,7 +829,7 @@ def __repr__(self):
     "dsn",
     [
         "http://894b7d594095440f8dfea9b300e6f572@localhost:8000/2",
-        u"http://894b7d594095440f8dfea9b300e6f572@localhost:8000/2",
+        "http://894b7d594095440f8dfea9b300e6f572@localhost:8000/2",
     ],
 )
 def test_init_string_types(dsn, sentry_init):
@@ -690,3 +843,68 @@ def test_init_string_types(dsn, sentry_init):
         Hub.current.client.dsn
         == "http://894b7d594095440f8dfea9b300e6f572@localhost:8000/2"
     )
+
+
+def test_envelope_types():
+    """
+    Tests that the SDK client calls the right transport method (capture_event
+    vs. capture_envelope) for different data types.
+    """
+
+    envelopes = []
+    events = []
+
+    class CustomTransport(Transport):
+        def capture_envelope(self, envelope):
+            envelopes.append(envelope)
+
+        def capture_event(self, event):
+            events.append(event)
+
+    with Hub(Client(traces_sample_rate=1.0, transport=CustomTransport())):
+        event_id = capture_message("hello")
+
+        # Assert error events get passed in via capture_event
+        assert not envelopes
+        event = events.pop()
+
+        assert event["event_id"] == event_id
+        assert "type" not in event
+
+        with start_transaction(name="foo"):
+            pass
+
+        # Assert transactions get passed in via capture_envelope
+        assert not events
+        envelope = envelopes.pop()
+
+        (item,) = envelope.items
+        assert item.data_category == "transaction"
+        assert item.headers.get("type") == "transaction"
+
+    assert not envelopes
+    assert not events
+
+
+@pytest.mark.parametrize(
+    "sdk_options, expected_breadcrumbs",
+    [({}, DEFAULT_MAX_BREADCRUMBS), ({"max_breadcrumbs": 50}, 50)],
+)
+def test_max_breadcrumbs_option(
+    sentry_init, capture_events, sdk_options, expected_breadcrumbs
+):
+    sentry_init(sdk_options)
+    events = capture_events()
+
+    for _ in range(1231):
+        add_breadcrumb({"type": "sourdough"})
+
+    capture_message("dogs are great")
+
+    assert len(events[0]["breadcrumbs"]["values"]) == expected_breadcrumbs
+
+
+def test_multiple_positional_args(sentry_init):
+    with pytest.raises(TypeError) as exinfo:
+        sentry_init(1, None)
+    assert "Only single positional argument is expected" in str(exinfo.value)
diff --git a/tests/test_conftest.py b/tests/test_conftest.py
new file mode 100644
index 0000000000..8a2d4cee24
--- /dev/null
+++ b/tests/test_conftest.py
@@ -0,0 +1,110 @@
+import pytest
+
+
+@pytest.mark.parametrize(
+    "test_string, expected_result",
+    [
+        # type matches
+        ("dogs are great!", True),  # full containment - beginning
+        ("go, dogs, go!", True),  # full containment - middle
+        ("I like dogs", True),  # full containment - end
+        ("dogs", True),  # equality
+        ("", False),  # reverse containment
+        ("dog", False),  # reverse containment
+        ("good dog!", False),  # partial overlap
+        ("cats", False),  # no overlap
+        # type mismatches
+        (1231, False),
+        (11.21, False),
+        ([], False),
+        ({}, False),
+        (True, False),
+    ],
+)
+def test_string_containing(
+    test_string, expected_result, StringContaining  # noqa: N803
+):
+
+    assert (test_string == StringContaining("dogs")) is expected_result
+
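+
+# For context: matcher fixtures like StringContaining work by overriding
+# __eq__, so they can be compared against arbitrary values from either side
+# of an assertion. A minimal sketch of such a matcher (illustrative only; the
+# real fixtures come from the test suite's conftest):
+class StringContainingSketch(object):
+    def __init__(self, substring):
+        self.substring = substring
+
+    def __eq__(self, other):
+        # non-string values never match, mirroring the type-mismatch cases above
+        return isinstance(other, str) and self.substring in other
+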
+
+@pytest.mark.parametrize(
+    "test_dict, expected_result",
+    [
+        # type matches
+        ({"dogs": "yes", "cats": "maybe", "spiders": "nope"}, True),  # full containment
+        ({"dogs": "yes", "cats": "maybe"}, True),  # equality
+        ({}, False),  # reverse containment
+        ({"dogs": "yes"}, False),  # reverse containment
+        ({"dogs": "yes", "birds": "only outside"}, False),  # partial overlap
+        ({"coyotes": "from afar"}, False),  # no overlap
+        # type mismatches
+        ('{"dogs": "yes", "cats": "maybe"}', False),
+        (1231, False),
+        (11.21, False),
+        ([], False),
+        (True, False),
+    ],
+)
+def test_dictionary_containing(
+    test_dict, expected_result, DictionaryContaining  # noqa: N803
+):
+
+    assert (
+        test_dict == DictionaryContaining({"dogs": "yes", "cats": "maybe"})
+    ) is expected_result
+
+
+class Animal(object):  # noqa: B903
+    def __init__(self, name=None, age=None, description=None):
+        self.name = name
+        self.age = age
+        self.description = description
+
+
+class Dog(Animal):
+    pass
+
+
+class Cat(Animal):
+    pass
+
+
+@pytest.mark.parametrize(
+    "test_obj, type_and_attrs_result, type_only_result, attrs_only_result",
+    [
+        # type matches
+        (Dog("Maisey", 7, "silly"), True, True, True),  # full attr containment
+        (Dog("Maisey", 7), True, True, True),  # type and attr equality
+        (Dog(), False, True, False),  # reverse attr containment
+        (Dog("Maisey"), False, True, False),  # reverse attr containment
+        (Dog("Charlie", 7, "goofy"), False, True, False),  # partial attr overlap
+        (Dog("Bodhi", 6, "floppy"), False, True, False),  # no attr overlap
+        # type mismatches
+        (Cat("Maisey", 7), False, False, True),  # attr equality
+        (Cat("Piper", 1, "doglike"), False, False, False),
+        ("Good girl, Maisey", False, False, False),
+        ({"name": "Maisey", "age": 7}, False, False, False),
+        (1231, False, False, False),
+        (11.21, False, False, False),
+        ([], False, False, False),
+        (True, False, False, False),
+    ],
+)
+def test_object_described_by(
+    test_obj,
+    type_and_attrs_result,
+    type_only_result,
+    attrs_only_result,
+    ObjectDescribedBy,  # noqa: N803
+):
+
+    assert (
+        test_obj == ObjectDescribedBy(type=Dog, attrs={"name": "Maisey", "age": 7})
+    ) is type_and_attrs_result
+
+    assert (test_obj == ObjectDescribedBy(type=Dog)) is type_only_result
+
+    assert (
+        test_obj == ObjectDescribedBy(attrs={"name": "Maisey", "age": 7})
+    ) is attrs_only_result
diff --git a/tests/test_envelope.py b/tests/test_envelope.py
new file mode 100644
index 0000000000..b6a3ddf8be
--- /dev/null
+++ b/tests/test_envelope.py
@@ -0,0 +1,266 @@
+from sentry_sdk.envelope import Envelope
+from sentry_sdk.session import Session
+from sentry_sdk import capture_event
+from sentry_sdk.tracing_utils import compute_tracestate_value
+import sentry_sdk.client
+
+import pytest
+
+try:
+    from unittest import mock  # python 3.3 and above
+except ImportError:
+    import mock  # python < 3.3
+
+
+def generate_transaction_item():
+    return {
+        "event_id": "15210411201320122115110420122013",
+        "type": "transaction",
+        "transaction": "/interactions/other-dogs/new-dog",
+        "start_timestamp": 1353568872.11122131,
+        "timestamp": 1356942672.09040815,
+        "contexts": {
+            "trace": {
+                "trace_id": "12312012123120121231201212312012",
+                "span_id": "0415201309082013",
+                "parent_span_id": None,
+                "description": "",
+                "op": "greeting.sniff",
+                "tracestate": compute_tracestate_value(
+                    {
+                        "trace_id": "12312012123120121231201212312012",
+                        "environment": "dogpark",
+                        "release": "off.leash.park",
+                        "public_key": "dogsarebadatkeepingsecrets",
+                        "user": {"id": 12312013, "segment": "bigs"},
+                        "transaction": "/interactions/other-dogs/new-dog",
+                    }
+                ),
+            }
+        },
+        "spans": [
+            {
+                "description": "",
+                "op": "greeting.sniff",
+                "parent_span_id": None,
+                "span_id": "0415201309082013",
+                "start_timestamp": 1353568872.11122131,
+                "timestamp": 1356942672.09040815,
+                "trace_id": "12312012123120121231201212312012",
+            }
+        ],
+    }
+
+
+def test_add_and_get_basic_event():
+    envelope = Envelope()
+
+    expected = {"message": "Hello, World!"}
+    envelope.add_event(expected)
+
+    assert envelope.get_event() == {"message": "Hello, World!"}
+
+
+def test_add_and_get_transaction_event():
+    envelope = Envelope()
+
+    transaction_item = generate_transaction_item()
+    transaction_item.update({"event_id": "a" * 32})
+    envelope.add_transaction(transaction_item)
+
+    # typically it should not be possible to add a second transaction,
+    # but we do it anyway
+    another_transaction_item = generate_transaction_item()
+    envelope.add_transaction(another_transaction_item)
+
+    # should only fetch the first inserted transaction event
+    assert envelope.get_transaction_event() == transaction_item
+
+
+def test_add_and_get_session():
+    envelope = Envelope()
+
+    expected = Session()
+    envelope.add_session(expected)
+
+    for item in envelope:
+        if item.type == "session":
+            assert item.payload.json == expected.to_json()
+
+
+# TODO (kmclb) remove this parameterization once tracestate is a real feature
+@pytest.mark.parametrize("tracestate_enabled", [True, False])
+def test_envelope_headers(
+    sentry_init, capture_envelopes, monkeypatch, tracestate_enabled
+):
+    monkeypatch.setattr(
+        sentry_sdk.client,
+        "format_timestamp",
+        lambda x: "2012-11-21T12:31:12.415908Z",
+    )
+
+    monkeypatch.setattr(
+        sentry_sdk.client,
+        "has_tracestate_enabled",
+        mock.Mock(return_value=tracestate_enabled),
+    )
+
+    sentry_init(
+        dsn="https://dogsarebadatkeepingsecrets@squirrelchasers.ingest.sentry.io/12312012",
+    )
+    envelopes = capture_envelopes()
+
+    capture_event(generate_transaction_item())
+
+    assert len(envelopes) == 1
+
+    if tracestate_enabled:
+        assert envelopes[0].headers == {
+            "event_id": "15210411201320122115110420122013",
+            "sent_at": "2012-11-21T12:31:12.415908Z",
+            "trace": {
+                "trace_id": "12312012123120121231201212312012",
+                "environment": "dogpark",
+                "release": "off.leash.park",
+                "public_key": "dogsarebadatkeepingsecrets",
+                "user": {"id": 12312013, "segment": "bigs"},
+                "transaction": "/interactions/other-dogs/new-dog",
+            },
+        }
+    else:
+        assert envelopes[0].headers == {
+            "event_id": "15210411201320122115110420122013",
+            "sent_at": "2012-11-21T12:31:12.415908Z",
+        }
+
+
+def test_envelope_with_sized_items():
+    """
+    Tests that Envelope.deserialize successfully parses envelopes with
+    the item size specified in the header
+    """
+    envelope_raw = (
+        b'{"event_id":"9ec79c33ec9942ab8353589fcb2e04dc"}\n'
+        b'{"type":"type1","length":4 }\n1234\n'
+        b'{"type":"type2","length":4 }\nabcd\n'
+        b'{"type":"type3","length":0}\n\n'
+        b'{"type":"type4","length":4 }\nab12\n'
+    )
+    envelope_raw_eof_terminated = envelope_raw[:-1]
+
+    for envelope in (envelope_raw, envelope_raw_eof_terminated):
+        actual = Envelope.deserialize(envelope)
+
+        items = [item for item in actual]
+
+        assert len(items) == 4
+
+        assert items[0].type == "type1"
+        assert items[0].get_bytes() == b"1234"
+
+        assert items[1].type == "type2"
+        assert items[1].get_bytes() == b"abcd"
+
+        assert items[2].type == "type3"
+        assert items[2].get_bytes() == b""
+
+        assert items[3].type == "type4"
+        assert items[3].get_bytes() == b"ab12"
+
+        assert actual.headers["event_id"] == "9ec79c33ec9942ab8353589fcb2e04dc"
+
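+
+# The parsing tests in this file have a serialization counterpart: Envelope
+# objects round-trip through serialize()/deserialize(). A small sketch,
+# assuming serialize() is the counterpart of the deserialize() used below:
+def envelope_round_trip_sketch():
+    envelope = Envelope()
+    envelope.add_event({"message": "Hello, World!"})
+    # headers line, item header line, payload -- all newline-separated
+    raw = envelope.serialize()
+    parsed = Envelope.deserialize(raw)
+    assert parsed.get_event() == {"message": "Hello, World!"}
+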
+
+def test_envelope_with_implicitly_sized_items():
+    """
+    Tests that Envelope.deserialize successfully parses envelopes with
+    the item size not specified in the header
+    """
+    envelope_raw = (
+        b'{"event_id":"9ec79c33ec9942ab8353589fcb2e04dc"}\n'
+        b'{"type":"type1"}\n1234\n'
+        b'{"type":"type2"}\nabcd\n'
+        b'{"type":"type3"}\n\n'
+        b'{"type":"type4"}\nab12\n'
+    )
+    envelope_raw_eof_terminated = envelope_raw[:-1]
+
+    for envelope in (envelope_raw, envelope_raw_eof_terminated):
+        actual = Envelope.deserialize(envelope)
+        assert actual.headers["event_id"] == "9ec79c33ec9942ab8353589fcb2e04dc"
+
+        items = [item for item in actual]
+
+        assert len(items) == 4
+
+        assert items[0].type == "type1"
+        assert items[0].get_bytes() == b"1234"
+
+        assert items[1].type == "type2"
+        assert items[1].get_bytes() == b"abcd"
+
+        assert items[2].type == "type3"
+        assert items[2].get_bytes() == b""
+
+        assert items[3].type == "type4"
+        assert items[3].get_bytes() == b"ab12"
+
+
+def test_envelope_with_two_attachments():
+    """
+    Test that items are correctly parsed in an envelope with two size-specified items
+    """
+    two_attachments = (
+        b'{"event_id":"9ec79c33ec9942ab8353589fcb2e04dc","dsn":"https://e12d836b15bb49d7bbf99e64295d995b:@sentry.io/42"}\n'
+        + b'{"type":"attachment","length":10,"content_type":"text/plain","filename":"hello.txt"}\n'
+        + b"\xef\xbb\xbfHello\r\n\n"
+        + b'{"type":"event","length":41,"content_type":"application/json","filename":"application.log"}\n'
+        + b'{"message":"hello world","level":"error"}\n'
+    )
+    two_attachments_eof_terminated = two_attachments[
+        :-1
+    ]  # the trailing \n is optional; without it the envelope is still valid
+
+    for envelope_raw in (two_attachments, two_attachments_eof_terminated):
+        actual = Envelope.deserialize(envelope_raw)
+        items = [item for item in actual]
+
+        assert len(items) == 2
+        assert items[0].get_bytes() == b"\xef\xbb\xbfHello\r\n"
+        assert items[1].payload.json == {"message": "hello world", "level": "error"}
+
+
+def test_envelope_with_empty_attachments():
+    """
+    Test that items are correctly parsed in an envelope with two zero-length items (with the size specified in the header)
+    """
+    two_empty_attachments = (
+        b'{"event_id":"9ec79c33ec9942ab8353589fcb2e04dc"}\n'
+        + b'{"type":"attachment","length":0}\n\n'
+        + b'{"type":"attachment","length":0}\n\n'
+    )
+
+    two_empty_attachments_eof_terminated = two_empty_attachments[
+        :-1
+    ]  # the trailing \n is optional; without it the envelope is still valid
+
+    for envelope_raw in (two_empty_attachments, two_empty_attachments_eof_terminated):
+        actual = Envelope.deserialize(envelope_raw)
+        items = [item for item in actual]
+
+        assert len(items) == 2
+        assert items[0].get_bytes() == b""
+        assert items[1].get_bytes() == b""
+
+
+def test_envelope_without_headers():
+    """
+    Test that an envelope without headers is parsed successfully
+    """
+    envelope_without_headers = (
+        b"{}\n" + b'{"type":"session"}\n' + b'{"started": "2020-02-07T14:16:00Z"}'
+    )
+    actual = Envelope.deserialize(envelope_without_headers)
+    items = [item for item in actual]
+
+    assert len(items) == 1
+    assert items[0].payload.get_bytes() == b'{"started": "2020-02-07T14:16:00Z"}'
diff --git a/tests/test_profiler.py b/tests/test_profiler.py
new file mode 100644
index 0000000000..f0613c9c65
--- /dev/null
+++ b/tests/test_profiler.py
@@ -0,0 +1,651 @@
+import inspect
+import os
+import sys
+import threading
+
+import pytest
+
+from sentry_sdk.profiler import (
+    GeventScheduler,
+    Profile,
+    ThreadScheduler,
+    extract_frame,
+    extract_stack,
+    get_frame_name,
+    setup_profiler,
+)
+from sentry_sdk.tracing import Transaction
+
+try:
+    import gevent
+except ImportError:
+    gevent = None
+
+
+def requires_python_version(major, minor, reason=None):
+    if reason is None:
+        reason = "Requires Python {}.{}".format(major, minor)
+    return pytest.mark.skipif(sys.version_info < (major, minor), reason=reason)
+
+
+requires_gevent = pytest.mark.skipif(gevent is None, reason="gevent not enabled")
+
+
+def process_test_sample(sample):
+    return [(tid, (stack, stack)) for tid, stack in sample]
+
+
+@requires_python_version(3, 3)
+@pytest.mark.parametrize(
+    "mode",
+    [
+        pytest.param("foo"),
+        pytest.param(
+            "gevent",
+            marks=pytest.mark.skipif(gevent is not None, reason="gevent is enabled"),
+        ),
+    ],
+)
+def test_profiler_invalid_mode(mode, teardown_profiling):
+    with pytest.raises(ValueError):
+        setup_profiler({"_experiments": {"profiler_mode": mode}})
+
+
+@pytest.mark.parametrize(
+    "mode",
+    [
+        pytest.param("thread"),
+        pytest.param("sleep"),
+        pytest.param("gevent", marks=requires_gevent),
+    ],
+)
+def test_profiler_valid_mode(mode, teardown_profiling):
+    # should not raise any exceptions
+    setup_profiler({"_experiments": {"profiler_mode": mode}})
+
+
+def get_frame(depth=1):
+    """
+    This function is not exactly true to its name. Depending on
+    how it is called, the true depth of the stack can be deeper
+    than the argument implies.
+    """
+    if depth <= 0:
+        raise ValueError("only positive integers allowed")
+    if depth > 1:
+        return get_frame(depth=depth - 1)
+    return inspect.currentframe()
+
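+
+# For example, get_frame(depth=3) returns the innermost of three nested
+# get_frame frames; the caller's own frames sit below those, which is why
+# the docstring above warns the true stack depth can be deeper.
+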
+
+class GetFrameBase:
+    def inherited_instance_method(self):
+        return inspect.currentframe()
+
+    def inherited_instance_method_wrapped(self):
+        def wrapped():
+            return inspect.currentframe()
+
+        return wrapped
+
+    @classmethod
+    def inherited_class_method(cls):
+        return inspect.currentframe()
+
+    @classmethod
+    def inherited_class_method_wrapped(cls):
+        def wrapped():
+            return inspect.currentframe()
+
+        return wrapped
+
+    @staticmethod
+    def inherited_static_method():
+        return inspect.currentframe()
+
+
+class GetFrame(GetFrameBase):
+    def instance_method(self):
+        return inspect.currentframe()
+
+    def instance_method_wrapped(self):
+        def wrapped():
+            return inspect.currentframe()
+
+        return wrapped
+
+    @classmethod
+    def class_method(cls):
+        return inspect.currentframe()
+
+    @classmethod
+    def class_method_wrapped(cls):
+        def wrapped():
+            return inspect.currentframe()
+
+        return wrapped
+
+    @staticmethod
+    def static_method():
+        return inspect.currentframe()
+
+
+@pytest.mark.parametrize(
+    ("frame", "frame_name"),
+    [
+        pytest.param(
+            get_frame(),
+            "get_frame",
+            id="function",
+        ),
+        pytest.param(
+            (lambda: inspect.currentframe())(),
+            "",
+            id="lambda",
+        ),
+        pytest.param(
+            GetFrame().instance_method(),
+            "GetFrame.instance_method",
+            id="instance_method",
+        ),
+        pytest.param(
+            GetFrame().instance_method_wrapped()(),
+            "wrapped"
+            if sys.version_info < (3, 11)
+            else "GetFrame.instance_method_wrapped..wrapped",
+            id="instance_method_wrapped",
+        ),
+        pytest.param(
+            GetFrame().class_method(),
+            "GetFrame.class_method",
+            id="class_method",
+        ),
+        pytest.param(
+            GetFrame().class_method_wrapped()(),
+            "wrapped"
+            if sys.version_info < (3, 11)
+            else "GetFrame.class_method_wrapped..wrapped",
+            id="class_method_wrapped",
+        ),
+        pytest.param(
+            GetFrame().static_method(),
+            "static_method" if sys.version_info < (3, 11) else "GetFrame.static_method",
+            id="static_method",
+        ),
+        pytest.param(
+            GetFrame().inherited_instance_method(),
+            "GetFrameBase.inherited_instance_method",
+            id="inherited_instance_method",
+        ),
+        pytest.param(
+            GetFrame().inherited_instance_method_wrapped()(),
+            "wrapped"
+            if sys.version_info < (3, 11)
+            else "GetFrameBase.inherited_instance_method_wrapped..wrapped",
+            id="instance_method_wrapped",
+        ),
+        pytest.param(
+            GetFrame().inherited_class_method(),
+            "GetFrameBase.inherited_class_method",
+            id="inherited_class_method",
+        ),
+        pytest.param(
+            GetFrame().inherited_class_method_wrapped()(),
+            "wrapped"
+            if sys.version_info < (3, 11)
+            else "GetFrameBase.inherited_class_method_wrapped..wrapped",
+            id="inherited_class_method_wrapped",
+        ),
+        pytest.param(
+            GetFrame().inherited_static_method(),
+            "inherited_static_method"
+            if sys.version_info < (3, 11)
+            else "GetFrameBase.inherited_static_method",
+            id="inherited_static_method",
+        ),
+    ],
+)
+def test_get_frame_name(frame, frame_name):
+    assert get_frame_name(frame) == frame_name
+
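+
+# Why the expected names above differ across Python versions: CPython 3.11
+# added co_qualname to code objects, so a frame can report its qualified name
+# (e.g. "GetFrame.class_method_wrapped.<locals>.wrapped") instead of just
+# co_name ("wrapped"). A minimal sketch of that fallback (hypothetical helper,
+# for illustration only):
+def qualname_of_sketch(frame):
+    code = frame.f_code
+    # co_qualname only exists on Python 3.11+; fall back to the bare name
+    return getattr(code, "co_qualname", code.co_name)
+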
+
+@pytest.mark.parametrize(
+    ("get_frame", "function"),
+    [
+        pytest.param(lambda: get_frame(depth=1), "get_frame", id="simple"),
+    ],
+)
+def test_extract_frame(get_frame, function):
+    cwd = os.getcwd()
+    frame = get_frame()
+    extracted_frame = extract_frame(frame, cwd)
+
+    # the abs_path should be equal to the normalized path of the co_filename
+    assert extracted_frame[0] == os.path.normpath(frame.f_code.co_filename)
+
+    # the module should be pulled from this test module
+    assert extracted_frame[1] == __name__
+
+    # the filename should be the file starting after the cwd
+    assert extracted_frame[2] == __file__[len(cwd) + 1 :]
+
+    assert extracted_frame[3] == function
+
+    # the lineno will shift over time as this file is modified so just check
+    # that it is an int
+    assert isinstance(extracted_frame[4], int)
+
+
+@pytest.mark.parametrize(
+    ("depth", "max_stack_depth", "actual_depth"),
+    [
+        pytest.param(1, 128, 1, id="less than"),
+        pytest.param(256, 128, 128, id="greater than"),
+        pytest.param(128, 128, 128, id="equals"),
+    ],
+)
+def test_extract_stack_with_max_depth(depth, max_stack_depth, actual_depth):
+    # introduce a lambda that we'll be looking for in the stack
+    frame = (lambda: get_frame(depth=depth))()
+
+    # plus 1 because we intentionally introduced a lambda that we'll
+    # look for in the final stack to make sure it's in the right position
+    base_stack_depth = len(inspect.stack()) + 1
+
+    # increase the max_depth by the `base_stack_depth` to account
+    # for the extra frames pytest will add
+    _, stack, _ = extract_stack(
+        frame, os.getcwd(), max_stack_depth=max_stack_depth + base_stack_depth
+    )
+    assert len(stack) == base_stack_depth + actual_depth
+
+    for i in range(actual_depth):
+        assert stack[i][3] == "get_frame", i
+
+    # index 0 contains the innermost frame on the stack, so the lambda
+    # should be at index `actual_depth`
+    assert stack[actual_depth][3] == "<lambda>", actual_depth
+
+
+def test_extract_stack_with_cache():
+    frame = get_frame(depth=1)
+
+    prev_cache = extract_stack(frame, os.getcwd())
+    _, stack1, _ = prev_cache
+    _, stack2, _ = extract_stack(frame, os.getcwd(), prev_cache)
+
+    assert len(stack1) == len(stack2)
+    for i, (frame1, frame2) in enumerate(zip(stack1, stack2)):
+        # DO NOT use `==` for the assertion here since we are
+        # testing for identity, and using `==` would test for
+        # equality, which would always pass since we're extracting
+        # the same stack.
+        assert frame1 is frame2, i
+
+
+def get_scheduler_threads(scheduler):
+    return [thread for thread in threading.enumerate() if thread.name == scheduler.name]
+
+
+@requires_python_version(3, 3)
+@pytest.mark.parametrize(
+    ("scheduler_class",),
+    [
+        pytest.param(ThreadScheduler, id="thread scheduler"),
+        pytest.param(
+            GeventScheduler,
+            marks=[
+                requires_gevent,
+                pytest.mark.skip(
+                    reason="cannot find this thread via threading.enumerate()"
+                ),
+            ],
+            id="gevent scheduler",
+        ),
+    ],
+)
+def test_thread_scheduler_single_background_thread(scheduler_class):
+    scheduler = scheduler_class(frequency=1000)
+
+    # not yet setup, no scheduler threads yet
+    assert len(get_scheduler_threads(scheduler)) == 0
+
+    scheduler.setup()
+
+    # the scheduler will always start exactly 1 thread
+    assert len(get_scheduler_threads(scheduler)) == 1
+
+    scheduler.teardown()
+
+    # once finished, the thread should stop
+    assert len(get_scheduler_threads(scheduler)) == 0
+
+
+current_thread = threading.current_thread()
+thread_metadata = {
+    str(current_thread.ident): {
+        "name": str(current_thread.name),
+    },
+}
+
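+
+# The expected payloads below use the sampled-profile format: "frames" and
+# "stacks" are deduplicated lookup tables, and each sample references a stack
+# by index via "stack_id", so identical stacks captured at different times
+# share a single entry.
+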
+
+@pytest.mark.parametrize(
+    ("capacity", "start_ns", "stop_ns", "samples", "expected"),
+    [
+        pytest.param(
+            10,
+            0,
+            1,
+            [],
+            {
+                "frames": [],
+                "samples": [],
+                "stacks": [],
+                "thread_metadata": thread_metadata,
+            },
+            id="empty",
+        ),
+        pytest.param(
+            10,
+            1,
+            2,
+            [
+                (
+                    0,
+                    [
+                        (
+                            "1",
+                            (("/path/to/file.py", "file", "file.py", "name", 1),),
+                        )
+                    ],
+                )
+            ],
+            {
+                "frames": [],
+                "samples": [],
+                "stacks": [],
+                "thread_metadata": thread_metadata,
+            },
+            id="single sample out of range",
+        ),
+        pytest.param(
+            10,
+            0,
+            1,
+            [
+                (
+                    0,
+                    [
+                        (
+                            "1",
+                            (("/path/to/file.py", "file", "file.py", "name", 1),),
+                        )
+                    ],
+                )
+            ],
+            {
+                "frames": [
+                    {
+                        "abs_path": "/path/to/file.py",
+                        "function": "name",
+                        "filename": "file.py",
+                        "lineno": 1,
+                        "module": "file",
+                    },
+                ],
+                "samples": [
+                    {
+                        "elapsed_since_start_ns": "0",
+                        "thread_id": "1",
+                        "stack_id": 0,
+                    },
+                ],
+                "stacks": [[0]],
+                "thread_metadata": thread_metadata,
+            },
+            id="single sample in range",
+        ),
+        pytest.param(
+            10,
+            0,
+            1,
+            [
+                (
+                    0,
+                    [
+                        (
+                            "1",
+                            (("/path/to/file.py", "file", "file.py", "name", 1),),
+                        )
+                    ],
+                ),
+                (
+                    1,
+                    [
+                        (
+                            "1",
+                            (("/path/to/file.py", "file", "file.py", "name", 1),),
+                        )
+                    ],
+                ),
+            ],
+            {
+                "frames": [
+                    {
+                        "abs_path": "/path/to/file.py",
+                        "function": "name",
+                        "filename": "file.py",
+                        "lineno": 1,
+                        "module": "file",
+                    },
+                ],
+                "samples": [
+                    {
+                        "elapsed_since_start_ns": "0",
+                        "thread_id": "1",
+                        "stack_id": 0,
+                    },
+                    {
+                        "elapsed_since_start_ns": "1",
+                        "thread_id": "1",
+                        "stack_id": 0,
+                    },
+                ],
+                "stacks": [[0]],
+                "thread_metadata": thread_metadata,
+            },
+            id="two identical stacks",
+        ),
+        pytest.param(
+            10,
+            0,
+            1,
+            [
+                (
+                    0,
+                    [
+                        (
+                            "1",
+                            (("/path/to/file.py", "file", "file.py", "name1", 1),),
+                        )
+                    ],
+                ),
+                (
+                    1,
+                    [
+                        (
+                            "1",
+                            (
+                                ("/path/to/file.py", "file", "file.py", "name1", 1),
+                                ("/path/to/file.py", "file", "file.py", "name2", 2),
+                            ),
+                        )
+                    ],
+                ),
+            ],
+            {
+                "frames": [
+                    {
+                        "abs_path": "/path/to/file.py",
+                        "function": "name1",
+                        "filename": "file.py",
+                        "lineno": 1,
+                        "module": "file",
+                    },
+                    {
+                        "abs_path": "/path/to/file.py",
+                        "function": "name2",
+                        "filename": "file.py",
+                        "lineno": 2,
+                        "module": "file",
+                    },
+                ],
+                "samples": [
+                    {
+                        "elapsed_since_start_ns": "0",
+                        "thread_id": "1",
+                        "stack_id": 0,
+                    },
+                    {
+                        "elapsed_since_start_ns": "1",
+                        "thread_id": "1",
+                        "stack_id": 1,
+                    },
+                ],
+                "stacks": [[0], [0, 1]],
+                "thread_metadata": thread_metadata,
+            },
+            id="two identical frames",
+        ),
+        pytest.param(
+            10,
+            0,
+            1,
+            [
+                (
+                    0,
+                    [
+                        (
+                            "1",
+                            (
+                                ("/path/to/file.py", "file", "file.py", "name1", 1),
+                                ("/path/to/file.py", "file", "file.py", "name2", 2),
+                            ),
+                        )
+                    ],
+                ),
+                (
+                    1,
+                    [
+                        (
+                            "1",
+                            (
+                                ("/path/to/file.py", "file", "file.py", "name3", 3),
+                                ("/path/to/file.py", "file", "file.py", "name4", 4),
+                            ),
+                        )
+                    ],
+                ),
+            ],
+            {
+                "frames": [
+                    {
+                        "abs_path": "/path/to/file.py",
+                        "function": "name1",
+                        "filename": "file.py",
+                        "lineno": 1,
+                        "module": "file",
+                    },
+                    {
+                        "abs_path": "/path/to/file.py",
+                        "function": "name2",
+                        "filename": "file.py",
+                        "lineno": 2,
+                        "module": "file",
+                    },
+                    {
+                        "abs_path": "/path/to/file.py",
+                        "function": "name3",
+                        "filename": "file.py",
+                        "lineno": 3,
+                        "module": "file",
+                    },
+                    {
+                        "abs_path": "/path/to/file.py",
+                        "function": "name4",
+                        "filename": "file.py",
+                        "lineno": 4,
+                        "module": "file",
+                    },
+                ],
+                "samples": [
+                    {
+                        "elapsed_since_start_ns": "0",
+                        "thread_id": "1",
+                        "stack_id": 0,
+                    },
+                    {
+                        "elapsed_since_start_ns": "1",
+                        "thread_id": "1",
+                        "stack_id": 1,
+                    },
+                ],
+                "stacks": [[0, 1], [2, 3]],
+                "thread_metadata": thread_metadata,
+            },
+            id="two unique stacks",
+        ),
+    ],
+)
+@pytest.mark.parametrize(
+    ("scheduler_class",),
+    [
+        pytest.param(ThreadScheduler, id="thread scheduler"),
+        pytest.param(GeventScheduler, marks=requires_gevent, id="gevent scheduler"),
+    ],
+)
+def test_profile_processing(
+    DictionaryContaining,  # noqa: N803
+    scheduler_class,
+    capacity,
+    start_ns,
+    stop_ns,
+    samples,
+    expected,
+):
+    with scheduler_class(frequency=1000) as scheduler:
+        transaction = Transaction()
+        profile = Profile(scheduler, transaction)
+        profile.start_ns = start_ns
+        for ts, sample in samples:
+            profile.write(ts, process_test_sample(sample))
+        profile.stop_ns = stop_ns
+
+        processed = profile.process()
+
+        assert processed["thread_metadata"] == DictionaryContaining(
+            expected["thread_metadata"]
+        )
+        assert processed["frames"] == expected["frames"]
+        assert processed["stacks"] == expected["stacks"]
+        assert processed["samples"] == expected["samples"]
diff --git a/tests/test_scope.py b/tests/test_scope.py
index 0e73584985..d90a89f490 100644
--- a/tests/test_scope.py
+++ b/tests/test_scope.py
@@ -22,14 +22,14 @@ def test_merging(sentry_init, capture_events):
     sentry_init()
 
     s = Scope()
-    s.set_user({"id": 42})
+    s.set_user({"id": "42"})
 
     events = capture_events()
 
     capture_exception(NameError(), scope=s)
 
     (event,) = events
-    assert event["user"] == {"id": 42}
+    assert event["user"] == {"id": "42"}
 
 
 def test_common_args():
diff --git a/tests/test_serializer.py b/tests/test_serializer.py
index 13fb05717c..1e28daa2f1 100644
--- a/tests/test_serializer.py
+++ b/tests/test_serializer.py
@@ -1,56 +1,49 @@
-from datetime import datetime
+import re
 import sys
-
 import pytest
 
 from sentry_sdk.serializer import serialize
 
 try:
-    from hypothesis import given, example
+    from hypothesis import given
     import hypothesis.strategies as st
 except ImportError:
     pass
 else:
 
-    @given(
-        dt=st.datetimes(
-            min_value=datetime(2000, 1, 1, 0, 0, 0), timezones=st.just(None)
-        )
-    )
-    @example(dt=datetime(2001, 1, 1, 0, 0, 0, 999500))
-    def test_datetime_precision(dt, relay_normalize):
-        event = serialize({"timestamp": dt})
-        normalized = relay_normalize(event)
+    def test_bytes_serialization_decode_many(message_normalizer):
+        @given(binary=st.binary(min_size=1))
+        def inner(binary):
+            result = message_normalizer(binary, should_repr_strings=False)
+            assert result == binary.decode("utf-8", "replace")
 
-        if normalized is None:
-            pytest.skip("no relay available")
+        inner()
 
-        dt2 = datetime.utcfromtimestamp(normalized["timestamp"])
+    def test_bytes_serialization_repr_many(message_normalizer):
+        @given(binary=st.binary(min_size=1))
+        def inner(binary):
+            result = message_normalizer(binary, should_repr_strings=True)
+            assert result == repr(binary)
 
-        # Float glitches can happen, and more glitches can happen
-        # because we try to work around some float glitches in relay
-        assert (dt - dt2).total_seconds() < 1.0
+        inner()
 
-    @given(binary=st.binary(min_size=1))
-    def test_bytes_serialization_decode_many(binary, message_normalizer):
-        result = message_normalizer(binary, should_repr_strings=False)
-        assert result == binary.decode("utf-8", "replace")
 
-    @given(binary=st.binary(min_size=1))
-    def test_bytes_serialization_repr_many(binary, message_normalizer):
-        result = message_normalizer(binary, should_repr_strings=True)
-        assert result == repr(binary)
+@pytest.fixture
+def message_normalizer(validate_event_schema):
+    def inner(message, **kwargs):
+        event = serialize({"logentry": {"message": message}}, **kwargs)
+        validate_event_schema(event)
+        return event["logentry"]["message"]
 
+    return inner
 
-@pytest.fixture
-def message_normalizer(relay_normalize):
-    if relay_normalize({"test": "test"}) is None:
-        pytest.skip("no relay available")
 
+@pytest.fixture
+def extra_normalizer(validate_event_schema):
     def inner(message, **kwargs):
-        event = serialize({"logentry": {"message": message}}, **kwargs)
-        normalized = relay_normalize(event)
-        return normalized["logentry"]["message"]
+        event = serialize({"extra": {"foo": message}}, **kwargs)
+        validate_event_schema(event)
+        return event["extra"]["foo"]
 
     return inner
 
@@ -58,7 +51,9 @@ def inner(message, **kwargs):
 def test_bytes_serialization_decode(message_normalizer):
     binary = b"abc123\x80\xf0\x9f\x8d\x95"
     result = message_normalizer(binary, should_repr_strings=False)
+    # fmt: off
     assert result == u"abc123\ufffd\U0001f355"
+    # fmt: on
 
 
 @pytest.mark.xfail(sys.version_info < (3,), reason="Known safe_repr bugs in Py2.7")
@@ -66,3 +61,48 @@ def test_bytes_serialization_repr(message_normalizer):
     binary = b"abc123\x80\xf0\x9f\x8d\x95"
     result = message_normalizer(binary, should_repr_strings=True)
     assert result == r"b'abc123\x80\xf0\x9f\x8d\x95'"
+
+
+def test_bytearray_serialization_decode(message_normalizer):
+    binary = bytearray(b"abc123\x80\xf0\x9f\x8d\x95")
+    result = message_normalizer(binary, should_repr_strings=False)
+    assert result == "abc123\ufffd\U0001f355"
+
+
+@pytest.mark.xfail(sys.version_info < (3,), reason="Known safe_repr bugs in Py2.7")
+def test_bytearray_serialization_repr(message_normalizer):
+    binary = bytearray(b"abc123\x80\xf0\x9f\x8d\x95")
+    result = message_normalizer(binary, should_repr_strings=True)
+    assert result == r"bytearray(b'abc123\x80\xf0\x9f\x8d\x95')"
+
+
+def test_memoryview_serialization_repr(message_normalizer):
+    binary = memoryview(b"abc123\x80\xf0\x9f\x8d\x95")
+    result = message_normalizer(binary, should_repr_strings=False)
+    assert re.match(r"^$", result)
+
+
+def test_serialize_sets(extra_normalizer):
+    result = extra_normalizer({1, 2, 3})
+    assert result == [1, 2, 3]
+
+
+def test_serialize_custom_mapping(extra_normalizer):
+    class CustomReprDict(dict):
+        def __sentry_repr__(self):
+            return "custom!"
+
+    result = extra_normalizer(CustomReprDict(one=1, two=2))
+    assert result == "custom!"
+
+
+def test_custom_mapping_doesnt_mess_with_mock(extra_normalizer):
+    """
+    Adding the __sentry_repr__ magic method check in the serializer
+    shouldn't mess with how mock works. This broke some stuff when we added
+    sentry_repr without the dunders.
+    """
+    mock = pytest.importorskip("unittest.mock")
+    m = mock.Mock()
+    extra_normalizer(m)
+    assert len(m.mock_calls) == 0
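
On the Mock regression the last test guards against: a plain Mock fabricates any ordinary attribute on access, so probing for a plain sentry_repr method would both succeed spuriously and record a call. Magic (__dunder__) names are the exception Mock refuses to synthesize, which is why the dunder spelling is safe. One plausible shape of the check, purely illustrative and not the SDK serializer's actual code:

    import unittest.mock as mock

    def custom_repr_or_none(obj):
        # Dunder lookup is safe: Mock raises AttributeError for magic names
        # instead of fabricating them, so mock_calls stays empty.
        sentry_repr = getattr(type(obj), "__sentry_repr__", None)
        return sentry_repr(obj) if sentry_repr is not None else None

    assert custom_repr_or_none(mock.Mock()) is None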
diff --git a/tests/test_sessions.py b/tests/test_sessions.py
index 78c87a61bd..09b42b70a4 100644
--- a/tests/test_sessions.py
+++ b/tests/test_sessions.py
@@ -1,4 +1,13 @@
+import sentry_sdk
+
 from sentry_sdk import Hub
+from sentry_sdk.sessions import auto_session_tracking
+
+
+def sorted_aggregates(item):
+    aggregates = item["aggregates"]
+    aggregates.sort(key=lambda item: (item["started"], item.get("did", "")))
+    return aggregates
 
 
 def test_basic(sentry_init, capture_envelopes):
@@ -10,7 +19,7 @@ def test_basic(sentry_init, capture_envelopes):
 
     try:
         with hub.configure_scope() as scope:
-            scope.set_user({"id": 42})
+            scope.set_user({"id": "42"})
             raise Exception("all is wrong")
     except Exception:
         hub.capture_exception()
@@ -24,11 +33,89 @@ def test_basic(sentry_init, capture_envelopes):
     assert len(sess.items) == 1
     sess_event = sess.items[0].payload.json
 
+    assert sess_event["attrs"] == {
+        "release": "fun-release",
+        "environment": "not-fun-env",
+    }
     assert sess_event["did"] == "42"
     assert sess_event["init"]
     assert sess_event["status"] == "exited"
     assert sess_event["errors"] == 1
+
+
+def test_aggregates(sentry_init, capture_envelopes):
+    sentry_init(
+        release="fun-release",
+        environment="not-fun-env",
+    )
+    envelopes = capture_envelopes()
+
+    hub = Hub.current
+
+    with auto_session_tracking(session_mode="request"):
+        with sentry_sdk.push_scope():
+            try:
+                with sentry_sdk.configure_scope() as scope:
+                    scope.set_user({"id": "42"})
+                    raise Exception("all is wrong")
+            except Exception:
+                sentry_sdk.capture_exception()
+
+    with auto_session_tracking(session_mode="request"):
+        pass
+
+    hub.start_session(session_mode="request")
+    hub.end_session()
+
+    sentry_sdk.flush()
+
+    assert len(envelopes) == 2
+    assert envelopes[0].get_event() is not None
+
+    sess = envelopes[1]
+    assert len(sess.items) == 1
+    sess_event = sess.items[0].payload.json
     assert sess_event["attrs"] == {
         "release": "fun-release",
         "environment": "not-fun-env",
     }
+
+    aggregates = sorted_aggregates(sess_event)
+    assert len(aggregates) == 1
+    assert aggregates[0]["exited"] == 2
+    assert aggregates[0]["errored"] == 1
+
+
+def test_aggregates_explicitly_disabled_session_tracking_request_mode(
+    sentry_init, capture_envelopes
+):
+    sentry_init(
+        release="fun-release", environment="not-fun-env", auto_session_tracking=False
+    )
+    envelopes = capture_envelopes()
+
+    hub = Hub.current
+
+    with auto_session_tracking(session_mode="request"):
+        with sentry_sdk.push_scope():
+            try:
+                raise Exception("all is wrong")
+            except Exception:
+                sentry_sdk.capture_exception()
+
+    with auto_session_tracking(session_mode="request"):
+        pass
+
+    hub.start_session(session_mode="request")
+    hub.end_session()
+
+    sentry_sdk.flush()
+
+    sess = envelopes[1]
+    assert len(sess.items) == 1
+    sess_event = sess.items[0].payload.json
+
+    aggregates = sorted_aggregates(sess_event)
+    assert len(aggregates) == 1
+    assert aggregates[0]["exited"] == 1
+    assert "errored" not in aggregates[0]
diff --git a/tests/test_tracing.py b/tests/test_tracing.py
deleted file mode 100644
index 237c0e6ebb..0000000000
--- a/tests/test_tracing.py
+++ /dev/null
@@ -1,150 +0,0 @@
-import weakref
-import gc
-
-import pytest
-
-from sentry_sdk import Hub, capture_message
-from sentry_sdk.tracing import Span
-
-
-@pytest.mark.parametrize("sample_rate", [0.0, 1.0])
-def test_basic(sentry_init, capture_events, sample_rate):
-    sentry_init(traces_sample_rate=sample_rate)
-    events = capture_events()
-
-    with Hub.current.start_span(transaction="hi") as span:
-        span.set_status("ok")
-        with pytest.raises(ZeroDivisionError):
-            with Hub.current.start_span(op="foo", description="foodesc"):
-                1 / 0
-
-        with Hub.current.start_span(op="bar", description="bardesc"):
-            pass
-
-    if sample_rate:
-        (event,) = events
-
-        span1, span2 = event["spans"]
-        parent_span = event
-        assert span1["tags"]["status"] == "internal_error"
-        assert span1["op"] == "foo"
-        assert span1["description"] == "foodesc"
-        assert "status" not in span2.get("tags", {})
-        assert span2["op"] == "bar"
-        assert span2["description"] == "bardesc"
-        assert parent_span["transaction"] == "hi"
-        assert "status" not in event["tags"]
-        assert event["contexts"]["trace"]["status"] == "ok"
-    else:
-        assert not events
-
-
-@pytest.mark.parametrize("sampled", [True, False, None])
-def test_continue_from_headers(sentry_init, capture_events, sampled):
-    sentry_init(traces_sample_rate=1.0, traceparent_v2=True)
-    events = capture_events()
-
-    with Hub.current.start_span(transaction="hi"):
-        with Hub.current.start_span() as old_span:
-            old_span.sampled = sampled
-            headers = dict(Hub.current.iter_trace_propagation_headers())
-
-    header = headers["sentry-trace"]
-    if sampled is True:
-        assert header.endswith("-1")
-    if sampled is False:
-        assert header.endswith("-0")
-    if sampled is None:
-        assert header.endswith("-")
-
-    span = Span.continue_from_headers(headers)
-    span.transaction = "WRONG"
-    assert span is not None
-    assert span.sampled == sampled
-    assert span.trace_id == old_span.trace_id
-    assert span.same_process_as_parent is False
-    assert span.parent_span_id == old_span.span_id
-    assert span.span_id != old_span.span_id
-
-    with Hub.current.start_span(span):
-        with Hub.current.configure_scope() as scope:
-            scope.transaction = "ho"
-        capture_message("hello")
-
-    if sampled is False:
-        trace1, message = events
-
-        assert trace1["transaction"] == "hi"
-    else:
-        trace1, message, trace2 = events
-
-        assert trace1["transaction"] == "hi"
-        assert trace2["transaction"] == "ho"
-
-        assert (
-            trace1["contexts"]["trace"]["trace_id"]
-            == trace2["contexts"]["trace"]["trace_id"]
-            == span.trace_id
-            == message["contexts"]["trace"]["trace_id"]
-        )
-
-    assert message["message"] == "hello"
-
-
-def test_sampling_decided_only_for_transactions(sentry_init, capture_events):
-    sentry_init(traces_sample_rate=0.5)
-
-    with Hub.current.start_span(transaction="hi") as trace:
-        assert trace.sampled is not None
-
-        with Hub.current.start_span() as span:
-            assert span.sampled == trace.sampled
-
-    with Hub.current.start_span() as span:
-        assert span.sampled is None
-
-
-@pytest.mark.parametrize(
-    "args,expected_refcount",
-    [({"traces_sample_rate": 1.0}, 100), ({"traces_sample_rate": 0.0}, 0)],
-)
-def test_memory_usage(sentry_init, capture_events, args, expected_refcount):
-    sentry_init(**args)
-
-    references = weakref.WeakSet()
-
-    with Hub.current.start_span(transaction="hi"):
-        for i in range(100):
-            with Hub.current.start_span(
-                op="helloworld", description="hi {}".format(i)
-            ) as span:
-
-                def foo():
-                    pass
-
-                references.add(foo)
-                span.set_tag("foo", foo)
-                pass
-
-        del foo
-        del span
-
-        # required only for pypy (cpython frees immediately)
-        gc.collect()
-
-        assert len(references) == expected_refcount
-
-
-def test_span_trimming(sentry_init, capture_events):
-    sentry_init(traces_sample_rate=1.0, _experiments={"max_spans": 3})
-    events = capture_events()
-
-    with Hub.current.start_span(transaction="hi"):
-        for i in range(10):
-            with Hub.current.start_span(op="foo{}".format(i)):
-                pass
-
-    (event,) = events
-    span1, span2 = event["spans"]
-    assert span1["op"] == "foo0"
-    assert span2["op"] == "foo1"
diff --git a/tests/test_transport.py b/tests/test_transport.py
index 05dd47f612..a837182f6d 100644
--- a/tests/test_transport.py
+++ b/tests/test_transport.py
@@ -1,24 +1,78 @@
 # coding: utf-8
 import logging
 import pickle
+import gzip
+import io
 
 from datetime import datetime, timedelta
 
 import pytest
+from collections import namedtuple
+from werkzeug.wrappers import Request, Response
 
-from sentry_sdk import Hub, Client, add_breadcrumb, capture_message
+from pytest_localserver.http import WSGIServer
+
+from sentry_sdk import Hub, Client, add_breadcrumb, capture_message, Scope
 from sentry_sdk.transport import _parse_rate_limits
+from sentry_sdk.envelope import Envelope, parse_json
 from sentry_sdk.integrations.logging import LoggingIntegration
 
 
-@pytest.fixture(params=[True, False])
-def make_client(request):
-    def inner(*args, **kwargs):
-        client = Client(*args, **kwargs)
-        if request.param:
-            client = pickle.loads(pickle.dumps(client))
+CapturedData = namedtuple("CapturedData", ["path", "event", "envelope"])
+
+
+class CapturingServer(WSGIServer):
+    def __init__(self, host="127.0.0.1", port=0, ssl_context=None):
+        WSGIServer.__init__(self, host, port, self, ssl_context=ssl_context)
+        self.code = 204
+        self.headers = {}
+        self.captured = []
+
+    def respond_with(self, code=200, headers=None):
+        self.code = code
+        if headers:
+            self.headers = headers
+
+    def clear_captured(self):
+        del self.captured[:]
+
+    def __call__(self, environ, start_response):
+        """
+        This is the WSGI application.
+        """
+        request = Request(environ)
+        event = envelope = None
+        if request.mimetype == "application/json":
+            event = parse_json(gzip.GzipFile(fileobj=io.BytesIO(request.data)).read())
+        else:
+            envelope = Envelope.deserialize_from(
+                gzip.GzipFile(fileobj=io.BytesIO(request.data))
+            )
+
+        self.captured.append(
+            CapturedData(path=request.path, event=event, envelope=envelope)
+        )
+
+        response = Response(status=self.code)
+        response.headers.extend(self.headers)
+        return response(environ, start_response)
 
-        return client
+
+@pytest.fixture
+def capturing_server(request):
+    server = CapturingServer()
+    server.start()
+    request.addfinalizer(server.stop)
+    return server
+
+
+@pytest.fixture
+def make_client(request, capturing_server):
+    def inner(**kwargs):
+        return Client(
+            "http://foobar@{}/132".format(capturing_server.url[len("http://") :]),
+            **kwargs
+        )
 
     return inner
 
@@ -26,23 +80,24 @@ def inner(*args, **kwargs):
 @pytest.mark.forked
 @pytest.mark.parametrize("debug", (True, False))
 @pytest.mark.parametrize("client_flush_method", ["close", "flush"])
+@pytest.mark.parametrize("use_pickle", (True, False))
 def test_transport_works(
-    httpserver,
+    capturing_server,
     request,
     capsys,
     caplog,
     debug,
     make_client,
     client_flush_method,
+    use_pickle,
     maybe_monkeypatched_threading,
 ):
-    httpserver.serve_content("ok", 200)
-
     caplog.set_level(logging.DEBUG)
+    client = make_client(debug=debug)
+
+    if use_pickle:
+        client = pickle.loads(pickle.dumps(client))
 
-    client = make_client(
-        "http://foobar@{}/123".format(httpserver.url[len("http://") :]), debug=debug
-    )
     Hub.current.bind_client(client)
     request.addfinalizer(lambda: Hub.current.bind_client(None))
 
@@ -53,16 +108,13 @@ def test_transport_works(
 
     out, err = capsys.readouterr()
     assert not err and not out
-    assert httpserver.requests
+    assert capturing_server.captured
 
     assert any("Sending event" in record.msg for record in caplog.records) == debug
 
 
-def test_transport_infinite_loop(httpserver, request):
-    httpserver.serve_content("ok", 200)
-
-    client = Client(
-        "http://foobar@{}/123".format(httpserver.url[len("http://") :]),
+def test_transport_infinite_loop(capturing_server, request, make_client):
+    client = make_client(
         debug=True,
         # Make sure we cannot create events from our own logging
         integrations=[LoggingIntegration(event_level=logging.DEBUG)],
@@ -72,7 +124,7 @@ def test_transport_infinite_loop(httpserver, request):
         capture_message("hi")
         client.flush()
 
-    assert len(httpserver.requests) == 1
+    assert len(capturing_server.captured) == 1
 
 
 NOW = datetime(2014, 6, 2)
@@ -110,15 +162,16 @@ def test_parse_rate_limits(input, expected):
     assert dict(_parse_rate_limits(input, now=NOW)) == expected
 
 
-def test_simple_rate_limits(httpserver, capsys, caplog):
-    client = Client(dsn="http://foobar@{}/123".format(httpserver.url[len("http://") :]))
-    httpserver.serve_content("no", 429, headers={"Retry-After": "4"})
+def test_simple_rate_limits(capturing_server, capsys, caplog, make_client):
+    client = make_client()
+    capturing_server.respond_with(code=429, headers={"Retry-After": "4"})
 
     client.capture_event({"type": "transaction"})
     client.flush()
 
-    assert len(httpserver.requests) == 1
-    del httpserver.requests[:]
+    assert len(capturing_server.captured) == 1
+    assert capturing_server.captured[0].path == "/api/132/envelope/"
+    capturing_server.clear_captured()
 
     assert set(client.transport._disabled_until) == set([None])
 
@@ -126,62 +179,161 @@ def test_simple_rate_limits(httpserver, capsys, caplog):
     client.capture_event({"type": "event"})
     client.flush()
 
-    assert not httpserver.requests
+    assert not capturing_server.captured
 
 
 @pytest.mark.parametrize("response_code", [200, 429])
-def test_data_category_limits(httpserver, capsys, caplog, response_code):
-    client = Client(
-        dict(dsn="http://foobar@{}/123".format(httpserver.url[len("http://") :]))
-    )
-    httpserver.serve_content(
-        "hm",
-        response_code,
+def test_data_category_limits(
+    capturing_server, capsys, caplog, response_code, make_client, monkeypatch
+):
+    client = make_client(send_client_reports=False)
+
+    captured_outcomes = []
+
+    def record_lost_event(reason, data_category=None, item=None):
+        if data_category is None:
+            data_category = item.data_category
+        return captured_outcomes.append((reason, data_category))
+
+    monkeypatch.setattr(client.transport, "record_lost_event", record_lost_event)
+
+    capturing_server.respond_with(
+        code=response_code,
         headers={"X-Sentry-Rate-Limits": "4711:transaction:organization"},
     )
 
     client.capture_event({"type": "transaction"})
     client.flush()
 
-    assert len(httpserver.requests) == 1
-    del httpserver.requests[:]
+    assert len(capturing_server.captured) == 1
+    assert capturing_server.captured[0].path == "/api/132/envelope/"
+    capturing_server.clear_captured()
 
     assert set(client.transport._disabled_until) == set(["transaction"])
 
-    client.transport.capture_event({"type": "transaction"})
-    client.transport.capture_event({"type": "transaction"})
+    client.capture_event({"type": "transaction"})
+    client.capture_event({"type": "transaction"})
     client.flush()
 
-    assert not httpserver.requests
+    assert not capturing_server.captured
 
     client.capture_event({"type": "event"})
     client.flush()
 
-    assert len(httpserver.requests) == 1
+    assert len(capturing_server.captured) == 1
+    assert capturing_server.captured[0].path == "/api/132/store/"
+
+    assert captured_outcomes == [
+        ("ratelimit_backoff", "transaction"),
+        ("ratelimit_backoff", "transaction"),
+    ]
 
 
 @pytest.mark.parametrize("response_code", [200, 429])
-def test_complex_limits_without_data_category(
-    httpserver, capsys, caplog, response_code
+def test_data_category_limits_reporting(
+    capturing_server, capsys, caplog, response_code, make_client, monkeypatch
 ):
-    client = Client(
-        dict(dsn="http://foobar@{}/123".format(httpserver.url[len("http://") :]))
+    client = make_client(send_client_reports=True)
+
+    capturing_server.respond_with(
+        code=response_code,
+        headers={
+            "X-Sentry-Rate-Limits": "4711:transaction:organization, 4711:attachment:organization"
+        },
     )
-    httpserver.serve_content(
-        "hm", response_code, headers={"X-Sentry-Rate-Limits": "4711::organization"},
+
+    outcomes_enabled = False
+    real_fetch = client.transport._fetch_pending_client_report
+
+    def intercepting_fetch(*args, **kwargs):
+        if outcomes_enabled:
+            return real_fetch(*args, **kwargs)
+
+    monkeypatch.setattr(
+        client.transport, "_fetch_pending_client_report", intercepting_fetch
+    )
+    # run worker submissions inline so threading doesn't make things hard to track
+    monkeypatch.setattr(client.transport._worker, "submit", lambda x: x() or True)
+
+    client.capture_event({"type": "transaction"})
+    client.flush()
+
+    assert len(capturing_server.captured) == 1
+    assert capturing_server.captured[0].path == "/api/132/envelope/"
+    capturing_server.clear_captured()
+
+    assert set(client.transport._disabled_until) == set(["attachment", "transaction"])
+
+    client.capture_event({"type": "transaction"})
+    client.capture_event({"type": "transaction"})
+    capturing_server.clear_captured()
+
+    # flush out the events but don't flush the client reports
+    client.flush()
+    client.transport._last_client_report_sent = 0
+    outcomes_enabled = True
+
+    scope = Scope()
+    scope.add_attachment(bytes=b"Hello World", filename="hello.txt")
+    client.capture_event({"type": "error"}, scope=scope)
+    client.flush()
+
+    # this goes out with an extra envelope because it's flushed after the last item
+    # that is normally in the queue.  This is quite funny in a way because it means
+    # that the envelope that caused its own over quota report (an error with an
+    # attachment) will include its outcome since it's pending.
+    assert len(capturing_server.captured) == 1
+    envelope = capturing_server.captured[0].envelope
+    assert envelope.items[0].type == "event"
+    assert envelope.items[1].type == "client_report"
+    report = parse_json(envelope.items[1].get_bytes())
+    assert sorted(report["discarded_events"], key=lambda x: x["quantity"]) == [
+        {"category": "transaction", "reason": "ratelimit_backoff", "quantity": 2},
+        {"category": "attachment", "reason": "ratelimit_backoff", "quantity": 11},
+    ]
+    capturing_server.clear_captured()
+
+    # here we send a rate-limited transaction (dropped) and a normal error event
+    client.capture_event({"type": "transaction"})
+    client.capture_event({"type": "error", "release": "foo"})
+    client.flush()
+
+    assert len(capturing_server.captured) == 2
+
+    event = capturing_server.captured[0].event
+    assert event["type"] == "error"
+    assert event["release"] == "foo"
+
+    envelope = capturing_server.captured[1].envelope
+    assert envelope.items[0].type == "client_report"
+    report = parse_json(envelope.items[0].get_bytes())
+    assert report["discarded_events"] == [
+        {"category": "transaction", "reason": "ratelimit_backoff", "quantity": 1},
+    ]
+
+
+@pytest.mark.parametrize("response_code", [200, 429])
+def test_complex_limits_without_data_category(
+    capturing_server, capsys, caplog, response_code, make_client
+):
+    client = make_client()
+    capturing_server.respond_with(
+        code=response_code,
+        headers={"X-Sentry-Rate-Limits": "4711::organization"},
     )
 
     client.capture_event({"type": "transaction"})
     client.flush()
 
-    assert len(httpserver.requests) == 1
-    del httpserver.requests[:]
+    assert len(capturing_server.captured) == 1
+    assert capturing_server.captured[0].path == "/api/132/envelope/"
+    capturing_server.clear_captured()
 
     assert set(client.transport._disabled_until) == set([None])
 
-    client.transport.capture_event({"type": "transaction"})
-    client.transport.capture_event({"type": "transaction"})
+    client.capture_event({"type": "transaction"})
+    client.capture_event({"type": "transaction"})
     client.capture_event({"type": "event"})
     client.flush()
 
-    assert len(httpserver.requests) == 0
+    assert len(capturing_server.captured) == 0
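
A note on the X-Sentry-Rate-Limits headers used throughout these tests: each comma-separated entry reads "<retry_after_seconds>:<semicolon-separated categories>:<scope>", and an empty category list (as in "4711::organization") limits every category, which the assertions model as the None key in _disabled_until. A hedged sketch of that grammar, not the SDK's _parse_rate_limits itself:

    from datetime import datetime, timedelta

    def parse_rate_limits_sketch(header, now):
        limits = {}
        for entry in header.split(","):
            retry_after, categories, _scope = entry.strip().split(":", 2)
            until = now + timedelta(seconds=int(retry_after))
            for category in categories.split(";"):
                limits[category or None] = until  # "" -> None -> everything
        return limits

    assert parse_rate_limits_sketch("4711::organization", now=datetime(2014, 6, 2)) == {
        None: datetime(2014, 6, 2, 1, 18, 31)
    }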
diff --git a/tests/tracing/test_baggage.py b/tests/tracing/test_baggage.py
new file mode 100644
index 0000000000..185a085bf6
--- /dev/null
+++ b/tests/tracing/test_baggage.py
@@ -0,0 +1,77 @@
+# coding: utf-8
+from sentry_sdk.tracing_utils import Baggage
+
+
+def test_third_party_baggage():
+    header = "other-vendor-value-1=foo;bar;baz, other-vendor-value-2=foo;bar;"
+    baggage = Baggage.from_incoming_header(header)
+
+    assert baggage.mutable
+    assert baggage.sentry_items == {}
+    assert sorted(baggage.third_party_items.split(",")) == sorted(
+        "other-vendor-value-1=foo;bar;baz,other-vendor-value-2=foo;bar;".split(",")
+    )
+
+    assert baggage.dynamic_sampling_context() == {}
+    assert baggage.serialize() == ""
+    assert sorted(baggage.serialize(include_third_party=True).split(",")) == sorted(
+        "other-vendor-value-1=foo;bar;baz,other-vendor-value-2=foo;bar;".split(",")
+    )
+
+
+def test_mixed_baggage():
+    header = (
+        "other-vendor-value-1=foo;bar;baz, sentry-trace_id=771a43a4192642f0b136d5159a501700, "
+        "sentry-public_key=49d0f7386ad645858ae85020e393bef3, sentry-sample_rate=0.01337, "
+        "sentry-user_id=Am%C3%A9lie, other-vendor-value-2=foo;bar;"
+    )
+
+    baggage = Baggage.from_incoming_header(header)
+
+    assert not baggage.mutable
+
+    assert baggage.sentry_items == {
+        "public_key": "49d0f7386ad645858ae85020e393bef3",
+        "trace_id": "771a43a4192642f0b136d5159a501700",
+        "user_id": "Amélie",
+        "sample_rate": "0.01337",
+    }
+
+    assert (
+        baggage.third_party_items
+        == "other-vendor-value-1=foo;bar;baz,other-vendor-value-2=foo;bar;"
+    )
+
+    assert baggage.dynamic_sampling_context() == {
+        "public_key": "49d0f7386ad645858ae85020e393bef3",
+        "trace_id": "771a43a4192642f0b136d5159a501700",
+        "user_id": "Amélie",
+        "sample_rate": "0.01337",
+    }
+
+    assert sorted(baggage.serialize().split(",")) == sorted(
+        (
+            "sentry-trace_id=771a43a4192642f0b136d5159a501700,"
+            "sentry-public_key=49d0f7386ad645858ae85020e393bef3,"
+            "sentry-sample_rate=0.01337,sentry-user_id=Am%C3%A9lie"
+        ).split(",")
+    )
+
+    assert sorted(baggage.serialize(include_third_party=True).split(",")) == sorted(
+        (
+            "sentry-trace_id=771a43a4192642f0b136d5159a501700,"
+            "sentry-public_key=49d0f7386ad645858ae85020e393bef3,"
+            "sentry-sample_rate=0.01337,sentry-user_id=Am%C3%A9lie,"
+            "other-vendor-value-1=foo;bar;baz,other-vendor-value-2=foo;bar;"
+        ).split(",")
+    )
+
+
+def test_malformed_baggage():
+    header = ","
+
+    baggage = Baggage.from_incoming_header(header)
+
+    assert baggage.sentry_items == {}
+    assert baggage.third_party_items == ""
+    assert baggage.mutable
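
For context on the assertions above: a baggage header is a comma-separated list of key=value members; sentry-* members are collected with their percent-encoded values decoded (Am%C3%A9lie -> Amélie), while everything else is preserved verbatim as third-party passthrough. A rough sketch of that split, with a hypothetical helper name:

    from urllib.parse import unquote

    def split_baggage(header):
        sentry_items, third_party = {}, []
        for member in header.split(","):
            member = member.strip()
            if "=" not in member:
                continue  # tolerate malformed members such as a bare ","
            key, value = member.split("=", 1)
            if key.startswith("sentry-"):
                sentry_items[key[len("sentry-"):]] = unquote(value)
            else:
                third_party.append(member)
        return sentry_items, ",".join(third_party)

    assert split_baggage(",") == ({}, "")
    assert split_baggage("sentry-user_id=Am%C3%A9lie")[0] == {"user_id": "Amélie"}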
diff --git a/tests/tracing/test_deprecated.py b/tests/tracing/test_deprecated.py
new file mode 100644
index 0000000000..0ce9096b6e
--- /dev/null
+++ b/tests/tracing/test_deprecated.py
@@ -0,0 +1,20 @@
+from sentry_sdk import start_span
+
+from sentry_sdk.tracing import Span
+
+
+def test_start_span_to_start_transaction(sentry_init, capture_events):
+    # XXX: this only exists for backwards compatibility with code before
+    # Transaction / start_transaction were introduced.
+    sentry_init(traces_sample_rate=1.0)
+    events = capture_events()
+
+    with start_span(transaction="/1/"):
+        pass
+
+    with start_span(Span(transaction="/2/")):
+        pass
+
+    assert len(events) == 2
+    assert events[0]["transaction"] == "/1/"
+    assert events[1]["transaction"] == "/2/"
diff --git a/tests/tracing/test_http_headers.py b/tests/tracing/test_http_headers.py
new file mode 100644
index 0000000000..3db967b24b
--- /dev/null
+++ b/tests/tracing/test_http_headers.py
@@ -0,0 +1,332 @@
+import json
+
+import pytest
+
+import sentry_sdk
+from sentry_sdk.tracing import Transaction, Span
+from sentry_sdk.tracing_utils import (
+    compute_tracestate_value,
+    extract_sentrytrace_data,
+    extract_tracestate_data,
+    reinflate_tracestate,
+)
+from sentry_sdk.utils import from_base64, to_base64
+
+
+try:
+    from unittest import mock  # python 3.3 and above
+except ImportError:
+    import mock  # python < 3.3
+
+
+def test_tracestate_computation(sentry_init):
+    sentry_init(
+        dsn="https://dogsarebadatkeepingsecrets@squirrelchasers.ingest.sentry.io/12312012",
+        environment="dogpark",
+        release="off.leash.park",
+    )
+
+    sentry_sdk.set_user({"id": 12312013, "segment": "bigs"})
+
+    transaction = Transaction(
+        name="/interactions/other-dogs/new-dog",
+        op="greeting.sniff",
+        trace_id="12312012123120121231201212312012",
+    )
+
+    # force lazy computation to create a value
+    transaction.to_tracestate()
+
+    computed_value = transaction._sentry_tracestate.replace("sentry=", "")
+    # we have to decode and reinflate the data because we can't guarantee that
+    # the order of the entries in the jsonified dict will be the same here as
+    # when the tracestate is computed
+    reinflated_trace_data = json.loads(from_base64(computed_value))
+
+    assert reinflated_trace_data == {
+        "trace_id": "12312012123120121231201212312012",
+        "environment": "dogpark",
+        "release": "off.leash.park",
+        "public_key": "dogsarebadatkeepingsecrets",
+        "user": {"id": 12312013, "segment": "bigs"},
+        "transaction": "/interactions/other-dogs/new-dog",
+    }
+
+
+def test_doesnt_add_new_tracestate_to_transaction_when_none_given(sentry_init):
+    sentry_init(
+        dsn="https://dogsarebadatkeepingsecrets@squirrelchasers.ingest.sentry.io/12312012",
+        environment="dogpark",
+        release="off.leash.park",
+    )
+
+    transaction = Transaction(
+        name="/interactions/other-dogs/new-dog",
+        op="greeting.sniff",
+        # sentry_tracestate=< value would be passed here >
+    )
+
+    assert transaction._sentry_tracestate is None
+
+
+def test_adds_tracestate_to_transaction_when_to_traceparent_called(sentry_init):
+    sentry_init(
+        dsn="https://dogsarebadatkeepingsecrets@squirrelchasers.ingest.sentry.io/12312012",
+        environment="dogpark",
+        release="off.leash.park",
+    )
+
+    transaction = Transaction(
+        name="/interactions/other-dogs/new-dog",
+        op="greeting.sniff",
+    )
+
+    # no inherited tracestate, and none created in Transaction constructor
+    assert transaction._sentry_tracestate is None
+
+    transaction.to_tracestate()
+
+    assert transaction._sentry_tracestate is not None
+
+
+def test_adds_tracestate_to_transaction_when_getting_trace_context(sentry_init):
+    sentry_init(
+        dsn="https://dogsarebadatkeepingsecrets@squirrelchasers.ingest.sentry.io/12312012",
+        environment="dogpark",
+        release="off.leash.park",
+    )
+
+    transaction = Transaction(
+        name="/interactions/other-dogs/new-dog",
+        op="greeting.sniff",
+    )
+
+    # no inherited tracestate, and none created in Transaction constructor
+    assert transaction._sentry_tracestate is None
+
+    transaction.get_trace_context()
+
+    assert transaction._sentry_tracestate is not None
+
+
+@pytest.mark.parametrize(
+    "set_by", ["inheritance", "to_tracestate", "get_trace_context"]
+)
+def test_tracestate_is_immutable_once_set(sentry_init, monkeypatch, set_by):
+    monkeypatch.setattr(
+        sentry_sdk.tracing,
+        "compute_tracestate_entry",
+        mock.Mock(return_value="sentry=doGsaREgReaT"),
+    )
+
+    sentry_init(
+        dsn="https://dogsarebadatkeepingsecrets@squirrelchasers.ingest.sentry.io/12312012",
+        environment="dogpark",
+        release="off.leash.park",
+    )
+
+    # for each scenario, get to the point where tracestate has been set
+    if set_by == "inheritance":
+        transaction = Transaction(
+            name="/interactions/other-dogs/new-dog",
+            op="greeting.sniff",
+            sentry_tracestate=("sentry=doGsaREgReaT"),
+        )
+    else:
+        transaction = Transaction(
+            name="/interactions/other-dogs/new-dog",
+            op="greeting.sniff",
+        )
+
+        if set_by == "to_tracestate":
+            transaction.to_tracestate()
+        if set_by == "get_trace_context":
+            transaction.get_trace_context()
+
+    assert transaction._sentry_tracestate == "sentry=doGsaREgReaT"
+
+    # user data would be included in tracestate if it were recomputed at this point
+    sentry_sdk.set_user({"id": 12312013, "segment": "bigs"})
+
+    # value hasn't changed
+    assert transaction._sentry_tracestate == "sentry=doGsaREgReaT"
+
+
+@pytest.mark.parametrize("sampled", [True, False, None])
+def test_to_traceparent(sentry_init, sampled):
+    transaction = Transaction(
+        name="/interactions/other-dogs/new-dog",
+        op="greeting.sniff",
+        trace_id="12312012123120121231201212312012",
+        sampled=sampled,
+    )
+
+    traceparent = transaction.to_traceparent()
+
+    trace_id, parent_span_id, parent_sampled = traceparent.split("-")
+    assert trace_id == "12312012123120121231201212312012"
+    assert parent_span_id == transaction.span_id
+    assert parent_sampled == (
+        "1" if sampled is True else "0" if sampled is False else ""
+    )
+
+
+def test_to_tracestate(sentry_init):
+    sentry_init(
+        dsn="https://dogsarebadatkeepingsecrets@squirrelchasers.ingest.sentry.io/12312012",
+        environment="dogpark",
+        release="off.leash.park",
+    )
+
+    # it correctly uses the value from the transaction itself or the span's
+    # containing transaction
+    transaction_no_third_party = Transaction(
+        trace_id="12312012123120121231201212312012",
+        sentry_tracestate="sentry=doGsaREgReaT",
+    )
+    non_orphan_span = Span()
+    non_orphan_span._containing_transaction = transaction_no_third_party
+    assert transaction_no_third_party.to_tracestate() == "sentry=doGsaREgReaT"
+    assert non_orphan_span.to_tracestate() == "sentry=doGsaREgReaT"
+
+    # it combines sentry and third-party values correctly
+    transaction_with_third_party = Transaction(
+        trace_id="12312012123120121231201212312012",
+        sentry_tracestate="sentry=doGsaREgReaT",
+        third_party_tracestate="maisey=silly",
+    )
+    assert (
+        transaction_with_third_party.to_tracestate()
+        == "sentry=doGsaREgReaT,maisey=silly"
+    )
+
+    # it computes a tracestate from scratch for orphan transactions
+    orphan_span = Span(
+        trace_id="12312012123120121231201212312012",
+    )
+    assert orphan_span._containing_transaction is None
+    assert orphan_span.to_tracestate() == "sentry=" + compute_tracestate_value(
+        {
+            "trace_id": "12312012123120121231201212312012",
+            "environment": "dogpark",
+            "release": "off.leash.park",
+            "public_key": "dogsarebadatkeepingsecrets",
+        }
+    )
+
+
+@pytest.mark.parametrize("sampling_decision", [True, False])
+def test_sentrytrace_extraction(sampling_decision):
+    sentrytrace_header = "12312012123120121231201212312012-0415201309082013-{}".format(
+        1 if sampling_decision is True else 0
+    )
+    assert extract_sentrytrace_data(sentrytrace_header) == {
+        "trace_id": "12312012123120121231201212312012",
+        "parent_span_id": "0415201309082013",
+        "parent_sampled": sampling_decision,
+    }
+
+
+@pytest.mark.parametrize(
+    ("incoming_header", "expected_sentry_value", "expected_third_party"),
+    [
+        # sentry only
+        ("sentry=doGsaREgReaT", "sentry=doGsaREgReaT", None),
+        # sentry only, invalid (`!` isn't a valid base64 character)
+        ("sentry=doGsaREgReaT!", None, None),
+        # stuff before
+        ("maisey=silly,sentry=doGsaREgReaT", "sentry=doGsaREgReaT", "maisey=silly"),
+        # stuff after
+        ("sentry=doGsaREgReaT,maisey=silly", "sentry=doGsaREgReaT", "maisey=silly"),
+        # stuff before and after
+        (
+            "charlie=goofy,sentry=doGsaREgReaT,maisey=silly",
+            "sentry=doGsaREgReaT",
+            "charlie=goofy,maisey=silly",
+        ),
+        # multiple before
+        (
+            "charlie=goofy,maisey=silly,sentry=doGsaREgReaT",
+            "sentry=doGsaREgReaT",
+            "charlie=goofy,maisey=silly",
+        ),
+        # multiple after
+        (
+            "sentry=doGsaREgReaT,charlie=goofy,maisey=silly",
+            "sentry=doGsaREgReaT",
+            "charlie=goofy,maisey=silly",
+        ),
+        # multiple before and after
+        (
+            "charlie=goofy,maisey=silly,sentry=doGsaREgReaT,bodhi=floppy,cory=loyal",
+            "sentry=doGsaREgReaT",
+            "charlie=goofy,maisey=silly,bodhi=floppy,cory=loyal",
+        ),
+        # only third-party data
+        ("maisey=silly", None, "maisey=silly"),
+        # invalid third-party data, valid sentry data
+        ("maisey_is_silly,sentry=doGsaREgReaT", "sentry=doGsaREgReaT", None),
+        # valid third-party data, invalid sentry data
+        ("maisey=silly,sentry=doGsaREgReaT!", None, "maisey=silly"),
+        # nothing valid at all
+        ("maisey_is_silly,sentry=doGsaREgReaT!", None, None),
+    ],
+)
+def test_tracestate_extraction(
+    incoming_header, expected_sentry_value, expected_third_party
+):
+    assert extract_tracestate_data(incoming_header) == {
+        "sentry_tracestate": expected_sentry_value,
+        "third_party_tracestate": expected_third_party,
+    }
+
+
+# TODO (kmclb) remove this parameterization once tracestate is a real feature
+@pytest.mark.parametrize("tracestate_enabled", [True, False])
+def test_iter_headers(sentry_init, monkeypatch, tracestate_enabled):
+    monkeypatch.setattr(
+        Transaction,
+        "to_traceparent",
+        mock.Mock(return_value="12312012123120121231201212312012-0415201309082013-0"),
+    )
+    monkeypatch.setattr(
+        Transaction,
+        "to_tracestate",
+        mock.Mock(return_value="sentry=doGsaREgReaT,charlie=goofy"),
+    )
+    monkeypatch.setattr(
+        sentry_sdk.tracing,
+        "has_tracestate_enabled",
+        mock.Mock(return_value=tracestate_enabled),
+    )
+
+    transaction = Transaction(
+        name="/interactions/other-dogs/new-dog",
+        op="greeting.sniff",
+    )
+
+    headers = dict(transaction.iter_headers())
+    assert (
+        headers["sentry-trace"] == "12312012123120121231201212312012-0415201309082013-0"
+    )
+    if tracestate_enabled:
+        assert "tracestate" in headers
+        assert headers["tracestate"] == "sentry=doGsaREgReaT,charlie=goofy"
+    else:
+        assert "tracestate" not in headers
+
+
+@pytest.mark.parametrize(
+    "data",
+    [  # comes out with no trailing `=`
+        {"name": "Maisey", "birthday": "12/31/12"},
+        # comes out with one trailing `=`
+        {"dogs": "yes", "cats": "maybe"},
+        # comes out with two trailing `=`
+        {"name": "Charlie", "birthday": "11/21/12"},
+    ],
+)
+def test_tracestate_reinflation(data):
+    encoded_tracestate = to_base64(json.dumps(data)).strip("=")
+    assert reinflate_tracestate(encoded_tracestate) == data
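
The trailing-= parametrization above exists because the tracestate spec disallows "=" in values: base64 padding is stripped after encoding and has to be restored to a multiple of four characters before decoding. A minimal sketch of the round-trip, assuming standard base64 (the helper name is illustrative):

    import base64
    import json

    def reinflate_sketch(stripped):
        padded = stripped + "=" * (-len(stripped) % 4)
        return json.loads(base64.b64decode(padded))

    data = {"name": "Charlie", "birthday": "11/21/12"}
    encoded = base64.b64encode(json.dumps(data).encode("utf-8")).decode("ascii")
    assert reinflate_sketch(encoded.rstrip("=")) == data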
diff --git a/tests/tracing/test_integration_tests.py b/tests/tracing/test_integration_tests.py
new file mode 100644
index 0000000000..f42df1091b
--- /dev/null
+++ b/tests/tracing/test_integration_tests.py
@@ -0,0 +1,284 @@
+# coding: utf-8
+import weakref
+import gc
+import re
+import pytest
+import random
+
+from sentry_sdk import (
+    capture_message,
+    configure_scope,
+    Hub,
+    start_span,
+    start_transaction,
+)
+from sentry_sdk.transport import Transport
+from sentry_sdk.tracing import Transaction
+
+
+@pytest.mark.parametrize("sample_rate", [0.0, 1.0])
+def test_basic(sentry_init, capture_events, sample_rate):
+    sentry_init(traces_sample_rate=sample_rate)
+    events = capture_events()
+
+    with start_transaction(name="hi") as transaction:
+        transaction.set_status("ok")
+        with pytest.raises(ZeroDivisionError):
+            with start_span(op="foo", description="foodesc"):
+                1 / 0
+
+        with start_span(op="bar", description="bardesc"):
+            pass
+
+    if sample_rate:
+        assert len(events) == 1
+        event = events[0]
+
+        assert event["transaction"] == "hi"
+        assert event["transaction_info"]["source"] == "custom"
+
+        span1, span2 = event["spans"]
+        parent_span = event
+        assert span1["tags"]["status"] == "internal_error"
+        assert span1["op"] == "foo"
+        assert span1["description"] == "foodesc"
+        assert "status" not in span2.get("tags", {})
+        assert span2["op"] == "bar"
+        assert span2["description"] == "bardesc"
+        assert parent_span["transaction"] == "hi"
+        assert "status" not in event["tags"]
+        assert event["contexts"]["trace"]["status"] == "ok"
+    else:
+        assert not events
+
+
+@pytest.mark.parametrize("sampled", [True, False, None])
+@pytest.mark.parametrize("sample_rate", [0.0, 1.0])
+def test_continue_from_headers(sentry_init, capture_envelopes, sampled, sample_rate):
+    """
+    Ensure data is actually passed along via headers, and that they are read
+    correctly.
+    """
+    sentry_init(traces_sample_rate=sample_rate)
+    envelopes = capture_envelopes()
+
+    # make a parent transaction (normally this would be in a different service)
+    with start_transaction(
+        name="hi", sampled=True if sample_rate == 0 else None
+    ) as parent_transaction:
+        with start_span() as old_span:
+            old_span.sampled = sampled
+            tracestate = parent_transaction._sentry_tracestate
+
+            headers = dict(Hub.current.iter_trace_propagation_headers(old_span))
+            headers["baggage"] = (
+                "other-vendor-value-1=foo;bar;baz, "
+                "sentry-trace_id=771a43a4192642f0b136d5159a501700, "
+                "sentry-public_key=49d0f7386ad645858ae85020e393bef3, "
+                "sentry-sample_rate=0.01337, sentry-user_id=Amelie, "
+                "other-vendor-value-2=foo;bar;"
+            )
+
+    # child transaction, to prove that we can read 'sentry-trace' and
+    # `tracestate` header data correctly
+    child_transaction = Transaction.continue_from_headers(headers, name="WRONG")
+    assert child_transaction is not None
+    assert child_transaction.parent_sampled == sampled
+    assert child_transaction.trace_id == old_span.trace_id
+    assert child_transaction.same_process_as_parent is False
+    assert child_transaction.parent_span_id == old_span.span_id
+    assert child_transaction.span_id != old_span.span_id
+    assert child_transaction._sentry_tracestate == tracestate
+
+    baggage = child_transaction._baggage
+    assert baggage
+    assert not baggage.mutable
+    assert baggage.sentry_items == {
+        "public_key": "49d0f7386ad645858ae85020e393bef3",
+        "trace_id": "771a43a4192642f0b136d5159a501700",
+        "user_id": "Amelie",
+        "sample_rate": "0.01337",
+    }
+
+    # add child transaction to the scope, to show that the captured message will
+    # be tagged with the trace id (since it happens while the transaction is
+    # open)
+    with start_transaction(child_transaction):
+        with configure_scope() as scope:
+            # change the transaction name from "WRONG" to make sure the change
+            # is reflected in the final data
+            scope.transaction = "ho"
+        capture_message("hello")
+
+    # in this case the child transaction won't be captured
+    if sampled is False or (sample_rate == 0 and sampled is None):
+        trace1, message = envelopes
+        message_payload = message.get_event()
+        trace1_payload = trace1.get_transaction_event()
+
+        assert trace1_payload["transaction"] == "hi"
+    else:
+        trace1, message, trace2 = envelopes
+        trace1_payload = trace1.get_transaction_event()
+        message_payload = message.get_event()
+        trace2_payload = trace2.get_transaction_event()
+
+        assert trace1_payload["transaction"] == "hi"
+        assert trace2_payload["transaction"] == "ho"
+
+        assert (
+            trace1_payload["contexts"]["trace"]["trace_id"]
+            == trace2_payload["contexts"]["trace"]["trace_id"]
+            == child_transaction.trace_id
+            == message_payload["contexts"]["trace"]["trace_id"]
+        )
+
+        assert trace2.headers["trace"] == baggage.dynamic_sampling_context()
+        assert trace2.headers["trace"] == {
+            "public_key": "49d0f7386ad645858ae85020e393bef3",
+            "trace_id": "771a43a4192642f0b136d5159a501700",
+            "user_id": "Amelie",
+            "sample_rate": "0.01337",
+        }
+
+    assert message_payload["message"] == "hello"
+
+
+@pytest.mark.parametrize("sample_rate", [0.5, 1.0])
+def test_dynamic_sampling_head_sdk_creates_dsc(
+    sentry_init, capture_envelopes, sample_rate, monkeypatch
+):
+    sentry_init(traces_sample_rate=sample_rate, release="foo")
+    envelopes = capture_envelopes()
+
+    # make sure transaction is sampled for both cases
+    monkeypatch.setattr(random, "random", lambda: 0.1)
+
+    transaction = Transaction.continue_from_headers({}, name="Head SDK tx")
+
+    # will create empty mutable baggage
+    baggage = transaction._baggage
+    assert baggage
+    assert baggage.mutable
+    assert baggage.sentry_items == {}
+    assert baggage.third_party_items == ""
+
+    with start_transaction(transaction):
+        with start_span(op="foo", description="foodesc"):
+            pass
+
+    # finish will create a new baggage entry
+    baggage = transaction._baggage
+    trace_id = transaction.trace_id
+
+    assert baggage
+    assert not baggage.mutable
+    assert baggage.third_party_items == ""
+    assert baggage.sentry_items == {
+        "environment": "production",
+        "release": "foo",
+        "sample_rate": str(sample_rate),
+        "transaction": "Head SDK tx",
+        "trace_id": trace_id,
+    }
+
+    expected_baggage = (
+        "sentry-environment=production,sentry-release=foo,sentry-sample_rate=%s,sentry-transaction=Head%%20SDK%%20tx,sentry-trace_id=%s"
+        % (sample_rate, trace_id)
+    )
+    assert sorted(baggage.serialize().split(",")) == sorted(expected_baggage.split(","))
+
+    (envelope,) = envelopes
+    assert envelope.headers["trace"] == baggage.dynamic_sampling_context()
+    assert envelope.headers["trace"] == {
+        "environment": "production",
+        "release": "foo",
+        "sample_rate": str(sample_rate),
+        "transaction": "Head SDK tx",
+        "trace_id": trace_id,
+    }
+
+
+@pytest.mark.parametrize(
+    "args,expected_refcount",
+    [({"traces_sample_rate": 1.0}, 100), ({"traces_sample_rate": 0.0}, 0)],
+)
+def test_memory_usage(sentry_init, capture_events, args, expected_refcount):
+    sentry_init(**args)
+
+    references = weakref.WeakSet()
+
+    with start_transaction(name="hi"):
+        for i in range(100):
+            with start_span(op="helloworld", description="hi {}".format(i)) as span:
+
+                def foo():
+                    pass
+
+                references.add(foo)
+                span.set_tag("foo", foo)
+                pass
+
+        del foo
+        del span
+
+        # required only for pypy (cpython frees immediately)
+        gc.collect()
+
+        assert len(references) == expected_refcount
+
+
+def test_transactions_do_not_go_through_before_send(sentry_init, capture_events):
+    def before_send(event, hint):
+        raise RuntimeError("should not be called")
+
+    sentry_init(traces_sample_rate=1.0, before_send=before_send)
+    events = capture_events()
+
+    with start_transaction(name="/"):
+        pass
+
+    assert len(events) == 1
+
+
+def test_start_span_after_finish(sentry_init, capture_events):
+    class CustomTransport(Transport):
+        def capture_envelope(self, envelope):
+            pass
+
+        def capture_event(self, event):
+            start_span(op="toolate", description="justdont")
+            pass
+
+    sentry_init(traces_sample_rate=1, transport=CustomTransport())
+    events = capture_events()
+
+    with start_transaction(name="hi"):
+        with start_span(op="bar", description="bardesc"):
+            pass
+
+    assert len(events) == 1
+
+
+def test_trace_propagation_meta_head_sdk(sentry_init):
+    sentry_init(traces_sample_rate=1.0, release="foo")
+
+    transaction = Transaction.continue_from_headers({}, name="Head SDK tx")
+    meta = None
+    span = None
+
+    with start_transaction(transaction):
+        with start_span(op="foo", description="foodesc") as current_span:
+            span = current_span
+            meta = Hub.current.trace_propagation_meta()
+
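+    # trace_propagation_meta() returns two HTML <meta> tags back to back, so
+    # splitting at the first ">" separates the sentry-trace tag from the
+    # baggage tag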
+    ind = meta.find(">") + 1
+    sentry_trace, baggage = meta[:ind], meta[ind:]
+
+    assert 'meta name="sentry-trace"' in sentry_trace
+    sentry_trace_content = re.findall('content="([^"]*)"', sentry_trace)[0]
+    assert sentry_trace_content == span.to_traceparent()
+
+    assert 'meta name="baggage"' in baggage
+    baggage_content = re.findall('content="([^"]*)"', baggage)[0]
+    assert baggage_content == transaction.get_baggage().serialize()
diff --git a/tests/tracing/test_misc.py b/tests/tracing/test_misc.py
new file mode 100644
index 0000000000..b51b5dcddb
--- /dev/null
+++ b/tests/tracing/test_misc.py
@@ -0,0 +1,276 @@
+import pytest
+import gc
+import uuid
+import os
+
+import sentry_sdk
+from sentry_sdk import Hub, start_span, start_transaction
+from sentry_sdk.tracing import Span, Transaction
+from sentry_sdk.tracing_utils import has_tracestate_enabled
+
+try:
+    from unittest import mock  # python 3.3 and above
+except ImportError:
+    import mock  # python < 3.3
+
+
+def test_span_trimming(sentry_init, capture_events):
+    sentry_init(traces_sample_rate=1.0, _experiments={"max_spans": 3})
+    events = capture_events()
+
+    with start_transaction(name="hi"):
+        for i in range(10):
+            with start_span(op="foo{}".format(i)):
+                pass
+
+    (event,) = events
+
+    assert len(event["spans"]) == 3
+
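+    # the first max_spans spans are kept; any started after the limit are dropped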
+    span1, span2, span3 = event["spans"]
+    assert span1["op"] == "foo0"
+    assert span2["op"] == "foo1"
+    assert span3["op"] == "foo2"
+
+
+def test_transaction_naming(sentry_init, capture_events):
+    sentry_init(traces_sample_rate=1.0)
+    events = capture_events()
+
+    # only transactions have names - spans don't
+    with pytest.raises(TypeError):
+        start_span(name="foo")
+    assert len(events) == 0
+
+    # default name in event if no name is passed
+    with start_transaction() as transaction:
+        pass
+    assert len(events) == 1
+    assert events[0]["transaction"] == "<unlabeled transaction>"
+
+    # the name can be set once the transaction's already started
+    with start_transaction() as transaction:
+        transaction.name = "name-known-after-transaction-started"
+    assert len(events) == 2
+    assert events[1]["transaction"] == "name-known-after-transaction-started"
+
+    # passing in a name works, too
+    with start_transaction(name="a"):
+        pass
+    assert len(events) == 3
+    assert events[2]["transaction"] == "a"
+
+
+def test_start_transaction(sentry_init):
+    sentry_init(traces_sample_rate=1.0)
+
+    # you can have it start a transaction for you
+    result1 = start_transaction(
+        name="/interactions/other-dogs/new-dog", op="greeting.sniff"
+    )
+    assert isinstance(result1, Transaction)
+    assert result1.name == "/interactions/other-dogs/new-dog"
+    assert result1.op == "greeting.sniff"
+
+    # or you can pass it an already-created transaction
+    preexisting_transaction = Transaction(
+        name="/interactions/other-dogs/new-dog", op="greeting.sniff"
+    )
+    result2 = start_transaction(preexisting_transaction)
+    assert result2 is preexisting_transaction
+
+
+def test_finds_transaction_on_scope(sentry_init):
+    sentry_init(traces_sample_rate=1.0)
+
+    transaction = start_transaction(name="dogpark")
+
+    scope = Hub.current.scope
+
+    # See note in Scope class re: getters and setters of the `transaction`
+    # property. For the moment, assigning to scope.transaction merely sets the
+    # transaction name, rather than putting the transaction on the scope, so we
+    # have to assign to _span directly.
+    scope._span = transaction
+
+    # Reading the scope.transaction property, however, does what you'd expect,
+    # and returns the transaction on the scope.
+    assert scope.transaction is not None
+    assert isinstance(scope.transaction, Transaction)
+    assert scope.transaction.name == "dogpark"
+
+    # If the transaction is also set as the span on the scope, it can be found
+    # by accessing _span, too.
+    assert scope._span is not None
+    assert isinstance(scope._span, Transaction)
+    assert scope._span.name == "dogpark"
+
+
+def test_finds_transaction_when_descendent_span_is_on_scope(
+    sentry_init,
+):
+    sentry_init(traces_sample_rate=1.0)
+
+    transaction = start_transaction(name="dogpark")
+    child_span = transaction.start_child(op="sniffing")
+
+    scope = Hub.current.scope
+    scope._span = child_span
+
+    # this is the same whether it's the transaction itself or one of its
+    # descendants directly attached to the scope
+    assert scope.transaction is not None
+    assert isinstance(scope.transaction, Transaction)
+    assert scope.transaction.name == "dogpark"
+
+    # here we see that it is in fact the span on the scope, rather than the
+    # transaction itself
+    assert scope._span is not None
+    assert isinstance(scope._span, Span)
+    assert scope._span.op == "sniffing"
+
+
+def test_finds_orphan_span_on_scope(sentry_init):
+    # this is deprecated behavior which may be removed at some point (along with
+    # the start_span function)
+    sentry_init(traces_sample_rate=1.0)
+
+    span = start_span(op="sniffing")
+
+    scope = Hub.current.scope
+    scope._span = span
+
+    assert scope._span is not None
+    assert isinstance(scope._span, Span)
+    assert scope._span.op == "sniffing"
+
+
+def test_finds_non_orphan_span_on_scope(sentry_init):
+    sentry_init(traces_sample_rate=1.0)
+
+    transaction = start_transaction(name="dogpark")
+    child_span = transaction.start_child(op="sniffing")
+
+    scope = Hub.current.scope
+    scope._span = child_span
+
+    assert scope._span is not None
+    assert isinstance(scope._span, Span)
+    assert scope._span.op == "sniffing"
+
+
+def test_circular_references(monkeypatch, sentry_init, request):
+    # TODO: We discovered while writing this test about transaction/span
+    # reference cycles that there's actually also a circular reference in
+    # `serializer.py`, between the functions `_serialize_node` and
+    # `_serialize_node_impl`, both of which are defined inside of the main
+    # `serialize` function, and each of which calls the other one. For now, in
+    # order to avoid having those ref cycles give us a false positive here, we
+    # can mock out `serialize`. In the long run, though, we should probably fix
+    # that. (Whenever we do work on fixing it, it may be useful to add
+    #
+    #     gc.set_debug(gc.DEBUG_LEAK)
+    #     request.addfinalizer(lambda: gc.set_debug(~gc.DEBUG_LEAK))
+    #
+    # immediately after the initial collection below, so we can see what new
+    # objects the garbage collector has to clean up once `transaction.finish` is
+    # called and the serializer runs.)
+    monkeypatch.setattr(
+        sentry_sdk.client,
+        "serialize",
+        mock.Mock(
+            return_value=None,
+        ),
+    )
+
+    # In certain versions of python, in some environments (specifically, python
+    # 3.4 when run in GH Actions), we run into a `ctypes` bug which creates
+    # circular references when `uuid4()` is called, as happens when we're
+    # generating event ids. Mocking it with an implementation which doesn't use
+    # the `ctypes` function lets us avoid having false positives when garbage
+    # collecting. See https://bugs.python.org/issue20519.
+    monkeypatch.setattr(
+        uuid,
+        "uuid4",
+        mock.Mock(
+            return_value=uuid.UUID(bytes=os.urandom(16)),
+        ),
+    )
+
+    gc.disable()
+    request.addfinalizer(gc.enable)
+
+    sentry_init(traces_sample_rate=1.0)
+
+    # Make sure that we're starting with a clean slate before we start creating
+    # transaction/span reference cycles
+    gc.collect()
+
+    dogpark_transaction = start_transaction(name="dogpark")
+    sniffing_span = dogpark_transaction.start_child(op="sniffing")
+    wagging_span = dogpark_transaction.start_child(op="wagging")
+
+    # At some point, you have to stop sniffing - there are balls to chase! - so finish
+    # this span while the dogpark transaction is still open
+    sniffing_span.finish()
+
+    # The wagging, however, continues long past the dogpark, so that span will
+    # NOT finish before the transaction ends. (Doing it in this order proves
+    # that both finished and unfinished spans get their cycles broken.)
+    dogpark_transaction.finish()
+
+    # Eventually you gotta sleep...
+    wagging_span.finish()
+
+    # assuming there are no cycles by this point, these should all be able to go
+    # out of scope and get their memory deallocated without the garbage
+    # collector having anything to do
+    del sniffing_span
+    del wagging_span
+    del dogpark_transaction
+
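+    # gc.collect() returns the number of unreachable objects found, so 0 means
+    # no reference cycles were left behind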
+    assert gc.collect() == 0
+
+
+# TODO (kmclb) remove this test once tracestate is a real feature
+@pytest.mark.parametrize("tracestate_enabled", [True, False, None])
+def test_has_tracestate_enabled(sentry_init, tracestate_enabled):
+    experiments = (
+        {"propagate_tracestate": tracestate_enabled}
+        if tracestate_enabled is not None
+        else {}
+    )
+    sentry_init(_experiments=experiments)
+
+    if tracestate_enabled is True:
+        assert has_tracestate_enabled() is True
+    else:
+        assert has_tracestate_enabled() is False
+
+
+def test_set_measurement(sentry_init, capture_events):
+    sentry_init(traces_sample_rate=1.0, _experiments={"custom_measurements": True})
+
+    events = capture_events()
+
+    transaction = start_transaction(name="measuring stuff")
+
+    with pytest.raises(TypeError):
+        transaction.set_measurement()
+
+    with pytest.raises(TypeError):
+        transaction.set_measurement("metric.foo")
+
+    transaction.set_measurement("metric.foo", 123)
+    transaction.set_measurement("metric.bar", 456, unit="second")
+    transaction.set_measurement("metric.baz", 420.69, unit="custom")
+    transaction.set_measurement("metric.foobar", 12, unit="percent")
+    transaction.set_measurement("metric.foobar", 17.99, unit="percent")
+
+    transaction.finish()
+
+    (event,) = events
+    assert event["measurements"]["metric.foo"] == {"value": 123, "unit": ""}
+    assert event["measurements"]["metric.bar"] == {"value": 456, "unit": "second"}
+    assert event["measurements"]["metric.baz"] == {"value": 420.69, "unit": "custom"}
+    assert event["measurements"]["metric.foobar"] == {"value": 17.99, "unit": "percent"}
diff --git a/tests/tracing/test_noop_span.py b/tests/tracing/test_noop_span.py
new file mode 100644
index 0000000000..92cba75a35
--- /dev/null
+++ b/tests/tracing/test_noop_span.py
@@ -0,0 +1,52 @@
+import sentry_sdk
+from sentry_sdk.tracing import NoOpSpan
+
+# These tests make sure that the examples from the documentation [1]
+# work when OTel (OpenTelemetry) instrumentation is turned on,
+# in which case Sentry's own tracing should not do anything.
+#
+# 1: https://docs.sentry.io/platforms/python/performance/instrumentation/custom-instrumentation/
+
+
+def test_noop_start_transaction(sentry_init):
+    sentry_init(instrumenter="otel", debug=True)
+
+    with sentry_sdk.start_transaction(
+        op="task", name="test_transaction_name"
+    ) as transaction:
+        assert isinstance(transaction, NoOpSpan)
+        assert sentry_sdk.Hub.current.scope.span is transaction
+
+        transaction.name = "new name"
+
+
+def test_noop_start_span(sentry_init):
+    sentry_init(instrumenter="otel", debug=True)
+
+    with sentry_sdk.start_span(op="http", description="GET /") as span:
+        assert isinstance(span, NoOpSpan)
+        assert sentry_sdk.Hub.current.scope.span is span
+
+        span.set_tag("http.status_code", "418")
+        span.set_data("http.entity_type", "teapot")
+
+
+def test_noop_transaction_start_child(sentry_init):
+    sentry_init(instrumenter="otel", debug=True)
+
+    transaction = sentry_sdk.start_transaction(name="task")
+    assert isinstance(transaction, NoOpSpan)
+
+    with transaction.start_child(op="child_task") as child:
+        assert isinstance(child, NoOpSpan)
+        assert sentry_sdk.Hub.current.scope.span is child
+
+
+def test_noop_span_start_child(sentry_init):
+    sentry_init(instrumenter="otel", debug=True)
+    span = sentry_sdk.start_span(name="task")
+    assert isinstance(span, NoOpSpan)
+
+    with span.start_child(op="child_task") as child:
+        assert isinstance(child, NoOpSpan)
+        assert sentry_sdk.Hub.current.scope.span is child
diff --git a/tests/tracing/test_sampling.py b/tests/tracing/test_sampling.py
new file mode 100644
index 0000000000..9975abad5d
--- /dev/null
+++ b/tests/tracing/test_sampling.py
@@ -0,0 +1,344 @@
+import random
+
+import pytest
+
+from sentry_sdk import Hub, start_span, start_transaction
+from sentry_sdk.tracing import Transaction
+from sentry_sdk.tracing_utils import is_valid_sample_rate
+from sentry_sdk.utils import logger
+
+try:
+    from unittest import mock  # python 3.3 and above
+except ImportError:
+    import mock  # python < 3.3
+
+
+def test_sampling_decided_only_for_transactions(sentry_init, capture_events):
+    sentry_init(traces_sample_rate=0.5)
+
+    with start_transaction(name="hi") as transaction:
+        assert transaction.sampled is not None
+
+        with start_span() as span:
+            assert span.sampled == transaction.sampled
+
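+    # a span started outside of any transaction has no sampling decision at all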
+    with start_span() as span:
+        assert span.sampled is None
+
+
+@pytest.mark.parametrize("sampled", [True, False])
+def test_nested_transaction_sampling_override(sentry_init, sampled):
+    sentry_init(traces_sample_rate=1.0)
+
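+    # an explicit sampled= argument wins over traces_sample_rate and over the
+    # enclosing transaction's decision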
+    with start_transaction(name="outer", sampled=sampled) as outer_transaction:
+        assert outer_transaction.sampled is sampled
+        with start_transaction(
+            name="inner", sampled=(not sampled)
+        ) as inner_transaction:
+            assert inner_transaction.sampled is not sampled
+        assert outer_transaction.sampled is sampled
+
+
+def test_no_double_sampling(sentry_init, capture_events):
+    # Transactions should not be subject to the global/error sample rate.
+    # Only the traces_sample_rate should apply.
+    sentry_init(traces_sample_rate=1.0, sample_rate=0.0)
+    events = capture_events()
+
+    with start_transaction(name="/"):
+        pass
+
+    assert len(events) == 1
+
+
+@pytest.mark.parametrize(
+    "rate",
+    [0.0, 0.1231, 1.0, True, False],
+)
+def test_accepts_valid_sample_rate(rate):
+    with mock.patch.object(logger, "warning", mock.Mock()):
+        result = is_valid_sample_rate(rate)
+        assert logger.warning.called is False
+        assert result is True
+
+
+@pytest.mark.parametrize(
+    "rate",
+    [
+        "dogs are great",  # wrong type
+        (0, 1),  # wrong type
+        {"Maisey": "Charllie"},  # wrong type
+        [True, True],  # wrong type
+        {0.2012},  # wrong type
+        float("NaN"),  # wrong type
+        None,  # wrong type
+        -1.121,  # wrong value
+        1.231,  # wrong value
+    ],
+)
+def test_warns_on_invalid_sample_rate(rate, StringContaining):  # noqa: N803
+    with mock.patch.object(logger, "warning", mock.Mock()):
+        result = is_valid_sample_rate(rate)
+        logger.warning.assert_any_call(StringContaining("Given sample rate is invalid"))
+        assert result is False
+
+
+@pytest.mark.parametrize("sampling_decision", [True, False])
+def test_get_transaction_and_span_from_scope_regardless_of_sampling_decision(
+    sentry_init, sampling_decision
+):
+    sentry_init(traces_sample_rate=1.0)
+
+    with start_transaction(name="/", sampled=sampling_decision):
+        with start_span(op="child-span"):
+            with start_span(op="child-child-span"):
+                scope = Hub.current.scope
+                assert scope.span.op == "child-child-span"
+                assert scope.transaction.name == "/"
+
+
+@pytest.mark.parametrize(
+    "traces_sample_rate,expected_decision",
+    [(0.0, False), (0.25, False), (0.75, True), (1.00, True)],
+)
+def test_uses_traces_sample_rate_correctly(
+    sentry_init,
+    traces_sample_rate,
+    expected_decision,
+):
+    sentry_init(traces_sample_rate=traces_sample_rate)
+
+    with mock.patch.object(random, "random", return_value=0.5):
+
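+        # the decision is random.random() < traces_sample_rate, so with the
+        # value pinned at 0.5 only rates above 0.5 get sampled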
+        transaction = start_transaction(name="dogpark")
+        assert transaction.sampled is expected_decision
+
+
+@pytest.mark.parametrize(
+    "traces_sampler_return_value,expected_decision",
+    [(0.0, False), (0.25, False), (0.75, True), (1.00, True)],
+)
+def test_uses_traces_sampler_return_value_correctly(
+    sentry_init,
+    traces_sampler_return_value,
+    expected_decision,
+):
+    sentry_init(traces_sampler=mock.Mock(return_value=traces_sampler_return_value))
+
+    with mock.patch.object(random, "random", return_value=0.5):
+
+        transaction = start_transaction(name="dogpark")
+        assert transaction.sampled is expected_decision
+
+
+@pytest.mark.parametrize("traces_sampler_return_value", [True, False])
+def test_tolerates_traces_sampler_returning_a_boolean(
+    sentry_init, traces_sampler_return_value
+):
+    sentry_init(traces_sampler=mock.Mock(return_value=traces_sampler_return_value))
+
+    transaction = start_transaction(name="dogpark")
+    assert transaction.sampled is traces_sampler_return_value
+
+
+@pytest.mark.parametrize("sampling_decision", [True, False])
+def test_only_captures_transaction_when_sampled_is_true(
+    sentry_init, sampling_decision, capture_events
+):
+    sentry_init(traces_sampler=mock.Mock(return_value=sampling_decision))
+    events = capture_events()
+
+    transaction = start_transaction(name="dogpark")
+    transaction.finish()
+
+    assert len(events) == (1 if sampling_decision else 0)
+
+
+@pytest.mark.parametrize(
+    "traces_sample_rate,traces_sampler_return_value", [(0, True), (1, False)]
+)
+def test_prefers_traces_sampler_to_traces_sample_rate(
+    sentry_init,
+    traces_sample_rate,
+    traces_sampler_return_value,
+):
+    # make traces_sample_rate imply the opposite of traces_sampler, to prove
+    # that traces_sampler takes precedence
+    traces_sampler = mock.Mock(return_value=traces_sampler_return_value)
+    sentry_init(
+        traces_sample_rate=traces_sample_rate,
+        traces_sampler=traces_sampler,
+    )
+
+    transaction = start_transaction(name="dogpark")
+    assert traces_sampler.called is True
+    assert transaction.sampled is traces_sampler_return_value
+
+
+@pytest.mark.parametrize("parent_sampling_decision", [True, False])
+def test_ignores_inherited_sample_decision_when_traces_sampler_defined(
+    sentry_init, parent_sampling_decision
+):
+    # make traces_sampler pick the opposite of the inherited decision, to prove
+    # that traces_sampler takes precedence
+    traces_sampler = mock.Mock(return_value=not parent_sampling_decision)
+    sentry_init(traces_sampler=traces_sampler)
+
+    transaction = start_transaction(
+        name="dogpark", parent_sampled=parent_sampling_decision
+    )
+    assert transaction.sampled is not parent_sampling_decision
+
+
+@pytest.mark.parametrize("explicit_decision", [True, False])
+def test_traces_sampler_doesnt_overwrite_explicitly_passed_sampling_decision(
+    sentry_init, explicit_decision
+):
+    # make traces_sampler pick the opposite of the explicit decision, to prove
+    # that the explicit decision takes precedence
+    traces_sampler = mock.Mock(return_value=not explicit_decision)
+    sentry_init(traces_sampler=traces_sampler)
+
+    transaction = start_transaction(name="dogpark", sampled=explicit_decision)
+    assert transaction.sampled is explicit_decision
+
+
+@pytest.mark.parametrize("parent_sampling_decision", [True, False])
+def test_inherits_parent_sampling_decision_when_traces_sampler_undefined(
+    sentry_init, parent_sampling_decision
+):
+    # make sure the parent sampling decision is the opposite of what
+    # traces_sample_rate would produce, to prove the inheritance takes
+    # precedence
+    sentry_init(traces_sample_rate=0.5)
+    mock_random_value = 0.25 if parent_sampling_decision is False else 0.75
+
+    with mock.patch.object(random, "random", return_value=mock_random_value):
+        transaction = start_transaction(
+            name="dogpark", parent_sampled=parent_sampling_decision
+        )
+        assert transaction.sampled is parent_sampling_decision
+
+
+@pytest.mark.parametrize("parent_sampling_decision", [True, False])
+def test_passes_parent_sampling_decision_in_sampling_context(
+    sentry_init, parent_sampling_decision
+):
+    sentry_init(traces_sample_rate=1.0)
+
+    sentry_trace_header = (
+        "12312012123120121231201212312012-1121201211212012-{sampled}".format(
+            sampled=int(parent_sampling_decision)
+        )
+    )
+
+    transaction = Transaction.continue_from_headers(
+        headers={"sentry-trace": sentry_trace_header}, name="dogpark"
+    )
+    spy = mock.Mock(wraps=transaction)
+    start_transaction(transaction=spy)
+
+    # there's only one call (so index at 0) and kwargs are always last in a call
+    # tuple (so index at -1)
+    sampling_context = spy._set_initial_sampling_decision.mock_calls[0][-1][
+        "sampling_context"
+    ]
+    assert "parent_sampled" in sampling_context
+    # because we passed in a spy, attribute access requires unwrapping
+    assert sampling_context["parent_sampled"]._mock_wraps is parent_sampling_decision
+
+
+def test_passes_custom_sampling_context_from_start_transaction_to_traces_sampler(
+    sentry_init, DictionaryContaining  # noqa: N803
+):
+    traces_sampler = mock.Mock()
+    sentry_init(traces_sampler=traces_sampler)
+
+    start_transaction(custom_sampling_context={"dogs": "yes", "cats": "maybe"})
+
+    traces_sampler.assert_any_call(
+        DictionaryContaining({"dogs": "yes", "cats": "maybe"})
+    )
+
+
+@pytest.mark.parametrize(
+    "traces_sampler_return_value",
+    [
+        "dogs are great",  # wrong type
+        (0, 1),  # wrong type
+        {"Maisey": "Charllie"},  # wrong type
+        [True, True],  # wrong type
+        {0.2012},  # wrong type
+        float("NaN"),  # wrong type
+        None,  # wrong type
+        -1.121,  # wrong value
+        1.231,  # wrong value
+    ],
+)
+def test_warns_and_sets_sampled_to_false_on_invalid_traces_sampler_return_value(
+    sentry_init, traces_sampler_return_value, StringContaining  # noqa: N803
+):
+    sentry_init(traces_sampler=mock.Mock(return_value=traces_sampler_return_value))
+
+    with mock.patch.object(logger, "warning", mock.Mock()):
+        transaction = start_transaction(name="dogpark")
+        logger.warning.assert_any_call(StringContaining("Given sample rate is invalid"))
+        assert transaction.sampled is False
+
+
+@pytest.mark.parametrize(
+    "traces_sample_rate,sampled_output,reports_output",
+    [
+        (None, False, []),
+        (0.0, False, [("sample_rate", "transaction")]),
+        (1.0, True, []),
+    ],
+)
+def test_records_lost_event_only_if_traces_sample_rate_enabled(
+    sentry_init, traces_sample_rate, sampled_output, reports_output, monkeypatch
+):
+    reports = []
+
+    def record_lost_event(reason, data_category=None, item=None):
+        reports.append((reason, data_category))
+
+    sentry_init(traces_sample_rate=traces_sample_rate)
+
+    monkeypatch.setattr(
+        Hub.current.client.transport, "record_lost_event", record_lost_event
+    )
+
+    transaction = start_transaction(name="dogpark")
+    assert transaction.sampled is sampled_output
+    transaction.finish()
+
+    assert reports == reports_output
+
+
+@pytest.mark.parametrize(
+    "traces_sampler,sampled_output,reports_output",
+    [
+        (None, False, []),
+        (lambda _x: 0.0, False, [("sample_rate", "transaction")]),
+        (lambda _x: 1.0, True, []),
+    ],
+)
+def test_records_lost_event_only_if_traces_sampler_enabled(
+    sentry_init, traces_sampler, sampled_output, reports_output, monkeypatch
+):
+    reports = []
+
+    def record_lost_event(reason, data_category=None, item=None):
+        reports.append((reason, data_category))
+
+    sentry_init(traces_sampler=traces_sampler)
+
+    monkeypatch.setattr(
+        Hub.current.client.transport, "record_lost_event", record_lost_event
+    )
+
+    transaction = start_transaction(name="dogpark")
+    assert transaction.sampled is sampled_output
+    transaction.finish()
+
+    assert reports == reports_output
diff --git a/tests/utils/test_contextvars.py b/tests/utils/test_contextvars.py
index b54292293d..a6d296bb1f 100644
--- a/tests/utils/test_contextvars.py
+++ b/tests/utils/test_contextvars.py
@@ -3,17 +3,6 @@
 import time
 
 
-from sentry_sdk.utils import _is_threading_local_monkey_patched
-
-
-@pytest.mark.forked
-def test_thread_local_is_patched(maybe_monkeypatched_threading):
-    if maybe_monkeypatched_threading is None:
-        assert not _is_threading_local_monkey_patched()
-    else:
-        assert _is_threading_local_monkey_patched()
-
-
 @pytest.mark.forked
 def test_leaks(maybe_monkeypatched_threading):
     import threading
diff --git a/tests/utils/test_general.py b/tests/utils/test_general.py
index ff6e5f5430..f2d0069ba3 100644
--- a/tests/utils/test_general.py
+++ b/tests/utils/test_general.py
@@ -13,8 +13,12 @@
     filename_for_module,
     handle_in_app_impl,
     iter_event_stacktraces,
+    to_base64,
+    from_base64,
+    strip_string,
+    AnnotatedValue,
 )
-from sentry_sdk._compat import text_type
+from sentry_sdk._compat import text_type, string_types
 
 
 try:
@@ -29,19 +33,23 @@
     def test_safe_repr_never_broken_for_strings(x):
         r = safe_repr(x)
         assert isinstance(r, text_type)
-        assert u"broken repr" not in r
+        assert "broken repr" not in r
 
 
 def test_safe_repr_regressions():
+    # fmt: off
     assert u"лошадь" in safe_repr(u"лошадь")
+    # fmt: on
 
 
 @pytest.mark.xfail(
     sys.version_info < (3,),
     reason="Fixing this in Python 2 would break other behaviors",
 )
-@pytest.mark.parametrize("prefix", (u"", u"abcd", u"лошадь"))
+# fmt: off
+@pytest.mark.parametrize("prefix", ("", "abcd", u"лошадь"))
 @pytest.mark.parametrize("character", u"\x00\x07\x1b\n")
+# fmt: on
 def test_safe_repr_non_printable(prefix, character):
     """Check that non-printable characters are escaped"""
     string = prefix + character
@@ -76,7 +84,6 @@ def test_filename():
     assert x("bogus", "bogus") == "bogus"
 
     assert x("os", os.__file__) == "os.py"
-    assert x("pytest", pytest.__file__) == "pytest.py"
 
     import sentry_sdk.utils
 
@@ -84,20 +91,31 @@ def test_filename():
 
 
 @pytest.mark.parametrize(
-    "given,expected",
+    "given,expected_store,expected_envelope",
     [
-        ("https://foobar@sentry.io/123", "https://sentry.io/api/123/store/"),
-        ("https://foobar@sentry.io/bam/123", "https://sentry.io/bam/api/123/store/"),
+        (
+            "https://foobar@sentry.io/123",
+            "https://sentry.io/api/123/store/",
+            "https://sentry.io/api/123/envelope/",
+        ),
+        (
+            "https://foobar@sentry.io/bam/123",
+            "https://sentry.io/bam/api/123/store/",
+            "https://sentry.io/bam/api/123/envelope/",
+        ),
         (
             "https://foobar@sentry.io/bam/baz/123",
             "https://sentry.io/bam/baz/api/123/store/",
+            "https://sentry.io/bam/baz/api/123/envelope/",
         ),
     ],
 )
-def test_parse_dsn_paths(given, expected):
+def test_parse_dsn_paths(given, expected_store, expected_envelope):
     dsn = Dsn(given)
     auth = dsn.to_auth()
-    assert auth.store_api_url == expected
+    assert auth.store_api_url == expected_store
+    assert auth.get_api_url("store") == expected_store
+    assert auth.get_api_url("envelope") == expected_envelope
 
 
 @pytest.mark.parametrize(
@@ -146,3 +164,77 @@ def test_iter_stacktraces():
             }
         )
     ) == {1, 2, 3}
+
+
+# fmt: off
+@pytest.mark.parametrize(
+    ("original", "base64_encoded"),
+    [
+        # ascii only
+        ("Dogs are great!", "RG9ncyBhcmUgZ3JlYXQh"),
+        # emoji
+        (u"🐶", "8J+Qtg=="),
+        # non-ascii
+        (
+            u"Καλό κορίτσι, Μάιζεϊ!",
+            "zprOsc67z4wgzrrOv8+Bzq/PhM+DzrksIM6czqzOuc62zrXPiiE=",
+        ),
+        # mix of ascii and non-ascii
+        (
+            u"Of margir hundar! Ég geri ráð fyrir að ég þurfi stærra rúm.",
+            "T2YgbWFyZ2lyIGh1bmRhciEgw4lnIGdlcmkgcsOhw7AgZnlyaXIgYcOwIMOpZyDDvnVyZmkgc3TDpnJyYSByw7ptLg==",
+        ),
+    ],
+)
+# fmt: on
+def test_successful_base64_conversion(original, base64_encoded):
+    # all unicode characters should be handled correctly
+    assert to_base64(original) == base64_encoded
+    assert from_base64(base64_encoded) == original
+
+    # "to" and "from" should be inverses
+    assert from_base64(to_base64(original)) == original
+    assert to_base64(from_base64(base64_encoded)) == base64_encoded
+
+
+@pytest.mark.parametrize(
+    "input",
+    [
+        1231,  # incorrect type
+        True,  # incorrect type
+        [],  # incorrect type
+        {},  # incorrect type
+        None,  # incorrect type
+        "yayfordogs",  # wrong length
+        "#dog",  # invalid ascii character
+        "🐶",  # non-ascii character
+    ],
+)
+def test_failed_base64_conversion(input):
+    # conversion from base64 should fail if given input of the wrong type or
+    # input which isn't a valid base64 string
+    assert from_base64(input) is None
+
+    # any string can be converted to base64, so only type errors will cause
+    # failures
+    if type(input) not in string_types:
+        assert to_base64(input) is None
+
+
+def test_strip_string():
+    # If value is None returns None.
+    assert strip_string(None) is None
+
+    # If max_length is not passed, returns the full text (up to 1024 bytes).
+    text_1024_long = "a" * 1024
+    assert strip_string(text_1024_long).count("a") == 1024
+
+    # If value exceeds the max_length, returns an AnnotatedValue.
+    text_1025_long = "a" * 1025
+    stripped_text = strip_string(text_1025_long)
+    assert isinstance(stripped_text, AnnotatedValue)
+    assert stripped_text.value.count("a") == 1021  # + '...' is 1024
+
+    # If text has unicode characters, it counts bytes and not number of characters.
+    text_with_unicode_character = "éê"
+    assert strip_string(text_with_unicode_character, max_length=2).value == "é..."
diff --git a/tests/utils/test_transaction.py b/tests/utils/test_transaction.py
index e1aa12308f..bfb87f4c29 100644
--- a/tests/utils/test_transaction.py
+++ b/tests/utils/test_transaction.py
@@ -1,5 +1,15 @@
+import sys
+from functools import partial
+
+import pytest
+
 from sentry_sdk.utils import transaction_from_function
 
+try:
+    from functools import partialmethod
+except ImportError:
+    pass
+
 
 class MyClass:
     def myfunc(self):
@@ -10,6 +20,16 @@ def myfunc():
     pass
 
 
+@partial
+def my_partial():
+    pass
+
+
+my_lambda = lambda: None
+
+my_partial_lambda = partial(lambda: None)
+
+
 def test_transaction_from_function():
     x = transaction_from_function
     assert x(MyClass) == "tests.utils.test_transaction.MyClass"
@@ -18,3 +38,26 @@ def test_transaction_from_function():
     assert x(None) is None
     assert x(42) is None
+    assert x(lambda: None).endswith("<lambda>")
+    assert x(my_lambda) == "tests.utils.test_transaction.<lambda>"
+    assert (
+        x(my_partial) == "partial(<function my_partial>)"
+    )
+    assert (
+        x(my_partial_lambda)
+        == "partial(<function <lambda>>)"
+    )
+
+
+@pytest.mark.skipif(sys.version_info < (3, 4), reason="Requires Python 3.4 or higher")
+def test_transaction_from_function_partialmethod():
+    x = transaction_from_function
+
+    class MyPartialClass:
+        @partialmethod
+        def my_partial_method(self):
+            pass
+
+    assert (
+        x(MyPartialClass.my_partial_method)
+        == "partialmethod(.MyPartialClass.my_partial_method>)"
+    )
diff --git a/tox.ini b/tox.ini
index 67e957d2ae..a64e2d4987 100644
--- a/tox.ini
+++ b/tox.ini
@@ -6,204 +6,398 @@
 [tox]
 envlist =
     # === Core ===
-    py{2.7,3.4,3.5,3.6,3.7,3.8}
-    pypy
-
+    {py2.7,py3.4,py3.5,py3.6,py3.7,py3.8,py3.9,py3.10,py3.11}
 
     # === Integrations ===
-    # General format is {pythonversion}-{integrationname}-{frameworkversion}
+    # General format is {pythonversion}-{integrationname}-v{frameworkversion}
     # 1 blank line between different integrations
     # Each framework version should only be mentioned once. I.e:
-    #   {py2.7,py3.7}-django-{1.11}
-    #   {py3.7}-django-{2.2}
+    #   {py3.7,py3.10}-django-v{3.2}
+    #   {py3.10}-django-v{4.0}
     # instead of:
-    #   {py2.7}-django-{1.11}
-    #   {py2.7,py3.7}-django-{1.11,2.2}
+    #   {py3.7}-django-v{3.2}
+    #   {py3.7,py3.10}-django-v{3.2,4.0}
 
-    {pypy,py2.7}-django-{1.6,1.7}
-    {pypy,py2.7,py3.5}-django-{1.8,1.9,1.10,1.11}
-    {py3.5,py3.6,py3.7}-django-{2.0,2.1}
-    {py3.7,py3.8}-django-{2.2,3.0,dev}
+    # AIOHTTP
+    {py3.7}-aiohttp-v{3.5}
+    {py3.7,py3.8,py3.9,py3.10,py3.11}-aiohttp-v{3.6}
 
-    {pypy,py2.7,py3.5,py3.6,py3.7,py3.8}-flask-{1.1,1.0,0.11,0.12}
-    {py3.6,py3.7,py3.8}-flask-{1.1,1.0,0.11,0.12,dev}
+    # Asgi
+    {py3.7,py3.8,py3.9,py3.10,py3.11}-asgi
 
-    {pypy,py2.7,py3.5,py3.6,py3.7,py3.8}-bottle-0.12
+    # AWS Lambda
+    # The aws_lambda tests deploy to the real AWS and have their own matrix of Python versions.
+    {py3.7}-aws_lambda
 
-    {pypy,py2.7,py3.5,py3.6,py3.7}-falcon-1.4
-    {pypy,py2.7,py3.5,py3.6,py3.7,py3.8}-falcon-2.0
+    # Beam
+    {py3.7}-beam-v{2.12,2.13,2.32,2.33}
 
-    {py3.5,py3.6,py3.7}-sanic-{0.8,18}
-    {py3.6,py3.7}-sanic-19
+    # Boto3
+    {py2.7,py3.6,py3.7,py3.8}-boto3-v{1.9,1.10,1.11,1.12,1.13,1.14,1.15,1.16}
+
+    # Bottle
+    {py2.7,py3.5,py3.6,py3.7,py3.8,py3.9,py3.10,py3.11}-bottle-v{0.12}
 
-    {pypy,py2.7,py3.5,py3.6,py3.7,py3.8}-celery-{4.1,4.2,4.3,4.4}
-    {pypy,py2.7}-celery-3
+    # Celery
+    {py2.7}-celery-v{3}
+    {py2.7,py3.5,py3.6}-celery-v{4.1,4.2}
+    {py2.7,py3.5,py3.6,py3.7,py3.8}-celery-v{4.3,4.4}
+    {py3.6,py3.7,py3.8}-celery-v{5.0}
+    {py3.7,py3.8,py3.9,py3.10}-celery-v{5.1,5.2}
+    # TODO: enable when celery is ready {py3.7,py3.8,py3.9,py3.10,py3.11}-celery-v{5.3}
 
-    {py2.7,py3.7}-beam-{2.12,2.13}
+    # Chalice
+    {py3.6,py3.7,py3.8}-chalice-v{1.16,1.17,1.18,1.19,1.20}
 
-    # The aws_lambda tests deploy to the real AWS and have their own matrix of Python versions.
-    py3.7-aws_lambda
+    # Django
+    # - Django 1.x
+    {py2.7,py3.5}-django-v{1.8,1.9,1.10}
+    {py2.7,py3.5,py3.6,py3.7}-django-v{1.11}
+    # - Django 2.x
+    {py3.5,py3.6,py3.7}-django-v{2.0,2.1}
+    {py3.5,py3.6,py3.7,py3.8,py3.9}-django-v{2.2}
+    # - Django 3.x
+    {py3.6,py3.7,py3.8,py3.9}-django-v{3.0,3.1}
+    {py3.6,py3.7,py3.8,py3.9,py3.10,py3.11}-django-v{3.2}
+    # - Django 4.x
+    {py3.8,py3.9,py3.10,py3.11}-django-v{4.0,4.1}
 
-    {pypy,py2.7,py3.5,py3.6,py3.7,py3.8}-pyramid-{1.3,1.4,1.5,1.6,1.7,1.8,1.9,1.10}
+    # Falcon
+    {py2.7,py3.5,py3.6,py3.7}-falcon-v{1.4}
+    {py2.7,py3.5,py3.6,py3.7,py3.8,py3.9,py3.10,py3.11}-falcon-v{2.0}
 
-    {pypy,py2.7,py3.5,py3.6}-rq-{0.6,0.7,0.8,0.9,0.10,0.11}
-    {pypy,py2.7,py3.5,py3.6,py3.7,py3.8}-rq-{0.12,0.13,1.0,1.1,1.2,1.3}
+    # FastAPI
+    {py3.7,py3.8,py3.9,py3.10,py3.11}-fastapi
 
-    py3.7-aiohttp-3.5
-    {py3.7,py3.8}-aiohttp-3.6
+    # Flask
+    {py2.7,py3.5,py3.6,py3.7,py3.8,py3.9}-flask-v{0.11,0.12,1.0}
+    {py2.7,py3.5,py3.6,py3.7,py3.8,py3.9,py3.10,py3.11}-flask-v{1.1}
+    {py3.6,py3.8,py3.9,py3.10,py3.11}-flask-v{2.0}
 
-    {py3.7,py3.8}-tornado-{5,6}
+    # GCP
+    {py3.7}-gcp
 
-    {py3.4,py3.5,py3.6,py3.7,py3.8}-trytond-{4.6,4.8,5.0}
-    {py3.5,py3.6,py3.7,py3.8}-trytond-{5.2}
-    {py3.6,py3.7,py3.8}-trytond-{5.4}
+    # HTTPX
+    {py3.6,py3.7,py3.8,py3.9,py3.10,py3.11}-httpx-v{0.16,0.17}
 
-    {py2.7,py3.8}-requests
+    # OpenTelemetry (OTel)
+    {py3.7,py3.8,py3.9,py3.10,py3.11}-opentelemetry
 
-    {py2.7,py3.7,py3.8}-redis
+    # pure_eval
+    {py3.5,py3.6,py3.7,py3.8,py3.9,py3.10,py3.11}-pure_eval
 
-    py{3.7,3.8}-asgi
+    # PyMongo (Mongo DB)
+    {py2.7,py3.6}-pymongo-v{3.1}
+    {py2.7,py3.6,py3.7,py3.8,py3.9}-pymongo-v{3.12}
+    {py3.6,py3.7,py3.8,py3.9,py3.10,py3.11}-pymongo-v{4.0}
+    {py3.7,py3.8,py3.9,py3.10,py3.11}-pymongo-v{4.1,4.2}
 
-    {py2.7,py3.7,py3.8}-sqlalchemy-{1.2,1.3}
+    # Pyramid
+    {py2.7,py3.5,py3.6,py3.7,py3.8,py3.9,py3.10,py3.11}-pyramid-v{1.6,1.7,1.8,1.9,1.10}
 
-    py3.7-spark
+    # Quart
+    {py3.7,py3.8,py3.9,py3.10,py3.11}-quart
 
-[testenv]
-deps =
-    -r test-requirements.txt
+    # Redis
+    {py2.7,py3.7,py3.8,py3.9}-redis
 
-    django-{1.11,2.0,2.1,2.2,3.0}: djangorestframework>=3.0.0,<4.0.0
-    py3.7-django-{1.11,2.0,2.1,2.2,3.0}: channels>2
-    py3.7-django-{1.11,2.0,2.1,2.2,3.0}: pytest-asyncio==0.10.0
-    {py2.7,py3.7}-django-{1.11,2.2,3.0}: psycopg2-binary
-
-    django-{1.6,1.7,1.8}: pytest-django<3.0
-    django-{1.9,1.10,1.11,2.0,2.1,2.2,3.0,dev}: pytest-django>=3.0
-
-    django-1.6: Django>=1.6,<1.7
-    django-1.7: Django>=1.7,<1.8
-    django-1.8: Django>=1.8,<1.9
-    django-1.9: Django>=1.9,<1.10
-    django-1.10: Django>=1.10,<1.11
-    django-1.11: Django>=1.11,<1.12
-    django-2.0: Django>=2.0,<2.1
-    django-2.1: Django>=2.1,<2.2
-    django-2.2: Django>=2.2,<2.3
-    django-3.0: Django>=3.0,<3.1
-    django-dev: git+https://github.com/django/django.git#egg=Django
+    # Redis Cluster
+    {py2.7,py3.7,py3.8,py3.9}-rediscluster-v{1,2.1.0,2}
 
-    flask: flask-login
-    flask-0.11: Flask>=0.11,<0.12
-    flask-0.12: Flask>=0.12,<0.13
-    flask-1.0: Flask>=1.0,<1.1
-    flask-1.1: Flask>=1.1,<1.2
-    flask-dev: git+https://github.com/pallets/flask.git#egg=flask
+    # Requests
+    {py2.7,py3.8,py3.9}-requests
 
-    bottle-0.12: bottle>=0.12,<0.13
-    bottle-dev: git+https://github.com/bottlepy/bottle#egg=bottle
+    # RQ (Redis Queue)
+    {py2.7,py3.5,py3.6}-rq-v{0.6,0.7,0.8,0.9,0.10,0.11}
+    {py2.7,py3.5,py3.6,py3.7,py3.8,py3.9}-rq-v{0.12,0.13,1.0,1.1,1.2,1.3}
+    {py3.5,py3.6,py3.7,py3.8,py3.9,py3.10,py3.11}-rq-v{1.4,1.5}
 
-    falcon-1.4: falcon>=1.4,<1.5
-    falcon-2.0: falcon>=2.0.0rc3,<3.0
+    # Sanic
+    {py3.5,py3.6,py3.7}-sanic-v{0.8,18}
+    {py3.6,py3.7}-sanic-v{19}
+    {py3.6,py3.7,py3.8}-sanic-v{20}
+    {py3.7,py3.8,py3.9,py3.10,py3.11}-sanic-v{21}
+    {py3.7,py3.8,py3.9,py3.10,py3.11}-sanic-v{22}
 
-    sanic-0.8: sanic>=0.8,<0.9
-    sanic-18: sanic>=18.0,<19.0
-    sanic-19: sanic>=19.0,<20.0
-    {py3.5,py3.6}-sanic: aiocontextvars==0.2.1
-    # https://github.com/MagicStack/httptools/issues/48
-    py3.5-sanic: httptools==0.0.11
-    sanic: aiohttp
+    # Starlette
+    {py3.7,py3.8,py3.9,py3.10,py3.11}-starlette-v{0.19.1,0.20,0.21}
 
-    beam-2.12: apache-beam>=2.12.0, <2.13.0
-    beam-2.13: apache-beam>=2.13.0, <2.14.0
-    beam-master: git+https://github.com/apache/beam#egg=apache-beam&subdirectory=sdks/python
+    # Starlite
+    {py3.8,py3.9,py3.10,py3.11}-starlite
 
-    celery-3: Celery>=3.1,<4.0
-    celery-4.1: Celery>=4.1,<4.2
-    celery-4.2: Celery>=4.2,<4.3
-    celery-4.3: Celery>=4.3,<4.4
-    celery-4.4: Celery>=4.4,<4.5
+    # SQL Alchemy
+    {py2.7,py3.7,py3.8,py3.9,py3.10,py3.11}-sqlalchemy-v{1.2,1.3}
 
-    requests: requests>=2.0
+    # Tornado
+    {py3.7,py3.8,py3.9}-tornado-v{5}
+    {py3.7,py3.8,py3.9,py3.10,py3.11}-tornado-v{6}
 
-    aws_lambda: boto3
+    # Trytond
+    {py3.5,py3.6,py3.7,py3.8,py3.9}-trytond-v{4.6,5.0,5.2}
+    {py3.6,py3.7,py3.8,py3.9,py3.10,py3.11}-trytond-v{5.4}
 
-    pyramid-1.3: pyramid>=1.3,<1.4
-    pyramid-1.4: pyramid>=1.4,<1.5
-    pyramid-1.5: pyramid>=1.5,<1.6
-    pyramid-1.6: pyramid>=1.6,<1.7
-    pyramid-1.7: pyramid>=1.7,<1.8
-    pyramid-1.8: pyramid>=1.8,<1.9
-    pyramid-1.9: pyramid>=1.9,<1.10
-    pyramid-1.10: pyramid>=1.10,<1.11
 
-    # https://github.com/jamesls/fakeredis/issues/245
-    rq-{0.6,0.7,0.8,0.9,0.10,0.11,0.12}: fakeredis<1.0
-    rq-{0.6,0.7,0.8,0.9,0.10,0.11,0.12}: redis<3.2.2
-    rq-{0.13,1.0,1.1,1.2,1.3}: fakeredis>=1.0
-
-    rq-0.6: rq>=0.6,<0.7
-    rq-0.7: rq>=0.7,<0.8
-    rq-0.8: rq>=0.8,<0.9
-    rq-0.9: rq>=0.9,<0.10
-    rq-0.10: rq>=0.10,<0.11
-    rq-0.11: rq>=0.11,<0.12
-    rq-0.12: rq>=0.12,<0.13
-    rq-0.13: rq>=0.13,<0.14
-    rq-1.0: rq>=1.0,<1.1
-    rq-1.1: rq>=1.1,<1.2
-    rq-1.2: rq>=1.2,<1.3
-    rq-1.3: rq>=1.3,<1.4
-
-    aiohttp-3.4: aiohttp>=3.4.0,<3.5.0
-    aiohttp-3.5: aiohttp>=3.5.0,<3.6.0
+[testenv]
+deps =
+    # if you change test-requirements.txt and your change is not being reflected
+    # in what's installed by tox (when running tox locally), try running tox
+    # with the -r flag
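+    # (e.g. "tox -r -e py3.9-flask-v2.0" recreates just that env)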
+    -r test-requirements.txt
+
+    py3.4: colorama==0.4.1
+    py3.4: watchdog==0.10.7
+
+    py3.8: hypothesis
+
+    linters: -r linter-requirements.txt
+
+    # AIOHTTP
+    aiohttp-v3.4: aiohttp>=3.4.0,<3.5.0
+    aiohttp-v3.5: aiohttp>=3.5.0,<3.6.0
     aiohttp: pytest-aiohttp
 
-    tornado-5: tornado>=5,<6
-    tornado-6: tornado>=6.0a1
+    # Asgi
+    asgi: pytest-asyncio
+    asgi: async-asgi-testclient
 
-    trytond-5.4: trytond>=5.4,<5.5
-    trytond-5.2: trytond>=5.2,<5.3
-    trytond-5.0: trytond>=5.0,<5.1
-    trytond-4.8: trytond>=4.8,<4.9
-    trytond-4.6: trytond>=4.6,<4.7
+    # AWS Lambda
+    aws_lambda: boto3
 
-    redis: fakeredis
-    # https://github.com/jamesls/fakeredis/issues/245
-    redis: redis<3.2.2
+    # Beam
+    beam-v2.12: apache-beam>=2.12.0, <2.13.0
+    beam-v2.13: apache-beam>=2.13.0, <2.14.0
+    beam-v2.32: apache-beam>=2.32.0, <2.33.0
+    beam-v2.33: apache-beam>=2.33.0, <2.34.0
+    beam-master: git+https://github.com/apache/beam#egg=apache-beam&subdirectory=sdks/python
 
-    asgi: starlette
-    asgi: requests
+    # Boto3
+    boto3-v1.9: boto3>=1.9,<1.10
+    boto3-v1.10: boto3>=1.10,<1.11
+    boto3-v1.11: boto3>=1.11,<1.12
+    boto3-v1.12: boto3>=1.12,<1.13
+    boto3-v1.13: boto3>=1.13,<1.14
+    boto3-v1.14: boto3>=1.14,<1.15
+    boto3-v1.15: boto3>=1.15,<1.16
+    boto3-v1.16: boto3>=1.16,<1.17
+
+    # Bottle
+    bottle-v0.12: bottle>=0.12,<0.13
+
+    # Celery
+    celery: redis
+    celery-v3: Celery>=3.1,<4.0
+    celery-v4.1: Celery>=4.1,<4.2
+    celery-v4.2: Celery>=4.2,<4.3
+    celery-v4.3: Celery>=4.3,<4.4
+    # https://github.com/celery/vine/pull/29#issuecomment-689498382
+    celery-v4.3: vine<5.0.0
+    # https://github.com/celery/celery/issues/6153
+    celery-v4.4: Celery>=4.4,<4.5,!=4.4.4
+    celery-v5.0: Celery>=5.0,<5.1
+    celery-v5.1: Celery>=5.1,<5.2
+    celery-v5.2: Celery>=5.2,<5.3
+
+    {py3.5}-celery: newrelic<6.0.0
+    {py3.7}-celery: importlib-metadata<5.0
+    {py2.7,py3.6,py3.7,py3.8,py3.9,py3.10,py3.11}-celery: newrelic
+
+    # Chalice
+    chalice-v1.16: chalice>=1.16.0,<1.17.0
+    chalice-v1.17: chalice>=1.17.0,<1.18.0
+    chalice-v1.18: chalice>=1.18.0,<1.19.0
+    chalice-v1.19: chalice>=1.19.0,<1.20.0
+    chalice-v1.20: chalice>=1.20.0,<1.21.0
+    chalice: pytest-chalice==0.0.5
+
+    # Django
+    django-v{1.11,2.0,2.1,2.2,3.0,3.1,3.2}: djangorestframework>=3.0.0,<4.0.0
+
+    {py3.7,py3.8,py3.9,py3.10,py3.11}-django-v{1.11,2.0,2.1,2.2,3.0,3.1,3.2}: channels[daphne]>2
+    {py3.7,py3.8,py3.9,py3.10,py3.11}-django-v{1.11,2.0,2.1,2.2,3.0,3.1,3.2}: pytest-asyncio
+    {py2.7,py3.7,py3.8,py3.9,py3.10,py3.11}-django-v{1.11,2.2,3.0,3.1,3.2}: psycopg2-binary
+
+    django-v{1.8,1.9,1.10,1.11,2.0,2.1}: pytest-django<4.0
+    django-v{2.2,3.0,3.1,3.2}: pytest-django>=4.0
+    django-v{2.2,3.0,3.1,3.2}: Werkzeug<2.0
+
+    django-v{4.0,4.1}: djangorestframework
+    django-v{4.0,4.1}: pytest-asyncio
+    django-v{4.0,4.1}: psycopg2-binary
+    django-v{4.0,4.1}: pytest-django
+    django-v{4.0,4.1}: Werkzeug
+
+    django-v1.8: Django>=1.8,<1.9
+    django-v1.9: Django>=1.9,<1.10
+    django-v1.10: Django>=1.10,<1.11
+    django-v1.11: Django>=1.11,<1.12
+    django-v2.0: Django>=2.0,<2.1
+    django-v2.1: Django>=2.1,<2.2
+    django-v2.2: Django>=2.2,<2.3
+    django-v3.0: Django>=3.0,<3.1
+    django-v3.1: Django>=3.1,<3.2
+    django-v3.2: Django>=3.2,<3.3
+    django-v4.0: Django>=4.0,<4.1
+    django-v4.1: Django>=4.1,<4.2
+
+    # Falcon
+    falcon-v1.4: falcon>=1.4,<1.5
+    falcon-v2.0: falcon>=2.0.0rc3,<3.0
+
+    # FastAPI
+    fastapi: fastapi
+    fastapi: httpx
+    fastapi: pytest-asyncio
+    fastapi: python-multipart
+    fastapi: requests
+
+    # Flask
+    flask: flask-login
+    flask-v0.11: Flask>=0.11,<0.12
+    flask-v0.12: Flask>=0.12,<0.13
+    flask-v1.0: Flask>=1.0,<1.1
+    flask-v1.1: Flask>=1.1,<1.2
+    flask-v2.0: Flask>=2.0,<2.1
+
+    # HTTPX
+    httpx-v0.16: httpx>=0.16,<0.17
+    httpx-v0.17: httpx>=0.17,<0.18
+
+    # OpenTelemetry (OTel)
+    opentelemetry: opentelemetry-distro
+
+    # pure_eval
+    pure_eval: pure_eval
+
+    # PyMongo (MongoDB)
+    pymongo: mockupdb
+    pymongo-v3.1: pymongo>=3.1,<3.2
+    pymongo-v3.12: pymongo>=3.12,<4.0
+    pymongo-v4.0: pymongo>=4.0,<4.1
+    pymongo-v4.1: pymongo>=4.1,<4.2
+    pymongo-v4.2: pymongo>=4.2,<4.3
+
+    # Pyramid
+    pyramid-v1.6: pyramid>=1.6,<1.7
+    pyramid-v1.7: pyramid>=1.7,<1.8
+    pyramid-v1.8: pyramid>=1.8,<1.9
+    pyramid-v1.9: pyramid>=1.9,<1.10
+    pyramid-v1.10: pyramid>=1.10,<1.11
+
+    # Quart
+    quart: quart>=0.16.1
+    quart: quart-auth
+    quart: pytest-asyncio
+
+    # Requests
+    requests: requests>=2.0
 
-    sqlalchemy-1.2: sqlalchemy>=1.2,<1.3
-    sqlalchemy-1.3: sqlalchemy>=1.3,<1.4
+    # Redis
+    redis: fakeredis<1.7.4
 
-    spark: pyspark==2.4.4
+    # Redis Cluster
+    rediscluster-v1: redis-py-cluster>=1.0.0,<2.0.0
+    rediscluster-v2.1.0: redis-py-cluster>=2.0.0,<2.1.1
+    rediscluster-v2: redis-py-cluster>=2.1.1,<3.0.0
 
-    linters: -r linter-requirements.txt
+    # RQ (Redis Queue)
+    # https://github.com/jamesls/fakeredis/issues/245
+    rq-v{0.6,0.7,0.8,0.9,0.10,0.11,0.12}: fakeredis<1.0
+    rq-v{0.6,0.7,0.8,0.9,0.10,0.11,0.12}: redis<3.2.2
+    rq-v{0.13,1.0,1.1,1.2,1.3,1.4,1.5}: fakeredis>=1.0,<1.7.4
+
+    rq-v0.6: rq>=0.6,<0.7
+    rq-v0.7: rq>=0.7,<0.8
+    rq-v0.8: rq>=0.8,<0.9
+    rq-v0.9: rq>=0.9,<0.10
+    rq-v0.10: rq>=0.10,<0.11
+    rq-v0.11: rq>=0.11,<0.12
+    rq-v0.12: rq>=0.12,<0.13
+    rq-v0.13: rq>=0.13,<0.14
+    rq-v1.0: rq>=1.0,<1.1
+    rq-v1.1: rq>=1.1,<1.2
+    rq-v1.2: rq>=1.2,<1.3
+    rq-v1.3: rq>=1.3,<1.4
+    rq-v1.4: rq>=1.4,<1.5
+    rq-v1.5: rq>=1.5,<1.6
+
+    # Sanic
+    sanic-v0.8: sanic>=0.8,<0.9
+    sanic-v18: sanic>=18.0,<19.0
+    sanic-v19: sanic>=19.0,<20.0
+    sanic-v20: sanic>=20.0,<21.0
+    sanic-v21: sanic>=21.0,<22.0
+    sanic-v22: sanic>=22.0,<22.9.0
 
-    py3.8: hypothesis
+    sanic: aiohttp
+    sanic-v21: sanic_testing<22
+    sanic-v22: sanic_testing<22.9.0
+    {py3.5,py3.6}-sanic: aiocontextvars==0.2.1
+    {py3.5}-sanic: ujson<4
+
+    # Starlette
+    starlette: pytest-asyncio
+    starlette: python-multipart
+    starlette: requests
+    starlette-v0.21: httpx
+    starlette-v0.19.1: starlette==0.19.1
+    starlette-v0.20: starlette>=0.20.0,<0.21.0
+    starlette-v0.21: starlette>=0.21.0,<0.22.0
+
+    # Starlite
+    starlite: starlite
+    starlite: pytest-asyncio
+    starlite: python-multipart
+    starlite: requests
+    starlite: cryptography
+
+    # SQLAlchemy
+    sqlalchemy-v1.2: sqlalchemy>=1.2,<1.3
+    sqlalchemy-v1.3: sqlalchemy>=1.3,<1.4
+
+    # Tornado
+    tornado-v5: tornado>=5,<6
+    tornado-v6: tornado>=6.0a1
+
+    # Trytond
+    trytond-v5.4: trytond>=5.4,<5.5
+    trytond-v5.2: trytond>=5.2,<5.3
+    trytond-v5.0: trytond>=5.0,<5.1
+    trytond-v4.6: trytond>=4.6,<4.7
+
+    trytond-v{4.6,4.8,5.0,5.2,5.4}: werkzeug<2.0
 
 setenv =
     PYTHONDONTWRITEBYTECODE=1
     TESTPATH=tests
+    aiohttp: TESTPATH=tests/integrations/aiohttp
+    asgi: TESTPATH=tests/integrations/asgi
+    aws_lambda: TESTPATH=tests/integrations/aws_lambda
     beam: TESTPATH=tests/integrations/beam
-    django: TESTPATH=tests/integrations/django
-    flask: TESTPATH=tests/integrations/flask
+    boto3: TESTPATH=tests/integrations/boto3
     bottle: TESTPATH=tests/integrations/bottle
-    falcon: TESTPATH=tests/integrations/falcon
     celery: TESTPATH=tests/integrations/celery
-    requests: TESTPATH=tests/integrations/requests
-    aws_lambda: TESTPATH=tests/integrations/aws_lambda
-    sanic: TESTPATH=tests/integrations/sanic
+    chalice: TESTPATH=tests/integrations/chalice
+    django: TESTPATH=tests/integrations/django
+    falcon: TESTPATH=tests/integrations/falcon
+    fastapi: TESTPATH=tests/integrations/fastapi
+    flask: TESTPATH=tests/integrations/flask
+    gcp: TESTPATH=tests/integrations/gcp
+    httpx: TESTPATH=tests/integrations/httpx
+    opentelemetry: TESTPATH=tests/integrations/opentelemetry
+    pure_eval: TESTPATH=tests/integrations/pure_eval
+    pymongo: TESTPATH=tests/integrations/pymongo
     pyramid: TESTPATH=tests/integrations/pyramid
+    quart: TESTPATH=tests/integrations/quart
+    redis: TESTPATH=tests/integrations/redis
+    rediscluster: TESTPATH=tests/integrations/rediscluster
+    requests: TESTPATH=tests/integrations/requests
     rq: TESTPATH=tests/integrations/rq
-    aiohttp: TESTPATH=tests/integrations/aiohttp
+    sanic: TESTPATH=tests/integrations/sanic
+    starlette: TESTPATH=tests/integrations/starlette
+    starlite: TESTPATH=tests/integrations/starlite
+    sqlalchemy: TESTPATH=tests/integrations/sqlalchemy
     tornado: TESTPATH=tests/integrations/tornado
     trytond: TESTPATH=tests/integrations/trytond
-    redis: TESTPATH=tests/integrations/redis
-    asgi: TESTPATH=tests/integrations/asgi
-    sqlalchemy: TESTPATH=tests/integrations/sqlalchemy
-    spark: TESTPATH=tests/integrations/spark
 
     COVERAGE_FILE=.coverage-{envname}
 passenv =
@@ -211,12 +405,15 @@ passenv =
     SENTRY_PYTHON_TEST_AWS_SECRET_ACCESS_KEY
     SENTRY_PYTHON_TEST_AWS_IAM_ROLE
     SENTRY_PYTHON_TEST_POSTGRES_USER
+    SENTRY_PYTHON_TEST_POSTGRES_PASSWORD
     SENTRY_PYTHON_TEST_POSTGRES_NAME
 usedevelop = True
 extras =
-    flask: flask
     bottle: bottle
     falcon: falcon
+    flask: flask
+    pymongo: pymongo
+    quart: quart
 
 basepython =
     py2.7: python2.7
@@ -225,14 +422,36 @@ basepython =
     py3.6: python3.6
     py3.7: python3.7
     py3.8: python3.8
-    linters: python3
-    pypy: pypy
+    py3.9: python3.9
+    py3.10: python3.10
+    py3.11: python3.11
+
+    # Python version is pinned here because flake8 actually behaves differently
+    # depending on which version is used. You can patch this out to point to
+    # some random Python 3 binary, but then you get guaranteed mismatches with
+    # CI. Other tools such as mypy and black have options that pin the Python
+    # version.
+    linters: python3.9
 
 commands =
-    py.test {env:TESTPATH} {posargs}
+    ; https://github.com/pytest-dev/pytest/issues/5532
+    {py3.5,py3.6,py3.7,py3.8,py3.9}-flask-v{0.11,0.12}: pip install pytest<5
+    {py3.6,py3.7,py3.8,py3.9}-flask-v{0.11}: pip install Werkzeug<2
+    ; https://github.com/pallets/flask/issues/4455
+    {py3.7,py3.8,py3.9,py3.10,py3.11}-flask-v{0.11,0.12,1.0,1.1}: pip install "itsdangerous>=0.24,<2.0" "markupsafe<2.0.0" "jinja2<3.1.1"
+    ; https://github.com/more-itertools/more-itertools/issues/578
+    py3.5-flask-v{0.11,0.12}: pip install more-itertools<8.11.0
+
+    ; use old pytest for old Python versions:
+    {py2.7,py3.4,py3.5}: pip install pytest-forked==1.1.3
+
+    ; Running `py.test` as an executable suffers from an import error
+    ; when loading tests in scenarios. In particular, django fails to
+    ; load the settings from the test module.
+    python -m pytest --durations=5 -vvv {env:TESTPATH} {posargs}
 
 [testenv:linters]
 commands =
-    flake8 tests examples sentry_sdk
-    black --check tests examples sentry_sdk
-    mypy examples sentry_sdk
+    flake8 tests sentry_sdk
+    black --check tests sentry_sdk
+    mypy sentry_sdk