
ref: Remove old interface validation code #13332


Merged: 10 commits, May 29, 2019
36 changes: 0 additions & 36 deletions .travis.yml
@@ -130,11 +130,6 @@ matrix:
name: 'Backend [Postgres] (2/2)'
env: TEST_SUITE=postgres DB=postgres TOTAL_TEST_GROUPS=2 TEST_GROUP=1

# XXX(markus): Remove after rust interfaces are done
- <<: *postgres_default
name: 'Backend [Postgres] (Rust Interface Renormalization)'
env: TEST_SUITE=postgres DB=postgres SENTRY_TEST_USE_RUST_INTERFACE_RENORMALIZATION=1

- python: 2.7
name: 'Backend [Riak]'
env: TEST_SUITE=riak DB=postgres
@@ -157,12 +152,6 @@ matrix:
name: 'Acceptance'
env: TEST_SUITE=acceptance USE_SNUBA=1

# XXX(markus): Remove after rust interfaces are done
- <<: *acceptance_default
python: 2.7
name: 'Acceptance (Rust Interface Renormalization)'
env: TEST_SUITE=acceptance USE_SNUBA=1 SENTRY_TEST_USE_RUST_INTERFACE_RENORMALIZATION=1 PERCY_ENABLE=0

- python: 2.7
name: 'Frontend'
env: TEST_SUITE=js
@@ -224,28 +213,6 @@ matrix:
before_script:
- psql -c 'create database sentry;' -U postgres

# XXX(markus): Remove after rust interfaces are done
- python: 2.7
name: 'Snuba Integration (Rust Interface Renormalization)'
env: TEST_SUITE=snuba USE_SNUBA=1 SENTRY_ZOOKEEPER_HOSTS=localhost:2181 SENTRY_KAFKA_HOSTS=localhost:9092 SENTRY_TEST_USE_RUST_INTERFACE_RENORMALIZATION=1
services:
- docker
- memcached
- redis-server
- postgresql
before_install:
- docker run -d --network host --name zookeeper -e ZOOKEEPER_CLIENT_PORT=2181 confluentinc/cp-zookeeper:4.1.0
- docker run -d --network host --name kafka -e KAFKA_ZOOKEEPER_CONNECT=localhost:2181 -e KAFKA_ADVERTISED_LISTENERS=PLAINTEXT://localhost:9092 -e KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR=1 confluentinc/cp-kafka:4.1.0
- docker run -d --network host --name clickhouse-server --ulimit nofile=262144:262144 yandex/clickhouse-server:18.14.9
- docker run -d --network host --name snuba --env SNUBA_SETTINGS=test --env CLICKHOUSE_SERVER=localhost:9000 getsentry/snuba
- docker ps -a
install:
- python setup.py install_egg_info
- pip install -e ".[dev,tests,optional]"
- pip install confluent-kafka
before_script:
- psql -c 'create database sentry;' -U postgres

# Deploy 'storybook' (component & style guide) - allowed to fail
- language: node_js
name: 'Storybook Deploy'
Expand Down Expand Up @@ -278,9 +245,6 @@ matrix:
# is changed.
- env: TEST_SUITE=symbolicator

# XXX(markus): Remove after rust interfaces are done
- env: TEST_SUITE=postgres DB=postgres SENTRY_TEST_USE_RUST_INTERFACE_RENORMALIZATION=1

notifications:
webhooks:
urls:
2 changes: 1 addition & 1 deletion requirements-base.txt
@@ -56,7 +56,7 @@ redis>=2.10.3,<2.10.6
requests-oauthlib==0.3.3
requests[security]>=2.20.0,<2.21.0
selenium==3.141.0
semaphore>=0.4.34,<0.5.0
semaphore>=0.4.35,<0.5.0
sentry-sdk>=0.8.0
setproctitle>=1.1.7,<1.2.0
simplejson>=3.2.0,<3.9.0
17 changes: 3 additions & 14 deletions src/sentry/interfaces/base.py
@@ -1,6 +1,5 @@
from __future__ import absolute_import

import os

from collections import OrderedDict
import logging
@@ -19,14 +18,6 @@
logger = logging.getLogger("sentry.events")
interface_logger = logging.getLogger("sentry.interfaces")

# This flag is only effectively used for the testsuite. In production the
# return value of `sentry.models.event._should_skip_to_python` is explicitly
# passed to interfaces.
RUST_RENORMALIZED_DEFAULT = os.environ.get(
"SENTRY_TEST_USE_RUST_INTERFACE_RENORMALIZATION",
"false"
).lower() in ("true", "1")


def get_interface(name):
try:
@@ -43,7 +34,7 @@ def get_interface(name):
return interface


def get_interfaces(data, rust_renormalized=RUST_RENORMALIZED_DEFAULT):
def get_interfaces(data):
result = []
for key, data in six.iteritems(data):
# Skip invalid interfaces that were nulled out during normalization
@@ -55,9 +46,7 @@ def get_interfaces(data, rust_renormalized=RUST_RENORMALIZED_DEFAULT):
except ValueError:
continue

value = safe_execute(cls.to_python, data,
rust_renormalized=rust_renormalized,
_with_transaction=False)
value = safe_execute(cls.to_python, data, _with_transaction=False)
if not value:
continue

@@ -139,7 +128,7 @@ def __setattr__(self, name, value):
self._data[name] = value

@classmethod
def to_python(cls, data, rust_renormalized=RUST_RENORMALIZED_DEFAULT):
def to_python(cls, data):
"""Creates a python interface object from the given raw data.

This function can assume fully normalized and valid data. It can create
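Taken together, the base.py changes mean `get_interfaces` and `Interface.to_python` no longer thread a `rust_renormalized` flag through hydration; the data is assumed to be fully normalized before these functions run. Below is a minimal, self-contained sketch of the simplified call path. The `REGISTRY`, `Interface`, and `Breadcrumbs` names here are illustrative stand-ins rather than Sentry's real implementations, and plain `dict.items()` stands in for `six.iteritems` so the snippet runs unmodified on Python 3.

class Interface(object):
    # Toy stand-in for sentry.interfaces.base.Interface.
    def __init__(self, **kwargs):
        self._data = kwargs

    @classmethod
    def to_python(cls, data):
        # Data is assumed to be fully normalized upstream; there is no
        # rust_renormalized flag and no re-validation here, only hydration.
        return cls(**data)


class Breadcrumbs(Interface):
    pass


REGISTRY = {'breadcrumbs': Breadcrumbs}  # illustrative interface registry


def get_interfaces(data):
    result = []
    for key, value in data.items():
        # Invalid interfaces were nulled out during normalization; skip them.
        if not value or key not in REGISTRY:
            continue
        result.append((key, REGISTRY[key].to_python(value)))
    return result


print(get_interfaces({'breadcrumbs': {'values': []}, 'logentry': None}))

With only one code path left to exercise, the `SENTRY_TEST_USE_RUST_INTERFACE_RENORMALIZATION` build variants removed from .travis.yml above no longer have a purpose.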
98 changes: 21 additions & 77 deletions src/sentry/interfaces/breadcrumbs.py
@@ -10,11 +10,8 @@

__all__ = ('Breadcrumbs', )

import six

from sentry.constants import LOG_LEVELS_MAP
from sentry.interfaces.base import Interface, InterfaceValidationError, prune_empty_keys, RUST_RENORMALIZED_DEFAULT
from sentry.utils.safe import get_path, trim
from sentry.interfaces.base import Interface, prune_empty_keys
from sentry.utils.safe import get_path
from sentry.utils.dates import to_timestamp, to_datetime, parse_timestamp


@@ -37,19 +34,11 @@ class Breadcrumbs(Interface):
score = 800

@classmethod
def to_python(cls, data, rust_renormalized=RUST_RENORMALIZED_DEFAULT):
def to_python(cls, data):
values = []
for index, crumb in enumerate(get_path(data, 'values', filter=True, default=())):
# TODO(ja): Handle already invalid and None breadcrumbs

try:
values.append(cls.normalize_crumb(crumb, rust_renormalized=rust_renormalized))
except Exception:
# TODO(dcramer): we dont want to discard the entirety of data
# when one breadcrumb errors, but it'd be nice if we could still
# record an error
if rust_renormalized:
raise
values.append(cls.normalize_crumb(crumb))

return cls(values=values)

@@ -69,70 +58,25 @@ def to_json(self):
})

@classmethod
def normalize_crumb(cls, crumb, rust_renormalized):
    if rust_renormalized:
        crumb = dict(crumb)
        ts = parse_timestamp(crumb.get('timestamp'))
        if ts:
            crumb['timestamp'] = to_timestamp(ts)
        else:
            crumb['timestamp'] = None

        for key in (
            'type',
            'level',
            'message',
            'category',
            'event_id',
            'data',
        ):
            crumb.setdefault(key, None)

        return crumb

    ty = crumb.get('type') or 'default'
    level = crumb.get('level')
    if not isinstance(level, six.string_types) or \
            (level not in LOG_LEVELS_MAP and level != 'critical'):
        level = 'info'

    ts = parse_timestamp(crumb.get('timestamp'))
    if ts is None:
        raise InterfaceValidationError('Unable to determine timestamp for crumb')
    ts = to_timestamp(ts)

    msg = crumb.get('message')
    if msg is not None:
        msg = trim(six.text_type(msg), 4096)

    category = crumb.get('category')
    if category is not None:
        category = trim(six.text_type(category), 256)

    event_id = crumb.get('event_id')

    data = crumb.get('data')
    if not isinstance(data, dict):
        # TODO(dcramer): we dont want to discard the the rest of the
        # crumb, but it'd be nice if we could record an error
        # raise InterfaceValidationError(
        #     'The ``data`` on breadcrumbs must be a mapping (received {})'.format(
        #         type(crumb['data']),
        #     )
        # )
        data = None
    else:
        data = trim(data, 4096)

    return {
        'type': ty,
        'level': level,
        'timestamp': ts,
        'message': msg,
        'category': category,
        'event_id': event_id,
        'data': data
    }

def normalize_crumb(cls, crumb):
    crumb = dict(crumb)
    ts = parse_timestamp(crumb.get('timestamp'))
    if ts:
        crumb['timestamp'] = to_timestamp(ts)
    else:
        crumb['timestamp'] = None

    for key in (
        'type',
        'level',
        'message',
        'category',
        'event_id',
        'data',
    ):
        crumb.setdefault(key, None)

    return crumb

def get_api_context(self, is_public=False, platform=None):
def _convert(x):
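What survives of breadcrumb handling is small enough to sanity-check in isolation: the timestamp is parsed and converted to an epoch float (or set to None), and the remaining keys are defaulted to None rather than validated. The snippet below is a standalone approximation of that contract; `parse_timestamp` and `to_timestamp` are simplified stand-ins for the helpers in sentry.utils.dates and only handle one ISO 8601 form.

import calendar
from datetime import datetime


def parse_timestamp(value):
    # Simplified stand-in for sentry.utils.dates.parse_timestamp.
    try:
        return datetime.strptime(value, '%Y-%m-%dT%H:%M:%SZ')
    except (TypeError, ValueError):
        return None


def to_timestamp(dt):
    # Simplified stand-in for sentry.utils.dates.to_timestamp.
    return calendar.timegm(dt.timetuple()) + dt.microsecond / 1e6


def normalize_crumb(crumb):
    # Mirrors the body of the new classmethod, minus the cls argument.
    crumb = dict(crumb)
    ts = parse_timestamp(crumb.get('timestamp'))
    crumb['timestamp'] = to_timestamp(ts) if ts else None
    for key in ('type', 'level', 'message', 'category', 'event_id', 'data'):
        crumb.setdefault(key, None)
    return crumb


# A missing or unparseable timestamp now normalizes to None instead of
# raising, and absent keys are filled with None.
print(normalize_crumb({'message': 'hi', 'timestamp': '2019-05-29T12:00:00Z'}))
print(normalize_crumb({'message': 'no timestamp'}))

This matches the removal of the InterfaceValidationError import at the top of the file: a bad crumb no longer aborts normalization on the Python side.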
4 changes: 2 additions & 2 deletions src/sentry/interfaces/contexts.py
@@ -13,7 +13,7 @@

from django.utils.encoding import force_text

from sentry.interfaces.base import Interface, prune_empty_keys, RUST_RENORMALIZED_DEFAULT
from sentry.interfaces.base import Interface, prune_empty_keys
from sentry.utils.contexts_normalization import normalize_os, normalize_runtime
from sentry.utils.safe import get_path, trim

@@ -186,7 +186,7 @@ class Contexts(Interface):
score = 800

@classmethod
def to_python(cls, data, rust_renormalized=RUST_RENORMALIZED_DEFAULT):
def to_python(cls, data):
rv = {}
for alias, value in six.iteritems(data):
# XXX(markus): The `None`-case should be handled in the UI and