ref: Remove old interface validation code #13332

Merged: 10 commits merged on May 29, 2019
Changes from 6 commits
36 changes: 0 additions & 36 deletions .travis.yml
@@ -130,11 +130,6 @@ matrix:
name: 'Backend [Postgres] (2/2)'
env: TEST_SUITE=postgres DB=postgres TOTAL_TEST_GROUPS=2 TEST_GROUP=1

# XXX(markus): Remove after rust interfaces are done
- <<: *postgres_default
name: 'Backend [Postgres] (Rust Interface Renormalization)'
env: TEST_SUITE=postgres DB=postgres SENTRY_TEST_USE_RUST_INTERFACE_RENORMALIZATION=1

- python: 2.7
name: 'Backend [Riak]'
env: TEST_SUITE=riak DB=postgres
@@ -157,12 +152,6 @@ matrix:
name: 'Acceptance'
env: TEST_SUITE=acceptance USE_SNUBA=1

# XXX(markus): Remove after rust interfaces are done
- <<: *acceptance_default
python: 2.7
name: 'Acceptance (Rust Interface Renormalization)'
env: TEST_SUITE=acceptance USE_SNUBA=1 SENTRY_TEST_USE_RUST_INTERFACE_RENORMALIZATION=1 PERCY_ENABLE=0

- python: 2.7
name: 'Frontend'
env: TEST_SUITE=js
@@ -224,28 +213,6 @@ matrix:
before_script:
- psql -c 'create database sentry;' -U postgres

# XXX(markus): Remove after rust interfaces are done
- python: 2.7
name: 'Snuba Integration (Rust Interface Renormalization)'
env: TEST_SUITE=snuba USE_SNUBA=1 SENTRY_ZOOKEEPER_HOSTS=localhost:2181 SENTRY_KAFKA_HOSTS=localhost:9092 SENTRY_TEST_USE_RUST_INTERFACE_RENORMALIZATION=1
services:
- docker
- memcached
- redis-server
- postgresql
before_install:
- docker run -d --network host --name zookeeper -e ZOOKEEPER_CLIENT_PORT=2181 confluentinc/cp-zookeeper:4.1.0
- docker run -d --network host --name kafka -e KAFKA_ZOOKEEPER_CONNECT=localhost:2181 -e KAFKA_ADVERTISED_LISTENERS=PLAINTEXT://localhost:9092 -e KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR=1 confluentinc/cp-kafka:4.1.0
- docker run -d --network host --name clickhouse-server --ulimit nofile=262144:262144 yandex/clickhouse-server:18.14.9
- docker run -d --network host --name snuba --env SNUBA_SETTINGS=test --env CLICKHOUSE_SERVER=localhost:9000 getsentry/snuba
- docker ps -a
install:
- python setup.py install_egg_info
- pip install -e ".[dev,tests,optional]"
- pip install confluent-kafka
before_script:
- psql -c 'create database sentry;' -U postgres

# Deploy 'storybook' (component & style guide) - allowed to fail
- language: node_js
name: 'Storybook Deploy'
@@ -278,9 +245,6 @@ matrix:
# is changed.
- env: TEST_SUITE=symbolicator

# XXX(markus): Remove after rust interfaces are done
- env: TEST_SUITE=postgres DB=postgres SENTRY_TEST_USE_RUST_INTERFACE_RENORMALIZATION=1

notifications:
webhooks:
urls:
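For context, the CI jobs removed above only re-ran the existing Postgres, acceptance, and Snuba suites with `SENTRY_TEST_USE_RUST_INTERFACE_RENORMALIZATION=1`; the flag itself was read in `src/sentry/interfaces/base.py` (next file). A minimal sketch of that pattern, using the names from the removed code:

```python
import os

# Sketch of the removed test-only toggle: the Travis matrix exported
# SENTRY_TEST_USE_RUST_INTERFACE_RENORMALIZATION=1 so the same suites ran
# once with and once without Rust interface renormalization.
RUST_RENORMALIZED_DEFAULT = os.environ.get(
    "SENTRY_TEST_USE_RUST_INTERFACE_RENORMALIZATION",
    "false",
).lower() in ("true", "1")
```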
17 changes: 3 additions & 14 deletions src/sentry/interfaces/base.py
@@ -1,6 +1,5 @@
from __future__ import absolute_import

import os

from collections import OrderedDict
import logging
@@ -19,14 +18,6 @@
logger = logging.getLogger("sentry.events")
interface_logger = logging.getLogger("sentry.interfaces")

# This flag is only effectively used for the testsuite. In production the
# return value of `sentry.models.event._should_skip_to_python` is explicitly
# passed to interfaces.
RUST_RENORMALIZED_DEFAULT = os.environ.get(
"SENTRY_TEST_USE_RUST_INTERFACE_RENORMALIZATION",
"false"
).lower() in ("true", "1")


def get_interface(name):
try:
@@ -43,7 +34,7 @@ def get_interface(name):
return interface


def get_interfaces(data, rust_renormalized=RUST_RENORMALIZED_DEFAULT):
def get_interfaces(data):
result = []
for key, data in six.iteritems(data):
# Skip invalid interfaces that were nulled out during normalization
@@ -55,9 +46,7 @@ def get_interfaces(data, rust_renormalized=RUST_RENORMALIZED_DEFAULT):
except ValueError:
continue

value = safe_execute(cls.to_python, data,
rust_renormalized=rust_renormalized,
_with_transaction=False)
value = safe_execute(cls.to_python, data, _with_transaction=False)
if not value:
continue

@@ -139,7 +128,7 @@ def __setattr__(self, name, value):
self._data[name] = value

@classmethod
def to_python(cls, data, rust_renormalized=RUST_RENORMALIZED_DEFAULT):
def to_python(cls, data):
"""Creates a python interface object from the given raw data.

This function can assume fully normalized and valid data. It can create
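With the flag gone, `get_interfaces` and `Interface.to_python` take only the normalized data. A hypothetical caller-side sketch (the helper name and payload below are illustrative, not from this PR), assuming the event payload has already been fully normalized:

```python
from sentry.interfaces.base import get_interface

def iter_interfaces(normalized_data):
    # Illustrative helper: mirrors what get_interfaces() does after this PR,
    # minus sorting and safe_execute. Assumes `normalized_data` is the already
    # normalized event payload, e.g. {"breadcrumbs": {"values": [...]}, ...}.
    for key, value in normalized_data.items():
        if not value:
            # Invalid interfaces are nulled out during normalization; skip them.
            continue
        try:
            cls = get_interface(key)
        except ValueError:
            # Unknown interface name.
            continue
        yield key, cls.to_python(value)  # single-argument to_python after this change
```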
98 changes: 21 additions & 77 deletions src/sentry/interfaces/breadcrumbs.py
@@ -10,11 +10,8 @@

__all__ = ('Breadcrumbs', )

import six

from sentry.constants import LOG_LEVELS_MAP
from sentry.interfaces.base import Interface, InterfaceValidationError, prune_empty_keys, RUST_RENORMALIZED_DEFAULT
from sentry.utils.safe import get_path, trim
from sentry.interfaces.base import Interface, prune_empty_keys
from sentry.utils.safe import get_path
from sentry.utils.dates import to_timestamp, to_datetime, parse_timestamp


@@ -37,19 +34,11 @@ class Breadcrumbs(Interface):
score = 800

@classmethod
def to_python(cls, data, rust_renormalized=RUST_RENORMALIZED_DEFAULT):
def to_python(cls, data):
values = []
for index, crumb in enumerate(get_path(data, 'values', filter=True, default=())):
# TODO(ja): Handle already invalid and None breadcrumbs

try:
values.append(cls.normalize_crumb(crumb, rust_renormalized=rust_renormalized))
except Exception:
# TODO(dcramer): we dont want to discard the entirety of data
# when one breadcrumb errors, but it'd be nice if we could still
# record an error
if rust_renormalized:
raise
values.append(cls.normalize_crumb(crumb))

return cls(values=values)

@@ -69,70 +58,25 @@ def to_json(self):
})

@classmethod
def normalize_crumb(cls, crumb, rust_renormalized):
if rust_renormalized:
crumb = dict(crumb)
ts = parse_timestamp(crumb.get('timestamp'))
if ts:
crumb['timestamp'] = to_timestamp(ts)
else:
crumb['timestamp'] = None

for key in (
'type',
'level',
'message',
'category',
'event_id',
'data',
):
crumb.setdefault(key, None)

return crumb

ty = crumb.get('type') or 'default'
level = crumb.get('level')
if not isinstance(level, six.string_types) or \
(level not in LOG_LEVELS_MAP and level != 'critical'):
level = 'info'

def normalize_crumb(cls, crumb):
crumb = dict(crumb)
ts = parse_timestamp(crumb.get('timestamp'))
if ts is None:
raise InterfaceValidationError('Unable to determine timestamp for crumb')
ts = to_timestamp(ts)

msg = crumb.get('message')
if msg is not None:
msg = trim(six.text_type(msg), 4096)

category = crumb.get('category')
if category is not None:
category = trim(six.text_type(category), 256)

event_id = crumb.get('event_id')

data = crumb.get('data')
if not isinstance(data, dict):
# TODO(dcramer): we dont want to discard the the rest of the
# crumb, but it'd be nice if we could record an error
# raise InterfaceValidationError(
# 'The ``data`` on breadcrumbs must be a mapping (received {})'.format(
# type(crumb['data']),
# )
# )
data = None
if ts:
crumb['timestamp'] = to_timestamp(ts)
else:
data = trim(data, 4096)

return {
'type': ty,
'level': level,
'timestamp': ts,
'message': msg,
'category': category,
'event_id': event_id,
'data': data
}
crumb['timestamp'] = None

for key in (
'type',
'level',
'message',
'category',
'event_id',
'data',
):
crumb.setdefault(key, None)

return crumb

def get_api_context(self, is_public=False, platform=None):
def _convert(x):
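Since breadcrumbs now arrive fully normalized, `normalize_crumb` reduces to coercing the timestamp and defaulting missing keys to None. A hypothetical example (the crumb payload is made up for illustration):

```python
from sentry.interfaces.breadcrumbs import Breadcrumbs

# A crumb as it might look after normalization; the values are illustrative.
crumb = {"timestamp": 1559131200, "message": "clicked checkout"}

normalized = Breadcrumbs.normalize_crumb(crumb)
# The timestamp is parsed and converted back to a float epoch, and every
# missing key from (type, level, message, category, event_id, data) is
# defaulted to None, so the expected result is roughly:
# {"timestamp": 1559131200.0, "message": "clicked checkout", "type": None,
#  "level": None, "category": None, "event_id": None, "data": None}
```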
4 changes: 2 additions & 2 deletions src/sentry/interfaces/contexts.py
@@ -13,7 +13,7 @@

from django.utils.encoding import force_text

from sentry.interfaces.base import Interface, prune_empty_keys, RUST_RENORMALIZED_DEFAULT
from sentry.interfaces.base import Interface, prune_empty_keys
from sentry.utils.contexts_normalization import normalize_os, normalize_runtime
from sentry.utils.safe import get_path, trim

@@ -186,7 +186,7 @@ class Contexts(Interface):
score = 800

@classmethod
def to_python(cls, data, rust_renormalized=RUST_RENORMALIZED_DEFAULT):
def to_python(cls, data):
rv = {}
for alias, value in six.iteritems(data):
# XXX(markus): The `None`-case should be handled in the UI and
123 changes: 5 additions & 118 deletions src/sentry/interfaces/debug_meta.py
@@ -1,73 +1,8 @@
from __future__ import absolute_import

import six
import uuid

__all__ = ('DebugMeta', )

from sentry.interfaces.base import Interface, InterfaceValidationError, prune_empty_keys, RUST_RENORMALIZED_DEFAULT

from symbolic import parse_addr, normalize_debug_id

image_types = {}


def imagetype(name):
def decorator(f):
image_types[name] = f
return f

return decorator


def _addr(x):
if x is None:
return None
return '0x%x' % parse_addr(x)


@imagetype('apple')
@imagetype('macho')
@imagetype('elf')
@imagetype('pe')
@imagetype('symbolic')
def process_native_image(image):
# NOTE that this is dead code as soon as Rust renormalization is fully
# enabled. After that, this code should be deleted. There is a difference
# TODO(untitaker): Remove with other normalization code.
try:
native_image = {
'code_file': image.get('code_file') or image.get('name'),
'debug_id': normalize_debug_id(
image.get('debug_id') or image.get('id') or image.get('uuid')),
'image_addr': _addr(image.get('image_addr')),
'image_size': _addr(image.get('image_size')),
'image_vmaddr': _addr(image.get('image_vmaddr')),
}

if image.get('arch') is not None:
native_image['arch'] = image.get('arch')
if image.get('code_id') is not None:
native_image['code_id'] = image.get('code_id')
if image.get('debug_file') is not None:
native_image['debug_file'] = image.get('debug_file')

return native_image
except KeyError as e:
raise InterfaceValidationError('Missing value for symbolic image: %s' % e.args[0])


@imagetype('proguard')
def process_proguard_image(image):
try:
if image['uuid'] is None:
raise KeyError('uuid')

return {
'uuid': six.text_type(uuid.UUID(image['uuid'])),
}
except KeyError as e:
raise InterfaceValidationError('Missing value for proguard image: %s' % e.args[0])
from sentry.interfaces.base import Interface, prune_empty_keys


class DebugMeta(Interface):
@@ -89,25 +24,11 @@ class DebugMeta(Interface):
external_type = 'debugmeta'

@classmethod
def to_python(cls, data, rust_renormalized=RUST_RENORMALIZED_DEFAULT):
is_debug_build = data.get('is_debug_build', None)

if rust_renormalized:
images = data.get('images', None) or []
else:
if is_debug_build is not None and not isinstance(is_debug_build, bool):
raise InterfaceValidationError('Invalid value for "is_debug_build"')

images = []
for x in data.get('images', None) or ():
if x is None:
continue
images.append(cls.normalize_image(x))

def to_python(cls, data):
return cls(
images=images,
sdk_info=cls.normalize_sdk_info(data.get('sdk_info')),
is_debug_build=is_debug_build,
images=data.get('images', None) or [],
sdk_info=data.get('sdk_info'),
is_debug_build=data.get('is_debug_build'),
)

def to_json(self):
@@ -116,37 +37,3 @@ def to_json(self):
'sdk_info': self.sdk_info or None,
'is_debug_build': self.is_debug_build
})

@staticmethod
def normalize_image(image):
ty = image.get('type')
if not ty:
raise InterfaceValidationError('Image type not provided')
if ty == 'apple':
# Legacy alias. The schema is actually slightly different, but
# process_native_image can deal with this and convert to a valid
# MachO image payload.
ty = 'macho'
func = image_types.get(ty)
if func is None:
raise InterfaceValidationError('Unknown image type %r' % image)
rv = func(image)
assert 'uuid' in rv or 'debug_id' in rv, 'debug image normalizer did not produce an identifier'
rv['type'] = ty
return rv

@staticmethod
def normalize_sdk_info(sdk_info):
if not sdk_info:
return None
try:
return {
'dsym_type': sdk_info.get('dsym_type') or 'none',
'sdk_name': sdk_info['sdk_name'],
'version_major': sdk_info['version_major'],
'version_minor': sdk_info['version_minor'],
'version_patchlevel': sdk_info.get('version_patchlevel') or 0,
'build': sdk_info.get('build'),
}
except KeyError as e:
raise InterfaceValidationError('Missing value for sdk_info: %s' % e.args[0])
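After this change `DebugMeta.to_python` is a pass-through: image and SDK-info validation happens during normalization, so the interface just stores whatever it is given. A hypothetical usage sketch with an assumed, already-normalized payload:

```python
from sentry.interfaces.debug_meta import DebugMeta

# Assumed, already-normalized debug_meta payload (field values are illustrative).
data = {
    "images": [
        {
            "type": "macho",
            "debug_id": "a2df9730-26e8-315e-aaf5-388e696520ee",
            "code_file": "/usr/lib/libfoo.dylib",
            "image_addr": "0x100000000",
        },
    ],
    "sdk_info": {"sdk_name": "iOS", "version_major": 12, "version_minor": 1},
}

debug_meta = DebugMeta.to_python(data)
# images, sdk_info and is_debug_build are stored as-is; to_json() later
# drops empty values via prune_empty_keys().
```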