Skip to content

Commit

Permalink
refactor: Migration of json utilities from core (apache#28522)
Browse files Browse the repository at this point in the history
Co-authored-by: Eyal Ezer <eyal.ezer@ge.com>
  • Loading branch information
eyalezer and Eyal Ezer authored May 20, 2024
1 parent ef9191b commit acd555e
Show file tree
Hide file tree
Showing 30 changed files with 431 additions and 316 deletions.
4 changes: 2 additions & 2 deletions superset/annotation_layers/annotations/schemas.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@
from marshmallow.validate import Length

from superset.exceptions import SupersetException
from superset.utils import core as utils
from superset.utils import json as json_utils

openapi_spec_methods_override = {
"get": {"get": {"summary": "Get an annotation layer"}},
Expand Down Expand Up @@ -51,7 +51,7 @@

def validate_json(value: Union[bytes, bytearray, str]) -> None:
try:
utils.validate_json(value)
json_utils.validate_json(value)
except SupersetException as ex:
raise ValidationError("JSON not valid") from ex

Expand Down
7 changes: 3 additions & 4 deletions superset/charts/data/api.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,6 @@
import logging
from typing import Any, TYPE_CHECKING

import simplejson
from flask import current_app, g, make_response, request, Response
from flask_appbuilder.api import expose, protect
from flask_babel import gettext as _
Expand All @@ -47,11 +46,11 @@
from superset.exceptions import QueryObjectValidationError
from superset.extensions import event_logger
from superset.models.sql_lab import Query
from superset.utils import json as json_utils
from superset.utils.core import (
create_zip,
DatasourceType,
get_user_id,
json_int_dttm_ser,
)
from superset.utils.decorators import logs_context
from superset.views.base import CsvResponse, generate_download_headers, XlsxResponse
Expand Down Expand Up @@ -396,9 +395,9 @@ def _process_data(query_data: Any) -> Any:
)

if result_format == ChartDataResultFormat.JSON:
response_data = simplejson.dumps(
response_data = json_utils.dumps(
{"result": result["queries"]},
default=json_int_dttm_ser,
default=json_utils.json_int_dttm_ser,
ignore_nan=True,
)
resp = make_response(response_data, 200)
Expand Down
2 changes: 1 addition & 1 deletion superset/common/query_object.py
Original file line number Diff line number Diff line change
Expand Up @@ -43,10 +43,10 @@
get_column_names,
get_metric_names,
is_adhoc_metric,
json_int_dttm_ser,
QueryObjectFilterClause,
)
from superset.utils.hashing import md5_sha_from_dict
from superset.utils.json import json_int_dttm_ser

if TYPE_CHECKING:
from superset.connectors.sqla.models import BaseDatasource
Expand Down
4 changes: 2 additions & 2 deletions superset/dashboards/schemas.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@
from superset import security_manager
from superset.exceptions import SupersetException
from superset.tags.models import TagType
from superset.utils import core as utils
from superset.utils import json as json_utils

get_delete_ids_schema = {"type": "array", "items": {"type": "integer"}}
get_export_ids_schema = {"type": "array", "items": {"type": "integer"}}
Expand Down Expand Up @@ -88,7 +88,7 @@

def validate_json(value: Union[bytes, bytearray, str]) -> None:
try:
utils.validate_json(value)
json_utils.validate_json(value)
except SupersetException as ex:
raise ValidationError("JSON not valid") from ex

Expand Down
7 changes: 3 additions & 4 deletions superset/embedded/view.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,6 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import json
from typing import Callable

from flask import abort, request
Expand All @@ -25,7 +24,7 @@
from superset import event_logger, is_feature_enabled
from superset.daos.dashboard import EmbeddedDashboardDAO
from superset.superset_typing import FlaskResponse
from superset.utils import core as utils
from superset.utils import json as json_utils
from superset.views.base import BaseSupersetView, common_bootstrap_payload


Expand Down Expand Up @@ -87,7 +86,7 @@ def embedded(
return self.render_template(
"superset/spa.html",
entry="embedded",
bootstrap_data=json.dumps(
bootstrap_data, default=utils.pessimistic_json_iso_dttm_ser
bootstrap_data=json_utils.dumps(
bootstrap_data, default=json_utils.pessimistic_json_iso_dttm_ser
),
)
2 changes: 1 addition & 1 deletion superset/key_value/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@

from superset.key_value.exceptions import KeyValueParseKeyError
from superset.key_value.types import KeyValueFilter, KeyValueResource
from superset.utils.core import json_dumps_w_dates
from superset.utils.json import json_dumps_w_dates

HASHIDS_MIN_LENGTH = 11

Expand Down
6 changes: 3 additions & 3 deletions superset/models/core.py
Original file line number Diff line number Diff line change
Expand Up @@ -76,7 +76,7 @@
from superset.result_set import SupersetResultSet
from superset.sql_parse import Table
from superset.superset_typing import OAuth2ClientConfig, ResultSetColumnType
from superset.utils import cache as cache_util, core as utils
from superset.utils import cache as cache_util, core as utils, json as json_utils
from superset.utils.backports import StrEnum
from superset.utils.core import DatasourceName, get_username
from superset.utils.oauth2 import get_oauth2_access_token
Expand Down Expand Up @@ -601,7 +601,7 @@ def column_needs_conversion(df_series: pd.Series) -> bool:

for col, coltype in df.dtypes.to_dict().items():
if coltype == numpy.object_ and column_needs_conversion(df[col]):
df[col] = df[col].apply(utils.json_dumps_w_dates)
df[col] = df[col].apply(json_utils.json_dumps_w_dates)
return df

@property
Expand Down Expand Up @@ -957,7 +957,7 @@ def get_pk_constraint(self, table: Table) -> dict[str, Any]:

def _convert(value: Any) -> Any:
try:
return utils.base_json_conv(value)
return json_utils.base_json_conv(value)
except TypeError:
return None

Expand Down
4 changes: 2 additions & 2 deletions superset/models/dashboard.py
Original file line number Diff line number Diff line change
Expand Up @@ -51,7 +51,7 @@
from superset.tasks.thumbnails import cache_dashboard_thumbnail
from superset.tasks.utils import get_current_user
from superset.thumbnails.digest import get_dashboard_digest
from superset.utils import core as utils
from superset.utils import core as utils, json as json_utils

metadata = Model.metadata # pylint: disable=no-member
config = app.config
Expand Down Expand Up @@ -372,7 +372,7 @@ def export_dashboards( # pylint: disable=too-many-locals

return json.dumps(
{"dashboards": copied_dashboards, "datasources": eager_datasources},
cls=utils.DashboardEncoder,
cls=json_utils.DashboardEncoder,
indent=4,
)

Expand Down
4 changes: 2 additions & 2 deletions superset/result_set.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@

from superset.db_engine_specs import BaseEngineSpec
from superset.superset_typing import DbapiDescription, DbapiResult, ResultSetColumnType
from superset.utils import core as utils
from superset.utils import core as utils, json as json_utils
from superset.utils.core import GenericDataType

logger = logging.getLogger(__name__)
Expand Down Expand Up @@ -61,7 +61,7 @@ def dedup(l: list[str], suffix: str = "__", case_sensitive: bool = True) -> list


def stringify(obj: Any) -> str:
return json.dumps(obj, default=utils.json_iso_dttm_ser)
return json_utils.dumps(obj, default=json_utils.json_iso_dttm_ser)


def stringify_values(array: NDArray[Any]) -> NDArray[Any]:
Expand Down
11 changes: 7 additions & 4 deletions superset/sql_lab.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,6 @@

import backoff
import msgpack
import simplejson as json
from celery.exceptions import SoftTimeLimitExceeded
from flask_babel import gettext as __

Expand Down Expand Up @@ -59,8 +58,8 @@
)
from superset.sqllab.limiting_factor import LimitingFactor
from superset.sqllab.utils import write_ipc_buffer
from superset.utils import json as json_utils
from superset.utils.core import (
json_iso_dttm_ser,
override_user,
QuerySource,
zlib_compress,
Expand Down Expand Up @@ -349,9 +348,13 @@ def _serialize_payload(
) -> Union[bytes, str]:
logger.debug("Serializing to msgpack: %r", use_msgpack)
if use_msgpack:
return msgpack.dumps(payload, default=json_iso_dttm_ser, use_bin_type=True)
return msgpack.dumps(
payload, default=json_utils.json_iso_dttm_ser, use_bin_type=True
)

return json.dumps(payload, default=json_iso_dttm_ser, ignore_nan=True)
return json_utils.dumps(
payload, default=json_utils.json_iso_dttm_ser, ignore_nan=True
)


def _serialize_and_expand_data(
Expand Down
11 changes: 5 additions & 6 deletions superset/sqllab/api.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,6 @@
from typing import Any, cast, Optional
from urllib import parse

import simplejson as json
from flask import request, Response
from flask_appbuilder import permission_name
from flask_appbuilder.api import expose, protect, rison, safe
Expand Down Expand Up @@ -62,7 +61,7 @@
from superset.sqllab.utils import bootstrap_sqllab_data
from superset.sqllab.validators import CanAccessQueryValidatorImpl
from superset.superset_typing import FlaskResponse
from superset.utils import core as utils
from superset.utils import core as utils, json as json_utils
from superset.views.base import CsvResponse, generate_download_headers, json_success
from superset.views.base_api import BaseSupersetApi, requires_json, statsd_metrics

Expand Down Expand Up @@ -132,9 +131,9 @@ def get(self) -> Response:
result = bootstrap_sqllab_data(user_id)

return json_success(
json.dumps(
json_utils.dumps(
{"result": result},
default=utils.json_iso_dttm_ser,
default=json_utils.json_iso_dttm_ser,
ignore_nan=True,
),
200,
Expand Down Expand Up @@ -343,9 +342,9 @@ def get_results(self, **kwargs: Any) -> FlaskResponse:

# Using pessimistic json serialization since some database drivers can return
# unserializable types at times
payload = json.dumps(
payload = json_utils.dumps(
result,
default=utils.pessimistic_json_iso_dttm_ser,
default=json_utils.pessimistic_json_iso_dttm_ser,
ignore_nan=True,
)
return json_success(payload, 200)
Expand Down
31 changes: 11 additions & 20 deletions superset/sqllab/execution_context_convertor.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,11 +19,9 @@
import logging
from typing import Any, TYPE_CHECKING

import simplejson as json

import superset.utils.core as utils
from superset.sqllab.command_status import SqlJsonExecutionStatus
from superset.sqllab.utils import apply_display_max_row_configuration_if_require
from superset.utils import json as json_utils

logger = logging.getLogger(__name__)

Expand Down Expand Up @@ -52,23 +50,16 @@ def set_payload(

def serialize_payload(self) -> str:
if self._exc_status == SqlJsonExecutionStatus.HAS_RESULTS:
sql_results = apply_display_max_row_configuration_if_require(
self.payload, self._max_row_in_display_configuration
return json_utils.dumps(
apply_display_max_row_configuration_if_require(
self.payload, self._max_row_in_display_configuration
),
default=json_utils.pessimistic_json_iso_dttm_ser,
ignore_nan=True,
)
try:
return json.dumps(
sql_results,
default=utils.pessimistic_json_iso_dttm_ser,
ignore_nan=True,
)
except UnicodeDecodeError:
return json.dumps(
sql_results,
default=utils.pessimistic_json_iso_dttm_ser,
ensure_ascii=False,
ignore_nan=True,
)

return json.dumps(
{"query": self.payload}, default=utils.json_int_dttm_ser, ignore_nan=True
return json_utils.dumps(
{"query": self.payload},
default=json_utils.json_int_dttm_ser,
ignore_nan=True,
)
2 changes: 1 addition & 1 deletion superset/utils/cache.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,8 +30,8 @@
from superset import db
from superset.extensions import cache_manager
from superset.models.cache import CacheKey
from superset.utils.core import json_int_dttm_ser
from superset.utils.hashing import md5_sha_from_dict
from superset.utils.json import json_int_dttm_ser

if TYPE_CHECKING:
from superset.stats_logger import BaseStatsLogger
Expand Down
Loading

0 comments on commit acd555e

Please sign in to comment.