Commit 3640eb2

* Replace pickle with JSON

Co-authored-by: Ivan Klass <klass.ivanklass@gmail.com>

1 parent: ce957ac

13 changed files with 260 additions and 54 deletions.
constance/codecs.py (new file)
@@ -0,0 +1,93 @@
from __future__ import annotations

import json
import logging
import uuid
from datetime import date
from datetime import datetime
from datetime import time
from datetime import timedelta
from decimal import Decimal
from typing import Any
from typing import Protocol
from typing import TypeVar

logger = logging.getLogger(__name__)

DEFAULT_DISCRIMINATOR = 'default'


class JSONEncoder(json.JSONEncoder):
    """Django-constance custom JSON encoder."""

    def default(self, o):
        for discriminator, (t, _, encoder) in _codecs.items():
            if isinstance(o, t):
                return _as(discriminator, encoder(o))
        raise TypeError(f'Object of type {o.__class__.__name__} is not JSON serializable')


def _as(discriminator: str, v: Any) -> dict[str, Any]:
    return {'__type__': discriminator, '__value__': v}


def dumps(obj, _dumps=json.dumps, cls=JSONEncoder, default_kwargs=None, **kwargs):
    """Serialize object to JSON string."""
    default_kwargs = default_kwargs or {}
    is_default_type = isinstance(obj, (str, int, bool, float, type(None)))
    return _dumps(
        _as(DEFAULT_DISCRIMINATOR, obj) if is_default_type else obj, cls=cls, **dict(default_kwargs, **kwargs)
    )


def loads(s, _loads=json.loads, **kwargs):
    """Deserialize JSON string to object."""
    return _loads(s, object_hook=object_hook, **kwargs)


def object_hook(o: dict) -> Any:
    """Hook function to perform custom deserialization."""
    if o.keys() == {'__type__', '__value__'}:
        if o['__type__'] == DEFAULT_DISCRIMINATOR:
            return o['__value__']
        codec = _codecs.get(o['__type__'])
        if not codec:
            raise ValueError(f'Unsupported type: {o["__type__"]}')
        return codec[1](o['__value__'])
    logger.error('Cannot deserialize object: %s', o)
    raise ValueError(f'Invalid object: {o}')


T = TypeVar('T')


class Encoder(Protocol[T]):
    def __call__(self, value: T, /) -> str: ...  # pragma: no cover


class Decoder(Protocol[T]):
    def __call__(self, value: str, /) -> T: ...  # pragma: no cover


def register_type(t: type[T], discriminator: str, encoder: Encoder[T], decoder: Decoder[T]):
    if not discriminator:
        raise ValueError('Discriminator must be specified')
    if _codecs.get(discriminator) or discriminator == DEFAULT_DISCRIMINATOR:
        raise ValueError(f'Type with discriminator {discriminator} is already registered')
    _codecs[discriminator] = (t, decoder, encoder)


_codecs: dict[str, tuple[type, Decoder, Encoder]] = {}


def _register_default_types():
    # NOTE: datetime must be registered before date, because a datetime is also an instance of date.
    register_type(datetime, 'datetime', datetime.isoformat, datetime.fromisoformat)
    register_type(date, 'date', lambda o: o.isoformat(), lambda o: datetime.fromisoformat(o).date())
    register_type(time, 'time', lambda o: o.isoformat(), time.fromisoformat)
    register_type(Decimal, 'decimal', str, Decimal)
    register_type(uuid.UUID, 'uuid', lambda o: o.hex, uuid.UUID)
    register_type(timedelta, 'timedelta', lambda o: o.total_seconds(), lambda o: timedelta(seconds=o))


_register_default_types()
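
For context, here is a usage sketch of the codec API defined above, assuming the module is importable as constance.codecs (the path the data migration below imports from). The Point class, its encoder/decoder lambdas, and the sample values are illustrative assumptions, not part of the commit.

    from decimal import Decimal

    from constance.codecs import dumps, loads, register_type

    # Registered types round-trip through a {"__type__": ..., "__value__": ...} envelope.
    payload = dumps(Decimal('1.50'))
    # payload == '{"__type__": "decimal", "__value__": "1.50"}'
    assert loads(payload) == Decimal('1.50')

    # Plain JSON-native values are wrapped with the 'default' discriminator.
    assert dumps('hello') == '{"__type__": "default", "__value__": "hello"}'


    # A custom type can be registered once with its own discriminator.
    class Point:
        def __init__(self, x, y):
            self.x, self.y = x, y


    register_type(
        Point,
        'point',
        encoder=lambda p: f'{p.x},{p.y}',                          # Point -> str
        decoder=lambda s: Point(*(int(v) for v in s.split(','))),  # str -> Point
    )

    restored = loads(dumps(Point(1, 2)))
    assert (restored.x, restored.y) == (1, 2)

The discriminator in the envelope is what lets object_hook pick the matching decoder at load time, without the arbitrary code execution that makes pickle a liability for stored settings.
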
constance/migrations/… (new data migration)
@@ -0,0 +1,53 @@
import logging
import pickle
from base64 import b64decode
from importlib import import_module

from django.db import migrations

from constance import settings
from constance.codecs import dumps

logger = logging.getLogger(__name__)


def import_module_attr(path):
    package, module = path.rsplit('.', 1)
    return getattr(import_module(package), module)


def migrate_pickled_data(apps, schema_editor) -> None:  # pragma: no cover
    Constance = apps.get_model('constance', 'Constance')

    # Re-encode every database-backed value from base64-encoded pickle to JSON.
    for constance in Constance.objects.exclude(value=None):
        constance.value = dumps(pickle.loads(b64decode(constance.value.encode())))  # noqa: S301
        constance.save(update_fields=['value'])

    # Redis backends keep values outside the database, so migrate those keys as well.
    if settings.BACKEND in ('constance.backends.redisd.RedisBackend', 'constance.backends.redisd.CachingRedisBackend'):
        import redis

        _prefix = settings.REDIS_PREFIX
        connection_cls = settings.REDIS_CONNECTION_CLASS
        if connection_cls is not None:
            _rd = import_module_attr(connection_cls)()
        else:
            if isinstance(settings.REDIS_CONNECTION, str):
                _rd = redis.from_url(settings.REDIS_CONNECTION)
            else:
                _rd = redis.Redis(**settings.REDIS_CONNECTION)
        redis_migrated_data = {}
        for key in settings.CONFIG:
            prefixed_key = f'{_prefix}{key}'
            value = _rd.get(prefixed_key)
            if value is not None:
                redis_migrated_data[prefixed_key] = dumps(pickle.loads(value))  # noqa: S301
        for prefixed_key, value in redis_migrated_data.items():
            _rd.set(prefixed_key, value)


class Migration(migrations.Migration):
    dependencies = [('constance', '0002_migrate_from_old_table')]

    operations = [
        migrations.RunPython(migrate_pickled_data),
    ]
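
To show what the data migration above does to a single stored value, here is a minimal sketch of the same transformation outside Django. The Decimal sample is an assumption for illustration; the old base64-pickle format is taken from the b64decode/pickle.loads calls in the migration itself.

    import pickle
    from base64 import b64decode, b64encode
    from decimal import Decimal

    from constance.codecs import dumps

    # Previously, a value was stored as a base64-encoded pickle payload.
    old_db_value = b64encode(pickle.dumps(Decimal('9.99'))).decode()

    # The migration decodes, unpickles, and re-serializes it as discriminated JSON.
    new_db_value = dumps(pickle.loads(b64decode(old_db_value.encode())))  # noqa: S301
    print(new_db_value)  # {"__type__": "decimal", "__value__": "9.99"}

On the Redis backends the same conversion is applied, but only to keys currently listed in settings.CONFIG.
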