2022-11-05 13:45:16 +04:00
|
|
|
import codecs
|
2020-10-16 16:55:33 +08:00
|
|
|
import datetime
|
2020-08-26 18:04:57 +08:00
|
|
|
import json
|
|
|
|
|
import pickle # nosec:B403
|
2020-10-16 16:55:33 +08:00
|
|
|
from decimal import Decimal
|
2023-05-09 12:31:19 +01:00
|
|
|
from typing import Any, Callable, ClassVar, Dict, TypeVar, overload
|
2020-08-26 18:04:57 +08:00
|
|
|
|
2021-09-17 10:19:56 +08:00
|
|
|
import pendulum
|
2021-07-26 16:33:22 +08:00
|
|
|
from fastapi.encoders import jsonable_encoder
|
2023-05-08 16:42:21 +01:00
|
|
|
from pydantic import BaseConfig, ValidationError, fields
|
2022-11-04 17:31:37 +08:00
|
|
|
from starlette.responses import JSONResponse
|
2022-09-28 17:37:05 +08:00
|
|
|
from starlette.templating import _TemplateResponse as TemplateResponse
|
2020-10-16 16:55:33 +08:00
|
|
|
|
2023-05-08 16:42:21 +01:00
|
|
|
# Generic placeholder for the concrete type requested by Coder.decode_as_type.
_T = TypeVar("_T")
|
|
|
|
|
|
|
|
|
|
|
2023-04-27 16:19:02 +01:00
|
|
|
# Converters used by ``object_hook`` to rebuild special values that
# ``JsonEncoder`` tagged with a ``_spec_type`` marker during encoding.
CONVERTERS: dict[str, Callable[[str], Any]] = {
    # ``exact=True`` makes pendulum return a date/datetime matching the
    # input instead of always producing a DateTime.
    "date": lambda raw: pendulum.parse(raw, exact=True),
    "datetime": lambda raw: pendulum.parse(raw, exact=True),
    "decimal": Decimal,
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
class JsonEncoder(json.JSONEncoder):
    """JSON encoder that tags datetime/date/Decimal values so they can be
    restored to their original types on decoding (see ``object_hook``)."""

    def default(self, obj: Any) -> Any:
        # Check order matters: datetime.datetime is a subclass of
        # datetime.date, so it must be matched first.
        for spec_type, spec_cls in (
            ("datetime", datetime.datetime),
            ("date", datetime.date),
            ("decimal", Decimal),
        ):
            if isinstance(obj, spec_cls):
                return {"val": str(obj), "_spec_type": spec_type}
        # Anything else is delegated to FastAPI's generic encoder.
        return jsonable_encoder(obj)
|
2020-10-16 16:55:33 +08:00
|
|
|
|
|
|
|
|
|
2022-10-22 20:59:37 +04:00
|
|
|
def object_hook(obj: Any) -> Any:
    """Reverse of ``JsonEncoder.default``: rebuild tagged special values.

    Dicts without a truthy ``_spec_type`` marker pass through unchanged;
    tagged dicts are rebuilt with the matching ``CONVERTERS`` entry.

    Raises:
        TypeError: if the ``_spec_type`` tag has no registered converter.
    """
    spec_type = obj.get("_spec_type")
    if not spec_type:
        return obj
    if spec_type not in CONVERTERS:
        raise TypeError("Unknown {}".format(spec_type))
    return CONVERTERS[spec_type](obj["val"])
|
|
|
|
|
|
2020-08-26 18:04:57 +08:00
|
|
|
|
|
|
|
|
class Coder:
    """Base class for cache value serializers.

    Subclasses implement ``encode``/``decode``; ``decode_as_type`` layers a
    pydantic validation pass on top of ``decode`` so the cached value can be
    coerced back to an endpoint's declared return type.
    """

    @classmethod
    def encode(cls, value: Any) -> str:
        """Serialize ``value`` to a string suitable for the cache backend."""
        raise NotImplementedError

    @classmethod
    def decode(cls, value: str) -> Any:
        """Deserialize a cached string back into a Python value."""
        raise NotImplementedError

    # (Shared) cache for endpoint return types to Pydantic model fields.
    #
    # Note that subclasses share this cache! If a subclass overrides the
    # decode_as_type method and then stores a different kind of field for a
    # given type, do make sure that the subclass provides its own class
    # attribute for this cache.
    _type_field_cache: ClassVar[Dict[Any, fields.ModelField]] = {}

    # Fix: the implementation below takes ``type_`` keyword-only, but this
    # overload previously declared it positional (and typed as ``_T`` rather
    # than the type object that is actually passed), so callers following the
    # overload would fail at runtime.
    @overload
    @classmethod
    def decode_as_type(cls, value: str, *, type_: type[_T]) -> _T:
        ...

    @overload
    @classmethod
    def decode_as_type(cls, value: str, *, type_: None) -> Any:
        ...

    @classmethod
    def decode_as_type(cls, value: str, *, type_: type[_T] | None) -> _T | Any:
        """Decode value to the specific given type

        The default implementation uses the Pydantic model system to convert the value.

        Raises:
            ValidationError: if the decoded value does not validate as ``type_``.
        """
        result = cls.decode(value)
        if type_ is not None:
            # EAFP: the field for a given type is built once, then reused.
            try:
                field = cls._type_field_cache[type_]
            except KeyError:
                field = cls._type_field_cache[type_] = fields.ModelField(
                    name="body", type_=type_, class_validators=None, model_config=BaseConfig
                )
            result, errors = field.validate(result, {}, loc=())
            if errors is not None:
                # field.validate may return a single error or a list of them.
                if not isinstance(errors, list):
                    errors = [errors]
                raise ValidationError(errors, type_)
        return result
|
|
|
|
|
|
2020-08-26 18:04:57 +08:00
|
|
|
|
|
|
|
|
class JsonCoder(Coder):
    """Coder that serializes cache values as JSON text, tagging special
    types (datetime/date/Decimal) via ``JsonEncoder`` for round-tripping."""

    @classmethod
    def encode(cls, value: Any) -> str:
        # A JSONResponse body is already JSON: reuse it verbatim instead of
        # double-encoding it.
        if isinstance(value, JSONResponse):
            return value.body.decode()
        return json.dumps(value, cls=JsonEncoder)

    @classmethod
    def decode(cls, value: str) -> Any:
        # Fix: previously annotated ``-> str``, but json.loads returns an
        # arbitrary JSON value; ``Any`` matches the base Coder.decode contract.
        return json.loads(value, object_hook=object_hook)
|
2020-08-26 18:04:57 +08:00
|
|
|
|
|
|
|
|
|
|
|
|
|
class PickleCoder(Coder):
    """Coder that round-trips arbitrary picklable values as base64 text."""

    @classmethod
    def encode(cls, value: Any) -> str:
        # Template responses are not picklable; persist the rendered body.
        if isinstance(value, TemplateResponse):
            value = value.body
        pickled = pickle.dumps(value)
        return codecs.encode(pickled, "base64").decode()

    @classmethod
    def decode(cls, value: str) -> Any:
        raw = codecs.decode(value.encode(), "base64")
        return pickle.loads(raw)  # nosec:B403,B301

    @classmethod
    def decode_as_type(cls, value: str, *, type_: Any) -> Any:
        # Pickle already produces the correct type on decoding, no point
        # in paying an extra performance penalty for pydantic to discover
        # the same.
        return cls.decode(value)
|