17 Commits

Author SHA1 Message Date
long2ice
d04be274e9 feat: upgrade deps 2023-02-15 10:43:01 +08:00
long2ice
80563fd6e7 Merge pull request #118 from naoki-jarvisml/var_keyword
Support functions with VAR_KEYWORD parameter
2023-02-15 10:30:12 +08:00
Naoki Shima
98cf8a78a1 adding test coverage 2023-02-15 10:35:41 +09:00
Naoki Shima
01c895dbbb Support functions with VAR_KEYWORD parameter
Decorating a function that has a **kwargs parameter with @cache causes a ValueError.

ValueError: wrong parameter order: variadic keyword parameter before keyword-only parameter

We need to inject the request / response parameters before the VAR_KEYWORD parameter.
2023-02-09 15:14:20 +09:00
long2ice
e3b08dda2c Merge pull request #114 from hackjammer/feature/redisCluster
Add RedisCluster Support
2023-02-01 15:33:11 +08:00
long2ice
552a7695e8 Update fastapi_cache/decorator.py
Co-authored-by: mkdir700 <56359329+mkdir700@users.noreply.github.com>
2023-02-01 15:33:04 +08:00
hackjammer
ea1ffcd7b4 Add logging to decorator.py on backend failures 2023-01-17 12:15:53 +00:00
hackjammer
e8193b5c22 enabled redis in cluster mode 2023-01-15 21:54:16 +00:00
hackjammer
ab26fad604 passthrough for any type of backend exception 2023-01-15 17:07:37 +00:00
long2ice
7a89f28b54 Merge pull request #112 from schmocker/main
add cache-control and etag to header of first response
2023-01-15 12:27:56 +08:00
Tobias Schmocker
334b829a80 Merge branch 'master'
# Conflicts:
#	fastapi_cache/decorator.py
2023-01-14 19:11:42 +01:00
long2ice
62ef8bed37 Merge pull request #109 from Mrreadiness/fix/piclke-coder
Fix Pickle Coder
2023-01-11 21:31:02 +08:00
Ivan Moiseev
9a39db7a73 Merge branch 'long2ice:main' into fix/piclke-coder 2023-01-11 16:26:05 +03:00
Ivan Moiseev
e23289fcbf Merge branch 'main' into fix/piclke-coder 2022-12-08 00:23:39 +04:00
Ivan Moiseev
cb9fe5c065 fix: PickleCoder and add tests for it. 2022-11-05 13:45:16 +04:00
Tobias Schmocker
e5250c7f58 remove private from cache-control 2022-02-04 16:41:42 +01:00
Tobias Schmocker
1795c048d1 add cache-control to response after setting the cache 2022-02-04 16:37:18 +01:00
9 changed files with 1112 additions and 951 deletions

View File

@@ -2,6 +2,12 @@
## 0.2
### 0.2.1
- Fix PickleCoder
- Fix connection failure transparency and add logging
- Add Cache-Control and ETag on first response
- Support Async RedisCluster client from redis-py
### 0.2.0
- Make `request` and `response` optional.

View File

@@ -42,6 +42,13 @@ async def get_date():
async def get_datetime(request: Request, response: Response):
return {"now": pendulum.now()}
@cache(namespace="test")
async def func_kwargs(*unused_args, **kwargs):
return kwargs
@app.get("/kwargs")
async def get_kwargs(name: str):
return await func_kwargs(name, name=name)
@app.get("/sync-me")
@cache(namespace="test")

View File

@@ -1,16 +1,18 @@
from typing import Optional, Tuple
from redis.asyncio.client import Redis
from redis.asyncio.client import AbstractRedis
from redis.asyncio.cluster import AbstractRedisCluster
from fastapi_cache.backends import Backend
class RedisBackend(Backend):
def __init__(self, redis: Redis):
def __init__(self, redis: AbstractRedis):
self.redis = redis
self.is_cluster = isinstance(redis, AbstractRedisCluster)
async def get_with_ttl(self, key: str) -> Tuple[int, str]:
async with self.redis.pipeline(transaction=True) as pipe:
async with self.redis.pipeline(transaction=not self.is_cluster) as pipe:
return await (pipe.ttl(key).get(key).execute())
async def get(self, key: str) -> Optional[str]:

View File

@@ -1,3 +1,4 @@
import codecs
import datetime
import json
import pickle # nosec:B403
@@ -45,7 +46,7 @@ class Coder:
raise NotImplementedError
@classmethod
def decode(cls, value: Any) -> Any:
def decode(cls, value: str) -> Any:
raise NotImplementedError
@@ -57,7 +58,7 @@ class JsonCoder(Coder):
return json.dumps(value, cls=JsonEncoder)
@classmethod
def decode(cls, value: Any) -> str:
def decode(cls, value: str) -> str:
return json.loads(value, object_hook=object_hook)
@@ -66,8 +67,8 @@ class PickleCoder(Coder):
def encode(cls, value: Any) -> str:
if isinstance(value, TemplateResponse):
value = value.body
return str(pickle.dumps(value))
return codecs.encode(pickle.dumps(value), "base64").decode()
@classmethod
def decode(cls, value: Any) -> Any:
return pickle.loads(bytes(value)) # nosec:B403,B301
def decode(cls, value: str) -> Any:
return pickle.loads(codecs.decode(value.encode(), "base64")) # nosec:B403,B301

View File

@@ -1,4 +1,5 @@
import inspect
import logging
import sys
from functools import wraps
from typing import Any, Awaitable, Callable, Optional, Type, TypeVar
@@ -15,6 +16,8 @@ from starlette.responses import Response
from fastapi_cache import FastAPICache
from fastapi_cache.coder import Coder
logger = logging.getLogger(__name__)
logger.addHandler(logging.NullHandler())
P = ParamSpec("P")
R = TypeVar("R")
@@ -45,7 +48,13 @@ def cache(
(param for param in signature.parameters.values() if param.annotation is Response),
None,
)
parameters = [*signature.parameters.values()]
parameters = []
extra_params = []
for p in signature.parameters.values():
if p.kind <= inspect.Parameter.KEYWORD_ONLY:
parameters.append(p)
else:
extra_params.append(p)
if not request_param:
parameters.append(
inspect.Parameter(
@@ -62,6 +71,7 @@ def cache(
kind=inspect.Parameter.KEYWORD_ONLY,
),
)
parameters.extend(extra_params)
if parameters:
signature = signature.replace(parameters=parameters)
func.__signature__ = signature
@@ -125,18 +135,17 @@ def cache(
)
try:
ttl, ret = await backend.get_with_ttl(cache_key)
except ConnectionError:
except Exception:
logger.warning(f"Error retrieving cache key '{cache_key}' from backend:", exc_info=True)
ttl, ret = 0, None
if not request:
if ret is not None:
return coder.decode(ret)
ret = await ensure_async_func(*args, **kwargs)
try:
await backend.set(
cache_key, coder.encode(ret), expire or FastAPICache.get_expire()
)
except ConnectionError:
pass
await backend.set(cache_key, coder.encode(ret), expire)
except Exception:
logger.warning(f"Error setting cache key '{cache_key}' in backend:", exc_info=True)
return ret
if request.method != "GET":
@@ -154,11 +163,16 @@ def cache(
return coder.decode(ret)
ret = await ensure_async_func(*args, **kwargs)
encoded_ret = coder.encode(ret)
try:
await backend.set(cache_key, coder.encode(ret), expire or FastAPICache.get_expire())
except ConnectionError:
pass
await backend.set(cache_key, encoded_ret, expire)
except Exception:
logger.warning(f"Error setting cache key '{cache_key}' in backend:", exc_info=True)
response.headers["Cache-Control"] = f"max-age={expire}"
etag = f"W/{hash(encoded_ret)}"
response.headers["ETag"] = etag
return ret
return inner

1965
poetry.lock generated

File diff suppressed because it is too large Load Diff

View File

@@ -1,6 +1,6 @@
[tool.poetry]
name = "fastapi-cache2"
version = "0.2.0"
version = "0.2.1"
description = "Cache for FastAPI"
authors = ["long2ice <long2ice@gmail.com>"]
license = "Apache-2.0"

22
tests/test_codecs.py Normal file
View File

@@ -0,0 +1,22 @@
from typing import Any
import pytest
from fastapi_cache.coder import PickleCoder
@pytest.mark.parametrize(
"value",
[
1,
"some_string",
(1, 2),
[1, 2, 3],
{"some_key": 1, "other_key": 2},
],
)
def test_pickle_coder(value: Any) -> None:
encoded_value = PickleCoder.encode(value)
assert isinstance(encoded_value, str)
decoded_value = PickleCoder.decode(encoded_value)
assert decoded_value == value

View File

@@ -67,3 +67,9 @@ def test_cache_response_obj() -> None:
assert get_cache_response.json() == {"a": 1}
assert get_cache_response.headers.get("cache-control")
assert get_cache_response.headers.get("etag")
def test_kwargs() -> None:
with TestClient(app) as client:
name = "Jon"
response = client.get("/kwargs", params = {"name": name})
assert response.json() == {"name": name}