Merge pull request #114 from hackjammer/feature/redisCluster

Add RedisCluster Support
This commit is contained in:
long2ice
2023-02-01 15:33:11 +08:00
committed by GitHub
3 changed files with 21 additions and 9 deletions

View File

@@ -2,6 +2,12 @@
 ## 0.2
+### 0.2.1
+- Fix picklecoder
+- Fix connection failure transparency and add logging
+- Add Cache-Control and ETag on first response
+- Support Async RedisCluster client from redis-py
+
 ### 0.2.0
 - Make `request` and `response` optional.

View File

@@ -1,16 +1,18 @@
 from typing import Optional, Tuple
-from redis.asyncio.client import Redis
+from redis.asyncio.client import AbstractRedis
+from redis.asyncio.cluster import AbstractRedisCluster

 from fastapi_cache.backends import Backend


 class RedisBackend(Backend):
-    def __init__(self, redis: Redis):
+    def __init__(self, redis: AbstractRedis):
         self.redis = redis
+        self.is_cluster = isinstance(redis, AbstractRedisCluster)

     async def get_with_ttl(self, key: str) -> Tuple[int, str]:
-        async with self.redis.pipeline(transaction=True) as pipe:
+        async with self.redis.pipeline(transaction=not self.is_cluster) as pipe:
             return await (pipe.ttl(key).get(key).execute())

     async def get(self, key: str) -> Optional[str]:

@@ -25,4 +27,4 @@ class RedisBackend(Backend):
             return await self.redis.eval(lua, numkeys=0)
         elif key:
             return await self.redis.delete(key)
         return 0

View File

@@ -1,4 +1,5 @@
 import inspect
+import logging
 import sys
 from functools import wraps
 from typing import Any, Awaitable, Callable, Optional, Type, TypeVar
@@ -15,6 +16,8 @@ from starlette.responses import Response
 from fastapi_cache import FastAPICache
 from fastapi_cache.coder import Coder

+logger = logging.getLogger(__name__)
+logger.addHandler(logging.NullHandler())

 P = ParamSpec("P")
 R = TypeVar("R")
@@ -125,7 +128,8 @@ def cache(
             )
             try:
                 ttl, ret = await backend.get_with_ttl(cache_key)
-            except ConnectionError:
+            except Exception:
+                logger.warning(f"Error retrieving cache key '{cache_key}' from backend:", exc_info=True)
                 ttl, ret = 0, None
             if not request:
                 if ret is not None:
@@ -133,8 +137,8 @@ def cache(
                 ret = await ensure_async_func(*args, **kwargs)
                 try:
                     await backend.set(cache_key, coder.encode(ret), expire)
-                except ConnectionError:
-                    pass
+                except Exception:
+                    logger.warning(f"Error setting cache key '{cache_key}' in backend:", exc_info=True)
                 return ret

             if request.method != "GET":
@@ -156,8 +160,8 @@ def cache(
                 try:
                     await backend.set(cache_key, encoded_ret, expire)
-                except ConnectionError:
-                    pass
+                except Exception:
+                    logger.warning(f"Error setting cache key '{cache_key}' in backend:", exc_info=True)
                 response.headers["Cache-Control"] = f"max-age={expire}"
                 etag = f"W/{hash(encoded_ret)}"