Merge pull request #82 from uriyyo/feature/run_in_threadpool

Use `run_in_threadpool` instead of `asyncio.run_in_executor`
This commit is contained in:
long2ice
2022-09-10 18:43:40 +08:00
committed by GitHub

View File

@@ -1,21 +1,18 @@
-import asyncio
-from functools import wraps, partial
 import inspect
-from typing import TYPE_CHECKING, Callable, Optional, Type
+from functools import wraps
+from typing import Callable, Optional, Type
 
+from fastapi.concurrency import run_in_threadpool
+
 from fastapi_cache import FastAPICache
 from fastapi_cache.coder import Coder
 
-if TYPE_CHECKING:
-    import concurrent.futures
-
 
 def cache(
     expire: int = None,
     coder: Type[Coder] = None,
     key_builder: Callable = None,
     namespace: Optional[str] = "",
-    executor: Optional["concurrent.futures.Executor"] = None,
 ):
     """
     cache all function
@@ -23,7 +20,6 @@ def cache(
     :param expire:
     :param coder:
     :param key_builder:
-    :param executor:
     :return:
     """
 
@@ -74,8 +70,7 @@ def cache(
             if inspect.iscoroutinefunction(func):
                 ret = await func(*args, **kwargs)
             else:
-                loop = asyncio.get_event_loop()
-                ret = await loop.run_in_executor(executor, partial(func, *args, **kwargs))
+                ret = await run_in_threadpool(func, *args, **kwargs)
 
             await backend.set(cache_key, coder.encode(ret), expire or FastAPICache.get_expire())
             return ret