Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
75 changes: 74 additions & 1 deletion examples/in_memory/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,21 +4,79 @@

import pendulum
import uvicorn
from fastapi import FastAPI
import logging
from fastapi import FastAPI, Query
from fastapi_cache import FastAPICache
from fastapi_cache.backends.inmemory import InMemoryBackend
from fastapi_cache.decorator import cache
from pydantic import BaseModel
from starlette.requests import Request
from starlette.responses import JSONResponse, Response

logger = logging.getLogger(__name__)
from fastapi import HTTPException

@asynccontextmanager
async def lifespan(_: FastAPI) -> AsyncIterator[None]:
    """Application lifespan: set up the in-memory cache backend on startup.

    The backend lives for the whole lifetime of the app; nothing needs
    explicit teardown, so control simply resumes after the ``yield``.
    """
    FastAPICache.init(InMemoryBackend())
    yield


def simulate_cache_failure():
    """Simulate a cache outage by wiping FastAPICache's global settings."""
    # After reset(), FastAPICache's getters raise until init() is called again,
    # which is exactly the failure mode the example endpoints exercise.
    FastAPICache.reset()

def invalid_cache(expire: int = 10, namespace: str = "invalid_cache", continue_on_error: bool = True):
    """Decorator factory that simulates a cache failure around ``cache``.

    The produced wrapper resets FastAPICache before reading its settings, so
    the getters raise and the ``continue_on_error`` handling paths run.

    :param expire: TTL in seconds forwarded to the real ``cache`` decorator.
    :param namespace: cache namespace forwarded to the real ``cache`` decorator.
    :param continue_on_error: when True, fall back to calling the wrapped
        function without caching instead of failing with HTTP 503.
    :return: a decorator to apply to a zero-argument async function.
    """
    # Use the module logger rather than print() so output respects the
    # application's logging configuration.
    logger.debug("Invalid cache decorator called, continue_on_error=%s", continue_on_error)

    def decorator(func):
        async def wrapper():
            # Simulate cache failure by resetting the cache
            simulate_cache_failure()

            try:
                # Touch every FastAPICache getter: on an uninitialised cache
                # these raise AssertionError, which is the failure we want to
                # provoke. The return values are intentionally discarded.
                FastAPICache.get_prefix()
                FastAPICache.get_coder()
                FastAPICache.get_expire()
                FastAPICache.get_key_builder()
                FastAPICache.get_backend()
                FastAPICache.get_cache_status_header()

                # Cache settings were available; run the wrapped function.
                return await func()

            except AssertionError as e:
                logger.error(f"Error initializing cache: {e}")

                if continue_on_error:
                    logger.warning("Continuing without caching")
                    return await func()  # Continue without caching
                raise HTTPException(status_code=503, detail="Cache initialization failed.")

            except Exception as e:
                # Any other failure is unexpected: surface it as a 503.
                logger.critical(f"Critical error initializing cache: {e}")
                raise HTTPException(status_code=503, detail=f"Cache failure: {str(e)}")

        # Apply the real cache decorator with the provided parameters
        # (expire and namespace) on top of the failure-simulating wrapper.
        return cache(expire=expire, namespace=namespace, continue_on_error=continue_on_error)(wrapper)

    return decorator


def cache_failure_example(expire: int = 5, namespace: str = "test_namespace", continue_on_error: bool = False):
    """Build and return an async function decorated with ``invalid_cache``.

    The decorator parameters are bound from this function's arguments so a
    caller can vary the TTL, namespace, and error-handling mode per request.
    """
    decorate = invalid_cache(
        expire=expire,
        namespace=namespace,
        continue_on_error=continue_on_error,
    )

    async def inner_cache_failure_example():
        return {"message": "This should simulate a cache failure."}

    # Apply the decorator explicitly and hand back the resulting callable.
    return decorate(inner_cache_failure_example)


app = FastAPI(lifespan=lifespan)

ret = 0
Expand Down Expand Up @@ -136,5 +194,20 @@ def namespaced_injection(
}


##############################################CACHE FAILURE SIMULATION##############################################

@app.get("/cache_failure_example")
async def cache_failure_endpoint(
    expire: int = Query(5, ge=1),  # TTL in seconds; validated to be at least 1
    namespace: str = Query("test_namespace"),
    continue_on_error: bool = Query(False)
):
    """Demonstrate cache-failure handling driven by query parameters.

    Builds a freshly decorated function from the supplied parameters,
    invokes it, and returns its result (or a 503 on cache failure when
    ``continue_on_error`` is false).
    """
    handler = cache_failure_example(
        expire=expire,
        namespace=namespace,
        continue_on_error=continue_on_error,
    )
    return await handler()


if __name__ == "__main__":
    # Run the example app with uvicorn; reload=True restarts on code changes.
    uvicorn.run("main:app", reload=True)
35 changes: 23 additions & 12 deletions fastapi_cache/decorator.py
Original file line number Diff line number Diff line change
Expand Up @@ -90,6 +90,7 @@ def cache(
key_builder: Optional[KeyBuilder] = None,
namespace: str = "",
injected_dependency_namespace: str = "__fastapi_cache",
continue_on_error: bool = False,
) -> Callable[[Union[Callable[P, Awaitable[R]], Callable[P, R]]], Callable[P, Awaitable[Union[R, Response]]]]:
"""
cache all function
Expand All @@ -98,6 +99,7 @@ def cache(
:param expire:
:param coder:
:param key_builder:
:param continue_on_error:

:return:
"""
Expand Down Expand Up @@ -154,13 +156,23 @@ async def ensure_async_func(*args: P.args, **kwargs: P.kwargs) -> R:

if _uncacheable(request):
return await ensure_async_func(*args, **kwargs)

prefix = FastAPICache.get_prefix()
coder = coder or FastAPICache.get_coder()
expire = expire or FastAPICache.get_expire()
key_builder = key_builder or FastAPICache.get_key_builder()
backend = FastAPICache.get_backend()
cache_status_header = FastAPICache.get_cache_status_header()

try:
prefix = FastAPICache.get_prefix()
coder = coder or FastAPICache.get_coder()
expire = expire or FastAPICache.get_expire()
key_builder = key_builder or FastAPICache.get_key_builder()
backend = FastAPICache.get_backend()
cache_status_header = FastAPICache.get_cache_status_header()
except Exception as e:
logger.error(f"Error initializing cache: {e}")
logger.info(continue_on_error)
if continue_on_error == True:
logger.warning("Continuing without caching")
return await ensure_async_func(*args, **kwargs)
else:
raise


cache_key = key_builder(
func,
Expand All @@ -173,14 +185,13 @@ async def ensure_async_func(*args: P.args, **kwargs: P.kwargs) -> R:
if isawaitable(cache_key):
cache_key = await cache_key
assert isinstance(cache_key, str) # noqa: S101 # assertion is a type guard

try:
ttl, cached = await backend.get_with_ttl(cache_key)
except Exception:
logger.warning(
f"Error retrieving cache key '{cache_key}' from backend:",
exc_info=True,
)
if continue_on_error:
return await ensure_async_func(*args, **kwargs)
logger.warning(f"Cache retrieval error for key '{cache_key}':", exc_info=True)
ttl, cached = 0, None

if cached is None or (request is not None and request.headers.get("Cache-Control") == "no-cache") : # cache miss
Expand Down
22 changes: 22 additions & 0 deletions tests/test_decorator.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
import time
from http import HTTPStatus
from typing import Any, Generator
from unittest.mock import patch

import pendulum
import pytest
Expand All @@ -11,6 +12,14 @@
from fastapi_cache.backends.inmemory import InMemoryBackend


@pytest.fixture
def cache_unavailable():
    """Force InMemoryBackend.get to raise, simulating an unavailable cache."""
    target = "fastapi_cache.backends.inmemory.InMemoryBackend.get"
    # Passing side_effect directly to patch() is equivalent to assigning it
    # on the returned mock object inside the with-block.
    with patch(target, side_effect=Exception("Simulated cache failure")):
        yield


@pytest.fixture(autouse=True)
def _init_cache() -> Generator[Any, Any, None]: # pyright: ignore[reportUnusedFunction]
FastAPICache.init(InMemoryBackend())
Expand Down Expand Up @@ -136,3 +145,16 @@ def test_cache_control() -> None:

response = client.get("/cached_put")
assert response.json() == {"value": 2}

####################CACHE UNAVAILABLE TESTS####################
def test_cache_failure():
with TestClient(app) as client:
# Simulate cache failure with continue_on_error=False
response = client.get("/cache_failure_example")
assert response.status_code == 503 # Expect a 503 error due to cache failure
assert response.json() == {"detail": "Cache initialization failed."}

# Now test with continue_on_error=True (should continue without caching)
response = client.get("/cache_failure_example?continue_on_error=true")
assert response.status_code == 200 # Expect success even with cache failure
assert response.json() == {"message": "This should simulate a cache failure."}