Response caching plugin

The Cache plugin can cache any response object except streaming responses, and the cache can be keyed by different request parameters. It can be used as follows.

docs_source_code/plugin/cache_plugin/flask_with_cache_plugin_demo.py
import time

from flask import Flask, Response, make_response
from redis import Redis  # type: ignore

from pait.app.flask import pait
from pait.app.flask.plugin.cache_response import CacheRespExtraParam, CacheResponsePlugin
from pait.field import Query
from pait.model.response import HtmlResponseModel


@pait(
    response_model_list=[HtmlResponseModel],
    post_plugin_list=[CacheResponsePlugin.build(cache_time=10, enable_cache_name_merge_param=True)],
)
def demo(key1: str = Query.i(extra_param_list=[CacheRespExtraParam()]), key2: str = Query.i()) -> Response:
    return make_response(str(time.time()), 200)


app = Flask("demo")
CacheResponsePlugin.set_redis_to_app(app, Redis(decode_responses=True))
app.add_url_rule("/api/demo", view_func=demo, methods=["GET"])


if __name__ == "__main__":
    app.run(port=8000)
docs_source_code/plugin/cache_plugin/starlette_with_cache_plugin_demo.py
import time
from typing import Any

from redis.asyncio import Redis  # type: ignore
from starlette.applications import Starlette
from starlette.responses import HTMLResponse

from pait.app.starlette import pait
from pait.app.starlette.plugin.cache_response import CacheRespExtraParam, CacheResponsePlugin
from pait.field import Query
from pait.model.response import HtmlResponseModel


@pait(
    response_model_list=[HtmlResponseModel],
    post_plugin_list=[CacheResponsePlugin.build(cache_time=10, enable_cache_name_merge_param=True)],
)
async def demo(key1: str = Query.i(extra_param_list=[CacheRespExtraParam()]), key2: str = Query.i()) -> HTMLResponse:
    return HTMLResponse(str(time.time()), 200)


app = Starlette()
app.add_route("/api/demo", demo, methods=["GET"])


def before_start(*args: Any, **kwargs: Any) -> None:
    CacheResponsePlugin.set_redis_to_app(app, Redis(decode_responses=True))


app.add_event_handler("startup", before_start)


if __name__ == "__main__":
    import uvicorn

    uvicorn.run(app)
docs_source_code/plugin/cache_plugin/sanic_with_cache_plugin_demo.py
import time
from typing import Any

from redis.asyncio import Redis  # type: ignore
from sanic import Sanic, response

from pait.app.sanic import pait
from pait.app.sanic.plugin.cache_response import CacheRespExtraParam, CacheResponsePlugin
from pait.field import Query
from pait.model.response import HtmlResponseModel


@pait(
    response_model_list=[HtmlResponseModel],
    post_plugin_list=[CacheResponsePlugin.build(cache_time=10, enable_cache_name_merge_param=True)],
)
async def demo(
    key1: str = Query.i(extra_param_list=[CacheRespExtraParam()]), key2: str = Query.i()
) -> response.HTTPResponse:
    return response.html(str(time.time()), 200)


app = Sanic("demo")
app.add_route(demo, "/api/demo", methods=["GET"])


def before_start(*args: Any, **kwargs: Any) -> None:
    CacheResponsePlugin.set_redis_to_app(app, Redis(decode_responses=True))


app.before_server_start(before_start)


if __name__ == "__main__":
    app.run(port=8000)
docs_source_code/plugin/cache_plugin/tornado_with_cache_plugin_demo.py
import time

from redis.asyncio import Redis  # type: ignore
from tornado.ioloop import IOLoop
from tornado.web import Application, RequestHandler

from pait.app.tornado import pait
from pait.app.tornado.plugin.cache_response import CacheRespExtraParam, CacheResponsePlugin
from pait.field import Query
from pait.model.response import HtmlResponseModel


class DemoHandler(RequestHandler):
    @pait(
        response_model_list=[HtmlResponseModel],
        post_plugin_list=[CacheResponsePlugin.build(cache_time=10, enable_cache_name_merge_param=True)],
    )
    async def get(self, key1: str = Query.i(extra_param_list=[CacheRespExtraParam()]), key2: str = Query.i()) -> None:
        self.write(str(time.time()))


app: Application = Application([(r"/api/demo", DemoHandler)])
CacheResponsePlugin.set_redis_to_app(app, Redis(decode_responses=True))


if __name__ == "__main__":
    app.listen(8000)
    IOLoop.instance().start()

The route function uses the cache plugin with a cache time of 10 seconds and with enable_cache_name_merge_param enabled, so the cache name includes request parameter values. In addition, only the key1 parameter uses the CacheRespExtraParam extra parameter, so the plugin builds the cache name only from the parameters marked with CacheRespExtraParam, rather than from all of them.

After running the code and executing the curl commands, you can see that the route function returns the same content when the request parameters are the same:

curl http://127.0.0.1:8000/api/demo\?key1\=1\&key2\=1
1695627610.021101
curl http://127.0.0.1:8000/api/demo\?key1\=1\&key2\=1
1695627610.021101
curl http://127.0.0.1:8000/api/demo\?key1\=2\&key2\=1
1695627613.0265439
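
If you want to see which cache Key the plugin created for these requests, you can list the matching keys in Redis. The following is only an illustrative sketch: it assumes the demo above is using the default local Redis database, and the exact Key format (described in the parameter list below) may carry an extra prefix added by the plugin, so a loose match pattern is used.

from redis import Redis

redis = Redis(decode_responses=True)
# The cache name is based on the route function name ("demo"); because key1 is
# marked with CacheRespExtraParam, only key1's value is merged into the name,
# so a loose pattern such as "*demo*" should match the cached entry.
print(redis.keys("*demo*"))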

In addition to the cache_time and enable_cache_name_merge_param parameters, the cache plugin supports several other parameters, described below (a combined sketch follows the list):

  • redis: Specifies the Redis instance used by the cache plugin; it is recommended to set the Redis instance via the CacheResponsePlugin.set_redis_to_app method instead.
  • name: Specifies the cache Key of the route function; if this value is empty, the name of the route function is used as the cache Key.
  • enable_cache_name_merge_param: If True, the construction of the cache Key will include the other parameter values, as in the following route function:
    from pait.app.any import pait
    from pait.plugin.cache_response import CacheResponsePlugin
    from pait.field import Query
    
    @pait(post_plugin_list=[CacheResponsePlugin.build(cache_time=10, enable_cache_name_merge_param=True)])
    async def demo(uid: str = Query.i(), name: str = Query.i()) -> None:
        pass
    
    When the request url carries ?uid=10086&name=so1n, the cache plugin generates a cache Key of demo:10086:so1n. However, if a parameter uses the CacheRespExtraParam extra parameter, the cache Key only includes the values of the parameters that use CacheRespExtraParam, as in the following route function:
    from pait.app.any import pait
    from pait.plugin.cache_response import CacheResponsePlugin, CacheRespExtraParam
    from pait.field import Query
    
    @pait(post_plugin_list=[CacheResponsePlugin.build(cache_time=10, enable_cache_name_merge_param=True)])
    async def demo(uid: str = Query.i(extra_param_list=[CacheRespExtraParam()]), name: str = Query.i()) -> None:
        pass
    
    When the request url carries ?uid=10086&name=so1n, the cache plugin generates a cache Key of demo:10086.
  • include_exc: Receives a Tuple of exception types. If the exception raised by the route function is an instance of one of these types, the exception is cached; otherwise, the exception is raised as usual.
  • cache_time: The cache time, in seconds.
  • timeout: To prevent cache conflicts in highly concurrent scenarios, the cache plugin uses Redis locks to avoid resource contention; timeout is the maximum time the lock may be held.
  • sleep: When the lock is held by another request, the current request sleeps for the specified amount of time before trying to acquire the lock again, repeating until it acquires the lock or times out.
  • blocking_timeout: The maximum time to spend trying to acquire the lock; if None, it waits forever.
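
For reference, the following sketch combines the parameters listed above into a single build call. It is only a sketch: the values are illustrative rather than pait's defaults, and the name, include_exc, timeout, sleep, and blocking_timeout values here are assumptions chosen for the example; the redis parameter can alternatively be set via CacheResponsePlugin.set_redis_to_app, as in the demos above.

from redis.asyncio import Redis  # type: ignore

from pait.app.any import pait
from pait.field import Query
from pait.plugin.cache_response import CacheRespExtraParam, CacheResponsePlugin


@pait(
    post_plugin_list=[
        CacheResponsePlugin.build(
            redis=Redis(decode_responses=True),  # or set globally via CacheResponsePlugin.set_redis_to_app
            name="demo_cache",                   # custom cache Key instead of the route function name
            enable_cache_name_merge_param=True,  # merge request parameter values into the cache Key
            include_exc=(ValueError,),           # cache a ValueError raised by the route function
            cache_time=10,                       # cache the response for 10 seconds
            timeout=5,                           # hold the Redis lock for at most 5 seconds
            sleep=0.1,                           # wait 0.1 seconds between attempts to acquire the lock
            blocking_timeout=3,                  # give up acquiring the lock after 3 seconds
        )
    ]
)
async def demo(uid: str = Query.i(extra_param_list=[CacheRespExtraParam()]), name: str = Query.i()) -> None:
    pass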