feat: add Prometheus metrics for Redis cache

Add a Prometheus counter to monitor cache hits/misses for search queries.

Signed-off-by: moson-mo <mo-son@mailbox.org>
Author: moson-mo <mo-son@mailbox.org>
Date:   2023-07-04 09:40:39 +02:00
Parent: 3acfb08a0f
Commit: 814ccf6b04
GPG key ID: 4A4760AB4EE15296
2 changed files with 51 additions and 2 deletions

@@ -1,5 +1,6 @@
 import pickle
 
+from prometheus_client import Counter
 from sqlalchemy import orm
 
 from aurweb import config
@@ -7,6 +8,11 @@ from aurweb.aur_redis import redis_connection
 
 _redis = redis_connection()
 
+# Prometheus metrics
+SEARCH_REQUESTS = Counter(
+    "search_requests", "Number of search requests by cache hit/miss", ["cache"]
+)
+
 
 async def db_count_cache(key: str, query: orm.Query, expire: int = None) -> int:
     """Store and retrieve a query.count() via redis cache.
@@ -24,7 +30,7 @@ async def db_count_cache(key: str, query: orm.Query, expire: int = None) -> int:
     return int(result)
 
 
-async def db_query_cache(key: str, query: orm.Query, expire: int = None):
+async def db_query_cache(key: str, query: orm.Query, expire: int = None) -> list:
     """Store and retrieve query results via redis cache.
 
     :param key: Redis key
@@ -34,10 +40,13 @@
     """
     result = _redis.get(key)
     if result is None:
+        SEARCH_REQUESTS.labels(cache="miss").inc()
         if _redis.dbsize() > config.getint("cache", "max_search_entries", 50000):
             return query.all()
-        _redis.set(key, (result := pickle.dumps(query.all())), ex=expire)
+        _redis.set(key, (result := pickle.dumps(query.all())))
         if expire:
             _redis.expire(key, expire)
+    else:
+        SEARCH_REQUESTS.labels(cache="hit").inc()
 
     return pickle.loads(result)
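
For reference, the new SEARCH_REQUESTS metric is an ordinary labeled prometheus_client Counter registered on the default registry, so its behavior can be checked without an aurweb or Redis setup. The sketch below is not the commit's test file (which is not shown here); it redefines an identical counter so it runs with nothing but prometheus_client installed, and the "_total" suffix and per-label time series it demonstrates are standard prometheus_client exposition behavior rather than anything specific to this commit.

from prometheus_client import REGISTRY, Counter, generate_latest

# Stand-in with the same name, help text and label as the counter added above.
SEARCH_REQUESTS = Counter(
    "search_requests", "Number of search requests by cache hit/miss", ["cache"]
)

# What db_query_cache() now does on a cache miss and on a cache hit:
SEARCH_REQUESTS.labels(cache="miss").inc()
SEARCH_REQUESTS.labels(cache="hit").inc()

# Counters are exposed with a "_total" suffix, one time series per label value:
#   search_requests_total{cache="miss"} 1.0
#   search_requests_total{cache="hit"} 1.0
print(generate_latest(REGISTRY).decode())

# Individual samples can also be read back directly, e.g. in a test:
assert REGISTRY.get_sample_value("search_requests_total", {"cache": "miss"}) == 1.0

Because the counter registers itself on the default registry at import time, no extra wiring is needed for it to appear wherever the application already exposes its Prometheus metrics.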