Added redirector caching #21
Some checks failed
Test API before pr merge / test-lint (pull_request) Failing after 10s
Test before pr merge / test-translation-lint (pull_request) Successful in 39s
Test before pr merge / test-lint (pull_request) Successful in 42s
Test before pr merge / test-before-merge (pull_request) Successful in 1m56s

This commit is contained in:
Sven Heidemann 2025-05-02 13:32:15 +02:00
parent 38e8dc801e
commit 5c9ff4b813
2 changed files with 155 additions and 5 deletions

View File

@@ -3,10 +3,8 @@ FROM python:3.12.8-alpine
WORKDIR /app
COPY ./src/api/ ./api
COPY ./src/api/middleware ./api/middleware
COPY ./src/core/ ./core
COPY ./src/data/ ./data
COPY ./src/service/ ./service
COPY ./src/static/ ./static
COPY ./src/templates/ ./templates

View File

@@ -1,5 +1,6 @@
import asyncio
import sys
from datetime import datetime
from typing import Optional
import requests
@@ -18,13 +19,72 @@ logger = Logger(__name__)
templates = Jinja2Templates(directory="templates")
class Cache:
    """In-memory TTL cache for resolved short-url records.

    Values are stored per short-url key together with the time they were
    written; entries older than CACHING_MINUTES are considered expired.
    NOTE: get() intentionally still returns a just-expired value once
    (stale-while-revalidate) — the caller is expected to detect expiry via
    check_expired() and trigger a background refresh.
    """

    # Cache lifetime in minutes, configurable via the CACHING_MINUTES env var.
    CACHING_MINUTES = Environment.get("CACHING_MINUTES", int, 5)

    # {shortUrlKey: ShortUrl node dict as returned by the GraphQL API}
    _cache: dict[str, dict] = {}
    # {shortUrlKey: time the entry was last written}
    _cache_timestamps: dict[str, datetime] = {}

    @classmethod
    def is_expired(cls, key: str) -> bool:
        """Return True when the entry for *key* is missing or older than the TTL."""
        logger.trace(f"Check if cache for {key} is expired")
        timestamp = cls._cache_timestamps.get(key)
        if timestamp is None:
            # No timestamp means the key was never cached (or already removed).
            return True
        diff = datetime.now() - timestamp
        expired = diff.total_seconds() > cls.CACHING_MINUTES * 60
        if expired:
            logger.debug(f"Cache for {key} is expired")
        return expired

    @classmethod
    def remove(cls, key: str):
        """Drop *key* from both the value and timestamp stores (no-op if absent)."""
        logger.trace(f"Remove cache for {key}")
        cls._cache.pop(key, None)
        cls._cache_timestamps.pop(key, None)

    @classmethod
    def check_expired(cls, key: str) -> bool:
        """Evict *key* when expired; return True if it was expired."""
        logger.trace(f"Check expired cache for {key}")
        if cls.is_expired(key):
            cls.remove(key)
            return True
        return False

    @classmethod
    def get(cls, key: str) -> Optional[dict]:
        """Return the cached value for *key*, or None if not cached.

        An expired entry is evicted, but its (stale) value is still returned
        once so the caller can serve it while refreshing in the background.
        """
        logger.debug(f"Get cache for {key}")
        value = cls._cache.get(key, None)
        if value is not None and cls.is_expired(key):
            logger.debug(f"Cache for {key} expired")
            cls.remove(key)
        return value

    @classmethod
    def set(cls, key: str, value: dict):
        """Store *value* under *key* and stamp it with the current time."""
        logger.debug(f"Set cache for {key} with value {value}")
        cls._cache[key] = value
        cls._cache_timestamps[key] = datetime.now()

    @classmethod
    def clear(cls):
        """Empty the cache completely.

        Bug fix: the timestamp store is cleared as well — previously old
        timestamps survived clear(), leaking memory and leaving
        is_expired() reporting on entries that no longer exist.
        """
        logger.debug("Clear cache")
        cls._cache = {}
        cls._cache_timestamps = {}
async def index(request: Request):
    """Root/fallback endpoint: always renders the 404 template."""
    context = {"request": request}
    return templates.TemplateResponse("404.html", context, status_code=404)
async def handle_request(request: Request):
path = request.path_params["path"]
short_url = _find_short_url_by_path(path)
short_url = await _find_short_url_by_path(path)
if short_url is None:
return templates.TemplateResponse(
"404.html", {"request": request}, status_code=404
@@ -69,7 +129,13 @@ async def handle_request(request: Request):
return await _handle_short_url(request, short_url)
def _find_short_url_by_path(path: str) -> Optional[dict]:
async def _find_short_url_by_path(path: str) -> Optional[dict]:
from_cache = Cache.get(path)
if from_cache is not None:
if Cache.check_expired(path):
asyncio.create_task(_find_short_url_by_path(path))
return from_cache
api_url = Environment.get("API_URL", str)
if api_url is None:
raise Exception("API_URL is not set")
@@ -145,6 +211,8 @@ def _find_short_url_by_path(path: str) -> Optional[dict]:
if len(nodes) == 0:
return None
for node in nodes:
Cache.set(node["shortUrl"], node)
return nodes[0]
@@ -206,11 +274,95 @@ def _get_redirect_url(url: str) -> str:
return url
def _get_all_short_urls():
    """Fetch every active short url from the GraphQL API and warm the cache.

    Issues one GraphQL request with two queries: short urls belonging to a
    non-deleted group, and short urls with no group at all. Every returned
    node is written into the Cache keyed by its ``shortUrl``.

    Raises:
        Exception: when API_URL or API_KEY is not configured.
        ValueError: when the API response does not have the expected shape.
    """
    logger.info("Loading all short urls to cache")

    api_url = Environment.get("API_URL", str)
    if api_url is None:
        raise Exception("API_URL is not set")
    api_key = Environment.get("API_KEY", str)
    if api_key is None:
        raise Exception("API_KEY is not set")

    # Renamed from `request`: requests.post returns a *response* object.
    response = requests.post(
        f"{api_url}/graphql",
        json={
            "query": f"""
                query getShortUrlsForCache {{
                    shortUrls(filter: [{{ deleted: {{ equal: false }} }}, {{ group: {{ deleted: {{ equal: false }} }} }}]) {{
                        nodes {{
                            id
                            shortUrl
                            targetUrl
                            description
                            group {{
                                id
                                name
                            }}
                            domain {{
                                id
                                name
                            }}
                            loadingScreen
                            deleted
                        }}
                    }}
                    shortUrlsWithoutGroup: shortUrls(filter: [{{ deleted: {{ equal: false }} }}, {{ group: {{ isNull: true }} }}]) {{
                        nodes {{
                            id
                            shortUrl
                            targetUrl
                            description
                            group {{
                                id
                                name
                            }}
                            domain {{
                                id
                                name
                            }}
                            loadingScreen
                            deleted
                        }}
                    }}
                }}
            """,
            "variables": {},
        },
        headers={"Authorization": f"API-Key {api_key}"},
    )
    data = response.json()
    if "errors" in data:
        # Single-quoted index keeps the f-string portable to Python < 3.12
        # (same-quote nesting requires PEP 701).
        logger.warning(f"Failed to get all short urls -> {data['errors']}")

    # Validate the full path before indexing: the previous check indexed
    # data["data"]["shortUrlsWithoutGroup"] before verifying the key exists,
    # so a missing key raised KeyError instead of the intended ValueError.
    payload = data.get("data")
    if (
        payload is None
        or "shortUrls" not in payload
        or "nodes" not in payload["shortUrls"]
        or "shortUrlsWithoutGroup" not in payload
        or "nodes" not in payload["shortUrlsWithoutGroup"]
    ):
        raise ValueError("Failed to get all short urls")

    nodes = [
        *payload["shortUrls"]["nodes"],
        *payload["shortUrlsWithoutGroup"]["nodes"],
    ]
    for node in nodes:
        Cache.set(node["shortUrl"], node)
    logger.info(f"Loaded {len(nodes)} short urls to cache")
async def configure():
    """One-time startup configuration: logging level, environment, cache warm-up."""
    log_level = Environment.get("LOG_LEVEL", str, "info")
    Logger.set_level(log_level)

    env_name = Environment.get("ENVIRONMENT", str, "production")
    Environment.set_environment(env_name)
    logger.info(f"Environment: {Environment.get_environment()}")

    # Pre-populate the short-url cache so the first requests are served from memory.
    _get_all_short_urls()
routes = [
Route("/", endpoint=index),