Merge pull request 'Changed to asgi' (#14) from dev_asgi into dev
Reviewed-on: #14 Closes #13
This commit is contained in: commit 1eb878477d
29  .gitea/workflows/test_api_before_merge.yaml  (new file)
@@ -0,0 +1,29 @@
name: Test before pr merge
run-name: Test before pr merge
on:
  pull_request:
    types:
      - opened
      - edited
      - reopened
      - synchronize
      - ready_for_review

jobs:
  test-lint:
    runs-on: [ runner ]
    container: git.sh-edraft.de/sh-edraft.de/act-runner:latest
    steps:
      - name: Clone Repository
        uses: https://github.com/actions/checkout@v3
        with:
          token: ${{ secrets.CI_ACCESS_TOKEN }}

      - name: Installing dependencies
        working-directory: ./api
        run: |
          python3.12 -m pip install -r requirements-dev.txt

      - name: Checking black
        working-directory: ./api
        run: python3.12 -m black src --check
@@ -1,39 +0,0 @@
name: Test before pr merge
run-name: Test before pr merge
on:
  pull_request:
    types:
      - opened
      - edited
      - reopened
      - synchronize
      - ready_for_review

jobs:
  test-before-merge:
    runs-on: [ runner ]
    container: git.sh-edraft.de/sh-edraft.de/act-runner:latest
    steps:
      - name: Clone Repository
        uses: https://github.com/actions/checkout@v3
        with:
          token: ${{ secrets.CI_ACCESS_TOKEN }}

      - name: Setup node
        uses: https://github.com/actions/setup-node@v3

      - name: Installing dependencies
        run: npm ci

      - name: Checking eslint
        run: npm run lint

      - name: Setup chrome
        run: |
          wget -q -O - https://dl-ssl.google.com/linux/linux_signing_key.pub | apt-key add -
          echo "deb http://dl.google.com/linux/chrome/deb/ stable main" > /etc/apt/sources.list.d/google.list
          apt-get update
          apt-get install -y google-chrome-stable xvfb

      - name: Testing
        run: npm run test:ci
79  .gitea/workflows/test_web_before_merge.yaml  (new file)
@@ -0,0 +1,79 @@
name: Test before pr merge
run-name: Test before pr merge
on:
  pull_request:
    types:
      - opened
      - edited
      - reopened
      - synchronize
      - ready_for_review

jobs:
  test-lint:
    runs-on: [ runner ]
    container: git.sh-edraft.de/sh-edraft.de/act-runner:latest
    steps:
      - name: Clone Repository
        uses: https://github.com/actions/checkout@v3
        with:
          token: ${{ secrets.CI_ACCESS_TOKEN }}

      - name: Setup node
        uses: https://github.com/actions/setup-node@v3

      - name: Installing dependencies
        working-directory: ./web
        run: npm ci

      - name: Checking eslint
        working-directory: ./web
        run: npm run lint

  test-translation-lint:
    runs-on: [ runner ]
    container: git.sh-edraft.de/sh-edraft.de/act-runner:latest
    steps:
      - name: Clone Repository
        uses: https://github.com/actions/checkout@v3
        with:
          token: ${{ secrets.CI_ACCESS_TOKEN }}

      - name: Setup node
        uses: https://github.com/actions/setup-node@v3

      - name: Installing dependencies
        working-directory: ./web
        run: npm ci

      - name: Checking translations
        working-directory: ./web
        run: npm run lint:translations

  test-before-merge:
    runs-on: [ runner ]
    container: git.sh-edraft.de/sh-edraft.de/act-runner:latest
    steps:
      - name: Clone Repository
        uses: https://github.com/actions/checkout@v3
        with:
          token: ${{ secrets.CI_ACCESS_TOKEN }}

      - name: Setup node
        uses: https://github.com/actions/setup-node@v3

      - name: Installing dependencies
        working-directory: ./web
        run: npm ci

      - name: Setup chrome
        working-directory: ./web
        run: |
          wget -q -O - https://dl-ssl.google.com/linux/linux_signing_key.pub | apt-key add -
          echo "deb http://dl.google.com/linux/chrome/deb/ stable main" > /etc/apt/sources.list.d/google.list
          apt-get update
          apt-get install -y google-chrome-stable xvfb

      - name: Testing
        working-directory: ./web
        run: npm run test:ci
@@ -1,10 +1,12 @@
ariadne==0.23.0
eventlet==0.37.0
broadcaster==0.3.1
graphql-core==3.2.5
Flask[async]==3.1.0
Flask-Cors==5.0.0
async-property==0.2.2
python-keycloak==4.7.3
psycopg[binary]==3.2.3
psycopg-pool==3.2.4
Werkzeug==3.1.3
uvicorn==0.34.0
starlette==0.46.0
requests==2.32.3
python-keycloak==5.3.1
python-multipart==0.0.20
websockets==15.0
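Context (not part of the diff): the dependency changes above swap Flask, Flask-Cors and Werkzeug for Starlette and uvicorn. A minimal sketch of what an ASGI entry point could look like after this switch; the module layout, host and port are illustrative assumptions, not taken from this repository.

import uvicorn
from starlette.applications import Starlette

# Routes would come from Route.registered_routes once the route modules are imported.
app = Starlette(routes=[])

if __name__ == "__main__":
    # uvicorn serves the ASGI app directly; Flask's WSGI stack is no longer involved.
    uvicorn.run(app, host="0.0.0.0", port=8000)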
@@ -1,78 +1,45 @@
import importlib
import os
import time
from uuid import uuid4
from typing import Optional

from flask import Flask, request, g
from starlette.applications import Starlette
from starlette.requests import Request
from starlette.responses import JSONResponse

from api.route import Route
from core.environment import Environment
from core.logger import APILogger

app = Flask(__name__)
logger = APILogger(__name__)


def filter_relevant_headers(headers: dict) -> dict:
relevant_keys = {
"Content-Type",
"Host",
"Connection",
"User-Agent",
"Origin",
"Referer",
"Accept",
}
return {key: value for key, value in headers.items() if key in relevant_keys}
class API:
app: Optional[Starlette] = None

@classmethod
def create(cls, app: Starlette):
cls.app = app

@app.before_request
async def log_request():
g.request_id = uuid4()
g.start_time = time.time()
logger.debug(
f"Request {g.request_id}: {request.method}@{request.path} from {request.remote_addr}"
)
user = await Route.get_user()
@staticmethod
async def handle_exception(request: Request, exc: Exception):
logger.error(f"Request {request.state.request_id}", exc)
return JSONResponse({"error": str(exc)}, status_code=500)

request_info = {
"headers": filter_relevant_headers(dict(request.headers)),
"args": request.args.to_dict(),
"form-data": request.form.to_dict(),
"payload": request.get_json(silent=True),
"user": f"{user.id}-{user.keycloak_id}" if user else None,
"files": (
{key: file.filename for key, file in request.files.items()}
if request.files
else None
),
}
@staticmethod
def get_allowed_origins():
client_urls = Environment.get("CLIENT_URLS", str)
if client_urls is None or client_urls == "":
allowed_origins = ["*"]
logger.warning("No allowed origins specified, allowing all origins")
else:
allowed_origins = client_urls.split(",")

logger.trace(f"Request {g.request_id}: {request_info}")
return allowed_origins


@app.after_request
def log_after_request(response):
# calc the time it took to process the request
duration = (time.time() - g.start_time) * 1000
logger.info(
f"Request finished {g.request_id}: {response.status_code}-{request.method}@{request.path} from {request.remote_addr} in {duration:.2f}ms"
)
return response


@app.errorhandler(Exception)
def handle_exception(e):
logger.error(f"Request {g.request_id}", e)
return {"error": str(e)}, 500


# used to import all routes
routes_dir = os.path.join(os.path.dirname(__file__), "routes")
for filename in os.listdir(routes_dir):
if filename.endswith(".py") and filename != "__init__.py":
module_name = f"api.routes.{filename[:-3]}"
importlib.import_module(module_name)

# Explicitly register the routes
for route, (view_func, options) in Route.registered_routes.items():
app.add_url_rule(route, view_func=view_func, **options)
@staticmethod
def import_routes():
# used to import all routes
routes_dir = os.path.join(os.path.dirname(__file__), "routes")
for filename in os.listdir(routes_dir):
if filename.endswith(".py") and filename != "__init__.py":
module_name = f"api.routes.{filename[:-3]}"
importlib.import_module(module_name)
5  api/src/api/broadcast.py  (new file)
@@ -0,0 +1,5 @@
from typing import Optional

from broadcaster import Broadcast

broadcast: Optional[Broadcast] = Broadcast("memory://")
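Context (not part of the diff): the broadcaster instance above backs the new GraphQL subscriptions. A small usage sketch of the encode/broadcaster API with the in-memory backend; the channel name and message are illustrative.

import asyncio
from broadcaster import Broadcast


async def main():
    bc = Broadcast("memory://")
    await bc.connect()
    async with bc.subscribe(channel="demo") as subscriber:
        await bc.publish(channel="demo", message="hello")
        async for event in subscriber:
            print(event.message)  # -> "hello"
            break
    await bc.disconnect()


asyncio.run(main())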
@@ -1,9 +1,9 @@
from flask import jsonify
from starlette.responses import JSONResponse


def unauthorized():
return jsonify({"error": "Unauthorized"}), 401
return JSONResponse({"error": "Unauthorized"}, 401)


def forbidden():
return jsonify({"error": "Unauthorized"}), 401
return JSONResponse({"error": "Unauthorized"}, 401)
0  api/src/api/middleware/__init__.py  (new file)
73  api/src/api/middleware/logging.py  (new file)
@@ -0,0 +1,73 @@
import time
from uuid import uuid4

from starlette.middleware.base import BaseHTTPMiddleware
from starlette.requests import Request
from starlette.responses import Response

from api.route import Route
from core.logger import APILogger

logger = APILogger("api.api")


class LoggingMiddleware(BaseHTTPMiddleware):
    async def dispatch(self, request: Request, call_next):
        await self._log_request(request)
        response = await call_next(request)
        await self._log_after_request(request, response)

        return response

    @staticmethod
    def _filter_relevant_headers(headers: dict) -> dict:
        relevant_keys = {
            "content-type",
            "host",
            "connection",
            "user-agent",
            "origin",
            "referer",
            "accept",
        }
        return {key: value for key, value in headers.items() if key in relevant_keys}

    @classmethod
    async def _log_request(cls, request: Request):
        request.state.request_id = uuid4()
        request.state.start_time = time.time()
        logger.debug(
            f"Request {request.state.request_id}: {request.method}@{request.url.path} from {request.client.host}"
        )
        user = await Route.get_user()

        request_info = {
            "headers": cls._filter_relevant_headers(dict(request.headers)),
            "args": dict(request.query_params),
            "form-data": (
                await request.form()
                if request.headers.get("content-type")
                == "application/x-www-form-urlencoded"
                else None
            ),
            "payload": (
                await request.json()
                if request.headers.get("content-length") == "0"
                else None
            ),
            "user": f"{user.id}-{user.keycloak_id}" if user else None,
            "files": (
                {key: file.filename for key, file in (await request.form()).items()}
                if await request.form()
                else None
            ),
        }

        logger.trace(f"Request {request.state.request_id}: {request_info}")

    @staticmethod
    async def _log_after_request(request: Request, response: Response):
        duration = (time.time() - request.state.start_time) * 1000
        logger.info(
            f"Request finished {request.state.request_id}: {response.status_code}-{request.method}@{request.url.path} from {request.client.host} in {duration:.2f}ms"
        )
28  api/src/api/middleware/request.py  (new file)
@@ -0,0 +1,28 @@
from contextvars import ContextVar
from typing import Optional, Union

from starlette.middleware.base import BaseHTTPMiddleware
from starlette.requests import Request
from starlette.websockets import WebSocket

_request_context: ContextVar[Union[Request, None]] = ContextVar("request", default=None)


class RequestMiddleware(BaseHTTPMiddleware):
    async def dispatch(self, request: Request, call_next):
        _request_context.set(request)

        from core.logger import APILogger

        logger = APILogger(__name__)
        logger.trace("Set new current request")
        response = await call_next(request)
        return response


def set_request(request: Union[Request, WebSocket, None]):
    _request_context.set(request)


def get_request() -> Optional[Request]:
    return _request_context.get()
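Context (not part of the diff): a sketch of how the two new middlewares could be attached to a Starlette app; where exactly the project does this is not visible in this diff.

from starlette.applications import Starlette
from starlette.middleware import Middleware

from api.middleware.logging import LoggingMiddleware
from api.middleware.request import RequestMiddleware

app = Starlette(
    middleware=[
        Middleware(RequestMiddleware),  # stores the current request in a ContextVar
        Middleware(LoggingMiddleware),  # logs request start, end and duration
    ]
)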
26  api/src/api/middleware/websocket.py  (new file)
@@ -0,0 +1,26 @@
from ariadne.asgi.handlers import GraphQLTransportWSHandler
from starlette.datastructures import MutableHeaders
from starlette.websockets import WebSocket

from api.middleware.request import set_request
from core.logger import APILogger

logger = APILogger("WS")


class AuthenticatedGraphQLTransportWSHandler(GraphQLTransportWSHandler):

    def __init__(self, *args, **kwargs):
        super().__init__(*args, on_connect=self.on_connect, **kwargs)

    @staticmethod
    async def on_connect(ws: WebSocket, message: dict):
        if "Authorization" not in message:
            return True

        mutable_headers = MutableHeaders()
        mutable_headers["Authorization"] = message.get("Authorization", "")
        ws._headers = mutable_headers

        set_request(ws)
        return True
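Context (not part of the diff): Ariadne's ASGI GraphQL app accepts a websocket_handler, which is where this subclass would plug in. The mount path below is an assumption, not taken from this repository.

from ariadne.asgi import GraphQL
from starlette.applications import Starlette
from starlette.routing import Mount

from api.middleware.websocket import AuthenticatedGraphQLTransportWSHandler
from api_graphql.service.schema import schema

# The custom handler injects the Authorization value from the connection_init
# message into the websocket headers before resolvers run.
graphql_app = GraphQL(
    schema,
    websocket_handler=AuthenticatedGraphQLTransportWSHandler(),
)
app = Starlette(routes=[Mount("/api/graphql", graphql_app)])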
@@ -2,12 +2,12 @@ import functools
from functools import wraps
from inspect import iscoroutinefunction
from typing import Callable, Union, Optional
from urllib.request import Request

from flask import request
from flask_cors import cross_origin
from starlette.requests import Request
from starlette.routing import Route as StarletteRoute

from api.errors import unauthorized
from api.middleware.request import get_request
from api.route_user_extension import RouteUserExtension
from core.environment import Environment
from data.schemas.administration.api_key import ApiKey
@@ -16,10 +16,10 @@ from data.schemas.administration.user import User


class Route(RouteUserExtension):
registered_routes = {}
registered_routes: list[StarletteRoute] = []

@classmethod
async def get_api_key(cls) -> ApiKey:
async def get_api_key(cls, request: Request) -> ApiKey:
auth_header = request.headers.get("Authorization", None)
api_key = auth_header.split(" ")[1]
return await apiKeyDao.find_by_key(api_key)
@@ -35,11 +35,13 @@ class Route(RouteUserExtension):
return api_key_from_db is not None and not api_key_from_db.deleted

@classmethod
async def _get_auth_type(cls, auth_header: str) -> Optional[Union[User, ApiKey]]:
async def _get_auth_type(
cls, request: Request, auth_header: str
) -> Optional[Union[User, ApiKey]]:
if auth_header.startswith("Bearer "):
return await cls.get_user()
elif auth_header.startswith("API-Key "):
return await cls.get_api_key()
return await cls.get_api_key(request)
elif (
auth_header.startswith("DEV-User ")
and Environment.get_environment() == "development"
@@ -49,11 +51,15 @@ class Route(RouteUserExtension):

@classmethod
async def get_authenticated_user_or_api_key(cls) -> Union[User, ApiKey]:
request = get_request()
if request is None:
raise ValueError("No request found")

auth_header = request.headers.get("Authorization", None)
if not auth_header:
raise Exception("No Authorization header found")

user_or_api_key = await cls._get_auth_type(auth_header)
user_or_api_key = await cls._get_auth_type(request, auth_header)
if user_or_api_key is None:
raise Exception("Invalid Authorization header")
return user_or_api_key
@@ -62,14 +68,22 @@ class Route(RouteUserExtension):
async def get_authenticated_user_or_api_key_or_default(
cls,
) -> Optional[Union[User, ApiKey]]:
request = get_request()
if request is None:
return None

auth_header = request.headers.get("Authorization", None)
if not auth_header:
return None

return await cls._get_auth_type(auth_header)
return await cls._get_auth_type(request, auth_header)

@classmethod
async def is_authorized(cls) -> bool:
request = get_request()
if request is None:
return False

auth_header = request.headers.get("Authorization", None)
if not auth_header:
return False
@@ -99,26 +113,25 @@ class Route(RouteUserExtension):
)

@wraps(f)
async def decorator(*args, **kwargs):
async def decorator(request: Request, *args, **kwargs):
if skip_in_dev and Environment.get_environment() == "development":
if iscoroutinefunction(f):
return await f(*args, **kwargs)
return f(*args, **kwargs)
return await f(request, *args, **kwargs)
return f(request, *args, **kwargs)

if not await cls.is_authorized():
return unauthorized()

if iscoroutinefunction(f):
return await f(*args, **kwargs)
return f(*args, **kwargs)
return await f(request, *args, **kwargs)
return f(request, *args, **kwargs)

return decorator

@classmethod
def route(cls, path=None, **kwargs):
def inner(fn):
cross_origin(fn)
cls.registered_routes[path] = (fn, kwargs)
cls.registered_routes.append(StarletteRoute(path, fn, **kwargs))
return fn

return inner
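Context (not part of the diff): with registered_routes now a list of StarletteRoute objects, route registration could look like the sketch below; the ping handler and its path are hypothetical.

from starlette.applications import Starlette
from starlette.requests import Request
from starlette.responses import JSONResponse

from api.route import Route


@Route.route("/api/ping", methods=["GET"])
async def ping(request: Request):
    # Starlette handlers receive the Request explicitly instead of Flask's globals.
    return JSONResponse({"pong": True})


# The collected StarletteRoute objects can be handed straight to the app.
app = Starlette(routes=Route.registered_routes)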
@@ -1,10 +1,11 @@
from typing import Optional

from flask import request, Request, has_request_context
from keycloak import KeycloakAuthenticationError, KeycloakConnectionError
from starlette.requests import Request

from api.auth.keycloak_client import Keycloak
from api.auth.keycloak_user import KeycloakUser
from api.middleware.request import get_request
from core.get_value import get_value
from core.logger import Logger
from data.schemas.administration.user import User
@@ -19,8 +20,8 @@ logger = Logger(__name__)
class RouteUserExtension:

@classmethod
def _get_user_id_from_token(cls) -> Optional[str]:
token = cls.get_token()
def _get_user_id_from_token(cls, request: Request) -> Optional[str]:
token = cls.get_token(request)
if not token:
return None

@@ -34,7 +35,7 @@ class RouteUserExtension:
return get_value(user_info, "sub", str)

@staticmethod
def get_token() -> Optional[str]:
def get_token(request: Request) -> Optional[str]:
if "Authorization" not in request.headers:
return None

@@ -45,23 +46,24 @@ class RouteUserExtension:

@classmethod
async def get_user(cls) -> Optional[User]:
if not has_request_context():
request = get_request()
if request is None:
return None

user_id = cls._get_user_id_from_token()
user_id = cls._get_user_id_from_token(request)
if not user_id:
return None

user = await userDao.find_by_keycloak_id(user_id)
if user is None:
return None

return user
return await userDao.find_by_keycloak_id(user_id)

@classmethod
async def get_dev_user(cls) -> Optional[User]:
request = get_request()
if request is None:
return None

return await userDao.find_single_by(
[{User.keycloak_id: cls.get_token()}, {User.deleted: False}]
[{User.keycloak_id: cls.get_token(request)}, {User.deleted: False}]
)

@classmethod
@@ -71,56 +73,6 @@ class RouteUserExtension:
user = await cls.get_dev_user()
return user

@classmethod
def _flatten_groups(cls, groups):
flat_list = []
for group in groups:
flat_list.append(group)
if "subGroups" in group and group["subGroups"]:
flat_list.extend(cls._flatten_groups(group["subGroups"]))
return flat_list

@classmethod
async def _map_keycloak_groups_with_roles(cls, user: User):
try:
roles = {x.name: x for x in await roleDao.get_all()}
groups = cls._flatten_groups(Keycloak.admin.get_groups(full_hierarchy=True))
groups_with_role = [x["name"] for x in groups if x["name"] in roles.keys()]

user_groups_with_role = [
x["name"]
for x in Keycloak.admin.get_user_groups(user.keycloak_id)
if x["name"] in roles.keys()
]
user_roles = set(
x.name for x in await user.roles if x.name in groups_with_role
)
missing_groups = set(user_groups_with_role) - set(user_roles)
missing_roles = set(user_roles) - set(user_groups_with_role)

if len(missing_groups) > 0:
await roleUserDao.create_many(
[
RoleUser(0, (await roleDao.get_by_name(group)).id, user.id)
for group in missing_groups
]
)

if len(missing_roles) > 0:
await roleUserDao.delete_many(
[
await roleUserDao.get_single_by(
[
{RoleUser.role_id: roles[role].id},
{RoleUser.user_id: user.id},
]
)
for role in missing_roles
]
)
except Exception as e:
logger.error("Failed to map user groups", e)

@classmethod
async def _create_user(cls, kc_user: KeycloakUser):
try:
@@ -140,8 +92,8 @@ class RouteUserExtension:
logger.error("Failed to find or create user", e)

@classmethod
async def verify_login(cls, req: Request) -> bool:
auth_header = req.headers.get("Authorization", None)
async def verify_login(cls, request: Request) -> bool:
auth_header = request.headers.get("Authorization", None)

if not auth_header or not auth_header.startswith("Bearer "):
return False
@@ -155,11 +107,8 @@ class RouteUserExtension:

user = await cls.get_user()
if user is None:
u_id = await cls._create_user(KeycloakUser(user_info))
await cls._map_keycloak_groups_with_roles(await userDao.get_by_id(u_id))
await cls._create_user(KeycloakUser(user_info))
return True
else:
await cls._map_keycloak_groups_with_roles(user)

if user.deleted:
return False
@@ -1,7 +1,8 @@
from uuid import uuid4

from flask import send_file
from werkzeug.exceptions import NotFound
from starlette.requests import Request
from starlette.responses import FileResponse
from starlette.exceptions import HTTPException

from api.route import Route
from core.logger import APILogger
@@ -9,19 +10,23 @@ from core.logger import APILogger

logger = APILogger(__name__)


@Route.get(f"/api/files/<path:file_path>")
def get_file(file_path: str):
@Route.get("/api/files/{file_path:path}")
async def get_file(request: Request):
file_path = request.path_params["file_path"]
name = file_path
if "/" in file_path:
name = file_path.split("/")[-1]

try:
return send_file(
f"../files/{file_path}", download_name=name, as_attachment=True
return FileResponse(
path=f"files/{file_path}",
filename=name,
media_type="application/octet-stream",
)
except NotFound:
return {"error": "File not found"}, 404
except Exception as e:
error_id = uuid4()
logger.error(f"Error {error_id} getting file {file_path}", e)
return {"error": f"File error. ErrorId: {error_id}"}, 500
except HTTPException as e:
if e.status_code == 404:
return {"error": "File not found"}, 404
else:
error_id = uuid4()
logger.error(f"Error {error_id} getting file {file_path}", e)
return {"error": f"File error. ErrorId: {error_id}"}, 500
@@ -1,5 +1,6 @@
from ariadne import graphql
from flask import request, jsonify
from starlette.requests import Request
from starlette.responses import JSONResponse

from api.route import Route
from api_graphql.service.schema import schema
@@ -10,11 +11,11 @@ logger = Logger(__name__)


@Route.post(f"{BasePath}")
async def graphql_endpoint():
data = request.get_json()
async def graphql_endpoint(request: Request):
data = await request.json()

# Note: Passing the request to the context is optional.
# In Flask, the current request is always accessible as flask.request
# In Starlette, the current request is accessible as request
success, result = await graphql(schema, data, context_value=request)

status_code = 200
@@ -24,4 +25,4 @@ async def graphql_endpoint():
]
status_code = max(status_codes, default=200)

return jsonify(result), status_code
return JSONResponse(result, status_code=status_code)
@@ -1,4 +1,6 @@
from ariadne.explorer import ExplorerPlayground
from starlette.requests import Request
from starlette.responses import HTMLResponse

from api.route import Route
from core.environment import Environment
@@ -10,7 +12,7 @@ logger = Logger(__name__)

@Route.get(f"{BasePath}/playground")
@Route.authorize(skip_in_dev=True)
async def playground():
async def playground(r: Request):
if Environment.get_environment() != "development":
return "", 403

@@ -19,7 +21,6 @@ async def playground():
if dev_user:
request_global_headers = {f"Authorization": f"DEV-User {dev_user}"}

return (
ExplorerPlayground(request_global_headers=request_global_headers).html(None),
200,
return HTMLResponse(
ExplorerPlayground(request_global_headers=request_global_headers).html(None)
)
@@ -1,7 +1,16 @@
from starlette.requests import Request
from starlette.responses import JSONResponse

from api.route import Route
from core.configuration.feature_flags import FeatureFlags
from core.configuration.feature_flags_enum import FeatureFlagsEnum
from version import VERSION


@Route.get(f"/api/version")
def version():
return VERSION
async def version(r: Request):
feature = await FeatureFlags.has_feature(FeatureFlagsEnum.version_endpoint)
if not feature:
return JSONResponse("DISABLED", status_code=403)

return JSONResponse(VERSION)
@@ -4,6 +4,7 @@ from api_graphql.abc.filter.bool_filter import BoolFilter
from api_graphql.abc.filter.int_filter import IntFilter
from api_graphql.abc.filter.string_filter import StringFilter
from api_graphql.abc.filter_abc import FilterABC
from api_graphql.filter.fuzzy_filter import FuzzyFilter


class DbModelFilterABC[T](FilterABC[T]):
@@ -18,3 +19,5 @@ class DbModelFilterABC[T](FilterABC[T]):
self.add_field("editor", IntFilter)
self.add_field("createdUtc", StringFilter, "created")
self.add_field("updatedUtc", StringFilter, "updated")

self.add_field("fuzzy", FuzzyFilter)
@@ -16,7 +16,7 @@ class MutationABC(QueryABC):
self,
name: str,
mutation_name: str,
require_any_permission: list[Permissions] = None,
require_any_permission=None,
public: bool = False,
):
"""
@@ -27,6 +27,8 @@ class MutationABC(QueryABC):
:param bool public: Define if the field can resolve without authentication
:return:
"""
if require_any_permission is None:
require_any_permission = []
from api_graphql.definition import QUERIES

self.field(
@@ -4,12 +4,13 @@ from enum import Enum
from types import NoneType
from typing import Callable, Type, get_args, Any, Union

from ariadne import ObjectType
from ariadne import ObjectType, SubscriptionType
from graphql import GraphQLResolveInfo
from typing_extensions import deprecated

from api.route import Route
from api_graphql.abc.collection_filter_abc import CollectionFilterABC
from api_graphql.abc.field_abc import FieldABC
from api_graphql.abc.input_abc import InputABC
from api_graphql.abc.sort_abc import Sort
from api_graphql.field.collection_field import CollectionField
@@ -20,6 +21,7 @@ from api_graphql.field.mutation_field import MutationField
from api_graphql.field.mutation_field_builder import MutationFieldBuilder
from api_graphql.field.resolver_field import ResolverField
from api_graphql.field.resolver_field_builder import ResolverFieldBuilder
from api_graphql.field.subscription_field import SubscriptionField
from api_graphql.service.collection_result import CollectionResult
from api_graphql.service.exceptions import (
UnauthorizedException,
@@ -29,6 +31,7 @@ from api_graphql.service.exceptions import (
from api_graphql.service.query_context import QueryContext
from api_graphql.typing import TRequireAnyPermissions, TRequireAnyResolvers
from core.logger import APILogger
from core.string import first_to_lower
from service.permission.permissions_enum import Permissions

logger = APILogger(__name__)
@@ -40,6 +43,7 @@ class QueryABC(ObjectType):
@abstractmethod
def __init__(self, name: str = __name__):
ObjectType.__init__(self, name)
self._subscriptions: dict[str, SubscriptionType] = {}

@staticmethod
async def _authorize():
@@ -67,12 +71,12 @@ class QueryABC(ObjectType):
*args,
**kwargs,
):
info = args[0]

if len(permissions) > 0:
user = await Route.get_authenticated_user_or_api_key_or_default()
perms = await user.permissions
has_perms = [await user.has_permission(x) for x in permissions]
if user is not None and all(
has_perms
[await user.has_permission(x) for x in permissions]
):
return

@@ -134,7 +138,12 @@ class QueryABC(ObjectType):
skip = kwargs["skip"]

collection = await field.dao.find_by(filters, sorts, take, skip)
res = CollectionResult(await field.dao.count(), len(collection), collection)
if field.direct_result:
return collection

res = CollectionResult(
await field.dao.count(filters), len(collection), collection
)
return res

async def collection_wrapper(*args, **kwargs):
@@ -171,11 +180,12 @@ class QueryABC(ObjectType):
)

async def resolver_wrapper(*args, **kwargs):
return (
result = (
await field.resolver(*args, **kwargs)
if iscoroutinefunction(field.resolver)
else field.resolver(*args, **kwargs)
)
return result

if isinstance(field, DaoField):
resolver = dao_wrapper
@@ -205,6 +215,13 @@ class QueryABC(ObjectType):

resolver = input_wrapper

elif isinstance(field, SubscriptionField):

async def sub_wrapper(sub: QueryABC, info: GraphQLResolveInfo, **kwargs):
return await resolver_wrapper(sub, info, **kwargs)

resolver = sub_wrapper

else:
raise ValueError(f"Unknown field type: {field.name}")

@@ -222,7 +239,12 @@ class QueryABC(ObjectType):
result = await resolver(*args, **kwargs)

if field.require_any is not None:
await self._require_any(result, *field.require_any, *args, **kwargs)
await self._require_any(
result,
*field.require_any,
*args,
**kwargs,
)

return result

@@ -252,6 +274,9 @@ class QueryABC(ObjectType):
self.field(
MutationFieldBuilder(name)
.with_resolver(f)
.with_change_broadcast(
f"{first_to_lower(self.name.replace("Mutation", ""))}Change"
)
.with_input(input_type, input_key)
.with_require_any_permission(require_any_permission)
.with_public(public)
@@ -273,6 +298,8 @@ class QueryABC(ObjectType):
for f in filters:
collection = list(filter(lambda x: f.filter(x), collection))

total_count = len(collection)

if sort is not None:

def f_sort(x: object, k: str):
51  api/src/api_graphql/abc/subscription_abc.py  (new file)
@@ -0,0 +1,51 @@
from abc import abstractmethod
from asyncio import iscoroutinefunction

from ariadne import SubscriptionType

from api.middleware.request import get_request
from api_graphql.abc.query_abc import QueryABC
from api_graphql.field.subscription_field_builder import SubscriptionFieldBuilder
from core.logger import APILogger

logger = APILogger(__name__)


class SubscriptionABC(SubscriptionType, QueryABC):

    @abstractmethod
    def __init__(self):
        SubscriptionType.__init__(self)

    def subscribe(self, builder: SubscriptionFieldBuilder):
        field = builder.build()

        async def wrapper(*args, **kwargs):
            if not field.public:
                await self._authorize()

            if (
                field.require_any is None
                and not field.public
                and field.require_any_permission
            ):
                await self._require_any_permission(field.require_any_permission)

            result = (
                await field.resolver(*args, **kwargs)
                if iscoroutinefunction(field.resolver)
                else field.resolver(*args, **kwargs)
            )

            if field.require_any is not None:
                await self._require_any(
                    result,
                    *field.require_any,
                    *args,
                    **kwargs,
                )

            return result

        self.set_field(field.name, wrapper)
        self.set_source(field.name, field.generator)
@@ -4,6 +4,7 @@ import os
from api_graphql.abc.db_model_query_abc import DbModelQueryABC
from api_graphql.abc.mutation_abc import MutationABC
from api_graphql.abc.query_abc import QueryABC
from api_graphql.abc.subscription_abc import SubscriptionABC
from api_graphql.query import Query


@@ -19,7 +20,7 @@ def import_graphql_schema_part(part: str):
import_graphql_schema_part("queries")
import_graphql_schema_part("mutations")

sub_query_classes = [DbModelQueryABC, MutationABC]
sub_query_classes = [DbModelQueryABC, MutationABC, SubscriptionABC]
query_classes = [
*[y for x in sub_query_classes for y in x.__subclasses__()],
*[x for x in QueryABC.__subclasses__() if x not in sub_query_classes],
@@ -20,6 +20,7 @@ class DaoField(FieldABC):
dao: DataAccessObjectABC = None,
filter_type: Type[FilterABC] = None,
sort_type: Type[T] = None,
direct_result: bool = False,
):
FieldABC.__init__(self, name, require_any_permission, require_any, public)
self._name = name
@@ -28,6 +29,7 @@ class DaoField(FieldABC):
self._dao = dao
self._filter_type = filter_type
self._sort_type = sort_type
self._direct_result = direct_result

@property
def dao(self) -> Optional[DataAccessObjectABC]:
@@ -42,3 +44,7 @@ class DaoField(FieldABC):
@property
def sort_type(self) -> Optional[Type[T]]:
return self._sort_type

@property
def direct_result(self) -> bool:
return self._direct_result
@@ -15,6 +15,7 @@ class DaoFieldBuilder(FieldBuilderABC):
self._dao = None
self._filter_type = None
self._sort_type = None
self._direct_result = False

def with_dao(self, dao: DataAccessObjectABC) -> Self:
assert dao is not None, "dao cannot be None"
@@ -31,6 +32,10 @@ class DaoFieldBuilder(FieldBuilderABC):
self._sort_type = sort_type
return self

def with_direct_result(self) -> Self:
self._direct_result = True
return self

def build(self) -> DaoField:
assert self._dao is not None, "dao cannot be None"
return DaoField(
@@ -41,4 +46,5 @@ class DaoFieldBuilder(FieldBuilderABC):
self._dao,
self._filter_type,
self._sort_type,
self._direct_result,
)
@@ -1,7 +1,9 @@
from asyncio import iscoroutinefunction
from typing import Self, Type

from ariadne.types import Resolver

from api.broadcast import broadcast
from api_graphql.abc.field_builder_abc import FieldBuilderABC
from api_graphql.abc.input_abc import InputABC
from api_graphql.field.mutation_field import MutationField
@@ -18,9 +20,41 @@ class MutationFieldBuilder(FieldBuilderABC):

def with_resolver(self, resolver: Resolver) -> Self:
assert resolver is not None, "resolver cannot be None"

self._resolver = resolver
return self

def with_broadcast(self, source: str):
assert self._resolver is not None, "resolver cannot be None for broadcast"

resolver = self._resolver

async def resolver_wrapper(*args, **kwargs):
result = (
await resolver(*args, **kwargs)
if iscoroutinefunction(resolver)
else resolver(*args, **kwargs)
)
await broadcast.publish(f"{source}", result)
return result

def with_change_broadcast(self, source: str):
assert self._resolver is not None, "resolver cannot be None for broadcast"

resolver = self._resolver

async def resolver_wrapper(*args, **kwargs):
result = (
await resolver(*args, **kwargs)
if iscoroutinefunction(resolver)
else resolver(*args, **kwargs)
)
await broadcast.publish(f"{source}", {})
return result

self._resolver = resolver_wrapper
return self

def with_input(self, input_type: Type[InputABC], input_key: str = None) -> Self:
self._input_type = input_type
self._input_key = input_key
@@ -16,11 +16,17 @@ class ResolverField(FieldABC):
require_any: TRequireAny = None,
public: bool = False,
resolver: Resolver = None,
direct_result: bool = False,
):
FieldABC.__init__(self, name, require_any_permission, require_any, public)

self._resolver = resolver
self._direct_result = direct_result

@property
def resolver(self) -> Optional[Resolver]:
return self._resolver

@property
def direct_result(self) -> bool:
return self._direct_result
@@ -12,12 +12,17 @@ class ResolverFieldBuilder(FieldBuilderABC):
FieldBuilderABC.__init__(self, name)

self._resolver = None
self._direct_result = False

def with_resolver(self, resolver: Resolver) -> Self:
assert resolver is not None, "resolver cannot be None"
self._resolver = resolver
return self

def with_direct_result(self) -> Self:
self._direct_result = True
return self

def build(self) -> ResolverField:
assert self._resolver is not None, "resolver cannot be None"
return ResolverField(
@@ -26,4 +31,5 @@ class ResolverFieldBuilder(FieldBuilderABC):
self._require_any,
self._public,
self._resolver,
self._direct_result,
)
32  api/src/api_graphql/field/subscription_field.py  (new file)
@@ -0,0 +1,32 @@
from typing import Optional

from ariadne.types import Resolver

from api_graphql.abc.field_abc import FieldABC
from api_graphql.typing import TRequireAny
from service.permission.permissions_enum import Permissions


class SubscriptionField(FieldABC):

    def __init__(
        self,
        name: str,
        require_any_permission: list[Permissions] = None,
        require_any: TRequireAny = None,
        public: bool = False,
        resolver: Resolver = None,
        generator: Resolver = None,
    ):
        FieldABC.__init__(self, name, require_any_permission, require_any, public)

        self._resolver = resolver
        self._generator = generator

    @property
    def resolver(self) -> Optional[Resolver]:
        return self._resolver

    @property
    def generator(self) -> Optional[Resolver]:
        return self._generator
46  api/src/api_graphql/field/subscription_field_builder.py  (new file)
@@ -0,0 +1,46 @@
from typing import Self, AsyncGenerator

from ariadne.types import Resolver

from api.broadcast import broadcast
from api_graphql.abc.field_builder_abc import FieldBuilderABC
from api_graphql.field.subscription_field import SubscriptionField


class SubscriptionFieldBuilder(FieldBuilderABC):

    def __init__(self, name: str):
        FieldBuilderABC.__init__(self, name)

        self._resolver = None
        self._generator = None

    def with_resolver(self, resolver: Resolver) -> Self:
        assert resolver is not None, "resolver cannot be None"
        self._resolver = resolver
        return self

    def with_generator(self, generator: Resolver) -> Self:
        assert generator is not None, "generator cannot be None"
        self._generator = generator
        return self

    def build(self) -> SubscriptionField:
        assert self._resolver is not None, "resolver cannot be None"
        if self._generator is None:

            async def generator(*args, **kwargs) -> AsyncGenerator[str, None]:
                async with broadcast.subscribe(channel=self._name) as subscriber:
                    async for message in subscriber:
                        yield message

            self._generator = generator

        return SubscriptionField(
            self._name,
            self._require_any_permission,
            self._require_any,
            self._public,
            self._resolver,
            self._generator,
        )
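Context (not part of the diff): a hypothetical subscription wired up with the new builder. The field name "ping" mirrors subscription.gql; the pass-through resolver and the with_public helper from the shared field builder are assumptions.

from api_graphql.abc.subscription_abc import SubscriptionABC
from api_graphql.field.subscription_field_builder import SubscriptionFieldBuilder


class Subscription(SubscriptionABC):
    def __init__(self):
        SubscriptionABC.__init__(self)

        # The default generator subscribes to the broadcast channel named after
        # the field, so publishing to "ping" drives this subscription.
        self.subscribe(
            SubscriptionFieldBuilder("ping")
            .with_resolver(lambda message, *_, **__: message)
            .with_public(True)
        )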
15  api/src/api_graphql/filter/fuzzy_filter.py  (new file)
@@ -0,0 +1,15 @@
from typing import Optional

from api_graphql.abc.filter_abc import FilterABC


class FuzzyFilter(FilterABC):
    def __init__(
        self,
        obj: Optional[dict],
    ):
        FilterABC.__init__(self, obj)

        self.add_field("fields", list)
        self.add_field("term", str)
        self.add_field("threshold", int)
@@ -9,6 +9,6 @@ class ShortUrlFilter(DbModelFilterABC):
):
DbModelFilterABC.__init__(self, obj)

self.add_field("short_url", StringFilter)
self.add_field("target_url", StringFilter)
self.add_field("shortUrl", StringFilter, db_name="short_url")
self.add_field("targetUrl", StringFilter, db_name="target_url")
self.add_field("description", StringFilter)
@@ -21,11 +21,21 @@ input ApiKeySort {
identifier: SortOrder

deleted: SortOrder
editorId: SortOrder
editor: UserSort
createdUtc: SortOrder
updatedUtc: SortOrder
}

enum ApiKeyFuzzyFields {
identifier
}

input ApiKeyFuzzy {
fields: [ApiKeyFuzzyFields]
term: String
threshold: Int
}

input ApiKeyFilter {
id: IntFilter
identifier: StringFilter
@@ -26,10 +26,22 @@ input DomainSort {
updatedUtc: SortOrder
}

enum DomainFuzzyFields {
name
}

input DomainFuzzy {
fields: [DomainFuzzyFields]
term: String
threshold: Int
}

input DomainFilter {
id: IntFilter
name: StringFilter

fuzzy: DomainFuzzy

deleted: BooleanFilter
editor: IntFilter
createdUtc: DateFilter
19  api/src/api_graphql/graphql/feature_flag.gql  (new file)
@@ -0,0 +1,19 @@
type FeatureFlag implements DbModel {
    id: ID
    key: String
    value: Boolean

    deleted: Boolean
    editor: User
    createdUtc: String
    updatedUtc: String
}

type FeatureFlagMutation {
    change(input: FeatureFlagInput!): FeatureFlag
}

input FeatureFlagInput {
    key: String!
    value: Boolean!
}
@@ -27,10 +27,22 @@ input GroupSort {
updatedUtc: SortOrder
}

enum GroupFuzzyFields {
name
}

input GroupFuzzy {
fields: [GroupFuzzyFields]
term: String
threshold: Int
}

input GroupFilter {
id: IntFilter
name: StringFilter

fuzzy: GroupFuzzy

deleted: BooleanFilter
editor: IntFilter
createdUtc: DateFilter
@@ -7,4 +7,8 @@ type Mutation {
group: GroupMutation
domain: DomainMutation
shortUrl: ShortUrlMutation

setting: SettingMutation
userSetting: UserSettingMutation
featureFlag: FeatureFlagMutation
}
@@ -14,4 +14,8 @@ type Query {
domains(filter: [DomainFilter], sort: [DomainSort], skip: Int, take: Int): DomainResult
groups(filter: [GroupFilter], sort: [GroupSort], skip: Int, take: Int): GroupResult
shortUrls(filter: [ShortUrlFilter], sort: [ShortUrlSort], skip: Int, take: Int): ShortUrlResult

settings(key: String): [Setting]
userSettings(key: String): [Setting]
featureFlags(key: String): [FeatureFlag]
}
@@ -23,18 +23,31 @@ input RoleSort {
description: SortOrder

deleted: SortOrder
editorId: SortOrder
editor: UserSort
createdUtc: SortOrder
updatedUtc: SortOrder
}

enum RoleFuzzyFields {
name
description
}

input RoleFuzzy {
fields: [RoleFuzzyFields]
term: String
threshold: Int
}

input RoleFilter {
id: IntFilter
name: StringFilter
description: StringFilter

fuzzy: RoleFuzzy

deleted: BooleanFilter
editorId: IntFilter
editor_id: IntFilter
createdUtc: DateFilter
updatedUtc: DateFilter
}
19  api/src/api_graphql/graphql/setting.gql  (new file)
@@ -0,0 +1,19 @@
type Setting implements DbModel {
    id: ID
    key: String
    value: String

    deleted: Boolean
    editor: User
    createdUtc: String
    updatedUtc: String
}

type SettingMutation {
    change(input: SettingInput!): Setting
}

input SettingInput {
    key: String!
    value: String!
}
@@ -32,12 +32,27 @@ input ShortUrlSort {
updatedUtc: SortOrder
}

enum ShortUrlFuzzyFields {
shortUrl
targetUrl
description
}

input ShortUrlFuzzy {
fields: [ShortUrlFuzzyFields]
term: String
threshold: Int
}

input ShortUrlFilter {
id: IntFilter
name: StringFilter
shortUrl: StringFilter
targetUrl: StringFilter
description: StringFilter
loadingScreen: BooleanFilter

fuzzy: ShortUrlFuzzy

deleted: BooleanFilter
editor: IntFilter
createdUtc: DateFilter
@@ -49,6 +64,7 @@ type ShortUrlMutation {
update(input: ShortUrlUpdateInput!): ShortUrl
delete(id: ID!): Boolean
restore(id: ID!): Boolean
trackVisit(id: ID!, agent: String): Boolean
}

input ShortUrlCreateInput {
16  api/src/api_graphql/graphql/subscription.gql  (new file)
@@ -0,0 +1,16 @@
scalar SubscriptionChange

type Subscription {
    ping: String

    apiKeyChange: SubscriptionChange
    featureFlagChange: SubscriptionChange
    roleChange: SubscriptionChange
    settingChange: SubscriptionChange
    userChange: SubscriptionChange
    userSettingChange: SubscriptionChange

    domainChange: SubscriptionChange
    groupChange: SubscriptionChange
    shortUrlChange: SubscriptionChange
}
@@ -35,19 +35,33 @@ input UserSort {
email: SortOrder

deleted: SortOrder
editorId: SortOrder
editor: UserSort
createdUtc: SortOrder
updatedUtc: SortOrder
}

enum UserFuzzyFields {
keycloakId
username
email
}

input UserFuzzy {
fields: [UserFuzzyFields]
term: String
threshold: Int
}

input UserFilter {
id: IntFilter
keycloakId: StringFilter
username: StringFilter
email: StringFilter

fuzzy: UserFuzzy

deleted: BooleanFilter
editor: IntFilter
editor: UserFilter
createdUtc: DateFilter
updatedUtc: DateFilter
}
19  api/src/api_graphql/graphql/user_setting.gql  (new file)
@@ -0,0 +1,19 @@
type UserSetting implements DbModel {
    id: ID
    key: String
    value: String

    deleted: Boolean
    editor: User
    createdUtc: String
    updatedUtc: String
}

type UserSettingMutation {
    change(input: UserSettingInput!): UserSetting
}

input UserSettingInput {
    key: String!
    value: String!
}
18  api/src/api_graphql/input/feature_flag_input.py  (new file)
@@ -0,0 +1,18 @@
from api_graphql.abc.input_abc import InputABC


class FeatureFlagInput(InputABC):

    def __init__(self, src: dict):
        InputABC.__init__(self, src)

        self._key = self.option("key", str, required=True)
        self._value = self.option("value", bool, required=True)

    @property
    def key(self) -> str:
        return self._key

    @property
    def value(self) -> bool:
        return self._value
18  api/src/api_graphql/input/setting_input.py  (new file)
@@ -0,0 +1,18 @@
from api_graphql.abc.input_abc import InputABC


class SettingInput(InputABC):

    def __init__(self, src: dict):
        InputABC.__init__(self, src)

        self._key = self.option("key", str, required=True)
        self._value = self.option("value", str, required=True)

    @property
    def key(self) -> str:
        return self._key

    @property
    def value(self) -> str:
        return self._value
18  api/src/api_graphql/input/user_setting_input.py  (new file)
@@ -0,0 +1,18 @@
from api_graphql.abc.input_abc import InputABC


class UserSettingInput(InputABC):

    def __init__(self, src: dict):
        InputABC.__init__(self, src)

        self._key = self.option("key", str, required=True)
        self._value = self.option("value", str, required=True)

    @property
    def key(self) -> str:
        return self._key

    @property
    def value(self) -> str:
        return self._value
@@ -60,3 +60,22 @@ class Mutation(MutationABC):
Permissions.short_urls_delete,
],
)

self.add_mutation_type(
"setting",
"Setting",
require_any_permission=[
Permissions.settings_update,
],
)
self.add_mutation_type(
"userSetting",
"UserSetting",
)
self.add_mutation_type(
"featureFlag",
"FeatureFlag",
require_any_permission=[
Permissions.administrator,
],
)
32  api/src/api_graphql/mutations/feature_flag_mutation.py  (new file)
@@ -0,0 +1,32 @@
from api_graphql.abc.mutation_abc import MutationABC
from api_graphql.input.feature_flag_input import FeatureFlagInput
from core.logger import APILogger
from data.schemas.system.feature_flag import FeatureFlag
from data.schemas.system.feature_flag_dao import featureFlagDao
from service.permission.permissions_enum import Permissions

logger = APILogger(__name__)


class FeatureFlagMutation(MutationABC):
    def __init__(self):
        MutationABC.__init__(self, "FeatureFlag")
        self.mutation(
            "change",
            self.resolve_change,
            FeatureFlagInput,
            require_any_permission=[Permissions.administrator],
        )

    @staticmethod
    async def resolve_change(obj: FeatureFlagInput, *_):
        logger.debug(f"create new feature flag: {input}")

        setting = await featureFlagDao.find_single_by({FeatureFlag.key: obj.key})
        if setting is None:
            raise ValueError(f"FeatureFlag {obj.key} not found")

        setting.value = obj.value
        await featureFlagDao.update(setting)

        return await featureFlagDao.get_by_id(setting.id)
32  api/src/api_graphql/mutations/setting_mutation.py  (new file)
@@ -0,0 +1,32 @@
from api_graphql.abc.mutation_abc import MutationABC
from api_graphql.input.setting_input import SettingInput
from core.logger import APILogger
from data.schemas.system.setting import Setting
from data.schemas.system.setting_dao import settingsDao
from service.permission.permissions_enum import Permissions

logger = APILogger(__name__)


class SettingMutation(MutationABC):
    def __init__(self):
        MutationABC.__init__(self, "Setting")
        self.mutation(
            "change",
            self.resolve_change,
            SettingInput,
            require_any_permission=[Permissions.settings_update],
        )

    @staticmethod
    async def resolve_change(obj: SettingInput, *_):
        logger.debug(f"create new setting: {input}")

        setting = await settingsDao.find_single_by({Setting.key: obj.key})
        if setting is None:
            raise ValueError(f"Setting with key {obj.key} not found")

        setting.value = obj.value
        await settingsDao.update(setting)

        return await settingsDao.get_by_id(setting.id)
@@ -8,6 +8,8 @@ from data.schemas.public.domain_dao import domainDao
from data.schemas.public.group_dao import groupDao
from data.schemas.public.short_url import ShortUrl
from data.schemas.public.short_url_dao import shortUrlDao
from data.schemas.public.short_url_visit import ShortUrlVisit
from data.schemas.public.short_url_visit_dao import shortUrlVisitDao
from service.permission.permissions_enum import Permissions

logger = APILogger(__name__)
@@ -39,6 +41,11 @@ class ShortUrlMutation(MutationABC):
self.resolve_restore,
require_any_permission=[Permissions.short_urls_delete],
)
self.mutation(
"trackVisit",
self.resolve_track_visit,
require_any_permission=[Permissions.short_urls_update],
)

@staticmethod
async def resolve_create(obj: ShortUrlCreateInput, *_):
@@ -106,3 +113,9 @@ class ShortUrlMutation(MutationABC):
short_url = await shortUrlDao.get_by_id(id)
await shortUrlDao.restore(short_url)
return True

@staticmethod
async def resolve_track_visit(*_, id: int, agent: str):
logger.debug(f"track visit: {id} -- {agent}")
await shortUrlVisitDao.create(ShortUrlVisit(0, id, agent))
return True
40
api/src/api_graphql/mutations/user_setting_mutation.py
Normal file
40
api/src/api_graphql/mutations/user_setting_mutation.py
Normal file
@ -0,0 +1,40 @@
from api.route import Route
from api_graphql.abc.mutation_abc import MutationABC
from api_graphql.input.user_setting_input import UserSettingInput
from core.logger import APILogger
from data.schemas.public.user_setting import UserSetting
from data.schemas.public.user_setting_dao import userSettingsDao
from data.schemas.system.setting_dao import settingsDao
from service.permission.permissions_enum import Permissions

logger = APILogger(__name__)


class UserSettingMutation(MutationABC):
    def __init__(self):
        MutationABC.__init__(self, "UserSetting")
        self.mutation(
            "change",
            self.resolve_change,
            UserSettingInput,
            require_any_permission=[Permissions.settings_update],
        )

    @staticmethod
    async def resolve_change(obj: UserSettingInput, *_):
        logger.debug(f"create new setting: {input}")
        user = await Route.get_user_or_default()
        if user is None:
            logger.debug("user not authorized")
            return None

        setting = await userSettingsDao.find_single_by(
            [{UserSetting.user_id: user.id}, {UserSetting.key: obj.key}]
        )
        if setting is None:
            await userSettingsDao.create(UserSetting(0, user.id, obj.key, obj.value))
        else:
            setting.value = obj.value
            await userSettingsDao.update(setting)

        return await userSettingsDao.find_by_key(user, obj.key)
@ -16,9 +16,12 @@ class GroupQuery(DbModelQueryABC):
        self.field(
            ResolverFieldBuilder("shortUrls")
            .with_resolver(self._get_urls)
            .with_require_any([
                Permissions.groups,
            ], [group_by_assignment_resolver])
            .with_require_any(
                [
                    Permissions.groups,
                ],
                [group_by_assignment_resolver],
            )
        )
        self.set_field("roles", self._get_roles)

@ -26,6 +26,10 @@ from data.schemas.public.group import Group
from data.schemas.public.group_dao import groupDao
from data.schemas.public.short_url import ShortUrl
from data.schemas.public.short_url_dao import shortUrlDao
from data.schemas.public.user_setting import UserSetting
from data.schemas.public.user_setting_dao import userSettingsDao
from data.schemas.system.feature_flag_dao import featureFlagDao
from data.schemas.system.setting_dao import settingsDao
from service.permission.permissions_enum import Permissions


@ -116,7 +120,7 @@ class Query(QueryABC):
                    Permissions.short_urls_create,
                    Permissions.short_urls_update,
                ],
                [group_by_assignment_resolver]
                [group_by_assignment_resolver],
            )
        )
        self.field(
@ -127,6 +131,23 @@ class Query(QueryABC):
            .with_require_any([Permissions.short_urls], [group_by_assignment_resolver])
        )

        self.field(
            ResolverFieldBuilder("settings")
            .with_resolver(self._resolve_settings)
            .with_direct_result()
            .with_public(True)
        )
        self.field(
            ResolverFieldBuilder("userSettings")
            .with_resolver(self._resolve_user_settings)
            .with_direct_result()
        )
        self.field(
            ResolverFieldBuilder("featureFlags")
            .with_resolver(self._resolve_feature_flags)
            .with_direct_result()
        )

    @staticmethod
    async def _get_user(*_):
        return await Route.get_user()
@ -157,3 +178,27 @@ class Query(QueryABC):
            for x in kc_users
            if x["id"] not in existing_user_keycloak_ids
        ]

    @staticmethod
    async def _resolve_settings(*args, **kwargs):
        if "key" in kwargs:
            return [await settingsDao.find_by_key(kwargs["key"])]
        return await settingsDao.get_all()

    @staticmethod
    async def _resolve_user_settings(*args, **kwargs):
        user = await Route.get_user()
        if user is None:
            return None

        if "key" in kwargs:
            return await userSettingsDao.find_by(
                {UserSetting.user_id: user.id, UserSetting.key: kwargs["key"]}
            )
        return await userSettingsDao.find_by({UserSetting.user_id: user.id})

    @staticmethod
    async def _resolve_feature_flags(*args, **kwargs):
        if "key" in kwargs:
            return [await featureFlagDao.find_by_key(kwargs["key"])]
        return await featureFlagDao.get_all()
@ -12,11 +12,19 @@ async def group_by_assignment_resolver(ctx: QueryContext) -> bool:
        groups = [await x.group for x in ctx.data.nodes]
        role_ids = {x.id for x in await ctx.user.roles}
        filtered_groups = [
            g.id for g in groups if
            g is not None and (roles := await groupDao.get_roles(g.id)) and all(r.id in role_ids for r in roles)
            g.id
            for g in groups
            if g is not None
            and (roles := await groupDao.get_roles(g.id))
            and all(r.id in role_ids for r in roles)
        ]

        ctx.data.nodes = [node for node in ctx.data.nodes if (await node.group) is not None and (await node.group).id in filtered_groups]
        ctx.data.nodes = [
            node
            for node in ctx.data.nodes
            if (await node.group) is not None
            and (await node.group).id in filtered_groups
        ]
        return True

    return True
@ -5,6 +5,7 @@ from ariadne import make_executable_schema, load_schema_from_path
from api_graphql.definition import QUERIES
from api_graphql.mutation import Mutation
from api_graphql.query import Query
from api_graphql.subscription import Subscription

type_defs = load_schema_from_path(
    os.path.join(os.path.dirname(os.path.realpath(__file__)), "../graphql/")
@ -13,5 +14,6 @@ schema = make_executable_schema(
    type_defs,
    Query(),
    Mutation(),
    Subscription(),
    *QUERIES,
)
66
api/src/api_graphql/subscription.py
Normal file
66
api/src/api_graphql/subscription.py
Normal file
@ -0,0 +1,66 @@
from api_graphql.abc.subscription_abc import SubscriptionABC
from api_graphql.field.subscription_field_builder import SubscriptionFieldBuilder
from service.permission.permissions_enum import Permissions


class Subscription(SubscriptionABC):
    def __init__(self):
        SubscriptionABC.__init__(self)

        self.subscribe(
            SubscriptionFieldBuilder("ping")
            .with_resolver(lambda message, *_: message.message)
            .with_public(True)
        )

        self.subscribe(
            SubscriptionFieldBuilder("apiKeyChange")
            .with_resolver(lambda message, *_: message.message)
            .with_require_any_permission([Permissions.api_keys])
        )

        self.subscribe(
            SubscriptionFieldBuilder("featureFlagChange")
            .with_resolver(lambda message, *_: message.message)
            .with_public(True)
        )

        self.subscribe(
            SubscriptionFieldBuilder("roleChange")
            .with_resolver(lambda message, *_: message.message)
            .with_require_any_permission([Permissions.roles])
        )

        self.subscribe(
            SubscriptionFieldBuilder("settingChange")
            .with_resolver(lambda message, *_: message.message)
            .with_require_any_permission([Permissions.settings])
        )

        self.subscribe(
            SubscriptionFieldBuilder("userChange")
            .with_resolver(lambda message, *_: message.message)
            .with_require_any_permission([Permissions.users])
        )

        self.subscribe(
            SubscriptionFieldBuilder("userSettingChange")
            .with_resolver(lambda message, *_: message.message)
            .with_public(True)
        )

        self.subscribe(
            SubscriptionFieldBuilder("domainChange")
            .with_resolver(lambda message, *_: message.message)
            .with_require_any_permission([Permissions.domains])
        )
        self.subscribe(
            SubscriptionFieldBuilder("groupChange")
            .with_resolver(lambda message, *_: message.message)
            .with_require_any_permission([Permissions.groups])
        )
        self.subscribe(
            SubscriptionFieldBuilder("shortUrlChange")
            .with_resolver(lambda message, *_: message.message)
            .with_require_any_permission([Permissions.short_urls])
        )
@ -9,7 +9,7 @@ TRequireAnyResolvers = list[
    Union[
        Callable[[QueryContext], bool],
        Awaitable[[QueryContext], bool],
        Callable[[QueryContext], Coroutine[Any, Any, bool]]
        Callable[[QueryContext], Coroutine[Any, Any, bool]],
    ]
]
TRequireAny = tuple[TRequireAnyPermissions, TRequireAnyResolvers]
0
api/src/core/configuration/__init__.py
Normal file
0
api/src/core/configuration/__init__.py
Normal file
20
api/src/core/configuration/feature_flags.py
Normal file
20
api/src/core/configuration/feature_flags.py
Normal file
@ -0,0 +1,20 @@
from core.configuration.feature_flags_enum import FeatureFlagsEnum
from data.schemas.system.feature_flag_dao import featureFlagDao


class FeatureFlags:
    _flags = {
        FeatureFlagsEnum.version_endpoint.value: True,  # 15.01.2025
    }

    @staticmethod
    def get_default(key: FeatureFlagsEnum) -> bool:
        return FeatureFlags._flags[key.value]

    @staticmethod
    async def has_feature(key: FeatureFlagsEnum) -> bool:
        value = await featureFlagDao.find_by_key(key.value)
        if value is None:
            return False

        return value.value
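A minimal sketch of how this helper might be consulted; the wrapper function below is illustrative and not part of the commit, only FeatureFlags, FeatureFlagsEnum and version_endpoint come from the new files above.

# Illustrative only: assumes an async caller; falls back to the in-code default
# when the flag row is missing in the database.
from core.configuration.feature_flags import FeatureFlags
from core.configuration.feature_flags_enum import FeatureFlagsEnum


async def version_endpoint_enabled() -> bool:
    if await FeatureFlags.has_feature(FeatureFlagsEnum.version_endpoint):
        return True
    return FeatureFlags.get_default(FeatureFlagsEnum.version_endpoint)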
6
api/src/core/configuration/feature_flags_enum.py
Normal file
6
api/src/core/configuration/feature_flags_enum.py
Normal file
@ -0,0 +1,6 @@
from enum import Enum


class FeatureFlagsEnum(Enum):
    # modules
    version_endpoint = "VersionEndpoint"
1
api/src/core/const.py
Normal file
1
api/src/core/const.py
Normal file
@ -0,0 +1 @@
DATETIME_FORMAT = "%Y-%m-%d %H:%M:%S.%f %z"
@ -4,9 +4,12 @@ from enum import Enum
from types import NoneType
from typing import Generic, Optional, Union, TypeVar, Any, Type

from core.const import DATETIME_FORMAT
from core.database.abc.db_model_abc import DbModelABC
from core.database.database import Database
from core.get_value import get_value
from core.logger import DBLogger
from core.string import camel_to_snake
from core.typing import T, Attribute, AttributeFilters, AttributeSorts

T_DBM = TypeVar("T_DBM", bound=DbModelABC)
@ -23,7 +26,11 @@ class DataAccessObjectABC(ABC, Database, Generic[T_DBM]):
        self._default_filter_condition = None

        self.__attributes: dict[str, type] = {}
        self.__joins: dict[str, str] = {}

        self.__db_names: dict[str, str] = {}
        self.__foreign_tables: dict[str, str] = {}

        self.__date_attributes: set[str] = set()
        self.__ignored_attributes: set[str] = set()

@ -35,12 +42,12 @@ class DataAccessObjectABC(ABC, Database, Generic[T_DBM]):
        return self._table_name

    def attribute(
        self,
        attr_name: Attribute,
        attr_type: type,
        db_name: str = None,
        ignore=False,
        primary_key=False,
        self,
        attr_name: Attribute,
        attr_type: type,
        db_name: str = None,
        ignore=False,
        primary_key=False,
    ):
        """
        Add an attribute for db and object mapping to the data access object
@ -69,6 +76,40 @@ class DataAccessObjectABC(ABC, Database, Generic[T_DBM]):
        if attr_type in [datetime, datetime.datetime]:
            self.__date_attributes.add(db_name)

    def reference(
        self,
        attr: Attribute,
        primary_attr: Attribute,
        foreign_attr: Attribute,
        table_name: str,
    ):
        """
        Add a reference to another table for the given attribute
        :param str primary_attr: Name of the primary key in the foreign object
        :param str foreign_attr: Name of the foreign key in the object
        :param str table_name: Name of the table to reference
        :return:
        """
        if table_name == self._table_name:
            return
        if isinstance(attr, property):
            attr = attr.fget.__name__

        if isinstance(primary_attr, property):
            primary_attr = primary_attr.fget.__name__

        primary_attr = primary_attr.lower().replace("_", "")

        if isinstance(foreign_attr, property):
            foreign_attr = foreign_attr.fget.__name__

        foreign_attr = foreign_attr.lower().replace("_", "")

        self.__joins[foreign_attr] = (
            f"LEFT JOIN {table_name} ON {table_name}.{primary_attr} = {self._table_name}.{foreign_attr}"
        )
        self.__foreign_tables[attr] = table_name

    def to_object(self, result: dict) -> T_DBM:
        """
        Convert a result from the database to an object
@ -89,8 +130,13 @@ class DataAccessObjectABC(ABC, Database, Generic[T_DBM]):

        return self._model_type(**value_map)

    async def count(self) -> int:
        result = await self._db.select_map(f"SELECT COUNT(*) FROM {self._table_name}")
    async def count(self, filters: AttributeFilters = None) -> int:
        query = f"SELECT COUNT(*) FROM {self._table_name}"

        if filters is not None and (not isinstance(filters, list) or len(filters) > 0):
            query += f" WHERE {self._build_conditions(filters)}"

        result = await self._db.select_map(query)
        return result[0]["count"]

    async def get_all(self) -> list[T_DBM]:
@ -118,11 +164,11 @@ class DataAccessObjectABC(ABC, Database, Generic[T_DBM]):
        return self.to_object(result[0])

    async def get_by(
        self,
        filters: AttributeFilters = None,
        sorts: AttributeSorts = None,
        take: int = None,
        skip: int = None,
        self,
        filters: AttributeFilters = None,
        sorts: AttributeSorts = None,
        take: int = None,
        skip: int = None,
    ) -> list[T_DBM]:
        """
        Get all objects by the given filters
@ -143,11 +189,11 @@ class DataAccessObjectABC(ABC, Database, Generic[T_DBM]):
        return [self.to_object(x) for x in result]

    async def get_single_by(
        self,
        filters: AttributeFilters = None,
        sorts: AttributeSorts = None,
        take: int = None,
        skip: int = None,
        self,
        filters: AttributeFilters = None,
        sorts: AttributeSorts = None,
        take: int = None,
        skip: int = None,
    ) -> T_DBM:
        """
        Get a single object by the given filters
@ -168,11 +214,11 @@ class DataAccessObjectABC(ABC, Database, Generic[T_DBM]):
        return result[0]

    async def find_by(
        self,
        filters: AttributeFilters = None,
        sorts: AttributeSorts = None,
        take: int = None,
        skip: int = None,
        self,
        filters: AttributeFilters = None,
        sorts: AttributeSorts = None,
        take: int = None,
        skip: int = None,
    ) -> list[Optional[T_DBM]]:
        """
        Find all objects by the given filters
@ -192,11 +238,11 @@ class DataAccessObjectABC(ABC, Database, Generic[T_DBM]):
        return [self.to_object(x) for x in result]

    async def find_single_by(
        self,
        filters: AttributeFilters = None,
        sorts: AttributeSorts = None,
        take: int = None,
        skip: int = None,
        self,
        filters: AttributeFilters = None,
        sorts: AttributeSorts = None,
        take: int = None,
        skip: int = None,
    ) -> Optional[T_DBM]:
        """
        Find a single object by the given filters
@ -296,7 +342,7 @@ class DataAccessObjectABC(ABC, Database, Generic[T_DBM]):
        await self._db.execute(query)

    async def _build_delete_statement(
        self, obj: T_DBM, hard_delete: bool = False
        self, obj: T_DBM, hard_delete: bool = False
    ) -> str:
        if hard_delete:
            return f"""
@ -384,6 +430,12 @@ class DataAccessObjectABC(ABC, Database, Generic[T_DBM]):
                return "ARRAY[]::text[]"
            return f"ARRAY[{", ".join([DataAccessObjectABC._get_value_sql(x) for x in value])}]"

        if isinstance(value, datetime.datetime):
            if value.tzinfo is None:
                value = value.replace(tzinfo=datetime.timezone.utc)

            return f"'{value.strftime(DATETIME_FORMAT)}'"

        return str(value)

    @staticmethod
@ -406,13 +458,16 @@ class DataAccessObjectABC(ABC, Database, Generic[T_DBM]):
        return cast_type(value)

    def _build_conditional_query(
        self,
        filters: AttributeFilters = None,
        sorts: AttributeSorts = None,
        take: int = None,
        skip: int = None,
        self,
        filters: AttributeFilters = None,
        sorts: AttributeSorts = None,
        take: int = None,
        skip: int = None,
    ) -> str:
        query = f"SELECT * FROM {self._table_name}"
        query = f"SELECT {self._table_name}.* FROM {self._table_name}"

        for join in self.__joins:
            query += f" {self.__joins[join]}"

        if filters is not None and (not isinstance(filters, list) or len(filters) > 0):
            query += f" WHERE {self._build_conditions(filters)}"
@ -438,12 +493,41 @@ class DataAccessObjectABC(ABC, Database, Generic[T_DBM]):
            for attr, values in f.items():
                if isinstance(attr, property):
                    attr = attr.fget.__name__

                if attr in self.__foreign_tables:
                    foreign_table = self.__foreign_tables[attr]
                    conditions.extend(
                        self._build_foreign_conditions(foreign_table, values)
                    )
                    continue

                if attr == "fuzzy":
                    conditions.append(
                        " OR ".join(
                            self._build_fuzzy_conditions(
                                [
                                    (
                                        self.__db_names[x]
                                        if x in self.__db_names
                                        else self.__db_names[camel_to_snake(x)]
                                    )
                                    for x in get_value(values, "fields", list[str])
                                ],
                                get_value(values, "term", str),
                                get_value(values, "threshold", int, 5),
                            )
                        )
                    )
                    continue

                db_name = self.__db_names[attr]

                if isinstance(values, dict):
                    for operator, value in values.items():
                        conditions.append(
                            self._build_condition(db_name, operator, value)
                            self._build_condition(
                                f"{self._table_name}.{db_name}", operator, value
                            )
                        )
                elif isinstance(values, list):
                    sub_conditions = []
@ -451,7 +535,9 @@ class DataAccessObjectABC(ABC, Database, Generic[T_DBM]):
                        if isinstance(value, dict):
                            for operator, val in value.items():
                                sub_conditions.append(
                                    self._build_condition(db_name, operator, val)
                                    self._build_condition(
                                        f"{self._table_name}.{db_name}", operator, val
                                    )
                                )
                        else:
                            sub_conditions.append(
@ -463,12 +549,65 @@ class DataAccessObjectABC(ABC, Database, Generic[T_DBM]):

        return " AND ".join(conditions)

    def _build_fuzzy_conditions(
        self, fields: list[str], term: str, threshold: int = 10
    ) -> list[str]:
        conditions = []
        for field in fields:
            conditions.append(
                f"levenshtein({field}, '{term}') <= {threshold}"
            )  # Adjust the threshold as needed

        return conditions

    def _build_foreign_conditions(self, table: str, values: dict) -> list[str]:
        """
        Build SQL conditions for foreign key references
        :param table: Foreign table name
        :param values: Filter values
        :return: List of conditions
        """
        conditions = []
        for attr, sub_values in values.items():
            if isinstance(attr, property):
                attr = attr.fget.__name__

            if attr in self.__foreign_tables:
                foreign_table = self.__foreign_tables[attr]
                conditions.extend(
                    self._build_foreign_conditions(foreign_table, sub_values)
                )
                continue

            db_name = f"{table}.{attr.lower().replace('_', '')}"

            if isinstance(sub_values, dict):
                for operator, value in sub_values.items():
                    conditions.append(self._build_condition(db_name, operator, value))
            elif isinstance(sub_values, list):
                sub_conditions = []
                for value in sub_values:
                    if isinstance(value, dict):
                        for operator, val in value.items():
                            sub_conditions.append(
                                self._build_condition(db_name, operator, val)
                            )
                    else:
                        sub_conditions.append(
                            self._get_value_validation_sql(db_name, value)
                        )
                conditions.append(f"({' OR '.join(sub_conditions)})")
            else:
                conditions.append(self._get_value_validation_sql(db_name, sub_values))

        return conditions

    def _get_value_validation_sql(self, field: str, value: Any):
        value = self._get_value_sql(value)

        if value == "NULL":
            return f"{field} IS NULL"
        return f"{field} = {value}"
            return f"{self._table_name}.{field} IS NULL"
        return f"{self._table_name}.{field} = {value}"

    def _build_condition(self, db_name: str, operator: str, value: Any) -> str:
        """
@ -530,6 +669,13 @@ class DataAccessObjectABC(ABC, Database, Generic[T_DBM]):
            if isinstance(attr, property):
                attr = attr.fget.__name__

            if attr in self.__foreign_tables:
                foreign_table = self.__foreign_tables[attr]
                sort_clauses.extend(
                    self._build_foreign_order_by(foreign_table, direction)
                )
                continue

            match attr:
                case "createdUtc":
                    attr = "created"
@ -547,6 +693,30 @@ class DataAccessObjectABC(ABC, Database, Generic[T_DBM]):

        return ", ".join(sort_clauses)

    def _build_foreign_order_by(self, table: str, direction: str) -> list[str]:
        """
        Build SQL order by clause for foreign key references
        :param table: Foreign table name
        :param direction: Sort direction
        :return: List of order by clauses
        """
        sort_clauses = []
        for attr, sub_direction in direction.items():
            if isinstance(attr, property):
                attr = attr.fget.__name__

            if attr in self.__foreign_tables:
                foreign_table = self.__foreign_tables[attr]
                sort_clauses.extend(
                    self._build_foreign_order_by(foreign_table, sub_direction)
                )
                continue

            db_name = f"{table}.{attr.lower().replace('_', '')}"
            sort_clauses.append(f"{db_name} {sub_direction.upper()}")

        return sort_clauses

    @staticmethod
    async def _get_editor_id(obj: T_DBM):
        editor_id = obj.editor_id
@ -4,11 +4,11 @@ from core.typing import T


def get_value(
    source: dict,
    key: str,
    cast_type: Type[T],
    default: Optional[T] = None,
    list_delimiter: str = ",",
    source: dict,
    key: str,
    cast_type: Type[T],
    default: Optional[T] = None,
    list_delimiter: str = ",",
) -> Optional[T]:
    """
    Get value from source dictionary and cast it to a specified type.
@ -26,8 +26,8 @@ def get_value(

    value = source[key]
    if isinstance(
        value,
        cast_type if not hasattr(cast_type, "__origin__") else cast_type.__origin__,
        value,
        cast_type if not hasattr(cast_type, "__origin__") else cast_type.__origin__,
    ):
        return value

@ -36,10 +36,15 @@ def get_value(
        return value.lower() in ["true", "1"]

    if (
        cast_type if not hasattr(cast_type, "__origin__") else cast_type.__origin__
        cast_type if not hasattr(cast_type, "__origin__") else cast_type.__origin__
    ) == list:
        if not (value.startswith("[") and value.endswith("]")) and list_delimiter not in value:
            raise ValueError("List values must be enclosed in square brackets or use a delimiter.")
        if (
            not (value.startswith("[") and value.endswith("]"))
            and list_delimiter not in value
        ):
            raise ValueError(
                "List values must be enclosed in square brackets or use a delimiter."
            )

        if value.startswith("[") and value.endswith("]"):
            value = value[1:-1]
@ -1,8 +1,13 @@
import asyncio
import os
import traceback
from datetime import datetime


from api.middleware.request import get_request
from core.environment import Environment


class Logger:
    _level = "info"
    _levels = ["trace", "debug", "info", "warning", "error", "fatal"]
@ -54,6 +59,30 @@ class Logger:
        else:
            raise ValueError(f"Invalid log level: {level}")

    def _get_structured_message(self, level: str, timestamp: str, messages: str) -> str:
        structured_message = {
            "timestamp": timestamp,
            "level": level.upper(),
            "source": self.source,
            "messages": messages,
        }

        request = get_request()

        if request is not None:
            structured_message["request"] = {
                "url": str(request.url),
                "method": request.method,
                "data": asyncio.create_task(request.body()),
            }
        return str(structured_message)

    def _write_log_to_file(self, content: str):
        self._ensure_file_size()
        with open(self.log_file, "a") as log_file:
            log_file.write(content + "\n")
            log_file.close()

    def _log(self, level: str, *messages):
        try:
            if self._levels.index(level) < self._levels.index(self._level):
@ -63,17 +92,18 @@ class Logger:
            timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S.%f")
            formatted_message = f"<{timestamp}> [{level.upper():^7}] [{self._file_prefix:^5}] - [{self.source}]: {' '.join(messages)}"

            self._ensure_file_size()
            with open(self.log_file, "a") as log_file:
                log_file.write(formatted_message + "\n")
                log_file.close()
            if Environment.get("STRUCTURED_LOGGING", bool, False):
                self._write_log_to_file(
                    self._get_structured_message(level, timestamp, " ".join(messages))
                )
            else:
                self._write_log_to_file(formatted_message)

            color = self.COLORS.get(level, self.COLORS["reset"])
            reset_color = self.COLORS["reset"]

            print(f"{color}{formatted_message}{reset_color}")
            print(
                f"{self.COLORS.get(level, self.COLORS["reset"])}{formatted_message}{self.COLORS["reset"]}"
            )
        except Exception as e:
            print(f"Error while logging: {e}")
            print(f"Error while logging: {e} -> {traceback.format_exc()}")

    def trace(self, *messages):
        self._log("trace", *messages)
9
api/src/core/string.py
Normal file
9
api/src/core/string.py
Normal file
@ -0,0 +1,9 @@
import re


def first_to_lower(s: str) -> str:
    return s[0].lower() + s[1:] if s else s


def camel_to_snake(s: str) -> str:
    return re.sub(r"(?<!^)(?=[A-Z])", "_", s).lower()
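For reference, a small illustrative check of the two helpers above (not part of the commit):

from core.string import camel_to_snake, first_to_lower

# camel_to_snake inserts "_" before every non-leading capital, then lowercases.
assert camel_to_snake("createdUtc") == "created_utc"
# first_to_lower only lowercases the first character.
assert first_to_lower("ShortUrl") == "shortUrl"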
48
api/src/data/schemas/public/user_setting.py
Normal file
48
api/src/data/schemas/public/user_setting.py
Normal file
@ -0,0 +1,48 @@
from datetime import datetime
from typing import Optional, Union

from async_property import async_property

from core.database.abc.db_model_abc import DbModelABC
from core.typing import SerialId


class UserSetting(DbModelABC):
    def __init__(
        self,
        id: SerialId,
        user_id: SerialId,
        key: str,
        value: str,
        deleted: bool = False,
        editor_id: Optional[SerialId] = None,
        created: Optional[datetime] = None,
        updated: Optional[datetime] = None,
    ):
        DbModelABC.__init__(self, id, deleted, editor_id, created, updated)

        self._user_id = user_id
        self._key = key
        self._value = value

    @property
    def user_id(self) -> SerialId:
        return self._user_id

    @async_property
    async def user(self):
        from data.schemas.administration.user_dao import userDao

        return await userDao.get_by_id(self._user_id)

    @property
    def key(self) -> str:
        return self._key

    @property
    def value(self) -> str:
        return self._value

    @value.setter
    def value(self, value: Union[str, int, float, bool]):
        self._value = str(value)
24
api/src/data/schemas/public/user_setting_dao.py
Normal file
24
api/src/data/schemas/public/user_setting_dao.py
Normal file
@ -0,0 +1,24 @@
from core.database.abc.db_model_dao_abc import DbModelDaoABC
from core.logger import DBLogger
from data.schemas.administration.user import User
from data.schemas.public.user_setting import UserSetting

logger = DBLogger(__name__)


class UserSettingDao(DbModelDaoABC[UserSetting]):

    def __init__(self):
        DbModelDaoABC.__init__(self, __name__, UserSetting, "public.user_settings")

        self.attribute(UserSetting.user_id, int)
        self.attribute(UserSetting.key, str)
        self.attribute(UserSetting.value, str)

    async def find_by_key(self, user: User, key: str) -> UserSetting:
        return await self.find_single_by(
            [{UserSetting.user_id: user.id}, {UserSetting.key: key}]
        )


userSettingsDao = UserSettingDao()
34
api/src/data/schemas/system/feature_flag.py
Normal file
34
api/src/data/schemas/system/feature_flag.py
Normal file
@ -0,0 +1,34 @@
from datetime import datetime
from typing import Optional

from core.database.abc.db_model_abc import DbModelABC
from core.typing import SerialId


class FeatureFlag(DbModelABC):
    def __init__(
        self,
        id: SerialId,
        key: str,
        value: bool,
        deleted: bool = False,
        editor_id: Optional[SerialId] = None,
        created: Optional[datetime] = None,
        updated: Optional[datetime] = None,
    ):
        DbModelABC.__init__(self, id, deleted, editor_id, created, updated)

        self._key = key
        self._value = value

    @property
    def key(self) -> str:
        return self._key

    @property
    def value(self) -> bool:
        return self._value

    @value.setter
    def value(self, value: bool):
        self._value = value
20
api/src/data/schemas/system/feature_flag_dao.py
Normal file
20
api/src/data/schemas/system/feature_flag_dao.py
Normal file
@ -0,0 +1,20 @@
from core.database.abc.db_model_dao_abc import DbModelDaoABC
from core.logger import DBLogger
from data.schemas.system.feature_flag import FeatureFlag

logger = DBLogger(__name__)


class FeatureFlagDao(DbModelDaoABC[FeatureFlag]):

    def __init__(self):
        DbModelDaoABC.__init__(self, __name__, FeatureFlag, "system.feature_flags")

        self.attribute(FeatureFlag.key, str)
        self.attribute(FeatureFlag.value, bool)

    async def find_by_key(self, key: str) -> FeatureFlag:
        return await self.find_single_by({FeatureFlag.key: key})


featureFlagDao = FeatureFlagDao()
34
api/src/data/schemas/system/setting.py
Normal file
34
api/src/data/schemas/system/setting.py
Normal file
@ -0,0 +1,34 @@
from datetime import datetime
from typing import Optional, Union

from core.database.abc.db_model_abc import DbModelABC
from core.typing import SerialId


class Setting(DbModelABC):
    def __init__(
        self,
        id: SerialId,
        key: str,
        value: str,
        deleted: bool = False,
        editor_id: Optional[SerialId] = None,
        created: Optional[datetime] = None,
        updated: Optional[datetime] = None,
    ):
        DbModelABC.__init__(self, id, deleted, editor_id, created, updated)

        self._key = key
        self._value = value

    @property
    def key(self) -> str:
        return self._key

    @property
    def value(self) -> str:
        return self._value

    @value.setter
    def value(self, value: Union[str, int, float, bool]):
        self._value = str(value)
20
api/src/data/schemas/system/setting_dao.py
Normal file
20
api/src/data/schemas/system/setting_dao.py
Normal file
@ -0,0 +1,20 @@
from core.database.abc.db_model_dao_abc import DbModelDaoABC
from core.logger import DBLogger
from data.schemas.system.setting import Setting

logger = DBLogger(__name__)


class SettingDao(DbModelDaoABC[Setting]):

    def __init__(self):
        DbModelDaoABC.__init__(self, __name__, Setting, "system.settings")

        self.attribute(Setting.key, str)
        self.attribute(Setting.value, str)

    async def find_by_key(self, key: str) -> Setting:
        return await self.find_single_by({Setting.key: key})


settingsDao = SettingDao()
24
api/src/data/scripts/2025-03-08-08-10-settings.sql
Normal file
24
api/src/data/scripts/2025-03-08-08-10-settings.sql
Normal file
@ -0,0 +1,24 @@
CREATE SCHEMA IF NOT EXISTS system;

CREATE TABLE IF NOT EXISTS system.settings
(
    Id SERIAL PRIMARY KEY,
    Key TEXT NOT NULL,
    Value TEXT NOT NULL,
    -- for history
    Deleted BOOLEAN NOT NULL DEFAULT FALSE,
    EditorId INT NULL REFERENCES administration.users (Id),
    CreatedUtc timestamptz NOT NULL DEFAULT NOW(),
    UpdatedUtc timestamptz NOT NULL DEFAULT NOW()
);

CREATE TABLE system.settings_history
(
    LIKE system.settings
);

CREATE TRIGGER ip_list_history_trigger
    BEFORE INSERT OR UPDATE OR DELETE
    ON system.settings
    FOR EACH ROW
EXECUTE FUNCTION public.history_trigger_function();
24
api/src/data/scripts/2025-03-08-08-15-feature-flags.sql
Normal file
24
api/src/data/scripts/2025-03-08-08-15-feature-flags.sql
Normal file
@ -0,0 +1,24 @@
CREATE SCHEMA IF NOT EXISTS system;

CREATE TABLE IF NOT EXISTS system.feature_flags
(
    Id SERIAL PRIMARY KEY,
    Key TEXT NOT NULL,
    Value BOOLEAN NOT NULL,
    -- for history
    Deleted BOOLEAN NOT NULL DEFAULT FALSE,
    EditorId INT NULL REFERENCES administration.users (Id),
    CreatedUtc timestamptz NOT NULL DEFAULT NOW(),
    UpdatedUtc timestamptz NOT NULL DEFAULT NOW()
);

CREATE TABLE system.feature_flags_history
(
    LIKE system.feature_flags
);

CREATE TRIGGER ip_list_history_trigger
    BEFORE INSERT OR UPDATE OR DELETE
    ON system.feature_flags
    FOR EACH ROW
EXECUTE FUNCTION public.history_trigger_function();
25
api/src/data/scripts/2025-03-08-08-15-user-settings.sql
Normal file
25
api/src/data/scripts/2025-03-08-08-15-user-settings.sql
Normal file
@ -0,0 +1,25 @@
CREATE SCHEMA IF NOT EXISTS public;

CREATE TABLE IF NOT EXISTS public.user_settings
(
    Id SERIAL PRIMARY KEY,
    Key TEXT NOT NULL,
    Value TEXT NOT NULL,
    UserId INT NOT NULL REFERENCES administration.users (Id) ON DELETE CASCADE,
    -- for history
    Deleted BOOLEAN NOT NULL DEFAULT FALSE,
    EditorId INT NULL REFERENCES administration.users (Id),
    CreatedUtc timestamptz NOT NULL DEFAULT NOW(),
    UpdatedUtc timestamptz NOT NULL DEFAULT NOW()
);

CREATE TABLE public.user_settings_history
(
    LIKE public.user_settings
);

CREATE TRIGGER ip_list_history_trigger
    BEFORE INSERT OR UPDATE OR DELETE
    ON public.user_settings
    FOR EACH ROW
EXECUTE FUNCTION public.history_trigger_function();
|
40
api/src/data/seeder/feature_flags_seeder.py
Normal file
40
api/src/data/seeder/feature_flags_seeder.py
Normal file
@ -0,0 +1,40 @@
from core.configuration.feature_flags import FeatureFlags
from core.configuration.feature_flags_enum import FeatureFlagsEnum
from core.logger import DBLogger
from data.abc.data_seeder_abc import DataSeederABC
from data.schemas.system.feature_flag import FeatureFlag
from data.schemas.system.feature_flag_dao import featureFlagDao

logger = DBLogger(__name__)


class FeatureFlagsSeeder(DataSeederABC):
    def __init__(self):
        DataSeederABC.__init__(self)

    async def seed(self):
        logger.info("Seeding feature flags")
        feature_flags = await featureFlagDao.get_all()
        feature_flag_keys = [x.key for x in feature_flags]

        possible_feature_flags = {
            x.value: FeatureFlags.get_default(x) for x in FeatureFlagsEnum
        }

        to_create = [
            FeatureFlag(0, x, possible_feature_flags[x])
            for x in possible_feature_flags.keys()
            if x not in feature_flag_keys
        ]
        if len(to_create) > 0:
            await featureFlagDao.create_many(to_create)
            to_create_dicts = {x.key: x.value for x in to_create}
            logger.debug(f"Created feature flags: {to_create_dicts}")

        to_delete = [
            x for x in feature_flags if x.key not in possible_feature_flags.keys()
        ]
        if len(to_delete) > 0:
            await featureFlagDao.delete_many(to_delete, hard_delete=True)
            to_delete_dicts = {x.key: x.value for x in to_delete}
            logger.debug(f"Deleted feature flags: {to_delete_dicts}")
25
api/src/data/seeder/settings_seeder.py
Normal file
25
api/src/data/seeder/settings_seeder.py
Normal file
@ -0,0 +1,25 @@
from typing import Any

from core.logger import DBLogger
from data.abc.data_seeder_abc import DataSeederABC
from data.schemas.system.setting import Setting
from data.schemas.system.setting_dao import settingsDao

logger = DBLogger(__name__)


class SettingsSeeder(DataSeederABC):
    def __init__(self):
        DataSeederABC.__init__(self)

    async def seed(self):
        await self._seed_if_not_exists("default_language", "de")
        await self._seed_if_not_exists("show_terms", True)

    @staticmethod
    async def _seed_if_not_exists(key: str, value: Any):
        existing = await settingsDao.find_by_key(key)
        if existing is not None:
            return

        await settingsDao.create(Setting(0, key, str(value)))
@ -1,10 +1,9 @@
import asyncio
import sys

import eventlet
from eventlet import wsgi
import uvicorn

from api.api import app
from api.api import API
from core.environment import Environment
from core.logger import Logger
from startup import Startup
@ -18,15 +17,13 @@ def main():

    asyncio.set_event_loop_policy(WindowsSelectorEventLoopPolicy())

    loop = asyncio.new_event_loop()
    loop.run_until_complete(Startup.configure())
    loop.close()

    port = Environment.get("PORT", int, 5000)
    logger.info(f"Start API on port: {port}")
    if Environment.get_environment() == "development":
        logger.info(f"Playground: http://localhost:{port}/ui/playground")
    wsgi.server(eventlet.listen(("0.0.0.0", port)), app, log_output=False)
    Startup.configure()
    uvicorn.run(
        API.app,
        host="0.0.0.0",
        port=Environment.get("PORT", int, 5000),
        log_config=None,
    )


if __name__ == "__main__":
@ -1,117 +1,188 @@
import asyncio
import sys
from typing import Optional

import eventlet
from eventlet import wsgi
from flask import Flask, request, Response, redirect, render_template
import requests
import uvicorn
from starlette.applications import Starlette
from starlette.requests import Request
from starlette.responses import RedirectResponse
from starlette.routing import Route, Mount
from starlette.staticfiles import StaticFiles
from starlette.templating import Jinja2Templates

from core.database.database import Database
from core.environment import Environment
from core.logger import Logger
from data.schemas.public.short_url import ShortUrl
from data.schemas.public.short_url_dao import shortUrlDao
from data.schemas.public.short_url_visit import ShortUrlVisit
from data.schemas.public.short_url_visit_dao import shortUrlVisitDao

logger = Logger(__name__)
templates = Jinja2Templates(directory="templates")


class Redirector(Flask):

    def __init__(self, *args, **kwargs):
        Flask.__init__(self, *args, **kwargs)
async def index(request: Request):
    return templates.TemplateResponse("404.html", {"request": request}, status_code=404)


app = Redirector(__name__)


@app.route("/")
def index():
    return render_template("404.html"), 404


@app.route("/<path:path>")
async def _handle_request(path: str):
    short_url = await _find_short_url_by_url(path)
async def handle_request(request: Request):
    path = request.path_params["path"]
    short_url = _find_short_url_by_path(path)
    if short_url is None:
        return render_template("404.html"), 404
        return templates.TemplateResponse(
            "404.html", {"request": request}, status_code=404
        )

    domains = Environment.get("DOMAINS", list[str], [])
    domain = await short_url.domain
    domain = short_url["domain"]
    logger.debug(
        f"Domain: {domain.name if domain is not None else None}, request.host: {request.host}"
        f"Domain: {domain["name"] if domain is not None else None}, request.host: {request.headers['host']}"
    )

    host = request.host
    host = request.headers["host"]
    if ":" in host:
        host = host.split(":")[0]

    domain_strict_mode = Environment.get("DOMAIN_STRICT_MODE", bool, False)
    if domain is not None and (
        domain.name not in domains
        or (domain_strict_mode and not host.endswith(domain.name))
        domain["name"] not in domains
        or (domain_strict_mode and not host.endswith(domain["name"]))
    ):
        return render_template("404.html"), 404
        return templates.TemplateResponse(
            "404.html", {"request": request}, status_code=404
        )

    user_agent = request.headers.get("User-Agent", "").lower()

    if "wheregoes" in user_agent or "someothertool" in user_agent:
        return await _handle_short_url(path, short_url)
        return await _handle_short_url(request, short_url)

    if short_url.loading_screen:
        await _track_visit(short_url)
    if short_url["loadingScreen"]:
        await _track_visit(request, short_url)

        return render_template(
        return templates.TemplateResponse(
            "redirect.html",
            key=short_url.short_url,
            target_url=_get_redirect_url(short_url.target_url),
            {
                "request": request,
                "key": short_url["shortUrl"],
                "target_url": _get_redirect_url(short_url["targetUrl"]),
            },
        )

    return await _handle_short_url(path, short_url)
    return await _handle_short_url(request, short_url)


async def _handle_short_url(path: str, short_url: ShortUrl):
    if path.startswith("api/"):
        path = path.replace("api/", "")
def _find_short_url_by_path(path: str) -> Optional[dict]:
    api_url = Environment.get("API_URL", str)
    if api_url is None:
        raise Exception("API_URL is not set")

    await _track_visit(short_url)
    api_key = Environment.get("API_KEY", str)
    if api_key is None:
        raise Exception("API_KEY is not set")

    return _do_redirect(short_url.target_url)
    request = requests.post(
        f"{api_url}/graphql",
        json={
            "query": f"""
                query getShortUrlByPath($path: String!) {{
                    shortUrls(filter: {{ shortUrl: {{ equal: $path }}, deleted: {{ equal: false }} }}) {{
                        nodes {{
                            id
                            shortUrl
                            targetUrl
                            description
                            group {{
                                id
                                name
                            }}
                            domain {{
                                id
                                name
                            }}
                            loadingScreen
                            deleted
                        }}
                    }}
                }}
            """,
            "variables": {"path": path},
        },
        headers={"Authorization": f"API-Key {api_key}"},
    )
    data = request.json()["data"]["shortUrls"]["nodes"]
    if len(data) == 0:
        return None

    return data[0]


async def _track_visit(short_url: ShortUrl):
async def _handle_short_url(request: Request, short_url: dict):
    await _track_visit(request, short_url)

    return RedirectResponse(_get_redirect_url(short_url["targetUrl"]))


async def _track_visit(r: Request, short_url: dict):
    api_url = Environment.get("API_URL", str)
    if api_url is None:
        raise Exception("API_URL is not set")

    api_key = Environment.get("API_KEY", str)
    if api_key is None:
        raise Exception("API_KEY is not set")

    try:
        await shortUrlVisitDao.create(
            ShortUrlVisit(0, short_url.id, request.headers.get("User-Agent"))
        request = requests.post(
            f"{api_url}/graphql",
            json={
                "query": f"""
                    mutation trackShortUrlVisit($id: ID!, $agent: String) {{
                        shortUrl {{
                            trackVisit(id: $id, agent: $agent)
                        }}
                    }}
                """,
                "variables": {
                    "id": short_url["id"],
                    "agent": r.headers.get("User-Agent"),
                },
            },
            headers={"Authorization": f"API-Key {api_key}"},
        )
        if request.status_code != 200:
            logger.warning(
                f"Failed to track visit for short url {short_url["shortUrl"]}"
            )

            data = request.json()
            if "errors" in data:
                raise Exception(data["errors"])
        else:
            logger.debug(f"Tracked visit for short url {short_url["shortUrl"]}")
    except Exception as e:
        logger.error(f"Failed to update short url {short_url.short_url} with error", e)
        logger.error(
            f"Failed to update short url {short_url["shortUrl"]} with error", e
        )


async def _find_short_url_by_url(url: str) -> ShortUrl:
    return await shortUrlDao.find_single_by({ShortUrl.short_url: url})


def _get_redirect_url(url: str) -> str:
    # todo: multiple protocols like ts3://
    if not url.startswith("http://") and not url.startswith("https://"):
        url = f"http://{url}"

    return url


def _do_redirect(url: str) -> Response:
    return redirect(_get_redirect_url(url))


async def configure():
    Logger.set_level(Environment.get("LOG_LEVEL", str, "info"))
    Environment.set_environment(Environment.get("ENVIRONMENT", str, "production"))
    logger.info(f"Environment: {Environment.get_environment()}")

    app.debug = Environment.get_environment() == "development"

    await Database.startup_db()
routes = [
    Route("/", endpoint=index),
    Mount("/static", StaticFiles(directory="static"), name="static"),
    Route("/{path:path}", endpoint=handle_request),
]

app = Starlette(routes=routes, on_startup=[configure])


def main():
@ -120,26 +191,13 @@ def main():

    asyncio.set_event_loop_policy(WindowsSelectorEventLoopPolicy())

    loop = asyncio.new_event_loop()
    loop.run_until_complete(configure())
    loop.close()

    port = Environment.get("PORT", int, 5001)
    logger.info(f"Start API on port: {port}")
    if Environment.get_environment() == "development":
        logger.info(f"Playground: http://localhost:{port}/")
    wsgi.server(eventlet.listen(("0.0.0.0", port)), app, log_output=False)
    uvicorn.run(
        app,
        host="0.0.0.0",
        port=Environment.get("PORT", int, 5001),
        log_config=None,
    )


if __name__ == "__main__":
    main()

# ((
#  ( )
#   ;  /  ,
#    /  \/
#   /    |
#  /   ~/
#  /   ) ) ~ edraft
# ___//  |  /
# --' \_~-,
@ -7,6 +7,9 @@ class Permissions(Enum):
    """
    Administration
    """
    # administrator
    administrator = "administrator"

    # api keys
    api_keys = "api_keys"
    api_keys_create = "api_keys.create"
@ -19,6 +22,10 @@ class Permissions(Enum):
    users_update = "users.update"
    users_delete = "users.delete"

    # settings
    settings = "settings"
    settings_update = "settings.update"

    """
    Permissions
    """
@ -1,17 +1,31 @@
from flask_cors import CORS
from contextlib import asynccontextmanager

from api.api import app
from ariadne.asgi import GraphQL
from ariadne.asgi.handlers import GraphQLTransportWSHandler
from starlette.applications import Starlette
from starlette.middleware import Middleware
from starlette.middleware.cors import CORSMiddleware
from starlette.routing import WebSocketRoute

from api.api import API
from api.auth.keycloak_client import Keycloak
from api.broadcast import broadcast
from api.middleware.logging import LoggingMiddleware
from api.middleware.request import RequestMiddleware
from api.middleware.websocket import AuthenticatedGraphQLTransportWSHandler
from api.route import Route
from api_graphql.service.schema import schema
from core.database.database import Database
from core.database.database_settings import DatabaseSettings
from core.database.db_context import DBContext
from core.environment import Environment
from core.logger import Logger
from data.seeder.api_key_seeder import ApiKeySeeder
from data.seeder.feature_flags_seeder import FeatureFlagsSeeder
from data.seeder.file_hash_seeder import FileHashSeeder
from data.seeder.permission_seeder import PermissionSeeder
from data.seeder.role_seeder import RoleSeeder
from data.seeder.short_url_seeder import ShortUrlSeeder
from data.seeder.settings_seeder import SettingsSeeder
from data.service.migration_service import MigrationService
from service.file_service import FileService

@ -19,15 +33,43 @@ logger = Logger(__name__)


class Startup:
    @classmethod
    def _get_db_settings(cls):
        host = Environment.get("DB_HOST", str)
        port = Environment.get("DB_PORT", int)
        user = Environment.get("DB_USER", str)
        password = Environment.get("DB_PASSWORD", str)
        database = Environment.get("DB_DATABASE", str)

        if None in [host, port, user, password, database]:
            logger.fatal(
                "DB settings are not set correctly",
                EnvironmentError("DB settings are not set correctly"),
            )

        return DatabaseSettings(
            host=host, port=port, user=user, password=password, database=database
        )

    @classmethod
    async def _startup_db(cls):
        logger.info("Init DB")
        db = DBContext()

        await db.connect(cls._get_db_settings())
        Database.init(db)
        migrations = MigrationService(db)
        await migrations.migrate()

    @staticmethod
    async def _seed_data():
        seeders = [
            SettingsSeeder,
            FeatureFlagsSeeder,
            PermissionSeeder,
            RoleSeeder,
            ApiKeySeeder,
            FileHashSeeder,
            ShortUrlSeeder,
        ]
        for seeder in [x() for x in seeders]:
            await seeder.seed()
@ -38,22 +80,68 @@ class Startup:
        Keycloak.init()

    @classmethod
    async def configure(cls):
        Logger.set_level(Environment.get("LOG_LEVEL", str, "info"))
        Environment.set_environment(Environment.get("ENVIRONMENT", str, "production"))
        logger.info(f"Environment: {Environment.get_environment()}")
    async def _startup_broadcast(cls):
        logger.info("Init Broadcast")
        await broadcast.connect()

        app.debug = Environment.get_environment() == "development"

        await Database.startup_db()
    @classmethod
    async def configure_api(cls):
        await cls._startup_db()
        await FileService.clean_files()

        await cls._seed_data()
        cls._startup_keycloak()
        await cls._startup_broadcast()

        client_urls = Environment.get("CLIENT_URLS", str)
        if client_urls is None:
            raise EnvironmentError("CLIENT_URLS not set")
    @staticmethod
    @asynccontextmanager
    async def api_lifespan(app: Starlette):
        await Startup.configure_api()

        origins = client_urls.split(",")
        CORS(app, support_credentials=True, resources={r"/api/*": {"origins": origins}})
        port = Environment.get("PORT", int, 5000)
        logger.info(f"Start API server on port: {port}")
        if Environment.get_environment() == "development":
            logger.info(f"Playground: http://localhost:{port}/ui/playground")

        app.debug = Environment.get_environment() == "development"
        yield
        logger.info("Shutdown API")

    @classmethod
    def init_api(cls):
        logger.info("Init API")
        API.import_routes()
        API.create(
            Starlette(
                lifespan=cls.api_lifespan,
                routes=[
                    *Route.registered_routes,
                    WebSocketRoute(
                        "/graphql",
                        endpoint=GraphQL(
                            schema,
                            websocket_handler=AuthenticatedGraphQLTransportWSHandler(),
                        ),
                    ),
                ],
                middleware=[
                    Middleware(RequestMiddleware),
                    Middleware(LoggingMiddleware),
                    Middleware(
                        CORSMiddleware,
                        allow_origins=API.get_allowed_origins(),
                        allow_methods=["*"],
                        allow_headers=["*"],
                    ),
                ],
                exception_handlers={Exception: API.handle_exception},
            )
        )

    @classmethod
    def configure(cls):
        Logger.set_level(Environment.get("LOG_LEVEL", str, "info"))
        Environment.set_environment(Environment.get("ENVIRONMENT", str, "production"))
        logger.info(f"Environment: {Environment.get_environment()}")

        cls.init_api()
@ -8,17 +8,17 @@
</head>
<body>
<div class="w-full h-full flex flex-col justify-center items-center">
    <div class="flex items-center justify-center">
        <div class="relative w-screen h-screen bg-cover bg-center"
    <div class="flex items-center justify-center w-full h-full">
        <div class="relative w-full h-full bg-cover bg-center"
             style="background-image: url('/static/custom/background.jpg')"></div>

        <div class="absolute w-1/3 h-2/5 rounded-xl p-5 flex flex-col gap-5 justify-center items-center">
        <div class="absolute w-11/12 sm:w-2/3 md:w-1/2 lg:w-1/3 h-2/5 rounded-xl p-5 flex flex-col gap-5 justify-center items-center">
            <div class="absolute inset-0 bg-black opacity-70 rounded-xl"></div>
            <div class="relative logo flex justify-center items-center">
                <img class="h-48" src="/static/custom/logo.png" alt="logo">
                <img class="h-24 sm:h-32 md:h-48" src="/static/custom/logo.png" alt="logo">
            </div>

            <h1 class="relative text-3xl text-white">Redirecting...</h1>
            <h1 class="relative text-xl sm:text-2xl md:text-3xl text-white">Redirecting...</h1>
            <p class="relative text-white">You will be redirected in <span id="countdown">5</span> seconds.</p>
        </div>
    </div>
7 web/.gitignore vendored
@ -1 +1,6 @@
config.*.json
config.*.json

dist/
.angular/
node_modules/
coverage/
26 web/ngx-translate-lint.json Executable file
@ -0,0 +1,26 @@
{
  "rules": {
    "keysOnViews": "error",
    "zombieKeys": "error",
    "misprintKeys": "disable",
    "deepSearch": "enable",
    "emptyKeys": "warning",
    "maxWarning": "0",
    "misprintCoefficient": "0.9",
    "ignoredKeys": [
      "permissions.*",
      "permission_descriptions.*"
    ],
    "ignoredMisprintKeys": [],
    "customRegExpToFindKeys": [
      "(?<=countHeaderTranslation=\")[A-Za-z0-9_.-]+(?=\")",
      "(?<=translationKey:\\s*['\"])[A-Za-z0-9_.-]+(?=['\"])",
      "(?<=(success|info|warn|error)\\(['\"])[A-Za-z0-9_.-]+(?=['\"]\\))",
      "(?<=instant\\(['\"])[A-Za-z0-9_.-]+(?=['\"]\\))",
      "(?<=\\.instant\\(['\"])[A-Za-z0-9_.-]+(?=['\"]\\))|(?<=\\?\\s*['\"])[A-Za-z0-9_.-]+(?=['\"]\\s*:\\s*['\"].*?\\|\\s*translate)|(?<=:\\s*['\"])[A-Za-z0-9_.-]+(?=['\"]\\s*\\|\\s*translate)\n"
    ]
  },
  "fixZombiesKeys": false,
  "project": "./src/app/**/*.{html,ts}",
  "languages": "./src/assets/i18n/*.json"
}
209 web/package-lock.json generated
@ -21,6 +21,7 @@
"apollo-angular": "^7.2.1",
|
||||
"date-fns": "^4.1.0",
|
||||
"dompurify": "^3.2.1",
|
||||
"graphql-ws": "^5.16.2",
|
||||
"keycloak-angular": "^16.1.0",
|
||||
"keycloak-js": "^26.0.5",
|
||||
"marked": "^12.0.2",
|
||||
@ -57,6 +58,7 @@
|
||||
"karma-coverage": "~2.2.0",
|
||||
"karma-jasmine": "~5.1.0",
|
||||
"karma-jasmine-html-reporter": "~2.1.0",
|
||||
"ngx-translate-lint": "^1.22.0",
|
||||
"postcss": "^8.4.49",
|
||||
"prettier": "^3.3.3",
|
||||
"prettier-eslint": "^16.3.0",
|
||||
@ -7377,6 +7379,13 @@
|
||||
"node": ">= 0.6"
|
||||
}
|
||||
},
|
||||
"node_modules/conventional-cli": {
|
||||
"version": "1.2.0",
|
||||
"resolved": "https://registry.npmjs.org/conventional-cli/-/conventional-cli-1.2.0.tgz",
|
||||
"integrity": "sha512-4EGXbt16iIOjTz7ocOInsHfjxL6NxdUNqnHv4XHxXfRc8ClZJcQB5SoxQhT7U2XmZ9y2O/PFFkT8hLwG3n+DJg==",
|
||||
"dev": true,
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/convert-source-map": {
|
||||
"version": "1.9.0",
|
||||
"resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-1.9.0.tgz",
|
||||
@ -9711,6 +9720,13 @@
|
||||
"node": ">= 0.6"
|
||||
}
|
||||
},
|
||||
"node_modules/fs": {
|
||||
"version": "0.0.1-security",
|
||||
"resolved": "https://registry.npmjs.org/fs/-/fs-0.0.1-security.tgz",
|
||||
"integrity": "sha512-3XY9e1pP0CVEUCdj5BmfIZxRBTSDycnbqhIOGec9QYtmVH2fbLpj86CFWkrNOkt/Fvty4KZG5lTglL9j/gJ87w==",
|
||||
"dev": true,
|
||||
"license": "ISC"
|
||||
},
|
||||
"node_modules/fs-extra": {
|
||||
"version": "8.1.0",
|
||||
"resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-8.1.0.tgz",
|
||||
@ -9996,6 +10012,18 @@
|
||||
"graphql": "^0.9.0 || ^0.10.0 || ^0.11.0 || ^0.12.0 || ^0.13.0 || ^14.0.0 || ^15.0.0 || ^16.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/graphql-ws": {
|
||||
"version": "5.16.2",
|
||||
"resolved": "https://registry.npmjs.org/graphql-ws/-/graphql-ws-5.16.2.tgz",
|
||||
"integrity": "sha512-E1uccsZxt/96jH/OwmLPuXMACILs76pKF2i3W861LpKBCYtGIyPQGtWLuBLkND4ox1KHns70e83PS4te50nvPQ==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=10"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"graphql": ">=0.11 <=16"
|
||||
}
|
||||
},
|
||||
"node_modules/hachure-fill": {
|
||||
"version": "0.5.2",
|
||||
"resolved": "https://registry.npmjs.org/hachure-fill/-/hachure-fill-0.5.2.tgz",
|
||||
@ -12835,6 +12863,141 @@
|
||||
"zone.js": "~0.14.0"
|
||||
}
|
||||
},
|
||||
"node_modules/ngx-translate-lint": {
|
||||
"version": "1.22.0",
|
||||
"resolved": "https://registry.npmjs.org/ngx-translate-lint/-/ngx-translate-lint-1.22.0.tgz",
|
||||
"integrity": "sha512-7ECu8xs5OTWvJ6/9JC6CVhxooqRopGm6LO4BW9VhPQNFQJKuE13bipBxtW3jGz9ecyTVJuQ3hIVDG/8uSAkyig==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"chalk": "^2.4.2",
|
||||
"commander": "^2.20.0",
|
||||
"conventional-cli": "^1.2.0",
|
||||
"dir-glob": "^3.0.1",
|
||||
"fs": "0.0.1-security",
|
||||
"glob": "^7.1.4",
|
||||
"lodash": "^4.17.20",
|
||||
"path": "^0.12.7",
|
||||
"rxjs": "^6.5.4",
|
||||
"string-similarity": "^4.0.1",
|
||||
"typescript": "^4.1.2"
|
||||
},
|
||||
"bin": {
|
||||
"ngx-translate-lint": "dist/bin.js"
|
||||
}
|
||||
},
|
||||
"node_modules/ngx-translate-lint/node_modules/ansi-styles": {
|
||||
"version": "3.2.1",
|
||||
"resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz",
|
||||
"integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"color-convert": "^1.9.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=4"
|
||||
}
|
||||
},
|
||||
"node_modules/ngx-translate-lint/node_modules/chalk": {
|
||||
"version": "2.4.2",
|
||||
"resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz",
|
||||
"integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"ansi-styles": "^3.2.1",
|
||||
"escape-string-regexp": "^1.0.5",
|
||||
"supports-color": "^5.3.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=4"
|
||||
}
|
||||
},
|
||||
"node_modules/ngx-translate-lint/node_modules/color-convert": {
|
||||
"version": "1.9.3",
|
||||
"resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz",
|
||||
"integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"color-name": "1.1.3"
|
||||
}
|
||||
},
|
||||
"node_modules/ngx-translate-lint/node_modules/color-name": {
|
||||
"version": "1.1.3",
|
||||
"resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz",
|
||||
"integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==",
|
||||
"dev": true,
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/ngx-translate-lint/node_modules/escape-string-regexp": {
|
||||
"version": "1.0.5",
|
||||
"resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz",
|
||||
"integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=0.8.0"
|
||||
}
|
||||
},
|
||||
"node_modules/ngx-translate-lint/node_modules/has-flag": {
|
||||
"version": "3.0.0",
|
||||
"resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz",
|
||||
"integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=4"
|
||||
}
|
||||
},
|
||||
"node_modules/ngx-translate-lint/node_modules/rxjs": {
|
||||
"version": "6.6.7",
|
||||
"resolved": "https://registry.npmjs.org/rxjs/-/rxjs-6.6.7.tgz",
|
||||
"integrity": "sha512-hTdwr+7yYNIT5n4AMYp85KA6yw2Va0FLa3Rguvbpa4W3I5xynaBZo41cM3XM+4Q6fRMj3sBYIR1VAmZMXYJvRQ==",
|
||||
"dev": true,
|
||||
"license": "Apache-2.0",
|
||||
"dependencies": {
|
||||
"tslib": "^1.9.0"
|
||||
},
|
||||
"engines": {
|
||||
"npm": ">=2.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/ngx-translate-lint/node_modules/supports-color": {
|
||||
"version": "5.5.0",
|
||||
"resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz",
|
||||
"integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"has-flag": "^3.0.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=4"
|
||||
}
|
||||
},
|
||||
"node_modules/ngx-translate-lint/node_modules/tslib": {
|
||||
"version": "1.14.1",
|
||||
"resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz",
|
||||
"integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==",
|
||||
"dev": true,
|
||||
"license": "0BSD"
|
||||
},
|
||||
"node_modules/ngx-translate-lint/node_modules/typescript": {
|
||||
"version": "4.9.5",
|
||||
"resolved": "https://registry.npmjs.org/typescript/-/typescript-4.9.5.tgz",
|
||||
"integrity": "sha512-1FXk9E2Hm+QzZQ7z+McJiHL4NW1F2EzMu9Nq9i3zAaGqibafqYwCVU6WyWAuyQRRzOlxou8xZSyXLEN8oKj24g==",
|
||||
"dev": true,
|
||||
"license": "Apache-2.0",
|
||||
"bin": {
|
||||
"tsc": "bin/tsc",
|
||||
"tsserver": "bin/tsserver"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=4.2.0"
|
||||
}
|
||||
},
|
||||
"node_modules/nice-napi": {
|
||||
"version": "1.0.2",
|
||||
"resolved": "https://registry.npmjs.org/nice-napi/-/nice-napi-1.0.2.tgz",
|
||||
@ -13634,6 +13797,17 @@
|
||||
"node": ">= 0.8"
|
||||
}
|
||||
},
|
||||
"node_modules/path": {
|
||||
"version": "0.12.7",
|
||||
"resolved": "https://registry.npmjs.org/path/-/path-0.12.7.tgz",
|
||||
"integrity": "sha512-aXXC6s+1w7otVF9UletFkFcDsJeO7lSZBPUQhtb5O0xJe8LtYhj/GxldoL09bBj9+ZmE2hNoHqQSFMN5fikh4Q==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"process": "^0.11.1",
|
||||
"util": "^0.10.3"
|
||||
}
|
||||
},
|
||||
"node_modules/path-data-parser": {
|
||||
"version": "0.1.0",
|
||||
"resolved": "https://registry.npmjs.org/path-data-parser/-/path-data-parser-0.1.0.tgz",
|
||||
@ -14467,6 +14641,16 @@
|
||||
"node": "^14.17.0 || ^16.13.0 || >=18.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/process": {
|
||||
"version": "0.11.10",
|
||||
"resolved": "https://registry.npmjs.org/process/-/process-0.11.10.tgz",
|
||||
"integrity": "sha512-cdGef/drWFoydD1JsMzuFf8100nZl+GT+yacc2bEced5f9Rjk4z+WtFUTBu9PhOi9j/jfmBPu0mMEY4wIdAF8A==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">= 0.6.0"
|
||||
}
|
||||
},
|
||||
"node_modules/process-nextick-args": {
|
||||
"version": "2.0.1",
|
||||
"resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz",
|
||||
@ -16087,6 +16271,14 @@
|
||||
"safe-buffer": "~5.2.0"
|
||||
}
|
||||
},
|
||||
"node_modules/string-similarity": {
|
||||
"version": "4.0.4",
|
||||
"resolved": "https://registry.npmjs.org/string-similarity/-/string-similarity-4.0.4.tgz",
|
||||
"integrity": "sha512-/q/8Q4Bl4ZKAPjj8WerIBJWALKkaPRfrvhfF8k/B23i4nzrlRj2/go1m90In7nG/3XDSbOo0+pu6RvCTM9RGMQ==",
|
||||
"deprecated": "Package no longer supported. Contact Support at https://www.npmjs.com/support for more info.",
|
||||
"dev": true,
|
||||
"license": "ISC"
|
||||
},
|
||||
"node_modules/string-width": {
|
||||
"version": "7.2.0",
|
||||
"resolved": "https://registry.npmjs.org/string-width/-/string-width-7.2.0.tgz",
|
||||
@ -17019,6 +17211,16 @@
|
||||
"node": ">=6"
|
||||
}
|
||||
},
|
||||
"node_modules/util": {
|
||||
"version": "0.10.4",
|
||||
"resolved": "https://registry.npmjs.org/util/-/util-0.10.4.tgz",
|
||||
"integrity": "sha512-0Pm9hTQ3se5ll1XihRic3FDIku70C+iHUdT/W926rSgHV5QgXsYbKZN8MSC3tJtSkhuROzvsQjAaFENRXr+19A==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"inherits": "2.0.3"
|
||||
}
|
||||
},
|
||||
"node_modules/util-deprecate": {
|
||||
"version": "1.0.2",
|
||||
"resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz",
|
||||
@ -17026,6 +17228,13 @@
|
||||
"dev": true,
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/util/node_modules/inherits": {
|
||||
"version": "2.0.3",
|
||||
"resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz",
|
||||
"integrity": "sha512-x00IRNXNy63jwGkJmzPigoySHbaqpNuzKbBOmzK+g2OdZpQ9w+sxCN+VSB3ja7IAge2OP2qpfxTjeNcyjmW1uw==",
|
||||
"dev": true,
|
||||
"license": "ISC"
|
||||
},
|
||||
"node_modules/utils-merge": {
|
||||
"version": "1.0.1",
|
||||
"resolved": "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.1.tgz",
|
||||
|
@ -9,6 +9,8 @@
"test": "ng test",
"test:ci": "ng test --browsers=ChromeHeadlessCustom --watch=false --code-coverage",
"lint": "ng lint",
"lint:fix": "ng lint --fix",
"lint:translations": "ngx-translate-lint -c ngx-translate-lint.json",
"prettiefy": "prettier --write \"src/**/*.ts\""
},
"private": true,
@ -26,6 +28,7 @@
"apollo-angular": "^7.2.1",
"date-fns": "^4.1.0",
"dompurify": "^3.2.1",
"graphql-ws": "^5.16.2",
"keycloak-angular": "^16.1.0",
"keycloak-js": "^26.0.5",
"marked": "^12.0.2",
@ -62,6 +65,7 @@
"karma-coverage": "~2.2.0",
"karma-jasmine": "~5.1.0",
"karma-jasmine-html-reporter": "~2.1.0",
"ngx-translate-lint": "^1.22.0",
"postcss": "^8.4.49",
"prettier": "^3.3.3",
"prettier-eslint": "^16.3.0",
@ -3,6 +3,7 @@ import { RouterModule, Routes } from '@angular/router';
import { NotFoundComponent } from 'src/app/components/error/not-found/not-found.component';
import { AuthGuard } from 'src/app/core/guard/auth.guard';
import { HomeComponent } from 'src/app/components/home/home.component';
import { ServerUnavailableComponent } from 'src/app/components/error/server-unavailable/server-unavailable.component';

const routes: Routes = [
{
@ -15,8 +16,12 @@ const routes: Routes = [
import('./modules/admin/admin.module').then(m => m.AdminModule),
canActivate: [AuthGuard],
},
{ path: '404', component: NotFoundComponent },
{ path: '**', redirectTo: '/404', pathMatch: 'full' },
{ path: 'error/404', component: NotFoundComponent },
{ path: 'error/unavailable', component: ServerUnavailableComponent },
{
path: '**',
redirectTo: 'error/404',
},
];

@NgModule({
@ -4,6 +4,7 @@ import { BrowserModule } from '@angular/platform-browser';
|
||||
import { AppRoutingModule } from './app-routing.module';
|
||||
import { AppComponent } from './app.component';
|
||||
import { KeycloakService } from 'keycloak-angular';
|
||||
import { HomeComponent } from './components/home/home.component';
|
||||
import { initializeKeycloak } from './core/init-keycloak';
|
||||
import { HttpClient } from '@angular/common/http';
|
||||
import { environment } from '../environments/environment';
|
||||
@ -20,8 +21,8 @@ import { DialogService } from 'primeng/dynamicdialog';
|
||||
import { BrowserAnimationsModule } from '@angular/platform-browser/animations';
|
||||
import { SidebarComponent } from './components/sidebar/sidebar.component';
|
||||
import { ErrorHandlingService } from 'src/app/service/error-handling.service';
|
||||
import { HomeComponent } from './components/home/home.component';
|
||||
import { SettingsService } from 'src/app/service/settings.service';
|
||||
import { ConfigService } from 'src/app/service/config.service';
|
||||
import { ServerUnavailableComponent } from 'src/app/components/error/server-unavailable/server-unavailable.component';
|
||||
|
||||
if (environment.production) {
|
||||
Logger.enableProductionMode();
|
||||
@ -35,13 +36,13 @@ export function HttpLoaderFactory(http: HttpClient) {
|
||||
|
||||
export function appInitializerFactory(
|
||||
keycloak: KeycloakService,
|
||||
settings: SettingsService
|
||||
config: ConfigService
|
||||
): () => Promise<void> {
|
||||
return (): Promise<void> =>
|
||||
new Promise<void>((resolve, reject) => {
|
||||
settings
|
||||
config
|
||||
.loadSettings()
|
||||
.then(() => initializeKeycloak(keycloak, settings))
|
||||
.then(() => initializeKeycloak(keycloak, config))
|
||||
.then(() => resolve())
|
||||
.catch(error => reject(error));
|
||||
});
|
||||
@ -50,12 +51,13 @@ export function appInitializerFactory(
|
||||
@NgModule({
|
||||
declarations: [
|
||||
AppComponent,
|
||||
HomeComponent,
|
||||
FooterComponent,
|
||||
HeaderComponent,
|
||||
NotFoundComponent,
|
||||
ServerUnavailableComponent,
|
||||
SpinnerComponent,
|
||||
SidebarComponent,
|
||||
HomeComponent,
|
||||
],
|
||||
imports: [
|
||||
BrowserModule,
|
||||
@ -86,7 +88,7 @@ export function appInitializerFactory(
|
||||
provide: APP_INITIALIZER,
|
||||
useFactory: appInitializerFactory,
|
||||
multi: true,
|
||||
deps: [KeycloakService, SettingsService],
|
||||
deps: [KeycloakService, ConfigService],
|
||||
},
|
||||
{
|
||||
provide: ErrorHandler,
|
||||
|
@ -1,8 +1,10 @@
<div class="w-full h-full flex flex-col justify-center items-center">
<div class="bg-2 padding-10 rounded-15">
<h1 class="flex justify-center items-center">
{{ 'error.404' | translate }}
</h1>
<img src="/assets/not_found.gif" alt="" />
<div class="bg2 flex p-10 rounded-xl text-center">
<div class="flex flex-col gap-5">
<h1 class="flex justify-center items-center">
{{ 'error.404' | translate }}
</h1>
<img src="/assets/not_found.gif" alt="" />
</div>
</div>
</div>
@ -1,9 +1,9 @@
|
||||
import { ComponentFixture, TestBed } from "@angular/core/testing";
|
||||
import { ComponentFixture, TestBed } from '@angular/core/testing';
|
||||
|
||||
import { NotFoundComponent } from "src/app/components/error/not-found/not-found.component";
|
||||
import { TranslateModule } from "@ngx-translate/core";
|
||||
import { NotFoundComponent } from 'src/app/components/error/not-found/not-found.component';
|
||||
import { TranslateModule } from '@ngx-translate/core';
|
||||
|
||||
describe("NotFoundComponent", () => {
|
||||
describe('NotFoundComponent', () => {
|
||||
let component: NotFoundComponent;
|
||||
let fixture: ComponentFixture<NotFoundComponent>;
|
||||
|
||||
@ -20,7 +20,7 @@ describe("NotFoundComponent", () => {
|
||||
fixture.detectChanges();
|
||||
});
|
||||
|
||||
it("should create", () => {
|
||||
it('should create', () => {
|
||||
expect(component).toBeTruthy();
|
||||
});
|
||||
});
|
||||
|
@ -1,8 +1,8 @@
import { Component } from "@angular/core";
import { Component } from '@angular/core';

@Component({
selector: "app-not-found",
templateUrl: "./not-found.component.html",
styleUrls: ["./not-found.component.scss"],
selector: 'app-not-found',
templateUrl: './not-found.component.html',
styleUrls: ['./not-found.component.scss'],
})
export class NotFoundComponent {}
@ -0,0 +1,12 @@
<div class="w-full h-full flex flex-col justify-center items-center">
  <div class="bg2 flex p-10 rounded-xl text-center">
    <div class="flex flex-col gap-5">
      <h1 class="flex justify-center items-center">
        {{ 'error.server_unavailable' | translate }}
      </h1>
      <p-button (onClick)="retryConnection()" class="btn btn-primary">
        {{ 'error.retry' | translate }}
      </p-button>
    </div>
  </div>
</div>
@ -0,0 +1,4 @@
h1 {
  color: #a03033;
  font-size: 3rem !important;
}
@ -0,0 +1,26 @@
|
||||
import { ComponentFixture, TestBed } from '@angular/core/testing';
|
||||
|
||||
import { NotFoundComponent } from 'src/app/components/error/not-found/not-found.component';
|
||||
import { TranslateModule } from '@ngx-translate/core';
|
||||
|
||||
describe('NotFoundComponent', () => {
|
||||
let component: NotFoundComponent;
|
||||
let fixture: ComponentFixture<NotFoundComponent>;
|
||||
|
||||
beforeEach(async () => {
|
||||
await TestBed.configureTestingModule({
|
||||
declarations: [NotFoundComponent],
|
||||
imports: [TranslateModule.forRoot()],
|
||||
}).compileComponents();
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
fixture = TestBed.createComponent(NotFoundComponent);
|
||||
component = fixture.componentInstance;
|
||||
fixture.detectChanges();
|
||||
});
|
||||
|
||||
it('should create', () => {
|
||||
expect(component).toBeTruthy();
|
||||
});
|
||||
});
|
@ -0,0 +1,15 @@
import { Component } from '@angular/core';
import { Router } from '@angular/router';

@Component({
  selector: 'app-server-unavailable',
  templateUrl: './server-unavailable.component.html',
  styleUrls: ['./server-unavailable.component.scss'],
})
export class ServerUnavailableComponent {
  constructor(private router: Router) {}

  async retryConnection() {
    await this.router.navigate(['/']);
  }
}
@ -1,23 +1,23 @@
|
||||
import { Component } from "@angular/core";
|
||||
import { SettingsService } from "src/app/service/settings.service";
|
||||
import { Component } from '@angular/core';
|
||||
import { ConfigService } from 'src/app/service/config.service';
|
||||
|
||||
@Component({
|
||||
selector: "app-footer",
|
||||
templateUrl: "./footer.component.html",
|
||||
styleUrls: ["./footer.component.scss"],
|
||||
selector: 'app-footer',
|
||||
templateUrl: './footer.component.html',
|
||||
styleUrls: ['./footer.component.scss'],
|
||||
})
|
||||
export class FooterComponent {
|
||||
constructor(private settings: SettingsService) {}
|
||||
constructor(private config: ConfigService) {}
|
||||
|
||||
get termsUrl(): string {
|
||||
return this.settings.settings.termsUrl;
|
||||
return this.config.settings.termsUrl;
|
||||
}
|
||||
|
||||
get privacyUrl(): string {
|
||||
return this.settings.settings.privacyURL;
|
||||
return this.config.settings.privacyURL;
|
||||
}
|
||||
|
||||
get imprintUrl(): string {
|
||||
return this.settings.settings.imprintURL;
|
||||
return this.config.settings.imprintURL;
|
||||
}
|
||||
}
|
||||
|
@ -8,6 +8,8 @@ import { User } from 'src/app/model/auth/user';
|
||||
import { AuthService } from 'src/app/service/auth.service';
|
||||
import { MenuElement } from 'src/app/model/view/menu-element';
|
||||
import { SidebarService } from 'src/app/service/sidebar.service';
|
||||
import { ConfigService } from 'src/app/service/config.service';
|
||||
import { UserSettingsService } from 'src/app/service/user_settings.service';
|
||||
import { SettingsService } from 'src/app/service/settings.service';
|
||||
|
||||
@Component({
|
||||
@ -28,11 +30,13 @@ export class HeaderComponent implements OnInit, OnDestroy {
|
||||
|
||||
constructor(
|
||||
private translateService: TranslateService,
|
||||
private config: PrimeNGConfig,
|
||||
private ngConfig: PrimeNGConfig,
|
||||
private guiService: GuiService,
|
||||
private auth: AuthService,
|
||||
private sidebarService: SidebarService,
|
||||
private settings: SettingsService
|
||||
private config: ConfigService,
|
||||
private settings: SettingsService,
|
||||
private userSettings: UserSettingsService
|
||||
) {
|
||||
this.guiService.isMobile$
|
||||
.pipe(takeUntil(this.unsubscribe$))
|
||||
@ -46,13 +50,18 @@ export class HeaderComponent implements OnInit, OnDestroy {
|
||||
this.auth.user$.pipe(takeUntil(this.unsubscribe$)).subscribe(async user => {
|
||||
this.user = user;
|
||||
await this.initMenuLists();
|
||||
if (user) {
|
||||
await this.loadTheme();
|
||||
await this.loadLang();
|
||||
}
|
||||
});
|
||||
|
||||
this.themeList = this.settings.settings.themes.map(theme => {
|
||||
this.themeList = this.config.settings.themes.map(theme => {
|
||||
return {
|
||||
label: theme.label,
|
||||
command: () => {
|
||||
command: async () => {
|
||||
this.guiService.theme$.next(theme.name);
|
||||
await this.userSettings.set('theme', theme.name);
|
||||
},
|
||||
};
|
||||
});
|
||||
@ -60,7 +69,6 @@ export class HeaderComponent implements OnInit, OnDestroy {
|
||||
|
||||
async ngOnInit() {
|
||||
await this.initMenuLists();
|
||||
await this.loadLang();
|
||||
}
|
||||
|
||||
ngOnDestroy() {
|
||||
@ -73,24 +81,49 @@ export class HeaderComponent implements OnInit, OnDestroy {
|
||||
}
|
||||
|
||||
async initMenuLists() {
|
||||
await this.initMenuList();
|
||||
await this.initLangMenuList();
|
||||
await this.initUserMenuList();
|
||||
}
|
||||
|
||||
async initMenuList() {
|
||||
this.menu = [
|
||||
{
|
||||
label: 'common.news',
|
||||
routerLink: ['/'],
|
||||
icon: 'pi pi-home',
|
||||
},
|
||||
{
|
||||
label: 'header.menu.about',
|
||||
routerLink: ['/about'],
|
||||
icon: 'pi pi-info',
|
||||
},
|
||||
];
|
||||
|
||||
if (this.auth.user$.value) {
|
||||
this.menu.push({
|
||||
label: 'header.menu.admin',
|
||||
routerLink: ['/admin'],
|
||||
icon: 'pi pi-cog',
|
||||
visible: await this.auth.isAdmin(),
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
async initLangMenuList() {
|
||||
this.langList = [
|
||||
{
|
||||
label: 'English',
|
||||
command: () => {
|
||||
command: async () => {
|
||||
this.translate('en');
|
||||
this.setLang('en');
|
||||
await this.setLang('en');
|
||||
},
|
||||
},
|
||||
{
|
||||
label: 'Deutsch',
|
||||
command: () => {
|
||||
command: async () => {
|
||||
this.translate('de');
|
||||
this.setLang('de');
|
||||
await this.setLang('de');
|
||||
},
|
||||
},
|
||||
];
|
||||
@ -108,10 +141,8 @@ export class HeaderComponent implements OnInit, OnDestroy {
|
||||
},
|
||||
{
|
||||
label: this.translateService.instant('header.logout'),
|
||||
command: () => {
|
||||
this.auth.logout().then(() => {
|
||||
console.log('logout');
|
||||
});
|
||||
command: async () => {
|
||||
await this.auth.logout();
|
||||
},
|
||||
icon: 'pi pi-sign-out',
|
||||
},
|
||||
@ -122,18 +153,36 @@ export class HeaderComponent implements OnInit, OnDestroy {
|
||||
this.translateService.use(lang);
|
||||
this.translateService
|
||||
.get('primeng')
|
||||
.subscribe(res => this.config.setTranslation(res));
|
||||
.subscribe(res => this.ngConfig.setTranslation(res));
|
||||
}
|
||||
|
||||
async loadTheme() {
|
||||
const defaultTheme = (await this.settings.get('default_theme')) ?? 'maxlan';
|
||||
const userTheme = await this.userSettings.get('theme');
|
||||
const theme = userTheme ?? defaultTheme;
|
||||
|
||||
this.guiService.theme$.next(theme);
|
||||
|
||||
if (!userTheme) {
|
||||
await this.userSettings.set('theme', theme);
|
||||
}
|
||||
}
|
||||
|
||||
async loadLang() {
|
||||
const lang = 'en';
|
||||
this.setLang(lang);
|
||||
const defaultLang = (await this.settings.get('default_language')) ?? 'en';
|
||||
const userLang = await this.userSettings.get('language');
|
||||
const lang = userLang ?? defaultLang;
|
||||
|
||||
this.translate(lang);
|
||||
|
||||
if (userLang) {
|
||||
return;
|
||||
}
|
||||
await this.userSettings.set('language', lang);
|
||||
}
|
||||
|
||||
setLang(lang: string) {
|
||||
// this.settings.setSetting(`lang`, lang);
|
||||
console.log('setLang', lang);
|
||||
async setLang(lang: string) {
|
||||
await this.userSettings.set('language', lang);
|
||||
}
|
||||
|
||||
toggleSidebar() {
|
||||
|
@ -1,14 +1,38 @@
|
||||
import { ComponentFixture, TestBed } from "@angular/core/testing";
|
||||
import { ComponentFixture, TestBed } from '@angular/core/testing';
|
||||
|
||||
import { HomeComponent } from "./home.component";
|
||||
import { HomeComponent } from './home.component';
|
||||
import { SharedModule } from 'src/app/modules/shared/shared.module';
|
||||
import { TranslateModule } from '@ngx-translate/core';
|
||||
import { AuthService } from 'src/app/service/auth.service';
|
||||
import { KeycloakService } from 'keycloak-angular';
|
||||
import { ErrorHandlingService } from 'src/app/service/error-handling.service';
|
||||
import { ToastService } from 'src/app/service/toast.service';
|
||||
import { ConfirmationService, MessageService } from 'primeng/api';
|
||||
import { ActivatedRoute } from '@angular/router';
|
||||
import { of } from 'rxjs';
|
||||
|
||||
describe("HomeComponent", () => {
|
||||
describe('HomeComponent', () => {
|
||||
let component: HomeComponent;
|
||||
let fixture: ComponentFixture<HomeComponent>;
|
||||
|
||||
beforeEach(async () => {
|
||||
await TestBed.configureTestingModule({
|
||||
declarations: [HomeComponent],
|
||||
imports: [SharedModule, TranslateModule.forRoot()],
|
||||
providers: [
|
||||
AuthService,
|
||||
KeycloakService,
|
||||
ErrorHandlingService,
|
||||
ToastService,
|
||||
MessageService,
|
||||
ConfirmationService,
|
||||
{
|
||||
provide: ActivatedRoute,
|
||||
useValue: {
|
||||
snapshot: { params: of({}) },
|
||||
},
|
||||
},
|
||||
],
|
||||
}).compileComponents();
|
||||
|
||||
fixture = TestBed.createComponent(HomeComponent);
|
||||
@ -16,7 +40,7 @@ describe("HomeComponent", () => {
|
||||
fixture.detectChanges();
|
||||
});
|
||||
|
||||
it("should create", () => {
|
||||
it('should create', () => {
|
||||
expect(component).toBeTruthy();
|
||||
});
|
||||
});
|
||||
|
@ -1,10 +1,10 @@
|
||||
import { Component } from "@angular/core";
|
||||
import { KeycloakService } from "keycloak-angular";
|
||||
import { Component } from '@angular/core';
|
||||
import { KeycloakService } from 'keycloak-angular';
|
||||
|
||||
@Component({
|
||||
selector: "app-home",
|
||||
templateUrl: "./home.component.html",
|
||||
styleUrl: "./home.component.scss",
|
||||
selector: 'app-home',
|
||||
templateUrl: './home.component.html',
|
||||
styleUrl: './home.component.scss',
|
||||
})
|
||||
export class HomeComponent {
|
||||
constructor(private keycloak: KeycloakService) {
|
||||
|
@ -1,9 +1,9 @@
|
||||
import { Directive, inject } from "@angular/core";
|
||||
import { PageDataService } from "src/app/core/base/page.data.service";
|
||||
import { SpinnerService } from "src/app/service/spinner.service";
|
||||
import { FilterService } from "src/app/service/filter.service";
|
||||
import { FormGroup } from "@angular/forms";
|
||||
import { ActivatedRoute, Router } from "@angular/router";
|
||||
import { Directive, inject } from '@angular/core';
|
||||
import { PageDataService } from 'src/app/core/base/page.data.service';
|
||||
import { SpinnerService } from 'src/app/service/spinner.service';
|
||||
import { FilterService } from 'src/app/service/filter.service';
|
||||
import { FormGroup } from '@angular/forms';
|
||||
import { ActivatedRoute, Router } from '@angular/router';
|
||||
|
||||
@Directive()
|
||||
export abstract class FormPageBase<
|
||||
@ -27,7 +27,7 @@ export abstract class FormPageBase<
|
||||
protected dataService = inject(PageDataService) as S;
|
||||
|
||||
protected constructor() {
|
||||
const id = this.route.snapshot.params["id"];
|
||||
const id = this.route.snapshot.params['id'];
|
||||
this.validateRoute(id);
|
||||
|
||||
this.buildForm();
|
||||
@ -35,18 +35,18 @@ export abstract class FormPageBase<
|
||||
|
||||
validateRoute(id: string | undefined) {
|
||||
const url = this.router.url;
|
||||
if (url.endsWith("create") && id !== undefined) {
|
||||
throw new Error("Route ends with create but id is defined");
|
||||
if (url.endsWith('create') && id !== undefined) {
|
||||
throw new Error('Route ends with create but id is defined');
|
||||
}
|
||||
if (url.endsWith("edit") && (id === undefined || isNaN(+id))) {
|
||||
throw new Error("Route ends with edit but id is not a number");
|
||||
if (url.endsWith('edit') && (id === undefined || isNaN(+id))) {
|
||||
throw new Error('Route ends with edit but id is not a number');
|
||||
}
|
||||
|
||||
this.nodeId = id ? +id : undefined;
|
||||
}
|
||||
|
||||
close() {
|
||||
const backRoute = this.nodeId ? "../.." : "..";
|
||||
const backRoute = this.nodeId ? '../..' : '..';
|
||||
|
||||
this.router.navigate([backRoute], { relativeTo: this.route }).then(() => {
|
||||
this.filterService.onLoad.emit();
|
||||
|
@ -1,21 +1,21 @@
|
||||
import { Directive, inject, OnDestroy } from "@angular/core";
|
||||
import { PageDataService } from "src/app/core/base/page.data.service";
|
||||
import { Subject } from "rxjs";
|
||||
import { Logger } from "src/app/service/logger.service";
|
||||
import { QueryResult } from "src/app/model/entities/query-result";
|
||||
import { SpinnerService } from "src/app/service/spinner.service";
|
||||
import { FilterService } from "src/app/service/filter.service";
|
||||
import { Filter } from "src/app/model/graphql/filter/filter.model";
|
||||
import { Sort } from "src/app/model/graphql/filter/sort.model";
|
||||
import { takeUntil } from "rxjs/operators";
|
||||
import { PaginatorState } from "primeng/paginator";
|
||||
import { PageColumns } from "src/app/core/base/page.columns";
|
||||
import { Directive, inject, OnDestroy } from '@angular/core';
|
||||
import { PageDataService } from 'src/app/core/base/page.data.service';
|
||||
import { Subject } from 'rxjs';
|
||||
import { Logger } from 'src/app/service/logger.service';
|
||||
import { QueryResult } from 'src/app/model/entities/query-result';
|
||||
import { SpinnerService } from 'src/app/service/spinner.service';
|
||||
import { FilterService } from 'src/app/service/filter.service';
|
||||
import { Filter } from 'src/app/model/graphql/filter/filter.model';
|
||||
import { Sort } from 'src/app/model/graphql/filter/sort.model';
|
||||
import { takeUntil } from 'rxjs/operators';
|
||||
import { PaginatorState } from 'primeng/paginator';
|
||||
import { PageColumns } from 'src/app/core/base/page.columns';
|
||||
import {
|
||||
TableColumn,
|
||||
TableRequireAnyPermissions,
|
||||
} from "src/app/modules/shared/components/table/table.model";
|
||||
} from 'src/app/modules/shared/components/table/table.model';
|
||||
|
||||
const logger = new Logger("PageBase");
|
||||
const logger = new Logger('PageBase');
|
||||
|
||||
@Directive()
|
||||
export abstract class PageBase<
|
||||
@ -96,13 +96,13 @@ export abstract class PageBase<
|
||||
}
|
||||
|
||||
columns: TableColumn<T>[] =
|
||||
"get" in this.columnsService ? this.columnsService.get() : [];
|
||||
'get' in this.columnsService ? this.columnsService.get() : [];
|
||||
|
||||
protected unsubscribe$ = new Subject<void>();
|
||||
|
||||
protected constructor(
|
||||
useQueryParams = false,
|
||||
permissions?: TableRequireAnyPermissions,
|
||||
permissions?: TableRequireAnyPermissions
|
||||
) {
|
||||
this.subscribeToFilterService();
|
||||
this.filterService.reset({
|
||||
@ -110,10 +110,17 @@ export abstract class PageBase<
|
||||
withHideDeleted: true,
|
||||
});
|
||||
this.requiredPermissions = permissions ?? {};
|
||||
|
||||
this.dataService
|
||||
.onChange()
|
||||
.pipe(takeUntil(this.unsubscribe$))
|
||||
.subscribe(() => {
|
||||
this.load(true);
|
||||
});
|
||||
}
|
||||
|
||||
ngOnDestroy(): void {
|
||||
logger.trace("Destroy component");
|
||||
logger.trace('Destroy component');
|
||||
this.unsubscribe$.next();
|
||||
this.unsubscribe$.complete();
|
||||
}
|
||||
@ -125,26 +132,26 @@ export abstract class PageBase<
|
||||
|
||||
this.filterService.filter$
|
||||
.pipe(takeUntil(this.unsubscribe$))
|
||||
.subscribe((filter) => {
|
||||
.subscribe(filter => {
|
||||
this._filter = filter;
|
||||
});
|
||||
|
||||
this.filterService.sort$
|
||||
.pipe(takeUntil(this.unsubscribe$))
|
||||
.subscribe((sort) => {
|
||||
.subscribe(sort => {
|
||||
this._sort = sort;
|
||||
});
|
||||
|
||||
this.filterService.skip$
|
||||
.pipe(takeUntil(this.unsubscribe$))
|
||||
.subscribe((skip) => {
|
||||
.subscribe(skip => {
|
||||
this._skip = skip;
|
||||
});
|
||||
|
||||
this.filterService.take$
|
||||
.pipe(takeUntil(this.unsubscribe$))
|
||||
.subscribe((take) => {
|
||||
if (take && Object.prototype.hasOwnProperty.call(take, "showAll")) {
|
||||
.subscribe(take => {
|
||||
if (take && Object.prototype.hasOwnProperty.call(take, 'showAll')) {
|
||||
this._take = 0;
|
||||
return;
|
||||
}
|
||||
@ -163,5 +170,5 @@ export abstract class PageBase<
|
||||
this.filterService.onLoad.emit();
|
||||
}
|
||||
|
||||
abstract load(): void;
|
||||
abstract load(silent?: boolean): void;
|
||||
}
|
||||
|
Some files were not shown because too many files have changed in this diff.