diff --git a/api/src/api_graphql/abc/mutation_abc.py b/api/src/api_graphql/abc/mutation_abc.py index 59c97ff..95e61f4 100644 --- a/api/src/api_graphql/abc/mutation_abc.py +++ b/api/src/api_graphql/abc/mutation_abc.py @@ -1,7 +1,6 @@ from abc import abstractmethod from typing import Type, Union -from api_graphql.abc.input_abc import InputABC from api_graphql.abc.query_abc import QueryABC from api_graphql.field.mutation_field_builder import MutationFieldBuilder from core.database.abc.data_access_object_abc import DataAccessObjectABC @@ -22,6 +21,7 @@ class MutationABC(QueryABC): name: str, mutation_name: str, require_any_permission=None, + require_any=None, public: bool = False, ): """ @@ -29,24 +29,30 @@ class MutationABC(QueryABC): :param str name: GraphQL mutation name :param str mutation_name: Internal (class) mutation name without "Mutation" suffix :param list[Permissions] require_any_permission: List of permissions required to access the field + :param tuple[list[Permissions], list[callable]] require_any: List of permissions and resolvers required to access the field :param bool public: Define if the field can resolve without authentication :return: """ - if require_any_permission is None: - require_any_permission = [] from api_graphql.definition import QUERIES - self.field( + field = ( MutationFieldBuilder(name) .with_resolver( lambda *args, **kwargs: [ x for x in QUERIES if x.name == f"{mutation_name}Mutation" ][0] ) - .with_require_any_permission(require_any_permission) .with_public(public) ) + if require_any_permission is not None: + field.with_require_any_permission(require_any_permission) + + if require_any is not None: + field.with_require_any(*require_any) + + self.field(field) + @staticmethod async def _resolve_assignments( foreign_objs: list[int], diff --git a/api/src/api_graphql/abc/query_abc.py b/api/src/api_graphql/abc/query_abc.py index 4b947b6..c3d2ca6 100644 --- a/api/src/api_graphql/abc/query_abc.py +++ b/api/src/api_graphql/abc/query_abc.py @@ 
-79,6 +79,7 @@ class QueryABC(ObjectType): ): return + resolver_results = [] for x in resolvers: user = await Route.get_authenticated_user_or_api_key_or_default() user_permissions = [] @@ -86,14 +87,16 @@ class QueryABC(ObjectType): user_permissions = await user.permissions if iscoroutinefunction(x): - result = await x( - QueryContext(data, user, user_permissions, *args, **kwargs) + resolver_results.append( + await x(QueryContext(data, user, user_permissions, *args, **kwargs)) ) else: - result = x(QueryContext(data, user, user_permissions, *args, **kwargs)) + resolver_results.append( + x(QueryContext(data, user, user_permissions, *args, **kwargs)) + ) - if not result: - raise AccessDenied() + if not any(resolver_results): + raise AccessDenied() def field( self, @@ -120,7 +123,7 @@ class QueryABC(ObjectType): skip = None if field.default_filter: - filters.append(field.default_filter(*args, **kwargs)) + filters.append(await field.default_filter(*args, **kwargs)) if field.filter_type and "filter" in kwargs: in_filters = kwargs["filter"] @@ -210,7 +213,6 @@ class QueryABC(ObjectType): f"{field.name}: {field.input_type.__name__} {kwargs[field.input_key]}" ) input_obj = field.input_type(kwargs[field.input_key]) - del kwargs[field.input_key] return await resolver_wrapper(input_obj, mutation, info, **kwargs) diff --git a/api/src/api_graphql/field/mutation_field_builder.py b/api/src/api_graphql/field/mutation_field_builder.py index bf33dda..245045f 100644 --- a/api/src/api_graphql/field/mutation_field_builder.py +++ b/api/src/api_graphql/field/mutation_field_builder.py @@ -1,5 +1,5 @@ from asyncio import iscoroutinefunction -from typing import Self, Type +from typing import Self, Type, Union from ariadne.types import Resolver @@ -58,7 +58,11 @@ class MutationFieldBuilder(FieldBuilderABC): self._resolver = resolver_wrapper return self - def with_input(self, input_type: Type[InputABC], input_key: str = None) -> Self: + def with_input( + self, + input_type: 
Type[Union[InputABC, str, int, bool]], + input_key: str = "input", + ) -> Self: self._input_type = input_type self._input_key = input_key return self diff --git a/api/src/api_graphql/filter/group_filter.py b/api/src/api_graphql/filter/group_filter.py index 0ebad4e..f71148d 100644 --- a/api/src/api_graphql/filter/group_filter.py +++ b/api/src/api_graphql/filter/group_filter.py @@ -11,3 +11,6 @@ class GroupFilter(DbModelFilterABC): self.add_field("name", StringFilter) self.add_field("description", StringFilter) + + self.add_field("isNull", bool) + self.add_field("isNotNull", bool) diff --git a/api/src/api_graphql/graphql/base.gql b/api/src/api_graphql/graphql/base.gql index f389136..359cbcf 100644 --- a/api/src/api_graphql/graphql/base.gql +++ b/api/src/api_graphql/graphql/base.gql @@ -39,8 +39,8 @@ input StringFilter { startsWith: String endsWith: String - isNull: String - isNotNull: String + isNull: Boolean + isNotNull: Boolean } input IntFilter { @@ -51,8 +51,8 @@ input IntFilter { less: Int lessOrEqual: Int - isNull: Int - isNotNull: Int + isNull: Boolean + isNotNull: Boolean in: [Int] notIn: [Int] } @@ -61,8 +61,8 @@ input BooleanFilter { equal: Boolean notEqual: Int - isNull: Int - isNotNull: Int + isNull: Boolean + isNotNull: Boolean } input DateFilter { @@ -78,8 +78,8 @@ input DateFilter { contains: String notContains: String - isNull: String - isNotNull: String + isNull: Boolean + isNotNull: Boolean in: [String] notIn: [String] diff --git a/api/src/api_graphql/graphql/group.gql b/api/src/api_graphql/graphql/group.gql index 2979c4d..f7f9315 100644 --- a/api/src/api_graphql/graphql/group.gql +++ b/api/src/api_graphql/graphql/group.gql @@ -61,6 +61,9 @@ input GroupFilter { editor: IntFilter created: DateFilter updated: DateFilter + + isNull: Boolean + isNotNull: Boolean } type GroupMutation { diff --git a/api/src/api_graphql/graphql/mutation.gql b/api/src/api_graphql/graphql/mutation.gql index ad67439..6dff6f7 100644 --- 
a/api/src/api_graphql/graphql/mutation.gql +++ b/api/src/api_graphql/graphql/mutation.gql @@ -11,4 +11,6 @@ type Mutation { setting: SettingMutation userSetting: UserSettingMutation featureFlag: FeatureFlagMutation + + privacy: PrivacyMutation } \ No newline at end of file diff --git a/api/src/api_graphql/graphql/privacy.gql b/api/src/api_graphql/graphql/privacy.gql new file mode 100644 index 0000000..b1f6960 --- /dev/null +++ b/api/src/api_graphql/graphql/privacy.gql @@ -0,0 +1,5 @@ +type PrivacyMutation { + exportData(userId: Int!): String + anonymizeData(userId: Int!): Boolean + deleteData(userId: Int!): Boolean +} \ No newline at end of file diff --git a/api/src/api_graphql/mutation.py b/api/src/api_graphql/mutation.py index d46df60..f996321 100644 --- a/api/src/api_graphql/mutation.py +++ b/api/src/api_graphql/mutation.py @@ -1,4 +1,5 @@ from api_graphql.abc.mutation_abc import MutationABC +from api_graphql.require_any_resolvers import by_user_setup_mutation from service.permission.permissions_enum import Permissions @@ -45,20 +46,26 @@ class Mutation(MutationABC): self.add_mutation_type( "group", "Group", - require_any_permission=[ - Permissions.groups_create, - Permissions.groups_update, - Permissions.groups_delete, - ], + require_any=( + [ + Permissions.groups_create, + Permissions.groups_update, + Permissions.groups_delete, + ], + [by_user_setup_mutation], + ), ) self.add_mutation_type( "shortUrl", "ShortUrl", - require_any_permission=[ - Permissions.short_urls_create, - Permissions.short_urls_update, - Permissions.short_urls_delete, - ], + require_any=( + [ + Permissions.short_urls_create, + Permissions.short_urls_update, + Permissions.short_urls_delete, + ], + [by_user_setup_mutation], + ), ) self.add_mutation_type( @@ -79,3 +86,7 @@ class Mutation(MutationABC): Permissions.administrator, ], ) + self.add_mutation_type( + "privacy", + "Privacy", + ) diff --git a/api/src/api_graphql/mutations/group_mutation.py 
b/api/src/api_graphql/mutations/group_mutation.py index 4372cb1..16addbd 100644 --- a/api/src/api_graphql/mutations/group_mutation.py +++ b/api/src/api_graphql/mutations/group_mutation.py @@ -1,8 +1,13 @@ from typing import Optional +from api.route import Route from api_graphql.abc.mutation_abc import MutationABC +from api_graphql.field.mutation_field_builder import MutationFieldBuilder from api_graphql.input.group_create_input import GroupCreateInput from api_graphql.input.group_update_input import GroupUpdateInput +from api_graphql.require_any_resolvers import by_user_setup_mutation +from core.configuration.feature_flags import FeatureFlags +from core.configuration.feature_flags_enum import FeatureFlagsEnum from core.logger import APILogger from data.schemas.public.group import Group from data.schemas.public.group_dao import groupDao @@ -17,27 +22,30 @@ class GroupMutation(MutationABC): def __init__(self): MutationABC.__init__(self, "Group") - self.mutation( - "create", - self.resolve_create, - GroupCreateInput, - require_any_permission=[Permissions.groups_create], + self.field( + MutationFieldBuilder("create") + .with_resolver(self.resolve_create) + .with_input(GroupCreateInput) + .with_require_any([Permissions.groups_create], [by_user_setup_mutation]) ) - self.mutation( - "update", - self.resolve_update, - GroupUpdateInput, - require_any_permission=[Permissions.groups_update], + + self.field( + MutationFieldBuilder("update") + .with_resolver(self.resolve_update) + .with_input(GroupUpdateInput) + .with_require_any([Permissions.groups_update], [by_user_setup_mutation]) ) - self.mutation( - "delete", - self.resolve_delete, - require_any_permission=[Permissions.groups_delete], + + self.field( + MutationFieldBuilder("delete") + .with_resolver(self.resolve_delete) + .with_require_any([Permissions.groups_delete], [by_user_setup_mutation]) ) - self.mutation( - "restore", - self.resolve_restore, - require_any_permission=[Permissions.groups_delete], + + self.field( + 
MutationFieldBuilder("restore") + .with_resolver(self.resolve_restore) + .with_require_any([Permissions.groups_delete], [by_user_setup_mutation]) ) @staticmethod @@ -72,9 +80,18 @@ class GroupMutation(MutationABC): async def resolve_create(cls, obj: GroupCreateInput, *_): logger.debug(f"create group: {obj.__dict__}") + already_exists = await groupDao.find_by({Group.name: obj.name}) + if len(already_exists) > 0: + raise ValueError(f"Group {obj.name} already exists") + group = Group( 0, obj.name, + ( + (await Route.get_user()).id + if await FeatureFlags.has_feature(FeatureFlagsEnum.per_user_setup) + else None + ), ) gid = await groupDao.create(group) @@ -90,6 +107,12 @@ class GroupMutation(MutationABC): raise ValueError(f"Group with id {obj.id} not found") if obj.name is not None: + already_exists = await groupDao.find_by( + {Group.name: obj.name, Group.id: {"ne": obj.id}} + ) + if len(already_exists) > 0: + raise ValueError(f"Group {obj.name} already exists") + group = await groupDao.get_by_id(obj.id) group.name = obj.name await groupDao.update(group) diff --git a/api/src/api_graphql/mutations/privacy_mutation.py b/api/src/api_graphql/mutations/privacy_mutation.py new file mode 100644 index 0000000..b0ab8a6 --- /dev/null +++ b/api/src/api_graphql/mutations/privacy_mutation.py @@ -0,0 +1,63 @@ +from api.route import Route +from api_graphql.abc.mutation_abc import MutationABC +from api_graphql.field.mutation_field_builder import MutationFieldBuilder +from api_graphql.service.exceptions import UnauthorizedException, AccessDenied +from core.logger import APILogger +from service.data_privacy_service import DataPrivacyService +from service.permission.permissions_enum import Permissions + +logger = APILogger(__name__) + + +class PrivacyMutation(MutationABC): + def __init__(self): + MutationABC.__init__(self, "Privacy") + + self.field( + MutationFieldBuilder("exportData") + .with_resolver(self.resolve_export_data) + .with_input(int, "userId") + .with_public(True) + ) + 
self.field( + MutationFieldBuilder("anonymizeData") + .with_resolver(self.resolve_anonymize_data) + .with_input(int, "userId") + .with_public(True) + ) + self.field( + MutationFieldBuilder("deleteData") + .with_resolver(self.resolve_delete_data) + .with_input(int, "userId") + .with_public(True) + ) + + @staticmethod + async def _permission_check(user_id: int): + user = await Route.get_user() + if user is None: + raise UnauthorizedException() + + if user.id != user_id and not user.has_permission(Permissions.administrator): + raise AccessDenied() + + @staticmethod + async def resolve_export_data(user_id: int, *_): + logger.debug(f"export data for user: {user_id}") + await PrivacyMutation._permission_check(user_id) + + return await DataPrivacyService.export_user_data(user_id) + + @staticmethod + async def resolve_anonymize_data(user_id: int, *_): + logger.debug(f"anonymize data for user: {user_id}") + await PrivacyMutation._permission_check(user_id) + + return await DataPrivacyService.anonymize_user(user_id) + + @staticmethod + async def resolve_delete_data(user_id: int, *_): + logger.debug(f"delete data for user: {user_id}") + await PrivacyMutation._permission_check(user_id) + + return await DataPrivacyService.delete_user_data(user_id) diff --git a/api/src/api_graphql/mutations/setting_mutation.py b/api/src/api_graphql/mutations/setting_mutation.py index 4c499f3..7aa2404 100644 --- a/api/src/api_graphql/mutations/setting_mutation.py +++ b/api/src/api_graphql/mutations/setting_mutation.py @@ -2,7 +2,7 @@ from api_graphql.abc.mutation_abc import MutationABC from api_graphql.input.setting_input import SettingInput from core.logger import APILogger from data.schemas.system.setting import Setting -from data.schemas.system.setting_dao import settingsDao +from data.schemas.system.setting_dao import settingDao from service.permission.permissions_enum import Permissions logger = APILogger(__name__) @@ -22,11 +22,11 @@ class SettingMutation(MutationABC): async def 
resolve_change(obj: SettingInput, *_): logger.debug(f"create new setting: {input}") - setting = await settingsDao.find_single_by({Setting.key: obj.key}) + setting = await settingDao.find_single_by({Setting.key: obj.key}) if setting is None: raise ValueError(f"Setting with key {obj.key} not found") setting.value = obj.value - await settingsDao.update(setting) + await settingDao.update(setting) - return await settingsDao.get_by_id(setting.id) + return await settingDao.get_by_id(setting.id) diff --git a/api/src/api_graphql/mutations/short_url_mutation.py b/api/src/api_graphql/mutations/short_url_mutation.py index d07478d..b471fba 100644 --- a/api/src/api_graphql/mutations/short_url_mutation.py +++ b/api/src/api_graphql/mutations/short_url_mutation.py @@ -1,6 +1,11 @@ +from api.route import Route from api_graphql.abc.mutation_abc import MutationABC +from api_graphql.field.mutation_field_builder import MutationFieldBuilder from api_graphql.input.short_url_create_input import ShortUrlCreateInput from api_graphql.input.short_url_update_input import ShortUrlUpdateInput +from api_graphql.require_any_resolvers import by_user_setup_mutation +from core.configuration.feature_flags import FeatureFlags +from core.configuration.feature_flags_enum import FeatureFlagsEnum from core.logger import APILogger from data.schemas.public.domain_dao import domainDao from data.schemas.public.group_dao import groupDao @@ -17,38 +22,46 @@ class ShortUrlMutation(MutationABC): def __init__(self): MutationABC.__init__(self, "ShortUrl") - self.mutation( - "create", - self.resolve_create, - ShortUrlCreateInput, - require_any_permission=[Permissions.short_urls_create], + self.field( + MutationFieldBuilder("create") + .with_resolver(self.resolve_create) + .with_input(ShortUrlCreateInput) + .with_require_any([Permissions.short_urls_create], [by_user_setup_mutation]) ) - self.mutation( - "update", - self.resolve_update, - ShortUrlUpdateInput, - require_any_permission=[Permissions.short_urls_update], + + 
self.field( + MutationFieldBuilder("update") + .with_resolver(self.resolve_update) + .with_input(ShortUrlUpdateInput) + .with_require_any([Permissions.short_urls_update], [by_user_setup_mutation]) ) - self.mutation( - "delete", - self.resolve_delete, - require_any_permission=[Permissions.short_urls_delete], + + self.field( + MutationFieldBuilder("delete") + .with_resolver(self.resolve_delete) + .with_require_any([Permissions.short_urls_delete], [by_user_setup_mutation]) ) - self.mutation( - "restore", - self.resolve_restore, - require_any_permission=[Permissions.short_urls_delete], + + self.field( + MutationFieldBuilder("restore") + .with_resolver(self.resolve_restore) + .with_require_any([Permissions.short_urls_delete], [by_user_setup_mutation]) ) - self.mutation( - "trackVisit", - self.resolve_track_visit, - require_any_permission=[Permissions.short_urls_update], + + self.field( + MutationFieldBuilder("trackVisit") + .with_resolver(self.resolve_track_visit) + .with_require_any_permission([Permissions.short_urls_update]) ) @staticmethod async def resolve_create(obj: ShortUrlCreateInput, *_): logger.debug(f"create short_url: {obj.__dict__}") + already_exists = await shortUrlDao.find_by({ShortUrl.short_url: obj.short_url}) + if len(already_exists) > 0: + raise ValueError(f"Short URL {obj.short_url} already exists") + short_url = ShortUrl( 0, obj.short_url, @@ -57,6 +70,11 @@ class ShortUrlMutation(MutationABC): obj.group_id, obj.domain_id, obj.loading_screen, + ( + (await Route.get_user()).id + if await FeatureFlags.has_feature(FeatureFlagsEnum.per_user_setup) + else None + ), ) nid = await shortUrlDao.create(short_url) return await shortUrlDao.get_by_id(nid) @@ -68,6 +86,11 @@ class ShortUrlMutation(MutationABC): short_url = await shortUrlDao.get_by_id(obj.id) if obj.short_url is not None: + already_exists = await shortUrlDao.find_by( + {ShortUrl.short_url: obj.short_url} + ) + if len(already_exists) > 0: + raise ValueError(f"Short URL {obj.short_url} already 
exists") short_url.short_url = obj.short_url if obj.target_url is not None: diff --git a/api/src/api_graphql/mutations/user_setting_mutation.py b/api/src/api_graphql/mutations/user_setting_mutation.py index 1fb012d..ff1be20 100644 --- a/api/src/api_graphql/mutations/user_setting_mutation.py +++ b/api/src/api_graphql/mutations/user_setting_mutation.py @@ -1,10 +1,12 @@ from api.route import Route from api_graphql.abc.mutation_abc import MutationABC +from api_graphql.field.mutation_field_builder import MutationFieldBuilder from api_graphql.input.user_setting_input import UserSettingInput from core.logger import APILogger +from core.string import first_to_lower from data.schemas.public.user_setting import UserSetting -from data.schemas.public.user_setting_dao import userSettingsDao -from data.schemas.system.setting_dao import settingsDao +from data.schemas.public.user_setting_dao import userSettingDao +from data.schemas.system.setting_dao import settingDao from service.permission.permissions_enum import Permissions logger = APILogger(__name__) @@ -13,13 +15,20 @@ logger = APILogger(__name__) class UserSettingMutation(MutationABC): def __init__(self): MutationABC.__init__(self, "UserSetting") - self.mutation( - "change", - self.resolve_change, - UserSettingInput, - require_any_permission=[Permissions.settings_update], + self.field( + MutationFieldBuilder("change") + .with_resolver(self.resolve_change) + .with_change_broadcast( + f"{first_to_lower(self.name.replace("Mutation", ""))}Change" + ) + .with_input(UserSettingInput, "input") + .with_require_any([], [self._x]) ) + @staticmethod + async def _x(ctx): + return ctx.data.user_id == (await Route.get_user()).id + @staticmethod async def resolve_change(obj: UserSettingInput, *_): logger.debug(f"create new setting: {input}") @@ -28,13 +37,13 @@ class UserSettingMutation(MutationABC): logger.debug("user not authorized") return None - setting = await userSettingsDao.find_single_by( + setting = await 
userSettingDao.find_single_by( [{UserSetting.user_id: user.id}, {UserSetting.key: obj.key}] ) if setting is None: - await userSettingsDao.create(UserSetting(0, user.id, obj.key, obj.value)) + await userSettingDao.create(UserSetting(0, user.id, obj.key, obj.value)) else: setting.value = obj.value - await userSettingsDao.update(setting) + await userSettingDao.update(setting) - return await userSettingsDao.find_by_key(user, obj.key) + return await userSettingDao.find_by_key(user, obj.key) diff --git a/api/src/api_graphql/queries/group_history_query.py b/api/src/api_graphql/queries/group_history_query.py index 2c062c6..dbfe220 100644 --- a/api/src/api_graphql/queries/group_history_query.py +++ b/api/src/api_graphql/queries/group_history_query.py @@ -1,6 +1,6 @@ from api_graphql.abc.db_history_model_query_abc import DbHistoryModelQueryABC from api_graphql.field.resolver_field_builder import ResolverFieldBuilder -from api_graphql.require_any_resolvers import group_by_assignment_resolver +from api_graphql.require_any_resolvers import by_assignment_resolver from data.schemas.public.group import Group from data.schemas.public.group_dao import groupDao from data.schemas.public.group_role_assignment_dao import groupRoleAssignmentDao @@ -21,7 +21,7 @@ class GroupHistoryQuery(DbHistoryModelQueryABC): [ Permissions.groups, ], - [group_by_assignment_resolver], + [by_assignment_resolver], ) ) self.set_field( diff --git a/api/src/api_graphql/queries/group_query.py b/api/src/api_graphql/queries/group_query.py index 7effacc..8d5b559 100644 --- a/api/src/api_graphql/queries/group_query.py +++ b/api/src/api_graphql/queries/group_query.py @@ -1,6 +1,6 @@ from api_graphql.abc.db_model_query_abc import DbModelQueryABC from api_graphql.field.resolver_field_builder import ResolverFieldBuilder -from api_graphql.require_any_resolvers import group_by_assignment_resolver +from api_graphql.require_any_resolvers import by_assignment_resolver from data.schemas.public.group import Group from 
data.schemas.public.group_dao import groupDao from data.schemas.public.group_role_assignment_dao import groupRoleAssignmentDao @@ -21,7 +21,7 @@ class GroupQuery(DbModelQueryABC): [ Permissions.groups, ], - [group_by_assignment_resolver], + [by_assignment_resolver], ) ) self.set_field("roles", self._get_roles) diff --git a/api/src/api_graphql/query.py b/api/src/api_graphql/query.py index 6e999f6..2e80df3 100644 --- a/api/src/api_graphql/query.py +++ b/api/src/api_graphql/query.py @@ -11,7 +11,12 @@ from api_graphql.filter.permission_filter import PermissionFilter from api_graphql.filter.role_filter import RoleFilter from api_graphql.filter.short_url_filter import ShortUrlFilter from api_graphql.filter.user_filter import UserFilter -from api_graphql.require_any_resolvers import group_by_assignment_resolver +from api_graphql.require_any_resolvers import ( + by_assignment_resolver, + by_user_setup_resolver, +) +from core.configuration.feature_flags import FeatureFlags +from core.configuration.feature_flags_enum import FeatureFlagsEnum from data.schemas.administration.api_key import ApiKey from data.schemas.administration.api_key_dao import apiKeyDao from data.schemas.administration.user import User @@ -27,9 +32,9 @@ from data.schemas.public.group_dao import groupDao from data.schemas.public.short_url import ShortUrl from data.schemas.public.short_url_dao import shortUrlDao from data.schemas.public.user_setting import UserSetting -from data.schemas.public.user_setting_dao import userSettingsDao +from data.schemas.public.user_setting_dao import userSettingDao from data.schemas.system.feature_flag_dao import featureFlagDao -from data.schemas.system.setting_dao import settingsDao +from data.schemas.system.setting_dao import settingDao from service.permission.permissions_enum import Permissions @@ -109,7 +114,8 @@ class Query(QueryABC): ] ) ) - self.field( + + group_field = ( DaoFieldBuilder("groups") .with_dao(groupDao) .with_filter(GroupFilter) @@ -120,17 +126,35 @@ 
class Query(QueryABC): Permissions.short_urls_create, Permissions.short_urls_update, ], - [group_by_assignment_resolver], + [by_assignment_resolver, by_user_setup_resolver], ) ) - self.field( + + if FeatureFlags.get_default(FeatureFlagsEnum.per_user_setup): + group_field = group_field.with_default_filter( + self._resolve_default_user_filter + ) + + self.field(group_field) + + short_url_field = ( DaoFieldBuilder("shortUrls") .with_dao(shortUrlDao) .with_filter(ShortUrlFilter) .with_sort(Sort[ShortUrl]) - .with_require_any([Permissions.short_urls], [group_by_assignment_resolver]) + .with_require_any( + [Permissions.short_urls], + [by_assignment_resolver, by_user_setup_resolver], + ) ) + if FeatureFlags.get_default(FeatureFlagsEnum.per_user_setup): + short_url_field = short_url_field.with_default_filter( + self._resolve_default_user_filter + ) + + self.field(short_url_field) + self.field( ResolverFieldBuilder("settings") .with_resolver(self._resolve_settings) @@ -182,8 +206,8 @@ class Query(QueryABC): @staticmethod async def _resolve_settings(*args, **kwargs): if "key" in kwargs: - return [await settingsDao.find_by_key(kwargs["key"])] - return await settingsDao.get_all() + return [await settingDao.find_by_key(kwargs["key"])] + return await settingDao.get_all() @staticmethod async def _resolve_user_settings(*args, **kwargs): @@ -192,13 +216,17 @@ class Query(QueryABC): return None if "key" in kwargs: - return await userSettingsDao.find_by( + return await userSettingDao.find_by( [{UserSetting.user_id: user.id}, {UserSetting.key: kwargs["key"]}] ) - return await userSettingsDao.find_by({UserSetting.user_id: user.id}) + return await userSettingDao.find_by({UserSetting.user_id: user.id}) @staticmethod async def _resolve_feature_flags(*args, **kwargs): if "key" in kwargs: return [await featureFlagDao.find_by_key(kwargs["key"])] return await featureFlagDao.get_all() + + @staticmethod + async def _resolve_default_user_filter(*args, **kwargs) -> dict: + return {"user": {"id": 
{"equal": (await Route.get_user()).id}}} diff --git a/api/src/api_graphql/require_any_resolvers.py b/api/src/api_graphql/require_any_resolvers.py index 0d31a90..8eea17f 100644 --- a/api/src/api_graphql/require_any_resolvers.py +++ b/api/src/api_graphql/require_any_resolvers.py @@ -1,10 +1,12 @@ from api_graphql.service.collection_result import CollectionResult from api_graphql.service.query_context import QueryContext +from core.configuration.feature_flags import FeatureFlags +from core.configuration.feature_flags_enum import FeatureFlagsEnum from data.schemas.public.group_dao import groupDao from service.permission.permissions_enum import Permissions -async def group_by_assignment_resolver(ctx: QueryContext) -> bool: +async def by_assignment_resolver(ctx: QueryContext) -> bool: if not isinstance(ctx.data, CollectionResult): return False @@ -19,12 +21,23 @@ async def group_by_assignment_resolver(ctx: QueryContext) -> bool: and all(r.id in role_ids for r in roles) ] - ctx.data.nodes = [ - node + return all( + (await node.group) is not None and (await node.group).id in filtered_groups for node in ctx.data.nodes - if (await node.group) is not None - and (await node.group).id in filtered_groups - ] - return True + ) - return True + return False + + +async def by_user_setup_resolver(ctx: QueryContext) -> bool: + if not isinstance(ctx.data, CollectionResult): + return False + + if not FeatureFlags.has_feature(FeatureFlagsEnum.per_user_setup): + return False + + return all(x.user_id == ctx.user.id for x in ctx.data.nodes) + + +async def by_user_setup_mutation(ctx: QueryContext) -> bool: + return await FeatureFlags.has_feature(FeatureFlagsEnum.per_user_setup) diff --git a/api/src/api_graphql/service/query_context.py b/api/src/api_graphql/service/query_context.py index 40f85da..003d73b 100644 --- a/api/src/api_graphql/service/query_context.py +++ b/api/src/api_graphql/service/query_context.py @@ -15,6 +15,7 @@ class QueryContext: data: Any, user: Optional[User], 
user_permissions: Optional[list[Permissions]], + is_mutation: bool = False, *args, **kwargs ): @@ -31,11 +32,17 @@ class QueryContext: self._resolve_info = arg continue - self._filter = kwargs.get("filter", {}) + self._filter = kwargs.get("filters", {}) self._sort = kwargs.get("sort", {}) self._skip = get_value(kwargs, "skip", int) self._take = get_value(kwargs, "take", int) + self._input = kwargs.get("input", None) + self._args = args + self._kwargs = kwargs + + self._is_mutation = is_mutation + @property def data(self): return self._data @@ -64,5 +71,21 @@ class QueryContext: def take(self) -> Optional[int]: return self._take + @property + def input(self) -> Optional[Any]: + return self._input + + @property + def args(self) -> tuple: + return self._args + + @property + def kwargs(self) -> dict: + return self._kwargs + + @property + def is_mutation(self) -> bool: + return self._is_mutation + def has_permission(self, permission: Permissions) -> bool: return permission.value in self._user_permissions diff --git a/api/src/core/configuration/feature_flags.py b/api/src/core/configuration/feature_flags.py index 46b83d0..8eaa333 100644 --- a/api/src/core/configuration/feature_flags.py +++ b/api/src/core/configuration/feature_flags.py @@ -1,20 +1,38 @@ +from typing import Union + from core.configuration.feature_flags_enum import FeatureFlagsEnum +from core.environment import Environment from data.schemas.system.feature_flag_dao import featureFlagDao class FeatureFlags: _flags = { FeatureFlagsEnum.version_endpoint.value: True, # 15.01.2025 + FeatureFlagsEnum.technical_demo_banner.value: False, # 18.04.2025 + FeatureFlagsEnum.per_user_setup.value: Environment.get( + "PER_USER_SETUP", bool, False + ), # 18.04.2025 } + _overwrite_flags = [ + FeatureFlagsEnum.per_user_setup.value, + ] + + @staticmethod + def overwrite_flag(key: str): + return key in FeatureFlags._overwrite_flags + @staticmethod def get_default(key: FeatureFlagsEnum) -> bool: return 
FeatureFlags._flags[key.value] @staticmethod - async def has_feature(key: FeatureFlagsEnum) -> bool: - value = await featureFlagDao.find_by_key(key.value) - if value is None: - return False + async def has_feature(key: Union[str, FeatureFlagsEnum]) -> bool: + key_value = key.value if isinstance(key, FeatureFlagsEnum) else key - return value.value + value = await featureFlagDao.find_by_key(key_value) + return ( + value.value + if value + else FeatureFlags.get_default(FeatureFlagsEnum(key_value)) + ) diff --git a/api/src/core/configuration/feature_flags_enum.py b/api/src/core/configuration/feature_flags_enum.py index c5f48c7..c5af428 100644 --- a/api/src/core/configuration/feature_flags_enum.py +++ b/api/src/core/configuration/feature_flags_enum.py @@ -2,5 +2,6 @@ from enum import Enum class FeatureFlagsEnum(Enum): - # modules version_endpoint = "VersionEndpoint" + technical_demo_banner = "TechnicalDemoBanner" + per_user_setup = "PerUserSetup" diff --git a/api/src/core/database/abc/data_access_object_abc.py b/api/src/core/database/abc/data_access_object_abc.py index e83b684..a3003db 100644 --- a/api/src/core/database/abc/data_access_object_abc.py +++ b/api/src/core/database/abc/data_access_object_abc.py @@ -2,16 +2,17 @@ import datetime from abc import ABC, abstractmethod from enum import Enum from types import NoneType -from typing import Generic, Optional, Union, TypeVar, Any, Type +from typing import Generic, Optional, Union, Type, List, Any, TypeVar from core.const import DATETIME_FORMAT from core.database.abc.db_model_abc import DbModelABC from core.database.database import Database from core.database.external_data_temp_table_builder import ExternalDataTempTableBuilder +from core.database.sql_select_builder import SQLSelectBuilder from core.get_value import get_value from core.logger import DBLogger from core.string import camel_to_snake -from core.typing import T, Attribute, AttributeFilters, AttributeSorts, Id +from core.typing import AttributeFilters, 
AttributeSorts, Id, Attribute, T T_DBM = TypeVar("T_DBM", bound=DbModelABC) @@ -25,13 +26,16 @@ class DataAccessObjectABC(ABC, Database, Generic[T_DBM]): self._model_type = model_type self._table_name = table_name + self._logger = DBLogger(source) + self._model_type = model_type + self._table_name = table_name + self._default_filter_condition = None self.__attributes: dict[str, type] = {} - self.__joins: dict[str, str] = {} self.__db_names: dict[str, str] = {} - self.__foreign_tables: dict[str, str] = {} + self.__foreign_tables: dict[str, tuple[str, str]] = {} self.__foreign_table_keys: dict[str, str] = {} self.__date_attributes: set[str] = set() @@ -44,6 +48,14 @@ class DataAccessObjectABC(ABC, Database, Generic[T_DBM]): def table_name(self) -> str: return self._table_name + def has_attribute(self, attr_name: Attribute) -> bool: + """ + Check if the attribute exists in the DAO + :param Attribute attr_name: Name of the attribute + :return: True if the attribute exists, False otherwise + """ + return attr_name in self.__attributes + def attribute( self, attr_name: Attribute, @@ -122,10 +134,10 @@ class DataAccessObjectABC(ABC, Database, Generic[T_DBM]): if table_name == self._table_name: return - self.__joins[foreign_attr] = ( - f"LEFT JOIN {table_name} ON {table_name}.{primary_attr} = {self._table_name}.{foreign_attr}" + self.__foreign_tables[attr] = ( + table_name, + f"{table_name}.{primary_attr} = {self._table_name}.{foreign_attr}", ) - self.__foreign_tables[attr] = table_name def use_external_fields(self, builder: ExternalDataTempTableBuilder): self._external_fields[builder.table_name] = builder @@ -150,22 +162,36 @@ class DataAccessObjectABC(ABC, Database, Generic[T_DBM]): return self._model_type(**value_map) + def to_dict(self, obj: T_DBM) -> dict: + """ + Convert an object to a dictionary + :param T_DBM obj: Object to convert + :return: + """ + value_map: dict[str, Any] = {} + + for attr_name, attr_type in self.__attributes.items(): + value = getattr(obj, 
attr_name) + if isinstance(value, datetime.datetime): + value = value.strftime(DATETIME_FORMAT) + elif isinstance(value, Enum): + value = value.value + + value_map[attr_name] = value + + for ex_fname in self._external_fields: + ex_field = self._external_fields[ex_fname] + for ex_attr in ex_field.fields: + if ex_attr == self.__primary_key: + continue + + value_map[ex_attr] = getattr(obj, ex_attr, None) + + return value_map + async def count(self, filters: AttributeFilters = None) -> int: - query = f"SELECT COUNT(*) FROM {self._table_name}" - for join in self.__joins: - query += f" {self.__joins[join]}" - - if filters is not None and (not isinstance(filters, list) or len(filters) > 0): - conditions, external_table_deps = await self._build_conditions(filters) - query = await self._handle_query_external_temp_tables( - query, external_table_deps, ignore_fields=True - ) - query += f" WHERE {conditions};" - - result = await self._db.select_map(query) - if len(result) == 0: - return 0 - return result[0]["count"] + result = await self._prepare_query(filters=filters, for_count=True) + return result[0]["count"] if result else 0 async def get_history( self, @@ -173,56 +199,66 @@ class DataAccessObjectABC(ABC, Database, Generic[T_DBM]): by_key: str = None, when: datetime = None, until: datetime = None, - without_deleted=False, + without_deleted: bool = False, ) -> list[T_DBM]: - query = f"SELECT {self._table_name}_history.* FROM {self._table_name}_history" - for join in self.__joins: - query += f" {self.__joins[join].replace(self._table_name, f'{self._table_name}_history')}" + """ + Retrieve the history of an entry from the history table. + :param entry_id: The ID of the entry to retrieve history for. + :param by_key: The key to filter by (default is the primary key). + :param when: A specific timestamp to filter the history. + :param until: A timestamp to filter history entries up to a certain point. + :param without_deleted: Exclude deleted entries if True. 
+ :return: A list of historical entries as objects. + """ + history_table = f"{self._table_name}_history" + builder = SQLSelectBuilder(history_table, self.__primary_key) - query += f" WHERE {f'{self._table_name}_history.{self.__primary_key}' if by_key is None else f'{self._table_name}_history.{by_key}'} = {entry_id}" + builder.with_attribute("*") + builder.with_value_condition( + f"{history_table}.{by_key or self.__primary_key}", "=", str(entry_id) + ) - if self._default_filter_condition is not None: - query += f" AND {self._default_filter_condition}" + if self._default_filter_condition: + builder.with_condition(self._default_filter_condition, "") if without_deleted: - query += f" AND {self._table_name}_history.deleted = false" + builder.with_value_condition(f"{history_table}.deleted", "=", "false") - if when is not None: - query += f" AND {self._attr_from_date_to_char(f'{self._table_name}_history.updated')} = '{when.strftime(DATETIME_FORMAT)}'" + if when: + builder.with_value_condition( + self._attr_from_date_to_char(f"{history_table}.updated"), + "=", + f"'{when.strftime(DATETIME_FORMAT)}'", + ) - if until is not None: - query += f" AND {self._attr_from_date_to_char(f'{self._table_name}_history.updated')} <= '{until.strftime(DATETIME_FORMAT)}'" + if until: + builder.with_value_condition( + self._attr_from_date_to_char(f"{history_table}.updated"), + "<=", + f"'{until.strftime(DATETIME_FORMAT)}'", + ) - query += f" ORDER BY {self._table_name}_history.updated DESC;" + builder.with_order_by(f"{history_table}.updated", "DESC") + query = await builder.build() result = await self._db.select_map(query) - if result is None: - return [] - return [self.to_object(x) for x in result] + return [self.to_object(x) for x in result] if result else [] - async def get_all(self) -> list[T_DBM]: - result = await self._db.select_map( - f"SELECT * FROM {self._table_name}{f" WHERE {self._default_filter_condition}" if self._default_filter_condition is not None else ''} ORDER BY 
{self.__primary_key};" + async def get_all(self) -> List[T_DBM]: + result = await self._prepare_query(sorts=[{self.__primary_key: "asc"}]) + return [self.to_object(x) for x in result] if result else [] + + async def get_by_id(self, id: Union[int, str]) -> Optional[T_DBM]: + result = await self._prepare_query( + filters=[{self.__primary_key: id}], sorts=[{self.__primary_key: "asc"}] ) - if result is None: - return [] - - return [self.to_object(x) for x in result] - - async def get_by_id(self, id: Union[int, str]) -> T_DBM: - result = await self._db.select_map( - f"SELECT * FROM {self._table_name} WHERE {f"{self._default_filter_condition} AND " if self._default_filter_condition is not None else ''} {self.__primary_key} = {f"'{id}'" if isinstance(id, str) else id}" - ) - return self.to_object(result[0]) + return self.to_object(result[0]) if result else None async def find_by_id(self, id: Union[int, str]) -> Optional[T_DBM]: - result = await self._db.select_map( - f"SELECT * FROM {self._table_name} WHERE {f"{self._default_filter_condition} AND " if self._default_filter_condition is not None else ''} {self.__primary_key} = {f"'{id}'" if isinstance(id, str) else id}" + result = await self._prepare_query( + filters=[{self.__primary_key: id}], sorts=[{self.__primary_key: "asc"}] ) - if not result or len(result) == 0: - return None - - return self.to_object(result[0]) + return self.to_object(result[0]) if result else None async def get_by( self, @@ -231,23 +267,10 @@ class DataAccessObjectABC(ABC, Database, Generic[T_DBM]): take: int = None, skip: int = None, ) -> list[T_DBM]: - """ - Get all objects by the given filters - :param AttributeFilter filters: - :param AttributeSorts sorts: - :param int skip: - :param int take: - :return: List of objects - :rtype: list[T_DBM] - :raises ValueError: When no result is found - """ - result = await self._db.select_map( - await self._build_conditional_query(filters, sorts, take, skip) - ) + result = await self._prepare_query(filters, 
sorts, take, skip) if not result or len(result) == 0: raise ValueError("No result found") - - return [self.to_object(x) for x in result] + return [self.to_object(x) for x in result] if result else [] async def get_single_by( self, @@ -256,23 +279,12 @@ class DataAccessObjectABC(ABC, Database, Generic[T_DBM]): take: int = None, skip: int = None, ) -> T_DBM: - """ - Get a single object by the given filters - :param AttributeFilter filters: - :param AttributeSorts sorts: - :param int skip: - :param int take: - :return: Single object - :rtype: T_DBM - :raises ValueError: When no result is found - :raises ValueError: When more than one result is found - """ - result = await self.get_by(filters, sorts, take, skip) + result = await self._prepare_query(filters, sorts, take, skip) if not result: raise ValueError("No result found") if len(result) > 1: raise ValueError("More than one result found") - return result[0] + return self.to_object(result[0]) async def find_by( self, @@ -280,23 +292,9 @@ class DataAccessObjectABC(ABC, Database, Generic[T_DBM]): sorts: AttributeSorts = None, take: int = None, skip: int = None, - ) -> list[Optional[T_DBM]]: - """ - Find all objects by the given filters - :param AttributeFilter filters: - :param AttributeSorts sorts: - :param int skip: - :param int take: - :return: List of objects - :rtype: list[Optional[T_DBM]] - """ - result = await self._db.select_map( - await self._build_conditional_query(filters, sorts, take, skip) - ) - if not result or len(result) == 0: - return [] - - return [self.to_object(x) for x in result] + ) -> list[T_DBM]: + result = await self._prepare_query(filters, sorts, take, skip) + return [self.to_object(x) for x in result] if result else [] async def find_single_by( self, @@ -305,22 +303,10 @@ class DataAccessObjectABC(ABC, Database, Generic[T_DBM]): take: int = None, skip: int = None, ) -> Optional[T_DBM]: - """ - Find a single object by the given filters - :param AttributeFilter filters: - :param AttributeSorts 
sorts: - :param int skip: - :param int take: - :return: Single object - :rtype: Optional[T_DBM] - :raises ValueError: When more than one result is found - """ - result = await self.find_by(filters, sorts, take, skip) - if not result or len(result) == 0: - return None + result = await self._prepare_query(filters, sorts, take, skip) if len(result) > 1: raise ValueError("More than one result found") - return result[0] + return self.to_object(result[0]) if result else None async def touch(self, obj: T_DBM): """ @@ -489,15 +475,289 @@ class DataAccessObjectABC(ABC, Database, Generic[T_DBM]): await self._db.execute(query) - def _get_primary_key_value_sql(self, obj: T_DBM) -> str: - value = getattr(obj, self.__primary_key) - if isinstance(value, str): - return f"'{value}'" + async def _prepare_query( + self, + filters: AttributeFilters = None, + sorts: AttributeSorts = None, + take: int = None, + skip: int = None, + for_count=False, + ) -> list[dict]: + """ + Prepares and executes a query using the SQLBuilder with the given parameters. + :param filters: Conditions to filter the query. + :param sorts: Sorting attributes and directions. + :param take: Limit the number of results. + :param skip: Offset the results. + :return: Query result as a list of dictionaries. 
+ """ + external_table_deps = [] + builder = SQLSelectBuilder(self._table_name, self.__primary_key) - return value + for temp in self._external_fields: + builder.with_temp_table(self._external_fields[temp]) + + if for_count: + builder.with_attribute("COUNT(*)", ignore_table_name=True) + else: + builder.with_attribute("*") + + for attr in self.__foreign_tables: + table, join_condition = self.__foreign_tables[attr] + builder.with_left_join(table, join_condition) + + if filters: + await self._build_conditions(builder, filters, external_table_deps) + + if sorts: + self._build_sorts(builder, sorts, external_table_deps) + + if take: + builder.with_limit(take) + + if skip: + builder.with_offset(skip) + + for external_table in external_table_deps: + builder.use_temp_table(external_table) + + query = await builder.build() + return await self._db.select_map(query) + + async def _build_conditions( + self, + builder: SQLSelectBuilder, + filters: AttributeFilters, + external_table_deps: list[str], + ): + """ + Builds SQL conditions from GraphQL-like filters and adds them to the SQLBuilder. + :param builder: The SQLBuilder instance to add conditions to. + :param filters: GraphQL-like filter structure. + :param external_table_deps: List to store external table dependencies. 
+ """ + if not isinstance(filters, list): + filters = [filters] + + for filter_group in filters: + sql_conditions = self._graphql_to_sql_conditions( + filter_group, external_table_deps + ) + for attr, operator, value in sql_conditions: + if attr in self.__foreign_table_keys: + attr = self.__foreign_table_keys[attr] + + external_table = self._get_external_field_key(attr) + if external_table: + external_table_deps.append(external_table) + + if operator == "fuzzy": + builder.with_levenshtein_condition(attr) + elif operator in [ + "IS NULL", + "IS NOT NULL", + ]: # operator without value + builder.with_condition(attr, operator) + else: + builder.with_value_condition( + attr, operator, self._get_value_sql(value) + ) + + def _graphql_to_sql_conditions( + self, graphql_structure: dict, external_table_deps: list[str] + ) -> list[tuple[str, str, Any]]: + """ + Converts a GraphQL-like structure to SQL conditions. + :param graphql_structure: The GraphQL-like filter structure. + :param external_table_deps: List to track external table dependencies. + :return: A list of tuples (attribute, operator, value). 
+ """ + + operators = { + "equal": "=", + "notEqual": "!=", + "greater": ">", + "greaterOrEqual": ">=", + "less": "<", + "lessOrEqual": "<=", + "isNull": "IS NULL", + "isNotNull": "IS NOT NULL", + "contains": "LIKE", # Special handling in _graphql_to_sql_conditions + "notContains": "NOT LIKE", # Special handling in _graphql_to_sql_conditions + "startsWith": "LIKE", # Special handling in _graphql_to_sql_conditions + "endsWith": "LIKE", # Special handling in _graphql_to_sql_conditions + "in": "IN", + "notIn": "NOT IN", + } + conditions = [] + + def parse_node(node, parent_key=None): + if not isinstance(node, dict): + return + + if isinstance(node, list): + conditions.append((parent_key, "IN", node)) + return + + for key, value in node.items(): + if isinstance(key, property): + key = key.fget.__name__ + + external_fields_table_name_by_parent = self._get_external_field_key( + parent_key + ) + external_fields_table_name = self._get_external_field_key(key) + external_field = ( + external_fields_table_name + if external_fields_table_name_by_parent is None + else external_fields_table_name_by_parent + ) + + if key == "fuzzy": + self._handle_fuzzy_filter_conditions( + conditions, external_table_deps, value + ) + + elif external_field is not None: + external_table_deps.append(external_field) + parse_node(value, f"{external_field}.{key}") + elif parent_key in self.__foreign_table_keys: + parse_node({key: value}, self.__foreign_table_keys[parent_key]) + elif key in operators: + operator = operators[key] + if key == "contains" or key == "notContains": + value = f"%{value}%" + elif key == "in" or key == "notIn": + value = value + elif key == "startsWith": + value = f"{value}%" + elif key == "endsWith": + value = f"%{value}" + elif (key == "equal" or key == "notEqual") and value is None: + operator = operators["isNull"] + + conditions.append((parent_key, operator, value)) + + elif isinstance(value, dict): + if key in self.__foreign_table_keys: + parse_node(value, key) + elif key 
in self.__db_names: + parse_node(value, self.__db_names[key]) + else: + parse_node(value, key) + elif value is None: + conditions.append((self.__db_names[key], "IS NULL", value)) + else: + conditions.append((self.__db_names[key], "=", value)) + + parse_node(graphql_structure) + return conditions + + def _handle_fuzzy_filter_conditions( + self, conditions, external_field_table_deps, sub_values + ): + # Extract fuzzy filter parameters + fuzzy_fields = get_value(sub_values, "fields", list[str]) + fuzzy_term = get_value(sub_values, "term", str) + fuzzy_threshold = get_value(sub_values, "threshold", int, 5) + + if not fuzzy_fields or not fuzzy_term: + raise ValueError("Fuzzy filter must include 'fields' and 'term'.") + + fuzzy_fields_db_names = [] + + # Map fields to their database names + for fuzzy_field in fuzzy_fields: + external_fields_table_name = self._get_external_field_key(fuzzy_field) + if external_fields_table_name is not None: + external_fields_table = self._external_fields[ + external_fields_table_name + ] + fuzzy_fields_db_names.append( + f"{external_fields_table.table_name}.{fuzzy_field}" + ) + external_field_table_deps.append(external_fields_table.table_name) + elif fuzzy_field in self.__db_names: + fuzzy_fields_db_names.append( + f"{self._table_name}.{self.__db_names[fuzzy_field]}" + ) + elif fuzzy_field in self.__foreign_tables: + fuzzy_fields_db_names.append( + f"{self._table_name}.{self.__foreign_table_keys[fuzzy_field]}" + ) + else: + fuzzy_fields_db_names.append( + self.__db_names[camel_to_snake(fuzzy_field)][0] + ) + + # Build fuzzy conditions for each field + fuzzy_conditions = self._build_fuzzy_conditions( + fuzzy_fields_db_names, fuzzy_term, fuzzy_threshold + ) + + # Combine conditions with OR and append to the main conditions + conditions.append((f"({' OR '.join(fuzzy_conditions)})", "fuzzy", None)) @staticmethod - def _get_value_sql(value: Any) -> str: + def _build_fuzzy_conditions( + fields: list[str], term: str, threshold: int = 10 + ) -> 
list[str]: + conditions = [] + for field in fields: + conditions.append( + f"levenshtein({field}::TEXT, '{term}') <= {threshold}" + ) # Adjust the threshold as needed + + return conditions + + def _get_external_field_key(self, field_name: str) -> Optional[str]: + """ + Returns the key to get the external field if found, otherwise None. + :param str field_name: The name of the field to search for. + :return: The key if found, otherwise None. + :rtype: Optional[str] + """ + for key, builder in self._external_fields.items(): + if field_name in builder.fields and field_name not in self.__db_names: + return key + return None + + def _build_sorts( + self, + builder: SQLSelectBuilder, + sorts: AttributeSorts, + external_table_deps: list[str], + ): + """ + Resolves complex sorting structures into SQL-compatible sorting conditions. + Tracks external table dependencies. + :param builder: The SQLBuilder instance to add sorting to. + :param sorts: Sorting attributes and directions in a complex structure. + :param external_table_deps: List to track external table dependencies. 
+ """ + + def parse_sort_node(node): + if isinstance(node, dict): + for key, value in node.items(): + if isinstance(value, dict): + # Recursively parse nested structures + parse_sort_node(value) + elif isinstance(value, str) and value.lower() in ["asc", "desc"]: + external_table = self._get_external_field_key(key) + if external_table: + external_table_deps.append(external_table) + builder.with_order_by(key, value.upper()) + else: + raise ValueError(f"Invalid sort direction: {value}") + elif isinstance(node, list): + for item in node: + parse_sort_node(item) + else: + raise ValueError(f"Invalid sort structure: {node}") + + parse_sort_node(sorts) + + def _get_value_sql(self, value: Any) -> str: if isinstance(value, str): if value.lower() == "null": return "NULL" @@ -517,8 +777,8 @@ class DataAccessObjectABC(ABC, Database, Generic[T_DBM]): if isinstance(value, list): if len(value) == 0: - return "ARRAY[]::text[]" - return f"ARRAY[{", ".join([DataAccessObjectABC._get_value_sql(x) for x in value])}]" + return "()" + return f"({', '.join([self._get_value_sql(x) for x in value])})" if isinstance(value, datetime.datetime): if value.tzinfo is None: @@ -547,410 +807,17 @@ class DataAccessObjectABC(ABC, Database, Generic[T_DBM]): return cast_type(value) - async def _handle_query_external_temp_tables( - self, query: str, external_table_deps: list[str], ignore_fields=False - ) -> str: - for dep in external_table_deps: - temp_table = self._external_fields[dep] - temp_table_sql = await temp_table.build() + def _get_primary_key_value_sql(self, obj: T_DBM) -> str: + value = getattr(obj, self.__primary_key) + if isinstance(value, str): + return f"'{value}'" - if not ignore_fields: - query = query.replace( - " FROM", - f", {','.join([f'{temp_table.table_name}.{x}' for x in temp_table.fields.keys() if x not in self.__db_names])} FROM", - ) - - query = f"{temp_table_sql}\n{query}" - query += f" LEFT JOIN {temp_table.table_name} ON {temp_table.join_ref_table}.{self.__primary_key} = 
{temp_table.table_name}.{temp_table.primary_key}" - - return query - - async def _build_conditional_query( - self, - filters: AttributeFilters = None, - sorts: AttributeSorts = None, - take: int = None, - skip: int = None, - ) -> str: - filter_conditions = [] - sort_conditions = [] - - external_table_deps = [] - query = f"SELECT {self._table_name}.* FROM {self._table_name}" - for join in self.__joins: - query += f" {self.__joins[join]}" - - # Collect dependencies from filters - if filters is not None and (not isinstance(filters, list) or len(filters) > 0): - filter_conditions, filter_deps = await self._build_conditions(filters) - external_table_deps.extend(filter_deps) - - # Collect dependencies from sorts - if sorts is not None and (not isinstance(sorts, list) or len(sorts) > 0): - sort_conditions, sort_deps = self._build_order_by(sorts) - external_table_deps.extend(sort_deps) - - # Handle external table dependencies before WHERE and ORDER BY - if external_table_deps: - query = await self._handle_query_external_temp_tables( - query, external_table_deps - ) - - # Add WHERE clause - if filters is not None and (not isinstance(filters, list) or len(filters) > 0): - query += f" WHERE {filter_conditions}" - - # Add ORDER BY clause - if sorts is not None and (not isinstance(sorts, list) or len(sorts) > 0): - query += f" ORDER BY {sort_conditions}" - - if take is not None: - query += f" LIMIT {take}" - - if skip is not None: - query += f" OFFSET {skip}" - - if not query.endswith(";"): - query += ";" - return query - - def _get_external_field_key(self, field_name: str) -> Optional[str]: - """ - Returns the key to get the external field if found, otherwise None. - :param str field_name: The name of the field to search for. - :return: The key if found, otherwise None. 
- :rtype: Optional[str] - """ - for key, builder in self._external_fields.items(): - if field_name in builder.fields and field_name not in self.__db_names: - return key - return None - - async def _build_conditions(self, filters: AttributeFilters) -> (str, list[str]): - """ - Build SQL conditions from the given filters - :param filters: - :return: SQL conditions & External field table dependencies - """ - external_field_table_deps = [] - if not isinstance(filters, list): - filters = [filters] - - conditions = [] - for f in filters: - f_conditions = [] - - for attr, values in f.items(): - if isinstance(attr, property): - attr = attr.fget.__name__ - - if attr in self.__foreign_tables: - foreign_table = self.__foreign_tables[attr] - cons, eftd = self._build_foreign_conditions(foreign_table, values) - if eftd: - external_field_table_deps.extend(eftd) - - f_conditions.extend(cons) - continue - - if attr == "fuzzy": - self._handle_fuzzy_filter_conditions( - f_conditions, external_field_table_deps, values - ) - continue - - external_fields_table_name = self._get_external_field_key(attr) - if external_fields_table_name is not None: - external_fields_table = self._external_fields[ - external_fields_table_name - ] - db_name = f"{external_fields_table.table_name}.{attr}" - external_field_table_deps.append(external_fields_table.table_name) - elif ( - isinstance(values, dict) or isinstance(values, list) - ) and not attr in self.__foreign_tables: - db_name = f"{self._table_name}.{self.__db_names[attr]}" - else: - db_name = self.__db_names[attr] - - if isinstance(values, dict): - for operator, value in values.items(): - f_conditions.append( - self._build_condition(f"{db_name}", operator, value) - ) - elif isinstance(values, list): - sub_conditions = [] - for value in values: - if isinstance(value, dict): - for operator, val in value.items(): - sub_conditions.append( - self._build_condition(f"{db_name}", operator, val) - ) - else: - sub_conditions.append( - 
self._get_value_validation_sql(db_name, value) - ) - f_conditions.append(f"({' OR '.join(sub_conditions)})") - else: - f_conditions.append(self._get_value_validation_sql(db_name, values)) - - conditions.append(f"({' OR '.join(f_conditions)})") - - return " AND ".join(conditions), external_field_table_deps - - @staticmethod - def _build_fuzzy_conditions( - fields: list[str], term: str, threshold: int = 10 - ) -> list[str]: - conditions = [] - for field in fields: - conditions.append( - f"levenshtein({field}::TEXT, '{term}') <= {threshold}" - ) # Adjust the threshold as needed - - return conditions - - def _build_foreign_conditions( - self, table: str, values: dict - ) -> (list[str], list[str]): - """ - Build SQL conditions for foreign key references - :param table: Foreign table name - :param values: Filter values - :return: List of conditions, List of external field tables - """ - external_field_table_deps = [] - conditions = [] - for attr, sub_values in values.items(): - if isinstance(attr, property): - attr = attr.fget.__name__ - - if attr in self.__foreign_tables: - foreign_table = self.__foreign_tables[attr] - sub_conditions, eftd = self._build_foreign_conditions( - foreign_table, sub_values - ) - if len(eftd) > 0: - external_field_table_deps.extend(eftd) - - conditions.extend(sub_conditions) - continue - - if attr == "fuzzy": - self._handle_fuzzy_filter_conditions( - conditions, external_field_table_deps, sub_values - ) - continue - - external_fields_table_name = self._get_external_field_key(attr) - if external_fields_table_name is not None: - external_fields_table = self._external_fields[ - external_fields_table_name - ] - db_name = f"{external_fields_table.table_name}.{attr}" - external_field_table_deps.append(external_fields_table.table_name) - else: - db_name = f"{table}.{attr.lower().replace('_', '')}" - - if isinstance(sub_values, dict): - for operator, value in sub_values.items(): - conditions.append( - f"{self._build_condition(db_name, operator, 
value)}" - ) - elif isinstance(sub_values, list): - sub_conditions = [] - for value in sub_values: - if isinstance(value, dict): - for operator, val in value.items(): - sub_conditions.append( - f"{self._build_condition(db_name, operator, val)}" - ) - else: - sub_conditions.append( - self._get_value_validation_sql(db_name, value) - ) - conditions.append(f"({' OR '.join(sub_conditions)})") - else: - conditions.append(self._get_value_validation_sql(db_name, sub_values)) - - return conditions, external_field_table_deps - - def _handle_fuzzy_filter_conditions( - self, conditions, external_field_table_deps, sub_values - ): - fuzzy_fields = get_value(sub_values, "fields", list[str]) - fuzzy_fields_db_names = [] - for fuzzy_field in fuzzy_fields: - external_fields_table_name = self._get_external_field_key(fuzzy_field) - if external_fields_table_name is not None: - external_fields_table = self._external_fields[ - external_fields_table_name - ] - fuzzy_fields_db_names.append( - f"{external_fields_table.table_name}.{fuzzy_field}" - ) - external_field_table_deps.append(external_fields_table.table_name) - elif fuzzy_field in self.__db_names: - fuzzy_fields_db_names.append( - f"{self._table_name}.{self.__db_names[fuzzy_field]}" - ) - elif fuzzy_field in self.__foreign_tables: - fuzzy_fields_db_names.append( - f"{self._table_name}.{self.__foreign_table_keys[fuzzy_field]}" - ) - else: - fuzzy_fields_db_names.append( - self.__db_names[camel_to_snake(fuzzy_field)] - ) - conditions.append( - f"({' OR '.join( - self._build_fuzzy_conditions( - [x for x in fuzzy_fields_db_names], - get_value(sub_values, "term", str), - get_value(sub_values, "threshold", int, 5), - ) - ) - })" - ) - - def _get_value_validation_sql(self, field: str, value: Any): - value = self._get_value_sql(value) - field_selector = f"{self._table_name}.{field}" - if field in self.__foreign_tables: - field_selector = self.__db_names[field] - - if value == "NULL": - return f"{field_selector} IS NULL" - return 
f"{field_selector} = {value}" - - def _build_condition(self, db_name: str, operator: str, value: Any) -> str: - """ - Build individual SQL condition based on the operator - :param db_name: - :param operator: - :param value: - :return: - """ - attr = db_name.split(".")[-1] - - if attr in self.__date_attributes: - db_name = self._attr_from_date_to_char(db_name) - - sql_value = self._get_value_sql(value) - if operator == "equal": - return f"{db_name} = {sql_value}" - elif operator == "notEqual": - return f"{db_name} != {sql_value}" - elif operator == "greater": - return f"{db_name} > {sql_value}" - elif operator == "greaterOrEqual": - return f"{db_name} >= {sql_value}" - elif operator == "less": - return f"{db_name} < {sql_value}" - elif operator == "lessOrEqual": - return f"{db_name} <= {sql_value}" - elif operator == "isNull": - return f"{db_name} IS NULL" - elif operator == "isNotNull": - return f"{db_name} IS NOT NULL" - elif operator == "contains": - return f"{db_name} LIKE '%{value}%'" - elif operator == "notContains": - return f"{db_name} NOT LIKE '%{value}%'" - elif operator == "startsWith": - return f"{db_name} LIKE '{value}%'" - elif operator == "endsWith": - return f"{db_name} LIKE '%{value}'" - elif operator == "in": - return ( - f"{db_name} IN ({', '.join([self._get_value_sql(x) for x in value])})" - ) - elif operator == "notIn": - return f"{db_name} NOT IN ({', '.join([self._get_value_sql(x) for x in value])})" - else: - raise ValueError(f"Unsupported operator: {operator}") + return value @staticmethod def _attr_from_date_to_char(attr: str) -> str: return f"TO_CHAR({attr}, 'YYYY-MM-DD HH24:MI:SS.US TZ')" - def _build_order_by(self, sorts: AttributeSorts) -> (str, list[str]): - """ - Build SQL order by clause from the given sorts - :param sorts: - :return: - """ - external_field_table_deps = [] - if not isinstance(sorts, list): - sorts = [sorts] - - sort_clauses = [] - for sort in sorts: - for attr, direction in sort.items(): - if isinstance(attr, 
property): - attr = attr.fget.__name__ - - if attr in self.__foreign_tables: - foreign_table = self.__foreign_tables[attr] - f_sorts, eftd = self._build_foreign_order_by( - foreign_table, direction - ) - if eftd: - external_field_table_deps.extend(eftd) - - sort_clauses.extend(f_sorts) - continue - - external_fields_table_name = self._get_external_field_key(attr) - if external_fields_table_name is not None: - external_fields_table = self._external_fields[ - external_fields_table_name - ] - db_name = f"{external_fields_table.table_name}.{attr}" - external_field_table_deps.append(external_fields_table.table_name) - else: - db_name = self.__db_names[attr] - sort_clauses.append(f"{db_name} {direction.upper()}") - - return ", ".join(sort_clauses), external_field_table_deps - - def _build_foreign_order_by( - self, table: str, direction: dict - ) -> (list[str], list[str]): - """ - Build SQL order by clause for foreign key references - :param table: Foreign table name - :param direction: Sort direction - :return: List of order by clauses - """ - external_field_table_deps = [] - sort_clauses = [] - for attr, sub_direction in direction.items(): - if isinstance(attr, property): - attr = attr.fget.__name__ - - if attr in self.__foreign_tables: - foreign_table = self.__foreign_tables[attr] - f_sorts, eftd = self._build_foreign_order_by(foreign_table, direction) - if eftd: - external_field_table_deps.extend(eftd) - - sort_clauses.extend(f_sorts) - continue - - external_fields_table_name = self._get_external_field_key(attr) - if external_fields_table_name is not None: - external_fields_table = self._external_fields[ - external_fields_table_name - ] - db_name = f"{external_fields_table.table_name}.{attr}" - external_field_table_deps.append(external_fields_table.table_name) - else: - db_name = f"{table}.{attr.lower().replace('_', '')}" - sort_clauses.append(f"{db_name} {sub_direction.upper()}") - - return sort_clauses, external_field_table_deps - @staticmethod async def 
_get_editor_id(obj: T_DBM): editor_id = obj.editor_id diff --git a/api/src/core/database/abc/db_model_abc.py b/api/src/core/database/abc/db_model_abc.py index eddf74f..b8c81d6 100644 --- a/api/src/core/database/abc/db_model_abc.py +++ b/api/src/core/database/abc/db_model_abc.py @@ -54,7 +54,7 @@ class DbModelABC(ABC): from data.schemas.administration.user_dao import userDao - return await userDao.find_single_by({"id": self._editor_id}) + return await userDao.get_by_id(self._editor_id) @property def created(self) -> datetime: @@ -63,3 +63,7 @@ class DbModelABC(ABC): @property def updated(self) -> datetime: return self._updated + + @updated.setter + def updated(self, value: datetime): + self._updated = value diff --git a/api/src/core/database/abc/db_model_dao_abc.py b/api/src/core/database/abc/db_model_dao_abc.py index 9d8d638..79be8f2 100644 --- a/api/src/core/database/abc/db_model_dao_abc.py +++ b/api/src/core/database/abc/db_model_dao_abc.py @@ -14,6 +14,15 @@ class DbModelDaoABC[T_DBM](DataAccessObjectABC[T_DBM]): self.attribute(DbModelABC.id, int, ignore=True) self.attribute(DbModelABC.deleted, bool) - self.attribute(DbModelABC.editor_id, int, ignore=True) - self.attribute(DbModelABC.created, datetime, "created", ignore=True) - self.attribute(DbModelABC.updated, datetime, "updated", ignore=True) + self.attribute(DbModelABC.editor_id, int, ignore=True) # handled by db trigger + + self.reference( + "editor", "id", DbModelABC.editor_id, "administration.users" + ) # not relevant for updates due to editor_id + + self.attribute( + DbModelABC.created, datetime, ignore=True + ) # handled by db trigger + self.attribute( + DbModelABC.updated, datetime, ignore=True + ) # handled by db trigger diff --git a/api/src/core/database/database.py b/api/src/core/database/database.py index 36adc9f..347dc65 100644 --- a/api/src/core/database/database.py +++ b/api/src/core/database/database.py @@ -1,9 +1,4 @@ -from core.database.database_settings import DatabaseSettings from 
core.database.db_context import DBContext -from core.environment import Environment -from core.logger import DBLogger - -logger = DBLogger(__name__) class Database: @@ -19,30 +14,3 @@ class Database: @classmethod def connect(cls): cls._db.connect() - - @classmethod - async def startup_db(cls): - from data.service.migration_service import MigrationService - - logger.info("Init DB") - db = DBContext() - host = Environment.get("DB_HOST", str) - port = Environment.get("DB_PORT", int) - user = Environment.get("DB_USER", str) - password = Environment.get("DB_PASSWORD", str) - database = Environment.get("DB_DATABASE", str) - - if None in [host, port, user, password, database]: - logger.fatal( - "DB settings are not set correctly", - EnvironmentError("DB settings are not set correctly"), - ) - - await db.connect( - DatabaseSettings( - host=host, port=port, user=user, password=password, database=database - ) - ) - Database.init(db) - migrations = MigrationService(db) - await migrations.migrate() diff --git a/api/src/core/database/db_context.py b/api/src/core/database/db_context.py index fb2d5a5..3c169c9 100644 --- a/api/src/core/database/db_context.py +++ b/api/src/core/database/db_context.py @@ -1,6 +1,9 @@ import uuid from typing import Optional, Any +from psycopg import OperationalError +from psycopg_pool import PoolTimeout + from core.database.database_settings import DatabaseSettings from core.database.postgres_pool import PostgresPool from core.environment import Environment @@ -26,23 +29,23 @@ class DBContext: except Exception as e: logger.fatal("Connecting to database failed", e) - async def execute(self, statement: str, args=None) -> list[list]: + async def execute(self, statement: str, args=None, multi=True) -> list[list]: logger.trace(f"execute {statement} with args: {args}") - return await self._pool.execute(statement, args) + return await self._pool.execute(statement, args, multi) async def select_map(self, statement: str, args=None) -> list[dict]: 
logger.trace(f"select {statement} with args: {args}") try: return await self._pool.select_map(statement, args) - except Exception as e: + except (OperationalError, PoolTimeout) as e: if self._fails >= 3: - logger.error(f"Database error caused by {statement}", e) + logger.error(f"Database error caused by `{statement}`", e) uid = uuid.uuid4() raise Exception( f"Query failed three times with {type(e).__name__}. Contact an admin with the UID: {uid}" ) - logger.error(f"Database error caused by {statement}", e) + logger.error(f"Database error caused by `{statement}`", e) self._fails += 1 try: logger.debug("Retry select") @@ -50,6 +53,9 @@ class DBContext: except Exception as e: pass return [] + except Exception as e: + logger.error(f"Database error caused by `{statement}`", e) + raise e async def select( self, statement: str, args=None @@ -57,15 +63,15 @@ class DBContext: logger.trace(f"select {statement} with args: {args}") try: return await self._pool.select(statement, args) - except Exception as e: + except (OperationalError, PoolTimeout) as e: if self._fails >= 3: - logger.error(f"Database error caused by {statement}", e) + logger.error(f"Database error caused by `{statement}`", e) uid = uuid.uuid4() raise Exception( f"Query failed three times with {type(e).__name__}. 
Contact an admin with the UID: {uid}" ) - logger.error(f"Database error caused by {statement}", e) + logger.error(f"Database error caused by `{statement}`", e) self._fails += 1 try: logger.debug("Retry select") @@ -73,3 +79,6 @@ class DBContext: except Exception as e: pass return [] + except Exception as e: + logger.error(f"Database error caused by `{statement}`", e) + raise e diff --git a/api/src/core/database/postgres_pool.py b/api/src/core/database/postgres_pool.py index 217eabe..6115c40 100644 --- a/api/src/core/database/postgres_pool.py +++ b/api/src/core/database/postgres_pool.py @@ -1,4 +1,4 @@ -from typing import Optional +from typing import Optional, Any from psycopg import sql from psycopg_pool import AsyncConnectionPool, PoolTimeout @@ -21,7 +21,7 @@ class PostgresPool: f"host={database_settings.host} " f"port={database_settings.port} " f"user={database_settings.user} " - f"password={B64Helper.decode(database_settings.password)} " + f"password={database_settings.password} " f"dbname={database_settings.database}" ) self._pool_size = pool_size @@ -41,18 +41,31 @@ class PostgresPool: logger.fatal(f"Failed to connect to the database", e) return pool - async def execute(self, query: str, args=None) -> list[list]: + @staticmethod + async def _exec_sql(cursor: Any, query: str, args=None, multi=True): + if multi: + queries = query.split(";") + for q in queries: + if q.strip() == "": + continue + + await cursor.execute(sql.SQL(q), args) + else: + await cursor.execute(sql.SQL(query), args) + + async def execute(self, query: str, args=None, multi=True) -> list[list]: """ Execute a SQL statement, it could be with args and without args. The usage is similar to the execute() function in the psycopg module. 
:param query: SQL clause :param args: args needed by the SQL clause + :param multi: if the query is a multi-statement :return: return result """ async with await self._get_pool() as pool: async with pool.connection() as con: async with con.cursor() as cursor: - await cursor.execute(sql.SQL(query), args) + await self._exec_sql(cursor, query, args, multi) if ( cursor.description is not None @@ -68,34 +81,36 @@ class PostgresPool: else: return [] - async def select(self, query: str, args=None) -> list[str]: + async def select(self, query: str, args=None, multi=True) -> list[str]: """ Execute a SQL statement, it could be with args and without args. The usage is similar to the execute() function in the psycopg module. :param query: SQL clause :param args: args needed by the SQL clause + :param multi: if the query is a multi-statement :return: return result """ async with await self._get_pool() as pool: async with pool.connection() as con: async with con.cursor() as cursor: - await cursor.execute(sql.SQL(query), args) + await self._exec_sql(cursor, query, args, multi) res = await cursor.fetchall() return list(res) - async def select_map(self, query: str, args=None) -> list[dict]: + async def select_map(self, query: str, args=None, multi=True) -> list[dict]: """ Execute a SQL statement, it could be with args and without args. The usage is similar to the execute() function in the psycopg module. 
:param query: SQL clause :param args: args needed by the SQL clause + :param multi: if the query is a multi-statement :return: return result """ async with await self._get_pool() as pool: async with pool.connection() as con: async with con.cursor() as cursor: - await cursor.execute(sql.SQL(query), args) + await self._exec_sql(cursor, query, args, multi) res = await cursor.fetchall() res_map: list[dict] = [] diff --git a/api/src/core/database/sql_select_builder.py b/api/src/core/database/sql_select_builder.py new file mode 100644 index 0000000..f2644ed --- /dev/null +++ b/api/src/core/database/sql_select_builder.py @@ -0,0 +1,150 @@ +from typing import Optional + +from core.database.external_data_temp_table_builder import ExternalDataTempTableBuilder + + +class SQLSelectBuilder: + + def __init__(self, table_name: str, primary_key: str): + self._table_name = table_name + self._primary_key = primary_key + + self._temp_tables: dict[str, ExternalDataTempTableBuilder] = {} + self._to_use_temp_tables: list[str] = [] + self._attributes: list[str] = [] + self._tables: list[str] = [table_name] + self._joins: dict[str, (str, str)] = {} + self._conditions: list[str] = [] + self._order_by: str = "" + self._limit: Optional[int] = None + self._offset: Optional[int] = None + + def with_temp_table( + self, temp_table: ExternalDataTempTableBuilder + ) -> "SQLSelectBuilder": + self._temp_tables[temp_table.table_name] = temp_table + return self + + def use_temp_table(self, temp_table_name: str): + if temp_table_name not in self._temp_tables: + raise ValueError(f"Temp table {temp_table_name} not found.") + + self._to_use_temp_tables.append(temp_table_name) + + def with_attribute(self, attr: str, ignore_table_name=False) -> "SQLSelectBuilder": + if not ignore_table_name and not attr.startswith(self._table_name): + attr = f"{self._table_name}.{attr}" + + self._attributes.append(attr) + return self + + def with_foreign_attribute(self, attr: str) -> "SQLSelectBuilder": + 
self._attributes.append(attr)
+        return self
+
+    def with_table(self, table_name: str) -> "SQLSelectBuilder":
+        self._tables.append(table_name)
+        return self
+
+    def _check_prefix(self, attr: str) -> str:
+        assert attr is not None
+
+        valid_prefixes = [
+            "levenshtein",
+            self._table_name,
+            *self._joins.keys(),
+            *self._temp_tables.keys(),
+        ]
+        if not any(attr.startswith(f"{prefix}.") for prefix in valid_prefixes):
+            attr = f"{self._table_name}.{attr}"
+
+        return attr
+
+    def with_value_condition(
+        self, attr: str, operator: str, value: str
+    ) -> "SQLSelectBuilder":
+        attr = self._check_prefix(attr)
+        self._conditions.append(f"{attr} {operator} {value}")
+        return self
+
+    def with_levenshtein_condition(self, condition: str) -> "SQLSelectBuilder":
+        self._conditions.append(condition)
+        return self
+
+    def with_condition(self, attr: str, operator: str) -> "SQLSelectBuilder":
+        attr = self._check_prefix(attr)
+        self._conditions.append(f"{attr} {operator}")
+        return self
+
+    def with_grouped_conditions(self, conditions: list[str]) -> "SQLSelectBuilder":
+        self._conditions.append(f"({' AND '.join(conditions)})")
+        return self
+
+    def with_left_join(self, table: str, on: str) -> "SQLSelectBuilder":
+        if table in self._joins:
+            on = f"{self._joins[table][0]} AND {on}"
+
+        self._joins[table] = (on, "LEFT")
+        return self
+
+    def with_inner_join(self, table: str, on: str) -> "SQLSelectBuilder":
+        if table in self._joins:
+            on = f"{self._joins[table][0]} AND {on}"
+
+        self._joins[table] = (on, "INNER")
+        return self
+
+    def with_right_join(self, table: str, on: str) -> "SQLSelectBuilder":
+        if table in self._joins:
+            on = f"{self._joins[table][0]} AND {on}"
+
+        self._joins[table] = (on, "RIGHT")
+        return self
+
+    def with_limit(self, limit: int) -> "SQLSelectBuilder":
+        self._limit = limit
+        return self
+
+    def with_offset(self, offset: int) -> "SQLSelectBuilder":
+        self._offset = offset
+
return self + + def with_order_by(self, column: str, direction: str = "ASC") -> "SQLSelectBuilder": + self._order_by = f"{column} {direction}" + return self + + async def _handle_temp_table_use(self, query) -> str: + new_query = "" + + for temp_table_name in self._to_use_temp_tables: + temp_table = self._temp_tables[temp_table_name] + new_query += await self._temp_tables[temp_table_name].build() + self.with_left_join( + temp_table.table_name, + f"{temp_table.join_ref_table}.{self._primary_key} = {temp_table.table_name}.{temp_table.primary_key}", + ) + + return f"{new_query} {query}" if new_query != "" else query + + async def build(self) -> str: + query = await self._handle_temp_table_use("") + + attributes = ", ".join(self._attributes) if self._attributes else "*" + query += f"SELECT {attributes} FROM {", ".join(self._tables)}" + + for join in self._joins: + query += f" {self._joins[join][1]} JOIN {join} ON {self._joins[join][0]}" + + if self._conditions: + query += " WHERE " + " AND ".join(self._conditions) + + if self._order_by: + query += f" ORDER BY {self._order_by}" + + if self._limit is not None: + query += f" LIMIT {self._limit}" + + if self._offset is not None: + query += f" OFFSET {self._offset}" + + return query diff --git a/api/src/data/schemas/administration/user.py b/api/src/data/schemas/administration/user.py index 3a44626..b17738b 100644 --- a/api/src/data/schemas/administration/user.py +++ b/api/src/data/schemas/administration/user.py @@ -1,3 +1,4 @@ +import uuid from datetime import datetime from typing import Optional @@ -28,10 +29,16 @@ class User(DbModelABC): @property def username(self): + if self._keycloak_id == str(uuid.UUID(int=0)): + return "ANONYMOUS" + return Keycloak.admin.get_user(self._keycloak_id).get("username") @property def email(self): + if self._keycloak_id == str(uuid.UUID(int=0)): + return "ANONYMOUS" + return Keycloak.admin.get_user(self._keycloak_id).get("email") @async_property @@ -50,3 +57,9 @@ class User(DbModelABC): from 
data.schemas.administration.user_dao import userDao return await userDao.has_permission(self.id, permission) + + async def anonymize(self): + from data.schemas.administration.user_dao import userDao + + self._keycloak_id = str(uuid.UUID(int=0)) + await userDao.update(self) diff --git a/api/src/data/schemas/administration/user_dao.py b/api/src/data/schemas/administration/user_dao.py index 7759dd3..4649708 100644 --- a/api/src/data/schemas/administration/user_dao.py +++ b/api/src/data/schemas/administration/user_dao.py @@ -1,6 +1,7 @@ from typing import Optional, Union from core.database.abc.db_model_dao_abc import DbModelDaoABC +from core.database.external_data_temp_table_builder import ExternalDataTempTableBuilder from core.logger import DBLogger from data.schemas.administration.user import User from data.schemas.permission.permission_dao import permissionDao @@ -14,7 +15,19 @@ class UserDao(DbModelDaoABC[User]): def __init__(self): DbModelDaoABC.__init__(self, __name__, User, "administration.users") - self.attribute(User.keycloak_id, str) + self.attribute(User.keycloak_id, str, aliases=["keycloakId"]) + + async def get_users(): + return [(x.id, x.username, x.email) for x in await self.get_all()] + + self.use_external_fields( + ExternalDataTempTableBuilder() + .with_table_name(self._table_name) + .with_field("id", "int", True) + .with_field("username", "text") + .with_field("email", "text") + .with_value_getter(get_users) + ) async def get_by_keycloak_id(self, keycloak_id: str) -> User: return await self.get_single_by({User.keycloak_id: keycloak_id}) diff --git a/api/src/data/schemas/public/group.py b/api/src/data/schemas/public/group.py index ddd1c16..da11936 100644 --- a/api/src/data/schemas/public/group.py +++ b/api/src/data/schemas/public/group.py @@ -1,6 +1,8 @@ from datetime import datetime from typing import Optional +from async_property import async_property + from core.database.abc.db_model_abc import DbModelABC from core.typing import SerialId @@ -10,6 
+12,7 @@ class Group(DbModelABC): self, id: SerialId, name: str, + user_id: Optional[SerialId] = None, deleted: bool = False, editor_id: Optional[SerialId] = None, created: Optional[datetime] = None, @@ -17,6 +20,7 @@ class Group(DbModelABC): ): DbModelABC.__init__(self, id, deleted, editor_id, created, updated) self._name = name + self._user_id = user_id @property def name(self) -> str: @@ -25,3 +29,17 @@ class Group(DbModelABC): @name.setter def name(self, value: str): self._name = value + + @property + def user_id(self) -> Optional[SerialId]: + return self._user_id + + @async_property + async def user(self): + if self._user_id is None: + return None + + from data.schemas.administration.user_dao import userDao + + user = await userDao.get_by_id(self.user_id) + return user diff --git a/api/src/data/schemas/public/group_dao.py b/api/src/data/schemas/public/group_dao.py index 52603f8..e856f04 100644 --- a/api/src/data/schemas/public/group_dao.py +++ b/api/src/data/schemas/public/group_dao.py @@ -11,6 +11,9 @@ class GroupDao(DbModelDaoABC[Group]): DbModelDaoABC.__init__(self, __name__, Group, "public.groups") self.attribute(Group.name, str) + self.attribute(Group.user_id, int) + self.reference("user", "id", Group.user_id, "administration.users") + async def get_by_name(self, name: str) -> Group: result = await self._db.select_map( f"SELECT * FROM {self._table_name} WHERE Name = '{name}'" diff --git a/api/src/data/schemas/public/short_url.py b/api/src/data/schemas/public/short_url.py index 5b04e65..8d23653 100644 --- a/api/src/data/schemas/public/short_url.py +++ b/api/src/data/schemas/public/short_url.py @@ -18,6 +18,7 @@ class ShortUrl(DbModelABC): group_id: Optional[SerialId], domain_id: Optional[SerialId], loading_screen: Optional[str] = None, + user_id: Optional[SerialId] = None, deleted: bool = False, editor_id: Optional[SerialId] = None, created: Optional[datetime] = None, @@ -34,6 +35,8 @@ class ShortUrl(DbModelABC): loading_screen = False self._loading_screen 
= loading_screen + self._user_id = user_id + @property def short_url(self) -> str: return self._short_url @@ -106,6 +109,20 @@ class ShortUrl(DbModelABC): def loading_screen(self, value: Optional[str]): self._loading_screen = value + @property + def user_id(self) -> Optional[SerialId]: + return self._user_id + + @async_property + async def user(self): + if self._user_id is None: + return None + + from data.schemas.administration.user_dao import userDao + + user = await userDao.get_by_id(self.user_id) + return user + def to_dto(self) -> dict: return { "id": self.id, diff --git a/api/src/data/schemas/public/short_url_dao.py b/api/src/data/schemas/public/short_url_dao.py index 6866d5c..66e0003 100644 --- a/api/src/data/schemas/public/short_url_dao.py +++ b/api/src/data/schemas/public/short_url_dao.py @@ -18,5 +18,8 @@ class ShortUrlDao(DbModelDaoABC[ShortUrl]): self.reference("domain", "id", ShortUrl.domain_id, "public.domains") self.attribute(ShortUrl.loading_screen, bool) + self.attribute(ShortUrl.user_id, int) + self.reference("user", "id", ShortUrl.user_id, "administration.users") + shortUrlDao = ShortUrlDao() diff --git a/api/src/data/schemas/public/user_setting_dao.py b/api/src/data/schemas/public/user_setting_dao.py index 78f3411..c6d8af0 100644 --- a/api/src/data/schemas/public/user_setting_dao.py +++ b/api/src/data/schemas/public/user_setting_dao.py @@ -21,4 +21,4 @@ class UserSettingDao(DbModelDaoABC[UserSetting]): ) -userSettingsDao = UserSettingDao() +userSettingDao = UserSettingDao() diff --git a/api/src/data/schemas/system/setting_dao.py b/api/src/data/schemas/system/setting_dao.py index d7262eb..c2c836a 100644 --- a/api/src/data/schemas/system/setting_dao.py +++ b/api/src/data/schemas/system/setting_dao.py @@ -17,4 +17,4 @@ class SettingDao(DbModelDaoABC[Setting]): return await self.find_single_by({Setting.key: key}) -settingsDao = SettingDao() +settingDao = SettingDao() diff --git a/api/src/data/scripts/2025-04-18-12-15-user-spaces.sql 
b/api/src/data/scripts/2025-04-18-12-15-user-spaces.sql new file mode 100644 index 0000000..a01734d --- /dev/null +++ b/api/src/data/scripts/2025-04-18-12-15-user-spaces.sql @@ -0,0 +1,11 @@ +ALTER TABLE public.groups + ADD COLUMN IF NOT EXISTS UserId INT NULL REFERENCES administration.users (Id); + +ALTER TABLE public.groups_history + ADD COLUMN IF NOT EXISTS UserId INT NULL REFERENCES administration.users (Id); + +ALTER TABLE public.short_urls + ADD COLUMN IF NOT EXISTS UserId INT NULL REFERENCES administration.users (Id); + +ALTER TABLE public.short_urls_history + ADD COLUMN IF NOT EXISTS UserId INT NULL REFERENCES administration.users (Id); \ No newline at end of file diff --git a/api/src/data/scripts/2025-04-30-15-30-keycloak-id-anonymizable.sql b/api/src/data/scripts/2025-04-30-15-30-keycloak-id-anonymizable.sql new file mode 100644 index 0000000..8c77c31 --- /dev/null +++ b/api/src/data/scripts/2025-04-30-15-30-keycloak-id-anonymizable.sql @@ -0,0 +1,7 @@ +-- Drop the existing unique constraint +ALTER TABLE administration.users DROP CONSTRAINT IF EXISTS UC_KeycloakId; + +-- Add a partial unique index to allow guid-0 duplicates +CREATE UNIQUE INDEX IF NOT EXISTS idx_unique_keycloakid + ON administration.users (KeycloakId) + WHERE KeycloakId != '00000000-0000-0000-0000-000000000000'; \ No newline at end of file diff --git a/api/src/data/seeder/feature_flags_seeder.py b/api/src/data/seeder/feature_flags_seeder.py index 185312c..373e556 100644 --- a/api/src/data/seeder/feature_flags_seeder.py +++ b/api/src/data/seeder/feature_flags_seeder.py @@ -21,6 +21,7 @@ class FeatureFlagsSeeder(DataSeederABC): x.value: FeatureFlags.get_default(x) for x in FeatureFlagsEnum } + # Create new feature flags to_create = [ FeatureFlag(0, x, possible_feature_flags[x]) for x in possible_feature_flags.keys() @@ -31,6 +32,19 @@ class FeatureFlagsSeeder(DataSeederABC): to_create_dicts = {x.key: x.value for x in to_create} logger.debug(f"Created feature flags: {to_create_dicts}") + # 
Update existing feature flags if they can be overwritten and have a different value + to_update = [ + FeatureFlag(x.id, x.key, possible_feature_flags[x.key]) + for x in feature_flags + if FeatureFlags.overwrite_flag(x.key) + and x.value != possible_feature_flags[x.key] + ] + if len(to_update) > 0: + await featureFlagDao.update_many(to_update) + to_update_dicts = {x.key: x.value for x in to_update} + logger.debug(f"Updated feature flags: {to_update_dicts}") + + # Delete feature flags that are no longer defined to_delete = [ x for x in feature_flags if x.key not in possible_feature_flags.keys() ] diff --git a/api/src/data/seeder/settings_seeder.py b/api/src/data/seeder/settings_seeder.py index dcdff29..989d637 100644 --- a/api/src/data/seeder/settings_seeder.py +++ b/api/src/data/seeder/settings_seeder.py @@ -3,7 +3,7 @@ from typing import Any from core.logger import DBLogger from data.abc.data_seeder_abc import DataSeederABC from data.schemas.system.setting import Setting -from data.schemas.system.setting_dao import settingsDao +from data.schemas.system.setting_dao import settingDao logger = DBLogger(__name__) @@ -18,8 +18,8 @@ class SettingsSeeder(DataSeederABC): @staticmethod async def _seed_if_not_exists(key: str, value: Any): - existing = await settingsDao.find_by_key(key) + existing = await settingDao.find_by_key(key) if existing is not None: return - await settingsDao.create(Setting(0, key, str(value))) + await settingDao.create(Setting(0, key, str(value))) diff --git a/api/src/data/service/migration_service.py b/api/src/data/service/migration_service.py index e336284..23293c4 100644 --- a/api/src/data/service/migration_service.py +++ b/api/src/data/service/migration_service.py @@ -77,7 +77,7 @@ class MigrationService: logger.debug(f"Running upgrade migration: {migration.name}") - await self._db.execute(migration.script) + await self._db.execute(migration.script, multi=False) await executedMigrationDao.create( ExecutedMigration(migration.name), 
skip_editor=True diff --git a/api/src/service/data_privacy_service.py b/api/src/service/data_privacy_service.py new file mode 100644 index 0000000..e0d6d82 --- /dev/null +++ b/api/src/service/data_privacy_service.py @@ -0,0 +1,126 @@ +import importlib +import json +from typing import Type + +from api.auth.keycloak_client import Keycloak +from api.broadcast import broadcast +from core.database.abc.data_access_object_abc import DataAccessObjectABC +from core.database.abc.db_model_dao_abc import DbModelDaoABC +from core.logger import Logger +from core.string import first_to_lower +from data.schemas.administration.user_dao import userDao + +logger = Logger("DataPrivacy") + + +class DataPrivacyService: + + @staticmethod + def _dynamic_import_dao(dao_class: Type[DataAccessObjectABC]): + """ + Dynamically import a DAO class and its instance. + :param dao_class: The DAO class to be imported. + :return: The DAO instance. + """ + module = importlib.import_module(dao_class.__module__) + dao_instance = getattr( + module, first_to_lower(first_to_lower(dao_class.__name__)) + ) + return dao_instance + + @classmethod + async def _collect_user_relevant_dao(cls): + """ + Collect all DAO classes that are relevant for data privacy. + :return: List of relevant DAO classes. + """ + # This method should return a list of DAOs that are relevant for data privacy + # For example, it could return a list of DAOs that contain user data + classes: list[DataAccessObjectABC] = [ + cls._dynamic_import_dao(dao) for dao in DbModelDaoABC.__subclasses__() + ] + return [x for x in classes if x.has_attribute("user_id")] + + @classmethod + async def export_user_data(cls, user_id: int): + """ + Export user data from the database. + :param user_id: ID of the user whose data is to be exported. + :return: User data in a structured format. 
+ """ + # Logic to export user data + user = await userDao.find_by_id(user_id) + if user is None: + raise ValueError("User not found") + + collected_data = {"user": userDao.to_dict(await userDao.find_by_id(user_id))} + + daos = await cls._collect_user_relevant_dao() + for dao in daos: + data = await dao.find_by([{"userid": user_id}]) + collected_data[first_to_lower(type(dao).__name__.replace("Dao", "s"))] = [ + dao.to_dict(x) for x in data + ] + + return json.dumps(collected_data, default=str) + + @staticmethod + async def anonymize_user(user_id: int): + """ + Anonymize user data in the database. + :param user_id: ID of the user to be anonymized. + """ + user = await userDao.find_by_id(user_id) + if user is None: + raise ValueError("User not found") + + keycloak_id = user.keycloak_id + + # Anonymize internal data + await user.anonymize() + + # Anonymize external data + try: + Keycloak.admin.delete_user(keycloak_id) + await broadcast.publish("userLogout", user.id) + except Exception as e: + logger.error(f"Failed to anonymize external data for user {user_id}", e) + raise ValueError("Failed to anonymize external data") from e + + return True + + @classmethod + async def delete_user_data(cls, user_id: int): + """ + Delete user data from the database. + :param user_id: ID of the user whose data is to be deleted. 
+ """ + user = await userDao.find_by_id(user_id) + if user is None: + raise ValueError("User not found") + + keycloak_id = user.keycloak_id + + daos = await cls._collect_user_relevant_dao() + for dao in daos: + data = await dao.find_by([{"userid": user_id}]) + try: + await dao.delete_many(data, hard_delete=True) + except Exception as e: + logger.error(f"Failed to delete data for user {user_id}", e) + raise ValueError("Failed to delete data") from e + + try: + await userDao.delete(user) + except Exception as e: + logger.error(f"Failed to delete user {user_id}", e) + raise ValueError("Failed to delete user") from e + + # Delete external data + try: + Keycloak.admin.delete_user(keycloak_id) + except Exception as e: + logger.error(f"Failed to delete external data for user {user_id}", e) + raise ValueError("Failed to delete external data") from e + + return True diff --git a/web/src/app/app.component.html b/web/src/app/app.component.html index b3cf78b..ea278c4 100644 --- a/web/src/app/app.component.html +++ b/web/src/app/app.component.html @@ -17,7 +17,7 @@ - +
preloader.preloadLanguages(); +} + export function HttpLoaderFactory(http: HttpClient) { - return new TranslateHttpLoader(http); + return new TranslateHttpLoader(http, '/assets/i18n/', '.json'); } export function appInitializerFactory( @@ -89,6 +94,12 @@ export function appInitializerFactory( AppRoutingModule, ], providers: [ + { + provide: APP_INITIALIZER, + useFactory: preloadTranslations, + deps: [TranslationPreloaderService], + multi: true, + }, MessageService, ConfirmationService, DialogService, diff --git a/web/src/app/components/header/header.component.html b/web/src/app/components/header/header.component.html index 7c9b160..0819eda 100644 --- a/web/src/app/components/header/header.component.html +++ b/web/src/app/components/header/header.component.html @@ -11,7 +11,7 @@
-

LAN-Maestro

+

Open-Redirect

diff --git a/web/src/app/components/header/header.component.ts b/web/src/app/components/header/header.component.ts index d4d1d72..d1055f3 100644 --- a/web/src/app/components/header/header.component.ts +++ b/web/src/app/components/header/header.component.ts @@ -1,4 +1,4 @@ -import { Component, OnDestroy, OnInit } from '@angular/core'; +import { Component, EventEmitter, OnDestroy, OnInit } from '@angular/core'; import { MenuItem, PrimeNGConfig } from 'primeng/api'; import { Subject } from 'rxjs'; import { takeUntil } from 'rxjs/operators'; @@ -9,8 +9,13 @@ import { AuthService } from 'src/app/service/auth.service'; import { MenuElement } from 'src/app/model/view/menu-element'; import { SidebarService } from 'src/app/service/sidebar.service'; import { ConfigService } from 'src/app/service/config.service'; -import { UserSettingsService } from 'src/app/service/user_settings.service'; +import { UserSettingsService } from 'src/app/service/user-settings.service'; import { SettingsService } from 'src/app/service/settings.service'; +import { DataPrivacyService } from 'src/app/service/data-privacy.service'; +import { ConfirmationDialogService } from 'src/app/service/confirmation-dialog.service'; +import { Logger } from 'src/app/service/logger.service'; + +const logger = new Logger('Header'); @Component({ selector: 'app-header', @@ -28,6 +33,8 @@ export class HeaderComponent implements OnInit, OnDestroy { menu: MenuElement[] = []; + private langChange: EventEmitter = new EventEmitter(); + constructor( private translateService: TranslateService, private ngConfig: PrimeNGConfig, @@ -36,8 +43,14 @@ export class HeaderComponent implements OnInit, OnDestroy { private sidebarService: SidebarService, private config: ConfigService, private settings: SettingsService, - private userSettings: UserSettingsService + private userSettings: UserSettingsService, + private dataPrivacyService: DataPrivacyService, + private confirmation: ConfirmationDialogService ) { + 
this.langChange.subscribe(async () => { + await this.initUserMenuList(); + }); + this.guiService.isMobile$ .pipe(takeUntil(this.unsubscribe$)) .subscribe(isMobile => { @@ -48,11 +61,11 @@ export class HeaderComponent implements OnInit, OnDestroy { }); this.auth.user$.pipe(takeUntil(this.unsubscribe$)).subscribe(async user => { - await this.initMenuLists(); + this.user = user; + await this.loadTheme(); await this.loadLang(); - this.user = user; this.guiService.loadedGuiSettings$.next(true); }); @@ -117,23 +130,71 @@ export class HeaderComponent implements OnInit, OnDestroy { visible: !!this.user, }, { - separator: true, + label: this.translateService.instant('header.privacy'), + items: [ + { + label: this.translateService.instant('privacy.export_data'), + command: () => { + if (!this.user) { + return; + } + this.dataPrivacyService.downloadDataExportJson(this.user?.id); + }, + icon: 'pi pi-download', + }, + { + label: this.translateService.instant('privacy.delete_data'), + command: () => { + this.confirmation.confirmDialog({ + header: 'privacy.delete_data_header', + message: 'privacy.delete_data_message', + accept: () => { + if (!this.user) { + return; + } + + this.dataPrivacyService + .anonymizeData(this.user.id) + .subscribe(() => {}); + }, + }); + }, + icon: 'pi pi-trash', + }, + ], }, { - label: this.translateService.instant('header.logout'), - command: async () => { - await this.auth.logout(); - }, - icon: 'pi pi-sign-out', + label: this.translateService.instant('header.profile'), + items: [ + { + label: this.translateService.instant('header.edit_profile'), + command: () => { + window.open( + `${this.config.settings.keycloak.url}/realms/${this.config.settings.keycloak.realm}/account`, + '_blank' + ); + }, + icon: 'pi pi-user-edit', + }, + { + label: this.translateService.instant('header.logout'), + command: async () => { + await this.auth.logout(); + }, + icon: 'pi pi-sign-out', + }, + ], }, ]; } translate(lang: string) { + logger.debug('translate', lang); 
this.translateService.use(lang); this.translateService .get('primeng') .subscribe(res => this.ngConfig.setTranslation(res)); + this.langChange.next(); } async loadTheme() { diff --git a/web/src/app/core/guard/permission.guard.ts b/web/src/app/core/guard/permission.guard.ts index fc07bcf..ca1a076 100644 --- a/web/src/app/core/guard/permission.guard.ts +++ b/web/src/app/core/guard/permission.guard.ts @@ -4,6 +4,7 @@ import { Logger } from 'src/app/service/logger.service'; import { ToastService } from 'src/app/service/toast.service'; import { AuthService } from 'src/app/service/auth.service'; import { PermissionsEnum } from 'src/app/model/auth/permissionsEnum'; +import { FeatureFlagService } from 'src/app/service/feature-flag.service'; const log = new Logger('PermissionGuard'); @@ -14,11 +15,18 @@ export class PermissionGuard { constructor( private router: Router, private toast: ToastService, - private auth: AuthService + private auth: AuthService, + private features: FeatureFlagService ) {} async canActivate(route: ActivatedRouteSnapshot): Promise { const permissions = route.data['permissions'] as PermissionsEnum[]; + const checkByPerUserSetup = route.data['checkByPerUserSetup'] as boolean; + + const isPerUserSetup = await this.features.get('PerUserSetup'); + if (checkByPerUserSetup && isPerUserSetup) { + return true; + } if (!permissions || permissions.length === 0) { return true; diff --git a/web/src/app/model/config/app-settings.ts b/web/src/app/model/config/app-settings.ts index 690d6be..3aeb1a9 100644 --- a/web/src/app/model/config/app-settings.ts +++ b/web/src/app/model/config/app-settings.ts @@ -1,5 +1,16 @@ import { Theme } from 'src/app/model/view/theme'; +export interface AppSettingsFromConfig { + termsUrl: string; + privacyURL: string; + imprintURL: string; + themes: Theme[]; + loadingScreen: LoadingScreenSettings; + keycloak: KeycloakSettings; + api: ApiSettings; + languages?: string[]; +} + export interface AppSettings { termsUrl: string; privacyURL: 
string; @@ -8,6 +19,7 @@ export interface AppSettings { loadingScreen: LoadingScreenSettings; keycloak: KeycloakSettings; api: ApiSettings; + languages: string[]; } export interface LoadingScreenSettings { diff --git a/web/src/app/modules/admin/admin.module.ts b/web/src/app/modules/admin/admin.module.ts index d47a27d..f6be88c 100644 --- a/web/src/app/modules/admin/admin.module.ts +++ b/web/src/app/modules/admin/admin.module.ts @@ -22,7 +22,7 @@ const routes: Routes = [ m => m.GroupsModule ), canActivate: [PermissionGuard], - data: { permissions: [PermissionsEnum.groups] }, + data: { permissions: [PermissionsEnum.groups], checkByPerUserSetup: true }, }, { path: 'urls', @@ -36,6 +36,7 @@ const routes: Routes = [ PermissionsEnum.shortUrls, PermissionsEnum.shortUrlsByAssignment, ], + checkByPerUserSetup: true, }, }, { diff --git a/web/src/app/modules/admin/domains/domains.data.service.ts b/web/src/app/modules/admin/domains/domains.data.service.ts index 162f453..2af9aca 100644 --- a/web/src/app/modules/admin/domains/domains.data.service.ts +++ b/web/src/app/modules/admin/domains/domains.data.service.ts @@ -228,7 +228,7 @@ export class DomainsDataService return this.apollo .mutate<{ domain: { delete: boolean } }>({ mutation: gql` - mutation deleteDomain($id: ID!) { + mutation deleteDomain($id: Int!) { domain { delete(id: $id) } @@ -251,7 +251,7 @@ export class DomainsDataService return this.apollo .mutate<{ domain: { restore: boolean } }>({ mutation: gql` - mutation restoreDomain($id: ID!) { + mutation restoreDomain($id: Int!) { domain { restore(id: $id) } diff --git a/web/src/app/modules/admin/groups/form-page/group-form-page.component.html b/web/src/app/modules/admin/groups/form-page/group-form-page.component.html index 150c081..8f0373c 100644 --- a/web/src/app/modules/admin/groups/form-page/group-form-page.component.html +++ b/web/src/app/modules/admin/groups/form-page/group-form-page.component.html @@ -27,8 +27,9 @@ type="text" formControlName="name"/> -
+
{ +export class GroupFormPageComponent + extends FormPageBase< + Group, + GroupCreateInput, + GroupUpdateInput, + GroupsDataService + > + implements OnInit +{ roles: Role[] = []; + isPerUserSetup = true; constructor( + private features: FeatureFlagService, private toast: ToastService, private cds: CommonDataService ) { super(); - this.cds.getAllRoles().subscribe(roles => { - this.roles = roles; - }); + } + + async ngOnInit() { + this.isPerUserSetup = await this.features.get('PerUserSetup'); + if (!this.isPerUserSetup) { + this.cds.getAllRoles().subscribe(roles => { + this.roles = roles; + }); + } if (!this.nodeId) { this.node = this.new(); diff --git a/web/src/app/modules/admin/groups/groups.data.service.ts b/web/src/app/modules/admin/groups/groups.data.service.ts index ba84fa3..c40ce19 100644 --- a/web/src/app/modules/admin/groups/groups.data.service.ts +++ b/web/src/app/modules/admin/groups/groups.data.service.ts @@ -238,7 +238,7 @@ export class GroupsDataService return this.apollo .mutate<{ group: { delete: boolean } }>({ mutation: gql` - mutation deleteGroup($id: ID!) { + mutation deleteGroup($id: Int!) { group { delete(id: $id) } @@ -261,7 +261,7 @@ export class GroupsDataService return this.apollo .mutate<{ group: { restore: boolean } }>({ mutation: gql` - mutation restoreGroup($id: ID!) { + mutation restoreGroup($id: Int!) 
{ group { restore(id: $id) } diff --git a/web/src/app/modules/admin/groups/groups.module.ts b/web/src/app/modules/admin/groups/groups.module.ts index 813c76f..f8762ad 100644 --- a/web/src/app/modules/admin/groups/groups.module.ts +++ b/web/src/app/modules/admin/groups/groups.module.ts @@ -21,7 +21,8 @@ const routes: Routes = [ component: GroupFormPageComponent, canActivate: [PermissionGuard], data: { - permissions: [PermissionsEnum.apiKeysCreate], + permissions: [PermissionsEnum.groupsCreate], + checkByPerUserSetup: true, }, }, { @@ -29,7 +30,8 @@ const routes: Routes = [ component: GroupFormPageComponent, canActivate: [PermissionGuard], data: { - permissions: [PermissionsEnum.apiKeysUpdate], + permissions: [PermissionsEnum.groupsUpdate], + checkByPerUserSetup: true, }, }, { @@ -37,7 +39,8 @@ const routes: Routes = [ component: HistoryComponent, canActivate: [PermissionGuard], data: { - permissions: [PermissionsEnum.domains], + permissions: [PermissionsEnum.groups], + checkByPerUserSetup: true, }, }, ], diff --git a/web/src/app/modules/admin/groups/groups.page.ts b/web/src/app/modules/admin/groups/groups.page.ts index e1447e7..b080d40 100644 --- a/web/src/app/modules/admin/groups/groups.page.ts +++ b/web/src/app/modules/admin/groups/groups.page.ts @@ -1,4 +1,4 @@ -import { Component } from '@angular/core'; +import { Component, OnInit } from '@angular/core'; import { PageBase } from 'src/app/core/base/page-base'; import { ToastService } from 'src/app/service/toast.service'; import { ConfirmationDialogService } from 'src/app/service/confirmation-dialog.service'; @@ -6,28 +6,63 @@ import { PermissionsEnum } from 'src/app/model/auth/permissionsEnum'; import { Group } from 'src/app/model/entities/group'; import { GroupsDataService } from 'src/app/modules/admin/groups/groups.data.service'; import { GroupsColumns } from 'src/app/modules/admin/groups/groups.columns'; +import { AuthService } from 'src/app/service/auth.service'; +import { ConfigService } from 
'src/app/service/config.service'; +import { FeatureFlagService } from 'src/app/service/feature-flag.service'; @Component({ selector: 'app-groups', templateUrl: './groups.page.html', styleUrl: './groups.page.scss', }) -export class GroupsPage extends PageBase< - Group, - GroupsDataService, - GroupsColumns -> { +export class GroupsPage + extends PageBase + implements OnInit +{ constructor( private toast: ToastService, - private confirmation: ConfirmationDialogService + private confirmation: ConfirmationDialogService, + private auth: AuthService, + private config: ConfigService, + private features: FeatureFlagService ) { - super(true, { - read: [PermissionsEnum.groups], - create: [PermissionsEnum.groupsCreate], - update: [PermissionsEnum.groupsUpdate], - delete: [PermissionsEnum.groupsDelete], - restore: [PermissionsEnum.groupsDelete], - }); + super(true); + } + + async ngOnInit() { + this.requiredPermissions = { + read: (await this.features.get('PerUserSetup')) + ? [] + : [PermissionsEnum.groups], + create: (await this.features.get('PerUserSetup')) + ? [] + : (await this.auth.hasAnyPermissionLazy( + this.requiredPermissions.create ?? [] + )) + ? (this.requiredPermissions.create ?? []) + : [], + update: (await this.features.get('PerUserSetup')) + ? [] + : (await this.auth.hasAnyPermissionLazy( + this.requiredPermissions.update ?? [] + )) + ? (this.requiredPermissions.update ?? []) + : [], + delete: (await this.features.get('PerUserSetup')) + ? [] + : (await this.auth.hasAnyPermissionLazy( + this.requiredPermissions.delete ?? [] + )) + ? (this.requiredPermissions.delete ?? []) + : [], + restore: (await this.features.get('PerUserSetup')) + ? [] + : (await this.auth.hasAnyPermissionLazy( + this.requiredPermissions.restore ?? [] + )) + ? (this.requiredPermissions.restore ?? 
[]) + : [], + }; } load(silent?: boolean): void { diff --git a/web/src/app/modules/admin/short-urls/form-page/short-url-form-page.component.html b/web/src/app/modules/admin/short-urls/form-page/short-url-form-page.component.html index 8240320..e4bddcb 100644 --- a/web/src/app/modules/admin/short-urls/form-page/short-url-form-page.component.html +++ b/web/src/app/modules/admin/short-urls/form-page/short-url-form-page.component.html @@ -65,7 +65,7 @@ > -
+

{{ 'common.domain' | translate }}

diff --git a/web/src/app/modules/admin/short-urls/form-page/short-url-form-page.component.ts b/web/src/app/modules/admin/short-urls/form-page/short-url-form-page.component.ts index fa7ea84..48ebe48 100644 --- a/web/src/app/modules/admin/short-urls/form-page/short-url-form-page.component.ts +++ b/web/src/app/modules/admin/short-urls/form-page/short-url-form-page.component.ts @@ -1,4 +1,4 @@ -import { Component } from '@angular/core'; +import { Component, OnInit } from '@angular/core'; import { FormControl, FormGroup, Validators } from '@angular/forms'; import { ToastService } from 'src/app/service/toast.service'; import { FormPageBase } from 'src/app/core/base/form-page-base'; @@ -10,29 +10,45 @@ import { import { ShortUrlsDataService } from 'src/app/modules/admin/short-urls/short-urls.data.service'; import { Group } from 'src/app/model/entities/group'; import { Domain } from 'src/app/model/entities/domain'; +import { FeatureFlagService } from 'src/app/service/feature-flag.service'; @Component({ selector: 'app-short-url-form-page', templateUrl: './short-url-form-page.component.html', styleUrl: './short-url-form-page.component.scss', }) -export class ShortUrlFormPageComponent extends FormPageBase< - ShortUrl, - ShortUrlCreateInput, - ShortUrlUpdateInput, - ShortUrlsDataService -> { +export class ShortUrlFormPageComponent + extends FormPageBase< + ShortUrl, + ShortUrlCreateInput, + ShortUrlUpdateInput, + ShortUrlsDataService + > + implements OnInit +{ groups: Group[] = []; domains: Domain[] = []; - constructor(private toast: ToastService) { + isPerUserSetup = true; + + constructor( + private features: FeatureFlagService, + private toast: ToastService + ) { super(); this.dataService.getAllGroups().subscribe(groups => { this.groups = groups; }); - this.dataService.getAllDomains().subscribe(domains => { - this.domains = domains; - }); + } + + async ngOnInit() { + this.isPerUserSetup = await this.features.get('PerUserSetup'); + + if (!this.isPerUserSetup) { + 
this.dataService.getAllDomains().subscribe(domains => { + this.domains = domains; + }); + } if (!this.nodeId) { this.node = this.new(); diff --git a/web/src/app/modules/admin/short-urls/short-urls.data.service.ts b/web/src/app/modules/admin/short-urls/short-urls.data.service.ts index 6fa766a..ddfe661 100644 --- a/web/src/app/modules/admin/short-urls/short-urls.data.service.ts +++ b/web/src/app/modules/admin/short-urls/short-urls.data.service.ts @@ -1,5 +1,5 @@ import { Injectable, Provider } from '@angular/core'; -import { merge, Observable } from 'rxjs'; +import { forkJoin, merge, Observable } from 'rxjs'; import { Create, Delete, @@ -48,50 +48,88 @@ export class ShortUrlsDataService skip?: number | undefined, take?: number | undefined ): Observable> { - return this.apollo - .query<{ shortUrls: QueryResult }>({ - query: gql` - query getShortUrls($filter: [ShortUrlFilter], $sort: [ShortUrlSort]) { - shortUrls(filter: $filter, sort: $sort) { - count - totalCount - nodes { + const query1 = this.apollo.query<{ shortUrls: QueryResult }>({ + query: gql` + query getShortUrls($filter: [ShortUrlFilter], $sort: [ShortUrlSort]) { + shortUrls(filter: $filter, sort: $sort) { + nodes { + id + shortUrl + targetUrl + description + loadingScreen + visits + group { id - shortUrl - targetUrl - description - loadingScreen - visits - group { - id - name - } - domain { - id - name - } - - ...DB_MODEL + name } + domain { + id + name + } + + ...DB_MODEL } } + } - ${DB_MODEL_FRAGMENT} - `, - variables: { - filter: [{ group: { deleted: { equal: false } } }, ...(filter ?? [])], - sort: [{ id: SortOrder.DESC }, ...(sort ?? [])], - skip: skip, - take: take, - }, + ${DB_MODEL_FRAGMENT} + `, + variables: { + filter: [{ group: { deleted: { equal: false } } }, ...(filter ?? [])], + sort: [{ id: SortOrder.DESC }, ...(sort ?? 
[])], + skip, + take, + }, + }); + + const query2 = this.apollo.query<{ shortUrls: QueryResult }>({ + query: gql` + query getShortUrls($filter: [ShortUrlFilter], $sort: [ShortUrlSort]) { + shortUrls(filter: $filter, sort: $sort) { + nodes { + id + shortUrl + targetUrl + description + loadingScreen + visits + group { + id + name + } + domain { + id + name + } + + ...DB_MODEL + } + } + } + + ${DB_MODEL_FRAGMENT} + `, + variables: { + filter: [{ group: { isNull: true } }, ...(filter ?? [])], + sort: [{ id: SortOrder.DESC }, ...(sort ?? [])], + skip, + take, + }, + }); + + return forkJoin([query1, query2]).pipe( + map(([result1, result2]) => { + const nodes = [ + ...result1.data.shortUrls.nodes, + ...result2.data.shortUrls.nodes, + ]; + const uniqueNodes = Array.from( + new Map(nodes.map(node => [node.id, node])).values() + ); + return { ...result1.data.shortUrls, nodes: uniqueNodes }; }) - .pipe( - catchError(err => { - this.spinner.hide(); - throw err; - }) - ) - .pipe(map(result => result.data.shortUrls)); + ); } loadById(id: number): Observable { @@ -285,7 +323,7 @@ export class ShortUrlsDataService return this.apollo .mutate<{ shortUrl: { delete: boolean } }>({ mutation: gql` - mutation deleteShortUrl($id: ID!) { + mutation deleteShortUrl($id: Int!) { shortUrl { delete(id: $id) } @@ -308,7 +346,7 @@ export class ShortUrlsDataService return this.apollo .mutate<{ shortUrl: { restore: boolean } }>({ mutation: gql` - mutation restoreShortUrl($id: ID!) { + mutation restoreShortUrl($id: Int!) 
{ shortUrl { restore(id: $id) } diff --git a/web/src/app/modules/admin/short-urls/short-urls.module.ts b/web/src/app/modules/admin/short-urls/short-urls.module.ts index 4af7061..a144ca7 100644 --- a/web/src/app/modules/admin/short-urls/short-urls.module.ts +++ b/web/src/app/modules/admin/short-urls/short-urls.module.ts @@ -22,6 +22,7 @@ const routes: Routes = [ canActivate: [PermissionGuard], data: { permissions: [PermissionsEnum.shortUrlsCreate], + checkByPerUserSetup: true, }, }, { @@ -30,6 +31,7 @@ const routes: Routes = [ canActivate: [PermissionGuard], data: { permissions: [PermissionsEnum.shortUrlsUpdate], + checkByPerUserSetup: true, }, }, { @@ -37,7 +39,8 @@ const routes: Routes = [ component: HistoryComponent, canActivate: [PermissionGuard], data: { - permissions: [PermissionsEnum.domains], + permissions: [PermissionsEnum.shortUrls], + checkByPerUserSetup: true, }, }, ], diff --git a/web/src/app/modules/admin/short-urls/short-urls.page.html b/web/src/app/modules/admin/short-urls/short-urls.page.html index 1ead5a3..e0c3333 100644 --- a/web/src/app/modules/admin/short-urls/short-urls.page.html +++ b/web/src/app/modules/admin/short-urls/short-urls.page.html @@ -41,7 +41,7 @@ icon="pi pi-pencil" tooltipPosition="left" pTooltip="{{ 'table.update' | translate }}" - [disabled]="url?.deleted" + [disabled]="url.deleted" routerLink="edit/{{ url.id }}"> { return new Promise((resolve, reject) => { this.http - .get(`/assets/config/${environment.config}`) + .get(`/assets/config/${environment.config}`) .pipe( catchError(error => { reject(error); @@ -48,13 +52,17 @@ export class ConfigService { }) ) .subscribe(settings => { - this.settings = settings; - if (this.settings.themes.length === 0) { - this.settings.themes.push({ + if (settings.themes.length === 0) { + settings.themes.push({ label: 'Maxlan', name: 'maxlan', }); } + if (settings.languages?.length === 0) { + settings.languages = ['en', 'de']; + } + + this.settings = settings as AppSettings; resolve(); }); }); diff 
--git a/web/src/app/service/data-privacy.service.ts b/web/src/app/service/data-privacy.service.ts new file mode 100644 index 0000000..5b8ab8b --- /dev/null +++ b/web/src/app/service/data-privacy.service.ts @@ -0,0 +1,77 @@ +import { Injectable } from '@angular/core'; +import { Apollo, gql } from 'apollo-angular'; +import { Observable } from 'rxjs'; +import { map } from 'rxjs/operators'; + +@Injectable({ + providedIn: 'root', +}) +export class DataPrivacyService { + constructor(private apollo: Apollo) {} + + downloadDataExportJson(userId: number): void { + this.exportDataPrivacy(userId).subscribe(dataString => { + const jsonData = JSON.stringify(JSON.parse(dataString), null, 2); // Format nicely + const blob = new Blob([jsonData], { type: 'application/json' }); + const url = window.URL.createObjectURL(blob); + + const anchor = document.createElement('a'); + anchor.href = url; + anchor.download = 'exported-data.json'; + anchor.click(); + + window.URL.revokeObjectURL(url); + }); + } + + exportDataPrivacy(userId: number): Observable { + return this.apollo + .mutate<{ privacy: { exportData: string } }>({ + mutation: gql` + mutation exportDataPrivacy($userId: Int!) { + privacy { + exportData(userId: $userId) + } + } + `, + variables: { + userId, + }, + }) + .pipe(map(result => result.data?.privacy.exportData ?? '')); + } + + anonymizeData(userId: number): Observable { + return this.apollo + .mutate<{ privacy: { anonymizeData: boolean } }>({ + mutation: gql` + mutation anonymizeDataPrivacy($userId: Int!) { + privacy { + anonymizeData(userId: $userId) + } + } + `, + variables: { + userId, + }, + }) + .pipe(map(result => result.data?.privacy.anonymizeData ?? false)); + } + + deleteData(userId: number): Observable { + return this.apollo + .mutate<{ privacy: { deleteData: boolean } }>({ + mutation: gql` + mutation deleteDataPrivacy($userId: Int!) 
{ + privacy { + deleteData(userId: $userId) + } + } + `, + variables: { + userId, + }, + }) + .pipe(map(result => result.data?.privacy.deleteData ?? false)); + } +} diff --git a/web/src/app/service/sidebar.service.ts b/web/src/app/service/sidebar.service.ts index b6f3a2d..49bff52 100644 --- a/web/src/app/service/sidebar.service.ts +++ b/web/src/app/service/sidebar.service.ts @@ -3,6 +3,7 @@ import { BehaviorSubject } from 'rxjs'; import { MenuElement } from 'src/app/model/view/menu-element'; import { AuthService } from 'src/app/service/auth.service'; import { PermissionsEnum } from 'src/app/model/auth/permissionsEnum'; +import { FeatureFlagService } from 'src/app/service/feature-flag.service'; @Injectable({ providedIn: 'root', @@ -11,7 +12,10 @@ export class SidebarService { visible$ = new BehaviorSubject(true); elements$ = new BehaviorSubject([]); - constructor(private auth: AuthService) { + constructor( + private auth: AuthService, + private featureFlags: FeatureFlagService + ) { this.auth.user$.subscribe(async () => { await this.setElements(); }); @@ -40,16 +44,19 @@ export class SidebarService { label: 'common.groups', icon: 'pi pi-tags', routerLink: ['/admin/groups'], - visible: await this.auth.hasAnyPermissionLazy([PermissionsEnum.groups]), + visible: + (await this.auth.hasAnyPermissionLazy([PermissionsEnum.groups])) || + (await this.featureFlags.get('PerUserSetup')), }, { label: 'common.urls', icon: 'pi pi-tag', routerLink: ['/admin/urls'], - visible: await this.auth.hasAnyPermissionLazy([ - PermissionsEnum.shortUrls, - PermissionsEnum.shortUrlsByAssignment, - ]), + visible: + (await this.auth.hasAnyPermissionLazy([ + PermissionsEnum.shortUrls, + PermissionsEnum.shortUrlsByAssignment, + ])) || (await this.featureFlags.get('PerUserSetup')), }, await this.sectionAdmin(), ]; diff --git a/web/src/app/service/translation-preloader.service.ts b/web/src/app/service/translation-preloader.service.ts new file mode 100644 index 0000000..be74ea4 --- /dev/null +++ 
b/web/src/app/service/translation-preloader.service.ts @@ -0,0 +1,25 @@ +import { Injectable } from '@angular/core'; +import { TranslateService } from '@ngx-translate/core'; +import { HttpClient } from '@angular/common/http'; +import { lastValueFrom } from 'rxjs'; +import { ConfigService } from 'src/app/service/config.service'; + +@Injectable({ providedIn: 'root' }) +export class TranslationPreloaderService { + constructor( + private translate: TranslateService, + private http: HttpClient, + private config: ConfigService + ) {} + + preloadLanguages(): Promise { + const loadRequests = this.config.settings.languages.map(async lang => { + const translations = await lastValueFrom( + this.http.get(`/assets/i18n/${lang}.json`) + ); + this.translate.setTranslation(lang, translations, true); // merge = true + }); + + return Promise.all(loadRequests).then(() => {}); + } +} diff --git a/web/src/app/service/user_settings.service.ts b/web/src/app/service/user-settings.service.ts similarity index 100% rename from web/src/app/service/user_settings.service.ts rename to web/src/app/service/user-settings.service.ts diff --git a/web/src/assets/i18n/de.json b/web/src/assets/i18n/de.json index 839c5ad..aa96be0 100644 --- a/web/src/assets/i18n/de.json +++ b/web/src/assets/i18n/de.json @@ -76,7 +76,10 @@ "count_header": "Gruppe(n)" }, "header": { - "logout": "Ausloggen" + "edit_profile": "Profil bearbeiten", + "logout": "Ausloggen", + "privacy": "Datenschutz", + "profile": "Profil" }, "history": { "header": "Historie" @@ -211,6 +214,12 @@ "weak": "Woche", "weekHeader": "Wk" }, + "privacy": { + "delete_data": "Daten löschen", + "delete_data_header": "Bestätigung zur Löschung deiner personenbezogenen Daten", + "delete_data_message": "Bestätigung zur Löschung deiner personenbezogenen Daten

Du bist dabei, dein Konto dauerhaft zu löschen.
Damit werden deine personenbezogenen Daten unkenntlich gemacht.
Eine Wiederherstellung deines Kontos oder deiner Daten ist anschließend nicht mehr möglich.

Hinweis:
Aus rechtlichen oder betrieblichen Gründen (z. B. gesetzliche Aufbewahrungspflichten) können bestimmte Daten weiterhin in anonymisierter oder pseudonymisierter Form gespeichert bleiben. Diese enthalten keine Informationen mehr, die dich als Person identifizierbar machen.

Bitte bestätige, dass du dies verstanden hast und mit der Löschung fortfahren möchtest.", + "export_data": "Daten exportieren" + }, "role": { "count_header": "Rolle(n)" }, diff --git a/web/src/assets/i18n/en.json b/web/src/assets/i18n/en.json index bc91320..4f95c94 100644 --- a/web/src/assets/i18n/en.json +++ b/web/src/assets/i18n/en.json @@ -76,7 +76,10 @@ "count_header": "Group(s)" }, "header": { - "logout": "Logout" + "edit_profile": "Edit profile", + "logout": "Logout", + "privacy": "Privacy", + "profile": "Profile" }, "history": { "header": "History" @@ -211,6 +214,12 @@ "weak": "Weak", "weekHeader": "Wk" }, + "privacy": { + "delete_data": "Delete data", + "delete_data_header": "Confirmation of deletion of your personal data", + "delete_data_message": "Confirmation of deletion of your personal data

You are about to permanently delete your account.
Your personal data will be anonymized and cannot be recovered.
It will no longer be possible to restore your account or associated data.

Note:
For legal or operational reasons (e.g. statutory retention obligations), certain data may continue to be stored in anonymized or pseudonymized form. This data no longer contains any information that can identify you as a person.

Please confirm that you understand this and wish to proceed with the deletion.", + "export_data": "Export data" + }, "role": { "count_header": "Role(s)" },