Handle data privacy #15
Some checks failed
Test API before pr merge / test-lint (pull_request) Successful in 11s
Test before pr merge / test-translation-lint (pull_request) Failing after 41s
Test before pr merge / test-lint (pull_request) Failing after 44s
Test before pr merge / test-before-merge (pull_request) Successful in 1m43s
This commit is contained in: parent 653fce9729, commit 11598cb160
@@ -53,7 +53,7 @@ class RouteUserExtension:
         user_id = cls._get_user_id_from_token(request)
         if not user_id:
-            return None
+            return await cls.get_dev_user()

         return await userDao.find_by_keycloak_id(user_id)

@@ -1,5 +1,5 @@
 from asyncio import iscoroutinefunction
-from typing import Self, Type
+from typing import Self, Type, Union

 from ariadne.types import Resolver

@@ -58,7 +58,11 @@ class MutationFieldBuilder(FieldBuilderABC):
         self._resolver = resolver_wrapper
         return self

-    def with_input(self, input_type: Type[InputABC], input_key: str = "input") -> Self:
+    def with_input(
+        self,
+        input_type: Type[Union[InputABC, str, int, bool]],
+        input_key: str = "input",
+    ) -> Self:
         self._input_type = input_type
         self._input_key = input_key
         return self
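with_input() previously only accepted InputABC subclasses; the widened signature also admits plain scalars, which is what the new PrivacyMutation relies on (.with_input(int, "userId")). A minimal sketch of both styles, assuming this PR's import paths and the resolver convention visible in privacy_mutation.py (the scalar arrives as the resolver's first positional argument):

# Sketch only: field definitions using the widened with_input().
# "ping"/"changeSetting" are hypothetical field names for illustration.
from api_graphql.field.mutation_field_builder import MutationFieldBuilder
from api_graphql.input.setting_input import SettingInput


async def resolve_ping(user_id: int, *_):
    # scalar input: the userId value is passed straight through
    return f"pong for user {user_id}"


async def resolve_change_setting(obj: SettingInput, *_):
    # structured input: obj is an InputABC instance, as before this change
    return obj.key


scalar_field = (
    MutationFieldBuilder("ping")
    .with_resolver(resolve_ping)
    .with_input(int, "userId")   # now valid: Type[Union[InputABC, str, int, bool]]
)

object_field = (
    MutationFieldBuilder("changeSetting")
    .with_resolver(resolve_change_setting)
    .with_input(SettingInput)    # unchanged InputABC usage
)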
@@ -11,4 +11,6 @@ type Mutation {
   setting: SettingMutation
   userSetting: UserSettingMutation
   featureFlag: FeatureFlagMutation
+
+  privacy: PrivacyMutation
 }
api/src/api_graphql/graphql/privacy.gql (new file, +5)
@@ -0,0 +1,5 @@
+type PrivacyMutation {
+  exportData(userId: Int!): String
+  anonymizeData(userId: Int!): String
+  deleteData(userId: Int!): String
+}
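For reference, the new schema can be exercised with an ordinary GraphQL HTTP request. A minimal sketch; the endpoint URL and bearer-token auth are assumptions about the deployment, not part of this PR:

# Sketch only: calling the new exportData mutation over HTTP.
import requests

GRAPHQL_URL = "http://localhost:8000/graphql"  # assumed endpoint
TOKEN = "<keycloak-access-token>"              # assumed auth scheme

query = """
mutation ExportData($userId: Int!) {
  privacy {
    exportData(userId: $userId)
  }
}
"""

resp = requests.post(
    GRAPHQL_URL,
    json={"query": query, "variables": {"userId": 42}},
    headers={"Authorization": f"Bearer {TOKEN}"},
    timeout=30,
)
resp.raise_for_status()
print(resp.json())  # expected shape: {"data": {"privacy": {"exportData": "<json string>"}}}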
@@ -86,3 +86,7 @@ class Mutation(MutationABC):
                 Permissions.administrator,
             ],
         )
+        self.add_mutation_type(
+            "privacy",
+            "Privacy",
+        )
api/src/api_graphql/mutations/privacy_mutation.py (new file, +63)
@@ -0,0 +1,63 @@
+from api.route import Route
+from api_graphql.abc.mutation_abc import MutationABC
+from api_graphql.field.mutation_field_builder import MutationFieldBuilder
+from api_graphql.service.exceptions import UnauthorizedException, AccessDenied
+from core.logger import APILogger
+from service.data_privacy_service import DataPrivacyService
+from service.permission.permissions_enum import Permissions
+
+logger = APILogger(__name__)
+
+
+class PrivacyMutation(MutationABC):
+    def __init__(self):
+        MutationABC.__init__(self, "Privacy")
+
+        self.field(
+            MutationFieldBuilder("exportData")
+            .with_resolver(self.resolve_export_data)
+            .with_input(int, "userId")
+            .with_public(True)
+        )
+        self.field(
+            MutationFieldBuilder("anonymizeData")
+            .with_resolver(self.resolve_anonymize_data)
+            .with_input(int, "userId")
+            .with_public(True)
+        )
+        self.field(
+            MutationFieldBuilder("deleteData")
+            .with_resolver(self.resolve_delete_data)
+            .with_input(int, "userId")
+            .with_public(True)
+        )
+
+    @staticmethod
+    async def _permission_check(user_id: int):
+        user = await Route.get_user()
+        if user is None:
+            raise UnauthorizedException()
+
+        if user.id != user_id and not user.has_permission(Permissions.administrator):
+            raise AccessDenied()
+
+    @staticmethod
+    async def resolve_export_data(user_id: int, *_):
+        logger.debug(f"export data for user: {user_id}")
+        await PrivacyMutation._permission_check(user_id)
+
+        return await DataPrivacyService.export_user_data(user_id)
+
+    @staticmethod
+    async def resolve_anonymize_data(user_id: int, *_):
+        logger.debug(f"anonymize data for user: {user_id}")
+        await PrivacyMutation._permission_check(user_id)
+
+        return await DataPrivacyService.anonymize_user(user_id)
+
+    @staticmethod
+    async def resolve_delete_data(user_id: int, *_):
+        logger.debug(f"delete data for user: {user_id}")
+        await PrivacyMutation._permission_check(user_id)
+
+        return await DataPrivacyService.delete_user_data(user_id)
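The _permission_check helper above enforces a single rule: a user may act on their own data, and administrators may act on any user's. A self-contained restatement of that rule as a toy predicate (illustration only, not the project's API):

# Toy restatement of the access rule in PrivacyMutation._permission_check.
def may_access(requesting_user_id: int, is_admin: bool, target_user_id: int) -> bool:
    # own data is always allowed; administrators may act on any user's data
    return requesting_user_id == target_user_id or is_admin


assert may_access(1, False, 1)       # own data -> allowed
assert not may_access(1, False, 2)   # someone else's data, not admin -> AccessDenied
assert may_access(1, True, 2)        # administrator -> allowed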
@@ -2,7 +2,7 @@ from api_graphql.abc.mutation_abc import MutationABC
 from api_graphql.input.setting_input import SettingInput
 from core.logger import APILogger
 from data.schemas.system.setting import Setting
-from data.schemas.system.setting_dao import settingsDao
+from data.schemas.system.setting_dao import settingDao
 from service.permission.permissions_enum import Permissions

 logger = APILogger(__name__)
@@ -22,11 +22,11 @@ class SettingMutation(MutationABC):
         async def resolve_change(obj: SettingInput, *_):
             logger.debug(f"create new setting: {input}")

-            setting = await settingsDao.find_single_by({Setting.key: obj.key})
+            setting = await settingDao.find_single_by({Setting.key: obj.key})
             if setting is None:
                 raise ValueError(f"Setting with key {obj.key} not found")

             setting.value = obj.value
-            await settingsDao.update(setting)
+            await settingDao.update(setting)

-            return await settingsDao.get_by_id(setting.id)
+            return await settingDao.get_by_id(setting.id)
@@ -5,8 +5,8 @@ from api_graphql.input.user_setting_input import UserSettingInput
 from core.logger import APILogger
 from core.string import first_to_lower
 from data.schemas.public.user_setting import UserSetting
-from data.schemas.public.user_setting_dao import userSettingsDao
+from data.schemas.public.user_setting_dao import userSettingDao
-from data.schemas.system.setting_dao import settingsDao
+from data.schemas.system.setting_dao import settingDao
 from service.permission.permissions_enum import Permissions

 logger = APILogger(__name__)
@@ -37,13 +37,13 @@ class UserSettingMutation(MutationABC):
                 logger.debug("user not authorized")
                 return None

-            setting = await userSettingsDao.find_single_by(
+            setting = await userSettingDao.find_single_by(
                 [{UserSetting.user_id: user.id}, {UserSetting.key: obj.key}]
             )
             if setting is None:
-                await userSettingsDao.create(UserSetting(0, user.id, obj.key, obj.value))
+                await userSettingDao.create(UserSetting(0, user.id, obj.key, obj.value))
             else:
                 setting.value = obj.value
-                await userSettingsDao.update(setting)
+                await userSettingDao.update(setting)

-            return await userSettingsDao.find_by_key(user, obj.key)
+            return await userSettingDao.find_by_key(user, obj.key)
@@ -32,9 +32,9 @@ from data.schemas.public.group_dao import groupDao
 from data.schemas.public.short_url import ShortUrl
 from data.schemas.public.short_url_dao import shortUrlDao
 from data.schemas.public.user_setting import UserSetting
-from data.schemas.public.user_setting_dao import userSettingsDao
+from data.schemas.public.user_setting_dao import userSettingDao
 from data.schemas.system.feature_flag_dao import featureFlagDao
-from data.schemas.system.setting_dao import settingsDao
+from data.schemas.system.setting_dao import settingDao
 from service.permission.permissions_enum import Permissions


@@ -206,8 +206,8 @@ class Query(QueryABC):
     @staticmethod
     async def _resolve_settings(*args, **kwargs):
         if "key" in kwargs:
-            return [await settingsDao.find_by_key(kwargs["key"])]
+            return [await settingDao.find_by_key(kwargs["key"])]
-        return await settingsDao.get_all()
+        return await settingDao.get_all()

     @staticmethod
     async def _resolve_user_settings(*args, **kwargs):
@@ -216,10 +216,10 @@ class Query(QueryABC):
             return None

         if "key" in kwargs:
-            return await userSettingsDao.find_by(
+            return await userSettingDao.find_by(
                 [{UserSetting.user_id: user.id}, {UserSetting.key: kwargs["key"]}]
             )
-        return await userSettingsDao.find_by({UserSetting.user_id: user.id})
+        return await userSettingDao.find_by({UserSetting.user_id: user.id})

     @staticmethod
     async def _resolve_feature_flags(*args, **kwargs):
(File diff suppressed because it is too large.)
@@ -39,13 +39,13 @@ class DBContext:
             return await self._pool.select_map(statement, args)
         except (OperationalError, PoolTimeout) as e:
             if self._fails >= 3:
-                logger.error(f"Database error caused by {statement}", e)
+                logger.error(f"Database error caused by `{statement}`", e)
                 uid = uuid.uuid4()
                 raise Exception(
                     f"Query failed three times with {type(e).__name__}. Contact an admin with the UID: {uid}"
                 )

-            logger.error(f"Database error caused by {statement}", e)
+            logger.error(f"Database error caused by `{statement}`", e)
             self._fails += 1
             try:
                 logger.debug("Retry select")
@@ -54,7 +54,7 @@ class DBContext:
                 pass
             return []
         except Exception as e:
-            logger.error(f"Database error caused by {statement}", e)
+            logger.error(f"Database error caused by `{statement}`", e)
             raise e

     async def select(
@@ -65,13 +65,13 @@ class DBContext:
             return await self._pool.select(statement, args)
         except (OperationalError, PoolTimeout) as e:
             if self._fails >= 3:
-                logger.error(f"Database error caused by {statement}", e)
+                logger.error(f"Database error caused by `{statement}`", e)
                 uid = uuid.uuid4()
                 raise Exception(
                     f"Query failed three times with {type(e).__name__}. Contact an admin with the UID: {uid}"
                 )

-            logger.error(f"Database error caused by {statement}", e)
+            logger.error(f"Database error caused by `{statement}`", e)
             self._fails += 1
             try:
                 logger.debug("Retry select")
@@ -80,5 +80,5 @@ class DBContext:
                 pass
             return []
         except Exception as e:
-            logger.error(f"Database error caused by {statement}", e)
+            logger.error(f"Database error caused by `{statement}`", e)
             raise e
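These hunks only touch the log messages, but they show the surrounding retry shape: pool errors bump a failure counter and the query is retried, and after three failures the error is surfaced with a support UID. A loose, self-contained sketch of that shape; the exact retry and empty-result fallback paths are truncated in this view, and the names below are illustrative rather than the real DBContext API:

# Loose sketch of the "three failures, then raise with a UID" pattern seen above.
import uuid


class RetryingSelect:
    def __init__(self, pool):
        self._pool = pool
        self._fails = 0

    async def select(self, statement: str, args=None):
        try:
            return await self._pool.select(statement, args)
        except Exception as e:  # stands in for (OperationalError, PoolTimeout)
            if self._fails >= 3:
                uid = uuid.uuid4()
                raise Exception(
                    f"Query failed three times with {type(e).__name__}. "
                    f"Contact an admin with the UID: {uid}"
                )
            self._fails += 1
            # retry the same statement; the real class logs "Retry select" here
            return await self.select(statement, args)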
api/src/core/database/sql_select_builder.py (new file, +150)
@@ -0,0 +1,150 @@
+from typing import Optional
+
+from core.database.external_data_temp_table_builder import ExternalDataTempTableBuilder
+
+
+class SQLSelectBuilder:
+
+    def __init__(self, table_name: str, primary_key: str):
+        self._table_name = table_name
+        self._primary_key = primary_key
+
+        self._temp_tables: dict[str, ExternalDataTempTableBuilder] = {}
+        self._to_use_temp_tables: list[str] = []
+        self._attributes: list[str] = []
+        self._tables: list[str] = [table_name]
+        self._joins: dict[str, (str, str)] = {}
+        self._conditions: list[str] = []
+        self._order_by: str = ""
+        self._limit: Optional[int] = None
+        self._offset: Optional[int] = None
+
+    def with_temp_table(
+        self, temp_table: ExternalDataTempTableBuilder
+    ) -> "SQLSelectBuilder":
+        self._temp_tables[temp_table.table_name] = temp_table
+        return self
+
+    def use_temp_table(self, temp_table_name: str):
+        if temp_table_name not in self._temp_tables:
+            raise ValueError(f"Temp table {temp_table_name} not found.")
+
+        self._to_use_temp_tables.append(temp_table_name)
+
+    def with_attribute(self, attr: str, ignore_table_name=False) -> "SQLSelectBuilder":
+        if not ignore_table_name and not attr.startswith(self._table_name):
+            attr = f"{self._table_name}.{attr}"
+
+        self._attributes.append(attr)
+        return self
+
+    def with_foreign_attribute(self, attr: str) -> "SQLSelectBuilder":
+        self._attributes.append(attr)
+        return self
+
+    def with_table(self, table_name: str) -> "SQLSelectBuilder":
+        self._tables.append(table_name)
+        return self
+
+    def _check_prefix(self, attr: str) -> str:
+        assert attr is not None
+
+        valid_prefixes = [
+            "levenshtein",
+            self._table_name,
+            *self._joins.keys(),
+            *self._temp_tables.keys(),
+        ]
+        if not any(attr.startswith(f"{prefix}.") for prefix in valid_prefixes):
+            attr = f"{self._table_name}.{attr}"
+
+        return attr
+
+    def with_value_condition(
+        self, attr: str, operator: str, value: str
+    ) -> "SQLSelectBuilder":
+        attr = self._check_prefix(attr)
+        self._conditions.append(f"{attr} {operator} {value}")
+        return self
+
+    def with_levenshtein_condition(self, condition: str) -> "SQLSelectBuilder":
+        self._conditions.append(condition)
+        return self
+
+    def with_condition(self, attr: str, operator: str) -> "SQLSelectBuilder":
+        attr = self._check_prefix(attr)
+        self._conditions.append(f"{attr} {operator}")
+        return self
+
+    def with_grouped_conditions(self, conditions: list[str]) -> "SQLSelectBuilder":
+        self._conditions.append(f"({' AND '.join(conditions)})")
+        return self
+
+    def with_left_join(self, table: str, on: str) -> "SQLSelectBuilder":
+        if table in self._joins:
+            self._joins[table] = (f"{self._joins[table][0]} AND {on}", "LEFT")
+
+        self._joins[table] = (on, "LEFT")
+        return self
+
+    def with_inner_join(self, table: str, on: str) -> "SQLSelectBuilder":
+        if table in self._joins:
+            self._joins[table] = (f"{self._joins[table][0]} AND {on}", "INNER")
+
+        self._joins[table] = (on, "INNER")
+        return self
+
+    def with_right_join(self, table: str, on: str) -> "SQLSelectBuilder":
+        if table in self._joins:
+            self._joins[table] = (f"{self._joins[table][0]} AND {on}", "RIGHT")
+
+        self._joins[table] = (on, "RIGHT")
+        return self
+
+    def with_limit(self, limit: int) -> "SQLSelectBuilder":
+        self._limit = limit
+        return self
+
+    def with_offset(self, offset: int) -> "SQLSelectBuilder":
+        self._offset = offset
+        return self
+
+    def with_order_by(self, column: str, direction: str = "ASC") -> "SQLSelectBuilder":
+        self._order_by = f"{column} {direction}"
+        return self
+
+    async def _handle_temp_table_use(self, query) -> str:
+        new_query = ""
+
+        for temp_table_name in self._to_use_temp_tables:
+            temp_table = self._temp_tables[temp_table_name]
+            new_query += await self._temp_tables[temp_table_name].build()
+            self.with_left_join(
+                temp_table.table_name,
+                f"{temp_table.join_ref_table}.{self._primary_key} = {temp_table.table_name}.{temp_table.primary_key}",
+            )
+
+        return f"{new_query} {query}" if new_query != "" else query
+
+    async def build(self) -> str:
+        query = await self._handle_temp_table_use("")
+
+        attributes = ", ".join(self._attributes) if self._attributes else "*"
+        query += f"SELECT {attributes} FROM {", ".join(self._tables)}"
+
+        for join in self._joins:
+            query += f" {self._joins[join][1]} JOIN {join} ON {self._joins[join][0]}"
+
+        if self._conditions:
+            query += " WHERE " + " AND ".join(self._conditions)
+
+        if self._order_by:
+            query += f" ORDER BY {self._order_by}"
+
+        if self._limit is not None:
+            query += f" LIMIT {self._limit}"
+
+        if self._offset is not None:
+            query += f" OFFSET {self._offset}"
+
+        return query
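A minimal usage sketch for the new builder. The import path follows this PR's layout, the table and column names are placeholders, and note that the FROM f-string in build() nests double quotes, which requires Python 3.12 or newer:

# Sketch only: composing a query with SQLSelectBuilder.
# Table/column names are placeholders; import path assumes this PR's layout.
import asyncio

from core.database.sql_select_builder import SQLSelectBuilder


async def main() -> None:
    builder = (
        SQLSelectBuilder("short_url", "id")
        .with_attribute("id")                 # prefixed to short_url.id
        .with_attribute("target_url")
        .with_left_join("public.user", "short_url.user_id = public.user.id")
        .with_value_condition("user_id", "=", "42")
        .with_order_by("short_url.id", "DESC")
        .with_limit(10)
    )
    print(await builder.build())
    # SELECT short_url.id, short_url.target_url FROM short_url
    #   LEFT JOIN public.user ON short_url.user_id = public.user.id
    #   WHERE short_url.user_id = 42 ORDER BY short_url.id DESC LIMIT 10


asyncio.run(main())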
@@ -21,4 +21,4 @@ class UserSettingDao(DbModelDaoABC[UserSetting]):
         )


-userSettingsDao = UserSettingDao()
+userSettingDao = UserSettingDao()
@@ -17,4 +17,4 @@ class SettingDao(DbModelDaoABC[Setting]):
         return await self.find_single_by({Setting.key: key})


-settingsDao = SettingDao()
+settingDao = SettingDao()
@@ -3,7 +3,7 @@ from typing import Any
 from core.logger import DBLogger
 from data.abc.data_seeder_abc import DataSeederABC
 from data.schemas.system.setting import Setting
-from data.schemas.system.setting_dao import settingsDao
+from data.schemas.system.setting_dao import settingDao

 logger = DBLogger(__name__)

@@ -18,8 +18,8 @@ class SettingsSeeder(DataSeederABC):

     @staticmethod
     async def _seed_if_not_exists(key: str, value: Any):
-        existing = await settingsDao.find_by_key(key)
+        existing = await settingDao.find_by_key(key)
         if existing is not None:
             return

-        await settingsDao.create(Setting(0, key, str(value)))
+        await settingDao.create(Setting(0, key, str(value)))
api/src/service/data_privacy_service.py (new file, +119)
@@ -0,0 +1,119 @@
+import importlib
+import json
+from typing import Type
+
+from api.auth.keycloak_client import Keycloak
+from core.database.abc.data_access_object_abc import DataAccessObjectABC
+from core.database.abc.db_model_dao_abc import DbModelDaoABC
+from core.logger import Logger
+from core.string import first_to_lower
+from data.schemas.administration.user_dao import userDao
+
+logger = Logger("DataPrivacy")
+
+
+class DataPrivacyService:
+
+    @staticmethod
+    def _dynamic_import_dao(dao_class: Type[DataAccessObjectABC]):
+        """
+        Dynamically import a DAO class and its instance.
+        :param dao_class: The DAO class to be imported.
+        :return: The DAO instance.
+        """
+        module = importlib.import_module(dao_class.__module__)
+        dao_instance = getattr(
+            module, first_to_lower(first_to_lower(dao_class.__name__))
+        )
+        return dao_instance
+
+    @classmethod
+    async def _collect_user_relevant_dao(cls):
+        """
+        Collect all DAO classes that are relevant for data privacy.
+        :return: List of relevant DAO classes.
+        """
+        # This method should return a list of DAOs that are relevant for data privacy
+        # For example, it could return a list of DAOs that contain user data
+        classes: list[DataAccessObjectABC] = [
+            cls._dynamic_import_dao(dao) for dao in DbModelDaoABC.__subclasses__()
+        ]
+        return [x for x in classes if x.has_attribute("user_id")]
+
+    @classmethod
+    async def export_user_data(cls, user_id: int):
+        """
+        Export user data from the database.
+        :param user_id: ID of the user whose data is to be exported.
+        :return: User data in a structured format.
+        """
+        # Logic to export user data
+        user = await userDao.find_by_id(user_id)
+        if user is None:
+            raise ValueError("User not found")
+
+        collected_data = [userDao.to_dict(await userDao.find_by_id(user_id))]
+
+        daos = await cls._collect_user_relevant_dao()
+        for dao in daos:
+            data = await dao.find_by([{"userid": user_id}])
+            collected_data.append([dao.to_dict(x) for x in data])
+
+        return json.dumps(collected_data, default=str)
+
+    @staticmethod
+    async def anonymize_user(user_id: int):
+        """
+        Anonymize user data in the database.
+        :param user_id: ID of the user to be anonymized.
+        """
+        user = await userDao.find_by_id(user_id)
+        if user is None:
+            raise ValueError("User not found")
+
+        keycloak_id = user.keycloak_id
+
+        # Anonymize internal data
+        user.keycloak_id = "ANONYMIZED"
+        userDao.update(user)
+
+        # Anonymize external data
+        try:
+            Keycloak.admin.delete_user(keycloak_id)
+        except Exception as e:
+            logger.error(f"Failed to anonymize external data for user {user_id}", e)
+            raise ValueError("Failed to anonymize external data") from e
+
+    @classmethod
+    async def delete_user_data(cls, user_id: int):
+        """
+        Delete user data from the database.
+        :param user_id: ID of the user whose data is to be deleted.
+        """
+        user = await userDao.find_by_id(user_id)
+        if user is None:
+            raise ValueError("User not found")
+
+        keycloak_id = user.keycloak_id
+
+        daos = await cls._collect_user_relevant_dao()
+        for dao in daos:
+            data = await dao.find_by([{"userid": user_id}])
+            try:
+                await dao.delete_many(data, hard_delete=True)
+            except Exception as e:
+                logger.error(f"Failed to delete data for user {user_id}", e)
+                raise ValueError("Failed to delete data") from e
+
+        try:
+            await userDao.delete(user)
+        except Exception as e:
+            logger.error(f"Failed to delete user {user_id}", e)
+            raise ValueError("Failed to delete user") from e
+
+        # Delete external data
+        try:
+            Keycloak.admin.delete_user(keycloak_id)
+        except Exception as e:
+            logger.error(f"Failed to delete external data for user {user_id}", e)
+            raise ValueError("Failed to delete external data") from e
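The service discovers user-related tables by scanning DAO subclasses and keeping those that expose a user_id column. A self-contained sketch of that discovery pattern, with toy classes standing in for the real DAO base and its has_attribute() check:

# Toy sketch of the pattern in DataPrivacyService._collect_user_relevant_dao:
# enumerate subclasses of a DAO base class and keep those exposing a user_id column.
class BaseDao:
    columns: list[str] = []


class UserSettingDao(BaseDao):
    columns = ["id", "user_id", "key", "value"]


class SettingDao(BaseDao):
    columns = ["id", "key", "value"]


def collect_user_relevant(base: type) -> list[type]:
    # the real service additionally imports the module-level DAO instance dynamically
    return [dao for dao in base.__subclasses__() if "user_id" in dao.columns]


print([d.__name__ for d in collect_user_relevant(BaseDao)])  # ['UserSettingDao']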
@@ -1,5 +1,5 @@
 import { Component, OnDestroy, OnInit } from '@angular/core';
-import { MenuItem, PrimeNGConfig } from 'primeng/api';
+import { MenuItem, MenuItemCommandEvent, PrimeNGConfig } from 'primeng/api';
 import { Subject } from 'rxjs';
 import { takeUntil } from 'rxjs/operators';
 import { TranslateService } from '@ngx-translate/core';
@@ -11,6 +11,7 @@ import { SidebarService } from 'src/app/service/sidebar.service';
 import { ConfigService } from 'src/app/service/config.service';
 import { UserSettingsService } from 'src/app/service/user_settings.service';
 import { SettingsService } from 'src/app/service/settings.service';
+import { environment } from 'src/environments/environment';

 @Component({
   selector: 'app-header',
@@ -48,11 +49,12 @@ export class HeaderComponent implements OnInit, OnDestroy {
     });

     this.auth.user$.pipe(takeUntil(this.unsubscribe$)).subscribe(async user => {
+      this.user = user;
+
       await this.initMenuLists();
       await this.loadTheme();
       await this.loadLang();

-      this.user = user;
       this.guiService.loadedGuiSettings$.next(true);
     });

@@ -117,14 +119,41 @@ export class HeaderComponent implements OnInit, OnDestroy {
         visible: !!this.user,
       },
       {
-        separator: true,
+        label: this.translateService.instant('header.privacy'),
+        items: [
+          {
+            label: this.translateService.instant('privacy.export_data'),
+            command: () => {},
+            icon: 'pi pi-download',
+          },
+          {
+            label: this.translateService.instant('privacy.delete_data'),
+            command: () => {},
+            icon: 'pi pi-trash',
+          },
+        ],
       },
       {
-        label: this.translateService.instant('header.logout'),
-        command: async () => {
-          await this.auth.logout();
-        },
-        icon: 'pi pi-sign-out',
+        label: this.translateService.instant('header.profile'),
+        items: [
+          {
+            label: this.translateService.instant('header.edit_profile'),
+            command: () => {
+              window.open(
+                `${this.config.settings.keycloak.url}/realms/${this.config.settings.keycloak.realm}/account`,
+                '_blank'
+              );
+            },
+            icon: 'pi pi-user-edit',
+          },
+          {
+            label: this.translateService.instant('header.logout'),
+            command: async () => {
+              await this.auth.logout();
+            },
+            icon: 'pi pi-sign-out',
+          },
+        ],
       },
     ];
   }