Added auth & improved database
All checks were successful
Build on push / prepare (push) Successful in 9s
Build on push / query (push) Successful in 18s
Build on push / core (push) Successful in 24s
Build on push / dependency (push) Successful in 17s
Build on push / database (push) Successful in 15s
Build on push / translation (push) Successful in 15s
Build on push / mail (push) Successful in 18s
Build on push / application (push) Successful in 19s
Build on push / auth (push) Successful in 16s

2025-09-17 12:21:32 +02:00
parent 4625b626e6
commit 504dc5e188
76 changed files with 1849 additions and 302 deletions

View File

@@ -3,16 +3,17 @@ from typing import Type
from cpl.dependency import ServiceCollection as _ServiceCollection
from . import mysql as _mysql
from . import postgres as _postgres
from .internal_tables import InternalTables
from .table_manager import TableManager
def _add(collection: _ServiceCollection,db_context: Type, default_port: int, server_type: str):
def _add(collection: _ServiceCollection, db_context: Type, default_port: int, server_type: str):
from cpl.core.console import Console
from cpl.core.configuration import Configuration
from cpl.database.abc.db_context_abc import DBContextABC
from cpl.database.model.server_type import ServerTypes, ServerType
from cpl.database.model.database_settings import DatabaseSettings
from cpl.database.service.migration_service import MigrationService
from cpl.database.service.seeder_service import SeederService
from cpl.database.schema.executed_migration_dao import ExecutedMigrationDao
try:
@@ -22,20 +23,25 @@ def _add(collection: _ServiceCollection,db_context: Type, default_port: int, ser
collection.add_singleton(DBContextABC, db_context)
collection.add_singleton(ExecutedMigrationDao)
collection.add_singleton(MigrationService)
collection.add_singleton(SeederService)
except ImportError as e:
Console.error("cpl-database is not installed", str(e))
def add_mysql(collection: _ServiceCollection):
from cpl.database.mysql.db_context import DBContext
from cpl.database.model import ServerTypes
_add(collection, DBContext, 3306, ServerTypes.MYSQL.value)
def add_postgres(collection: _ServiceCollection):
from cpl.database.postgres.db_context import DBContext
from cpl.database.model import ServerTypes
_add(collection, DBContext, 5432, ServerTypes.POSTGRES.value)
_ServiceCollection.with_module(add_mysql, _mysql.__name__)
_ServiceCollection.with_module(add_postgres, _postgres.__name__)
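A minimal usage sketch for the registration helpers above, assuming the module shown here is exported as cpl.database (the file path is not visible in this diff) and that ServiceCollection can be constructed without arguments; in practice with_module presumably wires the helpers in when the mysql/postgres submodule is imported, but that mechanism is not shown here.

from cpl.dependency import ServiceCollection
from cpl.database import add_mysql  # assumed import path for the helper defined above

collection = ServiceCollection()
add_mysql(collection)  # registers DBContextABC, ExecutedMigrationDao,
                       # MigrationService and SeederService as singletons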

View File

@@ -4,13 +4,14 @@ from enum import Enum
from types import NoneType
from typing import Generic, Optional, Union, Type, List, Any
from cpl.core.ctx import get_user
from cpl.core.typing import T, Id
from cpl.core.utils import String
from cpl.core.utils.get_value import get_value
from cpl.database._external_data_temp_table_builder import ExternalDataTempTableBuilder
from cpl.database.abc.db_context_abc import DBContextABC
from cpl.database.const import DATETIME_FORMAT
from cpl.database.db_logger import DBLogger
from cpl.database.external_data_temp_table_builder import ExternalDataTempTableBuilder
from cpl.database.postgres.sql_select_builder import SQLSelectBuilder
from cpl.database.typing import T_DBM, Attribute, AttributeFilters, AttributeSorts
@@ -21,7 +22,7 @@ class DataAccessObjectABC(ABC, Generic[T_DBM]):
def __init__(self, source: str, model_type: Type[T_DBM], table_name: str):
from cpl.dependency.service_provider_abc import ServiceProviderABC
self._db = ServiceProviderABC.get_global_provider().get_service(DBContextABC)
self._db = ServiceProviderABC.get_global_service(DBContextABC)
self._logger = DBLogger(source)
self._model_type = model_type
@@ -867,9 +868,9 @@ class DataAccessObjectABC(ABC, Generic[T_DBM]):
@staticmethod
async def _get_editor_id(obj: T_DBM):
editor_id = obj.editor_id
# if editor_id is None:
# user = get_user()
# if user is not None:
# editor_id = user.id
if editor_id is None:
user = get_user()
if user is not None:
editor_id = user.id
return editor_id if editor_id is not None else "NULL"

View File

@@ -0,0 +1,8 @@
from abc import ABC, abstractmethod
class DataSeederABC(ABC):
@abstractmethod
async def seed(self):
pass
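A minimal seeder sketch against the new abstraction; the DemoUserSeeder name, its body and the registration comment are illustrative, only DataSeederABC and its async seed() hook come from this file.

from cpl.database.abc.data_seeder_abc import DataSeederABC

class DemoUserSeeder(DataSeederABC):
    async def seed(self):
        # insert baseline rows here, e.g. through a DAO resolved from the container
        ...

# assumed registration pattern, mirroring the add_singleton calls shown above:
# collection.add_singleton(DataSeederABC, DemoUserSeeder)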

View File

@@ -2,9 +2,9 @@ from abc import abstractmethod
from datetime import datetime
from typing import Type
from cpl.database import TableManager
from cpl.database.abc.data_access_object_abc import DataAccessObjectABC
from cpl.database.abc.db_model_abc import DbModelABC
from cpl.database.internal_tables import InternalTables
class DbModelDaoABC[T_DBM](DataAccessObjectABC[T_DBM]):
@@ -15,10 +15,10 @@ class DbModelDaoABC[T_DBM](DataAccessObjectABC[T_DBM]):
self.attribute(DbModelABC.id, int, ignore=True)
self.attribute(DbModelABC.deleted, bool)
self.attribute(DbModelABC.editor_id, int, ignore=True) # handled by db trigger
self.attribute(DbModelABC.editor_id, int, db_name="editorId", ignore=True) # handled by db trigger
self.reference(
"editor", "id", DbModelABC.editor_id, InternalTables.users
"editor", "id", DbModelABC.editor_id, TableManager.get("auth_users")
) # not relevant for updates due to editor_id
self.attribute(DbModelABC.created, datetime, ignore=True) # handled by db trigger

View File

@@ -1,37 +0,0 @@
from abc import ABC, abstractmethod
from datetime import datetime
from typing import Optional
class TableABC(ABC):
@abstractmethod
def __init__(self):
self._created_at: Optional[datetime] = datetime.now().isoformat()
self._modified_at: Optional[datetime] = datetime.now().isoformat()
@property
def created_at(self) -> datetime:
return self._created_at
@property
def modified_at(self) -> datetime:
return self._modified_at
@modified_at.setter
def modified_at(self, value: datetime):
self._modified_at = value
@property
@abstractmethod
def insert_string(self) -> str:
pass
@property
@abstractmethod
def udpate_string(self) -> str:
pass
@property
@abstractmethod
def delete_string(self) -> str:
pass

View File

@@ -1,15 +0,0 @@
from cpl.database.model.server_type import ServerTypes, ServerType
class InternalTables:
@classmethod
@property
def users(cls) -> str:
return "administration.users" if ServerType.server_type is ServerTypes.POSTGRES else "users"
@classmethod
@property
def executed_migrations(cls) -> str:
return "system._executed_migrations" if ServerType.server_type is ServerTypes.POSTGRES else "_executed_migrations"

View File

@@ -5,6 +5,7 @@ class ServerTypes(Enum):
POSTGRES = "postgres"
MYSQL = "mysql"
class ServerType:
_server_type: ServerTypes = None
@@ -18,4 +19,4 @@ class ServerType:
@property
def server_type(cls) -> ServerTypes:
assert cls._server_type is not None, "Server type is not set"
return cls._server_type
return cls._server_type
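The class-level server_type property is what the table-name lookup below relies on; a minimal read-side sketch, assuming add_mysql/add_postgres set the value during registration (the setter is not part of this hunk).

from cpl.database.model.server_type import ServerType, ServerTypes

# server_type raises an AssertionError until registration has set it
if ServerType.server_type is ServerTypes.POSTGRES:
    ...  # e.g. expect schema-qualified names such as system._executed_migrations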

View File

@@ -31,7 +31,6 @@ class MySQLPool:
db=self._db_settings.database,
minsize=1,
maxsize=Environment.get("DB_POOL_SIZE", int, 1),
autocommit=True,
)
except Exception as e:
_logger.fatal("Failed to connect to the database", e)
@@ -62,6 +61,7 @@ class MySQLPool:
async with pool.acquire() as con:
async with con.cursor() as cursor:
await self._exec_sql(cursor, query, args, multi)
await con.commit()
if cursor.description is not None: # Query returns rows
res = await cursor.fetchall()
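The pool drops autocommit=True in favour of an explicit commit after each statement. A sketch of the resulting pattern, assuming an aiomysql-style pool (acquire, cursor, execute, commit and fetchall are standard aiomysql calls); the real code routes execution through _exec_sql rather than calling execute directly.

async def run_query(pool, query, args=None):
    async with pool.acquire() as con:
        async with con.cursor() as cursor:
            await cursor.execute(query, args)
            await con.commit()                  # explicit commit replaces autocommit=True
            if cursor.description is not None:  # the statement returned rows
                return await cursor.fetchall()
    return None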

View File

@@ -1,6 +1,6 @@
from typing import Optional, Union
from cpl.database._external_data_temp_table_builder import ExternalDataTempTableBuilder
from cpl.database.external_data_temp_table_builder import ExternalDataTempTableBuilder
class SQLSelectBuilder:

View File

@@ -1,4 +1,4 @@
from cpl.database import InternalTables
from cpl.database import TableManager
from cpl.database.abc.data_access_object_abc import DataAccessObjectABC
from cpl.database.db_logger import DBLogger
from cpl.database.schema.executed_migration import ExecutedMigration
@@ -9,6 +9,6 @@ _logger = DBLogger(__name__)
class ExecutedMigrationDao(DataAccessObjectABC[ExecutedMigration]):
def __init__(self):
DataAccessObjectABC.__init__(self, __name__, ExecutedMigration, InternalTables.executed_migrations)
DataAccessObjectABC.__init__(self, __name__, ExecutedMigration, TableManager.get("executed_migrations"))
self.attribute(ExecutedMigration.migration_id, str, primary_key=True, db_name="migrationId")

View File

@@ -1,4 +1,4 @@
CREATE TABLE IF NOT EXISTS _executed_migrations
CREATE TABLE IF NOT EXISTS system__executed_migrations
(
migrationId VARCHAR(255) PRIMARY KEY,
created TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,

View File

@@ -1,26 +1,21 @@
DROP TRIGGER IF EXISTS `TR_TableUpdate`;
DELIMITER //
CREATE TRIGGER mytable_before_update
BEFORE UPDATE
ON mytable
CREATE TRIGGER `TR_TableUpdate`
AFTER UPDATE
ON `Table`
FOR EACH ROW
BEGIN
INSERT INTO mytable_history
SELECT OLD.*;
SET NEW.updated = NOW();
INSERT INTO `TableHistory` (Id, ..., Deleted, EditorId, Created, Updated)
VALUES (OLD.Id, ..., OLD.Deleted, OLD.EditorId, OLD.Created, CURRENT_TIMESTAMP());
END;
//
DELIMITER ;
DELIMITER //
CREATE TRIGGER mytable_before_delete
BEFORE DELETE
ON mytable
DROP TRIGGER IF EXISTS `TR_TableDelete`;
CREATE TRIGGER `TR_TableDelete`
AFTER DELETE
ON `Table`
FOR EACH ROW
BEGIN
INSERT INTO mytable_history
SELECT OLD.*;
END;
//
DELIMITER ;
INSERT INTO `TableHistory` (Id, ..., Deleted, EditorId, Created, Updated)
VALUES (OLD.Id, ..., TRUE, OLD.EditorId, OLD.Created, CURRENT_TIMESTAMP());
END;

View File

@@ -3,9 +3,9 @@ CREATE SCHEMA IF NOT EXISTS system;
CREATE TABLE IF NOT EXISTS system._executed_migrations
(
MigrationId VARCHAR(255) PRIMARY KEY,
Created timestamptz NOT NULL DEFAULT NOW(),
Updated timestamptz NOT NULL DEFAULT NOW()
migrationId VARCHAR(255) PRIMARY KEY,
created timestamptz NOT NULL DEFAULT NOW(),
updated timestamptz NOT NULL DEFAULT NOW()
);
CREATE OR REPLACE FUNCTION public.history_trigger_function()
@@ -33,9 +33,9 @@ BEGIN
USING OLD;
END IF;
-- For UPDATE, update the Updated column and return the new row
-- For UPDATE, update the updated column and return the new row
IF (TG_OP = 'UPDATE') THEN
NEW.updated := NOW(); -- Update the Updated column
NEW.updated := NOW(); -- Update the updated column
RETURN NEW;
END IF;

View File

@@ -0,0 +1,18 @@
from cpl.database.abc.data_seeder_abc import DataSeederABC
from cpl.database.db_logger import DBLogger
from cpl.dependency import ServiceProviderABC
_logger = DBLogger(__name__)
class SeederService:
def __init__(self, provider: ServiceProviderABC):
self._provider = provider
async def seed(self):
seeders = self._provider.get_services(DataSeederABC)
_logger.debug(f"Found {len(seeders)} seeders")
for seeder in seeders:
await seeder.seed()
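A sketch of kicking off seeding once the container is built, assuming SeederService was registered as a singleton by add_mysql/add_postgres as shown earlier and that get_global_service resolves it the same way it resolves DBContextABC.

from cpl.database.service.seeder_service import SeederService
from cpl.dependency import ServiceProviderABC

async def seed_database():
    seeder_service = ServiceProviderABC.get_global_service(SeederService)
    await seeder_service.seed()  # runs every registered DataSeederABC in turn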

View File

@@ -0,0 +1,49 @@
from cpl.database.model.server_type import ServerTypes, ServerType
class TableManager:
_tables: dict[str, dict[ServerTypes, str]] = {
"executed_migrations": {
ServerTypes.POSTGRES: "system._executed_migrations",
ServerTypes.MYSQL: "system__executed_migrations",
},
"auth_users": {
ServerTypes.POSTGRES: "administration.auth_users",
ServerTypes.MYSQL: "administration_auth_users",
},
"api_keys": {
ServerTypes.POSTGRES: "administration.api_keys",
ServerTypes.MYSQL: "administration_api_keys",
},
"api_key_permissions": {
ServerTypes.POSTGRES: "permission.api_key_permissions",
ServerTypes.MYSQL: "permission_api_key_permissions",
},
"permissions": {
ServerTypes.POSTGRES: "permission.permissions",
ServerTypes.MYSQL: "permission_permissions",
},
"roles": {
ServerTypes.POSTGRES: "permission.roles",
ServerTypes.MYSQL: "permission_roles",
},
"role_permissions": {
ServerTypes.POSTGRES: "permission.role_permissions",
ServerTypes.MYSQL: "permission_role_permissions",
},
"role_users": {
ServerTypes.POSTGRES: "permission.role_users",
ServerTypes.MYSQL: "permission_role_users",
},
}
@classmethod
def get(cls, key: str) -> str:
if key not in cls._tables:
raise KeyError(f"Table '{key}' not found in TableManager.")
server_type = ServerType.server_type
if server_type not in cls._tables[key]:
raise KeyError(f"Server type '{server_type}' not configured for table '{key}'.")
return cls._tables[key][server_type]
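Usage sketch for the new lookup: keys are backend-agnostic and the returned name depends on the configured server type.

from cpl.database import TableManager

# With the server type set to POSTGRES this returns "permission.roles";
# with MYSQL it returns "permission_roles". Unknown keys raise a KeyError.
roles_table = TableManager.get("roles")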