Compare commits
9 Commits
2025.09.16
...
2025.09.17
| Author | SHA1 | Date | |
|---|---|---|---|
| 9c6078f4fd | |||
| dfdc31512d | |||
| ab7ff7da93 | |||
| 41087a838b | |||
| 836b92ccbf | |||
| 8aaba22940 | |||
| 504dc5e188 | |||
| 4625b626e6 | |||
| 58dbd3ed1e |
@@ -12,6 +12,20 @@ jobs:
|
|||||||
version_suffix: 'dev'
|
version_suffix: 'dev'
|
||||||
secrets: inherit
|
secrets: inherit
|
||||||
|
|
||||||
|
application:
|
||||||
|
uses: ./.gitea/workflows/package.yaml
|
||||||
|
needs: [ prepare, core, dependency ]
|
||||||
|
with:
|
||||||
|
working_directory: src/cpl-application
|
||||||
|
secrets: inherit
|
||||||
|
|
||||||
|
auth:
|
||||||
|
uses: ./.gitea/workflows/package.yaml
|
||||||
|
needs: [ prepare, core, dependency, database ]
|
||||||
|
with:
|
||||||
|
working_directory: src/cpl-auth
|
||||||
|
secrets: inherit
|
||||||
|
|
||||||
core:
|
core:
|
||||||
uses: ./.gitea/workflows/package.yaml
|
uses: ./.gitea/workflows/package.yaml
|
||||||
needs: [prepare]
|
needs: [prepare]
|
||||||
@@ -19,6 +33,27 @@ jobs:
|
|||||||
working_directory: src/cpl-core
|
working_directory: src/cpl-core
|
||||||
secrets: inherit
|
secrets: inherit
|
||||||
|
|
||||||
|
database:
|
||||||
|
uses: ./.gitea/workflows/package.yaml
|
||||||
|
needs: [ prepare, core, dependency ]
|
||||||
|
with:
|
||||||
|
working_directory: src/cpl-database
|
||||||
|
secrets: inherit
|
||||||
|
|
||||||
|
dependency:
|
||||||
|
uses: ./.gitea/workflows/package.yaml
|
||||||
|
needs: [ prepare, core ]
|
||||||
|
with:
|
||||||
|
working_directory: src/cpl-dependency
|
||||||
|
secrets: inherit
|
||||||
|
|
||||||
|
mail:
|
||||||
|
uses: ./.gitea/workflows/package.yaml
|
||||||
|
needs: [ prepare, core, dependency ]
|
||||||
|
with:
|
||||||
|
working_directory: src/cpl-mail
|
||||||
|
secrets: inherit
|
||||||
|
|
||||||
query:
|
query:
|
||||||
uses: ./.gitea/workflows/package.yaml
|
uses: ./.gitea/workflows/package.yaml
|
||||||
needs: [prepare]
|
needs: [prepare]
|
||||||
@@ -28,14 +63,7 @@ jobs:
|
|||||||
|
|
||||||
translation:
|
translation:
|
||||||
uses: ./.gitea/workflows/package.yaml
|
uses: ./.gitea/workflows/package.yaml
|
||||||
needs: [ prepare, core ]
|
needs: [ prepare, core, dependency ]
|
||||||
with:
|
with:
|
||||||
working_directory: src/cpl-translation
|
working_directory: src/cpl-translation
|
||||||
secrets: inherit
|
|
||||||
|
|
||||||
mail:
|
|
||||||
uses: ./.gitea/workflows/package.yaml
|
|
||||||
needs: [ prepare, core ]
|
|
||||||
with:
|
|
||||||
working_directory: src/cpl-mail
|
|
||||||
secrets: inherit
|
secrets: inherit
|
||||||
153
README.md
153
README.md
@@ -1,153 +0,0 @@
|
|||||||
<h1 align="center">CPL - Common python library</h1>
|
|
||||||
|
|
||||||
<!-- Summary -->
|
|
||||||
<p align="center">
|
|
||||||
<!-- <img src="" alt="cpl-logo" width="120px" height="120px"/> -->
|
|
||||||
<br>
|
|
||||||
<i>
|
|
||||||
CPL is a development platform for python server applications
|
|
||||||
<br>using Python.</i>
|
|
||||||
<br>
|
|
||||||
</p>
|
|
||||||
|
|
||||||
## Table of Contents
|
|
||||||
<!-- TABLE OF CONTENTS -->
|
|
||||||
<ol>
|
|
||||||
<li><a href="#Features">Features</a></li>
|
|
||||||
<li>
|
|
||||||
<a href="#getting-started">Getting Started</a>
|
|
||||||
<ul>
|
|
||||||
<li><a href="#prerequisites">Prerequisites</a></li>
|
|
||||||
<li><a href="#installation">Installation</a></li>
|
|
||||||
</ul>
|
|
||||||
</li>
|
|
||||||
<li><a href="#roadmap">Roadmap</a></li>
|
|
||||||
<li><a href="#contributing">Contributing</a></li>
|
|
||||||
<li><a href="#license">License</a></li>
|
|
||||||
<li><a href="#contact">Contact</a></li>
|
|
||||||
</ol>
|
|
||||||
|
|
||||||
## Features
|
|
||||||
<!-- FEATURE OVERVIEW -->
|
|
||||||
- Expandle
|
|
||||||
- Application base
|
|
||||||
- Standardized application classes
|
|
||||||
- Application object builder
|
|
||||||
- Application extension classes
|
|
||||||
- Startup classes
|
|
||||||
- Startup extension classes
|
|
||||||
- Configuration
|
|
||||||
- Configure via object mapped JSON
|
|
||||||
- Console argument handling
|
|
||||||
- Console class for in and output
|
|
||||||
- Banner
|
|
||||||
- Spinner
|
|
||||||
- Options (menu)
|
|
||||||
- Table
|
|
||||||
- Write
|
|
||||||
- Write_at
|
|
||||||
- Write_line
|
|
||||||
- Write_line_at
|
|
||||||
- Dependency injection
|
|
||||||
- Service lifetimes: singleton, scoped and transient
|
|
||||||
- Providing of application environment
|
|
||||||
- Environment (development, staging, testing, production)
|
|
||||||
- Appname
|
|
||||||
- Customer
|
|
||||||
- Hostname
|
|
||||||
- Runtime directory
|
|
||||||
- Working directory
|
|
||||||
- Logging
|
|
||||||
- Standardized logger
|
|
||||||
- Log-level (FATAL, ERROR, WARN, INFO, DEBUG & TRACE)
|
|
||||||
- Mail handling
|
|
||||||
- Send mails
|
|
||||||
- Pipe classes
|
|
||||||
- Convert input
|
|
||||||
- Utils
|
|
||||||
- Credential manager
|
|
||||||
- Encryption via BASE64
|
|
||||||
- PIP wrapper class based on subprocess
|
|
||||||
- Run pip commands
|
|
||||||
- String converter to different variants
|
|
||||||
- to_lower_case
|
|
||||||
- to_camel_case
|
|
||||||
- ...
|
|
||||||
|
|
||||||
<!-- GETTING STARTED -->
|
|
||||||
## Getting Started
|
|
||||||
|
|
||||||
[Get started with CPL][quickstart].
|
|
||||||
|
|
||||||
### Prerequisites
|
|
||||||
|
|
||||||
- Install [python] which includes [Pip installs packages][pip]
|
|
||||||
|
|
||||||
### Installation
|
|
||||||
|
|
||||||
Install the CPL package
|
|
||||||
```sh
|
|
||||||
pip install cpl-core --extra-index-url https://pip.sh-edraft.de
|
|
||||||
```
|
|
||||||
|
|
||||||
Install the CPL CLI
|
|
||||||
```sh
|
|
||||||
pip install cpl-cli --extra-index-url https://pip.sh-edraft.de
|
|
||||||
```
|
|
||||||
|
|
||||||
Create workspace:
|
|
||||||
```sh
|
|
||||||
cpl new <console|library|unittest> <PROJECT NAME>
|
|
||||||
```
|
|
||||||
|
|
||||||
Run the application:
|
|
||||||
```sh
|
|
||||||
cd <PROJECT NAME>
|
|
||||||
cpl start
|
|
||||||
```
|
|
||||||
|
|
||||||
|
|
||||||
<!-- ROADMAP -->
|
|
||||||
## Roadmap
|
|
||||||
|
|
||||||
See the [open issues](https://git.sh-edraft.de/sh-edraft.de/sh_cpl/issues) for a list of proposed features (and known issues).
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
<!-- CONTRIBUTING -->
|
|
||||||
## Contributing
|
|
||||||
|
|
||||||
### Contributing Guidelines
|
|
||||||
|
|
||||||
Read through our [contributing guidelines][contributing] to learn about our submission process, coding rules and more.
|
|
||||||
|
|
||||||
### Want to Help?
|
|
||||||
|
|
||||||
Want to file a bug, contribute some code, or improve documentation? Excellent! Read up on our guidelines for [contributing][contributing].
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
<!-- LICENSE -->
|
|
||||||
## License
|
|
||||||
|
|
||||||
Distributed under the MIT License. See [LICENSE] for more information.
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
<!-- CONTACT -->
|
|
||||||
## Contact
|
|
||||||
|
|
||||||
Sven Heidemann - sven.heidemann@sh-edraft.de
|
|
||||||
|
|
||||||
Project link: [https://git.sh-edraft.de/sh-edraft.de/sh_common_py_lib](https://git.sh-edraft.de/sh-edraft.de/sh_cpl)
|
|
||||||
|
|
||||||
<!-- External LINKS -->
|
|
||||||
[pip_url]: https://pip.sh-edraft.de
|
|
||||||
[python]: https://www.python.org/
|
|
||||||
[pip]: https://pypi.org/project/pip/
|
|
||||||
|
|
||||||
<!-- Internal LINKS -->
|
|
||||||
[project]: https://git.sh-edraft.de/sh-edraft.de/sh_cpl
|
|
||||||
[quickstart]: https://git.sh-edraft.de/sh-edraft.de/sh_cpl/wiki/quickstart
|
|
||||||
[contributing]: https://git.sh-edraft.de/sh-edraft.de/sh_cpl/wiki/contributing
|
|
||||||
[license]: LICENSE
|
|
||||||
|
|||||||
@@ -1,6 +1 @@
|
|||||||
from .application_abc import ApplicationABC
|
|
||||||
from .application_builder import ApplicationBuilder
|
from .application_builder import ApplicationBuilder
|
||||||
from .application_builder_abc import ApplicationBuilderABC
|
|
||||||
from .application_extension_abc import ApplicationExtensionABC
|
|
||||||
from .startup_abc import StartupABC
|
|
||||||
from .startup_extension_abc import StartupExtensionABC
|
|
||||||
|
|||||||
4
src/cpl-application/cpl/application/abc/__init__.py
Normal file
4
src/cpl-application/cpl/application/abc/__init__.py
Normal file
@@ -0,0 +1,4 @@
|
|||||||
|
from .application_abc import ApplicationABC
|
||||||
|
from .application_extension_abc import ApplicationExtensionABC
|
||||||
|
from .startup_abc import StartupABC
|
||||||
|
from .startup_extension_abc import StartupExtensionABC
|
||||||
82
src/cpl-application/cpl/application/abc/application_abc.py
Normal file
82
src/cpl-application/cpl/application/abc/application_abc.py
Normal file
@@ -0,0 +1,82 @@
|
|||||||
|
from abc import ABC, abstractmethod
|
||||||
|
from typing import Callable, Self
|
||||||
|
|
||||||
|
from cpl.application.host import Host
|
||||||
|
from cpl.core.console.console import Console
|
||||||
|
from cpl.core.environment import Environment
|
||||||
|
from cpl.core.log import LoggerABC, LogLevel
|
||||||
|
from cpl.dependency.service_provider_abc import ServiceProviderABC
|
||||||
|
|
||||||
|
|
||||||
|
def __not_implemented__(package: str, func: Callable):
|
||||||
|
raise NotImplementedError(f"Package {package} is required to use {func.__name__} method")
|
||||||
|
|
||||||
|
|
||||||
|
class ApplicationABC(ABC):
|
||||||
|
r"""ABC for the Application class
|
||||||
|
|
||||||
|
Parameters:
|
||||||
|
services: :class:`cpl.dependency.service_provider_abc.ServiceProviderABC`
|
||||||
|
Contains instances of prepared objects
|
||||||
|
"""
|
||||||
|
|
||||||
|
@abstractmethod
|
||||||
|
def __init__(self, services: ServiceProviderABC):
|
||||||
|
self._services = services
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def extend(cls, name: str | Callable, func: Callable[[Self], Self]):
|
||||||
|
r"""Extend the Application with a custom method
|
||||||
|
|
||||||
|
Parameters:
|
||||||
|
name: :class:`str`
|
||||||
|
Name of the method
|
||||||
|
func: :class:`Callable[[Self], Self]`
|
||||||
|
Function that takes the Application as a parameter and returns it
|
||||||
|
"""
|
||||||
|
if callable(name):
|
||||||
|
name = name.__name__
|
||||||
|
|
||||||
|
setattr(cls, name, func)
|
||||||
|
return cls
|
||||||
|
|
||||||
|
def with_logging(self, level: LogLevel = None):
|
||||||
|
if level is None:
|
||||||
|
level = Environment.get("LOG_LEVEL", LogLevel, LogLevel.info)
|
||||||
|
|
||||||
|
logger = self._services.get_service(LoggerABC)
|
||||||
|
logger.set_level(level)
|
||||||
|
|
||||||
|
def with_permissions(self, *args, **kwargs):
|
||||||
|
__not_implemented__("cpl-auth", self.with_permissions)
|
||||||
|
|
||||||
|
def with_migrations(self, *args, **kwargs):
|
||||||
|
__not_implemented__("cpl-database", self.with_migrations)
|
||||||
|
|
||||||
|
def with_seeders(self, *args, **kwargs):
|
||||||
|
__not_implemented__("cpl-database", self.with_seeders)
|
||||||
|
|
||||||
|
def with_extension(self, func: Callable[[Self, ...], None], *args, **kwargs):
|
||||||
|
r"""Extend the Application with a custom method
|
||||||
|
|
||||||
|
Parameters:
|
||||||
|
func: :class:`Callable[[Self], Self]`
|
||||||
|
Function that takes the Application as a parameter and returns it
|
||||||
|
"""
|
||||||
|
assert func is not None, "func must not be None"
|
||||||
|
assert callable(func), "func must be callable"
|
||||||
|
|
||||||
|
func(self, *args, **kwargs)
|
||||||
|
|
||||||
|
def run(self):
|
||||||
|
r"""Entry point
|
||||||
|
|
||||||
|
Called by custom Application.main
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
Host.run(self.main)
|
||||||
|
except KeyboardInterrupt:
|
||||||
|
Console.close()
|
||||||
|
|
||||||
|
@abstractmethod
|
||||||
|
def main(self): ...
|
||||||
@@ -0,0 +1,10 @@
|
|||||||
|
from abc import ABC, abstractmethod
|
||||||
|
|
||||||
|
from cpl.dependency import ServiceProviderABC
|
||||||
|
|
||||||
|
|
||||||
|
class ApplicationExtensionABC(ABC):
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
@abstractmethod
|
||||||
|
def run(services: ServiceProviderABC): ...
|
||||||
21
src/cpl-application/cpl/application/abc/startup_abc.py
Normal file
21
src/cpl-application/cpl/application/abc/startup_abc.py
Normal file
@@ -0,0 +1,21 @@
|
|||||||
|
from abc import ABC, abstractmethod
|
||||||
|
|
||||||
|
from cpl.dependency.service_collection import ServiceCollection
|
||||||
|
|
||||||
|
|
||||||
|
class StartupABC(ABC):
|
||||||
|
r"""ABC for the startup class"""
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
@abstractmethod
|
||||||
|
def configure_configuration():
|
||||||
|
r"""Creates configuration of application"""
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
@abstractmethod
|
||||||
|
def configure_services(service: ServiceCollection):
|
||||||
|
r"""Creates service provider
|
||||||
|
|
||||||
|
Parameter:
|
||||||
|
services: :class:`cpl.dependency.service_collection`
|
||||||
|
"""
|
||||||
@@ -0,0 +1,20 @@
|
|||||||
|
from abc import ABC, abstractmethod
|
||||||
|
|
||||||
|
from cpl.dependency import ServiceCollection
|
||||||
|
|
||||||
|
|
||||||
|
class StartupExtensionABC(ABC):
|
||||||
|
r"""ABC for startup extension classes"""
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
@abstractmethod
|
||||||
|
def configure_configuration():
|
||||||
|
r"""Creates configuration of application"""
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
@abstractmethod
|
||||||
|
def configure_services(services: ServiceCollection):
|
||||||
|
r"""Creates service provider
|
||||||
|
Parameter:
|
||||||
|
services: :class:`cpl.dependency.service_collection`
|
||||||
|
"""
|
||||||
@@ -1,57 +0,0 @@
|
|||||||
from abc import ABC, abstractmethod
|
|
||||||
from typing import Optional
|
|
||||||
|
|
||||||
from cpl.dependency.service_provider_abc import ServiceProviderABC
|
|
||||||
|
|
||||||
from cpl.core.console.console import Console
|
|
||||||
|
|
||||||
|
|
||||||
class ApplicationABC(ABC):
|
|
||||||
r"""ABC for the Application class
|
|
||||||
|
|
||||||
Parameters:
|
|
||||||
config: :class:`cpl.core.configuration.configuration_abc.ConfigurationABC`
|
|
||||||
Contains object loaded from appsettings
|
|
||||||
services: :class:`cpl.dependency.service_provider_abc.ServiceProviderABC`
|
|
||||||
Contains instances of prepared objects
|
|
||||||
"""
|
|
||||||
|
|
||||||
@abstractmethod
|
|
||||||
def __init__(self, services: ServiceProviderABC):
|
|
||||||
self._services: Optional[ServiceProviderABC] = services
|
|
||||||
|
|
||||||
def run(self):
|
|
||||||
r"""Entry point
|
|
||||||
|
|
||||||
Called by custom Application.main
|
|
||||||
"""
|
|
||||||
try:
|
|
||||||
self.configure()
|
|
||||||
self.main()
|
|
||||||
except KeyboardInterrupt:
|
|
||||||
Console.close()
|
|
||||||
|
|
||||||
async def run_async(self):
|
|
||||||
r"""Entry point
|
|
||||||
|
|
||||||
Called by custom Application.main
|
|
||||||
"""
|
|
||||||
try:
|
|
||||||
await self.configure()
|
|
||||||
await self.main()
|
|
||||||
except KeyboardInterrupt:
|
|
||||||
Console.close()
|
|
||||||
|
|
||||||
@abstractmethod
|
|
||||||
def configure(self):
|
|
||||||
r"""Configure the application
|
|
||||||
|
|
||||||
Called by :class:`cpl.application.application_abc.ApplicationABC.run`
|
|
||||||
"""
|
|
||||||
|
|
||||||
@abstractmethod
|
|
||||||
def main(self):
|
|
||||||
r"""Custom entry point
|
|
||||||
|
|
||||||
Called by :class:`cpl.application.application_abc.ApplicationABC.run`
|
|
||||||
"""
|
|
||||||
@@ -1,97 +1,64 @@
|
|||||||
from typing import Type, Optional, Callable, Union
|
import asyncio
|
||||||
|
from typing import Type, Optional
|
||||||
|
|
||||||
from cpl.application.application_abc import ApplicationABC
|
from cpl.application.abc.application_abc import ApplicationABC
|
||||||
from cpl.application.application_builder_abc import ApplicationBuilderABC
|
from cpl.application.abc.application_extension_abc import ApplicationExtensionABC
|
||||||
from cpl.application.application_extension_abc import ApplicationExtensionABC
|
from cpl.application.abc.startup_abc import StartupABC
|
||||||
from cpl.application.async_application_extension_abc import AsyncApplicationExtensionABC
|
from cpl.application.abc.startup_extension_abc import StartupExtensionABC
|
||||||
from cpl.application.async_startup_abc import AsyncStartupABC
|
from cpl.application.host import Host
|
||||||
from cpl.application.async_startup_extension_abc import AsyncStartupExtensionABC
|
|
||||||
from cpl.application.startup_abc import StartupABC
|
|
||||||
from cpl.application.startup_extension_abc import StartupExtensionABC
|
|
||||||
from cpl.core.configuration.configuration import Configuration
|
|
||||||
from cpl.dependency.service_collection import ServiceCollection
|
from cpl.dependency.service_collection import ServiceCollection
|
||||||
from cpl.core.environment import Environment
|
|
||||||
|
|
||||||
|
|
||||||
class ApplicationBuilder(ApplicationBuilderABC):
|
class ApplicationBuilder:
|
||||||
r"""This is class is used to build an object of :class:`cpl.application.application_abc.ApplicationABC`
|
r"""A builder for constructing an application with configurable services and extensions."""
|
||||||
|
|
||||||
Parameter:
|
|
||||||
app: Type[:class:`cpl.application.application_abc.ApplicationABC`]
|
|
||||||
Application to build
|
|
||||||
"""
|
|
||||||
|
|
||||||
def __init__(self, app: Type[ApplicationABC]):
|
def __init__(self, app: Type[ApplicationABC]):
|
||||||
ApplicationBuilderABC.__init__(self)
|
assert app is not None, "app must not be None"
|
||||||
self._app = app
|
assert issubclass(app, ApplicationABC), "app must be an subclass of ApplicationABC or its subclass"
|
||||||
self._startup: Optional[StartupABC | AsyncStartupABC] = None
|
|
||||||
|
self._app = app if app is not None else ApplicationABC
|
||||||
|
|
||||||
self._services = ServiceCollection()
|
self._services = ServiceCollection()
|
||||||
|
|
||||||
self._app_extensions: list[Type[ApplicationExtensionABC | AsyncApplicationExtensionABC]] = []
|
self._startup: Optional[StartupABC] = None
|
||||||
self._startup_extensions: list[Type[StartupExtensionABC | AsyncStartupABC]] = []
|
self._app_extensions: list[Type[ApplicationExtensionABC]] = []
|
||||||
|
self._startup_extensions: list[Type[StartupExtensionABC]] = []
|
||||||
|
|
||||||
def use_startup(self, startup: Type[StartupABC | AsyncStartupABC]) -> "ApplicationBuilder":
|
self._async_loop = asyncio.get_event_loop()
|
||||||
self._startup = startup()
|
|
||||||
|
@property
|
||||||
|
def services(self) -> ServiceCollection:
|
||||||
|
return self._services
|
||||||
|
|
||||||
|
@property
|
||||||
|
def service_provider(self):
|
||||||
|
return self._services.build()
|
||||||
|
|
||||||
|
def with_startup(self, startup: Type[StartupABC]) -> "ApplicationBuilder":
|
||||||
|
self._startup = startup
|
||||||
return self
|
return self
|
||||||
|
|
||||||
def use_extension(
|
def with_extension(
|
||||||
self,
|
self,
|
||||||
extension: Type[
|
extension: Type[ApplicationExtensionABC | StartupExtensionABC],
|
||||||
ApplicationExtensionABC | AsyncApplicationExtensionABC | StartupExtensionABC | AsyncStartupExtensionABC
|
|
||||||
],
|
|
||||||
) -> "ApplicationBuilder":
|
) -> "ApplicationBuilder":
|
||||||
if (
|
if (issubclass(extension, ApplicationExtensionABC)) and extension not in self._app_extensions:
|
||||||
issubclass(extension, ApplicationExtensionABC) or issubclass(extension, AsyncApplicationExtensionABC)
|
|
||||||
) and extension not in self._app_extensions:
|
|
||||||
self._app_extensions.append(extension)
|
self._app_extensions.append(extension)
|
||||||
elif (
|
elif (issubclass(extension, StartupExtensionABC)) and extension not in self._startup_extensions:
|
||||||
issubclass(extension, StartupExtensionABC) or issubclass(extension, AsyncStartupExtensionABC)
|
|
||||||
) and extension not in self._startup_extensions:
|
|
||||||
self._startup_extensions.append(extension)
|
self._startup_extensions.append(extension)
|
||||||
|
|
||||||
return self
|
return self
|
||||||
|
|
||||||
def _build_startup(self):
|
|
||||||
for ex in self._startup_extensions:
|
|
||||||
extension = ex()
|
|
||||||
extension.configure_configuration(Configuration, Environment)
|
|
||||||
extension.configure_services(self._services, Environment)
|
|
||||||
|
|
||||||
if self._startup is not None:
|
|
||||||
self._startup.configure_configuration(Configuration, Environment)
|
|
||||||
self._startup.configure_services(self._services, Environment)
|
|
||||||
|
|
||||||
async def _build_async_startup(self):
|
|
||||||
for ex in self._startup_extensions:
|
|
||||||
extension = ex()
|
|
||||||
await extension.configure_configuration(Configuration, Environment)
|
|
||||||
await extension.configure_services(self._services, Environment)
|
|
||||||
|
|
||||||
if self._startup is not None:
|
|
||||||
await self._startup.configure_configuration(Configuration, Environment)
|
|
||||||
await self._startup.configure_services(self._services, Environment)
|
|
||||||
|
|
||||||
def build(self) -> ApplicationABC:
|
def build(self) -> ApplicationABC:
|
||||||
self._build_startup()
|
for extension in self._startup_extensions:
|
||||||
|
Host.run(extension.configure_configuration)
|
||||||
|
Host.run(extension.configure_services, self._services)
|
||||||
|
|
||||||
config = Configuration
|
if self._startup is not None:
|
||||||
services = self._services.build_service_provider()
|
Host.run(self._startup.configure_configuration)
|
||||||
|
Host.run(self._startup.configure_services, self._services)
|
||||||
|
|
||||||
for ex in self._app_extensions:
|
for extension in self._app_extensions:
|
||||||
extension = ex()
|
Host.run(extension.run, self.service_provider)
|
||||||
extension.run(config, services)
|
|
||||||
|
|
||||||
return self._app(services)
|
return self._app(self.service_provider)
|
||||||
|
|
||||||
async def build_async(self) -> ApplicationABC:
|
|
||||||
await self._build_async_startup()
|
|
||||||
|
|
||||||
config = Configuration
|
|
||||||
services = self._services.build_service_provider()
|
|
||||||
|
|
||||||
for ex in self._app_extensions:
|
|
||||||
extension = ex()
|
|
||||||
await extension.run(config, services)
|
|
||||||
|
|
||||||
return self._app(services)
|
|
||||||
|
|||||||
@@ -1,47 +0,0 @@
|
|||||||
from abc import ABC, abstractmethod
|
|
||||||
from typing import Type
|
|
||||||
|
|
||||||
from cpl.application.application_abc import ApplicationABC
|
|
||||||
from cpl.application.startup_abc import StartupABC
|
|
||||||
|
|
||||||
|
|
||||||
class ApplicationBuilderABC(ABC):
|
|
||||||
r"""ABC for the :class:`cpl.application.application_builder.ApplicationBuilder`"""
|
|
||||||
|
|
||||||
@abstractmethod
|
|
||||||
def __init__(self, *args):
|
|
||||||
pass
|
|
||||||
|
|
||||||
@abstractmethod
|
|
||||||
def use_startup(self, startup: Type[StartupABC]):
|
|
||||||
r"""Sets the custom startup class to use
|
|
||||||
|
|
||||||
Parameter:
|
|
||||||
startup: Type[:class:`cpl.application.startup_abc.StartupABC`]
|
|
||||||
Startup class to use
|
|
||||||
"""
|
|
||||||
|
|
||||||
@abstractmethod
|
|
||||||
async def use_startup(self, startup: Type[StartupABC]):
|
|
||||||
r"""Sets the custom startup class to use async
|
|
||||||
|
|
||||||
Parameter:
|
|
||||||
startup: Type[:class:`cpl.application.startup_abc.StartupABC`]
|
|
||||||
Startup class to use
|
|
||||||
"""
|
|
||||||
|
|
||||||
@abstractmethod
|
|
||||||
def build(self) -> ApplicationABC:
|
|
||||||
r"""Creates custom application object
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
Object of :class:`cpl.application.application_abc.ApplicationABC`
|
|
||||||
"""
|
|
||||||
|
|
||||||
@abstractmethod
|
|
||||||
async def build_async(self) -> ApplicationABC:
|
|
||||||
r"""Creates custom application object async
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
Object of :class:`cpl.application.application_abc.ApplicationABC`
|
|
||||||
"""
|
|
||||||
@@ -1,14 +0,0 @@
|
|||||||
from abc import ABC, abstractmethod
|
|
||||||
|
|
||||||
from cpl.core.configuration.configuration import Configuration
|
|
||||||
from cpl.dependency import ServiceProviderABC
|
|
||||||
|
|
||||||
|
|
||||||
class ApplicationExtensionABC(ABC):
|
|
||||||
@abstractmethod
|
|
||||||
def __init__(self):
|
|
||||||
pass
|
|
||||||
|
|
||||||
@abstractmethod
|
|
||||||
def run(self, config: Configuration, services: ServiceProviderABC):
|
|
||||||
pass
|
|
||||||
@@ -1,14 +0,0 @@
|
|||||||
from abc import ABC, abstractmethod
|
|
||||||
|
|
||||||
from cpl.core.configuration.configuration import Configuration
|
|
||||||
from cpl.dependency import ServiceProviderABC
|
|
||||||
|
|
||||||
|
|
||||||
class AsyncApplicationExtensionABC(ABC):
|
|
||||||
@abstractmethod
|
|
||||||
def __init__(self):
|
|
||||||
pass
|
|
||||||
|
|
||||||
@abstractmethod
|
|
||||||
async def run(self, config: Configuration, services: ServiceProviderABC):
|
|
||||||
pass
|
|
||||||
@@ -1,23 +0,0 @@
|
|||||||
from abc import ABC, abstractmethod
|
|
||||||
|
|
||||||
from cpl.dependency.service_collection_abc import ServiceCollectionABC
|
|
||||||
|
|
||||||
|
|
||||||
class AsyncStartupABC(ABC):
|
|
||||||
r"""ABC for the startup class"""
|
|
||||||
|
|
||||||
@abstractmethod
|
|
||||||
def __init__(self):
|
|
||||||
pass
|
|
||||||
|
|
||||||
@abstractmethod
|
|
||||||
async def configure_configuration(self):
|
|
||||||
r"""Creates configuration of application"""
|
|
||||||
|
|
||||||
@abstractmethod
|
|
||||||
async def configure_services(self, service: ServiceCollectionABC):
|
|
||||||
r"""Creates service provider
|
|
||||||
|
|
||||||
Parameter:
|
|
||||||
services: :class:`cpl.dependency.service_collection_abc`
|
|
||||||
"""
|
|
||||||
@@ -1,31 +0,0 @@
|
|||||||
from abc import ABC, abstractmethod
|
|
||||||
|
|
||||||
from cpl.core.configuration.configuration import Configuration
|
|
||||||
from cpl.dependency.service_collection_abc import ServiceCollectionABC
|
|
||||||
from cpl.core.environment.environment import Environment
|
|
||||||
|
|
||||||
|
|
||||||
class AsyncStartupExtensionABC(ABC):
|
|
||||||
r"""ABC for startup extension classes"""
|
|
||||||
|
|
||||||
@abstractmethod
|
|
||||||
def __init__(self):
|
|
||||||
pass
|
|
||||||
|
|
||||||
@abstractmethod
|
|
||||||
async def configure_configuration(self, config: Configuration, env: Environment):
|
|
||||||
r"""Creates configuration of application
|
|
||||||
|
|
||||||
Parameter:
|
|
||||||
config: :class:`cpl.core.configuration.configuration_abc.Configuration`
|
|
||||||
env: :class:`cpl.core.environment.application_environment_abc`
|
|
||||||
"""
|
|
||||||
|
|
||||||
@abstractmethod
|
|
||||||
async def configure_services(self, service: ServiceCollectionABC, env: Environment):
|
|
||||||
r"""Creates service provider
|
|
||||||
|
|
||||||
Parameter:
|
|
||||||
services: :class:`cpl.dependency.service_collection_abc`
|
|
||||||
env: :class:`cpl.core.environment.application_environment_abc`
|
|
||||||
"""
|
|
||||||
17
src/cpl-application/cpl/application/host.py
Normal file
17
src/cpl-application/cpl/application/host.py
Normal file
@@ -0,0 +1,17 @@
|
|||||||
|
import asyncio
|
||||||
|
from typing import Callable
|
||||||
|
|
||||||
|
|
||||||
|
class Host:
|
||||||
|
_loop = asyncio.get_event_loop()
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def get_loop(cls):
|
||||||
|
return cls._loop
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def run(cls, func: Callable, *args, **kwargs):
|
||||||
|
if asyncio.iscoroutinefunction(func):
|
||||||
|
return cls._loop.run_until_complete(func(*args, **kwargs))
|
||||||
|
|
||||||
|
return func(*args, **kwargs)
|
||||||
@@ -1,31 +0,0 @@
|
|||||||
from abc import ABC, abstractmethod
|
|
||||||
|
|
||||||
from cpl.core.configuration import Configuration
|
|
||||||
from cpl.dependency.service_collection_abc import ServiceCollectionABC
|
|
||||||
from cpl.core.environment import Environment
|
|
||||||
|
|
||||||
|
|
||||||
class StartupABC(ABC):
|
|
||||||
r"""ABC for the startup class"""
|
|
||||||
|
|
||||||
@abstractmethod
|
|
||||||
def __init__(self):
|
|
||||||
pass
|
|
||||||
|
|
||||||
@abstractmethod
|
|
||||||
def configure_configuration(self, config: Configuration, env: Environment):
|
|
||||||
r"""Creates configuration of application
|
|
||||||
|
|
||||||
Parameter:
|
|
||||||
config: :class:`cpl.core.configuration.configuration_abc.ConfigurationABC`
|
|
||||||
env: :class:`cpl.core.environment.application_environment_abc`
|
|
||||||
"""
|
|
||||||
|
|
||||||
@abstractmethod
|
|
||||||
def configure_services(self, service: ServiceCollectionABC, env: Environment):
|
|
||||||
r"""Creates service provider
|
|
||||||
|
|
||||||
Parameter:
|
|
||||||
services: :class:`cpl.dependency.service_collection_abc`
|
|
||||||
env: :class:`cpl.core.environment.application_environment_abc`
|
|
||||||
"""
|
|
||||||
@@ -1,33 +0,0 @@
|
|||||||
from abc import ABC, abstractmethod
|
|
||||||
|
|
||||||
|
|
||||||
from cpl.core.configuration import Configuration
|
|
||||||
from cpl.dependency.service_collection_abc import ServiceCollectionABC
|
|
||||||
|
|
||||||
from cpl.core.environment.environment import Environment
|
|
||||||
|
|
||||||
|
|
||||||
class StartupExtensionABC(ABC):
|
|
||||||
r"""ABC for startup extension classes"""
|
|
||||||
|
|
||||||
@abstractmethod
|
|
||||||
def __init__(self):
|
|
||||||
pass
|
|
||||||
|
|
||||||
@abstractmethod
|
|
||||||
def configure_configuration(self, config: Configuration, env: Environment):
|
|
||||||
r"""Creates configuration of application
|
|
||||||
|
|
||||||
Parameter:
|
|
||||||
config: :class:`cpl.core.configuration.configuration_abc.ConfigurationABC`
|
|
||||||
env: :class:`cpl.core.environment.application_environment_abc`
|
|
||||||
"""
|
|
||||||
|
|
||||||
@abstractmethod
|
|
||||||
def configure_services(self, service: ServiceCollectionABC, env: Environment):
|
|
||||||
r"""Creates service provider
|
|
||||||
|
|
||||||
Parameter:
|
|
||||||
services: :class:`cpl.dependency.service_collection_abc`
|
|
||||||
env: :class:`cpl.core.environment.application_environment_abc`
|
|
||||||
"""
|
|
||||||
82
src/cpl-auth/cpl/auth/__init__.py
Normal file
82
src/cpl-auth/cpl/auth/__init__.py
Normal file
@@ -0,0 +1,82 @@
|
|||||||
|
from enum import Enum
|
||||||
|
from typing import Type
|
||||||
|
|
||||||
|
from cpl.application.abc import ApplicationABC as _ApplicationABC
|
||||||
|
from cpl.auth import permission as _permission
|
||||||
|
from cpl.auth.keycloak.keycloak_admin import KeycloakAdmin as _KeycloakAdmin
|
||||||
|
from cpl.auth.keycloak.keycloak_client import KeycloakClient as _KeycloakClient
|
||||||
|
from cpl.dependency.service_collection import ServiceCollection as _ServiceCollection
|
||||||
|
from .auth_logger import AuthLogger
|
||||||
|
from .keycloak_settings import KeycloakSettings
|
||||||
|
from .permission_seeder import PermissionSeeder
|
||||||
|
|
||||||
|
|
||||||
|
def _with_permissions(self: _ApplicationABC, *permissions: Type[Enum]) -> _ApplicationABC:
|
||||||
|
from cpl.auth.permission.permissions_registry import PermissionsRegistry
|
||||||
|
|
||||||
|
for perm in permissions:
|
||||||
|
PermissionsRegistry.with_enum(perm)
|
||||||
|
return self
|
||||||
|
|
||||||
|
|
||||||
|
def _add_daos(collection: _ServiceCollection):
|
||||||
|
from .schema._administration.auth_user_dao import AuthUserDao
|
||||||
|
from .schema._administration.api_key_dao import ApiKeyDao
|
||||||
|
from .schema._permission.api_key_permission_dao import ApiKeyPermissionDao
|
||||||
|
from .schema._permission.permission_dao import PermissionDao
|
||||||
|
from .schema._permission.role_dao import RoleDao
|
||||||
|
from .schema._permission.role_permission_dao import RolePermissionDao
|
||||||
|
from .schema._permission.role_user_dao import RoleUserDao
|
||||||
|
|
||||||
|
collection.add_singleton(AuthUserDao)
|
||||||
|
collection.add_singleton(ApiKeyDao)
|
||||||
|
collection.add_singleton(ApiKeyPermissionDao)
|
||||||
|
collection.add_singleton(PermissionDao)
|
||||||
|
collection.add_singleton(RoleDao)
|
||||||
|
collection.add_singleton(RolePermissionDao)
|
||||||
|
collection.add_singleton(RoleUserDao)
|
||||||
|
|
||||||
|
|
||||||
|
def add_auth(collection: _ServiceCollection):
    """Wire Keycloak clients, auth DAOs and database migrations into the collection.

    Missing optional dependencies are reported via the console instead of raised.
    """
    import os

    from cpl.core.console import Console
    from cpl.database.service.migration_service import MigrationService
    from cpl.database.model.server_type import ServerType, ServerTypes

    try:
        collection.add_singleton(_KeycloakClient)
        collection.add_singleton(_KeycloakAdmin)

        _add_daos(collection)

        provider = collection.build()
        migration_service: MigrationService = provider.get_service(MigrationService)

        # Choose the migration script directory matching the configured backend.
        base_dir = os.path.dirname(os.path.realpath(__file__))
        if ServerType.server_type == ServerTypes.POSTGRES:
            migration_service.with_directory(os.path.join(base_dir, "scripts/postgres"))
        elif ServerType.server_type == ServerTypes.MYSQL:
            migration_service.with_directory(os.path.join(base_dir, "scripts/mysql"))
    except ImportError as e:
        Console.error("cpl-auth is not installed", str(e))
|
||||||
|
|
||||||
|
|
||||||
|
def add_permission(collection: _ServiceCollection):
    """Register the permission seeder and the built-in ``Permissions`` enum."""
    from cpl.auth.permission_seeder import PermissionSeeder
    from cpl.database.abc.data_seeder_abc import DataSeederABC
    from cpl.auth.permission.permissions_registry import PermissionsRegistry
    from cpl.auth.permission.permissions import Permissions

    try:
        collection.add_singleton(DataSeederABC, PermissionSeeder)
        PermissionsRegistry.with_enum(Permissions)
    except ImportError as e:
        from cpl.core.console import Console

        Console.error("cpl-auth is not installed", str(e))
|
||||||
|
|
||||||
|
|
||||||
|
# Module wiring: register the auth and permission registration hooks with the
# DI container, and extend the application ABC with the fluent
# `with_permissions` method defined above.
_ServiceCollection.with_module(add_auth, __name__)
_ServiceCollection.with_module(add_permission, _permission.__name__)
_ApplicationABC.extend(_ApplicationABC.with_permissions, _with_permissions)
|
||||||
8
src/cpl-auth/cpl/auth/auth_logger.py
Normal file
8
src/cpl-auth/cpl/auth/auth_logger.py
Normal file
@@ -0,0 +1,8 @@
|
|||||||
|
from cpl.core.log import Logger
|
||||||
|
from cpl.core.typing import Source
|
||||||
|
|
||||||
|
|
||||||
|
class AuthLogger(Logger):
    """Logger preconfigured to write to the "auth" log channel."""

    def __init__(self, source: Source):
        # Every message from this logger is tagged with the "auth" category.
        super().__init__(source, "auth")
|
||||||
3
src/cpl-auth/cpl/auth/keycloak/__init__.py
Normal file
3
src/cpl-auth/cpl/auth/keycloak/__init__.py
Normal file
@@ -0,0 +1,3 @@
|
|||||||
|
from .keycloak_admin import KeycloakAdmin
|
||||||
|
from .keycloak_client import KeycloakClient
|
||||||
|
from .keycloak_user import KeycloakUser
|
||||||
24
src/cpl-auth/cpl/auth/keycloak/keycloak_admin.py
Normal file
24
src/cpl-auth/cpl/auth/keycloak/keycloak_admin.py
Normal file
@@ -0,0 +1,24 @@
|
|||||||
|
from keycloak import KeycloakAdmin as _KeycloakAdmin, KeycloakOpenIDConnection
|
||||||
|
|
||||||
|
from cpl.auth.auth_logger import AuthLogger
|
||||||
|
from cpl.auth.keycloak_settings import KeycloakSettings
|
||||||
|
|
||||||
|
_logger = AuthLogger("keycloak")
|
||||||
|
|
||||||
|
|
||||||
|
class KeycloakAdmin(_KeycloakAdmin):
    """Keycloak admin client whose connection is built from ``KeycloakSettings``."""

    def __init__(self, settings: KeycloakSettings):
        _logger.info("Initializing Keycloak admin")
        connection = KeycloakOpenIDConnection(
            server_url=settings.url,
            client_id=settings.client_id,
            realm_name=settings.realm,
            client_secret_key=settings.client_secret,
        )
        _KeycloakAdmin.__init__(self, connection=connection)

        # Keep a private handle on the OpenID connection used by this admin.
        self.__connection = connection
|
||||||
26
src/cpl-auth/cpl/auth/keycloak/keycloak_client.py
Normal file
26
src/cpl-auth/cpl/auth/keycloak/keycloak_client.py
Normal file
@@ -0,0 +1,26 @@
|
|||||||
|
from keycloak import KeycloakOpenID, KeycloakAdmin, KeycloakOpenIDConnection
|
||||||
|
|
||||||
|
from cpl.auth.auth_logger import AuthLogger
|
||||||
|
from cpl.auth.keycloak_settings import KeycloakSettings
|
||||||
|
|
||||||
|
_logger = AuthLogger("keycloak")
|
||||||
|
|
||||||
|
|
||||||
|
class KeycloakClient(KeycloakOpenID):
    """OpenID client for Keycloak that also holds an admin client built
    from the same settings.
    """

    def __init__(self, settings: KeycloakSettings):
        KeycloakOpenID.__init__(
            self,
            server_url=settings.url,
            client_id=settings.client_id,
            realm_name=settings.realm,
            client_secret_key=settings.client_secret,
        )
        _logger.info("Initializing Keycloak client")
        admin_connection = KeycloakOpenIDConnection(
            server_url=settings.url,
            client_id=settings.client_id,
            realm_name=settings.realm,
            client_secret_key=settings.client_secret,
        )
        self._admin = KeycloakAdmin(connection=admin_connection)
|
||||||
36
src/cpl-auth/cpl/auth/keycloak/keycloak_user.py
Normal file
36
src/cpl-auth/cpl/auth/keycloak/keycloak_user.py
Normal file
@@ -0,0 +1,36 @@
|
|||||||
|
from cpl.core.utils.get_value import get_value
|
||||||
|
from cpl.dependency import ServiceProviderABC
|
||||||
|
|
||||||
|
|
||||||
|
class KeycloakUser:
    """Read-only view over selected claims of a Keycloak token payload dict."""

    def __init__(self, source: dict):
        # Pull the claims of interest out of the raw payload once, up front.
        self._username = get_value(source, "preferred_username", str)
        self._email = get_value(source, "email", str)
        self._email_verified = get_value(source, "email_verified", bool)
        self._name = get_value(source, "name", str)

    @property
    def username(self) -> str:
        return self._username

    @property
    def email(self) -> str:
        return self._email

    @property
    def email_verified(self) -> bool:
        return self._email_verified

    @property
    def name(self) -> str:
        return self._name

    # Attributes resolved via the Keycloak admin API rather than the token payload.

    @property
    def id(self) -> str:
        # Deferred import — cpl.auth imports this module indirectly.
        from cpl.auth import KeycloakAdmin

        admin: KeycloakAdmin = ServiceProviderABC.get_global_service(KeycloakAdmin)
        return admin.get_user_id(self._username)
|
||||||
37
src/cpl-auth/cpl/auth/keycloak_settings.py
Normal file
37
src/cpl-auth/cpl/auth/keycloak_settings.py
Normal file
@@ -0,0 +1,37 @@
|
|||||||
|
from typing import Optional
|
||||||
|
|
||||||
|
from cpl.core.configuration.configuration_model_abc import ConfigurationModelABC
|
||||||
|
from cpl.core.environment import Environment
|
||||||
|
|
||||||
|
|
||||||
|
class KeycloakSettings(ConfigurationModelABC):
    """Keycloak connection settings.

    Any value not passed explicitly is read from the corresponding environment
    variable: KEYCLOAK_URL, KEYCLOAK_CLIENT_ID, KEYCLOAK_REALM,
    KEYCLOAK_CLIENT_SECRET.
    """

    def __init__(
        self,
        url: Optional[str] = None,
        client_id: Optional[str] = None,
        realm: Optional[str] = None,
        client_secret: Optional[str] = None,
    ):
        ConfigurationModelABC.__init__(self)

        # Fix: the original evaluated Environment.get(...) in the default-argument
        # position, i.e. exactly once at import time — environment changes made
        # after import were never seen. Resolve lazily at construction instead.
        self._url: Optional[str] = url if url is not None else Environment.get("KEYCLOAK_URL", str)
        self._client_id: Optional[str] = (
            client_id if client_id is not None else Environment.get("KEYCLOAK_CLIENT_ID", str)
        )
        self._realm: Optional[str] = realm if realm is not None else Environment.get("KEYCLOAK_REALM", str)
        self._client_secret: Optional[str] = (
            client_secret if client_secret is not None else Environment.get("KEYCLOAK_CLIENT_SECRET", str)
        )

    @property
    def url(self) -> Optional[str]:
        return self._url

    @property
    def client_id(self) -> Optional[str]:
        return self._client_id

    @property
    def realm(self) -> Optional[str]:
        return self._realm

    @property
    def client_secret(self) -> Optional[str]:
        return self._client_secret
|
||||||
36
src/cpl-auth/cpl/auth/permission/permissions.py
Normal file
36
src/cpl-auth/cpl/auth/permission/permissions.py
Normal file
@@ -0,0 +1,36 @@
|
|||||||
|
from enum import Enum
|
||||||
|
|
||||||
|
|
||||||
|
class Permissions(Enum):
    """Built-in permission identifiers, grouped by feature area."""

    # --- Administration ---

    # administrator
    administrator = "administrator"

    # api keys
    api_keys = "api_keys"
    api_keys_create = "api_keys.create"
    api_keys_update = "api_keys.update"
    api_keys_delete = "api_keys.delete"

    # users
    users = "users"
    users_create = "users.create"
    users_update = "users.update"
    users_delete = "users.delete"

    # settings
    settings = "settings"
    settings_update = "settings.update"

    # --- Permissions ---

    # roles
    roles = "roles"
    roles_create = "roles.create"
    roles_update = "roles.update"
    roles_delete = "roles.delete"
|
||||||
24
src/cpl-auth/cpl/auth/permission/permissions_registry.py
Normal file
24
src/cpl-auth/cpl/auth/permission/permissions_registry.py
Normal file
@@ -0,0 +1,24 @@
|
|||||||
|
from enum import Enum
|
||||||
|
from typing import Type
|
||||||
|
|
||||||
|
|
||||||
|
class PermissionsRegistry:
    """Process-wide registry mapping permission names to optional descriptions."""

    # Shared across the process; values may be None when no description is known.
    _permissions: dict[str, str] = {}

    @classmethod
    def get(cls):
        """Return a view of all registered permission names."""
        return cls._permissions.keys()

    @classmethod
    def descriptions(cls):
        """Return only the entries that carry a non-None description."""
        return {name: desc for name, desc in cls._permissions.items() if desc is not None}

    @classmethod
    def set(cls, permission: str, description: str = None):
        """Register (or overwrite) a permission with an optional description."""
        cls._permissions[permission] = description

    @classmethod
    def with_enum(cls, e: Type[Enum]):
        """Register every member of the given enum by its stringified value."""
        for member in e:
            cls.set(str(member.value))
|
||||||
120
src/cpl-auth/cpl/auth/permission_seeder.py
Normal file
120
src/cpl-auth/cpl/auth/permission_seeder.py
Normal file
@@ -0,0 +1,120 @@
|
|||||||
|
from cpl.auth.permission.permissions import Permissions
|
||||||
|
from cpl.auth.permission.permissions_registry import PermissionsRegistry
|
||||||
|
from cpl.auth.schema import (
|
||||||
|
Permission,
|
||||||
|
Role,
|
||||||
|
RolePermission,
|
||||||
|
ApiKey,
|
||||||
|
ApiKeyPermission,
|
||||||
|
PermissionDao,
|
||||||
|
RoleDao,
|
||||||
|
RolePermissionDao,
|
||||||
|
ApiKeyDao,
|
||||||
|
ApiKeyPermissionDao,
|
||||||
|
)
|
||||||
|
from cpl.core.utils.get_value import get_value
|
||||||
|
from cpl.database.abc.data_seeder_abc import DataSeederABC
|
||||||
|
from cpl.database.db_logger import DBLogger
|
||||||
|
|
||||||
|
_logger = DBLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class PermissionSeeder(DataSeederABC):
    """Synchronizes the permissions table with the ``PermissionsRegistry`` and
    keeps the built-in admin role and admin API key granted every permission.
    """

    def __init__(
        self,
        permission_dao: PermissionDao,
        role_dao: RoleDao,
        role_permission_dao: RolePermissionDao,
        api_key_dao: ApiKeyDao,
        api_key_permission_dao: ApiKeyPermissionDao,
    ):
        DataSeederABC.__init__(self)
        self._permission_dao = permission_dao
        self._role_dao = role_dao
        self._role_permission_dao = role_permission_dao
        self._api_key_dao = api_key_dao
        self._api_key_permission_dao = api_key_permission_dao

    async def seed(self):
        """Reconcile stored permissions with the registry, then refresh admin grants."""
        permissions = await self._permission_dao.get_all()
        possible_permissions = list(PermissionsRegistry.get())

        if len(permissions) == len(possible_permissions):
            _logger.info("Permissions already existing")
            await self._update_missing_descriptions()
            return

        # Hard-delete stored permissions that are no longer registered.
        to_delete = [permission for permission in permissions if permission.name not in possible_permissions]
        await self._permission_dao.delete_many(to_delete, hard_delete=True)

        _logger.warning("Permissions incomplete")
        permission_names = [permission.name for permission in permissions]
        await self._permission_dao.create_many(
            [
                Permission(
                    0,
                    permission,
                    get_value(PermissionsRegistry.descriptions(), permission, str),
                )
                for permission in possible_permissions
                if permission not in permission_names
            ]
        )
        await self._update_missing_descriptions()

        await self._add_missing_to_role()
        await self._add_missing_to_api_key()

    async def _add_missing_to_role(self):
        """Grant any unassigned permissions to the built-in admin role (id 1, "admin")."""
        admin_role = await self._role_dao.find_single_by([{Role.id: 1}, {Role.name: "admin"}])
        if admin_role is None:
            return

        admin_permissions = await self._role_permission_dao.get_by_role_id(admin_role.id, with_deleted=True)
        # Set lookup instead of a repeated list scan per permission.
        assigned_ids = {x.permission_id for x in admin_permissions}
        to_assign = [
            RolePermission(0, admin_role.id, permission.id)
            for permission in await self._permission_dao.get_all()
            if permission.id not in assigned_ids
        ]
        await self._role_permission_dao.create_many(to_assign)

    async def _add_missing_to_api_key(self):
        """Grant any unassigned permissions to the built-in admin API key (id 1, "admin")."""
        admin_api_key = await self._api_key_dao.find_single_by([{ApiKey.id: 1}, {ApiKey.identifier: "admin"}])
        if admin_api_key is None:
            return

        admin_permissions = await self._api_key_permission_dao.find_by_api_key_id(admin_api_key.id, with_deleted=True)
        assigned_ids = {x.permission_id for x in admin_permissions}
        to_assign = [
            ApiKeyPermission(0, admin_api_key.id, permission.id)
            for permission in await self._permission_dao.get_all()
            if permission.id not in assigned_ids
        ]
        await self._api_key_permission_dao.create_many(to_assign)

    async def _update_missing_descriptions(self):
        """Backfill descriptions for stored permissions that currently have none.

        Bug fix: ``PermissionsRegistry.descriptions()`` is keyed by plain strings
        (``PermissionsRegistry.set`` stores ``str`` keys), so the original
        ``key.value`` lookup raised AttributeError for any described permission.
        The keys are now used directly.
        """
        permissions = {
            permission.name: permission
            for permission in await self._permission_dao.find_by([{Permission.description: None}])
        }
        if len(permissions) == 0:
            return

        # Call descriptions() once instead of rebuilding the dict per iteration.
        descriptions = PermissionsRegistry.descriptions()
        to_update = []
        for name, description in descriptions.items():
            if name not in permissions:
                continue

            permissions[name].description = description
            to_update.append(permissions[name])

        if len(to_update) == 0:
            return

        await self._permission_dao.update_many(to_update)
|
||||||
15
src/cpl-auth/cpl/auth/schema/__init__.py
Normal file
15
src/cpl-auth/cpl/auth/schema/__init__.py
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
from ._administration.api_key import ApiKey
|
||||||
|
from ._administration.api_key_dao import ApiKeyDao
|
||||||
|
from ._administration.auth_user import AuthUser
|
||||||
|
from ._administration.auth_user_dao import AuthUserDao
|
||||||
|
|
||||||
|
from ._permission.api_key_permission import ApiKeyPermission
|
||||||
|
from ._permission.api_key_permission_dao import ApiKeyPermissionDao
|
||||||
|
from ._permission.permission import Permission
|
||||||
|
from ._permission.permission_dao import PermissionDao
|
||||||
|
from ._permission.role import Role
|
||||||
|
from ._permission.role_dao import RoleDao
|
||||||
|
from ._permission.role_permission import RolePermission
|
||||||
|
from ._permission.role_permission_dao import RolePermissionDao
|
||||||
|
from ._permission.role_user import RoleUser
|
||||||
|
from ._permission.role_user_dao import RoleUserDao
|
||||||
59
src/cpl-auth/cpl/auth/schema/_administration/api_key.py
Normal file
59
src/cpl-auth/cpl/auth/schema/_administration/api_key.py
Normal file
@@ -0,0 +1,59 @@
|
|||||||
|
import secrets
|
||||||
|
from datetime import datetime
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
|
from async_property import async_property
|
||||||
|
|
||||||
|
from cpl.auth.permission.permissions import Permissions
|
||||||
|
from cpl.core.environment import Environment
|
||||||
|
from cpl.core.log import Logger
|
||||||
|
from cpl.core.typing import SerialId, Id
|
||||||
|
from cpl.database.abc import DbModelABC
|
||||||
|
from cpl.dependency import ServiceProviderABC
|
||||||
|
|
||||||
|
_logger = Logger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class ApiKey(DbModelABC):
    """API key record: a human-readable identifier plus a secret key string.

    Granted permissions are resolved lazily through the ApiKeyPermissionDao.
    """

    def __init__(
        self,
        id: SerialId,
        identifier: str,
        key: str,
        deleted: bool = False,
        editor_id: Optional[Id] = None,
        created: Optional[datetime] = None,
        updated: Optional[datetime] = None,
    ):
        DbModelABC.__init__(self, id, deleted, editor_id, created, updated)
        self._identifier = identifier
        self._key = key

    @property
    def identifier(self) -> str:
        return self._identifier

    @property
    def key(self) -> str:
        return self._key

    @async_property
    async def permissions(self):
        """All permissions granted to this API key."""
        from cpl.auth.schema._permission.api_key_permission_dao import ApiKeyPermissionDao

        api_key_permission_dao: ApiKeyPermissionDao = ServiceProviderABC.get_global_service(ApiKeyPermissionDao)
        return [await x.permission for x in await api_key_permission_dao.find_by_api_key_id(self.id)]

    async def has_permission(self, permission: Permissions) -> bool:
        """Return True if this key has been granted the given permission."""
        return permission.value in [x.name for x in await self.permissions]

    def set_new_api_key(self):
        """Replace the current key string with a freshly generated one (not persisted)."""
        self._key = self.new_key()

    @staticmethod
    def new_key() -> str:
        """Generate a new random key string.

        The number of random bytes comes from the API_KEY_LENGTH environment
        variable, defaulting to 64.
        """
        # Fix: the original nested double quotes inside a double-quoted f-string,
        # which is valid only on Python 3.12+ (PEP 701). Keep it portable.
        token = secrets.token_urlsafe(Environment.get('API_KEY_LENGTH', int, 64))
        return f"api_{token}"

    @classmethod
    def new(cls, identifier: str) -> "ApiKey":
        """Create a fresh, unsaved ApiKey with a newly generated key.

        Uses ``cls`` (instead of hard-coding ApiKey) so subclasses produce
        instances of their own type.
        """
        return cls(0, identifier, cls.new_key())
|
||||||
32
src/cpl-auth/cpl/auth/schema/_administration/api_key_dao.py
Normal file
32
src/cpl-auth/cpl/auth/schema/_administration/api_key_dao.py
Normal file
@@ -0,0 +1,32 @@
|
|||||||
|
from typing import Optional
|
||||||
|
|
||||||
|
from cpl.auth.schema._administration.api_key import ApiKey
|
||||||
|
from cpl.database import TableManager
|
||||||
|
from cpl.database.abc import DbModelDaoABC
|
||||||
|
from cpl.database.db_logger import DBLogger
|
||||||
|
|
||||||
|
_logger = DBLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class ApiKeyDao(DbModelDaoABC[ApiKey]):
    """DAO for the api_keys table."""

    def __init__(self):
        DbModelDaoABC.__init__(self, __name__, ApiKey, TableManager.get("api_keys"))

        self.attribute(ApiKey.identifier, str)
        # The key column is named "keystring" in the database.
        self.attribute(ApiKey.key, str, "keystring")

    @staticmethod
    def _quote(value: str) -> str:
        # Security fix: these queries embed caller-supplied strings into raw SQL.
        # select_map takes a raw SQL string, so parameter binding is not available
        # here; escaping single quotes is the minimal injection defence.
        return value.replace("'", "''")

    async def get_by_identifier(self, ident: str) -> ApiKey:
        """Fetch the key with the given identifier; raises IndexError when absent."""
        result = await self._db.select_map(
            f"SELECT * FROM {self._table_name} WHERE Identifier = '{self._quote(ident)}'"
        )
        return self.to_object(result[0])

    async def get_by_key(self, key: str) -> ApiKey:
        """Fetch the key with the given key string; raises IndexError when absent."""
        result = await self._db.select_map(
            f"SELECT * FROM {self._table_name} WHERE Keystring = '{self._quote(key)}'"
        )
        return self.to_object(result[0])

    async def find_by_key(self, key: str) -> Optional[ApiKey]:
        """Like get_by_key, but returns None instead of raising when absent."""
        result = await self._db.select_map(
            f"SELECT * FROM {self._table_name} WHERE Keystring = '{self._quote(key)}'"
        )
        if not result or len(result) == 0:
            return None

        return self.to_object(result[0])
|
||||||
89
src/cpl-auth/cpl/auth/schema/_administration/auth_user.py
Normal file
89
src/cpl-auth/cpl/auth/schema/_administration/auth_user.py
Normal file
@@ -0,0 +1,89 @@
|
|||||||
|
import uuid
|
||||||
|
from datetime import datetime
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
|
from async_property import async_property
|
||||||
|
from keycloak import KeycloakGetError
|
||||||
|
|
||||||
|
from cpl.auth.keycloak import KeycloakAdmin
|
||||||
|
from cpl.auth.auth_logger import AuthLogger
|
||||||
|
from cpl.auth.permission.permissions import Permissions
|
||||||
|
from cpl.core.typing import SerialId
|
||||||
|
from cpl.database.abc import DbModelABC
|
||||||
|
from cpl.dependency import ServiceProviderABC
|
||||||
|
|
||||||
|
_logger = AuthLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class AuthUser(DbModelABC):
    """Application user linked to a Keycloak account via ``keycloak_id``.

    Username and email are not stored locally; they are resolved through the
    Keycloak admin API on access.
    """

    # The null UUID marks an anonymized (scrubbed) user.
    _ANONYMOUS_ID = str(uuid.UUID(int=0))

    def __init__(
        self,
        id: SerialId,
        keycloak_id: str,
        deleted: bool = False,
        editor_id: Optional[SerialId] = None,
        created: Optional[datetime] = None,
        updated: Optional[datetime] = None,
    ):
        DbModelABC.__init__(self, id, deleted, editor_id, created, updated)
        self._keycloak_id = keycloak_id

    @property
    def keycloak_id(self) -> str:
        return self._keycloak_id

    def _keycloak_attribute(self, attribute: str) -> str:
        """Resolve one attribute of this user from the Keycloak admin API.

        Returns "ANONYMOUS" for anonymized users and "UNKNOWN" when the user
        cannot be fetched (deleted in Keycloak, or any unexpected error).

        Refactor: ``username`` and ``email`` were copy-pasted duplicates of
        this logic; both now delegate here.
        """
        if self._keycloak_id == self._ANONYMOUS_ID:
            return "ANONYMOUS"

        try:
            keycloak_admin: KeycloakAdmin = ServiceProviderABC.get_global_service(KeycloakAdmin)
            return keycloak_admin.get_user(self._keycloak_id).get(attribute)
        except KeycloakGetError:
            # Known failure mode: the account no longer exists in Keycloak.
            return "UNKNOWN"
        except Exception as e:
            _logger.error(f"Failed to get user {self._keycloak_id} from Keycloak", e)
            return "UNKNOWN"

    @property
    def username(self):
        return self._keycloak_attribute("username")

    @property
    def email(self):
        return self._keycloak_attribute("email")

    @async_property
    async def roles(self):
        """All roles assigned to this user."""
        from cpl.auth.schema._permission.role_user_dao import RoleUserDao

        role_user_dao: RoleUserDao = ServiceProviderABC.get_global_service(RoleUserDao)
        return [await x.role for x in await role_user_dao.get_by_user_id(self.id)]

    @async_property
    async def permissions(self):
        """All permissions this user holds through role assignments."""
        from cpl.auth.schema._administration.auth_user_dao import AuthUserDao

        auth_user_dao: AuthUserDao = ServiceProviderABC.get_global_service(AuthUserDao)
        return await auth_user_dao.get_permissions(self.id)

    async def has_permission(self, permission: Permissions) -> bool:
        """Return True if any of this user's roles grants the given permission."""
        from cpl.auth.schema._administration.auth_user_dao import AuthUserDao

        auth_user_dao: AuthUserDao = ServiceProviderABC.get_global_service(AuthUserDao)
        return await auth_user_dao.has_permission(self.id, permission)

    async def anonymize(self):
        """Detach this user from its Keycloak account and persist the change."""
        from cpl.auth.schema._administration.auth_user_dao import AuthUserDao

        auth_user_dao: AuthUserDao = ServiceProviderABC.get_global_service(AuthUserDao)

        self._keycloak_id = self._ANONYMOUS_ID
        await auth_user_dao.update(self)
|
||||||
@@ -0,0 +1,72 @@
|
|||||||
|
from typing import Optional, Union
|
||||||
|
|
||||||
|
from cpl.auth.permission.permissions import Permissions
|
||||||
|
from cpl.auth.schema._administration.auth_user import AuthUser
|
||||||
|
from cpl.database import TableManager
|
||||||
|
from cpl.database.abc import DbModelDaoABC
|
||||||
|
from cpl.database.db_logger import DBLogger
|
||||||
|
from cpl.database.external_data_temp_table_builder import ExternalDataTempTableBuilder
|
||||||
|
from cpl.dependency import ServiceProviderABC
|
||||||
|
|
||||||
|
_logger = DBLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class AuthUserDao(DbModelDaoABC[AuthUser]):
    # DAO for the auth_users table. Augments SQL queries with username/email
    # columns resolved from Keycloak via an external temp table.

    def __init__(self):
        DbModelDaoABC.__init__(self, __name__, AuthUser, TableManager.get("auth_users"))

        self.attribute(AuthUser.keycloak_id, str, aliases=["keycloakId"])

        async def get_users():
            # Closure used as the temp table's value getter: (id, username, email)
            # for every stored user; username/email are fetched from Keycloak by
            # the AuthUser properties.
            return [(x.id, x.username, x.email) for x in await self.get_all()]

        self.use_external_fields(
            ExternalDataTempTableBuilder()
            .with_table_name(self._table_name)
            .with_field("id", "int", True)
            .with_field("username", "text")
            .with_field("email", "text")
            .with_value_getter(get_users)
        )

    async def get_by_keycloak_id(self, keycloak_id: str) -> AuthUser:
        # Raises when no matching user exists (get_* contract).
        return await self.get_single_by({AuthUser.keycloak_id: keycloak_id})

    async def find_by_keycloak_id(self, keycloak_id: str) -> Optional[AuthUser]:
        # Returns None when no matching user exists (find_* contract).
        return await self.find_single_by({AuthUser.keycloak_id: keycloak_id})

    async def has_permission(self, user_id: int, permission: Union[Permissions, str]) -> bool:
        # True when any non-deleted role of the user carries the permission.
        from cpl.auth.schema._permission.permission_dao import PermissionDao

        permission_dao: PermissionDao = ServiceProviderABC.get_global_service(PermissionDao)
        p = await permission_dao.get_by_name(permission if isinstance(permission, str) else permission.value)
        result = await self._db.select_map(
            f"""
            SELECT COUNT(*)
            FROM permission.role_users ru
            JOIN permission.role_permissions rp ON ru.roleId = rp.roleId
            WHERE ru.userId = {user_id}
            AND rp.permissionId = {p.id}
            AND ru.deleted = FALSE
            AND rp.deleted = FALSE;
            """
        )
        if result is None or len(result) == 0:
            return False

        # NOTE(review): assumes the driver exposes the COUNT(*) column under the
        # key "count" (PostgreSQL behavior) — confirm for other backends.
        return result[0]["count"] > 0

    async def get_permissions(self, user_id: int) -> list[Permissions]:
        # All permissions reachable through the user's non-deleted role links.
        result = await self._db.select_map(
            f"""
            SELECT p.*
            FROM permission.permissions p
            JOIN permission.role_permissions rp ON p.id = rp.permissionId
            JOIN permission.role_users ru ON rp.roleId = ru.roleId
            WHERE ru.userId = {user_id}
            AND rp.deleted = FALSE
            AND ru.deleted = FALSE;
            """
        )
        # NOTE(review): Permissions(p["name"]) raises ValueError for names not in
        # the built-in enum — presumably only registry enums are ever stored.
        return [Permissions(p["name"]) for p in result]
|
||||||
@@ -0,0 +1,46 @@
|
|||||||
|
from datetime import datetime
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
|
from async_property import async_property
|
||||||
|
|
||||||
|
from cpl.core.typing import SerialId
|
||||||
|
from cpl.database.abc import DbJoinModelABC
|
||||||
|
from cpl.dependency import ServiceProviderABC
|
||||||
|
|
||||||
|
|
||||||
|
class ApiKeyPermission(DbJoinModelABC):
    """Join model linking an API key to a permission."""

    def __init__(
        self,
        id: SerialId,
        api_key_id: SerialId,
        permission_id: SerialId,
        deleted: bool = False,
        editor_id: Optional[SerialId] = None,
        created: Optional[datetime] = None,
        updated: Optional[datetime] = None,
    ):
        DbJoinModelABC.__init__(self, api_key_id, permission_id, id, deleted, editor_id, created, updated)
        # Mirrored locally for the id properties below.
        self._api_key_id = api_key_id
        self._permission_id = permission_id

    @property
    def api_key_id(self) -> int:
        return self._api_key_id

    @async_property
    async def api_key(self):
        """Lazily resolve the ApiKey side of this link."""
        from cpl.auth.schema._administration.api_key_dao import ApiKeyDao

        dao: ApiKeyDao = ServiceProviderABC.get_global_service(ApiKeyDao)
        return await dao.get_by_id(self._api_key_id)

    @property
    def permission_id(self) -> int:
        return self._permission_id

    @async_property
    async def permission(self):
        """Lazily resolve the Permission side of this link."""
        from cpl.auth.schema._permission.permission_dao import PermissionDao

        dao: PermissionDao = ServiceProviderABC.get_global_service(PermissionDao)
        return await dao.get_by_id(self._permission_id)
|
||||||
@@ -0,0 +1,29 @@
|
|||||||
|
from cpl.auth.schema._permission.api_key_permission import ApiKeyPermission
|
||||||
|
from cpl.database import TableManager
|
||||||
|
from cpl.database.abc import DbModelDaoABC
|
||||||
|
from cpl.database.db_logger import DBLogger
|
||||||
|
|
||||||
|
_logger = DBLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class ApiKeyPermissionDao(DbModelDaoABC[ApiKeyPermission]):
    """DAO for the api_key_permissions join table."""

    def __init__(self):
        DbModelDaoABC.__init__(self, __name__, ApiKeyPermission, TableManager.get("api_key_permissions"))

        self.attribute(ApiKeyPermission.api_key_id, int)
        self.attribute(ApiKeyPermission.permission_id, int)

    async def find_by_api_key_id(self, api_key_id: int, with_deleted=False) -> list[ApiKeyPermission]:
        """All links for the given API key; soft-deleted rows excluded by default."""
        filters = [{ApiKeyPermission.api_key_id: api_key_id}]
        if not with_deleted:
            filters.append({ApiKeyPermission.deleted: False})

        return await self.find_by(filters)

    async def find_by_permission_id(self, permission_id: int, with_deleted=False) -> list[ApiKeyPermission]:
        """All links for the given permission; soft-deleted rows excluded by default."""
        filters = [{ApiKeyPermission.permission_id: permission_id}]
        if not with_deleted:
            filters.append({ApiKeyPermission.deleted: False})

        return await self.find_by(filters)
|
||||||
37
src/cpl-auth/cpl/auth/schema/_permission/permission.py
Normal file
37
src/cpl-auth/cpl/auth/schema/_permission/permission.py
Normal file
@@ -0,0 +1,37 @@
|
|||||||
|
from datetime import datetime
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
|
from cpl.core.typing import SerialId
|
||||||
|
from cpl.database.abc import DbModelABC
|
||||||
|
|
||||||
|
|
||||||
|
class Permission(DbModelABC):
    """Named permission with a human-readable description."""

    def __init__(
        self,
        id: SerialId,
        name: str,
        description: str,
        deleted: bool = False,
        editor_id: Optional[SerialId] = None,
        created: Optional[datetime] = None,
        updated: Optional[datetime] = None,
    ):
        DbModelABC.__init__(self, id, deleted, editor_id, created, updated)
        self._name = name
        self._description = description

    @property
    def name(self) -> str:
        return self._name

    @name.setter
    def name(self, value: str):
        self._name = value

    @property
    def description(self) -> str:
        return self._description

    @description.setter
    def description(self, value: str):
        self._description = value
|
||||||
21
src/cpl-auth/cpl/auth/schema/_permission/permission_dao.py
Normal file
21
src/cpl-auth/cpl/auth/schema/_permission/permission_dao.py
Normal file
@@ -0,0 +1,21 @@
|
|||||||
|
from typing import Optional
|
||||||
|
|
||||||
|
from cpl.auth.schema._permission.permission import Permission
|
||||||
|
from cpl.database import TableManager
|
||||||
|
from cpl.database.abc import DbModelDaoABC
|
||||||
|
from cpl.database.db_logger import DBLogger
|
||||||
|
|
||||||
|
_logger = DBLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class PermissionDao(DbModelDaoABC[Permission]):
    """DAO for the permissions table."""

    def __init__(self):
        DbModelDaoABC.__init__(self, __name__, Permission, TableManager.get("permissions"))

        self.attribute(Permission.name, str)
        self.attribute(Permission.description, Optional[str])

    async def get_by_name(self, name: str) -> Permission:
        """Fetch the permission with the given exact name.

        Raises IndexError when no such permission exists.
        """
        # Security fix: the name was interpolated into raw SQL unescaped.
        # select_map takes a raw SQL string, so escaping single quotes is the
        # minimal injection defence available here.
        safe_name = name.replace("'", "''")
        result = await self._db.select_map(f"SELECT * FROM {self._table_name} WHERE Name = '{safe_name}'")
        return self.to_object(result[0])
|
||||||
66
src/cpl-auth/cpl/auth/schema/_permission/role.py
Normal file
66
src/cpl-auth/cpl/auth/schema/_permission/role.py
Normal file
@@ -0,0 +1,66 @@
|
|||||||
|
from datetime import datetime
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
|
from async_property import async_property
|
||||||
|
|
||||||
|
from cpl.auth.permission.permissions import Permissions
|
||||||
|
from cpl.core.typing import SerialId
|
||||||
|
from cpl.database.abc import DbModelABC
|
||||||
|
from cpl.dependency import ServiceProviderABC
|
||||||
|
|
||||||
|
|
||||||
|
class Role(DbModelABC):
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
id: SerialId,
|
||||||
|
name: str,
|
||||||
|
description: str,
|
||||||
|
deleted: bool = False,
|
||||||
|
editor_id: Optional[SerialId] = None,
|
||||||
|
created: Optional[datetime] = None,
|
||||||
|
updated: Optional[datetime] = None,
|
||||||
|
):
|
||||||
|
DbModelABC.__init__(self, id, deleted, editor_id, created, updated)
|
||||||
|
self._name = name
|
||||||
|
self._description = description
|
||||||
|
|
||||||
|
@property
|
||||||
|
def name(self) -> str:
|
||||||
|
return self._name
|
||||||
|
|
||||||
|
@name.setter
|
||||||
|
def name(self, value: str):
|
||||||
|
self._name = value
|
||||||
|
|
||||||
|
@property
|
||||||
|
def description(self) -> str:
|
||||||
|
return self._description
|
||||||
|
|
||||||
|
@description.setter
|
||||||
|
def description(self, value: str):
|
||||||
|
self._description = value
|
||||||
|
|
||||||
|
@async_property
|
||||||
|
async def permissions(self):
|
||||||
|
from cpl.auth.schema._permission.role_permission_dao import RolePermissionDao
|
||||||
|
|
||||||
|
role_permission_dao: RolePermissionDao = ServiceProviderABC.get_global_service(RolePermissionDao)
|
||||||
|
return [await x.permission for x in await role_permission_dao.get_by_role_id(self.id)]
|
||||||
|
|
||||||
|
@async_property
|
||||||
|
async def users(self):
|
||||||
|
from cpl.auth.schema._permission.role_user_dao import RoleUserDao
|
||||||
|
|
||||||
|
role_user_dao: RoleUserDao = ServiceProviderABC.get_global_service(RoleUserDao)
|
||||||
|
return [await x.user for x in await role_user_dao.get_by_role_id(self.id)]
|
||||||
|
|
||||||
|
async def has_permission(self, permission: Permissions) -> bool:
|
||||||
|
from cpl.auth.schema._permission.permission_dao import PermissionDao
|
||||||
|
from cpl.auth.schema._permission.role_permission_dao import RolePermissionDao
|
||||||
|
|
||||||
|
permission_dao: PermissionDao = ServiceProviderABC.get_global_service(PermissionDao)
|
||||||
|
role_permission_dao: RolePermissionDao = ServiceProviderABC.get_global_service(RolePermissionDao)
|
||||||
|
|
||||||
|
p = await permission_dao.get_by_name(permission.value)
|
||||||
|
|
||||||
|
return p.id in [x.id for x in await role_permission_dao.get_by_role_id(self.id)]
|
||||||
17
src/cpl-auth/cpl/auth/schema/_permission/role_dao.py
Normal file
17
src/cpl-auth/cpl/auth/schema/_permission/role_dao.py
Normal file
@@ -0,0 +1,17 @@
|
|||||||
|
from cpl.auth.schema._permission.role import Role
|
||||||
|
from cpl.database import TableManager
|
||||||
|
from cpl.database.abc import DbModelDaoABC
|
||||||
|
from cpl.database.db_logger import DBLogger
|
||||||
|
|
||||||
|
_logger = DBLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class RoleDao(DbModelDaoABC[Role]):
|
||||||
|
def __init__(self):
|
||||||
|
DbModelDaoABC.__init__(self, __name__, Role, TableManager.get("roles"))
|
||||||
|
self.attribute(Role.name, str)
|
||||||
|
self.attribute(Role.description, str)
|
||||||
|
|
||||||
|
async def get_by_name(self, name: str) -> Role:
|
||||||
|
result = await self._db.select_map(f"SELECT * FROM {self._table_name} WHERE Name = '{name}'")
|
||||||
|
return self.to_object(result[0])
|
||||||
46
src/cpl-auth/cpl/auth/schema/_permission/role_permission.py
Normal file
46
src/cpl-auth/cpl/auth/schema/_permission/role_permission.py
Normal file
@@ -0,0 +1,46 @@
|
|||||||
|
from datetime import datetime
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
|
from async_property import async_property
|
||||||
|
|
||||||
|
from cpl.core.typing import SerialId
|
||||||
|
from cpl.database.abc import DbModelABC
|
||||||
|
from cpl.dependency import ServiceProviderABC
|
||||||
|
|
||||||
|
|
||||||
|
class RolePermission(DbModelABC):
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
id: SerialId,
|
||||||
|
role_id: SerialId,
|
||||||
|
permission_id: SerialId,
|
||||||
|
deleted: bool = False,
|
||||||
|
editor_id: Optional[SerialId] = None,
|
||||||
|
created: Optional[datetime] = None,
|
||||||
|
updated: Optional[datetime] = None,
|
||||||
|
):
|
||||||
|
DbModelABC.__init__(self, id, deleted, editor_id, created, updated)
|
||||||
|
self._role_id = role_id
|
||||||
|
self._permission_id = permission_id
|
||||||
|
|
||||||
|
@property
|
||||||
|
def role_id(self) -> int:
|
||||||
|
return self._role_id
|
||||||
|
|
||||||
|
@async_property
|
||||||
|
async def role(self):
|
||||||
|
from cpl.auth.schema._permission.role_dao import RoleDao
|
||||||
|
|
||||||
|
role_dao: RoleDao = ServiceProviderABC.get_global_service(RoleDao)
|
||||||
|
return await role_dao.get_by_id(self._role_id)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def permission_id(self) -> int:
|
||||||
|
return self._permission_id
|
||||||
|
|
||||||
|
@async_property
|
||||||
|
async def permission(self):
|
||||||
|
from cpl.auth.schema._permission.permission_dao import PermissionDao
|
||||||
|
|
||||||
|
permission_dao: PermissionDao = ServiceProviderABC.get_global_service(PermissionDao)
|
||||||
|
return await permission_dao.get_by_id(self._permission_id)
|
||||||
@@ -0,0 +1,29 @@
|
|||||||
|
from cpl.auth.schema._permission.role_permission import RolePermission
|
||||||
|
from cpl.database import TableManager
|
||||||
|
from cpl.database.abc import DbModelDaoABC
|
||||||
|
from cpl.database.db_logger import DBLogger
|
||||||
|
|
||||||
|
_logger = DBLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class RolePermissionDao(DbModelDaoABC[RolePermission]):
|
||||||
|
|
||||||
|
def __init__(self):
|
||||||
|
DbModelDaoABC.__init__(self, __name__, RolePermission, TableManager.get("role_permissions"))
|
||||||
|
|
||||||
|
self.attribute(RolePermission.role_id, int)
|
||||||
|
self.attribute(RolePermission.permission_id, int)
|
||||||
|
|
||||||
|
async def get_by_role_id(self, role_id: int, with_deleted=False) -> list[RolePermission]:
|
||||||
|
f = [{RolePermission.role_id: role_id}]
|
||||||
|
if not with_deleted:
|
||||||
|
f.append({RolePermission.deleted: False})
|
||||||
|
|
||||||
|
return await self.find_by(f)
|
||||||
|
|
||||||
|
async def get_by_permission_id(self, permission_id: int, with_deleted=False) -> list[RolePermission]:
|
||||||
|
f = [{RolePermission.permission_id: permission_id}]
|
||||||
|
if not with_deleted:
|
||||||
|
f.append({RolePermission.deleted: False})
|
||||||
|
|
||||||
|
return await self.find_by(f)
|
||||||
46
src/cpl-auth/cpl/auth/schema/_permission/role_user.py
Normal file
46
src/cpl-auth/cpl/auth/schema/_permission/role_user.py
Normal file
@@ -0,0 +1,46 @@
|
|||||||
|
from datetime import datetime
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
|
from async_property import async_property
|
||||||
|
|
||||||
|
from cpl.core.typing import SerialId
|
||||||
|
from cpl.database.abc import DbJoinModelABC
|
||||||
|
from cpl.dependency import ServiceProviderABC
|
||||||
|
|
||||||
|
|
||||||
|
class RoleUser(DbJoinModelABC):
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
id: SerialId,
|
||||||
|
user_id: SerialId,
|
||||||
|
role_id: SerialId,
|
||||||
|
deleted: bool = False,
|
||||||
|
editor_id: Optional[SerialId] = None,
|
||||||
|
created: Optional[datetime] = None,
|
||||||
|
updated: Optional[datetime] = None,
|
||||||
|
):
|
||||||
|
DbJoinModelABC.__init__(self, id, user_id, role_id, deleted, editor_id, created, updated)
|
||||||
|
self._user_id = user_id
|
||||||
|
self._role_id = role_id
|
||||||
|
|
||||||
|
@property
|
||||||
|
def user_id(self) -> int:
|
||||||
|
return self._user_id
|
||||||
|
|
||||||
|
@async_property
|
||||||
|
async def user(self):
|
||||||
|
from cpl.auth.schema._administration.auth_user_dao import AuthUserDao
|
||||||
|
|
||||||
|
auth_user_dao: AuthUserDao = ServiceProviderABC.get_global_service(AuthUserDao)
|
||||||
|
return await auth_user_dao.get_by_id(self._user_id)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def role_id(self) -> int:
|
||||||
|
return self._role_id
|
||||||
|
|
||||||
|
@async_property
|
||||||
|
async def role(self):
|
||||||
|
from cpl.auth.schema._permission.role_dao import RoleDao
|
||||||
|
|
||||||
|
role_dao: RoleDao = ServiceProviderABC.get_global_service(RoleDao)
|
||||||
|
return await role_dao.get_by_id(self._role_id)
|
||||||
29
src/cpl-auth/cpl/auth/schema/_permission/role_user_dao.py
Normal file
29
src/cpl-auth/cpl/auth/schema/_permission/role_user_dao.py
Normal file
@@ -0,0 +1,29 @@
|
|||||||
|
from cpl.auth.schema._permission.role_user import RoleUser
|
||||||
|
from cpl.database import TableManager
|
||||||
|
from cpl.database.abc import DbModelDaoABC
|
||||||
|
from cpl.database.db_logger import DBLogger
|
||||||
|
|
||||||
|
_logger = DBLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class RoleUserDao(DbModelDaoABC[RoleUser]):
|
||||||
|
|
||||||
|
def __init__(self):
|
||||||
|
DbModelDaoABC.__init__(self, __name__, RoleUser, TableManager.get("role_users"))
|
||||||
|
|
||||||
|
self.attribute(RoleUser.role_id, int)
|
||||||
|
self.attribute(RoleUser.user_id, int)
|
||||||
|
|
||||||
|
async def get_by_role_id(self, rid: int, with_deleted=False) -> list[RoleUser]:
|
||||||
|
f = [{RoleUser.role_id: rid}]
|
||||||
|
if not with_deleted:
|
||||||
|
f.append({RoleUser.deleted: False})
|
||||||
|
|
||||||
|
return await self.find_by(f)
|
||||||
|
|
||||||
|
async def get_by_user_id(self, uid: int, with_deleted=False) -> list[RoleUser]:
|
||||||
|
f = [{RoleUser.user_id: uid}]
|
||||||
|
if not with_deleted:
|
||||||
|
f.append({RoleUser.deleted: False})
|
||||||
|
|
||||||
|
return await self.find_by(f)
|
||||||
44
src/cpl-auth/cpl/auth/scripts/mysql/1-users.sql
Normal file
44
src/cpl-auth/cpl/auth/scripts/mysql/1-users.sql
Normal file
@@ -0,0 +1,44 @@
|
|||||||
|
CREATE TABLE IF NOT EXISTS administration_auth_users
|
||||||
|
(
|
||||||
|
id INT AUTO_INCREMENT PRIMARY KEY,
|
||||||
|
keycloakId CHAR(36) NOT NULL,
|
||||||
|
-- for history
|
||||||
|
deleted BOOL NOT NULL DEFAULT FALSE,
|
||||||
|
editorId INT NULL,
|
||||||
|
created TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||||
|
updated TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
|
||||||
|
|
||||||
|
CONSTRAINT UC_KeycloakId UNIQUE (keycloakId),
|
||||||
|
CONSTRAINT FK_EditorId FOREIGN KEY (editorId) REFERENCES administration_auth_users (id)
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE TABLE IF NOT EXISTS administration_auth_users_history
|
||||||
|
(
|
||||||
|
id INT AUTO_INCREMENT PRIMARY KEY,
|
||||||
|
keycloakId CHAR(36) NOT NULL,
|
||||||
|
-- for history
|
||||||
|
deleted BOOL NOT NULL,
|
||||||
|
editorId INT NULL,
|
||||||
|
created TIMESTAMP NOT NULL,
|
||||||
|
updated TIMESTAMP NOT NULL
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE TRIGGER TR_administration_auth_usersUpdate
|
||||||
|
AFTER UPDATE
|
||||||
|
ON administration_auth_users
|
||||||
|
FOR EACH ROW
|
||||||
|
BEGIN
|
||||||
|
INSERT INTO administration_auth_users_history
|
||||||
|
(id, keycloakId, deleted, editorId, created, updated)
|
||||||
|
VALUES (OLD.id, OLD.keycloakId, OLD.deleted, OLD.editorId, OLD.created, NOW());
|
||||||
|
END;
|
||||||
|
|
||||||
|
CREATE TRIGGER TR_administration_auth_usersDelete
|
||||||
|
AFTER DELETE
|
||||||
|
ON administration_auth_users
|
||||||
|
FOR EACH ROW
|
||||||
|
BEGIN
|
||||||
|
INSERT INTO administration_auth_users_history
|
||||||
|
(id, keycloakId, deleted, editorId, created, updated)
|
||||||
|
VALUES (OLD.id, OLD.keycloakId, 1, OLD.editorId, OLD.created, NOW());
|
||||||
|
END;
|
||||||
46
src/cpl-auth/cpl/auth/scripts/mysql/2-api-key.sql
Normal file
46
src/cpl-auth/cpl/auth/scripts/mysql/2-api-key.sql
Normal file
@@ -0,0 +1,46 @@
|
|||||||
|
CREATE TABLE IF NOT EXISTS administration_api_keys
|
||||||
|
(
|
||||||
|
id INT AUTO_INCREMENT PRIMARY KEY,
|
||||||
|
identifier VARCHAR(255) NOT NULL,
|
||||||
|
keyString VARCHAR(255) NOT NULL,
|
||||||
|
deleted BOOL NOT NULL DEFAULT FALSE,
|
||||||
|
editorId INT NULL,
|
||||||
|
created TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||||
|
updated TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
|
||||||
|
|
||||||
|
CONSTRAINT UC_Identifier_Key UNIQUE (identifier, keyString),
|
||||||
|
CONSTRAINT UC_Key UNIQUE (keyString),
|
||||||
|
CONSTRAINT FK_ApiKeys_Editor FOREIGN KEY (editorId) REFERENCES administration_auth_users (id)
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE TABLE IF NOT EXISTS administration_api_keys_history
|
||||||
|
(
|
||||||
|
id INT AUTO_INCREMENT PRIMARY KEY,
|
||||||
|
identifier VARCHAR(255) NOT NULL,
|
||||||
|
keyString VARCHAR(255) NOT NULL,
|
||||||
|
deleted BOOL NOT NULL,
|
||||||
|
editorId INT NULL,
|
||||||
|
created TIMESTAMP NOT NULL,
|
||||||
|
updated TIMESTAMP NOT NULL
|
||||||
|
);
|
||||||
|
|
||||||
|
|
||||||
|
CREATE TRIGGER TR_ApiKeysUpdate
|
||||||
|
AFTER UPDATE
|
||||||
|
ON administration_api_keys
|
||||||
|
FOR EACH ROW
|
||||||
|
BEGIN
|
||||||
|
INSERT INTO administration_api_keys_history
|
||||||
|
(id, identifier, keyString, deleted, editorId, created, updated)
|
||||||
|
VALUES (OLD.id, OLD.identifier, OLD.keyString, OLD.deleted, OLD.editorId, OLD.created, NOW());
|
||||||
|
END;
|
||||||
|
|
||||||
|
CREATE TRIGGER TR_ApiKeysDelete
|
||||||
|
AFTER DELETE
|
||||||
|
ON administration_api_keys
|
||||||
|
FOR EACH ROW
|
||||||
|
BEGIN
|
||||||
|
INSERT INTO administration_api_keys_history
|
||||||
|
(id, identifier, keyString, deleted, editorId, created, updated)
|
||||||
|
VALUES (OLD.id, OLD.identifier, OLD.keyString, 1, OLD.editorId, OLD.created, NOW());
|
||||||
|
END;
|
||||||
179
src/cpl-auth/cpl/auth/scripts/mysql/3-roles-permissions.sql
Normal file
179
src/cpl-auth/cpl/auth/scripts/mysql/3-roles-permissions.sql
Normal file
@@ -0,0 +1,179 @@
|
|||||||
|
CREATE TABLE IF NOT EXISTS permission_permissions
|
||||||
|
(
|
||||||
|
id INT AUTO_INCREMENT PRIMARY KEY,
|
||||||
|
name VARCHAR(255) NOT NULL,
|
||||||
|
description TEXT NULL,
|
||||||
|
deleted BOOL NOT NULL DEFAULT FALSE,
|
||||||
|
editorId INT NULL,
|
||||||
|
created TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||||
|
updated TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
|
||||||
|
CONSTRAINT UQ_PermissionName UNIQUE (name),
|
||||||
|
CONSTRAINT FK_Permissions_Editor FOREIGN KEY (editorId) REFERENCES administration_auth_users (id)
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE TABLE IF NOT EXISTS permission_permissions_history
|
||||||
|
(
|
||||||
|
id INT AUTO_INCREMENT PRIMARY KEY,
|
||||||
|
name VARCHAR(255) NOT NULL,
|
||||||
|
description TEXT NULL,
|
||||||
|
deleted BOOL NOT NULL,
|
||||||
|
editorId INT NULL,
|
||||||
|
created TIMESTAMP NOT NULL,
|
||||||
|
updated TIMESTAMP NOT NULL
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE TRIGGER TR_PermissionsUpdate
|
||||||
|
AFTER UPDATE
|
||||||
|
ON permission_permissions
|
||||||
|
FOR EACH ROW
|
||||||
|
BEGIN
|
||||||
|
INSERT INTO permission_permissions_history
|
||||||
|
(id, name, description, deleted, editorId, created, updated)
|
||||||
|
VALUES (OLD.id, OLD.name, OLD.description, OLD.deleted, OLD.editorId, OLD.created, NOW());
|
||||||
|
END;
|
||||||
|
|
||||||
|
CREATE TRIGGER TR_PermissionsDelete
|
||||||
|
AFTER DELETE
|
||||||
|
ON permission_permissions
|
||||||
|
FOR EACH ROW
|
||||||
|
BEGIN
|
||||||
|
INSERT INTO permission_permissions_history
|
||||||
|
(id, name, description, deleted, editorId, created, updated)
|
||||||
|
VALUES (OLD.id, OLD.name, OLD.description, 1, OLD.editorId, OLD.created, NOW());
|
||||||
|
END;
|
||||||
|
|
||||||
|
CREATE TABLE IF NOT EXISTS permission_roles
|
||||||
|
(
|
||||||
|
id INT AUTO_INCREMENT PRIMARY KEY,
|
||||||
|
name VARCHAR(255) NOT NULL,
|
||||||
|
description TEXT NULL,
|
||||||
|
deleted BOOL NOT NULL DEFAULT FALSE,
|
||||||
|
editorId INT NULL,
|
||||||
|
created TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||||
|
updated TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
|
||||||
|
CONSTRAINT UQ_RoleName UNIQUE (name),
|
||||||
|
CONSTRAINT FK_Roles_Editor FOREIGN KEY (editorId) REFERENCES administration_auth_users (id)
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE TABLE IF NOT EXISTS permission_roles_history
|
||||||
|
(
|
||||||
|
id INT AUTO_INCREMENT PRIMARY KEY,
|
||||||
|
name VARCHAR(255) NOT NULL,
|
||||||
|
description TEXT NULL,
|
||||||
|
deleted BOOL NOT NULL,
|
||||||
|
editorId INT NULL,
|
||||||
|
created TIMESTAMP NOT NULL,
|
||||||
|
updated TIMESTAMP NOT NULL
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE TRIGGER TR_RolesUpdate
|
||||||
|
AFTER UPDATE
|
||||||
|
ON permission_roles
|
||||||
|
FOR EACH ROW
|
||||||
|
BEGIN
|
||||||
|
INSERT INTO permission_roles_history
|
||||||
|
(id, name, description, deleted, editorId, created, updated)
|
||||||
|
VALUES (OLD.id, OLD.name, OLD.description, OLD.deleted, OLD.editorId, OLD.created, NOW());
|
||||||
|
END;
|
||||||
|
|
||||||
|
CREATE TRIGGER TR_RolesDelete
|
||||||
|
AFTER DELETE
|
||||||
|
ON permission_roles
|
||||||
|
FOR EACH ROW
|
||||||
|
BEGIN
|
||||||
|
INSERT INTO permission_roles_history
|
||||||
|
(id, name, description, deleted, editorId, created, updated)
|
||||||
|
VALUES (OLD.id, OLD.name, OLD.description, 1, OLD.editorId, OLD.created, NOW());
|
||||||
|
END;
|
||||||
|
|
||||||
|
CREATE TABLE IF NOT EXISTS permission_role_permissions
|
||||||
|
(
|
||||||
|
id INT AUTO_INCREMENT PRIMARY KEY,
|
||||||
|
RoleId INT NOT NULL,
|
||||||
|
permissionId INT NOT NULL,
|
||||||
|
deleted BOOL NOT NULL DEFAULT FALSE,
|
||||||
|
editorId INT NULL,
|
||||||
|
created TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||||
|
updated TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
|
||||||
|
CONSTRAINT UQ_RolePermission UNIQUE (RoleId, permissionId),
|
||||||
|
CONSTRAINT FK_RolePermissions_Role FOREIGN KEY (RoleId) REFERENCES permission_roles (id) ON DELETE CASCADE,
|
||||||
|
CONSTRAINT FK_RolePermissions_Permission FOREIGN KEY (permissionId) REFERENCES permission_permissions (id) ON DELETE CASCADE,
|
||||||
|
CONSTRAINT FK_RolePermissions_Editor FOREIGN KEY (editorId) REFERENCES administration_auth_users (id)
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE TABLE IF NOT EXISTS permission_role_permissions_history
|
||||||
|
(
|
||||||
|
id INT AUTO_INCREMENT PRIMARY KEY,
|
||||||
|
RoleId INT NOT NULL,
|
||||||
|
permissionId INT NOT NULL,
|
||||||
|
deleted BOOL NOT NULL,
|
||||||
|
editorId INT NULL,
|
||||||
|
created TIMESTAMP NOT NULL,
|
||||||
|
updated TIMESTAMP NOT NULL
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE TRIGGER TR_RolePermissionsUpdate
|
||||||
|
AFTER UPDATE
|
||||||
|
ON permission_role_permissions
|
||||||
|
FOR EACH ROW
|
||||||
|
BEGIN
|
||||||
|
INSERT INTO permission_role_permissions_history
|
||||||
|
(id, RoleId, permissionId, deleted, editorId, created, updated)
|
||||||
|
VALUES (OLD.id, OLD.RoleId, OLD.permissionId, OLD.deleted, OLD.editorId, OLD.created, NOW());
|
||||||
|
END;
|
||||||
|
|
||||||
|
CREATE TRIGGER TR_RolePermissionsDelete
|
||||||
|
AFTER DELETE
|
||||||
|
ON permission_role_permissions
|
||||||
|
FOR EACH ROW
|
||||||
|
BEGIN
|
||||||
|
INSERT INTO permission_role_permissions_history
|
||||||
|
(id, RoleId, permissionId, deleted, editorId, created, updated)
|
||||||
|
VALUES (OLD.id, OLD.RoleId, OLD.permissionId, 1, OLD.editorId, OLD.created, NOW());
|
||||||
|
END;
|
||||||
|
|
||||||
|
CREATE TABLE IF NOT EXISTS permission_role_auth_users
|
||||||
|
(
|
||||||
|
id INT AUTO_INCREMENT PRIMARY KEY,
|
||||||
|
RoleId INT NOT NULL,
|
||||||
|
UserId INT NOT NULL,
|
||||||
|
deleted BOOL NOT NULL DEFAULT FALSE,
|
||||||
|
editorId INT NULL,
|
||||||
|
created TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||||
|
updated TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
|
||||||
|
CONSTRAINT UQ_RoleUser UNIQUE (RoleId, UserId),
|
||||||
|
CONSTRAINT FK_Roleauth_users_Role FOREIGN KEY (RoleId) REFERENCES permission_roles (id) ON DELETE CASCADE,
|
||||||
|
CONSTRAINT FK_Roleauth_users_User FOREIGN KEY (UserId) REFERENCES administration_auth_users (id) ON DELETE CASCADE,
|
||||||
|
CONSTRAINT FK_Roleauth_users_Editor FOREIGN KEY (editorId) REFERENCES administration_auth_users (id)
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE TABLE IF NOT EXISTS permission_role_auth_users_history
|
||||||
|
(
|
||||||
|
id INT AUTO_INCREMENT PRIMARY KEY,
|
||||||
|
RoleId INT NOT NULL,
|
||||||
|
UserId INT NOT NULL,
|
||||||
|
deleted BOOL NOT NULL,
|
||||||
|
editorId INT NULL,
|
||||||
|
created TIMESTAMP NOT NULL,
|
||||||
|
updated TIMESTAMP NOT NULL
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE TRIGGER TR_Roleauth_usersUpdate
|
||||||
|
AFTER UPDATE
|
||||||
|
ON permission_role_auth_users
|
||||||
|
FOR EACH ROW
|
||||||
|
BEGIN
|
||||||
|
INSERT INTO permission_role_auth_users_history
|
||||||
|
(id, RoleId, UserId, deleted, editorId, created, updated)
|
||||||
|
VALUES (OLD.id, OLD.RoleId, OLD.UserId, OLD.deleted, OLD.editorId, OLD.created, NOW());
|
||||||
|
END;
|
||||||
|
|
||||||
|
CREATE TRIGGER TR_Roleauth_usersDelete
|
||||||
|
AFTER DELETE
|
||||||
|
ON permission_role_auth_users
|
||||||
|
FOR EACH ROW
|
||||||
|
BEGIN
|
||||||
|
INSERT INTO permission_role_auth_users_history
|
||||||
|
(id, RoleId, UserId, deleted, editorId, created, updated)
|
||||||
|
VALUES (OLD.id, OLD.RoleId, OLD.UserId, 1, OLD.editorId, OLD.created, NOW());
|
||||||
|
END;
|
||||||
@@ -0,0 +1,46 @@
|
|||||||
|
CREATE TABLE IF NOT EXISTS permission_api_key_permissions
|
||||||
|
(
|
||||||
|
id INT AUTO_INCREMENT PRIMARY KEY,
|
||||||
|
apiKeyId INT NOT NULL,
|
||||||
|
permissionId INT NOT NULL,
|
||||||
|
deleted BOOL NOT NULL DEFAULT FALSE,
|
||||||
|
editorId INT NULL,
|
||||||
|
created TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||||
|
updated TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
|
||||||
|
CONSTRAINT UQ_ApiKeyPermission UNIQUE (apiKeyId, permissionId),
|
||||||
|
CONSTRAINT FK_ApiKeyPermissions_ApiKey FOREIGN KEY (apiKeyId) REFERENCES administration_api_keys (id) ON DELETE CASCADE,
|
||||||
|
CONSTRAINT FK_ApiKeyPermissions_Permission FOREIGN KEY (permissionId) REFERENCES permission_permissions (id) ON DELETE CASCADE,
|
||||||
|
CONSTRAINT FK_ApiKeyPermissions_Editor FOREIGN KEY (editorId) REFERENCES administration_auth_users (id)
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE TABLE IF NOT EXISTS permission_api_key_permissions_history
|
||||||
|
(
|
||||||
|
id INT AUTO_INCREMENT PRIMARY KEY,
|
||||||
|
apiKeyId INT NOT NULL,
|
||||||
|
permissionId INT NOT NULL,
|
||||||
|
deleted BOOL NOT NULL,
|
||||||
|
editorId INT NULL,
|
||||||
|
created TIMESTAMP NOT NULL,
|
||||||
|
updated TIMESTAMP NOT NULL
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE TRIGGER TR_ApiKeyPermissionsUpdate
|
||||||
|
AFTER UPDATE
|
||||||
|
ON permission_api_key_permissions
|
||||||
|
FOR EACH ROW
|
||||||
|
BEGIN
|
||||||
|
INSERT INTO permission_api_key_permissions_history
|
||||||
|
(id, apiKeyId, permissionId, deleted, editorId, created, updated)
|
||||||
|
VALUES (OLD.id, OLD.apiKeyId, OLD.permissionId, OLD.deleted, OLD.editorId, OLD.created, NOW());
|
||||||
|
END;
|
||||||
|
|
||||||
|
CREATE TRIGGER TR_ApiKeyPermissionsDelete
|
||||||
|
AFTER DELETE
|
||||||
|
ON permission_api_key_permissions
|
||||||
|
FOR EACH ROW
|
||||||
|
BEGIN
|
||||||
|
INSERT INTO permission_api_key_permissions_history
|
||||||
|
(id, apiKeyId, permissionId, deleted, editorId, created, updated)
|
||||||
|
VALUES (OLD.id, OLD.apiKeyId, OLD.permissionId, 1, OLD.editorId, OLD.created, NOW());
|
||||||
|
END;
|
||||||
|
|
||||||
26
src/cpl-auth/cpl/auth/scripts/postgres/1-users.sql
Normal file
26
src/cpl-auth/cpl/auth/scripts/postgres/1-users.sql
Normal file
@@ -0,0 +1,26 @@
|
|||||||
|
CREATE SCHEMA IF NOT EXISTS administration;
|
||||||
|
|
||||||
|
CREATE TABLE IF NOT EXISTS administration.auth_users
|
||||||
|
(
|
||||||
|
id SERIAL PRIMARY KEY,
|
||||||
|
keycloakId UUID NOT NULL,
|
||||||
|
-- for history
|
||||||
|
deleted BOOLEAN NOT NULL DEFAULT FALSE,
|
||||||
|
editorId INT NULL REFERENCES administration.auth_users (id),
|
||||||
|
created timestamptz NOT NULL DEFAULT NOW(),
|
||||||
|
updated timestamptz NOT NULL DEFAULT NOW(),
|
||||||
|
|
||||||
|
CONSTRAINT UC_KeycloakId UNIQUE (keycloakId)
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE TABLE IF NOT EXISTS administration.auth_users_history
|
||||||
|
(
|
||||||
|
LIKE administration.auth_users
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE TRIGGER users_history_trigger
|
||||||
|
BEFORE INSERT OR UPDATE OR DELETE
|
||||||
|
ON administration.auth_users
|
||||||
|
FOR EACH ROW
|
||||||
|
EXECUTE FUNCTION public.history_trigger_function();
|
||||||
|
|
||||||
28
src/cpl-auth/cpl/auth/scripts/postgres/2-api-key.sql
Normal file
28
src/cpl-auth/cpl/auth/scripts/postgres/2-api-key.sql
Normal file
@@ -0,0 +1,28 @@
|
|||||||
|
CREATE SCHEMA IF NOT EXISTS administration;
|
||||||
|
|
||||||
|
CREATE TABLE IF NOT EXISTS administration.api_keys
|
||||||
|
(
|
||||||
|
id SERIAL PRIMARY KEY,
|
||||||
|
identifier VARCHAR(255) NOT NULL,
|
||||||
|
keyString VARCHAR(255) NOT NULL,
|
||||||
|
-- for history
|
||||||
|
deleted BOOLEAN NOT NULL DEFAULT FALSE,
|
||||||
|
editorId INT NULL REFERENCES administration.auth_users (id),
|
||||||
|
created timestamptz NOT NULL DEFAULT NOW(),
|
||||||
|
updated timestamptz NOT NULL DEFAULT NOW(),
|
||||||
|
|
||||||
|
CONSTRAINT UC_Identifier_Key UNIQUE (identifier, keyString),
|
||||||
|
CONSTRAINT UC_Key UNIQUE (keyString)
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE TABLE IF NOT EXISTS administration.api_keys_history
|
||||||
|
(
|
||||||
|
LIKE administration.api_keys
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE TRIGGER api_keys_history_trigger
|
||||||
|
BEFORE INSERT OR UPDATE OR DELETE
|
||||||
|
ON administration.api_keys
|
||||||
|
FOR EACH ROW
|
||||||
|
EXECUTE FUNCTION public.history_trigger_function();
|
||||||
|
|
||||||
105
src/cpl-auth/cpl/auth/scripts/postgres/3-roles-permissions.sql
Normal file
105
src/cpl-auth/cpl/auth/scripts/postgres/3-roles-permissions.sql
Normal file
@@ -0,0 +1,105 @@
|
|||||||
|
CREATE SCHEMA IF NOT EXISTS permission;
|
||||||
|
|
||||||
|
-- Permissions
|
||||||
|
CREATE TABLE permission.permissions
|
||||||
|
(
|
||||||
|
id SERIAL PRIMARY KEY,
|
||||||
|
name VARCHAR(255) NOT NULL,
|
||||||
|
description TEXT NULL,
|
||||||
|
|
||||||
|
-- for history
|
||||||
|
deleted BOOLEAN NOT NULL DEFAULT FALSE,
|
||||||
|
editorId INT NULL REFERENCES administration.auth_users (id),
|
||||||
|
created timestamptz NOT NULL DEFAULT NOW(),
|
||||||
|
updated timestamptz NOT NULL DEFAULT NOW(),
|
||||||
|
CONSTRAINT UQ_PermissionName UNIQUE (name)
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE TABLE permission.permissions_history
|
||||||
|
(
|
||||||
|
LIKE permission.permissions
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE TRIGGER versioning_trigger
|
||||||
|
BEFORE INSERT OR UPDATE OR DELETE
|
||||||
|
ON permission.permissions
|
||||||
|
FOR EACH ROW
|
||||||
|
EXECUTE PROCEDURE public.history_trigger_function();
|
||||||
|
|
||||||
|
-- Roles
|
||||||
|
CREATE TABLE permission.roles
|
||||||
|
(
|
||||||
|
id SERIAL PRIMARY KEY,
|
||||||
|
name VARCHAR(255) NOT NULL,
|
||||||
|
description TEXT NULL,
|
||||||
|
|
||||||
|
-- for history
|
||||||
|
deleted BOOLEAN NOT NULL DEFAULT FALSE,
|
||||||
|
editorId INT NULL REFERENCES administration.auth_users (id),
|
||||||
|
created timestamptz NOT NULL DEFAULT NOW(),
|
||||||
|
updated timestamptz NOT NULL DEFAULT NOW(),
|
||||||
|
CONSTRAINT UQ_RoleName UNIQUE (name)
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE TABLE permission.roles_history
|
||||||
|
(
|
||||||
|
LIKE permission.roles
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE TRIGGER versioning_trigger
|
||||||
|
BEFORE INSERT OR UPDATE OR DELETE
|
||||||
|
ON permission.roles
|
||||||
|
FOR EACH ROW
|
||||||
|
EXECUTE PROCEDURE public.history_trigger_function();
|
||||||
|
|
||||||
|
-- Role permissions
|
||||||
|
CREATE TABLE permission.role_permissions
|
||||||
|
(
|
||||||
|
id SERIAL PRIMARY KEY,
|
||||||
|
RoleId INT NOT NULL REFERENCES permission.roles (id) ON DELETE CASCADE,
|
||||||
|
permissionId INT NOT NULL REFERENCES permission.permissions (id) ON DELETE CASCADE,
|
||||||
|
|
||||||
|
-- for history
|
||||||
|
deleted BOOLEAN NOT NULL DEFAULT FALSE,
|
||||||
|
editorId INT NULL REFERENCES administration.auth_users (id),
|
||||||
|
created timestamptz NOT NULL DEFAULT NOW(),
|
||||||
|
updated timestamptz NOT NULL DEFAULT NOW(),
|
||||||
|
CONSTRAINT UQ_RolePermission UNIQUE (RoleId, permissionId)
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE TABLE permission.role_permissions_history
|
||||||
|
(
|
||||||
|
LIKE permission.role_permissions
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE TRIGGER versioning_trigger
|
||||||
|
BEFORE INSERT OR UPDATE OR DELETE
|
||||||
|
ON permission.role_permissions
|
||||||
|
FOR EACH ROW
|
||||||
|
EXECUTE PROCEDURE public.history_trigger_function();
|
||||||
|
|
||||||
|
-- Role user
|
||||||
|
CREATE TABLE permission.role_users
|
||||||
|
(
|
||||||
|
id SERIAL PRIMARY KEY,
|
||||||
|
RoleId INT NOT NULL REFERENCES permission.roles (id) ON DELETE CASCADE,
|
||||||
|
UserId INT NOT NULL REFERENCES administration.auth_users (id) ON DELETE CASCADE,
|
||||||
|
|
||||||
|
-- for history
|
||||||
|
deleted BOOLEAN NOT NULL DEFAULT FALSE,
|
||||||
|
editorId INT NULL REFERENCES administration.auth_users (id),
|
||||||
|
created timestamptz NOT NULL DEFAULT NOW(),
|
||||||
|
updated timestamptz NOT NULL DEFAULT NOW(),
|
||||||
|
CONSTRAINT UQ_RoleUser UNIQUE (RoleId, UserId)
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE TABLE permission.role_users_history
|
||||||
|
(
|
||||||
|
LIKE permission.role_users
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE TRIGGER versioning_trigger
|
||||||
|
BEFORE INSERT OR UPDATE OR DELETE
|
||||||
|
ON permission.role_users
|
||||||
|
FOR EACH ROW
|
||||||
|
EXECUTE PROCEDURE public.history_trigger_function();
|
||||||
@@ -0,0 +1,24 @@
|
|||||||
|
CREATE TABLE permission.api_key_permissions
|
||||||
|
(
|
||||||
|
id SERIAL PRIMARY KEY,
|
||||||
|
apiKeyId INT NOT NULL REFERENCES administration.api_keys (id) ON DELETE CASCADE,
|
||||||
|
permissionId INT NOT NULL REFERENCES permission.permissions (id) ON DELETE CASCADE,
|
||||||
|
|
||||||
|
-- for history
|
||||||
|
deleted BOOLEAN NOT NULL DEFAULT FALSE,
|
||||||
|
editorId INT NULL REFERENCES administration.auth_users (id),
|
||||||
|
created timestamptz NOT NULL DEFAULT NOW(),
|
||||||
|
updated timestamptz NOT NULL DEFAULT NOW(),
|
||||||
|
CONSTRAINT UQ_ApiKeyPermission UNIQUE (apiKeyId, permissionId)
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE TABLE permission.api_key_permissions_history
|
||||||
|
(
|
||||||
|
LIKE permission.api_key_permissions
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE TRIGGER versioning_trigger
|
||||||
|
BEFORE INSERT OR UPDATE OR DELETE
|
||||||
|
ON permission.api_key_permissions
|
||||||
|
FOR EACH ROW
|
||||||
|
EXECUTE PROCEDURE public.history_trigger_function();
|
||||||
30
src/cpl-auth/pyproject.toml
Normal file
30
src/cpl-auth/pyproject.toml
Normal file
@@ -0,0 +1,30 @@
|
|||||||
|
[build-system]
|
||||||
|
requires = ["setuptools>=70.1.0", "wheel>=0.43.0"]
|
||||||
|
build-backend = "setuptools.build_meta"
|
||||||
|
|
||||||
|
[project]
|
||||||
|
name = "cpl-auth"
|
||||||
|
version = "2024.7.0"
|
||||||
|
description = "CPL auth"
|
||||||
|
readme ="CPL auth package"
|
||||||
|
requires-python = ">=3.12"
|
||||||
|
license = { text = "MIT" }
|
||||||
|
authors = [
|
||||||
|
{ name = "Sven Heidemann", email = "sven.heidemann@sh-edraft.de" }
|
||||||
|
]
|
||||||
|
keywords = ["cpl", "auth", "backend", "shared", "library"]
|
||||||
|
|
||||||
|
dynamic = ["dependencies", "optional-dependencies"]
|
||||||
|
|
||||||
|
[project.urls]
|
||||||
|
Homepage = "https://www.sh-edraft.de"
|
||||||
|
|
||||||
|
[tool.setuptools.packages.find]
|
||||||
|
where = ["."]
|
||||||
|
include = ["cpl*"]
|
||||||
|
|
||||||
|
[tool.setuptools.dynamic]
|
||||||
|
dependencies = { file = ["requirements.txt"] }
|
||||||
|
optional-dependencies.dev = { file = ["requirements.dev.txt"] }
|
||||||
|
|
||||||
|
|
||||||
1
src/cpl-auth/requirements.dev.txt
Normal file
1
src/cpl-auth/requirements.dev.txt
Normal file
@@ -0,0 +1 @@
|
|||||||
|
black==25.1.0
|
||||||
4
src/cpl-auth/requirements.txt
Normal file
4
src/cpl-auth/requirements.txt
Normal file
@@ -0,0 +1,4 @@
|
|||||||
|
cpl-core
|
||||||
|
cpl-dependency
|
||||||
|
cpl-database
|
||||||
|
python-keycloak-5.8.1
|
||||||
@@ -1 +0,0 @@
|
|||||||
|
|
||||||
|
|||||||
@@ -2,4 +2,6 @@ from abc import ABC
|
|||||||
|
|
||||||
|
|
||||||
class ConfigurationModelABC(ABC):
|
class ConfigurationModelABC(ABC):
|
||||||
pass
|
r"""
|
||||||
|
ABC for configuration model classes
|
||||||
|
"""
|
||||||
|
|||||||
1
src/cpl-core/cpl/core/ctx/__init__.py
Normal file
1
src/cpl-core/cpl/core/ctx/__init__.py
Normal file
@@ -0,0 +1 @@
|
|||||||
|
from .user_context import set_user, get_user
|
||||||
18
src/cpl-core/cpl/core/ctx/user_context.py
Normal file
18
src/cpl-core/cpl/core/ctx/user_context.py
Normal file
@@ -0,0 +1,18 @@
|
|||||||
|
from contextvars import ContextVar
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
|
from cpl.auth.auth_logger import AuthLogger
|
||||||
|
from cpl.auth.schema._administration.auth_user import AuthUser
|
||||||
|
|
||||||
|
_user_context: ContextVar[Optional[AuthUser]] = ContextVar("user", default=None)
|
||||||
|
|
||||||
|
_logger = AuthLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
def set_user(user_id: Optional[AuthUser]):
|
||||||
|
_logger.trace("Setting user context", user_id)
|
||||||
|
_user_context.set(user_id)
|
||||||
|
|
||||||
|
|
||||||
|
def get_user() -> Optional[AuthUser]:
|
||||||
|
return _user_context.get()
|
||||||
@@ -3,7 +3,7 @@ from socket import gethostname
|
|||||||
from typing import Optional, Type
|
from typing import Optional, Type
|
||||||
|
|
||||||
from cpl.core.environment.environment_enum import EnvironmentEnum
|
from cpl.core.environment.environment_enum import EnvironmentEnum
|
||||||
from cpl.core.typing import T
|
from cpl.core.typing import T, D
|
||||||
from cpl.core.utils.get_value import get_value
|
from cpl.core.utils.get_value import get_value
|
||||||
|
|
||||||
|
|
||||||
@@ -55,14 +55,14 @@ class Environment:
|
|||||||
os.environ[key] = str(value)
|
os.environ[key] = str(value)
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def get(key: str, cast_type: Type[T], default: Optional[T] = None) -> Optional[T]:
|
def get(key: str, cast_type: Type[T], default: D = None) -> T | D:
|
||||||
"""
|
"""
|
||||||
Get an environment variable and cast it to a specified type.
|
Get an environment variable and cast it to a specified type.
|
||||||
:param str key: The name of the environment variable.
|
:param str key: The name of the environment variable.
|
||||||
:param Type[T] cast_type: A callable to cast the variable's value.
|
:param Type[T] cast_type: A callable to cast the variable's value.
|
||||||
:param Optional[T] default: The default value to return if the variable is not found. Defaults to None.The default value to return if the variable is not found. Defaults to None.
|
:param T default: The default value to return if the variable is not found. Defaults to None.The default value to return if the variable is not found. Defaults to None.
|
||||||
:return: The casted value, or None if the variable is not found.
|
:return: The casted value, or None if the variable is not found.
|
||||||
:rtype: Optional[T]
|
:rtype: T | D
|
||||||
"""
|
"""
|
||||||
|
|
||||||
return get_value(dict(os.environ), key, cast_type, default)
|
return get_value(dict(os.environ), key, cast_type, default)
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
from .logger import Logger
|
from .logger import Logger
|
||||||
from .logger_abc import LoggerABC
|
from .logger_abc import LoggerABC
|
||||||
from .log_level_enum import LogLevelEnum
|
from .log_level_enum import LogLevel
|
||||||
from .logging_settings import LogSettings
|
from .logging_settings import LogSettings
|
||||||
|
|||||||
@@ -1,7 +1,7 @@
|
|||||||
from enum import Enum
|
from enum import Enum
|
||||||
|
|
||||||
|
|
||||||
class LogLevelEnum(Enum):
|
class LogLevel(Enum):
|
||||||
off = "OFF" # Nothing
|
off = "OFF" # Nothing
|
||||||
trace = "TRC" # Detailed app information's
|
trace = "TRC" # Detailed app information's
|
||||||
debug = "DEB" # Detailed app state
|
debug = "DEB" # Detailed app state
|
||||||
|
|||||||
@@ -3,28 +3,31 @@ import traceback
|
|||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
|
|
||||||
from cpl.core.console import Console
|
from cpl.core.console import Console
|
||||||
from cpl.core.log.log_level_enum import LogLevelEnum
|
from cpl.core.log.log_level_enum import LogLevel
|
||||||
from cpl.core.log.logger_abc import LoggerABC
|
from cpl.core.log.logger_abc import LoggerABC
|
||||||
from cpl.core.typing import Messages, Source
|
from cpl.core.typing import Messages, Source
|
||||||
|
|
||||||
|
|
||||||
class Logger(LoggerABC):
|
class Logger(LoggerABC):
|
||||||
_level = LogLevelEnum.info
|
_level = LogLevel.info
|
||||||
_levels = [x for x in LogLevelEnum]
|
_levels = [x for x in LogLevel]
|
||||||
|
|
||||||
# ANSI color codes for different log levels
|
# ANSI color codes for different log levels
|
||||||
_COLORS = {
|
_COLORS = {
|
||||||
LogLevelEnum.trace: "\033[37m", # Light Gray
|
LogLevel.trace: "\033[37m", # Light Gray
|
||||||
LogLevelEnum.debug: "\033[94m", # Blue
|
LogLevel.debug: "\033[94m", # Blue
|
||||||
LogLevelEnum.info: "\033[92m", # Green
|
LogLevel.info: "\033[92m", # Green
|
||||||
LogLevelEnum.warning: "\033[93m", # Yellow
|
LogLevel.warning: "\033[93m", # Yellow
|
||||||
LogLevelEnum.error: "\033[91m", # Red
|
LogLevel.error: "\033[91m", # Red
|
||||||
LogLevelEnum.fatal: "\033[95m", # Magenta
|
LogLevel.fatal: "\033[95m", # Magenta
|
||||||
}
|
}
|
||||||
|
|
||||||
def __init__(self, source: Source, file_prefix: str = None):
|
def __init__(self, source: Source, file_prefix: str = None):
|
||||||
LoggerABC.__init__(self)
|
LoggerABC.__init__(self)
|
||||||
assert source is not None and source != "", "Source cannot be None or empty"
|
|
||||||
|
if source == LoggerABC.__name__:
|
||||||
|
source = None
|
||||||
|
|
||||||
self._source = source
|
self._source = source
|
||||||
|
|
||||||
if file_prefix is None:
|
if file_prefix is None:
|
||||||
@@ -45,7 +48,7 @@ class Logger(LoggerABC):
|
|||||||
os.makedirs("logs")
|
os.makedirs("logs")
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def set_level(cls, level: LogLevelEnum):
|
def set_level(cls, level: LogLevel):
|
||||||
if level in cls._levels:
|
if level in cls._levels:
|
||||||
cls._level = level
|
cls._level = level
|
||||||
else:
|
else:
|
||||||
@@ -69,7 +72,7 @@ class Logger(LoggerABC):
|
|||||||
log_file.write(content + "\n")
|
log_file.write(content + "\n")
|
||||||
log_file.close()
|
log_file.close()
|
||||||
|
|
||||||
def _log(self, level: LogLevelEnum, *messages: Messages):
|
def _log(self, level: LogLevel, *messages: Messages):
|
||||||
try:
|
try:
|
||||||
if self._levels.index(level) < self._levels.index(self._level):
|
if self._levels.index(level) < self._levels.index(self._level):
|
||||||
return
|
return
|
||||||
@@ -78,7 +81,7 @@ class Logger(LoggerABC):
|
|||||||
formatted_message = self._format_message(level.value, timestamp, *messages)
|
formatted_message = self._format_message(level.value, timestamp, *messages)
|
||||||
|
|
||||||
self._write_log_to_file(formatted_message)
|
self._write_log_to_file(formatted_message)
|
||||||
Console.write_line(f"{self._COLORS.get(self._level, '\033[0m')}{formatted_message}\033[0m")
|
Console.write_line(f"{self._COLORS.get(level, '\033[0m')}{formatted_message}\033[0m")
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
print(f"Error while logging: {e} -> {traceback.format_exc()}")
|
print(f"Error while logging: {e} -> {traceback.format_exc()}")
|
||||||
|
|
||||||
@@ -91,27 +94,35 @@ class Logger(LoggerABC):
|
|||||||
|
|
||||||
messages = [str(message) for message in messages if message is not None]
|
messages = [str(message) for message in messages if message is not None]
|
||||||
|
|
||||||
return f"<{timestamp}> [{level.upper():^3}] [{self._file_prefix}] - [{self._source}]: {' '.join(messages)}"
|
message = f"<{timestamp}>"
|
||||||
|
message += f" [{level.upper():^3}]"
|
||||||
|
message += f" [{self._file_prefix}]"
|
||||||
|
if self._source is not None:
|
||||||
|
message += f" - [{self._source}]"
|
||||||
|
|
||||||
|
message += f": {' '.join(messages)}"
|
||||||
|
|
||||||
|
return message
|
||||||
|
|
||||||
def header(self, string: str):
|
def header(self, string: str):
|
||||||
self._log(LogLevelEnum.info, string)
|
self._log(LogLevel.info, string)
|
||||||
|
|
||||||
def trace(self, *messages: Messages):
|
def trace(self, *messages: Messages):
|
||||||
self._log(LogLevelEnum.trace, *messages)
|
self._log(LogLevel.trace, *messages)
|
||||||
|
|
||||||
def debug(self, *messages: Messages):
|
def debug(self, *messages: Messages):
|
||||||
self._log(LogLevelEnum.debug, *messages)
|
self._log(LogLevel.debug, *messages)
|
||||||
|
|
||||||
def info(self, *messages: Messages):
|
def info(self, *messages: Messages):
|
||||||
self._log(LogLevelEnum.info, *messages)
|
self._log(LogLevel.info, *messages)
|
||||||
|
|
||||||
def warning(self, *messages: Messages):
|
def warning(self, *messages: Messages):
|
||||||
self._log(LogLevelEnum.warning, *messages)
|
self._log(LogLevel.warning, *messages)
|
||||||
|
|
||||||
def error(self, message, e: Exception = None):
|
def error(self, message, e: Exception = None):
|
||||||
self._log(LogLevelEnum.error, message, f"{e} -> {traceback.format_exc()}" if e else None)
|
self._log(LogLevel.error, message, f"{e} -> {traceback.format_exc()}" if e else None)
|
||||||
|
|
||||||
def fatal(self, message, e: Exception = None, prevent_quit: bool = False):
|
def fatal(self, message, e: Exception = None, prevent_quit: bool = False):
|
||||||
self._log(LogLevelEnum.fatal, message, f"{e} -> {traceback.format_exc()}" if e else None)
|
self._log(LogLevel.fatal, message, f"{e} -> {traceback.format_exc()}" if e else None)
|
||||||
if not prevent_quit:
|
if not prevent_quit:
|
||||||
exit(-1)
|
exit(-1)
|
||||||
|
|||||||
@@ -1,5 +1,6 @@
|
|||||||
from abc import abstractmethod, ABC
|
from abc import abstractmethod, ABC
|
||||||
|
|
||||||
|
from cpl.core.log.log_level_enum import LogLevel
|
||||||
from cpl.core.typing import Messages
|
from cpl.core.typing import Messages
|
||||||
|
|
||||||
|
|
||||||
@@ -7,12 +8,10 @@ class LoggerABC(ABC):
|
|||||||
r"""ABC for :class:`cpl.core.log.logger_service.Logger`"""
|
r"""ABC for :class:`cpl.core.log.logger_service.Logger`"""
|
||||||
|
|
||||||
@abstractmethod
|
@abstractmethod
|
||||||
def set_level(self, level: str):
|
def set_level(self, level: LogLevel): ...
|
||||||
pass
|
|
||||||
|
|
||||||
@abstractmethod
|
@abstractmethod
|
||||||
def _format_message(self, level: str, timestamp, *messages: Messages) -> str:
|
def _format_message(self, level: str, timestamp, *messages: Messages) -> str: ...
|
||||||
pass
|
|
||||||
|
|
||||||
@abstractmethod
|
@abstractmethod
|
||||||
def header(self, string: str):
|
def header(self, string: str):
|
||||||
|
|||||||
@@ -1,7 +1,7 @@
|
|||||||
from typing import Optional
|
from typing import Optional
|
||||||
|
|
||||||
from cpl.core.configuration.configuration_model_abc import ConfigurationModelABC
|
from cpl.core.configuration.configuration_model_abc import ConfigurationModelABC
|
||||||
from cpl.core.log.log_level_enum import LogLevelEnum
|
from cpl.core.log.log_level_enum import LogLevel
|
||||||
|
|
||||||
|
|
||||||
class LogSettings(ConfigurationModelABC):
|
class LogSettings(ConfigurationModelABC):
|
||||||
@@ -11,14 +11,14 @@ class LogSettings(ConfigurationModelABC):
|
|||||||
self,
|
self,
|
||||||
path: str = None,
|
path: str = None,
|
||||||
filename: str = None,
|
filename: str = None,
|
||||||
console_log_level: LogLevelEnum = None,
|
console_log_level: LogLevel = None,
|
||||||
file_log_level: LogLevelEnum = None,
|
file_log_level: LogLevel = None,
|
||||||
):
|
):
|
||||||
ConfigurationModelABC.__init__(self)
|
ConfigurationModelABC.__init__(self)
|
||||||
self._path: Optional[str] = path
|
self._path: Optional[str] = path
|
||||||
self._filename: Optional[str] = filename
|
self._filename: Optional[str] = filename
|
||||||
self._console: Optional[LogLevelEnum] = console_log_level
|
self._console: Optional[LogLevel] = console_log_level
|
||||||
self._level: Optional[LogLevelEnum] = file_log_level
|
self._level: Optional[LogLevel] = file_log_level
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def path(self) -> str:
|
def path(self) -> str:
|
||||||
@@ -37,17 +37,17 @@ class LogSettings(ConfigurationModelABC):
|
|||||||
self._filename = filename
|
self._filename = filename
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def console(self) -> LogLevelEnum:
|
def console(self) -> LogLevel:
|
||||||
return self._console
|
return self._console
|
||||||
|
|
||||||
@console.setter
|
@console.setter
|
||||||
def console(self, console: LogLevelEnum) -> None:
|
def console(self, console: LogLevel) -> None:
|
||||||
self._console = console
|
self._console = console
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def level(self) -> LogLevelEnum:
|
def level(self) -> LogLevel:
|
||||||
return self._level
|
return self._level
|
||||||
|
|
||||||
@level.setter
|
@level.setter
|
||||||
def level(self, level: LogLevelEnum) -> None:
|
def level(self, level: LogLevel) -> None:
|
||||||
self._level = level
|
self._level = level
|
||||||
|
|||||||
@@ -7,10 +7,8 @@ from cpl.core.typing import T
|
|||||||
class PipeABC(ABC, Generic[T]):
|
class PipeABC(ABC, Generic[T]):
|
||||||
@staticmethod
|
@staticmethod
|
||||||
@abstractmethod
|
@abstractmethod
|
||||||
def to_str(value: T, *args) -> str:
|
def to_str(value: T, *args) -> str: ...
|
||||||
pass
|
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
@abstractmethod
|
@abstractmethod
|
||||||
def from_str(value: str, *args) -> T:
|
def from_str(value: str, *args) -> T: ...
|
||||||
pass
|
|
||||||
|
|||||||
@@ -1,4 +1,5 @@
|
|||||||
from typing import TypeVar, Any
|
from typing import TypeVar, Any
|
||||||
|
from uuid import UUID
|
||||||
|
|
||||||
T = TypeVar("T")
|
T = TypeVar("T")
|
||||||
D = TypeVar("D")
|
D = TypeVar("D")
|
||||||
@@ -8,3 +9,8 @@ Service = TypeVar("Service")
|
|||||||
Source = TypeVar("Source")
|
Source = TypeVar("Source")
|
||||||
|
|
||||||
Messages = list[Any] | Any
|
Messages = list[Any] | Any
|
||||||
|
|
||||||
|
UuidId = str | UUID
|
||||||
|
SerialId = int
|
||||||
|
|
||||||
|
Id = UuidId | SerialId
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
from .b64 import B64
|
from .base64 import Base64
|
||||||
from .credential_manager import CredentialManager
|
from .credential_manager import CredentialManager
|
||||||
from .json_processor import JSONProcessor
|
from .json_processor import JSONProcessor
|
||||||
from .pip import Pip
|
from .pip import Pip
|
||||||
|
|||||||
@@ -2,7 +2,7 @@ import base64
|
|||||||
from typing import Union
|
from typing import Union
|
||||||
|
|
||||||
|
|
||||||
class B64:
|
class Base64:
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def encode(string: str) -> str:
|
def encode(string: str) -> str:
|
||||||
@@ -1,3 +1,4 @@
|
|||||||
|
from enum import Enum
|
||||||
from typing import Type, Optional
|
from typing import Type, Optional
|
||||||
|
|
||||||
from cpl.core.typing import T
|
from cpl.core.typing import T
|
||||||
@@ -40,6 +41,19 @@ def get_value(
|
|||||||
if cast_type == bool:
|
if cast_type == bool:
|
||||||
return value.lower() in ["true", "1"]
|
return value.lower() in ["true", "1"]
|
||||||
|
|
||||||
|
if issubclass(cast_type, Enum):
|
||||||
|
try:
|
||||||
|
return cast_type(value)
|
||||||
|
except ValueError:
|
||||||
|
pass
|
||||||
|
|
||||||
|
try:
|
||||||
|
return cast_type[value]
|
||||||
|
except KeyError:
|
||||||
|
pass
|
||||||
|
|
||||||
|
return default
|
||||||
|
|
||||||
if (cast_type if not hasattr(cast_type, "__origin__") else cast_type.__origin__) == list:
|
if (cast_type if not hasattr(cast_type, "__origin__") else cast_type.__origin__) == list:
|
||||||
if not (value.startswith("[") and value.endswith("]")) and list_delimiter not in value:
|
if not (value.startswith("[") and value.endswith("]")) and list_delimiter not in value:
|
||||||
raise ValueError("List values must be enclosed in square brackets or use a delimiter.")
|
raise ValueError("List values must be enclosed in square brackets or use a delimiter.")
|
||||||
|
|||||||
@@ -1,3 +1,68 @@
|
|||||||
from .database_settings_name_enum import DatabaseSettingsNameEnum
|
from typing import Type
|
||||||
from .database_settings import DatabaseSettings
|
|
||||||
from .table_abc import TableABC
|
from cpl.application.abc import ApplicationABC as _ApplicationABC
|
||||||
|
from cpl.dependency import ServiceCollection as _ServiceCollection
|
||||||
|
from . import mysql as _mysql
|
||||||
|
from . import postgres as _postgres
|
||||||
|
from .table_manager import TableManager
|
||||||
|
|
||||||
|
|
||||||
|
def _with_migrations(self: _ApplicationABC, *paths: list[str]) -> _ApplicationABC:
|
||||||
|
from cpl.application.host import Host
|
||||||
|
|
||||||
|
from cpl.database.service.migration_service import MigrationService
|
||||||
|
migration_service = self._services.get_service(MigrationService)
|
||||||
|
migration_service.with_directory("./scripts")
|
||||||
|
Host.run(migration_service.migrate)
|
||||||
|
|
||||||
|
return self
|
||||||
|
|
||||||
|
def _with_seeders(self: _ApplicationABC) -> _ApplicationABC:
|
||||||
|
from cpl.database.service.seeder_service import SeederService
|
||||||
|
from cpl.application.host import Host
|
||||||
|
|
||||||
|
seeder_service: SeederService = self._services.get_service(SeederService)
|
||||||
|
Host.run(seeder_service.seed)
|
||||||
|
return self
|
||||||
|
|
||||||
|
|
||||||
|
def _add(collection: _ServiceCollection, db_context: Type, default_port: int, server_type: str):
|
||||||
|
from cpl.core.console import Console
|
||||||
|
from cpl.core.configuration import Configuration
|
||||||
|
from cpl.database.abc.db_context_abc import DBContextABC
|
||||||
|
from cpl.database.model.server_type import ServerTypes, ServerType
|
||||||
|
from cpl.database.model.database_settings import DatabaseSettings
|
||||||
|
from cpl.database.service.migration_service import MigrationService
|
||||||
|
from cpl.database.service.seeder_service import SeederService
|
||||||
|
from cpl.database.schema.executed_migration_dao import ExecutedMigrationDao
|
||||||
|
|
||||||
|
try:
|
||||||
|
ServerType.set_server_type(ServerTypes(server_type))
|
||||||
|
Configuration.set("DB_DEFAULT_PORT", default_port)
|
||||||
|
|
||||||
|
collection.add_singleton(DBContextABC, db_context)
|
||||||
|
collection.add_singleton(ExecutedMigrationDao)
|
||||||
|
collection.add_singleton(MigrationService)
|
||||||
|
collection.add_singleton(SeederService)
|
||||||
|
except ImportError as e:
|
||||||
|
Console.error("cpl-database is not installed", str(e))
|
||||||
|
|
||||||
|
|
||||||
|
def add_mysql(collection: _ServiceCollection):
|
||||||
|
from cpl.database.mysql.db_context import DBContext
|
||||||
|
from cpl.database.model import ServerTypes
|
||||||
|
|
||||||
|
_add(collection, DBContext, 3306, ServerTypes.MYSQL.value)
|
||||||
|
|
||||||
|
|
||||||
|
def add_postgres(collection: _ServiceCollection):
|
||||||
|
from cpl.database.mysql.db_context import DBContext
|
||||||
|
from cpl.database.model import ServerTypes
|
||||||
|
|
||||||
|
_add(collection, DBContext, 5432, ServerTypes.POSTGRES.value)
|
||||||
|
|
||||||
|
|
||||||
|
_ServiceCollection.with_module(add_mysql, _mysql.__name__)
|
||||||
|
_ServiceCollection.with_module(add_postgres, _postgres.__name__)
|
||||||
|
_ApplicationABC.extend(_ApplicationABC.with_migrations, _with_migrations)
|
||||||
|
_ApplicationABC.extend(_ApplicationABC.with_seeders, _with_seeders)
|
||||||
|
|||||||
5
src/cpl-database/cpl/database/abc/__init__.py
Normal file
5
src/cpl-database/cpl/database/abc/__init__.py
Normal file
@@ -0,0 +1,5 @@
|
|||||||
|
from .connection_abc import ConnectionABC
|
||||||
|
from .db_context_abc import DBContextABC
|
||||||
|
from .db_join_model_abc import DbJoinModelABC
|
||||||
|
from .db_model_abc import DbModelABC
|
||||||
|
from .db_model_dao_abc import DbModelDaoABC
|
||||||
@@ -1,26 +1,23 @@
|
|||||||
from abc import ABC, abstractmethod
|
from abc import ABC, abstractmethod
|
||||||
|
|
||||||
from cpl.database.database_settings import DatabaseSettings
|
from cpl.database.model.database_settings import DatabaseSettings
|
||||||
from mysql.connector.abstracts import MySQLConnectionAbstract
|
from mysql.connector.abstracts import MySQLConnectionAbstract
|
||||||
from mysql.connector.cursor import MySQLCursorBuffered
|
from mysql.connector.cursor import MySQLCursorBuffered
|
||||||
|
|
||||||
|
|
||||||
class DatabaseConnectionABC(ABC):
|
class ConnectionABC(ABC):
|
||||||
r"""ABC for the :class:`cpl.database.connection.database_connection.DatabaseConnection`"""
|
r"""ABC for the :class:`cpl.database.connection.database_connection.DatabaseConnection`"""
|
||||||
|
|
||||||
@abstractmethod
|
@abstractmethod
|
||||||
def __init__(self):
|
def __init__(self): ...
|
||||||
pass
|
|
||||||
|
|
||||||
@property
|
@property
|
||||||
@abstractmethod
|
@abstractmethod
|
||||||
def server(self) -> MySQLConnectionAbstract:
|
def server(self) -> MySQLConnectionAbstract: ...
|
||||||
pass
|
|
||||||
|
|
||||||
@property
|
@property
|
||||||
@abstractmethod
|
@abstractmethod
|
||||||
def cursor(self) -> MySQLCursorBuffered:
|
def cursor(self) -> MySQLCursorBuffered: ...
|
||||||
pass
|
|
||||||
|
|
||||||
@abstractmethod
|
@abstractmethod
|
||||||
def connect(self, database_settings: DatabaseSettings):
|
def connect(self, database_settings: DatabaseSettings):
|
||||||
876
src/cpl-database/cpl/database/abc/data_access_object_abc.py
Normal file
876
src/cpl-database/cpl/database/abc/data_access_object_abc.py
Normal file
@@ -0,0 +1,876 @@
|
|||||||
|
import datetime
|
||||||
|
from abc import ABC, abstractmethod
|
||||||
|
from enum import Enum
|
||||||
|
from types import NoneType
|
||||||
|
from typing import Generic, Optional, Union, Type, List, Any
|
||||||
|
|
||||||
|
from cpl.core.ctx import get_user
|
||||||
|
from cpl.core.typing import T, Id
|
||||||
|
from cpl.core.utils import String
|
||||||
|
from cpl.core.utils.get_value import get_value
|
||||||
|
from cpl.database.abc.db_context_abc import DBContextABC
|
||||||
|
from cpl.database.const import DATETIME_FORMAT
|
||||||
|
from cpl.database.db_logger import DBLogger
|
||||||
|
from cpl.database.external_data_temp_table_builder import ExternalDataTempTableBuilder
|
||||||
|
from cpl.database.postgres.sql_select_builder import SQLSelectBuilder
|
||||||
|
from cpl.database.typing import T_DBM, Attribute, AttributeFilters, AttributeSorts
|
||||||
|
|
||||||
|
|
||||||
|
class DataAccessObjectABC(ABC, Generic[T_DBM]):
|
||||||
|
|
||||||
|
@abstractmethod
|
||||||
|
def __init__(self, source: str, model_type: Type[T_DBM], table_name: str):
|
||||||
|
from cpl.dependency.service_provider_abc import ServiceProviderABC
|
||||||
|
|
||||||
|
self._db = ServiceProviderABC.get_global_service(DBContextABC)
|
||||||
|
|
||||||
|
self._logger = DBLogger(source)
|
||||||
|
self._model_type = model_type
|
||||||
|
self._table_name = table_name
|
||||||
|
|
||||||
|
self._logger = DBLogger(source)
|
||||||
|
self._model_type = model_type
|
||||||
|
self._table_name = table_name
|
||||||
|
|
||||||
|
self._default_filter_condition = None
|
||||||
|
|
||||||
|
self.__attributes: dict[str, type] = {}
|
||||||
|
|
||||||
|
self.__db_names: dict[str, str] = {}
|
||||||
|
self.__foreign_tables: dict[str, tuple[str, str]] = {}
|
||||||
|
self.__foreign_table_keys: dict[str, str] = {}
|
||||||
|
self.__foreign_dao: dict[str, "DataAccessObjectABC"] = {}
|
||||||
|
|
||||||
|
self.__date_attributes: set[str] = set()
|
||||||
|
self.__ignored_attributes: set[str] = set()
|
||||||
|
|
||||||
|
self.__primary_key = "id"
|
||||||
|
self.__primary_key_type = int
|
||||||
|
self._external_fields: dict[str, ExternalDataTempTableBuilder] = {}
|
||||||
|
|
||||||
|
@property
|
||||||
|
def table_name(self) -> str:
|
||||||
|
return self._table_name
|
||||||
|
|
||||||
|
def has_attribute(self, attr_name: Attribute) -> bool:
|
||||||
|
"""
|
||||||
|
Check if the attribute exists in the DAO
|
||||||
|
:param Attribute attr_name: Name of the attribute
|
||||||
|
:return: True if the attribute exists, False otherwise
|
||||||
|
"""
|
||||||
|
return attr_name in self.__attributes
|
||||||
|
|
||||||
|
def attribute(
|
||||||
|
self,
|
||||||
|
attr_name: Attribute,
|
||||||
|
attr_type: type,
|
||||||
|
db_name: str = None,
|
||||||
|
ignore=False,
|
||||||
|
primary_key=False,
|
||||||
|
aliases: list[str] = None,
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
Add an attribute for db and object mapping to the data access object
|
||||||
|
:param Attribute attr_name: Name of the attribute in the object
|
||||||
|
:param type attr_type: Python type of the attribute to cast db value to
|
||||||
|
:param str db_name: Name of the field in the database, if None the attribute lowered attr_name without "_" is used
|
||||||
|
:param bool ignore: Defines if field is ignored for create and update (for e.g. auto increment fields or created/updated fields)
|
||||||
|
:param bool primary_key: Defines if field is the primary key
|
||||||
|
:param list[str] aliases: List of aliases for the attribute name
|
||||||
|
:return:
|
||||||
|
"""
|
||||||
|
if isinstance(attr_name, property):
|
||||||
|
attr_name = attr_name.fget.__name__
|
||||||
|
|
||||||
|
self.__attributes[attr_name] = attr_type
|
||||||
|
if ignore:
|
||||||
|
self.__ignored_attributes.add(attr_name)
|
||||||
|
|
||||||
|
if not db_name:
|
||||||
|
db_name = attr_name.lower().replace("_", "")
|
||||||
|
|
||||||
|
self.__db_names[attr_name] = db_name
|
||||||
|
self.__db_names[db_name] = db_name
|
||||||
|
|
||||||
|
if aliases is not None:
|
||||||
|
for alias in aliases:
|
||||||
|
if alias in self.__db_names:
|
||||||
|
raise ValueError(f"Alias {alias} already exists")
|
||||||
|
self.__db_names[alias] = db_name
|
||||||
|
|
||||||
|
if primary_key:
|
||||||
|
self.__primary_key = db_name
|
||||||
|
self.__primary_key_type = attr_type
|
||||||
|
|
||||||
|
if attr_type in [datetime, datetime.datetime]:
|
||||||
|
self.__date_attributes.add(attr_name)
|
||||||
|
self.__date_attributes.add(db_name)
|
||||||
|
|
||||||
|
def reference(
|
||||||
|
self,
|
||||||
|
attr: Attribute,
|
||||||
|
primary_attr: Attribute,
|
||||||
|
foreign_attr: Attribute,
|
||||||
|
table_name: str,
|
||||||
|
reference_dao: "DataAccessObjectABC" = None,
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
Add a reference to another table for the given attribute
|
||||||
|
:param Attribute attr: Name of the attribute in the object
|
||||||
|
:param str primary_attr: Name of the primary key in the foreign object
|
||||||
|
:param str foreign_attr: Name of the foreign key in the object
|
||||||
|
:param str table_name: Name of the table to reference
|
||||||
|
:param DataAccessObjectABC reference_dao: The data access object for the referenced table
|
||||||
|
:return:
|
||||||
|
"""
|
||||||
|
if isinstance(attr, property):
|
||||||
|
attr = attr.fget.__name__
|
||||||
|
|
||||||
|
if isinstance(primary_attr, property):
|
||||||
|
primary_attr = primary_attr.fget.__name__
|
||||||
|
|
||||||
|
primary_attr = primary_attr.lower().replace("_", "")
|
||||||
|
|
||||||
|
if isinstance(foreign_attr, property):
|
||||||
|
foreign_attr = foreign_attr.fget.__name__
|
||||||
|
|
||||||
|
foreign_attr = foreign_attr.lower().replace("_", "")
|
||||||
|
|
||||||
|
self.__foreign_table_keys[attr] = foreign_attr
|
||||||
|
if reference_dao is not None:
|
||||||
|
self.__foreign_dao[attr] = reference_dao
|
||||||
|
|
||||||
|
if table_name == self._table_name:
|
||||||
|
return
|
||||||
|
|
||||||
|
self.__foreign_tables[attr] = (
|
||||||
|
table_name,
|
||||||
|
f"{table_name}.{primary_attr} = {self._table_name}.{foreign_attr}",
|
||||||
|
)
|
||||||
|
|
||||||
|
def use_external_fields(self, builder: ExternalDataTempTableBuilder):
|
||||||
|
self._external_fields[builder.table_name] = builder
|
||||||
|
|
||||||
|
def to_object(self, result: dict) -> T_DBM:
|
||||||
|
"""
|
||||||
|
Convert a result from the database to an object
|
||||||
|
:param dict result: Result from the database
|
||||||
|
:return:
|
||||||
|
"""
|
||||||
|
value_map: dict[str, T] = {}
|
||||||
|
|
||||||
|
for db_name, value in result.items():
|
||||||
|
# Find the attribute name corresponding to the db_name
|
||||||
|
attr_name = next((k for k, v in self.__db_names.items() if v == db_name), None)
|
||||||
|
if attr_name:
|
||||||
|
value_map[attr_name] = self._get_value_from_sql(self.__attributes[attr_name], value)
|
||||||
|
|
||||||
|
return self._model_type(**value_map)
|
||||||
|
|
||||||
|
def to_dict(self, obj: T_DBM) -> dict:
|
||||||
|
"""
|
||||||
|
Convert an object to a dictionary
|
||||||
|
:param T_DBM obj: Object to convert
|
||||||
|
:return:
|
||||||
|
"""
|
||||||
|
value_map: dict[str, Any] = {}
|
||||||
|
|
||||||
|
for attr_name, attr_type in self.__attributes.items():
|
||||||
|
value = getattr(obj, attr_name)
|
||||||
|
if isinstance(value, datetime.datetime):
|
||||||
|
value = value.strftime(DATETIME_FORMAT)
|
||||||
|
elif isinstance(value, Enum):
|
||||||
|
value = value.value
|
||||||
|
|
||||||
|
value_map[attr_name] = value
|
||||||
|
|
||||||
|
for ex_fname in self._external_fields:
|
||||||
|
ex_field = self._external_fields[ex_fname]
|
||||||
|
for ex_attr in ex_field.fields:
|
||||||
|
if ex_attr == self.__primary_key:
|
||||||
|
continue
|
||||||
|
|
||||||
|
value_map[ex_attr] = getattr(obj, ex_attr, None)
|
||||||
|
|
||||||
|
return value_map
|
||||||
|
|
||||||
|
async def count(self, filters: AttributeFilters = None) -> int:
|
||||||
|
result = await self._prepare_query(filters=filters, for_count=True)
|
||||||
|
return result[0]["count"] if result else 0
|
||||||
|
|
||||||
|
async def get_history(
    self,
    entry_id: int,
    by_key: str = None,
    when: datetime.datetime = None,
    until: datetime.datetime = None,
    without_deleted: bool = False,
) -> list[T_DBM]:
    """
    Retrieve the history of an entry from the ``<table>_history`` table.

    :param entry_id: The ID of the entry to retrieve history for.
    :param by_key: The key to filter by (default is the primary key).
    :param when: Match only history rows whose ``updated`` timestamp equals this.
    :param until: Match only history rows updated at or before this timestamp.
    :param without_deleted: Exclude deleted entries if True.
    :return: A list of historical entries as objects, newest first.
    """
    f_tables = list(self.__foreign_tables.keys())

    history_table = f"{self._table_name}_history"
    builder = SQLSelectBuilder(history_table, self.__primary_key)

    builder.with_attribute("*")
    builder.with_value_condition(
        f"{history_table}.{by_key or self.__primary_key}",
        "=",
        str(entry_id),
        f_tables,
    )

    if self._default_filter_condition:
        builder.with_condition(self._default_filter_condition, "", f_tables)

    if without_deleted:
        builder.with_value_condition(f"{history_table}.deleted", "=", "false", f_tables)

    if when:
        # Compare through the TO_CHAR projection so both sides share one format.
        builder.with_value_condition(
            self._attr_from_date_to_char(f"{history_table}.updated"),
            "=",
            f"'{when.strftime(DATETIME_FORMAT)}'",
            f_tables,
        )

    if until:
        builder.with_value_condition(
            self._attr_from_date_to_char(f"{history_table}.updated"),
            "<=",
            f"'{until.strftime(DATETIME_FORMAT)}'",
            f_tables,
        )

    builder.with_order_by(f"{history_table}.updated", "DESC")

    query = await builder.build()
    result = await self._db.select_map(query)
    return [self.to_object(x) for x in result] if result else []
|
||||||
|
|
||||||
|
async def get_all(self) -> List[T_DBM]:
    """Return every row as an object, ordered by primary key ascending."""
    rows = await self._prepare_query(sorts=[{self.__primary_key: "asc"}])
    if not rows:
        return []
    return [self.to_object(row) for row in rows]
|
||||||
|
|
||||||
|
async def get_by_id(self, id: Union[int, str]) -> Optional[T_DBM]:
    """Return the row with the given primary key as an object, or None."""
    rows = await self._prepare_query(filters=[{self.__primary_key: id}], sorts=[{self.__primary_key: "asc"}])
    if not rows:
        return None
    return self.to_object(rows[0])
|
||||||
|
|
||||||
|
async def find_by_id(self, id: Union[int, str]) -> Optional[T_DBM]:
    """Look up a row by primary key; None when it does not exist."""
    rows = await self._prepare_query(filters=[{self.__primary_key: id}], sorts=[{self.__primary_key: "asc"}])
    return self.to_object(rows[0]) if rows else None
|
||||||
|
|
||||||
|
async def get_by(
    self,
    filters: AttributeFilters = None,
    sorts: AttributeSorts = None,
    take: int = None,
    skip: int = None,
) -> list[T_DBM]:
    """
    Get all matching rows as objects; raise when nothing matches.

    :param filters: Conditions to filter the query.
    :param sorts: Sorting attributes and directions.
    :param take: Limit the number of results.
    :param skip: Offset the results.
    :return: Non-empty list of matching objects.
    :raises ValueError: If no result was found.
    """
    result = await self._prepare_query(filters, sorts, take, skip)
    # One truthiness check covers both None and the empty list; the old
    # `len(result) == 0` re-check and the `else []` fallback were unreachable.
    if not result:
        raise ValueError("No result found")
    return [self.to_object(x) for x in result]
|
||||||
|
|
||||||
|
async def get_single_by(
    self,
    filters: AttributeFilters = None,
    sorts: AttributeSorts = None,
    take: int = None,
    skip: int = None,
) -> T_DBM:
    """
    Get exactly one matching row; raise when zero or several rows match.

    :raises ValueError: If no result or more than one result was found.
    """
    rows = await self._prepare_query(filters, sorts, take, skip)
    if not rows:
        raise ValueError("No result found")
    if len(rows) > 1:
        raise ValueError("More than one result found")
    return self.to_object(rows[0])
|
||||||
|
|
||||||
|
async def find_by(
    self,
    filters: AttributeFilters = None,
    sorts: AttributeSorts = None,
    take: int = None,
    skip: int = None,
) -> list[T_DBM]:
    """Find all matching rows; empty list when nothing matches."""
    rows = await self._prepare_query(filters, sorts, take, skip)
    if not rows:
        return []
    return [self.to_object(row) for row in rows]
|
||||||
|
|
||||||
|
async def find_single_by(
    self,
    filters: AttributeFilters = None,
    sorts: AttributeSorts = None,
    take: int = None,
    skip: int = None,
) -> Optional[T_DBM]:
    """
    Find at most one matching row; None when nothing matches.

    :param filters: Conditions to filter the query.
    :param sorts: Sorting attributes and directions.
    :param take: Limit the number of results.
    :param skip: Offset the results.
    :return: The single matching object, or None.
    :raises ValueError: If more than one result was found.
    """
    result = await self._prepare_query(filters, sorts, take, skip)
    # Guard before len(): the driver may return None/empty (parity with
    # get_single_by, which checks emptiness first).
    if not result:
        return None
    if len(result) > 1:
        raise ValueError("More than one result found")
    return self.to_object(result[0])
|
||||||
|
|
||||||
|
async def touch(self, obj: T_DBM):
    """
    Touch the entry to update the last updated date.

    :param T_DBM obj: Object whose row gets its ``updated`` column bumped to NOW().
    :return:
    """
    await self._db.execute(
        f"""
        UPDATE {self._table_name}
        SET updated = NOW()
        WHERE {self.__primary_key} = {self._get_primary_key_value_sql(obj)};
        """
    )
|
||||||
|
|
||||||
|
async def touch_many_by_id(self, ids: list[Id]):
    """
    Touch the entries to update the last updated date.

    :param ids: Primary key values of the rows to touch.
    :return:
    """
    if len(ids) == 0:
        return

    # Render ids through _get_value_sql so string keys are quoted; the old
    # str(x) join produced invalid SQL for non-numeric primary keys.
    id_list = ", ".join(self._get_value_sql(x) for x in ids)
    await self._db.execute(
        f"""
        UPDATE {self._table_name}
        SET updated = NOW()
        WHERE {self.__primary_key} IN ({id_list});
        """
    )
|
||||||
|
|
||||||
|
async def _build_create_statement(self, obj: T_DBM, skip_editor=False) -> str:
    """
    Build an INSERT ... RETURNING statement for the object.

    :param T_DBM obj: Object to persist.
    :param skip_editor: Omit the EditorId column when True.
    :return: SQL text returning the new primary key.
    """
    # Persist every mapped attribute except the explicitly ignored ones.
    allowed_fields = [x for x in self.__attributes.keys() if x not in self.__ignored_attributes]

    fields = ", ".join([self.__db_names[x] for x in allowed_fields])
    # Prepend EditorId unless skipped; the comma appears only when both parts exist.
    fields = f"{'EditorId' if not skip_editor else ''}{f', {fields}' if not skip_editor and len(fields) > 0 else f'{fields}'}"

    values = ", ".join([self._get_value_sql(getattr(obj, x)) for x in allowed_fields])
    values = f"{await self._get_editor_id(obj) if not skip_editor else ''}{f', {values}' if not skip_editor and len(values) > 0 else f'{values}'}"

    return f"""
        INSERT INTO {self._table_name} (
            {fields}
        ) VALUES (
            {values}
        )
        RETURNING {self.__primary_key};
    """
|
||||||
|
|
||||||
|
async def create(self, obj: T_DBM, skip_editor=False) -> int:
    """
    Insert a single object and return its new primary key.

    :param T_DBM obj: Object to persist.
    :param skip_editor: Skip writing the EditorId column when True.
    :return: Primary key of the created row.
    """
    self._logger.debug(f"create {type(obj).__name__} {obj.__dict__}")

    statement = await self._build_create_statement(obj, skip_editor)
    rows = await self._db.execute(statement)
    return self._get_value_from_sql(self.__primary_key_type, rows[0][0])
|
||||||
|
|
||||||
|
async def create_many(self, objs: list[T_DBM], skip_editor=False) -> list[int]:
    """
    Insert several objects in one round trip; return their new primary keys.

    :param objs: Objects to persist (no-op on an empty list).
    :param skip_editor: Skip writing the EditorId column when True.
    :return: Primary keys of the created rows.
    """
    if len(objs) == 0:
        return []
    self._logger.debug(f"create many {type(objs[0]).__name__} {len(objs)} {[x.__dict__ for x in objs]}")

    statements = [await self._build_create_statement(obj, skip_editor) for obj in objs]
    result = await self._db.execute("".join(statements))
    return [self._get_value_from_sql(self.__primary_key_type, row[0]) for row in result]
|
||||||
|
|
||||||
|
async def _build_update_statement(self, obj: T_DBM, skip_editor=False) -> str:
    """
    Build an UPDATE statement persisting every non-ignored attribute.

    :param T_DBM obj: Object to persist.
    :param skip_editor: Omit the EditorId assignment when True.
    :return: UPDATE SQL text.
    """
    allowed_fields = [x for x in self.__attributes.keys() if x not in self.__ignored_attributes]

    fields = ", ".join(
        [f"{self.__db_names[x]} = {self._get_value_sql(getattr(obj, x, None))}" for x in allowed_fields]
    )
    # Prepend the EditorId assignment unless skipped; comma only when both parts exist.
    fields = f"{f'EditorId = {await self._get_editor_id(obj)}' if not skip_editor else ''}{f', {fields}' if not skip_editor and len(fields) > 0 else f'{fields}'}"

    return f"""
        UPDATE {self._table_name}
        SET {fields}
        WHERE {self.__primary_key} = {self._get_primary_key_value_sql(obj)};
    """
|
||||||
|
|
||||||
|
async def update(self, obj: T_DBM, skip_editor=False):
    """Persist the object's current attribute values to its row."""
    self._logger.debug(f"update {type(obj).__name__} {obj.__dict__}")
    statement = await self._build_update_statement(obj, skip_editor)
    await self._db.execute(statement)
|
||||||
|
|
||||||
|
async def update_many(self, objs: list[T_DBM], skip_editor=False):
    """Persist several objects in a single round trip (no-op on empty list)."""
    if len(objs) == 0:
        return
    self._logger.debug(f"update many {type(objs[0]).__name__} {len(objs)} {[x.__dict__ for x in objs]}")

    statements = [await self._build_update_statement(obj, skip_editor) for obj in objs]
    await self._db.execute("".join(statements))
|
||||||
|
|
||||||
|
async def _build_delete_statement(self, obj: T_DBM, hard_delete: bool = False) -> str:
    """
    Build a DELETE (hard) or soft-delete UPDATE statement for the object.

    :param T_DBM obj: Object whose row is removed.
    :param hard_delete: Emit a real DELETE instead of setting Deleted = true.
    :return: SQL text.
    """
    if hard_delete:
        return f"""
            DELETE FROM {self._table_name}
            WHERE {self.__primary_key} = {self._get_primary_key_value_sql(obj)};
        """

    # Soft delete: flag the row and record who deleted it.
    return f"""
        UPDATE {self._table_name}
        SET EditorId = {await self._get_editor_id(obj)},
            Deleted = true
        WHERE {self.__primary_key} = {self._get_primary_key_value_sql(obj)};
    """
|
||||||
|
|
||||||
|
async def delete(self, obj: T_DBM, hard_delete: bool = False):
    """Delete the object's row (soft delete unless hard_delete is True)."""
    self._logger.debug(f"delete {type(obj).__name__} {obj.__dict__}")
    statement = await self._build_delete_statement(obj, hard_delete)
    await self._db.execute(statement)
|
||||||
|
|
||||||
|
async def delete_many(self, objs: list[T_DBM], hard_delete: bool = False):
    """Delete several rows in one round trip (soft delete by default)."""
    if len(objs) == 0:
        return
    self._logger.debug(f"delete many {type(objs[0]).__name__} {len(objs)} {[x.__dict__ for x in objs]}")

    statements = [await self._build_delete_statement(obj, hard_delete) for obj in objs]
    await self._db.execute("".join(statements))
|
||||||
|
|
||||||
|
async def _build_restore_statement(self, obj: T_DBM) -> str:
    """
    Build an UPDATE statement that clears the soft-delete flag.

    :param T_DBM obj: Object whose row is restored.
    :return: UPDATE SQL text (also records the restoring editor).
    """
    return f"""
        UPDATE {self._table_name}
        SET EditorId = {await self._get_editor_id(obj)},
            Deleted = false
        WHERE {self.__primary_key} = {self._get_primary_key_value_sql(obj)};
    """
|
||||||
|
|
||||||
|
async def restore(self, obj: T_DBM):
    """Clear the soft-delete flag on the object's row."""
    self._logger.debug(f"restore {type(obj).__name__} {obj.__dict__}")
    statement = await self._build_restore_statement(obj)
    await self._db.execute(statement)
|
||||||
|
|
||||||
|
async def restore_many(self, objs: list[T_DBM]):
    """Clear the soft-delete flag on several rows in one round trip."""
    if len(objs) == 0:
        return
    self._logger.debug(f"restore many {type(objs[0]).__name__} {len(objs)} {objs[0].__dict__}")

    statements = [await self._build_restore_statement(obj) for obj in objs]
    await self._db.execute("".join(statements))
|
||||||
|
|
||||||
|
async def _prepare_query(
    self,
    filters: AttributeFilters = None,
    sorts: AttributeSorts = None,
    take: int = None,
    skip: int = None,
    for_count: bool = False,
) -> list[dict]:
    """
    Prepares and executes a query using the SQLBuilder with the given parameters.

    :param filters: Conditions to filter the query.
    :param sorts: Sorting attributes and directions.
    :param take: Limit the number of results.
    :param skip: Offset the results.
    :param for_count: Select COUNT(*) instead of the row data.
    :return: Query result as a list of dictionaries.
    """
    external_table_deps = []
    builder = SQLSelectBuilder(self._table_name, self.__primary_key)

    # Register every known temp table; only the ones actually referenced by
    # filters/sorts are activated below via use_temp_table.
    for temp in self._external_fields:
        builder.with_temp_table(self._external_fields[temp])

    if for_count:
        builder.with_attribute("COUNT(*)", ignore_table_name=True)
    else:
        builder.with_attribute("*")

    # Join every configured foreign table up front.
    for attr in self.__foreign_tables:
        table, join_condition = self.__foreign_tables[attr]
        builder.with_left_join(table, join_condition)

    if filters:
        await self._build_conditions(builder, filters, external_table_deps)

    if sorts:
        self._build_sorts(builder, sorts, external_table_deps)

    if take:
        builder.with_limit(take)

    if skip:
        builder.with_offset(skip)

    for external_table in external_table_deps:
        builder.use_temp_table(external_table)

    query = await builder.build()
    return await self._db.select_map(query)
|
||||||
|
|
||||||
|
async def _build_conditions(
    self,
    builder: SQLSelectBuilder,
    filters: AttributeFilters,
    external_table_deps: list[str],
):
    """
    Builds SQL conditions from GraphQL-like filters and adds them to the SQLBuilder.

    :param builder: The SQLBuilder instance to add conditions to.
    :param filters: GraphQL-like filter structure.
    :param external_table_deps: List to store external table dependencies.
    """
    # Accept a single filter group as well as a list of groups.
    if not isinstance(filters, list):
        filters = [filters]

    for filter_group in filters:
        sql_conditions = self._graphql_to_sql_conditions(filter_group, external_table_deps)
        for attr, operator, value in sql_conditions:
            # Map model-level foreign keys onto their DB column names.
            if attr in self.__foreign_table_keys:
                attr = self.__foreign_table_keys[attr]

            # Attributes qualified with a transitively joined table need an extra LEFT JOIN.
            recursive_join = self._get_recursive_reference_join(attr)
            if recursive_join is not None:
                builder.with_left_join(*recursive_join)

            # Attributes served by a temp table register that table as a dependency.
            external_table = self._get_external_field_key(attr)
            if external_table is not None:
                external_table_deps.append(external_table)

            if operator == "fuzzy":
                builder.with_levenshtein_condition(attr)
            elif operator in [
                "IS NULL",
                "IS NOT NULL",
            ]:  # operator without value
                builder.with_condition(
                    attr,
                    operator,
                    [
                        x[0]
                        for fdao in self.__foreign_dao
                        for x in self.__foreign_dao[fdao].__foreign_tables.values()
                    ],
                )
            else:
                # Date columns are compared through their TO_CHAR projection.
                if attr in self.__date_attributes or String.to_snake_case(attr) in self.__date_attributes:
                    attr = self._attr_from_date_to_char(f"{self._table_name}.{attr}")

                builder.with_value_condition(
                    attr,
                    operator,
                    self._get_value_sql(value),
                    [
                        x[0]
                        for fdao in self.__foreign_dao
                        for x in self.__foreign_dao[fdao].__foreign_tables.values()
                    ],
                )
|
||||||
|
|
||||||
|
def _graphql_to_sql_conditions(
    self, graphql_structure: dict, external_table_deps: list[str]
) -> list[tuple[str, str, Any]]:
    """
    Converts a GraphQL-like structure to SQL conditions.

    :param graphql_structure: The GraphQL-like filter structure.
    :param external_table_deps: List to track external table dependencies.
    :return: A list of tuples (attribute, operator, value).
    """

    operators = {
        "equal": "=",
        "notEqual": "!=",
        "greater": ">",
        "greaterOrEqual": ">=",
        "less": "<",
        "lessOrEqual": "<=",
        "isNull": "IS NULL",
        "isNotNull": "IS NOT NULL",
        "contains": "LIKE",  # Special handling below
        "notContains": "NOT LIKE",  # Special handling below
        "startsWith": "LIKE",  # Special handling below
        "endsWith": "LIKE",  # Special handling below
        "in": "IN",
        "notIn": "NOT IN",
    }
    conditions = []

    def parse_node(node, parent_key=None, parent_dao=None):
        # BUGFIX: test for list BEFORE the dict guard. Previously
        # `if not isinstance(node, dict): return` came first, which made the
        # list branch unreachable and silently dropped implicit IN filters.
        if isinstance(node, list):
            conditions.append((parent_key, "IN", node))
            return

        if not isinstance(node, dict):
            return

        for key, value in node.items():
            # Keys may arrive as property objects; resolve to their name.
            if isinstance(key, property):
                key = key.fget.__name__

            external_fields_table_name_by_parent = self._get_external_field_key(parent_key)
            external_fields_table_name = self._get_external_field_key(key)
            external_field = (
                external_fields_table_name
                if external_fields_table_name_by_parent is None
                else external_fields_table_name_by_parent
            )

            if key == "fuzzy":
                self._handle_fuzzy_filter_conditions(conditions, external_table_deps, value)
            elif parent_dao is not None and key in parent_dao.__db_names:
                # Attribute of a (transitively) joined DAO: qualify with its table.
                parse_node(value, f"{parent_dao.table_name}.{key}")
                continue

            elif external_field is not None:
                external_table_deps.append(external_field)
                parse_node(value, f"{external_field}.{key}")
            elif parent_key in self.__foreign_table_keys:
                if key in operators:
                    parse_node({key: value}, self.__foreign_table_keys[parent_key])
                    continue

                if parent_key in self.__foreign_dao:
                    foreign_dao = self.__foreign_dao[parent_key]
                    if key in foreign_dao.__foreign_tables:
                        parse_node(
                            value,
                            f"{self.__foreign_tables[parent_key][0]}.{foreign_dao.__foreign_table_keys[key]}",
                            foreign_dao.__foreign_dao[key],
                        )
                        continue

                if parent_key in self.__foreign_tables:
                    parse_node(value, f"{self.__foreign_tables[parent_key][0]}.{key}")
                    continue

                parse_node({parent_key: value})
            elif key in operators:
                operator = operators[key]
                if key == "contains" or key == "notContains":
                    value = f"%{value}%"
                elif key == "in" or key == "notIn":
                    value = value
                elif key == "startsWith":
                    value = f"{value}%"
                elif key == "endsWith":
                    value = f"%{value}"
                elif key == "isNull" or key == "isNotNull":
                    is_null_value = value.get("equal", None) if isinstance(value, dict) else value

                    if is_null_value is None:
                        operator = operators[key]
                    elif (key == "isNull" and is_null_value) or (key == "isNotNull" and not is_null_value):
                        operator = "IS NULL"
                    else:
                        operator = "IS NOT NULL"

                    # NOTE(review): this branch appends here AND falls through to
                    # the append below, producing two conditions — confirm intended.
                    conditions.append((parent_key, operator, None))
                elif (key == "equal" or key == "notEqual") and value is None:
                    operator = operators["isNull"]

                conditions.append((parent_key, operator, value))

            elif isinstance(value, dict):
                if key in self.__foreign_table_keys:
                    parse_node(value, key)
                elif key in self.__db_names and parent_key is not None:
                    parse_node({f"{parent_key}": value})
                elif key in self.__db_names:
                    parse_node(value, self.__db_names[key])
                else:
                    parse_node(value, key)
            elif value is None:
                conditions.append((self.__db_names[key], "IS NULL", value))
            else:
                # Bare scalar: plain equality on the mapped column.
                conditions.append((self.__db_names[key], "=", value))

    parse_node(graphql_structure)
    return conditions
|
||||||
|
|
||||||
|
def _handle_fuzzy_filter_conditions(self, conditions, external_field_table_deps, sub_values):
    """
    Translate a ``fuzzy`` filter node into levenshtein() SQL conditions.

    :param conditions: Accumulator of (attr, operator, value) tuples to append to.
    :param external_field_table_deps: List recording temp-table dependencies.
    :param sub_values: The fuzzy node; must contain 'fields' and 'term',
        optionally 'threshold' (default 5).
    :raises ValueError: If 'fields' or 'term' is missing.
    """
    # Extract fuzzy filter parameters
    fuzzy_fields = get_value(sub_values, "fields", list[str])
    fuzzy_term = get_value(sub_values, "term", str)
    fuzzy_threshold = get_value(sub_values, "threshold", int, 5)

    if not fuzzy_fields or not fuzzy_term:
        raise ValueError("Fuzzy filter must include 'fields' and 'term'.")

    fuzzy_fields_db_names = []

    # Map fields to their database names
    for fuzzy_field in fuzzy_fields:
        external_fields_table_name = self._get_external_field_key(fuzzy_field)
        if external_fields_table_name is not None:
            # External (temp-table) field: qualify with the temp table and record it.
            external_fields_table = self._external_fields[external_fields_table_name]
            fuzzy_fields_db_names.append(f"{external_fields_table.table_name}.{fuzzy_field}")
            external_field_table_deps.append(external_fields_table.table_name)
        elif fuzzy_field in self.__db_names:
            fuzzy_fields_db_names.append(f"{self._table_name}.{self.__db_names[fuzzy_field]}")
        elif fuzzy_field in self.__foreign_tables:
            fuzzy_fields_db_names.append(f"{self._table_name}.{self.__foreign_table_keys[fuzzy_field]}")
        else:
            # NOTE(review): __db_names values are used as plain strings elsewhere,
            # so the trailing [0] takes the FIRST CHARACTER of the db name here —
            # confirm this is intended and not a leftover from a tuple-valued map.
            fuzzy_fields_db_names.append(self.__db_names[String.to_snake_case(fuzzy_field)][0])

    # Build fuzzy conditions for each field
    fuzzy_conditions = self._build_fuzzy_conditions(fuzzy_fields_db_names, fuzzy_term, fuzzy_threshold)

    # Combine conditions with OR and append to the main conditions
    conditions.append((f"({' OR '.join(fuzzy_conditions)})", "fuzzy", None))
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _build_fuzzy_conditions(fields: list[str], term: str, threshold: int = 10) -> list[str]:
|
||||||
|
conditions = []
|
||||||
|
for field in fields:
|
||||||
|
conditions.append(f"levenshtein({field}::TEXT, '{term}') <= {threshold}") # Adjust the threshold as needed
|
||||||
|
|
||||||
|
return conditions
|
||||||
|
|
||||||
|
def _get_external_field_key(self, field_name: str) -> Optional[str]:
    """
    Returns the key to get the external field if found, otherwise None.

    :param str field_name: The name of the field to search for.
    :return: The key if found, otherwise None.
    :rtype: Optional[str]
    """
    if field_name is None:
        return None

    # First temp table that serves this field wins; own columns take precedence.
    return next(
        (
            key
            for key, builder in self._external_fields.items()
            if field_name in builder.fields and field_name not in self.__db_names
        ),
        None,
    )
|
||||||
|
|
||||||
|
def _get_recursive_reference_join(self, attr: str) -> Optional[tuple[str, str]]:
    """
    Resolve a table-qualified attribute to a (table, join_condition) pair when
    it refers to a directly or transitively known foreign table; None otherwise.
    """
    parts = attr.split(".")
    table_name = ".".join(parts[:-1])

    # Unqualified attributes and our own table never need an extra join.
    if table_name in ("", self._table_name):
        return None

    # Collect joins from this DAO and from each foreign DAO (later entries win).
    candidates = [x for x in self.__foreign_tables.values() if x[0] != self._table_name]
    for fdao in self.__foreign_dao:
        candidates.extend(self.__foreign_dao[fdao].__foreign_tables.values())
    all_foreign_tables = {name: condition for name, condition in candidates}

    if table_name not in all_foreign_tables:
        return None

    return table_name, all_foreign_tables[table_name]
|
||||||
|
|
||||||
|
def _build_sorts(
    self,
    builder: SQLSelectBuilder,
    sorts: AttributeSorts,
    external_table_deps: list[str],
):
    """
    Resolves complex sorting structures into SQL-compatible sorting conditions.
    Tracks external table dependencies.

    :param builder: The SQLBuilder instance to add sorting to.
    :param sorts: Sorting attributes and directions in a complex structure.
    :param external_table_deps: List to track external table dependencies.
    :raises ValueError: On an invalid direction or malformed structure.
    """

    def parse_sort_node(node, parent_key=None):
        # Dicts nest attribute paths; the leaf value is an "asc"/"desc" string.
        if isinstance(node, dict):
            for key, value in node.items():
                if isinstance(value, dict):
                    # Recursively parse nested structures
                    parse_sort_node(value, key)
                elif isinstance(value, str) and value.lower() in ["asc", "desc"]:
                    # External (temp-table) columns get qualified and recorded.
                    external_table = self._get_external_field_key(key)
                    if external_table:
                        external_table_deps.append(external_table)
                        key = f"{external_table}.{key}"

                    # Columns of a joined foreign table get its table prefix.
                    if parent_key in self.__foreign_tables:
                        key = f"{self.__foreign_tables[parent_key][0]}.{key}"
                    builder.with_order_by(key, value.upper())
                else:
                    raise ValueError(f"Invalid sort direction: {value}")
        elif isinstance(node, list):
            for item in node:
                parse_sort_node(item)
        else:
            raise ValueError(f"Invalid sort structure: {node}")

    parse_sort_node(sorts)
|
||||||
|
|
||||||
|
def _get_value_sql(self, value: Any) -> str:
|
||||||
|
if isinstance(value, str):
|
||||||
|
if value.lower() == "null":
|
||||||
|
return "NULL"
|
||||||
|
return f"'{value}'"
|
||||||
|
|
||||||
|
if isinstance(value, NoneType):
|
||||||
|
return "NULL"
|
||||||
|
|
||||||
|
if value is None:
|
||||||
|
return "NULL"
|
||||||
|
|
||||||
|
if isinstance(value, Enum):
|
||||||
|
return f"'{value.value}'"
|
||||||
|
|
||||||
|
if isinstance(value, bool):
|
||||||
|
return "true" if value else "false"
|
||||||
|
|
||||||
|
if isinstance(value, list):
|
||||||
|
if len(value) == 0:
|
||||||
|
return "()"
|
||||||
|
return f"({', '.join([self._get_value_sql(x) for x in value])})"
|
||||||
|
|
||||||
|
if isinstance(value, datetime.datetime):
|
||||||
|
if value.tzinfo is None:
|
||||||
|
value = value.replace(tzinfo=datetime.timezone.utc)
|
||||||
|
|
||||||
|
return f"'{value.strftime(DATETIME_FORMAT)}'"
|
||||||
|
|
||||||
|
return str(value)
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _get_value_from_sql(cast_type: type, value: Any) -> Optional[T]:
|
||||||
|
"""
|
||||||
|
Get the value from the query result and cast it to the correct type
|
||||||
|
:param type cast_type:
|
||||||
|
:param Any value:
|
||||||
|
:return Optional[T]: Casted value, when value is str "NULL" None is returned
|
||||||
|
"""
|
||||||
|
if isinstance(value, str) and "NULL" in value:
|
||||||
|
return None
|
||||||
|
|
||||||
|
if isinstance(value, NoneType):
|
||||||
|
return None
|
||||||
|
|
||||||
|
if isinstance(value, cast_type):
|
||||||
|
return value
|
||||||
|
|
||||||
|
return cast_type(value)
|
||||||
|
|
||||||
|
def _get_primary_key_value_sql(self, obj: T_DBM) -> str:
    """
    Render the object's primary-key value for embedding in SQL
    (quoted when it is a string, otherwise returned as-is).
    """
    pk_value = getattr(obj, self.__primary_key)
    return f"'{pk_value}'" if isinstance(pk_value, str) else pk_value
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _attr_from_date_to_char(attr: str) -> str:
|
||||||
|
return f"TO_CHAR({attr}, 'YYYY-MM-DD HH24:MI:SS.US TZ')"
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
async def _get_editor_id(obj: T_DBM):
|
||||||
|
editor_id = obj.editor_id
|
||||||
|
if editor_id is None:
|
||||||
|
user = get_user()
|
||||||
|
if user is not None:
|
||||||
|
editor_id = user.id
|
||||||
|
|
||||||
|
return editor_id if editor_id is not None else "NULL"
|
||||||
7
src/cpl-database/cpl/database/abc/data_seeder_abc.py
Normal file
7
src/cpl-database/cpl/database/abc/data_seeder_abc.py
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
from abc import ABC, abstractmethod
|
||||||
|
|
||||||
|
|
||||||
|
class DataSeederABC(ABC):
    """ABC for data seeders; implementations insert initial data via seed()."""

    @abstractmethod
    async def seed(self): ...
|
||||||
53
src/cpl-database/cpl/database/abc/db_context_abc.py
Normal file
53
src/cpl-database/cpl/database/abc/db_context_abc.py
Normal file
@@ -0,0 +1,53 @@
|
|||||||
|
from abc import ABC, abstractmethod
|
||||||
|
from typing import Any
|
||||||
|
|
||||||
|
from cpl.database.model.database_settings import DatabaseSettings
|
||||||
|
|
||||||
|
|
||||||
|
class DBContextABC(ABC):
    r"""ABC for the :class:`cpl.database.context.database_context.DatabaseContext`"""

    @abstractmethod
    def connect(self, database_settings: DatabaseSettings):
        r"""Connects to a database by connection settings

        Parameter:
            database_settings :class:`cpl.database.database_settings.DatabaseSettings`
        """

    @abstractmethod
    async def execute(self, statement: str, args=None, multi=True) -> list[list]:
        r"""Runs SQL Statements

        Parameter:
            statement: :class:`str`
            args: :class:`list` | :class:`tuple` | :class:`dict` | :class:`None`
            multi: :class:`bool`

        Returns:
            list: Fetched list of executed elements
        """

    @abstractmethod
    async def select_map(self, statement: str, args=None) -> list[dict]:
        r"""Runs SQL Select Statements and returns a list of dictionaries

        Parameter:
            statement: :class:`str`
            args: :class:`list` | :class:`tuple` | :class:`dict` | :class:`None`

        Returns:
            list: Fetched list of executed elements as dictionary
        """

    @abstractmethod
    async def select(self, statement: str, args=None) -> list[str] | list[tuple] | list[Any]:
        r"""Runs SQL Select Statements and returns the raw driver rows

        Parameter:
            statement: :class:`str`
            args: :class:`list` | :class:`tuple` | :class:`dict` | :class:`None`

        Returns:
            list: Fetched list of executed elements
        """
|
||||||
30
src/cpl-database/cpl/database/abc/db_join_model_abc.py
Normal file
30
src/cpl-database/cpl/database/abc/db_join_model_abc.py
Normal file
@@ -0,0 +1,30 @@
|
|||||||
|
from datetime import datetime
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
|
from cpl.core.typing import Id, SerialId
|
||||||
|
from cpl.database.abc.db_model_abc import DbModelABC
|
||||||
|
|
||||||
|
|
||||||
|
class DbJoinModelABC[T](DbModelABC[T]):
    """Base model for join (link) tables connecting a source row to a foreign row."""

    def __init__(
        self,
        id: Id,
        source_id: Id,
        foreign_id: Id,
        deleted: bool = False,
        editor_id: Optional[SerialId] = None,
        created: Optional[datetime] = None,
        updated: Optional[datetime] = None,
    ):
        DbModelABC.__init__(self, id, deleted, editor_id, created, updated)

        # Keys of the two joined rows; read-only after construction.
        self._source_id = source_id
        self._foreign_id = foreign_id

    @property
    def source_id(self) -> Id:
        """Primary key of the owning (source) row."""
        return self._source_id

    @property
    def foreign_id(self) -> Id:
        """Primary key of the referenced (foreign) row."""
        return self._foreign_id
|
||||||
79
src/cpl-database/cpl/database/abc/db_model_abc.py
Normal file
79
src/cpl-database/cpl/database/abc/db_model_abc.py
Normal file
@@ -0,0 +1,79 @@
|
|||||||
|
from abc import ABC
|
||||||
|
from datetime import datetime, timezone
|
||||||
|
from typing import Optional, Generic
|
||||||
|
|
||||||
|
from cpl.core.typing import Id, SerialId, T
|
||||||
|
|
||||||
|
|
||||||
|
class DbModelABC(ABC, Generic[T]):
    """
    Base class for database models: primary key, soft-delete flag,
    editor tracking, and created/updated timestamps.
    """

    def __init__(
        self,
        id: Id,
        deleted: bool = False,
        editor_id: Optional[SerialId] = None,
        created: Optional[datetime] = None,
        updated: Optional[datetime] = None,
    ):
        self._id = id
        self._deleted = deleted
        self._editor_id = editor_id

        # BUGFIX: default to timezone-aware datetime objects. The previous
        # fallback stored isoformat() strings, so the `created`/`updated`
        # properties (typed datetime) held str for freshly created models and
        # to_dict()'s isinstance(value, datetime) check never matched them.
        self._created = created if created is not None else datetime.now(timezone.utc)
        self._updated = updated if updated is not None else datetime.now(timezone.utc)

    @property
    def id(self) -> Id:
        return self._id

    @property
    def deleted(self) -> bool:
        return self._deleted

    @deleted.setter
    def deleted(self, value: bool):
        self._deleted = value

    @property
    def editor_id(self) -> SerialId:
        return self._editor_id

    @editor_id.setter
    def editor_id(self, value: SerialId):
        self._editor_id = value

    @property
    def created(self) -> datetime:
        return self._created

    @property
    def updated(self) -> datetime:
        return self._updated

    @updated.setter
    def updated(self, value: datetime):
        self._updated = value

    def to_dict(self) -> dict:
        """
        Serialize private attributes into a dict of strings.

        Only single-leading-underscore attributes are exported; datetimes are
        rendered via isoformat(). NOTE(review): keys strip ALL underscores
        (``_editor_id`` -> ``editorid``) — confirm this matches consumers.
        """
        result = {}
        for name, value in self.__dict__.items():
            if not name.startswith("_") or name.endswith("_"):
                continue

            if isinstance(value, datetime):
                value = value.isoformat()

            if not isinstance(value, str):
                value = str(value)

            result[name.replace("_", "")] = value

        return result
|
||||||
25
src/cpl-database/cpl/database/abc/db_model_dao_abc.py
Normal file
25
src/cpl-database/cpl/database/abc/db_model_dao_abc.py
Normal file
@@ -0,0 +1,25 @@
|
|||||||
|
from abc import abstractmethod
|
||||||
|
from datetime import datetime
|
||||||
|
from typing import Type
|
||||||
|
|
||||||
|
from cpl.database import TableManager
|
||||||
|
from cpl.database.abc.data_access_object_abc import DataAccessObjectABC
|
||||||
|
from cpl.database.abc.db_model_abc import DbModelABC
|
||||||
|
|
||||||
|
|
||||||
|
class DbModelDaoABC[T_DBM](DataAccessObjectABC[T_DBM]):
    """Base DAO for models deriving from :class:`DbModelABC`.

    Registers the audit columns shared by all DbModelABC models (id, deleted,
    editor, created/updated) so concrete DAOs only declare their own columns.
    """

    @abstractmethod
    def __init__(self, source: str, model_type: Type[T_DBM], table_name: str):
        DataAccessObjectABC.__init__(self, source, model_type, table_name)

        # ``ignore=True`` columns are read but never written by the DAO.
        self.attribute(DbModelABC.id, int, ignore=True)
        self.attribute(DbModelABC.deleted, bool)
        self.attribute(DbModelABC.editor_id, int, db_name="editorId", ignore=True)  # handled by db trigger

        # Resolve the editor relation against the auth users table.
        self.reference(
            "editor", "id", DbModelABC.editor_id, TableManager.get("auth_users")
        )  # not relevant for updates due to editor_id

        self.attribute(DbModelABC.created, datetime, ignore=True)  # handled by db trigger
        self.attribute(DbModelABC.updated, datetime, ignore=True)  # handled by db trigger
|
||||||
@@ -1,2 +0,0 @@
|
|||||||
from .database_connection import DatabaseConnection
|
|
||||||
from .database_connection_abc import DatabaseConnectionABC
|
|
||||||
1
src/cpl-database/cpl/database/const.py
Normal file
1
src/cpl-database/cpl/database/const.py
Normal file
@@ -0,0 +1 @@
|
|||||||
|
DATETIME_FORMAT = "%Y-%m-%d %H:%M:%S.%f %z"
|
||||||
@@ -1,2 +0,0 @@
|
|||||||
from .database_context import DatabaseContext
|
|
||||||
from .database_context_abc import DatabaseContextABC
|
|
||||||
@@ -1,52 +0,0 @@
|
|||||||
from typing import Optional
|
|
||||||
|
|
||||||
|
|
||||||
from cpl.database.connection.database_connection import DatabaseConnection
|
|
||||||
from cpl.database.connection.database_connection_abc import DatabaseConnectionABC
|
|
||||||
from cpl.database.context.database_context_abc import DatabaseContextABC
|
|
||||||
from cpl.database.database_settings import DatabaseSettings
|
|
||||||
from mysql.connector.cursor import MySQLCursorBuffered
|
|
||||||
|
|
||||||
|
|
||||||
class DatabaseContext(DatabaseContextABC):
    r"""Representation of the database context

    Parameter:
        database_settings: :class:`cpl.database.database_settings.DatabaseSettings`
    """

    def __init__(self):
        DatabaseContextABC.__init__(self)

        # Underlying connection wrapper; settings are cached on first connect
        # so the context can transparently reconnect later.
        self._db: DatabaseConnectionABC = DatabaseConnection()
        self._settings: Optional[DatabaseSettings] = None

    @property
    def cursor(self) -> MySQLCursorBuffered:
        # Ensure the server connection is alive before handing out the cursor.
        self._ping_and_reconnect()
        return self._db.cursor

    def _ping_and_reconnect(self):
        """Ping the server (with built-in retries) and reconnect on failure."""
        try:
            self._db.server.ping(reconnect=True, attempts=3, delay=5)
        except Exception:
            # reconnect your cursor as you did in __init__ or wherever
            if self._settings is None:
                raise Exception("Call DatabaseContext.connect first")
            self.connect(self._settings)

    def connect(self, database_settings: DatabaseSettings):
        """Open the connection and remember the settings for reconnects."""
        # Only the first settings object is cached; later calls reuse it.
        if self._settings is None:
            self._settings = database_settings
        self._db.connect(database_settings)

        self.save_changes()

    def save_changes(self):
        """Commit pending changes on the underlying connection."""
        self._ping_and_reconnect()
        self._db.server.commit()

    def select(self, statement: str) -> list[tuple]:
        """Execute a raw SELECT statement and fetch all rows."""
        self._ping_and_reconnect()
        self._db.cursor.execute(statement)
        return self._db.cursor.fetchall()
|
|
||||||
@@ -1,40 +0,0 @@
|
|||||||
from abc import ABC, abstractmethod
|
|
||||||
|
|
||||||
from cpl.database.database_settings import DatabaseSettings
|
|
||||||
from mysql.connector.cursor import MySQLCursorBuffered
|
|
||||||
|
|
||||||
|
|
||||||
class DatabaseContextABC(ABC):
    r"""ABC for the :class:`cpl.database.context.database_context.DatabaseContext`"""

    @abstractmethod
    def __init__(self, *args):
        pass

    @property
    @abstractmethod
    def cursor(self) -> MySQLCursorBuffered:
        r"""Buffered cursor of the active database connection."""
        pass

    @abstractmethod
    def connect(self, database_settings: DatabaseSettings):
        r"""Connects to a database by connection settings

        Parameter:
            database_settings :class:`cpl.database.database_settings.DatabaseSettings`
        """

    @abstractmethod
    def save_changes(self):
        r"""Saves changes of the database"""

    @abstractmethod
    def select(self, statement: str) -> list[tuple]:
        r"""Runs SQL Statements

        Parameter:
            statement: :class:`str`

        Returns:
            list: Fetched list of selected elements
        """
|
|
||||||
@@ -1,13 +0,0 @@
|
|||||||
from enum import Enum
|
|
||||||
|
|
||||||
|
|
||||||
class DatabaseSettingsNameEnum(Enum):
    """Configuration key name for each database connection setting."""

    host = "Host"
    port = "Port"
    user = "User"
    password = "Password"
    database = "Database"
    charset = "Charset"
    use_unicode = "UseUnicode"
    buffered = "Buffered"
    auth_plugin = "AuthPlugin"
|
|
||||||
@@ -0,0 +1,68 @@
|
|||||||
|
import textwrap
|
||||||
|
from typing import Callable
|
||||||
|
|
||||||
|
|
||||||
|
class ExternalDataTempTableBuilder:
    """Fluent builder producing a SQL script that drops, (re)creates and
    fills a temporary table from externally supplied row values.

    Values are provided lazily via an async ``value_getter`` callable and
    are interpolated when :meth:`build` assembles the final script.
    """

    def __init__(self):
        self._table_name = None            # temp table name, always suffixed "_temp"
        self._fields: dict[str, str] = {}  # column name -> SQL type (may carry PRIMARY KEY)
        self._primary_key = "id"           # default until a primary field is declared
        self._join_ref_table = None        # original (possibly schema-qualified) name
        self._value_getter = None          # async callable yielding row value tuples

    @property
    def table_name(self) -> str:
        return self._table_name

    @property
    def fields(self) -> dict[str, str]:
        return self._fields

    @property
    def primary_key(self) -> str:
        return self._primary_key

    @property
    def join_ref_table(self) -> str:
        return self._join_ref_table

    def with_table_name(self, table_name: str) -> "ExternalDataTempTableBuilder":
        """Set the temp-table name, keeping the original name for joins.

        Any schema prefix ("schema.table") is stripped and a "_temp" suffix
        is appended when not already present.
        """
        self._join_ref_table = table_name

        # Work on the bare table name without schema qualification.
        bare = table_name.rsplit(".", 1)[-1] if "." in table_name else table_name
        self._table_name = bare if bare.endswith("_temp") else f"{bare}_temp"
        return self

    def with_field(self, name: str, sql_type: str, primary=False) -> "ExternalDataTempTableBuilder":
        """Declare a column; ``primary=True`` marks it as the primary key."""
        if primary:
            sql_type += " PRIMARY KEY"
            self._primary_key = name
        self._fields[name] = sql_type
        return self

    def with_value_getter(self, value_getter: Callable) -> "ExternalDataTempTableBuilder":
        """Register the async callable that supplies the row values."""
        self._value_getter = value_getter
        return self

    async def build(self) -> str:
        """Assemble the DROP/CREATE/INSERT script for the temp table."""
        assert self._table_name is not None, "Table name is required"
        assert self._value_getter is not None, "Value getter is required"

        values_str = ", ".join(f"{row}" for row in await self._value_getter())
        columns = ", ".join(f"{k} {v}" for k, v in self._fields.items())

        return textwrap.dedent(
            f"""
            DROP TABLE IF EXISTS {self._table_name};
            CREATE TEMP TABLE {self._table_name} (
                {columns}
            );

            INSERT INTO {self._table_name} VALUES {values_str};
            """
        )
|
||||||
3
src/cpl-database/cpl/database/model/__init__.py
Normal file
3
src/cpl-database/cpl/database/model/__init__.py
Normal file
@@ -0,0 +1,3 @@
|
|||||||
|
from .database_settings import DatabaseSettings
|
||||||
|
from .migration import Migration
|
||||||
|
from .server_type import ServerTypes
|
||||||
@@ -1,6 +1,9 @@
|
|||||||
from typing import Optional
|
from typing import Optional
|
||||||
|
|
||||||
|
from cpl.core.configuration import Configuration
|
||||||
from cpl.core.configuration.configuration_model_abc import ConfigurationModelABC
|
from cpl.core.configuration.configuration_model_abc import ConfigurationModelABC
|
||||||
|
from cpl.core.environment import Environment
|
||||||
|
from cpl.core.utils import Base64
|
||||||
|
|
||||||
|
|
||||||
class DatabaseSettings(ConfigurationModelABC):
|
class DatabaseSettings(ConfigurationModelABC):
|
||||||
@@ -8,23 +11,23 @@ class DatabaseSettings(ConfigurationModelABC):
|
|||||||
|
|
||||||
def __init__(
|
def __init__(
|
||||||
self,
|
self,
|
||||||
host: str = None,
|
host: str = Environment.get("DB_HOST", str),
|
||||||
port: int = 3306,
|
port: int = Environment.get("DB_PORT", str, Configuration.get("DB_DEFAULT_PORT", 0)),
|
||||||
user: str = None,
|
user: str = Environment.get("DB_USER", str),
|
||||||
password: str = None,
|
password: str = Environment.get("DB_PASSWORD", str),
|
||||||
database: str = None,
|
database: str = Environment.get("DB_DATABASE", str),
|
||||||
charset: str = "utf8mb4",
|
charset: str = Environment.get("DB_CHARSET", str, "utf8mb4"),
|
||||||
use_unicode: bool = False,
|
use_unicode: bool = Environment.get("DB_USE_UNICODE", bool, False),
|
||||||
buffered: bool = False,
|
buffered: bool = Environment.get("DB_BUFFERED", bool, False),
|
||||||
auth_plugin: str = "caching_sha2_password",
|
auth_plugin: str = Environment.get("DB_AUTH_PLUGIN", str, "caching_sha2_password"),
|
||||||
ssl_disabled: bool = False,
|
ssl_disabled: bool = Environment.get("DB_SSL_DISABLED", bool, False),
|
||||||
):
|
):
|
||||||
ConfigurationModelABC.__init__(self)
|
ConfigurationModelABC.__init__(self)
|
||||||
|
|
||||||
self._host: Optional[str] = host
|
self._host: Optional[str] = host
|
||||||
self._port: Optional[int] = port
|
self._port: Optional[int] = port
|
||||||
self._user: Optional[str] = user
|
self._user: Optional[str] = user
|
||||||
self._password: Optional[str] = password
|
self._password: Optional[str] = Base64.decode(password) if Base64.is_b64(password) else password
|
||||||
self._database: Optional[str] = database
|
self._database: Optional[str] = database
|
||||||
self._charset: Optional[str] = charset
|
self._charset: Optional[str] = charset
|
||||||
self._use_unicode: Optional[bool] = use_unicode
|
self._use_unicode: Optional[bool] = use_unicode
|
||||||
12
src/cpl-database/cpl/database/model/migration.py
Normal file
12
src/cpl-database/cpl/database/model/migration.py
Normal file
@@ -0,0 +1,12 @@
|
|||||||
|
class Migration:
    """A named database migration and the SQL script that applies it."""

    def __init__(self, name: str, script: str):
        # Stored privately; exposed read-only via the properties below.
        self._name, self._script = name, script

    @property
    def name(self) -> str:
        """Identifier of this migration."""
        return self._name

    @property
    def script(self) -> str:
        """Raw SQL executed when this migration runs."""
        return self._script
|
||||||
22
src/cpl-database/cpl/database/model/server_type.py
Normal file
22
src/cpl-database/cpl/database/model/server_type.py
Normal file
@@ -0,0 +1,22 @@
|
|||||||
|
from enum import Enum
|
||||||
|
|
||||||
|
|
||||||
|
class ServerTypes(Enum):
    """Supported database server backends."""

    POSTGRES = "postgres"
    MYSQL = "mysql"


class _ServerTypeMeta(type):
    """Metaclass exposing ``server_type`` as a read-only class property.

    Fix: the original chained ``@classmethod`` + ``@property``, which was
    deprecated in Python 3.11 and removed in Python 3.13; a metaclass
    property keeps the ``ServerType.server_type`` access syntax working.
    """

    @property
    def server_type(cls) -> ServerTypes:
        """Globally configured server type; asserts it has been set."""
        assert cls._server_type is not None, "Server type is not set"
        return cls._server_type


class ServerType(metaclass=_ServerTypeMeta):
    """Process-wide holder for the configured database server type."""

    _server_type: ServerTypes = None

    @classmethod
    def set_server_type(cls, server_type: ServerTypes):
        """Set the global server type; must be a :class:`ServerTypes` member."""
        assert server_type is not None, "server_type must not be None"
        assert isinstance(server_type, ServerTypes), f"Expected ServerType but got {type(server_type)}"
        cls._server_type = server_type
|
||||||
@@ -4,16 +4,16 @@ import mysql.connector as sql
|
|||||||
from mysql.connector.abstracts import MySQLConnectionAbstract
|
from mysql.connector.abstracts import MySQLConnectionAbstract
|
||||||
from mysql.connector.cursor import MySQLCursorBuffered
|
from mysql.connector.cursor import MySQLCursorBuffered
|
||||||
|
|
||||||
from cpl.database.connection.database_connection_abc import DatabaseConnectionABC
|
from cpl.database.abc.connection_abc import ConnectionABC
|
||||||
from cpl.database.database_settings import DatabaseSettings
|
from cpl.database.database_settings import DatabaseSettings
|
||||||
from cpl.core.utils.credential_manager import CredentialManager
|
from cpl.core.utils.credential_manager import CredentialManager
|
||||||
|
|
||||||
|
|
||||||
class DatabaseConnection(DatabaseConnectionABC):
|
class DatabaseConnection(ConnectionABC):
|
||||||
r"""Representation of the database connection"""
|
r"""Representation of the database connection"""
|
||||||
|
|
||||||
def __init__(self):
|
def __init__(self):
|
||||||
DatabaseConnectionABC.__init__(self)
|
ConnectionABC.__init__(self)
|
||||||
|
|
||||||
self._database: Optional[MySQLConnectionAbstract] = None
|
self._database: Optional[MySQLConnectionAbstract] = None
|
||||||
self._cursor: Optional[MySQLCursorBuffered] = None
|
self._cursor: Optional[MySQLCursorBuffered] = None
|
||||||
84
src/cpl-database/cpl/database/mysql/db_context.py
Normal file
84
src/cpl-database/cpl/database/mysql/db_context.py
Normal file
@@ -0,0 +1,84 @@
|
|||||||
|
import uuid
|
||||||
|
from typing import Any, List, Dict, Tuple, Union
|
||||||
|
|
||||||
|
from mysql.connector import Error as MySQLError, PoolError
|
||||||
|
|
||||||
|
from cpl.core.configuration import Configuration
|
||||||
|
from cpl.core.environment import Environment
|
||||||
|
from cpl.database.abc.db_context_abc import DBContextABC
|
||||||
|
from cpl.database.db_logger import DBLogger
|
||||||
|
from cpl.database.model.database_settings import DatabaseSettings
|
||||||
|
from cpl.database.mysql.mysql_pool import MySQLPool
|
||||||
|
|
||||||
|
_logger = DBLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class DBContext(DBContextABC):
    """MySQL implementation of the DB context: a thin facade over
    :class:`MySQLPool` adding logging and a bounded retry on transient errors.
    """

    def __init__(self):
        DBContextABC.__init__(self)
        self._pool: MySQLPool = None  # created in connect()
        # Cumulative transient-failure counter driving the retry cut-off.
        # NOTE(review): never reset on success — failures accumulate across
        # calls for the lifetime of the context; confirm this is intended.
        self._fails = 0

        # Connects eagerly from the globally configured settings.
        self.connect(Configuration.get(DatabaseSettings))

    def connect(self, database_settings: DatabaseSettings):
        """Create the connection pool; fatal-logs (without re-raising) on failure."""
        try:
            _logger.debug("Connecting to database")
            self._pool = MySQLPool(
                database_settings,
            )
            _logger.info("Connected to database")
        except Exception as e:
            _logger.fatal("Connecting to database failed", e)

    async def execute(self, statement: str, args=None, multi=True) -> List[List]:
        """Run a (possibly multi-) statement and return raw row lists."""
        _logger.trace(f"execute {statement} with args: {args}")
        return await self._pool.execute(statement, args, multi)

    async def select_map(self, statement: str, args=None) -> List[Dict]:
        """SELECT returning one dict per row.

        Transient MySQL/pool errors are retried recursively; once ``_fails``
        reaches 3 an Exception carrying a reference UID is raised instead.
        """
        _logger.trace(f"select {statement} with args: {args}")
        try:
            return await self._pool.select_map(statement, args)
        except (MySQLError, PoolError) as e:
            if self._fails >= 3:
                _logger.error(f"Database error caused by `{statement}`", e)
                uid = uuid.uuid4()
                raise Exception(
                    f"Query failed three times with {type(e).__name__}. Contact an admin with the UID: {uid}"
                )

            _logger.error(f"Database error caused by `{statement}`", e)
            self._fails += 1
            try:
                _logger.debug("Retry select")
                return await self.select_map(statement, args)
            except Exception as e:
                # NOTE(review): retry failures are swallowed here and an empty
                # list is returned — callers cannot distinguish "no rows" from
                # "query failed"; confirm this best-effort behavior is wanted.
                pass
            return []
        except Exception as e:
            # Non-transient errors propagate to the caller.
            _logger.error(f"Database error caused by `{statement}`", e)
            raise e

    async def select(self, statement: str, args=None) -> Union[List[str], List[Tuple], List[Any]]:
        """SELECT returning raw tuples; same retry behavior as ``select_map``."""
        _logger.trace(f"select {statement} with args: {args}")
        try:
            return await self._pool.select(statement, args)
        except (MySQLError, PoolError) as e:
            if self._fails >= 3:
                _logger.error(f"Database error caused by `{statement}`", e)
                uid = uuid.uuid4()
                raise Exception(
                    f"Query failed three times with {type(e).__name__}. Contact an admin with the UID: {uid}"
                )

            _logger.error(f"Database error caused by `{statement}`", e)
            self._fails += 1
            try:
                _logger.debug("Retry select")
                return await self.select(statement, args)
            except Exception as e:
                # NOTE(review): swallowed like in select_map — see note there.
                pass
            return []
        except Exception as e:
            _logger.error(f"Database error caused by `{statement}`", e)
            raise e
|
||||||
105
src/cpl-database/cpl/database/mysql/mysql_pool.py
Normal file
105
src/cpl-database/cpl/database/mysql/mysql_pool.py
Normal file
@@ -0,0 +1,105 @@
|
|||||||
|
from typing import Optional, Any
|
||||||
|
|
||||||
|
import sqlparse
|
||||||
|
import aiomysql
|
||||||
|
|
||||||
|
from cpl.core.environment import Environment
|
||||||
|
from cpl.database.db_logger import DBLogger
|
||||||
|
from cpl.database.model import DatabaseSettings
|
||||||
|
|
||||||
|
_logger = DBLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class MySQLPool:
    """
    Create a pool when connecting to MySQL, which will decrease the time spent in
    requesting connection, creating connection, and closing connection.
    """

    def __init__(self, database_settings: DatabaseSettings):
        # Settings are kept so the pool can be (re)created lazily on first use.
        self._db_settings = database_settings
        self.pool: Optional[aiomysql.Pool] = None

    async def _get_pool(self):
        """Lazily create (or re-create) the shared aiomysql pool."""
        # NOTE(review): relies on the private aiomysql attribute ``_closed``.
        if self.pool is None or self.pool._closed:
            try:
                self.pool = await aiomysql.create_pool(
                    host=self._db_settings.host,
                    port=self._db_settings.port,
                    user=self._db_settings.user,
                    password=self._db_settings.password,
                    db=self._db_settings.database,
                    minsize=1,
                    maxsize=Environment.get("DB_POOL_SIZE", int, 1),
                )
            except Exception as e:
                _logger.fatal("Failed to connect to the database", e)
                raise
        return self.pool

    @staticmethod
    async def _exec_sql(cursor: Any, query: str, args=None, multi=True):
        """Execute ``query`` on ``cursor``; with ``multi`` the script is split
        into individual statements via sqlparse and each receives ``args``.
        """
        if multi:
            queries = [str(stmt).strip() for stmt in sqlparse.parse(query) if str(stmt).strip()]
            for q in queries:
                if q.strip() == "":
                    continue
                await cursor.execute(q, args)
        else:
            await cursor.execute(query, args)

    async def execute(self, query: str, args=None, multi=True) -> list[list]:
        """
        Execute a SQL statement, it could be with args and without args. The usage is
        similar to the execute() function in aiomysql.

        :param query: SQL clause
        :param args: args needed by the SQL clause
        :param multi: if the query is a multi-statement
        :return: return result
        """
        pool = await self._get_pool()
        async with pool.acquire() as con:
            async with con.cursor() as cursor:
                await self._exec_sql(cursor, query, args, multi)
                await con.commit()

                if cursor.description is not None:  # Query returns rows
                    res = await cursor.fetchall()
                    if res is None:
                        return []

                    return [list(row) for row in res]
                else:
                    return []

    async def select(self, query: str, args=None, multi=True) -> list[str]:
        """
        Execute a SQL statement, it could be with args and without args. The usage is
        similar to the execute() function in aiomysql.

        :param query: SQL clause
        :param args: args needed by the SQL clause
        :param multi: if the query is a multi-statement
        :return: return result
        """
        pool = await self._get_pool()
        async with pool.acquire() as con:
            async with con.cursor() as cursor:
                await self._exec_sql(cursor, query, args, multi)
                res = await cursor.fetchall()
                return list(res)

    async def select_map(self, query: str, args=None, multi=True) -> list[dict]:
        """
        Execute a SQL statement, it could be with args and without args. The usage is
        similar to the execute() function in aiomysql.

        :param query: SQL clause
        :param args: args needed by the SQL clause
        :param multi: if the query is a multi-statement
        :return: return result
        """
        pool = await self._get_pool()
        async with pool.acquire() as con:
            # DictCursor yields one dict per row keyed by column name.
            async with con.cursor(aiomysql.DictCursor) as cursor:
                await self._exec_sql(cursor, query, args, multi)
                res = await cursor.fetchall()
                return list(res)
|
||||||
0
src/cpl-database/cpl/database/postgres/__init__.py
Normal file
0
src/cpl-database/cpl/database/postgres/__init__.py
Normal file
86
src/cpl-database/cpl/database/postgres/db_context.py
Normal file
86
src/cpl-database/cpl/database/postgres/db_context.py
Normal file
@@ -0,0 +1,86 @@
|
|||||||
|
import uuid
|
||||||
|
from typing import Any
|
||||||
|
|
||||||
|
from psycopg import OperationalError
|
||||||
|
from psycopg_pool import PoolTimeout
|
||||||
|
|
||||||
|
from cpl.core.configuration import Configuration
|
||||||
|
from cpl.core.environment import Environment
|
||||||
|
from cpl.database.abc.db_context_abc import DBContextABC
|
||||||
|
from cpl.database.database_settings import DatabaseSettings
|
||||||
|
from cpl.database.db_logger import DBLogger
|
||||||
|
from cpl.database.postgres.postgres_pool import PostgresPool
|
||||||
|
|
||||||
|
_logger = DBLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class DBContext(DBContextABC):
    """PostgreSQL implementation of the DB context: a thin facade over
    :class:`PostgresPool` adding logging and a bounded retry on transient errors.
    """

    def __init__(self):
        DBContextABC.__init__(self)
        self._pool: PostgresPool = None  # created in connect()
        # Cumulative transient-failure counter driving the retry cut-off.
        # NOTE(review): never reset on success — confirm this is intended.
        self._fails = 0

        # Connects eagerly from the globally configured settings.
        self.connect(Configuration.get(DatabaseSettings))

    def connect(self, database_settings: DatabaseSettings):
        """Create the connection pool; fatal-logs (without re-raising) on failure."""
        try:
            _logger.debug("Connecting to database")
            # NOTE(review): passes a pool size as second argument —
            # PostgresPool.__init__ must accept it; verify the signatures match.
            self._pool = PostgresPool(
                database_settings,
                Environment.get("DB_POOL_SIZE", int, 1),
            )
            _logger.info("Connected to database")
        except Exception as e:
            _logger.fatal("Connecting to database failed", e)

    async def execute(self, statement: str, args=None, multi=True) -> list[list]:
        """Run a (possibly multi-) statement and return raw row lists."""
        _logger.trace(f"execute {statement} with args: {args}")
        return await self._pool.execute(statement, args, multi)

    async def select_map(self, statement: str, args=None) -> list[dict]:
        """SELECT returning one dict per row.

        Transient psycopg/pool errors are retried recursively; once ``_fails``
        reaches 3 an Exception carrying a reference UID is raised instead.
        """
        _logger.trace(f"select {statement} with args: {args}")
        try:
            return await self._pool.select_map(statement, args)
        except (OperationalError, PoolTimeout) as e:
            if self._fails >= 3:
                _logger.error(f"Database error caused by `{statement}`", e)
                uid = uuid.uuid4()
                raise Exception(
                    f"Query failed three times with {type(e).__name__}. Contact an admin with the UID: {uid}"
                )

            _logger.error(f"Database error caused by `{statement}`", e)
            self._fails += 1
            try:
                _logger.debug("Retry select")
                return await self.select_map(statement, args)
            except Exception as e:
                # NOTE(review): retry failures are swallowed here and an empty
                # list is returned — callers cannot distinguish "no rows" from
                # "query failed"; confirm this best-effort behavior is wanted.
                pass
            return []
        except Exception as e:
            # Non-transient errors propagate to the caller.
            _logger.error(f"Database error caused by `{statement}`", e)
            raise e

    async def select(self, statement: str, args=None) -> list[str] | list[tuple] | list[Any]:
        """SELECT returning raw tuples; same retry behavior as ``select_map``."""
        _logger.trace(f"select {statement} with args: {args}")
        try:
            return await self._pool.select(statement, args)
        except (OperationalError, PoolTimeout) as e:
            if self._fails >= 3:
                _logger.error(f"Database error caused by `{statement}`", e)
                uid = uuid.uuid4()
                raise Exception(
                    f"Query failed three times with {type(e).__name__}. Contact an admin with the UID: {uid}"
                )

            _logger.error(f"Database error caused by `{statement}`", e)
            self._fails += 1
            try:
                _logger.debug("Retry select")
                return await self.select(statement, args)
            except Exception as e:
                # NOTE(review): swallowed like in select_map — see note there.
                pass
            return []
        except Exception as e:
            _logger.error(f"Database error caused by `{statement}`", e)
            raise e
|
||||||
123
src/cpl-database/cpl/database/postgres/postgres_pool.py
Normal file
123
src/cpl-database/cpl/database/postgres/postgres_pool.py
Normal file
@@ -0,0 +1,123 @@
|
|||||||
|
from typing import Optional, Any
|
||||||
|
|
||||||
|
import sqlparse
|
||||||
|
from psycopg import sql
|
||||||
|
from psycopg_pool import AsyncConnectionPool, PoolTimeout
|
||||||
|
|
||||||
|
from cpl.core.environment import Environment
|
||||||
|
from cpl.database.db_logger import DBLogger
|
||||||
|
from cpl.database.model import DatabaseSettings
|
||||||
|
|
||||||
|
_logger = DBLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class PostgresPool:
    """
    Create a pool when connecting to PostgreSQL, which will decrease the time spent in
    requesting connection, creating connection, and closing connection.
    """

    def __init__(self, database_settings: "DatabaseSettings", max_size: Optional[int] = None):
        """
        :param database_settings: connection parameters (host, port, user, password, database)
        :param max_size: maximum pool size; when omitted, the DB_POOL_SIZE
            environment variable (default 1) is used.
        """
        self._conninfo = (
            f"host={database_settings.host} "
            f"port={database_settings.port} "
            f"user={database_settings.user} "
            f"password={database_settings.password} "
            f"dbname={database_settings.database}"
        )

        # Fix: DBContext.connect passes the pool size as a second positional
        # argument, which the previous one-argument constructor rejected with
        # a TypeError. Accept it (optionally) and fall back to the env var.
        self._max_size = max_size

        self.pool: Optional[AsyncConnectionPool] = None

    async def _get_pool(self):
        """Open a fresh connection pool and verify one connection works.

        NOTE(review): a new pool is created per call and ``self.pool`` is
        never populated — each query pays the pool-creation cost; confirm
        whether caching was intended.
        """
        max_size = self._max_size if self._max_size is not None else Environment.get("DB_POOL_SIZE", int, 1)
        pool = AsyncConnectionPool(
            conninfo=self._conninfo, open=False, min_size=1, max_size=max_size
        )
        await pool.open()
        try:
            async with pool.connection() as con:
                await pool.check_connection(con)
        except PoolTimeout as e:
            await pool.close()
            _logger.fatal(f"Failed to connect to the database", e)
        return pool

    @staticmethod
    async def _exec_sql(cursor: Any, query: str, args=None, multi=True):
        """Execute ``query`` on ``cursor``; with ``multi`` the script is split
        into individual statements via sqlparse and each receives ``args``.
        """
        if multi:
            queries = [str(stmt).strip() for stmt in sqlparse.parse(query) if str(stmt).strip()]
            for q in queries:
                if q.strip() == "":
                    continue

                await cursor.execute(sql.SQL(q), args)
        else:
            await cursor.execute(sql.SQL(query), args)

    async def execute(self, query: str, args=None, multi=True) -> list[list]:
        """
        Execute a SQL statement, it could be with args and without args. The usage is
        similar to the execute() function in the psycopg module.

        :param query: SQL clause
        :param args: args needed by the SQL clause
        :param multi: if the query is a multi-statement
        :return: return result
        """
        async with await self._get_pool() as pool:
            async with pool.connection() as con:
                async with con.cursor() as cursor:
                    await self._exec_sql(cursor, query, args, multi)

                    if cursor.description is not None:  # Check if the query returns rows
                        res = await cursor.fetchall()
                        if res is None:
                            return []

                        result = []
                        for row in res:
                            result.append(list(row))
                        return result
                    else:
                        return []

    async def select(self, query: str, args=None, multi=True) -> list[str]:
        """
        Execute a SQL statement, it could be with args and without args. The usage is
        similar to the execute() function in the psycopg module.

        :param query: SQL clause
        :param args: args needed by the SQL clause
        :param multi: if the query is a multi-statement
        :return: return result
        """
        async with await self._get_pool() as pool:
            async with pool.connection() as con:
                async with con.cursor() as cursor:
                    await self._exec_sql(cursor, query, args, multi)

                    res = await cursor.fetchall()
                    return list(res)

    async def select_map(self, query: str, args=None, multi=True) -> list[dict]:
        """
        Execute a SQL statement, it could be with args and without args. The usage is
        similar to the execute() function in the psycopg module.

        :param query: SQL clause
        :param args: args needed by the SQL clause
        :param multi: if the query is a multi-statement
        :return: return result
        """
        async with await self._get_pool() as pool:
            async with pool.connection() as con:
                async with con.cursor() as cursor:
                    await self._exec_sql(cursor, query, args, multi)

                    res = await cursor.fetchall()
                    res_map: list[dict] = []

                    # Zip each row with the cursor's column names into a dict.
                    for i_res in range(len(res)):
                        cols = {}
                        for i_col in range(len(res[i_res])):
                            cols[cursor.description[i_col].name] = res[i_res][i_col]

                        res_map.append(cols)

                    return res_map
|
||||||
154
src/cpl-database/cpl/database/postgres/sql_select_builder.py
Normal file
154
src/cpl-database/cpl/database/postgres/sql_select_builder.py
Normal file
@@ -0,0 +1,154 @@
|
|||||||
|
from typing import Optional, Union
|
||||||
|
|
||||||
|
from cpl.database.external_data_temp_table_builder import ExternalDataTempTableBuilder
|
||||||
|
|
||||||
|
|
||||||
|
class SQLSelectBuilder:
    """Incrementally assembles a SQL ``SELECT`` statement.

    Supports attribute lists, WHERE conditions, LEFT/INNER/RIGHT joins,
    ORDER BY, LIMIT/OFFSET and temp tables registered via
    ``ExternalDataTempTableBuilder``. Call :meth:`build` to render the final
    query string.
    """

    def __init__(self, table_name: str, primary_key: str):
        """
        :param table_name: primary table the SELECT targets
        :param primary_key: primary-key column of ``table_name``; used to
            join activated temp tables
        """
        self._table_name = table_name
        self._primary_key = primary_key

        self._temp_tables: dict[str, ExternalDataTempTableBuilder] = {}
        self._to_use_temp_tables: list[str] = []
        self._attributes: list[str] = []
        self._tables: list[str] = [table_name]
        # table name -> (ON clause, join kind), e.g. ("a.id = b.a_id", "LEFT")
        self._joins: dict[str, tuple[str, str]] = {}
        self._conditions: list[str] = []
        self._order_by: str = ""
        self._limit: Optional[int] = None
        self._offset: Optional[int] = None

    def with_temp_table(self, temp_table: ExternalDataTempTableBuilder) -> "SQLSelectBuilder":
        """Register a temp table so it can later be activated with :meth:`use_temp_table`."""
        self._temp_tables[temp_table.table_name] = temp_table
        return self

    def use_temp_table(self, temp_table_name: str) -> "SQLSelectBuilder":
        """Activate a registered temp table; it is created and joined at build time.

        :raises ValueError: if the temp table was never registered
        """
        if temp_table_name not in self._temp_tables:
            raise ValueError(f"Temp table {temp_table_name} not found.")

        self._to_use_temp_tables.append(temp_table_name)
        # Return self for chaining, consistent with the other builder methods.
        return self

    def with_attribute(self, attr: str, ignore_table_name=False) -> "SQLSelectBuilder":
        """Add a SELECT attribute, prefixing it with the primary table name unless told not to."""
        if not ignore_table_name and not attr.startswith(self._table_name):
            attr = f"{self._table_name}.{attr}"

        self._attributes.append(attr)
        return self

    def with_foreign_attribute(self, attr: str) -> "SQLSelectBuilder":
        """Add a SELECT attribute verbatim (no table-name prefixing)."""
        self._attributes.append(attr)
        return self

    def with_table(self, table_name: str) -> "SQLSelectBuilder":
        """Add an extra table to the FROM clause."""
        self._tables.append(table_name)
        return self

    def _check_prefix(self, attr: str, foreign_tables: list[str]) -> str:
        """Prefix *attr* with the primary table name unless it already carries a known prefix."""
        assert attr is not None

        # TO_CHAR(...) expressions are passed through untouched.
        if "TO_CHAR" in attr:
            return attr

        valid_prefixes = [
            "levenshtein",
            self._table_name,
            *self._joins.keys(),
            *self._temp_tables.keys(),
            *foreign_tables,
        ]
        if not any(attr.startswith(f"{prefix}.") for prefix in valid_prefixes):
            attr = f"{self._table_name}.{attr}"

        return attr

    def with_value_condition(
        self, attr: str, operator: str, value: str, foreign_tables: list[str]
    ) -> "SQLSelectBuilder":
        """Add a ``attr <operator> <value>`` WHERE condition."""
        attr = self._check_prefix(attr, foreign_tables)
        self._conditions.append(f"{attr} {operator} {value}")
        return self

    def with_levenshtein_condition(self, condition: str) -> "SQLSelectBuilder":
        """Add a pre-rendered levenshtein(...) WHERE condition verbatim."""
        self._conditions.append(condition)
        return self

    def with_condition(self, attr: str, operator: str, foreign_tables: list[str]) -> "SQLSelectBuilder":
        """Add a ``attr <operator>`` WHERE condition (operator carries its own operand)."""
        attr = self._check_prefix(attr, foreign_tables)
        self._conditions.append(f"{attr} {operator}")
        return self

    def with_grouped_conditions(self, conditions: list[str]) -> "SQLSelectBuilder":
        """Add several conditions AND-ed together inside one parenthesized group."""
        self._conditions.append(f"({' AND '.join(conditions)})")
        return self

    def _add_join(self, table: str, on: str, kind: str) -> "SQLSelectBuilder":
        """Add a join of the given kind; joining an already-joined table ANDs the ON clauses.

        Fixes the original fall-through bug where the merged ON clause was
        immediately overwritten by the unconditional assignment.
        """
        if table in self._joins:
            self._joins[table] = (f"{self._joins[table][0]} AND {on}", kind)
        else:
            self._joins[table] = (on, kind)
        return self

    def with_left_join(self, table: str, on: str) -> "SQLSelectBuilder":
        """Add a LEFT JOIN on *table* with the given ON clause."""
        return self._add_join(table, on, "LEFT")

    def with_inner_join(self, table: str, on: str) -> "SQLSelectBuilder":
        """Add an INNER JOIN on *table* with the given ON clause."""
        return self._add_join(table, on, "INNER")

    def with_right_join(self, table: str, on: str) -> "SQLSelectBuilder":
        """Add a RIGHT JOIN on *table* with the given ON clause."""
        return self._add_join(table, on, "RIGHT")

    def with_limit(self, limit: int) -> "SQLSelectBuilder":
        """Set the LIMIT of the query."""
        self._limit = limit
        return self

    def with_offset(self, offset: int) -> "SQLSelectBuilder":
        """Set the OFFSET of the query."""
        self._offset = offset
        return self

    def with_order_by(self, column: Union[str, property], direction: str = "ASC") -> "SQLSelectBuilder":
        """Set the ORDER BY column (a name or a property whose getter name is used) and direction."""
        if isinstance(column, property):
            column = column.fget.__name__
        self._order_by = f"{column} {direction}"
        return self

    async def _handle_temp_table_use(self, query) -> str:
        """Prepend the CREATE statements of all activated temp tables and join them in."""
        new_query = ""

        for temp_table_name in self._to_use_temp_tables:
            temp_table = self._temp_tables[temp_table_name]
            new_query += await self._temp_tables[temp_table_name].build()
            self.with_left_join(
                temp_table.table_name,
                f"{temp_table.join_ref_table}.{self._primary_key} = {temp_table.table_name}.{temp_table.primary_key}",
            )

        return f"{new_query} {query}" if new_query != "" else query

    async def build(self) -> str:
        """Render the final SELECT statement (including any temp-table preamble)."""
        query = await self._handle_temp_table_use("")

        attributes = ", ".join(self._attributes) if self._attributes else "*"
        # Join tables outside the f-string: nested same-type quotes inside an
        # f-string require Python 3.12+.
        tables = ", ".join(self._tables)
        query += f"SELECT {attributes} FROM {tables}"

        for join in self._joins:
            query += f" {self._joins[join][1]} JOIN {join} ON {self._joins[join][0]}"

        if self._conditions:
            query += " WHERE " + " AND ".join(self._conditions)

        if self._order_by:
            query += f" ORDER BY {self._order_by}"

        if self._limit is not None:
            query += f" LIMIT {self._limit}"

        if self._offset is not None:
            query += f" OFFSET {self._offset}"

        return query
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user