Compare commits

2 Commits: 2025.09.21 ... master

| Author | SHA1       | Date |
|--------|------------|------|
|        | b6cf5962aa |      |
|        | d3084041a9 |      |
@@ -1,76 +0,0 @@
name: Build on push
run-name: Build on push
on:
  push:
    branches:
      - dev

jobs:
  prepare:
    uses: ./.gitea/workflows/prepare.yaml
    with:
      version_suffix: 'dev'
    secrets: inherit

  api:
    uses: ./.gitea/workflows/package.yaml
    needs: [ prepare, application, auth, core, dependency ]
    with:
      working_directory: src/cpl-api
    secrets: inherit

  application:
    uses: ./.gitea/workflows/package.yaml
    needs: [ prepare, core, dependency ]
    with:
      working_directory: src/cpl-application
    secrets: inherit

  auth:
    uses: ./.gitea/workflows/package.yaml
    needs: [ prepare, core, dependency, database ]
    with:
      working_directory: src/cpl-auth
    secrets: inherit

  core:
    uses: ./.gitea/workflows/package.yaml
    needs: [prepare]
    with:
      working_directory: src/cpl-core
    secrets: inherit

  database:
    uses: ./.gitea/workflows/package.yaml
    needs: [ prepare, core, dependency ]
    with:
      working_directory: src/cpl-database
    secrets: inherit

  dependency:
    uses: ./.gitea/workflows/package.yaml
    needs: [ prepare, core ]
    with:
      working_directory: src/cpl-dependency
    secrets: inherit

  mail:
    uses: ./.gitea/workflows/package.yaml
    needs: [ prepare, core, dependency ]
    with:
      working_directory: src/cpl-mail
    secrets: inherit

  query:
    uses: ./.gitea/workflows/package.yaml
    needs: [prepare]
    with:
      working_directory: src/cpl-query
    secrets: inherit

  translation:
    uses: ./.gitea/workflows/package.yaml
    needs: [ prepare, core, dependency ]
    with:
      working_directory: src/cpl-translation
    secrets: inherit
@@ -1,39 +0,0 @@
name: Build on push
run-name: Build on push
on:
  push:
    branches:
      - master

jobs:
  prepare:
    uses: ./.gitea/workflows/prepare.yaml
    secrets: inherit

  core:
    uses: ./.gitea/workflows/package.yaml
    needs: [prepare]
    with:
      working_directory: src/cpl-core
    secrets: inherit

  query:
    uses: ./.gitea/workflows/package.yaml
    needs: [prepare]
    with:
      working_directory: src/cpl-query
    secrets: inherit

  translation:
    uses: ./.gitea/workflows/package.yaml
    needs: [ prepare, core ]
    with:
      working_directory: src/cpl-translation
    secrets: inherit

  mail:
    uses: ./.gitea/workflows/package.yaml
    needs: [ prepare, core ]
    with:
      working_directory: src/cpl-mail
    secrets: inherit
@@ -1,65 +0,0 @@
name: Build Package
run-name: Build Python Package

on:
  workflow_call:
    inputs:
      version_suffix:
        description: 'Suffix for the version (e.g. "dev", "alpha", "beta")'
        required: false
        type: string
      working_directory:
        required: true
        type: string

jobs:
  build:
    runs-on: [ runner ]
    container: git.sh-edraft.de/sh-edraft.de/act-runner:latest
    defaults:
      run:
        working-directory: ${{ inputs.working_directory }}
    steps:
      - name: Clone Repository
        uses: https://github.com/actions/checkout@v3
        with:
          token: ${{ secrets.CI_ACCESS_TOKEN }}

      - name: Download build version artifact
        uses: actions/download-artifact@v3
        with:
          name: version

      - name: Set version
        run: |
          sed -i -E "s/^version = \".*\"/version = \"$(cat /workspace/sh-edraft.de/cpl/version.txt)\"/" pyproject.toml
          echo "Set version to $(cat /workspace/sh-edraft.de/cpl/version.txt)"
          cat pyproject.toml

      - name: Set pip conf
        run: |
          cat > .pip.conf <<'EOF'
          [global]
          extra-index-url = https://git.sh-edraft.de/api/packages/sh-edraft.de/pypi/simple/
          EOF

      - name: Install Dependencies
        run: |
          export PIP_CONFIG_FILE=".pip.conf"
          pip install build

      - name: Build Package
        run: |
          python -m build --outdir dist

      - name: Login to registry git.sh-edraft.de
        uses: https://github.com/docker/login-action@v1
        with:
          registry: git.sh-edraft.de
          username: ${{ secrets.CI_USERNAME }}
          password: ${{ secrets.CI_ACCESS_TOKEN }}

      - name: Push package
        run: |
          pip install twine
          python -m twine upload --repository-url https://git.sh-edraft.de/api/packages/sh-edraft.de/pypi -u ${{ secrets.CI_USERNAME }} -p ${{ secrets.CI_ACCESS_TOKEN }} ./dist/*
@@ -1,54 +0,0 @@
name: Prepare Build
run-name: Prepare Build Version

on:
  workflow_call:
    inputs:
      version_suffix:
        description: 'Suffix for the version (e.g. "dev", "alpha", "beta")'
        required: false
        type: string

jobs:
  prepare:
    runs-on: [ runner ]
    container: git.sh-edraft.de/sh-edraft.de/act-runner:latest
    steps:
      - name: Clone Repository
        uses: https://github.com/actions/checkout@v3
        with:
          token: ${{ secrets.CI_ACCESS_TOKEN }}

      - name: Get Date and Build Number
        run: |
          git fetch --tags
          git tag

          DATE=$(date +'%Y.%m.%d')
          TAG_COUNT=$(git tag -l "${DATE}.*" | wc -l)
          BUILD_NUMBER=$(($TAG_COUNT + 1))

          VERSION_SUFFIX=${{ inputs.version_suffix }}
          if [ -n "$VERSION_SUFFIX" ] && [ "$VERSION_SUFFIX" = "dev" ]; then
            BUILD_VERSION="${DATE}.dev${BUILD_NUMBER}"
          elif [ -n "$VERSION_SUFFIX" ]; then
            BUILD_VERSION="${DATE}.${BUILD_NUMBER}${VERSION_SUFFIX}"
          else
            BUILD_VERSION="${DATE}.${BUILD_NUMBER}"
          fi

          echo "$BUILD_VERSION" > version.txt
          echo "VERSION $BUILD_VERSION"

      - name: Create Git Tag for Build
        run: |
          git config user.name "ci"
          git config user.email "dev@sh-edraft.de"
          echo "tag $(cat version.txt)"
          git tag $(cat version.txt)
          git push origin --tags

      - name: Upload build version artifact
        uses: actions/upload-artifact@v3
        with:
          name: version
          path: version.txt
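The version string produced above is calendar-based: today's date plus a counter derived from how many tags already exist for that date, with dev builds getting a PEP 440 `devN` segment. A minimal Python sketch of the same rule, as a local illustration only (this helper is not part of the repository):

```python
from datetime import date


def build_version(existing_tags: list[str], suffix: str = "") -> str:
    """Mirror of the workflow's versioning logic; a sketch, not the CI code."""
    today = date.today().strftime("%Y.%m.%d")  # e.g. "2025.09.21"
    build_number = sum(tag.startswith(f"{today}.") for tag in existing_tags) + 1
    if suffix == "dev":
        return f"{today}.dev{build_number}"    # dev builds: 2025.09.21.dev2
    if suffix:
        return f"{today}.{build_number}{suffix}"
    return f"{today}.{build_number}"           # release builds: 2025.09.21.2
```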
.gitignore (vendored): 1 change
@@ -113,7 +113,6 @@ venv.bak/
 
 # Custom Environments
 cpl-env/
-.secret
 
 # Spyder project settings
 .spyderproject
@@ -1,2 +0,0 @@
[global]
extra-index-url = https://git.sh-edraft.de/api/packages/sh-edraft.de/pypi/simple/
README.md: 153 additions
@@ -0,0 +1,153 @@
<h1 align="center">CPL - Common python library</h1>

<!-- Summary -->
<p align="center">
  <!-- <img src="" alt="cpl-logo" width="120px" height="120px"/> -->
  <br>
  <i>CPL is a development platform for server applications<br>using Python.</i>
  <br>
</p>

## Table of Contents
<!-- TABLE OF CONTENTS -->
<ol>
  <li><a href="#features">Features</a></li>
  <li>
    <a href="#getting-started">Getting Started</a>
    <ul>
      <li><a href="#prerequisites">Prerequisites</a></li>
      <li><a href="#installation">Installation</a></li>
    </ul>
  </li>
  <li><a href="#roadmap">Roadmap</a></li>
  <li><a href="#contributing">Contributing</a></li>
  <li><a href="#license">License</a></li>
  <li><a href="#contact">Contact</a></li>
</ol>

## Features
<!-- FEATURE OVERVIEW -->
- Expandable
- Application base
  - Standardized application classes
  - Application object builder
  - Application extension classes
  - Startup classes
  - Startup extension classes
- Configuration
  - Configure via object-mapped JSON
  - Console argument handling
- Console class for in- and output
  - Banner
  - Spinner
  - Options (menu)
  - Table
  - Write
  - Write_at
  - Write_line
  - Write_line_at
- Dependency injection
  - Service lifetimes: singleton, scoped and transient
- Providing of application environment
  - Environment (development, staging, testing, production)
  - Appname
  - Customer
  - Hostname
  - Runtime directory
  - Working directory
- Logging
  - Standardized logger
  - Log levels (FATAL, ERROR, WARN, INFO, DEBUG & TRACE)
- Mail handling
  - Send mails
- Pipe classes
  - Convert input
- Utils
  - Credential manager
    - Encryption via BASE64
  - PIP wrapper class based on subprocess
    - Run pip commands
  - String converter to different variants
    - to_lower_case
    - to_camel_case
    - ...

<!-- GETTING STARTED -->
## Getting Started

[Get started with CPL][quickstart].

### Prerequisites

- Install [python], which includes [pip installs packages][pip]

### Installation

Install the CPL package:

```sh
pip install cpl-core --extra-index-url https://pip.sh-edraft.de
```

Install the CPL CLI:

```sh
pip install cpl-cli --extra-index-url https://pip.sh-edraft.de
```

Create a workspace:

```sh
cpl new <console|library|unittest> <PROJECT NAME>
```

Run the application:

```sh
cd <PROJECT NAME>
cpl start
```

<!-- ROADMAP -->
## Roadmap

See the [open issues](https://git.sh-edraft.de/sh-edraft.de/sh_cpl/issues) for a list of proposed features (and known issues).

<!-- CONTRIBUTING -->
## Contributing

### Contributing Guidelines

Read through our [contributing guidelines][contributing] to learn about our submission process, coding rules and more.

### Want to Help?

Want to file a bug, contribute some code, or improve documentation? Excellent! Read up on our guidelines for [contributing][contributing].

<!-- LICENSE -->
## License

Distributed under the MIT License. See [LICENSE] for more information.

<!-- CONTACT -->
## Contact

Sven Heidemann - sven.heidemann@sh-edraft.de

Project link: [https://git.sh-edraft.de/sh-edraft.de/sh_cpl](https://git.sh-edraft.de/sh-edraft.de/sh_cpl)

<!-- External LINKS -->
[pip_url]: https://pip.sh-edraft.de
[python]: https://www.python.org/
[pip]: https://pypi.org/project/pip/

<!-- Internal LINKS -->
[project]: https://git.sh-edraft.de/sh-edraft.de/sh_cpl
[quickstart]: https://git.sh-edraft.de/sh-edraft.de/sh_cpl/wiki/quickstart
[contributing]: https://git.sh-edraft.de/sh-edraft.de/sh_cpl/wiki/contributing
[license]: LICENSE
cpl-workspace.json (new file): 151 additions
@@ -0,0 +1,151 @@
{
  "WorkspaceSettings": {
    "DefaultProject": "cpl-core",
    "Projects": {
      "cpl-cli": "src/cpl_cli/cpl-cli.json",
      "cpl-core": "src/cpl_core/cpl-core.json",
      "cpl-discord": "src/cpl_discord/cpl-discord.json",
      "cpl-query": "src/cpl_query/cpl-query.json",
      "cpl-translation": "src/cpl_translation/cpl-translation.json",
      "set-version": "tools/set_version/set-version.json",
      "set-pip-urls": "tools/set_pip_urls/set-pip-urls.json",
      "unittests": "unittests/unittests/unittests.json",
      "unittests_cli": "unittests/unittests_cli/unittests_cli.json",
      "unittests_core": "unittests/unittests_core/unittests_core.json",
      "unittests_query": "unittests/unittests_query/unittests_query.json",
      "unittests_shared": "unittests/unittests_shared/unittests_shared.json",
      "unittests_translation": "unittests/unittests_translation/unittests_translation.json"
    },
    "Scripts": {
      "hello-world": "echo 'Hello World'",

      "format": "echo 'Formatting:'; black ./",

      "sv": "cpl set-version",
      "set-version": "cpl run set-version --dev $ARGS; echo '';",

      "spu": "cpl set-pip-urls",
      "set-pip-urls": "cpl run set-pip-urls --dev $ARGS; echo '';",

      "docs-build": "cpl format; echo 'Build Documentation'; cpl db-core; cpl db-discord; cpl db-query; cpl db-translation; cd docs/; make clean; make html;",
      "db-core": "cd docs/; sphinx-apidoc -o source/ ../src/cpl_core; cd ../",
      "db-discord": "cd docs/; sphinx-apidoc -o source/ ../src/cpl_discord; cd ../",
      "db-query": "cd docs/; sphinx-apidoc -o source/ ../src/cpl_query; cd ../",
      "db-translation": "cd docs/; sphinx-apidoc -o source/ ../src/cpl_translation; cd ../",
      "db": "cpl docs-build",

      "docs-open": "xdg-open $PWD/docs/build/html/index.html &",
      "do": "cpl docs-open",

      "test": "cpl run unittests",

      "pre-build-all": "cpl sv $ARGS; cpl spu $ARGS;",
      "build-all": "cpl build-cli; cpl build-core; cpl build-discord; cpl build-query; cpl build-translation; cpl build-set-pip-urls; cpl build-set-version",
      "ba": "cpl build-all $ARGS",
      "build-cli": "echo 'Build cpl-cli'; cd ./src/cpl_cli; cpl build; cd ../../;",
      "build-core": "echo 'Build cpl-core'; cd ./src/cpl_core; cpl build; cd ../../;",
      "build-discord": "echo 'Build cpl-discord'; cd ./src/cpl_discord; cpl build; cd ../../;",
      "build-query": "echo 'Build cpl-query'; cd ./src/cpl_query; cpl build; cd ../../;",
      "build-translation": "echo 'Build cpl-translation'; cd ./src/cpl_translation; cpl build; cd ../../;",
      "build-set-pip-urls": "echo 'Build set-pip-urls'; cd ./tools/set_pip_urls; cpl build; cd ../../;",
      "build-set-version": "echo 'Build set-version'; cd ./tools/set_version; cpl build; cd ../../;",

      "pre-publish-all": "cpl sv $ARGS; cpl spu $ARGS;",
      "publish-all": "cpl publish-cli; cpl publish-core; cpl publish-discord; cpl publish-query; cpl publish-translation;",
      "pa": "cpl publish-all $ARGS",
      "publish-cli": "echo 'Publish cpl-cli'; cd ./src/cpl_cli; cpl publish; cd ../../;",
      "publish-core": "echo 'Publish cpl-core'; cd ./src/cpl_core; cpl publish; cd ../../;",
      "publish-discord": "echo 'Publish cpl-discord'; cd ./src/cpl_discord; cpl publish; cd ../../;",
      "publish-query": "echo 'Publish cpl-query'; cd ./src/cpl_query; cpl publish; cd ../../;",
      "publish-translation": "echo 'Publish cpl-translation'; cd ./src/cpl_translation; cpl publish; cd ../../;",

      "upload-prod-cli": "echo 'PROD Upload cpl-cli'; cpl upl-prod-cli;",
      "upl-prod-cli": "twine upload -r pip.sh-edraft.de dist/cpl-cli/publish/setup/*",

      "upload-prod-core": "echo 'PROD Upload cpl-core'; cpl upl-prod-core;",
      "upl-prod-core": "twine upload -r pip.sh-edraft.de dist/cpl-core/publish/setup/*",

      "upload-prod-discord": "echo 'PROD Upload cpl-discord'; cpl upl-prod-discord;",
      "upl-prod-discord": "twine upload -r pip.sh-edraft.de dist/cpl-discord/publish/setup/*",

      "upload-prod-query": "echo 'PROD Upload cpl-query'; cpl upl-prod-query;",
      "upl-prod-query": "twine upload -r pip.sh-edraft.de dist/cpl-query/publish/setup/*",

      "upload-prod-translation": "echo 'PROD Upload cpl-translation'; cpl upl-prod-translation;",
      "upl-prod-translation": "twine upload -r pip.sh-edraft.de dist/cpl-translation/publish/setup/*",

      "upload-exp-cli": "echo 'EXP Upload cpl-cli'; cpl upl-exp-cli;",
      "upl-exp-cli": "twine upload -r pip-exp.sh-edraft.de dist/cpl-cli/publish/setup/*",

      "upload-exp-core": "echo 'EXP Upload cpl-core'; cpl upl-exp-core;",
      "upl-exp-core": "twine upload -r pip-exp.sh-edraft.de dist/cpl-core/publish/setup/*",

      "upload-exp-discord": "echo 'EXP Upload cpl-discord'; cpl upl-exp-discord;",
      "upl-exp-discord": "twine upload -r pip-exp.sh-edraft.de dist/cpl-discord/publish/setup/*",

      "upload-exp-query": "echo 'EXP Upload cpl-query'; cpl upl-exp-query;",
      "upl-exp-query": "twine upload -r pip-exp.sh-edraft.de dist/cpl-query/publish/setup/*",

      "upload-exp-translation": "echo 'EXP Upload cpl-translation'; cpl upl-exp-translation;",
      "upl-exp-translation": "twine upload -r pip-exp.sh-edraft.de dist/cpl-translation/publish/setup/*",

      "upload-dev-cli": "echo 'DEV Upload cpl-cli'; cpl upl-dev-cli;",
      "upl-dev-cli": "twine upload -r pip-dev.sh-edraft.de dist/cpl-cli/publish/setup/*",

      "upload-dev-core": "echo 'DEV Upload cpl-core'; cpl upl-dev-core;",
      "upl-dev-core": "twine upload -r pip-dev.sh-edraft.de dist/cpl-core/publish/setup/*",

      "upload-dev-discord": "echo 'DEV Upload cpl-discord'; cpl upl-dev-discord;",
      "upl-dev-discord": "twine upload -r pip-dev.sh-edraft.de dist/cpl-discord/publish/setup/*",

      "upload-dev-query": "echo 'DEV Upload cpl-query'; cpl upl-dev-query;",
      "upl-dev-query": "twine upload -r pip-dev.sh-edraft.de dist/cpl-query/publish/setup/*",

      "upload-dev-translation": "echo 'DEV Upload cpl-translation'; cpl upl-dev-translation;",
      "upl-dev-translation": "twine upload -r pip-dev.sh-edraft.de dist/cpl-translation/publish/setup/*",

      "pre-deploy-prod": "cpl sv $ARGS; cpl spu --environment=production;",
      "deploy-prod": "cpl deploy-prod-cli; cpl deploy-prod-core; cpl deploy-prod-discord; cpl deploy-prod-query; cpl deploy-prod-translation;",
      "dp": "cpl deploy-prod $ARGS",
      "deploy-prod-cli": "cpl publish-cli; cpl upload-prod-cli",
      "deploy-prod-core": "cpl publish-core; cpl upload-prod-core",
      "deploy-prod-query": "cpl publish-query; cpl upload-prod-query",
      "deploy-prod-discord": "cpl publish-discord; cpl upload-prod-discord",
      "deploy-prod-translation": "cpl publish-translation; cpl upload-prod-translation",

      "pre-deploy-exp": "cpl sv $ARGS; cpl spu --environment=staging;",
      "deploy-exp": "cpl deploy-exp-cli; cpl deploy-exp-core; cpl deploy-exp-discord; cpl deploy-exp-query; cpl deploy-exp-translation;",
      "de": "cpl deploy-exp $ARGS",
      "deploy-exp-cli": "cpl publish-cli; cpl upload-exp-cli",
      "deploy-exp-core": "cpl publish-core; cpl upload-exp-core",
      "deploy-exp-discord": "cpl publish-discord; cpl upload-exp-discord",
      "deploy-exp-query": "cpl publish-query; cpl upload-exp-query",
      "deploy-exp-translation": "cpl publish-translation; cpl upload-exp-translation",

      "pre-deploy-dev": "cpl sv $ARGS; cpl spu --environment=development;",
      "deploy-dev": "cpl deploy-dev-cli; cpl deploy-dev-core; cpl deploy-dev-discord; cpl deploy-dev-query; cpl deploy-dev-translation;",
      "dd": "cpl deploy-dev $ARGS",
      "deploy-dev-cli": "cpl publish-cli; cpl upload-dev-cli",
      "deploy-dev-core": "cpl publish-core; cpl upload-dev-core",
      "deploy-dev-discord": "cpl publish-discord; cpl upload-dev-discord",
      "deploy-dev-query": "cpl publish-query; cpl upload-dev-query",
      "deploy-dev-translation": "cpl publish-translation; cpl upload-dev-translation",

      "dev-install": "cpl di-core; cpl di-cli; cpl di-query; cpl di-translation;",
      "di": "cpl dev-install",
      "di-core": "pip install cpl-core --pre --upgrade --extra-index-url https://pip-dev.sh-edraft.de",
      "di-cli": "pip install cpl-cli --pre --upgrade --extra-index-url https://pip-dev.sh-edraft.de",
      "di-discord": "pip install cpl-discord --pre --upgrade --extra-index-url https://pip-dev.sh-edraft.de",
      "di-query": "pip install cpl-query --pre --upgrade --extra-index-url https://pip-dev.sh-edraft.de",
      "di-translation": "pip install cpl-translation --pre --upgrade --extra-index-url https://pip-dev.sh-edraft.de",

      "prod-install": "cpl pi-core; cpl pi-cli; cpl pi-query; cpl pi-translation;",
      "pi": "cpl prod-install",
      "pi-core": "pip install cpl-core --pre --upgrade --extra-index-url https://pip.sh-edraft.de",
      "pi-cli": "pip install cpl-cli --pre --upgrade --extra-index-url https://pip.sh-edraft.de",
      "pi-discord": "pip install cpl-discord --pre --upgrade --extra-index-url https://pip.sh-edraft.de",
      "pi-query": "pip install cpl-query --pre --upgrade --extra-index-url https://pip.sh-edraft.de",
      "pi-translation": "pip install cpl-translation --pre --upgrade --extra-index-url https://pip.sh-edraft.de"
    }
  }
}
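A note on the Scripts map above: the two-letter entries ("ba", "pa", "dp", "de", "dd", "di", "pi") are aliases that re-invoke `cpl` with the long script name, and `$ARGS` presumably forwards any extra CLI arguments, so a call like `cpl ba --dev` would expand to `cpl build-all --dev`. (The original file contained `"deploy-dev-translation": "cpl publish-query; ..."`, an apparent copy-paste slip corrected to `publish-translation` above to match every other deploy row.)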
install.sh: 61 deletions
@@ -1,61 +0,0 @@
#!/usr/bin/env bash
set -euo pipefail

# Find and combine requirements from src/cpl-*/requirements.txt,
# filtering out lines whose *package name* starts with "cpl-".
# Works with pinned versions, extras, markers, editable installs, and VCS refs.

shopt -s nullglob

req_files=(src/cpl-*/requirements.txt)
if ((${#req_files[@]} == 0)); then
  echo "No requirements files found at src/cpl-*/requirements.txt" >&2
  exit 1
fi

tmp_combined="$(mktemp)"
trap 'rm -f "$tmp_combined"' EXIT

# Concatenate, trim comments/whitespace, filter out cpl-* packages, dedupe.
# We keep non-package options/flags/constraints as-is.
awk '
function trim(s){ sub(/^[[:space:]]+/,"",s); sub(/[[:space:]]+$/,"",s); return s }

{
  line=$0
  # drop full-line comments and strip inline comments
  if (line ~ /^[[:space:]]*#/) next
  sub(/#[^!].*$/,"",line)  # strip trailing comment (simple heuristic)
  line=trim(line)
  if (line == "") next

  # Determine the package *name* even for "-e", extras, pins, markers, or VCS "@"
  e = line
  sub(/^-e[[:space:]]+/,"",e)  # remove editable prefix
  # Tokenize up to the first of these separators: space, [ < > = ! ~ ; @
  token = e
  sub(/\[.*/,"",token)  # remove extras quickly
  n = split(token, a, /[<>=!~;@[:space:]]/)
  name = tolower(a[1])

  # If the first token (name) starts with "cpl-", skip this requirement
  if (name ~ /^cpl-/) next

  print line
}
' "${req_files[@]}" | sort -u > "$tmp_combined"

if ! [ -s "$tmp_combined" ]; then
  echo "Nothing to install after filtering out cpl-* packages." >&2
  exit 0
fi

echo "Installing dependencies (excluding cpl-*) from:"
printf ' - %s\n' "${req_files[@]}"
echo
echo "Final set to install:"
cat "$tmp_combined"
echo

# Use python -m pip for reliability; change to python3 if needed.
python -m pip install -r "$tmp_combined"
@@ -1,20 +0,0 @@
from cpl.dependency.service_collection import ServiceCollection as _ServiceCollection


def add_api(collection: _ServiceCollection):
    try:
        from cpl.database import mysql

        collection.add_module(mysql)
    except ImportError as e:
        from cpl.core.errors import dependency_error

        dependency_error("cpl-database", e)

    try:
        from cpl import auth
        from cpl.auth import permission

        collection.add_module(auth)
        collection.add_module(permission)
    except ImportError as e:
        from cpl.core.errors import dependency_error

        dependency_error("cpl-auth", e)


_ServiceCollection.with_module(add_api, __name__)
@@ -1,15 +0,0 @@
from abc import ABC, abstractmethod

from starlette.types import Scope, Receive, Send


class ASGIMiddleware(ABC):
    @abstractmethod
    def __init__(self, app):
        self._app = app

    def _call_next(self, scope: Scope, receive: Receive, send: Send):
        return self._app(scope, receive, send)

    @abstractmethod
    async def __call__(self, scope: Scope, receive: Receive, send: Send): ...
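The middleware classes in the following hunks all derive from this ABC: each wraps the downstream ASGI app and forwards to it via `_call_next`. A minimal hypothetical subclass, only to illustrate the contract (the class itself is invented, not part of the diff):

```python
from starlette.types import Scope, Receive, Send

from cpl.api.abc.asgi_middleware_abc import ASGIMiddleware


class NoopMiddleware(ASGIMiddleware):
    """Hypothetical example: passes every request straight through."""

    def __init__(self, app):
        ASGIMiddleware.__init__(self, app)

    async def __call__(self, scope: Scope, receive: Receive, send: Send):
        # A real middleware would inspect scope/headers or wrap send() here.
        return await self._call_next(scope, receive, send)
```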
@@ -1,7 +0,0 @@
from cpl.core.log.logger import Logger


class APILogger(Logger):

    def __init__(self, source: str):
        Logger.__init__(self, source, "api")
@@ -1,13 +0,0 @@
from typing import Optional

from cpl.core.configuration import ConfigurationModelABC


class ApiSettings(ConfigurationModelABC):

    def __init__(self, src: Optional[dict] = None):
        super().__init__(src)

        self.option("host", str, "0.0.0.0")
        self.option("port", int, 5000)
        self.option("allowed_origins", list[str])
@@ -1,37 +0,0 @@
from http.client import HTTPException

from starlette.responses import JSONResponse
from starlette.types import Scope, Receive, Send


class APIError(HTTPException):
    status_code = 500

    @classmethod
    async def asgi_response(cls, scope: Scope, receive: Receive, send: Send):
        r = JSONResponse({"error": cls.__name__}, status_code=cls.status_code)
        return await r(scope, receive, send)

    @classmethod
    def response(cls):
        return JSONResponse({"error": cls.__name__}, status_code=cls.status_code)


class Unauthorized(APIError):
    status_code = 401


class Forbidden(APIError):
    status_code = 403


class NotFound(APIError):
    status_code = 404


class AlreadyExists(APIError):
    status_code = 409


class EndpointNotImplemented(APIError):
    status_code = 501
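These error types serve double duty: raised, they are caught by the exception handlers registered in `WebApp` further down; called as factories, they yield a ready JSON response. A brief hypothetical endpoint showing the factory path (the lookup is invented for illustration):

```python
from starlette.requests import Request
from starlette.responses import JSONResponse

from cpl.api.error import NotFound


async def get_item(request: Request):
    item = None  # hypothetical lookup that came up empty
    if item is None:
        return NotFound.response()  # canned {"error": "NotFound"} with status 404
    return JSONResponse(item)
```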
@@ -1,76 +0,0 @@
from keycloak import KeycloakAuthenticationError
from starlette.types import Scope, Receive, Send

from cpl.api.abc.asgi_middleware_abc import ASGIMiddleware
from cpl.api.api_logger import APILogger
from cpl.api.error import Unauthorized
from cpl.api.middleware.request import get_request
from cpl.api.router import Router
from cpl.auth.keycloak import KeycloakClient
from cpl.auth.schema import AuthUserDao, AuthUser
from cpl.dependency import ServiceProviderABC

_logger = APILogger(__name__)


class AuthenticationMiddleware(ASGIMiddleware):

    @ServiceProviderABC.inject
    def __init__(self, app, keycloak: KeycloakClient, user_dao: AuthUserDao):
        ASGIMiddleware.__init__(self, app)

        self._keycloak = keycloak
        self._user_dao = user_dao

    async def __call__(self, scope: Scope, receive: Receive, send: Send):
        request = get_request()
        url = request.url.path

        if url not in Router.get_auth_required_routes():
            _logger.trace(f"No authentication required for {url}")
            return await self._app(scope, receive, send)

        if not request.headers.get("Authorization"):
            _logger.debug(f"Unauthorized access to {url}, missing Authorization header")
            return await Unauthorized("Missing header Authorization").asgi_response(scope, receive, send)

        auth_header = request.headers.get("Authorization", None)
        if not auth_header or not auth_header.startswith("Bearer "):
            return await Unauthorized("Invalid Authorization header").asgi_response(scope, receive, send)

        token = auth_header.split("Bearer ")[1]
        if not await self._verify_login(token):
            _logger.debug(f"Unauthorized access to {url}, invalid token")
            return await Unauthorized("Invalid token").asgi_response(scope, receive, send)

        # check that the user exists in the db; if not, create it
        keycloak_id = self._keycloak.get_user_id(token)
        if keycloak_id is None:
            return await Unauthorized("Failed to get user id from token").asgi_response(scope, receive, send)

        user = await self._get_or_create_user(keycloak_id)
        if user.deleted:
            _logger.debug(f"Unauthorized access to {url}, user is deleted")
            return await Unauthorized("User is deleted").asgi_response(scope, receive, send)

        return await self._call_next(scope, receive, send)

    async def _get_or_create_user(self, keycloak_id: str) -> AuthUser:
        existing = await self._user_dao.find_by_keycloak_id(keycloak_id)
        if existing is not None:
            return existing

        user = AuthUser(0, keycloak_id)
        uid = await self._user_dao.create(user)
        return await self._user_dao.get_by_id(uid)

    async def _verify_login(self, token: str) -> bool:
        try:
            token_info = self._keycloak.introspect(token)
            return token_info.get("active", False)
        except KeycloakAuthenticationError as e:
            _logger.debug(f"Keycloak authentication error: {e}")
            return False
        except Exception as e:
            _logger.error(f"Unexpected error during token verification: {e}")
            return False
@@ -1,86 +0,0 @@
import time

from starlette.requests import Request
from starlette.types import Receive, Scope, Send

from cpl.api.abc.asgi_middleware_abc import ASGIMiddleware
from cpl.api.api_logger import APILogger
from cpl.api.middleware.request import get_request

_logger = APILogger(__name__)


class LoggingMiddleware(ASGIMiddleware):

    def __init__(self, app):
        ASGIMiddleware.__init__(self, app)

    async def __call__(self, scope: Scope, receive: Receive, send: Send):
        if scope["type"] != "http":
            await self._call_next(scope, receive, send)
            return

        request = get_request()
        await self._log_request(request)
        start_time = time.time()

        response_body = b""
        status_code = 500

        async def send_wrapper(message):
            nonlocal response_body, status_code
            if message["type"] == "http.response.start":
                status_code = message["status"]
            if message["type"] == "http.response.body":
                response_body += message.get("body", b"")
            await send(message)

        await self._call_next(scope, receive, send_wrapper)

        duration = (time.time() - start_time) * 1000
        await self._log_after_request(request, status_code, duration)

    @staticmethod
    def _filter_relevant_headers(headers: dict) -> dict:
        relevant_keys = {
            "content-type",
            "host",
            "connection",
            "user-agent",
            "origin",
            "referer",
            "accept",
        }
        return {key: value for key, value in headers.items() if key in relevant_keys}

    @classmethod
    async def _log_request(cls, request: Request):
        _logger.debug(
            f"Request {getattr(request.state, 'request_id', '-')}: {request.method}@{request.url.path} from {request.client.host}"
        )

        from cpl.core.ctx.user_context import get_user

        user = get_user()

        request_info = {
            "headers": cls._filter_relevant_headers(dict(request.headers)),
            "args": dict(request.query_params),
            "form-data": (
                await request.form()
                if request.headers.get("content-type") == "application/x-www-form-urlencoded"
                else None
            ),
            "payload": (await request.json() if request.headers.get("content-length") == "0" else None),
            "user": f"{user.id}-{user.keycloak_id}" if user else None,
            "files": (
                {key: file.filename for key, file in (await request.form()).items()} if await request.form() else None
            ),
        }

        _logger.trace(f"Request {getattr(request.state, 'request_id', '-')}: {request_info}")

    @staticmethod
    async def _log_after_request(request: Request, status_code: int, duration: float):
        _logger.info(
            f"Request finished {getattr(request.state, 'request_id', '-')}: {status_code}-{request.method}@{request.url.path} from {request.client.host} in {duration:.2f}ms"
        )
@@ -1,54 +0,0 @@
import time
from contextvars import ContextVar
from typing import Optional, Union
from uuid import uuid4

from starlette.requests import Request
from starlette.types import Scope, Receive, Send
from starlette.websockets import WebSocket

from cpl.api.abc.asgi_middleware_abc import ASGIMiddleware
from cpl.api.api_logger import APILogger
from cpl.api.typing import TRequest

_request_context: ContextVar[Union[TRequest, None]] = ContextVar("request", default=None)

_logger = APILogger(__name__)


class RequestMiddleware(ASGIMiddleware):

    def __init__(self, app):
        ASGIMiddleware.__init__(self, app)
        self._ctx_token = None

    async def __call__(self, scope: Scope, receive: Receive, send: Send):
        request = Request(scope, receive, send)
        await self.set_request_data(request)

        try:
            await self._app(scope, receive, send)
        finally:
            await self.clean_request_data()

    async def set_request_data(self, request: TRequest):
        request.state.request_id = uuid4()
        request.state.start_time = time.time()
        _logger.trace(f"Set new current request: {request.state.request_id}")

        self._ctx_token = _request_context.set(request)

    async def clean_request_data(self):
        request = get_request()
        if request is None:
            return

        if self._ctx_token is None:
            return

        _logger.trace(f"Clearing current request: {request.state.request_id}")
        _request_context.reset(self._ctx_token)


def get_request() -> Optional[Union[TRequest, WebSocket]]:
    return _request_context.get()
@@ -1,84 +0,0 @@
from starlette.routing import Route


class Router:
    _registered_routes: list[Route] = []
    _auth_required: list[str] = []

    @classmethod
    def get_routes(cls) -> list[Route]:
        return cls._registered_routes

    @classmethod
    def get_auth_required_routes(cls) -> list[str]:
        return cls._auth_required

    @classmethod
    def authenticate(cls):
        """
        Decorator to mark a route as requiring authentication.
        Usage:
            @Router.authenticate()
            @Router.get("/example")
            async def example_endpoint(request: TRequest):
                ...
        """

        def inner(fn):
            route_path = getattr(fn, "_route_path", None)
            if route_path and route_path not in cls._auth_required:
                cls._auth_required.append(route_path)
            return fn

        return inner

    @classmethod
    def route(cls, path=None, **kwargs):
        def inner(fn):
            cls._registered_routes.append(Route(path, fn, **kwargs))
            setattr(fn, "_route_path", path)
            return fn

        return inner

    @classmethod
    def get(cls, path=None, **kwargs):
        return cls.route(path, methods=["GET"], **kwargs)

    @classmethod
    def post(cls, path=None, **kwargs):
        return cls.route(path, methods=["POST"], **kwargs)

    @classmethod
    def head(cls, path=None, **kwargs):
        return cls.route(path, methods=["HEAD"], **kwargs)

    @classmethod
    def put(cls, path=None, **kwargs):
        return cls.route(path, methods=["PUT"], **kwargs)

    @classmethod
    def delete(cls, path=None, **kwargs):
        return cls.route(path, methods=["DELETE"], **kwargs)

    @classmethod
    def override(cls):
        """
        Decorator to override an existing route with the same path.
        Usage:
            @Router.override()
            @Router.get("/example")
            async def example_endpoint(request: TRequest):
                ...
        """

        def inner(fn):
            route_path = getattr(fn, "_route_path", None)

            routes = list(filter(lambda x: x.path == route_path, cls._registered_routes))
            for route in routes[:-1]:
                cls._registered_routes.remove(route)

            return fn

        return inner
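The decorator pair composes bottom-up: `get()` registers the route and tags the function with its path, then `authenticate()` reads that tag and adds the path to the list that `AuthenticationMiddleware` checks. A minimal sketch, following the docstrings above (the endpoint and path are invented for illustration):

```python
from starlette.responses import JSONResponse

from cpl.api.router import Router
from cpl.api.typing import TRequest


@Router.authenticate()    # outermost: runs after get() has set _route_path
@Router.get("/api/ping")  # registers the route and tags fn with its path
async def ping(request: TRequest):
    return JSONResponse({"pong": True})
```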
@@ -1,13 +0,0 @@
from typing import Union, Literal, Callable

from starlette.middleware import Middleware
from starlette.requests import Request
from starlette.types import ASGIApp
from starlette.websockets import WebSocket

TRequest = Union[Request, WebSocket]
HTTPMethods = Literal["GET", "POST", "PUT", "PATCH", "DELETE", "OPTIONS"]
PartialMiddleware = Union[
    Middleware,
    Callable[[ASGIApp], ASGIApp],
]
@@ -1,187 +0,0 @@
import os
from typing import Mapping, Any, Callable

import uvicorn
from starlette.applications import Starlette
from starlette.middleware import Middleware
from starlette.middleware.cors import CORSMiddleware
from starlette.requests import Request
from starlette.responses import JSONResponse
from starlette.routing import Route
from starlette.types import ExceptionHandler

from cpl import api, auth
from cpl.api.api_logger import APILogger
from cpl.api.api_settings import ApiSettings
from cpl.api.error import APIError
from cpl.api.middleware.authentication import AuthenticationMiddleware
from cpl.api.middleware.logging import LoggingMiddleware
from cpl.api.middleware.request import RequestMiddleware
from cpl.api.router import Router
from cpl.api.typing import HTTPMethods, PartialMiddleware
from cpl.application.abc.application_abc import ApplicationABC
from cpl.core.configuration import Configuration
from cpl.dependency.service_provider_abc import ServiceProviderABC

_logger = APILogger("API")


class WebApp(ApplicationABC):
    def __init__(self, services: ServiceProviderABC):
        super().__init__(services, [auth, api])
        self._app: Starlette | None = None

        self._api_settings = Configuration.get(ApiSettings)

        self._routes: list[Route] = []
        self._middleware: list[Middleware] = [
            Middleware(RequestMiddleware),
            Middleware(LoggingMiddleware),
        ]
        self._exception_handlers: Mapping[Any, ExceptionHandler] = {
            Exception: self._handle_exception,
            APIError: self._handle_exception,
        }

    @staticmethod
    async def _handle_exception(request: Request, exc: Exception):
        if isinstance(exc, APIError):
            _logger.error(exc)
            return JSONResponse({"error": str(exc)}, status_code=exc.status_code)

        if hasattr(request.state, "request_id"):
            _logger.error(f"Request {request.state.request_id}", exc)
        else:
            _logger.error("Request unknown", exc)

        return JSONResponse({"error": str(exc)}, status_code=500)

    def _get_allowed_origins(self):
        origins = self._api_settings.allowed_origins

        if origins is None or origins == "":
            _logger.warning("No allowed origins specified, allowing all origins")
            return ["*"]

        _logger.debug(f"Allowed origins: {origins}")
        return origins.split(",")

    def with_database(self):
        self.with_migrations()
        self.with_seeders()

    def with_app(self, app: Starlette):
        assert app is not None, "app must not be None"
        assert isinstance(app, Starlette), "app must be an instance of Starlette"
        self._app = app
        return self

    def _check_for_app(self):
        if self._app is not None:
            raise ValueError("App is already set, cannot add routes or middleware")

    def with_routes_directory(self, directory: str) -> "WebApp":
        self._check_for_app()
        assert directory is not None, "directory must not be None"

        base = directory.replace("/", ".").replace("\\", ".")

        for filename in os.listdir(directory):
            if not filename.endswith(".py") or filename == "__init__.py":
                continue

            __import__(f"{base}.{filename[:-3]}")

        return self

    def with_routes(self, routes: list[Route]) -> "WebApp":
        self._check_for_app()
        assert routes is not None, "routes must not be None"
        assert all(isinstance(route, Route) for route in routes), "all routes must be of type starlette.routing.Route"
        self._routes.extend(routes)
        return self

    def with_route(self, path: str, fn: Callable[[Request], Any], method: HTTPMethods, **kwargs) -> "WebApp":
        self._check_for_app()
        assert path is not None, "path must not be None"
        assert fn is not None, "fn must not be None"
        assert method in [
            "GET",
            "POST",
            "PUT",
            "DELETE",
            "PATCH",
            "OPTIONS",
            "HEAD",
        ], "method must be a valid HTTP method"
        self._routes.append(Route(path, fn, methods=[method], **kwargs))
        return self

    def with_middleware(self, middleware: PartialMiddleware) -> "WebApp":
        self._check_for_app()

        if isinstance(middleware, Middleware):
            self._middleware.append(middleware)
        elif callable(middleware):
            self._middleware.append(Middleware(middleware))
        else:
            raise ValueError("middleware must be of type starlette.middleware.Middleware or a callable")

        return self

    def with_authentication(self):
        self.with_middleware(AuthenticationMiddleware)
        return self

    def with_authorization(self):
        pass

    async def main(self):
        _logger.debug("Preparing API")
        if self._app is None:
            routes = [
                Route(
                    path=route.path,
                    endpoint=self._services.inject(route.endpoint),
                    methods=route.methods,
                    name=route.name,
                )
                for route in self._routes + Router.get_routes()
            ]

            app = Starlette(
                routes=routes,
                middleware=[
                    *self._middleware,
                    Middleware(
                        CORSMiddleware,
                        allow_origins=self._get_allowed_origins(),
                        allow_methods=["*"],
                        allow_headers=["*"],
                    ),
                ],
                exception_handlers=self._exception_handlers,
            )
        else:
            app = self._app

        _logger.info(f"Start API on {self._api_settings.host}:{self._api_settings.port}")

        config = uvicorn.Config(
            app,
            host=self._api_settings.host,
            port=self._api_settings.port,
            log_config=None,
            loop="asyncio",
        )
        server = uvicorn.Server(config)
        await server.serve()

        _logger.info("Shutdown API")
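Putting the pieces together, a sketch of the fluent setup this class aims at (the module path, the `services` variable, and the endpoint are assumptions for illustration, not taken from the diff):

```python
from starlette.responses import JSONResponse

from cpl.api.web_app import WebApp  # assumed module path


async def health(request):
    return JSONResponse({"status": "ok"})


app = (
    WebApp(services)                       # services: a built ServiceProviderABC
    .with_route("/health", health, "GET")  # ad-hoc route registration
    .with_authentication()                 # adds AuthenticationMiddleware
)
app.run()                                  # delegates to Host.run(self.main)
```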
@@ -1,30 +0,0 @@
[build-system]
requires = ["setuptools>=70.1.0", "wheel>=0.43.0"]
build-backend = "setuptools.build_meta"

[project]
name = "cpl-api"
version = "2024.7.0"
description = "CPL api"
readme = "CPL api package"
requires-python = ">=3.12"
license = { text = "MIT" }
authors = [
    { name = "Sven Heidemann", email = "sven.heidemann@sh-edraft.de" }
]
keywords = ["cpl", "api", "backend", "shared", "library"]

dynamic = ["dependencies", "optional-dependencies"]

[project.urls]
Homepage = "https://www.sh-edraft.de"

[tool.setuptools.packages.find]
where = ["."]
include = ["cpl*"]

[tool.setuptools.dynamic]
dependencies = { file = ["requirements.txt"] }
optional-dependencies.dev = { file = ["requirements.dev.txt"] }
@@ -1 +0,0 @@
black==25.1.0
@@ -1,7 +0,0 @@
cpl-auth
cpl-application
cpl-core
cpl-dependency
starlette==0.48.0
python-multipart==0.0.20
uvicorn==0.35.0
@@ -1 +0,0 @@
from .application_builder import ApplicationBuilder
@@ -1,4 +0,0 @@
from .application_abc import ApplicationABC
from .application_extension_abc import ApplicationExtensionABC
from .startup_abc import StartupABC
from .startup_extension_abc import StartupExtensionABC
@@ -1,94 +0,0 @@
from abc import ABC, abstractmethod
from typing import Callable, Self

from cpl.application.host import Host
from cpl.core.console.console import Console
from cpl.core.log import LogSettings
from cpl.core.log.log_level import LogLevel
from cpl.core.log.logger_abc import LoggerABC
from cpl.dependency.service_provider_abc import ServiceProviderABC


def __not_implemented__(package: str, func: Callable):
    raise NotImplementedError(f"Package {package} is required to use {func.__name__} method")


class ApplicationABC(ABC):
    r"""ABC for the Application class

    Parameters:
        services: :class:`cpl.dependency.service_provider_abc.ServiceProviderABC`
            Contains instances of prepared objects
    """

    @abstractmethod
    def __init__(self, services: ServiceProviderABC, required_modules: list[str | object] = None):
        self._services = services
        self._required_modules = (
            [x.__name__ if not isinstance(x, str) else x for x in required_modules] if required_modules else []
        )

    @property
    def required_modules(self) -> list[str]:
        return self._required_modules

    @classmethod
    def extend(cls, name: str | Callable, func: Callable[[Self], Self]):
        r"""Extend the Application with a custom method

        Parameters:
            name: :class:`str`
                Name of the method
            func: :class:`Callable[[Self], Self]`
                Function that takes the Application as a parameter and returns it
        """
        if callable(name):
            name = name.__name__

        setattr(cls, name, func)
        return cls

    def with_logging(self, level: LogLevel = None):
        if level is None:
            from cpl.core.configuration.configuration import Configuration

            settings = Configuration.get(LogSettings)
            level = settings.level if settings else LogLevel.info

        logger = self._services.get_service(LoggerABC)
        logger.set_level(level)

    def with_permissions(self, *args, **kwargs):
        __not_implemented__("cpl-auth", self.with_permissions)

    def with_migrations(self, *args, **kwargs):
        __not_implemented__("cpl-database", self.with_migrations)

    def with_seeders(self, *args, **kwargs):
        __not_implemented__("cpl-database", self.with_seeders)

    def with_extension(self, func: Callable[[Self, ...], None], *args, **kwargs):
        r"""Extend the Application with a custom method

        Parameters:
            func: :class:`Callable[[Self], Self]`
                Function that takes the Application as a parameter and returns it
        """
        assert func is not None, "func must not be None"
        assert callable(func), "func must be callable"

        func(self, *args, **kwargs)

    def run(self):
        r"""Entry point

        Called by custom Application.main
        """
        try:
            Host.run(self.main)
        except KeyboardInterrupt:
            pass

    @abstractmethod
    def main(self): ...
@@ -1,10 +0,0 @@
from abc import ABC, abstractmethod

from cpl.dependency import ServiceProviderABC


class ApplicationExtensionABC(ABC):

    @staticmethod
    @abstractmethod
    def run(services: ServiceProviderABC): ...
@@ -1,21 +0,0 @@
from abc import ABC, abstractmethod

from cpl.dependency.service_collection import ServiceCollection


class StartupABC(ABC):
    r"""ABC for the startup class"""

    @staticmethod
    @abstractmethod
    def configure_configuration():
        r"""Creates the configuration of the application"""

    @staticmethod
    @abstractmethod
    def configure_services(services: ServiceCollection):
        r"""Creates the service provider

        Parameters:
            services: :class:`cpl.dependency.service_collection.ServiceCollection`
        """
@@ -1,20 +0,0 @@
from abc import ABC, abstractmethod

from cpl.dependency import ServiceCollection


class StartupExtensionABC(ABC):
    r"""ABC for startup extension classes"""

    @staticmethod
    @abstractmethod
    def configure_configuration():
        r"""Creates the configuration of the application"""

    @staticmethod
    @abstractmethod
    def configure_services(services: ServiceCollection):
        r"""Creates the service provider

        Parameters:
            services: :class:`cpl.dependency.service_collection.ServiceCollection`
        """
@@ -1,80 +0,0 @@
import asyncio
from typing import Type, Optional, TypeVar, Generic

from cpl.application.abc.application_abc import ApplicationABC
from cpl.application.abc.application_extension_abc import ApplicationExtensionABC
from cpl.application.abc.startup_abc import StartupABC
from cpl.application.abc.startup_extension_abc import StartupExtensionABC
from cpl.application.host import Host
from cpl.core.errors import dependency_error
from cpl.dependency.service_collection import ServiceCollection

TApp = TypeVar("TApp", bound=ApplicationABC)


class ApplicationBuilder(Generic[TApp]):

    def __init__(self, app: Type[ApplicationABC]):
        assert app is not None, "app must not be None"
        assert issubclass(app, ApplicationABC), "app must be a subclass of ApplicationABC"

        self._app = app

        self._services = ServiceCollection()

        self._startup: Optional[StartupABC] = None
        self._app_extensions: list[Type[ApplicationExtensionABC]] = []
        self._startup_extensions: list[Type[StartupExtensionABC]] = []

        self._async_loop = asyncio.get_event_loop()

    @property
    def services(self) -> ServiceCollection:
        return self._services

    @property
    def service_provider(self):
        return self._services.build()

    def validate_app_required_modules(self, app: ApplicationABC):
        for module in app.required_modules:
            if module in self._services.loaded_modules:
                continue

            dependency_error(
                module,
                ImportError(
                    f"Required module '{module}' for application '{app.__class__.__name__}' is not loaded. Load it using the 'add_module({module})' method."
                ),
            )

    def with_startup(self, startup: Type[StartupABC]) -> "ApplicationBuilder":
        self._startup = startup
        return self

    def with_extension(
        self,
        extension: Type[ApplicationExtensionABC | StartupExtensionABC],
    ) -> "ApplicationBuilder":
        if issubclass(extension, ApplicationExtensionABC) and extension not in self._app_extensions:
            self._app_extensions.append(extension)
        elif issubclass(extension, StartupExtensionABC) and extension not in self._startup_extensions:
            self._startup_extensions.append(extension)

        return self

    def build(self) -> TApp:
        for extension in self._startup_extensions:
            Host.run(extension.configure_configuration)
            Host.run(extension.configure_services, self._services)

        if self._startup is not None:
            Host.run(self._startup.configure_configuration)
            Host.run(self._startup.configure_services, self._services)

        for extension in self._app_extensions:
            Host.run(extension.run, self.service_provider)

        app = self._app(self.service_provider)
        self.validate_app_required_modules(app)
        return app
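# Illustrative sketch (not part of the original file): the typical build chain.
# MyApp and Startup are hypothetical subclasses of ApplicationABC / StartupABC.
#
#     app = (
#         ApplicationBuilder(MyApp)
#         .with_startup(Startup)
#         .build()
#     )
#     app.run()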
@@ -1,17 +0,0 @@
import asyncio
from typing import Callable


class Host:
    _loop = asyncio.get_event_loop()

    @classmethod
    def get_loop(cls):
        return cls._loop

    @classmethod
    def run(cls, func: Callable, *args, **kwargs):
        if asyncio.iscoroutinefunction(func):
            return cls._loop.run_until_complete(func(*args, **kwargs))

        return func(*args, **kwargs)
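# Illustrative sketch (not part of the original file): Host.run dispatches a
# plain callable synchronously and a coroutine function through the shared
# event loop.
#
#     async def fetch_data():
#         return 42
#
#     Host.run(fetch_data)        # awaited via run_until_complete
#     Host.run(print, "hello")    # invoked directly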
@@ -1,30 +0,0 @@
[build-system]
requires = ["setuptools>=70.1.0", "wheel>=0.43.0"]
build-backend = "setuptools.build_meta"

[project]
name = "cpl-application"
version = "2024.7.0"
description = "CPL application"
readme = "CPL application package"
requires-python = ">=3.12"
license = { text = "MIT" }
authors = [
    { name = "Sven Heidemann", email = "sven.heidemann@sh-edraft.de" }
]
keywords = ["cpl", "application", "backend", "shared", "library"]

dynamic = ["dependencies", "optional-dependencies"]

[project.urls]
Homepage = "https://www.sh-edraft.de"

[tool.setuptools.packages.find]
where = ["."]
include = ["cpl*"]

[tool.setuptools.dynamic]
dependencies = { file = ["requirements.txt"] }
optional-dependencies.dev = { file = ["requirements.dev.txt"] }
@@ -1 +0,0 @@
black==25.1.0
@@ -1,2 +0,0 @@
cpl-core
cpl-dependency
@@ -1,82 +0,0 @@
from enum import Enum
from typing import Type

from cpl.application.abc import ApplicationABC as _ApplicationABC
from cpl.auth import permission as _permission
from cpl.auth.keycloak.keycloak_admin import KeycloakAdmin as _KeycloakAdmin
from cpl.auth.keycloak.keycloak_client import KeycloakClient as _KeycloakClient
from cpl.dependency.service_collection import ServiceCollection as _ServiceCollection
from .auth_logger import AuthLogger
from .keycloak_settings import KeycloakSettings
from .permission_seeder import PermissionSeeder


def _with_permissions(self: _ApplicationABC, *permissions: Type[Enum]) -> _ApplicationABC:
    from cpl.auth.permission.permissions_registry import PermissionsRegistry

    for perm in permissions:
        PermissionsRegistry.with_enum(perm)
    return self


def _add_daos(collection: _ServiceCollection):
    from .schema._administration.auth_user_dao import AuthUserDao
    from .schema._administration.api_key_dao import ApiKeyDao
    from .schema._permission.api_key_permission_dao import ApiKeyPermissionDao
    from .schema._permission.permission_dao import PermissionDao
    from .schema._permission.role_dao import RoleDao
    from .schema._permission.role_permission_dao import RolePermissionDao
    from .schema._permission.role_user_dao import RoleUserDao

    collection.add_singleton(AuthUserDao)
    collection.add_singleton(ApiKeyDao)
    collection.add_singleton(ApiKeyPermissionDao)
    collection.add_singleton(PermissionDao)
    collection.add_singleton(RoleDao)
    collection.add_singleton(RolePermissionDao)
    collection.add_singleton(RoleUserDao)


def add_auth(collection: _ServiceCollection):
    import os

    try:
        from cpl.database.service.migration_service import MigrationService
        from cpl.database.model.server_type import ServerType, ServerTypes

        collection.add_singleton(_KeycloakClient)
        collection.add_singleton(_KeycloakAdmin)

        _add_daos(collection)

        provider = collection.build()
        migration_service: MigrationService = provider.get_service(MigrationService)
        if ServerType.server_type == ServerTypes.POSTGRES:
            migration_service.with_directory(
                os.path.join(os.path.dirname(os.path.realpath(__file__)), "scripts/postgres")
            )
        elif ServerType.server_type == ServerTypes.MYSQL:
            migration_service.with_directory(os.path.join(os.path.dirname(os.path.realpath(__file__)), "scripts/mysql"))
    except ImportError as e:
        from cpl.core.console import Console

        Console.error("cpl-database is not installed", str(e))


def add_permission(collection: _ServiceCollection):
    from .permission_seeder import PermissionSeeder
    from .permission.permissions_registry import PermissionsRegistry
    from .permission.permissions import Permissions

    try:
        from cpl.database.abc.data_seeder_abc import DataSeederABC

        collection.add_singleton(DataSeederABC, PermissionSeeder)
        PermissionsRegistry.with_enum(Permissions)
    except ImportError as e:
        from cpl.core.console import Console

        Console.error("cpl-database is not installed", str(e))


_ServiceCollection.with_module(add_auth, __name__)
_ServiceCollection.with_module(add_permission, _permission.__name__)
_ApplicationABC.extend(_ApplicationABC.with_permissions, _with_permissions)
@@ -1,8 +0,0 @@
from cpl.core.log import Logger
from cpl.core.typing import Source


class AuthLogger(Logger):

    def __init__(self, source: Source):
        Logger.__init__(self, source, "auth")
@@ -1,3 +0,0 @@
from .keycloak_admin import KeycloakAdmin
from .keycloak_client import KeycloakClient
from .keycloak_user import KeycloakUser
@@ -1,24 +0,0 @@
from keycloak import KeycloakAdmin as _KeycloakAdmin, KeycloakOpenIDConnection

from cpl.auth.auth_logger import AuthLogger
from cpl.auth.keycloak_settings import KeycloakSettings

_logger = AuthLogger("keycloak")


class KeycloakAdmin(_KeycloakAdmin):

    def __init__(self, settings: KeycloakSettings):
        _logger.info("Initializing Keycloak admin")
        _connection = KeycloakOpenIDConnection(
            server_url=settings.url,
            client_id=settings.client_id,
            realm_name=settings.realm,
            client_secret_key=settings.client_secret,
        )
        _KeycloakAdmin.__init__(
            self,
            connection=_connection,
        )

        self.__connection = _connection
@@ -1,25 +0,0 @@
from typing import Optional

from keycloak import KeycloakOpenID

from cpl.auth.auth_logger import AuthLogger
from cpl.auth.keycloak_settings import KeycloakSettings

_logger = AuthLogger("keycloak")


class KeycloakClient(KeycloakOpenID):

    def __init__(self, settings: KeycloakSettings):
        KeycloakOpenID.__init__(
            self,
            server_url=settings.url,
            client_id=settings.client_id,
            realm_name=settings.realm,
            client_secret_key=settings.client_secret,
        )
        _logger.info("Initializing Keycloak client")

    def get_user_id(self, token: str) -> Optional[str]:
        info = self.introspect(token)
        return info.get("sub", None)
@@ -1,36 +0,0 @@
from cpl.core.utils.get_value import get_value
from cpl.dependency import ServiceProviderABC


class KeycloakUser:

    def __init__(self, source: dict):
        self._username = get_value(source, "preferred_username", str)
        self._email = get_value(source, "email", str)
        self._email_verified = get_value(source, "email_verified", bool)
        self._name = get_value(source, "name", str)

    @property
    def username(self) -> str:
        return self._username

    @property
    def email(self) -> str:
        return self._email

    @property
    def email_verified(self) -> bool:
        return self._email_verified

    @property
    def name(self) -> str:
        return self._name

    # Attrs from keycloak

    @property
    def id(self) -> str:
        from cpl.auth import KeycloakAdmin

        keycloak_admin: KeycloakAdmin = ServiceProviderABC.get_global_service(KeycloakAdmin)
        return keycloak_admin.get_user_id(self._username)
@@ -1,17 +0,0 @@
from typing import Optional

from cpl.core.configuration.configuration_model_abc import ConfigurationModelABC


class KeycloakSettings(ConfigurationModelABC):

    def __init__(
        self,
        src: Optional[dict] = None,
    ):
        ConfigurationModelABC.__init__(self, src, "KEYCLOAK")

        self.option("url", str, required=True)
        self.option("client_id", str, required=True)
        self.option("realm", str, required=True)
        self.option("client_secret", str, required=True)
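# Illustrative sketch (not part of the original file): the model binds the
# "KEYCLOAK" configuration section; the exact source format depends on
# ConfigurationModelABC, so the dict shape below is only an assumption.
#
#     settings = KeycloakSettings({
#         "url": "https://keycloak.example.org",
#         "client_id": "my-client",
#         "realm": "my-realm",
#         "client_secret": "...",
#     })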
@@ -1,36 +0,0 @@
from enum import Enum


class Permissions(Enum):
    # Administration

    # administrator
    administrator = "administrator"

    # api keys
    api_keys = "api_keys"
    api_keys_create = "api_keys.create"
    api_keys_update = "api_keys.update"
    api_keys_delete = "api_keys.delete"

    # users
    users = "users"
    users_create = "users.create"
    users_update = "users.update"
    users_delete = "users.delete"

    # settings
    settings = "settings"
    settings_update = "settings.update"

    # Permissions

    # roles
    roles = "roles"
    roles_create = "roles.create"
    roles_update = "roles.update"
    roles_delete = "roles.delete"
@@ -1,24 +0,0 @@
from enum import Enum
from typing import Optional, Type


class PermissionsRegistry:
    _permissions: dict[str, Optional[str]] = {}

    @classmethod
    def get(cls):
        return cls._permissions.keys()

    @classmethod
    def descriptions(cls):
        return {x: cls._permissions[x] for x in cls._permissions if cls._permissions[x] is not None}

    @classmethod
    def set(cls, permission: str, description: Optional[str] = None):
        cls._permissions[permission] = description

    @classmethod
    def with_enum(cls, e: Type[Enum]):
        perms = [x.value for x in e]
        for perm in perms:
            cls.set(str(perm))
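# Illustrative sketch (not part of the original file): registering a custom
# permissions enum. The enum values become registry keys; descriptions stay
# None until set explicitly. BlogPermissions is hypothetical.
#
#     class BlogPermissions(Enum):
#         posts_create = "posts.create"
#
#     PermissionsRegistry.with_enum(BlogPermissions)
#     PermissionsRegistry.set("posts.create", "Allows creating posts")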
@@ -1,120 +0,0 @@
from cpl.auth.permission.permissions import Permissions
from cpl.auth.permission.permissions_registry import PermissionsRegistry
from cpl.auth.schema import (
    Permission,
    Role,
    RolePermission,
    ApiKey,
    ApiKeyPermission,
    PermissionDao,
    RoleDao,
    RolePermissionDao,
    ApiKeyDao,
    ApiKeyPermissionDao,
)
from cpl.core.utils.get_value import get_value
from cpl.database.abc.data_seeder_abc import DataSeederABC
from cpl.database.db_logger import DBLogger

_logger = DBLogger(__name__)


class PermissionSeeder(DataSeederABC):
    def __init__(
        self,
        permission_dao: PermissionDao,
        role_dao: RoleDao,
        role_permission_dao: RolePermissionDao,
        api_key_dao: ApiKeyDao,
        api_key_permission_dao: ApiKeyPermissionDao,
    ):
        DataSeederABC.__init__(self)
        self._permission_dao = permission_dao
        self._role_dao = role_dao
        self._role_permission_dao = role_permission_dao
        self._api_key_dao = api_key_dao
        self._api_key_permission_dao = api_key_permission_dao

    async def seed(self):
        permissions = await self._permission_dao.get_all()
        possible_permissions = [permission for permission in PermissionsRegistry.get()]

        if len(permissions) == len(possible_permissions):
            _logger.info("Permissions already existing")
            await self._update_missing_descriptions()
            return

        to_delete = []
        for permission in permissions:
            if permission.name in possible_permissions:
                continue

            to_delete.append(permission)

        await self._permission_dao.delete_many(to_delete, hard_delete=True)

        _logger.warning("Permissions incomplete")
        permission_names = [permission.name for permission in permissions]
        await self._permission_dao.create_many(
            [
                Permission(
                    0,
                    permission,
                    get_value(PermissionsRegistry.descriptions(), permission, str),
                )
                for permission in possible_permissions
                if permission not in permission_names
            ]
        )
        await self._update_missing_descriptions()

        await self._add_missing_to_role()
        await self._add_missing_to_api_key()

    async def _add_missing_to_role(self):
        admin_role = await self._role_dao.find_single_by([{Role.id: 1}, {Role.name: "admin"}])
        if admin_role is None:
            return

        admin_permissions = await self._role_permission_dao.get_by_role_id(admin_role.id, with_deleted=True)
        to_assign = [
            RolePermission(0, admin_role.id, permission.id)
            for permission in await self._permission_dao.get_all()
            if permission.id not in [x.permission_id for x in admin_permissions]
        ]
        await self._role_permission_dao.create_many(to_assign)

    async def _add_missing_to_api_key(self):
        admin_api_key = await self._api_key_dao.find_single_by([{ApiKey.id: 1}, {ApiKey.identifier: "admin"}])
        if admin_api_key is None:
            return

        admin_permissions = await self._api_key_permission_dao.find_by_api_key_id(admin_api_key.id, with_deleted=True)
        to_assign = [
            ApiKeyPermission(0, admin_api_key.id, permission.id)
            for permission in await self._permission_dao.get_all()
            if permission.id not in [x.permission_id for x in admin_permissions]
        ]
        await self._api_key_permission_dao.create_many(to_assign)

    async def _update_missing_descriptions(self):
        permissions = {
            permission.name: permission
            for permission in await self._permission_dao.find_by([{Permission.description: None}])
        }
        to_update = []

        if len(permissions) == 0:
            return

        # Registry keys are plain permission-name strings (see PermissionsRegistry.set),
        # so they are compared directly rather than via an Enum .value attribute.
        for key in PermissionsRegistry.descriptions():
            if key not in permissions:
                continue

            permissions[key].description = PermissionsRegistry.descriptions()[key]
            to_update.append(permissions[key])

        if len(to_update) == 0:
            return

        await self._permission_dao.update_many(to_update)
@@ -1,15 +0,0 @@
from ._administration.api_key import ApiKey
from ._administration.api_key_dao import ApiKeyDao
from ._administration.auth_user import AuthUser
from ._administration.auth_user_dao import AuthUserDao

from ._permission.api_key_permission import ApiKeyPermission
from ._permission.api_key_permission_dao import ApiKeyPermissionDao
from ._permission.permission import Permission
from ._permission.permission_dao import PermissionDao
from ._permission.role import Role
from ._permission.role_dao import RoleDao
from ._permission.role_permission import RolePermission
from ._permission.role_permission_dao import RolePermissionDao
from ._permission.role_user import RoleUser
from ._permission.role_user_dao import RoleUserDao
@@ -1,66 +0,0 @@
import secrets
from datetime import datetime
from typing import Optional, Union

from async_property import async_property

from cpl.auth.permission.permissions import Permissions
from cpl.core.environment.environment import Environment
from cpl.core.log.logger import Logger
from cpl.core.typing import Id, SerialId
from cpl.core.utils.credential_manager import CredentialManager
from cpl.database.abc.db_model_abc import DbModelABC
from cpl.dependency.service_provider_abc import ServiceProviderABC

_logger = Logger(__name__)


class ApiKey(DbModelABC):

    def __init__(
        self,
        id: SerialId,
        identifier: str,
        key: Union[str, bytes],
        deleted: bool = False,
        editor_id: Optional[Id] = None,
        created: Optional[datetime] = None,
        updated: Optional[datetime] = None,
    ):
        DbModelABC.__init__(self, id, deleted, editor_id, created, updated)
        self._identifier = identifier
        self._key = key

    @property
    def identifier(self) -> str:
        return self._identifier

    @property
    def key(self) -> str:
        return self._key

    @property
    def plain_key(self) -> str:
        return CredentialManager.decrypt(self.key)

    @async_property
    async def permissions(self):
        from cpl.auth.schema._permission.api_key_permission_dao import ApiKeyPermissionDao

        api_key_permission_dao = ServiceProviderABC.get_global_provider().get_service(ApiKeyPermissionDao)

        return [await x.permission for x in await api_key_permission_dao.find_by_api_key_id(self.id)]

    async def has_permission(self, permission: Permissions) -> bool:
        return permission.value in [x.name for x in await self.permissions]

    def set_new_api_key(self):
        self._key = self.new_key()

    @staticmethod
    def new_key() -> str:
        return CredentialManager.encrypt(f"api_{secrets.token_urlsafe(Environment.get('API_KEY_LENGTH', int, 64))}")

    @classmethod
    def new(cls, identifier: str) -> "ApiKey":
        return ApiKey(0, identifier, cls.new_key())
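# Illustrative sketch (not part of the original file): creating a key. new()
# stores the key encrypted via CredentialManager; plain_key decrypts it again.
# The plaintext has the form "api_<token>", sized by the API_KEY_LENGTH
# environment value (default 64). The identifier below is hypothetical.
#
#     api_key = ApiKey.new("ci-pipeline")
#     print(api_key.plain_key)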
@@ -1,32 +0,0 @@
from typing import Optional

from cpl.auth.schema._administration.api_key import ApiKey
from cpl.database import TableManager
from cpl.database.abc import DbModelDaoABC
from cpl.database.db_logger import DBLogger

_logger = DBLogger(__name__)


class ApiKeyDao(DbModelDaoABC[ApiKey]):

    def __init__(self):
        DbModelDaoABC.__init__(self, __name__, ApiKey, TableManager.get("api_keys"))

        self.attribute(ApiKey.identifier, str)
        self.attribute(ApiKey.key, str, "keystring")

    async def get_by_identifier(self, ident: str) -> ApiKey:
        result = await self._db.select_map(f"SELECT * FROM {self._table_name} WHERE Identifier = '{ident}'")
        return self.to_object(result[0])

    async def get_by_key(self, key: str) -> ApiKey:
        result = await self._db.select_map(f"SELECT * FROM {self._table_name} WHERE Keystring = '{key}'")
        return self.to_object(result[0])

    async def find_by_key(self, key: str) -> Optional[ApiKey]:
        result = await self._db.select_map(f"SELECT * FROM {self._table_name} WHERE Keystring = '{key}'")
        if not result or len(result) == 0:
            return None

        return self.to_object(result[0])
@@ -1,89 +0,0 @@
import uuid
from datetime import datetime
from typing import Optional

from async_property import async_property
from keycloak import KeycloakGetError

from cpl.auth.keycloak import KeycloakAdmin
from cpl.auth.auth_logger import AuthLogger
from cpl.auth.permission.permissions import Permissions
from cpl.core.typing import SerialId
from cpl.database.abc import DbModelABC
from cpl.dependency import ServiceProviderABC

_logger = AuthLogger(__name__)


class AuthUser(DbModelABC):
    def __init__(
        self,
        id: SerialId,
        keycloak_id: str,
        deleted: bool = False,
        editor_id: Optional[SerialId] = None,
        created: Optional[datetime] = None,
        updated: Optional[datetime] = None,
    ):
        DbModelABC.__init__(self, id, deleted, editor_id, created, updated)
        self._keycloak_id = keycloak_id

    @property
    def keycloak_id(self) -> str:
        return self._keycloak_id

    @property
    def username(self):
        if self._keycloak_id == str(uuid.UUID(int=0)):
            return "ANONYMOUS"

        try:
            keycloak_admin: KeycloakAdmin = ServiceProviderABC.get_global_service(KeycloakAdmin)
            return keycloak_admin.get_user(self._keycloak_id).get("username")
        except KeycloakGetError:
            return "UNKNOWN"
        except Exception as e:
            _logger.error(f"Failed to get user {self._keycloak_id} from Keycloak", e)
            return "UNKNOWN"

    @property
    def email(self):
        if self._keycloak_id == str(uuid.UUID(int=0)):
            return "ANONYMOUS"

        try:
            keycloak_admin: KeycloakAdmin = ServiceProviderABC.get_global_service(KeycloakAdmin)
            return keycloak_admin.get_user(self._keycloak_id).get("email")
        except KeycloakGetError:
            return "UNKNOWN"
        except Exception as e:
            _logger.error(f"Failed to get user {self._keycloak_id} from Keycloak", e)
            return "UNKNOWN"

    @async_property
    async def roles(self):
        from cpl.auth.schema._permission.role_user_dao import RoleUserDao

        role_user_dao: RoleUserDao = ServiceProviderABC.get_global_service(RoleUserDao)
        return [await x.role for x in await role_user_dao.get_by_user_id(self.id)]

    @async_property
    async def permissions(self):
        from cpl.auth.schema._administration.auth_user_dao import AuthUserDao

        auth_user_dao: AuthUserDao = ServiceProviderABC.get_global_service(AuthUserDao)
        return await auth_user_dao.get_permissions(self.id)

    async def has_permission(self, permission: Permissions) -> bool:
        from cpl.auth.schema._administration.auth_user_dao import AuthUserDao

        auth_user_dao: AuthUserDao = ServiceProviderABC.get_global_service(AuthUserDao)
        return await auth_user_dao.has_permission(self.id, permission)

    async def anonymize(self):
        from cpl.auth.schema._administration.auth_user_dao import AuthUserDao

        auth_user_dao: AuthUserDao = ServiceProviderABC.get_global_service(AuthUserDao)

        self._keycloak_id = str(uuid.UUID(int=0))
        await auth_user_dao.update(self)
@@ -1,72 +0,0 @@
from typing import Optional, Union

from cpl.auth.permission.permissions import Permissions
from cpl.auth.schema._administration.auth_user import AuthUser
from cpl.database import TableManager
from cpl.database.abc import DbModelDaoABC
from cpl.database.db_logger import DBLogger
from cpl.database.external_data_temp_table_builder import ExternalDataTempTableBuilder
from cpl.dependency import ServiceProviderABC

_logger = DBLogger(__name__)


class AuthUserDao(DbModelDaoABC[AuthUser]):

    def __init__(self):
        DbModelDaoABC.__init__(self, __name__, AuthUser, TableManager.get("auth_users"))

        self.attribute(AuthUser.keycloak_id, str, db_name="keycloakId")

        # Closure used by the temp-table builder to source the external field values.
        async def get_users():
            return [(x.id, x.username, x.email) for x in await self.get_all()]

        self.use_external_fields(
            ExternalDataTempTableBuilder()
            .with_table_name(self._table_name)
            .with_field("id", "int", True)
            .with_field("username", "text")
            .with_field("email", "text")
            .with_value_getter(get_users)
        )

    async def get_by_keycloak_id(self, keycloak_id: str) -> AuthUser:
        return await self.get_single_by({AuthUser.keycloak_id: keycloak_id})

    async def find_by_keycloak_id(self, keycloak_id: str) -> Optional[AuthUser]:
        return await self.find_single_by({AuthUser.keycloak_id: keycloak_id})

    async def has_permission(self, user_id: int, permission: Union[Permissions, str]) -> bool:
        from cpl.auth.schema._permission.permission_dao import PermissionDao

        permission_dao: PermissionDao = ServiceProviderABC.get_global_service(PermissionDao)
        p = await permission_dao.get_by_name(permission if isinstance(permission, str) else permission.value)
        result = await self._db.select_map(
            f"""
            SELECT COUNT(*)
            FROM permission.role_users ru
                JOIN permission.role_permissions rp ON ru.roleId = rp.roleId
            WHERE ru.userId = {user_id}
              AND rp.permissionId = {p.id}
              AND ru.deleted = FALSE
              AND rp.deleted = FALSE;
            """
        )
        if result is None or len(result) == 0:
            return False

        return result[0]["count"] > 0

    async def get_permissions(self, user_id: int) -> list[Permissions]:
        result = await self._db.select_map(
            f"""
            SELECT p.*
            FROM permission.permissions p
                JOIN permission.role_permissions rp ON p.id = rp.permissionId
                JOIN permission.role_users ru ON rp.roleId = ru.roleId
            WHERE ru.userId = {user_id}
              AND rp.deleted = FALSE
              AND ru.deleted = FALSE;
            """
        )
        return [Permissions(p["name"]) for p in result]
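# Illustrative sketch (not part of the original file): a permission check.
# has_permission accepts either a Permissions member or a raw name string;
# the user object below is assumed.
#
#     dao: AuthUserDao = ServiceProviderABC.get_global_service(AuthUserDao)
#     allowed = await dao.has_permission(user.id, Permissions.users_create)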
@@ -1,46 +0,0 @@
from datetime import datetime
from typing import Optional

from async_property import async_property

from cpl.core.typing import SerialId
from cpl.database.abc import DbJoinModelABC
from cpl.dependency import ServiceProviderABC


class ApiKeyPermission(DbJoinModelABC):
    def __init__(
        self,
        id: SerialId,
        api_key_id: SerialId,
        permission_id: SerialId,
        deleted: bool = False,
        editor_id: Optional[SerialId] = None,
        created: Optional[datetime] = None,
        updated: Optional[datetime] = None,
    ):
        DbJoinModelABC.__init__(self, api_key_id, permission_id, id, deleted, editor_id, created, updated)
        self._api_key_id = api_key_id
        self._permission_id = permission_id

    @property
    def api_key_id(self) -> int:
        return self._api_key_id

    @async_property
    async def api_key(self):
        from cpl.auth.schema._administration.api_key_dao import ApiKeyDao

        api_key_dao: ApiKeyDao = ServiceProviderABC.get_global_service(ApiKeyDao)
        return await api_key_dao.get_by_id(self._api_key_id)

    @property
    def permission_id(self) -> int:
        return self._permission_id

    @async_property
    async def permission(self):
        from cpl.auth.schema._permission.permission_dao import PermissionDao

        permission_dao: PermissionDao = ServiceProviderABC.get_global_service(PermissionDao)
        return await permission_dao.get_by_id(self._permission_id)
@@ -1,29 +0,0 @@
from cpl.auth.schema._permission.api_key_permission import ApiKeyPermission
from cpl.database import TableManager
from cpl.database.abc import DbModelDaoABC
from cpl.database.db_logger import DBLogger

_logger = DBLogger(__name__)


class ApiKeyPermissionDao(DbModelDaoABC[ApiKeyPermission]):

    def __init__(self):
        DbModelDaoABC.__init__(self, __name__, ApiKeyPermission, TableManager.get("api_key_permissions"))

        self.attribute(ApiKeyPermission.api_key_id, int)
        self.attribute(ApiKeyPermission.permission_id, int)

    async def find_by_api_key_id(self, api_key_id: int, with_deleted=False) -> list[ApiKeyPermission]:
        f = [{ApiKeyPermission.api_key_id: api_key_id}]
        if not with_deleted:
            f.append({ApiKeyPermission.deleted: False})

        return await self.find_by(f)

    async def find_by_permission_id(self, permission_id: int, with_deleted=False) -> list[ApiKeyPermission]:
        f = [{ApiKeyPermission.permission_id: permission_id}]
        if not with_deleted:
            f.append({ApiKeyPermission.deleted: False})

        return await self.find_by(f)
@@ -1,37 +0,0 @@
from datetime import datetime
from typing import Optional

from cpl.core.typing import SerialId
from cpl.database.abc import DbModelABC


class Permission(DbModelABC):
    def __init__(
        self,
        id: SerialId,
        name: str,
        description: Optional[str],
        deleted: bool = False,
        editor_id: Optional[SerialId] = None,
        created: Optional[datetime] = None,
        updated: Optional[datetime] = None,
    ):
        DbModelABC.__init__(self, id, deleted, editor_id, created, updated)
        self._name = name
        self._description = description

    @property
    def name(self) -> str:
        return self._name

    @name.setter
    def name(self, value: str):
        self._name = value

    @property
    def description(self) -> Optional[str]:
        return self._description

    @description.setter
    def description(self, value: str):
        self._description = value
@@ -1,21 +0,0 @@
from typing import Optional

from cpl.auth.schema._permission.permission import Permission
from cpl.database import TableManager
from cpl.database.abc import DbModelDaoABC
from cpl.database.db_logger import DBLogger

_logger = DBLogger(__name__)


class PermissionDao(DbModelDaoABC[Permission]):

    def __init__(self):
        DbModelDaoABC.__init__(self, __name__, Permission, TableManager.get("permissions"))

        self.attribute(Permission.name, str)
        self.attribute(Permission.description, Optional[str])

    async def get_by_name(self, name: str) -> Permission:
        result = await self._db.select_map(f"SELECT * FROM {self._table_name} WHERE Name = '{name}'")
        return self.to_object(result[0])
@@ -1,66 +0,0 @@
from datetime import datetime
from typing import Optional

from async_property import async_property

from cpl.auth.permission.permissions import Permissions
from cpl.core.typing import SerialId
from cpl.database.abc import DbModelABC
from cpl.dependency import ServiceProviderABC


class Role(DbModelABC):
    def __init__(
        self,
        id: SerialId,
        name: str,
        description: str,
        deleted: bool = False,
        editor_id: Optional[SerialId] = None,
        created: Optional[datetime] = None,
        updated: Optional[datetime] = None,
    ):
        DbModelABC.__init__(self, id, deleted, editor_id, created, updated)
        self._name = name
        self._description = description

    @property
    def name(self) -> str:
        return self._name

    @name.setter
    def name(self, value: str):
        self._name = value

    @property
    def description(self) -> str:
        return self._description

    @description.setter
    def description(self, value: str):
        self._description = value

    @async_property
    async def permissions(self):
        from cpl.auth.schema._permission.role_permission_dao import RolePermissionDao

        role_permission_dao: RolePermissionDao = ServiceProviderABC.get_global_service(RolePermissionDao)
        return [await x.permission for x in await role_permission_dao.get_by_role_id(self.id)]

    @async_property
    async def users(self):
        from cpl.auth.schema._permission.role_user_dao import RoleUserDao

        role_user_dao: RoleUserDao = ServiceProviderABC.get_global_service(RoleUserDao)
        return [await x.user for x in await role_user_dao.get_by_role_id(self.id)]

    async def has_permission(self, permission: Permissions) -> bool:
        from cpl.auth.schema._permission.permission_dao import PermissionDao
        from cpl.auth.schema._permission.role_permission_dao import RolePermissionDao

        permission_dao: PermissionDao = ServiceProviderABC.get_global_service(PermissionDao)
        role_permission_dao: RolePermissionDao = ServiceProviderABC.get_global_service(RolePermissionDao)

        p = await permission_dao.get_by_name(permission.value)

        # Compare against the assigned permission ids, not the join-row ids.
        return p.id in [x.permission_id for x in await role_permission_dao.get_by_role_id(self.id)]
@@ -1,17 +0,0 @@
from cpl.auth.schema._permission.role import Role
from cpl.database import TableManager
from cpl.database.abc import DbModelDaoABC
from cpl.database.db_logger import DBLogger

_logger = DBLogger(__name__)


class RoleDao(DbModelDaoABC[Role]):
    def __init__(self):
        DbModelDaoABC.__init__(self, __name__, Role, TableManager.get("roles"))
        self.attribute(Role.name, str)
        self.attribute(Role.description, str)

    async def get_by_name(self, name: str) -> Role:
        result = await self._db.select_map(f"SELECT * FROM {self._table_name} WHERE Name = '{name}'")
        return self.to_object(result[0])
@@ -1,46 +0,0 @@
from datetime import datetime
from typing import Optional

from async_property import async_property

from cpl.core.typing import SerialId
from cpl.database.abc import DbModelABC
from cpl.dependency import ServiceProviderABC


class RolePermission(DbModelABC):
    def __init__(
        self,
        id: SerialId,
        role_id: SerialId,
        permission_id: SerialId,
        deleted: bool = False,
        editor_id: Optional[SerialId] = None,
        created: Optional[datetime] = None,
        updated: Optional[datetime] = None,
    ):
        DbModelABC.__init__(self, id, deleted, editor_id, created, updated)
        self._role_id = role_id
        self._permission_id = permission_id

    @property
    def role_id(self) -> int:
        return self._role_id

    @async_property
    async def role(self):
        from cpl.auth.schema._permission.role_dao import RoleDao

        role_dao: RoleDao = ServiceProviderABC.get_global_service(RoleDao)
        return await role_dao.get_by_id(self._role_id)

    @property
    def permission_id(self) -> int:
        return self._permission_id

    @async_property
    async def permission(self):
        from cpl.auth.schema._permission.permission_dao import PermissionDao

        permission_dao: PermissionDao = ServiceProviderABC.get_global_service(PermissionDao)
        return await permission_dao.get_by_id(self._permission_id)
@@ -1,29 +0,0 @@
from cpl.auth.schema._permission.role_permission import RolePermission
from cpl.database import TableManager
from cpl.database.abc import DbModelDaoABC
from cpl.database.db_logger import DBLogger

_logger = DBLogger(__name__)


class RolePermissionDao(DbModelDaoABC[RolePermission]):

    def __init__(self):
        DbModelDaoABC.__init__(self, __name__, RolePermission, TableManager.get("role_permissions"))

        self.attribute(RolePermission.role_id, int)
        self.attribute(RolePermission.permission_id, int)

    async def get_by_role_id(self, role_id: int, with_deleted=False) -> list[RolePermission]:
        f = [{RolePermission.role_id: role_id}]
        if not with_deleted:
            f.append({RolePermission.deleted: False})

        return await self.find_by(f)

    async def get_by_permission_id(self, permission_id: int, with_deleted=False) -> list[RolePermission]:
        f = [{RolePermission.permission_id: permission_id}]
        if not with_deleted:
            f.append({RolePermission.deleted: False})

        return await self.find_by(f)
@@ -1,46 +0,0 @@
from datetime import datetime
from typing import Optional

from async_property import async_property

from cpl.core.typing import SerialId
from cpl.database.abc import DbJoinModelABC
from cpl.dependency import ServiceProviderABC


class RoleUser(DbJoinModelABC):
    def __init__(
        self,
        id: SerialId,
        user_id: SerialId,
        role_id: SerialId,
        deleted: bool = False,
        editor_id: Optional[SerialId] = None,
        created: Optional[datetime] = None,
        updated: Optional[datetime] = None,
    ):
        DbJoinModelABC.__init__(self, id, user_id, role_id, deleted, editor_id, created, updated)
        self._user_id = user_id
        self._role_id = role_id

    @property
    def user_id(self) -> int:
        return self._user_id

    @async_property
    async def user(self):
        from cpl.auth.schema._administration.auth_user_dao import AuthUserDao

        auth_user_dao: AuthUserDao = ServiceProviderABC.get_global_service(AuthUserDao)
        return await auth_user_dao.get_by_id(self._user_id)

    @property
    def role_id(self) -> int:
        return self._role_id

    @async_property
    async def role(self):
        from cpl.auth.schema._permission.role_dao import RoleDao

        role_dao: RoleDao = ServiceProviderABC.get_global_service(RoleDao)
        return await role_dao.get_by_id(self._role_id)
@@ -1,29 +0,0 @@
from cpl.auth.schema._permission.role_user import RoleUser
from cpl.database import TableManager
from cpl.database.abc import DbModelDaoABC
from cpl.database.db_logger import DBLogger

_logger = DBLogger(__name__)


class RoleUserDao(DbModelDaoABC[RoleUser]):

    def __init__(self):
        DbModelDaoABC.__init__(self, __name__, RoleUser, TableManager.get("role_users"))

        self.attribute(RoleUser.role_id, int)
        self.attribute(RoleUser.user_id, int)

    async def get_by_role_id(self, rid: int, with_deleted=False) -> list[RoleUser]:
        f = [{RoleUser.role_id: rid}]
        if not with_deleted:
            f.append({RoleUser.deleted: False})

        return await self.find_by(f)

    async def get_by_user_id(self, uid: int, with_deleted=False) -> list[RoleUser]:
        f = [{RoleUser.user_id: uid}]
        if not with_deleted:
            f.append({RoleUser.deleted: False})

        return await self.find_by(f)
@@ -1,44 +0,0 @@
CREATE TABLE IF NOT EXISTS administration_auth_users
(
    id         INT AUTO_INCREMENT PRIMARY KEY,
    keycloakId CHAR(36)  NOT NULL,
    -- for history
    deleted    BOOL      NOT NULL DEFAULT FALSE,
    editorId   INT       NULL,
    created    TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
    updated    TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,

    CONSTRAINT UC_KeycloakId UNIQUE (keycloakId),
    CONSTRAINT FK_EditorId FOREIGN KEY (editorId) REFERENCES administration_auth_users (id)
);

CREATE TABLE IF NOT EXISTS administration_auth_users_history
(
    id         INT       NOT NULL,
    keycloakId CHAR(36)  NOT NULL,
    -- for history
    deleted    BOOL      NOT NULL,
    editorId   INT       NULL,
    created    TIMESTAMP NOT NULL,
    updated    TIMESTAMP NOT NULL
);

CREATE TRIGGER TR_administration_auth_usersUpdate
    AFTER UPDATE
    ON administration_auth_users
    FOR EACH ROW
BEGIN
    INSERT INTO administration_auth_users_history
        (id, keycloakId, deleted, editorId, created, updated)
    VALUES (OLD.id, OLD.keycloakId, OLD.deleted, OLD.editorId, OLD.created, NOW());
END;

CREATE TRIGGER TR_administration_auth_usersDelete
    AFTER DELETE
    ON administration_auth_users
    FOR EACH ROW
BEGIN
    INSERT INTO administration_auth_users_history
        (id, keycloakId, deleted, editorId, created, updated)
    VALUES (OLD.id, OLD.keycloakId, 1, OLD.editorId, OLD.created, NOW());
END;
@@ -1,46 +0,0 @@
CREATE TABLE IF NOT EXISTS administration_api_keys
(
    id         INT AUTO_INCREMENT PRIMARY KEY,
    identifier VARCHAR(255) NOT NULL,
    keyString  VARCHAR(255) NOT NULL,
    deleted    BOOL         NOT NULL DEFAULT FALSE,
    editorId   INT          NULL,
    created    TIMESTAMP    NOT NULL DEFAULT CURRENT_TIMESTAMP,
    updated    TIMESTAMP    NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,

    CONSTRAINT UC_Identifier_Key UNIQUE (identifier, keyString),
    CONSTRAINT UC_Key UNIQUE (keyString),
    CONSTRAINT FK_ApiKeys_Editor FOREIGN KEY (editorId) REFERENCES administration_auth_users (id)
);

CREATE TABLE IF NOT EXISTS administration_api_keys_history
(
    id         INT          NOT NULL,
    identifier VARCHAR(255) NOT NULL,
    keyString  VARCHAR(255) NOT NULL,
    deleted    BOOL         NOT NULL,
    editorId   INT          NULL,
    created    TIMESTAMP    NOT NULL,
    updated    TIMESTAMP    NOT NULL
);

CREATE TRIGGER TR_ApiKeysUpdate
    AFTER UPDATE
    ON administration_api_keys
    FOR EACH ROW
BEGIN
    INSERT INTO administration_api_keys_history
        (id, identifier, keyString, deleted, editorId, created, updated)
    VALUES (OLD.id, OLD.identifier, OLD.keyString, OLD.deleted, OLD.editorId, OLD.created, NOW());
END;

CREATE TRIGGER TR_ApiKeysDelete
    AFTER DELETE
    ON administration_api_keys
    FOR EACH ROW
BEGIN
    INSERT INTO administration_api_keys_history
        (id, identifier, keyString, deleted, editorId, created, updated)
    VALUES (OLD.id, OLD.identifier, OLD.keyString, 1, OLD.editorId, OLD.created, NOW());
END;
@@ -1,179 +0,0 @@
CREATE TABLE IF NOT EXISTS permission_permissions
(
    id          INT AUTO_INCREMENT PRIMARY KEY,
    name        VARCHAR(255) NOT NULL,
    description TEXT         NULL,
    deleted     BOOL         NOT NULL DEFAULT FALSE,
    editorId    INT          NULL,
    created     TIMESTAMP    NOT NULL DEFAULT CURRENT_TIMESTAMP,
    updated     TIMESTAMP    NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
    CONSTRAINT UQ_PermissionName UNIQUE (name),
    CONSTRAINT FK_Permissions_Editor FOREIGN KEY (editorId) REFERENCES administration_auth_users (id)
);

CREATE TABLE IF NOT EXISTS permission_permissions_history
(
    id          INT          NOT NULL,
    name        VARCHAR(255) NOT NULL,
    description TEXT         NULL,
    deleted     BOOL         NOT NULL,
    editorId    INT          NULL,
    created     TIMESTAMP    NOT NULL,
    updated     TIMESTAMP    NOT NULL
);

CREATE TRIGGER TR_PermissionsUpdate
    AFTER UPDATE
    ON permission_permissions
    FOR EACH ROW
BEGIN
    INSERT INTO permission_permissions_history
        (id, name, description, deleted, editorId, created, updated)
    VALUES (OLD.id, OLD.name, OLD.description, OLD.deleted, OLD.editorId, OLD.created, NOW());
END;

CREATE TRIGGER TR_PermissionsDelete
    AFTER DELETE
    ON permission_permissions
    FOR EACH ROW
BEGIN
    INSERT INTO permission_permissions_history
        (id, name, description, deleted, editorId, created, updated)
    VALUES (OLD.id, OLD.name, OLD.description, 1, OLD.editorId, OLD.created, NOW());
END;

CREATE TABLE IF NOT EXISTS permission_roles
(
    id          INT AUTO_INCREMENT PRIMARY KEY,
    name        VARCHAR(255) NOT NULL,
    description TEXT         NULL,
    deleted     BOOL         NOT NULL DEFAULT FALSE,
    editorId    INT          NULL,
    created     TIMESTAMP    NOT NULL DEFAULT CURRENT_TIMESTAMP,
    updated     TIMESTAMP    NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
    CONSTRAINT UQ_RoleName UNIQUE (name),
    CONSTRAINT FK_Roles_Editor FOREIGN KEY (editorId) REFERENCES administration_auth_users (id)
);

CREATE TABLE IF NOT EXISTS permission_roles_history
(
    id          INT          NOT NULL,
    name        VARCHAR(255) NOT NULL,
    description TEXT         NULL,
    deleted     BOOL         NOT NULL,
    editorId    INT          NULL,
    created     TIMESTAMP    NOT NULL,
    updated     TIMESTAMP    NOT NULL
);

CREATE TRIGGER TR_RolesUpdate
    AFTER UPDATE
    ON permission_roles
    FOR EACH ROW
BEGIN
    INSERT INTO permission_roles_history
        (id, name, description, deleted, editorId, created, updated)
    VALUES (OLD.id, OLD.name, OLD.description, OLD.deleted, OLD.editorId, OLD.created, NOW());
END;

CREATE TRIGGER TR_RolesDelete
    AFTER DELETE
    ON permission_roles
    FOR EACH ROW
BEGIN
    INSERT INTO permission_roles_history
        (id, name, description, deleted, editorId, created, updated)
    VALUES (OLD.id, OLD.name, OLD.description, 1, OLD.editorId, OLD.created, NOW());
END;

CREATE TABLE IF NOT EXISTS permission_role_permissions
(
    id           INT AUTO_INCREMENT PRIMARY KEY,
    RoleId       INT       NOT NULL,
    permissionId INT       NOT NULL,
    deleted      BOOL      NOT NULL DEFAULT FALSE,
    editorId     INT       NULL,
    created      TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
    updated      TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
    CONSTRAINT UQ_RolePermission UNIQUE (RoleId, permissionId),
    CONSTRAINT FK_RolePermissions_Role FOREIGN KEY (RoleId) REFERENCES permission_roles (id) ON DELETE CASCADE,
    CONSTRAINT FK_RolePermissions_Permission FOREIGN KEY (permissionId) REFERENCES permission_permissions (id) ON DELETE CASCADE,
    CONSTRAINT FK_RolePermissions_Editor FOREIGN KEY (editorId) REFERENCES administration_auth_users (id)
);

CREATE TABLE IF NOT EXISTS permission_role_permissions_history
(
    id           INT       NOT NULL,
    RoleId       INT       NOT NULL,
    permissionId INT       NOT NULL,
    deleted      BOOL      NOT NULL,
    editorId     INT       NULL,
    created      TIMESTAMP NOT NULL,
    updated      TIMESTAMP NOT NULL
);

CREATE TRIGGER TR_RolePermissionsUpdate
    AFTER UPDATE
    ON permission_role_permissions
    FOR EACH ROW
BEGIN
    INSERT INTO permission_role_permissions_history
        (id, RoleId, permissionId, deleted, editorId, created, updated)
    VALUES (OLD.id, OLD.RoleId, OLD.permissionId, OLD.deleted, OLD.editorId, OLD.created, NOW());
END;

CREATE TRIGGER TR_RolePermissionsDelete
    AFTER DELETE
    ON permission_role_permissions
    FOR EACH ROW
BEGIN
    INSERT INTO permission_role_permissions_history
        (id, RoleId, permissionId, deleted, editorId, created, updated)
    VALUES (OLD.id, OLD.RoleId, OLD.permissionId, 1, OLD.editorId, OLD.created, NOW());
END;

CREATE TABLE IF NOT EXISTS permission_role_auth_users
(
    id       INT AUTO_INCREMENT PRIMARY KEY,
    RoleId   INT       NOT NULL,
    UserId   INT       NOT NULL,
    deleted  BOOL      NOT NULL DEFAULT FALSE,
    editorId INT       NULL,
    created  TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
    updated  TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
    CONSTRAINT UQ_RoleUser UNIQUE (RoleId, UserId),
    CONSTRAINT FK_Roleauth_users_Role FOREIGN KEY (RoleId) REFERENCES permission_roles (id) ON DELETE CASCADE,
    CONSTRAINT FK_Roleauth_users_User FOREIGN KEY (UserId) REFERENCES administration_auth_users (id) ON DELETE CASCADE,
    CONSTRAINT FK_Roleauth_users_Editor FOREIGN KEY (editorId) REFERENCES administration_auth_users (id)
);

CREATE TABLE IF NOT EXISTS permission_role_auth_users_history
(
    id       INT       NOT NULL,
    RoleId   INT       NOT NULL,
    UserId   INT       NOT NULL,
    deleted  BOOL      NOT NULL,
    editorId INT       NULL,
    created  TIMESTAMP NOT NULL,
    updated  TIMESTAMP NOT NULL
);

CREATE TRIGGER TR_Roleauth_usersUpdate
    AFTER UPDATE
    ON permission_role_auth_users
    FOR EACH ROW
BEGIN
    INSERT INTO permission_role_auth_users_history
        (id, RoleId, UserId, deleted, editorId, created, updated)
    VALUES (OLD.id, OLD.RoleId, OLD.UserId, OLD.deleted, OLD.editorId, OLD.created, NOW());
END;

CREATE TRIGGER TR_Roleauth_usersDelete
    AFTER DELETE
    ON permission_role_auth_users
    FOR EACH ROW
BEGIN
    INSERT INTO permission_role_auth_users_history
        (id, RoleId, UserId, deleted, editorId, created, updated)
    VALUES (OLD.id, OLD.RoleId, OLD.UserId, 1, OLD.editorId, OLD.created, NOW());
END;
@@ -1,46 +0,0 @@
CREATE TABLE IF NOT EXISTS permission_api_key_permissions
(
    id           INT AUTO_INCREMENT PRIMARY KEY,
    apiKeyId     INT       NOT NULL,
    permissionId INT       NOT NULL,
    deleted      BOOL      NOT NULL DEFAULT FALSE,
    editorId     INT       NULL,
    created      TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
    updated      TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
    CONSTRAINT UQ_ApiKeyPermission UNIQUE (apiKeyId, permissionId),
    CONSTRAINT FK_ApiKeyPermissions_ApiKey FOREIGN KEY (apiKeyId) REFERENCES administration_api_keys (id) ON DELETE CASCADE,
    CONSTRAINT FK_ApiKeyPermissions_Permission FOREIGN KEY (permissionId) REFERENCES permission_permissions (id) ON DELETE CASCADE,
    CONSTRAINT FK_ApiKeyPermissions_Editor FOREIGN KEY (editorId) REFERENCES administration_auth_users (id)
);

CREATE TABLE IF NOT EXISTS permission_api_key_permissions_history
(
    id           INT       NOT NULL,
    apiKeyId     INT       NOT NULL,
    permissionId INT       NOT NULL,
    deleted      BOOL      NOT NULL,
    editorId     INT       NULL,
    created      TIMESTAMP NOT NULL,
    updated      TIMESTAMP NOT NULL
);

CREATE TRIGGER TR_ApiKeyPermissionsUpdate
    AFTER UPDATE
    ON permission_api_key_permissions
    FOR EACH ROW
BEGIN
    INSERT INTO permission_api_key_permissions_history
        (id, apiKeyId, permissionId, deleted, editorId, created, updated)
    VALUES (OLD.id, OLD.apiKeyId, OLD.permissionId, OLD.deleted, OLD.editorId, OLD.created, NOW());
END;

CREATE TRIGGER TR_ApiKeyPermissionsDelete
    AFTER DELETE
    ON permission_api_key_permissions
    FOR EACH ROW
BEGIN
    INSERT INTO permission_api_key_permissions_history
        (id, apiKeyId, permissionId, deleted, editorId, created, updated)
    VALUES (OLD.id, OLD.apiKeyId, OLD.permissionId, 1, OLD.editorId, OLD.created, NOW());
END;
@@ -1,26 +0,0 @@
CREATE SCHEMA IF NOT EXISTS administration;

CREATE TABLE IF NOT EXISTS administration.auth_users
(
    id         SERIAL PRIMARY KEY,
    keycloakId UUID NOT NULL,
    -- for history
    deleted    BOOLEAN NOT NULL DEFAULT FALSE,
    editorId   INT NULL REFERENCES administration.auth_users (id),
    created    timestamptz NOT NULL DEFAULT NOW(),
    updated    timestamptz NOT NULL DEFAULT NOW(),

    CONSTRAINT UC_KeycloakId UNIQUE (keycloakId)
);

CREATE TABLE IF NOT EXISTS administration.auth_users_history
(
    LIKE administration.auth_users
);

CREATE TRIGGER users_history_trigger
    BEFORE INSERT OR UPDATE OR DELETE
    ON administration.auth_users
    FOR EACH ROW
EXECUTE FUNCTION public.history_trigger_function();
@@ -1,28 +0,0 @@
CREATE SCHEMA IF NOT EXISTS administration;

CREATE TABLE IF NOT EXISTS administration.api_keys
(
    id         SERIAL PRIMARY KEY,
    identifier VARCHAR(255) NOT NULL,
    keyString  VARCHAR(255) NOT NULL,
    -- for history
    deleted    BOOLEAN NOT NULL DEFAULT FALSE,
    editorId   INT NULL REFERENCES administration.auth_users (id),
    created    timestamptz NOT NULL DEFAULT NOW(),
    updated    timestamptz NOT NULL DEFAULT NOW(),

    CONSTRAINT UC_Identifier_Key UNIQUE (identifier, keyString),
    CONSTRAINT UC_Key UNIQUE (keyString)
);

CREATE TABLE IF NOT EXISTS administration.api_keys_history
(
    LIKE administration.api_keys
);

CREATE TRIGGER api_keys_history_trigger
    BEFORE INSERT OR UPDATE OR DELETE
    ON administration.api_keys
    FOR EACH ROW
EXECUTE FUNCTION public.history_trigger_function();
@@ -1,105 +0,0 @@
CREATE SCHEMA IF NOT EXISTS permission;

-- Permissions
CREATE TABLE permission.permissions
(
    id          SERIAL PRIMARY KEY,
    name        VARCHAR(255) NOT NULL,
    description TEXT NULL,

    -- for history
    deleted     BOOLEAN NOT NULL DEFAULT FALSE,
    editorId    INT NULL REFERENCES administration.auth_users (id),
    created     timestamptz NOT NULL DEFAULT NOW(),
    updated     timestamptz NOT NULL DEFAULT NOW(),
    CONSTRAINT UQ_PermissionName UNIQUE (name)
);

CREATE TABLE permission.permissions_history
(
    LIKE permission.permissions
);

CREATE TRIGGER versioning_trigger
    BEFORE INSERT OR UPDATE OR DELETE
    ON permission.permissions
    FOR EACH ROW
EXECUTE PROCEDURE public.history_trigger_function();

-- Roles
CREATE TABLE permission.roles
(
    id          SERIAL PRIMARY KEY,
    name        VARCHAR(255) NOT NULL,
    description TEXT NULL,

    -- for history
    deleted     BOOLEAN NOT NULL DEFAULT FALSE,
    editorId    INT NULL REFERENCES administration.auth_users (id),
    created     timestamptz NOT NULL DEFAULT NOW(),
    updated     timestamptz NOT NULL DEFAULT NOW(),
    CONSTRAINT UQ_RoleName UNIQUE (name)
);

CREATE TABLE permission.roles_history
(
    LIKE permission.roles
);

CREATE TRIGGER versioning_trigger
    BEFORE INSERT OR UPDATE OR DELETE
    ON permission.roles
    FOR EACH ROW
EXECUTE PROCEDURE public.history_trigger_function();

-- Role permissions
CREATE TABLE permission.role_permissions
(
    id           SERIAL PRIMARY KEY,
    RoleId       INT NOT NULL REFERENCES permission.roles (id) ON DELETE CASCADE,
    permissionId INT NOT NULL REFERENCES permission.permissions (id) ON DELETE CASCADE,

    -- for history
    deleted      BOOLEAN NOT NULL DEFAULT FALSE,
    editorId     INT NULL REFERENCES administration.auth_users (id),
    created      timestamptz NOT NULL DEFAULT NOW(),
    updated      timestamptz NOT NULL DEFAULT NOW(),
    CONSTRAINT UQ_RolePermission UNIQUE (RoleId, permissionId)
);

CREATE TABLE permission.role_permissions_history
(
    LIKE permission.role_permissions
);

CREATE TRIGGER versioning_trigger
    BEFORE INSERT OR UPDATE OR DELETE
    ON permission.role_permissions
    FOR EACH ROW
EXECUTE PROCEDURE public.history_trigger_function();

-- Role user
CREATE TABLE permission.role_users
(
    id       SERIAL PRIMARY KEY,
    RoleId   INT NOT NULL REFERENCES permission.roles (id) ON DELETE CASCADE,
    UserId   INT NOT NULL REFERENCES administration.auth_users (id) ON DELETE CASCADE,

    -- for history
    deleted  BOOLEAN NOT NULL DEFAULT FALSE,
    editorId INT NULL REFERENCES administration.auth_users (id),
    created  timestamptz NOT NULL DEFAULT NOW(),
    updated  timestamptz NOT NULL DEFAULT NOW(),
    CONSTRAINT UQ_RoleUser UNIQUE (RoleId, UserId)
);

CREATE TABLE permission.role_users_history
(
    LIKE permission.role_users
);

CREATE TRIGGER versioning_trigger
    BEFORE INSERT OR UPDATE OR DELETE
    ON permission.role_users
    FOR EACH ROW
EXECUTE PROCEDURE public.history_trigger_function();
@@ -1,24 +0,0 @@
CREATE TABLE permission.api_key_permissions
(
    id           SERIAL PRIMARY KEY,
    apiKeyId     INT NOT NULL REFERENCES administration.api_keys (id) ON DELETE CASCADE,
    permissionId INT NOT NULL REFERENCES permission.permissions (id) ON DELETE CASCADE,

    -- for history
    deleted      BOOLEAN NOT NULL DEFAULT FALSE,
    editorId     INT NULL REFERENCES administration.auth_users (id),
    created      timestamptz NOT NULL DEFAULT NOW(),
    updated      timestamptz NOT NULL DEFAULT NOW(),
    CONSTRAINT UQ_ApiKeyPermission UNIQUE (apiKeyId, permissionId)
);

CREATE TABLE permission.api_key_permissions_history
(
    LIKE permission.api_key_permissions
);

CREATE TRIGGER versioning_trigger
    BEFORE INSERT OR UPDATE OR DELETE
    ON permission.api_key_permissions
    FOR EACH ROW
EXECUTE PROCEDURE public.history_trigger_function();
@@ -1,30 +0,0 @@
[build-system]
requires = ["setuptools>=70.1.0", "wheel>=0.43.0"]
build-backend = "setuptools.build_meta"

[project]
name = "cpl-auth"
version = "2024.7.0"
description = "CPL auth"
readme = "CPL auth package"
requires-python = ">=3.12"
license = { text = "MIT" }
authors = [
    { name = "Sven Heidemann", email = "sven.heidemann@sh-edraft.de" }
]
keywords = ["cpl", "auth", "backend", "shared", "library"]

dynamic = ["dependencies", "optional-dependencies"]

[project.urls]
Homepage = "https://www.sh-edraft.de"

[tool.setuptools.packages.find]
where = ["."]
include = ["cpl*"]

[tool.setuptools.dynamic]
dependencies = { file = ["requirements.txt"] }
optional-dependencies.dev = { file = ["requirements.dev.txt"] }
@@ -1 +0,0 @@
black==25.1.0
@@ -1,4 +0,0 @@
cpl-core
cpl-dependency
cpl-database
python-keycloak==5.8.1
@@ -1,2 +0,0 @@
from .configuration import Configuration
from .configuration_model_abc import ConfigurationModelABC
@@ -1,137 +0,0 @@
import inspect
import json
import os
import sys
from inspect import isclass
from typing import Any

from cpl.core.configuration.configuration_model_abc import ConfigurationModelABC
from cpl.core.console.console import Console
from cpl.core.console.foreground_color_enum import ForegroundColorEnum
from cpl.core.typing import D, T


class Configuration:
    _config = {}

    @staticmethod
    def _print_info(message: str):
        r"""Prints an info message

        Parameter:
            message: :class:`str`
                Info message
        """
        Console.set_foreground_color(ForegroundColorEnum.green)
        Console.write_line(f"[CONFIG] {message}")
        Console.set_foreground_color(ForegroundColorEnum.default)

    @staticmethod
    def _print_warn(message: str):
        r"""Prints a warning

        Parameter:
            message: :class:`str`
                Warning message
        """
        Console.set_foreground_color(ForegroundColorEnum.yellow)
        Console.write_line(f"[CONFIG] {message}")
        Console.set_foreground_color(ForegroundColorEnum.default)

    @staticmethod
    def _print_error(message: str):
        r"""Prints an error

        Parameter:
            message: :class:`str`
                Error message
        """
        Console.set_foreground_color(ForegroundColorEnum.red)
        Console.write_line(f"[CONFIG] {message}")
        Console.set_foreground_color(ForegroundColorEnum.default)

    @classmethod
    def _load_json_file(cls, file: str, output: bool) -> dict:
        r"""Reads the json file

        Parameter:
            file: :class:`str`
                Name of the file
            output: :class:`bool`
                Specifies whether an output should take place

        Returns:
            Object of :class:`dict`
        """
        try:
            # open the config file
            with open(file, encoding="utf-8") as cfg:
                # load json
                json_cfg = json.load(cfg)
                if output:
                    cls._print_info(f"Loaded config file: {file}")

                return json_cfg
        except Exception as e:
            cls._print_error(f"Cannot load config file: {file}! -> {e}")
            return {}

    @classmethod
    def add_json_file(cls, name: str, optional: bool = None, output: bool = True, path: str = None):
        if os.path.isabs(name):
            file_path = name
        else:
            from cpl.core.environment import Environment

            path_root = Environment.get_cwd()
            if path is not None:
                path_root = path

            if str(path_root).endswith("/") and not name.startswith("/"):
                file_path = f"{path_root}{name}"
            else:
                file_path = f"{path_root}/{name}"

        if not os.path.isfile(file_path):
            if optional is not True:
                if output:
                    cls._print_error(f"File not found: {file_path}")

                sys.exit()

            if output:
                cls._print_warn(f"Config file not loaded: {file_path}")

            return None

        config_from_file = cls._load_json_file(file_path, output)
        for sub in ConfigurationModelABC.__subclasses__():
            for key, value in config_from_file.items():
                if sub.__name__ != key and sub.__name__.replace("Settings", "") != key:
                    continue

                cls.set(sub, sub(value))

    @classmethod
    def set(cls, key: Any, value: T):
        if inspect.isclass(key):
            key = key.__name__

        cls._config[key] = value

    @classmethod
    def get(cls, key: Any, default: D = None) -> T | D:
        key_name = key.__name__ if inspect.isclass(key) else key

        result = cls._config.get(key_name, default)
        if isclass(key) and issubclass(key, ConfigurationModelABC) and result == default:
            result = key()
            cls.set(key, result)

        return result
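A minimal usage sketch (not part of the diff above; the file name is illustrative): add_json_file matches top-level JSON keys against ConfigurationModelABC subclass names, with or without the "Settings" suffix, and registers a constructed instance that get() later returns.

from cpl.core.configuration import Configuration
from cpl.core.log import LogSettings

# Assumes an appsettings.json with a top-level "Log" or "LogSettings" key
Configuration.add_json_file("appsettings.json", optional=True)

# Falls back to a default-constructed LogSettings if the file had no entry
log_settings = Configuration.get(LogSettings)
print(log_settings.level)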
@@ -1,82 +0,0 @@
from abc import ABC, abstractmethod
from typing import Optional, Type, Any

from cpl.core.typing import T
from cpl.core.utils.cast import cast
from cpl.core.utils.get_value import get_value
from cpl.core.utils.string import String


class ConfigurationModelABC(ABC):
    r"""
    ABC for configuration model classes
    """

    @abstractmethod
    def __init__(
        self,
        src: Optional[dict] = None,
        env_prefix: Optional[str] = None,
        readonly: bool = True,
    ):
        ABC.__init__(self)

        self._src = src or {}
        self._options: dict[str, Any] = {}

        self._env_prefix = env_prefix
        self._readonly = readonly

    def __setattr__(self, attr: str, value: Any):
        if hasattr(self, "_readonly") and self._readonly:
            raise AttributeError(f"Cannot set attribute: {attr}. {type(self).__name__} is read-only")

        super().__setattr__(attr, value)

    def __getattr__(self, attr: str) -> Any:
        options = super().__getattribute__("_options")
        if attr in options:
            return options[attr]

        return super().__getattribute__(attr)

    def option(self, field: str, cast_type: Type[T], default=None, required=False, from_env=True):
        field_variants = [
            field,
            String.first_to_upper(field),
            String.first_to_lower(field),
            String.to_camel_case(field),
            String.to_snake_case(field),
            String.to_pascal_case(field),
        ]

        value = None
        for variant in field_variants:
            if variant in self._src:
                value = self._src[variant]
                break

        if value is None and from_env:
            from cpl.core.environment import Environment

            env_field = field.upper()
            if self._env_prefix:
                env_field = f"{self._env_prefix}_{env_field}"

            value = cast(Environment.get(env_field, str), cast_type)

        if value is None and required:
            raise ValueError(f"{type(self).__name__}.{field} is required")
        elif value is None:
            self._options[field] = default
            return

        self._options[field] = cast(value, cast_type)

    def get(self, field: str, default=None) -> Optional[T]:
        return get_value(self._src, field, self._options[field].type, default)

    def to_dict(self) -> dict:
        return {field: self.get(field) for field in self._options.keys()}
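A hedged sketch of a settings model built on this ABC; "ApiSettings" and its fields are illustrative, not part of the library. option() resolves each field from the src dict (trying several casings) and, because env_prefix is "API", falls back to API_HOST / API_PORT environment variables before using the default.

from typing import Optional

from cpl.core.configuration import ConfigurationModelABC


class ApiSettings(ConfigurationModelABC):
    def __init__(self, src: Optional[dict] = None):
        ConfigurationModelABC.__init__(self, src, "API")

        self.option("host", str, default="0.0.0.0")
        self.option("port", int, default=8080)


settings = ApiSettings({"Port": 9000})
print(settings.host, settings.port)  # 0.0.0.0 9000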
@@ -1,4 +0,0 @@
from .background_color_enum import BackgroundColorEnum
from .console import Console
from ._call import ConsoleCall
from .foreground_color_enum import ForegroundColorEnum
@@ -1 +0,0 @@
from .user_context import set_user, get_user
@@ -1,18 +0,0 @@
from contextvars import ContextVar
from typing import Optional

from cpl.auth.auth_logger import AuthLogger
from cpl.auth.schema._administration.auth_user import AuthUser

_user_context: ContextVar[Optional[AuthUser]] = ContextVar("user", default=None)

_logger = AuthLogger(__name__)


def set_user(user: Optional[AuthUser]):
    _logger.trace("Setting user context", user)
    _user_context.set(user)


def get_user() -> Optional[AuthUser]:
    return _user_context.get()
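The intended flow, sketched under assumptions (the import path and handler names are illustrative): a request entry point stores the authenticated user in the ContextVar, and lower layers read it back without threading the user through every call. Each asyncio task or thread sees its own value.

from cpl.auth.context import set_user, get_user  # import path assumed


def handle_request(authenticated_user):
    set_user(authenticated_user)
    do_work()


def do_work():
    user = get_user()  # same task/context as the set_user call above
    if user is None:
        raise PermissionError("No user in context")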
@@ -1,2 +0,0 @@
from .environment_enum import EnvironmentEnum
from .environment import Environment
@@ -1,68 +0,0 @@
import os
from socket import gethostname
from typing import Type

from cpl.core.environment.environment_enum import EnvironmentEnum
from cpl.core.typing import T, D
from cpl.core.utils.get_value import get_value


class Environment:
    r"""Represents the environment of the application"""

    @classmethod
    def get_environment(cls):
        return cls.get("ENVIRONMENT", str, EnvironmentEnum.production.value)

    @classmethod
    def set_environment(cls, environment: str):
        assert environment is not None and environment != "", "environment must not be None or empty"
        assert environment.lower() in [
            e.value for e in EnvironmentEnum
        ], f"environment must be one of {[e.value for e in EnvironmentEnum]}"
        cls.set("ENVIRONMENT", environment.lower())

    @classmethod
    def get_app_name(cls) -> str:
        return cls.get("APP_NAME", str)

    @classmethod
    def set_app_name(cls, app_name: str):
        cls.set("APP_NAME", app_name)

    @staticmethod
    def get_host_name() -> str:
        return gethostname()

    @staticmethod
    def get_cwd() -> str:
        return os.getcwd()

    @staticmethod
    def set_cwd(working_directory: str):
        assert working_directory is not None and working_directory != "", "working_directory must not be None or empty"

        os.chdir(working_directory)

    @staticmethod
    def set(key: str, value: T):
        assert key is not None and key != "", "key must not be None or empty"

        os.environ[key] = str(value)

    @staticmethod
    def get(key: str, cast_type: Type[T], default: D = None) -> T | D:
        """
        Get an environment variable and cast it to a specified type.
        :param str key: The name of the environment variable.
        :param Type[T] cast_type: A callable to cast the variable's value.
        :param T default: The default value to return if the variable is not found. Defaults to None.
        :return: The casted value, or the default if the variable is not found.
        :rtype: T | D
        """

        return get_value(dict(os.environ), key, cast_type, default)
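A short usage sketch (values illustrative): variables are stored as strings in os.environ and cast back on read via get_value.

from cpl.core.environment import Environment

Environment.set("APP_NAME", "demo")
Environment.set("WORKERS", 4)              # stored as the string "4"

print(Environment.get_app_name())          # "demo"
print(Environment.get("WORKERS", int))     # 4
print(Environment.get("MISSING", int, 1))  # default when not set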
@@ -1,15 +0,0 @@
import traceback

from cpl.core.console import Console


def dependency_error(package_name: str, e: ImportError) -> None:
    Console.error(f"'{package_name}' is required to use this feature. Please install it and try again.")
    tb = traceback.format_exc()
    if not tb.startswith("NoneType: None"):
        Console.write_line("->", tb)

    elif e is not None:
        Console.write_line("->", str(e))

    exit(1)
@@ -1,4 +0,0 @@
from .logger import Logger
from .logger_abc import LoggerABC
from .log_level import LogLevel
from .log_settings import LogSettings
@@ -1,11 +0,0 @@
from enum import Enum


class LogLevel(Enum):
    off = "OFF"  # Nothing
    trace = "TRC"  # Detailed application information
    debug = "DEB"  # Detailed app state
    info = "INF"  # Normal information
    warning = "WAR"  # Error that can later become fatal
    error = "ERR"  # Non-fatal error
    fatal = "FAT"  # Error that causes exit
@@ -1,18 +0,0 @@
from typing import Optional

from cpl.core.configuration.configuration_model_abc import ConfigurationModelABC
from cpl.core.log.log_level import LogLevel


class LogSettings(ConfigurationModelABC):

    def __init__(
        self,
        src: Optional[dict] = None,
    ):
        ConfigurationModelABC.__init__(self, src, "LOG")

        self.option("path", str, default="logs")
        self.option("filename", str, default="app.log")
        self.option("console", LogLevel, default=LogLevel.info)
        self.option("level", LogLevel, default=LogLevel.info)
@@ -1,143 +0,0 @@
import os
import traceback
from datetime import datetime

from cpl.core.console import Console
from cpl.core.log.log_level import LogLevel
from cpl.core.log.logger_abc import LoggerABC
from cpl.core.typing import Messages, Source


class Logger(LoggerABC):
    _levels = [x for x in LogLevel]

    # ANSI color codes for different log levels
    _COLORS = {
        LogLevel.trace: "\033[37m",  # Light Gray
        LogLevel.debug: "\033[94m",  # Blue
        LogLevel.info: "\033[92m",  # Green
        LogLevel.warning: "\033[93m",  # Yellow
        LogLevel.error: "\033[91m",  # Red
        LogLevel.fatal: "\033[95m",  # Magenta
    }

    def __init__(self, source: Source, file_prefix: str = None):
        LoggerABC.__init__(self)

        if source == LoggerABC.__name__:
            source = None

        self._source = source

        if file_prefix is None:
            file_prefix = "app"

        self._file_prefix = file_prefix
        self._create_log_dir()

    @property
    def _settings(self):
        from cpl.core.configuration.configuration import Configuration
        from cpl.core.log.log_settings import LogSettings

        return Configuration.get(LogSettings)

    @property
    def log_file(self):
        return f"logs/{self._file_prefix}_{datetime.now().strftime('%Y-%m-%d')}.log"

    @staticmethod
    def _create_log_dir():
        if os.path.exists("logs"):
            return

        os.makedirs("logs")

    @classmethod
    def set_level(cls, level: LogLevel):
        if level in cls._levels:
            cls._level = level
        else:
            raise ValueError(f"Invalid log level: {level}")

    @staticmethod
    def _ensure_file_size(log_file: str):
        if not os.path.exists(log_file) or os.path.getsize(log_file) <= 0.5 * 1024 * 1024:
            return

        # if the file exists and is larger than 0.5 MB, start a new file
        os.rename(
            log_file,
            f"{log_file.split('.log')[0]}_{datetime.now().strftime('%H-%M-%S')}.log",
        )

    def _should_log(self, input_level: LogLevel, settings_level: LogLevel) -> bool:
        return self._levels.index(input_level) >= self._levels.index(settings_level)

    def _write_log_to_file(self, level: LogLevel, content: str):
        if not self._should_log(level, self._settings.level):
            return

        file = self.log_file
        self._ensure_file_size(file)
        with open(file, "a") as log_file:
            log_file.write(content + "\n")

    def _write_to_console(self, level: LogLevel, content: str):
        if not self._should_log(level, self._settings.console):
            return

        Console.write_line(f"{self._COLORS.get(level, '\033[0m')}{content}\033[0m")

    def _log(self, level: LogLevel, *messages: Messages):
        try:
            timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S.%f")
            formatted_message = self._format_message(level.value, timestamp, *messages)

            self._write_log_to_file(level, formatted_message)
            self._write_to_console(level, formatted_message)
        except Exception as e:
            print(f"Error while logging: {e} -> {traceback.format_exc()}")

    def _format_message(self, level: str, timestamp, *messages: Messages) -> str:
        if isinstance(messages, tuple):
            messages = list(messages)

        if not isinstance(messages, list):
            messages = [messages]

        messages = [str(message) for message in messages if message is not None]

        message = f"<{timestamp}>"
        message += f" [{level.upper():^3}]"
        message += f" [{self._file_prefix}]"
        if self._source is not None:
            message += f" - [{self._source}]"

        message += f": {' '.join(messages)}"

        return message

    def header(self, string: str):
        self._log(LogLevel.info, string)

    def trace(self, *messages: Messages):
        self._log(LogLevel.trace, *messages)

    def debug(self, *messages: Messages):
        self._log(LogLevel.debug, *messages)

    def info(self, *messages: Messages):
        self._log(LogLevel.info, *messages)

    def warning(self, *messages: Messages):
        self._log(LogLevel.warning, *messages)

    def error(self, message, e: Exception = None):
        self._log(LogLevel.error, message, f"{e} -> {traceback.format_exc()}" if e else None)

    def fatal(self, message, e: Exception = None, prevent_quit: bool = False):
        self._log(LogLevel.fatal, message, f"{e} -> {traceback.format_exc()}" if e else None)
        if not prevent_quit:
            exit(-1)
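Usage sketch (assumed, not shown in the diff): per-destination thresholds come from LogSettings via Configuration, so file and console filtering are independent; error/fatal accept an optional exception whose traceback is appended.

from cpl.core.log import Logger

logger = Logger(__name__, file_prefix="demo")
logger.info("service started")
logger.error("request failed", ValueError("bad input"))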
@@ -1,3 +0,0 @@
from .bool_pipe import BoolPipe
from .ip_address_pipe import IPAddressPipe
from .pipe_abc import PipeABC
@@ -1,13 +0,0 @@
from cpl.core.pipes.pipe_abc import PipeABC
from cpl.core.typing import T


class BoolPipe(PipeABC[bool]):

    @staticmethod
    def to_str(value: T, *args):
        return str(value).lower()

    @staticmethod
    def from_str(value: str, *args) -> T:
        return value in ("True", "true", "1", "yes", "y", "Y")
@@ -1,38 +0,0 @@
from cpl.core.pipes.pipe_abc import PipeABC
from cpl.core.typing import T


class IPAddressPipe(PipeABC[list]):
    @staticmethod
    def to_str(value: T, *args) -> str:
        string = ""

        if len(value) != 4:
            raise ValueError("Invalid IP")

        for i in range(0, len(value)):
            byte = value[i]
            if not 0 <= byte <= 255:
                raise ValueError("Invalid IP")

            if i == len(value) - 1:
                string += f"{byte}"
            else:
                string += f"{byte}."

        return string

    @staticmethod
    def from_str(value: str, *args) -> T:
        parts = value.split(".")
        if len(parts) != 4:
            raise ValueError("Invalid IP")

        result = []
        for part in parts:
            byte = int(part)
            if not 0 <= byte <= 255:
                raise ValueError("Invalid IP")
            result.append(byte)

        return result
@@ -1,14 +0,0 @@
from abc import ABC, abstractmethod
from typing import Generic

from cpl.core.typing import T


class PipeABC(ABC, Generic[T]):
    @staticmethod
    @abstractmethod
    def to_str(value: T, *args) -> str: ...

    @staticmethod
    @abstractmethod
    def from_str(value: str, *args) -> T: ...
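A hedged sketch of a custom pipe; HexPipe is illustrative and not part of the library. A pipe pairs a to_str/from_str conversion for one Python type.

from cpl.core.pipes import PipeABC


class HexPipe(PipeABC[int]):
    @staticmethod
    def to_str(value: int, *args) -> str:
        return hex(value)

    @staticmethod
    def from_str(value: str, *args) -> int:
        return int(value, 16)


assert HexPipe.from_str(HexPipe.to_str(255)) == 255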
@@ -1,2 +0,0 @@
from .time_format_settings import TimeFormatSettings
from .time_format_settings_names_enum import TimeFormatSettingsNamesEnum
@@ -1,16 +0,0 @@
from typing import TypeVar, Any
from uuid import UUID

T = TypeVar("T")
D = TypeVar("D")
R = TypeVar("R")

Service = TypeVar("Service")
Source = TypeVar("Source")

Messages = list[Any] | Any

UuidId = str | UUID
SerialId = int

Id = UuidId | SerialId
@@ -1,6 +0,0 @@
from .base64 import Base64
from .credential_manager import CredentialManager
from .json_processor import JSONProcessor
from .pip import Pip
from .string import String
from .get_value import get_value
@@ -1,43 +0,0 @@
import base64
from typing import Union


class Base64:

    @staticmethod
    def encode(string: str) -> str:
        """
        Encode a string with base64
        :param string:
        :return:
        """
        return base64.b64encode(string.encode("utf-8")).decode("utf-8")

    @staticmethod
    def decode(string: str) -> str:
        """
        Decode a string with base64
        :param string:
        :return:
        """
        return base64.b64decode(string).decode("utf-8")

    @staticmethod
    def is_b64(sb: Union[str, bytes]) -> bool:
        """
        Check if a string is base64 encoded
        :param Union[str, bytes] sb:
        :return:
        :rtype: bool
        """
        try:
            if isinstance(sb, str):
                # If there's any unicode here, an exception will be thrown and the function will return false
                sb_bytes = bytes(sb, "ascii")
            elif isinstance(sb, bytes):
                sb_bytes = sb
            else:
                raise ValueError("Argument must be string or bytes")
            return base64.b64encode(base64.b64decode(sb_bytes)) == sb_bytes
        except ValueError:
            return False
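Round-trip sketch (values illustrative): encode/decode are UTF-8 string helpers, and is_b64 verifies by re-encoding the decoded bytes.

from cpl.core.utils import Base64

encoded = Base64.encode("hello")  # "aGVsbG8="
assert Base64.decode(encoded) == "hello"
assert Base64.is_b64(encoded) is True
assert Base64.is_b64("not base64!") is False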
@@ -1,69 +0,0 @@
from enum import Enum
from typing import Type, Any

from cpl.core.typing import T


def _cast_enum(value: str, enum_type: Type[Enum]) -> Enum:
    try:
        return enum_type(value)
    except ValueError:
        pass

    try:
        return enum_type(value.lower())
    except ValueError:
        pass

    try:
        return enum_type(value.upper())
    except ValueError:
        pass

    try:
        return enum_type[value]
    except KeyError:
        pass

    try:
        return enum_type[value.lower()]
    except KeyError:
        pass

    try:
        return enum_type[value.upper()]
    except KeyError:
        pass

    raise ValueError(f"Cannot cast value '{value}' to enum '{enum_type.__name__}'")


def cast(value: Any, cast_type: Type[T], list_delimiter: str = ",") -> T:
    """
    Cast a value to a specified type.
    :param Any value: Value to be casted.
    :param Type[T] cast_type: A callable to cast the variable's value.
    :param str list_delimiter: The delimiter to split the value into a list. Defaults to ",".
    :return:
    """
    if value is None:
        return None

    if cast_type == bool:
        return value.lower() in ["true", "1", "yes", "on"]

    if (cast_type if not hasattr(cast_type, "__origin__") else cast_type.__origin__) == list:
        if not (value.startswith("[") and value.endswith("]")) and list_delimiter not in value:
            raise ValueError("List values must be enclosed in square brackets or use a delimiter.")

        if value.startswith("[") and value.endswith("]"):
            value = value[1:-1]

        value = value.split(list_delimiter)
        subtype = cast_type.__args__[0] if hasattr(cast_type, "__args__") else None
        return [subtype(item) if subtype is not None else item for item in value]

    if isinstance(cast_type, type) and issubclass(cast_type, Enum):
        return _cast_enum(value, cast_type)

    return cast_type(value)
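Behaviour sketch for cast() (values illustrative): strings are coerced to the requested type, including parameterized lists and enums.

from enum import Enum

from cpl.core.utils.cast import cast


class Color(Enum):
    red = "red"


print(cast("yes", bool))           # True
print(cast("[1,2,3]", list[int]))  # [1, 2, 3]
print(cast("RED", Color))          # Color.red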
@@ -1,61 +0,0 @@
import os

from cryptography.fernet import Fernet

from cpl.core.log.logger import Logger

_logger = Logger(__name__)


class CredentialManager:
    r"""Handles credential encryption and decryption"""

    _secret: str = None

    @classmethod
    def with_secret(cls, file: str = None):
        if file is None:
            file = ".secret"

        if not os.path.isfile(file):
            dirname = os.path.dirname(file)
            if dirname != "":
                os.makedirs(dirname, exist_ok=True)

            with open(file, "w") as secret_file:
                secret_file.write(Fernet.generate_key().decode())
            _logger.warning("Secret file not found, regenerating")

        with open(file, "r") as secret_file:
            secret = secret_file.read().strip()
            if not secret:
                _logger.fatal("No secret found in .secret file.")

            cls._secret = str(secret)

    @classmethod
    def encrypt(cls, string: str) -> str:
        r"""Encode with Fernet

        Parameter:
            string: :class:`str`
                String to encode

        Returns:
            Encoded string
        """
        return Fernet(cls._secret).encrypt(string.encode()).decode()

    @classmethod
    def decrypt(cls, string: str) -> str:
        r"""Decode with Fernet

        Parameter:
            string: :class:`str`
                String to decode

        Returns:
            Decoded string
        """
        return Fernet(cls._secret).decrypt(string).decode()
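Usage sketch (file name illustrative): with_secret() creates the Fernet key file on first run, after which encrypt/decrypt round-trip through that key.

from cpl.core.utils import CredentialManager

CredentialManager.with_secret(".secret")  # generated if missing

token = CredentialManager.encrypt("db-password")
assert CredentialManager.decrypt(token) == "db-password"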
@@ -1,46 +0,0 @@
from typing import Type, Optional

from cpl.core.typing import T
from cpl.core.utils.cast import cast


def get_value(
    source: dict,
    key: str,
    cast_type: Type[T],
    default: Optional[T] = None,
    list_delimiter: str = ",",
) -> Optional[T]:
    """
    Get value from source dictionary and cast it to a specified type.
    :param dict source: The source dictionary.
    :param str key: The key to look up in the source dictionary.
    :param Type[T] cast_type: A callable to cast the variable's value.
    :param Optional[T] default: The default value to return if the key is not found. Defaults to None.
    :param str list_delimiter: The delimiter to split the value into a list. Defaults to ",".
    :return: The casted value, or the default if the key is not found.
    :rtype: Optional[T]
    """

    if key not in source:
        return default

    value = source[key]
    if isinstance(
        value,
        cast_type if not hasattr(cast_type, "__origin__") else cast_type.__origin__,
    ):
        # Handle list[int] case explicitly
        if hasattr(cast_type, "__origin__") and cast_type.__origin__ == list:
            subtype = cast_type.__args__[0] if hasattr(cast_type, "__args__") else None
            if subtype is not None:
                return [subtype(item) for item in value]
        return value

    try:
        return cast(value, cast_type, list_delimiter)
    except (ValueError, TypeError):
        from cpl.core.log import Logger

        Logger(__name__).debug(f"Failed to cast value '{value}' to type '{cast_type.__name__}'")
        return default
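Behaviour sketch (values illustrative): values already of the target type pass through, strings are cast, and cast failures fall back to the default.

from cpl.core.utils.get_value import get_value

src = {"port": "8080", "hosts": ["a", "b"]}

print(get_value(src, "port", int))         # 8080
print(get_value(src, "hosts", list[str]))  # ['a', 'b']
print(get_value(src, "missing", int, 1))   # 1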
@@ -1,29 +0,0 @@
[build-system]
requires = ["setuptools>=70.1.0", "wheel>=0.43.0"]
build-backend = "setuptools.build_meta"

[project]
name = "cpl-core"
version = "2024.7.0"
description = "CPL core"
readme = "CPL core package"
requires-python = ">=3.12"
license = "MIT"
authors = [
    { name = "Sven Heidemann", email = "sven.heidemann@sh-edraft.de" }
]
keywords = ["cpl", "core", "backend", "shared", "library"]

dynamic = ["dependencies", "optional-dependencies"]

[project.urls]
Homepage = "https://www.sh-edraft.de"

[tool.setuptools.packages.find]
where = ["."]
include = ["cpl*"]

[tool.setuptools.dynamic]
dependencies = { file = ["requirements.txt"] }
optional-dependencies.dev = { file = ["requirements.dev.txt"] }
@@ -1 +0,0 @@
black==25.1.0
@@ -1,5 +0,0 @@
art==6.5
colorama==0.4.6
tabulate==0.9.0
termcolor==3.1.0
pynput==1.8.1
@@ -1,77 +0,0 @@
import os
from typing import Type

from cpl.application.abc import ApplicationABC as _ApplicationABC
from cpl.dependency import ServiceCollection as _ServiceCollection
from . import mysql as _mysql
from . import postgres as _postgres
from .table_manager import TableManager


def _with_migrations(self: _ApplicationABC, *paths: str | list[str]) -> _ApplicationABC:
    from cpl.application.host import Host
    from cpl.database.service.migration_service import MigrationService

    migration_service = self._services.get_service(MigrationService)
    migration_service.with_directory(os.path.join(os.path.dirname(os.path.abspath(__file__)), "scripts"))

    if isinstance(paths, str):
        paths = [paths]

    for path in paths:
        migration_service.with_directory(path)

    Host.run(migration_service.migrate)

    return self


def _with_seeders(self: _ApplicationABC) -> _ApplicationABC:
    from cpl.database.service.seeder_service import SeederService
    from cpl.application.host import Host

    seeder_service: SeederService = self._services.get_service(SeederService)
    Host.run(seeder_service.seed)
    return self


def _add(collection: _ServiceCollection, db_context: Type, default_port: int, server_type: str):
    from cpl.core.console import Console
    from cpl.core.configuration import Configuration
    from cpl.database.abc.db_context_abc import DBContextABC
    from cpl.database.model.server_type import ServerTypes, ServerType
    from cpl.database.model.database_settings import DatabaseSettings
    from cpl.database.service.migration_service import MigrationService
    from cpl.database.service.seeder_service import SeederService
    from cpl.database.schema.executed_migration_dao import ExecutedMigrationDao

    try:
        ServerType.set_server_type(ServerTypes(server_type))
        Configuration.set("DB_DEFAULT_PORT", default_port)

        collection.add_singleton(DBContextABC, db_context)
        collection.add_singleton(ExecutedMigrationDao)
        collection.add_singleton(MigrationService)
        collection.add_singleton(SeederService)
    except ImportError as e:
        Console.error("cpl-database is not installed", str(e))


def add_mysql(collection: _ServiceCollection):
    from cpl.database.mysql.db_context import DBContext
    from cpl.database.model import ServerTypes

    _add(collection, DBContext, 3306, ServerTypes.MYSQL.value)


def add_postgres(collection: _ServiceCollection):
    from cpl.database.postgres.db_context import DBContext
    from cpl.database.model import ServerTypes

    _add(collection, DBContext, 5432, ServerTypes.POSTGRES.value)


_ServiceCollection.with_module(add_mysql, _mysql.__name__)
_ServiceCollection.with_module(add_postgres, _postgres.__name__)
_ApplicationABC.extend(_ApplicationABC.with_migrations, _with_migrations)
_ApplicationABC.extend(_ApplicationABC.with_seeders, _with_seeders)
@@ -1,5 +0,0 @@
from .connection_abc import ConnectionABC
from .db_context_abc import DBContextABC
from .db_join_model_abc import DbJoinModelABC
from .db_model_abc import DbModelABC
from .db_model_dao_abc import DbModelDaoABC
@@ -1,880 +0,0 @@
|
|||||||
import datetime
|
|
||||||
from abc import ABC, abstractmethod
|
|
||||||
from enum import Enum
|
|
||||||
from types import NoneType
|
|
||||||
from typing import Generic, Optional, Union, Type, List, Any
|
|
||||||
|
|
||||||
from cpl.core.typing import T, Id
|
|
||||||
from cpl.core.utils.get_value import get_value
|
|
||||||
from cpl.core.utils.string import String
|
|
||||||
from cpl.database.abc.db_context_abc import DBContextABC
|
|
||||||
from cpl.database.const import DATETIME_FORMAT
|
|
||||||
from cpl.database.db_logger import DBLogger
|
|
||||||
from cpl.database.external_data_temp_table_builder import ExternalDataTempTableBuilder
|
|
||||||
from cpl.database.postgres.sql_select_builder import SQLSelectBuilder
|
|
||||||
from cpl.database.typing import T_DBM, Attribute, AttributeFilters, AttributeSorts
|
|
||||||
|
|
||||||
|
|
||||||
class DataAccessObjectABC(ABC, Generic[T_DBM]):
|
|
||||||
|
|
||||||
@abstractmethod
|
|
||||||
def __init__(self, source: str, model_type: Type[T_DBM], table_name: str):
|
|
||||||
from cpl.dependency.service_provider_abc import ServiceProviderABC
|
|
||||||
|
|
||||||
self._db = ServiceProviderABC.get_global_service(DBContextABC)
|
|
||||||
|
|
||||||
self._logger = DBLogger(source)
|
|
||||||
self._model_type = model_type
|
|
||||||
self._table_name = table_name
|
|
||||||
|
|
||||||
self._logger = DBLogger(source)
|
|
||||||
self._model_type = model_type
|
|
||||||
self._table_name = table_name
|
|
||||||
|
|
||||||
self._default_filter_condition = None
|
|
||||||
|
|
||||||
self.__attributes: dict[str, type] = {}
|
|
||||||
|
|
||||||
self.__db_names: dict[str, str] = {}
|
|
||||||
self.__foreign_tables: dict[str, tuple[str, str]] = {}
|
|
||||||
self.__foreign_table_keys: dict[str, str] = {}
|
|
||||||
self.__foreign_dao: dict[str, "DataAccessObjectABC"] = {}
|
|
||||||
|
|
||||||
self.__date_attributes: set[str] = set()
|
|
||||||
self.__ignored_attributes: set[str] = set()
|
|
||||||
|
|
||||||
self.__primary_key = "id"
|
|
||||||
self.__primary_key_type = int
|
|
||||||
self._external_fields: dict[str, ExternalDataTempTableBuilder] = {}
|
|
||||||
|
|
||||||
@property
|
|
||||||
def table_name(self) -> str:
|
|
||||||
return self._table_name
|
|
||||||
|
|
||||||
def has_attribute(self, attr_name: Attribute) -> bool:
|
|
||||||
"""
|
|
||||||
Check if the attribute exists in the DAO
|
|
||||||
:param Attribute attr_name: Name of the attribute
|
|
||||||
:return: True if the attribute exists, False otherwise
|
|
||||||
"""
|
|
||||||
return attr_name in self.__attributes
|
|
||||||
|
|
||||||
def attribute(
|
|
||||||
self,
|
|
||||||
attr_name: Attribute,
|
|
||||||
attr_type: type,
|
|
||||||
db_name: str = None,
|
|
||||||
ignore=False,
|
|
||||||
primary_key=False,
|
|
||||||
aliases: list[str] = None,
|
|
||||||
):
|
|
||||||
"""
|
|
||||||
Add an attribute for db and object mapping to the data access object
|
|
||||||
:param Attribute attr_name: Name of the attribute in the object
|
|
||||||
:param type attr_type: Python type of the attribute to cast db value to
|
|
||||||
:param str db_name: Name of the field in the database, if None the attribute lowered attr_name without "_" is used
|
|
||||||
:param bool ignore: Defines if field is ignored for create and update (for e.g. auto increment fields or created/updated fields)
|
|
||||||
:param bool primary_key: Defines if field is the primary key
|
|
||||||
:param list[str] aliases: List of aliases for the attribute name
|
|
||||||
:return:
|
|
||||||
"""
|
|
||||||
if isinstance(attr_name, property):
|
|
||||||
attr_name = attr_name.fget.__name__
|
|
||||||
|
|
||||||
self.__attributes[attr_name] = attr_type
|
|
||||||
if ignore:
|
|
||||||
self.__ignored_attributes.add(attr_name)
|
|
||||||
|
|
||||||
if not db_name:
|
|
||||||
db_name = attr_name.lower().replace("_", "")
|
|
||||||
|
|
||||||
self.__db_names[attr_name] = db_name
|
|
||||||
self.__db_names[db_name] = db_name
|
|
||||||
|
|
||||||
if aliases is not None:
|
|
||||||
for alias in aliases:
|
|
||||||
if alias in self.__db_names:
|
|
||||||
raise ValueError(f"Alias {alias} already exists")
|
|
||||||
self.__db_names[alias] = db_name
|
|
||||||
|
|
||||||
if primary_key:
|
|
||||||
self.__primary_key = db_name
|
|
||||||
self.__primary_key_type = attr_type
|
|
||||||
|
|
||||||
if attr_type in [datetime, datetime.datetime]:
|
|
||||||
self.__date_attributes.add(attr_name)
|
|
||||||
self.__date_attributes.add(db_name)
|
|
||||||
|
|
||||||
def reference(
|
|
||||||
self,
|
|
||||||
attr: Attribute,
|
|
||||||
primary_attr: Attribute,
|
|
||||||
foreign_attr: Attribute,
|
|
||||||
table_name: str,
|
|
||||||
reference_dao: "DataAccessObjectABC" = None,
|
|
||||||
):
|
|
||||||
"""
|
|
||||||
Add a reference to another table for the given attribute
|
|
||||||
:param Attribute attr: Name of the attribute in the object
|
|
||||||
:param str primary_attr: Name of the primary key in the foreign object
|
|
||||||
:param str foreign_attr: Name of the foreign key in the object
|
|
||||||
:param str table_name: Name of the table to reference
|
|
||||||
:param DataAccessObjectABC reference_dao: The data access object for the referenced table
|
|
||||||
:return:
|
|
||||||
"""
|
|
||||||
if isinstance(attr, property):
|
|
||||||
attr = attr.fget.__name__
|
|
||||||
|
|
||||||
if isinstance(primary_attr, property):
|
|
||||||
primary_attr = primary_attr.fget.__name__
|
|
||||||
|
|
||||||
primary_attr = primary_attr.lower().replace("_", "")
|
|
||||||
|
|
||||||
if isinstance(foreign_attr, property):
|
|
||||||
foreign_attr = foreign_attr.fget.__name__
|
|
||||||
|
|
||||||
foreign_attr = foreign_attr.lower().replace("_", "")
|
|
||||||
|
|
||||||
self.__foreign_table_keys[attr] = foreign_attr
|
|
||||||
if reference_dao is not None:
|
|
||||||
self.__foreign_dao[attr] = reference_dao
|
|
||||||
|
|
||||||
if table_name == self._table_name:
|
|
||||||
return
|
|
||||||
|
|
||||||
self.__foreign_tables[attr] = (
|
|
||||||
table_name,
|
|
||||||
f"{table_name}.{primary_attr} = {self._table_name}.{foreign_attr}",
|
|
||||||
)
|
|
||||||
|
|
||||||
def use_external_fields(self, builder: ExternalDataTempTableBuilder):
|
|
||||||
self._external_fields[builder.table_name] = builder
|
|
||||||
|
|
||||||
def to_object(self, result: dict) -> T_DBM:
|
|
||||||
"""
|
|
||||||
Convert a result from the database to an object
|
|
||||||
:param dict result: Result from the database
|
|
||||||
:return:
|
|
||||||
"""
|
|
||||||
value_map: dict[str, Any] = {}
|
|
||||||
db_names = self.__db_names.items()
|
|
||||||
|
|
||||||
for db_name, value in result.items():
|
|
||||||
# Find the attribute name corresponding to the db_name
|
|
||||||
attr_name = next((k for k, v in db_names if v == db_name), None)
|
|
||||||
if not attr_name:
|
|
||||||
continue
|
|
||||||
|
|
||||||
value_map[attr_name] = self._get_value_from_sql(self.__attributes[attr_name], value)
|
|
||||||
|
|
||||||
return self._model_type(**value_map)
|
|
||||||
|
|
||||||
def to_dict(self, obj: T_DBM) -> dict:
|
|
||||||
"""
|
|
||||||
Convert an object to a dictionary
|
|
||||||
:param T_DBM obj: Object to convert
|
|
||||||
:return:
|
|
||||||
"""
|
|
||||||
value_map: dict[str, Any] = {}
|
|
||||||
|
|
||||||
for attr_name, attr_type in self.__attributes.items():
|
|
||||||
value = getattr(obj, attr_name)
|
|
||||||
if isinstance(value, datetime.datetime):
|
|
||||||
value = value.strftime(DATETIME_FORMAT)
|
|
||||||
elif isinstance(value, Enum):
|
|
||||||
value = value.value
|
|
||||||
|
|
||||||
value_map[attr_name] = value
|
|
||||||
|
|
||||||
for ex_fname in self._external_fields:
|
|
||||||
ex_field = self._external_fields[ex_fname]
|
|
||||||
for ex_attr in ex_field.fields:
|
|
||||||
if ex_attr == self.__primary_key:
|
|
||||||
continue
|
|
||||||
|
|
||||||
value_map[ex_attr] = getattr(obj, ex_attr, None)
|
|
||||||
|
|
||||||
return value_map
|
|
||||||
|
|
||||||
async def count(self, filters: AttributeFilters = None) -> int:
|
|
||||||
result = await self._prepare_query(filters=filters, for_count=True)
|
|
||||||
return result[0]["count"] if result else 0
|
|
||||||
|
|
||||||
async def get_history(
|
|
||||||
self,
|
|
||||||
entry_id: int,
|
|
||||||
by_key: str = None,
|
|
||||||
when: datetime = None,
|
|
||||||
until: datetime = None,
|
|
||||||
without_deleted: bool = False,
|
|
||||||
) -> list[T_DBM]:
|
|
||||||
"""
|
|
||||||
Retrieve the history of an entry from the history table.
|
|
||||||
:param entry_id: The ID of the entry to retrieve history for.
|
|
||||||
:param by_key: The key to filter by (default is the primary key).
|
|
||||||
:param when: A specific timestamp to filter the history.
|
|
||||||
:param until: A timestamp to filter history entries up to a certain point.
|
|
||||||
:param without_deleted: Exclude deleted entries if True.
|
|
||||||
:return: A list of historical entries as objects.
|
|
||||||
"""
|
|
||||||
f_tables = list(self.__foreign_tables.keys())
|
|
||||||
|
|
||||||
history_table = f"{self._table_name}_history"
|
|
||||||
builder = SQLSelectBuilder(history_table, self.__primary_key)
|
|
||||||
|
|
||||||
builder.with_attribute("*")
|
|
||||||
builder.with_value_condition(
|
|
||||||
f"{history_table}.{by_key or self.__primary_key}",
|
|
||||||
"=",
|
|
||||||
str(entry_id),
|
|
||||||
f_tables,
|
|
||||||
)
|
|
||||||
|
|
||||||
if self._default_filter_condition:
|
|
||||||
builder.with_condition(self._default_filter_condition, "", f_tables)
|
|
||||||
|
|
||||||
if without_deleted:
|
|
||||||
builder.with_value_condition(f"{history_table}.deleted", "=", "false", f_tables)
|
|
||||||
|
|
||||||
if when:
|
|
||||||
builder.with_value_condition(
|
|
||||||
self._attr_from_date_to_char(f"{history_table}.updated"),
|
|
||||||
"=",
|
|
||||||
f"'{when.strftime(DATETIME_FORMAT)}'",
|
|
||||||
f_tables,
|
|
||||||
)
|
|
||||||
|
|
||||||
if until:
|
|
||||||
builder.with_value_condition(
|
|
||||||
self._attr_from_date_to_char(f"{history_table}.updated"),
|
|
||||||
"<=",
|
|
||||||
f"'{until.strftime(DATETIME_FORMAT)}'",
|
|
||||||
f_tables,
|
|
||||||
)
|
|
||||||
|
|
||||||
builder.with_order_by(f"{history_table}.updated", "DESC")
|
|
||||||
|
|
||||||
query = await builder.build()
|
|
||||||
result = await self._db.select_map(query)
|
|
||||||
return [self.to_object(x) for x in result] if result else []
|
|
||||||
|
|
||||||
async def get_all(self) -> List[T_DBM]:
|
|
||||||
result = await self._prepare_query(sorts=[{self.__primary_key: "asc"}])
|
|
||||||
return [self.to_object(x) for x in result] if result else []
|
|
||||||
|
|
||||||
async def get_by_id(self, id: Union[int, str]) -> Optional[T_DBM]:
|
|
||||||
result = await self._prepare_query(filters=[{self.__primary_key: id}], sorts=[{self.__primary_key: "asc"}])
|
|
||||||
return self.to_object(result[0]) if result else None
|
|
||||||
|
|
||||||
async def find_by_id(self, id: Union[int, str]) -> Optional[T_DBM]:
|
|
||||||
result = await self._prepare_query(filters=[{self.__primary_key: id}], sorts=[{self.__primary_key: "asc"}])
|
|
||||||
return self.to_object(result[0]) if result else None
|
|
||||||
|
|
||||||
    async def get_by(
        self,
        filters: AttributeFilters = None,
        sorts: AttributeSorts = None,
        take: int = None,
        skip: int = None,
    ) -> list[T_DBM]:
        result = await self._prepare_query(filters, sorts, take, skip)
        if not result:
            raise ValueError("No result found")
        return [self.to_object(x) for x in result]

    async def get_single_by(
        self,
        filters: AttributeFilters = None,
        sorts: AttributeSorts = None,
        take: int = None,
        skip: int = None,
    ) -> T_DBM:
        result = await self._prepare_query(filters, sorts, take, skip)
        if not result:
            raise ValueError("No result found")
        if len(result) > 1:
            raise ValueError("More than one result found")
        return self.to_object(result[0])

    async def find_by(
        self,
        filters: AttributeFilters = None,
        sorts: AttributeSorts = None,
        take: int = None,
        skip: int = None,
    ) -> list[T_DBM]:
        result = await self._prepare_query(filters, sorts, take, skip)
        return [self.to_object(x) for x in result] if result else []

    async def find_single_by(
        self,
        filters: AttributeFilters = None,
        sorts: AttributeSorts = None,
        take: int = None,
        skip: int = None,
    ) -> Optional[T_DBM]:
        result = await self._prepare_query(filters, sorts, take, skip)
        if len(result) > 1:
            raise ValueError("More than one result found")
        return self.to_object(result[0]) if result else None
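
    # How the get_*/find_* pairs differ (illustrative; DAO and field names assumed):
    #
    #     await user_dao.get_single_by([{"name": {"equal": "alice"}}])   # raises on 0 or >1 rows
    #     await user_dao.find_single_by([{"name": {"equal": "alice"}}])  # None on 0, raises on >1
    #     await user_dao.find_by([{"deleted": {"equal": False}}], sorts=[{"id": "asc"}], take=20, skip=40)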

    async def touch(self, obj: T_DBM):
        """
        Touch the entry to update the last updated date
        :return:
        """
        await self._db.execute(
            f"""
            UPDATE {self._table_name}
            SET updated = NOW()
            WHERE {self.__primary_key} = {self._get_primary_key_value_sql(obj)};
            """
        )

    async def touch_many_by_id(self, ids: list[Id]):
        """
        Touch the entries to update the last updated date
        :return:
        """
        if len(ids) == 0:
            return

        await self._db.execute(
            f"""
            UPDATE {self._table_name}
            SET updated = NOW()
            WHERE {self.__primary_key} IN ({", ".join([str(x) for x in ids])});
            """
        )
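
    # Statement shape emitted by touch_many_by_id (identifiers are placeholders):
    #
    #     UPDATE Device SET updated = NOW() WHERE DeviceId IN (1, 2, 3);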

    async def _build_create_statement(self, obj: T_DBM, skip_editor=False) -> str:
        allowed_fields = [x for x in self.__attributes.keys() if x not in self.__ignored_attributes]

        fields = ", ".join([self.__db_names[x] for x in allowed_fields])
        fields = f"{'EditorId' if not skip_editor else ''}{f', {fields}' if not skip_editor and len(fields) > 0 else f'{fields}'}"

        values = ", ".join([self._get_value_sql(getattr(obj, x)) for x in allowed_fields])
        values = f"{await self._get_editor_id(obj) if not skip_editor else ''}{f', {values}' if not skip_editor and len(values) > 0 else f'{values}'}"

        return f"""
            INSERT INTO {self._table_name} (
                {fields}
            ) VALUES (
                {values}
            )
            RETURNING {self.__primary_key};
        """

    async def create(self, obj: T_DBM, skip_editor=False) -> int:
        self._logger.debug(f"create {type(obj).__name__} {obj.__dict__}")

        result = await self._db.execute(await self._build_create_statement(obj, skip_editor))
        return self._get_value_from_sql(self.__primary_key_type, result[0][0])

    async def create_many(self, objs: list[T_DBM], skip_editor=False) -> list[int]:
        if len(objs) == 0:
            return []
        self._logger.debug(f"create many {type(objs[0]).__name__} {len(objs)} {[x.__dict__ for x in objs]}")

        query = ""
        for obj in objs:
            query += await self._build_create_statement(obj, skip_editor)

        result = await self._db.execute(query)
        return [self._get_value_from_sql(self.__primary_key_type, x[0]) for x in result]
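
    # Shape of one statement from _build_create_statement (placeholder
    # identifiers; EditorId is prepended unless skip_editor is set):
    #
    #     INSERT INTO Device (EditorId, Name) VALUES (7, 'sensor-1')
    #     RETURNING DeviceId;
    #
    # create_many concatenates one such statement per object and casts each
    # returned key via _get_value_from_sql.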

    async def _build_update_statement(self, obj: T_DBM, skip_editor=False) -> str:
        allowed_fields = [x for x in self.__attributes.keys() if x not in self.__ignored_attributes]

        fields = ", ".join(
            [f"{self.__db_names[x]} = {self._get_value_sql(getattr(obj, x, None))}" for x in allowed_fields]
        )
        fields = f"{f'EditorId = {await self._get_editor_id(obj)}' if not skip_editor else ''}{f', {fields}' if not skip_editor and len(fields) > 0 else f'{fields}'}"

        return f"""
            UPDATE {self._table_name}
            SET {fields}
            WHERE {self.__primary_key} = {self._get_primary_key_value_sql(obj)};
        """

    async def update(self, obj: T_DBM, skip_editor=False):
        self._logger.debug(f"update {type(obj).__name__} {obj.__dict__}")
        await self._db.execute(await self._build_update_statement(obj, skip_editor))

    async def update_many(self, objs: list[T_DBM], skip_editor=False):
        if len(objs) == 0:
            return
        self._logger.debug(f"update many {type(objs[0]).__name__} {len(objs)} {[x.__dict__ for x in objs]}")

        query = ""
        for obj in objs:
            query += await self._build_update_statement(obj, skip_editor)

        await self._db.execute(query)

    async def _build_delete_statement(self, obj: T_DBM, hard_delete: bool = False) -> str:
        if hard_delete:
            return f"""
                DELETE FROM {self._table_name}
                WHERE {self.__primary_key} = {self._get_primary_key_value_sql(obj)};
            """

        return f"""
            UPDATE {self._table_name}
            SET EditorId = {await self._get_editor_id(obj)},
                Deleted = true
            WHERE {self.__primary_key} = {self._get_primary_key_value_sql(obj)};
        """

    async def delete(self, obj: T_DBM, hard_delete: bool = False):
        self._logger.debug(f"delete {type(obj).__name__} {obj.__dict__}")
        await self._db.execute(await self._build_delete_statement(obj, hard_delete))

    async def delete_many(self, objs: list[T_DBM], hard_delete: bool = False):
        if len(objs) == 0:
            return
        self._logger.debug(f"delete many {type(objs[0]).__name__} {len(objs)} {[x.__dict__ for x in objs]}")

        query = ""
        for obj in objs:
            query += await self._build_delete_statement(obj, hard_delete)

        await self._db.execute(query)
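
    # Deletes are soft by default and only flag the row (placeholder identifiers):
    #
    #     UPDATE Device SET EditorId = 7, Deleted = true WHERE DeviceId = 3;
    #
    # With hard_delete=True the row is removed instead:
    #
    #     DELETE FROM Device WHERE DeviceId = 3;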

    async def _build_restore_statement(self, obj: T_DBM) -> str:
        return f"""
            UPDATE {self._table_name}
            SET EditorId = {await self._get_editor_id(obj)},
                Deleted = false
            WHERE {self.__primary_key} = {self._get_primary_key_value_sql(obj)};
        """

    async def restore(self, obj: T_DBM):
        self._logger.debug(f"restore {type(obj).__name__} {obj.__dict__}")
        await self._db.execute(await self._build_restore_statement(obj))

    async def restore_many(self, objs: list[T_DBM]):
        if len(objs) == 0:
            return
        self._logger.debug(f"restore many {type(objs[0]).__name__} {len(objs)} {objs[0].__dict__}")

        query = ""
        for obj in objs:
            query += await self._build_restore_statement(obj)

        await self._db.execute(query)

    async def _prepare_query(
        self,
        filters: AttributeFilters = None,
        sorts: AttributeSorts = None,
        take: int = None,
        skip: int = None,
        for_count=False,
    ) -> list[dict]:
        """
        Prepares and executes a query using the SQLBuilder with the given parameters.
        :param filters: Conditions to filter the query.
        :param sorts: Sorting attributes and directions.
        :param take: Limit the number of results.
        :param skip: Offset the results.
        :return: Query result as a list of dictionaries.
        """
        external_table_deps = []
        builder = SQLSelectBuilder(self._table_name, self.__primary_key)

        for temp in self._external_fields:
            builder.with_temp_table(self._external_fields[temp])

        if for_count:
            builder.with_attribute("COUNT(*)", ignore_table_name=True)
        else:
            builder.with_attribute("*")

        for attr in self.__foreign_tables:
            table, join_condition = self.__foreign_tables[attr]
            builder.with_left_join(table, join_condition)

        if filters:
            await self._build_conditions(builder, filters, external_table_deps)

        if sorts:
            self._build_sorts(builder, sorts, external_table_deps)

        if take:
            builder.with_limit(take)

        if skip:
            builder.with_offset(skip)

        for external_table in external_table_deps:
            builder.use_temp_table(external_table)

        query = await builder.build()
        return await self._db.select_map(query)
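
    # Order of operations in _prepare_query, as implemented above: register temp
    # tables for external fields, pick the projection (* or COUNT(*)), LEFT JOIN
    # all foreign tables, apply filters and sorts, then LIMIT/OFFSET, attach the
    # temp tables the filters/sorts actually referenced, and finally build and
    # run the query through select_map.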

    async def _build_conditions(
        self,
        builder: SQLSelectBuilder,
        filters: AttributeFilters,
        external_table_deps: list[str],
    ):
        """
        Builds SQL conditions from GraphQL-like filters and adds them to the SQLBuilder.
        :param builder: The SQLBuilder instance to add conditions to.
        :param filters: GraphQL-like filter structure.
        :param external_table_deps: List to store external table dependencies.
        """
        if not isinstance(filters, list):
            filters = [filters]

        for filter_group in filters:
            sql_conditions = self._graphql_to_sql_conditions(filter_group, external_table_deps)
            for attr, operator, value in sql_conditions:
                if attr in self.__foreign_table_keys:
                    attr = self.__foreign_table_keys[attr]

                recursive_join = self._get_recursive_reference_join(attr)
                if recursive_join is not None:
                    builder.with_left_join(*recursive_join)

                external_table = self._get_external_field_key(attr)
                if external_table is not None:
                    external_table_deps.append(external_table)

                if operator == "fuzzy":
                    builder.with_levenshtein_condition(attr)
                elif operator in [
                    "IS NULL",
                    "IS NOT NULL",
                ]:  # operator without value
                    builder.with_condition(
                        attr,
                        operator,
                        [
                            x[0]
                            for fdao in self.__foreign_dao
                            for x in self.__foreign_dao[fdao].__foreign_tables.values()
                        ],
                    )
                else:
                    if attr in self.__date_attributes or String.to_snake_case(attr) in self.__date_attributes:
                        attr = self._attr_from_date_to_char(f"{self._table_name}.{attr}")

                    builder.with_value_condition(
                        attr,
                        operator,
                        self._get_value_sql(value),
                        [
                            x[0]
                            for fdao in self.__foreign_dao
                            for x in self.__foreign_dao[fdao].__foreign_tables.values()
                        ],
                    )

    def _graphql_to_sql_conditions(
        self, graphql_structure: dict, external_table_deps: list[str]
    ) -> list[tuple[str, str, Any]]:
        """
        Converts a GraphQL-like structure to SQL conditions.
        :param graphql_structure: The GraphQL-like filter structure.
        :param external_table_deps: List to track external table dependencies.
        :return: A list of tuples (attribute, operator, value).
        """

        operators = {
            "equal": "=",
            "notEqual": "!=",
            "greater": ">",
            "greaterOrEqual": ">=",
            "less": "<",
            "lessOrEqual": "<=",
            "isNull": "IS NULL",
            "isNotNull": "IS NOT NULL",
            "contains": "LIKE",  # value is wrapped in %...% below
            "notContains": "NOT LIKE",  # value is wrapped in %...% below
            "startsWith": "LIKE",  # value gets a trailing % below
            "endsWith": "LIKE",  # value gets a leading % below
            "in": "IN",
            "notIn": "NOT IN",
        }
        conditions = []

        def parse_node(node, parent_key=None, parent_dao=None):
            # Check for lists first: a bare list is shorthand for an IN condition.
            # (Checking dict first would silently drop lists, since a list is not a dict.)
            if isinstance(node, list):
                conditions.append((parent_key, "IN", node))
                return

            if not isinstance(node, dict):
                return

            for key, value in node.items():
                if isinstance(key, property):
                    key = key.fget.__name__

                external_fields_table_name_by_parent = self._get_external_field_key(parent_key)
                external_fields_table_name = self._get_external_field_key(key)
                external_field = (
                    external_fields_table_name
                    if external_fields_table_name_by_parent is None
                    else external_fields_table_name_by_parent
                )

                if key == "fuzzy":
                    self._handle_fuzzy_filter_conditions(conditions, external_table_deps, value)
                elif parent_dao is not None and key in parent_dao.__db_names:
                    parse_node(value, f"{parent_dao.table_name}.{key}")
                    continue

                elif external_field is not None:
                    external_table_deps.append(external_field)
                    parse_node(value, f"{external_field}.{key}")
                elif parent_key in self.__foreign_table_keys:
                    if key in operators:
                        parse_node({key: value}, self.__foreign_table_keys[parent_key])
                        continue

                    if parent_key in self.__foreign_dao:
                        foreign_dao = self.__foreign_dao[parent_key]
                        if key in foreign_dao.__foreign_tables:
                            parse_node(
                                value,
                                f"{self.__foreign_tables[parent_key][0]}.{foreign_dao.__foreign_table_keys[key]}",
                                foreign_dao.__foreign_dao[key],
                            )
                            continue

                    if parent_key in self.__foreign_tables:
                        parse_node(value, f"{self.__foreign_tables[parent_key][0]}.{key}")
                        continue

                    parse_node({parent_key: value})
                elif key in operators:
                    operator = operators[key]
                    if key == "contains" or key == "notContains":
                        value = f"%{value}%"
                    elif key == "startsWith":
                        value = f"{value}%"
                    elif key == "endsWith":
                        value = f"%{value}"
                    elif key == "isNull" or key == "isNotNull":
                        is_null_value = value.get("equal", None) if isinstance(value, dict) else value

                        if is_null_value is None:
                            operator = operators[key]
                        elif (key == "isNull" and is_null_value) or (key == "isNotNull" and not is_null_value):
                            operator = "IS NULL"
                        else:
                            operator = "IS NOT NULL"

                        conditions.append((parent_key, operator, None))
                        continue  # already appended; skip the generic append below
                    elif (key == "equal" or key == "notEqual") and value is None:
                        operator = operators["isNull"]

                    conditions.append((parent_key, operator, value))

                elif isinstance(value, dict):
                    if key in self.__foreign_table_keys:
                        parse_node(value, key)
                    elif key in self.__db_names and parent_key is not None:
                        parse_node({f"{parent_key}": value})
                    elif key in self.__db_names:
                        parse_node(value, self.__db_names[key])
                    else:
                        parse_node(value, key)
                elif value is None:
                    conditions.append((self.__db_names[key], "IS NULL", value))
                else:
                    conditions.append((self.__db_names[key], "=", value))

        parse_node(graphql_structure)
        return conditions
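
    # Example translation (attribute names assumed). A filter such as
    #
    #     {"name": {"contains": "sen"}, "deleted": {"equal": False}}
    #
    # produces condition tuples roughly like
    #
    #     [("Name", "LIKE", "%sen%"), ("Deleted", "=", False)]
    #
    # which _build_conditions then renders via _get_value_sql.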

    def _handle_fuzzy_filter_conditions(self, conditions, external_field_table_deps, sub_values):
        # Extract fuzzy filter parameters
        fuzzy_fields = get_value(sub_values, "fields", list[str])
        fuzzy_term = get_value(sub_values, "term", str)
        fuzzy_threshold = get_value(sub_values, "threshold", int, 5)

        if not fuzzy_fields or not fuzzy_term:
            raise ValueError("Fuzzy filter must include 'fields' and 'term'.")

        fuzzy_fields_db_names = []

        # Map fields to their database names
        for fuzzy_field in fuzzy_fields:
            external_fields_table_name = self._get_external_field_key(fuzzy_field)
            if external_fields_table_name is not None:
                external_fields_table = self._external_fields[external_fields_table_name]
                fuzzy_fields_db_names.append(f"{external_fields_table.table_name}.{fuzzy_field}")
                external_field_table_deps.append(external_fields_table.table_name)
            elif fuzzy_field in self.__db_names:
                fuzzy_fields_db_names.append(f"{self._table_name}.{self.__db_names[fuzzy_field]}")
            elif fuzzy_field in self.__foreign_tables:
                fuzzy_fields_db_names.append(f"{self._table_name}.{self.__foreign_table_keys[fuzzy_field]}")
            else:
                fuzzy_fields_db_names.append(self.__db_names[String.to_snake_case(fuzzy_field)][0])

        # Build fuzzy conditions for each field
        fuzzy_conditions = self._build_fuzzy_conditions(fuzzy_fields_db_names, fuzzy_term, fuzzy_threshold)

        # Combine conditions with OR and append to the main conditions
        conditions.append((f"({' OR '.join(fuzzy_conditions)})", "fuzzy", None))

    @staticmethod
    def _build_fuzzy_conditions(fields: list[str], term: str, threshold: int = 10) -> list[str]:
        conditions = []
        for field in fields:
            conditions.append(f"levenshtein({field}::TEXT, '{term}') <= {threshold}")  # Adjust the threshold as needed

        return conditions
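
    # Example output of _build_fuzzy_conditions(["Device.Name"], "senso", 3):
    #
    #     ["levenshtein(Device.Name::TEXT, 'senso') <= 3"]
    #
    # This assumes PostgreSQL's levenshtein() from the fuzzystrmatch extension.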

    def _get_external_field_key(self, field_name: str) -> Optional[str]:
        """
        Returns the key to get the external field if found, otherwise None.
        :param str field_name: The name of the field to search for.
        :return: The key if found, otherwise None.
        :rtype: Optional[str]
        """
        if field_name is None:
            return None

        for key, builder in self._external_fields.items():
            if field_name in builder.fields and field_name not in self.__db_names:
                return key

        return None

    def _get_recursive_reference_join(self, attr: str) -> Optional[tuple[str, str]]:
        parts = attr.split(".")
        table_name = ".".join(parts[:-1])

        if table_name == self._table_name or table_name == "":
            return None

        all_foreign_tables = {
            x[0]: x[1]
            for x in [
                *[x for x in self.__foreign_tables.values() if x[0] != self._table_name],
                *[x for fdao in self.__foreign_dao for x in self.__foreign_dao[fdao].__foreign_tables.values()],
            ]
        }

        if table_name not in all_foreign_tables:
            return None

        return table_name, all_foreign_tables[table_name]

    def _build_sorts(
        self,
        builder: SQLSelectBuilder,
        sorts: AttributeSorts,
        external_table_deps: list[str],
    ):
        """
        Resolves complex sorting structures into SQL-compatible sorting conditions.
        Tracks external table dependencies.
        :param builder: The SQLBuilder instance to add sorting to.
        :param sorts: Sorting attributes and directions in a complex structure.
        :param external_table_deps: List to track external table dependencies.
        """

        def parse_sort_node(node, parent_key=None):
            if isinstance(node, dict):
                for key, value in node.items():
                    if isinstance(value, dict):
                        # Recursively parse nested structures
                        parse_sort_node(value, key)
                    elif isinstance(value, str) and value.lower() in ["asc", "desc"]:
                        external_table = self._get_external_field_key(key)
                        if external_table:
                            external_table_deps.append(external_table)
                            key = f"{external_table}.{key}"

                        if parent_key in self.__foreign_tables:
                            key = f"{self.__foreign_tables[parent_key][0]}.{key}"
                        builder.with_order_by(key, value.upper())
                    else:
                        raise ValueError(f"Invalid sort direction: {value}")
            elif isinstance(node, list):
                for item in node:
                    parse_sort_node(item)
            else:
                raise ValueError(f"Invalid sort structure: {node}")

        parse_sort_node(sorts)
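
    # Accepted sort shapes (field names assumed): a flat dict, nested dicts for
    # foreign tables, or a list mixing both:
    #
    #     [{"name": "asc"}, {"device": {"serial": "desc"}}]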

    def _get_value_sql(self, value: Any) -> str:
        if isinstance(value, str):
            if value.lower() == "null":
                return "NULL"
            return f"'{value}'"

        if value is None:
            return "NULL"

        if isinstance(value, Enum):
            return f"'{value.value}'"

        if isinstance(value, bool):
            return "true" if value else "false"

        if isinstance(value, list):
            if len(value) == 0:
                return "()"
            return f"({', '.join([self._get_value_sql(x) for x in value])})"

        if isinstance(value, datetime.datetime):
            if value.tzinfo is None:
                value = value.replace(tzinfo=datetime.timezone.utc)

            return f"'{value.strftime(DATETIME_FORMAT)}'"

        return str(value)
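
    # Literal rendering performed by _get_value_sql (illustrative):
    #
    #     None / "null"      -> NULL
    #     "abc"              -> 'abc'
    #     True               -> true
    #     [1, 2]             -> (1, 2)
    #     SomeEnum.A         -> the enum's .value, quoted
    #     naive datetime     -> treated as UTC, formatted with DATETIME_FORMAT
    #     anything else      -> str(value)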

    @staticmethod
    def _get_value_from_sql(cast_type: type, value: Any) -> Optional[T]:
        """
        Get the value from the query result and cast it to the correct type
        :param type cast_type:
        :param Any value:
        :return Optional[T]: Casted value; when the value is the string "NULL", None is returned
        """
        if isinstance(value, str) and "NULL" in value:
            return None

        if value is None:
            return None

        if isinstance(value, cast_type):
            return value

        return cast_type(value)

    def _get_primary_key_value_sql(self, obj: T_DBM) -> str:
        value = getattr(obj, self.__primary_key)
        if isinstance(value, str):
            return f"'{value}'"

        return str(value)

    @staticmethod
    def _attr_from_date_to_char(attr: str) -> str:
        return f"TO_CHAR({attr}, 'YYYY-MM-DD HH24:MI:SS.US TZ')"

    @staticmethod
    async def _get_editor_id(obj: T_DBM):
        editor_id = obj.editor_id
        if editor_id is None:
            from cpl.core.ctx.user_context import get_user

            user = get_user()
            if user is not None:
                editor_id = user.id

        return editor_id if editor_id is not None else "NULL"