├── .github
│   └── workflows
│       ├── build_docs.yml
│       └── codeql.yml
├── .gitignore
├── CODE_OF_CONDUCT.md
├── LICENSE.md
├── README.md
├── SECURITY.md
├── assimilator
│   ├── __init__.py
│   ├── alchemy
│   │   ├── __init__.py
│   │   ├── database
│   │   │   ├── __init__.py
│   │   │   ├── error_wrapper.py
│   │   │   ├── model_utils.py
│   │   │   ├── repository.py
│   │   │   ├── specifications
│   │   │   │   ├── __init__.py
│   │   │   │   ├── filtering_options.py
│   │   │   │   └── specifications.py
│   │   │   └── unit_of_work.py
│   │   └── events
│   │       ├── __init__.py
│   │       ├── database
│   │       │   ├── __init__.py
│   │       │   └── repository.py
│   │       └── outbox_relay.py
│   ├── core
│   │   ├── __init__.py
│   │   ├── database
│   │   │   ├── __init__.py
│   │   │   ├── exceptions.py
│   │   │   ├── models.py
│   │   │   ├── repository.py
│   │   │   ├── specifications
│   │   │   │   ├── __init__.py
│   │   │   │   ├── adaptive.py
│   │   │   │   ├── filtering_options.py
│   │   │   │   ├── specifications.py
│   │   │   │   └── types.py
│   │   │   └── unit_of_work.py
│   │   ├── events
│   │   │   ├── __init__.py
│   │   │   ├── events.py
│   │   │   ├── events_bus.py
│   │   │   ├── exceptions.py
│   │   │   └── outbox_relay.py
│   │   ├── exceptions.py
│   │   ├── patterns
│   │   │   ├── __init__.py
│   │   │   ├── context_managers.py
│   │   │   ├── error_wrapper.py
│   │   │   └── lazy_command.py
│   │   ├── services
│   │   │   ├── __init__.py
│   │   │   ├── base.py
│   │   │   └── crud.py
│   │   └── usability
│   │       ├── __init__.py
│   │       ├── exceptions.py
│   │       ├── pattern_creator.py
│   │       └── registry.py
│   ├── internal
│   │   ├── __init__.py
│   │   ├── database
│   │   │   ├── __init__.py
│   │   │   ├── error_wrapper.py
│   │   │   ├── models_utils.py
│   │   │   ├── repository.py
│   │   │   ├── specifications
│   │   │   │   ├── __init__.py
│   │   │   │   ├── filter_specifications.py
│   │   │   │   ├── filtering_options.py
│   │   │   │   ├── internal_operator.py
│   │   │   │   ├── specifications.py
│   │   │   │   └── utils.py
│   │   │   └── unit_of_work.py
│   │   └── events
│   │       ├── __init__.py
│   │       └── events_bus.py
│   ├── kafka_
│   │   ├── __init__.py
│   │   └── events
│   │       ├── __init__.py
│   │       └── events_bus.py
│   ├── mongo
│   │   ├── __init__.py
│   │   └── database
│   │       ├── __init__.py
│   │       ├── error_wrapper.py
│   │       ├── models.py
│   │       ├── repository.py
│   │       ├── specifications
│   │       │   ├── __init__.py
│   │       │   ├── filtering_options.py
│   │       │   ├── specifications.py
│   │       │   └── utils.py
│   │       └── unit_of_work.py
│   └── redis_
│       ├── __init__.py
│       ├── database
│       │   ├── __init__.py
│       │   ├── models.py
│       │   ├── repository.py
│       │   └── unit_of_work.py
│       └── events
│           ├── __init__.py
│           └── events_bus.py
├── docs
│   ├── alchemy
│   │   ├── database.md
│   │   └── events.md
│   ├── api_reference
│   │   └── core.md
│   ├── concepts.md
│   ├── docs_theme
│   │   └── base.html
│   ├── help_framework.md
│   ├── images
│   │   ├── icon.png
│   │   ├── logo.png
│   │   ├── logo.svg
│   │   ├── logo_white.png
│   │   ├── logo_white.svg
│   │   ├── why_assimilator_no_usage.PNG
│   │   └── why_assimilator_usage.PNG
│   ├── index.md
│   ├── internal
│   │   ├── database.md
│   │   └── events.md
│   ├── kafka
│   │   └── events.md
│   ├── management
│   │   └── privacy.md
│   ├── mongo
│   │   └── database.md
│   ├── new_changes.md
│   ├── next_update.md
│   ├── redis
│   │   ├── database.md
│   │   └── events.md
│   ├── scripts
│   │   ├── add_footer.js
│   │   └── feedback.js
│   ├── services.md
│   ├── tutorial
│   │   ├── advanced_database.md
│   │   ├── architecture_tutorial.md
│   │   ├── database.md
│   │   ├── events.md
│   │   ├── how_to_create.md
│   │   └── important.md
│   ├── unidentified_patterns.md
│   └── video_tutorials.md
├── examples
│   ├── __init__.py
│   ├── complex_database
│   │   ├── __init__.py
│   │   ├── dependencies.py
│   │   ├── main.py
│   │   └── models.py
│   ├── fastapi_crud_example
│   │   ├── __init__.py
│   │   ├── dependencies.py
│   │   ├── main.py
│   │   ├── models.py
│   │   └── schema.py
│   ├── simple_database
│   │   ├── __init__.py
│   │   ├── dependencies.py
│   │   ├── main.py
│   │   └── models.py
│   ├── simple_events
│   │   ├── __init__.py
│   │   ├── dependencies.py
│   │   ├── events.py
│   │   └── main.py
│   └── simplest_example.py
├── mkdocs.yml
├── pyproject.toml
└── tests
    └── __init__.py
/.github/workflows/build_docs.yml:
--------------------------------------------------------------------------------
1 | name: Preview Docs
2 |
3 | on:
4 | push:
5 | branches:
6 | - master
7 |
8 | jobs:
9 | create_docs:
10 | runs-on: ubuntu-latest
11 | steps:
12 | - name: Build docs
13 | run: mkdocs gh-deploy
14 |
--------------------------------------------------------------------------------
/.github/workflows/codeql.yml:
--------------------------------------------------------------------------------
1 | name: "CodeQL"
2 |
3 | on:
4 | push:
5 | branches: [ "master" ]
6 | pull_request:
7 | # The branches below must be a subset of the branches above
8 | branches: [ "master" ]
9 | schedule:
10 | - cron: '21 19 * * 3'
11 |
12 | jobs:
13 | analyze:
14 | name: Analyze
15 | runs-on: ${{ (matrix.language == 'swift' && 'macos-latest') || 'ubuntu-latest' }}
16 | permissions:
17 | actions: read
18 | contents: read
19 | security-events: write
20 |
21 | strategy:
22 | fail-fast: false
23 | matrix:
24 | language: [ 'python' ]
25 |
26 | steps:
27 | - name: Checkout repository
28 | uses: actions/checkout@v3
29 |
30 | # Initializes the CodeQL tools for scanning.
31 | - name: Initialize CodeQL
32 | uses: github/codeql-action/init@v2
33 | with:
34 | languages: ${{ matrix.language }}
35 |
36 |
37 | # Autobuild attempts to build any compiled languages (C/C++, C#, Go, or Java).
38 | # If this step fails, then you should remove it and run the build manually (see below)
39 | - name: Autobuild
40 | uses: github/codeql-action/autobuild@v2
41 |
42 | - name: Perform CodeQL Analysis
43 | uses: github/codeql-action/analyze@v2
44 | with:
45 | category: "/language:${{matrix.language}}"
46 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | .Python
11 | build/
12 | develop-eggs/
13 | dist/
14 | downloads/
15 | eggs/
16 | .eggs/
17 | lib/
18 | lib64/
19 | parts/
20 | sdist/
21 | var/
22 | wheels/
23 | share/python-wheels/
24 | *.egg-info/
25 | .installed.cfg
26 | *.egg
27 | MANIFEST
28 |
29 | # PyInstaller
30 | # Usually these files are written by a python script from a template
31 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
32 | *.manifest
33 | *.spec
34 |
35 | # Installer logs
36 | pip-log.txt
37 | pip-delete-this-directory.txt
38 |
39 | # Unit test / coverage reports
40 | htmlcov/
41 | .tox/
42 | .nox/
43 | .coverage
44 | .coverage.*
45 | .cache
46 | nosetests.xml
47 | coverage.xml
48 | *.cover
49 | *.py,cover
50 | .hypothesis/
51 | .pytest_cache/
52 | cover/
53 |
54 | # Translations
55 | *.mo
56 | *.pot
57 |
58 | # Django stuff:
59 | *.log
60 | local_settings.py
61 | db.sqlite3
62 | db.sqlite3-journal
63 |
64 | # Flask stuff:
65 | instance/
66 | .webassets-cache
67 |
68 | # Scrapy stuff:
69 | .scrapy
70 |
71 | # Sphinx documentation
72 | docs/_build/
73 |
74 | # PyBuilder
75 | .pybuilder/
76 | target/
77 |
78 | # Jupyter Notebook
79 | .ipynb_checkpoints
80 |
81 | # IPython
82 | profile_default/
83 | ipython_config.py
84 |
85 | # pyenv
86 | # For a library or package, you might want to ignore these files since the code is
87 | # intended to run in multiple environments; otherwise, check them in:
88 | # .python-version
89 |
90 | # pipenv
91 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
92 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
93 | # having no cross-platform support, pipenv may install dependencies that don't work, or not
94 | # install all needed dependencies.
95 | #Pipfile.lock
96 |
97 | # poetry
98 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
99 | # This is especially recommended for binary packages to ensure reproducibility, and is more
100 | # commonly ignored for libraries.
101 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
102 | #poetry.lock
103 |
104 | # pdm
105 | # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
106 | #pdm.lock
107 | # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
108 | # in version control.
109 | # https://pdm.fming.dev/#use-with-ide
110 | .pdm.toml
111 |
112 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
113 | __pypackages__/
114 |
115 | # Celery stuff
116 | celerybeat-schedule
117 | celerybeat.pid
118 |
119 | # SageMath parsed files
120 | *.sage.py
121 |
122 | # Environments
123 | .env
124 | .venv
125 | env/
126 | venv/
127 | ENV/
128 | env.bak/
129 | venv.bak/
130 |
131 | # Spyder project settings
132 | .spyderproject
133 | .spyproject
134 |
135 | # Rope project settings
136 | .ropeproject
137 |
138 | # mkdocs documentation
139 | /site
140 | .idea/
141 | py_assimilator-0.1.1/
142 | py_assimilator.egg-info/
143 |
144 | # mypy
145 | .mypy_cache/
146 | .dmypy.json
147 | dmypy.json
148 |
149 | # Pyre type checker
150 | .pyre/
151 |
152 | # pytype static type analyzer
153 | .pytype/
154 |
155 | # Cython debug symbols
156 | cython_debug/
157 |
158 | # PyCharm
159 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can
160 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
161 | # and can be added to the global gitignore or merged into this file. For a more nuclear
162 | # option (not recommended) you can uncomment the following to ignore the entire idea folder.
163 | #.idea/
164 |
--------------------------------------------------------------------------------
/CODE_OF_CONDUCT.md:
--------------------------------------------------------------------------------
1 | # Contributor Covenant Code of Conduct
2 |
3 | ## Our Pledge
4 |
5 | We as members, contributors, and leaders pledge to make participation in our
6 | community a harassment-free experience for everyone, regardless of age, body
7 | size, visible or invisible disability, ethnicity, sex characteristics, gender
8 | identity and expression, level of experience, education, socio-economic status,
9 | nationality, personal appearance, race, religion, or sexual identity
10 | and orientation.
11 |
12 | We pledge to act and interact in ways that contribute to an open, welcoming,
13 | diverse, inclusive, and healthy community.
14 |
15 | ## Our Standards
16 |
17 | Examples of behavior that contributes to a positive environment for our
18 | community include:
19 |
20 | * Demonstrating empathy and kindness toward other people
21 | * Being respectful of differing opinions, viewpoints, and experiences
22 | * Giving and gracefully accepting constructive feedback
23 | * Accepting responsibility and apologizing to those affected by our mistakes,
24 | and learning from the experience
25 | * Focusing on what is best not just for us as individuals, but for the
26 | overall community
27 |
28 | Examples of unacceptable behavior include:
29 |
30 | * The use of sexualized language or imagery, and sexual attention or
31 | advances of any kind
32 | * Trolling, insulting or derogatory comments, and personal or political attacks
33 | * Public or private harassment
34 | * Publishing others' private information, such as a physical or email
35 | address, without their explicit permission
36 | * Other conduct which could reasonably be considered inappropriate in a
37 | professional setting
38 |
39 | ## Enforcement Responsibilities
40 |
41 | Community leaders are responsible for clarifying and enforcing our standards of
42 | acceptable behavior and will take appropriate and fair corrective action in
43 | response to any behavior that they deem inappropriate, threatening, offensive,
44 | or harmful.
45 |
46 | Community leaders have the right and responsibility to remove, edit, or reject
47 | comments, commits, code, wiki edits, issues, and other contributions that are
48 | not aligned to this Code of Conduct, and will communicate reasons for moderation
49 | decisions when appropriate.
50 |
51 | ## Scope
52 |
53 | This Code of Conduct applies within all community spaces, and also applies when
54 | an individual is officially representing the community in public spaces.
55 | Examples of representing our community include using an official e-mail address,
56 | posting via an official social media account, or acting as an appointed
57 | representative at an online or offline event.
58 |
59 | ## Enforcement
60 |
61 | Instances of abusive, harassing, or otherwise unacceptable behavior may be
62 | reported to the community leaders responsible for enforcement at
63 | python.on.papyrus@gmail.com.
64 | All complaints will be reviewed and investigated promptly and fairly.
65 |
66 | All community leaders are obligated to respect the privacy and security of the
67 | reporter of any incident.
68 |
69 | ## Enforcement Guidelines
70 |
71 | Community leaders will follow these Community Impact Guidelines in determining
72 | the consequences for any action they deem in violation of this Code of Conduct:
73 |
74 | ### 1. Correction
75 |
76 | **Community Impact**: Use of inappropriate language or other behavior deemed
77 | unprofessional or unwelcome in the community.
78 |
79 | **Consequence**: A private, written warning from community leaders, providing
80 | clarity around the nature of the violation and an explanation of why the
81 | behavior was inappropriate. A public apology may be requested.
82 |
83 | ### 2. Warning
84 |
85 | **Community Impact**: A violation through a single incident or series
86 | of actions.
87 |
88 | **Consequence**: A warning with consequences for continued behavior. No
89 | interaction with the people involved, including unsolicited interaction with
90 | those enforcing the Code of Conduct, for a specified period of time. This
91 | includes avoiding interactions in community spaces as well as external channels
92 | like social media. Violating these terms may lead to a temporary or
93 | permanent ban.
94 |
95 | ### 3. Temporary Ban
96 |
97 | **Community Impact**: A serious violation of community standards, including
98 | sustained inappropriate behavior.
99 |
100 | **Consequence**: A temporary ban from any sort of interaction or public
101 | communication with the community for a specified period of time. No public or
102 | private interaction with the people involved, including unsolicited interaction
103 | with those enforcing the Code of Conduct, is allowed during this period.
104 | Violating these terms may lead to a permanent ban.
105 |
106 | ### 4. Permanent Ban
107 |
108 | **Community Impact**: Demonstrating a pattern of violation of community
109 | standards, including sustained inappropriate behavior, harassment of an
110 | individual, or aggression toward or disparagement of classes of individuals.
111 |
112 | **Consequence**: A permanent ban from any sort of public interaction within
113 | the community.
114 |
115 | ## Attribution
116 |
117 | This Code of Conduct is adapted from the [Contributor Covenant][homepage],
118 | version 2.0, available at
119 | https://www.contributor-covenant.org/version/2/0/code_of_conduct.html.
120 |
121 | Community Impact Guidelines were inspired by [Mozilla's code of conduct
122 | enforcement ladder](https://github.com/mozilla/diversity).
123 |
124 | [homepage]: https://www.contributor-covenant.org
125 |
126 | For answers to common questions about this code of conduct, see the FAQ at
127 | https://www.contributor-covenant.org/faq. Translations are available at
128 | https://www.contributor-covenant.org/translations.
129 |
--------------------------------------------------------------------------------
/LICENSE.md:
--------------------------------------------------------------------------------
1 | Copyright © 2022 Andrey Ivanov
2 |
3 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the “Software”), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
4 |
5 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
6 |
7 | THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
8 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Assimilator - the best Python patterns for the best projects
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 | ## Install now
21 | * `pip install py-assimilator`
22 |
23 | ## What is that all about?
24 |
25 | 1. We want to write the best code.
26 | 2. We need the best patterns and techniques for this.
27 | 3. We use PyAssimilator and save lots of time.
28 | 4. We use PyAssimilator and write the best code.
29 | 5. We use PyAssimilator and use the best patterns.
30 | 6. We use PyAssimilator and have no dependencies in our code.
31 | 7. We use PyAssimilator and can switch one database to another in a matter of seconds.
32 | 8. We learn PyAssimilator once and use it forever!
33 | 9. **And most importantly, we make Python projects better!**
34 |
35 |
36 | ## Code comparison
37 |
38 | Before PyAssimilator:
39 | ```Python
40 | # BAD CODE :(
41 |
42 | def create_user(username: str, email: str):
43 | # NO PATTERNS!
44 | # ONLY ONE DATABASE CHOICE!
45 | new_user = User(username=username, email=email, balance=0) # DEPENDENCY!
46 | session = db_session() # DEPENDENCY!
47 | session.add(new_user)
48 | session.commit() # NO ACID TRANSACTIONS!
49 | return new_user
50 |
51 | ```
52 |
53 | After:
54 | ```Python
55 | # GOOD CODE :)
56 |
57 | def create_user(username: str, email: str, uow: UnitOfWork):
58 | # BEST DDD PATTERNS
59 | # PATTERN SUBSTITUTION/MULTIPLE DATABASES AT ONCE
60 |
61 | with uow: # ACID TRANSACTIONS IN ANY DATABASE
62 | new_user = uow.repository.save(
63 | username=username, # NO MODEL DEPENDENCIES
64 | email=email,
65 | balance=0,
66 | )
67 | uow.commit() # AUTO ROLLBACK
68 |
69 | return new_user
70 |
71 | ```
72 |
73 | ## So, do I really need it?
74 |
75 | If you want to spend less time writing your code, but write better code - then you must use PyAssimilator.
76 | It can be hard to start if you have no experience with good code, so you can watch the creator's [video tutorials](https://knucklesuganda.github.io/py_assimilator/video_tutorials/).
77 |
78 |
79 | ## Our vision
80 |
81 | Make Python the best programming language for enterprise development and use all of its dynamic capabilities to write
82 | things that other languages can't even comprehend!
83 |
84 | - Pattern substitution(switch databases easily) ✔️
85 | - Event-based apps(in development) 🛠️
86 | - 45% of all Python projects use PyAssimilator 🛠️
87 | - Independent code(in development) 🛠️
88 | - Adaptive patterns(in development) 🛠️
89 | - Automatic code improvements(in development) 🛠️
90 | - Decentralized code management(in development) 🛠️
91 |
92 | If you want to help with any of those things - feel free to contribute to the project. Remember, you never do anything for
93 | free - and contributing here will be no exception.
94 |
95 | ## Sources
96 | * [Github](https://github.com/knucklesuganda/py_assimilator)
97 | * [PyPI](https://pypi.org/project/py-assimilator/)
98 | * [Documentation](https://knucklesuganda.github.io/py_assimilator/)
100 | * [Author's YouTube RU](https://www.youtube.com/channel/UCSNpJHMOU7FqjD4Ttux0uuw)
101 | * [Author's YouTube ENG](https://www.youtube.com/channel/UCeC9LNDwRP9OfjyOFHaSikA)
102 | * [Discord channel](https://discord.gg/gTVaGu7DHN)
103 | * [Donations](https://www.donationalerts.com/r/pyassimilator)
104 |
105 | ## Contributors
106 |
107 |
108 |
109 |
110 |
111 | ## Stars history
112 |
113 | [](https://star-history.com/#knucklesuganda/py_assimilator&Date)
114 |
115 |
116 | # Donate and create your own framework!
117 |
118 | [Donate using this link](https://www.donationalerts.com/r/pyassimilator) and help PyAssimilator prosper! You can also request a feature that you want to see in our framework, and we will add it to our priority list!
119 |
120 |
121 | ## ⭐Stargazers⭐
122 |
123 | We love all people who star our library. You can look at all stargazers in the documentation:
124 |
125 | https://knucklesuganda.github.io/py_assimilator/#stars-history
126 |
127 | > If you star the library you will appear there as well!
128 |
129 |
130 | ## Types of patterns
131 | These are different use cases for the patterns implemented:
132 |
133 | - Database - patterns for database/data layer interactions.
134 | - Events(in development) - projects with events or event-driven architecture.
135 | - Unidentified - patterns that are useful for different purposes.
136 |
137 | ## Available providers
138 | Providers are implementations of these patterns for external modules like SQLAlchemy or FastAPI.
139 |
140 | - Alchemy(Database, Events) - patterns for [SQLAlchemy](https://docs.sqlalchemy.org/en/20/) for both database and events.
141 | - Kafka(Events) - patterns in [Kafka](https://kafka.apache.org/) related to events.
142 | - Internal(Database, Events) - the internal provider saves everything in memory (dict, list, and other structures within your app).
143 | - Redis(Database, Events) - redis_ allows us to work with the [Redis](https://redis.io/) in-memory database.
144 | - MongoDB(Database) - mongo allows us to work with the [MongoDB](https://www.mongodb.com/) database.
145 |
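146 | As a quick illustration, here is a minimal sketch of how the alchemy provider's patterns could be wired together by hand and passed to the `create_user()` function from the code comparison above. It assumes a SQLAlchemy 2 `engine` and a `User` model that come from your own project setup:
147 | 
148 | ```Python
149 | from sqlalchemy.orm import Session
150 | 
151 | from assimilator.alchemy.database import AlchemyRepository, AlchemyUnitOfWork
152 | 
153 | # `engine` and `User` are assumed to be defined in your own project.
154 | session = Session(engine)
155 | repository = AlchemyRepository(session=session, model=User)
156 | uow = AlchemyUnitOfWork(repository=repository)
157 | 
158 | create_user(username="Andrey", email="user@example.com", uow=uow)
159 | ```
160 | 
161 | Switching to another provider (for example mongo or redis_) keeps `create_user()` unchanged - only this wiring code changes.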
--------------------------------------------------------------------------------
/SECURITY.md:
--------------------------------------------------------------------------------
1 | # Security Policy
2 | Security is one of the most important things for PyAssimilator and its community.
3 |
4 | ## Versions
5 | The module is still in development, so there may be bugs and drastic changes.
6 |
7 | You are encouraged to write tests for your application and update your PyAssimilator version frequently after ensuring that your tests are passing. This way you will benefit from the latest features, bug fixes, and security fixes.
8 |
9 | You can learn more about PyAssimilator versions and how to pin and upgrade them in the docs.
10 |
11 | ## Reporting a Vulnerability
12 | If you think you found a vulnerability, please report it by sending an email to: `python.on.papyrus@gmail.com`. Please try to be as explicit as possible, describing all the steps and example code to reproduce the security issue.
13 | [Andrey](https://github.com/knucklesuganda) will review it thoroughly and get back to you.
14 | You will be added to the list of developers of PyAssimilator after that.
15 |
16 | ## Public Discussions
17 | Please do not make security issues public until they are fixed! That will reduce the impact and protect a lot of programs!
18 |
19 | Thanks for your help! Let's code!
20 |
--------------------------------------------------------------------------------
/assimilator/__init__.py:
--------------------------------------------------------------------------------
1 | import os
2 |
3 |
4 | if os.environ.get('PY_ASSIMILATOR_MESSAGE', None) is None:
5 | print(f"""
6 | ___ _ _ __ __
7 | ____ __ __/ | __________(_)___ ___ (_) /___ _/ /_____ _____
8 | / __ \/ / / / /| | / ___/ ___/ / __ `__ \/ / / __ `/ __/ __ \/ ___/
9 | / /_/ / /_/ / ___ |(__ |__ ) / / / / / / / / /_/ / /_/ /_/ / /
10 | / .___/\__, /_/ |_/____/____/_/_/ /_/ /_/_/_/\__,_/\__/\____/_/
11 | /_/ /____/
12 |
13 |
14 | Thank you for using PyAssimilator!
15 | Documentation is available here: https://knucklesuganda.github.io/py_assimilator/
16 | Star this library on GitHub and get perks: https://github.com/knucklesuganda/py_assimilator
17 |
18 | If you want to turn off this text, add an environment variable:
19 | 1) You can add this code BEFORE all of your imports:
20 | import os
21 | os.environ['PY_ASSIMILATOR_MESSAGE'] = 'False'
22 |
23 | 2) You can add it within your system or in your .env file: PY_ASSIMILATOR_MESSAGE=False
24 |
25 | """)
26 |
--------------------------------------------------------------------------------
/assimilator/alchemy/__init__.py:
--------------------------------------------------------------------------------
1 | import sqlalchemy
2 |
3 | from assimilator.core.services import CRUDService
4 | from assimilator.core.usability.registry import register_provider, PatternList
5 | from assimilator.alchemy.database import AlchemyRepository, AlchemyUnitOfWork
6 |
7 |
8 | if sqlalchemy.__version__ < "2.0.0":
9 | raise RuntimeWarning(
10 | "PyAssimilator will only support SQLAlchemy 2 from now on. Please, update "
11 | "the library using this manual: https://docs.sqlalchemy.org/en/20/changelog/migration_20.html"
12 | )
13 |
14 |
15 | register_provider(provider='alchemy', pattern_list=PatternList(
16 | repository=AlchemyRepository,
17 | uow=AlchemyUnitOfWork,
18 | crud=CRUDService
19 | ))
20 |
--------------------------------------------------------------------------------
/assimilator/alchemy/database/__init__.py:
--------------------------------------------------------------------------------
1 | from assimilator.alchemy.database.repository import *
2 | from assimilator.alchemy.database.specifications.specifications import *
3 | from assimilator.alchemy.database.specifications.filtering_options import *
4 | from assimilator.alchemy.database.unit_of_work import *
5 |
--------------------------------------------------------------------------------
/assimilator/alchemy/database/error_wrapper.py:
--------------------------------------------------------------------------------
1 | from sqlalchemy.exc import NoResultFound, IntegrityError, SQLAlchemyError, MultipleResultsFound
2 |
3 | from assimilator.core.database.exceptions import (
4 | DataLayerError,
5 | NotFoundError,
6 | InvalidQueryError,
7 | MultipleResultsError,
8 | )
9 | from assimilator.core.patterns.error_wrapper import ErrorWrapper
10 |
11 |
12 | class AlchemyErrorWrapper(ErrorWrapper):
13 | def __init__(self):
14 | super(AlchemyErrorWrapper, self).__init__(error_mappings={
15 | NoResultFound: NotFoundError,
16 | IntegrityError: InvalidQueryError,
17 | SQLAlchemyError: DataLayerError,
18 | MultipleResultsFound: MultipleResultsError,
19 | }, default_error=DataLayerError)
20 |
21 |
22 | __all__ = ['AlchemyErrorWrapper']
23 |
--------------------------------------------------------------------------------
/assimilator/alchemy/database/model_utils.py:
--------------------------------------------------------------------------------
1 | from typing import TypeVar, Type
2 |
3 | from sqlalchemy import inspect
4 |
5 |
6 | T = TypeVar("T")
7 |
8 |
9 | def get_model_from_relationship(model: T, relationship_name: str):
10 | foreign_prop = inspect(model).relationships[relationship_name]
11 | return foreign_prop.entity.class_, foreign_prop.uselist
12 |
13 |
14 | def dict_to_alchemy_models(data: dict, model: Type[T]) -> T:
15 | for relationship in inspect(model).relationships.keys():
16 | foreign_data = data.get(relationship)
17 | if foreign_data is None:
18 | continue
19 |
20 | foreign_model, is_list = get_model_from_relationship(
21 | model=model,
22 | relationship_name=relationship,
23 | )
24 |
25 | if not is_list and isinstance(foreign_data, dict):
26 | foreign_data = dict_to_alchemy_models(data=foreign_data, model=foreign_model)
27 | foreign_data = foreign_model(**foreign_data)
28 | elif is_list:
29 | foreign_models = (
30 | foreign_data for foreign_data in foreign_data
31 | if isinstance(foreign_data, dict)
32 | )
33 |
34 | for i, foreign_part in enumerate(foreign_models):
35 | foreign_part = dict_to_alchemy_models(data=foreign_part, model=foreign_model)
36 | foreign_data[i] = foreign_model(**foreign_part)
37 |
38 | data[relationship] = foreign_data
39 |
40 | return data
41 |
--------------------------------------------------------------------------------
/assimilator/alchemy/database/repository.py:
--------------------------------------------------------------------------------
1 | from typing import Type, Union, Optional, TypeVar, Collection
2 |
3 | from sqlalchemy import func, select, update, delete
4 | from sqlalchemy.orm import Session, Query
5 | from sqlalchemy.inspection import inspect
6 |
7 | from assimilator.alchemy.database.model_utils import dict_to_alchemy_models
8 | from assimilator.core.patterns.error_wrapper import ErrorWrapper
9 | from assimilator.core.database.exceptions import InvalidQueryError
10 | from assimilator.alchemy.database.error_wrapper import AlchemyErrorWrapper
11 | from assimilator.alchemy.database.specifications.specifications import AlchemySpecificationList
12 | from assimilator.core.database import Repository, LazyCommand, SpecificationType
13 |
14 |
15 | AlchemyModelT = TypeVar("AlchemyModelT")
16 |
17 |
18 | class AlchemyRepository(Repository):
19 | session: Session
20 | model: Type[AlchemyModelT]
21 |
22 | def __init__(
23 | self,
24 | session: Session,
25 | model: Type[AlchemyModelT],
26 | initial_query: Query = None,
27 | specifications: Type[AlchemySpecificationList] = AlchemySpecificationList,
28 | error_wrapper: Optional[ErrorWrapper] = None,
29 | ):
30 | super(AlchemyRepository, self).__init__(
31 | session=session,
32 | model=model,
33 | initial_query=initial_query if initial_query is not None else select(model),
34 | specifications=specifications,
35 | error_wrapper=error_wrapper or AlchemyErrorWrapper(),
36 | )
37 |
38 | def get(
39 | self,
40 | *specifications: SpecificationType,
41 | lazy: bool = False,
42 | initial_query: Query = None,
43 | ) -> Union[AlchemyModelT, LazyCommand[AlchemyModelT]]:
44 | query = self._apply_specifications(
45 | query=initial_query,
46 | specifications=specifications,
47 | )
48 | return self.session.execute(query).one()[0]
49 |
50 | def filter(
51 | self,
52 | *specifications: SpecificationType,
53 | lazy: bool = False,
54 | initial_query: Query = None,
55 | ) -> Union[Collection[AlchemyModelT], LazyCommand[Collection[AlchemyModelT]]]:
56 | query = self._apply_specifications(
57 | query=initial_query,
58 | specifications=specifications,
59 | )
60 | return [result[0] for result in self.session.execute(query)]
61 |
62 | def update(
63 | self,
64 | obj: Optional[AlchemyModelT] = None,
65 | *specifications: SpecificationType,
66 | **update_values,
67 | ) -> None:
68 | obj, specifications = self._check_obj_is_specification(obj, specifications)
69 |
70 | if specifications:
71 | if not update_values:
72 | raise InvalidQueryError(
73 | "You did not provide any update_values "
74 | "to the update() yet provided specifications"
75 | )
76 |
77 | query = self._apply_specifications(
78 | query=update(self.model),
79 | specifications=specifications,
80 | )
81 | self.session.execute(
82 | query.values(update_values).execution_options(synchronize_session=False)
83 | )
84 |
85 | elif obj is not None:
86 | if obj not in self.session:
87 | obj = self.session.merge(obj)
88 | self.session.add(obj)
89 |
90 | def dict_to_models(self, data: dict) -> AlchemyModelT:
91 | return self.model(**dict_to_alchemy_models(data=data, model=self.model))
92 |
93 | def save(self, obj: Optional[AlchemyModelT] = None, **data) -> AlchemyModelT:
94 | if obj is None:
95 | obj = self.dict_to_models(data)
96 |
97 | self.session.add(obj)
98 | return obj
99 |
100 | def refresh(self, obj: AlchemyModelT) -> None:
101 | if obj not in self.session:
102 | obj = self.session.merge(obj)
103 |
104 | self.session.refresh(obj)
105 |
106 | def delete(self, obj: Optional[AlchemyModelT] = None, *specifications: SpecificationType) -> None:
107 | obj, specifications = self._check_obj_is_specification(obj, specifications)
108 |
109 | if specifications:
110 | self.session.execute(self._apply_specifications(
111 | query=delete(self.model),
112 | specifications=specifications,
113 | ))
114 | elif obj is not None:
115 | self.session.delete(obj)
116 |
117 | def is_modified(self, obj: AlchemyModelT) -> bool:
118 | return obj in self.session and self.session.is_modified(obj)
119 |
120 | def count(
121 | self,
122 | *specifications: SpecificationType,
123 | lazy: bool = False,
124 | initial_query: Query = None
125 | ) -> Union[LazyCommand[int], int]:
126 | primary_keys = inspect(self.model).primary_key
127 |
128 | if not primary_keys:
129 | raise InvalidQueryError(
130 | "Your repository model does not have any primary keys. We cannot use count()"
131 | )
132 |
133 | return self.get(
134 | *specifications,
135 | lazy=False,
136 | initial_query=initial_query or select(func.count(getattr(self.model, primary_keys[0].name))),
137 | )
138 |
139 |
140 | __all__ = [
141 | 'AlchemyRepository',
142 | ]
143 |
--------------------------------------------------------------------------------
/assimilator/alchemy/database/specifications/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/knucklesuganda/py_assimilator/43108e3575750f3cd6e9dff6d59068d1a5be5e55/assimilator/alchemy/database/specifications/__init__.py
--------------------------------------------------------------------------------
/assimilator/alchemy/database/specifications/filtering_options.py:
--------------------------------------------------------------------------------
1 | from typing import Callable, Any
2 |
3 | from sqlalchemy import column
4 | from sqlalchemy.sql.elements import ColumnClause
5 |
6 | from assimilator.core.database.specifications.filtering_options import \
7 | FilteringOptions, FILTERING_OPTIONS_SEPARATOR
8 |
9 |
10 | class AlchemyFilteringOptions(FilteringOptions):
11 | table_name: str = None
12 |
13 | @staticmethod
14 | def _convert_field(field: str) -> ColumnClause:
15 | field_parts = field.split(FILTERING_OPTIONS_SEPARATOR)
16 |
17 | if len(field_parts) > 2:
18 | field = ".".join(field_parts[-2:])
19 | else:
20 | field = ".".join(field_parts)
21 |
22 | return column(field, is_literal=True)
23 |
24 | def parse_field(self, raw_field: str, value: Any) -> Callable:
25 | fields = raw_field.split(FILTERING_OPTIONS_SEPARATOR)
26 | last_field = fields[-1]
27 |
28 | if len(fields) == 1 and self.table_name is not None:
29 | last_field = f"{self.table_name}.{last_field}"
30 | filter_func = self.filter_options.get(last_field, self.get_default_filter())
31 | return filter_func(last_field, value)
32 |
33 | return super(AlchemyFilteringOptions, self).parse_field(raw_field=raw_field, value=value)
34 |
35 | @staticmethod
36 | def _eq(field, value):
37 | return AlchemyFilteringOptions._convert_field(field) == value
38 |
39 | @staticmethod
40 | def _gt(field, value):
41 | return AlchemyFilteringOptions._convert_field(field) > value
42 |
43 | @staticmethod
44 | def _gte(field, value):
45 | return AlchemyFilteringOptions._convert_field(field) >= value
46 |
47 | @staticmethod
48 | def _lt(field, value):
49 | return AlchemyFilteringOptions._convert_field(field) < value
50 |
51 | @staticmethod
52 | def _lte(field, value):
53 | return AlchemyFilteringOptions._convert_field(field) <= value
54 |
55 | @staticmethod
56 | def _not(field, value):
57 | return AlchemyFilteringOptions._convert_field(field) != value
58 |
59 | @staticmethod
60 | def _is(field, value):
61 | return AlchemyFilteringOptions._convert_field(field).is_(value)
62 |
63 | @staticmethod
64 | def _like(field, value):
65 | return AlchemyFilteringOptions._convert_field(field).like(value)
66 |
67 | @staticmethod
68 | def _regex(field, value):
69 | return AlchemyFilteringOptions._convert_field(field).regexp_match(value)
70 |
71 |
72 | __all__ = [
73 | 'AlchemyFilteringOptions',
74 | ]
75 |
--------------------------------------------------------------------------------
/assimilator/alchemy/database/specifications/specifications.py:
--------------------------------------------------------------------------------
1 | from itertools import zip_longest
2 | from typing import Collection, Optional, Iterable, Any, Dict, Callable, Union
3 |
4 | from sqlalchemy.orm import load_only, Load
5 | from sqlalchemy import column, desc, and_, or_, not_, Select, inspect
6 |
7 | from assimilator.alchemy.database.model_utils import get_model_from_relationship
8 | from assimilator.alchemy.database.specifications.filtering_options import AlchemyFilteringOptions
9 | from assimilator.core.database.specifications.specifications import (
10 | specification,
11 | SpecificationList,
12 | SpecificationType,
13 | FilterSpecification,
14 | )
15 |
16 |
17 | class AlchemyFilter(FilterSpecification):
18 | filtering_options_cls = AlchemyFilteringOptions
19 |
20 | def __init__(self, *filters, **named_filters):
21 | super(AlchemyFilter, self).__init__(*filters)
22 | self.filters.append(named_filters)
23 |
24 | def __or__(self, other: 'FilterSpecification') -> SpecificationType:
25 | return CompositeFilter(self, other, func=or_)
26 |
27 | def __and__(self, other: 'FilterSpecification') -> SpecificationType:
28 | return CompositeFilter(self, other, func=and_)
29 |
30 | def __invert__(self):
31 | return CompositeFilter(self, func=not_)
32 |
33 | def parse_filters(self, model):
34 | self.filtering_options.table_name = str(inspect(model).selectable)
35 | named_filters = list(filter_ for filter_ in self.filters if isinstance(filter_, Dict))
36 |
37 | for filter_ in named_filters:
38 | for field, value in filter_.items():
39 | self.filters.append(
40 | self.filtering_options.parse_field(
41 | raw_field=field, value=value,
42 | )
43 | )
44 |
45 | self.filters.remove(filter_)
46 |
47 | def __call__(self, query: Select, **context: Any) -> Select:
48 | self.parse_filters(context['repository'].model)
49 | return query.filter(*self.filters)
50 |
51 |
52 | class CompositeFilter(AlchemyFilter):
53 | def __init__(self, *filters: Union[FilterSpecification, 'CompositeFilter'], func: Callable):
54 | super(CompositeFilter, self).__init__()
55 | self.filter_specs = filters
56 | self.func = func
57 |
58 | def __call__(self, query: Select, **context: Any) -> Select:
59 | parsed_specs = []
60 |
61 | for spec in self.filter_specs:
62 | parsed_specs.append(spec(query=Select(), **context).whereclause)
63 |
64 | return query.filter(self.func(*parsed_specs))
65 |
66 |
67 | alchemy_filter = AlchemyFilter
68 |
69 |
70 | @specification
71 | def alchemy_order(*clauses: str, query: Select, **_) -> Select:
72 | parsed_clauses = []
73 |
74 | for clause in clauses:
75 | if clause.startswith("-"):
76 | parsed_clauses.append(desc(column(clause[1:], is_literal=True)))
77 | else:
78 | parsed_clauses.append(column(clause, is_literal=True))
79 |
80 | return query.order_by(*parsed_clauses)
81 |
82 |
83 | @specification
84 | def alchemy_paginate(
85 | *,
86 | limit: Optional[int] = None,
87 | offset: Optional[int] = None,
88 | query: Select,
89 | **_,
90 | ) -> Select:
91 | if offset is not None:
92 | query = query.offset(offset)
93 | if limit is not None:
94 | query = query.limit(limit)
95 |
96 | return query
97 |
98 |
99 | @specification
100 | def alchemy_join(
101 | *targets: Collection,
102 | join_args: Iterable[dict] = None,
103 | query: Select,
104 | **context,
105 | ) -> Select:
106 | model = context['repository'].model
107 |
108 | for target, join_data in zip_longest(targets, (join_args or {}), fillvalue=dict()):
109 | if not target:
110 | continue
111 |
112 | if isinstance(target, str):
113 | entities = target.split(".")
114 | target = model
115 |
116 | for entity in entities:
117 | target, _ = get_model_from_relationship(
118 | model=target,
119 | relationship_name=entity,
120 | )
121 |
122 | query = query.join_from(model, target, **join_data)\
123 | .add_columns(target).select_from(model)
124 |
125 | return query
126 |
127 |
128 | @specification
129 | def alchemy_only(
130 | *only_fields: str,
131 | query: Select,
132 | model,
133 | **_,
134 | ):
135 | models_to_fields: Dict[Load, Any] = {}
136 | parsed_loads = list(field for field in only_fields if not isinstance(field, str))
137 |
138 | if parsed_loads:
139 | query = query.options(load_only(*parsed_loads))
140 |
141 | for field in (field for field in only_fields if isinstance(field, str)):
142 | parts = field.split('.')
143 | if len(parts) == 1:
144 | models_to_fields[Load(model)] = [
145 | getattr(model, field),
146 | *models_to_fields.get(model, []),
147 | ]
148 | continue
149 |
150 | current_load = model
151 | for part in parts:
152 | model_relationships = set(inspect(current_load).relationships.keys())
153 |
154 | if part in model_relationships:
155 | current_load, _ = get_model_from_relationship(current_load, relationship_name=part)
156 | else:
157 | models_to_fields[Load(current_load)] = [
158 | getattr(current_load, part),
159 | *models_to_fields.get(current_load, []),
160 | ]
161 | break
162 |
163 | else:
164 | models_to_fields[Load(current_load)] = []
165 |
166 | for loader, load_fields in models_to_fields.items():
167 | if load_fields:
168 | query = query.options(loader.load_only(*load_fields))
169 | else:
170 | query = query.options(loader)
171 |
172 | return query
173 |
174 |
175 | class AlchemySpecificationList(SpecificationList):
176 | filter = AlchemyFilter
177 | order = alchemy_order
178 | paginate = alchemy_paginate
179 | join = alchemy_join
180 | only = alchemy_only
181 |
182 |
183 | __all__ = [
184 | 'AlchemySpecificationList',
185 | 'alchemy_filter',
186 | 'AlchemyFilter',
187 | 'alchemy_order',
188 | 'alchemy_paginate',
189 | 'alchemy_join',
190 | 'alchemy_only',
191 | ]
192 |
--------------------------------------------------------------------------------
/assimilator/alchemy/database/unit_of_work.py:
--------------------------------------------------------------------------------
1 | from typing import Optional
2 |
3 | from assimilator.alchemy.database.repository import AlchemyRepository
4 | from assimilator.alchemy.database.error_wrapper import AlchemyErrorWrapper
5 | from assimilator.core.database.unit_of_work import UnitOfWork
6 | from assimilator.core.patterns.error_wrapper import ErrorWrapper
7 |
8 |
9 | class AlchemyUnitOfWork(UnitOfWork):
10 | repository: AlchemyRepository
11 |
12 | def __init__(
13 | self,
14 | repository: AlchemyRepository,
15 | error_wrapper: Optional[ErrorWrapper] = None,
16 | autocommit: bool = False,
17 | ):
18 | super(AlchemyUnitOfWork, self).__init__(
19 | repository=repository,
20 | error_wrapper=error_wrapper or AlchemyErrorWrapper(),
21 | autocommit=autocommit,
22 | )
23 |
24 | def begin(self):
25 | self.repository.session.begin()
26 |
27 | def rollback(self):
28 | self.repository.session.rollback()
29 |
30 | def close(self):
31 | pass
32 |
33 | def commit(self):
34 | self.repository.session.commit()
35 |
36 |
37 | __all__ = [
38 | 'AlchemyUnitOfWork',
39 | ]
40 |
--------------------------------------------------------------------------------
/assimilator/alchemy/events/__init__.py:
--------------------------------------------------------------------------------
1 | from assimilator.alchemy.events.outbox_relay import *
2 | from assimilator.alchemy.events.database.repository import *
3 |
--------------------------------------------------------------------------------
/assimilator/alchemy/events/database/__init__.py:
--------------------------------------------------------------------------------
1 | from assimilator.alchemy.events.database.repository import *
2 |
--------------------------------------------------------------------------------
/assimilator/alchemy/events/database/repository.py:
--------------------------------------------------------------------------------
1 | from typing import Type, Optional
2 |
3 | from sqlalchemy import Table
4 | from sqlalchemy.orm import Query
5 |
6 | from assimilator.alchemy.database import AlchemySpecificationList
7 | from assimilator.alchemy.database.repository import AlchemyRepository
8 | from assimilator.core.database import SpecificationList
9 | from assimilator.core.patterns.error_wrapper import ErrorWrapper
10 |
11 |
12 | class AlchemyOutboxRepository(AlchemyRepository):
13 | def __init__(
14 | self,
15 | session,
16 | event_model: Type[Table],
17 | model: Type[Table],
18 | initial_query: Optional[Query] = None,
19 | specifications: Type[SpecificationList] = AlchemySpecificationList,
20 | error_wrapper: ErrorWrapper = None,
21 | ):
22 | super(AlchemyOutboxRepository, self).__init__(
23 | session=session,
24 | initial_query=initial_query,
25 | model=model,
26 | specifications=specifications,
27 | error_wrapper=error_wrapper,
28 | )
29 | self.event_model = event_model
30 |
31 | def save(self, obj):
32 | super(AlchemyOutboxRepository, self).save(obj)
33 | super(AlchemyOutboxRepository, self).save(self.event_model(obj.outbox_event))
34 |
35 |
36 | __all__ = [
37 | 'AlchemyOutboxRepository',
38 | ]
39 |
--------------------------------------------------------------------------------
/assimilator/alchemy/events/outbox_relay.py:
--------------------------------------------------------------------------------
1 | from sqlalchemy import Column, BigInteger, Text, DateTime
2 |
3 | from assimilator.core.events.events import Event
4 | from assimilator.core.database.unit_of_work import UnitOfWork
5 | from assimilator.core.events import OutboxRelay
6 | from assimilator.core.events.events_bus import EventProducer
7 |
8 |
9 | def create_outbox_event_model(Base):
10 | class OutboxEvent(Base):
11 | id = Column(BigInteger(), primary_key=True)
12 | event_data = Column(Text())
13 | event_date = Column(DateTime(timezone=True))
14 |
15 | def __init__(self, event: Event, *args, **kwargs):
16 | super(OutboxEvent, self).__init__(
17 | event_data=event.json(),
18 | event_date=event.event_date,
19 | *args,
20 | **kwargs,
21 | )
22 |
23 | return OutboxEvent
24 |
25 |
26 | class AlchemyOutboxRelay(OutboxRelay):
27 | def __init__(self, outbox_event_model, uow: UnitOfWork, producer: EventProducer):
28 | super(AlchemyOutboxRelay, self).__init__(uow=uow, producer=producer)
29 | self.outbox_event_model = outbox_event_model
30 |
31 | def start(self):
32 | with self.producer:
33 | while True:
34 | with self.uow:
35 | events = self.uow.repository.filter()
36 |
37 | for event in events:
38 | self.producer.produce(event)
39 |
40 | self.acknowledge(events)
41 | self.uow.commit()
42 |
43 | self.delay_function()
44 |
45 | def delay_function(self):
46 | raise NotImplementedError("delay_function() is not implemented")
47 |
48 | def acknowledge(self, events):
49 | for event in events:
50 | self.uow.repository.delete(event)
51 |
52 |
53 | __all__ = [
54 | 'create_outbox_event_model',
55 | 'AlchemyOutboxRelay',
56 | ]
57 |
--------------------------------------------------------------------------------
/assimilator/core/__init__.py:
--------------------------------------------------------------------------------
1 |
2 |
--------------------------------------------------------------------------------
/assimilator/core/database/__init__.py:
--------------------------------------------------------------------------------
1 | from assimilator.core.database.repository import *
2 | from assimilator.core.database.unit_of_work import *
3 | from assimilator.core.database.exceptions import *
4 | from assimilator.core.database.models import *
5 | from assimilator.core.database.specifications.adaptive import *
6 | from assimilator.core.database.specifications.specifications import *
7 | from assimilator.core.database.specifications.filtering_options import *
8 | from assimilator.core.database.specifications.types import *
9 |
--------------------------------------------------------------------------------
/assimilator/core/database/exceptions.py:
--------------------------------------------------------------------------------
1 |
2 |
3 | class DataLayerError(Exception):
4 | """ Any error related to Repository, UnitOfWork, Model """
5 |
6 |
7 | class NotFoundError(DataLayerError):
8 | """ Results are not found """
9 |
10 |
11 | class InvalidQueryError(DataLayerError):
12 | """ The query to the data storage supplied was invalid """
13 |
14 |
15 | class MultipleResultsError(InvalidQueryError):
16 | """ Repository get() function returned more than one result """
17 |
--------------------------------------------------------------------------------
/assimilator/core/database/models.py:
--------------------------------------------------------------------------------
1 | import json
2 | from uuid import uuid4, UUID
3 | from typing import (
4 | Type, TypeVar, ClassVar, Union,
5 | Optional, Callable, Any, AbstractSet,
6 | Mapping, Dict,
7 | )
8 |
9 | from pydantic import BaseModel as PydanticBaseModel, Extra, ValidationError, Field
10 |
11 | from assimilator.core.exceptions import ParsingError
12 |
13 |
14 | T = TypeVar("T", bound='BaseModel')
15 | AbstractSetIntStr = AbstractSet[Union[int, str]]
16 | MappingIntStrAny = Mapping[Union[int, str], Any]
17 |
18 |
19 | class BaseModel(PydanticBaseModel):
20 | id: str = Field(allow_mutation=False)
21 |
22 | class AssimilatorConfig(PydanticBaseModel, extra=Extra.allow):
23 | autogenerate_id: ClassVar[bool] = True
24 | exclude: ClassVar[set] = None
25 |
26 | class Config:
27 | arbitrary_types_allowed = True
28 | validate_assignment = True
29 |
30 | def __hash__(self):
31 | return UUID(self.id).int
32 |
33 | def __init_subclass__(cls, **kwargs):
34 | super().__init_subclass__(**kwargs)
35 |
36 | if not issubclass(cls.AssimilatorConfig, BaseModel.AssimilatorConfig):
37 | base_configs = [
38 | getattr(base_class, 'AssimilatorConfig') for base_class in cls.mro()
39 | if hasattr(base_class, 'AssimilatorConfig')
40 | ]
41 |
42 | class InheritedConfig(*base_configs):
43 | ...
44 |
45 | cls.AssimilatorConfig = InheritedConfig
46 |
47 | return cls
48 |
49 | def generate_id(self, **kwargs) -> str:
50 | return str(uuid4())
51 |
52 | def __init__(self, **kwargs):
53 | if self.AssimilatorConfig.autogenerate_id and kwargs.get('id') is None:
54 | kwargs['id'] = self.generate_id(**kwargs)
55 |
56 | super(BaseModel, self).__init__(**kwargs)
57 |
58 | @classmethod
59 | def loads(cls: Type['T'], data: str) -> 'T':
60 | try:
61 | return cls(**json.loads(data))
62 | except (ValidationError, TypeError) as exc:
63 | raise ParsingError(exc)
64 |
65 | def json(
66 | self,
67 | *,
68 | include: Optional[Union['AbstractSetIntStr', 'MappingIntStrAny']] = None,
69 | exclude: Optional[Union['AbstractSetIntStr', 'MappingIntStrAny']] = None,
70 | by_alias: bool = False,
71 | skip_defaults: Optional[bool] = None,
72 | exclude_unset: bool = False,
73 | exclude_defaults: bool = False,
74 | exclude_none: bool = False,
75 | encoder: Optional[Callable[[Any], Any]] = None,
76 | models_as_dict: bool = True,
77 | **dumps_kwargs: Any,
78 | ) -> str:
79 | return super(BaseModel, self).json(
80 | include=include,
81 | exclude={*(exclude or []), *(self.AssimilatorConfig.exclude or [])},
82 | by_alias=by_alias,
83 | skip_defaults=skip_defaults,
84 | exclude_unset=exclude_unset,
85 | exclude_defaults=exclude_defaults,
86 | exclude_none=exclude_none,
87 | encoder=encoder,
88 | models_as_dict=models_as_dict,
89 | **dumps_kwargs,
90 | )
91 |
92 | def dict(
93 | self,
94 | *,
95 | include: Optional[Union['AbstractSetIntStr', 'MappingIntStrAny']] = None,
96 | exclude: Optional[Union['AbstractSetIntStr', 'MappingIntStrAny']] = None,
97 | by_alias: bool = False,
98 | skip_defaults: Optional[bool] = None,
99 | exclude_unset: bool = False,
100 | exclude_defaults: bool = False,
101 | exclude_none: bool = False,
102 | ) -> Dict[str, Any]:
103 | return super(BaseModel, self).dict(
104 | include=include,
105 | exclude={*(exclude or []), *(self.AssimilatorConfig.exclude or [])},
106 | by_alias=by_alias,
107 | skip_defaults=skip_defaults,
108 | exclude_unset=exclude_unset,
109 | exclude_defaults=exclude_defaults,
110 | exclude_none=exclude_none,
111 | )
112 |
113 |
114 | __all__ = [
115 | 'BaseModel',
116 | ]
117 |
--------------------------------------------------------------------------------
/assimilator/core/database/repository.py:
--------------------------------------------------------------------------------
1 | from functools import wraps
2 | from abc import ABC, abstractmethod
3 | from typing import (
4 | TypeVar, Callable, Generic, final,
5 | Union, Optional, Iterable, Type,
6 | Collection, Tuple, Any, Dict,
7 | )
8 |
9 | from assimilator.core.patterns.error_wrapper import ErrorWrapper
10 | from assimilator.core.patterns.lazy_command import LazyCommand
11 | from assimilator.core.database.specifications.specifications import SpecificationType, SpecificationList
12 |
13 |
14 | def make_lazy(func: Callable):
15 |
16 | @wraps(func)
17 | def make_lazy_wrapper(
18 | self,
19 | *specifications: SpecificationType,
20 | lazy: bool = False,
21 | initial_query: QueryT = None,
22 | ):
23 | if lazy:
24 | return LazyCommand(func, self, *specifications, lazy=False, initial_query=initial_query)
25 | return func(self, *specifications, lazy=lazy, initial_query=initial_query)
26 |
27 | return make_lazy_wrapper
28 |
29 |
30 | QueryT = TypeVar("QueryT")
31 | ModelT = TypeVar("ModelT")
32 | SessionT = TypeVar("SessionT")
33 | SpecsT = TypeVar("SpecsT", bound=Type[SpecificationList])
34 |
35 |
36 | class Repository(Generic[SessionT, ModelT, QueryT], ABC):
37 | def __init__(
38 | self,
39 | session: SessionT,
40 | model: Type[ModelT],
41 | specifications: SpecsT,
42 | initial_query: Optional[QueryT] = None,
43 | error_wrapper: Optional[ErrorWrapper] = None,
44 | ):
45 | self.session = session
46 | self.model = model
47 | self.__initial_query: QueryT = initial_query
48 | self.specifications: SpecsT = specifications
49 |
50 | self.error_wrapper = error_wrapper or ErrorWrapper()
51 | self.get = LazyCommand.decorate(self.error_wrapper.decorate(self.get))
52 | self.filter: Repository.filter = LazyCommand.decorate(self.error_wrapper.decorate(self.filter))
53 | self.save: Repository.save = self.error_wrapper.decorate(self.save)
54 | self.delete: Repository.delete = self.error_wrapper.decorate(self.delete)
55 | self.update: Repository.update = self.error_wrapper.decorate(self.update)
56 | self.is_modified: Repository.is_modified = self.error_wrapper.decorate(self.is_modified)
57 | self.refresh: Repository.refresh = self.error_wrapper.decorate(self.refresh)
58 | self.count: Repository.count = LazyCommand.decorate(self.error_wrapper.decorate(self.count))
59 |
60 | @final
61 | def _check_obj_is_specification(
62 | self,
63 | obj: ModelT,
64 | specifications: Iterable[SpecificationType]
65 | ) -> Tuple[Optional[ModelT], Iterable[SpecificationType]]:
66 | """
67 | This function is called for parts of the code that use both obj and *specifications.
68 | We check that if the obj is a model
69 | """
70 |
71 | if not isinstance(obj, self.model) and (obj is not None):
72 | return None, (obj, *specifications) # obj is specification
73 |
74 | return obj, specifications
75 |
76 | @property
77 | def specs(self) -> SpecsT:
78 | """ A shorthand for self.specifications. """
79 | return self.specifications
80 |
81 | def get_initial_query(self, override_query: Optional[QueryT] = None) -> QueryT:
82 | if override_query is not None:
83 | return override_query
84 | elif self.__initial_query is not None:
85 | return self.__initial_query
86 | else:
87 | raise NotImplementedError("You must either pass the initial query or define get_initial_query()")
88 |
89 | def _get_specifications_context(self) -> Dict[str, Any]:
90 | return {"model": self.model, "repository": self}
91 |
92 | @abstractmethod
93 | def dict_to_models(self, data: dict) -> ModelT:
94 | """
95 | Converts data from Python dictionaries to models that Repository uses.
96 | You don't need to use that function directly 99% of the times.
97 | """
98 | raise NotImplementedError()
99 |
100 | @final
101 | def _apply_specifications(
102 | self, query: Union[QueryT, None],
103 | specifications: Iterable[SpecificationType],
104 | ) -> QueryT:
105 | query = self.get_initial_query(query)
106 |
107 | for specification in specifications:
108 | query = specification(query=query, **self._get_specifications_context())
109 |
110 | return query
111 |
112 | @abstractmethod
113 | def get(
114 | self,
115 | *specifications: SpecificationType,
116 | lazy: bool = False,
117 | initial_query: QueryT = None,
118 | ) -> Union[ModelT, LazyCommand[ModelT]]:
119 | raise NotImplementedError("get() is not implemented()")
120 |
121 | @abstractmethod
122 | def filter(
123 | self,
124 | *specifications: SpecificationType,
125 | lazy: bool = False,
126 | initial_query: QueryT = None,
127 | ) -> Union[Collection[ModelT], LazyCommand[Collection[ModelT]]]:
128 | raise NotImplementedError("filter() is not implemented()")
129 |
130 | @abstractmethod
131 | def save(self, obj: Optional[ModelT] = None, **obj_data) -> ModelT:
132 | raise NotImplementedError("save() is not implemented in the repository")
133 |
134 | @abstractmethod
135 | def delete(self, obj: Optional[ModelT] = None, *specifications: SpecificationType) -> None:
136 | raise NotImplementedError("delete() is not implemented in the repository")
137 |
138 | @abstractmethod
139 | def update(self, obj: Optional[ModelT] = None, *specifications: SpecificationType, **update_values) -> None:
140 | raise NotImplementedError("update() is not implemented in the repository")
141 |
142 | @abstractmethod
143 | def is_modified(self, obj: ModelT) -> bool:
144 | raise NotImplementedError("is_modified() is not implemented in the repository")
145 |
146 | @abstractmethod
147 | def refresh(self, obj: ModelT) -> None:
148 | raise NotImplementedError("refresh() is not implemented in the repository")
149 |
150 | @abstractmethod
151 | def count(
152 | self,
153 | *specifications: SpecificationType,
154 | lazy: bool = False,
155 | initial_query: QueryT = None,
156 | ) -> Union[LazyCommand[int], int]:
157 | raise NotImplementedError("count() is not implemented in the repository")
158 |
159 | def __str__(self):
160 | return f"{self.__class__.__name__}({self.model})"
161 |
162 | def __repr__(self):
163 | return str(self)
164 |
165 |
166 | __all__ = [
167 | 'LazyCommand',
168 | 'Repository',
169 | 'make_lazy',
170 | ]
171 |
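The abstract Repository above funnels every read through `_apply_specifications()`, which simply folds each specification callable over the initial query. A conceptual, library-independent sketch of that folding step (the two specification functions here are hypothetical and exist only for illustration):

```python
# Conceptual sketch only: a specification is any callable that maps a query to a new query.
def uppercase_rows(query, **context):      # hypothetical specification
    return [row.upper() for row in query]

def first_two(query, **context):           # hypothetical specification
    return query[:2]

query = ["a", "b", "c"]
for spec in (uppercase_rows, first_two):   # what _apply_specifications does, minus the model/repository context kwargs
    query = spec(query)

print(query)  # ['A', 'B']
```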
--------------------------------------------------------------------------------
/assimilator/core/database/specifications/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/knucklesuganda/py_assimilator/43108e3575750f3cd6e9dff6d59068d1a5be5e55/assimilator/core/database/specifications/__init__.py
--------------------------------------------------------------------------------
/assimilator/core/database/specifications/adaptive.py:
--------------------------------------------------------------------------------
1 | import operator
2 | from typing import Optional, Iterable, Union, Callable, Any
3 |
4 | from assimilator.core.database.specifications.specifications import specification, FilterSpecification
5 |
6 |
7 | class AdaptiveFilter:
8 | def __init__(self, *fields, **kwargs_fields):
9 | self.fields = fields
10 | self.kwargs_fields = kwargs_fields
11 |
12 | def __or__(self, other: Union['AdaptiveFilter', 'FilterSpecification']) -> 'CompositeAdaptiveFilter':
13 | return CompositeAdaptiveFilter(first=self, second=other, func=operator.or_)
14 |
15 | def __and__(self, other: Union['AdaptiveFilter', 'FilterSpecification']) -> 'CompositeAdaptiveFilter':
16 | return CompositeAdaptiveFilter(first=self, second=other, func=operator.and_)
17 |
18 | def __invert__(self):
19 |         return AdaptiveFilter(*self.fields, **self.kwargs_fields)
20 |
21 | def __call__(self, query, repository, **context):
22 | return repository.specs.filter(
23 | *self.fields, **self.kwargs_fields,
24 | )(query=query, repository=repository)
25 |
26 |
27 | class CompositeAdaptiveFilter(AdaptiveFilter):
28 | def __init__(
29 | self,
30 | first: Union['AdaptiveFilter', 'FilterSpecification'],
31 | second: Union['AdaptiveFilter', 'FilterSpecification'],
32 | func: Callable[['AdaptiveFilter', 'AdaptiveFilter'], Any],
33 | ):
34 | super(CompositeAdaptiveFilter, self).__init__()
35 | self.first = first
36 | self.second = second
37 | self.func = func
38 |
39 | def _parse_specification(self, filter_spec, repository):
40 | if isinstance(filter_spec, CompositeAdaptiveFilter):
41 | first = self._parse_specification(filter_spec=filter_spec.first, repository=repository)
42 | second = self._parse_specification(filter_spec=filter_spec.second, repository=repository)
43 | return filter_spec.func(first, second)
44 |
45 | elif isinstance(filter_spec, AdaptiveFilter):
46 | return repository.specs.filter(*filter_spec.fields, **filter_spec.kwargs_fields)
47 | else:
48 | return filter_spec
49 |
50 | def __call__(self, query, repository, **context):
51 | first = self._parse_specification(filter_spec=self.first, repository=repository)
52 |         second = self._parse_specification(filter_spec=self.second, repository=repository)
53 | return self.func(first, second)(query=query, repository=repository, **context)
54 |
55 |
56 | filter_ = AdaptiveFilter
57 |
58 |
59 | @specification
60 | def order(*clauses: str, query, repository, **context):
61 | return repository.specs.order(*clauses)(query=query, repository=repository, **context)
62 |
63 |
64 | @specification
65 | def paginate(
66 | *,
67 | limit: Optional[int] = None,
68 | offset: Optional[int] = None,
69 | query,
70 | repository,
71 | **context,
72 | ):
73 | paginate_spec = repository.specs.paginate(limit=limit, offset=offset)
74 | return paginate_spec(query=query, repository=repository, **context)
75 |
76 |
77 | @specification
78 | def join(*targets: str, join_args: Iterable[dict] = None, query, repository, **context):
79 | return repository.specs.join(*targets, join_args=join_args)(query=query, repository=repository, **context)
80 |
81 |
82 | @specification
83 | def only(*only_fields: str, query, repository, **context):
84 | return repository.specs.only(*only_fields)(query=query, repository=repository, **context)
85 |
86 |
87 | __all__ = [
88 | 'AdaptiveFilter',
89 | 'filter_',
90 | 'only',
91 | 'order',
92 | 'join',
93 | 'paginate',
94 | ]
95 |
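A minimal usage sketch of the adaptive `filter_` shorthand, assuming the in-memory `InternalRepository` shown later in this dump and assuming the core `BaseModel` autogenerates a string id:

```python
from assimilator.core.database.models import BaseModel
from assimilator.core.database.specifications.adaptive import filter_, order
from assimilator.internal.database import InternalRepository

class User(BaseModel):   # assumes BaseModel autogenerates the id field
    username: str
    age: int

repository = InternalRepository(session={}, model=User)
repository.save(username="Andrey", age=22)
repository.save(username="Bob", age=17)

# filter_ is provider-agnostic: it is resolved through repository.specs.filter at query time
adults = repository.filter(filter_(age__gte=18), order("username"))
print(adults)  # only Andrey remains
```

Because the adaptive specifications delegate to `repository.specs`, the same query is meant to work unchanged with the other providers.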
--------------------------------------------------------------------------------
/assimilator/core/database/specifications/filtering_options.py:
--------------------------------------------------------------------------------
1 | from abc import abstractstaticmethod
2 | from typing import Dict, Protocol, Any, Callable
3 |
4 |
5 | class FilterOptionProtocol(Protocol):
6 | def __call__(self, field: str, value: Any) -> Callable[[], Any]:
7 | ...
8 |
9 |
10 | FILTERING_OPTIONS_SEPARATOR = "__"
11 |
12 |
13 | class FilteringOptions:
14 |     """ Resolves raw filter fields such as "age__gt" into filtering option callables. """
15 |
16 | def __init__(self):
17 | self.filter_options: Dict[str, FilterOptionProtocol] = {
18 | "eq": self._eq,
19 | "gt": self._gt,
20 | "gte": self._gte,
21 | "lt": self._lt,
22 | "lte": self._lte,
23 | "not": self._not,
24 | "is": self._is,
25 | "like": self._like,
26 | "regex": self._regex,
27 | }
28 |
29 | def get_default_filter(self) -> FilterOptionProtocol:
30 | return self._eq
31 |
32 | def parse_field(self, raw_field: str, value: Any) -> Callable:
33 | fields = raw_field.split(FILTERING_OPTIONS_SEPARATOR)
34 | last_field = fields[-1]
35 |
36 | if len(fields) == 1:
37 | filter_func = self.filter_options.get(last_field, self.get_default_filter())
38 | return filter_func(last_field, value)
39 |
40 | # Foreign Key
41 |
42 | if self.filter_options.get(fields[-1]) is None:
43 | foreign_field = raw_field
44 | filter_func = self.get_default_filter()
45 | else:
46 | foreign_field = FILTERING_OPTIONS_SEPARATOR.join(fields[:-1])
47 | filter_func = self.filter_options.get(fields[-1])
48 |
49 | return filter_func(foreign_field, value)
50 |
51 | @abstractstaticmethod
52 | def _eq(field: str, value):
53 | raise NotImplementedError("_eq() is not implemented in the filtering options")
54 |
55 | @abstractstaticmethod
56 | def _gt(field: str, value):
57 | raise NotImplementedError("_gt() is not implemented in the filtering options")
58 |
59 | @abstractstaticmethod
60 | def _gte(field: str, value):
61 | raise NotImplementedError("_gte() is not implemented in the filtering options")
62 |
63 | @abstractstaticmethod
64 | def _lt(field: str, value):
65 | raise NotImplementedError("_lt() is not implemented in the filtering options")
66 |
67 | @abstractstaticmethod
68 | def _lte(field: str, value):
69 | raise NotImplementedError("_lte() is not implemented in the filtering options")
70 |
71 | @abstractstaticmethod
72 | def _not(field: str, value):
73 | raise NotImplementedError("_not() is not implemented in the filtering options")
74 |
75 | @abstractstaticmethod
76 | def _is(field: str, value):
77 | raise NotImplementedError("_is() is not implemented in the filtering options")
78 |
79 | @abstractstaticmethod
80 | def _like(field: str, value):
81 | raise NotImplementedError("_like() is not implemented in the filtering options")
82 |
83 | @abstractstaticmethod
84 | def _regex(field: str, value):
85 | raise NotImplementedError("_regex() is not implemented in the filtering options")
86 |
87 |
88 | __all__ = [
89 | 'FilteringOptions',
90 | 'FilterOptionProtocol',
91 | 'FILTERING_OPTIONS_SEPARATOR',
92 | ]
93 |
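To see how `parse_field()` splits on the `__` separator, here is a toy `FilteringOptions` subclass (purely hypothetical, defined only for this example) whose options return tuples instead of real comparison callables:

```python
from assimilator.core.database.specifications.filtering_options import FilteringOptions

class EchoFilteringOptions(FilteringOptions):
    _eq = staticmethod(lambda field, value: ("eq", field, value))
    _gt = staticmethod(lambda field, value: ("gt", field, value))
    _gte = staticmethod(lambda field, value: ("gte", field, value))
    _lt = staticmethod(lambda field, value: ("lt", field, value))
    _lte = staticmethod(lambda field, value: ("lte", field, value))
    _not = staticmethod(lambda field, value: ("not", field, value))
    _is = staticmethod(lambda field, value: ("is", field, value))
    _like = staticmethod(lambda field, value: ("like", field, value))
    _regex = staticmethod(lambda field, value: ("regex", field, value))

options = EchoFilteringOptions()
print(options.parse_field("age__gt", 18))           # ('gt', 'age', 18)
print(options.parse_field("username", "Andrey"))    # ('eq', 'username', 'Andrey') - default option
print(options.parse_field("owner__username", "A"))  # ('eq', 'owner__username', 'A') - foreign key path
```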
--------------------------------------------------------------------------------
/assimilator/core/database/specifications/specifications.py:
--------------------------------------------------------------------------------
1 | from functools import wraps
2 | from abc import ABC
3 | from typing import Callable, TypeVar, Type, Any, Union
4 |
5 | from assimilator.core.database.specifications.filtering_options import FilteringOptions
6 | from assimilator.core.database.specifications.types import (
7 | OrderSpecificationProtocol,
8 | PaginateSpecificationProtocol,
9 | OnlySpecificationProtocol,
10 | JoinSpecificationProtocol,
11 | )
12 |
13 | QueryT = TypeVar("QueryT")
14 |
15 |
16 | class Specification(ABC):
17 | def __call__(self, query: QueryT, **context: Any) -> QueryT:
18 | raise NotImplementedError("Specification must specify __call__()")
19 |
20 |
21 | class FilterSpecification(Specification, ABC):
22 | filtering_options_cls: Type[FilteringOptions]
23 |
24 | def __init__(self, *filters, **named_filters):
25 | self.filters = list(filters)
26 | self.filtering_options = self.filtering_options_cls()
27 |
28 | for field, value in named_filters.items():
29 | self.filters.append(
30 | self.filtering_options.parse_field(raw_field=field, value=value)
31 | )
32 |
33 | def __or__(self, other: 'SpecificationType') -> 'FilterSpecification':
34 | raise NotImplementedError("or() is not implemented for FilterSpecification")
35 |
36 | def __and__(self, other: 'SpecificationType') -> 'FilterSpecification':
37 | raise NotImplementedError("and() is not implemented for FilterSpecification")
38 |
39 | def __invert__(self):
40 | raise NotImplementedError("invert() is not implemented for FilterSpecification")
41 |
42 | def __str__(self):
43 | return f'filter_spec({self.filters})'
44 |
45 |
46 | def specification(func: Callable) -> Callable:
47 | def create_specification(*args, **kwargs):
48 | @wraps(func)
49 | def created_specification(query: QueryT, **context) -> QueryT:
50 | return func(*args, **kwargs, query=query, **context)
51 |
52 | created_specification: func
53 | return created_specification
54 |
55 | return create_specification
56 |
57 |
58 | class SpecificationList:
59 | filter: Type[FilterSpecification]
60 | order: OrderSpecificationProtocol
61 | paginate: PaginateSpecificationProtocol
62 | join: JoinSpecificationProtocol
63 | only: OnlySpecificationProtocol
64 |
65 |
66 | SpecificationType = Union[Callable, Specification]
67 |
68 |
69 | __all__ = [
70 | 'SpecificationList',
71 | 'Specification',
72 | 'specification',
73 | 'SpecificationType',
74 | 'FilterSpecification',
75 | ]
76 |
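A minimal sketch of writing a custom specification with the `@specification` decorator; `take_every` is hypothetical and not part of the library:

```python
from assimilator.core.database.specifications.specifications import specification

@specification
def take_every(step: int, *, query, **context):
    """ Keep every `step`-th item of an in-memory query. """
    return list(query)[::step]

spec = take_every(2)                     # first call: configure the specification
print(spec(query=[1, 2, 3, 4, 5, 6]))    # second call: apply it to a query -> [1, 3, 5]
```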
--------------------------------------------------------------------------------
/assimilator/core/database/specifications/types.py:
--------------------------------------------------------------------------------
1 | from typing import Protocol, TypeVar, Optional, Iterable, Any
2 |
3 | QueryT = TypeVar("QueryT")
4 |
5 |
6 | class OrderSpecificationProtocol(Protocol):
7 | def __call__(self, *clauses: str) -> QueryT:
8 | ...
9 |
10 |
11 | class PaginateSpecificationProtocol(Protocol):
12 | def __call__(self, *, limit: Optional[int] = None, offset: Optional[int] = None):
13 | ...
14 |
15 |
16 | class JoinSpecificationProtocol(Protocol):
17 | def __call__(self, *targets: Any, join_args: Iterable[dict] = None):
18 | ...
19 |
20 |
21 | class OnlySpecificationProtocol(Protocol):
22 | def __call__(self, *only_fields: Iterable[str]) -> Iterable[QueryT]:
23 | ...
24 |
25 |
26 | __all__ = [
27 | 'OrderSpecificationProtocol',
28 | 'PaginateSpecificationProtocol',
29 | 'JoinSpecificationProtocol',
30 | 'OnlySpecificationProtocol',
31 | ]
32 |
--------------------------------------------------------------------------------
/assimilator/core/database/unit_of_work.py:
--------------------------------------------------------------------------------
1 | from abc import ABC, abstractmethod
2 | from typing import Optional
3 |
4 | from assimilator.core.database.repository import Repository
5 | from assimilator.core.patterns import ErrorWrapper
6 |
7 |
8 | class UnitOfWork(ABC):
9 | error_wrapper: ErrorWrapper = ErrorWrapper()
10 |
11 | def __init__(
12 | self,
13 | repository: Repository,
14 | error_wrapper: Optional[ErrorWrapper] = None,
15 | autocommit: bool = False,
16 | ):
17 | self.repository = repository
18 | if error_wrapper is not None:
19 | self.error_wrapper = error_wrapper
20 |
21 | self.autocommit = autocommit
22 | self.begin = self.error_wrapper.decorate(self.begin)
23 | self.rollback = self.error_wrapper.decorate(self.rollback)
24 | self.commit = self.error_wrapper.decorate(self.commit)
25 | self.close = self.error_wrapper.decorate(self.close)
26 |
27 | @abstractmethod
28 | def begin(self):
29 | raise NotImplementedError()
30 |
31 | @abstractmethod
32 | def rollback(self):
33 | raise NotImplementedError()
34 |
35 | @abstractmethod
36 | def commit(self):
37 | raise NotImplementedError()
38 |
39 | @abstractmethod
40 | def close(self):
41 | raise NotImplementedError()
42 |
43 | def __enter__(self):
44 | self.begin()
45 | return self
46 |
47 | def __exit__(self, exc_type, exc_val, exc_tb):
48 | if exc_type is not None:
49 | self.rollback()
50 | self.close()
51 | raise exc_val
52 | else:
53 | if self.autocommit:
54 | self.commit()
55 |
56 | self.close()
57 |
58 | def __str__(self):
59 | return f"{self.__class__.__name__}({self.repository.model})"
60 |
61 | def __repr__(self):
62 | return str(self)
63 |
64 |
65 | __all__ = [
66 | 'UnitOfWork',
67 | ]
68 |
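A minimal sketch of the context-manager flow, using the internal (in-memory) provider shown later in this dump and assuming the core `BaseModel` autogenerates a string id:

```python
from assimilator.core.database.models import BaseModel
from assimilator.internal.database import InternalRepository, InternalUnitOfWork

class User(BaseModel):
    username: str

repository = InternalRepository(session={}, model=User)
uow = InternalUnitOfWork(repository)

with uow:                      # __enter__() calls begin()
    repository.save(username="Andrey")
    uow.commit()               # with autocommit=False, commit explicitly

# If the block raises, __exit__() calls rollback(), close() and re-raises the exception;
# with autocommit=True the explicit commit() call above could be dropped.
```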
--------------------------------------------------------------------------------
/assimilator/core/events/__init__.py:
--------------------------------------------------------------------------------
1 | from assimilator.core.events.outbox_relay import *
2 | from assimilator.core.events.events import *
3 | from assimilator.core.events.exceptions import *
4 | from assimilator.core.events.events_bus import *
5 |
--------------------------------------------------------------------------------
/assimilator/core/events/events.py:
--------------------------------------------------------------------------------
1 | from datetime import datetime
2 | from typing import Any
3 |
4 | from pydantic import Field
5 |
6 | from assimilator.core.database.models import BaseModel
7 |
8 |
9 | class Event(BaseModel):
10 | event_name: str = Field(allow_mutation=False)
11 | event_date: datetime = Field(default_factory=datetime.now)
12 |
13 | class Config:
14 | validate_assignment = True
15 |
16 |
17 | class ExternalEvent(Event):
18 | """ The event type is unknown, so all the fields are in data """
19 | data: Any
20 |
21 |
22 | class AckEvent(Event):
23 | acknowledged: bool = False
24 |
25 |
26 | __all__ = [
27 | 'Event',
28 | 'ExternalEvent',
29 | 'AckEvent',
30 | ]
31 |
--------------------------------------------------------------------------------
/assimilator/core/events/events_bus.py:
--------------------------------------------------------------------------------
1 | from abc import abstractmethod
2 | from typing import Iterator, Callable, List, Optional, final
3 |
4 | from assimilator.core.events.events import Event
5 | from assimilator.core.patterns.context_managers import StartCloseContextMixin
6 |
7 |
8 | class EventConsumer(StartCloseContextMixin):
9 | def __init__(self, callbacks: Optional[List[Callable]] = None):
10 | if callbacks is None:
11 | callbacks = []
12 |
13 | self._callbacks: List[Callable] = callbacks
14 |
15 | @final
16 | def register(self, callback: Callable):
17 | self._callbacks.append(callback)
18 |
19 | @abstractmethod
20 | def consume(self) -> Iterator[Event]:
21 | raise NotImplementedError("consume() is not implemented")
22 |
23 |
24 | class EventProducer(StartCloseContextMixin):
25 | @abstractmethod
26 | def produce(self, event: Event) -> None:
27 | raise NotImplementedError("produce() is not implemented")
28 |
29 |
30 | class EventBus:
31 | def __init__(self, consumer: EventConsumer, producer: EventProducer):
32 | self.consumer = consumer
33 | self.producer = producer
34 |
35 | def produce(self, event: Event) -> None:
36 | self.producer.produce(event)
37 |
38 | def consume(self) -> Iterator[Event]:
39 | return self.consumer.consume()
40 |
41 |
42 | __all__ = [
43 | 'EventConsumer',
44 | 'EventProducer',
45 | 'EventBus',
46 | ]
47 |
--------------------------------------------------------------------------------
/assimilator/core/events/exceptions.py:
--------------------------------------------------------------------------------
1 | from assimilator.core.exceptions import ParsingError
2 |
3 |
4 | class EventError(Exception):
5 | pass
6 |
7 |
8 | class EventParsingError(ParsingError, EventError):
9 | pass
10 |
11 |
12 | class EventProducingError(EventError):
13 | pass
14 |
15 |
16 | __all__ = [
17 | 'EventError',
18 | 'EventParsingError',
19 | 'EventProducingError',
20 | ]
21 |
--------------------------------------------------------------------------------
/assimilator/core/events/outbox_relay.py:
--------------------------------------------------------------------------------
1 | from abc import ABC
2 | from typing import Iterable
3 |
4 | from assimilator.core.database.unit_of_work import UnitOfWork
5 | from assimilator.core.events.events import Event
6 | from assimilator.core.events.events_bus import EventProducer
7 |
8 |
9 | class OutboxRelay(ABC):
10 | def __init__(self, uow: UnitOfWork, producer: EventProducer):
11 | self.uow = uow
12 | self.producer = producer
13 |
14 | def start(self):
15 | raise NotImplementedError("start() is not implemented")
16 |
17 | def acknowledge(self, events: Iterable[Event]):
18 | raise NotImplementedError("acknowledge() is not implemented")
19 |
20 |
21 | __all__ = [
22 | 'OutboxRelay',
23 | ]
24 |
--------------------------------------------------------------------------------
/assimilator/core/exceptions.py:
--------------------------------------------------------------------------------
1 |
2 | class ParsingError(Exception):
3 | pass
4 |
--------------------------------------------------------------------------------
/assimilator/core/patterns/__init__.py:
--------------------------------------------------------------------------------
1 | from assimilator.core.patterns.context_managers import *
2 | from assimilator.core.patterns.error_wrapper import *
3 | from assimilator.core.patterns.lazy_command import *
4 |
--------------------------------------------------------------------------------
/assimilator/core/patterns/context_managers.py:
--------------------------------------------------------------------------------
1 | from abc import ABC, abstractmethod
2 |
3 |
4 | class StartCloseContextMixin(ABC):
5 | @abstractmethod
6 | def close(self) -> None:
7 | raise NotImplementedError("close() is not implemented")
8 |
9 | @abstractmethod
10 | def start(self) -> None:
11 | raise NotImplementedError("start() is not implemented")
12 |
13 | def __enter__(self):
14 | self.start()
15 | return self
16 |
17 | def __exit__(self, exc_type, exc_val, exc_tb):
18 | self.close()
19 |
20 | if exc_type is not None:
21 | raise exc_val
22 |
23 |
24 | __all__ = [
25 | 'StartCloseContextMixin',
26 | ]
27 |
--------------------------------------------------------------------------------
/assimilator/core/patterns/error_wrapper.py:
--------------------------------------------------------------------------------
1 | import sys
2 | from functools import wraps
3 | from typing import Dict, Type, Optional, Callable, Container, Union
4 |
5 |
6 | ErrorT = Union[Callable[[Exception], Exception], Type[Exception]]
7 |
8 |
9 | class ErrorWrapper:
10 | def __init__(
11 | self,
12 | error_mappings: Optional[Dict[Type[Exception], ErrorT]] = None,
13 | default_error: Optional[ErrorT] = None,
14 | skipped_errors: Optional[Container[Type[Exception]]] = None,
15 | ):
16 | self.error_mappings = error_mappings or {}
17 | self.default_error = default_error
18 | self.skipped_errors = {
19 | *(skipped_errors or set()),
20 | KeyboardInterrupt,
21 | SystemExit,
22 |             *self.error_mappings.values(),    # skip the wrapped error types so errors are not wrapped twice
23 | }
24 |
25 | def __enter__(self):
26 | return self
27 |
28 | def is_already_wrapped(self, exc_type: Type[Exception]) -> bool:
29 | return any(
30 | issubclass(exc_type, error)
31 | for error in self.skipped_errors
32 |             if isinstance(error, type) and issubclass(error, Exception)
33 | )
34 |
35 |     def create_error(self, original_error: Exception, wrapped_error_type: Type[Exception]) -> Exception:
36 |         _, _, tb = sys.exc_info()
37 |         return wrapped_error_type(original_error).with_traceback(tb)
38 |
39 | def __exit__(self, exc_type: Type[Exception], exc_val: Exception, exc_tb):
40 | if exc_val is None:
41 | return True
42 | elif self.is_already_wrapped(exc_type):
43 | return False
44 |
45 | wrapped_error = self.error_mappings.get(exc_type)
46 |
47 | if wrapped_error is not None:
48 | raise self.create_error(
49 | original_error=exc_val,
50 | wrapped_error_type=wrapped_error,
51 | )
52 | elif self.default_error is not None:
53 | raise self.create_error(
54 | original_error=exc_val,
55 | wrapped_error_type=self.default_error,
56 | )
57 |
58 | return False # No wrapping error was found
59 |
60 | def decorate(self, func: Callable) -> Callable:
61 | @wraps(func)
62 | def wrapper(*args, **kwargs):
63 | with self:
64 | return func(*args, **kwargs)
65 |
66 | wrapper: func
67 | return wrapper
68 |
69 | def __str__(self):
70 | return f"{type(self).__name__}({self.error_mappings})"
71 |
72 |
73 | __all__ = ['ErrorWrapper']
74 |
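A minimal sketch of mapping low-level exceptions to domain exceptions; `StorageError` and `MissingUserError` are hypothetical types defined only for this example:

```python
from assimilator.core.patterns.error_wrapper import ErrorWrapper

class StorageError(Exception): pass          # hypothetical domain error
class MissingUserError(StorageError): pass   # hypothetical domain error

wrapper = ErrorWrapper(
    error_mappings={KeyError: MissingUserError},
    default_error=StorageError,
)

@wrapper.decorate
def get_user(users: dict, username: str):
    return users[username]

try:
    get_user({}, "Andrey")
except MissingUserError as error:   # the original KeyError is wrapped and re-raised
    print("wrapped:", error)
```

The same wrapper can also be used directly as a context manager (`with wrapper: ...`), which is exactly how `decorate()` applies it.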
--------------------------------------------------------------------------------
/assimilator/core/patterns/lazy_command.py:
--------------------------------------------------------------------------------
1 | from functools import wraps
2 | from typing import Union, Callable, Iterable, TypeVar, Generic, Iterator
3 |
4 | T = TypeVar("T")
5 |
6 |
7 | class LazyCommand(Generic[T]):
8 | def __init__(self, command: Callable, *args, **kwargs):
9 | self.command = command
10 | self.args = args
11 | self.kwargs = kwargs
12 | self._results: T = None
13 |
14 | def __call__(self) -> Union[T]:
15 | if self._results is not None:
16 | return self._results
17 |
18 | self._results = self.command(*self.args, **self.kwargs)
19 | return self._results
20 |
21 | def __iter__(self) -> Iterator[T]:
22 | results = self()
23 |
24 |         if not isinstance(results, Iterable):  # get() returns a single object, not a collection
25 |             raise TypeError("Results are not iterable")
26 |
27 | return iter(results) # filter() command
28 |
29 | def __eq__(self, other):
30 | return self() == other
31 |
32 | def __gt__(self, other):
33 | return self() > other
34 |
35 | def __getattr__(self, item):
36 | result = self()
37 | return getattr(result, item)
38 |
39 | def __bool__(self):
40 | return bool(self())
41 |
42 | def __str__(self):
43 | return f"Lazy<{self.command}(*{self.args}, **{self.kwargs})>"
44 |
45 | def __repr__(self):
46 | return str(self)
47 |
48 | @staticmethod
49 | def decorate(func: Callable) -> Callable:
50 |
51 | @wraps(func)
52 | def lazy_wrapper(*args, lazy: bool = False, **kwargs) -> Union[LazyCommand[T], T]:
53 | if lazy:
54 | return LazyCommand(
55 | func,
56 | *args,
57 | lazy=False,
58 | **kwargs,
59 | )
60 |
61 | return func(*args, **kwargs)
62 |
63 | lazy_wrapper: func
64 | return lazy_wrapper
65 |
66 |
67 | __all__ = ['LazyCommand']
68 |
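A minimal sketch of `LazyCommand` on its own, with a plain function standing in for a repository query:

```python
from assimilator.core.patterns.lazy_command import LazyCommand

def load_users(min_age: int):
    print("query executed")
    return [{"username": "Andrey", "age": 22}, {"username": "Bob", "age": 19}]

users = LazyCommand(load_users, min_age=18)   # nothing runs yet
print("command created")

for user in users:                            # first access triggers the call
    print(user["username"])

print(len(users()))                           # cached: "query executed" is not printed again
```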
--------------------------------------------------------------------------------
/assimilator/core/services/__init__.py:
--------------------------------------------------------------------------------
1 | from assimilator.core.services.crud import *
2 | from assimilator.core.services.base import *
3 |
--------------------------------------------------------------------------------
/assimilator/core/services/base.py:
--------------------------------------------------------------------------------
1 | from abc import ABC
2 |
3 |
4 | class Service(ABC):
5 | pass
6 |
7 |
8 | __all__ = [
9 | 'Service',
10 | ]
11 |
--------------------------------------------------------------------------------
/assimilator/core/services/crud.py:
--------------------------------------------------------------------------------
1 | from typing import TypeVar, Iterable, Union
2 |
3 | from assimilator.core.database import UnitOfWork, SpecificationList
4 | from assimilator.core.services.base import Service
5 | from assimilator.core.patterns import LazyCommand
6 |
7 |
8 | ModelT = TypeVar("ModelT")
9 |
10 |
11 | class CRUDService(Service):
12 | def __init__(self, uow: UnitOfWork):
13 | self.uow = uow
14 | self._specs: SpecificationList = self.uow.repository.specs
15 |
16 | def create(self, obj_data: Union[dict, ModelT]) -> ModelT:
17 | with self.uow:
18 | if isinstance(obj_data, dict):
19 | obj = self.uow.repository.save(**obj_data)
20 | else:
21 | obj = self.uow.repository.save(obj_data)
22 |
23 | self.uow.commit()
24 |
25 | self.uow.repository.refresh(obj)
26 | return obj
27 |
28 | def update(self, obj_data: Union[dict, ModelT], *filters, **kwargs_filters) -> ModelT:
29 |         with self.uow:
30 |             update_obj = obj_data  # a model instance may be passed directly
31 |             if isinstance(obj_data, dict):
32 |                 old_obj = self.get(*filters, **kwargs_filters)
33 |                 parsed_obj = self.uow.repository.dict_to_models(obj_data)
34 | 
35 |                 for updated_key in obj_data:
36 |                     setattr(old_obj, updated_key, getattr(parsed_obj, updated_key))
37 |                 update_obj = old_obj
38 |
39 | self.uow.repository.update(update_obj)
40 | self.uow.commit()
41 |
42 | self.uow.repository.refresh(update_obj)
43 | return update_obj
44 |
45 | def list(
46 | self, *filters, lazy: bool = False, **kwargs_filters
47 | ) -> Union[Iterable[ModelT], LazyCommand[Iterable[ModelT]]]:
48 | return self.uow.repository.filter(self._specs.filter(*filters, **kwargs_filters), lazy=lazy)
49 |
50 | def get(self, *filters, lazy: bool = False, **kwargs_filters) -> Union[ModelT, LazyCommand[ModelT]]:
51 | return self.uow.repository.get(self._specs.filter(*filters, **kwargs_filters), lazy=lazy)
52 |
53 | def delete(self, *filters, **kwargs_filters) -> None:
54 | with self.uow:
55 | obj = self.get(*filters, **kwargs_filters)
56 | self.uow.repository.delete(obj)
57 | self.uow.commit()
58 |
59 | def __str__(self):
60 | return f"CRUD({self.uow.repository.model})"
61 |
62 |
63 | __all__ = [
64 | 'CRUDService',
65 | ]
66 |
--------------------------------------------------------------------------------
/assimilator/core/usability/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/knucklesuganda/py_assimilator/43108e3575750f3cd6e9dff6d59068d1a5be5e55/assimilator/core/usability/__init__.py
--------------------------------------------------------------------------------
/assimilator/core/usability/exceptions.py:
--------------------------------------------------------------------------------
1 |
2 |
3 | class PatternNotFoundError(KeyError):
4 | pass
5 |
6 |
7 | class ProviderNotFoundError(KeyError):
8 | pass
9 |
--------------------------------------------------------------------------------
/assimilator/core/usability/pattern_creator.py:
--------------------------------------------------------------------------------
1 | from typing import TypeVar, Any, Type, Dict
2 |
3 | from assimilator.core.usability.registry import get_pattern
4 | from assimilator.core.database import Repository, UnitOfWork
5 | from assimilator.core.services import CRUDService
6 |
7 | ModelT = TypeVar("ModelT")
8 |
9 |
10 | def create_repository(
11 | provider: str,
12 | model: Type[ModelT],
13 | session: Any,
14 | kwargs_repository: Dict[str, Any] = None,
15 | ) -> Repository:
16 | repository_cls: Type[Repository] = get_pattern(provider=provider, pattern_name='repository')
17 | return repository_cls(model=model, session=session, **(kwargs_repository or {}))
18 |
19 |
20 | def create_uow(
21 | provider: str,
22 | model: Type[ModelT],
23 | session: Any,
24 | kwargs_repository: Dict[str, Any] = None,
25 | kwargs_uow: Dict[str, Any] = None,
26 | ) -> UnitOfWork:
27 | repository = create_repository(
28 | provider=provider,
29 | model=model,
30 | session=session,
31 | kwargs_repository=kwargs_repository,
32 | )
33 | uow_cls: Type[UnitOfWork] = get_pattern(provider=provider, pattern_name='uow')
34 | return uow_cls(repository=repository, **(kwargs_uow or {}))
35 |
36 |
37 | def create_crud(
38 | provider: str,
39 | model: Type[ModelT],
40 | session: Any,
41 | kwargs_repository: Dict[str, Any] = None,
42 | kwargs_uow: Dict[str, Any] = None,
43 | ) -> CRUDService:
44 | uow = create_uow(
45 | provider=provider,
46 | model=model,
47 | session=session,
48 | kwargs_repository=kwargs_repository,
49 | kwargs_uow=kwargs_uow,
50 | )
51 | crud_cls: Type[CRUDService] = get_pattern(provider=provider, pattern_name='crud')
52 | return crud_cls(uow=uow)
53 |
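A minimal sketch of the creator functions with the 'internal' provider registered in assimilator/internal/__init__.py (shown later in this dump), assuming the core `BaseModel` autogenerates a string id:

```python
import assimilator.internal  # importing the package registers the 'internal' provider

from assimilator.core.database.models import BaseModel
from assimilator.core.usability.pattern_creator import create_crud

class User(BaseModel):
    username: str
    age: int

crud = create_crud(provider='internal', model=User, session={})
andrey = crud.create({"username": "Andrey", "age": 22})
print(crud.get(username="Andrey").age)   # 22
```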
--------------------------------------------------------------------------------
/assimilator/core/usability/registry.py:
--------------------------------------------------------------------------------
1 | import importlib
2 | from typing import Dict, Type, Union
3 |
4 | from pydantic import BaseModel
5 |
6 | from assimilator.core.services.crud import CRUDService
7 | from assimilator.core.database import Repository, UnitOfWork
8 | from assimilator.core.usability.exceptions import PatternNotFoundError, ProviderNotFoundError
9 |
10 |
11 | class PatternList(BaseModel):
12 | class Config:
13 | frozen = True
14 |
15 | repository: Type[Repository]
16 | uow: Type[UnitOfWork]
17 | crud: Type[CRUDService]
18 |
19 |
20 | registry: Dict[str, PatternList] = {}
21 |
22 |
23 | def register_provider(provider: str, pattern_list: PatternList):
24 | registry[provider] = pattern_list
25 |
26 |
27 | def find_provider(provider_path: str):
28 | """ Imports a module that has automatic pattern registration """
29 | importlib.import_module(provider_path)
30 |
31 |
32 | def get_pattern_list(provider: str):
33 | return registry[provider]
34 |
35 |
36 | def unregister_provider(provider: str):
37 | try:
38 | del registry[provider]
39 | except KeyError:
40 | raise ProviderNotFoundError(f"Provider {provider} was not found")
41 |
42 |
43 | def get_pattern(provider: str, pattern_name: str) -> Type[Union[Repository, UnitOfWork, CRUDService]]:
44 | try:
45 | pattern_cls = getattr(registry[provider], pattern_name, None)
46 | except KeyError:
47 |         raise ProviderNotFoundError(f"Provider {provider} was not found")
48 |
49 | if pattern_cls is None:
50 | raise PatternNotFoundError(f"Pattern '{pattern_name}' for {provider} provider was not found")
51 |
52 | return pattern_cls
53 |
54 |
55 | __all__ = [
56 | 'register_provider',
57 | 'unregister_provider',
58 | 'PatternList',
59 | 'get_pattern_list',
60 | 'get_pattern',
61 | 'find_provider',
62 | ]
63 |
--------------------------------------------------------------------------------
/assimilator/internal/__init__.py:
--------------------------------------------------------------------------------
1 | from assimilator.core.services import CRUDService
2 | from assimilator.core.usability.registry import register_provider, PatternList
3 | from assimilator.internal.database import InternalRepository, InternalUnitOfWork
4 |
5 | register_provider(provider='internal', pattern_list=PatternList(
6 | repository=InternalRepository,
7 | uow=InternalUnitOfWork,
8 | crud=CRUDService,
9 | ))
10 |
--------------------------------------------------------------------------------
/assimilator/internal/database/__init__.py:
--------------------------------------------------------------------------------
1 | from assimilator.internal.database.repository import *
2 | from assimilator.internal.database.unit_of_work import *
3 | from assimilator.internal.database.specifications.specifications import *
4 | from assimilator.internal.database.specifications.internal_operator import *
5 | from assimilator.internal.database.specifications.filter_specifications import *
6 |
--------------------------------------------------------------------------------
/assimilator/internal/database/error_wrapper.py:
--------------------------------------------------------------------------------
1 | from assimilator.core.database.exceptions import DataLayerError, NotFoundError
2 | from assimilator.core.patterns.error_wrapper import ErrorWrapper
3 |
4 |
5 | class InternalErrorWrapper(ErrorWrapper):
6 | def __init__(self):
7 | super(InternalErrorWrapper, self).__init__(error_mappings={
8 | KeyError: NotFoundError,
9 | TypeError: NotFoundError,
10 | }, default_error=DataLayerError)
11 |
12 |
13 | __all__ = ['InternalErrorWrapper']
14 |
--------------------------------------------------------------------------------
/assimilator/internal/database/models_utils.py:
--------------------------------------------------------------------------------
1 | from typing import Type, Union
2 |
3 | from pydantic import BaseModel as PydanticBaseModel
4 |
5 | from assimilator.core.database.models import BaseModel
6 |
7 |
8 | def get_model_relationship(model: Type[BaseModel], field_name: str) -> Union[Type[BaseModel], None]:
9 | try:
10 | return model.__fields__.get(field_name).type_
11 | except AttributeError:
12 | return None
13 |
14 |
15 | def dict_to_internal_models(data: dict, model: Type[BaseModel]) -> dict:
16 | for field_name, value in dict(data).items():
17 | field_type = get_model_relationship(model, field_name)
18 | if field_type is None:
19 | continue
20 | elif not issubclass(field_type, PydanticBaseModel):
21 | continue
22 |
23 | if not isinstance(value, dict):
24 | data[field_name] = [
25 | field_type(**dict_to_internal_models(data=val_part, model=field_type))
26 | for val_part in value
27 | ]
28 | else:
29 | data[field_name] = field_type(**value)
30 |
31 | return data
32 |
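A minimal sketch of the nested-model conversion, assuming the core `BaseModel` autogenerates ids for both models:

```python
from typing import List

from assimilator.core.database.models import BaseModel
from assimilator.internal.database.models_utils import dict_to_internal_models

class Balance(BaseModel):
    amount: int
    currency: str

class User(BaseModel):
    username: str
    balances: List[Balance]

data = {
    "username": "Andrey",
    "balances": [{"amount": 100, "currency": "USD"}],
}

user = User(**dict_to_internal_models(data=data, model=User))
print(user.balances[0].currency)   # USD - the nested dict became a Balance model
```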
--------------------------------------------------------------------------------
/assimilator/internal/database/repository.py:
--------------------------------------------------------------------------------
1 | from typing import Type, Union, Optional, TypeVar, List
2 |
3 | from assimilator.core.patterns.error_wrapper import ErrorWrapper
4 | from assimilator.internal.database.error_wrapper import InternalErrorWrapper
5 | from assimilator.core.database import (
6 | Repository,
7 | SpecificationType,
8 | LazyCommand,
9 | InvalidQueryError,
10 | BaseModel,
11 | NotFoundError,
12 | )
13 | from assimilator.core.database import MultipleResultsError
14 | from assimilator.internal.database.specifications.specifications import InternalSpecificationList
15 | from assimilator.internal.database.models_utils import dict_to_internal_models
16 |
17 | ModelT = TypeVar("ModelT", bound=BaseModel)
18 |
19 |
20 | class InternalRepository(Repository):
21 | session: dict
22 | model: Type[ModelT]
23 |
24 | def __init__(
25 | self,
26 | session: dict,
27 | model: Type[ModelT],
28 | initial_query: Optional[str] = '',
29 | specifications: Type[InternalSpecificationList] = InternalSpecificationList,
30 | error_wrapper: Optional[ErrorWrapper] = None,
31 | ):
32 | super(InternalRepository, self).__init__(
33 | model=model,
34 | session=session,
35 | initial_query=initial_query,
36 | specifications=specifications,
37 | error_wrapper=error_wrapper or InternalErrorWrapper(),
38 | )
39 |
40 | def get(
41 | self,
42 | *specifications: SpecificationType,
43 | lazy: bool = False,
44 | initial_query: Optional[str] = None,
45 | ) -> Union[LazyCommand[ModelT], ModelT]:
46 | query = self._apply_specifications(
47 | query=initial_query,
48 | specifications=specifications,
49 | )
50 |
51 |         if query:  # the specifications built a dict key (id) for us, so we can look it up directly
52 | return self.session[query]
53 |
54 | found_models = list(self._apply_specifications(
55 | query=self.session.values(),
56 | specifications=specifications,
57 | ))
58 |
59 | if not found_models:
60 | raise NotFoundError(f"{self} repository did not find an entity")
61 | elif len(found_models) != 1:
62 | raise MultipleResultsError(f"{self} repository found multiple results: {found_models}")
63 |
64 | return found_models[0]
65 |
66 | def filter(
67 | self,
68 | *specifications: SpecificationType,
69 | lazy: bool = False,
70 | initial_query: Optional[str] = None,
71 | ) -> Union[LazyCommand[List[ModelT]], List[ModelT]]:
72 | return list(self._apply_specifications(
73 | query=self.session.values(),
74 | specifications=specifications,
75 | ))
76 |
77 | def dict_to_models(self, data: dict) -> ModelT:
78 | return self.model(**dict_to_internal_models(data=data, model=self.model))
79 |
80 | def save(self, obj: Optional[ModelT] = None, **obj_data) -> ModelT:
81 | if obj is None:
82 | obj = self.dict_to_models(obj_data)
83 |
84 | self.session[obj.id] = obj
85 | return obj
86 |
87 | def delete(self, obj: Optional[ModelT] = None, *specifications: SpecificationType) -> None:
88 | obj, specifications = self._check_obj_is_specification(obj, specifications)
89 |
90 | if specifications:
91 | for model in self.filter(*specifications, lazy=True):
92 | del self.session[model.id]
93 | elif obj is not None:
94 | del self.session[obj.id]
95 |
96 | def update(
97 | self,
98 | obj: Optional[ModelT] = None,
99 | *specifications: SpecificationType,
100 | **update_values,
101 | ) -> None:
102 | obj, specifications = self._check_obj_is_specification(obj, specifications)
103 |
104 | if specifications:
105 | if not update_values:
106 | raise InvalidQueryError(
107 | "You did not provide any update_values "
108 | "to the update() yet provided specifications"
109 | )
110 |
111 | for model in self.filter(*specifications, lazy=True):
112 | model.__dict__.update(update_values)
113 | self.save(model)
114 |
115 | elif obj is not None:
116 | self.save(obj)
117 |
118 | def is_modified(self, obj: ModelT) -> bool:
119 |         return self.get(self.specs.filter(id=obj.id)) != obj
120 |
121 | def refresh(self, obj: ModelT) -> None:
122 | fresh_obj = self.get(self.specs.filter(id=obj.id), lazy=False)
123 | obj.__dict__.update(fresh_obj.__dict__)
124 |
125 | def count(
126 | self,
127 | *specifications: SpecificationType,
128 | lazy: bool = False,
129 | initial_query: Optional[str] = None,
130 | ) -> Union[LazyCommand[int], int]:
131 | if specifications:
132 | return len(list(self._apply_specifications( # We do not call filter() for list() optimization
133 | query=self.session.values(),
134 | specifications=specifications,
135 | )))
136 | return len(self.session)
137 |
138 |
139 | __all__ = [
140 | 'InternalRepository',
141 | ]
142 |
--------------------------------------------------------------------------------
/assimilator/internal/database/specifications/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/knucklesuganda/py_assimilator/43108e3575750f3cd6e9dff6d59068d1a5be5e55/assimilator/internal/database/specifications/__init__.py
--------------------------------------------------------------------------------
/assimilator/internal/database/specifications/filter_specifications.py:
--------------------------------------------------------------------------------
1 | from operator import or_, and_
2 | from typing import Union, List, Generator, Any, Callable
3 |
4 | from assimilator.core.database.models import BaseModel
5 | from assimilator.core.database import FilterSpecification
6 | from assimilator.internal.database.specifications.internal_operator import invert
7 | from assimilator.internal.database.specifications.filtering_options import InternalFilteringOptions
8 |
9 | QueryT = Union[str, List[BaseModel]]
10 |
11 |
12 | class InternalFilter(FilterSpecification):
13 | filtering_options_cls = InternalFilteringOptions
14 |
15 | def __init__(self, *filters, **named_filters):
16 | self.text_filters = [filter_ for filter_ in filters if isinstance(filter_, str)]
17 |
18 | if named_filters.get('id'):
19 | self.text_filters.append(named_filters.pop('id'))
20 |
21 | super(InternalFilter, self).__init__(
22 | *(set(filters) - set(self.text_filters)),
23 | **named_filters,
24 | )
25 |
26 | def __call__(self, query: QueryT, **context) -> Union[str, Generator[BaseModel, Any, None]]:
27 | if isinstance(query, str):
28 | return f'{query}{"".join(str(filter_) for filter_ in self.text_filters)}'
29 | elif not self.filters:
30 | return query
31 |
32 | return (
33 | model for model in query
34 | if all(filter_func(model) for filter_func in self.filters)
35 | )
36 |
37 | def __or__(self, other: Union['InternalFilter', 'CompositeFilter']) -> 'InternalFilter':
38 | return CompositeFilter(first=self, second=other, operation=or_)
39 |
40 | def __and__(self, other: Union['InternalFilter', 'CompositeFilter']) -> 'InternalFilter':
41 | return CompositeFilter(first=self, second=other, operation=and_)
42 |
43 | def __invert__(self):
44 | return InternalFilter(*(invert(func) for func in self.filters))
45 |
46 |
47 | class CompositeFilter(InternalFilter):
48 | def __init__(
49 | self,
50 | first: Union[FilterSpecification, 'CompositeFilter'],
51 | second: Union[FilterSpecification, 'CompositeFilter'],
52 |         operation: Callable[[set, set], set],
53 | ):
54 | super(CompositeFilter, self).__init__()
55 | self.first = first
56 | self.second = second
57 | self.operation = operation
58 |
59 | def __call__(self, query: QueryT, **context) -> Union[str, QueryT]:
60 | if isinstance(query, str):
61 | first_result = self.first(query=query, **context)
62 | second_result = self.second(query=query, **context)
63 | return f'{query}{first_result.replace(query, "")}{second_result.replace(query, "")}'
64 |
65 | first_result = self.first(query=query, **context)
66 | second_result = self.second(query=query, **context)
67 |
68 | return list(self.operation(set(first_result), set(second_result)))
69 |
70 | def __str__(self):
71 | return f"{self.first} {self.operation} {self.second}"
72 |
73 |
74 | __all__ = ['InternalFilter']
75 |
--------------------------------------------------------------------------------
/assimilator/internal/database/specifications/filtering_options.py:
--------------------------------------------------------------------------------
1 | from typing import Any, Callable
2 |
3 | from assimilator.core.database import BaseModel, FilteringOptions
4 | from assimilator.internal.database.specifications.internal_operator import (
5 | find_attribute, eq, gte, gt, lte, lt, is_, not_, like, regex,
6 | )
7 |
8 |
9 | AttrFinderType = Callable[[Callable, str, Any], Callable[[BaseModel], bool]]
10 |
11 |
12 | class InternalFilteringOptions(FilteringOptions):
13 | def __init__(self, attr_finder: AttrFinderType = find_attribute):
14 | super(InternalFilteringOptions, self).__init__()
15 | self.attr_finder = attr_finder
16 |
17 | _eq = staticmethod(eq)
18 | _gt = staticmethod(gt)
19 | _gte = staticmethod(gte)
20 | _lt = staticmethod(lt)
21 | _lte = staticmethod(lte)
22 | _not = staticmethod(not_)
23 | _is = staticmethod(is_)
24 | _like = staticmethod(like)
25 | _regex = staticmethod(regex)
26 |
27 |
28 | __all__ = [
29 | 'InternalFilteringOptions',
30 | 'find_attribute',
31 | "eq",
32 | "gte",
33 | "gt",
34 | "lte",
35 | "lt",
36 | "is_",
37 | "not_",
38 | "like",
39 | "regex",
40 | ]
41 |
--------------------------------------------------------------------------------
/assimilator/internal/database/specifications/internal_operator.py:
--------------------------------------------------------------------------------
1 | import operator
2 | import re
3 | from functools import wraps
4 | from numbers import Number
5 | from typing import Any, Callable, Union, Literal
6 |
7 | from assimilator.core.database.models import BaseModel
8 | from assimilator.core.database.specifications.filtering_options import FILTERING_OPTIONS_SEPARATOR
9 | from assimilator.internal.database.specifications.utils import InternalContainers, find_model_value
10 |
11 |
12 | def find_attribute(func: callable, field: str, value: Any) -> Callable[[BaseModel], bool]:
13 |     Wraps a comparison function so that it receives the value of `field` taken from the BaseModel
14 |     passed in at query time, rather than the field name itself.
15 |     For example, with User(id=1) and field='id', the comparison function receives 1.
16 | of it. For example, User(id=1) will use field='id' to get 1 as the result.
17 |
18 | :param func: filtering option function that is going to be decorated.
19 | :param field: field name that is used in getattr(model, field)
20 | :param value: value of the field.
21 | :return: function to be called with a model to find an attribute and call the comparison function.
22 | """
23 |
24 | @wraps(func)
25 | def find_attribute_wrapper(model: BaseModel) -> bool:
26 | foreign_fields = field.split(FILTERING_OPTIONS_SEPARATOR)
27 |
28 | if len(foreign_fields) == 1:
29 | return func(getattr(model, foreign_fields[0]), value)
30 |
31 | model_val = find_model_value(fields=foreign_fields, model=model)
32 | if isinstance(model_val, InternalContainers):
33 | return any(func(member, value) for member in model_val)
34 |
35 | return func(model_val, value)
36 |
37 | find_attribute_wrapper: func
38 | return find_attribute_wrapper
39 |
40 |
41 | def eq(field: str, value: Any):
42 | return find_attribute(func=operator.eq, field=field, value=value)
43 |
44 |
45 | def gt(field: str, value: Number):
46 | return find_attribute(func=operator.gt, field=field, value=value)
47 |
48 |
49 | def gte(field: str, value: Number):
50 | return find_attribute(func=operator.ge, field=field, value=value)
51 |
52 |
53 | def lt(field: str, value: Number):
54 | return find_attribute(func=operator.lt, field=field, value=value)
55 |
56 |
57 | def lte(field: str, value: Number):
58 | return find_attribute(func=operator.le, field=field, value=value)
59 |
60 |
61 | def not_(field: str, value: Any):
62 |     return find_attribute(func=operator.ne, field=field, value=value)  # the "not" option filters on field != value
63 |
64 |
65 | def is_(field: str, value: Union[Literal[True], Literal[False], Literal[None]]):
66 | return find_attribute(func=operator.is_, field=field, value=value)
67 |
68 |
69 | def regex(field: str, value: str):
70 | return find_attribute(
71 | func=lambda model_val, val: re.compile(value).match(model_val),
72 | field=field,
73 | value=value,
74 | )
75 |
76 |
77 | def like(field: str, value: str):
78 | return regex(field, f'^{value.replace("%", ".*?")}$')
79 |
80 |
81 | def invert(func: Callable):
82 |
83 | @wraps(func)
84 | def invert_wrapper(model):
85 | return not func(model)
86 |
87 | invert_wrapper: func
88 | return invert_wrapper
89 |
90 |
91 | __all__ = [
92 | 'find_attribute',
93 | 'eq',
94 | 'gt',
95 | 'gte',
96 | 'lt',
97 | 'lte',
98 | 'not_',
99 | 'is_',
100 | 'regex',
101 | 'like',
102 | 'invert',
103 | ]
104 |
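The helpers above build predicates over model instances; these are exactly what `InternalFilter` stores in `self.filters`. A minimal sketch, assuming the core `BaseModel` autogenerates an id:

```python
from assimilator.core.database.models import BaseModel
from assimilator.internal.database.specifications.internal_operator import eq, gte, like

class User(BaseModel):
    username: str
    age: int

user = User(username="Andrey", age=22)

print(eq("username", "Andrey")(user))        # True
print(gte("age", 18)(user))                  # True
print(bool(like("username", "And%")(user)))  # True - '%' acts as a wildcard
```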
--------------------------------------------------------------------------------
/assimilator/internal/database/specifications/specifications.py:
--------------------------------------------------------------------------------
1 | from typing import List, Iterable, Union, Optional, Collection
2 |
3 | from assimilator.core.database import specification, SpecificationList, BaseModel
4 | from assimilator.internal.database.specifications.filter_specifications import InternalFilter
5 | from assimilator.internal.database.specifications.utils import find_model_value
6 |
7 | QueryT = Union[str, List[BaseModel]]
8 | internal_filter = InternalFilter
9 |
10 |
11 | def _internal_ordering(sorting_field: str):
12 | fields = sorting_field.strip("-").split(".")
13 |
14 | def _internal_ordering_wrapper(item: BaseModel):
15 | current = find_model_value(fields=fields, model=item)
16 |
17 | return current
18 |
19 | return _internal_ordering_wrapper
20 |
21 |
22 | @specification
23 | def internal_order(*clauses: str, query: QueryT, **_) -> Iterable[BaseModel]:
24 | if isinstance(query, str):
25 | return query
26 |
27 | query = list(query)
28 | for field in clauses:
29 | query.sort(
30 | key=_internal_ordering(sorting_field=field),
31 | reverse=field.startswith("-"),
32 | )
33 |
34 | return query
35 |
36 |
37 | @specification
38 | def internal_paginate(
39 | *,
40 | query: QueryT,
41 | limit: Optional[int] = None,
42 | offset: Optional[int] = None,
43 | **_,
44 | ) -> Iterable[BaseModel]:
45 | if isinstance(query, str):
46 | return query
47 |
48 |     return list(query)[offset or 0:][:limit]  # skip `offset` items, then take `limit` items
49 |
50 |
51 | @specification
52 | def internal_join(*targets: Collection, query: QueryT, **join_args: dict) -> QueryT:
53 | return query
54 |
55 |
56 | @specification
57 | def internal_only(*only_fields: Iterable[str], query: QueryT, **_) -> Iterable[BaseModel]:
58 | """
59 |     This specification is a no-op: stripping fields from in-memory models would cost more than it saves,
60 |     and plain Python objects have no deferred-loading mechanism that would make it worthwhile.
61 | """
62 | return query
63 |
64 |
65 | class InternalSpecificationList(SpecificationList):
66 | filter = internal_filter
67 | order = internal_order
68 | paginate = internal_paginate
69 | join = internal_join
70 | only = internal_only
71 |
72 |
73 | __all__ = [
74 | 'internal_filter',
75 | 'InternalFilter',
76 | 'internal_order',
77 | 'internal_paginate',
78 | 'internal_join',
79 | 'internal_only',
80 | 'InternalSpecificationList',
81 | ]
82 |
--------------------------------------------------------------------------------
/assimilator/internal/database/specifications/utils.py:
--------------------------------------------------------------------------------
1 | from typing import Dict, List, Set, Tuple, Iterable
2 |
3 | from assimilator.core.database.models import BaseModel
4 |
5 |
6 | InternalContainers = (List, Set, Tuple, map)
7 |
8 |
9 | def find_model_value(fields: Iterable[str], model: BaseModel):
10 | model_val = model
11 |
12 | for foreign_field in fields:
13 | if isinstance(model_val, InternalContainers):
14 | model_val = list(getattr(obj, foreign_field) for obj in model_val)
15 | elif isinstance(model_val, Dict):
16 | model_val = list(getattr(obj, foreign_field) for obj in model_val.values())
17 | else:
18 | model_val = getattr(model_val, foreign_field)
19 |
20 | return model_val
21 |
--------------------------------------------------------------------------------
/assimilator/internal/database/unit_of_work.py:
--------------------------------------------------------------------------------
1 | from copy import deepcopy
2 | from typing import Optional
3 |
4 | from assimilator.core.database import UnitOfWork, Repository
5 | from assimilator.internal.database.error_wrapper import InternalErrorWrapper
6 | from assimilator.core.patterns import ErrorWrapper
7 |
8 |
9 | class InternalUnitOfWork(UnitOfWork):
10 | def __init__(
11 | self,
12 | repository: Repository,
13 | error_wrapper: Optional[ErrorWrapper] = None,
14 | autocommit: bool = False,
15 | ):
16 | super(InternalUnitOfWork, self).__init__(
17 | repository=repository,
18 | error_wrapper=error_wrapper or InternalErrorWrapper(),
19 | autocommit=autocommit,
20 | )
21 | self._saved_data: Optional[dict] = None
22 |
23 | def begin(self):
24 | self._saved_data = self.repository.session
25 | self.repository.session = deepcopy(self._saved_data)
26 |
27 | def rollback(self):
28 | self.repository.session = self._saved_data
29 |
30 | def commit(self):
31 | self._saved_data.update(self.repository.session)
32 |
33 | for deleted_key in set(self._saved_data.keys()) - set(self.repository.session.keys()):
34 | del self._saved_data[deleted_key]
35 |
36 | self.repository.session = self._saved_data
37 |
38 | def close(self):
39 | self._saved_data = None
40 |
41 |
42 | __all__ = [
43 | 'InternalUnitOfWork',
44 | ]
45 |
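A minimal sketch of the copy-on-begin transaction semantics implemented above: writes go to a deep copy of the session until `commit()`, so an exception before commit leaves the original data untouched. It assumes the core `BaseModel` autogenerates a string id:

```python
from assimilator.core.database.models import BaseModel
from assimilator.internal.database import InternalRepository, InternalUnitOfWork

class User(BaseModel):
    username: str

session: dict = {}
uow = InternalUnitOfWork(InternalRepository(session=session, model=User))

try:
    with uow:
        uow.repository.save(username="Andrey")   # written to the deep copy only
        raise RuntimeError("something went wrong before commit")
except RuntimeError:
    pass

print(len(session))   # 0 - rollback() restored the original session
```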
--------------------------------------------------------------------------------
/assimilator/internal/events/__init__.py:
--------------------------------------------------------------------------------
1 | from assimilator.internal.events.events_bus import *
2 |
--------------------------------------------------------------------------------
/assimilator/internal/events/events_bus.py:
--------------------------------------------------------------------------------
1 | from assimilator.core.events.events import Event
2 | from assimilator.core.events.events_bus import EventConsumer, EventProducer
3 |
4 |
5 | class InternalEventConsumer(EventConsumer):
6 |     def __init__(self, event_storage: list):
7 |         super(InternalEventConsumer, self).__init__()
8 |         self.event_storage = event_storage
9 | def close(self):
10 | pass
11 |
12 | def start(self):
13 | pass
14 |
15 | def consume(self):
16 | while self.event_storage:
17 | yield self.event_storage.pop()
18 |
19 |
20 | class InternalEventProducer(EventProducer):
21 | def __init__(self, event_storage: list):
22 | self.event_storage = event_storage
23 |
24 | def produce(self, event: Event):
25 | self.event_storage.append(event)
26 |
27 | def start(self):
28 | pass
29 |
30 | def close(self):
31 | pass
32 |
33 |
34 | __all__ = [
35 | 'InternalEventConsumer',
36 | 'InternalEventProducer',
37 | ]
38 |
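A minimal sketch wiring the in-memory consumer and producer through the core `EventBus`, assuming the core `BaseModel` behind `Event` autogenerates its id:

```python
from assimilator.core.events.events import Event
from assimilator.core.events.events_bus import EventBus
from assimilator.internal.events.events_bus import InternalEventConsumer, InternalEventProducer

storage: list = []
bus = EventBus(
    consumer=InternalEventConsumer(event_storage=storage),
    producer=InternalEventProducer(event_storage=storage),
)

bus.produce(Event(event_name="user_created"))

for event in bus.consume():
    print(event.event_name)   # user_created
```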
--------------------------------------------------------------------------------
/assimilator/kafka_/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/knucklesuganda/py_assimilator/43108e3575750f3cd6e9dff6d59068d1a5be5e55/assimilator/kafka_/__init__.py
--------------------------------------------------------------------------------
/assimilator/kafka_/events/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/knucklesuganda/py_assimilator/43108e3575750f3cd6e9dff6d59068d1a5be5e55/assimilator/kafka_/events/__init__.py
--------------------------------------------------------------------------------
/assimilator/kafka_/events/events_bus.py:
--------------------------------------------------------------------------------
1 | import json
2 | from typing import Iterable
3 |
4 | from kafka import KafkaProducer, KafkaConsumer
5 | from kafka.errors import KafkaError
6 |
7 | from assimilator.core.events import Event, ExternalEvent
8 | from assimilator.core.events import EventParsingError, EventProducingError
9 | from assimilator.core.events.events_bus import EventConsumer, EventProducer
10 |
11 |
12 | class KafkaEventConsumer(EventConsumer):
13 | def __init__(self, topics: Iterable[str], consumer: KafkaConsumer):
14 | self.consumer = consumer
15 | self.topics = list(topics)
16 |
17 | def close(self):
18 | self.consumer.close()
19 |
20 | def start(self):
21 | """ Connected by default """
22 |
23 | def consume(self) -> Iterable[ExternalEvent]:
24 | self.consumer.subscribe(self.topics)
25 |
26 | for message in self.consumer:
27 | try:
28 | yield ExternalEvent.loads(message.value)
29 |             except json.JSONDecodeError as exc:
30 | raise EventParsingError(exc)
31 |
32 |
33 | class KafkaEventProducer(EventProducer):
34 | def __init__(self, topic: str, producer: KafkaProducer, sync_produce: bool = False, timeout: int = None):
35 | self.topic = topic
36 | self.producer = producer
37 | self.sync_produce = sync_produce
38 | self.timeout = timeout
39 |
40 | def produce(self, event: Event):
41 | message = self.producer.send(self.topic, key=event.id, value=event.json())
42 |
43 | if self.sync_produce:
44 | try:
45 | message.get(timeout=self.timeout)
46 | except KafkaError as exc:
47 | raise EventProducingError(exc)
48 |
49 | def start(self):
50 | """ Already started """
51 |
52 | def close(self):
53 | self.producer.close()
54 |
55 |
56 | __all__ = [
57 | 'KafkaEventConsumer',
58 | 'KafkaEventProducer',
59 | ]
60 |
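A minimal producer-side sketch, assuming a Kafka broker on localhost:9092 and that `Event.id` is a string; the kafka-python client settings here are illustrative only (serializers are added because kafka-python expects bytes for keys and values by default):

```python
from kafka import KafkaProducer

from assimilator.core.events.events import Event
from assimilator.kafka_.events.events_bus import KafkaEventProducer

producer = KafkaEventProducer(
    topic="users",
    producer=KafkaProducer(
        bootstrap_servers="localhost:9092",
        key_serializer=str.encode,      # event.id (assumed to be a str) becomes the message key
        value_serializer=str.encode,    # event.json() becomes the message value
    ),
    sync_produce=True,   # block until the broker acknowledges each event
    timeout=10,
)

with producer:
    producer.produce(Event(event_name="user_created"))
```

On the consuming side, `KafkaEventConsumer.consume()` subscribes to the configured topics and yields `ExternalEvent` objects as messages arrive.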
--------------------------------------------------------------------------------
/assimilator/mongo/__init__.py:
--------------------------------------------------------------------------------
1 | from assimilator.core.services import CRUDService
2 | from assimilator.core.usability.registry import register_provider, PatternList
3 | from assimilator.mongo.database import MongoRepository, MongoUnitOfWork
4 |
5 | register_provider(provider='mongo', pattern_list=PatternList(
6 | repository=MongoRepository,
7 | uow=MongoUnitOfWork,
8 | crud=CRUDService,
9 | ))
10 |
--------------------------------------------------------------------------------
/assimilator/mongo/database/__init__.py:
--------------------------------------------------------------------------------
1 | from assimilator.mongo.database.models import *
2 | from assimilator.mongo.database.repository import *
3 | from assimilator.mongo.database.unit_of_work import *
4 | from assimilator.mongo.database.specifications.filtering_options import *
5 | from assimilator.mongo.database.specifications.specifications import *
6 |
--------------------------------------------------------------------------------
/assimilator/mongo/database/error_wrapper.py:
--------------------------------------------------------------------------------
1 | from bson.errors import BSONError
2 | from pymongo.errors import DuplicateKeyError, InvalidOperation, WriteError
3 |
4 | from assimilator.core.patterns import ErrorWrapper
5 | from assimilator.core.exceptions import ParsingError
6 | from assimilator.core.database import DataLayerError, InvalidQueryError, NotFoundError
7 |
8 |
9 | class MongoErrorWrapper(ErrorWrapper):
10 | def __init__(self):
11 | super(MongoErrorWrapper, self).__init__(
12 | error_mappings={
13 | BSONError: ParsingError,
14 | DuplicateKeyError: InvalidQueryError,
15 | InvalidOperation: InvalidQueryError,
16 | WriteError: InvalidQueryError,
17 | },
18 | skipped_errors={NotFoundError, DataLayerError},
19 | default_error=DataLayerError,
20 | )
21 |
22 |
23 | __all__ = ['MongoErrorWrapper']
24 |
--------------------------------------------------------------------------------
/assimilator/mongo/database/models.py:
--------------------------------------------------------------------------------
1 | from typing import ClassVar, Any, Union, AbstractSet, Mapping, Dict
2 |
3 | from bson import ObjectId
4 | from pydantic import Field
5 |
6 | from assimilator.core.database.models import BaseModel
7 |
8 | AbstractSetIntStr = AbstractSet[Union[int, str]]
9 | MappingIntStrAny = Mapping[Union[int, str], Any]
10 |
11 |
12 | class MongoModel(BaseModel):
13 | class Config:
14 | allow_population_by_field_name = True
15 | use_enum_values = True
16 | json_encoders = {
17 | ObjectId: str,
18 | }
19 |
20 | class AssimilatorConfig:
21 | collection: ClassVar[str]
22 | autogenerate_id: ClassVar[bool] = True
23 | exclude = {'collection': True, 'upsert': True}
24 | id_name: ClassVar[str] = "_id"
25 |
26 | upsert: bool = False
27 | id: ObjectId = Field(alias="_id")
28 |
29 | def __init_subclass__(cls, **kwargs):
30 | super().__init_subclass__(**kwargs)
31 | cls.__fields__['id'].alias = cls.AssimilatorConfig.id_name
32 | return cls
33 |
34 | def __hash__(self):
35 | return int(str(self.id), base=16)
36 |
37 | def generate_id(self, **kwargs) -> ObjectId:
38 | return ObjectId()
39 |
40 | def json(self, *args, by_alias: bool = True, **kwargs) -> str:
41 | return super(BaseModel, self).json(*args, by_alias=by_alias, **kwargs)
42 |
43 | def dict(self, *args, by_alias: bool = True, **kwargs) -> Dict[str, Any]:
44 | return super(BaseModel, self).dict(*args, by_alias=by_alias, **kwargs)
45 |
46 |
47 | __all__ = ['MongoModel']
48 |
--------------------------------------------------------------------------------
/assimilator/mongo/database/repository.py:
--------------------------------------------------------------------------------
1 | from typing import Union, Optional, Collection, Type, TypeVar, Any
2 |
3 | from pymongo import MongoClient
4 |
5 | from assimilator.mongo.database.models import MongoModel
6 | from assimilator.core.patterns import LazyCommand, ErrorWrapper
7 | from assimilator.mongo.database.error_wrapper import MongoErrorWrapper
8 | from assimilator.core.database import Repository, SpecificationType, \
9 | SpecificationList, NotFoundError, MultipleResultsError
10 | from assimilator.mongo.database.specifications.specifications import MongoSpecificationList
11 | from assimilator.internal.database.models_utils import dict_to_internal_models
12 |
13 | ModelT = TypeVar("ModelT", bound=MongoModel)
14 |
15 |
16 | class MongoRepository(Repository):
17 | id: str = "_id"
18 | session: MongoClient
19 | model: Type[MongoModel]
20 |
21 | def __init__(
22 | self,
23 | session: MongoClient,
24 | model: Type[MongoModel],
25 | database: str,
26 | specifications: Type[SpecificationList] = MongoSpecificationList,
27 | initial_query: Optional[dict] = None,
28 | error_wrapper: Optional[ErrorWrapper] = None,
29 | ):
30 | super(MongoRepository, self).__init__(
31 | session=session,
32 | model=model,
33 | initial_query=initial_query or {},
34 | specifications=specifications,
35 | error_wrapper=error_wrapper or MongoErrorWrapper(),
36 | )
37 | self.database = database
38 |
39 | def get_initial_query(self, override_query: Optional[dict] = None) -> dict:
40 | return dict(super(MongoRepository, self).get_initial_query(override_query))
41 |
42 | def dict_to_models(self, data: dict) -> ModelT:
43 | return self.model(**dict_to_internal_models(data, model=self.model))
44 |
45 | @property
46 | def _model_id_name(self):
47 | config = getattr(self.model, 'AssimilatorConfig', None)
48 | return "_id" if config is None else config.id_name
49 |
50 | @property
51 | def _collection_name(self):
52 | config = getattr(self.model, 'AssimilatorConfig', None)
53 | if config is not None:
54 | return self.model.AssimilatorConfig.collection
55 |
56 |         return getattr(self.model, 'collection', self.model.__name__.lower())
57 |
58 | @property
59 | def _collection(self):
60 | return self.session[self.database][self._collection_name]
61 |
62 | def get(
63 | self,
64 | *specifications: SpecificationType,
65 | lazy: bool = False,
66 | initial_query: dict = None,
67 | ):
68 | query = self._apply_specifications(query=initial_query, specifications=specifications)
69 | data = list(self._collection.find(**query))
70 |
71 | if not data:
72 | raise NotFoundError(f"{self} repository get() did not find "
73 | f"any entities with {query} filter")
74 | elif len(data) != 1:
75 | raise MultipleResultsError(f"{self} repository get() returned"
76 | f" multiple results with {query} query")
77 |
78 | return self.model(**data[0])
79 |
80 | def filter(
81 | self,
82 | *specifications: SpecificationType,
83 | lazy: bool = False,
84 | initial_query: dict = None
85 | ) -> Union[Collection[ModelT], LazyCommand[Collection[ModelT]]]:
86 | query = self._apply_specifications(query=initial_query, specifications=specifications)
87 | return [self.model(**data) for data in self._collection.find(**query)]
88 |
89 | def save(self, obj: Optional[ModelT] = None, **obj_data) -> ModelT:
90 | if obj is None:
91 | obj = self.dict_to_models(data=obj_data)
92 |
93 | self._collection.insert_one(obj.dict())
94 | return obj
95 |
96 | def delete(self, obj: Optional[ModelT] = None, *specifications: SpecificationType) -> None:
97 | obj, specifications = self._check_obj_is_specification(obj, specifications)
98 |
99 | if specifications:
100 | id_name = self._model_id_name
101 | results = self._collection.find(**self._apply_specifications(
102 | query=self.get_initial_query(),
103 | specifications=(*specifications, self.specs.only(id_name)),
104 | ))
105 |
106 | self._collection.delete_many({
107 | id_name: {"$in": [result[id_name] for result in results]}
108 | })
109 | elif obj is not None:
110 | self._collection.delete_one(obj.dict())
111 |
112 | def update(
113 | self,
114 | obj: Optional[ModelT] = None,
115 | *specifications: SpecificationType,
116 | **update_values,
117 | ) -> None:
118 | obj, specifications = self._check_obj_is_specification(obj, specifications)
119 |
120 | if specifications:
121 | results = self._collection.find(**self._apply_specifications(
122 | query=self.get_initial_query(),
123 | specifications=(*specifications, self.specs.only(self._model_id_name)),
124 | ))
125 |
126 | self._collection.update_many(
127 | filter={self._model_id_name: {"$in": [
128 | result[self._model_id_name] for result in results
129 | ]}},
130 | update={'$set': update_values},
131 | )
132 | elif obj is not None:
133 | self._collection.update_one(
134 | {self._model_id_name: obj.id},
135 | update={'$set': obj.dict()},
136 |                 upsert=getattr(obj, 'upsert', False),
137 | )
138 |
139 | def is_modified(self, obj: ModelT) -> bool:
140 |         return self.get(self.specs.filter(id=obj.id)) != obj
141 |
142 | def refresh(self, obj: ModelT) -> None:
143 | fresh_obj = self.get(self.specs.filter(id=obj.id))
144 | obj.__dict__.update(fresh_obj.__dict__)
145 |
146 | def count(
147 | self,
148 | *specifications: SpecificationType,
149 | lazy: bool = False,
150 | initial_query: Optional[dict] = None,
151 | ) -> Union[LazyCommand[int], int]:
152 | return self._collection.count_documents(
153 | filter=self._apply_specifications(
154 | query=initial_query,
155 | specifications=specifications,
156 | ),
157 | )
158 |
159 |
160 | __all__ = ['MongoRepository']
161 |
--------------------------------------------------------------------------------
/assimilator/mongo/database/specifications/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/knucklesuganda/py_assimilator/43108e3575750f3cd6e9dff6d59068d1a5be5e55/assimilator/mongo/database/specifications/__init__.py
--------------------------------------------------------------------------------
/assimilator/mongo/database/specifications/filtering_options.py:
--------------------------------------------------------------------------------
1 | from typing import Any, Tuple
2 |
3 | from bson import ObjectId
4 |
5 | from assimilator.core.database.specifications.filtering_options import\
6 | FilteringOptions, FILTERING_OPTIONS_SEPARATOR
7 | from assimilator.mongo.database.specifications.utils import rename_mongo_id, contains_mongo_id
8 |
9 |
10 | class MongoFilteringOptions(FilteringOptions):
11 | @staticmethod
12 | def _convert_option(field: str, value: Any) -> Tuple[str, Any]:
13 | field = rename_mongo_id(field.replace(FILTERING_OPTIONS_SEPARATOR, "."))
14 | if contains_mongo_id(field):
15 | value = ObjectId(value)
16 |
17 | return field, value
18 |
19 | def _eq(self, field: str, value):
20 | field, value = self._convert_option(field=field, value=value)
21 | return {field: {"$eq": value}}
22 |
23 | def _gt(self, field: str, value):
24 | field, value = self._convert_option(field=field, value=value)
25 | return {field: {"$gt": value}}
26 |
27 | def _gte(self, field: str, value):
28 | field, value = self._convert_option(field=field, value=value)
29 | return {field: {"$gte": value}}
30 |
31 | def _lt(self, field: str, value):
32 | field, value = self._convert_option(field=field, value=value)
33 | return {field: {"$lt": value}}
34 |
35 | def _lte(self, field: str, value):
36 | field, value = self._convert_option(field=field, value=value)
37 | return {field: {"$lte": value}}
38 |
39 | def _not(self, field: str, value):
40 | field, value = self._convert_option(field=field, value=value)
41 | return {field: {"$ne": value}}
42 |
43 | def _is(self, field: str, value):
44 | field, value = self._convert_option(field=field, value=value)
45 | return self._eq(field, value)
46 |
47 | def _like(self, field: str, value):
48 | return self._regex(field, f'^{value.replace("%", ".*?")}$')
49 |
50 | def _regex(self, field: str, value):
51 | field, value = self._convert_option(field=field, value=value)
52 | return {field: {"$regex": value}}
53 |
54 |
55 | __all__ = [
56 | 'MongoFilteringOptions',
57 | ]
58 |
--------------------------------------------------------------------------------
/assimilator/mongo/database/specifications/specifications.py:
--------------------------------------------------------------------------------
1 | from typing import Any, Optional, Collection
2 |
3 | from assimilator.mongo.database.specifications.utils import rename_mongo_id
4 | from assimilator.mongo.database.specifications.filtering_options import MongoFilteringOptions
5 | from assimilator.core.database import SpecificationList, FilterSpecification, specification, AdaptiveFilter
6 |
7 |
8 | class MongoFilter(FilterSpecification):
9 | filters: dict
10 | filtering_options_cls = MongoFilteringOptions
11 |
12 | def __init__(self, *filters, **named_filters):
13 | super(MongoFilter, self).__init__(*filters, **named_filters)
14 | parsed_filters = {}
15 |
16 | for filter_ in self.filters:
17 | parsed_filters.update(filter_)
18 |
19 | self.filters = parsed_filters
20 | if self.filters.get('filter') is not None:
21 | self.filters = self.filters['filter']
22 |
23 | def __or__(self, other: 'FilterSpecification') -> 'FilterSpecification':
24 | if isinstance(other, AdaptiveFilter):
25 | other = MongoFilter(*other.fields, **other.kwargs_fields)
26 |
27 | return MongoFilter({"$or": [self.filters, other.filters]})
28 |
29 | def __and__(self, other: 'FilterSpecification') -> 'FilterSpecification':
30 | if isinstance(other, AdaptiveFilter):
31 | other = MongoFilter(*other.fields, **other.kwargs_fields)
32 |
33 | return MongoFilter({"$and": [self.filters, other.filters]})
34 |
35 | def __invert__(self) -> 'MongoFilter':
36 | inverted_filters = []
37 |
38 | for column, value in self.filters.items():
39 | inverted_filters.append({column: {"$not": value}})
40 |
41 | return MongoFilter(*inverted_filters)
42 |
43 | def __call__(self, query: dict, **context: Any) -> dict:
44 | query['filter'] = {**query.get('filter', {}), **self.filters}
45 | return query
46 |
47 |
48 | mongo_filter = MongoFilter
49 |
50 |
51 | @specification
52 | def mongo_order(*clauses: str, query: dict, **_) -> dict:
53 | query['sort'] = query.get('sort', []) + [
54 | (column, -1 if column.startswith("-") else 1)
55 | for column in map(rename_mongo_id, clauses)
56 | ]
57 | return query
58 |
59 |
60 | @specification
61 | def mongo_paginate(
62 | *,
63 | limit: Optional[int] = None,
64 | offset: Optional[int] = None,
65 | query: dict,
66 | **_,
67 | ) -> dict:
68 | if offset is not None:
69 | query['skip'] = offset
70 | if limit is not None:
71 | query['limit'] = limit
72 |
73 | return query
74 |
75 |
76 | @specification
77 | def mongo_join(*targets: Collection, query: dict, **join_args: dict) -> dict:
78 | return query
79 |
80 |
81 | @specification
82 | def mongo_only(*only_fields: str, query: dict, **_) -> dict:
83 | query['projection'] = list(map(rename_mongo_id, only_fields))
84 | return query
85 |
86 |
87 | class MongoSpecificationList(SpecificationList):
88 | filter = MongoFilter
89 | order = mongo_order
90 | paginate = mongo_paginate
91 | join = mongo_join
92 | only = mongo_only
93 |
94 |
95 | __all__ = [
96 | 'MongoSpecificationList',
97 | 'MongoFilter',
98 | 'mongo_filter',
99 | 'mongo_order',
100 | 'mongo_paginate',
101 | 'mongo_join',
102 | 'mongo_only',
103 | ]
104 |
--------------------------------------------------------------------------------
/assimilator/mongo/database/specifications/utils.py:
--------------------------------------------------------------------------------
1 |
2 |
3 | def rename_mongo_id(field: str) -> str:
4 | return field.replace("id", "_id")
5 |
6 |
7 | def contains_mongo_id(field: str) -> bool:
8 | return field.find("id") != -1
9 |
--------------------------------------------------------------------------------
/assimilator/mongo/database/unit_of_work.py:
--------------------------------------------------------------------------------
1 | from typing import Optional
2 |
3 | from pymongo.client_session import ClientSession
4 |
5 | from assimilator.core.database import UnitOfWork
6 | from assimilator.core.patterns import ErrorWrapper
7 | from assimilator.mongo.database.repository import MongoRepository
8 | from assimilator.mongo.database.error_wrapper import MongoErrorWrapper
9 |
10 |
11 | class MongoUnitOfWork(UnitOfWork):
12 | repository: MongoRepository
13 | transaction: ClientSession
14 |
15 | def __init__(
16 | self,
17 | repository: MongoRepository,
18 | error_wrapper: Optional[ErrorWrapper] = None,
19 | autocommit: bool = False,
20 | ):
21 | super(MongoUnitOfWork, self).__init__(
22 | repository=repository,
23 | error_wrapper=error_wrapper or MongoErrorWrapper(),
24 | autocommit=autocommit,
25 | )
26 |
27 | def begin(self):
28 | self.transaction = self.repository.session.start_session()
29 | self.transaction.start_transaction()
30 |
31 | def rollback(self):
32 | self.transaction.abort_transaction()
33 |
34 | def commit(self):
35 | self.transaction.commit_transaction()
36 |
37 | def close(self):
38 | pass
39 |
40 |
41 | __all__ = [
42 | 'MongoUnitOfWork',
43 | ]
44 |
--------------------------------------------------------------------------------
/assimilator/redis_/__init__.py:
--------------------------------------------------------------------------------
1 | from assimilator.core.services import CRUDService
2 | from assimilator.redis_.database import RedisRepository, RedisUnitOfWork
3 | from assimilator.core.usability.registry import register_provider, PatternList
4 |
5 | pattern_list = PatternList(
6 | repository=RedisRepository,
7 | uow=RedisUnitOfWork,
8 | crud=CRUDService,
9 | )
10 |
11 | register_provider(provider='redis', pattern_list=pattern_list)
12 | register_provider(provider='redis_', pattern_list=pattern_list)
13 |
--------------------------------------------------------------------------------
/assimilator/redis_/database/__init__.py:
--------------------------------------------------------------------------------
1 | from assimilator.redis_.database.models import *
2 | from assimilator.redis_.database.repository import *
3 | from assimilator.redis_.database.unit_of_work import *
4 |
--------------------------------------------------------------------------------
/assimilator/redis_/database/models.py:
--------------------------------------------------------------------------------
1 | from typing import Optional
2 |
3 | from assimilator.core.database.models import BaseModel
4 |
5 |
6 | class RedisModel(BaseModel):
7 | expire_in: Optional[int] = None
8 | expire_in_px: Optional[int] = None
9 | only_update: Optional[bool] = False # Same as xx in redis set. Only set if key exists
10 | only_create: Optional[bool] = False # Same as nx in redis set. Only set if key does not exist
11 | keep_ttl: Optional[bool] = False
12 |
13 | class AssimilatorConfig:
14 | exclude = {
15 | 'expire_in': True,
16 | 'expire_in_px': True,
17 | 'only_update': True,
18 | 'only_create': True,
19 | 'keep_ttl': True,
20 | }
21 |
22 |
23 | __all__ = [
24 | 'RedisModel',
25 | ]
26 |
--------------------------------------------------------------------------------
/assimilator/redis_/database/repository.py:
--------------------------------------------------------------------------------
1 | import json
2 | from typing import Type, Union, Optional, TypeVar, List
3 |
4 | from redis import Redis
5 | from redis.client import Pipeline
6 |
7 | from assimilator.core.patterns.error_wrapper import ErrorWrapper
8 | from assimilator.core.database import (
9 | SpecificationList,
10 | SpecificationType,
11 | Repository,
12 | LazyCommand,
13 | )
14 | from assimilator.internal.database import InternalSpecificationList
15 | from assimilator.internal.database.models_utils import dict_to_internal_models
16 | from assimilator.core.database.exceptions import (
17 | DataLayerError,
18 | NotFoundError,
19 | InvalidQueryError,
20 | MultipleResultsError,
21 | )
22 | from assimilator.core.database import BaseModel
23 |
24 | RedisModelT = TypeVar("RedisModelT", bound=BaseModel)
25 |
26 |
27 | class RedisRepository(Repository):
28 | session: Redis
29 | transaction: Union[Pipeline, Redis]
30 | model: Type[RedisModelT]
31 |
32 | def __init__(
33 | self,
34 | session: Redis,
35 | model: Type[RedisModelT],
36 | initial_query: Optional[str] = '',
37 | specifications: Type[SpecificationList] = InternalSpecificationList,
38 | error_wrapper: Optional[ErrorWrapper] = None,
39 | use_double_filter: bool = True,
40 | ):
41 | super(RedisRepository, self).__init__(
42 | session=session,
43 | model=model,
44 | initial_query=initial_query,
45 | specifications=specifications,
46 | error_wrapper=error_wrapper or ErrorWrapper(
47 | default_error=DataLayerError,
48 | skipped_errors=(NotFoundError,)
49 | )
50 | )
51 | self.transaction = session
52 | self.use_double_specifications = use_double_filter
53 |
54 | def get(
55 | self,
56 | *specifications: SpecificationType,
57 | lazy: bool = False,
58 | initial_query: Optional[str] = None,
59 | ) -> Union[LazyCommand[RedisModelT], RedisModelT]:
60 | query = self._apply_specifications(query=initial_query, specifications=specifications) or '*'
61 | found_objects = self.session.mget(self.session.keys(query))
62 |
63 | if not all(found_objects):
64 | raise NotFoundError(f"{self} repository get() did not find any results with this query: {query}")
65 |
66 | parsed_objects = list(self._apply_specifications(
67 | query=[self.model.loads(found_object) for found_object in found_objects],
68 | specifications=specifications,
69 | ))
70 |
71 | if not parsed_objects:
72 | raise NotFoundError(f"{self} repository get() did not find "
73 | f"any results with this query: {query}")
74 | elif len(parsed_objects) != 1:
75 |             raise MultipleResultsError(f"{self} repository get() found"
76 |                                        f" multiple results with this query: {query}")
77 |
78 | return parsed_objects[0]
79 |
80 | def filter(
81 | self,
82 | *specifications: SpecificationType,
83 | lazy: bool = False,
84 | initial_query: Optional[str] = None,
85 | ) -> Union[LazyCommand[List[RedisModelT]], List[RedisModelT]]:
86 | if self.use_double_specifications and specifications:
87 | key_name = self._apply_specifications(
88 | query=initial_query,
89 | specifications=specifications,
90 | ) or "*"
91 | else:
92 | key_name = "*"
93 |
94 | models = self.session.mget(self.session.keys(key_name))
95 |
96 |         if issubclass(self.model, BaseModel):
97 | query = [self.model.loads(value) for value in models]
98 | else:
99 | query = [self.model(**json.loads(value)) for value in models]
100 |
101 | return list(self._apply_specifications(specifications=specifications, query=query))
102 |
103 | def dict_to_models(self, data: dict) -> RedisModelT:
104 | return self.model(**dict_to_internal_models(data=data, model=self.model))
105 |
106 | def save(self, obj: Optional[RedisModelT] = None, **obj_data) -> RedisModelT:
107 | if obj is None:
108 | obj = self.dict_to_models(data=obj_data)
109 |
110 | self.transaction.set(
111 | name=obj.id,
112 | value=obj.json(),
113 |             ex=getattr(obj, 'expire_in', None),  # for Pydantic model compatibility
114 | px=getattr(obj, 'expire_in_px', None),
115 | nx=getattr(obj, 'only_create', False),
116 | xx=getattr(obj, 'only_update', False),
117 | keepttl=getattr(obj, 'keep_ttl', False),
118 | )
119 | return obj
120 |
121 | def delete(self, obj: Optional[RedisModelT] = None, *specifications: SpecificationType) -> None:
122 | obj, specifications = self._check_obj_is_specification(obj, specifications)
123 |
124 | if specifications:
125 | self.transaction.delete(*[str(model.id) for model in self.filter(*specifications)])
126 | elif obj is not None:
127 | self.transaction.delete(obj.id)
128 |
129 | def update(
130 | self,
131 | obj: Optional[RedisModelT] = None,
132 | *specifications: SpecificationType,
133 | **update_values,
134 | ) -> None:
135 | obj, specifications = self._check_obj_is_specification(obj, specifications)
136 |
137 | if specifications:
138 | if not update_values:
139 | raise InvalidQueryError(
140 | "You did not provide any update_values "
141 | "to the update() yet provided specifications"
142 | )
143 |
144 | models = self.filter(*specifications, lazy=False)
145 | updated_models = {}
146 |
147 | for model in models:
148 | model.__dict__.update(update_values)
149 | updated_models[str(model.id)] = model.json()
150 |
151 | self.transaction.mset(updated_models)
152 |
153 | elif obj is not None:
154 | obj.only_update = True
155 | self.save(obj)
156 |
157 |     def is_modified(self, obj: RedisModelT) -> bool:
158 |         return self.get(self.specifications.filter(obj.id), lazy=False) != obj
159 |
160 | def refresh(self, obj: RedisModelT) -> None:
161 | fresh_obj = self.get(self.specifications.filter(obj.id), lazy=False)
162 |
163 | for key, value in fresh_obj.dict().items():
164 | setattr(obj, key, value)
165 |
166 | def count(
167 | self,
168 | *specifications: SpecificationType,
169 | lazy: bool = False,
170 | initial_query: Optional[str] = None,
171 | ) -> Union[LazyCommand[int], int]:
172 | if not specifications:
173 | return self.session.dbsize()
174 |
175 | filter_query = self._apply_specifications(
176 | query=initial_query,
177 | specifications=specifications,
178 | )
179 | return len(self.session.keys(filter_query))
180 |
181 |
182 | __all__ = [
183 | 'RedisRepository',
184 | ]
185 |
--------------------------------------------------------------------------------
/assimilator/redis_/database/unit_of_work.py:
--------------------------------------------------------------------------------
1 | from typing import Optional
2 |
3 | from assimilator.core.patterns import ErrorWrapper
4 | from assimilator.core.database.unit_of_work import UnitOfWork
5 | from assimilator.redis_.database.repository import RedisRepository
6 | from assimilator.internal.database.error_wrapper import InternalErrorWrapper
7 |
8 |
9 | class RedisUnitOfWork(UnitOfWork):
10 | repository: RedisRepository
11 |
12 | def __init__(
13 | self,
14 | repository: RedisRepository,
15 | error_wrapper: Optional[ErrorWrapper] = None,
16 | autocommit: bool = False,
17 | ):
18 | super(RedisUnitOfWork, self).__init__(
19 | repository=repository,
20 | error_wrapper=error_wrapper or InternalErrorWrapper(),
21 | autocommit=autocommit,
22 | )
23 |
24 | def begin(self):
25 | self.repository.transaction = self.repository.session.pipeline()
26 |
27 | def rollback(self):
28 | self.repository.transaction.discard()
29 |
30 | def commit(self):
31 | self.repository.transaction.execute()
32 |
33 | def close(self):
34 | self.repository.transaction.reset()
35 | self.repository.transaction = self.repository.session
36 |
37 |
38 | __all__ = [
39 | 'RedisUnitOfWork',
40 | ]
41 |
--------------------------------------------------------------------------------
/assimilator/redis_/events/__init__.py:
--------------------------------------------------------------------------------
1 | from assimilator.redis_.events.events_bus import *
2 |
--------------------------------------------------------------------------------
/assimilator/redis_/events/events_bus.py:
--------------------------------------------------------------------------------
1 | from typing import Iterable, Optional
2 |
3 | from redis import Redis
4 | from redis.client import PubSub
5 |
6 | from assimilator.core.events import Event, ExternalEvent
7 | from assimilator.core.events.events_bus import EventConsumer, EventProducer
8 |
9 |
10 | class RedisEventConsumer(EventConsumer):
11 | def __init__(self, channels: Iterable[str], session: Redis):
12 | super(RedisEventConsumer, self).__init__()
13 | self.session = session
14 | self.channels = channels
15 | self._event_channel: Optional[PubSub] = None
16 |
17 | def close(self):
18 | self._event_channel.close()
19 | self._event_channel = None
20 |
21 | def start(self):
22 | self._event_channel = self.session.pubsub()
23 | self._event_channel.subscribe(*self.channels)
24 |
25 | def consume(self) -> Iterable[ExternalEvent]:
26 | message = self._event_channel.get_message(ignore_subscribe_messages=True)
27 |
28 | while message is not None:
29 | if message['type'] == 'message':
30 | yield ExternalEvent.loads(message['data'])
31 |
32 | message = self._event_channel.get_message(ignore_subscribe_messages=True)
33 |
34 |
35 | class RedisEventProducer(EventProducer):
36 | def __init__(self, channel: str, session: Redis):
37 | self.session = session
38 | self.channel = channel
39 |
40 | def produce(self, event: Event):
41 | self.session.publish(self.channel, event.json())
42 |
43 | def start(self):
44 | pass
45 |
46 | def close(self):
47 | pass
48 |
49 |
50 | __all__ = [
51 | 'RedisEventConsumer',
52 | 'RedisEventProducer',
53 | ]
54 |
--------------------------------------------------------------------------------
/docs/alchemy/events.md:
--------------------------------------------------------------------------------
1 | # Alchemy Events - STILL IN DEVELOPMENT
2 |
--------------------------------------------------------------------------------
/docs/concepts.md:
--------------------------------------------------------------------------------
1 | # PyAssimilator concepts
2 |
3 | We want to write the best code. Our code must use the best techniques that other programmers created, have
4 | no dependencies, and be readable. On the other hand, we don't want to spend a lot of time writing that code, because the
5 | only real measure of our program is the **result**.
6 |
7 | That is why we use PyAssimilator. What we want to do is create patterns that allow us to remove dependencies from our code
8 | and make it cleaner. Our patterns can either:
9 |
10 | 1. Talk to a database - `Repository, UnitOfWork`
11 | 2. Optimize our code - `LazyCommand, ErrorWrapper`
12 | 3. Make it more readable - `CRUDService, Service`
13 | 4. Make it more secure - `UnitOfWork`
14 | 5. Help other patterns - `Specification, SpecificationList, AdaptiveSpecification`
15 |
16 | We use these patterns and tick all of the boxes above. That is the whole point of this library. Now, you can start
17 | reading [Basic Tutorials](/tutorial/database/).
18 |
19 | -------------------------------------
20 |
21 | # How do we build these patterns
22 |
23 | You don't really need to read about these concepts below, as you will see them later. But, if you want to know all the things
24 | that were put into this library - feel free to check out the sections below!
25 |
26 | ### 1. Dependency injection
27 | Dependency injection is a really important concept in assimilator. We do
28 | not use any additional dependency injection frameworks, but all the patterns inject
29 | different components into themselves. [If you want to know more about DI](https://www.youtube.com/watch?v=HFU4nAaU63c&feature=youtu.be)
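As a small illustration, here is a hedged sketch that uses the Mongo patterns and assumes a `User` model that extends `MongoModel`: every pattern receives its collaborators through the constructor instead of creating them itself.

```python
from pymongo import MongoClient

from assimilator.mongo.database import MongoRepository, MongoUnitOfWork

# The session and the model are injected into the repository,
# and the repository is injected into the unit of work.
repository = MongoRepository(
    session=MongoClient(),
    model=User,                 # assumed MongoModel subclass
    database='my_database',
)
uow = MongoUnitOfWork(repository=repository)
```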
30 |
31 | ### 2. SOLID
32 | SOLID principles are used heavily in assimilator. That means that, in theory,
33 | you can replace one pattern with another and experience no trouble using it.
34 | _That is why it is not advised to add your own functions to patterns, but you can easily
35 | override the existing ones. For example, you don't want to add createUsers() to the Repository pattern, but you
36 | can override its save() function without any problems_. With that said, it is almost impossible
37 | to write such a vast variety of patterns without breaking some principles. But, if you have
38 | any ideas on how to fix that, then be sure to check out our [GitHub](https://github.com/knucklesuganda/py_assimilator)
39 |
40 | ### 3. Domain-driven design
41 | Most of the patterns here come from Domain-Driven Design. You do not really need to know all of its intricacies, but
42 | make sure that you know the basics of it.
43 |
44 |
45 | ### 4. Reusable patterns
46 | The best thing about our patterns is that you can write your code for SQLAlchemy, then change it to Redis, then change
47 | it to Kafka, and finally test it with Python dictionaries. The thing is, you only have to change your pattern creation
48 | code, everything else stays the same. All the functions work the same in all the patterns that we create.
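For instance, here is a hedged sketch of what that swap could look like (it assumes a `User` model and a session factory such as the `alchemy_session()` used elsewhere in these docs). The business function never changes; only the creation line does:

```python
from assimilator.core.database import UnitOfWork
from assimilator.core.usability.pattern_creator import create_uow


def register_user(uow: UnitOfWork, username: str):
    # Business logic stays identical no matter which provider backs the UnitOfWork.
    with uow:
        uow.repository.save(username=username)
        uow.commit()


# Only the pattern creation code changes when you switch data sources:
uow = create_uow(provider='alchemy', model=User, session=alchemy_session())
# uow = create_uow(provider='redis', model=User, session=Redis())
```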
49 |
--------------------------------------------------------------------------------
/docs/help_framework.md:
--------------------------------------------------------------------------------
1 | ### Use PyAssimilator in your projects
2 |
3 | The easiest way to help us and yourself is to use the framework!
4 | Your code will be better with our patterns, and that will allow us to improve
5 | PyAssimilator!
6 |
7 | ### Star the library
8 |
9 | [Star](https://github.com/knucklesuganda/py_assimilator) PyAssimilator on GitHub.
10 | That will help us a lot! Also, you will be placed in a list of
11 | [⭐Stargazers⭐](/#stargazers)!
12 |
13 | ### Ask questions in PyAssimilator communities
14 |
15 | You can ask questions on our GitHub or Discord
16 | if you have any issues with PyAssimilator. There is also an option to
17 | contact Andrey (the primary creator of PyAssimilator).
18 |
19 | ### Create Pull Requests
20 |
21 | If you want to add something to the library, then you can create pull requests on our GitHub.
22 | You will be added to the list of [Contributors](/#contributors)!
23 |
24 |
--------------------------------------------------------------------------------
/docs/images/icon.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/knucklesuganda/py_assimilator/43108e3575750f3cd6e9dff6d59068d1a5be5e55/docs/images/icon.png
--------------------------------------------------------------------------------
/docs/images/logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/knucklesuganda/py_assimilator/43108e3575750f3cd6e9dff6d59068d1a5be5e55/docs/images/logo.png
--------------------------------------------------------------------------------
/docs/images/logo_white.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/knucklesuganda/py_assimilator/43108e3575750f3cd6e9dff6d59068d1a5be5e55/docs/images/logo_white.png
--------------------------------------------------------------------------------
/docs/images/logo_white.svg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/knucklesuganda/py_assimilator/43108e3575750f3cd6e9dff6d59068d1a5be5e55/docs/images/logo_white.svg
--------------------------------------------------------------------------------
/docs/images/why_assimilator_no_usage.PNG:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/knucklesuganda/py_assimilator/43108e3575750f3cd6e9dff6d59068d1a5be5e55/docs/images/why_assimilator_no_usage.PNG
--------------------------------------------------------------------------------
/docs/images/why_assimilator_usage.PNG:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/knucklesuganda/py_assimilator/43108e3575750f3cd6e9dff6d59068d1a5be5e55/docs/images/why_assimilator_usage.PNG
--------------------------------------------------------------------------------
/docs/index.md:
--------------------------------------------------------------------------------
1 | # Assimilator - the best Python patterns for the best projects
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 | ## Install now
21 | * `pip install py-assimilator`
22 |
23 | ## What is that all about?
24 |
25 | 1. We want to write the best code.
26 | 2. We need the best patterns and techniques for this.
27 | 3. We use PyAssimilator and save lots of time.
28 | 4. We use PyAssimilator and write the best code.
29 | 5. We use PyAssimilator and use the best patterns.
30 | 6. We use PyAssimilator and have no dependencies in our code.
31 | 7. We use PyAssimilator and can switch one database to another in a matter of seconds.
32 | 8. We learn PyAssimilator once and use it forever!
33 | 9. **And most importantly, we make Python projects better!**
34 |
35 |
36 | ## Code comparison
37 |
38 | Before PyAssimilator:
39 | ```Python
40 | # BAD CODE :(
41 |
42 | def create_user(username: str, email: str):
43 | # NO PATTERNS!
44 | # ONLY ONE DATABASE CHOICE!
45 | new_user = User(username=username, email=email, balance=0) # DEPENDENCY!
46 | session = db_session() # DEPENDENCY!
47 | session.add(new_user)
48 | session.commit() # NO ACID TRANSACTIONS!
49 | return new_user
50 |
51 | ```
52 |
53 | After:
54 | ```Python
55 | # GOOD CODE :)
56 |
57 | def create_user(username: str, email: str, uow: UnitOfWork):
58 | # BEST DDD PATTERNS
59 | # PATTERN SUBSTITUTION/MULTIPLE DATABASES AT ONCE
60 |
61 | with uow: # ACID TRANSACTIONS IN ANY DATABASE
62 | new_user = uow.repository.save(
63 | username=username, # NO MODEL DEPENDENCIES
64 | email=email,
65 | balance=0,
66 | )
67 | uow.commit() # AUTO ROLLBACK
68 |
69 | return new_user
70 |
71 | ```
72 |
73 | ## So, do I really need it?
74 |
75 | If you want to spend less time writing your code, but write better code - then you must use PyAssimilator.
76 | It can be hard to start if you have no experience with good code, so you can watch the creator's [video tutorials](https://knucklesuganda.github.io/py_assimilator/video_tutorials/).
77 |
78 |
79 | ## Our vision
80 |
81 | Make Python the best programming language for enterprise development and use all of its dynamic capabilities to write
82 | things that other languages can't even comprehend!
83 |
84 | - Pattern substitution(switch databases easily) ✔️
85 | - Event-based apps(in development) 🛠️
86 | - 45% of all Python projects use PyAssimilator 🛠️
87 | - Independent code(in development) 🛠️
88 | - Adaptive patterns(in development) 🛠️
89 | - Automatic code improvements(in development) 🛠️
90 | - Decentralized code management(in development) 🛠️
91 |
92 | If you want to help with any of those things - feel free to contribute to the project. Remember, you never do anything for
93 | free - and that will not be the case here either.
94 |
95 | ## Sources
96 | * [Github](https://github.com/knucklesuganda/py_assimilator)
97 | * [PyPI](https://pypi.org/project/py-assimilator/)
98 | * [Documentation](https://knucklesuganda.github.io/py_assimilator/)
100 | * [Author's YouTube RU](https://www.youtube.com/channel/UCSNpJHMOU7FqjD4Ttux0uuw)
101 | * [Author's YouTube ENG](https://www.youtube.com/channel/UCeC9LNDwRP9OfjyOFHaSikA)
102 | * [Discord channel](https://discord.gg/gTVaGu7DHN)
103 |
104 | ## Contributors
105 |
106 |
107 |
108 |
109 |
110 | ## Stars history
111 |
112 | [](https://star-history.com/#knucklesuganda/py_assimilator&Date)
113 |
114 | ## ⭐Stargazers⭐
115 |
116 |
119 |
120 |
142 |
143 |
174 |
175 | ## Types of patterns
176 | These are different use cases for the patterns implemented:
177 |
178 | - Database - patterns for database/data layer interactions.
179 | - Events(in development) - projects with events or event-driven architecture.
180 | - Unidentified - patterns that are useful for different purposes.
181 |
182 | ## Available providers
183 | Providers are different patterns for external modules like SQLAlchemy or FastAPI.
184 |
185 | - Alchemy(Database, Events) - patterns for [SQLAlchemy](https://docs.sqlalchemy.org/en/20/) for both database and events.
186 | - Kafka(Events) - patterns in [Kafka](https://kafka.apache.org/) related to events.
187 | - Internal(Database, Events) - internal is the type of provider that saves everything in memory (dict, list, and other tools within your app).
188 | - Redis(Database, Events) - redis_ allows us to work with the [Redis](https://redis.io/) in-memory database.
189 | - MongoDB(Database) - mongo allows us to work with [MongoDB](https://www.mongodb.com/) database.
190 |
--------------------------------------------------------------------------------
/docs/internal/events.md:
--------------------------------------------------------------------------------
1 | # Internal Events - STILL IN DEVELOPMENT
2 |
--------------------------------------------------------------------------------
/docs/kafka/events.md:
--------------------------------------------------------------------------------
1 | # Kafka events - STILL IN DEVELOPMENT
2 |
--------------------------------------------------------------------------------
/docs/new_changes.md:
--------------------------------------------------------------------------------
1 | ## The problem
2 |
3 | We had a problem with pattern creation. You had to write three or more functions just to create a pattern, and we realized
4 | that this was too much for the average user. That is why we changed the way you create your patterns in the
5 | Usability update!
6 |
7 | > You can read the full documentation on Pattern creation here: [How to create patterns](/tutorial/how_to_create/)
8 |
9 | Here is an example on how we **used to** create CRUDService pattern:
10 |
11 | ```python
12 |
13 | def get_repository():
14 | return MongoRepository(
15 | session=mongo_client,
16 | model=User,
17 | database='assimilator_complex'
18 | )
19 |
20 |
21 | def get_uow():
22 | return MongoUnitOfWork(repository=get_repository())
23 |
24 |
25 | def get_crud():
26 | return CRUDService(uow=get_uow())
27 |
28 |
29 | crud = get_crud()
30 |
31 | ```
32 |
33 |
34 | ----------------------------------------------
35 |
36 | ## Our solution
37 |
38 | Now, instead of writing three functions just to create a pattern, you can just call a function and pass all the parameters
39 | inside it. For example, here is how we create a CRUDService:
40 |
41 | ```python
42 |
43 | crud = create_crud(
44 | provider='alchemy', # External library that we are using.
45 | model=User, # Model your CRUDService is going to work with.
46 | session=session_creator(), # Database session.
47 | )
48 |
49 | ```
50 |
51 |
52 | ## In-depth look
53 |
54 | There are other functions that allow us to fully plug all the patterns into our new update. Firstly, we have
55 | a pattern registry. The pattern registry is a dictionary that maps provider names to their patterns.
56 | When we want to create a pattern, we use that registry to find the class that we want to use.
57 |
58 | You don't want to interact with the pattern registry directly (it is a plain dictionary, and dictionaries do not enforce any structure in Python).
59 | That is why we have the following functions:
60 |
61 | - `register_provider()` - Allows you to register a new provider of your own.
62 | If you are developing a library that interacts with PyAssimilator, it is a good practice to register your provider.
63 | This way, anyone can easily import your patterns with our new functions.
64 | - `find_provider()` - Imports a provider module by its path so that it can register itself in the registry.
65 | - `get_pattern_list()` - Returns a list of all patterns for a provider.
66 | - `unregister_provider()` - Deletes a provider from the registry.
67 | - `get_pattern()` - Low-level function that returns a pattern class.
68 |
69 | To find out more about these functions, go to [How to create patterns](/tutorial/how_to_create/)
70 |
--------------------------------------------------------------------------------
/docs/next_update.md:
--------------------------------------------------------------------------------
1 | ## PyAssimilator Model Singularity Update(1.4.0)
2 |
3 | We are focusing on the usability of the library, allowing you to use the same model with different data sources.
4 |
5 | ##### Problem
6 | You can easily use the same logic with different data sources. For example, you can use the same code with SQLAlchemy and MongoDB.
7 | But the problem is that you have to create two models: an SQLAlchemy model and a MongoDB model in your configuration.
8 | You can see that we create lots of classes in here: https://github.com/knucklesuganda/py_assimilator/blob/master/examples/simple_database/models.py
9 | The logic and structure of these models is the same, but we essentially copy our code.
10 |
11 | -------------------------------------------------
12 |
13 | ##### Solution
14 | We will create a single class/function that will allow us to use the same model class with different providers and libraries.
15 | The implementation is not known yet.
16 |
--------------------------------------------------------------------------------
/docs/redis/events.md:
--------------------------------------------------------------------------------
1 | # Redis events - still in development
2 |
--------------------------------------------------------------------------------
/docs/scripts/add_footer.js:
--------------------------------------------------------------------------------
1 | document.getElementsByClassName("md-footer-meta__inner md-grid")[0].innerHTML += `
2 | `;
3 |
--------------------------------------------------------------------------------
/docs/scripts/feedback.js:
--------------------------------------------------------------------------------
1 | window.onUsersnapLoad = function(api) {
2 | api.init();
3 | };
4 | var script = document.createElement('script');
5 | script.defer = 1;
6 | script.src = 'https://widget.usersnap.com/global/load/eaffb19f-011a-4ff4-ad87-956a22981880?onload=onUsersnapLoad';
7 | document.getElementsByTagName('head')[0].appendChild(script);
8 |
9 |
--------------------------------------------------------------------------------
/docs/services.md:
--------------------------------------------------------------------------------
1 | Service is the main part of your business logic. It allows you to use all the patterns together, and you will probably
2 | write services yourself. But, there are some classes that can help you with that.
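If you do write a service by hand, it usually just takes the patterns it needs as constructor arguments. Here is a minimal sketch (the `UserService` name and its method are made up for illustration):

```Python
from assimilator.core.database import UnitOfWork


class UserService:
    def __init__(self, uow: UnitOfWork):
        self.uow = uow  # the only dependency is the pattern itself

    def register(self, username: str, email: str):
        with self.uow:
            user = self.uow.repository.save(username=username, email=email)
            self.uow.commit()

        return user
```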
3 |
4 | ----------------------------------------------------------
5 |
6 | ## CRUD Service Example
7 |
8 | For example, you may want to write your own [CRUD](https://www.freecodecamp.org/news/crud-operations-explained/) services,
9 | but there is a class that helps you with this:
10 |
11 | ```Python
12 | # dependencies.py
13 | from assimilator.core.database import UnitOfWork
14 | from assimilator.core.services.crud import CRUDService
15 |
16 |
17 | def get_repository(): # create repository
18 | ...
19 |
20 |
21 | def get_uow() -> UnitOfWork: # create UnitOfWork
22 | ...
23 |
24 |
25 | def get_service():
26 | """ This function creates CRUDService and accepts UnitOfWork as a parameter. """
27 | return CRUDService(uow=get_uow())
28 |
29 | ```
30 |
31 | Then, we can integrate it with any web framework of our choice. I will use some pseudocode for that:
32 |
33 | ```Python
34 | from web_framework import Router, Response
35 | from assimilator.core.services import CRUDService
36 |
37 | from dependencies import get_service
38 |
39 | router = Router()
40 |
41 |
42 | @router.list('/')
43 | def list_all_users():
44 | service: CRUDService = get_service()
45 | # Use list function to get all users from the service
46 | return Response(service.list())
47 | ```
48 |
49 | So, basically, `CRUDService` allows you to quickly create all the functions for data interactions using any kind of
50 | pattern. You can also find a full [FastAPI example here](https://github.com/knucklesuganda/py_assimilator/tree/master/examples/fastapi_crud_example).
51 |
52 |
53 | ## CRUD Service methods
54 |
55 | ### `list`
56 | This function allows you to return multiple entities. Used for Read operation in CRUD.
57 |
58 | - `*filters` - any kind of filters passed to filter specification.
59 | - `lazy` - whether to run `list()` as a lazy command. `False` by default.
60 | - `**kwargs_filters` - any kind of filters passed to filter specification.
61 |
62 | ```Python
63 | # For example, you may use it like this:
64 |
65 | service.list(
66 | User.username.not_("Andrey"), # Direct SQLAlchemy filter
67 | id__gt=20, # only where id > 20
68 | lazy=True, # as a lazy query
69 | )
70 | ```
71 |
72 | ### `get`
73 | This function allows you to return one entity. Used for Read operation in CRUD.
74 |
75 | - `*filters` - any kind of filters passed to filter specification.
76 | - `lazy` - whether to run `get()` as a lazy command. `False` by default.
77 | - `**kwargs_filters` - any kind of filters passed to filter specification.
78 |
79 | ```Python
80 | # For example, you may use it like this:
81 |
82 | service.get(
83 | User.username == "Andrey", # Direct SQLAlchemy filter
84 | id=20, # only where id == 20
85 | lazy=True, # as a lazy query
86 | )
87 | ```
88 |
89 | ### `create`
90 | This function allows you to create entities. Used for CREATE operation in CRUD.
91 |
92 | - `obj_data` - `dict` with entity data or Model that you want to create.
93 |
94 | ```Python
95 | # For example, you may use it like this:
96 |
97 | service.create({
98 | "username": "Andrey",
99 | "balances": [ # Foreign key
100 | {
101 | "amount": 100,
102 | "currency": { # Foreign key
103 | "name": "USD",
104 | "country": "USA",
105 | },
106 | },
107 | ],
108 | })
109 |
110 | # You may also provide the model itself:
111 | user = User(username="Andrey")
112 | user.balances.add(
113 | Balance(
114 | amount=100,
115 | currency=Currency(name="USD", country="USA")
116 | )
117 | )
118 |
119 | service.create(user)
120 | ```
121 |
122 | The second method is direct, and we would advise you to use indirect methods (the first one, with a dict) when possible.
123 |
124 |
125 | ### `update`
126 | This function allows you to update one entity. Used for Update operation in CRUD.
127 |
128 | - `update_data` - dictionary of updated values
129 | - `*filters` - any kind of filters passed to filter specification.
130 | - `**kwargs_filters` - any kind of filters passed to filter specification.
131 |
132 | ```Python
133 | # For example, you may use it like this:
134 |
135 | service.update(
136 | id=1, # user with ID 1
137 | obj_data={
138 | "username": "Andrey-2", # will have this new username
139 | },
140 | )
141 | ```
142 |
143 | > Important notice on foreign keys. We do not know how to effectively update them with indirect coding styles. So, update()
144 | > only works with simple models now. But, you are free to override the function and put your foreign key handlers in there.
145 | > Also, if you have an idea on how to improve update() or any other thing in Assimilator - be sure to open a pull request!
146 |
147 |
148 | ### `delete`
149 | This function allows you to delete one entity. Used for Delete operation in CRUD.
150 |
151 | - `*filters` - any kind of filters passed to filter specification.
152 | - `**kwargs_filters` - any kind of filters passed to filter specification.
153 |
154 | ```Python
155 | # For example, you may use it like this:
156 |
157 | service.delete(
158 | id=1, # delete user with id == 1
159 | username="Andrey", # and username == "Andrey"
160 | )
161 | ```
162 |
163 | You can also find a full [FastAPI example here](https://github.com/knucklesuganda/py_assimilator/tree/master/examples/fastapi_crud_example).
164 |
--------------------------------------------------------------------------------
/docs/tutorial/architecture_tutorial.md:
--------------------------------------------------------------------------------
1 | # Architecture tutorial
2 |
3 | This tutorial will bring up some points on how to create your apps.
4 |
5 |
6 | ## Repository creation
7 |
8 | When we create our repositories, we need to provide our model type in the constructor. Let's say that we have a program
9 | with User, Product, Order, UserAddress, and Billing entities. Do we create five repositories, one for each model? Do we only have
10 | one repository for User? It depends.
11 |
12 | What you want to do in general is find your primary entities. A primary entity is a model that can live by itself and does
13 | not just add information to other models. User is a primary entity in the majority of cases, because our users can be stored
14 | by themselves. UserAddress, on the other hand, cannot live without the User that it is bound to. So, there is no need to
15 | create a `Repository` for UserAddress. What you want to do is create a new `Repository` for User and write your code in such
16 | a way that your primary entity manages its auxiliary entities. If you want to know more about that, please read Domain-Driven
17 | Design books. PyAssimilator does not follow them religiously, but they provide a good basis that we use here.
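In practice that might look like the hedged sketch below (it assumes a `User` model with a nested `addresses` relation and a session factory called `alchemy_session()`):

```python
from assimilator.core.usability.pattern_creator import create_crud

# One CRUDService (and therefore one Repository) for the primary entity only.
user_crud = create_crud(provider='alchemy', model=User, session=alchemy_session())

# UserAddress has no repository of its own: it is saved through its owner.
user_crud.create({
    "username": "Andrey",
    "addresses": [  # auxiliary entity managed by the primary one
        {"city": "Moscow", "street": "Arbat"},
    ],
})
```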
18 |
19 |
20 | ## Repository and Unit Of Work synergy
21 |
22 | When you want to change your data you are always going to use `UnitOfWork`. You have to make sure that you don't create
23 | any additional repositories in your business logic code. That is, you want to use `UnitOfWork` as your `Repository` source:
24 | `uow.repository`. We do that because we want to remove any kind of dependency from our business logic code, and because we
25 | don't want to open multiple sessions when we don't need to.
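A short sketch of that rule (assuming a `User` model with a `balance` field): the business function receives only the `UnitOfWork` and takes its repository from it instead of building a second one:

```python
from assimilator.core.database import UnitOfWork


def change_balance(uow: UnitOfWork, user_id: int, amount: int):
    with uow:
        # The repository comes from the UnitOfWork - no extra session is opened.
        user = uow.repository.get(uow.repository.specs.filter(id=user_id))
        user.balance += amount
        uow.repository.update(user)
        uow.commit()
```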
26 |
27 |
28 | ## Pattern creations
29 |
30 | It's better if you create your patterns in separate files and use Dependency Injection to provide them in your business
31 | logic code. Dependency Injection allows you to remove dependencies from your business logic code, and it is very
32 | useful for pattern substitution. There are multiple ways of using it. You could look at the way Django does that with
33 | string imports or find a Dependency Injection framework that can help you with that.
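One hedged way to lay that out (assuming a `User` model and an `alchemy_session()` factory) is a small `dependencies.py` module that builds the patterns, while the business code only declares what it needs:

```python
# dependencies.py - the only file that knows which provider is used
from assimilator.core.usability.pattern_creator import create_uow


def get_uow():
    return create_uow(provider='alchemy', model=User, session=alchemy_session())


# main.py - business logic receives the pattern and knows nothing about SQLAlchemy
def create_user(uow, username: str):
    with uow:
        uow.repository.save(username=username)
        uow.commit()


create_user(uow=get_uow(), username="Andrey")
```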
34 |
--------------------------------------------------------------------------------
/docs/tutorial/how_to_create.md:
--------------------------------------------------------------------------------
1 |
2 |
3 | Right now you know how to interact with a database using patterns from [Basic Tutorials](/tutorial/database/). But
4 | some of the information in these tutorials is not completely up to date. One of the latest updates, called the Usability Update, allows
5 | us to create patterns instantly without using the functions described in those tutorials.
6 |
7 | > It is still important to know how to create patterns using your own functions, and some applications may prefer to use
8 | > that instead of the new functions.
9 |
10 | -----------------------------------------------
11 |
12 | ## Pattern registry
13 |
14 | The pattern registry is a dictionary that contains all the providers and their respective patterns. A provider is an external library,
15 | data source, or basically a set of patterns that you are using. Examples of providers are: SQLAlchemy, Redis, MongoDB, Cassandra, etc.
16 |
17 | Each provider corresponds to a `PatternList` which is basically a class that has our patterns.
18 | In order to use the functions that create our patterns, we must register our provider first.
19 |
20 | ----------------------------------------------------
21 |
22 | #### Registering the patterns
23 |
24 | ```python
25 | from assimilator.core.usability.registry import register_provider, PatternList
26 | from assimilator.internal.database import InternalRepository, InternalUnitOfWork
27 | from assimilator.core.services import CRUDService
28 |
29 | # Our pattern list:
30 | internal_patterns = PatternList(
31 | repository=InternalRepository,
32 | uow=InternalUnitOfWork,
33 | crud=CRUDService,
34 | )
35 |
36 | # Register the patterns with provider's name:
37 | register_provider(provider='internal', pattern_list=internal_patterns)
38 | ```
39 |
40 | Fortunately, you don't have to do that most of the time. All providers inside PyAssimilator are already registered,
41 | and other libraries which interact with PyAssimilator must register their providers as well. But, that is still an option for you if you
42 | want to change the registry.
43 |
44 |
45 | The function that you will probably use a lot is `find_provider`:
46 |
47 | ```python
48 | from assimilator.core.usability.registry import find_provider
49 |
50 | # Finds and imports the module if it is not in the registry yet.
51 | find_provider(provider_path='assimilator.alchemy')
52 | ```
53 |
54 | > `find_provider()` is a function that just imports the module. If you are developing a library you still have to use
55 | > `register_provider()`.
56 |
57 | ----------------------------------------------------
58 |
59 | #### Interacting with the registry
60 |
61 | You can find a provider using `get_pattern_list()`:
62 |
63 | ```python
64 | from assimilator.core.usability.registry import get_pattern_list
65 |
66 | alchemy_patterns = get_pattern_list('alchemy')
67 | ```
68 |
69 | You can also unregister patterns using `unregister_provider()`:
70 |
71 | ```python
72 | from assimilator.core.usability.registry import unregister_provider
73 |
74 | unregister_provider('alchemy')
75 | # Now, you will not be able to find SQLAlchemy patterns
76 |
77 | ```
78 |
79 | You can also get a pattern class by its name using `get_pattern()`:
80 |
81 | ```python
82 | from assimilator.core.usability.registry import get_pattern
83 |
84 | alchemy_uow_cls = get_pattern(provider='alchemy', pattern_name='uow')
85 | ```
86 |
87 | > However, this is a usability update, meaning that you won't have to use most of these functions. They are low-level,
88 | > and are only useful when developing an extension library for PyAssimilator or for very specific tasks.
89 |
90 | ## Creating the patterns
91 |
92 | Let's finally create our patterns using Usability Update:
93 |
94 | ```python
95 | from assimilator.core.usability.pattern_creator import create_uow, create_repository, create_crud
96 |
97 | # Create repository:
98 | repository = create_repository(provider='alchemy', model=User, session=alchemy_session())
99 |
100 | # Create unit of work:
101 | uow = create_uow(
102 | provider='mongo',
103 | model=Product,
104 | session=pymongo.client_session.ClientSession(),
105 | kwargs_repository={
106 | "database": "my_db"
107 | }
108 | )
109 |
110 | # Create crud service:
111 | crud = create_crud(provider='internal', model=CustomModel, session={})
112 | ```
113 |
114 | So, instead of writing three separate functions with all the configurations, we can just use a `create_<pattern name>()` function
115 | and easily add a new pattern to our code!
116 |
117 | -------------------------------------------------
118 | #### Custom creation arguments
119 |
120 | Some patterns take custom arguments in their `__init__()` function, and you can provide them like this:
121 |
122 | ```python
123 | uow = create_uow(
124 | provider='mongo',
125 | model=Product,
126 | session=pymongo.client_session.ClientSession(),
127 | kwargs_repository={ # Custom kwargs for repository
128 | "database": "my_db",
129 | },
130 | kwargs_uow={
131 | "custom_argument": True,
132 | }
133 | )
134 | ```
135 |
--------------------------------------------------------------------------------
/docs/tutorial/important.md:
--------------------------------------------------------------------------------
1 | # Important things related to all patterns
2 |
3 | ## What is a pattern?
4 |
5 | A pattern is a typical solution to a commonly occurring problem. In our case, the problems are:
6 |
7 | - Database communication
8 | - Data integrity
9 | - Event-based systems
10 | - Coding speed
11 | - Good code
12 | - External dependencies
13 | - Writing high-quality code that would otherwise be hard to get right
14 |
15 | We solve all of them with different classes such as `Repository`, `UnitOfWork`, `Producer`, and so on. Each
16 | class is a pattern. Many of them are used together: `Repository` is always used with `UnitOfWork` and `Specification`, as the sketch below shows.
17 |
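Here is a minimal sketch of how they fit together (the `top_up_rich_users` function and the `balance` field are just illustrative), assuming a configured `UnitOfWork` like the ones created in the examples below:

```Python
def top_up_rich_users(uow: UnitOfWork):
    with uow:  # UnitOfWork manages the transaction
        # The Specification (repository.specs.filter) describes which objects we want
        for user in uow.repository.filter(uow.repository.specs.filter(balance__gt=20)):
            user.balance += 10
            uow.repository.update(user)  # Repository talks to the data storage

        uow.commit()
```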
18 |
19 | ## Indirect vs Direct code
20 | When you write your code, you can choose between two styles: direct and indirect. What does that mean?
21 | We build our patterns on top of libraries like SQLAlchemy, PyRedis, PyMongo and others.
22 | We did not set out to create a module that completely removes these libraries from your code.
23 |
24 | But we did make our patterns interchangeable. That means you can write some code for SQLAlchemy and switch
25 | it to Redis two minutes later, even if you have written 20 000 lines.
26 |
27 | ### Indirect coding style
28 | - You do not import helper functions from assimilator; everything useful is available directly on the pattern.
29 | - You do not use anything from external providers (except for pattern creation) in your code. You only use our patterns.
30 |
31 | Indirect coding example:
32 | ```Python
33 |
34 | def create_user(uow: UnitOfWork):
35 | with uow:
36 | uow.repository.save(
37 | username="Andrey", # No external library usage
38 | email="python.on.papyrus@gmail.com",
39 | )
40 |
41 |
42 | def filter_users(repository: Repository):
43 | return repository.filter(repository.specs.filter(balance__gt=20)) # only using arguments
44 |
45 |
46 | # Patterns Configuration
47 | # External library(SQLAlchemy) is only found in the pattern creation
48 | repository = AlchemyRepository(Session(), model=User)
49 | uow = AlchemyUnitOfWork(repository)
50 | ```
51 |
52 | ### Direct coding style
53 | - You import functions and objects from assimilator.
54 | - You use things from external libraries in your code together with assimilator patterns.
55 |
56 | Direct coding example:
57 | ```Python
58 |
59 | def create_user(uow: UnitOfWork):
60 | with uow:
61 | new_user = User( # SQLAlchemy model is used directly
62 |             username="Andrey",
63 | email="python.on.papyrus@gmail.com",
64 | )
65 | uow.repository.save(new_user)
66 |
67 |
68 | def filter_users(repository: Repository):
69 | return repository.filter(
70 | repository.specs.filter(User.balance > 20), # SQLAlchemy filter user
71 | # AlchemyFilter(User.balance > 20), # AlchemyFilter is imported from assimilator, direct use
72 | ) # repository.specs.filter == AlchemyFilter for AlchemyRepository, but you either use it directly or indirectly
73 |
74 |
75 | # Patterns Configuration. Everything is the same
76 | repository = AlchemyRepository(Session(), model=User)
77 | uow = AlchemyUnitOfWork(repository)
78 | ```
79 |
80 | ## Why do you need all that?
81 |
82 | #### Indirect style pluses ✔️
83 | - You won't have any external dependencies in your own code. That helps when, for example, you don't want to use SQLAlchemy
84 | directly.
85 | - You can change data storages by only changing the configuration:
86 |
87 | ```Python
88 | def create_user(uow: UnitOfWork):
89 | """ Stays the same using indirect coding """
90 |
91 |
92 | def filter_users(repository: Repository):
93 | """ Stays the same using indirect coding """
94 |
95 |
96 | # Patterns Configuration
97 | # You can change pattern creation and move to another data storage without any issues.
98 | repository = RedisRepository(Redis(), model=RedisUser) ####### LOOK HERE
99 | uow = RedisUnitOfWork(repository)
100 | ```
101 |
102 | #### Indirect minuses ❌
103 |
104 | - Indirect coding is a little slower than the direct style.
105 | - It may not include all the features that your app
106 | needs. For example, what if you need to run a MongoDB aggregation pipeline😵 (even though this specific thing is possible with indirect coding)?
107 |
108 | -------------------------------------------
109 |
110 | #### Direct style pluses ✔️
111 | - Your app is very complex, and the indirect variant does not cover all the features you need.
112 | - You are 100% sure that you will never switch to other external libraries behind your Assimilator patterns.
113 | - It is a little faster, since we do not parse anything and just use the external objects and methods.
114 |
115 | #### Direct minuses ❌
116 |
117 | - Very hard to move to other data storages or libraries since you are using external features directly.
118 | - External dependencies in your code.
119 |
120 |
121 | ## How to choose?
122 | We prefer the indirect style, since it hides dependencies. But what you really need to do is adapt to your project: start
123 | with the indirect style and use direct features only when needed.
124 |
--------------------------------------------------------------------------------
/docs/unidentified_patterns.md:
--------------------------------------------------------------------------------
1 | ## LazyCommand
2 |
3 | `LazyCommand` is an object that allows you to postpone the execution of any kind of code.
4 | You will typically get one from a pattern called with the `lazy=True` argument. But if you want to create your own `LazyCommand`,
5 | you can do it like this:
6 |
7 | ```Python
8 | from assimilator.core.patterns import LazyCommand
9 |
10 |
11 | def func(a: int, b: int):
12 | # Function we want to postpone
13 | return a + b - 10 * a ** b
14 |
15 |
16 | lazy_func: LazyCommand[int] = LazyCommand(
17 | command=func, # command is the function you want to execute
18 | a=10, # argument a
19 | b=20, # argument b
20 | )
21 |
22 | print("No execution yet")
23 |
24 | lazy_func() # function is executed here
25 | ```
26 |
27 |
28 | ### Operations
29 |
30 | You can do the following things with `LazyCommand`:
31 |
32 | ```Python
33 | # call it
34 | lazy_func()
35 |
36 |
37 | # iterate it(if the result is iterable)
38 | for value in lazy_func:
39 | print(value)
40 |
41 |
42 | # use it as boolean
43 | if lazy_func:
44 | print("Result gives True")
45 |
46 |
47 | # get attributes from the result (if the result has them)
48 | print(lazy_func.obj_attr)
49 |
50 |
51 | # compare it
52 | lazy_func > 10
53 | ```
54 |
55 | ### Result retrieval
56 |
57 | The first time we use any of the `LazyCommand` operations, the wrapped function has to run to produce the result. After that,
58 | the result is stored inside the `LazyCommand` object, so using another operation on the same object does not run the calculation twice.
59 |
60 | ```Python
61 | from assimilator.core.patterns import LazyCommand
62 |
63 |
64 | lazy_func: LazyCommand[int] = LazyCommand(
65 | command=run_api_query, # runs API query
66 | # no other arguments present
67 | )
68 |
69 | print("API result:", lazy_func()) # run_api_query() execution
70 |
71 | if lazy_func: # The result is stored in the LazyCommand, no execution needed
72 | print("API returned true!")
73 | ```
74 |
75 | ### Decorator
76 |
77 | Sometimes you want to make your function lazy, but you don't want to write any additional code for that. If that is the
78 | case, you can use the `LazyCommand` decorator:
79 |
80 | ```Python
81 | from assimilator.core.patterns import LazyCommand
82 |
83 |
84 | @LazyCommand.decorate  # lets the function be called with lazy=True
85 | def get_user_from_api(
86 | id: int,
87 | lazy: bool = False,
88 | ):
89 | ...
90 |
91 |
92 | # Now, we can run it like this:
93 | lazy_command = get_user_from_api(id=1, lazy=True)
94 |
95 | # We can also execute it normally:
96 | user = get_user_from_api(id=1)
97 | ```
98 |
99 | -----------------------------
100 |
101 | ## ErrorWrapper
102 |
103 | `ErrorWrapper` is a pattern that allows you to change the type of an error. We do that to remove any
104 | kind of dependency that exceptions from other libraries may introduce. It is mainly used internally, but you can use it in your code as well.
105 |
106 | Before:
107 | ```Python
108 |
109 | def func():
110 | raise ValueError()
111 |
112 |
113 | def foo():
114 | try:
115 | func()
116 | except ValueError: # DEPENDENCY(KIND OF)
117 | print("Invalid data!")
118 | except sqlalchemy.exc.NotFoundError: # DEPENDENCY
119 | print("Not found!")
120 | except redis.LOLError: # DEPENDENCY
121 | print("Lol error!")
122 |
123 | ```
124 |
125 | After:
126 | ```Python
127 | from assimilator.core.patterns import ErrorWrapper
128 |
129 | error_wrapper = ErrorWrapper(error_mappings={
130 |     ValueError: InvalidDataError,
131 |     sqlalchemy.exc.NotFoundError: NotFoundCustomError,
132 |     redis.LOLError: CustomLOLError,
133 | })
134 |
135 | def func():
136 |     raise ValueError()
137 |
138 |
139 | def foo():
140 |     try:
141 |         with error_wrapper:  # errors raised inside are translated on exit
142 |             func()
143 |     except InvalidDataError:  # NO DEPENDENCY
144 |         print("Invalid data!")
145 |     except NotFoundCustomError:  # NO DEPENDENCY
146 |         print("Not found!")
147 |     except CustomLOLError:  # NO DEPENDENCY
148 |         print("Lol error!")
149 |
150 | ```
151 |
152 |
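`ErrorWrapper` can also be applied as a decorator, as the FastAPI CRUD example in this repository does with `api_error_wrapper.decorate`. A minimal sketch reusing `error_wrapper` and `func` from above (`get_data` is just an illustrative name):

```Python
@error_wrapper.decorate  # same translation, applied to the whole function
def get_data():
    func()  # the ValueError raised inside comes out as InvalidDataError


try:
    get_data()
except InvalidDataError:  # NO DEPENDENCY
    print("Invalid data!")
```
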
--------------------------------------------------------------------------------
/docs/video_tutorials.md:
--------------------------------------------------------------------------------
1 | ## Tutorial 1
2 |
3 | - PyAssimilator introduction
4 | - Pattern types
5 | - Various concepts
6 |
7 | VIDEO
11 |
12 |
13 | ## Tutorial 2
14 |
15 | - Repository pattern
16 | - Basic User save/read
17 | - Internal module
18 |
19 | VIDEO
22 |
23 |
24 | ## Tutorial 3
25 |
26 | - Transaction management
27 | - Unit Of Work
28 |
29 | VIDEO
31 |
32 |
33 | ## Tutorial 4
34 |
35 | - Update operation
36 | - Delete operation
37 | - Fully working CRUD
38 |
39 | VIDEO
41 |
--------------------------------------------------------------------------------
/examples/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/knucklesuganda/py_assimilator/43108e3575750f3cd6e9dff6d59068d1a5be5e55/examples/__init__.py
--------------------------------------------------------------------------------
/examples/complex_database/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/knucklesuganda/py_assimilator/43108e3575750f3cd6e9dff6d59068d1a5be5e55/examples/complex_database/__init__.py
--------------------------------------------------------------------------------
/examples/complex_database/dependencies.py:
--------------------------------------------------------------------------------
1 | import sys
2 |
3 | import pymongo
4 | from redis.client import Redis
5 | from sqlalchemy.orm import sessionmaker
6 |
7 | from assimilator.alchemy.database import AlchemyUnitOfWork, AlchemyRepository
8 | from assimilator.internal.database import InternalRepository, InternalUnitOfWork
9 | from assimilator.redis_.database import RedisRepository, RedisUnitOfWork
10 | from assimilator.mongo.database import MongoRepository, MongoUnitOfWork
11 |
12 | from examples.complex_database.models import (
13 | engine, AlchemyUser, AlchemyUserBalance, AlchemyBalanceCurrency,
14 | InternalUser, InternalBalance, InternalCurrency,
15 | RedisUser, RedisBalance, RedisCurrency,
16 | MongoUser, MongoCurrency, MongoBalance,
17 | )
18 |
19 | if len(sys.argv) == 1 or sys.argv[1] == "alchemy":
20 | User = AlchemyUser
21 | Balance = AlchemyUserBalance
22 | Currency = AlchemyBalanceCurrency
23 |
24 |
25 | def get_uow():
26 | DatabaseSession = sessionmaker(bind=engine)
27 | repository = AlchemyRepository(
28 | session=DatabaseSession(),
29 | model=User,
30 | )
31 | return AlchemyUnitOfWork(repository)
32 |
33 | elif sys.argv[1] == "internal":
34 | User = InternalUser
35 | Balance = InternalBalance
36 | Currency = InternalCurrency
37 | internal_session = {}
38 |
39 | def get_uow():
40 | repository = InternalRepository(internal_session, model=InternalUser)
41 | return InternalUnitOfWork(repository)
42 |
43 | elif sys.argv[1] == "redis":
44 | redis_session = Redis()
45 | User = RedisUser
46 | Balance = RedisBalance
47 | Currency = RedisCurrency
48 |
49 |
50 | def get_uow():
51 | repository = RedisRepository(redis_session, model=User)
52 | return RedisUnitOfWork(repository)
53 |
54 |
55 | redis_session.flushdb()
56 |
57 | elif sys.argv[1] == "mongo":
58 | User = MongoUser
59 | Balance = MongoBalance
60 | Currency = MongoCurrency
61 | mongo_client = pymongo.MongoClient()
62 |
63 | mongo_client['assimilator_complex'].drop_collection(MongoUser.AssimilatorConfig.collection)
64 |
65 |
66 | def get_uow():
67 | repository = MongoRepository(session=mongo_client, model=User, database='assimilator_complex')
68 | return MongoUnitOfWork(repository)
69 |
--------------------------------------------------------------------------------
/examples/complex_database/models.py:
--------------------------------------------------------------------------------
1 | from typing import List, Optional
2 | from uuid import uuid4, UUID
3 |
4 | from sqlalchemy import create_engine, Column, String, Float, Integer, ForeignKey, UniqueConstraint
5 | from sqlalchemy.orm import declarative_base, relationship
6 |
7 | from assimilator.core.database import BaseModel
8 | from assimilator.mongo.database import MongoModel
9 | from assimilator.redis_.database import RedisModel
10 |
11 | engine = create_engine(url="sqlite:///:memory:")
12 | Base = declarative_base()
13 |
14 |
15 | class AlchemyUser(Base):
16 | __tablename__ = "users"
17 |
18 | id = Column(Integer(), primary_key=True)
19 | username = Column(String())
20 | email = Column(String())
21 |
22 | balances = relationship("AlchemyUserBalance", back_populates="user")
23 |
24 | def __str__(self):
25 | return f"{self.id} {self.username} {self.email}"
26 |
27 |
28 | class AlchemyUserBalance(Base):
29 | __tablename__ = "balances"
30 | __table_args__ = (
31 | UniqueConstraint("balance", "user_id"),
32 | )
33 |
34 | id = Column(Integer(), primary_key=True)
35 |
36 | user_id = Column(ForeignKey("users.id", ondelete="CASCADE"))
37 | user = relationship("AlchemyUser", back_populates="balances")
38 |
39 | balance = Column(Float(), server_default='0')
40 |
41 | currency_id = Column(ForeignKey("currency.id"))
42 | currency = relationship("AlchemyBalanceCurrency", uselist=False)
43 |
44 | def __str__(self):
45 | return f"{self.balance}{self.currency.currency}"
46 |
47 | def __repr__(self):
48 | return str(self)
49 |
50 |
51 | class AlchemyBalanceCurrency(Base):
52 | __tablename__ = "currency"
53 |
54 | id = Column(Integer(), primary_key=True)
55 | currency = Column(String(length=20))
56 | country = Column(String(length=20))
57 |
58 | def __str__(self):
59 | return self.currency
60 |
61 | def __repr__(self):
62 | return str(self)
63 |
64 |
65 | Base.metadata.create_all(engine)
66 |
67 |
68 | class InternalCurrency(BaseModel):
69 | currency: str
70 | country: str
71 |
72 | def __str__(self):
73 | return self.currency
74 |
75 | def __repr__(self):
76 | return str(self)
77 |
78 |
79 | class InternalBalance(BaseModel):
80 | balance: float
81 | currency: InternalCurrency
82 |
83 | def __str__(self):
84 | return f"{self.balance}{self.currency.currency}"
85 |
86 | def __repr__(self):
87 | return str(self)
88 |
89 |
90 | class InternalUser(BaseModel):
91 | username: str
92 | email: str
93 | balances: List[InternalBalance] = []
94 |
95 | def __str__(self):
96 | return f"{self.id} {self.username} {self.email}"
97 |
98 |
99 | class RedisCurrency(InternalCurrency):
100 | def __str__(self):
101 | return self.currency
102 |
103 | def __repr__(self):
104 | return str(self)
105 |
106 |
107 | class RedisBalance(InternalBalance, RedisModel):
108 | currency: RedisCurrency
109 |
110 | def __str__(self):
111 | return f"{self.balance}{self.currency.currency}"
112 |
113 | def __repr__(self):
114 | return str(self)
115 |
116 |
117 | class RedisUser(InternalUser, RedisModel):
118 | balances: List[RedisBalance] = []
119 |
120 | def __str__(self):
121 | return f"{self.id} {self.username} {self.email}"
122 |
123 |
124 | class MongoCurrency(MongoModel):
125 | class AssimilatorConfig:
126 | collection: str = "currencies"
127 | autogenerate_id = True
128 |
129 | currency: str
130 | country: str
131 |
132 | def __str__(self):
133 | return self.currency
134 |
135 | def __repr__(self):
136 | return str(self)
137 |
138 |
139 | class MongoBalance(MongoModel):
140 | class AssimilatorConfig:
141 | collection: str = "balances"
142 |
143 | balance: float
144 | currency: MongoCurrency
145 |
146 | def __str__(self):
147 | return f"{self.balance}{self.currency.currency}"
148 |
149 | def __repr__(self):
150 | return str(self)
151 |
152 |
153 | class MongoUser(MongoModel):
154 | class AssimilatorConfig:
155 | collection: str = "users"
156 |
157 | balances: List[MongoBalance] = []
158 | username: Optional[str] # For only specification
159 | email: Optional[str]
160 |
161 | def __str__(self):
162 | return f"{self.id} {self.username} {self.email}"
163 |
--------------------------------------------------------------------------------
/examples/fastapi_crud_example/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/knucklesuganda/py_assimilator/43108e3575750f3cd6e9dff6d59068d1a5be5e55/examples/fastapi_crud_example/__init__.py
--------------------------------------------------------------------------------
/examples/fastapi_crud_example/dependencies.py:
--------------------------------------------------------------------------------
1 | import os
2 |
3 | import pymongo
4 | from redis.client import Redis
5 | from sqlalchemy.orm import sessionmaker
6 |
7 | from assimilator.alchemy.database import AlchemyUnitOfWork, AlchemyRepository
8 | from assimilator.internal.database import InternalRepository, InternalUnitOfWork
9 | from assimilator.redis_.database import RedisRepository, RedisUnitOfWork
10 | from assimilator.mongo.database import MongoRepository, MongoUnitOfWork
11 | from assimilator.core.services import CRUDService
12 |
13 | from examples.fastapi_crud_example.models import (
14 | engine, AlchemyUser, AlchemyCurrency, AlchemyBalance,
15 | InternalUser, InternalBalance, InternalCurrency,
16 | RedisUser, RedisBalance, RedisCurrency,
17 | MongoUser, MongoCurrency, MongoBalance,
18 | )
19 |
20 | storage = os.environ.get('storage', 'internal')
21 |
22 |
23 | if storage == "alchemy":
24 | User = AlchemyUser
25 | Balance = AlchemyBalance
26 | Currency = AlchemyCurrency
27 |
28 | def get_uow():
29 | DatabaseSession = sessionmaker(bind=engine)
30 | repository = AlchemyRepository(
31 | session=DatabaseSession(),
32 | model=User,
33 | )
34 | return AlchemyUnitOfWork(repository)
35 |
36 | elif storage == "internal":
37 | User = InternalUser
38 | Balance = InternalBalance
39 | Currency = InternalCurrency
40 | internal_session = {}
41 |
42 | def get_uow():
43 | repository = InternalRepository(internal_session, model=InternalUser)
44 | return InternalUnitOfWork(repository)
45 |
46 | elif storage == "redis":
47 | redis_session = Redis()
48 | User = RedisUser
49 | Balance = RedisBalance
50 | Currency = RedisCurrency
51 |
52 | def get_uow():
53 | repository = RedisRepository(redis_session, model=User)
54 | return RedisUnitOfWork(repository)
55 |
56 | elif storage == "mongo":
57 | User = MongoUser
58 | Balance = MongoBalance
59 | Currency = MongoCurrency
60 | mongo_client = pymongo.MongoClient()
61 |
62 | def get_uow():
63 | repository = MongoRepository(session=mongo_client, model=User, database='assimilator_fastapi')
64 | return MongoUnitOfWork(repository)
65 |
66 |
67 | def get_service():
68 | return CRUDService(uow=get_uow())
69 |
--------------------------------------------------------------------------------
/examples/fastapi_crud_example/main.py:
--------------------------------------------------------------------------------
1 | from fastapi import FastAPI, Depends, HTTPException
2 |
3 | from assimilator.core.services import CRUDService
4 | from assimilator.core.patterns import ErrorWrapper
5 | from assimilator.core.database import NotFoundError
6 | from examples.fastapi_crud_example.dependencies import get_service
7 | from examples.fastapi_crud_example.schema import UserCreateSchema
8 |
9 | app = FastAPI()
10 |
11 | api_error_wrapper = ErrorWrapper(
12 | error_mappings={
13 | NotFoundError: lambda error: HTTPException(status_code=404, detail="Not found"),
14 | },
15 | # default_error=lambda error: HTTPException(status_code=500, detail="Unknown error"),
16 | )
17 |
18 |
19 | @app.get('/users/')
20 | def user_list_route(service: CRUDService = Depends(get_service)):
21 | return service.list()
22 |
23 |
24 | @app.get('/users/{id}')
25 | @api_error_wrapper.decorate
26 | def user_get_route(id: str, service: CRUDService = Depends(get_service)):
27 | return service.get(id=str(id))
28 |
29 |
30 | @app.post('/users/')
31 | def user_create_route(user_data: UserCreateSchema, service: CRUDService = Depends(get_service)):
32 | return service.create(user_data.dict())
33 |
34 |
35 | @app.delete('/users/{id}')
36 | def user_delete_route(id: str, service: CRUDService = Depends(get_service)):
37 | return service.delete(id=id)
38 |
39 |
40 | @app.put('/users/{id}')
41 | def user_update_route(
42 | user_data: UserCreateSchema,
43 | id: str,
44 | service: CRUDService = Depends(get_service),
45 | ):
46 | return service.update(id=id, obj_data=user_data.dict())
47 |
--------------------------------------------------------------------------------
/examples/fastapi_crud_example/models.py:
--------------------------------------------------------------------------------
1 | import uuid
2 | from typing import List
3 |
4 | from sqlalchemy import (
5 | create_engine, Column, String, Float,
6 | Integer, ForeignKey, UniqueConstraint,
7 | Table, UUID, Text,
8 | )
9 | from sqlalchemy.orm import relationship, registry
10 |
11 | from assimilator.core.database import BaseModel
12 | from assimilator.mongo.database import MongoModel
13 | from assimilator.redis_.database import RedisModel
14 |
15 | engine = create_engine(url="sqlite:///crud_database.db")
16 | mapper_registry = registry()
17 |
18 | users = Table(
19 | "users",
20 | mapper_registry.metadata,
21 | Column("id", Text(), default=lambda: str(uuid.uuid4()), primary_key=True),
22 | Column("username", String()),
23 | Column("email", String()),
24 | )
25 |
26 |
27 | balances = Table(
28 | "balances",
29 | mapper_registry.metadata,
30 | Column("id", Text(), default=lambda: str(uuid.uuid4()), primary_key=True),
31 | Column('user_id', ForeignKey("users.id", ondelete="CASCADE")),
32 | Column('balance', Float(), server_default='0'),
33 | Column('currency_id', ForeignKey("currency.id")),
34 |
35 | UniqueConstraint("balance", "user_id"),
36 | )
37 |
38 |
39 | currency = Table(
40 | "currency",
41 | mapper_registry.metadata,
42 | Column("id", Text(), default=lambda: str(uuid.uuid4()), primary_key=True),
43 | Column('currency', String(length=20)),
44 | Column('country', String(length=20)),
45 | )
46 |
47 |
48 | class AlchemyUser:
49 | pass
50 |
51 |
52 | class AlchemyBalance:
53 | pass
54 |
55 |
56 | class AlchemyCurrency:
57 | pass
58 |
59 |
60 | mapper_registry.map_imperatively(
61 | AlchemyUser,
62 | users,
63 | properties={
64 | "balances": relationship(AlchemyBalance, uselist=True, lazy='select'),
65 | },
66 | )
67 |
68 | mapper_registry.map_imperatively(
69 | AlchemyBalance,
70 | balances,
71 | properties={
72 | "currency": relationship(AlchemyCurrency, uselist=False, lazy='select'),
73 | },
74 | )
75 |
76 | mapper_registry.map_imperatively(AlchemyCurrency, currency)
77 | mapper_registry.metadata.create_all(bind=engine, tables=[users, balances, currency])
78 |
79 |
80 | class InternalCurrency(BaseModel):
81 | currency: str
82 | country: str
83 |
84 |
85 | class InternalBalance(BaseModel):
86 | balance: float
87 | currency: InternalCurrency
88 |
89 |
90 | class InternalUser(BaseModel):
91 | username: str
92 | email: str
93 | balances: List[InternalBalance] = []
94 |
95 |
96 | class RedisCurrency(InternalCurrency):
97 | pass
98 |
99 |
100 | class RedisBalance(InternalBalance, RedisModel):
101 | currency: RedisCurrency
102 |
103 |
104 | class RedisUser(InternalUser, RedisModel):
105 | balances: List[RedisBalance] = []
106 |
107 |
108 | class MongoCurrency(MongoModel):
109 | class AssimilatorConfig:
110 | collection: str = "currencies"
111 | autogenerate_id = True
112 |
113 | currency: str
114 | country: str
115 |
116 |
117 | class MongoBalance(MongoModel):
118 | class AssimilatorConfig:
119 | collection: str = "balances"
120 |
121 | balance: float
122 | currency: MongoCurrency
123 |
124 |
125 | class MongoUser(MongoModel):
126 | class AssimilatorConfig:
127 | collection: str = "users"
128 |
129 | balances: List[MongoBalance] = []
130 | username: str
131 | email: str
132 |
--------------------------------------------------------------------------------
/examples/fastapi_crud_example/schema.py:
--------------------------------------------------------------------------------
1 | from typing import List, Union
2 | from uuid import UUID
3 |
4 | from bson import ObjectId
5 | from pydantic import BaseModel
6 |
7 |
8 | class BaseSchema(BaseModel):
9 | class Config:
10 | orm_mode = True
11 | arbitrary_types_allowed = True
12 |
13 |
14 | class CurrencySchema(BaseSchema):
15 | currency: str
16 | country: str
17 |
18 |
19 | class BalanceSchema(BaseSchema):
20 | balance: int
21 | currency: CurrencySchema
22 |
23 |
24 | class UserCreateSchema(BaseSchema):
25 | username: str
26 | balances: List[BalanceSchema]
27 | email: str
28 |
--------------------------------------------------------------------------------
/examples/simple_database/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/knucklesuganda/py_assimilator/43108e3575750f3cd6e9dff6d59068d1a5be5e55/examples/simple_database/__init__.py
--------------------------------------------------------------------------------
/examples/simple_database/dependencies.py:
--------------------------------------------------------------------------------
1 | import sys
2 |
3 | import redis
4 | import pymongo
5 | from sqlalchemy.orm import sessionmaker
6 |
7 | from assimilator.core.usability.pattern_creator import create_uow
8 | from examples.simple_database.models import engine, AlchemyUser, InternalUser, RedisUser, MongoUser
9 |
10 |
11 | alchemy_session_creator = sessionmaker(bind=engine)
12 | internal_session = {}
13 |
14 |
15 | # Database registry contains all the possible patterns and their settings.
16 | # We do that just as an example, you can try or do whatever you want!
17 | database_registry = {
18 | 'alchemy': {
19 | 'model': AlchemyUser, # Model to be used
20 | 'session_creator': lambda: alchemy_session_creator(), # function that can create sessions
21 | 'kwargs_repository': {}, # Additional settings for the repository
22 | },
23 | 'internal': {
24 | 'model': InternalUser,
25 | 'session_creator': lambda: internal_session,
26 | 'kwargs_repository': {},
27 | },
28 | 'redis': {
29 | 'model': RedisUser,
30 | 'session_creator': lambda: redis.Redis(),
31 | 'kwargs_repository': {},
32 | },
33 | 'mongo': {
34 | 'model': MongoUser,
35 | 'session_creator': lambda: pymongo.MongoClient(),
36 | 'kwargs_repository': {
37 | 'database': 'assimilator_users',
38 | },
39 | }
40 | }
41 |
42 | database_provider = sys.argv[1] if len(sys.argv) > 1 else "alchemy" # get the provider from args
43 | User = database_registry[database_provider]['model'] # get the model
44 |
45 |
46 | def get_uow():
47 | dependencies = database_registry[database_provider]
48 | model = dependencies['model']
49 | session = dependencies['session_creator']() # create a new connection to the database
50 | kwargs_repository = dependencies['kwargs_repository']
51 |
52 | return create_uow(
53 | provider=database_provider,
54 | model=model,
55 | session=session,
56 | kwargs_repository=kwargs_repository,
57 | )
58 |
--------------------------------------------------------------------------------
/examples/simple_database/main.py:
--------------------------------------------------------------------------------
1 | import operator
2 |
3 | import os
4 | os.environ['PY_ASSIMILATOR_MESSAGE'] = 'False'
5 |
6 | from assimilator.core.database import filter_
7 | from assimilator.core.patterns import LazyCommand
8 | from assimilator.redis_.database import RedisRepository
9 | from assimilator.core.database import UnitOfWork, Repository
10 | from assimilator.mongo.database import MongoRepository
11 | from assimilator.core.database import NotFoundError
12 | from assimilator.internal.database import InternalRepository
13 | from assimilator.alchemy.database import AlchemyRepository
14 | from assimilator.internal.database.specifications.filtering_options import find_attribute
15 |
16 | from examples.simple_database.dependencies import get_uow, User
17 |
18 |
19 | def create_user__kwargs(uow: UnitOfWork):
20 | with uow:
21 | uow.repository.save(
22 | username='Andrey',
23 | email='python.on.papyrus@gmail.com',
24 | balance=1000,
25 | )
26 | uow.commit()
27 |
28 |
29 | def create_user_model(uow: UnitOfWork):
30 | with uow:
31 | user = User(
32 | username='Andrey-2',
33 | email='python.on.papyrus@gmail.com',
34 | balance=2000,
35 | )
36 | uow.repository.save(user)
37 | uow.commit()
38 |
39 |
40 | def read_user(username: str, repository: Repository):
41 | user = repository.get(filter_(username=username, email="python.on.papyrus@gmail.com"))
42 | print("User:", user.id, user.username, user.email, user.balance)
43 | return user
44 |
45 |
46 | def read_user_direct(username: str, repository: Repository):
47 | if isinstance(repository, AlchemyRepository): # Awful! Try to use filtering options
48 | user = repository.get(filter_(User.username == username))
49 | elif isinstance(repository, (InternalRepository, RedisRepository)):
50 | user = repository.get(filter_(
51 | find_attribute(operator.eq, 'username', username),
52 | # will call eq(model.username, username) for every user
53 | ))
54 | elif isinstance(repository, MongoRepository):
55 | user = repository.get(filter_(
56 | {'username': username},
57 |             # passed directly as a MongoDB-style filter document
58 | ))
59 | else:
60 | raise ValueError("Direct repository filter not found")
61 |
62 | print("User direct:", user.id, user.username, user.email, user.balance)
63 | return user
64 |
65 |
66 | def update_user(uow: UnitOfWork):
67 | with uow:
68 | user = uow.repository.get(filter_(username="Andrey"))
69 |
70 | user.balance += 1000
71 | uow.repository.update(user)
72 | uow.commit()
73 |
74 |
75 | def update_user_direct(user, uow: UnitOfWork):
76 | with uow:
77 | user.balance += 1000
78 | uow.repository.update(user)
79 | uow.commit()
80 |
81 |
82 | def create_many_users(uow: UnitOfWork):
83 | with uow:
84 | for i in range(100):
85 | uow.repository.save(
86 | username=f"User-{i}",
87 | email=f"user-{i}@py_assimilator.com",
88 | balance=i * 100,
89 | )
90 |
91 | uow.commit()
92 |
93 |
94 | def create_many_users_direct(uow: UnitOfWork):
95 | with uow:
96 | for i in range(100):
97 | uow.repository.save(
98 | User(
99 | username=f"User-{i}",
100 | email=f"user-{i}@py_assimilator.com",
101 | balance=i * 100,
102 | )
103 | )
104 |
105 | uow.commit()
106 |
107 |
108 | def filter_users(repository: Repository):
109 | users = repository.filter(
110 | repository.specs.filter(balance__gt=50) & filter_(balance__gt=50) & filter_(balance__eq=10),
111 | )
112 |
113 | for user in users:
114 | print("Filtered User:", user.id, user.username, user.email, user.balance)
115 |
116 |
117 | def count_users(repository: Repository):
118 | print("Total users:", repository.count())
119 | print(
120 | "Users with balance greater than 5000:",
121 | repository.count(filter_(balance__gt=5000))
122 | )
123 |
124 |
125 | def filter_users_lazy(repository: Repository):
126 | users: LazyCommand[User] = repository.filter(filter_(balance__eq=0), lazy=True)
127 |
128 | for user in users: # Queries the database here
129 | print("User without any money:", user.username, user.balance)
130 |
131 |
132 | def update_many_users(uow: UnitOfWork):
133 | username_filter = filter_(username__like="User-%")
134 |
135 | with uow:
136 | uow.repository.update(username_filter, balance=10)
137 | uow.commit()
138 |
139 | assert all(user.balance == 10 for user in uow.repository.filter(username_filter, lazy=True))
140 |
141 |
142 | def delete_many_users(uow: UnitOfWork):
143 | with uow:
144 | uow.repository.delete(filter_(username__regex=r'User-\w*'))
145 | uow.commit()
146 |
147 | assert uow.repository.count(filter_(balance=10)) == 0
148 | print("Total users left:", uow.repository.count())
149 |
150 |
151 | def create_users_error(uow: UnitOfWork):
152 | with uow:
153 | uow.repository.save(
154 | username='Not saved',
155 | email='not-saved@user.com',
156 | balance=0,
157 | )
158 | uow.repository.save(
159 | username='Not saved 2',
160 | email='not-saved-2@user.com',
161 | balance=0,
162 | )
163 |
164 | 1 / 0 # Error. Changes are discarded
165 | uow.commit()
166 |
167 |
168 | def check_users_not_saved(uow: UnitOfWork):
169 | try:
170 | read_user(username="Not saved", repository=uow.repository) # Must return NotFound
171 | raise ValueError("User 1 was saved!")
172 | except NotFoundError:
173 | print("User 1 changes were discarded!")
174 |
175 | try:
176 | read_user(username="Not saved 2", repository=uow.repository) # Must return NotFound
177 | raise ValueError("User 2 was saved!")
178 | except NotFoundError:
179 | print("User 2 changes were discarded!")
180 |
181 |
182 | if __name__ == '__main__':
183 | create_user__kwargs(get_uow())
184 | create_user_model(get_uow())
185 |
186 | read_user(username="Andrey", repository=get_uow().repository)
187 | read_user_direct(username="Andrey-2", repository=get_uow().repository)
188 |
189 | update_user(get_uow())
190 | read_user(username="Andrey", repository=get_uow().repository)
191 |
192 | second_user = read_user(username="Andrey-2", repository=get_uow().repository)
193 | update_user_direct(user=second_user, uow=get_uow())
194 |
195 | try:
196 | create_users_error(uow=get_uow())
197 | except ZeroDivisionError:
198 | pass
199 |
200 | check_users_not_saved(uow=get_uow())
201 |
202 | create_many_users(get_uow())
203 | create_many_users_direct(get_uow())
204 |
205 | update_many_users(get_uow())
206 | delete_many_users(get_uow())
207 |
208 | persistent_uow = get_uow()
209 | filter_users(persistent_uow.repository)
210 | count_users(persistent_uow.repository)
211 | filter_users_lazy(persistent_uow.repository)
212 |
--------------------------------------------------------------------------------
/examples/simple_database/models.py:
--------------------------------------------------------------------------------
1 | from sqlalchemy import create_engine, Column, String, Float, Integer
2 | from sqlalchemy.orm import declarative_base
3 |
4 | from assimilator.core.database import BaseModel
5 | from assimilator.mongo.database import MongoModel
6 | from assimilator.redis_.database import RedisModel
7 |
8 | engine = create_engine(url="sqlite:///:memory:")
9 | Base = declarative_base()
10 |
11 |
12 | class AlchemyUser(Base):
13 | __tablename__ = "users"
14 |
15 | id = Column(Integer(), primary_key=True)
16 | username = Column(String())
17 | email = Column(String())
18 | balance = Column(Float())
19 |
20 | def __str__(self):
21 | return f"{self.id} {self.username} {self.email}"
22 |
23 |
24 | Base.metadata.create_all(engine)
25 |
26 |
27 | class InternalUser(BaseModel):
28 | username: str
29 | email: str
30 | balance: float = 0
31 |
32 |
33 | class RedisUser(RedisModel):
34 | username: str
35 | email: str
36 | balance: float = 0
37 |
38 |
39 | class MongoUser(MongoModel):
40 | class AssimilatorConfig:
41 | collection = "users"
42 |
43 | username: str
44 | email: str
45 | balance: float = 0
46 |
--------------------------------------------------------------------------------
/examples/simple_events/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/knucklesuganda/py_assimilator/43108e3575750f3cd6e9dff6d59068d1a5be5e55/examples/simple_events/__init__.py
--------------------------------------------------------------------------------
/examples/simple_events/dependencies.py:
--------------------------------------------------------------------------------
1 | import sys
2 |
3 | import redis
4 |
5 | from assimilator.internal.events import InternalEventProducer, InternalEventConsumer
6 | from assimilator.redis_.events import RedisEventConsumer, RedisEventProducer
7 |
8 | if len(sys.argv) == 1 or sys.argv[1] == "internal":
9 | event_storage = []
10 |
11 | def get_internal_consumer():
12 | return InternalEventConsumer(event_storage)
13 |
14 | def get_internal_producer():
15 | return InternalEventProducer(event_storage)
16 |
17 | get_producer = get_internal_producer
18 | get_consumer = get_internal_consumer
19 |
20 | elif sys.argv[1] == "redis":
21 | redis_client = redis.Redis()
22 |
23 | def get_redis_consumer():
24 | return RedisEventConsumer(
25 | channels=["records"],
26 | session=redis_client,
27 | )
28 |
29 | def get_redis_producer():
30 | return RedisEventProducer(
31 | channel="records",
32 | session=redis_client,
33 | )
34 |
35 | get_producer = get_redis_producer
36 | get_consumer = get_redis_consumer
37 |
42 |
--------------------------------------------------------------------------------
/examples/simple_events/events.py:
--------------------------------------------------------------------------------
1 | from assimilator.core.events import Event
2 |
3 |
4 | class RecordCreated(Event):
5 | record_name: str
6 | event_name = "record_created"
7 |
--------------------------------------------------------------------------------
/examples/simple_events/main.py:
--------------------------------------------------------------------------------
1 | from assimilator.core.events import EventProducer, EventConsumer
2 | from examples.simple_events.dependencies import get_producer, get_consumer
3 | from examples.simple_events.events import RecordCreated
4 |
5 |
6 | def emit_event(producer: EventProducer):
7 | with producer:
8 | record_event = RecordCreated(record_name="firstRecord")
9 | producer.produce(record_event)
10 |
11 |
12 | def consume_events(consumer: EventConsumer):
13 | with consumer:
14 | for event in consumer.consume():
15 | print(event)
16 |
17 |
18 | if __name__ == '__main__':
19 | emit_event(get_producer())
20 | consume_events(get_consumer())
21 |
--------------------------------------------------------------------------------
/examples/simplest_example.py:
--------------------------------------------------------------------------------
1 | """
2 | This is an example for people who are just starting with programming.
3 | Some concepts like Dependency Injection are not followed here.
4 |
5 | You will see how your code becomes easier step by step when using the CRUDService pattern.
6 | """
7 |
8 | from sqlalchemy.orm import declarative_base, sessionmaker
9 | from sqlalchemy import create_engine, Column, Integer, String, Float
10 | from assimilator.core.usability.pattern_creator import create_crud
11 | from assimilator.core.usability.registry import find_provider
12 |
13 |
14 | find_provider('assimilator.alchemy') # Import SQLAlchemy patterns
15 |
16 | # Firstly, we need to create our User model. That code is just normal SQLAlchemy:
17 | engine = create_engine(url="sqlite:///:memory:")
18 | Base = declarative_base()
19 |
20 |
21 | class AlchemyUser(Base):
22 | __tablename__ = "users"
23 |
24 | id = Column(Integer(), primary_key=True)
25 | username = Column(String())
26 | email = Column(String())
27 | balance = Column(Float())
28 |
29 | def __str__(self):
30 | return f"{self.id} {self.username} {self.email}"
31 |
32 |
33 | Base.metadata.create_all(engine)
34 | session_creator = sessionmaker(bind=engine)
35 |
36 | # Now, we use PyAssimilator to create CRUDService pattern. It allows us to do Create/Read/Update/Delete operations
37 | crud = create_crud(
38 | provider='alchemy', # Provider is the external library that we are using. In our case - SQLAlchemy
39 | model=AlchemyUser, # Model is the main entity that we will be working with.
40 | session=session_creator(), # Session is the connection to the data source. In our case - SQLite
41 | )
42 |
43 |
44 | def create_user():
45 | # Now, let's create a function that will allow us to add a user
46 | new_user = crud.create({ # Create a user
47 | "username": "Andrey",
48 | "email": "python.on.papyrus@gmail.com",
49 | "balance": 1000000,
50 | })
51 |
52 | # The user is created! Let's print our username:
53 | print("Hello,", new_user.username)
54 |
55 |
56 | def get_user():
57 | # Now, let's get a user using his username:
58 | user = crud.get(username="Andrey") # We use crud.get() to retrieve a single user
59 | print(user.username, "has a balance of", user.balance)
60 |
61 |
62 | """
63 | So, what did we do?
64 |
65 | We created a CRUDService pattern that allows us to add a user to the database with:
66 | - No dependencies
67 | - Transaction management
68 | - Less code
69 | - Reproducibility
70 |
71 | I hope you liked it and will use PyAssimilator in your projects!
72 | """
73 |
74 |
75 | if __name__ == '__main__':
76 | create_user()
77 | get_user()
78 |
--------------------------------------------------------------------------------
/mkdocs.yml:
--------------------------------------------------------------------------------
1 | site_name: Assimilator - the best Python patterns
2 | site_description: Assimilator Python framework, Domain-Driven Design, DDD, high performance, easy to learn, fast to code
3 | theme:
4 | name: material
5 | custom_dir: docs/docs_theme
6 | logo: images/logo.png
7 | favicon: images/icon.png
8 | features:
9 | - search.suggest
10 | - search.highlight
11 | - content.tabs.link
12 | - navigation.footer
13 | palette:
14 | - media: '(prefers-color-scheme: light)'
15 | scheme: default
16 | primary: deep purple
17 | accent: purple
18 | toggle:
19 | icon: material/lightbulb
20 | name: Switch to light mode
21 | - media: '(prefers-color-scheme: dark)'
22 | scheme: slate
23 | primary: deep purple
24 | accent: purple
25 | toggle:
26 | icon: material/lightbulb-outline
27 | name: Switch to dark mode
28 | nav:
29 | - Introduction: index.md
30 | - Concepts: concepts.md
31 | - Basic tutorials:
32 | - Important things: tutorial/important.md
33 | - Database tutorial: tutorial/database.md
34 | - Advanced database tutorial: tutorial/advanced_database.md
35 | - How to create patterns: tutorial/how_to_create.md
36 | - Architecture tutorial: tutorial/architecture_tutorial.md
37 | # - Events tutorial: tutorial/events.md
38 | - SQLAlchemy:
39 | - Database: alchemy/database.md
40 | # - Events: alchemy/events.md
41 | - Internal:
42 | - Database: internal/database.md
43 | # - Events: internal/events.md
44 | # - Kafka:
45 | # - Events: kafka/events.md
46 | - Redis:
47 | - Database: redis/database.md
48 | # - Events: redis/events.md
49 | - MongoDB:
50 | - Database: mongo/database.md
51 | - Services Tutorial: services.md
52 | - Unidentified Patterns: unidentified_patterns.md
53 | - Video Tutorials: video_tutorials.md
54 | - Help us make the future🤩: help_framework.md
55 | - ✨Usability Update(1.3.0)✨: new_changes.md
56 | - 🕢Next update🕢: next_update.md
57 |
58 | repo_url: https://github.com/knucklesuganda/py_assimilator
59 | repo_name: knucklesuganda/py_assimilator
60 | site_author: Andrey Ivanov | Python
61 | markdown_extensions:
62 | - pymdownx.highlight:
63 | use_pygments: true
64 | - pymdownx.superfences
65 | - pymdownx.emoji:
66 | emoji_index: !!python/name:materialx.emoji.twemoji
67 | emoji_generator: !!python/name:materialx.emoji.to_svg
68 | extra:
69 | analytics:
70 | provider: google
71 | property: G-7PEMV9YSS5
72 | consent:
73 | title: Cookie consent
74 | description: >-
75 | We use cookies to recognize your repeated visits and preferences, as well
76 | as to measure the effectiveness of our documentation and whether users
77 | find what they're searching for. With your consent, you're helping us to
78 | make our documentation better.
80 | social:
81 | - icon: fontawesome/solid/paper-plane
82 | link: mailto:python.on.papyrus@gmail.com
83 | copyright: Copyright © 2023 Andrey Ivanov
84 | extra_javascript:
85 | - https://pagead2.googlesyndication.com/pagead/js/adsbygoogle.js?client=ca-pub-8262462913055533
86 | - scripts/add_footer.js
87 | - scripts/feedback.js
88 |
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [build-system]
2 | requires = ["hatchling"]
3 | build-backend = "hatchling.build"
4 |
5 | [tool.hatch.build]
6 | ignore-vcs = true
7 | include = [
8 | "assimilator"
9 | ]
10 |
11 | [project]
12 | name = "py_assimilator"
13 | version = "1.3.1"
14 | authors = [
15 | { name="Andrey Ivanov", email="python.on.papyrus@gmail.com" },
16 | ]
17 | maintainers = [
18 | { name="Andrey Ivanov", email="python.on.papyrus@gmail.com" },
19 | ]
20 | keywords = ["DDD", "Domain-driven design", "Database", "Events", "Architecture", "Patterns", "Backend"]
21 | description = "The best Domain-driven design patterns for your projects"
22 | readme = "README.md"
23 | requires-python = ">=3.8"
24 | license = "MIT"
25 |
26 | classifiers = [
27 | "Intended Audience :: Information Technology",
28 | "Intended Audience :: System Administrators",
29 | "Intended Audience :: Education",
30 |     "Intended Audience :: Science/Research",
32 | "Operating System :: OS Independent",
33 | "Programming Language :: Python :: 3",
34 | "Programming Language :: Python",
35 | "Topic :: Internet",
36 | "Topic :: Database",
37 | "Topic :: Software Development :: Testing :: Unit",
38 | "Topic :: Software Development :: Testing :: Acceptance",
39 | "Topic :: Software Development :: Libraries :: Application Frameworks",
40 | "Topic :: Software Development :: Libraries :: Python Modules",
41 | "Topic :: Software Development :: Libraries",
42 | "Topic :: Software Development",
43 | "Typing :: Typed",
44 | "Development Status :: 5 - Production/Stable",
45 | "Environment :: Web Environment",
46 | "Intended Audience :: Developers",
47 | "License :: OSI Approved :: MIT License",
48 | "Programming Language :: Python :: 3 :: Only",
49 | "Programming Language :: Python :: 3.8",
50 | "Programming Language :: Python :: 3.9",
51 | "Programming Language :: Python :: 3.10",
52 | "Programming Language :: Python :: 3.11",
53 | ]
54 | dependencies = [
55 | "pydantic >=1.6.2,<2.0.0",
56 | ]
57 |
58 | [project.optional-dependencies]
59 | alchemy = [
60 | 'SQLAlchemy>=2.0.0'
61 | ]
62 | kafka = [
63 | 'kafka-python>=2.0.2'
64 | ]
65 | redis = [
66 | 'redis>=4.4.0'
67 | ]
68 | mongo = [
69 | 'pymongo>=4.3.3'
70 | ]
71 |
72 | [project.urls]
73 | 'Documentation' = 'https://knucklesuganda.github.io/py_assimilator/'
74 | 'Github' = 'https://github.com/knucklesuganda/py_assimilator/'
75 | 'Youtube RU' = 'https://www.youtube.com/channel/UCSNpJHMOU7FqjD4Ttux0uuw'
76 | 'Youtube ENG' = 'https://www.youtube.com/channel/UCeC9LNDwRP9OfjyOFHaSikA'
77 |
--------------------------------------------------------------------------------
/tests/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/knucklesuganda/py_assimilator/43108e3575750f3cd6e9dff6d59068d1a5be5e55/tests/__init__.py
--------------------------------------------------------------------------------