├── .gitattributes
├── .github
    └── workflows
    │   ├── build.yaml
    │   └── checks.yaml
├── .gitignore
├── Dockerfile
├── LICENSE
├── Makefile
├── README.md
├── alembic.ini
├── docker-compose.local.yml
├── docker-compose.yml
├── plugin_store
    ├── api
    │   ├── __init__.py
    │   ├── models
    │   │   ├── __init__.py
    │   │   ├── announcements.py
    │   │   ├── base.py
    │   │   ├── delete.py
    │   │   ├── list.py
    │   │   ├── submit.py
    │   │   └── update.py
    │   └── utils.py
    ├── cdn.py
    ├── constants.py
    ├── database
    │   ├── __init__.py
    │   ├── database.py
    │   ├── migrations
    │   │   ├── env.py
    │   │   ├── script.py.mako
    │   │   └── versions
    │   │   │   ├── 2022_11_07_0136_4fc55239b4d6_initial_db_setup.py
    │   │   │   ├── 2022_11_09_1905-642324500b07_add_tags_unique_constraint.py
    │   │   │   ├── 2022_11_19_1823-abe90daeb874_initial_db_setup.py
    │   │   │   ├── 2023_06_13_2205-492a599cd718_version_uniqueness.py
    │   │   │   ├── 2023_06_26_0057-00b050c80d6d_add_artifact_image_field.py
    │   │   │   ├── 2023_10_16_1710-f5a91a25a410_download_field.py
    │   │   │   └── 2024_08_10_2158-469f48c143b9_announcements.py
    │   ├── models
    │   │   ├── Artifact.py
    │   │   ├── Base.py
    │   │   ├── Version.py
    │   │   ├── __init__.py
    │   │   └── announcements.py
    │   └── utils.py
    ├── discord.py
    ├── main.py
    └── templates
    │   └── plugin_browser.html
├── poetry.lock
├── pyproject.toml
└── tests
    ├── conftest.py
    ├── db_helpers.py
    ├── dummy_data
        └── plugin-image.png
    ├── test_announcement_views.py
    ├── test_misc_views.py
    └── test_plugin_views.py

/.gitattributes:
--------------------------------------------------------------------------------
1 | # Auto detect text files and perform LF normalization
2 | * text=auto
3 | *.png filter=lfs diff=lfs merge=lfs -text
4 | 
--------------------------------------------------------------------------------
/.github/workflows/build.yaml:
--------------------------------------------------------------------------------
1 | on:
2 |   workflow_call:
3 |     outputs:
4 |       image:
5 |         description: "Resulting docker image (tagged with commit SHA)"
6 |         value: ${{ jobs.set_image_name.outputs.image }}
7 |   workflow_dispatch:
8 |     inputs:
9 |       tag:
10 |         type: string
11 |         description: Image tag
12 | env:
13 |   # Use docker.io for Docker Hub if empty
14 |   REGISTRY: ghcr.io
15 |   # github.repository as <account>/<repo>
16 |   IMAGE_NAME: ${{ github.repository }}
17 | 
18 | jobs:
19 |   docker-build:
20 |     runs-on: ubuntu-latest
21 |     permissions:
22 |       contents: read
23 |       packages: write
24 |     steps:
25 |       - name: Checkout repository
26 |         uses: actions/checkout@v3
27 | 
28 |       # Workaround: https://github.com/docker/build-push-action/issues/461
29 |       - name: Setup Docker buildx
30 |         uses: docker/setup-buildx-action@79abd3f86f79a9d68a23c75a09a9a85889262adf
31 | 
32 |       - name: Set up QEMU
33 |         uses: docker/setup-qemu-action@v3
34 |       - name: Set up Docker Buildx
35 |         uses: docker/setup-buildx-action@v3
36 | 
37 |       # Login against a Docker registry except on PR
38 |       # https://github.com/docker/login-action
39 |       - name: Log into registry ${{ env.REGISTRY }}
40 |         if: github.event_name != 'pull_request'
41 |         uses: docker/login-action@28218f9b04b4f3f62068d7b6ce6ca5b26e35336c
42 |         with:
43 |           registry: ${{ env.REGISTRY }}
44 |           username: ${{ github.actor }}
45 |           password: ${{ secrets.GITHUB_TOKEN }}
46 | 
47 |       # Extract metadata (tags, labels) for Docker
48 |       # https://github.com/docker/metadata-action
49 |       - name: Extract Docker metadata
50 |         id: meta
51 |         uses: docker/metadata-action@98669ae865ea3cffbcbaa878cf57c20bbf1c6c38
52 |         with:
53 |           images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
54 |           tags: |
55 |             type=sha,format=long
56 |           flavor: |
57 |             latest=false
58 | 
59 |       # Build and push Docker image with 
Buildx (don't push on PR) 60 | # https://github.com/docker/build-push-action 61 | - name: Build and push Docker image 62 | id: build-and-push 63 | uses: docker/build-push-action@ac9327eae2b366085ac7f6a2d02df8aa8ead720a 64 | with: 65 | context: . 66 | push: ${{ github.event_name != 'pull_request' }} 67 | tags: ${{ github.event.inputs.tag || steps.meta.outputs.tags }} 68 | platforms: linux/amd64,linux/arm64 69 | labels: ${{ steps.meta.outputs.labels }} 70 | cache-from: type=gha 71 | cache-to: type=gha,mode=max 72 | 73 | set_image_name: 74 | runs-on: ubuntu-latest 75 | outputs: 76 | image: ${{ steps.set.outputs.image }} 77 | steps: 78 | - name: Set output 79 | id: set 80 | shell: bash 81 | run: | 82 | name="${{ env.REGISTRY }}/${{ github.repository }}:sha-${{ github.sha }}" 83 | echo "image=${name,,}" >> $GITHUB_OUTPUT 84 | -------------------------------------------------------------------------------- /.github/workflows/checks.yaml: -------------------------------------------------------------------------------- 1 | name: Checks 2 | 3 | on: 4 | pull_request_target: 5 | 6 | defaults: 7 | run: 8 | working-directory: "/app" 9 | 10 | 11 | jobs: 12 | build: 13 | uses: ./.github/workflows/build.yaml 14 | 15 | lint: 16 | runs-on: ubuntu-latest 17 | needs: 18 | - build 19 | container: 20 | image: ${{ needs.build.outputs.image }} 21 | credentials: 22 | username: ${{ github.actor }} 23 | password: ${{ secrets.github_token }} 24 | strategy: 25 | matrix: 26 | linter: [flake8, isort, black, mypy] 27 | steps: 28 | - name: "Checkout" 29 | uses: actions/checkout@v3 30 | - run: "make lint/${{ matrix.linter }}" 31 | 32 | test: 33 | runs-on: ubuntu-latest 34 | needs: 35 | - build 36 | - lint 37 | container: 38 | image: ${{ needs.build.outputs.image }} 39 | credentials: 40 | username: ${{ github.actor }} 41 | password: ${{ secrets.github_token }} 42 | steps: 43 | - name: "Checkout" 44 | uses: actions/checkout@v3 45 | with: 46 | ref: ${{ github.event.pull_request.head.sha }} 47 | - run: "make test" 48 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | share/python-wheels/ 24 | *.egg-info/ 25 | .installed.cfg 26 | *.egg 27 | MANIFEST 28 | 29 | # PyInstaller 30 | # Usually these files are written by a python script from a template 31 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
32 | *.manifest
33 | *.spec
34 | 
35 | # Installer logs
36 | pip-log.txt
37 | pip-delete-this-directory.txt
38 | 
39 | # Unit test / coverage reports
40 | htmlcov/
41 | .tox/
42 | .nox/
43 | .coverage
44 | .coverage.*
45 | .cache
46 | nosetests.xml
47 | coverage.xml
48 | *.cover
49 | *.py,cover
50 | .hypothesis/
51 | .pytest_cache/
52 | cover/
53 | 
54 | # Translations
55 | *.mo
56 | *.pot
57 | 
58 | # Django stuff:
59 | *.log
60 | local_settings.py
61 | db.sqlite3
62 | db.sqlite3-journal
63 | 
64 | # Flask stuff:
65 | instance/
66 | .webassets-cache
67 | 
68 | # Scrapy stuff:
69 | .scrapy
70 | 
71 | # Sphinx documentation
72 | docs/_build/
73 | 
74 | # PyBuilder
75 | .pybuilder/
76 | target/
77 | 
78 | # Jupyter Notebook
79 | .ipynb_checkpoints
80 | 
81 | # IPython
82 | profile_default/
83 | ipython_config.py
84 | 
85 | # pyenv
86 | #   For a library or package, you might want to ignore these files since the code is
87 | #   intended to run in multiple environments; otherwise, check them in:
88 | # .python-version
89 | 
90 | # pipenv
91 | #   According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
92 | #   However, in case of collaboration, if having platform-specific dependencies or dependencies
93 | #   having no cross-platform support, pipenv may install dependencies that don't work, or not
94 | #   install all needed dependencies.
95 | #Pipfile.lock
96 | 
97 | # poetry
98 | #   Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
99 | #   This is especially recommended for binary packages to ensure reproducibility, and is more
100 | #   commonly ignored for libraries.
101 | #   https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
102 | #poetry.lock
103 | 
104 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow
105 | __pypackages__/
106 | 
107 | # Celery stuff
108 | celerybeat-schedule
109 | celerybeat.pid
110 | 
111 | # SageMath parsed files
112 | *.sage.py
113 | 
114 | # Environments
115 | .env
116 | .venv
117 | env/
118 | venv/
119 | ENV/
120 | env.bak/
121 | venv.bak/
122 | 
123 | # Spyder project settings
124 | .spyderproject
125 | .spyproject
126 | 
127 | # Rope project settings
128 | .ropeproject
129 | 
130 | # mkdocs documentation
131 | /site
132 | 
133 | # mypy
134 | .mypy_cache/
135 | .dmypy.json
136 | dmypy.json
137 | 
138 | # Pyre type checker
139 | .pyre/
140 | 
141 | # pytype static type analyzer
142 | .pytype/
143 | 
144 | # Cython debug symbols
145 | cython_debug/
146 | 
147 | # PyCharm
148 | #  JetBrains specific template is maintained in a separate JetBrains.gitignore that can
149 | #  be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
150 | #  and can be added to the global gitignore or merged into this file.  For a more nuclear
151 | #  option (not recommended) you can uncomment the following to ignore the entire idea folder. 
152 | .idea/
153 | 
154 | # Database file location used for local development
155 | .database/
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM python:3.12.1-alpine3.19
2 | 
3 | ENV POETRY_INSTALLER_MAX_WORKERS=1
4 | ENV POETRY_VIRTUALENVS_IN_PROJECT=false
5 | ENV POETRY_VIRTUALENVS_PATH="/root/.venvs"
6 | ENV VENV_PATH="${POETRY_VIRTUALENVS_PATH}/decky-plugin-store-9TtSrW0h-py3.12"
7 | 
8 | RUN apk add build-base
9 | RUN apk add openssl-dev
10 | RUN apk add python3-dev
11 | RUN apk add curl libffi-dev \
12 |     && curl -sSL https://install.python-poetry.org | python - --version 1.7.1 \
13 |     && apk del curl libffi-dev
14 | 
15 | ENV PATH="$POETRY_HOME/bin:$VENV_PATH/bin:/root/.local/bin:$PATH"
16 | 
17 | WORKDIR /app
18 | 
19 | COPY ./pyproject.toml ./poetry.lock /app/
20 | RUN poetry install --no-interaction --no-root
21 | 
22 | # COPY unpacks directories, so each file to copy must be listed separately!
23 | COPY ./alembic.ini ./LICENSE ./Makefile ./README.md /app/
24 | COPY ./plugin_store/ /app/plugin_store
25 | COPY ./tests/ /app/tests
26 | WORKDIR /app/plugin_store
27 | ENV PYTHONUNBUFFERED=0
28 | 
29 | CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "5566"]
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 |                     GNU AFFERO GENERAL PUBLIC LICENSE
2 |                        Version 3, 19 November 2007
3 | 
4 |  Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
5 |  Everyone is permitted to copy and distribute verbatim copies
6 |  of this license document, but changing it is not allowed.
7 | 
8 |                             Preamble
9 | 
10 |   The GNU Affero General Public License is a free, copyleft license for
11 | software and other kinds of works, specifically designed to ensure
12 | cooperation with the community in the case of network server software.
13 | 
14 |   The licenses for most software and other practical works are designed
15 | to take away your freedom to share and change the works.  By contrast,
16 | our General Public Licenses are intended to guarantee your freedom to
17 | share and change all versions of a program--to make sure it remains free
18 | software for all its users.
19 | 
20 |   When we speak of free software, we are referring to freedom, not
21 | price.  Our General Public Licenses are designed to make sure that you
22 | have the freedom to distribute copies of free software (and charge for
23 | them if you wish), that you receive source code or can get it if you
24 | want it, that you can change the software or use pieces of it in new
25 | free programs, and that you know you can do these things.
26 | 
27 |   Developers that use our General Public Licenses protect your rights
28 | with two steps: (1) assert copyright on the software, and (2) offer
29 | you this License which gives you legal permission to copy, distribute
30 | and/or modify the software.
31 | 
32 |   A secondary benefit of defending all users' freedom is that
33 | improvements made in alternate versions of the program, if they
34 | receive widespread use, become available for other developers to
35 | incorporate.  Many developers of free software are heartened and
36 | encouraged by the resulting cooperation.  However, in the case of
37 | software used on network servers, this result may fail to come about. 
38 | The GNU General Public License permits making a modified version and 39 | letting the public access it on a server without ever releasing its 40 | source code to the public. 41 | 42 | The GNU Affero General Public License is designed specifically to 43 | ensure that, in such cases, the modified source code becomes available 44 | to the community. It requires the operator of a network server to 45 | provide the source code of the modified version running there to the 46 | users of that server. Therefore, public use of a modified version, on 47 | a publicly accessible server, gives the public access to the source 48 | code of the modified version. 49 | 50 | An older license, called the Affero General Public License and 51 | published by Affero, was designed to accomplish similar goals. This is 52 | a different license, not a version of the Affero GPL, but Affero has 53 | released a new version of the Affero GPL which permits relicensing under 54 | this license. 55 | 56 | The precise terms and conditions for copying, distribution and 57 | modification follow. 58 | 59 | TERMS AND CONDITIONS 60 | 61 | 0. Definitions. 62 | 63 | "This License" refers to version 3 of the GNU Affero General Public License. 64 | 65 | "Copyright" also means copyright-like laws that apply to other kinds of 66 | works, such as semiconductor masks. 67 | 68 | "The Program" refers to any copyrightable work licensed under this 69 | License. Each licensee is addressed as "you". "Licensees" and 70 | "recipients" may be individuals or organizations. 71 | 72 | To "modify" a work means to copy from or adapt all or part of the work 73 | in a fashion requiring copyright permission, other than the making of an 74 | exact copy. The resulting work is called a "modified version" of the 75 | earlier work or a work "based on" the earlier work. 76 | 77 | A "covered work" means either the unmodified Program or a work based 78 | on the Program. 79 | 80 | To "propagate" a work means to do anything with it that, without 81 | permission, would make you directly or secondarily liable for 82 | infringement under applicable copyright law, except executing it on a 83 | computer or modifying a private copy. Propagation includes copying, 84 | distribution (with or without modification), making available to the 85 | public, and in some countries other activities as well. 86 | 87 | To "convey" a work means any kind of propagation that enables other 88 | parties to make or receive copies. Mere interaction with a user through 89 | a computer network, with no transfer of a copy, is not conveying. 90 | 91 | An interactive user interface displays "Appropriate Legal Notices" 92 | to the extent that it includes a convenient and prominently visible 93 | feature that (1) displays an appropriate copyright notice, and (2) 94 | tells the user that there is no warranty for the work (except to the 95 | extent that warranties are provided), that licensees may convey the 96 | work under this License, and how to view a copy of this License. If 97 | the interface presents a list of user commands or options, such as a 98 | menu, a prominent item in the list meets this criterion. 99 | 100 | 1. Source Code. 101 | 102 | The "source code" for a work means the preferred form of the work 103 | for making modifications to it. "Object code" means any non-source 104 | form of a work. 
105 | 106 | A "Standard Interface" means an interface that either is an official 107 | standard defined by a recognized standards body, or, in the case of 108 | interfaces specified for a particular programming language, one that 109 | is widely used among developers working in that language. 110 | 111 | The "System Libraries" of an executable work include anything, other 112 | than the work as a whole, that (a) is included in the normal form of 113 | packaging a Major Component, but which is not part of that Major 114 | Component, and (b) serves only to enable use of the work with that 115 | Major Component, or to implement a Standard Interface for which an 116 | implementation is available to the public in source code form. A 117 | "Major Component", in this context, means a major essential component 118 | (kernel, window system, and so on) of the specific operating system 119 | (if any) on which the executable work runs, or a compiler used to 120 | produce the work, or an object code interpreter used to run it. 121 | 122 | The "Corresponding Source" for a work in object code form means all 123 | the source code needed to generate, install, and (for an executable 124 | work) run the object code and to modify the work, including scripts to 125 | control those activities. However, it does not include the work's 126 | System Libraries, or general-purpose tools or generally available free 127 | programs which are used unmodified in performing those activities but 128 | which are not part of the work. For example, Corresponding Source 129 | includes interface definition files associated with source files for 130 | the work, and the source code for shared libraries and dynamically 131 | linked subprograms that the work is specifically designed to require, 132 | such as by intimate data communication or control flow between those 133 | subprograms and other parts of the work. 134 | 135 | The Corresponding Source need not include anything that users 136 | can regenerate automatically from other parts of the Corresponding 137 | Source. 138 | 139 | The Corresponding Source for a work in source code form is that 140 | same work. 141 | 142 | 2. Basic Permissions. 143 | 144 | All rights granted under this License are granted for the term of 145 | copyright on the Program, and are irrevocable provided the stated 146 | conditions are met. This License explicitly affirms your unlimited 147 | permission to run the unmodified Program. The output from running a 148 | covered work is covered by this License only if the output, given its 149 | content, constitutes a covered work. This License acknowledges your 150 | rights of fair use or other equivalent, as provided by copyright law. 151 | 152 | You may make, run and propagate covered works that you do not 153 | convey, without conditions so long as your license otherwise remains 154 | in force. You may convey covered works to others for the sole purpose 155 | of having them make modifications exclusively for you, or provide you 156 | with facilities for running those works, provided that you comply with 157 | the terms of this License in conveying all material for which you do 158 | not control copyright. Those thus making or running the covered works 159 | for you must do so exclusively on your behalf, under your direction 160 | and control, on terms that prohibit them from making any copies of 161 | your copyrighted material outside their relationship with you. 
162 | 163 | Conveying under any other circumstances is permitted solely under 164 | the conditions stated below. Sublicensing is not allowed; section 10 165 | makes it unnecessary. 166 | 167 | 3. Protecting Users' Legal Rights From Anti-Circumvention Law. 168 | 169 | No covered work shall be deemed part of an effective technological 170 | measure under any applicable law fulfilling obligations under article 171 | 11 of the WIPO copyright treaty adopted on 20 December 1996, or 172 | similar laws prohibiting or restricting circumvention of such 173 | measures. 174 | 175 | When you convey a covered work, you waive any legal power to forbid 176 | circumvention of technological measures to the extent such circumvention 177 | is effected by exercising rights under this License with respect to 178 | the covered work, and you disclaim any intention to limit operation or 179 | modification of the work as a means of enforcing, against the work's 180 | users, your or third parties' legal rights to forbid circumvention of 181 | technological measures. 182 | 183 | 4. Conveying Verbatim Copies. 184 | 185 | You may convey verbatim copies of the Program's source code as you 186 | receive it, in any medium, provided that you conspicuously and 187 | appropriately publish on each copy an appropriate copyright notice; 188 | keep intact all notices stating that this License and any 189 | non-permissive terms added in accord with section 7 apply to the code; 190 | keep intact all notices of the absence of any warranty; and give all 191 | recipients a copy of this License along with the Program. 192 | 193 | You may charge any price or no price for each copy that you convey, 194 | and you may offer support or warranty protection for a fee. 195 | 196 | 5. Conveying Modified Source Versions. 197 | 198 | You may convey a work based on the Program, or the modifications to 199 | produce it from the Program, in the form of source code under the 200 | terms of section 4, provided that you also meet all of these conditions: 201 | 202 | a) The work must carry prominent notices stating that you modified 203 | it, and giving a relevant date. 204 | 205 | b) The work must carry prominent notices stating that it is 206 | released under this License and any conditions added under section 207 | 7. This requirement modifies the requirement in section 4 to 208 | "keep intact all notices". 209 | 210 | c) You must license the entire work, as a whole, under this 211 | License to anyone who comes into possession of a copy. This 212 | License will therefore apply, along with any applicable section 7 213 | additional terms, to the whole of the work, and all its parts, 214 | regardless of how they are packaged. This License gives no 215 | permission to license the work in any other way, but it does not 216 | invalidate such permission if you have separately received it. 217 | 218 | d) If the work has interactive user interfaces, each must display 219 | Appropriate Legal Notices; however, if the Program has interactive 220 | interfaces that do not display Appropriate Legal Notices, your 221 | work need not make them do so. 
222 | 223 | A compilation of a covered work with other separate and independent 224 | works, which are not by their nature extensions of the covered work, 225 | and which are not combined with it such as to form a larger program, 226 | in or on a volume of a storage or distribution medium, is called an 227 | "aggregate" if the compilation and its resulting copyright are not 228 | used to limit the access or legal rights of the compilation's users 229 | beyond what the individual works permit. Inclusion of a covered work 230 | in an aggregate does not cause this License to apply to the other 231 | parts of the aggregate. 232 | 233 | 6. Conveying Non-Source Forms. 234 | 235 | You may convey a covered work in object code form under the terms 236 | of sections 4 and 5, provided that you also convey the 237 | machine-readable Corresponding Source under the terms of this License, 238 | in one of these ways: 239 | 240 | a) Convey the object code in, or embodied in, a physical product 241 | (including a physical distribution medium), accompanied by the 242 | Corresponding Source fixed on a durable physical medium 243 | customarily used for software interchange. 244 | 245 | b) Convey the object code in, or embodied in, a physical product 246 | (including a physical distribution medium), accompanied by a 247 | written offer, valid for at least three years and valid for as 248 | long as you offer spare parts or customer support for that product 249 | model, to give anyone who possesses the object code either (1) a 250 | copy of the Corresponding Source for all the software in the 251 | product that is covered by this License, on a durable physical 252 | medium customarily used for software interchange, for a price no 253 | more than your reasonable cost of physically performing this 254 | conveying of source, or (2) access to copy the 255 | Corresponding Source from a network server at no charge. 256 | 257 | c) Convey individual copies of the object code with a copy of the 258 | written offer to provide the Corresponding Source. This 259 | alternative is allowed only occasionally and noncommercially, and 260 | only if you received the object code with such an offer, in accord 261 | with subsection 6b. 262 | 263 | d) Convey the object code by offering access from a designated 264 | place (gratis or for a charge), and offer equivalent access to the 265 | Corresponding Source in the same way through the same place at no 266 | further charge. You need not require recipients to copy the 267 | Corresponding Source along with the object code. If the place to 268 | copy the object code is a network server, the Corresponding Source 269 | may be on a different server (operated by you or a third party) 270 | that supports equivalent copying facilities, provided you maintain 271 | clear directions next to the object code saying where to find the 272 | Corresponding Source. Regardless of what server hosts the 273 | Corresponding Source, you remain obligated to ensure that it is 274 | available for as long as needed to satisfy these requirements. 275 | 276 | e) Convey the object code using peer-to-peer transmission, provided 277 | you inform other peers where the object code and Corresponding 278 | Source of the work are being offered to the general public at no 279 | charge under subsection 6d. 280 | 281 | A separable portion of the object code, whose source code is excluded 282 | from the Corresponding Source as a System Library, need not be 283 | included in conveying the object code work. 
284 | 285 | A "User Product" is either (1) a "consumer product", which means any 286 | tangible personal property which is normally used for personal, family, 287 | or household purposes, or (2) anything designed or sold for incorporation 288 | into a dwelling. In determining whether a product is a consumer product, 289 | doubtful cases shall be resolved in favor of coverage. For a particular 290 | product received by a particular user, "normally used" refers to a 291 | typical or common use of that class of product, regardless of the status 292 | of the particular user or of the way in which the particular user 293 | actually uses, or expects or is expected to use, the product. A product 294 | is a consumer product regardless of whether the product has substantial 295 | commercial, industrial or non-consumer uses, unless such uses represent 296 | the only significant mode of use of the product. 297 | 298 | "Installation Information" for a User Product means any methods, 299 | procedures, authorization keys, or other information required to install 300 | and execute modified versions of a covered work in that User Product from 301 | a modified version of its Corresponding Source. The information must 302 | suffice to ensure that the continued functioning of the modified object 303 | code is in no case prevented or interfered with solely because 304 | modification has been made. 305 | 306 | If you convey an object code work under this section in, or with, or 307 | specifically for use in, a User Product, and the conveying occurs as 308 | part of a transaction in which the right of possession and use of the 309 | User Product is transferred to the recipient in perpetuity or for a 310 | fixed term (regardless of how the transaction is characterized), the 311 | Corresponding Source conveyed under this section must be accompanied 312 | by the Installation Information. But this requirement does not apply 313 | if neither you nor any third party retains the ability to install 314 | modified object code on the User Product (for example, the work has 315 | been installed in ROM). 316 | 317 | The requirement to provide Installation Information does not include a 318 | requirement to continue to provide support service, warranty, or updates 319 | for a work that has been modified or installed by the recipient, or for 320 | the User Product in which it has been modified or installed. Access to a 321 | network may be denied when the modification itself materially and 322 | adversely affects the operation of the network or violates the rules and 323 | protocols for communication across the network. 324 | 325 | Corresponding Source conveyed, and Installation Information provided, 326 | in accord with this section must be in a format that is publicly 327 | documented (and with an implementation available to the public in 328 | source code form), and must require no special password or key for 329 | unpacking, reading or copying. 330 | 331 | 7. Additional Terms. 332 | 333 | "Additional permissions" are terms that supplement the terms of this 334 | License by making exceptions from one or more of its conditions. 335 | Additional permissions that are applicable to the entire Program shall 336 | be treated as though they were included in this License, to the extent 337 | that they are valid under applicable law. 
If additional permissions 338 | apply only to part of the Program, that part may be used separately 339 | under those permissions, but the entire Program remains governed by 340 | this License without regard to the additional permissions. 341 | 342 | When you convey a copy of a covered work, you may at your option 343 | remove any additional permissions from that copy, or from any part of 344 | it. (Additional permissions may be written to require their own 345 | removal in certain cases when you modify the work.) You may place 346 | additional permissions on material, added by you to a covered work, 347 | for which you have or can give appropriate copyright permission. 348 | 349 | Notwithstanding any other provision of this License, for material you 350 | add to a covered work, you may (if authorized by the copyright holders of 351 | that material) supplement the terms of this License with terms: 352 | 353 | a) Disclaiming warranty or limiting liability differently from the 354 | terms of sections 15 and 16 of this License; or 355 | 356 | b) Requiring preservation of specified reasonable legal notices or 357 | author attributions in that material or in the Appropriate Legal 358 | Notices displayed by works containing it; or 359 | 360 | c) Prohibiting misrepresentation of the origin of that material, or 361 | requiring that modified versions of such material be marked in 362 | reasonable ways as different from the original version; or 363 | 364 | d) Limiting the use for publicity purposes of names of licensors or 365 | authors of the material; or 366 | 367 | e) Declining to grant rights under trademark law for use of some 368 | trade names, trademarks, or service marks; or 369 | 370 | f) Requiring indemnification of licensors and authors of that 371 | material by anyone who conveys the material (or modified versions of 372 | it) with contractual assumptions of liability to the recipient, for 373 | any liability that these contractual assumptions directly impose on 374 | those licensors and authors. 375 | 376 | All other non-permissive additional terms are considered "further 377 | restrictions" within the meaning of section 10. If the Program as you 378 | received it, or any part of it, contains a notice stating that it is 379 | governed by this License along with a term that is a further 380 | restriction, you may remove that term. If a license document contains 381 | a further restriction but permits relicensing or conveying under this 382 | License, you may add to a covered work material governed by the terms 383 | of that license document, provided that the further restriction does 384 | not survive such relicensing or conveying. 385 | 386 | If you add terms to a covered work in accord with this section, you 387 | must place, in the relevant source files, a statement of the 388 | additional terms that apply to those files, or a notice indicating 389 | where to find the applicable terms. 390 | 391 | Additional terms, permissive or non-permissive, may be stated in the 392 | form of a separately written license, or stated as exceptions; 393 | the above requirements apply either way. 394 | 395 | 8. Termination. 396 | 397 | You may not propagate or modify a covered work except as expressly 398 | provided under this License. Any attempt otherwise to propagate or 399 | modify it is void, and will automatically terminate your rights under 400 | this License (including any patent licenses granted under the third 401 | paragraph of section 11). 
402 | 403 | However, if you cease all violation of this License, then your 404 | license from a particular copyright holder is reinstated (a) 405 | provisionally, unless and until the copyright holder explicitly and 406 | finally terminates your license, and (b) permanently, if the copyright 407 | holder fails to notify you of the violation by some reasonable means 408 | prior to 60 days after the cessation. 409 | 410 | Moreover, your license from a particular copyright holder is 411 | reinstated permanently if the copyright holder notifies you of the 412 | violation by some reasonable means, this is the first time you have 413 | received notice of violation of this License (for any work) from that 414 | copyright holder, and you cure the violation prior to 30 days after 415 | your receipt of the notice. 416 | 417 | Termination of your rights under this section does not terminate the 418 | licenses of parties who have received copies or rights from you under 419 | this License. If your rights have been terminated and not permanently 420 | reinstated, you do not qualify to receive new licenses for the same 421 | material under section 10. 422 | 423 | 9. Acceptance Not Required for Having Copies. 424 | 425 | You are not required to accept this License in order to receive or 426 | run a copy of the Program. Ancillary propagation of a covered work 427 | occurring solely as a consequence of using peer-to-peer transmission 428 | to receive a copy likewise does not require acceptance. However, 429 | nothing other than this License grants you permission to propagate or 430 | modify any covered work. These actions infringe copyright if you do 431 | not accept this License. Therefore, by modifying or propagating a 432 | covered work, you indicate your acceptance of this License to do so. 433 | 434 | 10. Automatic Licensing of Downstream Recipients. 435 | 436 | Each time you convey a covered work, the recipient automatically 437 | receives a license from the original licensors, to run, modify and 438 | propagate that work, subject to this License. You are not responsible 439 | for enforcing compliance by third parties with this License. 440 | 441 | An "entity transaction" is a transaction transferring control of an 442 | organization, or substantially all assets of one, or subdividing an 443 | organization, or merging organizations. If propagation of a covered 444 | work results from an entity transaction, each party to that 445 | transaction who receives a copy of the work also receives whatever 446 | licenses to the work the party's predecessor in interest had or could 447 | give under the previous paragraph, plus a right to possession of the 448 | Corresponding Source of the work from the predecessor in interest, if 449 | the predecessor has it or can get it with reasonable efforts. 450 | 451 | You may not impose any further restrictions on the exercise of the 452 | rights granted or affirmed under this License. For example, you may 453 | not impose a license fee, royalty, or other charge for exercise of 454 | rights granted under this License, and you may not initiate litigation 455 | (including a cross-claim or counterclaim in a lawsuit) alleging that 456 | any patent claim is infringed by making, using, selling, offering for 457 | sale, or importing the Program or any portion of it. 458 | 459 | 11. Patents. 460 | 461 | A "contributor" is a copyright holder who authorizes use under this 462 | License of the Program or a work on which the Program is based. 
The 463 | work thus licensed is called the contributor's "contributor version". 464 | 465 | A contributor's "essential patent claims" are all patent claims 466 | owned or controlled by the contributor, whether already acquired or 467 | hereafter acquired, that would be infringed by some manner, permitted 468 | by this License, of making, using, or selling its contributor version, 469 | but do not include claims that would be infringed only as a 470 | consequence of further modification of the contributor version. For 471 | purposes of this definition, "control" includes the right to grant 472 | patent sublicenses in a manner consistent with the requirements of 473 | this License. 474 | 475 | Each contributor grants you a non-exclusive, worldwide, royalty-free 476 | patent license under the contributor's essential patent claims, to 477 | make, use, sell, offer for sale, import and otherwise run, modify and 478 | propagate the contents of its contributor version. 479 | 480 | In the following three paragraphs, a "patent license" is any express 481 | agreement or commitment, however denominated, not to enforce a patent 482 | (such as an express permission to practice a patent or covenant not to 483 | sue for patent infringement). To "grant" such a patent license to a 484 | party means to make such an agreement or commitment not to enforce a 485 | patent against the party. 486 | 487 | If you convey a covered work, knowingly relying on a patent license, 488 | and the Corresponding Source of the work is not available for anyone 489 | to copy, free of charge and under the terms of this License, through a 490 | publicly available network server or other readily accessible means, 491 | then you must either (1) cause the Corresponding Source to be so 492 | available, or (2) arrange to deprive yourself of the benefit of the 493 | patent license for this particular work, or (3) arrange, in a manner 494 | consistent with the requirements of this License, to extend the patent 495 | license to downstream recipients. "Knowingly relying" means you have 496 | actual knowledge that, but for the patent license, your conveying the 497 | covered work in a country, or your recipient's use of the covered work 498 | in a country, would infringe one or more identifiable patents in that 499 | country that you have reason to believe are valid. 500 | 501 | If, pursuant to or in connection with a single transaction or 502 | arrangement, you convey, or propagate by procuring conveyance of, a 503 | covered work, and grant a patent license to some of the parties 504 | receiving the covered work authorizing them to use, propagate, modify 505 | or convey a specific copy of the covered work, then the patent license 506 | you grant is automatically extended to all recipients of the covered 507 | work and works based on it. 508 | 509 | A patent license is "discriminatory" if it does not include within 510 | the scope of its coverage, prohibits the exercise of, or is 511 | conditioned on the non-exercise of one or more of the rights that are 512 | specifically granted under this License. 
You may not convey a covered 513 | work if you are a party to an arrangement with a third party that is 514 | in the business of distributing software, under which you make payment 515 | to the third party based on the extent of your activity of conveying 516 | the work, and under which the third party grants, to any of the 517 | parties who would receive the covered work from you, a discriminatory 518 | patent license (a) in connection with copies of the covered work 519 | conveyed by you (or copies made from those copies), or (b) primarily 520 | for and in connection with specific products or compilations that 521 | contain the covered work, unless you entered into that arrangement, 522 | or that patent license was granted, prior to 28 March 2007. 523 | 524 | Nothing in this License shall be construed as excluding or limiting 525 | any implied license or other defenses to infringement that may 526 | otherwise be available to you under applicable patent law. 527 | 528 | 12. No Surrender of Others' Freedom. 529 | 530 | If conditions are imposed on you (whether by court order, agreement or 531 | otherwise) that contradict the conditions of this License, they do not 532 | excuse you from the conditions of this License. If you cannot convey a 533 | covered work so as to satisfy simultaneously your obligations under this 534 | License and any other pertinent obligations, then as a consequence you may 535 | not convey it at all. For example, if you agree to terms that obligate you 536 | to collect a royalty for further conveying from those to whom you convey 537 | the Program, the only way you could satisfy both those terms and this 538 | License would be to refrain entirely from conveying the Program. 539 | 540 | 13. Remote Network Interaction; Use with the GNU General Public License. 541 | 542 | Notwithstanding any other provision of this License, if you modify the 543 | Program, your modified version must prominently offer all users 544 | interacting with it remotely through a computer network (if your version 545 | supports such interaction) an opportunity to receive the Corresponding 546 | Source of your version by providing access to the Corresponding Source 547 | from a network server at no charge, through some standard or customary 548 | means of facilitating copying of software. This Corresponding Source 549 | shall include the Corresponding Source for any work covered by version 3 550 | of the GNU General Public License that is incorporated pursuant to the 551 | following paragraph. 552 | 553 | Notwithstanding any other provision of this License, you have 554 | permission to link or combine any covered work with a work licensed 555 | under version 3 of the GNU General Public License into a single 556 | combined work, and to convey the resulting work. The terms of this 557 | License will continue to apply to the part which is the covered work, 558 | but the work with which it is combined will remain governed by version 559 | 3 of the GNU General Public License. 560 | 561 | 14. Revised Versions of this License. 562 | 563 | The Free Software Foundation may publish revised and/or new versions of 564 | the GNU Affero General Public License from time to time. Such new versions 565 | will be similar in spirit to the present version, but may differ in detail to 566 | address new problems or concerns. 567 | 568 | Each version is given a distinguishing version number. 
If the 569 | Program specifies that a certain numbered version of the GNU Affero General 570 | Public License "or any later version" applies to it, you have the 571 | option of following the terms and conditions either of that numbered 572 | version or of any later version published by the Free Software 573 | Foundation. If the Program does not specify a version number of the 574 | GNU Affero General Public License, you may choose any version ever published 575 | by the Free Software Foundation. 576 | 577 | If the Program specifies that a proxy can decide which future 578 | versions of the GNU Affero General Public License can be used, that proxy's 579 | public statement of acceptance of a version permanently authorizes you 580 | to choose that version for the Program. 581 | 582 | Later license versions may give you additional or different 583 | permissions. However, no additional obligations are imposed on any 584 | author or copyright holder as a result of your choosing to follow a 585 | later version. 586 | 587 | 15. Disclaimer of Warranty. 588 | 589 | THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY 590 | APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT 591 | HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY 592 | OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, 593 | THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR 594 | PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM 595 | IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF 596 | ALL NECESSARY SERVICING, REPAIR OR CORRECTION. 597 | 598 | 16. Limitation of Liability. 599 | 600 | IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING 601 | WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS 602 | THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY 603 | GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE 604 | USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF 605 | DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD 606 | PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), 607 | EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF 608 | SUCH DAMAGES. 609 | 610 | 17. Interpretation of Sections 15 and 16. 611 | 612 | If the disclaimer of warranty and limitation of liability provided 613 | above cannot be given local legal effect according to their terms, 614 | reviewing courts shall apply local law that most closely approximates 615 | an absolute waiver of all civil liability in connection with the 616 | Program, unless a warranty or assumption of liability accompanies a 617 | copy of the Program in return for a fee. 618 | 619 | END OF TERMS AND CONDITIONS 620 | 621 | How to Apply These Terms to Your New Programs 622 | 623 | If you develop a new program, and you want it to be of the greatest 624 | possible use to the public, the best way to achieve this is to make it 625 | free software which everyone can redistribute and change under these terms. 626 | 627 | To do so, attach the following notices to the program. It is safest 628 | to attach them to the start of each source file to most effectively 629 | state the exclusion of warranty; and each file should have at least 630 | the "copyright" line and a pointer to where the full notice is found. 
631 | 
632 |     <one line to give the program's name and a brief idea of what it does.>
633 |     Copyright (C) <year>  <name of author>
634 | 
635 |     This program is free software: you can redistribute it and/or modify
636 |     it under the terms of the GNU Affero General Public License as published
637 |     by the Free Software Foundation, either version 3 of the License, or
638 |     (at your option) any later version.
639 | 
640 |     This program is distributed in the hope that it will be useful,
641 |     but WITHOUT ANY WARRANTY; without even the implied warranty of
642 |     MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
643 |     GNU Affero General Public License for more details.
644 | 
645 |     You should have received a copy of the GNU Affero General Public License
646 |     along with this program.  If not, see <https://www.gnu.org/licenses/>.
647 | 
648 | Also add information on how to contact you by electronic and paper mail.
649 | 
650 |   If your software can interact with users remotely through a computer
651 | network, you should also make sure that it provides a way for users to
652 | get its source.  For example, if your program is a web application, its
653 | interface could display a "Source" link that leads users to an archive
654 | of the code.  There are many ways you could offer source, and different
655 | solutions will be better for different programs; see section 13 for the
656 | specific requirements.
657 | 
658 |   You should also get your employer (if you work as a programmer) or school,
659 | if any, to sign a "copyright disclaimer" for the program, if necessary.
660 | For more information on this, and how to apply and follow the GNU AGPL, see
661 | <https://www.gnu.org/licenses/>.
662 | 
--------------------------------------------------------------------------------
/Makefile:
--------------------------------------------------------------------------------
1 | .PHONY: Makefile
2 | help:
3 | 	@echo "Read the Makefile plz"
4 | 
5 | autoformat/black:
6 | 	black plugin_store/ tests/
7 | 
8 | autoformat/isort:
9 | 	isort plugin_store/ tests/
10 | 
11 | autoformat: autoformat/black autoformat/isort
12 | 
13 | lint/flake8:
14 | 	flake8 plugin_store/ tests/
15 | 
16 | lint/isort:
17 | 	isort --check --diff plugin_store/ tests/
18 | 
19 | lint/black:
20 | 	black --check --diff plugin_store/ tests/
21 | 
22 | lint/mypy:
23 | 	PYTHONPATH=./plugin_store mypy plugin_store/ tests/
24 | 
25 | lint: lint/black lint/isort lint/flake8 lint/mypy
26 | 
27 | migrations/apply:
28 | 	alembic upgrade head
29 | 
30 | migrations/autogenerate:
31 | 	alembic revision --autogenerate
32 | 
33 | migrations/create:
34 | 	alembic revision
35 | 
36 | dc/build:
37 | 	docker compose -f docker-compose.local.yml build
38 | 
39 | dc/%:
40 | 	docker compose -f docker-compose.local.yml run -w /app plugin_store make $*
41 | 
42 | deps/lock:
43 | 	poetry lock --no-update
44 | 
45 | deps/upgrade:
46 | 	poetry lock
47 | 
48 | test:
49 | 	SQLALCHEMY_WARN_20=1 pytest ./tests
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Plugin-Store
2 | 
3 | This project powers the Decky plugin store at https://plugins.deckbrew.xyz/. It is built with FastAPI and SQLAlchemy.
4 | 
5 | ## Contributing
6 | 
7 | ### Running in docker
8 | 
9 | Since the standard `docker-compose.yml` file is used for deployment, there is a separate `docker-compose.local.yml`
10 | file for local development. Run `docker-compose -f docker-compose.local.yml up` to bring up the project. You can use
11 | any `docker-compose` command normally as long as you add the `-f docker-compose.local.yml` argument. 
12 | 
13 | ### Using Makefile
14 | 
15 | There is a handy `Makefile` in the root directory of the project. It is not used in the conventional way
16 | (to build the code or install built software); it just provides convenient aliases for common commands. Here is a list of them:
17 | 
18 | - `autoformat` - runs autoformatting on the whole Python codebase.
19 | - `autoformat/black` - runs only the `black` command for autoformatting, which unifies code style.
20 | - `autoformat/isort` - runs only the `isort` command for autoformatting, which reorders imports.
21 | - `lint` - runs lint checks on the whole project.
22 | - `lint/black` - runs only `black` in check mode. After running black autoformatting, this check should pass.
23 | - `lint/isort` - runs only `isort` in check mode. After running isort autoformatting, this check should pass.
24 | - `lint/flake8` - runs only the `flake8` linter. This does not have its own autoformat command, but it should be more
25 |   or less covered by `black` autoformatting. It's here to make sure black does not leave any gaps in PEP 8 compliance.
26 | - `lint/mypy` - runs only the `mypy` linter. This does not have its own autoformat command either, and errors need to be
27 |   fixed manually.
28 | - `deps/lock` - recreates the lockfile without changing any package versions when possible. Needs to be executed after
29 |   changing project dependencies.
30 | - `deps/upgrade` - recreates the lockfile while trying to upgrade all packages to the newest compatible versions.
31 | - `test` - runs project tests.
32 | 
33 | All commands above can be prefixed with `dc/` to run them directly in a docker container. There are also additional,
34 | docker-only commands:
35 | - `dc/build` - rebuilds docker images. Needs to be run after the `Dockerfile` or project dependencies change.
36 | 
37 | ### Updating dependencies
38 | 
39 | This project uses Poetry to manage the Python packages it requires. Poetry also keeps a lock file to make
40 | sure that every environment running the same version of the project is as consistent as possible.
41 | 
42 | If you want to add any dependency, preferably add it manually in the `pyproject.toml` file. Please keep dependencies
43 | alphabetically sorted to avoid merge conflicts.
44 | 
45 | After adding or updating a single dependency inside `pyproject.toml`, you need to update the lockfile. Run
46 | `make deps/lock` to make as few changes to the lockfile as possible. If your intention is to refresh every single
47 | dependency, `make deps/upgrade` is the better option, but you probably shouldn't use it unless you really
48 | need to.
49 | 
50 | ### Running tests
51 | 
52 | Simply run `make test` to run the tests on your local machine. If you are using the development docker-compose file, use
53 | `make dc/test` instead.
54 | 
55 | ### Writing tests
56 | 
57 | This project uses `pytest` for running tests. Get familiar with it and its fixture system first. On top of `pytest`, async
58 | tests are supported via the `pytest.mark.asyncio` decorator provided by `pytest-asyncio`. As the project uses `fastapi` with
59 | async views as well as an async DB, most of the tests need to be async.
60 | 
61 | All tests and their configuration live in the `tests` directory. You can find some useful fixtures in the `conftest.py`
62 | file. If you create any more fixtures, please place them in this file unless you have a good reason not to. A minimal example of an async test is sketched below. 
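
The example below is only a sketch: it assumes a hypothetical `client` fixture (an `httpx.AsyncClient` wired up to the FastAPI app), so check the real fixture names in `conftest.py` before copying it.

```python
import pytest


@pytest.mark.asyncio  # async test support provided by pytest-asyncio
async def test_plugins_list_returns_ok(client):
    # "client" is assumed to be an httpx.AsyncClient fixture from conftest.py;
    # GET /plugins is the public plugin listing endpoint of this service.
    response = await client.get("/plugins")
    assert response.status_code == 200
```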
63 | 
64 | There are two automatically applied fixtures: one patches over any external API calls so they aren't actually executed
65 | when running tests, and the second overrides constants specifying external resources. If you add any new external service
66 | dependencies to the project, please update those fixtures to patch them over as well.
--------------------------------------------------------------------------------
/alembic.ini:
--------------------------------------------------------------------------------
1 | # A generic, single database configuration.
2 | 
3 | [alembic]
4 | # path to migration scripts
5 | script_location = /app/plugin_store/database/migrations
6 | 
7 | # template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
8 | # Uncomment the line below if you want the files to be prepended with date and time
9 | file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
10 | 
11 | # sys.path path, will be prepended to sys.path if present.
12 | # defaults to the current working directory.
13 | prepend_sys_path = plugin_store
14 | 
15 | # timezone to use when rendering the date within the migration file
16 | # as well as the filename.
17 | # If specified, requires the python-dateutil library that can be
18 | # installed by adding `alembic[tz]` to the pip requirements
19 | # string value is passed to dateutil.tz.gettz()
20 | # leave blank for localtime
21 | # timezone =
22 | 
23 | # max length of characters to apply to the
24 | # "slug" field
25 | # truncate_slug_length = 40
26 | 
27 | # set to 'true' to run the environment during
28 | # the 'revision' command, regardless of autogenerate
29 | # revision_environment = false
30 | 
31 | # set to 'true' to allow .pyc and .pyo files without
32 | # a source .py file to be detected as revisions in the
33 | # versions/ directory
34 | # sourceless = false
35 | 
36 | # version location specification; This defaults
37 | # to /app/plugin_store/database/migrations/versions.  When using multiple version
38 | # directories, initial revisions must be specified with --version-path.
39 | # The path separator used here should be the separator specified by "version_path_separator" below.
40 | # version_locations = %(here)s/bar:%(here)s/bat:/app/plugin_store/database/migrations/versions
41 | 
42 | # version path separator; As mentioned above, this is the character used to split
43 | # version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
44 | # If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
45 | # Valid values for version_path_separator are:
46 | #
47 | # version_path_separator = :
48 | # version_path_separator = ;
49 | # version_path_separator = space
50 | version_path_separator = os  # Use os.pathsep. Default configuration used for new projects.
51 | 
52 | # the output encoding used when revision files
53 | # are written from script.py.mako
54 | # output_encoding = utf-8
55 | 
56 | [post_write_hooks]
57 | # post_write_hooks defines scripts or Python functions that are run
58 | # on newly generated revision scripts. 
See the documentation for further
59 | # detail and examples
60 | 
61 | # format using "black" and "isort" - use the console_scripts runner against their entrypoints
62 | hooks = black,isort
63 | 
64 | black.type = console_scripts
65 | black.entrypoint = black
66 | 
67 | isort.type = console_scripts
68 | isort.entrypoint = isort
69 | 
70 | # Logging configuration
71 | [loggers]
72 | keys = root,sqlalchemy,alembic
73 | 
74 | [handlers]
75 | keys = console
76 | 
77 | [formatters]
78 | keys = generic
79 | 
80 | [logger_root]
81 | level = WARN
82 | handlers = console
83 | qualname =
84 | 
85 | [logger_sqlalchemy]
86 | level = WARN
87 | handlers =
88 | qualname = sqlalchemy.engine
89 | 
90 | [logger_alembic]
91 | level = INFO
92 | handlers =
93 | qualname = alembic
94 | 
95 | [handler_console]
96 | class = StreamHandler
97 | args = (sys.stderr,)
98 | level = NOTSET
99 | formatter = generic
100 | 
101 | [formatter_generic]
102 | format = %(levelname)-5.5s [%(name)s] %(message)s
103 | datefmt = %H:%M:%S
104 | 
--------------------------------------------------------------------------------
/docker-compose.local.yml:
--------------------------------------------------------------------------------
1 | services:
2 |   plugin_store:
3 |     build: .
4 |     container_name: plugin_store
5 |     environment:
6 |       - DB_URL=postgresql+asyncpg://decky:decky@postgres_db/decky
7 |       - ANNOUNCEMENT_WEBHOOK
8 |       - SUBMIT_AUTH_KEY=deadbeef
9 |       - B2_APP_KEY_ID
10 |       - B2_APP_KEY
11 |       - B2_BUCKET_ID
12 |     volumes:
13 |       - .:/app
14 |     restart: unless-stopped
15 |     ports:
16 |       - "5566:5566"
17 |     command: uvicorn main:app --reload --host 0.0.0.0 --port 5566
18 |     depends_on:
19 |       postgres_db:
20 |         condition: service_healthy
21 | 
22 |   redis_db:
23 |     image: redis:latest
24 |     restart: unless-stopped
25 |     environment:
26 |       - REDIS_PORT=6379
27 | 
28 |   postgres_db:
29 |     image: postgres:16  # Postgres databases are only compatible with their same major version
30 |     restart: unless-stopped
31 |     environment:
32 |       - POSTGRES_DB=decky
33 |       - POSTGRES_USER=decky
34 |       - POSTGRES_PASSWORD=decky
35 |     volumes:
36 |       - store-postgres:/var/lib/postgresql/data
37 |     healthcheck:
38 |       test: pg_isready -U decky -d decky
39 |       interval: 10s
40 |       timeout: 3s
41 |       retries: 3
42 | 
43 | volumes:
44 |   store-postgres:
--------------------------------------------------------------------------------
/docker-compose.yml:
--------------------------------------------------------------------------------
1 | services:
2 |   plugin_store:
3 |     build: . 
4 | container_name: "${DEPLOYMENT_NAME}" 5 | environment: 6 | - DB_URL 7 | - ANNOUNCEMENT_WEBHOOK 8 | - SUBMIT_AUTH_KEY 9 | - B2_APP_KEY_ID 10 | - B2_APP_KEY 11 | - B2_BUCKET_ID 12 | networks: 13 | - plugins-network 14 | - default 15 | restart: unless-stopped 16 | 17 | redis_db: 18 | image: redis:latest 19 | restart: unless-stopped 20 | environment: 21 | - REDIS_PORT=6379 22 | 23 | postgres_db: 24 | image: postgres:16 # Postgres databases are only compatible with their same major version 25 | restart: unless-stopped 26 | environment: 27 | - POSTGRES_DB=decky 28 | - POSTGRES_USER=decky 29 | - POSTGRES_PASSWORD 30 | volumes: 31 | - ${DB_PATH}:/var/lib/postgresql/data 32 | 33 | networks: 34 | plugins-network: 35 | external: true 36 | driver: bridge 37 | name: caddy__bridge 38 | -------------------------------------------------------------------------------- /plugin_store/api/__init__.py: -------------------------------------------------------------------------------- 1 | from functools import reduce 2 | from operator import add 3 | from os import getenv 4 | from typing import Annotated, Optional 5 | 6 | import fastapi 7 | from fastapi import Depends, FastAPI, HTTPException, Request 8 | from fastapi.middleware.cors import CORSMiddleware 9 | from fastapi.responses import HTMLResponse, JSONResponse, Response 10 | from fastapi.security import APIKeyHeader 11 | from fastapi.utils import is_body_allowed_for_status_code 12 | from limits import parse, storage, strategies 13 | 14 | from cdn import upload_image, upload_version 15 | from constants import SortDirection, SortType, TEMPLATES_DIR 16 | from database.database import database, Database 17 | from database.models import Announcement 18 | from discord import post_announcement 19 | 20 | from .models import announcements as api_announcements 21 | from .models import delete as api_delete 22 | from .models import list as api_list 23 | from .models import submit as api_submit 24 | from .models import update as api_update 25 | from .utils import FormBody, getIpHash, UUID7 26 | 27 | app = FastAPI() 28 | 29 | INDEX_PAGE = (TEMPLATES_DIR / "plugin_browser.html").read_text() 30 | 31 | cors_origins = [ 32 | "https://steamloopback.host", 33 | ] 34 | 35 | app.add_middleware( 36 | CORSMiddleware, 37 | allow_origins=cors_origins, 38 | allow_credentials=True, 39 | allow_methods=["*"], 40 | allow_headers=["*"], 41 | expose_headers=["*"], 42 | ) 43 | 44 | rate_limit_storage = storage.RedisStorage("redis://redis_db:6379") 45 | increment_limit_per_plugin = parse("2/day") 46 | rate_limit = strategies.FixedWindowRateLimiter(rate_limit_storage) 47 | 48 | 49 | @app.exception_handler(HTTPException) 50 | async def http_exception_handler(request: "Request", exc: "HTTPException") -> "Response": 51 | headers = getattr(exc, "headers", None) 52 | if not is_body_allowed_for_status_code(exc.status_code): 53 | return Response(status_code=exc.status_code, headers=headers) 54 | return JSONResponse( 55 | {"detail": exc.detail, "message": exc.detail}, 56 | status_code=exc.status_code, 57 | headers=headers, 58 | ) 59 | 60 | 61 | async def auth_token(authorization: str = Depends(APIKeyHeader(name="Authorization"))) -> None: 62 | if authorization != getenv("SUBMIT_AUTH_KEY"): 63 | raise fastapi.HTTPException(status_code=fastapi.status.HTTP_403_FORBIDDEN, detail="INVALID AUTH KEY") 64 | 65 | 66 | @app.get("/", response_class=HTMLResponse) 67 | async def index(): 68 | return INDEX_PAGE 69 | 70 | 71 | @app.get( 72 | "/v1/announcements", 73 | dependencies=[Depends(auth_token)], 74 | 
response_model=list[api_announcements.AnnouncementResponse], 75 | ) 76 | async def list_announcements( 77 | db: Annotated["Database", Depends(database)], 78 | ): 79 | return await db.list_announcements(active=False) 80 | 81 | 82 | @app.post( 83 | "/v1/announcements", 84 | dependencies=[Depends(auth_token)], 85 | response_model=api_announcements.AnnouncementResponse, 86 | status_code=fastapi.status.HTTP_201_CREATED, 87 | ) 88 | async def create_announcement( 89 | db: Annotated["Database", Depends(database)], 90 | announcement: api_announcements.AnnouncementRequest, 91 | ): 92 | return await db.create_announcement(title=announcement.title, text=announcement.text, active=announcement.active) 93 | 94 | 95 | @app.get("/v1/announcements/-/current", response_model=list[api_announcements.CurrentAnnouncementResponse]) 96 | async def list_current_announcements( 97 | db: Annotated["Database", Depends(database)], 98 | ): 99 | return await db.list_announcements() 100 | 101 | 102 | @app.get( 103 | "/v1/announcements/{announcement_id}", 104 | dependencies=[Depends(auth_token)], 105 | response_model=api_announcements.AnnouncementResponse, 106 | ) 107 | async def get_announcement( 108 | db: Annotated["Database", Depends(database)], 109 | announcement_id: UUID7, 110 | ): 111 | return await db.get_announcement(announcement_id) 112 | 113 | 114 | @app.put( 115 | "/v1/announcements/{announcement_id}", 116 | dependencies=[Depends(auth_token)], 117 | response_model=api_announcements.AnnouncementResponse, 118 | ) 119 | async def update_announcement( 120 | db: Annotated["Database", Depends(database)], 121 | existing_announcement: Annotated["Announcement", Depends(get_announcement)], 122 | new_announcement: api_announcements.AnnouncementRequest, 123 | ): 124 | return await db.update_announcement( 125 | existing_announcement, 126 | title=new_announcement.title, 127 | text=new_announcement.text, 128 | active=new_announcement.active, 129 | ) 130 | 131 | 132 | @app.delete( 133 | "/v1/announcements/{announcement_id}", 134 | dependencies=[Depends(auth_token)], 135 | status_code=fastapi.status.HTTP_204_NO_CONTENT, 136 | ) 137 | async def delete_announcement( 138 | db: Annotated["Database", Depends(database)], 139 | announcement_id: UUID7, 140 | ): 141 | await db.delete_announcement(announcement_id) 142 | 143 | 144 | @app.get("/plugins", response_model=list[api_list.ListPluginResponse]) 145 | async def plugins_list( 146 | query: str = "", 147 | tags: list[str] = fastapi.Query(default=[]), 148 | hidden: bool = False, 149 | sort_by: Optional[SortType] = None, 150 | sort_direction: SortDirection = SortDirection.ASC, 151 | db: "Database" = Depends(database), 152 | ): 153 | tags = list(filter(None, reduce(add, (el.split(",") for el in tags), []))) 154 | plugins = await db.search(db.session, query, tags, hidden, sort_by, sort_direction) 155 | return plugins 156 | 157 | 158 | @app.post("/plugins/{plugin_name}/versions/{version_name}/increment", responses={404: {}, 429: {}}) 159 | async def increment_plugin_install_count( 160 | request: Request, 161 | plugin_name: str, 162 | version_name: str, 163 | isUpdate: bool = True, 164 | db: "Database" = Depends(database), 165 | ): 166 | ip = getIpHash(request) 167 | if not rate_limit.test(increment_limit_per_plugin, plugin_name, ip): 168 | return Response(status_code=fastapi.status.HTTP_429_TOO_MANY_REQUESTS) 169 | success = await db.increment_installs(db.session, plugin_name, version_name, isUpdate) 170 | if success: 171 | rate_limit.hit(increment_limit_per_plugin, plugin_name, ip) 172 | 
return Response(status_code=fastapi.status.HTTP_200_OK) 173 | else: 174 | return Response(status_code=fastapi.status.HTTP_404_NOT_FOUND) 175 | 176 | 177 | @app.post("/__auth", response_model=str, dependencies=[Depends(auth_token)]) 178 | async def auth_check(): 179 | return "Success" 180 | 181 | 182 | @app.post( 183 | "/__submit", 184 | dependencies=[Depends(auth_token)], 185 | response_model=api_submit.SubmitProductResponse, 186 | status_code=fastapi.status.HTTP_201_CREATED, 187 | ) 188 | async def submit_release( 189 | data: "api_submit.SubmitProductRequest" = FormBody(api_submit.SubmitProductRequest), 190 | db: "Database" = Depends(database), 191 | ): 192 | plugin = await db.get_plugin_by_name(db.session, data.name) 193 | 194 | if plugin and data.force: 195 | await db.delete_plugin(db.session, plugin.id) 196 | plugin = None 197 | 198 | image_path = await upload_image(data.name, data.image) 199 | 200 | if plugin is not None: 201 | if data.version_name in [i.name for i in plugin.versions]: 202 | raise HTTPException(status_code=400, detail="Version already exists") 203 | plugin = await db.update_artifact( 204 | db.session, 205 | plugin, 206 | author=data.author, 207 | description=data.description, 208 | image_path=image_path, 209 | tags=list(filter(None, reduce(add, (el.split(",") for el in data.tags), []))), 210 | ) 211 | else: 212 | plugin = await db.insert_artifact( 213 | session=db.session, 214 | name=data.name, 215 | author=data.author, 216 | description=data.description, 217 | image_path=image_path, 218 | tags=list(filter(None, reduce(add, (el.split(",") for el in data.tags), []))), 219 | ) 220 | 221 | version = await db.insert_version(db.session, plugin.id, name=data.version_name, **await upload_version(data.file)) 222 | 223 | await db.session.refresh(plugin) 224 | await post_announcement(plugin, version) 225 | return plugin 226 | 227 | 228 | @app.post("/__update", dependencies=[Depends(auth_token)], response_model=api_update.UpdatePluginResponse) 229 | async def update_plugin(data: "api_update.UpdatePluginRequest", db: "Database" = Depends(database)): 230 | old_plugin = await db.get_plugin_by_id(db.session, data.id) 231 | version_dates = {version.name: version.created for version in old_plugin.versions} 232 | await db.delete_plugin(db.session, data.id) 233 | new_plugin = await db.insert_artifact( 234 | db.session, 235 | image_path=old_plugin._image_path, 236 | **data.dict(exclude={"versions"}), 237 | ) 238 | 239 | for version in reversed(data.versions): 240 | await db.insert_version( 241 | db.session, 242 | artifact_id=new_plugin.id, 243 | created=version_dates.get(version.name), 244 | **version.dict(), 245 | ) 246 | await db.session.refresh(new_plugin) 247 | return new_plugin 248 | 249 | 250 | @app.post("/__delete", dependencies=[Depends(auth_token)], status_code=fastapi.status.HTTP_204_NO_CONTENT) 251 | async def delete_plugin(data: "api_delete.DeletePluginRequest", db: "Database" = Depends(database)): 252 | await db.delete_plugin(db.session, data.id) 253 | -------------------------------------------------------------------------------- /plugin_store/api/models/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SteamDeckHomebrew/decky-plugin-store/41c9b26f548dcc3d7a5f65057a3b318c936933c4/plugin_store/api/models/__init__.py -------------------------------------------------------------------------------- /plugin_store/api/models/announcements.py: 
-------------------------------------------------------------------------------- 1 | from datetime import datetime 2 | 3 | from ..utils import UUID7 4 | from .base import BaseModel 5 | 6 | 7 | class CurrentAnnouncementResponse(BaseModel): 8 | class Config: 9 | orm_mode = True 10 | 11 | id: UUID7 12 | 13 | title: str 14 | text: str 15 | 16 | created: datetime 17 | updated: datetime 18 | 19 | 20 | class AnnouncementResponse(BaseModel): 21 | class Config: 22 | orm_mode = True 23 | 24 | id: UUID7 25 | 26 | title: str 27 | text: str 28 | 29 | active: bool 30 | 31 | created: datetime 32 | updated: datetime 33 | 34 | 35 | class AnnouncementRequest(BaseModel): 36 | title: str 37 | text: str 38 | 39 | active: bool = True 40 | -------------------------------------------------------------------------------- /plugin_store/api/models/base.py: -------------------------------------------------------------------------------- 1 | from datetime import datetime 2 | from typing import Optional, TYPE_CHECKING 3 | from zoneinfo import ZoneInfo 4 | 5 | from pydantic import BaseModel as PydanticBaseModel 6 | from pydantic import Field 7 | from pydantic.utils import ROOT_KEY 8 | 9 | if TYPE_CHECKING: 10 | from typing import Any 11 | 12 | from pydantic.typing import DictStrAny 13 | 14 | UTC = ZoneInfo("UTC") 15 | 16 | 17 | def datetime_iso_8601(dt: datetime) -> str: 18 | if dt.tzinfo and dt.tzinfo == UTC: 19 | return f"{dt:%Y-%m-%dT%H:%M:%SZ}" 20 | else: 21 | return dt.isoformat() 22 | 23 | 24 | class BaseModel(PydanticBaseModel): 25 | class Config: 26 | json_encoders = { 27 | datetime: datetime_iso_8601, 28 | } 29 | 30 | 31 | class PluginVersion(BaseModel): 32 | name: str 33 | hash: str 34 | 35 | 36 | class BasePlugin(BaseModel): 37 | id: int 38 | name: str 39 | author: str 40 | description: str 41 | tags: list[str] 42 | versions: list[PluginVersion] 43 | visible: bool 44 | 45 | 46 | class BasePluginRequest(BasePlugin): 47 | pass 48 | 49 | 50 | class PluginTagResponse(BaseModel): 51 | class Config: 52 | orm_mode = True 53 | allow_population_by_field_name = True 54 | 55 | __root__: str = Field(alias="tag") 56 | 57 | @classmethod 58 | def _enforce_dict_if_root(cls, obj: "Any") -> "Any": 59 | if cls.__custom_root_type__ and cls.__fields__[ROOT_KEY].alt_alias: 60 | return dict(cls._decompose_class(obj)) 61 | 62 | return super()._enforce_dict_if_root(obj) 63 | 64 | def dict(self, **kwargs) -> "DictStrAny": 65 | if self.__custom_root_type__: 66 | kwargs["by_alias"] = False 67 | data = super().dict(**kwargs) 68 | return data[ROOT_KEY] 69 | return super().dict(**kwargs) 70 | 71 | 72 | class PluginVersionResponse(PluginVersion): 73 | class Config: 74 | orm_mode = True 75 | 76 | created: datetime 77 | downloads: int 78 | updates: int 79 | 80 | 81 | class BasePluginResponse(BasePlugin): 82 | class Config: 83 | orm_mode = True 84 | 85 | tags: list[PluginTagResponse] # type: ignore[assignment] 86 | versions: list[PluginVersionResponse] # type: ignore[assignment] 87 | 88 | image_url: str 89 | downloads: Optional[int] 90 | updates: Optional[int] 91 | created: Optional[datetime] 92 | updated: Optional[datetime] 93 | -------------------------------------------------------------------------------- /plugin_store/api/models/delete.py: -------------------------------------------------------------------------------- 1 | from pydantic import BaseModel 2 | 3 | 4 | class DeletePluginRequest(BaseModel): 5 | id: int 6 | -------------------------------------------------------------------------------- /plugin_store/api/models/list.py: 
-------------------------------------------------------------------------------- 1 | from api.models.base import BasePluginResponse 2 | 3 | 4 | class ListPluginResponse(BasePluginResponse): 5 | pass 6 | -------------------------------------------------------------------------------- /plugin_store/api/models/submit.py: -------------------------------------------------------------------------------- 1 | from fastapi import UploadFile 2 | from pydantic import BaseModel, HttpUrl 3 | 4 | from api.models.base import BasePluginResponse 5 | 6 | 7 | class SubmitProductRequest(BaseModel): 8 | name: str 9 | author: str 10 | description: str 11 | tags: list[str] # Comma separated values 12 | version_name: str 13 | image: HttpUrl 14 | file: UploadFile 15 | force: bool = False 16 | 17 | 18 | class SubmitProductResponse(BasePluginResponse): 19 | pass 20 | -------------------------------------------------------------------------------- /plugin_store/api/models/update.py: -------------------------------------------------------------------------------- 1 | from api.models.base import BasePluginRequest, BasePluginResponse 2 | 3 | 4 | class UpdatePluginRequest(BasePluginRequest): 5 | pass 6 | 7 | 8 | class UpdatePluginResponse(BasePluginResponse): 9 | pass 10 | -------------------------------------------------------------------------------- /plugin_store/api/utils.py: -------------------------------------------------------------------------------- 1 | import inspect 2 | from typing import Any 3 | 4 | from fastapi import File, Form, Request, UploadFile 5 | from fastapi.params import Depends 6 | from pydantic import UUID1 7 | 8 | 9 | def getIpHash(request: Request): 10 | ip = request.headers.get("cf-connecting-ip") 11 | if ip is None: 12 | ip = request.client.host # type: ignore [union-attr] 13 | return hash(ip) 14 | 15 | 16 | def form_body(cls): 17 | # noinspection PyProtectedMember 18 | cls.__signature__ = cls.__signature__.replace( 19 | parameters=[ 20 | arg.replace( 21 | default=(File if arg.annotation == UploadFile else Form)( 22 | **{} if arg.default is inspect._empty else {"default": arg.default}, 23 | ), 24 | ) 25 | for arg in cls.__signature__.parameters.values() 26 | ] 27 | ) 28 | return cls 29 | 30 | 31 | class FormBodyCls(Depends): 32 | def __init__(self, model: Any = None, *, use_cache: bool = True): 33 | super().__init__(form_body(model) if model else None, use_cache=use_cache) 34 | 35 | 36 | # noinspection PyPep8Naming 37 | def FormBody(model: Any = None, *, use_cache: bool = True) -> Any: 38 | return FormBodyCls(model=model, use_cache=use_cache) 39 | 40 | 41 | class UUID7(UUID1): 42 | _required_version = 7 43 | -------------------------------------------------------------------------------- /plugin_store/cdn.py: -------------------------------------------------------------------------------- 1 | from asyncio import sleep 2 | from base64 import b64encode 3 | from hashlib import sha1, sha256 4 | from logging import getLogger 5 | from os import getenv 6 | from typing import TYPE_CHECKING 7 | from urllib.parse import quote 8 | 9 | from aiohttp import ClientSession 10 | 11 | from constants import CDN_ERROR_RETRY_TIMES 12 | 13 | if TYPE_CHECKING: 14 | from fastapi import UploadFile 15 | 16 | 17 | IMAGE_TYPES = { 18 | "image/png": ".png", 19 | "image/jpeg": ".jpg", 20 | "image/webp": ".webp", 21 | "image/avif": ".avif", 22 | } 23 | 24 | 25 | class B2UploadError(Exception): 26 | pass 27 | 28 | 29 | def construct_image_path(plugin_name: str, file_hash: str, mime_type: str) -> str: 30 | return 
f"artifact_images/{quote(plugin_name)}-{file_hash}{IMAGE_TYPES[mime_type]}" 31 | 32 | 33 | async def _b2_upload(filename: str, binary: "bytes", mime_type: str = "b2/x-auto"): 34 | async with ClientSession(raise_for_status=True) as web: 35 | auth_str = f"{getenv('B2_APP_KEY_ID')}:{getenv('B2_APP_KEY')}".encode("utf-8") 36 | async with web.get( 37 | "https://api.backblazeb2.com/b2api/v2/b2_authorize_account", 38 | headers={"Authorization": f"Basic {b64encode(auth_str).decode('utf-8')}"}, 39 | ) as res: 40 | if not res.status == 200: 41 | getLogger().error(f"B2 LOGIN ERROR {await res.read()!r}") 42 | return 43 | res_data = await res.json() 44 | 45 | async with web.post( 46 | f"{res_data['apiUrl']}/b2api/v2/b2_get_upload_url", 47 | json={"bucketId": getenv("B2_BUCKET_ID")}, 48 | headers={"Authorization": res_data["authorizationToken"]}, 49 | ) as res_data: 50 | if not res_data.status == 200: 51 | res_data.raise_for_status() 52 | return print("B2 GET_UPLOAD_URL ERROR ", await res_data.read()) 53 | res_data = await res_data.json() 54 | 55 | res_data = await web.post( 56 | res_data["uploadUrl"], 57 | data=binary, 58 | headers={ 59 | "Authorization": res_data["authorizationToken"], 60 | "Content-Type": mime_type, 61 | "Content-Length": str(len(binary)), 62 | "X-Bz-Content-Sha1": sha1(binary).hexdigest(), 63 | "X-Bz-File-Name": filename, 64 | }, 65 | ) 66 | t = await res_data.text() 67 | if res_data.status == 200: # was `res.status`: `res` is the earlier (already consumed) auth response, not this upload 68 | return t 69 | raise B2UploadError(t) 70 | 71 | 72 | async def b2_upload(filename: str, binary: "bytes", mime_type: str = "b2/x-auto"): 73 | attempt = 1 74 | while True: 75 | try: 76 | return await _b2_upload(filename, binary, mime_type) 77 | except B2UploadError as e: 78 | getLogger().error( 79 | f"B2 Upload Failed: {e}. Retrying in {attempt * 5} seconds (Attempt: {attempt}/{CDN_ERROR_RETRY_TIMES})" 80 | ) 81 | await sleep(attempt * 5) 82 | attempt += 1 83 | if attempt == CDN_ERROR_RETRY_TIMES + 1: 84 | getLogger().error(f"Retried upload {CDN_ERROR_RETRY_TIMES} times.
Aborting...") 85 | return 86 | 87 | 88 | async def fetch_image(image_url: str) -> "tuple[bytes, str] | None": 89 | async with ClientSession() as web: 90 | async with web.get(image_url) as res: 91 | if res.status == 200 and (mime_type := res.headers.get("Content-Type")) in IMAGE_TYPES: 92 | return await res.read(), mime_type 93 | return None 94 | 95 | 96 | async def upload_image(plugin_name: str, image_url: str) -> "str | None": 97 | fetched = await fetch_image(image_url) 98 | if fetched is not None: 99 | binary, mime_type = fetched 100 | file_hash = sha256(binary).hexdigest() 101 | file_path = construct_image_path(plugin_name, file_hash, mime_type) 102 | await b2_upload(file_path, binary) 103 | return file_path 104 | return None 105 | 106 | 107 | async def upload_version(file: "UploadFile"): 108 | binary = await file.read() 109 | file_hash = sha256(binary).hexdigest() 110 | await b2_upload(f"versions/{file_hash}.zip", binary) 111 | return { 112 | "hash": file_hash, 113 | } 114 | -------------------------------------------------------------------------------- /plugin_store/constants.py: -------------------------------------------------------------------------------- 1 | from enum import Enum 2 | from pathlib import Path 3 | 4 | BASE_DIR = Path(__file__).expanduser().resolve().parent 5 | TEMPLATES_DIR = BASE_DIR / "templates" 6 | 7 | CDN_URL = "https://cdn.tzatzikiweeb.moe/file/steam-deck-homebrew/" 8 | CDN_ERROR_RETRY_TIMES = 5 9 | 10 | 11 | class SortDirection(Enum): 12 | DESC = "desc" 13 | ASC = "asc" 14 | 15 | 16 | class SortType(Enum): 17 | NAME = "name" 18 | DATE = "date" 19 | DOWNLOADS = "downloads" 20 | -------------------------------------------------------------------------------- /plugin_store/database/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SteamDeckHomebrew/decky-plugin-store/41c9b26f548dcc3d7a5f65057a3b318c936933c4/plugin_store/database/__init__.py -------------------------------------------------------------------------------- /plugin_store/database/database.py: -------------------------------------------------------------------------------- 1 | import logging 2 | from asyncio import Lock 3 | from datetime import datetime 4 | from os import getenv 5 | from typing import Optional, TYPE_CHECKING 6 | from uuid import UUID 7 | from zoneinfo import ZoneInfo 8 | 9 | from alembic import command 10 | from alembic.config import Config 11 | from asgiref.sync import sync_to_async 12 | from fastapi import Depends 13 | from sqlalchemy import asc, desc 14 | from sqlalchemy.exc import NoResultFound, SQLAlchemyError 15 | from sqlalchemy.ext.asyncio import async_sessionmaker, AsyncSession, create_async_engine 16 | from sqlalchemy.sql import delete, select, update 17 | 18 | from constants import SortDirection, SortType 19 | 20 | from .models.announcements import Announcement 21 | from .models.Artifact import Artifact, PluginTag, Tag 22 | from .models.Version import Version 23 | 24 | if TYPE_CHECKING: 25 | from typing import AsyncIterator, Iterable, Sequence 26 | 27 | logger = logging.getLogger() 28 | 29 | UTC = ZoneInfo("UTC") 30 | 31 | db_url = getenv("DB_URL") 32 | if not db_url: 33 | raise Exception("DB_URL not provided or invalid!") 34 | async_engine = create_async_engine( 35 | db_url, 36 | pool_pre_ping=True, 37 | # echo=settings.ECHO_SQL, 38 | ) 39 | AsyncSessionLocal = async_sessionmaker(bind=async_engine, autoflush=False, future=True, expire_on_commit=False) 40 | 41 | db_lock = Lock() 42 | 43 | 44 | 
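# NOTE: editor's illustration, not part of the original module — a sketch of how the
# objects above are consumed. FastAPI resolves the dependency chain once per request:
# get_session() below yields a fresh AsyncSession from AsyncSessionLocal, database()
# wraps it in a Database facade that shares the module-level db_lock, and endpoints
# then declare the facade as a dependency, mirroring plugins_list in api/__init__.py:
#
#     @app.get("/plugins")
#     async def plugins_list(db: "Database" = Depends(database)):
#         return await db.search(db.session, query, tags, hidden, sort_by, sort_direction)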
async def get_session() -> "AsyncIterator[AsyncSession]": 45 | try: 46 | yield AsyncSessionLocal() 47 | except SQLAlchemyError as e: 48 | logger.exception(e) 49 | 50 | 51 | async def database(session: "AsyncSession" = Depends(get_session)) -> "AsyncIterator[Database]": 52 | db = Database(session, db_lock) 53 | try: 54 | yield db 55 | except Exception: 56 | await session.rollback() 57 | raise 58 | else: 59 | await session.close() 60 | 61 | 62 | class Database: 63 | def __init__(self, session, lock): 64 | self.session = session 65 | self.lock = lock 66 | 67 | @sync_to_async() 68 | def init(self): 69 | alembic_cfg = Config("/alembic.ini") 70 | command.upgrade(alembic_cfg, "head") 71 | 72 | async def list_announcements(self, active: bool = True): 73 | statement = select(Announcement) 74 | if active: 75 | statement = statement.where(Announcement.active.is_(True)) 76 | statement = statement.order_by(desc(Announcement.created)) 77 | result = (await self.session.execute(statement)).scalars().all() 78 | return result or [] 79 | 80 | async def get_announcement(self, announcement_id: UUID) -> Announcement | None: 81 | statement = select(Announcement).where(Announcement.id == announcement_id) 82 | try: 83 | return (await self.session.execute(statement)).scalars().first() 84 | except NoResultFound: 85 | return None 86 | 87 | async def create_announcement(self, title: str, text: str, active: bool) -> Announcement | None: 88 | nested = await self.session.begin_nested() 89 | async with self.lock: 90 | announcement = Announcement( 91 | title=title, 92 | text=text, 93 | active=active, 94 | ) 95 | try: 96 | self.session.add(announcement) 97 | except Exception: 98 | await nested.rollback() 99 | raise 100 | await self.session.commit() 101 | return await self.get_announcement(announcement.id) 102 | 103 | async def update_announcement(self, announcement: Announcement, **kwargs) -> Announcement | None: 104 | nested = await self.session.begin_nested() 105 | async with self.lock: 106 | if "title" in kwargs: 107 | announcement.title = kwargs["title"] 108 | if "text" in kwargs: 109 | announcement.text = kwargs["text"] 110 | if "active" in kwargs: 111 | announcement.active = kwargs["active"] 112 | try: 113 | self.session.add(announcement) 114 | except Exception: 115 | await nested.rollback() 116 | raise 117 | await self.session.commit() 118 | return await self.get_announcement(announcement.id) 119 | 120 | async def delete_announcement(self, announcement_id: UUID) -> None: 121 | await self.session.execute(delete(Announcement).where(Announcement.id == announcement_id)) 122 | await self.session.commit() 123 | 124 | async def prepare_tags(self, session: "AsyncSession", tag_names: list[str]) -> "list[Tag]": 125 | try: 126 | statement = select(Tag).where(Tag.tag.in_(tag_names)).order_by(Tag.id) 127 | tags = list((await session.execute(statement)).scalars()) 128 | existing = [tag.tag for tag in tags] 129 | for tag_name in tag_names: 130 | if tag_name not in existing: 131 | tag = Tag(tag=tag_name) 132 | session.add(tag) 133 | tags.append(tag) 134 | except Exception: 135 | raise 136 | return tags 137 | 138 | async def insert_artifact( 139 | self, 140 | session: "AsyncSession", 141 | *, 142 | name: "str", 143 | author: "str", 144 | description: "str", 145 | tags: "list[str]", 146 | image_path: "str | None" = None, 147 | id: "int | None" = None, 148 | visible: "bool" = True, 149 | ) -> "Artifact": 150 | nested = await session.begin_nested() 151 | async with self.lock: 152 | tag_objs = await self.prepare_tags(session, tags) 153 | 
plugin = Artifact( 154 | name=name, 155 | author=author, 156 | description=description, 157 | _image_path=image_path, 158 | tags=tag_objs, 159 | visible=visible, 160 | ) 161 | if id is not None: 162 | plugin.id = id 163 | try: 164 | session.add(plugin) 165 | except Exception: 166 | await nested.rollback() 167 | raise 168 | await session.commit() 169 | return await self.get_plugin_by_id(session, plugin.id) 170 | 171 | async def update_artifact(self, session: "AsyncSession", plugin: "Artifact", **kwargs) -> "Artifact": 172 | nested = await session.begin_nested() 173 | async with self.lock: 174 | if "author" in kwargs: 175 | plugin.author = kwargs["author"] 176 | if "description" in kwargs: 177 | plugin.description = kwargs["description"] 178 | if "image_path" in kwargs: 179 | plugin._image_path = kwargs["image_path"] 180 | if "tags" in kwargs: 181 | plugin.tags = await self.prepare_tags(session, kwargs["tags"]) 182 | try: 183 | session.add(plugin) 184 | except Exception: 185 | await nested.rollback() 186 | raise 187 | await session.commit() 188 | return await self.get_plugin_by_id(session, plugin.id) 189 | 190 | async def insert_version( 191 | self, 192 | session: "AsyncSession", 193 | artifact_id: int, 194 | name: str, 195 | hash: str, 196 | created: "datetime | None" = None, 197 | ) -> "Version": 198 | version = Version(artifact_id=artifact_id, name=name, hash=hash, created=created or datetime.now(UTC)) 199 | async with self.lock: 200 | session.add(version) 201 | await session.commit() 202 | return version 203 | 204 | async def search( 205 | self, 206 | session: "AsyncSession", 207 | name: "str | None" = None, 208 | tags: "Iterable[str] | None" = None, 209 | include_hidden: "bool" = False, 210 | sort_by: Optional[SortType] = None, 211 | sort_direction: SortDirection = SortDirection.DESC, 212 | limit: int = 50, 213 | page: int = 0, 214 | ) -> "Sequence[Artifact]": 215 | statement = select(Artifact).offset(limit * page) 216 | if name: 217 | statement = statement.where(Artifact.name.like(f"%{name}%")) 218 | if tags: 219 | for tag in tags: 220 | statement = statement.filter(Artifact.tags.any(tag=tag)) 221 | if not include_hidden: 222 | statement = statement.where(Artifact.visible.is_(True)) 223 | 224 | if sort_direction == SortDirection.ASC: 225 | direction = asc 226 | else: 227 | direction = desc 228 | 229 | if sort_by == SortType.NAME: 230 | statement = statement.order_by(direction(Artifact.name)) 231 | elif sort_by == SortType.DATE: 232 | statement = statement.order_by(direction(Artifact.created)) 233 | elif sort_by == SortType.DOWNLOADS: 234 | statement = statement.order_by(direction(Artifact.downloads)) 235 | else: 236 | statement = statement.order_by(direction(Artifact.id)) 237 | 238 | result = (await session.execute(statement)).scalars().all() 239 | return result or [] 240 | 241 | async def get_plugin_by_name(self, session: "AsyncSession", name: str) -> "Artifact | None": 242 | statement = select(Artifact).where(Artifact.name == name) 243 | try: 244 | return (await session.execute(statement)).scalars().first() 245 | except NoResultFound: 246 | return None 247 | 248 | async def get_plugin_by_id(self, session: "AsyncSession", id: int) -> "Artifact": 249 | statement = select(Artifact).where(Artifact.id == id) 250 | return (await session.execute(statement)).scalars().one() 251 | 252 | async def delete_plugin(self, session: "AsyncSession", id: int): 253 | await session.execute(delete(PluginTag).where(PluginTag.c.artifact_id == id)) 254 | await 
session.execute(delete(Version).where(Version.artifact_id == id)) 255 | await session.execute(delete(Artifact).where(Artifact.id == id)) 256 | return await session.commit() 257 | 258 | async def increment_installs( 259 | self, session: "AsyncSession", plugin_name: str, version_name: str, isUpdate: bool 260 | ) -> bool: 261 | statement = update(Version) 262 | if isUpdate: 263 | statement = statement.values(updates=Version.updates + 1) 264 | else: 265 | statement = statement.values(downloads=Version.downloads + 1) 266 | plugin_id = (await session.execute(select(Artifact.id).where(Artifact.name == plugin_name))).scalar() 267 | if plugin_id is None: 268 | return False 269 | r = await session.execute(statement.where((Version.name == version_name) & (Version.artifact_id == plugin_id))) 270 | await session.commit() 271 | # if rowcount is zero then the version wasn't found 272 | return r.rowcount == 1 # type: ignore[attr-defined] 273 | -------------------------------------------------------------------------------- /plugin_store/database/migrations/env.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | from logging.config import fileConfig 3 | from os import getenv 4 | 5 | from alembic import context 6 | from sqlalchemy import pool 7 | from sqlalchemy.engine import Connection 8 | from sqlalchemy.ext.asyncio import create_async_engine 9 | 10 | from database.models import Base 11 | 12 | # this is the Alembic Config object, which provides 13 | # access to the values within the .ini file in use. 14 | config = context.config 15 | 16 | # Interpret the config file for Python logging. 17 | # This line sets up loggers basically. 18 | if config.config_file_name is not None: 19 | fileConfig(config.config_file_name) 20 | 21 | # add your model's MetaData object here 22 | # for 'autogenerate' support 23 | # from myapp import mymodel 24 | # target_metadata = mymodel.Base.metadata 25 | target_metadata = Base.metadata 26 | 27 | # other values from the config, defined by the needs of env.py, 28 | # can be acquired: 29 | # my_important_option = config.get_main_option("my_important_option") 30 | # ... etc. 31 | 32 | 33 | def run_migrations_offline() -> None: 34 | """Run migrations in 'offline' mode. 35 | 36 | This configures the context with just a URL 37 | and not an Engine, though an Engine is acceptable 38 | here as well. By skipping the Engine creation 39 | we don't even need a DBAPI to be available. 40 | 41 | Calls to context.execute() here emit the given string to the 42 | script output. 43 | 44 | """ 45 | context.configure( 46 | url=getenv("DB_URL"), 47 | target_metadata=target_metadata, 48 | literal_binds=True, 49 | dialect_opts={"paramstyle": "named"}, 50 | ) 51 | 52 | with context.begin_transaction(): 53 | context.run_migrations() 54 | 55 | 56 | def do_run_migrations(connection: Connection) -> None: 57 | context.configure(connection=connection, target_metadata=target_metadata) 58 | 59 | with context.begin_transaction(): 60 | context.run_migrations() 61 | 62 | 63 | async def run_migrations_online() -> None: 64 | """Run migrations in 'online' mode. 65 | 66 | In this scenario we need to create an Engine 67 | and associate a connection with the context. 
68 | 69 | """ 70 | db_url = getenv("DB_URL") 71 | if not db_url: 72 | raise Exception("DB_URL not provided or invalid!") 73 | connectable = create_async_engine( 74 | db_url, 75 | poolclass=pool.NullPool, 76 | future=True, 77 | ) 78 | 79 | async with connectable.connect() as connection: 80 | await connection.run_sync(do_run_migrations) 81 | 82 | await connectable.dispose() 83 | 84 | 85 | if context.is_offline_mode(): 86 | run_migrations_offline() 87 | else: 88 | asyncio.run(run_migrations_online()) 89 | -------------------------------------------------------------------------------- /plugin_store/database/migrations/script.py.mako: -------------------------------------------------------------------------------- 1 | """${message} 2 | 3 | Revision ID: ${up_revision} 4 | Revises: ${down_revision | comma,n} 5 | Create Date: ${create_date} 6 | 7 | """ 8 | from alembic import op 9 | import sqlalchemy as sa 10 | ${imports if imports else ""} 11 | 12 | # revision identifiers, used by Alembic. 13 | revision = ${repr(up_revision)} 14 | down_revision = ${repr(down_revision)} 15 | branch_labels = ${repr(branch_labels)} 16 | depends_on = ${repr(depends_on)} 17 | 18 | 19 | def upgrade() -> None: 20 | ${upgrades if upgrades else "pass"} 21 | 22 | 23 | def downgrade() -> None: 24 | ${downgrades if downgrades else "pass"} 25 | -------------------------------------------------------------------------------- /plugin_store/database/migrations/versions/2022_11_07_0136_4fc55239b4d6_initial_db_setup.py: -------------------------------------------------------------------------------- 1 | """initial db setup 2 | 3 | Revision ID: 4fc55239b4d6 4 | Revises: None 5 | Create Date: 2022-11-07 01:36:59.727609 6 | 7 | """ 8 | 9 | import sqlalchemy as sa 10 | from alembic import op 11 | 12 | from database import utils 13 | 14 | # revision identifiers, used by Alembic. 
15 | revision = "4fc55239b4d6" 16 | down_revision = None 17 | branch_labels = None 18 | depends_on = None 19 | 20 | 21 | def upgrade() -> None: 22 | op.create_table( 23 | "artifacts", 24 | sa.Column("id", sa.Integer(), autoincrement=True, nullable=False), 25 | sa.Column("name", sa.Text(), nullable=True), 26 | sa.Column("author", sa.Text(), nullable=True), 27 | sa.Column("description", sa.Text(), nullable=True), 28 | sa.PrimaryKeyConstraint("id"), 29 | ) 30 | op.create_table( 31 | "tags", 32 | sa.Column("id", sa.Integer(), nullable=False), 33 | sa.Column("tag", sa.Text(), nullable=True), 34 | sa.PrimaryKeyConstraint("id"), 35 | ) 36 | op.create_table( 37 | "plugin_tag", 38 | sa.Column("artifact_id", sa.Integer(), nullable=True), 39 | sa.Column("tag_id", sa.Integer(), nullable=True), 40 | sa.ForeignKeyConstraint(("artifact_id",), ["artifacts.id"]), 41 | sa.ForeignKeyConstraint(("tag_id",), ["tags.id"]), 42 | ) 43 | op.create_table( 44 | "versions", 45 | sa.Column("id", sa.Integer(), autoincrement=True, nullable=False), 46 | sa.Column("artifact_id", sa.Integer(), nullable=True), 47 | sa.Column("name", sa.Text(), nullable=True), 48 | sa.Column("hash", sa.Text(), nullable=True), 49 | sa.Column("added_on", utils.TZDateTime(), nullable=True), 50 | sa.ForeignKeyConstraint(("artifact_id",), ["artifacts.id"]), 51 | sa.PrimaryKeyConstraint("id"), 52 | ) 53 | 54 | 55 | def downgrade() -> None: 56 | op.drop_table("versions") 57 | op.drop_table("plugin_tag") 58 | op.drop_table("tags") 59 | op.drop_table("artifacts") 60 | -------------------------------------------------------------------------------- /plugin_store/database/migrations/versions/2022_11_09_1905-642324500b07_add_tags_unique_constraint.py: -------------------------------------------------------------------------------- 1 | """add-tags-unique-constraint 2 | 3 | Revision ID: 642324500b07 4 | Revises: 4fc55239b4d6 5 | Create Date: 2022-11-09 19:05:26.487490 6 | 7 | """ 8 | 9 | import sqlalchemy as sa 10 | from alembic import op 11 | 12 | from database.models import Artifact, PluginTag, Tag 13 | 14 | # revision identifiers, used by Alembic. 
15 | revision = "642324500b07" 16 | down_revision = "4fc55239b4d6" 17 | branch_labels = None 18 | depends_on = None 19 | 20 | 21 | def upgrade() -> None: 22 | 23 | conn = op.get_bind() 24 | statement = sa.select(Tag.tag, sa.func.string_agg(str(Tag.id), ",").label("ids")).group_by(Tag.tag) 25 | tags = conn.execute(statement) 26 | replacements = {(ids[0], tag.tag): ids[1:] for tag in tags if len(ids := sorted(map(int, tag.ids.split(",")))) > 1} 27 | for (dest_id, tag), src_ids in replacements.items(): 28 | # Fetch all plugins relating to any of src_ids 29 | get_matching_plugins = ( 30 | sa.select(Artifact) 31 | .join(PluginTag) 32 | .where( 33 | PluginTag.c.tag_id.in_(src_ids), 34 | Artifact.id.not_in(sa.select(Artifact.id).join(PluginTag).where(PluginTag.c.tag_id == dest_id)), 35 | ) 36 | .distinct() 37 | ) 38 | plugins = list(conn.execute(get_matching_plugins).scalars().all()) 39 | 40 | op.bulk_insert(PluginTag, [{"artifact_id": plugin_id, "tag_id": dest_id} for plugin_id in plugins]) 41 | 42 | conn.execute(sa.delete(PluginTag).where(PluginTag.c.tag_id.in_(src_ids))) 43 | conn.execute(sa.delete(Tag).where(Tag.id.in_(src_ids))) 44 | 45 | # Remove all relations matching src_ids (ignore things fetched above) 46 | 47 | with op.batch_alter_table(Tag.__table__) as batch_op: # type: ignore[arg-type] 48 | batch_op.create_unique_constraint("unique_tag_tag", ["tag"]) # type: ignore[attr-defined] 49 | 50 | 51 | def downgrade() -> None: 52 | with op.batch_alter_table(Tag.__table__) as batch_op: # type: ignore[arg-type] 53 | batch_op.drop_constraint("unique_tag_tag", type_="unique") # type: ignore[attr-defined] 54 | -------------------------------------------------------------------------------- /plugin_store/database/migrations/versions/2022_11_19_1823-abe90daeb874_initial_db_setup.py: -------------------------------------------------------------------------------- 1 | """initial db setup 2 | 3 | Revision ID: abe90daeb874 4 | Revises: 4fc55239b4d6 5 | Create Date: 2022-11-19 18:23:52.915293 6 | 7 | """ 8 | 9 | import sqlalchemy as sa 10 | from alembic import op 11 | 12 | # revision identifiers, used by Alembic. 13 | revision = "abe90daeb874" 14 | down_revision = "642324500b07" 15 | branch_labels = None 16 | depends_on = None 17 | 18 | 19 | def upgrade() -> None: 20 | op.add_column("artifacts", sa.Column("visible", sa.Boolean(), server_default="1", nullable=True)) 21 | with op.batch_alter_table("artifacts") as batch_op: 22 | batch_op.alter_column("visible", nullable=False) # type: ignore[attr-defined] 23 | 24 | 25 | def downgrade() -> None: 26 | op.drop_column("artifacts", "visible") 27 | -------------------------------------------------------------------------------- /plugin_store/database/migrations/versions/2023_06_13_2205-492a599cd718_version_uniqueness.py: -------------------------------------------------------------------------------- 1 | """empty message 2 | 3 | Revision ID: 492a599cd718 4 | Revises: abe90daeb874 5 | Create Date: 2023-06-13 22:05:19.849032 6 | 7 | """ 8 | 9 | import sqlalchemy as sa 10 | from alembic import op 11 | 12 | # revision identifiers, used by Alembic. 
13 | revision = "492a599cd718" 14 | down_revision = "abe90daeb874" 15 | branch_labels = None 16 | depends_on = None 17 | 18 | 19 | def upgrade() -> None: 20 | with op.batch_alter_table("artifacts") as batch_op_artifacts: 21 | batch_op_artifacts.alter_column( 22 | "visible", 23 | existing_type=sa.BOOLEAN(), 24 | nullable=True, 25 | existing_server_default=sa.text("'1'"), # type: ignore[arg-type] 26 | ) 27 | with op.batch_alter_table("versions") as batch_op_versions: 28 | batch_op_versions.add_column(sa.Column("file", sa.Text(), nullable=True)) 29 | batch_op_versions.create_unique_constraint( 30 | "unique_version_artifact_id_name", 31 | ["artifact_id", "name"], 32 | ) 33 | 34 | 35 | def downgrade() -> None: 36 | with op.batch_alter_table("versions") as batch_op_versions: 37 | batch_op_versions.drop_constraint( 38 | "unique_version_artifact_id_name", 39 | type_="unique", 40 | ) 41 | batch_op_versions.drop_column("file") 42 | with op.batch_alter_table("artifacts") as batch_op_artifacts: 43 | batch_op_artifacts.alter_column( 44 | "visible", 45 | existing_type=sa.BOOLEAN(), 46 | nullable=False, 47 | existing_server_default=sa.text("'1'"), # type: ignore[arg-type] 48 | ) 49 | -------------------------------------------------------------------------------- /plugin_store/database/migrations/versions/2023_06_26_0057-00b050c80d6d_add_artifact_image_field.py: -------------------------------------------------------------------------------- 1 | """add artifact image field 2 | 3 | Revision ID: 00b050c80d6d 4 | Revises: 492a599cd718 5 | Create Date: 2023-06-26 00:57:41.153757 6 | 7 | """ 8 | 9 | import sqlalchemy as sa 10 | from alembic import op 11 | 12 | # revision identifiers, used by Alembic. 13 | revision = "00b050c80d6d" 14 | down_revision = "492a599cd718" 15 | branch_labels = None 16 | depends_on = None 17 | 18 | 19 | def upgrade() -> None: 20 | op.add_column("artifacts", sa.Column("image_path", sa.Text(), nullable=True)) 21 | 22 | 23 | def downgrade() -> None: 24 | op.drop_column("artifacts", "image_path") 25 | -------------------------------------------------------------------------------- /plugin_store/database/migrations/versions/2023_10_16_1710-f5a91a25a410_download_field.py: -------------------------------------------------------------------------------- 1 | """add download and update fields 2 | 3 | Revision ID: f5a91a25a410 4 | Revises: 00b050c80d6d 5 | Create Date: 2023-10-16 17:10:46.948405 6 | 7 | """ 8 | 9 | import sqlalchemy as sa 10 | from alembic import op 11 | 12 | # revision identifiers, used by Alembic. 
13 | revision = "f5a91a25a410" 14 | down_revision = "00b050c80d6d" 15 | branch_labels = None 16 | depends_on = None 17 | 18 | 19 | def upgrade() -> None: 20 | op.add_column("versions", sa.Column("downloads", sa.Integer(), nullable=False, server_default="0")) 21 | op.add_column("versions", sa.Column("updates", sa.Integer(), nullable=False, server_default="0")) 22 | 23 | 24 | def downgrade() -> None: 25 | op.drop_column("versions", "updates") 26 | op.drop_column("versions", "downloads") 27 | -------------------------------------------------------------------------------- /plugin_store/database/migrations/versions/2024_08_10_2158-469f48c143b9_announcements.py: -------------------------------------------------------------------------------- 1 | """empty message 2 | 3 | Revision ID: 469f48c143b9 4 | Revises: f5a91a25a410 5 | Create Date: 2024-08-10 21:58:11.798321 6 | 7 | """ 8 | 9 | import sqlalchemy as sa 10 | from alembic import op 11 | 12 | from database import utils 13 | 14 | # revision identifiers, used by Alembic. 15 | revision = "469f48c143b9" 16 | down_revision = "f5a91a25a410" 17 | branch_labels = None 18 | depends_on = None 19 | 20 | 21 | def upgrade() -> None: 22 | # ### commands auto generated by Alembic - please adjust! ### 23 | op.create_table( 24 | "announcements", 25 | sa.Column("id", sa.Uuid(), nullable=False), 26 | sa.Column("title", sa.Text(), nullable=False), 27 | sa.Column("text", sa.Text(), nullable=False), 28 | sa.Column("active", sa.Boolean(), nullable=False), 29 | sa.Column("updated", utils.TZDateTime(), nullable=False), 30 | sa.Column("created", utils.TZDateTime(), nullable=False), 31 | sa.PrimaryKeyConstraint("id"), 32 | ) 33 | # ### end Alembic commands ### 34 | 35 | 36 | def downgrade() -> None: 37 | # ### commands auto generated by Alembic - please adjust! 
### 38 | op.drop_table("announcements") 39 | # ### end Alembic commands ### 40 | -------------------------------------------------------------------------------- /plugin_store/database/models/Artifact.py: -------------------------------------------------------------------------------- 1 | from datetime import datetime 2 | from urllib.parse import quote 3 | 4 | from sqlalchemy import Boolean, Column, ForeignKey, func, Integer, select, Table, Text, UniqueConstraint 5 | from sqlalchemy.orm import column_property, Mapped, relationship 6 | 7 | import constants 8 | 9 | from .Base import Base 10 | from .Version import Version 11 | 12 | 13 | class Tag(Base): 14 | __tablename__ = "tags" 15 | __table_args__ = (UniqueConstraint("tag", name="unique_tag_tag"),) 16 | 17 | id = Column(Integer, primary_key=True) 18 | tag = Column(Text) 19 | 20 | 21 | PluginTag = Table( 22 | "plugin_tag", 23 | Base.metadata, 24 | Column("artifact_id", Integer, ForeignKey("artifacts.id")), 25 | Column("tag_id", Integer, ForeignKey("tags.id")), 26 | ) 27 | 28 | 29 | class Artifact(Base): 30 | __tablename__ = "artifacts" 31 | 32 | id: Mapped[int] = Column(Integer, autoincrement=True, primary_key=True) 33 | name: Mapped[str] = Column(Text) 34 | author: Mapped[str] = Column(Text) 35 | description: Mapped[str] = Column(Text) 36 | _image_path: Mapped[str | None] = Column("image_path", Text, nullable=True) 37 | tags: "Mapped[list[Tag]]" = relationship( 38 | "Tag", secondary=PluginTag, cascade="all, delete", order_by="Tag.tag", lazy="selectin" 39 | ) 40 | versions: "Mapped[list[Version]]" = relationship( 41 | "Version", cascade="all, delete", lazy="selectin", order_by="Version.created.desc(), Version.id.asc()" 42 | ) 43 | visible: Mapped[bool] = Column(Boolean, default=True) 44 | 45 | # Properties computed from relations 46 | downloads: Mapped[int] = column_property( 47 | select(func.sum(Version.downloads)).where(Version.artifact_id == id).correlate_except(Version).scalar_subquery() 48 | ) 49 | updates: Mapped[int] = column_property( 50 | select(func.sum(Version.updates)).where(Version.artifact_id == id).correlate_except(Version).scalar_subquery() 51 | ) 52 | 53 | created: Mapped[datetime] = column_property( 54 | select(func.min(Version.created)).where(Version.artifact_id == id).correlate_except(Version).scalar_subquery() 55 | ) 56 | updated: Mapped[datetime] = column_property( 57 | select(func.max(Version.created)).where(Version.artifact_id == id).correlate_except(Version).scalar_subquery() 58 | ) 59 | 60 | UniqueConstraint("name") 61 | 62 | @property 63 | def image_url(self): 64 | return f"{constants.CDN_URL}{self.image_path}" 65 | 66 | @property 67 | def image_path(self): 68 | if self._image_path is not None: 69 | return self._image_path 70 | return f"artifact_images/{quote(self.name)}.png" 71 | -------------------------------------------------------------------------------- /plugin_store/database/models/Base.py: -------------------------------------------------------------------------------- 1 | from sqlalchemy.orm import declarative_base 2 | 3 | Base = declarative_base() 4 | -------------------------------------------------------------------------------- /plugin_store/database/models/Version.py: -------------------------------------------------------------------------------- 1 | from sqlalchemy import Column, ForeignKey, Integer, Text, UniqueConstraint 2 | 3 | import constants 4 | 5 | from ..utils import TZDateTime 6 | from .Base import Base 7 | 8 | 9 | class Version(Base): 10 | __tablename__ = "versions" 11 | __table_args__ = 
(UniqueConstraint("artifact_id", "name", name="unique_version_artifact_id_name"),) 12 | 13 | id = Column(Integer, primary_key=True, autoincrement=True) 14 | artifact_id = Column(Integer, ForeignKey("artifacts.id")) 15 | name = Column(Text) 16 | hash = Column(Text) 17 | file_field = Column("file", Text, nullable=True) 18 | downloads = Column(Integer, default=0, nullable=False) 19 | updates = Column(Integer, default=0, nullable=False) 20 | 21 | created = Column("added_on", TZDateTime) 22 | 23 | @property 24 | def file_url(self): 25 | return f"{constants.CDN_URL}{self.file}" # was `self.download_path`, which does not exist on this model 26 | 27 | @property 28 | def file(self): 29 | return f"versions/{self.hash}.zip" # was "artifact_images/..."; must match the path used by cdn.upload_version 30 | -------------------------------------------------------------------------------- /plugin_store/database/models/__init__.py: -------------------------------------------------------------------------------- 1 | from .announcements import Announcement 2 | from .Artifact import Artifact, PluginTag, Tag 3 | from .Base import Base 4 | from .Version import Version 5 | 6 | __all__ = [ 7 | "Announcement", 8 | "Artifact", 9 | "Base", 10 | "PluginTag", 11 | "Tag", 12 | "Version", 13 | ] 14 | -------------------------------------------------------------------------------- /plugin_store/database/models/announcements.py: -------------------------------------------------------------------------------- 1 | from datetime import datetime 2 | from uuid import UUID 3 | from zoneinfo import ZoneInfo 4 | 5 | from sqlalchemy import Boolean, Column, Text, Uuid 6 | from sqlalchemy.orm import Mapped, mapped_column 7 | 8 | from ..utils import TZDateTime, uuid7 9 | from .Base import Base 10 | 11 | UTC = ZoneInfo("UTC") 12 | 13 | 14 | def utcnow() -> datetime: 15 | return datetime.now(UTC) 16 | 17 | 18 | class Announcement(Base): 19 | __tablename__ = "announcements" 20 | 21 | id: Mapped[UUID] = mapped_column(Uuid, primary_key=True, default=uuid7) 22 | 23 | title: Mapped[str] = Column(Text, nullable=False) 24 | text: Mapped[str] = Column(Text, nullable=False) 25 | 26 | active: Mapped[bool] = Column(Boolean, nullable=False) 27 | 28 | created: Mapped[datetime] = Column(TZDateTime, nullable=False, default=utcnow) 29 | updated: Mapped[datetime] = Column(TZDateTime, nullable=False, default=utcnow, onupdate=utcnow) 30 | -------------------------------------------------------------------------------- /plugin_store/database/utils.py: -------------------------------------------------------------------------------- 1 | import os 2 | from datetime import datetime 3 | from typing import TYPE_CHECKING 4 | from uuid import UUID 5 | from zoneinfo import ZoneInfo 6 | 7 | from sqlalchemy import DateTime 8 | from sqlalchemy.types import TypeDecorator 9 | 10 | if TYPE_CHECKING: 11 | from typing import Any 12 | 13 | from sqlalchemy.engine import Dialect 14 | 15 | UTC = ZoneInfo("UTC") 16 | 17 | 18 | class TZDateTime(TypeDecorator[datetime]): 19 | """ 20 | A DateTime type which can only store tz-aware DateTimes.
21 | """ 22 | 23 | impl = DateTime(timezone=True) 24 | cache_ok = True 25 | 26 | def process_bind_param(self, value: "datetime | None", dialect: "Dialect"): 27 | if isinstance(value, datetime): 28 | if value.tzinfo is None: 29 | raise ValueError(f"{value!r} must be TZ-aware") 30 | return value.astimezone(UTC) 31 | return value 32 | 33 | def process_result_value(self, value: "Any | None", dialect: "Dialect") -> "datetime | None": 34 | if isinstance(value, datetime) and value.tzinfo is None: 35 | return value.replace(tzinfo=UTC) 36 | return value 37 | 38 | def __repr__(self): 39 | return "TZDateTime()" 40 | 41 | 42 | _last_timestamp_v7 = None 43 | _last_counter_v7 = 0 # 42-bit counter 44 | 45 | 46 | def uuid7(): 47 | """Generate a UUID from a Unix timestamp in milliseconds and random bits. 48 | UUIDv7 objects feature monotonicity within a millisecond. 49 | """ 50 | # --- 48 --- -- 4 -- --- 12 --- -- 2 -- --- 30 --- - 32 - 51 | # unix_ts_ms | version | counter_hi | variant | counter_lo | random 52 | # 53 | # 'counter = counter_hi | counter_lo' is a 42-bit counter constructed 54 | # with Method 1 of RFC 9562, §6.2, and its MSB is set to 0. 55 | # 56 | # 'random' is a 32-bit random value regenerated for every new UUID. 57 | # 58 | # If multiple UUIDs are generated within the same millisecond, the LSB 59 | # of 'counter' is incremented by 1. When overflowing, the timestamp is 60 | # advanced and the counter is reset to a random 42-bit integer with MSB 61 | # set to 0. 62 | 63 | def get_counter_and_tail(): 64 | rand = int.from_bytes(os.urandom(10)) 65 | # 42-bit counter with MSB set to 0 66 | rand_counter = (rand >> 32) & 0x1FFFFFFFFFF 67 | # 32-bit random data 68 | rand_tail = rand & 0xFFFFFFFF 69 | return rand_counter, rand_tail 70 | 71 | global _last_timestamp_v7 72 | global _last_counter_v7 73 | 74 | import time 75 | 76 | nanoseconds = time.time_ns() 77 | timestamp_ms, _ = divmod(nanoseconds, 1_000_000) 78 | 79 | if _last_timestamp_v7 is None or timestamp_ms > _last_timestamp_v7: 80 | counter, tail = get_counter_and_tail() 81 | else: 82 | if timestamp_ms < _last_timestamp_v7: 83 | timestamp_ms = _last_timestamp_v7 + 1 84 | # advance the counter 85 | counter = _last_counter_v7 + 1 86 | if counter > 0x3FFFFFFFFFF: 87 | timestamp_ms += 1 # advance the timestamp 88 | counter, tail = get_counter_and_tail() 89 | else: 90 | tail = int.from_bytes(os.urandom(4)) 91 | 92 | _last_timestamp_v7 = timestamp_ms 93 | _last_counter_v7 = counter 94 | 95 | int_uuid_7 = (timestamp_ms & 0xFFFFFFFFFFFF) << 80 96 | int_uuid_7 |= ((counter >> 30) & 0xFFF) << 64 97 | int_uuid_7 |= (counter & 0x3FFFFFFF) << 32 98 | int_uuid_7 |= tail & 0xFFFFFFFF 99 | # Set the variant to RFC 4122. 100 | int_uuid_7 &= ~(0xC000 << 48) 101 | int_uuid_7 |= 0x8000 << 48 102 | 103 | # Set the version number to 7. 
104 | int_uuid_7 |= 0x7000 << 64 105 | return UUID(int=int_uuid_7) 106 | -------------------------------------------------------------------------------- /plugin_store/discord.py: -------------------------------------------------------------------------------- 1 | from os import getenv 2 | from typing import TYPE_CHECKING 3 | 4 | from discord_webhook import AsyncDiscordWebhook, DiscordEmbed 5 | 6 | import constants 7 | 8 | if TYPE_CHECKING: 9 | from database.models import Artifact, Version 10 | 11 | 12 | async def post_announcement(plugin: "Artifact", version: "Version"): 13 | webhook = AsyncDiscordWebhook(url=getenv("ANNOUNCEMENT_WEBHOOK")) 14 | embed = DiscordEmbed(title=plugin.name, description=plugin.description, color=0x213997) 15 | 16 | embed.set_author( 17 | name=plugin.author, 18 | icon_url=f"{constants.CDN_URL}SDHomeBrewwwww.png", 19 | ) 20 | embed.set_thumbnail(url=plugin.image_url) 21 | embed.set_footer(text=f"Version {version.name}") 22 | 23 | webhook.add_embed(embed) 24 | await webhook.execute() 25 | -------------------------------------------------------------------------------- /plugin_store/main.py: -------------------------------------------------------------------------------- 1 | from api import app 2 | 3 | __all__ = [ 4 | "app", 5 | ] 6 | -------------------------------------------------------------------------------- /plugin_store/templates/plugin_browser.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 |
[plugin_browser.html: the HTML markup of this template was stripped during extraction, leaving only template line numbers and Jinja expressions. What survives shows one card per plugin, rendering {{ plugin.name }}, "Author: {{ plugin.author }}", a "Requires root" badge, a tag list ("Tags: {{ tag }}"), and {{ plugin.description }} (template lines 38-49); the stripped spans at template lines 16-26, 34-35, and 57-89 are unrecoverable.]
56 | 90 | 91 | 92 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ["poetry-core"] 3 | build-backend = "poetry.core.masonry.api" 4 | 5 | [tool.poetry] 6 | name = "decky-plugin-store" 7 | version = "0.1.0" 8 | description = "Plugin Store backend for Decky" 9 | authors = ["Your Name "] 10 | readme = "README.md" 11 | packages = [] 12 | 13 | [tool.poetry.dependencies] 14 | python = "^3.11" 15 | aiohttp = "^3.10.11" 16 | alembic = "^1.13.2" 17 | aiosqlite = "^0.20.0" # for async sqlite in sqlalchemy 18 | asgiref = "^3.8.1" 19 | discord-webhook = {version = "^1.3.1", extras = ["async"]} 20 | fastapi = "^0.112.0" 21 | pydantic = "^1.10.17" 22 | python-multipart = "^0.0.18" # for multipart forms in fastapi 23 | sqlalchemy = "^2.0.32" 24 | uvicorn = "^0.30.6" 25 | limits = {extras = ["redis"], version = "^3.13.0"} 26 | redis = "^5.0.8" 27 | asyncpg = "^0.29.0" # for async postgres in sqlalchemy 28 | 29 | [tool.poetry.group.dev.dependencies] 30 | black = "^24.8.0" 31 | flake8 = "^7.1.1" 32 | flake8-pyproject = "^1.2.3" 33 | httpx = "^0.23.3" 34 | isort = "^5.13.2" 35 | mypy = "^1.11.1" 36 | pytest = "^7.4.4" 37 | pytest-asyncio = "^0.23.8" 38 | pytest-env = "^1.1.3" 39 | pytest-freezer = "^0.4.8" 40 | pytest-lazy-fixture = "^0.6.3" 41 | pytest-mock = "^3.14.0" 42 | types-pytest-lazy-fixture = "^0.6.3.20240707" 43 | 44 | [tool.black] 45 | line-length = 120 46 | target-version = ['py39', 'py310', 'py311'] 47 | include = '\.pyi?$' 48 | exclude = ''' 49 | ( 50 | /( 51 | \.eggs 52 | | \.git 53 | | \.hg 54 | | \.mypy_cache 55 | | \.tox 56 | | \.venv 57 | | _build 58 | | buck-out 59 | | build 60 | | dist 61 | )/ 62 | ) 63 | ''' 64 | 65 | [tool.flake8] 66 | max-line-length = 120 67 | 68 | [tool.isort] 69 | line_length = 120 70 | order_by_type = false 71 | profile = "black" 72 | src_paths = ["plugin_store", "tests"] 73 | 74 | [tool.mypy] 75 | plugins = [ 76 | "sqlalchemy.ext.mypy.plugin", 77 | ] 78 | 79 | [[tool.mypy.overrides]] 80 | module = ["discord_webhook", "limits"] 81 | ignore_missing_imports = true 82 | 83 | [tool.pytest.ini_options] 84 | pythonpath = "/app/plugin_store" 85 | env = [ 86 | "DB_URL=sqlite+aiosqlite:///:memory:", 87 | "SUBMIT_AUTH_KEY=deadbeef", 88 | ] 89 | asyncio_mode = "auto" 90 | -------------------------------------------------------------------------------- /tests/conftest.py: -------------------------------------------------------------------------------- 1 | from os import getenv 2 | from pathlib import Path 3 | from typing import TYPE_CHECKING 4 | 5 | import pytest 6 | import pytest_asyncio 7 | from httpx import ASGITransport, AsyncClient 8 | from pytest_mock import MockFixture 9 | from sqlalchemy.ext.asyncio import async_sessionmaker, AsyncEngine, AsyncSession 10 | from sqlalchemy.orm import sessionmaker 11 | 12 | import main 13 | from api import database as db_dependency 14 | from database.database import Database 15 | from db_helpers import ( 16 | create_test_db_engine, 17 | create_test_db_sessionmaker, 18 | prepare_test_db, 19 | prepare_transactioned_db_session, 20 | ) 21 | 22 | if TYPE_CHECKING: 23 | from typing import AsyncIterator 24 | 25 | from fastapi import FastAPI 26 | 27 | APP_PATH = Path("./plugin_store").absolute() 28 | TESTS_PATH = Path(__file__).expanduser().resolve().parent 29 | DUMMY_DATA_PATH = TESTS_PATH / "dummy_data" 30 | 31 | 32 | @pytest.fixture(scope="session", autouse=True) 33 | def
mock_external_services(session_mocker: "MockFixture"): 34 | session_mocker.patch("cdn.b2_upload") 35 | session_mocker.patch( 36 | "cdn.fetch_image", 37 | return_value=((DUMMY_DATA_PATH / "plugin-image.png").read_bytes(), "image/png"), 38 | ) 39 | discord_mock = session_mocker.patch("discord.AsyncDiscordWebhook", new=session_mocker.AsyncMock) 40 | discord_mock.add_embed = session_mocker.Mock() 41 | 42 | 43 | @pytest.fixture(scope="session", autouse=True) 44 | def mock_constants(session_mocker: "MockFixture"): 45 | """ 46 | Automatically mock selected constants to make sure the code under test reads them instead of hardcoded values. 47 | """ 48 | session_mocker.patch("constants.CDN_URL", new="hxxp://fake.domain/") 49 | 50 | 51 | @pytest.fixture() 52 | def plugin_store() -> "FastAPI": 53 | return main.app 54 | 55 | 56 | # HTTP client for the FastAPI app (httpx AsyncClient over an ASGI transport) 57 | @pytest_asyncio.fixture() 58 | async def client_unauth( 59 | plugin_store: "FastAPI", 60 | ) -> "AsyncIterator[AsyncClient]": 61 | async with AsyncClient( 62 | base_url="http://test", 63 | transport=ASGITransport(app=plugin_store), # type: ignore[arg-type] 64 | ) as client: 65 | yield client 66 | 67 | 68 | @pytest_asyncio.fixture() 69 | async def client_auth(client_unauth: "AsyncClient") -> "AsyncClient": 70 | client_unauth.headers["Authorization"] = getenv("SUBMIT_AUTH_KEY", "") 71 | return client_unauth 72 | 73 | 74 | @pytest_asyncio.fixture(scope="session") 75 | async def seed_db_engine() -> tuple["AsyncEngine", "async_sessionmaker"]: 76 | engine = create_test_db_engine() 77 | db_sessionmaker = create_test_db_sessionmaker(engine) 78 | await prepare_test_db(engine, db_sessionmaker, True) 79 | return engine, db_sessionmaker 80 | 81 | 82 | @pytest.fixture(scope="session") 83 | def seed_db_sessionmaker(seed_db_engine: tuple["AsyncEngine", "sessionmaker"]) -> "sessionmaker": 84 | return seed_db_engine[1] 85 | 86 | 87 | @pytest_asyncio.fixture() 88 | async def seed_db_session(seed_db_engine: tuple["AsyncEngine", "sessionmaker"]) -> "AsyncIterator[AsyncSession]": 89 | async with prepare_transactioned_db_session(*seed_db_engine) as session: 90 | yield session 91 | 92 | 93 | @pytest_asyncio.fixture() 94 | async def seed_db(plugin_store: "FastAPI", seed_db_session: "AsyncSession", mocker: "MockFixture") -> "Database": 95 | database = Database(seed_db_session, lock=mocker.MagicMock()) 96 | main.app.dependency_overrides[db_dependency] = lambda: database 97 | return database 98 | 99 | 100 | @pytest.fixture() 101 | def plugin_submit_data(request: "pytest.FixtureRequest") -> "tuple[dict, dict]": 102 | data = { 103 | "name": request.param, 104 | "author": "plugin-author-of-new-plugin", 105 | "description": "Description of our brand new plugin!", 106 | "tags": "tag-1,new-tag-2", 107 | "version_name": "2.0.0", 108 | "image": "https://example.com/image.png", 109 | } 110 | files = { 111 | "file": ("new-release.bin", b"this-is-a-test-file-content", "application/x-binary"), 112 | } 113 | return data, files 114 | 115 | 116 | @pytest.fixture() 117 | def index_template(): 118 | return (APP_PATH / "templates/plugin_browser.html").read_text() 119 | 120 | 121 | @pytest.fixture() 122 | def _mock_uuidv7_generation(mocker: "MockFixture") -> None: 123 | """ 124 | Mock the randomness used in UUID generation 125 | 126 | The random generator used for UUIDs will return 0x11 bytes, so UUIDs will be predictable as long as you also freeze the time.
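For example, with time frozen at 2023-12-01T00:00:00Z, the first generated UUID is `018c22ac-d000-7444-9111-111111111111` (as asserted in test_announcement_create).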
127 | UUIDs will still increment, as the generator keeps a count of the UUIDs generated within a single time tick, stored at the 5th 128 | random byte and incremented by 1 per UUID, so the first UUID's last segment will be `111111111111`, the second's `111211111111`, and so on. 129 | """ 130 | 131 | def urandom_side_effect(size): 132 | return b"\x11" * size 133 | 134 | mocker.patch("database.utils.os.urandom", side_effect=urandom_side_effect) 135 | -------------------------------------------------------------------------------- /tests/db_helpers.py: -------------------------------------------------------------------------------- 1 | import random 2 | from contextlib import asynccontextmanager 3 | from datetime import datetime, timedelta, UTC 4 | from hashlib import sha256 5 | from os import getenv 6 | from typing import TYPE_CHECKING 7 | from uuid import UUID 8 | 9 | from sqlalchemy import event 10 | from sqlalchemy.ext.asyncio import async_sessionmaker, AsyncEngine, AsyncSession, create_async_engine 11 | from sqlalchemy.orm import sessionmaker 12 | from sqlalchemy.sql import select 13 | 14 | from database.models import Announcement, Artifact, Base, Tag, Version 15 | 16 | if TYPE_CHECKING: 17 | from typing import AsyncIterator 18 | 19 | 20 | class FakePluginGenerator: 21 | def __init__( 22 | self, 23 | session: "AsyncSession", 24 | date: datetime | None = None, 25 | ): 26 | self.created_plugins_count = 0 27 | self.session = session 28 | self.date = datetime.now(tz=UTC) if date is None else date 29 | 30 | def move_date(self, seconds: int = 0, minutes: int = 0, hours: int = 0, days: int = 0, weeks: int = 0): 31 | self.date += timedelta(seconds=seconds, minutes=minutes, hours=hours, days=days, weeks=weeks) 32 | 33 | async def _prepare_tags(self, tag_names): 34 | statement = select(Tag).where(Tag.tag.in_(tag_names)).order_by(Tag.id) 35 | tags = list((await self.session.execute(statement)).scalars()) 36 | existing = [tag.tag for tag in tags] 37 | for tag_name in tag_names: 38 | if tag_name not in existing: 39 | tag = Tag(tag=tag_name) 40 | self.session.add(tag) 41 | tags.append(tag) 42 | 43 | return tags 44 | 45 | async def _create_plugin( 46 | self, 47 | name, 48 | author, 49 | description, 50 | image_path, 51 | tags, 52 | visible, 53 | id_=None, 54 | ): 55 | plugin = Artifact( 56 | name=name, 57 | author=author, 58 | description=description, 59 | _image_path=image_path, 60 | tags=tags, 61 | visible=visible, 62 | ) 63 | if id_ is not None: 64 | plugin.id = id_ 65 | self.session.add(plugin) 66 | await self.session.commit() 67 | return plugin 68 | 69 | async def _create_versions(self, plugin, versions): 70 | for version in versions: 71 | version = Version( 72 | artifact_id=plugin.id, 73 | name=version, 74 | hash=sha256(f"{plugin.id}-{version}".encode()).hexdigest(), 75 | created=self.date, 76 | ) 77 | self.session.add(version) 78 | await self.session.commit() 79 | self.move_date(seconds=1) 80 | 81 | async def create( 82 | self, 83 | name: "str | None" = None, 84 | author: "str | None" = None, 85 | description: "str | None" = None, 86 | image: "str | None" = None, 87 | tags: "int | list[str] | None" = None, 88 | versions: "int | list[str] | None" = None, 89 | visible: bool = True, 90 | ): 91 | if not name: 92 | name = f"plugin-{self.created_plugins_count + 1}" 93 | 94 | if not author: 95 | author = f"author-of-{name}" 96 | 97 | if not description: 98 | description = f"Description of {name}" 99 | 100 | if tags is None: 101 | tags = random.randint(1, 4) 102 | 103 | if isinstance(tags, int): 104 | tags = [f"tag-{i}" for i in
range(tags)] 105 | 106 | tag_objs = await self._prepare_tags(tags) 107 | 108 | plugin = await self._create_plugin(name, author, description, image, tag_objs, visible) 109 | 110 | if versions is None: 111 | versions = random.randint(1, 4) 112 | 113 | if isinstance(versions, int): 114 | versions = [f"0.{i + 1}.0" for i in range(versions)] 115 | 116 | await self._create_versions(plugin, versions) 117 | 118 | self.created_plugins_count += 1 119 | 120 | 121 | def create_test_db_engine() -> "AsyncEngine": 122 | db_url = getenv("DB_URL") 123 | if not db_url: 124 | raise Exception("DB_URL not provided or invalid!") 125 | return create_async_engine( 126 | db_url, 127 | pool_pre_ping=True, 128 | # echo=True, 129 | ) 130 | 131 | 132 | def create_test_db_sessionmaker(engine: "AsyncEngine") -> "async_sessionmaker": 133 | return async_sessionmaker( 134 | bind=engine, 135 | autoflush=False, 136 | future=True, 137 | expire_on_commit=False, 138 | autocommit=False, 139 | ) 140 | 141 | 142 | async def migrate_test_db(engine: "AsyncEngine") -> None: 143 | async with engine.begin() as conn: 144 | await conn.run_sync(Base.metadata.create_all) 145 | 146 | 147 | async def seed_test_db(db_sessionmaker: "async_sessionmaker") -> None: 148 | session = db_sessionmaker() 149 | generator = FakePluginGenerator(session, datetime(2022, 2, 25, 0, 0, 0, tzinfo=UTC)) 150 | await generator.create(tags=["tag-1", "tag-2"], versions=["0.1.0", "0.2.0", "1.0.0"]) 151 | generator.date = datetime(2022, 2, 25, 0, 1, 0, 0, tzinfo=UTC) 152 | await generator.create(image="2.png", tags=["tag-2"], versions=["1.1.0", "2.0.0"]) 153 | generator.date = datetime(2022, 2, 25, 0, 2, 0, 0, tzinfo=UTC) 154 | await generator.create("third", tags=["tag-2", "tag-3"], versions=["3.0.0", "3.1.0", "3.2.0"]) 155 | generator.date = datetime(2022, 2, 25, 0, 3, 0, 0, tzinfo=UTC) 156 | await generator.create(tags=["tag-1", "tag-3"], versions=["1.0.0", "2.0.0", "3.0.0", "4.0.0"]) 157 | generator.date = datetime(2022, 2, 25, 0, 4, 0, 0, tzinfo=UTC) 158 | await generator.create(tags=["tag-1", "tag-2"], versions=["0.1.0", "0.2.0", "1.0.0"], visible=False) 159 | generator.date = datetime(2022, 2, 25, 0, 5, 0, 0, tzinfo=UTC) 160 | await generator.create(image="6.png", tags=["tag-2"], versions=["1.1.0", "2.0.0"], visible=False) 161 | generator.date = datetime(2022, 2, 25, 0, 6, 0, 0, tzinfo=UTC) 162 | await generator.create("seventh", tags=["tag-2", "tag-3"], versions=["3.0.0", "3.1.0", "3.2.0"], visible=False) 163 | generator.date = datetime(2022, 2, 25, 0, 7, 0, 0, tzinfo=UTC) 164 | await generator.create(tags=["tag-1", "tag-3"], versions=["1.0.0", "2.0.0", "3.0.0", "4.0.0"], visible=False) 165 | announcement1 = Announcement( # type: ignore[call-arg] 166 | id=UUID("01234568-79ab-7cde-a445-b9f117ca645d"), 167 | title="Test announcement 1", 168 | text="This is only a drill!", 169 | created=datetime(2023, 11, 16, 0, 0, 0, tzinfo=UTC), 170 | updated=datetime(2023, 11, 16, 0, 0, 0, tzinfo=UTC), 171 | active=True, 172 | ) 173 | session.add(announcement1) 174 | announcement2 = Announcement( # type: ignore[call-arg] 175 | id=UUID("89abcdef-79ab-7cde-99e0-56b0d2e2dcdb"), 176 | title="Test announcement 2", 177 | text="Seriously! 
Just a drill!", 178 | created=datetime(2023, 11, 16, 0, 1, 0, tzinfo=UTC), 179 | updated=datetime(2023, 11, 16, 0, 1, 0, tzinfo=UTC), 180 | active=True, 181 | ) 182 | session.add(announcement2) 183 | announcement3 = Announcement( # type: ignore[call-arg] 184 | id=UUID("89abcdef-79ab-7cde-99e0-9870d2e2dcdb"), 185 | title="Hidden test announcement", 186 | text="This one is inactive!!", 187 | created=datetime(2023, 11, 16, 0, 2, 0, tzinfo=UTC), 188 | updated=datetime(2023, 11, 16, 0, 2, 0, tzinfo=UTC), 189 | active=False, 190 | ) 191 | session.add(announcement3) 192 | await session.commit() 193 | 194 | 195 | async def prepare_test_db( 196 | engine: "AsyncEngine", 197 | db_sessionmaker: "async_sessionmaker", 198 | seed: bool = False, 199 | ) -> None: 200 | await migrate_test_db(engine) 201 | if seed: 202 | await seed_test_db(db_sessionmaker) 203 | 204 | 205 | @asynccontextmanager 206 | async def prepare_transactioned_db_session( 207 | engine: "AsyncEngine", 208 | db_sessionmaker: "sessionmaker", 209 | ) -> "AsyncIterator[AsyncSession]": 210 | connection = await engine.connect() 211 | outer_transaction = await connection.begin() 212 | async_session = db_sessionmaker(bind=connection) 213 | # seems like for sqlite releasing last savepoint commits the whole transaction. This should fix that. 214 | await connection.begin_nested() 215 | nested = await connection.begin_nested() 216 | 217 | @event.listens_for(async_session.sync_session, "after_transaction_end") 218 | def end_savepoint(session, transaction): 219 | nonlocal nested 220 | 221 | if not nested.is_active: 222 | nested = connection.sync_connection.begin_nested() 223 | 224 | yield async_session 225 | 226 | await outer_transaction.rollback() 227 | await async_session.close() 228 | await connection.close() 229 | -------------------------------------------------------------------------------- /tests/dummy_data/plugin-image.png: -------------------------------------------------------------------------------- 1 | version https://git-lfs.github.com/spec/v1 2 | oid sha256:9a011602781e4c4af2d08e2ba879e1e3e077a990e0afbbc2a6ad384ab06148b6 3 | size 111189 4 | -------------------------------------------------------------------------------- /tests/test_announcement_views.py: -------------------------------------------------------------------------------- 1 | from typing import TYPE_CHECKING 2 | 3 | import pytest 4 | from fastapi import status 5 | from pytest_lazyfixture import lazy_fixture 6 | 7 | if TYPE_CHECKING: 8 | from freezegun.api import FrozenDateTimeFactory 9 | from httpx import AsyncClient 10 | 11 | from database.database import Database 12 | 13 | 14 | @pytest.mark.parametrize( 15 | ("endpoint", "method"), 16 | [ 17 | ("/v1/announcements", "GET"), 18 | ("/v1/announcements", "POST"), 19 | ("/v1/announcements/01234568-79ab-7cde-a445-b9f117ca645d", "GET"), 20 | ("/v1/announcements/01234568-79ab-7cde-a445-b9f117ca645d", "PUT"), 21 | ("/v1/announcements/01234568-79ab-7cde-a445-b9f117ca645d", "DELETE"), 22 | ], 23 | ) 24 | async def test_endpoints_requiring_auth(client_unauth: "AsyncClient", endpoint: str, method: str): 25 | response = await client_unauth.request(method, endpoint) 26 | assert response.status_code == 403 27 | 28 | 29 | @pytest.mark.parametrize("client", [lazy_fixture("client_unauth"), lazy_fixture("client_auth")]) 30 | @pytest.mark.parametrize( 31 | ("origin", "result"), 32 | [("https://example.com", status.HTTP_400_BAD_REQUEST), ("https://steamloopback.host", status.HTTP_200_OK)], 33 | ) 34 | async def 
test_current_announcement_list_endpoint_cors( 35 | client: "AsyncClient", 36 | origin: str, 37 | result: int, 38 | ): 39 | headers = { 40 | "Origin": origin, 41 | "Access-Control-Request-Method": "GET", 42 | "Access-Control-Request-Headers": "X-Example", 43 | } 44 | response = await client.options("/v1/announcements/-/current", headers=headers) 45 | 46 | assert response.status_code == result 47 | 48 | 49 | async def test_announcement_list( 50 | client_auth: "AsyncClient", 51 | seed_db: "Database", 52 | ): 53 | response = await client_auth.get("/v1/announcements") 54 | 55 | assert response.status_code == 200 56 | data = response.json() 57 | 58 | assert len(data) == 3 59 | assert data[0] == { 60 | "id": "89abcdef-79ab-7cde-99e0-9870d2e2dcdb", 61 | "title": "Hidden test announcement", 62 | "text": "This one is inactive!!", 63 | "active": False, 64 | "created": "2023-11-16T00:02:00Z", 65 | "updated": "2023-11-16T00:02:00Z", 66 | } 67 | assert data[1] == { 68 | "id": "89abcdef-79ab-7cde-99e0-56b0d2e2dcdb", 69 | "title": "Test announcement 2", 70 | "text": "Seriously! Just a drill!", 71 | "active": True, 72 | "created": "2023-11-16T00:01:00Z", 73 | "updated": "2023-11-16T00:01:00Z", 74 | } 75 | assert data[2] == { 76 | "id": "01234568-79ab-7cde-a445-b9f117ca645d", 77 | "title": "Test announcement 1", 78 | "text": "This is only a drill!", 79 | "active": True, 80 | "created": "2023-11-16T00:00:00Z", 81 | "updated": "2023-11-16T00:00:00Z", 82 | } 83 | 84 | 85 | async def test_announcement_create( 86 | client_auth: "AsyncClient", 87 | seed_db: "Database", 88 | freezer: "FrozenDateTimeFactory", 89 | _mock_uuidv7_generation, 90 | ): 91 | freezer.move_to("2023-12-01T00:00:00Z") 92 | response = await client_auth.post("/v1/announcements", json={"title": "Test 3", "text": "Drill test!"}) 93 | 94 | assert response.status_code == 201 95 | data = response.json() 96 | 97 | assert data["title"] == "Test 3" 98 | assert data["text"] == "Drill test!" 99 | assert data == { 100 | "id": "018c22ac-d000-7444-9111-111111111111", 101 | "title": "Test 3", 102 | "text": "Drill test!", 103 | "active": True, 104 | "created": "2023-12-01T00:00:00Z", 105 | "updated": "2023-12-01T00:00:00Z", 106 | } 107 | 108 | response = await client_auth.get("/v1/announcements") 109 | 110 | assert response.status_code == 200 111 | data = response.json() 112 | 113 | assert len(data) == 4 114 | 115 | 116 | @pytest.mark.parametrize("client", [lazy_fixture("client_unauth"), lazy_fixture("client_auth")]) 117 | async def test_announcement_list_current( 118 | client: "AsyncClient", 119 | seed_db: "Database", 120 | ): 121 | response = await client.get("/v1/announcements/-/current") 122 | 123 | assert response.status_code == 200 124 | data = response.json() 125 | 126 | assert len(data) == 2, data 127 | assert data[0] == { 128 | "id": "89abcdef-79ab-7cde-99e0-56b0d2e2dcdb", 129 | "title": "Test announcement 2", 130 | "text": "Seriously! 
Just a drill!", 131 | "created": "2023-11-16T00:01:00Z", 132 | "updated": "2023-11-16T00:01:00Z", 133 | } 134 | assert data[1] == { 135 | "id": "01234568-79ab-7cde-a445-b9f117ca645d", 136 | "title": "Test announcement 1", 137 | "text": "This is only a drill!", 138 | "created": "2023-11-16T00:00:00Z", 139 | "updated": "2023-11-16T00:00:00Z", 140 | } 141 | 142 | 143 | async def test_announcement_fetch( 144 | client_auth: "AsyncClient", 145 | seed_db: "Database", 146 | ): 147 | response = await client_auth.get("/v1/announcements/01234568-79ab-7cde-a445-b9f117ca645d") 148 | 149 | assert response.status_code == 200 150 | data = response.json() 151 | 152 | assert data == { 153 | "id": "01234568-79ab-7cde-a445-b9f117ca645d", 154 | "title": "Test announcement 1", 155 | "text": "This is only a drill!", 156 | "active": True, 157 | "created": "2023-11-16T00:00:00Z", 158 | "updated": "2023-11-16T00:00:00Z", 159 | } 160 | 161 | 162 | async def test_announcement_update( 163 | client_auth: "AsyncClient", 164 | seed_db: "Database", 165 | freezer: "FrozenDateTimeFactory", 166 | ): 167 | freezer.move_to("2023-12-01T00:00:00Z") 168 | response = await client_auth.put( 169 | "/v1/announcements/01234568-79ab-7cde-a445-b9f117ca645d", 170 | json={"title": "First test announcement", "text": "Drilling!"}, 171 | ) 172 | 173 | assert response.status_code == 200, response.text 174 | data = response.json() 175 | 176 | assert data == { 177 | "id": "01234568-79ab-7cde-a445-b9f117ca645d", 178 | "title": "First test announcement", 179 | "text": "Drilling!", 180 | "active": True, 181 | "created": "2023-11-16T00:00:00Z", 182 | "updated": "2023-12-01T00:00:00Z", 183 | } 184 | 185 | 186 | async def test_announcement_delete( 187 | client_auth: "AsyncClient", 188 | seed_db: "Database", 189 | freezer: "FrozenDateTimeFactory", 190 | ): 191 | freezer.move_to("2023-12-01T00:00:00Z") 192 | response = await client_auth.delete("/v1/announcements/01234568-79ab-7cde-a445-b9f117ca645d") 193 | 194 | assert response.status_code == 204, response.text 195 | 196 | response = await client_auth.get("/v1/announcements") 197 | 198 | assert response.status_code == 200 199 | data = response.json() 200 | 201 | assert len(data) == 2 202 | assert data[0]["id"] != "01234568-79ab-7cde-a445-b9f117ca645d" 203 | assert data[1]["id"] != "01234568-79ab-7cde-a445-b9f117ca645d" 204 | -------------------------------------------------------------------------------- /tests/test_misc_views.py: -------------------------------------------------------------------------------- 1 | from typing import TYPE_CHECKING 2 | 3 | import pytest 4 | from pytest_lazyfixture import lazy_fixture 5 | 6 | if TYPE_CHECKING: 7 | from httpx import AsyncClient 8 | 9 | 10 | @pytest.mark.asyncio 11 | @pytest.mark.parametrize("client", [lazy_fixture("client_unauth"), lazy_fixture("client_auth")]) 12 | async def test_index_endpoint(client: "AsyncClient", index_template: str): 13 | response = await client.get("/") 14 | assert response.status_code == 200 15 | assert response.text == index_template 16 | 17 | 18 | @pytest.mark.asyncio 19 | @pytest.mark.parametrize( 20 | ("client", "return_code"), 21 | [(lazy_fixture("client_unauth"), 403), (lazy_fixture("client_auth"), 200)], 22 | ) 23 | async def test_auth_endpoint(client: "AsyncClient", return_code: int): 24 | response = await client.post("/__auth") 25 | assert response.status_code == return_code 26 | -------------------------------------------------------------------------------- /tests/test_plugin_views.py: 
-------------------------------------------------------------------------------- 1 | from typing import TYPE_CHECKING 2 | from urllib.parse import urlencode 3 | 4 | import pytest 5 | from fastapi import status 6 | from pytest_lazyfixture import lazy_fixture 7 | from pytest_mock import MockFixture 8 | from sqlalchemy import func, select 9 | from sqlalchemy.exc import NoResultFound 10 | 11 | from constants import SortDirection, SortType 12 | from database.models.Artifact import Tag 13 | 14 | if TYPE_CHECKING: 15 | from typing import Union 16 | 17 | from freezegun.api import FrozenDateTimeFactory 18 | from httpx import AsyncClient 19 | 20 | from database.database import Database 21 | 22 | 23 | @pytest.mark.asyncio 24 | @pytest.mark.parametrize("client", [lazy_fixture("client_unauth"), lazy_fixture("client_auth")]) 25 | @pytest.mark.parametrize("isUpdate", [True, False, None], ids=["update", "download", "no_isUpdate"]) 26 | @pytest.mark.parametrize( 27 | ("plugin_name", "version_name", "return_code"), 28 | [ 29 | pytest.param("plugin-1", "1.0.0", 200, id="real"), 30 | pytest.param("plugin-1", "not_a_real_version", 404, id="invalid_version"), 31 | pytest.param("not_a_real_name", "1.0.0", 404, id="invalid_name"), 32 | ], 33 | ) 34 | async def test_increment_endpoint( 35 | seed_db: "Database", 36 | client: "AsyncClient", 37 | plugin_name: str, 38 | version_name: str, 39 | return_code: int, 40 | isUpdate: "bool | None", 41 | mocker: "MockFixture", 42 | ): 43 | mocker.patch("api.rate_limit") # remove ratelimit 44 | if isUpdate is None: 45 | response = await client.post(f"/plugins/{plugin_name}/versions/{version_name}/increment") 46 | else: 47 | response = await client.post(f"/plugins/{plugin_name}/versions/{version_name}/increment?isUpdate={isUpdate}") 48 | 49 | assert response.status_code == return_code 50 | if response.status_code == 200: 51 | plugin = await seed_db.get_plugin_by_id(seed_db.session, 1) 52 | if isUpdate is False: 53 | assert plugin.versions[0].downloads == 1 54 | assert plugin.downloads == 1 55 | assert plugin.versions[0].updates == 0 56 | assert plugin.updates == 0 57 | else: 58 | assert plugin.versions[0].downloads == 0 59 | assert plugin.downloads == 0 60 | assert plugin.versions[0].updates == 1 61 | assert plugin.updates == 1 62 | 63 | 64 | @pytest.mark.asyncio 65 | @pytest.mark.parametrize("client", [lazy_fixture("client_unauth"), lazy_fixture("client_auth")]) 66 | @pytest.mark.parametrize( 67 | ("origin", "result"), 68 | [("https://example.com", status.HTTP_400_BAD_REQUEST), ("https://steamloopback.host", status.HTTP_200_OK)], 69 | ) 70 | async def test_plugin_list_endpoint_cors( 71 | client: "AsyncClient", 72 | origin: str, 73 | result: int, 74 | ): 75 | headers = { 76 | "Origin": origin, 77 | "Access-Control-Request-Method": "GET", 78 | "Access-Control-Request-Headers": "X-Example", 79 | } 80 | response = await client.options("/plugins", headers=headers) 81 | 82 | assert response.status_code == result 83 | 84 | 85 | @pytest.mark.asyncio 86 | @pytest.mark.parametrize("client", [lazy_fixture("client_unauth"), lazy_fixture("client_auth")]) 87 | @pytest.mark.parametrize( 88 | ("query_filter", "query_plugin_ids"), 89 | [ 90 | pytest.param(None, {1, 2, 3, 4, 5, 6, 7, 8}, id="no-query"), 91 | pytest.param("plugin-", {1, 2, 4, 5, 6, 8}, id="query-plugin-"), 92 | pytest.param("", {1, 2, 3, 4, 5, 6, 7, 8}, id="query-empty"), 93 | pytest.param("third", {3}, id="query-third"), 94 | ], 95 | ) 96 | @pytest.mark.parametrize( 97 | ("tags_filter", "tag_plugin_ids"), 98 | [ 99 | 
pytest.param(None, {1, 2, 3, 4, 5, 6, 7, 8}, id="no-tags"), 100 | pytest.param("tag-2", {1, 2, 3, 5, 6, 7}, id="tags-tag-2"), 101 | pytest.param("", {1, 2, 3, 4, 5, 6, 7, 8}, id="tags-empty"), 102 | pytest.param("tag-1,tag-3", {4, 8}, id="tags-tag-1-tag-3"), 103 | ], 104 | ) 105 | @pytest.mark.parametrize( 106 | ("hidden_filter", "hidden_plugin_ids"), 107 | [ 108 | pytest.param(None, {1, 2, 3, 4}, id="no-hidden"), 109 | pytest.param("0", {1, 2, 3, 4}, id="hidden-0"), 110 | pytest.param("false", {1, 2, 3, 4}, id="hidden-false"), 111 | pytest.param("False", {1, 2, 3, 4}, id="hidden-False"), 112 | pytest.param("f", {1, 2, 3, 4}, id="hidden=f"), 113 | pytest.param("1", {1, 2, 3, 4, 5, 6, 7, 8}, id="hidden-1"), 114 | pytest.param("true", {1, 2, 3, 4, 5, 6, 7, 8}, id="hidden-true"), 115 | pytest.param("True", {1, 2, 3, 4, 5, 6, 7, 8}, id="hidden-True"), 116 | pytest.param("t", {1, 2, 3, 4, 5, 6, 7, 8}, id="hidden-t"), 117 | ], 118 | ) 119 | @pytest.mark.parametrize( 120 | ("plugin_sort", "plugin_sort_direction", "id_order"), 121 | [ 122 | pytest.param(None, None, [1, 2, 3, 4, 5, 6, 7, 8], id="no-sort"), 123 | pytest.param(SortType.NAME, None, [1, 2, 4, 5, 6, 8, 7, 3], id="name-sort"), 124 | pytest.param(SortType.NAME, SortDirection.DESC, [3, 7, 8, 6, 5, 4, 2, 1], id="name-desc-sort"), 125 | pytest.param(SortType.NAME, SortDirection.ASC, [1, 2, 4, 5, 6, 8, 7, 3], id="name-asc-sort"), 126 | pytest.param(SortType.DATE, None, [1, 2, 3, 4, 5, 6, 7, 8], id="date-sort"), 127 | pytest.param(SortType.DATE, SortDirection.DESC, [8, 7, 6, 5, 4, 3, 2, 1], id="date-desc-sort"), 128 | pytest.param(SortType.DATE, SortDirection.ASC, [1, 2, 3, 4, 5, 6, 7, 8], id="date-asc-sort"), 129 | ], 130 | ) 131 | async def test_plugins_list_endpoint( 132 | seed_db: "Database", 133 | client: "AsyncClient", 134 | query_filter: "str | None", 135 | query_plugin_ids: set[int], 136 | tags_filter: "str | None", 137 | tag_plugin_ids: set[int], 138 | hidden_filter: "str | bool | None", 139 | hidden_plugin_ids: set[int], 140 | plugin_sort: "SortType", 141 | plugin_sort_direction: "SortDirection", 142 | id_order: list[int], 143 | ): 144 | plugin_ids = query_plugin_ids & tag_plugin_ids & hidden_plugin_ids 145 | plugin_id_order = [id for id in id_order if id in plugin_ids] 146 | params: "dict[str, Union[str | bool]]" = {} 147 | if query_filter is not None: 148 | params["query"] = query_filter 149 | if tags_filter is not None: 150 | params["tags"] = tags_filter 151 | if hidden_filter is not None: 152 | params["hidden"] = hidden_filter 153 | if plugin_sort is not None: 154 | params["sort_by"] = plugin_sort.value 155 | if plugin_sort_direction is not None: 156 | params["sort_direction"] = plugin_sort_direction.value 157 | response = await client.get(f"/plugins?{urlencode(params)}") 158 | expected_response = [ 159 | { 160 | "id": 1, 161 | "name": "plugin-1", 162 | "author": "author-of-plugin-1", 163 | "description": "Description of plugin-1", 164 | "tags": ["tag-1", "tag-2"], 165 | "image_url": "hxxp://fake.domain/artifact_images/plugin-1.png", 166 | "downloads": 0, 167 | "updates": 0, 168 | "created": "2022-02-25T00:00:00Z", 169 | "updated": "2022-02-25T00:00:02Z", 170 | "versions": [ 171 | { 172 | "name": "1.0.0", 173 | "hash": "f06b77407d0ef08f5667591ab386eeff2090c340f3eadf76006db6d1ac721029", 174 | "created": "2022-02-25T00:00:02Z", 175 | "downloads": 0, 176 | "updates": 0, 177 | }, 178 | { 179 | "name": "0.2.0", 180 | "hash": "750e557099102527b927be4b9e79392c8f4e011d8a5848480afb61fc0de4f5af", 181 | "created": "2022-02-25T00:00:01Z", 
182 | "downloads": 0, 183 | "updates": 0, 184 | }, 185 | { 186 | "name": "0.1.0", 187 | "hash": "44733735485ece810402fff9e7a608a49039c0b363e52ff62d07b84ab2b40b06", 188 | "created": "2022-02-25T00:00:00Z", 189 | "downloads": 0, 190 | "updates": 0, 191 | }, 192 | ], 193 | "visible": True, 194 | }, 195 | { 196 | "id": 2, 197 | "name": "plugin-2", 198 | "author": "author-of-plugin-2", 199 | "description": "Description of plugin-2", 200 | "tags": ["tag-2"], 201 | "image_url": "hxxp://fake.domain/2.png", 202 | "downloads": 0, 203 | "updates": 0, 204 | "created": "2022-02-25T00:01:00Z", 205 | "updated": "2022-02-25T00:01:01Z", 206 | "versions": [ 207 | { 208 | "name": "2.0.0", 209 | "hash": "56635138a27a6b0c57f0f06cdd58eadf58fff966516c38fca530e2d0f12a3190", 210 | "created": "2022-02-25T00:01:01Z", 211 | "downloads": 0, 212 | "updates": 0, 213 | }, 214 | { 215 | "name": "1.1.0", 216 | "hash": "aeee42b51db3d73c6b75c08ccd46feff21b6de5f41bf1494d147471df850d947", 217 | "created": "2022-02-25T00:01:00Z", 218 | "downloads": 0, 219 | "updates": 0, 220 | }, 221 | ], 222 | "visible": True, 223 | }, 224 | { 225 | "id": 3, 226 | "name": "third", 227 | "author": "author-of-third", 228 | "description": "Description of third", 229 | "tags": ["tag-2", "tag-3"], 230 | "image_url": "hxxp://fake.domain/artifact_images/third.png", 231 | "downloads": 0, 232 | "updates": 0, 233 | "created": "2022-02-25T00:02:00Z", 234 | "updated": "2022-02-25T00:02:02Z", 235 | "versions": [ 236 | { 237 | "name": "3.2.0", 238 | "hash": "ec2516b144cb429b1473104efcbe345da2b82347fbbb587193a22429a0dc6ab6", 239 | "created": "2022-02-25T00:02:02Z", 240 | "downloads": 0, 241 | "updates": 0, 242 | }, 243 | { 244 | "name": "3.1.0", 245 | "hash": "8d9a561a9fc5c7509b5fe0e54213641e502e3b1e456af34cc44aa0a526f85f9b", 246 | "created": "2022-02-25T00:02:01Z", 247 | "downloads": 0, 248 | "updates": 0, 249 | }, 250 | { 251 | "name": "3.0.0", 252 | "hash": "9463611d748129d063f697ec7bdd770b7d5b82c50b93582e31e6440236ba8f66", 253 | "created": "2022-02-25T00:02:00Z", 254 | "downloads": 0, 255 | "updates": 0, 256 | }, 257 | ], 258 | "visible": True, 259 | }, 260 | { 261 | "id": 4, 262 | "name": "plugin-4", 263 | "author": "author-of-plugin-4", 264 | "description": "Description of plugin-4", 265 | "tags": ["tag-1", "tag-3"], 266 | "image_url": "hxxp://fake.domain/artifact_images/plugin-4.png", 267 | "downloads": 0, 268 | "updates": 0, 269 | "created": "2022-02-25T00:03:00Z", 270 | "updated": "2022-02-25T00:03:03Z", 271 | "versions": [ 272 | { 273 | "name": "4.0.0", 274 | "hash": "8eee479a02359eeb0f30f86f0bec493ba7b31ff738509a3df0f5261dcad8f45f", 275 | "created": "2022-02-25T00:03:03Z", 276 | "downloads": 0, 277 | "updates": 0, 278 | }, 279 | { 280 | "name": "3.0.0", 281 | "hash": "bb70c8d12deee43fb3f2529807b132432c63253c9d27cb9f15f3c4ceae5cfc62", 282 | "created": "2022-02-25T00:03:02Z", 283 | "downloads": 0, 284 | "updates": 0, 285 | }, 286 | { 287 | "name": "2.0.0", 288 | "hash": "02dd930214f64c3694122435b8a58641da279c83cd9beb9b47adf5173e07e6e5", 289 | "created": "2022-02-25T00:03:01Z", 290 | "downloads": 0, 291 | "updates": 0, 292 | }, 293 | { 294 | "name": "1.0.0", 295 | "hash": "51ab66013d901f12a45142248132c0c98539c749b6a3b341ab4da2b9df4cdc09", 296 | "created": "2022-02-25T00:03:00Z", 297 | "downloads": 0, 298 | "updates": 0, 299 | }, 300 | ], 301 | "visible": True, 302 | }, 303 | { 304 | "id": 5, 305 | "name": "plugin-5", 306 | "author": "author-of-plugin-5", 307 | "description": "Description of plugin-5", 308 | "tags": ["tag-1", "tag-2"], 309 | 
"image_url": "hxxp://fake.domain/artifact_images/plugin-5.png", 310 | "downloads": 0, 311 | "updates": 0, 312 | "created": "2022-02-25T00:04:00Z", 313 | "updated": "2022-02-25T00:04:02Z", 314 | "versions": [ 315 | { 316 | "name": "1.0.0", 317 | "hash": "562eec14bf4b01c5769acb1b8854b3382b7bbc7333f45d2fd200a752f72fa3a0", 318 | "created": "2022-02-25T00:04:02Z", 319 | "downloads": 0, 320 | "updates": 0, 321 | }, 322 | { 323 | "name": "0.2.0", 324 | "hash": "37014c0eca288692ff8992ce1ef0d590a76c3eb1c44f7d9dc1e6963221ec87f8", 325 | "created": "2022-02-25T00:04:01Z", 326 | "downloads": 0, 327 | "updates": 0, 328 | }, 329 | { 330 | "name": "0.1.0", 331 | "hash": "6c5e8ab31c430eaed0d9876ea164769913e64094848ca8bfd44d322a769e49cd", 332 | "created": "2022-02-25T00:04:00Z", 333 | "downloads": 0, 334 | "updates": 0, 335 | }, 336 | ], 337 | "visible": False, 338 | }, 339 | { 340 | "id": 6, 341 | "name": "plugin-6", 342 | "author": "author-of-plugin-6", 343 | "description": "Description of plugin-6", 344 | "tags": ["tag-2"], 345 | "image_url": "hxxp://fake.domain/6.png", 346 | "downloads": 0, 347 | "updates": 0, 348 | "created": "2022-02-25T00:05:00Z", 349 | "updated": "2022-02-25T00:05:01Z", 350 | "versions": [ 351 | { 352 | "name": "2.0.0", 353 | "hash": "611a4f133a0e2f7ca4285478d4be7c6e09acc256c9f47d71a075d2af279d2c96", 354 | "created": "2022-02-25T00:05:01Z", 355 | "downloads": 0, 356 | "updates": 0, 357 | }, 358 | { 359 | "name": "1.1.0", 360 | "hash": "dd3ea0a0674ac176f431d0dd3ae11df7a56368f1ce8965c6bf41ae264cbb0eb3", 361 | "created": "2022-02-25T00:05:00Z", 362 | "downloads": 0, 363 | "updates": 0, 364 | }, 365 | ], 366 | "visible": False, 367 | }, 368 | { 369 | "id": 7, 370 | "name": "seventh", 371 | "author": "author-of-seventh", 372 | "description": "Description of seventh", 373 | "tags": ["tag-2", "tag-3"], 374 | "image_url": "hxxp://fake.domain/artifact_images/seventh.png", 375 | "downloads": 0, 376 | "updates": 0, 377 | "created": "2022-02-25T00:06:00Z", 378 | "updated": "2022-02-25T00:06:02Z", 379 | "versions": [ 380 | { 381 | "name": "3.2.0", 382 | "hash": "a4410618d61cf061f508d0c20fb7145bf28ae218eec7154071c3ec03ec04ec5b", 383 | "created": "2022-02-25T00:06:02Z", 384 | "downloads": 0, 385 | "updates": 0, 386 | }, 387 | { 388 | "name": "3.1.0", 389 | "hash": "9848a9d18e91da6cd678adccbbdfa09474cc587e96234dfd72c2a1d0f0c8132c", 390 | "created": "2022-02-25T00:06:01Z", 391 | "downloads": 0, 392 | "updates": 0, 393 | }, 394 | { 395 | "name": "3.0.0", 396 | "hash": "370e6e290c94ba02af39fc11d67f0e8769e00bcb3b7e21499bc0be622fe676e9", 397 | "created": "2022-02-25T00:06:00Z", 398 | "downloads": 0, 399 | "updates": 0, 400 | }, 401 | ], 402 | "visible": False, 403 | }, 404 | { 405 | "id": 8, 406 | "name": "plugin-8", 407 | "author": "author-of-plugin-8", 408 | "description": "Description of plugin-8", 409 | "tags": ["tag-1", "tag-3"], 410 | "image_url": "hxxp://fake.domain/artifact_images/plugin-8.png", 411 | "downloads": 0, 412 | "updates": 0, 413 | "created": "2022-02-25T00:07:00Z", 414 | "updated": "2022-02-25T00:07:03Z", 415 | "versions": [ 416 | { 417 | "name": "4.0.0", 418 | "hash": "44bc28702614ff73ae8c68dc6298369bb25e792776925930bd38ea592df36af9", 419 | "created": "2022-02-25T00:07:03Z", 420 | "downloads": 0, 421 | "updates": 0, 422 | }, 423 | { 424 | "name": "3.0.0", 425 | "hash": "c9514fc40d9c32dee69033b104102abac98e6689ccfe48d947e30991e1778a88", 426 | "created": "2022-02-25T00:07:02Z", 427 | "downloads": 0, 428 | "updates": 0, 429 | }, 430 | { 431 | "name": "2.0.0", 432 | "hash": 
"6f55affd9be35d799a6d6967bbf6822240f19d22a9cbe340443d5c499a4a75ab", 433 | "created": "2022-02-25T00:07:01Z", 434 | "downloads": 0, 435 | "updates": 0, 436 | }, 437 | { 438 | "name": "1.0.0", 439 | "hash": "bae8a710fe1e925b3f1489b7a4e50d6555be40182f238e65736ced607489e3b3", 440 | "created": "2022-02-25T00:07:00Z", 441 | "downloads": 0, 442 | "updates": 0, 443 | }, 444 | ], 445 | "visible": False, 446 | }, 447 | ] 448 | assert response.status_code == 200 449 | assert response.json() == sorted( 450 | [response_obj for response_obj in expected_response if response_obj["id"] in plugin_ids], 451 | key=lambda obj: plugin_id_order.index(obj["id"]), # type: ignore[arg-type] 452 | ) 453 | 454 | 455 | @pytest.mark.asyncio 456 | async def test_submit_endpoint_requires_auth(client_unauth: "AsyncClient"): 457 | response = await client_unauth.post("/__submit") 458 | assert response.status_code == 403 459 | 460 | 461 | @pytest.mark.parametrize( 462 | ( 463 | "db_fixture", 464 | "plugin_submit_data", 465 | "plugin_id", 466 | "name", 467 | "return_code", 468 | "resulting_versions", 469 | "resulting_created_time", 470 | "resulting_updated_time", 471 | "error_msg", 472 | "is_visible", 473 | ), 474 | [ 475 | ( 476 | lazy_fixture("seed_db"), 477 | "new-plugin", 478 | 9, 479 | "new-plugin", 480 | status.HTTP_201_CREATED, 481 | [ 482 | { 483 | "name": "2.0.0", 484 | "hash": "378d3213bf3c5d1924891c05659425e7d62bb786665cb2eb5c88564a327b03c7", 485 | "created": "2022-04-04T00:00:00Z", 486 | "downloads": 0, 487 | "updates": 0, 488 | } 489 | ], 490 | "2022-04-04T00:00:00Z", 491 | "2022-04-04T00:00:00Z", 492 | None, 493 | True, 494 | ), 495 | ( 496 | lazy_fixture("seed_db"), 497 | "plugin-1", 498 | 1, 499 | "plugin-1", 500 | status.HTTP_201_CREATED, 501 | [ 502 | { 503 | "name": "2.0.0", 504 | "hash": "378d3213bf3c5d1924891c05659425e7d62bb786665cb2eb5c88564a327b03c7", 505 | "created": "2022-04-04T00:00:00Z", 506 | "downloads": 0, 507 | "updates": 0, 508 | }, 509 | { 510 | "name": "1.0.0", 511 | "hash": "f06b77407d0ef08f5667591ab386eeff2090c340f3eadf76006db6d1ac721029", 512 | "created": "2022-02-25T00:00:02Z", 513 | "downloads": 0, 514 | "updates": 0, 515 | }, 516 | { 517 | "name": "0.2.0", 518 | "hash": "750e557099102527b927be4b9e79392c8f4e011d8a5848480afb61fc0de4f5af", 519 | "created": "2022-02-25T00:00:01Z", 520 | "downloads": 0, 521 | "updates": 0, 522 | }, 523 | { 524 | "name": "0.1.0", 525 | "hash": "44733735485ece810402fff9e7a608a49039c0b363e52ff62d07b84ab2b40b06", 526 | "created": "2022-02-25T00:00:00Z", 527 | "downloads": 0, 528 | "updates": 0, 529 | }, 530 | ], 531 | "2022-02-25T00:00:00Z", 532 | "2022-04-04T00:00:00Z", 533 | None, 534 | True, 535 | ), 536 | ( 537 | lazy_fixture("seed_db"), 538 | "plugin-5", 539 | 5, 540 | "plugin-5", 541 | status.HTTP_201_CREATED, 542 | [ 543 | { 544 | "name": "2.0.0", 545 | "hash": "378d3213bf3c5d1924891c05659425e7d62bb786665cb2eb5c88564a327b03c7", 546 | "created": "2022-04-04T00:00:00Z", 547 | "downloads": 0, 548 | "updates": 0, 549 | }, 550 | { 551 | "name": "1.0.0", 552 | "hash": "562eec14bf4b01c5769acb1b8854b3382b7bbc7333f45d2fd200a752f72fa3a0", 553 | "created": "2022-02-25T00:04:02Z", 554 | "downloads": 0, 555 | "updates": 0, 556 | }, 557 | { 558 | "name": "0.2.0", 559 | "hash": "37014c0eca288692ff8992ce1ef0d590a76c3eb1c44f7d9dc1e6963221ec87f8", 560 | "created": "2022-02-25T00:04:01Z", 561 | "downloads": 0, 562 | "updates": 0, 563 | }, 564 | { 565 | "name": "0.1.0", 566 | "hash": "6c5e8ab31c430eaed0d9876ea164769913e64094848ca8bfd44d322a769e49cd", 567 | "created": 
"2022-02-25T00:04:00Z", 568 | "downloads": 0, 569 | "updates": 0, 570 | }, 571 | ], 572 | "2022-02-25T00:04:00Z", 573 | "2022-04-04T00:00:00Z", 574 | None, 575 | False, 576 | ), 577 | (lazy_fixture("seed_db"), "plugin-2", 1, "plugin-2", 400, [], "", "", "Version already exists", True), 578 | ], 579 | ids=[ 580 | "creates_new_plugin", 581 | "uploads_new_version", 582 | "uploads_new_version_for_hidden", 583 | "blocks_overriding_existing_version", 584 | ], 585 | indirect=["plugin_submit_data"], 586 | ) 587 | @pytest.mark.asyncio 588 | async def test_submit_endpoint( 589 | client_auth: "AsyncClient", 590 | db_fixture: "Database", 591 | plugin_submit_data: "tuple[dict, dict]", 592 | freezer: "FrozenDateTimeFactory", 593 | plugin_id: int, 594 | name: str, 595 | return_code: int, 596 | resulting_versions: list[dict], 597 | resulting_created_time: "str", 598 | resulting_updated_time: "str", 599 | error_msg: "str | None", 600 | is_visible: bool, 601 | ): 602 | freezer.move_to("2022-04-04T00:00:00Z") 603 | submit_data, submit_files = plugin_submit_data 604 | response = await client_auth.post("/__submit", data=submit_data, files=submit_files) 605 | assert response.status_code == return_code, response.text 606 | if return_code >= status.HTTP_400_BAD_REQUEST: 607 | assert response.json()["message"] == error_msg 608 | else: 609 | assert response.json() == { 610 | "id": plugin_id, 611 | "name": name, 612 | "author": "plugin-author-of-new-plugin", 613 | "description": "Description of our brand new plugin!", 614 | "tags": ["new-tag-2", "tag-1"], 615 | "image_url": ( 616 | f"hxxp://fake.domain/artifact_images/" 617 | f"{name}-c68fb83de3e223e8e79568427c4f4461ff8733bb63465f94330bb1fa7030d236.png" 618 | ), 619 | "created": resulting_created_time, 620 | "updated": resulting_updated_time, 621 | "versions": resulting_versions, 622 | "visible": is_visible, 623 | "downloads": 0, 624 | "updates": 0, 625 | } 626 | 627 | plugin = await db_fixture.get_plugin_by_id(db_fixture.session, plugin_id) 628 | 629 | assert plugin is not None 630 | 631 | assert plugin.name == name 632 | assert plugin.author == "plugin-author-of-new-plugin" 633 | assert plugin.description == "Description of our brand new plugin!" 
634 | assert ( 635 | plugin._image_path 636 | == f"artifact_images/{name}-c68fb83de3e223e8e79568427c4f4461ff8733bb63465f94330bb1fa7030d236.png" 637 | ) 638 | assert len(plugin.tags) == 2 639 | assert plugin.tags[0].tag == "new-tag-2" 640 | assert plugin.tags[1].tag == "tag-1" 641 | assert len(plugin.versions) == len(resulting_versions) 642 | assert plugin.visible is is_visible 643 | for actual, expected in zip(plugin.versions, resulting_versions): 644 | assert actual.name == expected["name"] 645 | assert actual.hash == expected["hash"] 646 | assert actual.downloads == 0 647 | assert actual.updates == 0 648 | 649 | statement = select(Tag).where(Tag.tag == "tag-1").with_only_columns(func.count()).order_by(None) 650 | assert (await db_fixture.session.execute(statement)).scalar() == 1 651 | statement = select(Tag).where(Tag.tag == "new-tag-2").with_only_columns(func.count()).order_by(None) 652 | assert (await db_fixture.session.execute(statement)).scalar() == 1 653 | 654 | list_response = await client_auth.get("/plugins") 655 | returned_ids = {plugin["id"] for plugin in list_response.json()} 656 | if is_visible: 657 | assert plugin_id in returned_ids 658 | else: 659 | assert plugin_id not in returned_ids 660 | 661 | 662 | @pytest.mark.asyncio 663 | async def test_update_endpoint_requires_auth(client_unauth: "AsyncClient"): 664 | response = await client_unauth.post("/__update") 665 | assert response.status_code == 403 666 | 667 | 668 | @pytest.mark.parametrize("make_visible", (True, False), ids=["make_visible", "make_hidden"]) 669 | @pytest.mark.parametrize("pick_visible", (True, False), ids=["pick_visible", "pick_hidden"]) 670 | @pytest.mark.parametrize( 671 | ("with_versions", "override_versions", "custom_image"), 672 | ( 673 | pytest.param( 674 | [ 675 | {"name": "0.1.0", "hash": "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"}, 676 | {"name": "0.2.0", "hash": "yyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy"}, 677 | {"name": "1.0.0", "hash": "zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz"}, 678 | ], 679 | False, 680 | False, 681 | id="without_image-keep_versions", 682 | ), 683 | pytest.param( 684 | [ 685 | {"name": "30.0.0", "hash": "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"}, 686 | {"name": "32.0.0", "hash": "bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb"}, 687 | ], 688 | True, 689 | False, 690 | id="without_image-override_versions", 691 | ), 692 | pytest.param( 693 | [ 694 | {"name": "1.1.0", "hash": "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"}, 695 | {"name": "2.0.0", "hash": "yyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy"}, 696 | ], 697 | False, 698 | True, 699 | id="with_image-keep_versions", 700 | ), 701 | pytest.param( 702 | [ 703 | {"name": "30.0.0", "hash": "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"}, 704 | {"name": "32.0.0", "hash": "bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb"}, 705 | ], 706 | True, 707 | True, 708 | id="with_image-override_versions", 709 | ), 710 | ), 711 | ) 712 | @pytest.mark.asyncio 713 | async def test_update_endpoint( 714 | client_auth: "AsyncClient", 715 | seed_db: "Database", 716 | freezer: "FrozenDateTimeFactory", 717 | custom_image: bool, 718 | pick_visible: bool, 719 | make_visible: bool, 720 | with_versions: list[dict], 721 | override_versions: bool, 722 | ): 723 | plugin_id = { 724 | (False, False): 5, 725 | (False, True): 1, 726 | (True, False): 6, 727 | (True, True): 
2, 728 | }[custom_image, pick_visible] 729 | image_path = f"{plugin_id}.png" if custom_image else "artifact_images/new-plugin-name.png" 730 | resulting_versions_dates = ( 731 | ["2022-04-04T00:00:00Z", "2022-04-04T00:00:00Z"] 732 | if override_versions 733 | else { 734 | (False, False): ["2022-02-25T00:04:02Z", "2022-02-25T00:04:01Z", "2022-02-25T00:04:00Z"], 735 | (False, True): ["2022-02-25T00:00:02Z", "2022-02-25T00:00:01Z", "2022-02-25T00:00:00Z"], 736 | (True, False): ["2022-02-25T00:05:01Z", "2022-02-25T00:05:00Z"], 737 | (True, True): ["2022-02-25T00:01:01Z", "2022-02-25T00:01:00Z"], 738 | }[custom_image, pick_visible] 739 | ) 740 | 741 | freezer.move_to("2022-04-04T00:00:00Z") 742 | response = await client_auth.post( 743 | "/__update", 744 | json={ 745 | "id": plugin_id, 746 | "name": "new-plugin-name", 747 | "author": "New Author", 748 | "description": "New description", 749 | "tags": ["new-tag-1", "tag-2"], 750 | "versions": with_versions, 751 | "visible": "true" if make_visible else "false", 752 | }, 753 | ) 754 | 755 | assert response.status_code == status.HTTP_200_OK, response.json() 756 | 757 | assert (response.json()) == { 758 | "id": plugin_id, 759 | "name": "new-plugin-name", 760 | "author": "New Author", 761 | "description": "New description", 762 | "tags": ["new-tag-1", "tag-2"], 763 | "image_url": f"hxxp://fake.domain/{image_path}", 764 | "created": min(resulting_versions_dates), 765 | "updated": max(resulting_versions_dates), 766 | "versions": [ 767 | {**version, "created": date, "updates": 0, "downloads": 0} 768 | for version, date in zip(reversed(with_versions), resulting_versions_dates) 769 | ], 770 | "visible": make_visible, 771 | "downloads": 0, 772 | "updates": 0, 773 | } 774 | 775 | plugin = await seed_db.get_plugin_by_id(seed_db.session, plugin_id) 776 | 777 | assert plugin is not None 778 | 779 | assert plugin.name == "new-plugin-name" 780 | assert plugin.author == "New Author" 781 | assert plugin.description == "New description" 782 | assert plugin._image_path == (image_path if custom_image else None) 783 | assert plugin.visible is make_visible 784 | assert len(plugin.tags) == 2 785 | assert plugin.tags[0].tag == "new-tag-1" 786 | assert plugin.tags[1].tag == "tag-2" 787 | assert plugin.created.isoformat().replace("+00:00", "Z") == min(resulting_versions_dates) 788 | assert plugin.updated.isoformat().replace("+00:00", "Z") == max(resulting_versions_dates) 789 | assert len(plugin.versions) == len(with_versions) 790 | for actual, expected in zip( 791 | plugin.versions, 792 | [{**version, "created": date} for version, date in zip(reversed(with_versions), resulting_versions_dates)], 793 | ): 794 | assert actual.name == expected["name"] 795 | assert actual.hash == expected["hash"] 796 | assert actual.created.isoformat().replace("+00:00", "Z") == expected["created"] # type:ignore[union-attr] 797 | 798 | statement = select(Tag).where(Tag.tag == "new-tag-1").with_only_columns(func.count()).order_by(None) 799 | assert (await seed_db.session.execute(statement)).scalar() == 1 800 | statement = select(Tag).where(Tag.tag == "tag-2").with_only_columns(func.count()).order_by(None) 801 | assert (await seed_db.session.execute(statement)).scalar() == 1 802 | 803 | 804 | @pytest.mark.asyncio 805 | async def test_delete_endpoint_requires_auth(client_unauth: "AsyncClient"): 806 | response = await client_unauth.post("/__delete") 807 | assert response.status_code == status.HTTP_403_FORBIDDEN 808 | 809 | 810 | @pytest.mark.asyncio 811 | async def test_delete_endpoint( 812 | 
client_auth: "AsyncClient", 813 | seed_db: "Database", 814 | ): 815 | response = await client_auth.post("/__delete", json={"id": 1}) 816 | 817 | assert response.status_code == status.HTTP_204_NO_CONTENT 818 | 819 | with pytest.raises(NoResultFound): 820 | await seed_db.get_plugin_by_id(seed_db.session, 1) 821 | --------------------------------------------------------------------------------