├── .clippy.toml
├── .dockerignore
├── .envrc
├── .github
└── workflows
│ ├── registry-docker.yml
│ └── registry-flavor-demo.yml
├── .gitignore
├── .gitmodules
├── .selfignore
├── Cargo.lock
├── Cargo.toml
├── Dockerfile
├── LICENSE-APACHE
├── LICENSE-MIT
├── README.md
├── constitution.md
├── devenv.nix
├── devenv.yaml
├── docker-compose.yml
├── migrations
├── 20221228013635_init.sql
└── 20230917042610_activity.sql
├── openapi.json
├── public
├── plexo_gh_banner.png
├── plexo_platform_demo.png
└── plexo_platform_demo_2.png
└── src
├── auth
├── core.rs
├── engine.rs
├── jwt.rs
└── mod.rs
├── commons
├── authorization.rs
└── mod.rs
├── config.rs
├── errors
├── definitions.rs
└── mod.rs
├── foundation
└── mod.rs
├── graphql
├── auth.rs
├── mod.rs
├── mutations
│ ├── auth.rs
│ ├── mod.rs
│ └── resources.rs
├── queries
│ ├── ai_functions.rs
│ ├── mod.rs
│ └── resources.rs
└── subscription.rs
├── handlers.rs
├── lib.rs
├── llm
├── mod.rs
├── openai.rs
└── suggestions.rs
├── main.rs
├── openapi
├── api.rs
└── mod.rs
├── sdk
├── activity.rs
├── labels.rs
├── loaders.rs
├── member.rs
├── mod.rs
├── organization.rs
├── project.rs
├── task.rs
├── team.rs
└── utilities.rs
├── statics.rs
└── system
├── core.rs
├── members.rs
├── mod.rs
├── prelude.rs
├── schema.rs
└── subscriptions.rs
/.clippy.toml:
--------------------------------------------------------------------------------
1 |
2 | too-many-arguments-threshold=15
--------------------------------------------------------------------------------
/.dockerignore:
--------------------------------------------------------------------------------
1 | /.env
2 | target
3 | Secrets.toml
--------------------------------------------------------------------------------
/.envrc:
--------------------------------------------------------------------------------
1 | watch_file devenv.nix
2 | watch_file devenv.yaml
3 | watch_file devenv.lock
4 | eval "$(devenv print-dev-env)"
--------------------------------------------------------------------------------
/.github/workflows/registry-docker.yml:
--------------------------------------------------------------------------------
1 | name: "Build image and push to official docker registry"
2 |
3 | on:
4 | push:
5 | branches:
6 | - main
7 |
8 | jobs:
9 | build-push-registry:
10 | name: Build image and push to official docker registry
11 | runs-on: ubuntu-latest
12 | steps:
13 | - name: Checkout
14 | uses: actions/checkout@v3
15 | with:
16 | submodules: recursive
17 |
18 | - name: Login to Registry
19 | uses: docker/login-action@v2
20 | with:
21 | registry: docker.io
22 | username: ${{ secrets.DOCKER_USERNAME }}
23 | password: ${{ secrets.DOCKER_PASSWORD }}
24 |
25 | - name: Extracting Cargo Package Version
26 | id: cargo_version
27 | run: |
28 | echo "version=v$(cargo pkgid | cut -d@ -f2 | cut -d: -f2)" >> $GITHUB_OUTPUT
29 |
30 | - name: Docker meta
31 | id: docker_meta
32 | uses: docker/metadata-action@v4
33 | with:
34 | images: minskylab/plexo
35 | flavor: |
36 | latest=true
37 | tags: |
38 | type=sha,format=long,prefix=sha-
39 | type=raw,value=staging,enable=${{ github.ref == 'refs/heads/dev' }}
40 | type=raw,value=stable,enable=${{ github.ref == 'refs/heads/main' }}
41 | type=raw,value=${{ steps.cargo_version.outputs.version }},enable=${{ github.ref == 'refs/heads/main' }}
42 |
43 | - name: Build and push
44 | id: docker_build
45 | uses: docker/build-push-action@v4
46 | with:
47 | # cache-from: type=gha
48 | # cache-to: type=gha,mode=max
49 | push: true
50 | labels: ${{ steps.docker_meta.outputs.labels }}
51 | tags: ${{ steps.docker_meta.outputs.tags }}
52 |
53 | - name: Telegram Notification
54 | uses: appleboy/telegram-action@master
55 | with:
56 | to: ${{ secrets.TELEGRAM_TO }}
57 | token: ${{ secrets.TELEGRAM_TOKEN }}
58 | message: |
59 | New image pushed to docker registry
60 |
61 | Docker Tags: ${{ steps.docker_meta.outputs.tags }}
62 | Commit message: ${{ github.event.commits[0].message }}
63 |
64 | See changes: https://github.com/${{ github.repository }}/commit/${{github.sha}}
65 |
--------------------------------------------------------------------------------
/.github/workflows/registry-flavor-demo.yml:
--------------------------------------------------------------------------------
1 | name: "Build demo image and push to private registry"
2 |
3 | on:
4 | push:
5 | branches:
6 | - flavor/demo
7 |
8 | jobs:
9 | build-push-registry:
10 | name: Build demo image and push to private registry
11 | runs-on: ubuntu-latest
12 | # Permissions to use OIDC token authentication
13 | permissions:
14 | contents: read
15 | id-token: write
16 | # Allows pushing to the GitHub Container Registry
17 | packages: write
18 |
19 | steps:
20 | - name: Checkout
21 | uses: actions/checkout@v3
22 | with:
23 | submodules: recursive
24 |
25 | - name: Login to Registry
26 | uses: docker/login-action@v2
27 | with:
28 | registry: docker.io
29 | username: ${{ secrets.DOCKER_USERNAME }}
30 | password: ${{ secrets.DOCKER_PASSWORD }}
31 |
32 | - name: Extracting Cargo Package Version
33 | id: cargo_version
34 | run: |
35 | echo "version=v$(cargo pkgid | cut -d@ -f2 | cut -d: -f2)" >> $GITHUB_OUTPUT
36 |
37 | - name: Docker meta
38 | id: docker_meta
39 | uses: docker/metadata-action@v4
40 | with:
41 | images: minskylab/plexo
42 | flavor: |
43 | latest=false
44 | tags: |
45 | type=raw,value=${{ steps.cargo_version.outputs.version }}-demo,enable=${{ github.ref == 'refs/heads/flavor/demo' }}
46 |
47 | - uses: depot/setup-action@v1
48 |
49 | - name: Build and push
50 | id: docker_build
51 | uses: depot/build-push-action@v1
52 | with:
53 | project: qk8wpgrv4g
54 | push: true
55 | labels: ${{ steps.docker_meta.outputs.labels }}
56 | tags: ${{ steps.docker_meta.outputs.tags }}
57 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | /target
2 | .env
3 | /postgres-data
4 | # Devenv
5 | .devenv*
6 | devenv.local.nix
7 | data
8 | .blob
9 | Secrets.toml
10 | .vscode
11 | .DS_Store
12 | .sqlx
13 | .idea
--------------------------------------------------------------------------------
/.gitmodules:
--------------------------------------------------------------------------------
1 | [submodule "plexo-platform"]
2 | path = plexo-platform
3 | url = https://github.com/minskylab/plexo-platform
4 |
--------------------------------------------------------------------------------
/.selfignore:
--------------------------------------------------------------------------------
1 | plexo-platform
2 | Cargo.lock
3 | output.json
4 | output.yaml
--------------------------------------------------------------------------------
/Cargo.toml:
--------------------------------------------------------------------------------
1 | [package]
2 | edition = '2021'
3 | name = 'plexo'
4 | version = '0.2.27'
5 |
6 | [dependencies]
7 | async-graphql = { version = "7.0.1", features = [
8 | "decimal",
9 | "chrono",
10 | "dataloader",
11 | "uuid",
12 | ] }
13 | async-graphql-poem = { version = "7.0.1" }
14 | poem = { version = "2.0.0", features = ["cookie", "static-files"] }
15 | tracing = { version = "0.1.40" }
16 | tracing-subscriber = { version = "0.3.18" }
17 | lazy_static = { version = "1.4.0" }
18 | tokio-stream = "0.1.14"
19 | sqlx = { version = "0.7.3", features = [
20 | "runtime-tokio-native-tls",
21 | "postgres",
22 | "uuid",
23 | "time",
24 | "json",
25 | ] }
26 | tokio = { version = "1.36.0", features = ["full"] }
27 | dotenvy = "0.15.7"
28 | chrono = "0.4.34"
29 | serde = "1.0.196"
30 | serde_json = "1.0.113"
31 | oauth2 = { version = "4.4.2", features = ["reqwest"] }
32 | reqwest = { version = "0.11.24", features = ["json"] }
33 | jsonwebtoken = "9.2.0"
34 | async-trait = "0.1.77"
35 | percent-encoding = "2.3.1"
36 | mime = "0.3.17"
37 | async-openai = "0.18.3"
38 | cookie = "0.18.0"
39 | thiserror = "1.0.57"
40 | uuid = { version = "1.7.0", features = ["v4", "fast-rng", "macro-diagnostics"] }
41 | argon2 = "0.5.3"
42 | poem-openapi = { version = "4.0.0", features = [
43 | "swagger-ui",
44 | "chrono",
45 | "uuid",
46 | ] }
47 |
48 |
49 | [workspace]
50 | members = []
51 |
52 | # [lib]
53 | # proc-macro = true
54 |
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
1 | # Install dependencies only when needed
2 | FROM node:16-alpine AS platform-deps
3 | # Check https://github.com/nodejs/docker-node/tree/b4117f9333da4138b03a546ec926ef50a31506c3#nodealpine to understand why libc6-compat might be needed.
4 | RUN apk add --no-cache libc6-compat
5 | WORKDIR /app
6 |
7 | # Install dependencies based on the preferred package manager
8 | COPY plexo-platform/package.json plexo-platform/yarn.lock ./
9 | RUN \
10 | if [ -f yarn.lock ]; then yarn --frozen-lockfile; \
11 | elif [ -f package-lock.json ]; then npm ci; \
12 | elif [ -f pnpm-lock.yaml ]; then yarn global add pnpm && pnpm i --frozen-lockfile; \
13 | else echo "Lockfile not found." && exit 1; \
14 | fi
15 |
16 |
17 | # Rebuild the source code only when needed
18 | FROM node:16-alpine AS platform-builder
19 | WORKDIR /app
20 | COPY --from=platform-deps /app/node_modules ./node_modules
21 | COPY ./plexo-platform .
22 |
23 | # Next.js collects completely anonymous telemetry data about general usage.
24 | # Learn more here: https://nextjs.org/telemetry
25 | # Uncomment the following line in case you want to disable telemetry during the build.
26 | # ENV NEXT_TELEMETRY_DISABLED 1
27 |
28 | RUN yarn build
29 |
30 | # Start with a rust alpine image
31 | FROM rust:1-alpine3.16 as core-builder
32 | # This is important, see https://github.com/rust-lang/docker-rust/issues/85
33 | ENV RUSTFLAGS="-C target-feature=-crt-static"
34 | # if needed, add additional dependencies here
35 | RUN apk add --no-cache musl-dev
36 | # RUN apk add --no-cache pkgconfig
37 | RUN apk add --no-cache libressl-dev
38 |
39 | # set the workdir and copy the source into it
40 | WORKDIR /app
41 | COPY ./ /app
42 | # do a release build
43 | RUN cargo build --release
44 | RUN strip target/release/plexo
45 |
46 | # use a plain alpine image, the alpine version needs to match the builder
47 | FROM alpine:3.16 as core
48 | # if needed, install additional dependencies here
49 | RUN apk add --no-cache libgcc
50 | RUN apk add --no-cache libressl-dev
51 |
52 | COPY --from=platform-builder /app/out ./plexo-platform/out
53 | # copy the binary into the final image
54 | COPY --from=core-builder /app/target/release/plexo .
55 | # set the binary as entrypoint
56 | ENTRYPOINT ["/plexo"]
--------------------------------------------------------------------------------
/LICENSE-APACHE:
--------------------------------------------------------------------------------
1 | Apache License
2 | Version 2.0, January 2004
3 | http://www.apache.org/licenses/
4 |
5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6 |
7 | 1. Definitions.
8 |
9 | "License" shall mean the terms and conditions for use, reproduction,
10 | and distribution as defined by Sections 1 through 9 of this document.
11 |
12 | "Licensor" shall mean the copyright owner or entity authorized by
13 | the copyright owner that is granting the License.
14 |
15 | "Legal Entity" shall mean the union of the acting entity and all
16 | other entities that control, are controlled by, or are under common
17 | control with that entity. For the purposes of this definition,
18 | "control" means (i) the power, direct or indirect, to cause the
19 | direction or management of such entity, whether by contract or
20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
21 | outstanding shares, or (iii) beneficial ownership of such entity.
22 |
23 | "You" (or "Your") shall mean an individual or Legal Entity
24 | exercising permissions granted by this License.
25 |
26 | "Source" form shall mean the preferred form for making modifications,
27 | including but not limited to software source code, documentation
28 | source, and configuration files.
29 |
30 | "Object" form shall mean any form resulting from mechanical
31 | transformation or translation of a Source form, including but
32 | not limited to compiled object code, generated documentation,
33 | and conversions to other media types.
34 |
35 | "Work" shall mean the work of authorship, whether in Source or
36 | Object form, made available under the License, as indicated by a
37 | copyright notice that is included in or attached to the work
38 | (an example is provided in the Appendix below).
39 |
40 | "Derivative Works" shall mean any work, whether in Source or Object
41 | form, that is based on (or derived from) the Work and for which the
42 | editorial revisions, annotations, elaborations, or other modifications
43 | represent, as a whole, an original work of authorship. For the purposes
44 | of this License, Derivative Works shall not include works that remain
45 | separable from, or merely link (or bind by name) to the interfaces of,
46 | the Work and Derivative Works thereof.
47 |
48 | "Contribution" shall mean any work of authorship, including
49 | the original version of the Work and any modifications or additions
50 | to that Work or Derivative Works thereof, that is intentionally
51 | submitted to Licensor for inclusion in the Work by the copyright owner
52 | or by an individual or Legal Entity authorized to submit on behalf of
53 | the copyright owner. For the purposes of this definition, "submitted"
54 | means any form of electronic, verbal, or written communication sent
55 | to the Licensor or its representatives, including but not limited to
56 | communication on electronic mailing lists, source code control systems,
57 | and issue tracking systems that are managed by, or on behalf of, the
58 | Licensor for the purpose of discussing and improving the Work, but
59 | excluding communication that is conspicuously marked or otherwise
60 | designated in writing by the copyright owner as "Not a Contribution."
61 |
62 | "Contributor" shall mean Licensor and any individual or Legal Entity
63 | on behalf of whom a Contribution has been received by Licensor and
64 | subsequently incorporated within the Work.
65 |
66 | 2. Grant of Copyright License. Subject to the terms and conditions of
67 | this License, each Contributor hereby grants to You a perpetual,
68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69 | copyright license to reproduce, prepare Derivative Works of,
70 | publicly display, publicly perform, sublicense, and distribute the
71 | Work and such Derivative Works in Source or Object form.
72 |
73 | 3. Grant of Patent License. Subject to the terms and conditions of
74 | this License, each Contributor hereby grants to You a perpetual,
75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76 | (except as stated in this section) patent license to make, have made,
77 | use, offer to sell, sell, import, and otherwise transfer the Work,
78 | where such license applies only to those patent claims licensable
79 | by such Contributor that are necessarily infringed by their
80 | Contribution(s) alone or by combination of their Contribution(s)
81 | with the Work to which such Contribution(s) was submitted. If You
82 | institute patent litigation against any entity (including a
83 | cross-claim or counterclaim in a lawsuit) alleging that the Work
84 | or a Contribution incorporated within the Work constitutes direct
85 | or contributory patent infringement, then any patent licenses
86 | granted to You under this License for that Work shall terminate
87 | as of the date such litigation is filed.
88 |
89 | 4. Redistribution. You may reproduce and distribute copies of the
90 | Work or Derivative Works thereof in any medium, with or without
91 | modifications, and in Source or Object form, provided that You
92 | meet the following conditions:
93 |
94 | (a) You must give any other recipients of the Work or
95 | Derivative Works a copy of this License; and
96 |
97 | (b) You must cause any modified files to carry prominent notices
98 | stating that You changed the files; and
99 |
100 | (c) You must retain, in the Source form of any Derivative Works
101 | that You distribute, all copyright, patent, trademark, and
102 | attribution notices from the Source form of the Work,
103 | excluding those notices that do not pertain to any part of
104 | the Derivative Works; and
105 |
106 | (d) If the Work includes a "NOTICE" text file as part of its
107 | distribution, then any Derivative Works that You distribute must
108 | include a readable copy of the attribution notices contained
109 | within such NOTICE file, excluding those notices that do not
110 | pertain to any part of the Derivative Works, in at least one
111 | of the following places: within a NOTICE text file distributed
112 | as part of the Derivative Works; within the Source form or
113 | documentation, if provided along with the Derivative Works; or,
114 | within a display generated by the Derivative Works, if and
115 | wherever such third-party notices normally appear. The contents
116 | of the NOTICE file are for informational purposes only and
117 | do not modify the License. You may add Your own attribution
118 | notices within Derivative Works that You distribute, alongside
119 | or as an addendum to the NOTICE text from the Work, provided
120 | that such additional attribution notices cannot be construed
121 | as modifying the License.
122 |
123 | You may add Your own copyright statement to Your modifications and
124 | may provide additional or different license terms and conditions
125 | for use, reproduction, or distribution of Your modifications, or
126 | for any such Derivative Works as a whole, provided Your use,
127 | reproduction, and distribution of the Work otherwise complies with
128 | the conditions stated in this License.
129 |
130 | 5. Submission of Contributions. Unless You explicitly state otherwise,
131 | any Contribution intentionally submitted for inclusion in the Work
132 | by You to the Licensor shall be under the terms and conditions of
133 | this License, without any additional terms or conditions.
134 | Notwithstanding the above, nothing herein shall supersede or modify
135 | the terms of any separate license agreement you may have executed
136 | with Licensor regarding such Contributions.
137 |
138 | 6. Trademarks. This License does not grant permission to use the trade
139 | names, trademarks, service marks, or product names of the Licensor,
140 | except as required for reasonable and customary use in describing the
141 | origin of the Work and reproducing the content of the NOTICE file.
142 |
143 | 7. Disclaimer of Warranty. Unless required by applicable law or
144 | agreed to in writing, Licensor provides the Work (and each
145 | Contributor provides its Contributions) on an "AS IS" BASIS,
146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147 | implied, including, without limitation, any warranties or conditions
148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149 | PARTICULAR PURPOSE. You are solely responsible for determining the
150 | appropriateness of using or redistributing the Work and assume any
151 | risks associated with Your exercise of permissions under this License.
152 |
153 | 8. Limitation of Liability. In no event and under no legal theory,
154 | whether in tort (including negligence), contract, or otherwise,
155 | unless required by applicable law (such as deliberate and grossly
156 | negligent acts) or agreed to in writing, shall any Contributor be
157 | liable to You for damages, including any direct, indirect, special,
158 | incidental, or consequential damages of any character arising as a
159 | result of this License or out of the use or inability to use the
160 | Work (including but not limited to damages for loss of goodwill,
161 | work stoppage, computer failure or malfunction, or any and all
162 | other commercial damages or losses), even if such Contributor
163 | has been advised of the possibility of such damages.
164 |
165 | 9. Accepting Warranty or Additional Liability. While redistributing
166 | the Work or Derivative Works thereof, You may choose to offer,
167 | and charge a fee for, acceptance of support, warranty, indemnity,
168 | or other liability obligations and/or rights consistent with this
169 | License. However, in accepting such obligations, You may act only
170 | on Your own behalf and on Your sole responsibility, not on behalf
171 | of any other Contributor, and only if You agree to indemnify,
172 | defend, and hold each Contributor harmless for any liability
173 | incurred by, or claims asserted against, such Contributor by reason
174 | of your accepting any such warranty or additional liability.
175 |
176 | END OF TERMS AND CONDITIONS
177 |
--------------------------------------------------------------------------------
/LICENSE-MIT:
--------------------------------------------------------------------------------
1 | Permission is hereby granted, free of charge, to any
2 | person obtaining a copy of this software and associated
3 | documentation files (the "Software"), to deal in the
4 | Software without restriction, including without
5 | limitation the rights to use, copy, modify, merge,
6 | publish, distribute, sublicense, and/or sell copies of
7 | the Software, and to permit persons to whom the Software
8 | is furnished to do so, subject to the following
9 | conditions:
10 |
11 | The above copyright notice and this permission notice
12 | shall be included in all copies or substantial portions
13 | of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
16 | ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
17 | TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
18 | PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
19 | SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
20 | CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
21 | OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
22 | IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
23 | DEALINGS IN THE SOFTWARE.
24 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 | ➡️ Live Demo ⬅️
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 | # Plexo
20 |
21 | Plexo is an innovative, open-source project management platform that harnesses the power of AI to streamline the way you work. Designed to simplify task tracking within projects and teams, Plexo cuts through the complexities of traditional project management, replacing them with intuitive and efficient solutions.
22 |
23 | Plexo's advanced AI functionalities are the heart of the platform. The AI autonomously generates tasks necessary for project completion, taking into account the project's requirements, deadlines, and the team's capabilities. This intelligent task creation optimizes the planning process, freeing up your team to focus on the core work at hand.
24 |
25 |
26 |
27 |
28 |
29 | Plexo is designed to serve as a benchmark for project execution and description, promoting seamless interoperability among diverse teams and organizations. This is achieved by adhering to the principle that system designs reflect their organization's communication structure. This principle, known as Conway's Law, is deeply ingrained in Plexo, making it a highly effective tool for mirroring and enhancing team communication.
30 |
31 | Adopt Plexo to enhance your software project planning and elevate team synergy.
32 |
33 | ## Features
34 |
35 | - 🧠 **AI-Powered Suggestions**: Plexo provides intelligent suggestions to aid in project planning and task management.
36 |
37 | - 📈 **Active Task Tracking**: Follow the progress of tasks/issues in real-time within a project, team, or individual context.
38 |
39 | - 🤖 **Autonomous Task Creation**: Plexo can autonomously generate tasks necessary for project completion, optimizing the planning process.
40 |
41 | - 🤝 **Seamless Collaboration**: Plexo facilitates collaboration between team members, streamlining communication and increasing efficiency.
42 |
43 | - 🔀 **Interoperability**: Designed to become a standard in project description and execution, Plexo aims to enhance interoperability between different organizations and teams.
44 |
45 | - 🔓 **Open-Source and Free Forever**: Plexo is committed to remaining an open-source project, fostering a community of contributors and users.
46 |
47 | - 🍃 **Lightweight and Self-Hosted**: Plexo is designed to be lightweight and self-hostable, reducing dependencies and providing flexibility.
48 |
49 | - 🔄 **Conway's Law Inspired**: Plexo is modeled on the principle that organizations design systems that mirror their communication structure, thus reflecting team communication in its project management system.
50 |
51 | ## Quick Start
52 |
53 | You can try our demo [here](https://demo.plexo.app/). And if you want to deploy your own instance of Plexo-core, you currently need a PostgreSQL database, an OpenAI API key, and a GitHub OAuth app. Then you can run the following command:
54 |
55 | ```bash
56 | docker run \
57 | -p 8080:8080 \
58 | -e DATABASE_URL="postgres://postgres:postgres@localhost:5432/plexo" \
59 | -e OPENAI_API_KEY="" \
60 | -e GITHUB_CLIENT_ID="" \
61 | -e GITHUB_CLIENT_SECRET="" \
62 | -e JWT_ACCESS_TOKEN_SECRET="" \
63 | -e JWT_REFRESH_TOKEN_SECRET="" \
64 | minskylab/plexo
65 | ```
66 |
67 | ⚠️ We're working on a way to deploy Plexo-core without the need for a GitHub OAuth app. If you want to contribute, please check [this issue](https://github.com/minskylab/plexo-core/issues/9).
68 |
69 |
119 |
120 | ## Contribution
121 |
122 | We welcome all contributions to the Plexo project! Whether you're interested in fixing bugs, adding new features, or improving documentation, your input is greatly valued.
123 |
124 | ## License
125 |
126 | Plexo-core is released under both the MIT and Apache 2.0 licenses. Users are free to use, modify, and distribute the software. Comments and feedback are greatly appreciated.
127 |
--------------------------------------------------------------------------------
/constitution.md:
--------------------------------------------------------------------------------
1 | # system
2 |
3 | Your task is to create a simplified Resources and Operations (R&O) representation of a specified piece of source code. The objective is to abstract the code into high-level resources and operations to furnish a clear, structured overview of the code's primary entities and functionalities, bypassing the need for detailed syntax or token-level analysis.
4 |
5 | Definitions:
6 |
7 | - A "Resource" refers to crucial structures, entities, or data types within the code.
8 | - An "Operation" refers to significant actions, functions, or methods executed within the code.
9 |
10 | Guidelines for R&O Representation:
11 |
12 | 1. Resources Identification:
13 | a. Library Imports: List the primary libraries or modules being imported.
14 | b. Input Filters: Catalog input structures or filters.
15 | c. Main Object: Identify the principal object, struct, or class.
16 |
17 | 2. Operations Identification:
18 | a. Under the main object, struct, or class, list the associated operations.
19 | b. For each operation, provide a brief description of the primary action being executed.
20 |
21 | 3. Structuring:
22 | a. Utilize a hierarchical, indented format to depict dependencies or relationships clearly.
23 | b. Ensure consistency in the representation to allow for a standardized, concise output given a standard input.
24 |
25 | 4. Conciseness and Abstraction:
26 | a. Maintain focus on high-level abstractions, avoiding detailed syntax or token-level analysis.
27 | b. Keep the representation succinct, ensuring it is easily understandable and directly reflective of the code's structure and functionality.
28 |
29 | Examples:
30 |
31 | input:
32 | path: /Users/bregy/Documents/minskylab/plexo-core/src/graphql/queries/resources.rs
33 | source:
34 |
35 | ```rust
36 | use std::str::FromStr;
37 |
38 | use async_graphql::{Context, InputObject, Object, Result};
39 | use chrono::{DateTime, Utc};
40 | use uuid::Uuid;
41 |
42 | use crate::{
43 | graphql::auth::extract_context,
44 | sdk::{
45 | activity::{Activity, ActivityOperationType, ActivityResourceType},
46 | labels::Label,
47 | member::{Member, MemberRole},
48 | project::Project,
49 | task::{Task, TaskPriority, TaskStatus},
50 | team::{Team, TeamVisibility},
51 | utilities::DateTimeBridge,
52 | },
53 | };
54 |
55 |
56 | #[derive(Default)]
57 | pub struct ResourcesQuery;
58 |
59 | #[derive(InputObject)]
60 | pub struct TaskFilter {
61 | // placeholder
62 | }
63 |
64 | #[derive(InputObject)]
65 | pub struct MemberFilter {
66 | // placeholder
67 | }
68 |
69 | #[derive(InputObject)]
70 | pub struct TeamFilter {
71 | // placeholder
72 | }
73 |
74 | #[derive(InputObject)]
75 | pub struct ProjectFilter {
76 | // placeholder
77 | }
78 |
79 | #[Object]
80 | impl ResourcesQuery {
81 | async fn tasks(&self, ctx: &Context<'_>, _filter: Option) -> Result> {
82 | // placeholder
83 | }
84 |
85 | async fn task_by_id(&self, ctx: &Context<'_>, id: Uuid) -> Result {
86 | // placeholder
87 | }
88 |
89 | async fn members(
90 | &self,
91 | ctx: &Context<'_>,
92 | _filter: Option,
93 | ) -> Result> {
94 | // placeholder
95 | }
96 |
97 | async fn member_by_id(&self, ctx: &Context<'_>, id: Uuid) -> Result {
98 | // placeholder
99 | }
100 |
101 | async fn member_by_email(&self, ctx: &Context<'_>, email: String) -> Result {
102 | // placeholder
103 | }
104 |
105 | async fn projects(
106 | &self,
107 | ctx: &Context<'_>,
108 | _filter: Option,
109 | ) -> Result> {
110 | // placeholder
111 | }
112 |
113 | async fn project_by_id(&self, ctx: &Context<'_>, id: Uuid) -> Result {
114 | // placeholder
115 | }
116 |
117 | async fn teams(&self, ctx: &Context<'_>, _filter: Option) -> Result> {
118 | // placeholder
119 | }
120 |
121 | async fn team_by_id(&self, ctx: &Context<'_>, id: Uuid) -> Result {
122 | // placeholder
123 | }
124 |
125 | async fn labels(&self, ctx: &Context<'_>) -> Result> {
126 | // placeholder
127 | }
128 |
129 | async fn me(&self, ctx: &Context<'_>) -> Result {
130 | // placeholder
131 | }
132 |
133 | async fn activity(
134 | &self,
135 | ctx: &Context<'_>,
136 | resource_type: Option,
137 | resource_id: Option,
138 | operation_type: Option,
139 | member_id: Option,
140 | ) -> Result> {
141 | // placeholder
142 | }
143 | }
144 | ```
145 |
146 | output:
147 |
148 | ```yaml
149 | Resource: Library Imports
150 | - std, async_graphql, chrono, uuid, crate
151 |
152 | Resource: Input Filters
153 | - TaskFilter, MemberFilter, TeamFilter, ProjectFilter
154 |
155 | Resource: ResourcesQuery Object
156 | Operation: tasks
157 | - Query tasks from database
158 | Operation: task_by_id
159 | - Query a specific task by ID from database
160 | Operation: members
161 | - Query members from database
162 | Operation: member_by_id
163 | - Query a specific member by ID from database
164 | Operation: member_by_email
165 | - Query a specific member by email from database
166 | Operation: projects
167 | - Query projects from database
168 | Operation: project_by_id
169 | - Query a specific project by ID from database
170 | Operation: teams
171 | - Query teams from database
172 | Operation: team_by_id
173 | - Query a specific team by ID from database
174 | Operation: labels
175 | - Query labels from database
176 | Operation: me
177 | - Query the authenticated member's data from database
178 | Operation: activity
179 | - Query activity logs from database with optional filters
180 | ```
181 |
182 | # input
183 |
184 | {{#if element.is_file}}
185 |
186 | path: {{element.path}}
187 | source:
188 |
189 | ```
190 | {{element.content}}
191 | ```
192 |
193 | {{else}}
194 |
195 | path: {{element.path}}
196 | sources:
197 | {{#each element.children as |child|}}
198 |
199 | - path: {{child.path}}
200 | source:
201 | ```
202 | {{child.content}}
203 | ```
204 |
205 | {{/each}}
206 |
207 | {{/if}}
208 |
209 | give me only the output, as plain YAML without code-fence syntax, so the result is directly parsable as YAML.
210 |
--------------------------------------------------------------------------------
/devenv.nix:
--------------------------------------------------------------------------------
1 | { pkgs, ... }:
2 |
3 | {
4 | # https://devenv.sh/basics/
5 | env.GREET = "devenv";
6 |
7 | # https://devenv.sh/packages/
8 | packages = [
9 | pkgs.git
10 | pkgs.rustc
11 | pkgs.cargo
12 | ];
13 |
14 | enterShell = ''
15 | hello
16 | git --version
17 | '';
18 |
19 | # https://devenv.sh/languages/
20 | # languages.nix.enable = true;
21 |
22 | # https://devenv.sh/scripts/
23 | # scripts.hello.exec = "echo hello from $GREET";
24 |
25 | # https://devenv.sh/pre-commit-hooks/
26 | # pre-commit.hooks.shellcheck.enable = true;
27 |
28 | # https://devenv.sh/processes/
29 | # processes.ping.exec = "ping example.com";
30 | }
31 |
--------------------------------------------------------------------------------
/devenv.yaml:
--------------------------------------------------------------------------------
1 | inputs:
2 | nixpkgs:
3 | url: github:NixOS/nixpkgs/nixpkgs-unstable
--------------------------------------------------------------------------------
/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: '3.7'
2 | services:
3 | postgres:
4 | image: postgres:15.2
5 | restart: always
6 | environment:
7 | - POSTGRES_USER=plexo
8 | - POSTGRES_PASSWORD=example
9 | logging:
10 | options:
11 | max-size: 10m
12 | max-file: "3"
13 | ports:
14 | - '5438:5432'
15 | volumes:
16 | - ./postgres-data:/var/lib/postgresql/data
17 | - ./sql/create_tables.sql:/docker-entrypoint-initdb.d/create_tables.sql
18 | # copy the sql script to fill tables
19 |
20 |
21 | pgadmin:
22 | container_name: pgadmin4_container
23 | image: dpage/pgadmin4
24 | restart: always
25 | environment:
26 | PGADMIN_DEFAULT_EMAIL: admin@admin.com
27 | PGADMIN_DEFAULT_PASSWORD: root
28 | ports:
29 | - "5050:80"
30 | plexo:
31 | build:
32 | context: .
33 | dockerfile: Dockerfile
34 | restart: always
35 | environment:
36 | DATABASE_URL: postgres://plexo:example@postgres:5432/plexo
37 | env_file:
38 | - .env
39 | volumes:
40 | - ./data:/data
41 | depends_on:
42 | - postgres
43 | ports:
44 | - 8080:8080
--------------------------------------------------------------------------------
/migrations/20221228013635_init.sql:
--------------------------------------------------------------------------------
1 | --
2 | -- PostgreSQL database dump
3 | --
4 |
5 | -- Dumped from database version 15.2
6 | -- Dumped by pg_dump version 15.2
7 |
8 | SET statement_timeout = 0;
9 | SET lock_timeout = 0;
10 | SET idle_in_transaction_session_timeout = 0;
11 | SET client_encoding = 'UTF8';
12 | SET standard_conforming_strings = on;
13 | SET check_function_bodies = false;
14 | SET xmloption = content;
15 | SET client_min_messages = warning;
16 | SET row_security = off;
17 |
18 | --
19 | -- Name: public; Type: SCHEMA; Schema: -; Owner: -
20 | --
21 |
22 | CREATE SCHEMA IF NOT EXISTS public;
23 |
24 |
25 | --
26 | -- Name: SCHEMA public; Type: COMMENT; Schema: -; Owner: -
27 | --
28 |
29 | -- COMMENT ON SCHEMA public IS 'standard public schema';
30 |
31 |
32 | --
33 | -- Name: set_current_timestamp_updated_at(); Type: FUNCTION; Schema: public; Owner: -
34 | --
35 |
36 | CREATE FUNCTION public.set_current_timestamp_updated_at() RETURNS trigger
37 | LANGUAGE plpgsql
38 | AS $$
39 | DECLARE
40 | _new record;
41 | BEGIN
42 | _new := NEW;
43 | _new."updated_at" = NOW();
44 | RETURN _new;
45 | END;
46 | $$;
47 |
48 |
49 | SET default_tablespace = '';
50 |
51 | SET default_table_access_method = heap;
52 |
53 | --
54 | -- Name: labels; Type: TABLE; Schema: public; Owner: -
55 | --
56 |
57 | CREATE TABLE public.labels (
58 | id uuid DEFAULT gen_random_uuid() NOT NULL,
59 | created_at timestamp with time zone DEFAULT now() NOT NULL,
60 | updated_at timestamp with time zone DEFAULT now() NOT NULL,
61 | name text NOT NULL,
62 | description text,
63 | color character varying
64 | );
65 |
66 |
67 | --
68 | -- Name: labels_by_tasks; Type: TABLE; Schema: public; Owner: -
69 | --
70 |
71 | CREATE TABLE public.labels_by_tasks (
72 | label_id uuid NOT NULL,
73 | task_id uuid NOT NULL
74 | );
75 |
76 |
77 | --
78 | -- Name: members; Type: TABLE; Schema: public; Owner: -
79 | --
80 |
81 | CREATE TABLE public.members (
82 | id uuid DEFAULT gen_random_uuid() NOT NULL,
83 | created_at timestamp with time zone DEFAULT now() NOT NULL,
84 | updated_at timestamp with time zone DEFAULT now() NOT NULL,
85 | name text NOT NULL,
86 | email character varying NOT NULL,
87 | password_hash character varying,
88 | github_id character varying,
89 | google_id character varying,
90 | photo_url character varying,
91 | role character varying
92 | );
93 |
94 |
95 | --
96 | -- Name: members_by_projects; Type: TABLE; Schema: public; Owner: -
97 | --
98 |
99 | CREATE TABLE public.members_by_projects (
100 | member_id uuid NOT NULL,
101 | project_id uuid NOT NULL
102 | );
103 |
104 |
105 | --
106 | -- Name: members_by_teams; Type: TABLE; Schema: public; Owner: -
107 | --
108 |
109 | CREATE TABLE public.members_by_teams (
110 | team_id uuid NOT NULL,
111 | member_id uuid NOT NULL,
112 | role character varying DEFAULT 'Member'::character varying
113 | );
114 |
115 |
116 | --
117 | -- Name: projects; Type: TABLE; Schema: public; Owner: -
118 | --
119 |
120 | CREATE TABLE public.projects (
121 | id uuid DEFAULT gen_random_uuid() NOT NULL,
122 | created_at timestamp with time zone DEFAULT now() NOT NULL,
123 | updated_at timestamp with time zone DEFAULT now() NOT NULL,
124 | name text NOT NULL,
125 | prefix character varying,
126 | owner_id uuid NOT NULL,
127 | description text,
128 | lead_id uuid,
129 | start_date timestamp with time zone,
130 | due_date timestamp with time zone
131 | );
132 |
133 |
134 | --
135 | -- Name: self; Type: TABLE; Schema: public; Owner: -
136 | --
137 |
138 | CREATE TABLE public.self (
139 | id uuid DEFAULT gen_random_uuid() NOT NULL,
140 | created_at timestamp with time zone DEFAULT now() NOT NULL,
141 | updated_at timestamp with time zone DEFAULT now() NOT NULL,
142 | name text NOT NULL
143 | );
144 |
145 |
146 | --
147 | -- Name: tasks; Type: TABLE; Schema: public; Owner: -
148 | --
149 |
150 | CREATE TABLE public.tasks (
151 | id uuid DEFAULT gen_random_uuid() NOT NULL,
152 | created_at timestamp with time zone DEFAULT now() NOT NULL,
153 | updated_at timestamp with time zone DEFAULT now() NOT NULL,
154 | title text NOT NULL,
155 | description text,
156 | owner_id uuid NOT NULL,
157 | status character varying,
158 | priority character varying,
159 | due_date timestamp with time zone,
160 | project_id uuid,
161 | lead_id uuid,
162 | labels jsonb,
163 | count integer NOT NULL,
164 | parent_id uuid
165 | );
166 |
167 |
168 | --
169 | -- Name: tasks_by_assignees; Type: TABLE; Schema: public; Owner: -
170 | --
171 |
172 | CREATE TABLE public.tasks_by_assignees (
173 | task_id uuid NOT NULL,
174 | assignee_id uuid NOT NULL
175 | );
176 |
177 |
178 | --
179 | -- Name: tasks_by_projects; Type: TABLE; Schema: public; Owner: -
180 | --
181 |
182 | CREATE TABLE public.tasks_by_projects (
183 | task_id uuid NOT NULL,
184 | project_id uuid NOT NULL
185 | );
186 |
187 |
188 | --
189 | -- Name: tasks_count_seq; Type: SEQUENCE; Schema: public; Owner: -
190 | --
191 |
192 | CREATE SEQUENCE public.tasks_count_seq
193 | AS integer
194 | START WITH 1
195 | INCREMENT BY 1
196 | NO MINVALUE
197 | NO MAXVALUE
198 | CACHE 1;
199 |
200 |
201 | --
202 | -- Name: tasks_count_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: -
203 | --
204 |
205 | ALTER SEQUENCE public.tasks_count_seq OWNED BY public.tasks.count;
206 |
207 |
208 | --
209 | -- Name: teams; Type: TABLE; Schema: public; Owner: -
210 | --
211 |
212 | CREATE TABLE public.teams (
213 | id uuid DEFAULT gen_random_uuid() NOT NULL,
214 | created_at timestamp with time zone DEFAULT now() NOT NULL,
215 | updated_at timestamp with time zone DEFAULT now() NOT NULL,
216 | name character varying NOT NULL,
217 | owner_id uuid NOT NULL,
218 | visibility character varying,
219 | prefix text
220 | );
221 |
222 |
223 | --
224 | -- Name: teams_by_projects; Type: TABLE; Schema: public; Owner: -
225 | --
226 |
227 | CREATE TABLE public.teams_by_projects (
228 | team_id uuid NOT NULL,
229 | project_id uuid NOT NULL
230 | );
231 |
232 |
233 | --
234 | -- Name: tasks count; Type: DEFAULT; Schema: public; Owner: -
235 | --
236 |
237 | ALTER TABLE ONLY public.tasks ALTER COLUMN count SET DEFAULT nextval('public.tasks_count_seq'::regclass);
238 |
239 |
240 | --
241 | -- Name: labels_by_tasks labels_by_tasks_pkey; Type: CONSTRAINT; Schema: public; Owner: -
242 | --
243 |
244 | ALTER TABLE ONLY public.labels_by_tasks
245 | ADD CONSTRAINT labels_by_tasks_pkey PRIMARY KEY (label_id, task_id);
246 |
247 |
248 | --
249 | -- Name: labels labels_name_key; Type: CONSTRAINT; Schema: public; Owner: -
250 | --
251 |
252 | ALTER TABLE ONLY public.labels
253 | ADD CONSTRAINT labels_name_key UNIQUE (name);
254 |
255 |
256 | --
257 | -- Name: labels labels_pkey; Type: CONSTRAINT; Schema: public; Owner: -
258 | --
259 |
260 | ALTER TABLE ONLY public.labels
261 | ADD CONSTRAINT labels_pkey PRIMARY KEY (id);
262 |
263 |
264 | --
265 | -- Name: members_by_projects members_by_projects_pkey; Type: CONSTRAINT; Schema: public; Owner: -
266 | --
267 |
268 | ALTER TABLE ONLY public.members_by_projects
269 | ADD CONSTRAINT members_by_projects_pkey PRIMARY KEY (member_id, project_id);
270 |
271 |
272 | --
273 | -- Name: members_by_teams members_by_teams_pkey; Type: CONSTRAINT; Schema: public; Owner: -
274 | --
275 |
276 | ALTER TABLE ONLY public.members_by_teams
277 | ADD CONSTRAINT members_by_teams_pkey PRIMARY KEY (team_id, member_id);
278 |
279 |
280 | --
281 | -- Name: members members_github_id_key; Type: CONSTRAINT; Schema: public; Owner: -
282 | --
283 |
284 | ALTER TABLE ONLY public.members
285 | ADD CONSTRAINT members_github_id_key UNIQUE (github_id);
286 |
287 |
288 | --
289 | -- Name: members members_google_id_key; Type: CONSTRAINT; Schema: public; Owner: -
290 | --
291 |
292 | ALTER TABLE ONLY public.members
293 | ADD CONSTRAINT members_google_id_key UNIQUE (google_id);
294 |
295 |
296 | --
297 | -- Name: members members_pkey; Type: CONSTRAINT; Schema: public; Owner: -
298 | --
299 |
300 | ALTER TABLE ONLY public.members
301 | ADD CONSTRAINT members_pkey PRIMARY KEY (id);
302 |
303 |
304 | --
305 | -- Name: projects projects_pkey; Type: CONSTRAINT; Schema: public; Owner: -
306 | --
307 |
308 | ALTER TABLE ONLY public.projects
309 | ADD CONSTRAINT projects_pkey PRIMARY KEY (id);
310 |
311 |
312 | --
313 | -- Name: self self_pkey; Type: CONSTRAINT; Schema: public; Owner: -
314 | --
315 |
316 | ALTER TABLE ONLY public.self
317 | ADD CONSTRAINT self_pkey PRIMARY KEY (id);
318 |
319 |
320 | --
321 | -- Name: tasks_by_assignees tasks_by_assignees_pkey; Type: CONSTRAINT; Schema: public; Owner: -
322 | --
323 |
324 | ALTER TABLE ONLY public.tasks_by_assignees
325 | ADD CONSTRAINT tasks_by_assignees_pkey PRIMARY KEY (task_id, assignee_id);
326 |
327 |
328 | --
329 | -- Name: tasks_by_projects tasks_by_projects_pkey; Type: CONSTRAINT; Schema: public; Owner: -
330 | --
331 |
332 | ALTER TABLE ONLY public.tasks_by_projects
333 | ADD CONSTRAINT tasks_by_projects_pkey PRIMARY KEY (task_id, project_id);
334 |
335 |
336 | --
337 | -- Name: tasks tasks_pkey; Type: CONSTRAINT; Schema: public; Owner: -
338 | --
339 |
340 | ALTER TABLE ONLY public.tasks
341 | ADD CONSTRAINT tasks_pkey PRIMARY KEY (id);
342 |
343 |
344 | --
345 | -- Name: teams_by_projects teams_by_projects_pkey; Type: CONSTRAINT; Schema: public; Owner: -
346 | --
347 |
348 | ALTER TABLE ONLY public.teams_by_projects
349 | ADD CONSTRAINT teams_by_projects_pkey PRIMARY KEY (team_id, project_id);
350 |
351 |
352 | --
353 | -- Name: teams teams_pkey; Type: CONSTRAINT; Schema: public; Owner: -
354 | --
355 |
356 | ALTER TABLE ONLY public.teams
357 | ADD CONSTRAINT teams_pkey PRIMARY KEY (id);
358 |
359 |
360 | --
361 | -- Name: teams teams_prefix_key; Type: CONSTRAINT; Schema: public; Owner: -
362 | --
363 |
364 | ALTER TABLE ONLY public.teams
365 | ADD CONSTRAINT teams_prefix_key UNIQUE (prefix);
366 |
367 |
368 | --
369 | -- Name: labels set_public_labels_updated_at; Type: TRIGGER; Schema: public; Owner: -
370 | --
371 |
372 | CREATE TRIGGER set_public_labels_updated_at BEFORE UPDATE ON public.labels FOR EACH ROW EXECUTE FUNCTION public.set_current_timestamp_updated_at();
373 |
374 |
375 | --
376 | -- Name: TRIGGER set_public_labels_updated_at ON labels; Type: COMMENT; Schema: public; Owner: -
377 | --
378 |
379 | COMMENT ON TRIGGER set_public_labels_updated_at ON public.labels IS 'trigger to set value of column "updated_at" to current timestamp on row update';
380 |
381 |
382 | --
383 | -- Name: members set_public_members_updated_at; Type: TRIGGER; Schema: public; Owner: -
384 | --
385 |
386 | CREATE TRIGGER set_public_members_updated_at BEFORE UPDATE ON public.members FOR EACH ROW EXECUTE FUNCTION public.set_current_timestamp_updated_at();
387 |
388 |
389 | --
390 | -- Name: TRIGGER set_public_members_updated_at ON members; Type: COMMENT; Schema: public; Owner: -
391 | --
392 |
393 | COMMENT ON TRIGGER set_public_members_updated_at ON public.members IS 'trigger to set value of column "updated_at" to current timestamp on row update';
394 |
395 |
396 | --
397 | -- Name: projects set_public_projects_updated_at; Type: TRIGGER; Schema: public; Owner: -
398 | --
399 |
400 | CREATE TRIGGER set_public_projects_updated_at BEFORE UPDATE ON public.projects FOR EACH ROW EXECUTE FUNCTION public.set_current_timestamp_updated_at();
401 |
402 |
403 | --
404 | -- Name: TRIGGER set_public_projects_updated_at ON projects; Type: COMMENT; Schema: public; Owner: -
405 | --
406 |
407 | COMMENT ON TRIGGER set_public_projects_updated_at ON public.projects IS 'trigger to set value of column "updated_at" to current timestamp on row update';
408 |
409 |
410 | --
411 | -- Name: self set_public_self_updated_at; Type: TRIGGER; Schema: public; Owner: -
412 | --
413 |
414 | CREATE TRIGGER set_public_self_updated_at BEFORE UPDATE ON public.self FOR EACH ROW EXECUTE FUNCTION public.set_current_timestamp_updated_at();
415 |
416 |
417 | --
418 | -- Name: TRIGGER set_public_self_updated_at ON self; Type: COMMENT; Schema: public; Owner: -
419 | --
420 |
421 | COMMENT ON TRIGGER set_public_self_updated_at ON public.self IS 'trigger to set value of column "updated_at" to current timestamp on row update';
422 |
423 |
424 | --
425 | -- Name: tasks set_public_tasks_updated_at; Type: TRIGGER; Schema: public; Owner: -
426 | --
427 |
428 | CREATE TRIGGER set_public_tasks_updated_at BEFORE UPDATE ON public.tasks FOR EACH ROW EXECUTE FUNCTION public.set_current_timestamp_updated_at();
429 |
430 |
431 | --
432 | -- Name: TRIGGER set_public_tasks_updated_at ON tasks; Type: COMMENT; Schema: public; Owner: -
433 | --
434 |
435 | COMMENT ON TRIGGER set_public_tasks_updated_at ON public.tasks IS 'trigger to set value of column "updated_at" to current timestamp on row update';
436 |
437 |
438 | --
439 | -- Name: teams set_public_teams_updated_at; Type: TRIGGER; Schema: public; Owner: -
440 | --
441 |
442 | CREATE TRIGGER set_public_teams_updated_at BEFORE UPDATE ON public.teams FOR EACH ROW EXECUTE FUNCTION public.set_current_timestamp_updated_at();
443 |
444 |
445 | --
446 | -- Name: TRIGGER set_public_teams_updated_at ON teams; Type: COMMENT; Schema: public; Owner: -
447 | --
448 |
449 | COMMENT ON TRIGGER set_public_teams_updated_at ON public.teams IS 'trigger to set value of column "updated_at" to current timestamp on row update';
450 |
451 |
452 | --
453 | -- Name: projects projects_owner_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: -
454 | --
455 |
456 | ALTER TABLE ONLY public.projects
457 | ADD CONSTRAINT projects_owner_id_fkey FOREIGN KEY (owner_id) REFERENCES public.members(id) ON UPDATE CASCADE ON DELETE SET NULL;
458 |
459 |
460 | --
461 | -- Name: tasks_by_assignees tasks_by_assignees_assignee_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: -
462 | --
463 |
464 | ALTER TABLE ONLY public.tasks_by_assignees
465 | ADD CONSTRAINT tasks_by_assignees_assignee_id_fkey FOREIGN KEY (assignee_id) REFERENCES public.members(id) ON UPDATE CASCADE ON DELETE CASCADE;
466 |
467 |
468 | --
469 | -- Name: tasks_by_assignees tasks_by_assignees_task_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: -
470 | --
471 |
472 | ALTER TABLE ONLY public.tasks_by_assignees
473 | ADD CONSTRAINT tasks_by_assignees_task_id_fkey FOREIGN KEY (task_id) REFERENCES public.tasks(id) ON UPDATE CASCADE ON DELETE CASCADE;
474 |
475 |
476 | --
477 | -- Name: tasks_by_projects tasks_by_projects_project_fkey; Type: FK CONSTRAINT; Schema: public; Owner: -
478 | --
479 |
480 | ALTER TABLE ONLY public.tasks_by_projects
481 | ADD CONSTRAINT tasks_by_projects_project_fkey FOREIGN KEY (project_id) REFERENCES public.projects(id) ON UPDATE CASCADE ON DELETE CASCADE;
482 |
483 |
484 | --
485 | -- Name: tasks_by_projects tasks_by_projects_task_fkey; Type: FK CONSTRAINT; Schema: public; Owner: -
486 | --
487 |
488 | ALTER TABLE ONLY public.tasks_by_projects
489 | ADD CONSTRAINT tasks_by_projects_task_fkey FOREIGN KEY (task_id) REFERENCES public.tasks(id) ON UPDATE CASCADE ON DELETE CASCADE;
490 |
491 |
492 | --
493 | -- Name: tasks tasks_owner_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: -
494 | --
495 |
496 | ALTER TABLE ONLY public.tasks
497 | ADD CONSTRAINT tasks_owner_id_fkey FOREIGN KEY (owner_id) REFERENCES public.members(id) ON UPDATE CASCADE ON DELETE SET NULL;
498 |
499 |
500 | --
501 | -- Name: SCHEMA public; Type: ACL; Schema: -; Owner: -
502 | --
503 |
504 | -- GRANT CREATE ON SCHEMA public TO web_access;
505 |
506 |
507 | --
508 | -- PostgreSQL database dump complete
509 | --
510 |
511 |
--------------------------------------------------------------------------------
/migrations/20230917042610_activity.sql:
--------------------------------------------------------------------------------
1 | -- Add migration script here
2 |
3 | CREATE TABLE public.activity (
4 | id uuid DEFAULT gen_random_uuid() NOT NULL,
5 | created_at timestamp with time zone DEFAULT now() NOT NULL,
6 | updated_at timestamp with time zone DEFAULT now() NOT NULL,
7 |
8 | member_id uuid NOT NULL,
9 | resource_id uuid NOT NULL,
10 |
11 | operation text NOT NULL,
12 | resource_type text NOT NULL
13 | );
14 |
15 |
16 | ALTER TABLE ONLY public.activity
17 | ADD CONSTRAINT activity_pkey PRIMARY KEY (id);
18 |
19 | ALTER TABLE ONLY public.activity
20 | ADD CONSTRAINT activity_member_id_fkey FOREIGN KEY (member_id) REFERENCES public.members(id) ON DELETE CASCADE;
21 |
22 | CREATE INDEX activity_member_id_idx ON public.activity USING btree (member_id);
23 |
24 | CREATE INDEX activity_resource_id_idx ON public.activity USING btree (resource_id);
--------------------------------------------------------------------------------
/openapi.json:
--------------------------------------------------------------------------------
1 | {
2 | "openapi": "3.0.0",
3 | "info": {
4 | "title": "Hello World",
5 | "version": "1.0"
6 | },
7 | "servers": [
8 | {
9 | "url": "http://localhost:3000/api"
10 | }
11 | ],
12 | "tags": [
13 | {
14 | "name": "Member",
15 | "description": "Operations about members"
16 | },
17 | {
18 | "name": "Project",
19 | "description": "Operations about projects"
20 | },
21 | {
22 | "name": "Task",
23 | "description": "Operations about tasks"
24 | },
25 | {
26 | "name": "Team",
27 | "description": "Operations about teams"
28 | }
29 | ],
30 | "paths": {
31 | "/tasks": {
32 | "post": {
33 | "tags": [
34 | "Task"
35 | ],
36 | "requestBody": {
37 | "content": {
38 | "application/json; charset=utf-8": {
39 | "schema": {
40 | "$ref": "#/components/schemas/Task"
41 | }
42 | }
43 | },
44 | "required": true
45 | },
46 | "responses": {
47 | "200": {
48 |             "description": "Returns the created task.",
49 | "content": {
50 | "application/json; charset=utf-8": {
51 | "schema": {
52 | "$ref": "#/components/schemas/Task"
53 | }
54 | }
55 | }
56 | }
57 | },
58 | "operationId": "create_task"
59 | },
60 | "get": {
61 | "tags": [
62 | "Task"
63 | ],
64 | "responses": {
65 | "200": {
66 |             "description": "Returns the list of tasks.",
67 | "content": {
68 | "application/json; charset=utf-8": {
69 | "schema": {
70 | "type": "array",
71 | "items": {
72 | "$ref": "#/components/schemas/Task"
73 | }
74 | }
75 | }
76 | }
77 | }
78 | },
79 | "operationId": "list_tasks"
80 | }
81 | },
82 | "/tasks/{id}": {
83 | "get": {
84 | "tags": [
85 | "Task"
86 | ],
87 | "parameters": [
88 | {
89 | "name": "id",
90 | "schema": {
91 | "type": "string"
92 | },
93 | "in": "path",
94 | "required": true,
95 | "deprecated": false,
96 | "explode": true
97 | }
98 | ],
99 | "responses": {
100 | "200": {
101 |             "description": "Returns the requested task.",
102 | "content": {
103 | "application/json; charset=utf-8": {
104 | "schema": {
105 | "$ref": "#/components/schemas/Task"
106 | }
107 | }
108 | }
109 | },
110 | "404": {
111 | "description": ""
112 | }
113 | },
114 | "operationId": "get_task"
115 | },
116 | "put": {
117 | "tags": [
118 | "Task"
119 | ],
120 | "parameters": [
121 | {
122 | "name": "id",
123 | "schema": {
124 | "type": "string"
125 | },
126 | "in": "path",
127 | "required": true,
128 | "deprecated": false,
129 | "explode": true
130 | }
131 | ],
132 | "requestBody": {
133 | "content": {
134 | "application/json; charset=utf-8": {
135 | "schema": {
136 | "$ref": "#/components/schemas/Task"
137 | }
138 | }
139 | },
140 | "required": true
141 | },
142 | "responses": {
143 | "200": {
144 |             "description": "Returns the updated task.",
145 | "content": {
146 | "application/json; charset=utf-8": {
147 | "schema": {
148 | "$ref": "#/components/schemas/Task"
149 | }
150 | }
151 | }
152 | },
153 | "404": {
154 | "description": ""
155 | }
156 | },
157 | "operationId": "update_task"
158 | },
159 | "delete": {
160 | "tags": [
161 | "Task"
162 | ],
163 | "parameters": [
164 | {
165 | "name": "id",
166 | "schema": {
167 | "type": "string"
168 | },
169 | "in": "path",
170 | "required": true,
171 | "deprecated": false,
172 | "explode": true
173 | }
174 | ],
175 | "responses": {
176 | "200": {
177 |             "description": "Returns the deleted task.",
178 | "content": {
179 | "application/json; charset=utf-8": {
180 | "schema": {
181 | "$ref": "#/components/schemas/Task"
182 | }
183 | }
184 | }
185 | },
186 | "404": {
187 | "description": ""
188 | }
189 | },
190 | "operationId": "delete_task"
191 | }
192 | },
193 | "/projects": {
194 | "post": {
195 | "tags": [
196 | "Project"
197 | ],
198 | "requestBody": {
199 | "content": {
200 | "application/json; charset=utf-8": {
201 | "schema": {
202 | "$ref": "#/components/schemas/Project"
203 | }
204 | }
205 | },
206 | "required": true
207 | },
208 | "responses": {
209 | "200": {
210 |             "description": "Returns the created project.",
211 | "content": {
212 | "application/json; charset=utf-8": {
213 | "schema": {
214 | "$ref": "#/components/schemas/Project"
215 | }
216 | }
217 | }
218 | }
219 | },
220 | "operationId": "create_project"
221 | },
222 | "get": {
223 | "tags": [
224 | "Project"
225 | ],
226 | "responses": {
227 | "200": {
228 |             "description": "Returns the list of projects.",
229 | "content": {
230 | "application/json; charset=utf-8": {
231 | "schema": {
232 | "type": "array",
233 | "items": {
234 | "$ref": "#/components/schemas/Project"
235 | }
236 | }
237 | }
238 | }
239 | }
240 | },
241 | "operationId": "list_projects"
242 | }
243 | }
244 | },
245 | "components": {
246 | "schemas": {
247 | "Project": {
248 | "type": "object",
249 | "required": [
250 | "id",
251 | "created_at",
252 | "updated_at",
253 | "name",
254 | "owner_id"
255 | ],
256 | "properties": {
257 | "id": {
258 | "type": "string",
259 | "format": "uuid"
260 | },
261 | "created_at": {
262 | "type": "string",
263 | "format": "date-time"
264 | },
265 | "updated_at": {
266 | "type": "string",
267 | "format": "date-time"
268 | },
269 | "name": {
270 | "type": "string"
271 | },
272 | "prefix": {
273 | "type": "string"
274 | },
275 | "owner_id": {
276 | "type": "string",
277 | "format": "uuid"
278 | },
279 | "description": {
280 | "type": "string"
281 | },
282 | "lead_id": {
283 | "type": "string",
284 | "format": "uuid"
285 | },
286 | "start_date": {
287 | "type": "string",
288 | "format": "date-time"
289 | },
290 | "due_date": {
291 | "type": "string",
292 | "format": "date-time"
293 | }
294 | }
295 | },
296 | "Task": {
297 | "type": "object",
298 | "required": [
299 | "id",
300 | "created_at",
301 | "updated_at",
302 | "title",
303 | "owner_id",
304 | "status",
305 | "priority",
306 | "count"
307 | ],
308 | "properties": {
309 | "id": {
310 | "type": "string",
311 | "format": "uuid"
312 | },
313 | "created_at": {
314 | "type": "string",
315 | "format": "date-time"
316 | },
317 | "updated_at": {
318 | "type": "string",
319 | "format": "date-time"
320 | },
321 | "title": {
322 | "type": "string"
323 | },
324 | "description": {
325 | "type": "string"
326 | },
327 | "owner_id": {
328 | "type": "string",
329 | "format": "uuid"
330 | },
331 | "status": {
332 | "$ref": "#/components/schemas/TaskStatus"
333 | },
334 | "priority": {
335 | "$ref": "#/components/schemas/TaskPriority"
336 | },
337 | "due_date": {
338 | "type": "string",
339 | "format": "date-time"
340 | },
341 | "project_id": {
342 | "type": "string",
343 | "format": "uuid"
344 | },
345 | "lead_id": {
346 | "type": "string",
347 | "format": "uuid"
348 | },
349 | "count": {
350 | "type": "integer",
351 | "format": "int32"
352 | },
353 | "parent_id": {
354 | "type": "string",
355 | "format": "uuid"
356 | }
357 | }
358 | },
359 | "TaskPriority": {
360 | "type": "string",
361 | "enum": [
362 | "None",
363 | "Low",
364 | "Medium",
365 | "High",
366 | "Urgent"
367 | ]
368 | },
369 | "TaskStatus": {
370 | "type": "string",
371 | "enum": [
372 | "None",
373 | "Backlog",
374 | "ToDo",
375 | "InProgress",
376 | "Done",
377 | "Canceled"
378 | ]
379 | }
380 | }
381 | }
382 | }
--------------------------------------------------------------------------------
/public/plexo_gh_banner.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/minskylab/plexo-core/c7cc23341e9c641d4589d68dba28d9a1d1ecdce1/public/plexo_gh_banner.png
--------------------------------------------------------------------------------
/public/plexo_platform_demo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/minskylab/plexo-core/c7cc23341e9c641d4589d68dba28d9a1d1ecdce1/public/plexo_platform_demo.png
--------------------------------------------------------------------------------
/public/plexo_platform_demo_2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/minskylab/plexo-core/c7cc23341e9c641d4589d68dba28d9a1d1ecdce1/public/plexo_platform_demo_2.png
--------------------------------------------------------------------------------
/src/auth/core.rs:
--------------------------------------------------------------------------------
1 | use async_graphql::Error;
2 | use chrono::{Duration, Utc};
3 | use oauth2::{AuthorizationCode, CsrfToken};
4 | use poem::http::header::SET_COOKIE;
5 | use poem::http::HeaderMap;
6 | use poem::web::cookie::{Cookie, SameSite};
7 | use poem::web::{Data, Json, Query, Redirect};
8 | use poem::{
9 | handler,
10 | http::{
11 | header::{CACHE_CONTROL, EXPIRES, LOCATION, PRAGMA},
12 | StatusCode,
13 | },
14 | Body, IntoResponse, Response, Result,
15 | };
16 |
17 | use serde::{Deserialize, Serialize};
18 | use serde_json::{json, Value};
19 |
20 | use crate::commons::authorization::{get_token_from_cookie, get_token_from_headers};
21 | use crate::errors::definitions::PlexoAppError;
22 | use crate::system::core::Engine;
23 |
24 | #[derive(Debug, Deserialize)]
25 | pub struct GithubCallbackParams {
26 | code: String,
27 | state: String,
28 | }
29 |
30 | #[derive(Debug, Deserialize, Serialize)]
31 | pub struct AuthenticationResponse {
32 | access_token: String,
33 | token_type: Option,
34 | scope: Option,
35 | }
36 |
37 | pub struct PlexoAuthToken(pub String);
38 |
39 | const GITHUB_USER_API: &str = "https://api.github.com/user";
40 | pub const COOKIE_SESSION_TOKEN_NAME: &str = "plexo-session-token";
41 |
42 | #[handler]
43 | pub async fn github_sign_in_handler(plexo_engine: Data<&Engine>) -> impl IntoResponse {
44 | let Some((url, _)) = plexo_engine.0.auth.new_github_authorize_url() else {
45 | return Response::builder()
46 | .status(StatusCode::INTERNAL_SERVER_ERROR)
47 | .header("Content-Type", "application/json")
48 | .body(Body::from_json(&Error::new("Internal Server Error (github)")).unwrap());
49 | };
50 |
51 | Redirect::temporary(url.to_string())
52 | // .with_header("Set-Cookie", session_token_cookie.to_string())
53 | // .with_header(CACHE_CONTROL, "no-cache, no-store, must-revalidate")
54 | // .with_header(PRAGMA, "no-cache")
55 | // .with_header(EXPIRES, "0")
56 | .into_response()
57 | }
58 |
59 | #[handler]
60 | pub async fn github_callback_handler(
61 | plexo_engine: Data<&Engine>,
62 | params: Query,
63 | ) -> impl IntoResponse {
64 | let code = AuthorizationCode::new(params.code.clone());
65 | let state = CsrfToken::new(params.state.clone());
66 |
67 | let gh_response = plexo_engine.auth.exchange_github_code(code, state).await;
68 |
69 | let Ok(access_token) = gh_response else {
70 | return Response::builder()
71 | .status(StatusCode::INTERNAL_SERVER_ERROR)
72 | .header("Content-Type", "application/json")
73 | .body(Body::from_json(&gh_response).unwrap());
74 | };
75 |
76 | let client = reqwest::Client::new();
77 |
78 | let github_user_data = client
79 | .get(GITHUB_USER_API)
80 | .header("Authorization", format!("token {}", access_token))
81 | .header("User-Agent", "plexo-agent")
82 | .send()
83 | .await
84 | .unwrap()
85 | .json::()
86 | .await
87 | .unwrap();
88 |
89 | let github_id: String = github_user_data
90 | .get("id")
91 | .unwrap()
92 | .as_i64()
93 | .unwrap()
94 | .to_string();
95 |
96 | let user_email = github_user_data
97 | .get("email")
98 | .map(|v| {
99 | v.as_str()
100 | .map(|s| s.to_string())
101 | .unwrap_or(format!("{}@no-email.github.com", github_id.clone()))
102 | })
103 | .unwrap();
104 |
105 | let user_name = github_user_data
106 | .get("name")
107 | .map(|v| {
108 | v.as_str()
109 | .map(|s| s.to_string())
110 | .unwrap_or(github_id.clone())
111 | })
112 | .unwrap();
113 |
114 | let member: crate::sdk::member::Member = match plexo_engine
115 | .get_member_by_github_id(github_id.clone())
116 | .await
117 | {
118 | Some(member) => member,
119 | None => {
120 | plexo_engine
121 | .create_member_from_github(user_email, user_name, github_id)
122 | .await
123 | }
124 | };
125 |
126 | let Ok(session_token) = plexo_engine.auth.jwt_engine.create_session_token(&member) else {
127 | return Response::builder()
128 | .status(StatusCode::INTERNAL_SERVER_ERROR)
129 | .header("Content-Type", "application/json")
130 | .body(Body::from_json(&Error::new("Internal Server Error")).unwrap());
131 | };
132 |
133 | let mut session_token_cookie = Cookie::named(COOKIE_SESSION_TOKEN_NAME);
134 |
135 | session_token_cookie.set_value_str(session_token);
136 | session_token_cookie.set_http_only(true);
137 | session_token_cookie.set_secure(true);
138 | session_token_cookie.set_same_site(SameSite::Lax);
139 | session_token_cookie.set_expires(Utc::now() + Duration::days(7));
140 | session_token_cookie.set_path("/");
141 |
142 | Response::builder()
143 | .status(StatusCode::FOUND)
144 | .header(LOCATION, "/")
145 | .header(CACHE_CONTROL, "no-cache, no-store, must-revalidate")
146 | .header(PRAGMA, "no-cache")
147 | .header(EXPIRES, "0")
148 | .header(SET_COOKIE, session_token_cookie.to_string())
149 | .body(Body::empty())
150 | }
151 |
152 | #[handler]
153 | pub fn logout() -> impl IntoResponse {
154 | let mut session_token_cookie = Cookie::named(COOKIE_SESSION_TOKEN_NAME);
155 |
156 | session_token_cookie.set_value_str("");
157 | session_token_cookie.set_http_only(true);
158 | session_token_cookie.set_secure(true);
159 | session_token_cookie.set_same_site(SameSite::Strict);
160 | session_token_cookie.set_expires(Utc::now() - Duration::days(1));
161 | session_token_cookie.set_path("/");
162 |
163 | Redirect::moved_permanent("/")
164 | .with_header("Set-Cookie", session_token_cookie.to_string())
165 | .with_header(CACHE_CONTROL, "no-cache, no-store, must-revalidate")
166 | .with_header(PRAGMA, "no-cache")
167 | .with_header(EXPIRES, "0")
168 | .into_response()
169 | }
170 |
/// JSON request body for `email_basic_login_handler`.
#[derive(Debug, Deserialize)]
pub struct EmailLoginParams {
    pub email: String,
    pub password: String, // plaintext password; verified against the stored Argon2 hash
}
176 |
/// Password login endpoint: verifies the email/password pair, then issues a
/// session JWT both as an HttpOnly cookie and in the JSON response body.
#[handler]
pub async fn email_basic_login_handler(
    plexo_engine: Data<&Engine>,
    // NOTE(review): generic argument lost in transcription — presumably
    // `Json<EmailLoginParams>`; confirm against the original source.
    params: Json,
) -> impl IntoResponse {
    // Unknown email -> 401. NOTE(review): the distinct "Member not found" body
    // leaks account existence; consider returning the same message as the
    // invalid-password branch.
    let Some(member) = plexo_engine.get_member_by_email(params.email.clone()).await else {
        return Response::builder()
            .status(StatusCode::UNAUTHORIZED)
            .header("Content-Type", "application/json")
            .body(
                Body::from_json(json!({
                    "error": "Member not found"
                }))
                .unwrap(),
            );
    };

    // Members created via OAuth may have no password hash; treat that the same
    // as a wrong password.
    let Some(password_hash) = member.password_hash.clone() else {
        return Response::builder()
            .status(StatusCode::UNAUTHORIZED)
            .header("Content-Type", "application/json")
            .body(
                Body::from_json(json!({
                    "error": "Invalid password"
                }))
                .unwrap(),
            );
    };

    if !plexo_engine
        .auth
        .validate_password(params.password.as_str(), password_hash.as_str())
    {
        return Response::builder()
            .status(StatusCode::UNAUTHORIZED)
            .header("Content-Type", "application/json")
            .body(
                Body::from_json(json!({
                    "error": "Invalid password"
                }))
                .unwrap(),
            );
    };

    // Signing can only fail on an engine misconfiguration -> 500.
    let Ok(session_token) = plexo_engine.auth.jwt_engine.create_session_token(&member) else {
        return Response::builder()
            .status(StatusCode::INTERNAL_SERVER_ERROR)
            .header("Content-Type", "application/json")
            .body(Body::from_json(&Error::new("Internal Server Error")).unwrap());
    };

    // 7-day HttpOnly/Secure session cookie, SameSite=Lax so top-level
    // navigations from other sites still carry it.
    let mut session_token_cookie = Cookie::named(COOKIE_SESSION_TOKEN_NAME);

    session_token_cookie.set_value_str(session_token.clone());
    session_token_cookie.set_http_only(true);
    session_token_cookie.set_secure(true);
    session_token_cookie.set_same_site(SameSite::Lax);
    session_token_cookie.set_expires(Utc::now() + Duration::days(7));
    session_token_cookie.set_path("/");

    // The token is also returned in the body for non-browser clients.
    Response::builder()
        .status(StatusCode::OK)
        .header(SET_COOKIE, session_token_cookie.to_string())
        .header("Content-Type", "application/json")
        .body(Body::from_json(json!({ "access_token": session_token })).unwrap())
}
243 |
/// JSON request body for `email_basic_register_handler`.
#[derive(Debug, Deserialize)]
pub struct EmailRegisterParams {
    pub email: String,
    pub name: String,
    pub password: String, // plaintext; hashed with Argon2 before storage
}
250 |
/// Resolves the session token from either the `Authorization` header or the
/// session cookie, in that order.
///
/// NOTE(review): return generics lost in transcription — presumably
/// `Result<PlexoAuthToken, PlexoAppError>`. The leading underscore and the
/// commented-out call site in the register handler suggest this helper is
/// currently unused.
fn _get_token(headers: &HeaderMap) -> Result {
    if let Some(token) = get_token_from_headers(headers) {
        return Ok(token);
    }

    if let Some(token) = get_token_from_cookie(headers) {
        return Ok(token);
    }

    // No token anywhere: surfaced as a wrapped poem NotFound error.
    Err(PlexoAppError::PoemError(poem::error::NotFoundError))
}
262 |
/// Self-service registration endpoint: creates a member from email/name/password
/// and immediately logs the new member in (cookie + token in the body).
///
/// NOTE(review): unlike the GraphQL `register` mutation, this endpoint performs
/// no authorization check (the token plumbing below is commented out) — confirm
/// open registration is intended.
#[handler]
pub async fn email_basic_register_handler(
    // headers: &HeaderMap,
    plexo_engine: Data<&Engine>,
    // NOTE(review): generics lost in transcription — presumably
    // `Json<EmailRegisterParams>` and a `Result<Response>` return type.
    params: Json,
) -> Result {
    // let token = get_token(headers)?;

    // let (plexo_engine, member_id) = extract_context(ctx)?;

    // Duplicate email -> 401. NOTE(review): 409 Conflict would describe this
    // better than UNAUTHORIZED.
    if (plexo_engine.get_member_by_email(params.email.clone()).await).is_some() {
        return Ok(Response::builder()
            .status(StatusCode::UNAUTHORIZED)
            .header("Content-Type", "application/json")
            .body(
                Body::from_json(json!({
                    "error": "Member already exists"
                }))
                .unwrap(),
            ));
    };

    // Argon2 hash with a fresh random salt (see AuthEngine::hash_password).
    let password_hash = plexo_engine.auth.hash_password(params.password.as_str());

    let Some(member) = plexo_engine
        .create_member_from_email(params.email.clone(), params.name.clone(), password_hash)
        .await
    else {
        return Ok(Response::builder()
            .status(StatusCode::INTERNAL_SERVER_ERROR)
            .header("Content-Type", "application/json")
            .body(Body::from_json(&Error::new("Internal Server Error")).unwrap()));
    };

    let Ok(session_token) = plexo_engine.auth.jwt_engine.create_session_token(&member) else {
        return Ok(Response::builder()
            .status(StatusCode::INTERNAL_SERVER_ERROR)
            .header("Content-Type", "application/json")
            .body(Body::from_json(&Error::new("Internal Server Error")).unwrap()));
    };

    // Same 7-day session cookie the login handler issues.
    let mut session_token_cookie = Cookie::named(COOKIE_SESSION_TOKEN_NAME);

    session_token_cookie.set_value_str(session_token.clone());
    session_token_cookie.set_http_only(true);
    session_token_cookie.set_secure(true);
    session_token_cookie.set_same_site(SameSite::Lax);
    session_token_cookie.set_expires(Utc::now() + Duration::days(7));
    session_token_cookie.set_path("/");

    Ok(Response::builder()
        .status(StatusCode::OK)
        .header(SET_COOKIE, session_token_cookie.to_string())
        .header("Content-Type", "application/json")
        .body(Body::from_json(json!({ "access_token": session_token })).unwrap()))
}
319 |
/// API-style logout: expires the session cookie and returns an empty
/// `access_token` JSON body (no redirect — compare with `logout` above).
///
/// NOTE(review): return generic lost in transcription — presumably
/// `Result<Response>`.
#[handler]
pub async fn logout_handler() -> Result {
    // plexo_engine: Data<&Engine>
    let mut session_token_cookie = Cookie::named(COOKIE_SESSION_TOKEN_NAME);

    // Blank value + past expiry makes the browser drop the cookie.
    session_token_cookie.set_value_str("");
    session_token_cookie.set_http_only(true);
    session_token_cookie.set_secure(true);
    session_token_cookie.set_same_site(SameSite::Strict);
    session_token_cookie.set_expires(Utc::now() - Duration::days(1));
    session_token_cookie.set_path("/");

    Ok(Response::builder()
        .status(StatusCode::OK)
        .header(SET_COOKIE, session_token_cookie.to_string())
        .header("Content-Type", "application/json")
        .body(Body::from_json(json!({ "access_token": "" })).unwrap()))
}
338 |
--------------------------------------------------------------------------------
/src/auth/engine.rs:
--------------------------------------------------------------------------------
1 | use std::error::Error;
2 |
3 | use argon2::{
4 | password_hash::{rand_core::OsRng, SaltString},
5 | Argon2, PasswordHash, PasswordHasher, PasswordVerifier,
6 | };
7 | use oauth2::{
8 | basic::BasicClient, reqwest::async_http_client, AuthUrl, AuthorizationCode, ClientId,
9 | ClientSecret, CsrfToken, RedirectUrl, Scope, TokenResponse, TokenUrl,
10 | };
11 |
12 | use reqwest::Url;
13 |
14 | use super::{
15 | core::PlexoAuthToken,
16 | jwt::{JWTEngine, PlexoAuthTokenClaims},
17 | };
18 |
/// Authentication facade: owns the JWT engine plus optional OAuth clients.
///
/// NOTE(review): field generics lost in transcription — presumably
/// `Option<BasicClient>` for both provider fields. Google support is stubbed
/// (field is never set; see `new`).
#[derive(Clone)]
pub struct AuthEngine {
    pub jwt_engine: JWTEngine,

    github_client: Option,
    _google_client: Option,
}
26 |
impl AuthEngine {
    /// Builds the engine. GitHub OAuth is enabled only when id, secret and
    /// redirect URL are all provided.
    ///
    /// NOTE(review): generics lost in transcription — the three GitHub
    /// parameters and the `github_client` local are presumably
    /// `Option<String>` / `Option<BasicClient>`; confirm upstream.
    pub fn new(
        jwt_access_token_secret: String,
        jwt_refresh_token_secret: String,
        //
        github_client_id: Option,
        github_client_secret: Option,
        github_redirect_url: Option,
    ) -> Self {
        let mut github_client: Option = None;

        // All three GitHub settings must be present to construct the client.
        if let (Some(github_client_id), Some(github_client_secret), Some(github_redirect_url)) =
            (github_client_id, github_client_secret, github_redirect_url)
        {
            let github_client_id = ClientId::new(github_client_id.to_string());
            let github_client_secret = ClientSecret::new(github_client_secret.to_string());

            // GitHub's fixed OAuth2 endpoints; `expect` is acceptable for
            // constant, known-valid URLs.
            let auth_url = AuthUrl::new("https://github.com/login/oauth/authorize".to_string())
                .expect("Invalid authorization endpoint URL");
            let token_url =
                TokenUrl::new("https://github.com/login/oauth/access_token".to_string())
                    .expect("Invalid token endpoint URL");

            github_client = Some(
                BasicClient::new(
                    github_client_id,
                    Some(github_client_secret),
                    auth_url,
                    Some(token_url),
                )
                .set_redirect_uri(
                    RedirectUrl::new(github_redirect_url.to_string())
                        .expect("Invalid redirect URL"),
                ),
            );
        }

        // NOTE(review): a commented-out `match`-based draft of the block above
        // was removed during review; recover from VCS history if needed.

        let jwt_engine = JWTEngine::new(
            jwt_access_token_secret.to_string(),
            jwt_refresh_token_secret.to_string(),
        );

        Self {
            jwt_engine,
            github_client,
            // Google OAuth not implemented yet.
            _google_client: None,
        }
    }

    /// Returns a GitHub authorization URL plus a CSRF token, or `None` when
    /// the GitHub client is not configured.
    pub fn new_github_authorize_url(&self) -> Option<(Url, CsrfToken)> {
        self.github_client.as_ref().map(|client| {
            client
                .authorize_url(CsrfToken::new_random)
                .add_scope(Scope::new("user:email".to_string()))
                .url()
        })
    }

    /// Exchanges a GitHub OAuth authorization code for an access token.
    ///
    /// NOTE(review): return generic lost in transcription — presumably
    /// `Result<String, String>`. Also note `_state` is accepted but never
    /// compared against the stored CSRF token, and the `unwrap()` panics if
    /// GitHub OAuth is unconfigured — callers must check
    /// `has_github_client()` first.
    pub async fn exchange_github_code(
        &self,
        code: AuthorizationCode,
        _state: CsrfToken,
    ) -> Result {
        let token_result = self
            .github_client
            .as_ref()
            .unwrap()
            .exchange_code(code)
            .request_async(async_http_client)
            .await;

        match token_result {
            Ok(token) => Ok(token.access_token().secret().to_string()),
            Err(e) => Err(e.to_string()),
        }
    }

    /// Verifies a session token and returns its claims.
    ///
    /// NOTE(review): return generics lost in transcription — presumably
    /// `Result<PlexoAuthTokenClaims, Box<dyn Error>>`.
    pub fn extract_claims(
        &self,
        plexo_auth_token: &PlexoAuthToken,
    ) -> Result> {
        Ok(self
            .jwt_engine
            .decode_session_token(plexo_auth_token.0.as_str())?)
    }

    /// Constant-interface Argon2 verification; a malformed stored hash is
    /// treated as a failed match rather than an error.
    pub fn validate_password(&self, password: &str, password_hash: &str) -> bool {
        let Ok(parsed_hash) = PasswordHash::new(password_hash) else {
            return false;
        };

        Argon2::default()
            .verify_password(password.as_bytes(), &parsed_hash)
            .is_ok()
    }

    /// Hashes a password with Argon2 and a fresh random salt.
    /// NOTE(review): `unwrap()` here panics only if hashing itself fails.
    pub fn hash_password(&self, password: &str) -> String {
        let salt = SaltString::generate(&mut OsRng);

        Argon2::default()
            .hash_password(password.as_bytes(), &salt)
            .unwrap()
            .to_string()
    }

    /// True when GitHub OAuth login is configured.
    pub fn has_github_client(&self) -> bool {
        self.github_client.is_some()
    }

    /// Always false today — Google OAuth is not wired up (see `new`).
    pub fn has_google_client(&self) -> bool {
        self._google_client.is_some()
    }
}
167 |
--------------------------------------------------------------------------------
/src/auth/jwt.rs:
--------------------------------------------------------------------------------
1 | use chrono::Utc;
2 | use jsonwebtoken::{decode, encode, errors::Error, DecodingKey, EncodingKey, Header};
3 | use serde::{Deserialize, Serialize};
4 | use uuid::Uuid;
5 |
6 | use crate::sdk::member::Member;
7 |
/// Signs and verifies session JWTs with a shared HMAC secret.
#[derive(Clone)]
pub struct JWTEngine {
    access_token_secret: String,
    // refresh_token_secret: String,
}
13 |
/// Claims carried in a Plexo session JWT (see `create_session_token` for the
/// values written into each field).
#[derive(Debug, Serialize, Deserialize)]
pub struct PlexoAuthTokenClaims {
    iss: String, // issuer — "Plexo"
    aud: String, // audience — "session.plexo.app"
    sub: String, // subject — the member's UUID as a string
    exp: usize,  // expiry, seconds since the Unix epoch
}
21 |
22 | impl PlexoAuthTokenClaims {
23 | pub fn member_id(&self) -> Uuid {
24 | Uuid::parse_str(&self.sub).unwrap()
25 | }
26 | }
27 |
impl JWTEngine {
    /// Creates the engine. The refresh-token secret is accepted for interface
    /// stability but currently unused (its field is commented out above).
    pub fn new(access_token_secret: String, _refresh_token_secret: String) -> Self {
        Self {
            access_token_secret,
            // refresh_token_secret,
        }
    }

    /// Signs a 7-day session JWT for `member` using the default header
    /// algorithm and the access-token secret.
    ///
    /// NOTE(review): return generic lost in transcription — presumably
    /// `Result<String, Error>`.
    pub fn create_session_token(&self, member: &Member) -> Result {
        let claims = PlexoAuthTokenClaims {
            iss: "Plexo".to_string(),
            aud: "session.plexo.app".to_string(),
            sub: member.id.to_string(),
            exp: (Utc::now() + chrono::Duration::days(7)).timestamp() as usize,
        };

        let token = encode(
            &Header::default(),
            &claims,
            &EncodingKey::from_secret(self.access_token_secret.as_ref()),
        )?;

        Ok(token)
    }

    /// Verifies and decodes a session token using `Validation::default()`.
    ///
    /// NOTE(review): generics lost in transcription — presumably
    /// `Result<PlexoAuthTokenClaims, Error>` and
    /// `decode::<PlexoAuthTokenClaims>(...)`.
    pub fn decode_session_token(&self, token: &str) -> Result {
        let token_data = decode::(
            token,
            &DecodingKey::from_secret(self.access_token_secret.as_ref()),
            &jsonwebtoken::Validation::default(),
        )?;

        Ok(token_data.claims)
    }

    // NOTE(review): commented-out draft methods (decode_access_token,
    // decode_refresh_token, refresh_access_token) removed during review;
    // recover from VCS history if refresh-token support is revived.
}
102 |
--------------------------------------------------------------------------------
/src/auth/mod.rs:
--------------------------------------------------------------------------------
1 | pub mod core;
2 | pub mod engine;
3 | pub mod jwt;
4 |
--------------------------------------------------------------------------------
/src/commons/authorization.rs:
--------------------------------------------------------------------------------
1 | use cookie::Cookie;
2 | use poem::http::HeaderMap;
3 |
4 | use crate::auth::core::{PlexoAuthToken, COOKIE_SESSION_TOKEN_NAME};
5 |
/// Reads the raw `Authorization` header value as a session token.
///
/// NOTE(review): return generic lost in transcription — presumably
/// `Option<PlexoAuthToken>`. The value is taken verbatim; a `Bearer ` prefix,
/// if the client sends one, is NOT stripped here — confirm callers expect that.
pub fn get_token_from_headers(headers: &HeaderMap) -> Option {
    headers
        .get("Authorization")
        .and_then(|value| value.to_str().map(|s| PlexoAuthToken(s.to_string())).ok())
}
11 |
/// Extracts the session token from the request's `Cookie` header, if present.
///
/// NOTE(review): return generic lost in transcription — presumably
/// `Option<PlexoAuthToken>`.
pub fn get_token_from_cookie(headers: &HeaderMap) -> Option {
    // Non-UTF-8 header values are treated as absent.
    let raw_cookie = headers.get("Cookie").and_then(|c| c.to_str().ok())?;

    get_token_from_raw_cookie(raw_cookie)
}
17 |
/// Scans a raw `Cookie` header string for the session-token cookie.
///
/// NOTE(review): return generic lost in transcription — presumably
/// `Option<PlexoAuthToken>`. Also note the bare `println!` on parse failure —
/// consider a proper logger.
pub fn get_token_from_raw_cookie(raw_cookie: &str) -> Option {
    for cookie in Cookie::split_parse(raw_cookie) {
        // Skip individual malformed cookies instead of failing the whole scan.
        let Ok(cookie) = cookie else {
            println!("Error parsing cookie");
            continue;
        };

        if cookie.name() == COOKIE_SESSION_TOKEN_NAME {
            return Some(PlexoAuthToken(cookie.value().to_string()));
        }
    }

    None
}
32 |
--------------------------------------------------------------------------------
/src/commons/mod.rs:
--------------------------------------------------------------------------------
1 | pub mod authorization;
2 |
--------------------------------------------------------------------------------
/src/config.rs:
--------------------------------------------------------------------------------
1 | use std::env::var;
2 |
3 | use lazy_static::lazy_static;
4 |
lazy_static! {
    // Server binding and externally-visible URL, each derivable from the
    // previous unless explicitly overridden via environment variables.
    pub static ref HOST: String = var("HOST").unwrap_or("0.0.0.0".into());
    pub static ref PORT: String = var("PORT").unwrap_or("8080".into());
    pub static ref URL: String = var("URL").unwrap_or(format!("{}:{}", *HOST, *PORT));
    pub static ref SCHEMA: String = var("SCHEMA").unwrap_or("http".into());
    pub static ref DOMAIN: String = var("DOMAIN").unwrap_or(format!("{}://{}", *SCHEMA, *URL));
    //
    // The database URL is the only hard requirement; startup panics without it.
    pub static ref DATABASE_URL: String = var("DATABASE_URL").expect("DATABASE_URL environment variable not set");
    // NOTE(review): generics lost in transcription — the two GitHub vars are
    // presumably `Option<String>`.
    pub static ref GITHUB_CLIENT_ID: Option = var("GITHUB_CLIENT_ID").ok();
    pub static ref GITHUB_CLIENT_SECRET: Option = var("GITHUB_CLIENT_SECRET").ok();
    pub static ref GITHUB_REDIRECT_URL: String = var("GITHUB_REDIRECT_URL").unwrap_or(format!("{}/auth/github/callback", *DOMAIN));

    pub static ref LLM_MODEL_NAME: String = var("LLM_MODEL_NAME").unwrap_or("gpt-3.5-turbo".into());

    // Bootstrap admin credentials. NOTE(review): the defaults
    // ("admin@plexo.app" / "admin") are insecure — confirm deployments
    // always override them.
    pub static ref ADMIN_EMAIL: String = var("ADMIN_EMAIL").unwrap_or("admin@plexo.app".into());
    pub static ref ADMIN_PASSWORD: String = var("ADMIN_PASSWORD").unwrap_or("admin".into());
    pub static ref ADMIN_NAME: String = var("ADMIN_NAME").unwrap_or("Admin".into());

    pub static ref ORGANIZATION_NAME: String = var("ORGANIZATION_NAME").unwrap_or("Plexo".into());

    // NOTE(review): JWT secrets default to "secret" — verify production
    // deployments override both.
    pub static ref JWT_ACCESS_TOKEN_SECRET: String = var("JWT_ACCESS_TOKEN_SECRET").unwrap_or("secret".into());
    pub static ref JWT_REFRESH_TOKEN_SECRET: String = var("JWT_REFRESH_TOKEN_SECRET").unwrap_or("secret".into());

    // Only the exact lowercase string "true" enables the static page.
    pub static ref STATIC_PAGE_ENABLED: bool = var("STATIC_PAGE_ENABLED").unwrap_or("false".into()).to_lowercase() == "true";
}
30 |
--------------------------------------------------------------------------------
/src/errors/definitions.rs:
--------------------------------------------------------------------------------
1 | use thiserror::Error;
2 |
/// Application-level error taxonomy; `thiserror` generates the `Display`
/// messages surfaced to GraphQL/HTTP callers.
#[derive(Error, Debug)]
pub enum PlexoAppError {
    #[error("Authorization token not provided")]
    MissingAuthorizationToken,
    #[error("Invalid authorization token")]
    InvalidAuthorizationToken,
    // NOTE(review): `EmailAlreadyInUse` and `EmailAlreadyExists` overlap —
    // consider consolidating into one variant.
    #[error("Email already in use")]
    EmailAlreadyInUse,
    #[error("Password isn't valid")]
    InvalidPassword,
    #[error("Email not found")]
    EmailNotFound,
    #[error("Email already exists")]
    EmailAlreadyExists,
    // Wrapper so poem's NotFoundError converts with `?`.
    #[error("Poem error")]
    PoemError(#[from] poem::error::NotFoundError),
}
20 |
--------------------------------------------------------------------------------
/src/errors/mod.rs:
--------------------------------------------------------------------------------
1 | pub mod definitions;
2 |
--------------------------------------------------------------------------------
/src/foundation/mod.rs:
--------------------------------------------------------------------------------
1 |
2 |
--------------------------------------------------------------------------------
/src/graphql/auth.rs:
--------------------------------------------------------------------------------
1 | use async_graphql::{Context, Result};
2 | use uuid::Uuid;
3 |
4 | use crate::{auth::core::PlexoAuthToken, errors::definitions::PlexoAppError, system::core::Engine};
5 |
/// Pulls the authenticated engine and member id out of the GraphQL context.
/// This is the single authorization gate used by all resolvers.
///
/// NOTE(review): turbofish type parameters on both `ctx.data::…()` calls were
/// lost in transcription — presumably `ctx.data::<PlexoAuthToken>()` and
/// `ctx.data::<Engine>()`.
pub fn extract_context(ctx: &Context<'_>) -> Result<(Engine, Uuid)> {
    // No token attached to the request context -> missing-token error.
    let Ok(auth_token) = &ctx.data::() else {
        return Err(PlexoAppError:: MissingAuthorizationToken.into());
    };

    let plexo_engine = ctx.data::()?.to_owned();

    // Signature/expiry validation happens inside `extract_claims`.
    let Ok(claims) = plexo_engine.auth.extract_claims(auth_token) else {
        return Err(PlexoAppError:: InvalidAuthorizationToken.into());
    };

    let member_id = claims.member_id();

    Ok((plexo_engine, member_id))
}
21 |
--------------------------------------------------------------------------------
/src/graphql/mod.rs:
--------------------------------------------------------------------------------
1 | pub mod auth;
2 | pub mod mutations;
3 | pub mod queries;
4 | pub mod subscription;
5 |
--------------------------------------------------------------------------------
/src/graphql/mutations/auth.rs:
--------------------------------------------------------------------------------
1 | use async_graphql::{Context, Object, Result, SimpleObject};
2 |
3 | use crate::{
4 | errors::definitions::PlexoAppError, graphql::auth::extract_context, system::core::Engine,
5 | };
6 |
/// GraphQL mutation group for authentication (login / register).
#[derive(Default)]
pub struct AuthMutation;
9 |
/// Returned by both `login` and `register`: the session JWT plus the member id.
#[derive(SimpleObject)]
struct LoginResponse {
    token: String,
    member_id: String,
}
15 |
#[Object]
impl AuthMutation {
    /// Email/password login. On success returns a fresh session token.
    ///
    /// NOTE(review): generics lost in transcription — presumably a
    /// `Result<LoginResponse>` return and `ctx.data::<Engine>()`.
    async fn login(
        &self,
        ctx: &Context<'_>,
        email: String,
        password: String,
    ) -> Result {
        let plexo_engine = ctx.data::()?.to_owned();

        let Some(member) = plexo_engine.get_member_by_email(email.clone()).await else {
            return Err(PlexoAppError::EmailNotFound.into());
        };

        // OAuth-created members have no password hash; treat as invalid password.
        let Some(password_hash) = member.password_hash.clone() else {
            return Err(PlexoAppError::InvalidPassword.into());
        };

        if !plexo_engine
            .auth
            .validate_password(password.as_str(), password_hash.as_str())
        {
            return Err(PlexoAppError::InvalidPassword.into());
        };

        // NOTE(review): a token-signing failure is reported as InvalidPassword,
        // which is misleading — consider a dedicated error variant.
        let Ok(session_token) = plexo_engine.auth.jwt_engine.create_session_token(&member) else {
            return Err(PlexoAppError::InvalidPassword.into());
        };

        Ok(LoginResponse {
            token: session_token,
            member_id: member.id.to_string(),
        })
    }

    /// Creates a member and returns a session token for it. Requires an
    /// authenticated caller (unlike the HTTP register endpoint).
    async fn register(
        &self,
        ctx: &Context<'_>,
        email: String,
        name: String,
        password: String,
    ) -> Result {
        let (plexo_engine, _member_id) = extract_context(ctx)?;

        if (plexo_engine.get_member_by_email(email.clone()).await).is_some() {
            return Err(PlexoAppError::EmailAlreadyExists.into());
        };

        let password_hash = plexo_engine.auth.hash_password(password.as_str());

        // NOTE(review): creation failure is reported as EmailAlreadyExists even
        // though the duplicate check above already passed — consider a
        // dedicated error variant.
        let Some(member) = plexo_engine
            .create_member_from_email(email.clone(), name.clone(), password_hash)
            .await
        else {
            return Err(PlexoAppError::EmailAlreadyExists.into());
        };

        let Ok(session_token) = plexo_engine.auth.jwt_engine.create_session_token(&member) else {
            return Err(PlexoAppError::InvalidPassword.into());
        };

        Ok(LoginResponse {
            token: session_token,
            member_id: member.id.to_string(),
        })
    }
}
83 |
--------------------------------------------------------------------------------
/src/graphql/mutations/mod.rs:
--------------------------------------------------------------------------------
1 | pub mod auth;
2 | pub mod resources;
3 |
4 | use async_graphql::MergedObject;
5 |
6 | use self::{auth::AuthMutation, resources::ResourcesMutation};
7 |
8 | // use super::{auth_mutation:i:AuthMutation, resources_mutation::ResourcesMutation};
9 |
/// Merged GraphQL mutation root: resource CRUD plus the auth mutations.
#[derive(MergedObject, Default)]
pub struct MutationRoot(ResourcesMutation, AuthMutation);
12 |
--------------------------------------------------------------------------------
/src/graphql/queries/ai_functions.rs:
--------------------------------------------------------------------------------
1 | use async_graphql::{Context, Object, Result};
2 |
3 | use crate::{
4 | graphql::auth::extract_context,
5 | llm::suggestions::{TaskSuggestionInput, TaskSuggestionResult},
6 | };
7 |
/// GraphQL queries backed by the LLM auto-suggestions engine.
#[derive(Default)]
pub struct AIFunctionsQuery;
10 |
#[Object]
impl AIFunctionsQuery {
    /// Asks the suggestion engine to propose a new task from a partial input.
    ///
    /// NOTE(review): return generic lost in transcription — presumably
    /// `Result<TaskSuggestionResult>`.
    async fn suggest_new_task(
        &self,
        ctx: &Context<'_>,
        task: TaskSuggestionInput,
    ) -> Result {
        // Auth gate; the member id itself is unused here.
        let (plexo_engine, _member_id) = extract_context(ctx)?;

        let Ok(raw_suggestion) = plexo_engine
            .auto_suggestions_engine
            .get_suggestions(task, None)
            .await
        else {
            return Err("Failed to get suggestions".into());
        };

        Ok(raw_suggestion)
    }

    /// Splits an existing task into `subtasks` suggested subtasks (default 5).
    ///
    /// NOTE(review): generics lost in transcription — presumably
    /// `Result<Vec<…>>` and `task_id.parse::<Uuid>()`. Also note the
    /// `.unwrap()` on the engine call panics on failure, unlike
    /// `suggest_new_task` above which maps the error.
    async fn subdivide_task(
        &self,
        ctx: &Context<'_>,
        task_id: String,
        #[graphql(default = 5)] subtasks: u32,
    ) -> Result> {
        let (plexo_engine, _member_id) = extract_context(ctx)?;

        let task_id = task_id.parse::()?;

        let suggestions = plexo_engine
            .auto_suggestions_engine
            .subdivide_task(task_id, subtasks)
            .await
            .unwrap();

        // NOTE(review): a large commented-out draft (manual suggestion +
        // task/subtask creation) was removed during review; recover from VCS
        // history if needed.
        Ok(suggestions)
    }
}
77 |
--------------------------------------------------------------------------------
/src/graphql/queries/mod.rs:
--------------------------------------------------------------------------------
1 | use async_graphql::MergedObject;
2 |
3 | use self::{ai_functions::AIFunctionsQuery, resources::ResourcesQuery};
4 |
5 | pub mod ai_functions;
6 | pub mod resources;
7 |
8 | // use self::{auth::AuthMutation, resources::ResourcesMutation};
9 |
10 | // use super::{auth_mutation:i:AuthMutation, resources_mutation::ResourcesMutation};
/// Merged GraphQL query root: resource reads plus the AI-backed queries.
#[derive(MergedObject, Default)]
pub struct QueryRoot(ResourcesQuery, AIFunctionsQuery);
13 |
--------------------------------------------------------------------------------
/src/graphql/queries/resources.rs:
--------------------------------------------------------------------------------
1 | use std::str::FromStr;
2 |
3 | use async_graphql::{Context, InputObject, Object, Result};
4 | use chrono::{DateTime, Utc};
5 | use uuid::Uuid;
6 |
7 | use crate::{
8 | graphql::auth::extract_context,
9 | sdk::{
10 | activity::{Activity, ActivityOperationType, ActivityResourceType},
11 | labels::Label,
12 | member::{Member, MemberRole},
13 | project::Project,
14 | task::{Task, TaskPriority, TaskStatus},
15 | team::{Team, TeamVisibility},
16 | utilities::DateTimeBridge,
17 | },
18 | };
19 |
20 | // use super::auth::extract_context;
21 |
/// GraphQL read-only resolvers for tasks, members, projects and teams.
#[derive(Default)]
pub struct ResourcesQuery;
24 |
/// Filter arguments for `tasks`. Accepted by the resolver but currently
/// ignored (the query returns all rows).
///
/// NOTE(review): field generics lost in transcription — the fields are
/// presumably `Option<Uuid>`, `Option<TaskStatus>`, `Option<TaskPriority>`
/// and `Option<DateTime<Utc>>`.
#[derive(InputObject)]
pub struct TaskFilter {
    pub project_id: Option,
    pub lead_id: Option,
    pub status: Option,
    pub priority: Option,
    pub due_date_from: Option>,
    pub due_date_to: Option>,
}
34 |
/// Filter arguments for `members`. Accepted but currently unused.
///
/// NOTE(review): field generics lost in transcription — presumably
/// `Option<String>` / `Option<MemberRole>`.
#[derive(InputObject)]
pub struct MemberFilter {
    pub name: Option,
    pub email: Option,
    pub github_id: Option,
    pub role: Option,
}
42 |
/// Filter arguments for `teams`. Accepted but currently unused.
///
/// NOTE(review): field generics lost in transcription — presumably
/// `Option<TeamVisibility>` / `Option<String>`.
#[derive(InputObject)]
pub struct TeamFilter {
    pub visibility: Option,
    pub name: Option,
}
48 |
/// Filter arguments for `projects`. Accepted but currently unused.
///
/// NOTE(review): field generics lost in transcription — presumably
/// `Option<String>` for both fields.
#[derive(InputObject)]
pub struct ProjectFilter {
    pub title: Option,
    pub description: Option,
}
54 |
55 | #[Object]
56 | impl ResourcesQuery {
    /// Lists every task. `_filter` is accepted but not applied to the query.
    ///
    /// NOTE(review): generics lost in transcription — presumably
    /// `Option<TaskFilter>` and `Result<Vec<Task>>`. The `.unwrap()` on the
    /// DB call panics on failure — consider propagating with `?`.
    async fn tasks(&self, ctx: &Context<'_>, _filter: Option) -> Result> {
        // Auth gate; the member id itself is unused here.
        let (plexo_engine, _member_id) = extract_context(ctx)?;

        let tasks = sqlx::query!(
            r#"
            SELECT * FROM tasks
            "#
        )
        .fetch_all(&*plexo_engine.pool)
        .await
        .unwrap();

        // Map raw rows into the GraphQL Task type, converting timestamps via
        // DateTimeBridge.
        Ok(tasks
            .iter()
            .map(|r| Task {
                id: r.id,
                created_at: DateTimeBridge::from_offset_date_time(r.created_at),
                updated_at: DateTimeBridge::from_offset_date_time(r.updated_at),
                title: r.title.clone(),
                description: r.description.clone(),
                status: TaskStatus::from_optional_str(&r.status),
                priority: TaskPriority::from_optional_str(&r.priority),
                due_date: r.due_date.map(DateTimeBridge::from_offset_date_time),
                project_id: r.project_id,
                lead_id: r.lead_id,
                owner_id: r.owner_id,
                count: r.count,
                parent_id: r.parent_id,
            })
            .collect())
    }
88 |
    /// Fetches a single task by primary key.
    ///
    /// NOTE(review): return generic lost in transcription — presumably
    /// `Result<Task>`. `fetch_one(...).unwrap()` panics when the id does not
    /// exist — consider `fetch_optional` plus a proper error.
    async fn task_by_id(&self, ctx: &Context<'_>, id: Uuid) -> Result {
        let (plexo_engine, _member_id) = extract_context(ctx)?;

        let task = sqlx::query!(
            r#"
            SELECT * FROM tasks
            WHERE id = $1
            "#,
            id
        )
        .fetch_one(&*plexo_engine.pool)
        .await
        .unwrap();

        Ok(Task {
            id: task.id,
            created_at: DateTimeBridge::from_offset_date_time(task.created_at),
            updated_at: DateTimeBridge::from_offset_date_time(task.updated_at),
            title: task.title.clone(),
            description: task.description.clone(),
            status: TaskStatus::from_optional_str(&task.status),
            priority: TaskPriority::from_optional_str(&task.priority),
            due_date: task.due_date.map(DateTimeBridge::from_offset_date_time),
            project_id: task.project_id,
            lead_id: task.lead_id,
            owner_id: task.owner_id,
            count: task.count,
            parent_id: task.parent_id,
        })
    }
119 |
    /// Lists every member. `_filter` is accepted but not applied.
    ///
    /// NOTE(review): generics lost in transcription — presumably
    /// `Option<MemberFilter>` and `Result<Vec<Member>>`.
    async fn members(
        &self,
        ctx: &Context<'_>,
        _filter: Option,
    ) -> Result> {
        let (plexo_engine, _member_id) = extract_context(ctx)?;

        let members = sqlx::query!(
            r#"
            SELECT * FROM members
            "#
        )
        .fetch_all(&*plexo_engine.pool)
        .await
        .unwrap();

        Ok(members
            .iter()
            .map(|r| Member {
                id: r.id,
                created_at: DateTimeBridge::from_offset_date_time(r.created_at),
                updated_at: DateTimeBridge::from_offset_date_time(r.updated_at),
                name: r.name.clone(),
                email: r.email.clone(),
                github_id: r.github_id.clone(),
                google_id: r.google_id.clone(),
                photo_url: r.photo_url.clone(),
                role: MemberRole::from_optional_str(&r.role),
                // Never expose the stored password hash through the API.
                password_hash: None,
            })
            .collect())
    }
152 |
    /// Fetches a single member by primary key.
    ///
    /// NOTE(review): return generic lost in transcription — presumably
    /// `Result<Member>`. `fetch_one(...).unwrap()` panics on a missing id.
    async fn member_by_id(&self, ctx: &Context<'_>, id: Uuid) -> Result {
        let (plexo_engine, _member_id) = extract_context(ctx)?;

        let member = sqlx::query!(
            r#"
            SELECT * FROM members
            WHERE id = $1
            "#,
            id
        )
        .fetch_one(&*plexo_engine.pool)
        .await
        .unwrap();

        Ok(Member {
            id: member.id,
            created_at: DateTimeBridge::from_offset_date_time(member.created_at),
            updated_at: DateTimeBridge::from_offset_date_time(member.updated_at),
            name: member.name.clone(),
            email: member.email.clone(),
            github_id: member.github_id.clone(),
            google_id: member.google_id.clone(),
            photo_url: member.photo_url.clone(),
            role: MemberRole::from_optional_str(&member.role),
            // Never expose the stored password hash through the API.
            password_hash: None,
        })
    }
180 |
    /// Fetches a single member by email address.
    ///
    /// NOTE(review): return generic lost in transcription — presumably
    /// `Result<Member>`. `fetch_one(...).unwrap()` panics on a missing email.
    async fn member_by_email(&self, ctx: &Context<'_>, email: String) -> Result {
        let (plexo_engine, _member_id) = extract_context(ctx)?;

        let member = sqlx::query!(
            r#"
            SELECT * FROM members
            WHERE email = $1
            "#,
            email
        )
        .fetch_one(&*plexo_engine.pool)
        .await
        .unwrap();

        Ok(Member {
            id: member.id,
            created_at: DateTimeBridge::from_offset_date_time(member.created_at),
            updated_at: DateTimeBridge::from_offset_date_time(member.updated_at),
            name: member.name.clone(),
            email: member.email.clone(),
            github_id: member.github_id.clone(),
            google_id: member.google_id.clone(),
            photo_url: member.photo_url.clone(),
            role: MemberRole::from_optional_str(&member.role),
            // Never expose the stored password hash through the API.
            password_hash: None,
        })
    }
208 |
    /// Lists every project. The `_filter` argument is accepted but not yet
    /// applied — all rows are returned.
    // NOTE(review): generic parameters in this signature appear stripped in
    // this copy (the filter's `Option<...>` payload and the
    // `Result<Vec<Project>>` return) — confirm against the canonical source.
    async fn projects(
        &self,
        ctx: &Context<'_>,
        _filter: Option,
    ) -> Result> {
        // Authentication gate; the member id itself is unused here.
        let (plexo_engine, _member_id) = extract_context(ctx)?;

        let projects = sqlx::query!(
            r#"
            SELECT * FROM projects
            "#
        )
        .fetch_all(&*plexo_engine.pool)
        .await
        // NOTE(review): panics on any DB error; consider propagating with `?`.
        .unwrap();

        // Map raw rows to the GraphQL type; DB offset timestamps are
        // converted via DateTimeBridge, optional dates mapped through.
        Ok(projects
            .iter()
            .map(|r| Project {
                id: r.id,
                created_at: DateTimeBridge::from_offset_date_time(r.created_at),
                updated_at: DateTimeBridge::from_offset_date_time(r.updated_at),
                name: r.name.clone(),
                prefix: r.prefix.clone(),
                owner_id: r.owner_id,
                description: r.description.clone(),
                lead_id: r.lead_id,
                start_date: r.start_date.map(DateTimeBridge::from_offset_date_time),
                due_date: r.due_date.map(DateTimeBridge::from_offset_date_time),
            })
            .collect())
    }
241 |
242 | async fn project_by_id(&self, ctx: &Context<'_>, id: Uuid) -> Result {
243 | let (plexo_engine, _member_id) = extract_context(ctx)?;
244 |
245 | let project = sqlx::query!(
246 | r#"
247 | SELECT * FROM projects
248 | WHERE id = $1
249 | "#,
250 | id
251 | )
252 | .fetch_one(&*plexo_engine.pool)
253 | .await
254 | .unwrap();
255 |
256 | Ok(Project {
257 | id: project.id,
258 | created_at: DateTimeBridge::from_offset_date_time(project.created_at),
259 | updated_at: DateTimeBridge::from_offset_date_time(project.updated_at),
260 | name: project.name.clone(),
261 | description: project.description.clone(),
262 | prefix: project.prefix.clone(),
263 | owner_id: project.owner_id,
264 | lead_id: project.lead_id,
265 | start_date: project
266 | .start_date
267 | .map(DateTimeBridge::from_offset_date_time),
268 | due_date: project.due_date.map(DateTimeBridge::from_offset_date_time),
269 | })
270 | }
271 |
    /// Lists every team. The `_filter` argument is accepted but not yet
    /// applied — all rows are returned.
    // NOTE(review): generic parameters in this signature appear stripped in
    // this copy (the filter's `Option<...>` payload and the
    // `Result<Vec<Team>>` return) — confirm against the canonical source.
    async fn teams(&self, ctx: &Context<'_>, _filter: Option) -> Result> {
        // Authentication gate; the member id itself is unused here.
        let (plexo_engine, _member_id) = extract_context(ctx)?;

        let teams = sqlx::query!(
            r#"
            SELECT *
            FROM teams
            "#
        )
        .fetch_all(&*plexo_engine.pool)
        .await
        // NOTE(review): panics on any DB error; consider propagating with `?`.
        .unwrap();

        Ok(teams
            .iter()
            .map(|r| Team {
                id: r.id,
                created_at: DateTimeBridge::from_offset_date_time(r.created_at),
                updated_at: DateTimeBridge::from_offset_date_time(r.updated_at),
                name: r.name.clone(),
                owner_id: r.owner_id,
                visibility: TeamVisibility::from_optional_str(&r.visibility),
                prefix: r.prefix.clone(),
            })
            .collect())
    }
298 |
299 | async fn team_by_id(&self, ctx: &Context<'_>, id: Uuid) -> Result {
300 | let (plexo_engine, _member_id) = extract_context(ctx)?;
301 |
302 | let team = sqlx::query!(
303 | r#"
304 | SELECT * FROM teams
305 | WHERE id = $1
306 | "#,
307 | id
308 | )
309 | .fetch_one(&*plexo_engine.pool)
310 | .await
311 | .unwrap();
312 |
313 | Ok(Team {
314 | id: team.id,
315 | created_at: DateTimeBridge::from_offset_date_time(team.created_at),
316 | updated_at: DateTimeBridge::from_offset_date_time(team.updated_at),
317 | name: team.name,
318 | owner_id: team.owner_id,
319 | visibility: TeamVisibility::from_optional_str(&team.visibility),
320 | prefix: team.prefix,
321 | })
322 | }
323 |
324 | async fn labels(&self, ctx: &Context<'_>) -> Result> {
325 | let (plexo_engine, _member_id) = extract_context(ctx)?;
326 |
327 | let labels = sqlx::query!(
328 | r#"
329 | SELECT * FROM labels
330 | "#
331 | )
332 | .fetch_all(&*plexo_engine.pool)
333 | .await
334 | .unwrap();
335 |
336 | Ok(labels
337 | .iter()
338 | .map(|r| Label {
339 | id: r.id,
340 | created_at: DateTimeBridge::from_offset_date_time(r.created_at),
341 | updated_at: DateTimeBridge::from_offset_date_time(r.updated_at),
342 | name: r.name.clone(),
343 | color: r.color.clone(),
344 | description: r.description.clone(),
345 | })
346 | .collect())
347 | }
348 |
349 | async fn me(&self, ctx: &Context<'_>) -> Result {
350 | let (plexo_engine, member_id) = extract_context(ctx)?;
351 |
352 | let r = sqlx::query!(
353 | r#"
354 | SELECT * FROM members
355 | WHERE id = $1
356 | "#,
357 | member_id
358 | )
359 | .fetch_one(&*plexo_engine.pool)
360 | .await
361 | .unwrap();
362 |
363 | Ok(Member {
364 | id: r.id,
365 | created_at: DateTimeBridge::from_offset_date_time(r.created_at),
366 | updated_at: DateTimeBridge::from_offset_date_time(r.updated_at),
367 | name: r.name.clone(),
368 | email: r.email.clone(),
369 | github_id: r.github_id.clone(),
370 | google_id: r.google_id.clone(),
371 | photo_url: r.photo_url.clone(),
372 | role: MemberRole::from_optional_str(&r.role),
373 | password_hash: None,
374 | })
375 | }
376 |
377 | async fn activity(
378 | &self,
379 | ctx: &Context<'_>,
380 | resource_type: Option,
381 | resource_id: Option,
382 | operation_type: Option,
383 | member_id: Option,
384 | ) -> Result> {
385 | let (plexo_engine, _member_id) = extract_context(ctx)?;
386 |
387 | let activities = sqlx::query!(
388 | r#"
389 | SELECT * FROM activity
390 | WHERE
391 | resource_type = COALESCE($1, resource_type)
392 | AND resource_id = COALESCE($2, resource_id)
393 | AND operation = COALESCE($3, operation)
394 | AND member_id = COALESCE($4, member_id)
395 | "#,
396 | resource_type.map(|r| r.to_string()),
397 | resource_id,
398 | operation_type.map(|r| r.to_string()),
399 | member_id
400 | )
401 | .fetch_all(&*plexo_engine.pool)
402 | .await
403 | .unwrap();
404 |
405 | Ok(activities
406 | .iter()
407 | .map(|r| Activity {
408 | id: r.id,
409 | created_at: DateTimeBridge::from_offset_date_time(r.created_at),
410 | updated_at: DateTimeBridge::from_offset_date_time(r.updated_at),
411 | resource_type: ActivityResourceType::from_str(&r.resource_type).unwrap(),
412 | operation: ActivityOperationType::from_str(&r.operation).unwrap(),
413 | resource_id: r.resource_id,
414 | member_id: r.member_id,
415 | })
416 | .collect())
417 | }
418 | }
419 |
--------------------------------------------------------------------------------
/src/graphql/subscription.rs:
--------------------------------------------------------------------------------
1 | use std::time::Duration;
2 |
3 | use async_graphql::{
4 | async_stream::stream, futures_util::StreamExt, Context, FieldResult, Subscription,
5 | };
6 | use chrono::Utc;
7 | use std::pin::Pin;
8 | use tokio::sync::mpsc::channel;
9 | use tokio_stream::Stream;
10 | use uuid::Uuid;
11 |
12 | use crate::system::subscriptions::DataContainer;
13 | use crate::{
14 | sdk::{
15 | project::Project,
16 | task::{Task, TaskPriority, TaskStatus},
17 | team::{Team, TeamVisibility},
18 | },
19 | system::core::Engine,
20 | };
21 |
/// Root of the GraphQL subscription schema; all subscription resolvers
/// hang off this zero-sized type.
#[derive(Default)]
pub struct SubscriptionRoot;
24 |
#[Subscription]
impl SubscriptionRoot {
    /// Streams task events pushed by the engine's subscription manager;
    /// non-task events and channel wakeups are yielded as `None`.
    // NOTE(review): generic parameters in these subscription signatures
    // appear stripped in this copy (presumably
    // `FieldResult<Pin<Box<dyn Stream<Item = Option<Task>> + Send>>>` and
    // `ctx.data::<Engine>()`) — confirm against the canonical source.
    async fn subscribe_task(
        &self,
        ctx: &Context<'_>,
    ) -> FieldResult> + Send>>> {
        // Bounded channel the subscription manager pushes events into.
        let (sender, mut receiver) = channel(100);
        let subscription_manager = &ctx.data::().unwrap().subscription_manager;
        let new_uuid = Uuid::new_v4().to_string();

        // `1` selects task events — TODO confirm the kind encoding.
        let suscription_added = subscription_manager.add_subscription(sender, 1).await?;
        // NOTE(review): `new_uuid` is generated locally just above, so this
        // comparison with the id returned by the manager can only match by
        // accident — the success log below likely never fires. ("suscription"
        // is also a typo for "subscription".)
        if suscription_added == new_uuid.clone() {
            println!("Subscription_Task added");
        }

        let mapped_stream = stream! {
            loop {
                match receiver.recv().await {
                    Some(DataContainer::TaskContainer(task)) => {
                        // Debug print — consider removing or using a logger.
                        println!("{}", task.title);
                        let last_task = Some(task);
                        yield last_task.clone();
                    },
                    Some(DataContainer::ProjectContainer(_task)) => {
                        yield None;

                    },
                    Some(DataContainer::TeamContainer(_task)) => {
                        yield None;
                    },
                    // NOTE(review): once the sender side closes, `recv()`
                    // returns `None` on every call, so this loop busy-spins
                    // yielding `None` forever instead of terminating.
                    None => {
                        println!("None");
                        yield None;
                    },
                }
            }
        };

        Ok(Box::pin(mapped_stream))
    }

    /// Streams project events; task/team events are yielded as `None`.
    async fn subscribe_project(
        &self,
        ctx: &Context<'_>,
    ) -> FieldResult> + Send>>> {
        let (sender, mut receiver) = channel(100);
        let subscription_manager = &ctx.data::().unwrap().subscription_manager;
        let new_uuid = Uuid::new_v4().to_string();

        // `2` selects project events — TODO confirm the kind encoding.
        let suscription_added = subscription_manager.add_subscription(sender, 2).await?;
        // NOTE(review): same always-false id comparison as subscribe_task.
        if suscription_added == new_uuid.clone() {
            println!("Subscription_Project added");
        }

        let mapped_stream = stream! {
            loop {
                match receiver.recv().await {
                    Some(DataContainer::TaskContainer(_task)) => {
                        yield None;

                    },
                    Some(DataContainer::ProjectContainer(task)) => {
                        println!("{}", task.id);
                        let last_task = Some(task);
                        yield last_task.clone();
                    },
                    Some(DataContainer::TeamContainer(_task)) => {
                        yield None;
                    },
                    None => {
                        println!("None");
                        yield None;
                    },
                }
            }
        };

        Ok(Box::pin(mapped_stream))
    }

    /// Streams team events; task/project events are yielded as `None`.
    async fn subscribe_team(
        &self,
        ctx: &Context<'_>,
    ) -> FieldResult> + Send>>> {
        let (sender, mut receiver) = channel(100);
        let subscription_manager = &ctx.data::().unwrap().subscription_manager;
        let new_uuid = Uuid::new_v4().to_string();

        // `3` selects team events — TODO confirm the kind encoding.
        let suscription_added = subscription_manager.add_subscription(sender, 3).await?;
        // NOTE(review): same always-false id comparison as subscribe_task.
        if suscription_added == new_uuid.clone() {
            println!("Subscription_Team added");
        }

        let mapped_stream = stream! {
            loop {
                match receiver.recv().await {
                    Some(DataContainer::TaskContainer(_task)) => {
                        yield None;

                    },
                    Some(DataContainer::ProjectContainer(_task)) => {
                        yield None;

                    },
                    Some(DataContainer::TeamContainer(task)) => {
                        println!("{}", task.id);
                        let last_task = Some(task);
                        yield last_task.clone(); },
                    None => {
                        println!("None");
                        yield None;
                    },
                }
            }
        };

        Ok(Box::pin(mapped_stream))
    }

    /// Demo stream: emits a fresh placeholder task once per second.
    /// Looks like development scaffolding rather than real data — verify
    /// whether it should still be exposed.
    async fn tasks(&self, ctx: &Context<'_>) -> impl Stream- {
        // Presence check only: the request must carry an auth token.
        let _auth_token = ctx.data::().unwrap();

        tokio_stream::wrappers::IntervalStream::new(tokio::time::interval(Duration::from_secs(1)))
            .map(|_| Task {
                id: Uuid::new_v4(),
                title: "Task 1".to_string(),
                created_at: Utc::now(),
                updated_at: Utc::now(),
                description: None,

                status: TaskStatus::Backlog,
                priority: TaskPriority::High,

                owner_id: Uuid::new_v4(),

                // labels: vec![],
                lead_id: None,
                project_id: None,

                due_date: None,
                count: 0,
                parent_id: None,
            })
    }

    /// Demo stream: emits a placeholder task carrying the requested id
    /// once per second.
    async fn task_by_id(&self, id: Uuid) -> impl Stream- {
        tokio_stream::wrappers::IntervalStream::new(tokio::time::interval(Duration::from_secs(1)))
            .map(move |_| Task {
                id,
                title: "Task 1".to_string(),
                created_at: Utc::now(),
                updated_at: Utc::now(),
                description: None,

                status: TaskStatus::Backlog,
                priority: TaskPriority::High,

                owner_id: Uuid::new_v4(),

                // labels: vec![],
                lead_id: None,
                project_id: None,

                due_date: None,
                count: 0,
                parent_id: None,
            })
    }

    /// Demo stream: emits a placeholder project once per second.
    async fn projects(&self, ctx: &Context<'_>) -> impl Stream- {
        let _auth_token = ctx.data::().unwrap();

        tokio_stream::wrappers::IntervalStream::new(tokio::time::interval(Duration::from_secs(1)))
            .map(|_| Project {
                id: Uuid::new_v4(),
                created_at: Utc::now(),
                updated_at: Utc::now(),
                name: "Project X".to_string(),
                description: None,
                owner_id: Uuid::new_v4(),
                prefix: None,
                lead_id: None,
                start_date: None,
                due_date: None,
            })
    }

    /// Demo stream: emits a placeholder team once per second.
    async fn teams(&self, ctx: &Context<'_>) -> impl Stream- {
        let _auth_token = ctx.data::().unwrap();

        tokio_stream::wrappers::IntervalStream::new(tokio::time::interval(Duration::from_secs(1)))
            .map(|_| Team {
                id: Uuid::new_v4(),
                name: "Team X".to_string(),
                created_at: Utc::now(),
                updated_at: Utc::now(),
                owner_id: Uuid::new_v4(),
                visibility: TeamVisibility::Public,
                prefix: None,
            })
    }
}
227 |
--------------------------------------------------------------------------------
/src/handlers.rs:
--------------------------------------------------------------------------------
1 | use async_graphql::{
2 | http::{GraphiQLSource, ALL_WEBSOCKET_PROTOCOLS},
3 | Data, Schema,
4 | };
5 |
6 | use async_graphql_poem::{GraphQLProtocol, GraphQLRequest, GraphQLResponse, GraphQLWebSocket};
7 | use serde_json::Value;
8 |
9 | use crate::{
10 | commons::authorization::{get_token_from_cookie, get_token_from_headers},
11 | config::DOMAIN,
12 | graphql::{mutations::MutationRoot, queries::QueryRoot, subscription::SubscriptionRoot},
13 | };
14 |
15 | use poem::{
16 | handler,
17 | http::HeaderMap,
18 | web::Html,
19 | web::{websocket::WebSocket, Data as PoemData},
20 | IntoResponse,
21 | };
22 |
23 | #[handler]
24 | pub async fn graphiq_handler() -> impl IntoResponse {
25 | Html(
26 | GraphiQLSource::build()
27 | .endpoint(format!("{}/graphql", *DOMAIN).as_str())
28 | .subscription_endpoint(format!("{}/graphql/ws", DOMAIN.replace("http", "ws")).as_str())
29 | .finish(),
30 | )
31 | }
32 |
/// Main GraphQL HTTP handler: executes the request against the shared
/// schema, first attaching any auth token found in the request headers
/// or cookies as request-scoped data.
// NOTE(review): the schema's generic parameters appear stripped in this
// copy — confirm against the canonical source.
#[handler]
pub async fn index_handler(
    schema: PoemData<&Schema>,
    headers: &HeaderMap,
    req: GraphQLRequest,
) -> GraphQLResponse {
    let mut req = req.0;
    // let mut with_token = false;

    if let Some(token) = get_token_from_headers(headers) {
        req = req.data(token);
        // with_token = true;
    }

    // NOTE(review): when both a header token and a cookie token are
    // present, this second insert appears to replace the first (same data
    // type) — confirm that cookie precedence is intended.
    if let Some(token) = get_token_from_cookie(headers) {
        req = req.data(token);
        // with_token = true;
    }

    schema.execute(req).await.into()
}
54 |
/// Upgrades the connection to a GraphQL-over-websocket session
/// (subscriptions), accepting any supported websocket subprotocol.
/// Connection-level auth is handled by `on_connection_init`.
// NOTE(review): the schema's generic parameters appear stripped in this
// copy — confirm against the canonical source.
#[handler]
pub async fn ws_switch_handler(
    schema: PoemData<&Schema>,
    protocol: GraphQLProtocol,
    websocket: WebSocket,
) -> impl IntoResponse {
    // Clone the schema handle so the upgrade closure can own it.
    let schema = schema.0.clone();
    websocket
        .protocols(ALL_WEBSOCKET_PROTOCOLS)
        .on_upgrade(move |stream| {
            GraphQLWebSocket::new(stream, schema, protocol)
                .on_connection_init(on_connection_init)
                .serve()
        })
}
70 |
71 | pub async fn on_connection_init(value: Value) -> async_graphql::Result {
72 | match &value {
73 | Value::Object(map) => {
74 | if let Some(Value::String(token)) = map.get("Authorization") {
75 | let mut data = Data::default();
76 | data.insert(token.to_string());
77 | Ok(data)
78 | } else {
79 | Err("Authorization token is required".into())
80 | }
81 | }
82 | _ => Err("Authorization token is required".into()),
83 | }
84 | }
85 |
--------------------------------------------------------------------------------
/src/lib.rs:
--------------------------------------------------------------------------------
//! Crate root for the Plexo server: declares every top-level module.

pub mod auth; // auth engine, JWT handling, GitHub OAuth / login handlers
pub mod commons; // shared helpers (token extraction from headers/cookies)
pub mod config; // environment-driven configuration statics (DOMAIN, URL, ...)
pub mod errors; // error type definitions
pub mod graphql; // GraphQL queries, mutations and subscriptions
pub mod handlers; // HTTP handlers for GraphQL, GraphiQL and websockets
pub mod llm; // LLM-backed task suggestion engine
pub mod openapi; // REST (OpenAPI) surface with Swagger UI
pub mod sdk; // domain types: tasks, projects, teams, members, labels, ...
pub mod statics; // static file server for the bundled front-end
pub mod system; // core engine, schema assembly and subscription manager
12 |
--------------------------------------------------------------------------------
/src/llm/mod.rs:
--------------------------------------------------------------------------------
//! LLM integration: a thin OpenAI chat client plus the auto-suggestion
//! engine built on top of it.

mod openai; // private: raw chat-completion client wrapper
pub mod suggestions; // public: task suggestion / subdivision engine
3 |
--------------------------------------------------------------------------------
/src/llm/openai.rs:
--------------------------------------------------------------------------------
1 | use async_openai::{
2 | config::OpenAIConfig,
3 | types::{
4 | ChatCompletionRequestSystemMessageArgs, ChatCompletionRequestUserMessageArgs,
5 | CreateChatCompletionRequestArgs,
6 | },
7 | Client,
8 | };
9 |
10 | use crate::config::LLM_MODEL_NAME;
11 |
/// Thin wrapper around the async-openai chat client.
// NOTE(review): the field's generic parameter appears stripped in this
// copy (likely `Client<OpenAIConfig>`, given the import above) — confirm.
#[derive(Clone)]
pub struct LLMEngine {
    client: Client,
}
16 |
17 | impl LLMEngine {
18 | pub fn new() -> Self {
19 | let client = Client::new();
20 | Self { client }
21 | }
22 |
23 | pub async fn chat_completion(&self, system_message: String, user_message: String) -> String {
24 | let request = CreateChatCompletionRequestArgs::default()
25 | .max_tokens(512u16)
26 | .model(LLM_MODEL_NAME.to_string())
27 | .messages([
28 | ChatCompletionRequestSystemMessageArgs::default()
29 | .content(system_message)
30 | .build()
31 | .unwrap()
32 | .into(),
33 | ChatCompletionRequestUserMessageArgs::default()
34 | .content(user_message)
35 | .build()
36 | .unwrap()
37 | .into(),
38 | ])
39 | .build()
40 | .unwrap();
41 |
42 | let response = self.client.chat().create(request).await.unwrap();
43 |
44 | response
45 | .choices
46 | .first()
47 | .unwrap()
48 | .message
49 | .content
50 | .clone()
51 | .unwrap()
52 | }
53 | }
54 |
--------------------------------------------------------------------------------
/src/llm/suggestions.rs:
--------------------------------------------------------------------------------
1 | use async_graphql::{InputObject, SimpleObject};
2 | use chrono::{DateTime, Local, Utc};
3 | use serde::Deserialize;
4 | use serde_json::Result;
5 | use sqlx::{query, Pool, Postgres};
6 | use uuid::Uuid;
7 |
8 | use crate::sdk::{
9 | task::{Task, TaskPriority, TaskStatus},
10 | utilities::DateTimeBridge,
11 | };
12 |
13 | use super::openai::LLMEngine;
14 |
/// Generates task suggestions by combining recent task rows from
/// Postgres with LLM chat completions.
// NOTE(review): the pool field's generic parameters appear stripped in
// this copy (presumably `Box<Pool<Postgres>>`) — confirm.
#[derive(Clone)]
pub struct AutoSuggestionsEngine {
    llm_engine: LLMEngine,
    pool: Box>,
}
20 |
/// Partially specified task supplied by the client; every field is
/// optional and missing fields are what the LLM is asked to fill in.
// NOTE(review): the `Option<...>` payload types appear stripped in this
// copy — confirm against the canonical source.
#[derive(InputObject, Clone)]
pub struct TaskSuggestionInput {
    pub title: Option,
    pub description: Option,
    pub status: Option,
    pub priority: Option,
    pub due_date: Option>,
}
29 |
/// Fully specified task produced by the LLM; deserialized directly from
/// the model's JSON reply and exposed as a GraphQL object.
// NOTE(review): `due_date`'s generic appears stripped in this copy
// (presumably `DateTime<Utc>`) — confirm.
#[derive(SimpleObject, Clone, Deserialize)]
pub struct TaskSuggestionResult {
    pub title: String,
    pub description: String,
    pub status: TaskStatus,
    pub priority: TaskPriority,
    pub due_date: DateTime,
}
38 |
/// Optional scoping for a suggestion request (restrict context to a
/// project and/or team). Currently accepted but unused by
/// `get_suggestions`.
// NOTE(review): the `Option<Uuid>`-style payloads appear stripped in
// this copy — confirm.
#[derive(SimpleObject, Clone, Deserialize)]
pub struct SuggestionContext {
    project_id: Option,
    team_id: Option,
}
44 |
impl AutoSuggestionsEngine {
    /// Builds an engine around the given connection pool with a fresh
    /// LLM client.
    // NOTE(review): generic parameters throughout this impl appear
    // stripped in this copy (e.g. `Box<Pool<Postgres>>`, `Vec<String>`,
    // `Result<TaskSuggestionResult>`) — confirm against the canonical
    // source.
    pub fn new(pool: Box>) -> Self {
        let llm_engine = LLMEngine::new();
        Self { llm_engine, pool }
    }

    /// Serializes a task to JSON so it can be embedded verbatim into an
    /// LLM prompt.
    // NOTE(review): `unwrap` here panics if serialization fails.
    fn calculate_task_fingerprint(task: Task) -> String {
        serde_json::to_string(&task).unwrap()
    }

    /// Renders a partially specified task as a plain-text prompt
    /// fragment; absent fields render as empty strings.
    fn calculate_task_suggestion_fingerprint(task_suggestion: TaskSuggestionInput) -> String {
        format!(
            "Task Title: {}
Task Description: {}
Task Status: {}
Task Priority: {}
Task Due Date: {}",
            task_suggestion.title.unwrap_or("".to_string()),
            task_suggestion
                .description
                .unwrap_or("".to_string()),
            task_suggestion
                .status
                .map(|s| s.to_str())
                .unwrap_or(""),
            task_suggestion
                .priority
                .map(|p| p.to_str())
                .unwrap_or(""),
            task_suggestion
                .due_date
                .map(|d| d.to_rfc3339())
                .unwrap_or("".to_string()),
        )
    }

    /// Loads up to 10 task rows and serializes each to JSON for use as
    /// prompt context.
    // NOTE(review): `LIMIT 10` without an ORDER BY yields an unspecified
    // subset of rows, and `unwrap` panics on any DB error — consider an
    // explicit ordering and error propagation.
    async fn acquire_tasks_fingerprints(&self) -> Vec {
        let tasks = query!(
            r#"
            SELECT *
            FROM tasks
            LIMIT 10
            "#,
        )
        .fetch_all(&*self.pool)
        .await
        .unwrap();

        tasks
            .iter()
            .map(|r| Task {
                id: r.id,
                created_at: DateTimeBridge::from_offset_date_time(r.created_at),
                updated_at: DateTimeBridge::from_offset_date_time(r.updated_at),
                title: r.title.clone(),
                description: r.description.clone(),
                status: TaskStatus::from_optional_str(&r.status),
                priority: TaskPriority::from_optional_str(&r.priority),
                due_date: r.due_date.map(DateTimeBridge::from_offset_date_time),
                project_id: r.project_id,
                lead_id: r.lead_id,
                owner_id: r.owner_id,
                count: r.count,
                parent_id: r.parent_id,
            })
            .map(Self::calculate_task_fingerprint)
            .collect::>()
    }

    /// Asks the LLM to complete the missing fields of `proto_task`,
    /// using recent tasks as context. `_context` is accepted but not yet
    /// used.
    ///
    /// Errors when the model's reply is not valid JSON for
    /// `TaskSuggestionResult`.
    pub async fn get_suggestions(
        &self,
        proto_task: TaskSuggestionInput,
        _context: Option,
    ) -> Result {
        let tasks_fingerprints = self.acquire_tasks_fingerprints().await;

        let system_message = "The user pass to you a list of tasks and you should predict the following based on the input of the user.
Please return only a valid json with the following struct {
title: String,
description: String,
status: TaskStatus,
priority: TaskPriority,
due_date: DateTime
}".to_string();

        let user_message = format!(
            "
Current Time:
{}

Current Tasks Context:
{}

With the above context, complete the following task, only fill the fields:
{}",
            Local::now(),
            tasks_fingerprints.join("\n\n"),
            Self::calculate_task_suggestion_fingerprint(proto_task),
        );

        let result = self
            .llm_engine
            .chat_completion(system_message, user_message)
            .await;

        // The model is instructed to return bare JSON; parse it directly.
        let suggestion_result: TaskSuggestionResult = serde_json::from_str(&result)?;

        Ok(suggestion_result)
    }

    /// Asks the LLM to split the task identified by `task_id` into
    /// `subtasks` generated sub-tasks.
    ///
    /// Errors when the model's reply is not a valid JSON array of
    /// `TaskSuggestionResult`.
    // NOTE(review): `unwrap` below panics when the task row is missing or
    // the DB call fails — consider propagating instead.
    pub async fn subdivide_task(
        &self,
        task_id: Uuid,
        subtasks: u32,
    ) -> Result> {
        let task = sqlx::query!(
            r#"
            SELECT * FROM tasks
            WHERE id = $1
            "#,
            task_id
        )
        .fetch_one(&*self.pool)
        .await
        .unwrap();

        // Re-shape the raw row into the serializable domain type.
        let task = Task {
            id: task.id,
            created_at: DateTimeBridge::from_offset_date_time(task.created_at),
            updated_at: DateTimeBridge::from_offset_date_time(task.updated_at),
            title: task.title.clone(),
            description: task.description.clone(),
            status: TaskStatus::from_optional_str(&task.status),
            priority: TaskPriority::from_optional_str(&task.priority),
            due_date: task.due_date.map(DateTimeBridge::from_offset_date_time),
            project_id: task.project_id,
            lead_id: task.lead_id,
            owner_id: task.owner_id,
            count: task.count,
            parent_id: task.parent_id,
        };

        let system_message =
            "The user pass to you one task and you should predict a list of subtasks.
Please return only a valid json with the following struct [{
title: String,
description: String,
status: TaskStatus,
priority: TaskPriority,
due_date: DateTime
}]
For TaskStatus and TaskPriority, please use the following values:
TaskStatus: None, Backlog, ToDo, InProgress, Done, Canceled
TaskPriority: None, Low, Medium, High, Urgent
"
            .to_string();

        let user_message = format!(
            "
Current Time:
{}

Parent Task:
{}

With the above context, generate {} subtasks.",
            Local::now(),
            Self::calculate_task_fingerprint(task),
            subtasks,
        );

        let result = self
            .llm_engine
            .chat_completion(system_message, user_message)
            .await;

        let subtasks: Vec = serde_json::from_str(&result)?;

        Ok(subtasks)
    }

    // pub async fn get_
}
228 |
--------------------------------------------------------------------------------
/src/main.rs:
--------------------------------------------------------------------------------
1 | use dotenvy::dotenv;
2 | use plexo::{
3 | auth::{
4 | core::{
5 | email_basic_login_handler, github_callback_handler, github_sign_in_handler,
6 | logout_handler,
7 | },
8 | engine::AuthEngine,
9 | },
10 | config::{
11 | DATABASE_URL, DOMAIN, GITHUB_CLIENT_ID, GITHUB_CLIENT_SECRET, GITHUB_REDIRECT_URL,
12 | JWT_ACCESS_TOKEN_SECRET, STATIC_PAGE_ENABLED, URL,
13 | },
14 | handlers::{graphiq_handler, index_handler, ws_switch_handler},
15 | openapi::api::Api,
16 | statics::StaticServer,
17 | system::{core::Engine, prelude::Prelude, schema::GraphQLSchema},
18 | };
19 | use poem::{get, listener::TcpListener, middleware::Cors, post, EndpointExt, Route, Server};
20 | use poem_openapi::OpenApiService;
21 | use sqlx::postgres::PgPoolOptions;
22 |
23 | #[tokio::main]
24 | async fn main() {
25 | dotenv().ok();
26 |
27 | let plexo_engine = Engine::new(
28 | PgPoolOptions::new()
29 | .max_connections(3)
30 | .connect(&DATABASE_URL)
31 | .await
32 | .unwrap(),
33 | AuthEngine::new(
34 | // TODO: That's horrible, fix it
35 | (*JWT_ACCESS_TOKEN_SECRET).to_string(),
36 | (*JWT_ACCESS_TOKEN_SECRET).to_string(),
37 | (*GITHUB_CLIENT_ID).to_owned(),
38 | (*GITHUB_CLIENT_SECRET).to_owned(),
39 | Some((*GITHUB_REDIRECT_URL).to_owned()),
40 | ),
41 | );
42 |
43 | plexo_engine.prelude().await;
44 |
45 | let schema = plexo_engine.graphql_api_schema();
46 |
47 | let api_service = OpenApiService::new(Api::default(), "Hello World", "1.0")
48 | .server("http://localhost:3000/api");
49 | let ui = api_service.swagger_ui();
50 |
51 | let spec = api_service.spec();
52 |
53 | std::fs::write("openapi.json", spec).unwrap();
54 |
55 | let mut app = Route::new()
56 | .nest("/api", api_service)
57 | .nest("/", ui)
58 | // .nest("/", static_page)
59 | // Non authenticated routes
60 | .at("/auth/email/login", post(email_basic_login_handler))
61 | // .at("/auth/email/register", post(email_basic_register_handler))
62 | //
63 | .at("/auth/github", get(github_sign_in_handler))
64 | .at("/auth/github/callback", get(github_callback_handler))
65 | //
66 | .at("/auth/logout", get(logout_handler))
67 | //
68 | .at("/playground", get(graphiq_handler))
69 | .at("/graphql", post(index_handler))
70 | .at("/graphql/ws", get(ws_switch_handler));
71 |
72 | if *STATIC_PAGE_ENABLED {
73 | let static_page_root_path = "plexo-platform/out".to_string();
74 |
75 | let static_page =
76 | StaticServer::new(static_page_root_path, plexo_engine.clone()).index_file("index.html");
77 |
78 | app = app.nest("/", static_page);
79 |
80 | println!("Static page enabled");
81 | }
82 |
83 | let app = app
84 | .with(
85 | Cors::new().allow_credentials(true), // .expose_header("Set-Cookie"),
86 | )
87 | .data(schema)
88 | .data(plexo_engine.clone());
89 |
90 | println!("Visit GraphQL Playground at {}/playground", *DOMAIN);
91 |
92 | Server::new(TcpListener::bind(URL.to_owned()))
93 | .run(app)
94 | .await
95 | .expect("Fail to start web server");
96 | }
97 |
--------------------------------------------------------------------------------
/src/openapi/api.rs:
--------------------------------------------------------------------------------
1 | // use poem::{listener::TcpListener, Route};
2 | use crate::sdk::project::Project;
3 | use poem_openapi::param::Path;
4 | use poem_openapi::payload::Json;
5 | use poem_openapi::{ApiResponse, OpenApi, Tags};
6 | use tokio::sync::Mutex;
7 |
8 | use crate::sdk::task::Task;
9 |
/// Tag groups shown in the generated OpenAPI/Swagger documentation.
#[derive(Tags)]
enum ApiTags {
    /// Operations about tasks
    Task,
    // Demoted from `///` to `//` so the orphaned doc comment does not
    // attach to the next live variant while `Member` is commented out.
    // /// Operations about members
    // Member,
    /// Operations about projects
    Project,
    // /// Operations about teams
    // Team,
}
21 |
/// REST API state. Tasks created through the REST surface live only in
/// this mutex-guarded in-memory list; nothing is persisted to Postgres.
// NOTE(review): the generic parameters appear stripped in this copy —
// presumably `Mutex<Vec<Task>>`; confirm.
#[derive(Default)]
pub struct Api {
    pub tasks: Mutex>,
}
26 |
// NOTE(review): payload/path generics appear stripped throughout this impl
// in this copy (presumably `Json<Task>`, `Json<Project>`, `Path<String>`)
// — confirm against the canonical source.
#[OpenApi]
impl Api {
    // #[oai(path = "/hello", method = "get", operation_id = "hello")]
    // async fn index(&self, name: Query>) -> PlainText {
    //     match name.0 {
    //         Some(name) => PlainText(format!("hello, {}!", name)),
    //         None => PlainText("hello!".to_string()),
    //     }
    // }

    /// Inserts the submitted task at the front of the in-memory list and
    /// echoes it back. Nothing is written to the database.
    #[oai(
        path = "/tasks",
        method = "post",
        tag = "ApiTags::Task",
        operation_id = "create_task"
    )]
    async fn create_task(&self, task: Json) -> CreateTaskResponse {
        let mut users = self.tasks.lock().await;
        users.insert(0, task.0.clone());

        CreateTaskResponse::Ok(Json(task.0))
    }

    /// Returns a snapshot of the in-memory task list.
    #[oai(
        path = "/tasks",
        method = "get",
        tag = "ApiTags::Task",
        operation_id = "list_tasks"
    )]
    async fn list_tasks(&self) -> ListTasksResponse {
        let users = self.tasks.lock().await;
        ListTasksResponse::Ok(Json(users.clone()))
    }

    /// Stub: task lookup is not implemented yet — always responds 404.
    #[oai(
        path = "/tasks/:id",
        method = "get",
        tag = "ApiTags::Task",
        operation_id = "get_task"
    )]
    async fn get_task(&self, _id: Path) -> GetTaskResponse {
        // let users = self.tasks.lock().await;
        // let task = users.iter().find(|task| task.id == Uuid::from_str(id.0.as_str()));

        // match task {
        //     Some(task) => GetTaskResponse::Ok(Json(task.clone())),
        //     None => GetTaskResponse::NotFound,
        // }

        GetTaskResponse::NotFound
    }

    /// Stub: task update is not implemented yet — always responds 404.
    #[oai(
        path = "/tasks/:id",
        method = "put",
        tag = "ApiTags::Task",
        operation_id = "update_task"
    )]
    async fn update_task(&self, _id: Path, _task: Json) -> GetTaskResponse {
        // let mut users = self.tasks.lock().await;
        // let task = users.iter_mut().find(|task| task.id == id.0.into());
        //
        // match task {
        //     Some(task) => {
        //         *task = task.clone();
        //         GetTaskResponse::Ok(Json(task.clone()))
        //     },
        //     None => GetTaskResponse::NotFound,
        // }

        GetTaskResponse::NotFound
    }

    /// Stub: task deletion is not implemented yet — always responds 404.
    #[oai(
        path = "/tasks/:id",
        method = "delete",
        tag = "ApiTags::Task",
        operation_id = "delete_task"
    )]
    async fn delete_task(&self, _id: Path) -> GetTaskResponse {
        // let mut users = self.tasks.lock().await;
        // let task = users.iter().find(|task| task.id == id.0.into());

        // match task {
        //     Some(task) => {
        //         // users.remove_item(task);
        //         GetTaskResponse::Ok(Json(task.clone()))
        //     },
        //     None => GetTaskResponse::NotFound,
        // }

        GetTaskResponse::NotFound
    }

    /// Echoes the submitted project back without storing it anywhere.
    #[oai(
        path = "/projects",
        method = "post",
        tag = "ApiTags::Project",
        operation_id = "create_project"
    )]
    async fn create_project(&self, task: Json) -> CreateProjectResponse {
        // let mut users = self.tasks.lock().await;
        // users.insert(0, task.0.clone());

        CreateProjectResponse::Ok(Json(task.0))
    }

    /// Stub: always returns an empty project list.
    #[oai(
        path = "/projects",
        method = "get",
        tag = "ApiTags::Project",
        operation_id = "list_projects"
    )]
    async fn list_projects(&self) -> ListProjectsResponse {
        // let users = self.tasks.lock().await;
        // ListTasksResponse::Ok(Json(users.clone()))
        ListProjectsResponse::Ok(Json(vec![]))
    }
}
146 |
// NOTE(review): the Json payload generic appears stripped in this copy
// (presumably `Json<Project>`) — confirm.
#[derive(ApiResponse)]
enum CreateProjectResponse {
    /// Returned when the project is successfully created.
    #[oai(status = 200)]
    Ok(Json),
}
153 |
// NOTE(review): the Json payload generic appears stripped in this copy
// (presumably `Json<Vec<Project>>`) — confirm.
#[derive(ApiResponse)]
enum ListProjectsResponse {
    /// Returned with the full list of projects.
    #[oai(status = 200)]
    Ok(Json>),
}
// NOTE(review): the Json payload generic appears stripped in this copy
// (presumably `Json<Task>`) — confirm.
#[derive(ApiResponse)]
enum CreateTaskResponse {
    /// Returned when the task is successfully created.
    #[oai(status = 200)]
    Ok(Json),
}
166 |
// NOTE(review): the Json payload generic appears stripped in this copy
// (presumably `Json<Vec<Task>>`) — confirm.
#[derive(ApiResponse)]
enum ListTasksResponse {
    /// Returned with the full list of tasks.
    #[oai(status = 200)]
    Ok(Json>),
}
173 |
#[derive(ApiResponse)]
enum GetTaskResponse {
    // The 200 variant is not wired up yet:
    // #[oai(status = 200)]
    // Ok(Json),
    /// Returned when no task matches the requested id (currently the only
    /// response the task-by-id stubs produce).
    #[oai(status = 404)]
    NotFound,
}
182 |
--------------------------------------------------------------------------------
/src/openapi/mod.rs:
--------------------------------------------------------------------------------
1 | pub mod api;
2 |
--------------------------------------------------------------------------------
/src/sdk/activity.rs:
--------------------------------------------------------------------------------
1 | use async_graphql::{dataloader::DataLoader, ComplexObject, Context, Enum, Result, SimpleObject};
2 | use chrono::{DateTime, Utc};
3 | use uuid::Uuid;
4 |
5 | use std::str::FromStr;
6 |
7 | use super::loaders::MemberLoader;
8 | use super::member::Member;
9 | use crate::graphql::auth::extract_context;
10 |
/// A single audit-log entry: which member performed which operation on
/// which resource, and when.
// NOTE(review): the chrono generics appear stripped in this copy
// (presumably `DateTime<Utc>`) — confirm.
#[derive(SimpleObject, Clone, Debug)]
#[graphql(complex)]
pub struct Activity {
    pub id: Uuid,
    pub created_at: DateTime,
    pub updated_at: DateTime,

    // Member who performed the operation; resolvable via the `member`
    // complex field below.
    pub member_id: Uuid,
    // Id of the affected resource (task, project, team, ...).
    pub resource_id: Uuid,

    pub operation: ActivityOperationType,
    pub resource_type: ActivityResourceType,
}
24 |
#[ComplexObject]
impl Activity {
    /// Resolves the member who performed this activity via the member
    /// dataloader (batching concurrent lookups).
    // NOTE(review): the loader's generic appears stripped in this copy
    // (presumably `DataLoader<MemberLoader>`) and the return presumably
    // `Result<Member>` — confirm.
    pub async fn member(&self, ctx: &Context<'_>) -> Result {
        // Authentication gate; the extracted values are otherwise unused.
        let (_plexo_engine, _member_id) = extract_context(ctx)?;

        let loader = ctx.data::>()?;

        // NOTE(review): `unwrap` panics if the member row no longer exists
        // (e.g. a deleted member referenced by an old activity) — consider
        // returning an error instead.
        Ok(loader.load_one(self.member_id).await?.unwrap())
    }
}
35 |
/// Kind of mutation recorded by an activity entry.
#[derive(Enum, Copy, Clone, Eq, PartialEq, Debug)]
pub enum ActivityOperationType {
    Create,
    Update,
    Delete,
}
42 |
43 | impl ToString for ActivityOperationType {
44 | fn to_string(&self) -> String {
45 | match self {
46 | ActivityOperationType::Create => "Create".to_string(),
47 | ActivityOperationType::Update => "Update".to_string(),
48 | ActivityOperationType::Delete => "Delete".to_string(),
49 | }
50 | }
51 | }
52 |
53 | impl FromStr for ActivityOperationType {
54 | type Err = ();
55 |
56 | fn from_str(s: &str) -> Result {
57 | match s {
58 | "Create" => Ok(ActivityOperationType::Create),
59 | "Update" => Ok(ActivityOperationType::Update),
60 | "Delete" => Ok(ActivityOperationType::Delete),
61 | _ => Err(()),
62 | }
63 | }
64 | }
65 |
/// Kind of resource an activity entry refers to.
#[derive(Enum, Copy, Clone, Eq, PartialEq, Debug)]
pub enum ActivityResourceType {
    Task,
    Project,
    Team,
    Member,
    Label,
    Organization,
}
75 |
76 | impl ToString for ActivityResourceType {
77 | fn to_string(&self) -> String {
78 | match self {
79 | ActivityResourceType::Task => "Task".to_string(),
80 | ActivityResourceType::Project => "Project".to_string(),
81 | ActivityResourceType::Team => "Team".to_string(),
82 | ActivityResourceType::Member => "Member".to_string(),
83 | ActivityResourceType::Label => "Label".to_string(),
84 | ActivityResourceType::Organization => "Organization".to_string(),
85 | }
86 | }
87 | }
88 |
89 | impl FromStr for ActivityResourceType {
90 | type Err = ();
91 |
92 | fn from_str(s: &str) -> Result {
93 | match s {
94 | "Task" => Ok(ActivityResourceType::Task),
95 | "Project" => Ok(ActivityResourceType::Project),
96 | "Team" => Ok(ActivityResourceType::Team),
97 | "Member" => Ok(ActivityResourceType::Member),
98 | "Label" => Ok(ActivityResourceType::Label),
99 | "Organization" => Ok(ActivityResourceType::Organization),
100 | _ => Err(()),
101 | }
102 | }
103 | }
104 |
--------------------------------------------------------------------------------
/src/sdk/labels.rs:
--------------------------------------------------------------------------------
1 | use crate::graphql::auth::extract_context;
2 | use async_graphql::{dataloader::DataLoader, ComplexObject, Context, Result, SimpleObject};
3 | use chrono::{DateTime, Utc};
4 | use uuid::Uuid;
5 |
6 | use super::loaders::TaskLoader;
7 | use super::task::Task;
8 |
9 | #[derive(SimpleObject, Clone)]
10 | #[graphql(complex)]
11 | pub struct Label {
12 | pub id: Uuid,
13 | pub created_at: DateTime,
14 | pub updated_at: DateTime,
15 |
16 | pub name: String,
17 | pub description: Option,
18 | pub color: Option,
19 | }
20 |
21 | #[ComplexObject]
22 | impl Label {
23 | pub async fn tasks(&self, ctx: &Context<'_>) -> Result> {
24 | let (plexo_engine, _member_id) = extract_context(ctx)?;
25 |
26 | let loader = ctx.data::>().unwrap();
27 |
28 | let ids: Vec = sqlx::query!(
29 | r#"
30 | SELECT task_id FROM labels_by_tasks
31 | WHERE label_id = $1
32 | "#,
33 | &self.id
34 | )
35 | .fetch_all(&*plexo_engine.pool)
36 | .await
37 | .unwrap()
38 | .into_iter()
39 | .map(|id| id.task_id)
40 | .collect();
41 |
42 | let tasks_map = loader.load_many(ids.clone()).await.unwrap();
43 |
44 | let tasks: &Vec = &ids
45 | .into_iter()
46 | .map(|id| tasks_map.get(&id).unwrap().clone())
47 | .collect();
48 |
49 | Ok(tasks.clone())
50 | }
51 | }
52 |
--------------------------------------------------------------------------------
/src/sdk/loaders.rs:
--------------------------------------------------------------------------------
1 | use std::{collections::HashMap, sync::Arc};
2 |
3 | use crate::system::core::Engine;
4 | use async_graphql::dataloader::Loader;
5 |
6 | use uuid::Uuid;
7 |
8 | use super::{
9 | labels::Label,
10 | member::{Member, MemberRole},
11 | project::Project,
12 | task::{Task, TaskPriority, TaskStatus},
13 | team::{Team, TeamVisibility},
14 | utilities::DateTimeBridge,
15 | };
16 |
// Per-request dataloaders that batch id lookups of each entity against the
// engine's connection pool.
pub struct TaskLoader(Engine);
pub struct ProjectLoader(Engine);
pub struct MemberLoader(Engine);
pub struct LabelLoader(Engine);
pub struct TeamLoader(Engine);
22 |
// Trivial constructors: each loader wraps the engine whose pool it queries.
impl TaskLoader {
    pub fn new(e: Engine) -> Self {
        Self(e)
    }
}

impl ProjectLoader {
    pub fn new(e: Engine) -> Self {
        Self(e)
    }
}

impl MemberLoader {
    pub fn new(e: Engine) -> Self {
        Self(e)
    }
}

impl LabelLoader {
    pub fn new(e: Engine) -> Self {
        Self(e)
    }
}

impl TeamLoader {
    pub fn new(e: Engine) -> Self {
        Self(e)
    }
}
52 |
53 | #[async_trait::async_trait]
54 | impl Loader for TaskLoader {
55 | type Value = Task;
56 | type Error = Arc;
57 |
58 | async fn load(&self, keys: &'_ [Uuid]) -> Result, Self::Error> {
59 | let tasks = sqlx::query!(
60 | r#"
61 | SELECT * FROM tasks WHERE id = ANY($1)
62 | "#,
63 | &keys
64 | )
65 | .fetch_all(&*self.0.pool)
66 | .await
67 | .unwrap();
68 |
69 | //iterate to get the hashmap
70 | let tasks_map: HashMap = tasks
71 | .iter()
72 | .map(|task| {
73 | (
74 | task.id,
75 | Task {
76 | id: task.id,
77 | created_at: DateTimeBridge::from_offset_date_time(task.created_at),
78 | updated_at: DateTimeBridge::from_offset_date_time(task.updated_at),
79 | title: task.title.clone(),
80 | description: task.description.clone(),
81 | owner_id: task.owner_id,
82 | status: TaskStatus::from_optional_str(&task.status),
83 | priority: TaskPriority::from_optional_str(&task.priority),
84 | due_date: task.due_date.map(DateTimeBridge::from_offset_date_time),
85 | project_id: task.project_id,
86 | lead_id: task.lead_id,
87 | count: task.count,
88 | parent_id: task.parent_id,
89 | },
90 | )
91 | })
92 | .collect();
93 |
94 | // println!("{:?}", tasks_map);
95 | Ok(tasks_map)
96 | }
97 | }
98 |
99 | #[async_trait::async_trait]
100 | impl Loader for ProjectLoader {
101 | type Value = Project;
102 | type Error = Arc;
103 |
104 | async fn load(&self, keys: &'_ [Uuid]) -> Result, Self::Error> {
105 | let projects = sqlx::query!(
106 | r#"
107 | SELECT * FROM projects WHERE id = ANY($1)
108 | "#,
109 | &keys
110 | )
111 | .fetch_all(&*self.0.pool)
112 | .await
113 | .unwrap();
114 |
115 | //iterate to get the hashmap
116 | let projects_map: HashMap = projects
117 | .iter()
118 | .map(|project| {
119 | (
120 | project.id,
121 | Project {
122 | id: project.id,
123 | created_at: DateTimeBridge::from_offset_date_time(project.created_at),
124 | updated_at: DateTimeBridge::from_offset_date_time(project.updated_at),
125 | name: project.name.clone(),
126 | description: project.description.clone(),
127 | prefix: project.prefix.clone(),
128 | owner_id: project.owner_id,
129 | lead_id: project.lead_id,
130 | start_date: project
131 | .start_date
132 | .map(DateTimeBridge::from_offset_date_time),
133 | due_date: project.due_date.map(DateTimeBridge::from_offset_date_time),
134 | },
135 | )
136 | })
137 | .collect();
138 |
139 | //println!("{:?}", projects);
140 | Ok(projects_map)
141 | }
142 | }
143 |
144 | #[async_trait::async_trait]
145 | impl Loader for MemberLoader {
146 | type Value = Member;
147 | type Error = Arc;
148 |
149 | async fn load(&self, keys: &'_ [Uuid]) -> Result, Self::Error> {
150 | let members = sqlx::query!(
151 | r#"
152 | SELECT * FROM members WHERE id = ANY($1)
153 | "#,
154 | &keys
155 | )
156 | .fetch_all(&*self.0.pool)
157 | .await
158 | .unwrap();
159 |
160 | //iterate to get the hashmap
161 | let members_map: HashMap = members
162 | .iter()
163 | .map(|member| {
164 | (
165 | member.id,
166 | Member {
167 | id: member.id,
168 | created_at: DateTimeBridge::from_offset_date_time(member.created_at),
169 | updated_at: DateTimeBridge::from_offset_date_time(member.updated_at),
170 | name: member.name.clone(),
171 | email: member.email.clone(),
172 | github_id: member.github_id.clone(),
173 | google_id: member.google_id.clone(),
174 | photo_url: member.photo_url.clone(),
175 | role: MemberRole::from_optional_str(&member.role),
176 | password_hash: None,
177 | },
178 | )
179 | })
180 | .collect();
181 |
182 | //println!("{:?}", members);
183 | Ok(members_map)
184 | }
185 | }
186 |
187 | #[async_trait::async_trait]
188 | impl Loader for LabelLoader {
189 | type Value = Label;
190 | type Error = Arc;
191 |
192 | async fn load(&self, keys: &'_ [Uuid]) -> Result, Self::Error> {
193 | let labels = sqlx::query!(
194 | r#"
195 | SELECT * FROM labels WHERE id = ANY($1)
196 | "#,
197 | &keys
198 | )
199 | .fetch_all(&*self.0.pool)
200 | .await
201 | .unwrap();
202 |
203 | //iterate to get the hashmap
204 | let labels_map: HashMap = labels
205 | .iter()
206 | .map(|r| {
207 | (
208 | r.id,
209 | Label {
210 | id: r.id,
211 | created_at: DateTimeBridge::from_offset_date_time(r.created_at),
212 | updated_at: DateTimeBridge::from_offset_date_time(r.updated_at),
213 | name: r.name.clone(),
214 | color: r.color.clone(),
215 | description: r.description.clone(),
216 | },
217 | )
218 | })
219 | .collect();
220 |
221 | // println!("{:?}", labels_map);
222 | Ok(labels_map)
223 | }
224 | }
225 |
226 | #[async_trait::async_trait]
227 | impl Loader for TeamLoader {
228 | type Value = Team;
229 | type Error = Arc;
230 |
231 | async fn load(&self, keys: &'_ [Uuid]) -> Result, Self::Error> {
232 | let teams = sqlx::query!(
233 | r#"
234 | SELECT * FROM teams WHERE id = ANY($1)
235 | "#,
236 | &keys
237 | )
238 | .fetch_all(&*self.0.pool)
239 | .await
240 | .unwrap();
241 |
242 | //iterate to get the hashmap
243 | let teams_map: HashMap = teams
244 | .iter()
245 | .map(|team| {
246 | (
247 | team.id,
248 | Team {
249 | id: team.id,
250 | created_at: DateTimeBridge::from_offset_date_time(team.created_at),
251 | updated_at: DateTimeBridge::from_offset_date_time(team.updated_at),
252 | name: team.name.clone(),
253 | owner_id: team.owner_id,
254 | visibility: TeamVisibility::from_optional_str(&team.visibility),
255 | prefix: team.prefix.clone(),
256 | },
257 | )
258 | })
259 | .collect();
260 |
261 | //println!("ga:{:?}", teams_map);
262 |
263 | Ok(teams_map)
264 | }
265 | }
266 |
--------------------------------------------------------------------------------
/src/sdk/member.rs:
--------------------------------------------------------------------------------
1 | use std::str::FromStr;
2 |
3 | use async_graphql::{dataloader::DataLoader, ComplexObject, Context, Enum, Result, SimpleObject};
4 | use chrono::{DateTime, Utc};
5 | use uuid::Uuid;
6 |
7 | use crate::{
8 | graphql::auth::extract_context,
9 | sdk::{
10 | project::Project,
11 | task::{Task, TaskPriority, TaskStatus},
12 | team::Team,
13 | utilities::DateTimeBridge,
14 | },
15 | };
16 |
17 | use super::loaders::{ProjectLoader, TaskLoader, TeamLoader};
18 |
19 | #[derive(SimpleObject, Clone, Debug)]
20 | #[graphql(complex)]
21 | pub struct Member {
22 | pub id: Uuid,
23 | pub created_at: DateTime,
24 | pub updated_at: DateTime,
25 |
26 | pub name: String,
27 | pub email: String,
28 |
29 | pub github_id: Option,
30 | pub google_id: Option,
31 |
32 | pub photo_url: Option,
33 |
34 | pub role: MemberRole,
35 |
36 | #[graphql(skip)]
37 | pub password_hash: Option,
38 | }
39 |
40 | #[ComplexObject]
41 | impl Member {
42 | pub async fn owned_tasks(&self, ctx: &Context<'_>) -> Result> {
43 | let (plexo_engine, _member_id) = extract_context(ctx)?;
44 |
45 | let tasks = sqlx::query!(r#"SELECT * FROM tasks WHERE owner_id = $1"#, &self.id)
46 | .fetch_all(&*plexo_engine.pool)
47 | .await
48 | .unwrap();
49 |
50 | Ok(tasks
51 | .iter()
52 | .map(|r| Task {
53 | id: r.id,
54 | created_at: DateTimeBridge::from_offset_date_time(r.created_at),
55 | updated_at: DateTimeBridge::from_offset_date_time(r.updated_at),
56 | title: r.title.clone(),
57 | description: r.description.clone(),
58 | status: TaskStatus::from_optional_str(&r.status),
59 | priority: TaskPriority::from_optional_str(&r.priority),
60 | due_date: r.due_date.map(DateTimeBridge::from_offset_date_time),
61 | project_id: r.project_id,
62 | lead_id: r.lead_id,
63 | owner_id: r.owner_id,
64 | count: r.count,
65 | parent_id: r.parent_id,
66 | })
67 | .collect())
68 | }
69 |
70 | pub async fn leading_tasks(&self, ctx: &Context<'_>) -> Result> {
71 | let (plexo_engine, _member_id) = extract_context(ctx)?;
72 |
73 | let tasks = sqlx::query!(r#"SELECT * FROM tasks WHERE lead_id = $1"#, &self.id)
74 | .fetch_all(&*plexo_engine.pool)
75 | .await
76 | .unwrap();
77 |
78 | Ok(tasks
79 | .iter()
80 | .map(|r| Task {
81 | id: r.id,
82 | created_at: DateTimeBridge::from_offset_date_time(r.created_at),
83 | updated_at: DateTimeBridge::from_offset_date_time(r.updated_at),
84 | title: r.title.clone(),
85 | description: r.description.clone(),
86 | status: TaskStatus::from_optional_str(&r.status),
87 | priority: TaskPriority::from_optional_str(&r.priority),
88 | due_date: r.due_date.map(DateTimeBridge::from_offset_date_time),
89 | project_id: r.project_id,
90 | lead_id: r.lead_id,
91 | owner_id: r.owner_id,
92 | count: r.count,
93 | parent_id: r.parent_id,
94 | })
95 | .collect())
96 | }
97 |
98 | pub async fn tasks(&self, ctx: &Context<'_>) -> Result> {
99 | let (plexo_engine, _member_id) = extract_context(ctx)?;
100 |
101 | let loader = ctx.data::>().unwrap();
102 |
103 | let ids: Vec = sqlx::query!(
104 | r#"
105 | SELECT task_id FROM tasks_by_assignees
106 | WHERE assignee_id = $1
107 | "#,
108 | &self.id
109 | )
110 | .fetch_all(&*plexo_engine.pool)
111 | .await
112 | .unwrap()
113 | .into_iter()
114 | .map(|id| id.task_id)
115 | .collect();
116 |
117 | let tasks_map = loader.load_many(ids.clone()).await.unwrap();
118 |
119 | let tasks: &Vec = &ids
120 | .into_iter()
121 | .map(|id| tasks_map.get(&id).unwrap().clone())
122 | .collect();
123 |
124 | Ok(tasks.clone())
125 | }
126 |
127 | pub async fn owned_projects(&self, ctx: &Context<'_>) -> Result> {
128 | let (plexo_engine, _member_id) = extract_context(ctx)?;
129 |
130 | let projects = sqlx::query!(r#"SELECT * FROM projects WHERE owner_id = $1"#, &self.id)
131 | .fetch_all(&*plexo_engine.pool)
132 | .await
133 | .unwrap();
134 |
135 | Ok(projects
136 | .iter()
137 | .map(|r| Project {
138 | id: r.id,
139 | created_at: DateTimeBridge::from_offset_date_time(r.created_at),
140 | updated_at: DateTimeBridge::from_offset_date_time(r.updated_at),
141 | name: r.name.clone(),
142 | description: r.description.clone(),
143 | prefix: r.prefix.clone(),
144 | owner_id: r.owner_id,
145 | lead_id: r.lead_id,
146 | start_date: r.start_date.map(DateTimeBridge::from_offset_date_time),
147 | due_date: r.due_date.map(DateTimeBridge::from_offset_date_time),
148 | })
149 | .collect())
150 | }
151 |
152 | pub async fn projects(&self, ctx: &Context<'_>) -> Result> {
153 | let (plexo_engine, _member_id) = extract_context(ctx)?;
154 |
155 | let loader = ctx.data::>().unwrap();
156 |
157 | let ids: Vec = sqlx::query!(
158 | r#"
159 | SELECT project_id FROM members_by_projects
160 | WHERE member_id = $1
161 | "#,
162 | &self.id
163 | )
164 | .fetch_all(&*plexo_engine.pool)
165 | .await
166 | .unwrap()
167 | .into_iter()
168 | .map(|id| id.project_id)
169 | .collect();
170 |
171 | let projects_map = loader.load_many(ids.clone()).await?;
172 |
173 | let projects: &Vec = &ids
174 | .into_iter()
175 | .map(|id| projects_map.get(&id).unwrap().clone())
176 | .collect();
177 |
178 | Ok(projects.clone())
179 | }
180 |
181 | pub async fn teams(&self, ctx: &Context<'_>) -> Result> {
182 | let (plexo_engine, _member_id) = extract_context(ctx)?;
183 |
184 | let loader = ctx.data::>()?;
185 |
186 | let ids: Vec = sqlx::query!(
187 | r#"
188 | SELECT team_id FROM members_by_teams
189 | WHERE member_id = $1
190 | "#,
191 | &self.id
192 | )
193 | .fetch_all(&*plexo_engine.pool)
194 | .await?
195 | .into_iter()
196 | .map(|id| id.team_id)
197 | .collect();
198 |
199 | let teams_map = loader.load_many(ids.clone()).await?;
200 |
201 | let teams: &Vec = &ids
202 | .into_iter()
203 | .map(|id| teams_map.get(&id).unwrap().clone())
204 | .collect();
205 |
206 | Ok(teams.clone())
207 | }
208 | }
/// Authorization role of a member within the platform.
#[derive(Enum, Copy, Clone, Eq, PartialEq, Debug)]
pub enum MemberRole {
    Admin,
    Member,
    ReadOnly,
}
215 |
216 | impl MemberRole {
217 | pub fn from_optional_str(s: &Option) -> Self {
218 | match s {
219 | Some(s) => Self::from_str(s.as_str()).unwrap_or(Self::ReadOnly),
220 | None => Self::ReadOnly,
221 | }
222 | }
223 |
224 | pub fn to_str(&self) -> &'static str {
225 | match self {
226 | Self::Admin => "Admin",
227 | Self::Member => "Member",
228 | Self::ReadOnly => "ReadOnly",
229 | }
230 | }
231 | }
232 |
233 | impl FromStr for MemberRole {
234 | type Err = ();
235 |
236 | fn from_str(s: &str) -> Result {
237 | match s {
238 | "Admin" => Ok(Self::Admin),
239 | "Member" => Ok(Self::Member),
240 | "ReadOnly" => Ok(Self::ReadOnly),
241 | _ => Err(()),
242 | }
243 | }
244 | }
245 |
--------------------------------------------------------------------------------
/src/sdk/mod.rs:
--------------------------------------------------------------------------------
1 | pub mod activity;
2 | pub mod labels;
3 | pub mod loaders;
4 | pub mod member;
5 | pub mod project;
6 | pub mod task;
7 | pub mod team;
8 | pub mod utilities;
9 |
--------------------------------------------------------------------------------
/src/sdk/organization.rs:
--------------------------------------------------------------------------------
1 | use async_graphql::{ComplexObject, SimpleObject};
2 | use chrono::{DateTime, Utc};
3 | use uuid::Uuid;
4 |
5 | #[derive(SimpleObject, Clone)]
6 | #[graphql(complex)]
7 | pub struct Organization {
8 | pub id: Uuid,
9 | pub created_at: DateTime,
10 | pub updated_at: DateTime,
11 |
12 | pub name: String,
13 | }
14 |
// Placeholder for future relation resolvers on `Organization`.
#[ComplexObject]
impl Organization {}
17 |
--------------------------------------------------------------------------------
/src/sdk/project.rs:
--------------------------------------------------------------------------------
1 | use async_graphql::{ComplexObject, Context, Result, SimpleObject};
2 | use chrono::{DateTime, Utc};
3 | use uuid::Uuid;
4 |
5 | use async_graphql::dataloader::DataLoader;
6 | use poem_openapi::Object;
7 |
8 | use super::loaders::{MemberLoader, TeamLoader};
9 | use crate::{
10 | graphql::auth::extract_context,
11 | sdk::{
12 | member::Member,
13 | task::{Task, TaskPriority, TaskStatus},
14 | team::Team,
15 | utilities::DateTimeBridge,
16 | },
17 | };
18 |
19 | #[derive(SimpleObject, Object, Clone)]
20 | #[graphql(complex)]
21 | pub struct Project {
22 | pub id: Uuid,
23 | pub created_at: DateTime,
24 | pub updated_at: DateTime,
25 |
26 | pub name: String,
27 | pub prefix: Option,
28 |
29 | pub owner_id: Uuid,
30 | pub description: Option,
31 |
32 | pub lead_id: Option,
33 | pub start_date: Option>,
34 | pub due_date: Option>,
35 | }
36 |
37 | #[ComplexObject]
38 | impl Project {
39 | pub async fn owner(&self, ctx: &Context<'_>) -> Result> {
40 | let (_plexo_engine, _member_id) = extract_context(ctx)?;
41 |
42 | let loader = ctx.data::>().unwrap();
43 |
44 | Ok(loader.load_one(self.owner_id).await.unwrap())
45 | }
46 |
47 | pub async fn members(&self, ctx: &Context<'_>) -> Result> {
48 | let (plexo_engine, _member_id) = extract_context(ctx)?;
49 |
50 | let loader = ctx.data::>().unwrap();
51 |
52 | let ids: Vec = sqlx::query!(
53 | r#"
54 | SELECT member_id FROM members_by_projects
55 | WHERE project_id = $1
56 | "#,
57 | &self.id
58 | )
59 | .fetch_all(&*plexo_engine.pool)
60 | .await
61 | .unwrap()
62 | .into_iter()
63 | .map(|id| id.member_id)
64 | .collect();
65 |
66 | let members_map = loader.load_many(ids.clone()).await.unwrap();
67 |
68 | let members: &Vec = &ids
69 | .into_iter()
70 | .map(|id| members_map.get(&id).unwrap().clone())
71 | .collect();
72 |
73 | Ok(members.clone())
74 | }
75 |
76 | pub async fn tasks(&self, ctx: &Context<'_>) -> Result> {
77 | //este caso específico necesita revisión
78 | let (plexo_engine, _member_id) = extract_context(ctx)?;
79 |
80 | let tasks = sqlx::query!(
81 | r#"
82 | SELECT * FROM tasks
83 | WHERE project_id = $1"#,
84 | &self.id
85 | )
86 | .fetch_all(&*plexo_engine.pool)
87 | .await
88 | .unwrap();
89 |
90 | Ok(tasks
91 | .iter()
92 | .map(|r| Task {
93 | id: r.id,
94 | created_at: DateTimeBridge::from_offset_date_time(r.created_at),
95 | updated_at: DateTimeBridge::from_offset_date_time(r.updated_at),
96 | title: r.title.clone(),
97 | description: r.description.clone(),
98 | status: TaskStatus::from_optional_str(&r.status),
99 | priority: TaskPriority::from_optional_str(&r.priority),
100 | due_date: r.due_date.map(DateTimeBridge::from_offset_date_time),
101 | project_id: r.project_id,
102 | lead_id: r.lead_id,
103 | owner_id: r.owner_id,
104 | count: r.count,
105 | parent_id: r.parent_id,
106 | })
107 | .collect())
108 | }
109 |
110 | pub async fn teams(&self, ctx: &Context<'_>) -> Result> {
111 | let (plexo_engine, _member_id) = extract_context(ctx)?;
112 |
113 | let loader = ctx.data::>().unwrap();
114 |
115 | let ids: Vec = sqlx::query!(
116 | r#"
117 | SELECT team_id FROM teams_by_projects
118 | WHERE project_id = $1
119 | "#,
120 | &self.id
121 | )
122 | .fetch_all(&*plexo_engine.pool)
123 | .await
124 | .unwrap()
125 | .into_iter()
126 | .map(|id| id.team_id)
127 | .collect();
128 |
129 | let teams_map = loader.load_many(ids.clone()).await.unwrap();
130 |
131 | let teams: &Vec = &ids
132 | .into_iter()
133 | .map(|id| teams_map.get(&id).unwrap().clone())
134 | .collect();
135 |
136 | Ok(teams.clone())
137 | }
138 |
139 | pub async fn leader(&self, ctx: &Context<'_>) -> Option {
140 | let loader = ctx.data::>().unwrap();
141 |
142 | //match to see is project_id is none
143 | match self.lead_id {
144 | Some(lead_id) => loader.load_one(lead_id).await.unwrap(),
145 | None => None,
146 | }
147 | }
148 | }
149 |
--------------------------------------------------------------------------------
/src/sdk/task.rs:
--------------------------------------------------------------------------------
1 | use std::str::FromStr;
2 |
3 | use async_graphql::{ComplexObject, Context, Enum, Result, SimpleObject};
4 | use chrono::{DateTime, Utc};
5 |
6 | use async_graphql::dataloader::DataLoader;
7 | use poem_openapi::Object;
8 | use uuid::Uuid;
9 |
10 | use serde::Deserialize;
11 |
12 | use super::{labels::Label, member::Member, project::Project};
13 |
14 | use super::loaders::{LabelLoader, MemberLoader, ProjectLoader, TaskLoader};
15 | use crate::graphql::auth::extract_context;
16 | use poem_openapi::Enum as OpenApiEnum;
17 | use serde::Serialize;
18 |
19 | #[derive(SimpleObject, Object, Clone, Debug, Serialize)]
20 | #[graphql(complex)]
21 | pub struct Task {
22 | pub id: Uuid,
23 | pub created_at: DateTime,
24 | pub updated_at: DateTime,
25 |
26 | pub title: String,
27 | pub description: Option,
28 |
29 | pub owner_id: Uuid,
30 |
31 | pub status: TaskStatus,
32 | pub priority: TaskPriority,
33 |
34 | pub due_date: Option>,
35 |
36 | pub project_id: Option,
37 | pub lead_id: Option,
38 |
39 | pub count: i32,
40 |
41 | pub parent_id: Option,
42 | }
43 |
44 | #[ComplexObject]
45 | impl Task {
46 | pub async fn owner(&self, ctx: &Context<'_>) -> Result> {
47 | let (_plexo_engine, _member_id) = extract_context(ctx)?;
48 |
49 | let loader = ctx.data::>().unwrap();
50 |
51 | //match to see is project_id is none
52 | Ok(loader.load_one(self.owner_id).await.unwrap())
53 | }
54 |
55 | pub async fn leader(&self, ctx: &Context<'_>) -> Result> {
56 | let (_plexo_engine, _member_id) = extract_context(ctx)?;
57 |
58 | let loader = ctx.data::>().unwrap();
59 |
60 | //match to see is project_id is none
61 | Ok(match self.lead_id {
62 | Some(lead_id) => loader.load_one(lead_id).await.unwrap(),
63 | None => None,
64 | })
65 | }
66 |
67 | pub async fn project(&self, ctx: &Context<'_>) -> Result> {
68 | let (_plexo_engine, _member_id) = extract_context(ctx)?;
69 |
70 | let loader = ctx.data::>().unwrap();
71 |
72 | //match to see is project_id is none
73 | Ok(match self.project_id {
74 | Some(project_id) => loader.load_one(project_id).await.unwrap(),
75 | None => None,
76 | })
77 | }
78 |
79 | pub async fn assignees(&self, ctx: &Context<'_>) -> Result> {
80 | let (plexo_engine, _member_id) = extract_context(ctx)?;
81 |
82 | let loader = ctx.data::>().unwrap();
83 |
84 | let ids: Vec = sqlx::query!(
85 | r#"
86 | SELECT assignee_id FROM tasks_by_assignees
87 | WHERE task_id = $1
88 | "#,
89 | &self.id
90 | )
91 | .fetch_all(&*plexo_engine.pool)
92 | .await
93 | .unwrap()
94 | .into_iter()
95 | .map(|id| id.assignee_id)
96 | .collect();
97 |
98 | let members_map = loader.load_many(ids.clone()).await.unwrap();
99 |
100 | let members: &Vec = &ids
101 | .into_iter()
102 | .map(|id| members_map.get(&id).unwrap().clone())
103 | .collect();
104 |
105 | Ok(members.clone())
106 | }
107 |
108 | pub async fn labels(&self, ctx: &Context<'_>) -> Result> {
109 | let (plexo_engine, _member_id) = extract_context(ctx)?;
110 |
111 | let loader = ctx.data::>().unwrap();
112 |
113 | let ids: Vec = sqlx::query!(
114 | r#"
115 | SELECT label_id FROM labels_by_tasks
116 | WHERE task_id = $1
117 | "#,
118 | &self.id
119 | )
120 | .fetch_all(&*plexo_engine.pool)
121 | .await
122 | .unwrap()
123 | .into_iter()
124 | .map(|id| id.label_id)
125 | .collect();
126 |
127 | let labels_map = loader.load_many(ids.clone()).await.unwrap();
128 |
129 | let labels: &Vec = &ids
130 | .into_iter()
131 | .map(|id| labels_map.get(&id).unwrap().clone())
132 | .collect();
133 |
134 | Ok(labels.clone())
135 | }
136 |
137 | pub async fn parent(&self, ctx: &Context<'_>) -> Result> {
138 | let (_plexo_engine, _member_id) = extract_context(ctx)?;
139 |
140 | let loader = ctx.data::