├── src ├── domain │ ├── mod.rs │ └── user.rs ├── views │ ├── mod.rs │ └── admin.rs ├── api │ ├── mod.rs │ ├── health.rs │ └── auth.rs ├── handler │ ├── mod.rs │ ├── logging.rs │ ├── auth.rs │ └── errors.rs ├── services │ ├── mod.rs │ ├── email_service.rs │ ├── jwt_service.rs │ └── group_service.rs ├── lib.rs ├── migrations │ └── V1__create_user_table.sql └── repository │ └── mod.rs ├── templates ├── welcome.html ├── login.html ├── register.html ├── errors │ ├── 400.html │ ├── 404.html │ ├── 403.html │ ├── 500.html │ └── 503.html ├── base.html ├── dashboard.html ├── forgot_password.html ├── reset_password.html ├── change_password.html └── mail │ └── password_reset.html ├── hurl.env ├── .sqlx ├── query-df013c8a6d402586dbd9a5dfcafdf7fe15f96ecb8c6e35fa71af4d376ff2af76.json ├── query-68e38d420430ee0328eb899540e835bb84037ba8df7c19aaeccf6b9055d26460.json ├── query-95bf17c9f82725ffbbd1cc66a495d943c7b2ecb1c51f5263f8c1db6566b947b3.json ├── query-7e0e72ac2d7a63e53ce1d3efa53c622aa340567c1d8b6cacce1bc3fec6561bd7.json ├── query-a91c966bc66d1efec23e3a336fe515fa9149901b2882bbbd6eec5772d215927b.json ├── query-55754e1dd7082a06867e10686bd38c123949e4c4c80d98a006d2999e7b365a78.json ├── query-293633706090984c01cc04f1aa9469a915550204c53953c03b457805e004f229.json ├── query-0c20fbbc3563e75fbff5a132f33e2e1d9e9c43e2f84ebfee5b09ab7801943911.json ├── query-4ef9d78aaf530e01409f4f1fa6d8787b3f65600c3b06aa1f693a5a8bfa2bcc99.json ├── query-679d925b63860400f56ac878dda80f5eff5ad329f3dfa8b7ae48aba490cd8b69.json ├── query-dc64e1d25d9ced3a49130cee99f6edc3f70a4917910cf3b76faefc24ac32159d.json ├── query-eea84cb1cff53062066d8c2a098c902e6a5899580bc39d615360588deeef35a9.json ├── query-95114e2b92fa070e175362d2bae7c825a672dd12bc3c6d537315db42c6b6f6b6.json ├── query-21bc65bcf84123a99bb5f8f42e21dfebb3ba946b48640d2635f2252ffb88601d.json ├── query-d3c5ad665db0ed87d615e1eadbb582c46425e047b0e01481d3b2b03a56e8a714.json ├── query-aa7b4532770146c3773d37d3ed41972203b349ae0723d7e81495edfbd5356abd.json ├── query-e6a21b7872a21babe310bc4b54b7af3a4fba450b17c613dd43842a0321d2b901.json ├── query-376f7eb4ea933656da1a27ba0f9c0cecd63a9a09ddb289824a1df2b38265779a.json ├── query-c8232064c4907dca27ec50b16d8a80a2ed39c31dd06cc4a82afea18996ba7736.json ├── query-ef85f457e7fbcb1aad277198b4e984bc7bde2f596a218260d1c9e0df54b1c851.json ├── query-516045641716ad9a84221b2d032c86990f933aea9a10917cdd84d89ccf35f856.json ├── query-9bb01184647b17d479d5a8f8b7dba53c8a5788be0ce3cf92700d8f704ffe2012.json ├── query-9cba992505d6b80cb356909d3e2360385af579fe6eccd1089c96a613989a1caa.json ├── query-2d5c0aa80bc6ab079d56ae8937c4c6691d01edc8f211245d3006c3125e72df34.json ├── query-5c695b62cccea048108b3f63e130a8a187f22a23e35e9a310f1f188e6a75e5a7.json ├── query-470cb7bce44297b903acf0dc0d25aff88ac8f2ec68cbfbc193d9c16fbcc8a303.json ├── query-501c9ab794cb66daddf6c4ebcef8df2f9f696e0ae0f403551cc0bb9c3cd18cc8.json ├── query-f2934589f15af1085b812e01ba8de1ee7a1015dfc86ea082f8d5a98a10ac0613.json ├── query-2c42062a084c2b179b9f84bfcc808f01694dcd8187589bd9c852b4b7853ec821.json ├── query-521cb02df6a1e3a7aa5d633bdbf446f6aabbffa1c0f0acd423783c6369062b3c.json ├── query-f240852fc5c60f82c7a1412a53fe42db6106195d8b2e20e9bd80d378934b34e7.json └── query-3742b4721561ed076810a5eb757d0d9b21914813e2e468a102e6fb20cf68ee9e.json ├── hurl ├── auth.hurl ├── check_user_role.hurl ├── run_hurl_tests.sh ├── user_roles.hurl └── admin.hurl ├── docker-compose.yml ├── .env-example ├── .gitignore ├── Cargo.toml ├── migrations ├── 20250819092538_create_users_table.sql └── 20250908121827_create_groups_and_policies.sql ├── 
docker-compose-prod-example.yml ├── CHANGELOG.md ├── Dockerfile ├── CODE_OF_CONDUCT.md ├── cliff.toml ├── .github └── workflows │ └── ci.yml ├── CONTRIBUTING.md ├── tests ├── admin_view_test.rs ├── sqlx_impl_tests.rs └── group_service_test.rs ├── README.md └── Makefile /src/domain/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod user; 2 | -------------------------------------------------------------------------------- /src/views/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod admin; 2 | pub mod auth; 3 | -------------------------------------------------------------------------------- /src/api/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod admin; 2 | pub mod auth; 3 | pub mod health; 4 | -------------------------------------------------------------------------------- /src/handler/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod auth; 2 | pub mod errors; 3 | pub mod logging; 4 | -------------------------------------------------------------------------------- /src/services/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod email_service; 2 | pub mod group_service; 3 | pub mod jwt_service; 4 | pub mod policy_service; 5 | pub mod user_service; 6 | -------------------------------------------------------------------------------- /src/lib.rs: -------------------------------------------------------------------------------- 1 | pub mod api; 2 | pub mod domain; 3 | pub mod handler; 4 | pub mod repository; 5 | pub mod services; 6 | pub mod views; 7 | 8 | // re-exports for ease 9 | pub use repository::*; 10 | pub use services::*; 11 | -------------------------------------------------------------------------------- /templates/welcome.html: -------------------------------------------------------------------------------- 1 | {% extends "base.html" %} 2 | 3 | {% block content %} 4 |
5 | Bem-vindo, {{ username }}! 6 | Login realizado com sucesso.

7 |
8 | {% endblock %} 9 | -------------------------------------------------------------------------------- /hurl.env: -------------------------------------------------------------------------------- 1 | # replace with a user credential with superadmin group 2 | admin_email=hurl@example.com 3 | admin_username="Hurlmanda Whurler" 4 | admin_password="hurl12345" 5 | 6 | group_user_email=grouphurl@example.com 7 | group_user_username="Phurl Whurlker" 8 | group_user_password="hurl12345" -------------------------------------------------------------------------------- /.sqlx/query-df013c8a6d402586dbd9a5dfcafdf7fe15f96ecb8c6e35fa71af4d376ff2af76.json: -------------------------------------------------------------------------------- 1 | { 2 | "db_name": "PostgreSQL", 3 | "query": "DELETE FROM password_reset_tokens WHERE expires_at < now()", 4 | "describe": { 5 | "columns": [], 6 | "parameters": { 7 | "Left": [] 8 | }, 9 | "nullable": [] 10 | }, 11 | "hash": "df013c8a6d402586dbd9a5dfcafdf7fe15f96ecb8c6e35fa71af4d376ff2af76" 12 | } 13 | -------------------------------------------------------------------------------- /hurl/auth.hurl: -------------------------------------------------------------------------------- 1 | # Register User 2 | POST http://localhost:3000/api/register 3 | Content-Type: application/json 4 | 5 | { 6 | "email": "{{email}}", 7 | "username": "{{username}}", 8 | "password": "{{password}}" 9 | } 10 | 11 | # Login with this user 12 | POST http://localhost:3000/api/login 13 | Content-Type: application/json 14 | 15 | { 16 | "identity": "{{email}}", 17 | "password": "{{password}}" 18 | } 19 | -------------------------------------------------------------------------------- /.sqlx/query-68e38d420430ee0328eb899540e835bb84037ba8df7c19aaeccf6b9055d26460.json: -------------------------------------------------------------------------------- 1 | { 2 | "db_name": "PostgreSQL", 3 | "query": "UPDATE password_reset_tokens SET used_at = now() WHERE token_id = $1", 4 | "describe": { 5 | "columns": [], 6 | "parameters": { 7 | "Left": [ 8 | "Int8" 9 | ] 10 | }, 11 | "nullable": [] 12 | }, 13 | "hash": "68e38d420430ee0328eb899540e835bb84037ba8df7c19aaeccf6b9055d26460" 14 | } 15 | -------------------------------------------------------------------------------- /.sqlx/query-95bf17c9f82725ffbbd1cc66a495d943c7b2ecb1c51f5263f8c1db6566b947b3.json: -------------------------------------------------------------------------------- 1 | { 2 | "db_name": "PostgreSQL", 3 | "query": "DELETE FROM user_groups WHERE user_id = $1 AND group_id = $2", 4 | "describe": { 5 | "columns": [], 6 | "parameters": { 7 | "Left": [ 8 | "Int8", 9 | "Int8" 10 | ] 11 | }, 12 | "nullable": [] 13 | }, 14 | "hash": "95bf17c9f82725ffbbd1cc66a495d943c7b2ecb1c51f5263f8c1db6566b947b3" 15 | } 16 | -------------------------------------------------------------------------------- /.sqlx/query-7e0e72ac2d7a63e53ce1d3efa53c622aa340567c1d8b6cacce1bc3fec6561bd7.json: -------------------------------------------------------------------------------- 1 | { 2 | "db_name": "PostgreSQL", 3 | "query": "DELETE FROM user_policies WHERE user_id = $1 AND policy_id = $2", 4 | "describe": { 5 | "columns": [], 6 | "parameters": { 7 | "Left": [ 8 | "Int8", 9 | "Int8" 10 | ] 11 | }, 12 | "nullable": [] 13 | }, 14 | "hash": "7e0e72ac2d7a63e53ce1d3efa53c622aa340567c1d8b6cacce1bc3fec6561bd7" 15 | } 16 | -------------------------------------------------------------------------------- /.sqlx/query-a91c966bc66d1efec23e3a336fe515fa9149901b2882bbbd6eec5772d215927b.json: 
-------------------------------------------------------------------------------- 1 | { 2 | "db_name": "PostgreSQL", 3 | "query": "DELETE FROM group_policies WHERE group_id = $1 AND policy_id = $2", 4 | "describe": { 5 | "columns": [], 6 | "parameters": { 7 | "Left": [ 8 | "Int8", 9 | "Int8" 10 | ] 11 | }, 12 | "nullable": [] 13 | }, 14 | "hash": "a91c966bc66d1efec23e3a336fe515fa9149901b2882bbbd6eec5772d215927b" 15 | } 16 | -------------------------------------------------------------------------------- /.sqlx/query-55754e1dd7082a06867e10686bd38c123949e4c4c80d98a006d2999e7b365a78.json: -------------------------------------------------------------------------------- 1 | { 2 | "db_name": "PostgreSQL", 3 | "query": "UPDATE users SET first_login = $1, updated_at = now() WHERE user_id = $2", 4 | "describe": { 5 | "columns": [], 6 | "parameters": { 7 | "Left": [ 8 | "Bool", 9 | "Int8" 10 | ] 11 | }, 12 | "nullable": [] 13 | }, 14 | "hash": "55754e1dd7082a06867e10686bd38c123949e4c4c80d98a006d2999e7b365a78" 15 | } 16 | -------------------------------------------------------------------------------- /.sqlx/query-293633706090984c01cc04f1aa9469a915550204c53953c03b457805e004f229.json: -------------------------------------------------------------------------------- 1 | { 2 | "db_name": "PostgreSQL", 3 | "query": "UPDATE users SET password_hash = $1, updated_at = now() WHERE user_id = $2", 4 | "describe": { 5 | "columns": [], 6 | "parameters": { 7 | "Left": [ 8 | "Text", 9 | "Int8" 10 | ] 11 | }, 12 | "nullable": [] 13 | }, 14 | "hash": "293633706090984c01cc04f1aa9469a915550204c53953c03b457805e004f229" 15 | } 16 | -------------------------------------------------------------------------------- /docker-compose.yml: -------------------------------------------------------------------------------- 1 | services: 2 | keyrunes-database: 3 | image: postgres:17 4 | container_name: keyrunes-database 5 | environment: 6 | POSTGRES_USER: ${POSTGRES_USER} 7 | POSTGRES_PASSWORD: ${POSTGRES_PASSWORD} 8 | POSTGRES_DB: ${POSTGRES_DB} 9 | ports: 10 | - "${POSTGRES_PORT:-5432}:5432" 11 | volumes: 12 | - keyrunes_postgres_data:/var/lib/postgresql/data 13 | restart: unless-stopped 14 | 15 | volumes: 16 | keyrunes_postgres_data: 17 | -------------------------------------------------------------------------------- /.env-example: -------------------------------------------------------------------------------- 1 | POSTGRES_USER=postgres 2 | POSTGRES_PASSWORD=123456 3 | POSTGRES_DB=keyrunes 4 | POSTGRES_PORT=5432 5 | DATABASE_URL="postgres://${POSTGRES_USER}:${POSTGRES_PASSWORD}@localhost:${POSTGRES_PORT}/${POSTGRES_DB}" 6 | JWT_SECRET=your-super-secret-jwt-key-change-in-production 7 | LOG_LEVEL=info 8 | SMTP_USERNAME=noreply@example.com 9 | SMTP_PASSWORD=your_smtp_password 10 | SMTP_HOST=smtp.gmail.com 11 | SMTP_PORT=587 12 | FROM_EMAIL=noreply@example.com 13 | FROM_NAME=KeyRunes 14 | FRONTEND_URL=http://localhost:3000 15 | -------------------------------------------------------------------------------- /.sqlx/query-0c20fbbc3563e75fbff5a132f33e2e1d9e9c43e2f84ebfee5b09ab7801943911.json: -------------------------------------------------------------------------------- 1 | { 2 | "db_name": "PostgreSQL", 3 | "query": "INSERT INTO user_groups (user_id, group_id, assigned_by) VALUES ($1, $2, $3) ON CONFLICT DO NOTHING", 4 | "describe": { 5 | "columns": [], 6 | "parameters": { 7 | "Left": [ 8 | "Int8", 9 | "Int8", 10 | "Int8" 11 | ] 12 | }, 13 | "nullable": [] 14 | }, 15 | "hash": 
"0c20fbbc3563e75fbff5a132f33e2e1d9e9c43e2f84ebfee5b09ab7801943911" 16 | } 17 | -------------------------------------------------------------------------------- /.sqlx/query-4ef9d78aaf530e01409f4f1fa6d8787b3f65600c3b06aa1f693a5a8bfa2bcc99.json: -------------------------------------------------------------------------------- 1 | { 2 | "db_name": "PostgreSQL", 3 | "query": "INSERT INTO group_policies (group_id, policy_id, assigned_by) VALUES ($1, $2, $3) ON CONFLICT DO NOTHING", 4 | "describe": { 5 | "columns": [], 6 | "parameters": { 7 | "Left": [ 8 | "Int8", 9 | "Int8", 10 | "Int8" 11 | ] 12 | }, 13 | "nullable": [] 14 | }, 15 | "hash": "4ef9d78aaf530e01409f4f1fa6d8787b3f65600c3b06aa1f693a5a8bfa2bcc99" 16 | } 17 | -------------------------------------------------------------------------------- /.sqlx/query-679d925b63860400f56ac878dda80f5eff5ad329f3dfa8b7ae48aba490cd8b69.json: -------------------------------------------------------------------------------- 1 | { 2 | "db_name": "PostgreSQL", 3 | "query": "INSERT INTO user_policies (user_id, policy_id, assigned_by) VALUES ($1, $2, $3) ON CONFLICT DO NOTHING", 4 | "describe": { 5 | "columns": [], 6 | "parameters": { 7 | "Left": [ 8 | "Int8", 9 | "Int8", 10 | "Int8" 11 | ] 12 | }, 13 | "nullable": [] 14 | }, 15 | "hash": "679d925b63860400f56ac878dda80f5eff5ad329f3dfa8b7ae48aba490cd8b69" 16 | } 17 | -------------------------------------------------------------------------------- /.sqlx/query-dc64e1d25d9ced3a49130cee99f6edc3f70a4917910cf3b76faefc24ac32159d.json: -------------------------------------------------------------------------------- 1 | { 2 | "db_name": "PostgreSQL", 3 | "query": "SELECT COUNT(*) FROM users", 4 | "describe": { 5 | "columns": [ 6 | { 7 | "ordinal": 0, 8 | "name": "count", 9 | "type_info": "Int8" 10 | } 11 | ], 12 | "parameters": { 13 | "Left": [] 14 | }, 15 | "nullable": [ 16 | null 17 | ] 18 | }, 19 | "hash": "dc64e1d25d9ced3a49130cee99f6edc3f70a4917910cf3b76faefc24ac32159d" 20 | } 21 | -------------------------------------------------------------------------------- /.sqlx/query-eea84cb1cff53062066d8c2a098c902e6a5899580bc39d615360588deeef35a9.json: -------------------------------------------------------------------------------- 1 | { 2 | "db_name": "PostgreSQL", 3 | "query": "SELECT COUNT(*) FROM groups", 4 | "describe": { 5 | "columns": [ 6 | { 7 | "ordinal": 0, 8 | "name": "count", 9 | "type_info": "Int8" 10 | } 11 | ], 12 | "parameters": { 13 | "Left": [] 14 | }, 15 | "nullable": [ 16 | null 17 | ] 18 | }, 19 | "hash": "eea84cb1cff53062066d8c2a098c902e6a5899580bc39d615360588deeef35a9" 20 | } 21 | -------------------------------------------------------------------------------- /.sqlx/query-95114e2b92fa070e175362d2bae7c825a672dd12bc3c6d537315db42c6b6f6b6.json: -------------------------------------------------------------------------------- 1 | { 2 | "db_name": "PostgreSQL", 3 | "query": "SELECT COUNT(*) FROM policies", 4 | "describe": { 5 | "columns": [ 6 | { 7 | "ordinal": 0, 8 | "name": "count", 9 | "type_info": "Int8" 10 | } 11 | ], 12 | "parameters": { 13 | "Left": [] 14 | }, 15 | "nullable": [ 16 | null 17 | ] 18 | }, 19 | "hash": "95114e2b92fa070e175362d2bae7c825a672dd12bc3c6d537315db42c6b6f6b6" 20 | } 21 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Generated by Cargo 2 | # will have compiled files and executables 3 | debug/ 4 | target/ 5 | 6 | # These are backup files generated 
by rustfmt 7 | **/*.rs.bk 8 | 9 | # MSVC Windows builds of rustc generate these, which store debugging information 10 | *.pdb 11 | 12 | # Generated by cargo mutants 13 | # Contains mutation testing data 14 | **/mutants.out*/ 15 | 16 | # RustRover 17 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can 18 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore 19 | # and can be added to the global gitignore or merged into this file. For a more nuclear 20 | # option (not recommended) you can uncomment the following to ignore the entire idea folder. 21 | .idea/ 22 | *.env 23 | -------------------------------------------------------------------------------- /templates/login.html: -------------------------------------------------------------------------------- 1 | {% extends "base.html" %} 2 | 3 | {% block content %} 4 |
5 | Login 6 | 7 | {% if error %} 8 | {{ error }} 9 | {% endif %} 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | Esqueci a senha 22 |

23 |
24 | {% endblock %} 25 | -------------------------------------------------------------------------------- /templates/register.html: -------------------------------------------------------------------------------- 1 | {% extends "base.html" %} 2 | 3 | {% block content %} 4 |
5 | Registrar 6 | 7 | {% if error %} 8 | {{ error }}
9 | {% endif %} 10 | 11 |
12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 |
25 |
26 | {% endblock %} 27 | -------------------------------------------------------------------------------- /templates/errors/400.html: -------------------------------------------------------------------------------- 1 | {% extends "base.html" %} 2 | 3 | {% block content %} 4 |
5 | 400 6 | Bad Request 7 | 8 | 9 | The request could not be processed. Please check your data and try again. 10 | 11 | 12 | {% if error_message %} 13 | 14 | Details: {{ error_message }} 15 |
16 | {% endif %} 17 | 18 |
19 | ← Go Back 20 | Go to Home 21 |
22 |
23 | {% endblock %} 24 | -------------------------------------------------------------------------------- /src/migrations/V1__create_user_table.sql: -------------------------------------------------------------------------------- 1 | CREATE EXTENSION IF NOT EXISTS pgcrypto; 2 | 3 | CREATE TABLE IF NOT EXISTS users ( 4 | user_id BIGSERIAL PRIMARY KEY, 5 | external_id UUID NOT NULL DEFAULT gen_random_uuid(), 6 | password_hash TEXT NOT NULL, 7 | email VARCHAR(255) NOT NULL, 8 | username VARCHAR(50) NOT NULL, 9 | created_at TIMESTAMPTZ NOT NULL DEFAULT now(), 10 | updated_at TIMESTAMPTZ NOT NULL DEFAULT now() 11 | ); 12 | 13 | CREATE UNIQUE INDEX IF NOT EXISTS users_external_id_idx ON users (external_id); 14 | CREATE UNIQUE INDEX IF NOT EXISTS users_username_idx ON users (username); 15 | CREATE UNIQUE INDEX IF NOT EXISTS users_email_lower_idx ON users ((lower(email))); 16 | 17 | CREATE OR REPLACE FUNCTION set_updated_at() 18 | RETURNS TRIGGER AS $$ 19 | BEGIN 20 | NEW.updated_at = now(); 21 | RETURN NEW; 22 | END; 23 | $$ LANGUAGE plpgsql; 24 | 25 | DROP TRIGGER IF EXISTS trg_set_updated_at ON users; 26 | CREATE TRIGGER trg_set_updated_at 27 | BEFORE UPDATE ON users 28 | FOR EACH ROW 29 | EXECUTE PROCEDURE set_updated_at(); 30 | -------------------------------------------------------------------------------- /templates/base.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | {% if title %}{{ title }} - Keyrunes{% else %}My Keyrunes{% endif %} 7 | 8 | 9 | 10 | 15 | 16 | 17 |
18 | {% block header %} 19 | Keyrunes

20 | 24 |
25 | {% endblock header %} 26 |
27 | 28 |
29 | {% block content %}{% endblock %} 30 |
31 | 32 | 36 | 37 | 38 | -------------------------------------------------------------------------------- /src/views/admin.rs: -------------------------------------------------------------------------------- 1 | use axum::{ 2 | extract::Extension, 3 | response::{Html, IntoResponse}, 4 | }; 5 | use tera::Tera; 6 | 7 | use crate::handler::auth::AuthenticatedUser; 8 | 9 | pub async fn admin_page( 10 | Extension(user): Extension, 11 | Extension(tera): Extension, 12 | ) -> impl IntoResponse { 13 | // Check if user is superadmin 14 | if !user.groups.contains(&"superadmin".to_string()) { 15 | return Html("

403 Forbidden Superadmin access required ".to_string()) 16 | .into_response(); 17 | } 18 | 19 | let mut context = tera::Context::new(); 20 | context.insert( 21 | "user", 22 | &serde_json::json!({ 23 | "user_id": user.user_id, 24 | "username": user.username, 25 | "email": user.email, 26 | "groups": user.groups, 27 | }), 28 | ); 29 | 30 | match tera.render("admin.html", &context) { 31 | Ok(html) => Html(html).into_response(), 32 | Err(e) => { 33 | tracing::error!("Template error: {}", e); 34 | Html(format!(" Error rendering template {}
", e)).into_response() 35 | } 36 | } 37 | } 38 | -------------------------------------------------------------------------------- /.sqlx/query-21bc65bcf84123a99bb5f8f42e21dfebb3ba946b48640d2635f2252ffb88601d.json: -------------------------------------------------------------------------------- 1 | { 2 | "db_name": "PostgreSQL", 3 | "query": "SELECT group_id, external_id, name, description, created_at, updated_at FROM groups ORDER BY name", 4 | "describe": { 5 | "columns": [ 6 | { 7 | "ordinal": 0, 8 | "name": "group_id", 9 | "type_info": "Int8" 10 | }, 11 | { 12 | "ordinal": 1, 13 | "name": "external_id", 14 | "type_info": "Uuid" 15 | }, 16 | { 17 | "ordinal": 2, 18 | "name": "name", 19 | "type_info": "Varchar" 20 | }, 21 | { 22 | "ordinal": 3, 23 | "name": "description", 24 | "type_info": "Text" 25 | }, 26 | { 27 | "ordinal": 4, 28 | "name": "created_at", 29 | "type_info": "Timestamptz" 30 | }, 31 | { 32 | "ordinal": 5, 33 | "name": "updated_at", 34 | "type_info": "Timestamptz" 35 | } 36 | ], 37 | "parameters": { 38 | "Left": [] 39 | }, 40 | "nullable": [ 41 | false, 42 | false, 43 | false, 44 | true, 45 | false, 46 | false 47 | ] 48 | }, 49 | "hash": "21bc65bcf84123a99bb5f8f42e21dfebb3ba946b48640d2635f2252ffb88601d" 50 | } 51 | -------------------------------------------------------------------------------- /.sqlx/query-d3c5ad665db0ed87d615e1eadbb582c46425e047b0e01481d3b2b03a56e8a714.json: -------------------------------------------------------------------------------- 1 | { 2 | "db_name": "PostgreSQL", 3 | "query": "SELECT group_id, external_id, name, description, created_at, updated_at FROM groups WHERE name = $1", 4 | "describe": { 5 | "columns": [ 6 | { 7 | "ordinal": 0, 8 | "name": "group_id", 9 | "type_info": "Int8" 10 | }, 11 | { 12 | "ordinal": 1, 13 | "name": "external_id", 14 | "type_info": "Uuid" 15 | }, 16 | { 17 | "ordinal": 2, 18 | "name": "name", 19 | "type_info": "Varchar" 20 | }, 21 | { 22 | "ordinal": 3, 23 | "name": "description", 24 | "type_info": "Text" 25 | }, 26 | { 27 | "ordinal": 4, 28 | "name": "created_at", 29 | "type_info": "Timestamptz" 30 | }, 31 | { 32 | "ordinal": 5, 33 | "name": "updated_at", 34 | "type_info": "Timestamptz" 35 | } 36 | ], 37 | "parameters": { 38 | "Left": [ 39 | "Text" 40 | ] 41 | }, 42 | "nullable": [ 43 | false, 44 | false, 45 | false, 46 | true, 47 | false, 48 | false 49 | ] 50 | }, 51 | "hash": "d3c5ad665db0ed87d615e1eadbb582c46425e047b0e01481d3b2b03a56e8a714" 52 | } 53 | -------------------------------------------------------------------------------- /.sqlx/query-aa7b4532770146c3773d37d3ed41972203b349ae0723d7e81495edfbd5356abd.json: -------------------------------------------------------------------------------- 1 | { 2 | "db_name": "PostgreSQL", 3 | "query": "SELECT group_id, external_id, name, description, created_at, updated_at FROM groups WHERE group_id = $1", 4 | "describe": { 5 | "columns": [ 6 | { 7 | "ordinal": 0, 8 | "name": "group_id", 9 | "type_info": "Int8" 10 | }, 11 | { 12 | "ordinal": 1, 13 | "name": "external_id", 14 | "type_info": "Uuid" 15 | }, 16 | { 17 | "ordinal": 2, 18 | "name": "name", 19 | "type_info": "Varchar" 20 | }, 21 | { 22 | "ordinal": 3, 23 | "name": "description", 24 | "type_info": "Text" 25 | }, 26 | { 27 | "ordinal": 4, 28 | "name": "created_at", 29 | "type_info": "Timestamptz" 30 | }, 31 | { 32 | "ordinal": 5, 33 | "name": "updated_at", 34 | "type_info": "Timestamptz" 35 | } 36 | ], 37 | "parameters": { 38 | "Left": [ 39 | "Int8" 40 | ] 41 | }, 42 | "nullable": [ 43 | false, 44 | false, 45 | false, 
46 | true, 47 | false, 48 | false 49 | ] 50 | }, 51 | "hash": "aa7b4532770146c3773d37d3ed41972203b349ae0723d7e81495edfbd5356abd" 52 | } 53 | -------------------------------------------------------------------------------- /hurl/check_user_role.hurl: -------------------------------------------------------------------------------- 1 | POST http://localhost:3000/api/login 2 | Content-Type: application/json 3 | { 4 | "identity": "{{email}}", 5 | "password": "{{password}}" 6 | } 7 | HTTP 200 8 | [Captures] 9 | user_token: jsonpath "$.token" 10 | user_id: jsonpath "$.user.user_id" 11 | username: jsonpath "$.user.username" 12 | user_email: jsonpath "$.user.email" 13 | user_groups: jsonpath "$.user.groups" 14 | [Asserts] 15 | jsonpath "$.token" exists 16 | jsonpath "$.user.user_id" exists 17 | jsonpath "$.user.groups" isCollection 18 | jsonpath "$.user.groups" count >= 1 19 | 20 | POST http://localhost:3000/api/login 21 | Content-Type: application/json 22 | { 23 | "identity": "{{email}}", 24 | "password": "{{password}}" 25 | } 26 | HTTP 200 27 | [Asserts] 28 | POST http://localhost:3000/api/login 29 | Content-Type: application/json 30 | { 31 | "identity": "{{email}}", 32 | "password": "{{password}}" 33 | } 34 | HTTP 200 35 | [Captures] 36 | is_superadmin: jsonpath "$.user.groups" contains "superadmin" 37 | 38 | GET http://localhost:3000/api/admin/dashboard 39 | Authorization: Bearer {{user_token}} 40 | HTTP * 41 | [Asserts] 42 | status >= 200 43 | status < 500 44 | 45 | POST http://localhost:3000/api/login 46 | Content-Type: application/json 47 | { 48 | "identity": "{{email}}", 49 | "password": "{{password}}" 50 | } 51 | HTTP 200 52 | -------------------------------------------------------------------------------- /templates/errors/404.html: -------------------------------------------------------------------------------- 1 | {% extends "base.html" %} 2 | 3 | {% block content %} 4 |
5 | 404 6 | Page Not Found 7 | 8 | 9 | The page you are looking for does not exist or has been moved. This can happen when a link is broken or the address was typed incorrectly. 10 | 11 | 12 | {% if path %} 13 | 14 | Requested path: {{ path }} 15 | 16 | {% endif %} 17 | 18 | 19 | ← Go Back 20 | Go to Home 21 | 22 | 23 | 24 | Useful pages: 25 | Login 26 | Register 27 | Dashboard 28 |
29 |
30 | {% endblock %} 31 | -------------------------------------------------------------------------------- /templates/errors/403.html: -------------------------------------------------------------------------------- 1 | {% extends "base.html" %} 2 | 3 | {% block content %} 4 |
5 | 403 6 | Access Denied 7 | 8 | 9 | You do not have permission to access this resource. This can happen if you are not authenticated or do not have the necessary permissions. 10 | 11 | 12 | {% if required_permission %} 13 | 14 | Required permission: {{ required_permission }} 15 | 16 | {% endif %} 17 | 18 | 19 | 🔑 Login 20 | Go to Home 21 |
22 | 23 |
24 |

25 | 💡 If you believe you should have access to this resource, please contact the system administrator. 26 |

27 |
28 |
29 | {% endblock %} 30 | -------------------------------------------------------------------------------- /.sqlx/query-e6a21b7872a21babe310bc4b54b7af3a4fba450b17c613dd43842a0321d2b901.json: -------------------------------------------------------------------------------- 1 | { 2 | "db_name": "PostgreSQL", 3 | "query": "SELECT token_id, user_id, token, expires_at, used_at, created_at \n FROM password_reset_tokens \n WHERE token = $1 AND expires_at > now() AND used_at IS NULL", 4 | "describe": { 5 | "columns": [ 6 | { 7 | "ordinal": 0, 8 | "name": "token_id", 9 | "type_info": "Int8" 10 | }, 11 | { 12 | "ordinal": 1, 13 | "name": "user_id", 14 | "type_info": "Int8" 15 | }, 16 | { 17 | "ordinal": 2, 18 | "name": "token", 19 | "type_info": "Varchar" 20 | }, 21 | { 22 | "ordinal": 3, 23 | "name": "expires_at", 24 | "type_info": "Timestamptz" 25 | }, 26 | { 27 | "ordinal": 4, 28 | "name": "used_at", 29 | "type_info": "Timestamptz" 30 | }, 31 | { 32 | "ordinal": 5, 33 | "name": "created_at", 34 | "type_info": "Timestamptz" 35 | } 36 | ], 37 | "parameters": { 38 | "Left": [ 39 | "Text" 40 | ] 41 | }, 42 | "nullable": [ 43 | false, 44 | false, 45 | false, 46 | false, 47 | true, 48 | false 49 | ] 50 | }, 51 | "hash": "e6a21b7872a21babe310bc4b54b7af3a4fba450b17c613dd43842a0321d2b901" 52 | } 53 | -------------------------------------------------------------------------------- /.sqlx/query-376f7eb4ea933656da1a27ba0f9c0cecd63a9a09ddb289824a1df2b38265779a.json: -------------------------------------------------------------------------------- 1 | { 2 | "db_name": "PostgreSQL", 3 | "query": "INSERT INTO groups (external_id, name, description) \n VALUES ($1, $2, $3) \n RETURNING group_id, external_id, name, description, created_at, updated_at", 4 | "describe": { 5 | "columns": [ 6 | { 7 | "ordinal": 0, 8 | "name": "group_id", 9 | "type_info": "Int8" 10 | }, 11 | { 12 | "ordinal": 1, 13 | "name": "external_id", 14 | "type_info": "Uuid" 15 | }, 16 | { 17 | "ordinal": 2, 18 | "name": "name", 19 | "type_info": "Varchar" 20 | }, 21 | { 22 | "ordinal": 3, 23 | "name": "description", 24 | "type_info": "Text" 25 | }, 26 | { 27 | "ordinal": 4, 28 | "name": "created_at", 29 | "type_info": "Timestamptz" 30 | }, 31 | { 32 | "ordinal": 5, 33 | "name": "updated_at", 34 | "type_info": "Timestamptz" 35 | } 36 | ], 37 | "parameters": { 38 | "Left": [ 39 | "Uuid", 40 | "Varchar", 41 | "Text" 42 | ] 43 | }, 44 | "nullable": [ 45 | false, 46 | false, 47 | false, 48 | true, 49 | false, 50 | false 51 | ] 52 | }, 53 | "hash": "376f7eb4ea933656da1a27ba0f9c0cecd63a9a09ddb289824a1df2b38265779a" 54 | } 55 | -------------------------------------------------------------------------------- /.sqlx/query-c8232064c4907dca27ec50b16d8a80a2ed39c31dd06cc4a82afea18996ba7736.json: -------------------------------------------------------------------------------- 1 | { 2 | "db_name": "PostgreSQL", 3 | "query": "SELECT g.group_id, g.external_id, g.name, g.description, g.created_at, g.updated_at\n FROM groups g\n INNER JOIN user_groups ug ON g.group_id = ug.group_id\n WHERE ug.user_id = $1", 4 | "describe": { 5 | "columns": [ 6 | { 7 | "ordinal": 0, 8 | "name": "group_id", 9 | "type_info": "Int8" 10 | }, 11 | { 12 | "ordinal": 1, 13 | "name": "external_id", 14 | "type_info": "Uuid" 15 | }, 16 | { 17 | "ordinal": 2, 18 | "name": "name", 19 | "type_info": "Varchar" 20 | }, 21 | { 22 | "ordinal": 3, 23 | "name": "description", 24 | "type_info": "Text" 25 | }, 26 | { 27 | "ordinal": 4, 28 | "name": "created_at", 29 | "type_info": "Timestamptz" 30 | }, 31 | 
{ 32 | "ordinal": 5, 33 | "name": "updated_at", 34 | "type_info": "Timestamptz" 35 | } 36 | ], 37 | "parameters": { 38 | "Left": [ 39 | "Int8" 40 | ] 41 | }, 42 | "nullable": [ 43 | false, 44 | false, 45 | false, 46 | true, 47 | false, 48 | false 49 | ] 50 | }, 51 | "hash": "c8232064c4907dca27ec50b16d8a80a2ed39c31dd06cc4a82afea18996ba7736" 52 | } 53 | -------------------------------------------------------------------------------- /.sqlx/query-ef85f457e7fbcb1aad277198b4e984bc7bde2f596a218260d1c9e0df54b1c851.json: -------------------------------------------------------------------------------- 1 | { 2 | "db_name": "PostgreSQL", 3 | "query": "INSERT INTO password_reset_tokens (user_id, token, expires_at) \n VALUES ($1, $2, $3) \n RETURNING token_id, user_id, token, expires_at, used_at, created_at", 4 | "describe": { 5 | "columns": [ 6 | { 7 | "ordinal": 0, 8 | "name": "token_id", 9 | "type_info": "Int8" 10 | }, 11 | { 12 | "ordinal": 1, 13 | "name": "user_id", 14 | "type_info": "Int8" 15 | }, 16 | { 17 | "ordinal": 2, 18 | "name": "token", 19 | "type_info": "Varchar" 20 | }, 21 | { 22 | "ordinal": 3, 23 | "name": "expires_at", 24 | "type_info": "Timestamptz" 25 | }, 26 | { 27 | "ordinal": 4, 28 | "name": "used_at", 29 | "type_info": "Timestamptz" 30 | }, 31 | { 32 | "ordinal": 5, 33 | "name": "created_at", 34 | "type_info": "Timestamptz" 35 | } 36 | ], 37 | "parameters": { 38 | "Left": [ 39 | "Int8", 40 | "Varchar", 41 | "Timestamptz" 42 | ] 43 | }, 44 | "nullable": [ 45 | false, 46 | false, 47 | false, 48 | false, 49 | true, 50 | false 51 | ] 52 | }, 53 | "hash": "ef85f457e7fbcb1aad277198b4e984bc7bde2f596a218260d1c9e0df54b1c851" 54 | } 55 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "keyrunes" 3 | version = "0.1.0" 4 | edition = "2024" 5 | default-run = "keyrunes" 6 | 7 | [dependencies] 8 | tokio = { version = "1", features = ["full"] } 9 | axum = "0.8" 10 | serde = { version = "1", features = ["derive"] } 11 | serde_json = "1.0" 12 | sqlx = { version = "0.8", features = ["runtime-tokio", "macros", "postgres", "uuid", "chrono"] } 13 | tera = "1.20" 14 | chrono = { version = "0.4", features = ["serde"] } 15 | thiserror = "2.0.17" 16 | tracing = "0.1" 17 | clap = { version = "4", features = ["derive"] } 18 | argon2 = "0.5.3" 19 | password-hash = "0.5" 20 | uuid = { version = "1", features = ["serde", "v4"] } 21 | anyhow = "1.0" 22 | async-trait = "0.1" 23 | rand = "0.9.2" 24 | regex = "1" 25 | tokio-stream = "0.1" 26 | tower-http = { version = "0.6.7", features = ["fs", "cors"] } 27 | url = "2.5.4" 28 | dotenvy = "0.15.7" 29 | tracing-subscriber = { version = "0.3.20", features = ["env-filter"]} 30 | serde_with = "3.14.0" 31 | hex = "0.4.3" 32 | futures = "0.3.31" 33 | secrecy = { version = "0.10.3", features = ["serde"] } 34 | tower = "0.5.2" 35 | lettre = { version = "0.11", features = ["tokio1", "tokio1-native-tls", "smtp-transport", "builder"] } 36 | josekit = "0.10.3" 37 | 38 | [dev-dependencies] 39 | test-case = "3.3.1" 40 | rstest = "0.26.1" 41 | proptest = "1.7.0" 42 | tokio-test = "0.4.4" 43 | fake = { version = "4.4", features = ["derive", "chrono", "uuid"] } 44 | criterion = { version = "0.7", features = ["html_reports"] } 45 | serial_test = "3.2.0" 46 | -------------------------------------------------------------------------------- /migrations/20250819092538_create_users_table.sql: 
-------------------------------------------------------------------------------- 1 | CREATE EXTENSION IF NOT EXISTS pgcrypto; 2 | 3 | CREATE TABLE IF NOT EXISTS users ( 4 | user_id BIGSERIAL PRIMARY KEY, 5 | external_id UUID NOT NULL DEFAULT gen_random_uuid(), 6 | password_hash TEXT NOT NULL, 7 | email VARCHAR(255) NOT NULL, 8 | username VARCHAR(50) NOT NULL, 9 | created_at TIMESTAMPTZ NOT NULL DEFAULT now(), 10 | updated_at TIMESTAMPTZ NOT NULL DEFAULT now() 11 | ); 12 | 13 | CREATE UNIQUE INDEX IF NOT EXISTS users_external_id_idx ON users (external_id); 14 | CREATE UNIQUE INDEX IF NOT EXISTS users_username_idx ON users (username); 15 | CREATE UNIQUE INDEX IF NOT EXISTS users_email_lower_idx ON users ((lower(email))); 16 | 17 | CREATE TABLE IF NOT EXISTS settings ( 18 | settings_id SERIAL PRIMARY KEY, 19 | key VARCHAR(100) NOT NULL UNIQUE, 20 | value TEXT NOT NULL, 21 | description TEXT, 22 | created_at TIMESTAMPTZ NOT NULL DEFAULT now(), 23 | updated_at TIMESTAMPTZ NOT NULL DEFAULT now() 24 | ); 25 | 26 | INSERT INTO settings (key, value, description) 27 | VALUES ('BASE_URL', 'http://127.0.0.1:3000', 'base url for local testing') 28 | ON CONFLICT (key) DO NOTHING; 29 | 30 | CREATE OR REPLACE FUNCTION set_updated_at() 31 | RETURNS TRIGGER AS $$ 32 | BEGIN 33 | NEW.updated_at = now(); 34 | RETURN NEW; 35 | END; 36 | $$ LANGUAGE plpgsql; 37 | 38 | DROP TRIGGER IF EXISTS trg_set_updated_at ON users; 39 | CREATE TRIGGER trg_set_updated_at 40 | BEFORE UPDATE ON users 41 | FOR EACH ROW 42 | EXECUTE PROCEDURE set_updated_at(); 43 | -------------------------------------------------------------------------------- /templates/errors/500.html: -------------------------------------------------------------------------------- 1 | {% extends "base.html" %} 2 | 3 | {% block content %} 4 |
5 | 500 6 | Internal Server Error 7 | 8 | 9 | Something went wrong on the server while processing your request. Our team has been notified and is working to resolve the issue. 10 | 11 | 12 | {% if error_id %} 13 | 14 | Error ID: {{ error_id }} 15 | 16 | Use this ID when contacting support. 17 | 18 | 19 | {% endif %} 20 | 21 | 22 | 🔄 Try Again 23 | Go to Home 24 |
25 | 26 |
27 |

28 | 💡 Tip: If the problem persists, try clearing your browser cache or contact support at contact@jonatasoliveira.dev 29 |

30 |
31 |
32 | {% endblock %} 33 | -------------------------------------------------------------------------------- /templates/dashboard.html: -------------------------------------------------------------------------------- 1 | {% extends "base.html" %} 2 | 3 | {% block header %} 4 |

Keyrunes 5 | 6 | {% endblock header %} 7 | 8 | {% block content %} 9 | 10 | Dashboard 11 | 12 | {% if user %} 13 | 14 | Welcome, {{ user.username }}! ({{ user.email }}) 15 | 16 | 17 | Your Groups 18 | 19 | {% for group in user.groups %} 20 | {{ group }} 21 | {% else %} 22 | No groups assigned 23 | {% endfor %} 24 | 25 | 26 | 27 | 28 | {% if user.first_login %} 29 | Change Password 30 | {% endif %} 31 | 32 | {% else %} 33 | 34 | You are not logged in. Please login to continue. 35 |
36 | {% endif %} 37 |
38 | 39 | 52 | {% endblock %} 53 | -------------------------------------------------------------------------------- /.sqlx/query-516045641716ad9a84221b2d032c86990f933aea9a10917cdd84d89ccf35f856.json: -------------------------------------------------------------------------------- 1 | { 2 | "db_name": "PostgreSQL", 3 | "query": "SELECT user_id, external_id, email, username, password_hash, created_at, first_login, updated_at FROM users WHERE email = $1", 4 | "describe": { 5 | "columns": [ 6 | { 7 | "ordinal": 0, 8 | "name": "user_id", 9 | "type_info": "Int8" 10 | }, 11 | { 12 | "ordinal": 1, 13 | "name": "external_id", 14 | "type_info": "Uuid" 15 | }, 16 | { 17 | "ordinal": 2, 18 | "name": "email", 19 | "type_info": "Varchar" 20 | }, 21 | { 22 | "ordinal": 3, 23 | "name": "username", 24 | "type_info": "Varchar" 25 | }, 26 | { 27 | "ordinal": 4, 28 | "name": "password_hash", 29 | "type_info": "Text" 30 | }, 31 | { 32 | "ordinal": 5, 33 | "name": "created_at", 34 | "type_info": "Timestamptz" 35 | }, 36 | { 37 | "ordinal": 6, 38 | "name": "first_login", 39 | "type_info": "Bool" 40 | }, 41 | { 42 | "ordinal": 7, 43 | "name": "updated_at", 44 | "type_info": "Timestamptz" 45 | } 46 | ], 47 | "parameters": { 48 | "Left": [ 49 | "Text" 50 | ] 51 | }, 52 | "nullable": [ 53 | false, 54 | false, 55 | false, 56 | false, 57 | false, 58 | false, 59 | false, 60 | false 61 | ] 62 | }, 63 | "hash": "516045641716ad9a84221b2d032c86990f933aea9a10917cdd84d89ccf35f856" 64 | } 65 | -------------------------------------------------------------------------------- /.sqlx/query-9bb01184647b17d479d5a8f8b7dba53c8a5788be0ce3cf92700d8f704ffe2012.json: -------------------------------------------------------------------------------- 1 | { 2 | "db_name": "PostgreSQL", 3 | "query": "SELECT user_id, external_id, email, username, password_hash, first_login, created_at, updated_at FROM users WHERE user_id = $1", 4 | "describe": { 5 | "columns": [ 6 | { 7 | "ordinal": 0, 8 | "name": "user_id", 9 | "type_info": "Int8" 10 | }, 11 | { 12 | "ordinal": 1, 13 | "name": "external_id", 14 | "type_info": "Uuid" 15 | }, 16 | { 17 | "ordinal": 2, 18 | "name": "email", 19 | "type_info": "Varchar" 20 | }, 21 | { 22 | "ordinal": 3, 23 | "name": "username", 24 | "type_info": "Varchar" 25 | }, 26 | { 27 | "ordinal": 4, 28 | "name": "password_hash", 29 | "type_info": "Text" 30 | }, 31 | { 32 | "ordinal": 5, 33 | "name": "first_login", 34 | "type_info": "Bool" 35 | }, 36 | { 37 | "ordinal": 6, 38 | "name": "created_at", 39 | "type_info": "Timestamptz" 40 | }, 41 | { 42 | "ordinal": 7, 43 | "name": "updated_at", 44 | "type_info": "Timestamptz" 45 | } 46 | ], 47 | "parameters": { 48 | "Left": [ 49 | "Int8" 50 | ] 51 | }, 52 | "nullable": [ 53 | false, 54 | false, 55 | false, 56 | false, 57 | false, 58 | false, 59 | false, 60 | false 61 | ] 62 | }, 63 | "hash": "9bb01184647b17d479d5a8f8b7dba53c8a5788be0ce3cf92700d8f704ffe2012" 64 | } 65 | -------------------------------------------------------------------------------- /.sqlx/query-9cba992505d6b80cb356909d3e2360385af579fe6eccd1089c96a613989a1caa.json: -------------------------------------------------------------------------------- 1 | { 2 | "db_name": "PostgreSQL", 3 | "query": "SELECT user_id, external_id, email, username, password_hash, created_at, first_login, updated_at FROM users WHERE username = $1", 4 | "describe": { 5 | "columns": [ 6 | { 7 | "ordinal": 0, 8 | "name": "user_id", 9 | "type_info": "Int8" 10 | }, 11 | { 12 | "ordinal": 1, 13 | "name": "external_id", 14 | "type_info": "Uuid" 15 | 
}, 16 | { 17 | "ordinal": 2, 18 | "name": "email", 19 | "type_info": "Varchar" 20 | }, 21 | { 22 | "ordinal": 3, 23 | "name": "username", 24 | "type_info": "Varchar" 25 | }, 26 | { 27 | "ordinal": 4, 28 | "name": "password_hash", 29 | "type_info": "Text" 30 | }, 31 | { 32 | "ordinal": 5, 33 | "name": "created_at", 34 | "type_info": "Timestamptz" 35 | }, 36 | { 37 | "ordinal": 6, 38 | "name": "first_login", 39 | "type_info": "Bool" 40 | }, 41 | { 42 | "ordinal": 7, 43 | "name": "updated_at", 44 | "type_info": "Timestamptz" 45 | } 46 | ], 47 | "parameters": { 48 | "Left": [ 49 | "Text" 50 | ] 51 | }, 52 | "nullable": [ 53 | false, 54 | false, 55 | false, 56 | false, 57 | false, 58 | false, 59 | false, 60 | false 61 | ] 62 | }, 63 | "hash": "9cba992505d6b80cb356909d3e2360385af579fe6eccd1089c96a613989a1caa" 64 | } 65 | -------------------------------------------------------------------------------- /.sqlx/query-2d5c0aa80bc6ab079d56ae8937c4c6691d01edc8f211245d3006c3125e72df34.json: -------------------------------------------------------------------------------- 1 | { 2 | "db_name": "PostgreSQL", 3 | "query": "\n SELECT u.user_id, u.external_id, u.email, u.username, u.first_login, u.created_at,\n array_agg(g.name) as groups\n FROM users u\n LEFT JOIN user_groups ug ON u.user_id = ug.user_id\n LEFT JOIN groups g ON ug.group_id = g.group_id\n GROUP BY u.user_id\n ORDER BY u.created_at DESC\n ", 4 | "describe": { 5 | "columns": [ 6 | { 7 | "ordinal": 0, 8 | "name": "user_id", 9 | "type_info": "Int8" 10 | }, 11 | { 12 | "ordinal": 1, 13 | "name": "external_id", 14 | "type_info": "Uuid" 15 | }, 16 | { 17 | "ordinal": 2, 18 | "name": "email", 19 | "type_info": "Varchar" 20 | }, 21 | { 22 | "ordinal": 3, 23 | "name": "username", 24 | "type_info": "Varchar" 25 | }, 26 | { 27 | "ordinal": 4, 28 | "name": "first_login", 29 | "type_info": "Bool" 30 | }, 31 | { 32 | "ordinal": 5, 33 | "name": "created_at", 34 | "type_info": "Timestamptz" 35 | }, 36 | { 37 | "ordinal": 6, 38 | "name": "groups", 39 | "type_info": "VarcharArray" 40 | } 41 | ], 42 | "parameters": { 43 | "Left": [] 44 | }, 45 | "nullable": [ 46 | false, 47 | false, 48 | false, 49 | false, 50 | false, 51 | false, 52 | null 53 | ] 54 | }, 55 | "hash": "2d5c0aa80bc6ab079d56ae8937c4c6691d01edc8f211245d3006c3125e72df34" 56 | } 57 | -------------------------------------------------------------------------------- /templates/errors/503.html: -------------------------------------------------------------------------------- 1 | {% extends "base.html" %} 2 | 3 | {% block content %} 4 |
5 | 503 6 | Service Temporarily Unavailable 7 | 8 | 9 | The service is temporarily unavailable due to maintenance or overload. Please try again in a few moments. 10 | 11 | 12 | {% if retry_after %} 13 | 14 | Try again in: {{ retry_after }} seconds 15 | 16 | {% endif %} 17 | 18 | 19 | 22 | Go to Home 23 | 24 | 25 | 26 | 27 | 💡 System Status: You can check the real-time system status on our health check page. 28 |

29 |
30 | 31 | 39 |
40 | {% endblock %} 41 | -------------------------------------------------------------------------------- /.sqlx/query-5c695b62cccea048108b3f63e130a8a187f22a23e35e9a310f1f188e6a75e5a7.json: -------------------------------------------------------------------------------- 1 | { 2 | "db_name": "PostgreSQL", 3 | "query": "INSERT INTO users (external_id, email, username, password_hash, first_login) VALUES ($1, $2, $3, $4, $5) RETURNING user_id, external_id, email, username, password_hash, first_login, created_at, updated_at", 4 | "describe": { 5 | "columns": [ 6 | { 7 | "ordinal": 0, 8 | "name": "user_id", 9 | "type_info": "Int8" 10 | }, 11 | { 12 | "ordinal": 1, 13 | "name": "external_id", 14 | "type_info": "Uuid" 15 | }, 16 | { 17 | "ordinal": 2, 18 | "name": "email", 19 | "type_info": "Varchar" 20 | }, 21 | { 22 | "ordinal": 3, 23 | "name": "username", 24 | "type_info": "Varchar" 25 | }, 26 | { 27 | "ordinal": 4, 28 | "name": "password_hash", 29 | "type_info": "Text" 30 | }, 31 | { 32 | "ordinal": 5, 33 | "name": "first_login", 34 | "type_info": "Bool" 35 | }, 36 | { 37 | "ordinal": 6, 38 | "name": "created_at", 39 | "type_info": "Timestamptz" 40 | }, 41 | { 42 | "ordinal": 7, 43 | "name": "updated_at", 44 | "type_info": "Timestamptz" 45 | } 46 | ], 47 | "parameters": { 48 | "Left": [ 49 | "Uuid", 50 | "Varchar", 51 | "Varchar", 52 | "Text", 53 | "Bool" 54 | ] 55 | }, 56 | "nullable": [ 57 | false, 58 | false, 59 | false, 60 | false, 61 | false, 62 | false, 63 | false, 64 | false 65 | ] 66 | }, 67 | "hash": "5c695b62cccea048108b3f63e130a8a187f22a23e35e9a310f1f188e6a75e5a7" 68 | } 69 | -------------------------------------------------------------------------------- /docker-compose-prod-example.yml: -------------------------------------------------------------------------------- 1 | # KeyRunes - Docker Compose Configuration 2 | # 3 | # Instructions: 4 | # 1. Copy this file to docker-compose.yml 5 | # 2. Create a .env file with your configuration 6 | # 3. 
Run: docker-compose up -d 7 | # 8 | # For more details, see DOCKER_README.md 9 | 10 | services: 11 | keyrunes-database: 12 | image: postgres:17 13 | container_name: keyrunes-database 14 | environment: 15 | POSTGRES_USER: ${POSTGRES_USER:-postgres} 16 | POSTGRES_PASSWORD: ${POSTGRES_PASSWORD} 17 | POSTGRES_DB: ${POSTGRES_DB:-keyrunes} 18 | ports: 19 | - "${POSTGRES_PORT:-5432}:5432" 20 | volumes: 21 | - keyrunes_data:/var/lib/postgresql/data 22 | restart: unless-stopped 23 | healthcheck: 24 | test: ["CMD-SHELL", "pg_isready -U ${POSTGRES_USER:-postgres}"] 25 | interval: 10s 26 | timeout: 5s 27 | retries: 5 28 | 29 | keyrunes: 30 | image: jonatasoli/keyrunes:latest 31 | container_name: keyrunes 32 | depends_on: 33 | keyrunes-database: 34 | condition: service_healthy 35 | environment: 36 | DATABASE_URL: ${DATABASE_URL} 37 | JWT_SECRET: ${JWT_SECRET} 38 | LOG_LEVEL: ${LOG_LEVEL:-info} 39 | SMTP_USERNAME: ${SMTP_USERNAME} 40 | SMTP_PASSWORD: ${SMTP_PASSWORD} 41 | SMTP_HOST: ${SMTP_HOST} 42 | SMTP_PORT: ${SMTP_PORT:-587} 43 | FROM_EMAIL: ${FROM_EMAIL} 44 | FROM_NAME: ${FROM_NAME:-KeyRunes} 45 | FRONTEND_URL: ${FRONTEND_URL:-http://localhost:3000} 46 | ports: 47 | - "3000:3000" 48 | restart: unless-stopped 49 | healthcheck: 50 | test: ["CMD", "curl", "-f", "http://localhost:3000/api/health"] 51 | interval: 30s 52 | timeout: 10s 53 | retries: 3 54 | start_period: 40s 55 | 56 | volumes: 57 | keyrunes_data: 58 | driver: local 59 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | ## [0.1.1] - 2025-11-29 2 | 3 | ### 🚀 Features 4 | 5 | - Add check groups in api and cli 6 | 7 | ### 🐛 Bug Fixes 8 | 9 | - Change jsonwebtoken to josekit and remove sqlx-mysql and rsa 10 | 11 | ### 💼 Other 12 | 13 | - Update libraries 14 | - Add ignore in cargo audit false positive 15 | 16 | ### ⚙️ Miscellaneous Tasks 17 | 18 | - Update changelog 19 | ## [0.1.0] - 2025-11-27 20 | 21 | ### 🚀 Features 22 | 23 | - Merge main repository 24 | - Add dotenvy cargo to read the environment variables 25 | - Add table reset password, groups and policies 26 | - Add forgot password, start new login and groups and polices features 27 | - Add tests 28 | - *(user)* Add admin endpoint for user registration with groups 29 | - Improve logs and tracings and add new pages 30 | - Cli tool for password recovery 31 | - Add forgot password view route 32 | 33 | ### 🐛 Bug Fixes 34 | 35 | - Fix error in register with first login 36 | - Register router for signup and login 37 | - Removed redundant routes 38 | - Register and login nav links removed after login 39 | - Provide database_url 40 | - Redirect to dashboard properly 41 | - Adding default run 42 | - Saved tokens in db and added settings table 43 | - Resolved comments 44 | - Fix merge 45 | 46 | ### 💼 Other 47 | 48 | - Add github actions 49 | - Fix clippy errors 50 | 51 | ### 🚜 Refactor 52 | 53 | - Add tests for settings functionality 54 | 55 | ### 📚 Documentation 56 | 57 | - Add contribuitors document 58 | - Add code of conduct 59 | - Fixed dev setup with sqlx and docker compose 60 | 61 | ### 🧪 Testing 62 | 63 | - Fix tests and remove tests 64 | - Add ignore tests and fix formating 65 | 66 | ### ⚙️ Miscellaneous Tasks 67 | 68 | - Add env example file 69 | - Ignore `.env` file 70 | - Add `docker-compose` to wake up the postgres service 71 | - Update README.md 72 | - Add badge in README 73 | - Add changelog 74 | 
-------------------------------------------------------------------------------- /.sqlx/query-470cb7bce44297b903acf0dc0d25aff88ac8f2ec68cbfbc193d9c16fbcc8a303.json: -------------------------------------------------------------------------------- 1 | { 2 | "db_name": "PostgreSQL", 3 | "query": "SELECT policy_id, external_id, name, description, resource, action, \n effect as \"effect_str\", conditions, created_at, updated_at \n FROM policies ORDER BY name", 4 | "describe": { 5 | "columns": [ 6 | { 7 | "ordinal": 0, 8 | "name": "policy_id", 9 | "type_info": "Int8" 10 | }, 11 | { 12 | "ordinal": 1, 13 | "name": "external_id", 14 | "type_info": "Uuid" 15 | }, 16 | { 17 | "ordinal": 2, 18 | "name": "name", 19 | "type_info": "Varchar" 20 | }, 21 | { 22 | "ordinal": 3, 23 | "name": "description", 24 | "type_info": "Text" 25 | }, 26 | { 27 | "ordinal": 4, 28 | "name": "resource", 29 | "type_info": "Varchar" 30 | }, 31 | { 32 | "ordinal": 5, 33 | "name": "action", 34 | "type_info": "Varchar" 35 | }, 36 | { 37 | "ordinal": 6, 38 | "name": "effect_str", 39 | "type_info": "Varchar" 40 | }, 41 | { 42 | "ordinal": 7, 43 | "name": "conditions", 44 | "type_info": "Jsonb" 45 | }, 46 | { 47 | "ordinal": 8, 48 | "name": "created_at", 49 | "type_info": "Timestamptz" 50 | }, 51 | { 52 | "ordinal": 9, 53 | "name": "updated_at", 54 | "type_info": "Timestamptz" 55 | } 56 | ], 57 | "parameters": { 58 | "Left": [] 59 | }, 60 | "nullable": [ 61 | false, 62 | false, 63 | false, 64 | true, 65 | false, 66 | false, 67 | false, 68 | true, 69 | false, 70 | false 71 | ] 72 | }, 73 | "hash": "470cb7bce44297b903acf0dc0d25aff88ac8f2ec68cbfbc193d9c16fbcc8a303" 74 | } 75 | -------------------------------------------------------------------------------- /.sqlx/query-501c9ab794cb66daddf6c4ebcef8df2f9f696e0ae0f403551cc0bb9c3cd18cc8.json: -------------------------------------------------------------------------------- 1 | { 2 | "db_name": "PostgreSQL", 3 | "query": "SELECT policy_id, external_id, name, description, resource, action, \n effect as \"effect_str\", conditions, created_at, updated_at \n FROM policies WHERE name = $1", 4 | "describe": { 5 | "columns": [ 6 | { 7 | "ordinal": 0, 8 | "name": "policy_id", 9 | "type_info": "Int8" 10 | }, 11 | { 12 | "ordinal": 1, 13 | "name": "external_id", 14 | "type_info": "Uuid" 15 | }, 16 | { 17 | "ordinal": 2, 18 | "name": "name", 19 | "type_info": "Varchar" 20 | }, 21 | { 22 | "ordinal": 3, 23 | "name": "description", 24 | "type_info": "Text" 25 | }, 26 | { 27 | "ordinal": 4, 28 | "name": "resource", 29 | "type_info": "Varchar" 30 | }, 31 | { 32 | "ordinal": 5, 33 | "name": "action", 34 | "type_info": "Varchar" 35 | }, 36 | { 37 | "ordinal": 6, 38 | "name": "effect_str", 39 | "type_info": "Varchar" 40 | }, 41 | { 42 | "ordinal": 7, 43 | "name": "conditions", 44 | "type_info": "Jsonb" 45 | }, 46 | { 47 | "ordinal": 8, 48 | "name": "created_at", 49 | "type_info": "Timestamptz" 50 | }, 51 | { 52 | "ordinal": 9, 53 | "name": "updated_at", 54 | "type_info": "Timestamptz" 55 | } 56 | ], 57 | "parameters": { 58 | "Left": [ 59 | "Text" 60 | ] 61 | }, 62 | "nullable": [ 63 | false, 64 | false, 65 | false, 66 | true, 67 | false, 68 | false, 69 | false, 70 | true, 71 | false, 72 | false 73 | ] 74 | }, 75 | "hash": "501c9ab794cb66daddf6c4ebcef8df2f9f696e0ae0f403551cc0bb9c3cd18cc8" 76 | } 77 | -------------------------------------------------------------------------------- /.sqlx/query-f2934589f15af1085b812e01ba8de1ee7a1015dfc86ea082f8d5a98a10ac0613.json: 
-------------------------------------------------------------------------------- 1 | { 2 | "db_name": "PostgreSQL", 3 | "query": "SELECT policy_id, external_id, name, description, resource, action, \n effect as \"effect_str\", conditions, created_at, updated_at \n FROM policies WHERE policy_id = $1", 4 | "describe": { 5 | "columns": [ 6 | { 7 | "ordinal": 0, 8 | "name": "policy_id", 9 | "type_info": "Int8" 10 | }, 11 | { 12 | "ordinal": 1, 13 | "name": "external_id", 14 | "type_info": "Uuid" 15 | }, 16 | { 17 | "ordinal": 2, 18 | "name": "name", 19 | "type_info": "Varchar" 20 | }, 21 | { 22 | "ordinal": 3, 23 | "name": "description", 24 | "type_info": "Text" 25 | }, 26 | { 27 | "ordinal": 4, 28 | "name": "resource", 29 | "type_info": "Varchar" 30 | }, 31 | { 32 | "ordinal": 5, 33 | "name": "action", 34 | "type_info": "Varchar" 35 | }, 36 | { 37 | "ordinal": 6, 38 | "name": "effect_str", 39 | "type_info": "Varchar" 40 | }, 41 | { 42 | "ordinal": 7, 43 | "name": "conditions", 44 | "type_info": "Jsonb" 45 | }, 46 | { 47 | "ordinal": 8, 48 | "name": "created_at", 49 | "type_info": "Timestamptz" 50 | }, 51 | { 52 | "ordinal": 9, 53 | "name": "updated_at", 54 | "type_info": "Timestamptz" 55 | } 56 | ], 57 | "parameters": { 58 | "Left": [ 59 | "Int8" 60 | ] 61 | }, 62 | "nullable": [ 63 | false, 64 | false, 65 | false, 66 | true, 67 | false, 68 | false, 69 | false, 70 | true, 71 | false, 72 | false 73 | ] 74 | }, 75 | "hash": "f2934589f15af1085b812e01ba8de1ee7a1015dfc86ea082f8d5a98a10ac0613" 76 | } 77 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | # Build stage 2 | FROM rust:latest as builder 3 | 4 | # Install system dependencies 5 | RUN apt-get update && apt-get install -y \ 6 | pkg-config \ 7 | libssl-dev \ 8 | libpq-dev \ 9 | && rm -rf /var/lib/apt/lists/* 10 | 11 | # Create app user 12 | RUN useradd -m -u 1001 keyrunes 13 | 14 | # Set working directory 15 | WORKDIR /app 16 | 17 | # Copy Cargo files for dependency caching 18 | COPY Cargo.toml Cargo.lock ./ 19 | 20 | # Create dummy main.rs to build dependencies 21 | RUN mkdir src && echo "fn main() {}" > src/main.rs 22 | 23 | # Build dependencies (cached layer) 24 | RUN cargo build --release && rm -rf src 25 | 26 | # Copy source code 27 | COPY src ./src 28 | COPY migrations ./migrations 29 | COPY templates ./templates 30 | COPY .sqlx ./.sqlx 31 | 32 | # Build the application 33 | ENV SQLX_OFFLINE=true 34 | RUN cargo build --release 35 | 36 | # Runtime stage 37 | FROM debian:bookworm-slim 38 | 39 | # Install runtime dependencies 40 | RUN apt-get update && apt-get install -y \ 41 | ca-certificates \ 42 | libpq5 \ 43 | libssl3 \ 44 | curl \ 45 | && rm -rf /var/lib/apt/lists/* 46 | 47 | # Create app user 48 | RUN useradd -m -u 1001 keyrunes 49 | 50 | # Set working directory 51 | WORKDIR /app 52 | 53 | # Copy binary from builder stage 54 | COPY --from=builder /app/target/release/keyrunes /usr/local/bin/keyrunes 55 | COPY --from=builder /app/target/release/cli /usr/local/bin/keyrunes-cli 56 | 57 | # Copy runtime files 58 | COPY --from=builder /app/migrations ./migrations 59 | COPY --from=builder /app/templates ./templates 60 | 61 | RUN chown -R keyrunes:keyrunes /app 62 | 63 | # Switch to app user 64 | USER keyrunes 65 | 66 | # Expose port 67 | EXPOSE 3000 68 | 69 | # Health check 70 | HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \ 71 | CMD curl -f http://localhost:3000/api/health || exit 1 72 
| 73 | # Environment variables 74 | ENV RUST_LOG=info 75 | ENV PORT=3000 76 | 77 | # Run the application 78 | CMD ["keyrunes"] 79 | -------------------------------------------------------------------------------- /.sqlx/query-2c42062a084c2b179b9f84bfcc808f01694dcd8187589bd9c852b4b7853ec821.json: -------------------------------------------------------------------------------- 1 | { 2 | "db_name": "PostgreSQL", 3 | "query": "SELECT p.policy_id, p.external_id, p.name, p.description, p.resource, p.action, \n p.effect as \"effect_str\", p.conditions, p.created_at, p.updated_at\n FROM policies p\n INNER JOIN group_policies gp ON p.policy_id = gp.policy_id\n WHERE gp.group_id = $1", 4 | "describe": { 5 | "columns": [ 6 | { 7 | "ordinal": 0, 8 | "name": "policy_id", 9 | "type_info": "Int8" 10 | }, 11 | { 12 | "ordinal": 1, 13 | "name": "external_id", 14 | "type_info": "Uuid" 15 | }, 16 | { 17 | "ordinal": 2, 18 | "name": "name", 19 | "type_info": "Varchar" 20 | }, 21 | { 22 | "ordinal": 3, 23 | "name": "description", 24 | "type_info": "Text" 25 | }, 26 | { 27 | "ordinal": 4, 28 | "name": "resource", 29 | "type_info": "Varchar" 30 | }, 31 | { 32 | "ordinal": 5, 33 | "name": "action", 34 | "type_info": "Varchar" 35 | }, 36 | { 37 | "ordinal": 6, 38 | "name": "effect_str", 39 | "type_info": "Varchar" 40 | }, 41 | { 42 | "ordinal": 7, 43 | "name": "conditions", 44 | "type_info": "Jsonb" 45 | }, 46 | { 47 | "ordinal": 8, 48 | "name": "created_at", 49 | "type_info": "Timestamptz" 50 | }, 51 | { 52 | "ordinal": 9, 53 | "name": "updated_at", 54 | "type_info": "Timestamptz" 55 | } 56 | ], 57 | "parameters": { 58 | "Left": [ 59 | "Int8" 60 | ] 61 | }, 62 | "nullable": [ 63 | false, 64 | false, 65 | false, 66 | true, 67 | false, 68 | false, 69 | false, 70 | true, 71 | false, 72 | false 73 | ] 74 | }, 75 | "hash": "2c42062a084c2b179b9f84bfcc808f01694dcd8187589bd9c852b4b7853ec821" 76 | } 77 | -------------------------------------------------------------------------------- /.sqlx/query-521cb02df6a1e3a7aa5d633bdbf446f6aabbffa1c0f0acd423783c6369062b3c.json: -------------------------------------------------------------------------------- 1 | { 2 | "db_name": "PostgreSQL", 3 | "query": "SELECT p.policy_id, p.external_id, p.name, p.description, p.resource, p.action, \n p.effect as \"effect_str\", p.conditions, p.created_at, p.updated_at\n FROM policies p\n INNER JOIN user_policies up ON p.policy_id = up.policy_id\n WHERE up.user_id = $1", 4 | "describe": { 5 | "columns": [ 6 | { 7 | "ordinal": 0, 8 | "name": "policy_id", 9 | "type_info": "Int8" 10 | }, 11 | { 12 | "ordinal": 1, 13 | "name": "external_id", 14 | "type_info": "Uuid" 15 | }, 16 | { 17 | "ordinal": 2, 18 | "name": "name", 19 | "type_info": "Varchar" 20 | }, 21 | { 22 | "ordinal": 3, 23 | "name": "description", 24 | "type_info": "Text" 25 | }, 26 | { 27 | "ordinal": 4, 28 | "name": "resource", 29 | "type_info": "Varchar" 30 | }, 31 | { 32 | "ordinal": 5, 33 | "name": "action", 34 | "type_info": "Varchar" 35 | }, 36 | { 37 | "ordinal": 6, 38 | "name": "effect_str", 39 | "type_info": "Varchar" 40 | }, 41 | { 42 | "ordinal": 7, 43 | "name": "conditions", 44 | "type_info": "Jsonb" 45 | }, 46 | { 47 | "ordinal": 8, 48 | "name": "created_at", 49 | "type_info": "Timestamptz" 50 | }, 51 | { 52 | "ordinal": 9, 53 | "name": "updated_at", 54 | "type_info": "Timestamptz" 55 | } 56 | ], 57 | "parameters": { 58 | "Left": [ 59 | "Int8" 60 | ] 61 | }, 62 | "nullable": [ 63 | false, 64 | false, 65 | false, 66 | true, 67 | false, 68 | false, 69 | false, 70 | true, 71 | 
false, 72 | false 73 | ] 74 | }, 75 | "hash": "521cb02df6a1e3a7aa5d633bdbf446f6aabbffa1c0f0acd423783c6369062b3c" 76 | } 77 | -------------------------------------------------------------------------------- /src/domain/user.rs: -------------------------------------------------------------------------------- 1 | use std::fmt; 2 | 3 | use regex::Regex; 4 | use secrecy::{ExposeSecret, SecretString}; 5 | use serde::Deserialize; 6 | 7 | #[derive(Debug, Clone)] 8 | pub struct Email(String); 9 | 10 | impl TryFrom<&str> for Email { 11 | type Error = anyhow::Error; 12 | 13 | fn try_from(value: &str) -> Result<Self, Self::Error> { 14 | let email_re = Regex::new(r"^[\w.+-]+@[\w-]+\.[\w.-]+$").unwrap(); 15 | if !email_re.is_match(value) { 16 | anyhow::bail!("invalid email"); 17 | } 18 | Ok(Self(value.to_owned())) 19 | } 20 | } 21 | 22 | impl<'de> Deserialize<'de> for Email { 23 | fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> 24 | where 25 | D: serde::Deserializer<'de>, 26 | { 27 | let email = String::deserialize(deserializer)?; 28 | Self::try_from(email.as_str()).map_err(|e| serde::de::Error::custom(e.to_string())) 29 | } 30 | } 31 | 32 | impl fmt::Display for Email { 33 | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { 34 | write!(f, "{}", self.0) 35 | } 36 | } 37 | 38 | impl AsRef<str> for Email { 39 | fn as_ref(&self) -> &str { 40 | self.0.as_str() 41 | } 42 | } 43 | 44 | #[derive(Debug, Clone)] 45 | pub struct Password(SecretString); 46 | 47 | impl TryFrom<&str> for Password { 48 | type Error = anyhow::Error; 49 | 50 | fn try_from(value: &str) -> Result<Self, Self::Error> { 51 | if value.len() < 8 { 52 | anyhow::bail!("password too short"); 53 | } 54 | 55 | Ok(Self(SecretString::from(value))) 56 | } 57 | } 58 | 59 | impl<'de> Deserialize<'de> for Password { 60 | fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> 61 | where 62 | D: serde::Deserializer<'de>, 63 | { 64 | let password = String::deserialize(deserializer)?; 65 | 66 | Self::try_from(password.as_str()).map_err(|e| serde::de::Error::custom(e.to_string()))
67 | } 68 | } 69 | 70 | impl Password { 71 | pub fn expose(&self) -> &str { 72 | self.0.expose_secret() 73 | } 74 | } 75 | -------------------------------------------------------------------------------- /.sqlx/query-f240852fc5c60f82c7a1412a53fe42db6106195d8b2e20e9bd80d378934b34e7.json: -------------------------------------------------------------------------------- 1 | { 2 | "db_name": "PostgreSQL", 3 | "query": "INSERT INTO policies (external_id, name, description, resource, action, effect, conditions) \n VALUES ($1, $2, $3, $4, $5, $6, $7) \n RETURNING policy_id, external_id, name, description, resource, action, \n effect as \"effect_str\", conditions, created_at, updated_at", 4 | "describe": { 5 | "columns": [ 6 | { 7 | "ordinal": 0, 8 | "name": "policy_id", 9 | "type_info": "Int8" 10 | }, 11 | { 12 | "ordinal": 1, 13 | "name": "external_id", 14 | "type_info": "Uuid" 15 | }, 16 | { 17 | "ordinal": 2, 18 | "name": "name", 19 | "type_info": "Varchar" 20 | }, 21 | { 22 | "ordinal": 3, 23 | "name": "description", 24 | "type_info": "Text" 25 | }, 26 | { 27 | "ordinal": 4, 28 | "name": "resource", 29 | "type_info": "Varchar" 30 | }, 31 | { 32 | "ordinal": 5, 33 | "name": "action", 34 | "type_info": "Varchar" 35 | }, 36 | { 37 | "ordinal": 6, 38 | "name": "effect_str", 39 | "type_info": "Varchar" 40 | }, 41 | { 42 | "ordinal": 7, 43 | "name": "conditions", 44 | "type_info": "Jsonb" 45 | }, 46 | { 47 | "ordinal": 8, 48 | "name": "created_at", 49 | "type_info": "Timestamptz" 50 | }, 51 | { 52 | "ordinal": 9, 53 | "name": "updated_at", 54 | "type_info": "Timestamptz" 55 | } 56 | ], 57 | "parameters": { 58 | "Left": [ 59 | "Uuid", 60 | "Varchar", 61 | "Text", 62 | "Varchar", 63 | "Varchar", 64 | "Varchar", 65 | "Jsonb" 66 | ] 67 | }, 68 | "nullable": [ 69 | false, 70 | false, 71 | false, 72 | true, 73 | false, 74 | false, 75 | false, 76 | true, 77 | false, 78 | false 79 | ] 80 | }, 81 | "hash": "f240852fc5c60f82c7a1412a53fe42db6106195d8b2e20e9bd80d378934b34e7" 82 | } 83 | -------------------------------------------------------------------------------- /templates/forgot_password.html: -------------------------------------------------------------------------------- 1 | {% block content %} 2 |
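The `Email` and `Password` newtypes defined in `src/domain/user.rs` above validate at construction time, so invalid values are rejected both when built directly and during serde deserialization. A short illustrative usage sketch (the crate path and names below are assumptions, for demonstration only):

```rust
use keyrunes::domain::user::{Email, Password}; // crate path assumed

fn main() {
    // Emails are checked against the regex at construction.
    assert!(Email::try_from("user@example.com").is_ok());
    assert!(Email::try_from("not-an-email").is_err());

    // Passwords shorter than 8 characters are rejected.
    assert!(Password::try_from("short").is_err());
    let pw = Password::try_from("correct horse battery").unwrap();

    // The secret is only readable through an explicit call.
    assert_eq!(pw.expose(), "correct horse battery");
}
```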
3 |

Forgot Password

4 | 5 | {% if error %} 6 |
{{ error }}
7 | {% endif %} 8 | 9 | 10 | 11 | 12 |
13 | 14 | 15 | 16 | 17 |
18 | 19 |

20 | Back to Login 21 |

22 |
23 | 24 | 63 | {% endblock %} 64 | -------------------------------------------------------------------------------- /.sqlx/query-3742b4721561ed076810a5eb757d0d9b21914813e2e468a102e6fb20cf68ee9e.json: -------------------------------------------------------------------------------- 1 | { 2 | "db_name": "PostgreSQL", 3 | "query": "SELECT DISTINCT p.policy_id, p.external_id, p.name, p.description, p.resource, p.action, \n p.effect as \"effect_str\", p.conditions, p.created_at, p.updated_at\n FROM policies p\n LEFT JOIN user_policies up ON p.policy_id = up.policy_id AND up.user_id = $1\n LEFT JOIN group_policies gp ON p.policy_id = gp.policy_id\n LEFT JOIN user_groups ug ON gp.group_id = ug.group_id AND ug.user_id = $1\n WHERE up.user_id IS NOT NULL OR ug.user_id IS NOT NULL", 4 | "describe": { 5 | "columns": [ 6 | { 7 | "ordinal": 0, 8 | "name": "policy_id", 9 | "type_info": "Int8" 10 | }, 11 | { 12 | "ordinal": 1, 13 | "name": "external_id", 14 | "type_info": "Uuid" 15 | }, 16 | { 17 | "ordinal": 2, 18 | "name": "name", 19 | "type_info": "Varchar" 20 | }, 21 | { 22 | "ordinal": 3, 23 | "name": "description", 24 | "type_info": "Text" 25 | }, 26 | { 27 | "ordinal": 4, 28 | "name": "resource", 29 | "type_info": "Varchar" 30 | }, 31 | { 32 | "ordinal": 5, 33 | "name": "action", 34 | "type_info": "Varchar" 35 | }, 36 | { 37 | "ordinal": 6, 38 | "name": "effect_str", 39 | "type_info": "Varchar" 40 | }, 41 | { 42 | "ordinal": 7, 43 | "name": "conditions", 44 | "type_info": "Jsonb" 45 | }, 46 | { 47 | "ordinal": 8, 48 | "name": "created_at", 49 | "type_info": "Timestamptz" 50 | }, 51 | { 52 | "ordinal": 9, 53 | "name": "updated_at", 54 | "type_info": "Timestamptz" 55 | } 56 | ], 57 | "parameters": { 58 | "Left": [ 59 | "Int8" 60 | ] 61 | }, 62 | "nullable": [ 63 | false, 64 | false, 65 | false, 66 | true, 67 | false, 68 | false, 69 | false, 70 | true, 71 | false, 72 | false 73 | ] 74 | }, 75 | "hash": "3742b4721561ed076810a5eb757d0d9b21914813e2e468a102e6fb20cf68ee9e" 76 | } 77 | -------------------------------------------------------------------------------- /hurl/run_hurl_tests.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -e 4 | 5 | # Colors 6 | RED='\033[0;31m' 7 | GREEN='\033[0;32m' 8 | YELLOW='\033[1;33m' 9 | NC='\033[0m' # No Color 10 | 11 | HURL_DIR="hurl" 12 | VERBOSE=false 13 | 14 | if [[ "$1" == "--verbose" ]] || [[ "$1" == "-v" ]]; then 15 | VERBOSE=true 16 | fi 17 | 18 | export admin_email="${ADMIN_EMAIL:-admin@example.com}" 19 | export admin_password="${ADMIN_PASSWORD:-Admin123}" 20 | export group_user_username="${GROUP_USER_USERNAME:-testuser}" 21 | export group_user_password="${GROUP_USER_PASSWORD:-Test123}" 22 | 23 | echo -e "${YELLOW}🧪 Running Hurl tests...${NC}" 24 | echo "" 25 | 26 | if ! curl -s http://localhost:3000/api/health > /dev/null 2>&1; then 27 | echo -e "${RED}❌ Server is not running on http://localhost:3000${NC}" 28 | echo -e "${YELLOW}💡 Start the server first:${NC}" 29 | echo " cargo run" 30 | exit 1 31 | fi 32 | 33 | echo -e "${GREEN}✓ Server is running${NC}" 34 | echo "" 35 | 36 | total_files=0 37 | passed_files=0 38 | failed_files=0 39 | 40 | for hurl_file in "$HURL_DIR"/*.hurl; do 41 | if [ ! 
-f "$hurl_file" ]; then 42 | continue 43 | fi 44 | 45 | total_files=$((total_files + 1)) 46 | filename=$(basename "$hurl_file") 47 | 48 | echo -e "${YELLOW}▶ Running $filename${NC}" 49 | 50 | if [ "$VERBOSE" = true ]; then 51 | if hurl --test --very-verbose "$hurl_file"; then 52 | echo -e "${GREEN}✓ $filename passed${NC}" 53 | passed_files=$((passed_files + 1)) 54 | else 55 | echo -e "${RED}✗ $filename failed${NC}" 56 | failed_files=$((failed_files + 1)) 57 | fi 58 | else 59 | if hurl --test "$hurl_file" 2>&1 | grep -v "^$"; then 60 | echo -e "${GREEN}✓ $filename passed${NC}" 61 | passed_files=$((passed_files + 1)) 62 | else 63 | echo -e "${RED}✗ $filename failed${NC}" 64 | failed_files=$((failed_files + 1)) 65 | fi 66 | fi 67 | 68 | echo "" 69 | done 70 | 71 | # Resumo 72 | echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━" 73 | echo -e "${YELLOW}📊 Test Summary:${NC}" 74 | echo " Total files: $total_files" 75 | echo -e " ${GREEN}Passed: $passed_files${NC}" 76 | echo -e " ${RED}Failed: $failed_files${NC}" 77 | echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━" 78 | 79 | if [ $failed_files -eq 0 ]; then 80 | echo -e "${GREEN}✨ All tests passed!${NC}" 81 | exit 0 82 | else 83 | echo -e "${RED}💥 Some tests failed${NC}" 84 | exit 1 85 | fi 86 | -------------------------------------------------------------------------------- /templates/reset_password.html: -------------------------------------------------------------------------------- 1 | {% extends "base.html" %} 2 | 3 | {% block content %} 4 |
5 |

Reset Password

6 | 7 | {% if error %} 8 |
{{ error }}
9 | {% endif %} 10 | 11 | {% if success %} 12 |
{{ success }}
13 | {% else %} 14 |
15 | 16 | 17 | 18 | 19 | Password must be at least 8 characters long 20 | 21 | 22 | 23 | 24 | 25 |
26 | 27 | 67 | {% endif %} 68 |
69 | {% endblock %} 70 | -------------------------------------------------------------------------------- /templates/change_password.html: -------------------------------------------------------------------------------- 1 | {% extends "base.html" %} 2 | 3 | {% block content %} 4 |
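The `reset_password.html` and `change_password.html` templates branch on `error` and `success` values, which the view layer is expected to inject when rendering. A minimal sketch of that rendering with Tera, assuming the same `templates/**/*` glob used elsewhere in the project; the actual handlers in `src/views` may build the context differently:

```rust
use tera::{Context, Tera};

// Hedged sketch of rendering the password templates with optional messages.
fn render_reset_password_page(
    tera: &Tera,
    error: Option<&str>,
    success: Option<&str>,
) -> tera::Result<String> {
    let mut ctx = Context::new();
    if let Some(err) = error {
        ctx.insert("error", err);
    }
    if let Some(msg) = success {
        ctx.insert("success", msg);
    }
    tera.render("reset_password.html", &ctx)
}

fn main() -> anyhow::Result<()> {
    // Templates are loaded with a glob, matching the layout of the templates/ directory.
    let tera = Tera::new("templates/**/*")?;
    let html = render_reset_password_page(&tera, None, Some("Password updated"))?;
    println!("{}", html);
    Ok(())
}
```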
5 |

Change Password Required

6 | 7 |
8 | First Login Detected!
9 | For security reasons, you must change your password before continuing. 10 |
11 | 12 | {% if error %} 13 |
{{ error }}
14 | {% endif %} 15 | 16 |
17 | 18 | 19 | 20 | 21 | 22 | Password must be at least 8 characters long 23 | 24 | 25 | 26 | 27 | 28 |
29 | 30 | 71 |
72 | {% endblock %} 73 | -------------------------------------------------------------------------------- /templates/mail/password_reset.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | Password Reset - Keyrunes 7 | 65 | 66 | 67 |
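The `mail/password_reset.html` template here is rendered by `EmailService::send_password_reset_email` with a `reset_url` that embeds a one-time token; the migration later in this dump adds a `password_reset_tokens` table with `token`, `expires_at`, and `used_at` columns, and the email copy states a 24-hour validity. A heavily hedged sketch of how that flow could be wired together; the function, crate path, and query text are illustrative assumptions, not the repository's actual service code:

```rust
use chrono::{Duration, Utc};
use sqlx::PgPool;
use uuid::Uuid;

use keyrunes::services::email_service::EmailService; // crate path assumed

// Illustrative only: issue a one-time token, persist it with a 24h expiry,
// then let EmailService embed it in the reset link it emails out.
pub async fn start_password_reset(
    pool: &PgPool,
    email_service: &EmailService,
    user_id: i64,
    user_email: &str,
) -> anyhow::Result<()> {
    let token = Uuid::new_v4().to_string();
    let expires_at = Utc::now() + Duration::hours(24);

    sqlx::query(
        "INSERT INTO password_reset_tokens (user_id, token, expires_at) VALUES ($1, $2, $3)",
    )
    .bind(user_id)
    .bind(&token)
    .bind(expires_at)
    .execute(pool)
    .await?;

    email_service
        .send_password_reset_email(user_email, &token)
        .await?;

    Ok(())
}
```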
68 |

Password Reset - Keyrunes

69 | 70 |

Hello,

71 | 72 |

You requested a password reset. Click the button below to create a new password:

73 | 74 |

75 | Reset Password 76 |

77 | 78 |

Or copy and paste this link into your browser:

79 |
{{ reset_url }}
80 | 81 |
82 | Important: 83 |
    84 |
  • This link is valid for 24 hours
  • 85 |
  • Can only be used once
  • 86 |
  • If you didn't request this reset, please ignore this email
  • 87 |
88 |
89 | 90 | 94 |
95 | 96 | 97 | -------------------------------------------------------------------------------- /src/handler/logging.rs: -------------------------------------------------------------------------------- 1 | use axum::{extract::Request, middleware::Next, response::Response}; 2 | use std::time::Instant; 3 | 4 | #[derive(Debug, Clone, Copy, PartialEq, Eq)] 5 | pub enum LogLevel { 6 | Info, 7 | Debug, 8 | Error, 9 | Critical, 10 | } 11 | 12 | /// Initialize logging with given log level 13 | pub fn init_logging(level: LogLevel) { 14 | use tracing_subscriber::filter::LevelFilter; 15 | 16 | let filter = match level { 17 | LogLevel::Info => LevelFilter::INFO, 18 | LogLevel::Debug => LevelFilter::DEBUG, 19 | LogLevel::Error => LevelFilter::ERROR, 20 | LogLevel::Critical => LevelFilter::ERROR, // Map Critical to ERROR 21 | }; 22 | 23 | tracing_subscriber::fmt() 24 | .with_max_level(filter) 25 | .with_target(false) 26 | .with_thread_ids(false) 27 | .with_file(true) 28 | .with_line_number(true) 29 | .init(); 30 | } 31 | 32 | /// Request logging middleware - logs all requests 33 | /// 34 | /// NOTE: This middleware does NOT require ConnectInfo. 35 | /// It logs: method, path, status code, and response time. 36 | pub async fn request_logging_middleware(request: Request, next: Next) -> Response { 37 | let method = request.method().clone(); 38 | let uri = request.uri().clone(); 39 | let path = uri.path().to_string(); 40 | 41 | let start = Instant::now(); 42 | 43 | // Process request 44 | let response = next.run(request).await; 45 | 46 | let duration = start.elapsed(); 47 | let status = response.status(); 48 | 49 | // Log the request 50 | match status.as_u16() { 51 | 200..=299 => { 52 | tracing::info!( 53 | "{} {} - {} - {}ms", 54 | method, 55 | path, 56 | status.as_u16(), 57 | duration.as_millis() 58 | ); 59 | } 60 | 400..=499 => { 61 | tracing::warn!( 62 | "{} {} - {} - {}ms", 63 | method, 64 | path, 65 | status.as_u16(), 66 | duration.as_millis() 67 | ); 68 | } 69 | 500..=599 => { 70 | tracing::error!( 71 | "{} {} - {} - {}ms", 72 | method, 73 | path, 74 | status.as_u16(), 75 | duration.as_millis() 76 | ); 77 | } 78 | _ => { 79 | tracing::debug!( 80 | "{} {} - {} - {}ms", 81 | method, 82 | path, 83 | status.as_u16(), 84 | duration.as_millis() 85 | ); 86 | } 87 | } 88 | 89 | response 90 | } 91 | 92 | #[cfg(test)] 93 | mod tests { 94 | use super::*; 95 | 96 | #[test] 97 | fn test_log_level_equality() { 98 | assert_eq!(LogLevel::Info, LogLevel::Info); 99 | assert_eq!(LogLevel::Debug, LogLevel::Debug); 100 | assert_ne!(LogLevel::Info, LogLevel::Debug); 101 | } 102 | 103 | #[test] 104 | fn test_log_level_debug() { 105 | let level = LogLevel::Debug; 106 | assert_eq!(level, LogLevel::Debug); 107 | } 108 | } 109 | -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | # KeyRunes Code of Conduct 2 | 3 | ## Our Pledge 4 | 5 | In the interest of fostering an open and welcoming community, we as contributors and maintainers pledge to make participation in KeyRunes a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, gender identity and expression, level of experience, nationality, personal appearance, race, religion, or sexual identity and orientation. 6 | 7 | We pledge to act and interact in ways that contribute to an open, inclusive, and supportive environment. 
8 | 9 | --- 10 | 11 | ## Our Standards 12 | 13 | Examples of behavior that contributes to a positive environment for our community include: 14 | 15 | - Using welcoming and inclusive language. 16 | - Being respectful of differing viewpoints and experiences. 17 | - Gracefully accepting constructive criticism. 18 | - Focusing on what is best for the community. 19 | - Showing empathy toward other community members. 20 | 21 | Examples of unacceptable behavior by participants include: 22 | 23 | - Harassment, intimidation, or discrimination in any form. 24 | - Public or private insults or attacks. 25 | - Trolling, insulting/derogatory comments, or personal or political attacks. 26 | - Publishing others’ private information, such as a physical or electronic address, without explicit permission. 27 | - Other conduct which could reasonably be considered inappropriate in a professional setting. 28 | 29 | --- 30 | 31 | ## Enforcement Responsibilities 32 | 33 | Project maintainers are responsible for clarifying and enforcing our standards of acceptable behavior and will take appropriate and fair corrective action in response to any behavior that violates this Code of Conduct. 34 | 35 | Maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned with this Code of Conduct, or to temporarily or permanently ban any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful. 36 | 37 | --- 38 | 39 | ## Reporting Guidelines 40 | 41 | If you are subject to or witness unacceptable behavior, or have any other concerns, please report it by contacting the project maintainers at **[contact@jonatasoliveira.dev]**. All complaints will be reviewed and investigated promptly and fairly. 42 | 43 | You can also report violations to the broader community by contacting GitHub support if necessary. 44 | 45 | --- 46 | 47 | ## Scope 48 | 49 | This Code of Conduct applies both within project spaces and in public spaces when an individual is representing the project or its community. Examples of representing a project include using an official project e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event. 50 | 51 | --- 52 | 53 | ## Enforcement 54 | 55 | Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting the project team at **[contact@jonatasoliveira.dev]**. All complaints will be reviewed and investigated and will result in a response that is deemed necessary and appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident. 56 | 57 | Project maintainers who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by the community. 58 | 59 | --- 60 | 61 | ## Attribution 62 | 63 | This Code of Conduct is adapted from the [Contributor Covenant](https://www.contributor-covenant.org/), version 2.1, available at https://www.contributor-covenant.org/version/2/1/code_of_conduct.html 64 | -------------------------------------------------------------------------------- /cliff.toml: -------------------------------------------------------------------------------- 1 | # git-cliff ~ configuration file 2 | # https://git-cliff.org/docs/configuration 3 | 4 | 5 | [changelog] 6 | # A Tera template to be rendered for each release in the changelog. 
7 | # See https://keats.github.io/tera/docs/#introduction 8 | body = """ 9 | {% if version %}\ 10 | ## [{{ version | trim_start_matches(pat="v") }}] - {{ timestamp | date(format="%Y-%m-%d") }} 11 | {% else %}\ 12 | ## [unreleased] 13 | {% endif %}\ 14 | {% for group, commits in commits | group_by(attribute="group") %} 15 | ### {{ group | striptags | trim | upper_first }} 16 | {% for commit in commits %} 17 | - {% if commit.scope %}*({{ commit.scope }})* {% endif %}\ 18 | {% if commit.breaking %}[**breaking**] {% endif %}\ 19 | {{ commit.message | upper_first }}\ 20 | {% endfor %} 21 | {% endfor %} 22 | """ 23 | # Remove leading and trailing whitespaces from the changelog's body. 24 | trim = true 25 | # Render body even when there are no releases to process. 26 | render_always = true 27 | # An array of regex based postprocessors to modify the changelog. 28 | postprocessors = [ 29 | # Replace the placeholder with a URL. 30 | #{ pattern = '', replace = "https://github.com/orhun/git-cliff" }, 31 | ] 32 | # render body even when there are no releases to process 33 | # render_always = true 34 | # output file path 35 | # output = "test.md" 36 | 37 | [git] 38 | # Parse commits according to the conventional commits specification. 39 | # See https://www.conventionalcommits.org 40 | conventional_commits = true 41 | # Exclude commits that do not match the conventional commits specification. 42 | filter_unconventional = true 43 | # Require all commits to be conventional. 44 | # Takes precedence over filter_unconventional. 45 | require_conventional = false 46 | # Split commits on newlines, treating each line as an individual commit. 47 | split_commits = false 48 | # An array of regex based parsers to modify commit messages prior to further processing. 49 | commit_preprocessors = [ 50 | # Replace issue numbers with link templates to be updated in `changelog.postprocessors`. 51 | #{ pattern = '\((\w+\s)?#([0-9]+)\)', replace = "([#${2}](/issues/${2}))"}, 52 | # Check spelling of the commit message using https://github.com/crate-ci/typos. 53 | # If the spelling is incorrect, it will be fixed automatically. 54 | #{ pattern = '.*', replace_command = 'typos --write-changes -' }, 55 | ] 56 | # Prevent commits that are breaking from being excluded by commit parsers. 57 | protect_breaking_commits = false 58 | # An array of regex based parsers for extracting data from the commit message. 59 | # Assigns commits to groups. 60 | # Optionally sets the commit's scope and can decide to exclude commits from further processing. 61 | commit_parsers = [ 62 | { message = "^feat", group = "🚀 Features" }, 63 | { message = "^fix", group = "🐛 Bug Fixes" }, 64 | { message = "^doc", group = "📚 Documentation" }, 65 | { message = "^perf", group = "⚡ Performance" }, 66 | { message = "^refactor", group = "🚜 Refactor" }, 67 | { message = "^style", group = "🎨 Styling" }, 68 | { message = "^test", group = "🧪 Testing" }, 69 | { message = "^chore\\(release\\): prepare for", skip = true }, 70 | { message = "^chore\\(deps.*\\)", skip = true }, 71 | { message = "^chore\\(pr\\)", skip = true }, 72 | { message = "^chore\\(pull\\)", skip = true }, 73 | { message = "^chore|^ci", group = "⚙️ Miscellaneous Tasks" }, 74 | { body = ".*security", group = "🛡️ Security" }, 75 | { message = "^revert", group = "◀️ Revert" }, 76 | { message = ".*", group = "💼 Other" }, 77 | ] 78 | # Exclude commits that are not matched by any commit parser. 
79 | filter_commits = false 80 | # An array of link parsers for extracting external references, and turning them into URLs, using regex. 81 | link_parsers = [] 82 | # Include only the tags that belong to the current branch. 83 | use_branch_tags = false 84 | # Order releases topologically instead of chronologically. 85 | topo_order = false 86 | # Order releases topologically instead of chronologically. 87 | topo_order_commits = true 88 | # Order of commits in each group/release within the changelog. 89 | # Allowed values: newest, oldest 90 | sort_commits = "oldest" 91 | # Process submodules commits 92 | recurse_submodules = false 93 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: CI 2 | 3 | on: 4 | push: 5 | branches: [ main, develop ] 6 | pull_request: 7 | branches: [ main, develop ] 8 | 9 | env: 10 | CARGO_TERM_COLOR: always 11 | RUST_BACKTRACE: 1 12 | DATABASE_URL: postgresql://postgres:postgres@localhost:5432/keyrunes_test 13 | JWT_SECRET: test_secret_key_for_ci 14 | 15 | jobs: 16 | test: 17 | name: Test Suite 18 | runs-on: ubuntu-latest 19 | 20 | services: 21 | postgres: 22 | image: postgres:17 23 | env: 24 | POSTGRES_USER: postgres 25 | POSTGRES_PASSWORD: postgres 26 | POSTGRES_DB: postgres 27 | options: >- 28 | --health-cmd pg_isready 29 | --health-interval 10s 30 | --health-timeout 5s 31 | --health-retries 5 32 | ports: 33 | - 5432:5432 34 | 35 | steps: 36 | - name: Checkout code 37 | uses: actions/checkout@v4 38 | 39 | - name: Install Rust toolchain 40 | uses: dtolnay/rust-toolchain@stable 41 | with: 42 | components: rustfmt, clippy 43 | 44 | - name: Cache cargo registry 45 | uses: actions/cache@v4 46 | with: 47 | path: ~/.cargo/registry 48 | key: ${{ runner.os }}-cargo-registry-${{ hashFiles('**/Cargo.lock') }} 49 | restore-keys: | 50 | ${{ runner.os }}-cargo-registry- 51 | 52 | - name: Cache cargo index 53 | uses: actions/cache@v4 54 | with: 55 | path: ~/.cargo/git 56 | key: ${{ runner.os }}-cargo-git-${{ hashFiles('**/Cargo.lock') }} 57 | restore-keys: | 58 | ${{ runner.os }}-cargo-git- 59 | 60 | - name: Cache cargo build 61 | uses: actions/cache@v4 62 | with: 63 | path: target 64 | key: ${{ runner.os }}-cargo-build-target-${{ hashFiles('**/Cargo.lock') }} 65 | restore-keys: | 66 | ${{ runner.os }}-cargo-build-target- 67 | 68 | - name: Install sqlx-cli 69 | run: cargo install sqlx-cli --no-default-features --features postgres 70 | 71 | - name: Create test database 72 | run: | 73 | sqlx database create --database-url $DATABASE_URL 74 | 75 | - name: Run migrations 76 | run: | 77 | sqlx migrate run --database-url $DATABASE_URL 78 | 79 | - name: Check formatting 80 | run: cargo fmt --all -- --check 81 | 82 | - name: Run clippy 83 | run: cargo clippy -- -D warnings 84 | 85 | - name: Build 86 | run: cargo build --verbose 87 | 88 | - name: Run tests 89 | run: cargo test --verbose 90 | 91 | coverage: 92 | name: Code Coverage 93 | runs-on: ubuntu-latest 94 | 95 | services: 96 | postgres: 97 | image: postgres:17 98 | env: 99 | POSTGRES_USER: postgres 100 | POSTGRES_PASSWORD: postgres 101 | POSTGRES_DB: postgres 102 | options: >- 103 | --health-cmd pg_isready 104 | --health-interval 10s 105 | --health-timeout 5s 106 | --health-retries 5 107 | ports: 108 | - 5432:5432 109 | 110 | steps: 111 | - name: Checkout code 112 | uses: actions/checkout@v4 113 | 114 | - name: Install Rust toolchain 115 | uses: dtolnay/rust-toolchain@stable 116 | 117 | 
- name: Install sqlx-cli 118 | run: cargo install sqlx-cli --no-default-features --features postgres 119 | 120 | - name: Create test database 121 | run: | 122 | sqlx database create --database-url $DATABASE_URL 123 | 124 | - name: Run migrations 125 | run: | 126 | sqlx migrate run --database-url $DATABASE_URL 127 | 128 | - name: Install tarpaulin 129 | run: cargo install cargo-tarpaulin 130 | 131 | - name: Generate coverage 132 | run: cargo tarpaulin --verbose --all-features --workspace --timeout 120 --out Xml 133 | 134 | - name: Upload coverage to Codecov 135 | uses: codecov/codecov-action@v4 136 | with: 137 | token: ${{ secrets.CODECOV_TOKEN }} 138 | fail_ci_if_error: false 139 | 140 | security-audit: 141 | name: Security Audit 142 | runs-on: ubuntu-latest 143 | steps: 144 | - name: Checkout code 145 | uses: actions/checkout@v4 146 | 147 | - name: Run security audit 148 | uses: rustsec/audit-check@v1 149 | with: 150 | token: ${{ secrets.GITHUB_TOKEN }} 151 | ignore: RUSTSEC-2023-0071 152 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing to KeyRunes 2 | 3 | First off, thank you for considering contributing to KeyRunes! Whether it's code, documentation, bug reports, or feature requests, your help makes KeyRunes better for everyone. This guide will help you get started and make the process smooth for you and the maintainers. 4 | 5 | --- 6 | 7 | ## Table of Contents 8 | 9 | 1. [How to Contribute](#how-to-contribute) 10 | 2. [Reporting Bugs](#reporting-bugs) 11 | 3. [Requesting Features](#requesting-features) 12 | 4. [Development Setup](#development-setup) 13 | 5. [Code Style and Standards](#code-style-and-standards) 14 | 6. [Submitting Pull Requests](#submitting-pull-requests) 15 | 7. [Community Guidelines](#community-guidelines) 16 | 8. [Acknowledgements](#acknowledgements) 17 | 18 | --- 19 | 20 | ## How to Contribute 21 | 22 | There are many ways you can contribute: 23 | 24 | - **Code Contributions**: Fix bugs, implement new features, or improve existing code. 25 | - **Documentation**: Improve README, write guides, or clarify examples. 26 | - **Testing**: Add or improve unit, integration, or end-to-end tests. 27 | - **Feedback**: Report issues, suggest features, or provide performance insights. 28 | 29 | Before contributing code, please check the existing issues to avoid duplication. 30 | 31 | --- 32 | 33 | ## Reporting Bugs 34 | 35 | If you find a bug, please submit an issue with: 36 | 37 | - A clear and descriptive title. 38 | - Steps to reproduce the problem. 39 | - Expected vs actual behavior. 40 | - Relevant environment information (OS, Rust version, DB, etc.). 41 | - Logs or screenshots, if applicable. 42 | 43 | This helps maintainers reproduce and fix the issue faster. 44 | 45 | --- 46 | 47 | ## Requesting Features 48 | 49 | Feature requests should include: 50 | 51 | - A clear description of the feature. 52 | - Why it is needed and how it improves KeyRunes. 53 | - Optional examples or mockups. 54 | 55 | Feature requests are discussed openly and may be implemented collaboratively. 56 | 57 | --- 58 | 59 | ## Development Setup 60 | 61 | Follow these steps to get KeyRunes running locally: 62 | 63 | 1. 
**Clone the repository:** 64 | ```bash 65 | git clone https://github.com/jonatasoli/keyrunes.git && cd keyrunes 66 | ``` 67 | 68 | Start the database and services using Docker Compose: 69 | 70 | Create a file named `.env`. This allows Docker Compose to pick up env vars instead of you manually passing them on the CLI every time. 71 | Set up your env variables using the `.env-example` file in the main directory as a reference. 72 | 73 | Run the command below to create the database tables / run the migrations: 74 | ```bash 75 | sqlx migrate run 76 | ``` 77 | 78 | Make sure to add the `DATABASE_URL` env var to your `.env` before running the above command. Note that 79 | you can run the app directly without issue, but an SQL migration error can come up because the `sqlx` command looks for the 80 | database URL env var. 81 | 82 | Run the command below: 83 | ```bash 84 | docker-compose up 85 | ``` 86 | 87 | Run the web application: 88 | 89 | ```bash 90 | cargo run --bin keyrunes 91 | ``` 92 | 93 | Run the CLI application: 94 | ```bash 95 | cargo run --bin cli 96 | ``` 97 | 98 | Before you run tests, make sure to register a test user with the details below. Some unit tests depend on this data. 99 | Also make sure you have run `cargo build` so the binary is available in the release directory. 100 | 101 | ``` 102 | username = test 103 | email = test@gmail.com 104 | password = password 105 | ``` 106 | 107 | Run tests: 108 | ```bash 109 | cargo test 110 | ``` 111 | 112 | ## Code Style and Standards 113 | 114 | Follow Rust community conventions. 115 | 116 | Use meaningful variable and function names. 117 | 118 | Write tests for new features and bug fixes. 119 | 120 | Keep code modular and well-documented. 121 | 122 | Use Clippy to catch warnings: 123 | 124 | ```bash 125 | cargo clippy 126 | ``` 127 | 128 | ## Submitting Pull Requests 129 | Fork the repository. 130 | 131 | Create a new branch for your feature/bugfix. 132 | 133 | Write code and tests according to the guidelines. 134 | 135 | Ensure all tests pass. 136 | 137 | Submit a pull request with a clear description of the changes. 138 | 139 | Pull requests are reviewed collaboratively. Be prepared to make changes based on feedback. 140 | 141 | ## Community Guidelines 142 | 143 | We value respectful and constructive communication. Please follow: 144 | 145 | - Be respectful to all contributors. 146 | 147 | - Provide clear and concise feedback. 148 | 149 | - Stay on-topic and avoid off-topic discussions in issues/PRs.
150 | 151 | - Follow the [Code of Conduct](CODE_OF_CONDUCT.md) -------------------------------------------------------------------------------- /tests/admin_view_test.rs: -------------------------------------------------------------------------------- 1 | use axum::{ 2 | body::Body, 3 | http::{Request, StatusCode}, 4 | }; 5 | use tower::ServiceExt; 6 | 7 | #[tokio::test] 8 | async fn test_admin_endpoint_structure() { 9 | let endpoints = vec![ 10 | "/api/admin/dashboard", 11 | "/api/admin/users", 12 | "/api/admin/user", 13 | "/api/admin/groups", 14 | "/api/admin/policies", 15 | "/api/admin/users/:user_id/groups/:group_id", 16 | "/api/admin/check-permission", 17 | ]; 18 | 19 | for endpoint in endpoints { 20 | println!("Endpoint exists: {}", endpoint); 21 | } 22 | } 23 | 24 | #[test] 25 | fn test_check_permission_request_structure() { 26 | use serde_json::json; 27 | 28 | let request = json!({ 29 | "user_id": 1, 30 | "group_name": "developers", 31 | "resource": "user:*", 32 | "action": "read" 33 | }); 34 | 35 | assert!(request["user_id"].is_number()); 36 | assert!(request["group_name"].is_string()); 37 | assert!(request["resource"].is_string()); 38 | assert!(request["action"].is_string()); 39 | } 40 | 41 | #[test] 42 | fn test_admin_dashboard_response_structure() { 43 | use serde_json::json; 44 | 45 | let response = json!({ 46 | "total_users": 10, 47 | "total_groups": 3, 48 | "total_policies": 5, 49 | "current_admin": { 50 | "user_id": 1, 51 | "username": "admin", 52 | "email": "admin@example.com", 53 | "groups": ["superadmin"] 54 | } 55 | }); 56 | 57 | assert!(response["total_users"].is_number()); 58 | assert!(response["total_groups"].is_number()); 59 | assert!(response["total_policies"].is_number()); 60 | assert!(response["current_admin"]["groups"].is_array()); 61 | } 62 | 63 | #[test] 64 | fn test_user_list_response_structure() { 65 | use serde_json::json; 66 | 67 | let response = json!([ 68 | { 69 | "user_id": 1, 70 | "external_id": "550e8400-e29b-41d4-a716-446655440000", 71 | "email": "user@example.com", 72 | "username": "testuser", 73 | "first_login": false, 74 | "groups": ["users"], 75 | "created_at": "2025-11-27T10:00:00Z" 76 | } 77 | ]); 78 | 79 | assert!(response.is_array()); 80 | assert!(response[0]["user_id"].is_number()); 81 | assert!(response[0]["email"].is_string()); 82 | assert!(response[0]["groups"].is_array()); 83 | } 84 | 85 | #[test] 86 | fn test_group_creation_request_structure() { 87 | use serde_json::json; 88 | 89 | let request = json!({ 90 | "name": "developers", 91 | "description": "Development team" 92 | }); 93 | 94 | assert!(request["name"].is_string()); 95 | assert!(request["description"].is_string() || request["description"].is_null()); 96 | } 97 | 98 | #[test] 99 | fn test_assign_group_response_structure() { 100 | use serde_json::json; 101 | 102 | let response = json!({ 103 | "message": "User assigned to group successfully" 104 | }); 105 | 106 | assert_eq!(response["message"], "User assigned to group successfully"); 107 | } 108 | 109 | #[test] 110 | fn test_permission_check_response_structure() { 111 | use serde_json::json; 112 | 113 | let response = json!({ 114 | "user_id": 1, 115 | "group_name": "developers", 116 | "resource": "user:*", 117 | "action": "read", 118 | "has_permission": true 119 | }); 120 | 121 | assert!(response["user_id"].is_number()); 122 | assert!(response["group_name"].is_string()); 123 | assert!(response["resource"].is_string()); 124 | assert!(response["action"].is_string()); 125 | assert!(response["has_permission"].is_boolean()); 126 | } 127 
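// The `axum`/`tower` imports at the top of this file suggest router-level tests.
// As an illustrative aside (not part of the original test suite), a request can be
// driven through a Router entirely in-memory with `tower::ServiceExt::oneshot`;
// the `app()` constructor below is hypothetical and stands in for however the
// real KeyRunes router is actually built.
#[tokio::test]
async fn test_oneshot_request_sketch() {
    use axum::{routing::get, Router};

    fn app() -> Router {
        Router::new().route("/api/health", get(|| async { "ok" }))
    }

    let response = app()
        .oneshot(
            Request::builder()
                .uri("/api/health")
                .body(Body::empty())
                .unwrap(),
        )
        .await
        .unwrap();

    assert_eq!(response.status(), StatusCode::OK);
}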
| 128 | // Test edge cases 129 | #[test] 130 | fn test_empty_group_name_invalid() { 131 | let name = ""; 132 | assert!(name.is_empty()); 133 | } 134 | 135 | #[test] 136 | fn test_invalid_user_id() { 137 | let user_id: i64 = -1; 138 | assert!(user_id < 0); 139 | } 140 | 141 | #[test] 142 | fn test_wildcard_resource_patterns() { 143 | let patterns = vec![ 144 | "*", // All resources 145 | "user:*", // All user resources 146 | "user:self", // Own user resource 147 | "admin:*", // All admin resources 148 | ]; 149 | 150 | for pattern in patterns { 151 | assert!(!pattern.is_empty()); 152 | } 153 | } 154 | 155 | #[test] 156 | fn test_action_types() { 157 | let actions = vec!["read", "write", "delete", "update", "*"]; 158 | 159 | for action in actions { 160 | assert!(!action.is_empty()); 161 | } 162 | } 163 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # 🛡️ KeyRunes — Modern Access Control Engine in Rust 2 | [![CI](https://github.com/jonatasoli/keyrunes/actions/workflows/ci.yml/badge.svg)](https://github.com/jonatasoli/keyrunes/actions/workflows/ci.yml) 3 | 4 | **KeyRunes** is a high-performance, extensible authorization system designed to compete with and surpass traditional solutions like Keycloak. It brings together advanced access control models such as **RBAC**, **ABAC**, **ReBAC**, and **PBAC**, while offering a great developer experience and enterprise-grade scalability. 5 | 6 | Built for Rust. Inspired by RPG systems. Designed for security-critical platforms. 7 | 8 | > ⚙️ Built for Rust. Inspired by RPG systems. Designed for security-critical platforms. 9 | 10 | --- 11 | 12 | ## Key Features 13 | 14 | ### Advanced Authorization Models 15 | 16 | - **RBAC** (Role-Based Access Control): Global (realm) and per-client roles, including role composition. 17 | - **ABAC** (Attribute-Based Access Control): Policies based on dynamic user/environment attributes (e.g. time, department, device). 18 | - **ReBAC** (Relationship-Based Access Control): Authorization through graph-based relationships (e.g. ownership, collaboration). 19 | - **PBAC** (Policy-Based Access Control): Combine RBAC + ABAC in unified policies. 20 | 21 | ### Scalability & Performance 22 | 23 | - Lightweight **Policy Decision Point (PDP)** with <10ms latency at enterprise scale. 24 | - Optional in-process or external microservice deployment. 25 | - Distributed cache support to reduce calls to external sources (e.g. Keycloak/LDAP). 26 | 27 | ### Developer Experience 28 | 29 | - **Policy-as-Code** using YAML or Rego, versionable via Git. 30 | - CI/CD-ready: Run automated tests for policies. 31 | - Simulate access decisions before deployment with a rich UI. 32 | - SDKs (planned) for Rust, Java, Go, and Python for seamless integration. 33 | 34 | ### Audit & Compliance 35 | 36 | - Complete decision logs with metadata (timestamp, policy, attributes). 37 | - Automated rollback for failed policies in production. 38 | - Compliance reports for standards like HIPAA and PCI. 39 | 40 | ### Integration & Extensibility 41 | 42 | - Federate identities from Keycloak, Okta and others via OIDC. 43 | - Map custom IdP attributes into policies. 44 | - Webhook support for access denial events. 45 | - Plugin system for sourcing attributes from internal systems (CRM, HR). 46 | 47 | ### Multi-Tenant Support 48 | 49 | - Isolated policies and data per tenant. 50 | - Delegated administration (e.g. department leads managing roles). 
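To make the policy model above concrete, here is a hedged sketch of how a decision over `resource`/`action`/`effect` policies (the same shape used by the `policies` table and the admin `check-permission` endpoint) could be evaluated. It is illustrative only: the names are invented, deny-overrides is an assumption rather than KeyRunes' documented semantics, and the real engine also has to account for JSONB `conditions` (ABAC) and relationships (ReBAC).

```rust
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum Effect {
    Allow,
    Deny,
}

#[derive(Debug, Clone)]
pub struct Policy {
    pub resource: String, // e.g. "*", "user:*", "user:self"
    pub action: String,   // e.g. "read", "write", "*"
    pub effect: Effect,
}

// Trailing-wildcard match: "*" matches everything, "user:*" matches "user:self".
fn pattern_matches(pattern: &str, value: &str) -> bool {
    if pattern == "*" {
        return true;
    }
    match pattern.strip_suffix('*') {
        Some(prefix) => value.starts_with(prefix),
        None => pattern == value,
    }
}

/// Deny-overrides evaluation (an assumed strategy): any matching DENY wins,
/// otherwise any matching ALLOW grants access.
pub fn is_allowed(policies: &[Policy], resource: &str, action: &str) -> bool {
    let matching = policies.iter().filter(|p| {
        pattern_matches(&p.resource, resource) && pattern_matches(&p.action, action)
    });
    let mut allowed = false;
    for p in matching {
        match p.effect {
            Effect::Deny => return false,
            Effect::Allow => allowed = true,
        }
    }
    allowed
}

fn main() {
    let policies = vec![
        Policy { resource: "user:*".into(), action: "read".into(), effect: Effect::Allow },
        Policy { resource: "admin:*".into(), action: "*".into(), effect: Effect::Deny },
    ];
    assert!(is_allowed(&policies, "user:self", "read"));
    assert!(!is_allowed(&policies, "admin:users", "delete"));
}
```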
51 | 52 | ### Real-World Use Cases 53 | 54 | - Hospitals (HIPAA): Role + location + shift access to medical records. 55 | - Banks: Enforce MFA outside corporate network. 56 | - E-commerce: Temporary supplier access. 57 | - IoT: Device-based publish/subscribe permissions. 58 | 59 | --- 60 | 61 | ## 📦 Roadmap (Milestones) 62 | 63 | | Phase | Focus | 64 | |-------|-------| 65 | | MVP | RBAC, Policy-as-Code, SDKs, Keycloak integration | 66 | | V1 | ABAC, ReBAC, Simulators, Attribute Graphs | 67 | | V2 | Multi-tenancy, Audit, Compliance tooling | 68 | | V3 | Edge-case handling, IoT, Delegated access UI | 69 | 70 | --- 71 | 72 | ## 🧪 Quickstart (WIP) 73 | 74 | Follow these steps to quickly start KeyRunes locally: 75 | 76 | ```bash 77 | # 1. Copy environment variables 78 | cp .env-example .env 79 | 80 | # 2. Start the database using Docker 81 | docker-compose up -d 82 | 83 | # 3. Run the web application 84 | cargo run --bin keyrunes 85 | 86 | # 4. Or run the CLI application 87 | cargo run --bin cli 88 | 89 | # 5. Run tests 90 | cargo test 91 | 92 | # 6. Stop the database container when done 93 | docker-compose down 94 | ``` 95 | --- 96 | 97 | ## Project Structure (Planned) 98 | 99 | /src 100 | 101 | /core # Policy engine 102 | 103 | /models # Roles, attributes, relationships 104 | 105 | /parser # Policy-as-code parser (YAML/Rego) 106 | 107 | /sdk # API bindings 108 | 109 | /tests 110 | 111 | /docs 112 | 113 | --- 114 | 115 | ## Contributing 116 | 117 | Contributions are welcome! If you’re interested in: 118 | - Access control systems 119 | - Graph-based security 120 | - High-performance Rust services 121 | 122 | …then feel free to open issues, suggest ideas, or contribute code once we’re live 🚀 123 | 124 | See in [CONTRIBUTING](CONTRIBUTING.md) 125 | 126 | --- 127 | 128 | ## License 129 | 130 | [MIT](LICENSE) 131 | [CODE OF CONDUCT](CODE_OF_CONDUCT.md) 132 | 133 | --- 134 | 135 | ## 🧙‍♂️ About the Name 136 | 137 | Just like magical runes control access to forbidden realms in fantasy worlds, **KeyRunes** grants or denies access to sensitive resources: through logic, context, and relationships. 
138 | 139 | > 🔒 **Security meets storytelling.** 140 | -------------------------------------------------------------------------------- /hurl/user_roles.hurl: -------------------------------------------------------------------------------- 1 | POST http://localhost:3000/api/login 2 | Content-Type: application/json 3 | { 4 | "identity": "{{admin_email}}", 5 | "password": "{{admin_password}}" 6 | } 7 | HTTP 200 8 | [Captures] 9 | admin_token: jsonpath "$.token" 10 | 11 | POST http://localhost:3000/api/admin/user 12 | Content-Type: application/json 13 | Authorization: Bearer {{admin_token}} 14 | { 15 | "email": "multiRole_{{newUuid}}@example.com", 16 | "username": "multirole_{{newDate}}", 17 | "password": "{{test_user_password}}", 18 | "groups": ["superadmin"] 19 | } 20 | HTTP 201 21 | [Captures] 22 | test_user_id: jsonpath "$.user_id" 23 | test_user_email: jsonpath "$.email" 24 | [Asserts] 25 | jsonpath "$.groups" count == 1 26 | jsonpath "$.groups[0]" == "superadmin" 27 | 28 | GET http://localhost:3000/api/admin/groups 29 | Authorization: Bearer {{admin_token}} 30 | HTTP 200 31 | [Captures] 32 | users_group_id: jsonpath "$[?(@.name=='users')].group_id" nth 0 33 | [Asserts] 34 | jsonpath "$[?(@.name=='users')]" exists 35 | 36 | POST http://localhost:3000/api/admin/users/{{test_user_id}}/groups/{{users_group_id}} 37 | Authorization: Bearer {{admin_token}} 38 | HTTP 200 39 | [Asserts] 40 | jsonpath "$.message" == "User assigned to group successfully" 41 | 42 | GET http://localhost:3000/api/admin/users 43 | Authorization: Bearer {{admin_token}} 44 | HTTP 200 45 | [Asserts] 46 | jsonpath "$[?(@.user_id=={{test_user_id}})].groups" count == 1 47 | jsonpath "$[?(@.user_id=={{test_user_id}})].groups[0]" count == 2 48 | 49 | POST http://localhost:3000/api/login 50 | Content-Type: application/json 51 | { 52 | "identity": "{{test_user_email}}", 53 | "password": "{{test_user_password}}" 54 | } 55 | HTTP 200 56 | [Captures] 57 | test_user_token: jsonpath "$.token" 58 | [Asserts] 59 | jsonpath "$.token" exists 60 | jsonpath "$.user.groups" count == 2 61 | jsonpath "$.user.groups" contains "superadmin" 62 | jsonpath "$.user.groups" contains "users" 63 | 64 | GET http://localhost:3000/api/admin/dashboard 65 | Authorization: Bearer {{test_user_token}} 66 | HTTP 200 67 | [Asserts] 68 | jsonpath "$.total_users" isInteger 69 | jsonpath "$.total_groups" isInteger 70 | jsonpath "$.current_admin.username" exists 71 | jsonpath "$.current_admin.groups" count == 2 72 | jsonpath "$.current_admin.groups" contains "superadmin" 73 | jsonpath "$.current_admin.groups" contains "users" 74 | 75 | GET http://localhost:3000/api/admin/users 76 | Authorization: Bearer {{test_user_token}} 77 | HTTP 200 78 | [Asserts] 79 | jsonpath "$" isCollection 80 | jsonpath "$[*].user_id" exists 81 | 82 | POST http://localhost:3000/api/admin/user 83 | Content-Type: application/json 84 | Authorization: Bearer {{test_user_token}} 85 | { 86 | "email": "created_by_multirole_{{newUuid}}@example.com", 87 | "username": "created_{{newDate}}", 88 | "password": "Created123" 89 | } 90 | HTTP 201 91 | [Captures] 92 | created_user_id: jsonpath "$.user_id" 93 | [Asserts] 94 | jsonpath "$.user_id" exists 95 | jsonpath "$.groups" contains "users" 96 | 97 | POST http://localhost:3000/api/admin/check-permission 98 | Content-Type: application/json 99 | Authorization: Bearer {{test_user_token}} 100 | { 101 | "user_id": {{test_user_id}}, 102 | "group_name": "users", 103 | "resource": "user:self", 104 | "action": "read" 105 | } 106 | HTTP 200 107 | [Asserts] 108 | 
jsonpath "$.user_id" == {{test_user_id}} 109 | jsonpath "$.group_name" == "users" 110 | jsonpath "$.has_permission" isBoolean 111 | 112 | POST http://localhost:3000/api/admin/check-permission 113 | Content-Type: application/json 114 | Authorization: Bearer {{test_user_token}} 115 | { 116 | "user_id": {{test_user_id}}, 117 | "group_name": "superadmin", 118 | "resource": "admin:*", 119 | "action": "*" 120 | } 121 | HTTP 200 122 | [Asserts] 123 | jsonpath "$.user_id" == {{test_user_id}} 124 | jsonpath "$.group_name" == "superadmin" 125 | jsonpath "$.has_permission" isBoolean 126 | 127 | GET http://localhost:3000/api/admin/groups 128 | Authorization: Bearer {{admin_token}} 129 | HTTP 200 130 | [Captures] 131 | superadmin_group_id: jsonpath "$[?(@.name=='superadmin')].group_id" nth 0 132 | 133 | DELETE http://localhost:3000/api/admin/users/{{test_user_id}}/groups/{{superadmin_group_id}} 134 | Authorization: Bearer {{admin_token}} 135 | HTTP 200 136 | [Asserts] 137 | jsonpath "$.message" == "User removed from group successfully" 138 | 139 | POST http://localhost:3000/api/login 140 | Content-Type: application/json 141 | { 142 | "identity": "{{test_user_email}}", 143 | "password": "{{test_user_password}}" 144 | } 145 | HTTP 200 146 | [Captures] 147 | users_only_token: jsonpath "$.token" 148 | [Asserts] 149 | jsonpath "$.user.groups" count == 1 150 | jsonpath "$.user.groups[0]" == "users" 151 | 152 | GET http://localhost:3000/api/admin/users 153 | Authorization: Bearer {{users_only_token}} 154 | HTTP 403 155 | -------------------------------------------------------------------------------- /src/services/email_service.rs: -------------------------------------------------------------------------------- 1 | use anyhow::{Context, Result}; 2 | use lettre::{ 3 | AsyncSmtpTransport, AsyncTransport, Message, Tokio1Executor, message::header::ContentType, 4 | transport::smtp::authentication::Credentials, 5 | }; 6 | use std::sync::Arc; 7 | use tera::Tera; 8 | 9 | /// Service for sending emails via SMTP 10 | #[derive(Clone)] 11 | pub struct EmailService { 12 | smtp_username: String, 13 | smtp_password: String, 14 | smtp_host: String, 15 | smtp_port: u16, 16 | from_email: String, 17 | from_name: String, 18 | frontend_url: String, 19 | tera: Arc, 20 | } 21 | 22 | impl EmailService { 23 | /// Create a new EmailService from environment variables 24 | pub fn from_env(tera: Arc) -> Result { 25 | let smtp_username = 26 | std::env::var("SMTP_USERNAME").context("SMTP_USERNAME not set in environment")?; 27 | let smtp_password = 28 | std::env::var("SMTP_PASSWORD").context("SMTP_PASSWORD not set in environment")?; 29 | let smtp_host = std::env::var("SMTP_HOST").unwrap_or_else(|_| "smtp.gmail.com".to_string()); 30 | let smtp_port = std::env::var("SMTP_PORT") 31 | .unwrap_or_else(|_| "587".to_string()) 32 | .parse::() 33 | .context("Invalid SMTP_PORT")?; 34 | let from_email = std::env::var("FROM_EMAIL").unwrap_or_else(|_| smtp_username.clone()); 35 | let from_name = std::env::var("FROM_NAME").unwrap_or_else(|_| "KeyRunes".to_string()); 36 | let frontend_url = 37 | std::env::var("FRONTEND_URL").unwrap_or_else(|_| "http://localhost:3000".to_string()); 38 | 39 | Ok(Self { 40 | smtp_username, 41 | smtp_password, 42 | smtp_host, 43 | smtp_port, 44 | from_email, 45 | from_name, 46 | frontend_url, 47 | tera, 48 | }) 49 | } 50 | 51 | /// Send a password reset email 52 | pub async fn send_password_reset_email(&self, to_email: &str, reset_token: &str) -> Result<()> { 53 | let reset_url = format!( 54 | 
"{}/reset-password?forgot_password={}", 55 | self.frontend_url, reset_token 56 | ); 57 | 58 | // Render HTML template using Tera 59 | let mut context = tera::Context::new(); 60 | context.insert("reset_url", &reset_url); 61 | 62 | let html_body = self 63 | .tera 64 | .render("mail/password_reset.html", &context) 65 | .context("Failed to render email template")?; 66 | 67 | let email = Message::builder() 68 | .from(format!("{} <{}>", self.from_name, self.from_email).parse()?) 69 | .to(to_email.parse()?) 70 | .subject("Password Reset - Keyrunes") 71 | .header(ContentType::TEXT_HTML) 72 | .body(html_body)?; 73 | 74 | let creds = Credentials::new(self.smtp_username.clone(), self.smtp_password.clone()); 75 | 76 | let mailer: AsyncSmtpTransport = 77 | AsyncSmtpTransport::::starttls_relay(&self.smtp_host)? 78 | .credentials(creds) 79 | .port(self.smtp_port) 80 | .build(); 81 | 82 | mailer.send(email).await.context("Failed to send email")?; 83 | 84 | tracing::info!("Password reset email sent to {}", to_email); 85 | 86 | Ok(()) 87 | } 88 | } 89 | 90 | #[cfg(test)] 91 | mod tests { 92 | use super::*; 93 | use serial_test::serial; 94 | 95 | fn create_test_tera() -> Arc { 96 | Arc::new(Tera::new("templates/**/*").expect("Failed to load templates")) 97 | } 98 | 99 | #[test] 100 | #[serial] 101 | fn test_email_service_from_env_missing_required() { 102 | let tera = create_test_tera(); 103 | 104 | // This should fail because SMTP_USERNAME and SMTP_PASSWORD are required 105 | unsafe { 106 | std::env::remove_var("SMTP_USERNAME"); 107 | std::env::remove_var("SMTP_PASSWORD"); 108 | } 109 | 110 | let result = EmailService::from_env(tera); 111 | assert!(result.is_err()); 112 | } 113 | 114 | #[test] 115 | #[serial] 116 | fn test_email_service_from_env_with_defaults() { 117 | let tera = create_test_tera(); 118 | 119 | unsafe { 120 | std::env::set_var("SMTP_USERNAME", "test@example.com"); 121 | std::env::set_var("SMTP_PASSWORD", "password"); 122 | } 123 | 124 | let service = EmailService::from_env(tera).unwrap(); 125 | 126 | assert_eq!(service.smtp_host, "smtp.gmail.com"); 127 | assert_eq!(service.smtp_port, 587); 128 | assert_eq!(service.from_name, "KeyRunes"); 129 | assert_eq!(service.frontend_url, "http://localhost:3000"); 130 | 131 | // Cleanup 132 | unsafe { 133 | std::env::remove_var("SMTP_USERNAME"); 134 | std::env::remove_var("SMTP_PASSWORD"); 135 | } 136 | } 137 | } 138 | -------------------------------------------------------------------------------- /migrations/20250908121827_create_groups_and_policies.sql: -------------------------------------------------------------------------------- 1 | -- Add first_login and reset_password columns to users table 2 | ALTER TABLE users 3 | ADD COLUMN first_login BOOLEAN NOT NULL DEFAULT FALSE, 4 | ADD COLUMN reset_password BOOLEAN NOT NULL DEFAULT FALSE; 5 | 6 | -- Add unique constraint on email column for ON CONFLICT to work 7 | -- Note: This is in addition to the existing unique index on lower(email) 8 | DO $$ 9 | BEGIN 10 | IF NOT EXISTS ( 11 | SELECT 1 FROM pg_constraint 12 | WHERE conname = 'users_email_key' AND conrelid = 'users'::regclass 13 | ) THEN 14 | ALTER TABLE users ADD CONSTRAINT users_email_key UNIQUE (email); 15 | END IF; 16 | END $$; 17 | 18 | -- Create groups table 19 | CREATE TABLE IF NOT EXISTS groups ( 20 | group_id BIGSERIAL PRIMARY KEY, 21 | external_id UUID NOT NULL DEFAULT gen_random_uuid(), 22 | name VARCHAR(100) NOT NULL UNIQUE, 23 | description TEXT, 24 | created_at TIMESTAMPTZ NOT NULL DEFAULT now(), 25 | updated_at TIMESTAMPTZ NOT NULL 
DEFAULT now() 26 | ); 27 | 28 | CREATE UNIQUE INDEX IF NOT EXISTS groups_external_id_idx ON groups (external_id); 29 | 30 | -- Create policies table 31 | CREATE TABLE IF NOT EXISTS policies ( 32 | policy_id BIGSERIAL PRIMARY KEY, 33 | external_id UUID NOT NULL DEFAULT gen_random_uuid(), 34 | name VARCHAR(100) NOT NULL UNIQUE, 35 | description TEXT, 36 | resource VARCHAR(255) NOT NULL, 37 | action VARCHAR(100) NOT NULL, 38 | effect VARCHAR(10) NOT NULL CHECK (effect IN ('ALLOW', 'DENY')), 39 | conditions JSONB, 40 | created_at TIMESTAMPTZ NOT NULL DEFAULT now(), 41 | updated_at TIMESTAMPTZ NOT NULL DEFAULT now() 42 | ); 43 | 44 | CREATE UNIQUE INDEX IF NOT EXISTS policies_external_id_idx ON policies (external_id); 45 | CREATE INDEX IF NOT EXISTS policies_resource_action_idx ON policies (resource, action); 46 | 47 | -- Create user_groups table (many-to-many) 48 | CREATE TABLE IF NOT EXISTS user_groups ( 49 | user_id BIGINT NOT NULL REFERENCES users(user_id) ON DELETE CASCADE, 50 | group_id BIGINT NOT NULL REFERENCES groups(group_id) ON DELETE CASCADE, 51 | assigned_at TIMESTAMPTZ NOT NULL DEFAULT now(), 52 | assigned_by BIGINT REFERENCES users(user_id), 53 | PRIMARY KEY (user_id, group_id) 54 | ); 55 | 56 | -- Create user_policies table (many-to-many) 57 | CREATE TABLE IF NOT EXISTS user_policies ( 58 | user_id BIGINT NOT NULL REFERENCES users(user_id) ON DELETE CASCADE, 59 | policy_id BIGINT NOT NULL REFERENCES policies(policy_id) ON DELETE CASCADE, 60 | assigned_at TIMESTAMPTZ NOT NULL DEFAULT now(), 61 | assigned_by BIGINT REFERENCES users(user_id), 62 | PRIMARY KEY (user_id, policy_id) 63 | ); 64 | 65 | -- Create group_policies table (many-to-many) 66 | CREATE TABLE IF NOT EXISTS group_policies ( 67 | group_id BIGINT NOT NULL REFERENCES groups(group_id) ON DELETE CASCADE, 68 | policy_id BIGINT NOT NULL REFERENCES policies(policy_id) ON DELETE CASCADE, 69 | assigned_at TIMESTAMPTZ NOT NULL DEFAULT now(), 70 | assigned_by BIGINT REFERENCES users(user_id), 71 | PRIMARY KEY (group_id, policy_id) 72 | ); 73 | 74 | -- Create password_reset_tokens table 75 | CREATE TABLE IF NOT EXISTS password_reset_tokens ( 76 | token_id BIGSERIAL PRIMARY KEY, 77 | user_id BIGINT NOT NULL REFERENCES users(user_id) ON DELETE CASCADE, 78 | token VARCHAR(255) NOT NULL, 79 | expires_at TIMESTAMPTZ NOT NULL, 80 | used_at TIMESTAMPTZ, 81 | created_at TIMESTAMPTZ NOT NULL DEFAULT now() 82 | ); 83 | 84 | CREATE UNIQUE INDEX IF NOT EXISTS password_reset_tokens_token_idx ON password_reset_tokens (token); 85 | CREATE INDEX IF NOT EXISTS password_reset_tokens_user_id_idx ON password_reset_tokens (user_id); 86 | 87 | -- Add triggers for updated_at 88 | CREATE TRIGGER trg_set_updated_at_groups 89 | BEFORE UPDATE ON groups 90 | FOR EACH ROW 91 | EXECUTE PROCEDURE set_updated_at(); 92 | 93 | CREATE TRIGGER trg_set_updated_at_policies 94 | BEFORE UPDATE ON policies 95 | FOR EACH ROW 96 | EXECUTE PROCEDURE set_updated_at(); 97 | 98 | -- Insert default groups 99 | INSERT INTO groups (name, description) VALUES 100 | ('superadmin', 'Super administrators with full access'), 101 | ('users', 'Regular users') 102 | ON CONFLICT (name) DO NOTHING; 103 | 104 | -- Insert default policies 105 | INSERT INTO policies (name, description, resource, action, effect) VALUES 106 | ('full_access', 'Full access to all resources', '*', '*', 'ALLOW'), 107 | ('read_only', 'Read-only access to user resources', 'user:*', 'read', 'ALLOW'), 108 | ('user_self_manage', 'Users can manage their own data', 'user:self', '*', 'ALLOW') 109 | ON CONFLICT (name) DO 
NOTHING; 110 | 111 | -- Assign admin user to superadmin group 112 | INSERT INTO user_groups (user_id, group_id) 113 | SELECT u.user_id, g.group_id 114 | FROM users u, groups g 115 | WHERE u.username = 'admin' AND g.name = 'superadmin' 116 | ON CONFLICT DO NOTHING; 117 | 118 | -- Assign full_access policy to superadmin group 119 | INSERT INTO group_policies (group_id, policy_id) 120 | SELECT g.group_id, p.policy_id 121 | FROM groups g, policies p 122 | WHERE g.name = 'superadmin' AND p.name = 'full_access' 123 | ON CONFLICT DO NOTHING; 124 | 125 | -- Assign read_only and user_self_manage policies to users group 126 | INSERT INTO group_policies (group_id, policy_id) 127 | SELECT g.group_id, p.policy_id 128 | FROM groups g, policies p 129 | WHERE g.name = 'users' AND p.name IN ('read_only', 'user_self_manage') 130 | ON CONFLICT DO NOTHING; 131 | -------------------------------------------------------------------------------- /hurl/admin.hurl: -------------------------------------------------------------------------------- 1 | # Setup - Create Admin 2 | POST http://localhost:3000/api/login 3 | Content-Type: application/json 4 | { 5 | "identity": "{{admin_email}}", 6 | "password": "{{admin_password}}" 7 | } 8 | HTTP 200 9 | [Captures] 10 | token: jsonpath "$['token']" 11 | 12 | # Test - Create user without group 13 | POST http://localhost:3000/api/admin/user 14 | Content-Type: application/json 15 | Authorization: Bearer {{token}} 16 | { 17 | "email": "{{newUuid}}@example.com", 18 | "username": "{{group_user_username}}{{newDate}}", 19 | "password": "{{group_user_password}}" 20 | } 21 | HTTP 201 22 | [Captures] 23 | new_user_id: jsonpath "$.user_id" 24 | [Asserts] 25 | # should belong to user group by default 26 | jsonpath "$.groups" count == 1 27 | jsonpath "$.groups[0]" == "users" 28 | 29 | # Test - Create user with invalid group 30 | POST http://localhost:3000/api/admin/user 31 | Content-Type: application/json 32 | Authorization: Bearer {{token}} 33 | { 34 | "email": "{{newUuid}}@example.com", 35 | "username": "{{group_user_username}}{{newDate}}", 36 | "password": "{{group_user_password}}", 37 | "groups": ["invalid"] 38 | } 39 | HTTP 400 40 | `invalid group specified: \`invalid\`` 41 | 42 | # Test - Create user with valid group 43 | POST http://localhost:3000/api/admin/user 44 | Content-Type: application/json 45 | Authorization: Bearer {{token}} 46 | { 47 | "email": "{{newUuid}}@example.com", 48 | "username": "{{group_user_username}}{{newDate}}", 49 | "password": "{{group_user_password}}", 50 | "groups": ["superadmin"] 51 | } 52 | HTTP 201 53 | [Asserts] 54 | # should have `superadmin` group 55 | jsonpath "$.groups" count == 1 56 | jsonpath "$.groups[0]" == "superadmin" 57 | 58 | # Test - List all users 59 | GET http://localhost:3000/api/admin/users 60 | Authorization: Bearer {{token}} 61 | HTTP 200 62 | [Asserts] 63 | jsonpath "$" isCollection 64 | jsonpath "$[*].user_id" exists 65 | jsonpath "$[*].username" exists 66 | jsonpath "$[*].email" exists 67 | jsonpath "$[*].groups" exists 68 | 69 | # Test - List all groups 70 | GET http://localhost:3000/api/admin/groups 71 | Authorization: Bearer {{token}} 72 | HTTP 200 73 | [Captures] 74 | superadmin_group_id: jsonpath "$[?(@.name=='superadmin')].group_id" nth 0 75 | users_group_id: jsonpath "$[?(@.name=='users')].group_id" nth 0 76 | [Asserts] 77 | jsonpath "$" isCollection 78 | jsonpath "$[?(@.name=='superadmin')]" exists 79 | jsonpath "$[?(@.name=='users')]" exists 80 | 81 | # Test - Create new group 82 | POST 
http://localhost:3000/api/admin/groups 83 | Content-Type: application/json 84 | Authorization: Bearer {{token}} 85 | { 86 | "name": "developers_{{newDate}}", 87 | "description": "Development team" 88 | } 89 | HTTP 201 90 | [Captures] 91 | new_group_id: jsonpath "$.group_id" 92 | new_group_name: jsonpath "$.name" 93 | [Asserts] 94 | jsonpath "$.name" startsWith "developers_" 95 | jsonpath "$.description" == "Development team" 96 | jsonpath "$.group_id" exists 97 | 98 | # Test - Create duplicate group (should fail) 99 | POST http://localhost:3000/api/admin/groups 100 | Content-Type: application/json 101 | Authorization: Bearer {{token}} 102 | { 103 | "name": "{{new_group_name}}", 104 | "description": "Duplicate" 105 | } 106 | HTTP 400 107 | 108 | # Test - Assign user to group 109 | POST http://localhost:3000/api/admin/users/{{new_user_id}}/groups/{{new_group_id}} 110 | Authorization: Bearer {{token}} 111 | HTTP 200 112 | [Asserts] 113 | jsonpath "$.message" == "User assigned to group successfully" 114 | 115 | # Test - Remove user from group 116 | DELETE http://localhost:3000/api/admin/users/{{new_user_id}}/groups/{{new_group_id}} 117 | Authorization: Bearer {{token}} 118 | HTTP 200 119 | [Asserts] 120 | jsonpath "$.message" == "User removed from group successfully" 121 | 122 | # Test - Get admin dashboard 123 | GET http://localhost:3000/api/admin/dashboard 124 | Authorization: Bearer {{token}} 125 | HTTP 200 126 | [Asserts] 127 | jsonpath "$.total_users" isInteger 128 | jsonpath "$.total_groups" isInteger 129 | jsonpath "$.total_policies" isInteger 130 | jsonpath "$.current_admin.username" exists 131 | jsonpath "$.current_admin.groups" includes "superadmin" 132 | 133 | # Test - List all policies 134 | GET http://localhost:3000/api/admin/policies 135 | Authorization: Bearer {{token}} 136 | HTTP 200 137 | [Asserts] 138 | jsonpath "$" isCollection 139 | jsonpath "$[*].policy_id" exists 140 | jsonpath "$[*].name" exists 141 | 142 | # Test - Check permission (user with superadmin should have full access) 143 | POST http://localhost:3000/api/admin/check-permission 144 | Content-Type: application/json 145 | Authorization: Bearer {{token}} 146 | { 147 | "user_id": {{new_user_id}}, 148 | "group_name": "users", 149 | "resource": "user:*", 150 | "action": "read" 151 | } 152 | HTTP 200 153 | [Asserts] 154 | jsonpath "$.user_id" == {{new_user_id}} 155 | jsonpath "$.has_permission" isBoolean 156 | 157 | # Test - Access without authentication (should fail) 158 | GET http://localhost:3000/api/admin/users 159 | HTTP 401 160 | 161 | # Test - Access with non-superadmin (create regular user first) 162 | POST http://localhost:3000/api/register 163 | Content-Type: application/json 164 | { 165 | "email": "regular_{{newUuid}}@example.com", 166 | "username": "regular_{{newDate}}", 167 | "password": "Regular123" 168 | } 169 | HTTP 201 170 | [Captures] 171 | regular_token: jsonpath "$.token" 172 | 173 | # Try to access admin endpoint with regular user 174 | GET http://localhost:3000/api/admin/users 175 | Authorization: Bearer {{regular_token}} 176 | HTTP 403 177 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | .PHONY: help db-create db-drop db-reset migrate run build test test-unit test-hurl test-all clean dev setup superadmin sqlx-prepare check lint 2 | 3 | # Variáveis 4 | DATABASE_URL ?= postgres://postgres_user:pass123@localhost:5432/keyrunes 5 | ADMIN_EMAIL ?= admin@example.com 6 | 
ADMIN_USERNAME ?= admin 7 | ADMIN_PASSWORD ?= Admin123 8 | 9 | help: 10 | @echo "Commands available:" 11 | @echo "" 12 | @grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | sort | awk 'BEGIN {FS = ":.*?## "}; {printf " \033[36m%-20s\033[0m %s\n", $$1, $$2}' 13 | @echo "" 14 | @echo "Environment variables:" 15 | @echo " DATABASE_URL=$(DATABASE_URL)" 16 | @echo " ADMIN_EMAIL=$(ADMIN_EMAIL)" 17 | @echo " ADMIN_USERNAME=$(ADMIN_USERNAME)" 18 | 19 | ## Database 20 | db-create: ## Create the database 21 | @echo "📦 Creating database..." 22 | sqlx database create 23 | 24 | db-drop: ## Drop the database (WARNING: deletes all data!) 25 | @echo "🗑️ Dropping database..." 26 | sqlx database drop -y 27 | 28 | db-reset: db-drop db-create migrate ## Reset the database (drop + create + migrate) 29 | @echo "✅ Database reset successfully!" 30 | 31 | migrate: ## Run the migrations 32 | @echo "🔄 Running migrations..." 33 | sqlx migrate run 34 | @echo "✅ Migrations applied!" 35 | 36 | migrate-revert: ## Revert the last migration 37 | @echo "↩️ Reverting last migration..." 38 | sqlx migrate revert 39 | 40 | ## Build & Run 41 | build: ## Build the project 42 | @echo "🔨 Building..." 43 | cargo build 44 | 45 | build-release: ## Build in release mode 46 | @echo "🔨 Building release..." 47 | cargo build --release 48 | 49 | run: ## Run the server 50 | @echo "🚀 Starting server..." 51 | cargo run 52 | 53 | run-release: ## Run the server in release mode 54 | @echo "🚀 Starting server (release)..." 55 | cargo run --release 56 | 57 | dev: ## Run the server with auto-reload (requires cargo-watch) 58 | @echo "🔥 Development mode with hot-reload..." 59 | cargo watch -x run 60 | 61 | ## CLI 62 | cli-superadmin: ## Create the first superadmin 63 | @echo "👤 Creating superadmin..." 64 | cargo run --bin cli -- create-superadmin \ 65 | --email $(ADMIN_EMAIL) \ 66 | --username $(ADMIN_USERNAME) \ 67 | --password $(ADMIN_PASSWORD) 68 | @echo "✅ Superadmin created!" 69 | 70 | cli-list-groups: ## List all groups 71 | @echo "📋 Listing groups..." 72 | cargo run --bin cli -- list-groups 73 | 74 | cli-create-group: ## Create a group (usage: make cli-create-group NAME=developers DESC="Dev team") 75 | @echo "➕ Creating group $(NAME)..." 76 | cargo run --bin cli -- create-group --name $(NAME) --description "$(DESC)" 77 | 78 | ## Tests 79 | test: ## Run all Rust tests 80 | @echo "🧪 Running Rust tests..." 81 | cargo test 82 | 83 | test-unit: ## Run unit tests only 84 | @echo "🧪 Running unit tests..." 85 | cargo test --lib 86 | 87 | test-integration: ## Run integration tests only 88 | @echo "🧪 Running integration tests..." 89 | cargo test --test '*' 90 | 91 | test-hurl: ## Run Hurl tests (requires the server to be running) 92 | @echo "🧪 Running Hurl tests..." 93 | @if ! curl -s http://localhost:3000/api/health > /dev/null 2>&1; then \ 94 | echo "❌ Server is not running! Run 'make run' first."; \ 95 | exit 1; \ 96 | fi 97 | ./run_hurl_tests.sh 98 | 99 | test-hurl-verbose: ## Run Hurl tests in verbose mode 100 | @echo "🧪 Running Hurl tests (verbose)..." 101 | ./run_hurl_tests.sh --verbose 102 | 103 | test-all: test test-hurl ## Run all tests (Rust + Hurl) 104 | 105 | ## SQLx 106 | sqlx-prepare: ## Prepare SQLx offline metadata 107 | @echo "📝 Preparing SQLx metadata..." 108 | cargo sqlx prepare 109 | 110 | sqlx-check: ## Check that the SQLx queries are correct 111 | @echo "🔍 Checking SQLx queries..."
112 | cargo sqlx prepare --check 113 | 114 | ## Full setup 115 | setup: db-create migrate cli-superadmin ## Full setup (creates DB, migrations, superadmin) 116 | @echo "" 117 | @echo "✨ Setup complete!" 118 | @echo "" 119 | @echo "Next steps:" 120 | @echo " 1. Start the server: make run" 121 | @echo " 2. Open: http://127.0.0.1:3000/login" 122 | @echo " 3. Login: $(ADMIN_USERNAME) / $(ADMIN_PASSWORD)" 123 | @echo " 4. Admin: http://127.0.0.1:3000/admin" 124 | @echo "" 125 | 126 | ## Development 127 | check: ## Check the code without building 128 | @echo "🔍 Checking code..." 129 | cargo check --all-targets 130 | 131 | lint: ## Run clippy (linter) 132 | @echo "🧹 Running linter..." 133 | cargo clippy -- -D warnings 134 | 135 | fmt: ## Format the code 136 | @echo "✨ Formatting code..." 137 | cargo fmt 138 | 139 | fmt-check: ## Check formatting without changing files 140 | @echo "🔍 Checking formatting..." 141 | cargo fmt -- --check 142 | 143 | clean: ## Clean build artifacts 144 | @echo "🧹 Cleaning..." 145 | cargo clean 146 | 147 | ## Docker 148 | docker-up: ## Start Postgres via docker-compose 149 | @echo "🐳 Starting Docker..." 150 | docker-compose up -d 151 | @echo "⏳ Waiting for Postgres to start..." 152 | @sleep 3 153 | @echo "✅ Postgres running!" 154 | 155 | docker-down: ## Stop Postgres 156 | @echo "🛑 Stopping Docker..." 157 | docker-compose down 158 | 159 | docker-reset: docker-down docker-up ## Reset Docker containers 160 | @echo "✅ Docker reset!" 161 | 162 | docker-logs: ## Show Postgres logs 163 | docker-compose logs -f postgres 164 | 165 | ## Quick commands 166 | fresh-start: docker-reset db-reset setup ## Start from scratch (Docker + DB + Setup) 167 | @echo "" 168 | @echo "🎉 Environment ready for development!" 169 | @echo "Run: make run" 170 | 171 | restart: docker-down docker-up migrate ## Restart the development environment 172 | @echo "✅ Environment restarted!"
173 | 174 | ## Info 175 | env: ## Show environment variables 176 | @echo "DATABASE_URL: $(DATABASE_URL)" 177 | @echo "ADMIN_EMAIL: $(ADMIN_EMAIL)" 178 | @echo "ADMIN_USERNAME: $(ADMIN_USERNAME)" 179 | @echo "ADMIN_PASSWORD: $(ADMIN_PASSWORD)" 180 | 181 | status: ## Show environment status 182 | @echo "📊 Environment Status" 183 | @echo "" 184 | @echo "Docker:" 185 | @docker-compose ps 2>/dev/null || echo " ⚠️ Docker is not running" 186 | @echo "" 187 | @echo "Server:" 188 | @if curl -s http://localhost:3000/api/health > /dev/null 2>&1; then \ 189 | echo " ✅ Server running (http://localhost:3000)"; \ 190 | else \ 191 | echo " ⚠️ Server is not running"; \ 192 | fi 193 | @echo "" 194 | @echo "Database:" 195 | @if psql $(DATABASE_URL) -c "SELECT 1" > /dev/null 2>&1; then \ 196 | echo " ✅ Connected"; \ 197 | else \ 198 | echo " ⚠️ Not connected"; \ 199 | fi 200 | -------------------------------------------------------------------------------- /src/services/jwt_service.rs: -------------------------------------------------------------------------------- 1 | use anyhow::{Result, anyhow}; 2 | use chrono::{Duration, Utc}; 3 | use josekit::jws::HS256; 4 | use josekit::jws::JwsHeader; 5 | use josekit::jwt::{self, JwtPayload}; 6 | use serde::{Deserialize, Serialize}; 7 | use serde_json::Value; 8 | 9 | #[derive(Debug, Serialize, Deserialize)] 10 | pub struct Claims { 11 | pub sub: String, 12 | pub email: String, 13 | pub username: String, 14 | pub groups: Vec<String>, 15 | pub exp: i64, 16 | pub iat: i64, 17 | pub iss: String, 18 | } 19 | 20 | #[derive(Clone)] 21 | pub struct JwtService { 22 | secret: Vec<u8>, 23 | issuer: String, 24 | } 25 | 26 | impl JwtService { 27 | pub fn new(secret: &str) -> Self { 28 | Self { 29 | secret: secret.as_bytes().to_vec(), 30 | issuer: "keyrunes".to_string(), 31 | } 32 | } 33 | 34 | pub fn generate_token( 35 | &self, 36 | user_id: i64, 37 | email: &str, 38 | username: &str, 39 | groups: Vec<String>, 40 | ) -> Result<String> { 41 | let now = Utc::now(); 42 | let exp = now + Duration::hours(1); 43 | 44 | let mut payload = JwtPayload::new(); 45 | payload.set_claim("sub", Some(Value::String(user_id.to_string())))?; 46 | payload.set_claim("email", Some(Value::String(email.to_string())))?; 47 | payload.set_claim("username", Some(Value::String(username.to_string())))?; 48 | payload.set_claim("groups", Some(serde_json::to_value(&groups)?))?; 49 | payload.set_claim("exp", Some(Value::Number(exp.timestamp().into())))?; 50 | payload.set_claim("iat", Some(Value::Number(now.timestamp().into())))?; 51 | payload.set_claim("iss", Some(Value::String(self.issuer.clone())))?; 52 | 53 | let mut header = JwsHeader::new(); 54 | header.set_token_type("JWT"); 55 | 56 | let signer = HS256.signer_from_bytes(&self.secret)?; 57 | let token = jwt::encode_with_signer(&payload, &header, &signer)?; 58 | 59 | Ok(token) 60 | } 61 | 62 | pub fn verify_token(&self, token: &str) -> Result<Claims> { 63 | let verifier = HS256.verifier_from_bytes(&self.secret)?; 64 | let (payload, _header) = jwt::decode_with_verifier(token, &verifier) 65 | .map_err(|e| anyhow!("Failed to decode JWT: {}", e))?; 66 | 67 | let sub = payload 68 | .claim("sub") 69 | .and_then(|v| v.as_str()) 70 | .ok_or_else(|| anyhow!("Missing or invalid 'sub' claim"))? 71 | .to_string(); 72 | let email = payload 73 | .claim("email") 74 | .and_then(|v| v.as_str()) 75 | .ok_or_else(|| anyhow!("Missing or invalid 'email' claim"))?
76 | .to_string(); 77 | let username = payload 78 | .claim("username") 79 | .and_then(|v| v.as_str()) 80 | .ok_or_else(|| anyhow!("Missing or invalid 'username' claim"))? 81 | .to_string(); 82 | let groups = payload 83 | .claim("groups") 84 | .and_then(|v| serde_json::from_value(v.clone()).ok()) 85 | .ok_or_else(|| anyhow!("Missing or invalid 'groups' claim"))?; 86 | let exp = payload 87 | .claim("exp") 88 | .and_then(|v| v.as_i64()) 89 | .ok_or_else(|| anyhow!("Missing or invalid 'exp' claim"))?; 90 | let iat = payload 91 | .claim("iat") 92 | .and_then(|v| v.as_i64()) 93 | .ok_or_else(|| anyhow!("Missing or invalid 'iat' claim"))?; 94 | let iss = payload 95 | .claim("iss") 96 | .and_then(|v| v.as_str()) 97 | .ok_or_else(|| anyhow!("Missing or invalid 'iss' claim"))? 98 | .to_string(); 99 | 100 | Ok(Claims { 101 | sub, 102 | email, 103 | username, 104 | groups, 105 | exp, 106 | iat, 107 | iss, 108 | }) 109 | } 110 | 111 | pub fn refresh_token(&self, token: &str) -> Result<String> { 112 | let claims = self.verify_token(token)?; 113 | self.generate_token( 114 | claims.sub.parse()?, 115 | &claims.email, 116 | &claims.username, 117 | claims.groups, 118 | ) 119 | } 120 | 121 | pub fn extract_user_id(&self, token: &str) -> Result<i64> { 122 | let claims = self.verify_token(token)?; 123 | claims 124 | .sub 125 | .parse() 126 | .map_err(|e| anyhow!("Invalid user ID in token: {}", e)) 127 | } 128 | } 129 | 130 | #[cfg(test)] 131 | mod tests { 132 | use super::*; 133 | use std::thread; 134 | use std::time::Duration as StdDuration; 135 | 136 | #[test] 137 | fn test_jwt_token_generation_and_verification() { 138 | let service = JwtService::new("0123456789ABCDEF0123456789ABCDEF"); 139 | let groups = vec!["users".to_string(), "admin".to_string()]; 140 | 141 | let token = service 142 | .generate_token(1, "test@example.com", "testuser", groups.clone()) 143 | .unwrap(); 144 | let claims = service.verify_token(&token).unwrap(); 145 | 146 | assert_eq!(claims.sub, "1"); 147 | assert_eq!(claims.email, "test@example.com"); 148 | assert_eq!(claims.username, "testuser"); 149 | assert_eq!(claims.groups, groups); 150 | assert_eq!(claims.iss, "keyrunes"); 151 | } 152 | 153 | #[test] 154 | fn test_refresh_token() { 155 | let service = JwtService::new("0123456789ABCDEF0123456789ABCDEF"); 156 | let groups = vec!["users".to_string()]; 157 | 158 | let original_token = service 159 | .generate_token(1, "test@example.com", "testuser", groups.clone()) 160 | .unwrap(); 161 | 162 | thread::sleep(StdDuration::from_secs(1)); 163 | 164 | let refreshed_token = service.refresh_token(&original_token).unwrap(); 165 | 166 | let original_claims = service.verify_token(&original_token).unwrap(); 167 | let refreshed_claims = service.verify_token(&refreshed_token).unwrap(); 168 | 169 | assert_eq!(original_claims.sub, refreshed_claims.sub); 170 | assert_eq!(original_claims.email, refreshed_claims.email); 171 | assert!(refreshed_claims.exp > original_claims.exp); 172 | } 173 | 174 | #[test] 175 | fn test_extract_user_id() { 176 | let service = JwtService::new("0123456789ABCDEF0123456789ABCDEF"); 177 | let groups = vec!["users".to_string()]; 178 | 179 | let token = service 180 | .generate_token(42, "test@example.com", "testuser", groups) 181 | .unwrap(); 182 | let user_id = service.extract_user_id(&token).unwrap(); 183 | 184 | assert_eq!(user_id, 42); 185 | } 186 | } 187 | --------------------------------------------------------------------------------
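// -----------------------------------------------------------------------------
// [Editor's note] Illustrative sketch, not a file from this repository: a
// minimal round trip through the JwtService listed above. The secret, user id,
// email, and group values are placeholders; the import path follows the
// re-exports declared in src/lib.rs.
// -----------------------------------------------------------------------------
fn jwt_round_trip() -> anyhow::Result<()> {
    use keyrunes::services::jwt_service::JwtService;

    // The service is keyed by a shared secret; tokens are signed with HS256.
    let jwt = JwtService::new("0123456789ABCDEF0123456789ABCDEF");

    // Issue a token for a user that belongs to the default `users` group.
    let token = jwt.generate_token(42, "user@example.com", "someuser", vec!["users".to_string()])?;

    // Verification checks the signature and returns the typed claims.
    let claims = jwt.verify_token(&token)?;
    assert_eq!(claims.sub, "42");
    assert_eq!(claims.iss, "keyrunes");

    // Refreshing re-issues a token for the same identity with a new expiry.
    let _refreshed = jwt.refresh_token(&token)?;
    Ok(())
}
// -----------------------------------------------------------------------------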
/src/repository/mod.rs: -------------------------------------------------------------------------------- 1 | use anyhow::Result; 2 | use async_trait::async_trait; 3 | use chrono::{DateTime, Utc}; 4 | use serde::{Deserialize, Serialize}; 5 | use serde_json::Value as JsonValue; 6 | use uuid::Uuid; 7 | 8 | #[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] 9 | pub struct User { 10 | pub user_id: i64, 11 | pub external_id: Uuid, 12 | pub email: String, 13 | pub username: String, 14 | pub password_hash: String, 15 | pub first_login: bool, 16 | pub created_at: DateTime<Utc>, 17 | pub updated_at: DateTime<Utc>, 18 | } 19 | 20 | #[derive(Debug, Clone)] 21 | pub struct NewUser { 22 | pub external_id: Uuid, 23 | pub email: String, 24 | pub username: String, 25 | pub password_hash: String, 26 | pub first_login: bool, 27 | } 28 | 29 | #[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] 30 | pub struct Group { 31 | pub group_id: i64, 32 | pub external_id: Uuid, 33 | pub name: String, 34 | pub description: Option<String>, 35 | pub created_at: DateTime<Utc>, 36 | pub updated_at: DateTime<Utc>, 37 | } 38 | 39 | #[derive(Debug, Clone)] 40 | pub struct NewGroup { 41 | pub external_id: Uuid, 42 | pub name: String, 43 | pub description: Option<String>, 44 | } 45 | 46 | #[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] 47 | pub struct Policy { 48 | pub policy_id: i64, 49 | pub external_id: Uuid, 50 | pub name: String, 51 | pub description: Option<String>, 52 | pub resource: String, 53 | pub action: String, 54 | pub effect: PolicyEffect, 55 | pub conditions: Option<JsonValue>, 56 | pub created_at: DateTime<Utc>, 57 | pub updated_at: DateTime<Utc>, 58 | } 59 | 60 | #[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] 61 | pub enum PolicyEffect { 62 | #[serde(rename = "ALLOW")] 63 | Allow, 64 | #[serde(rename = "DENY")] 65 | Deny, 66 | } 67 | 68 | impl std::fmt::Display for PolicyEffect { 69 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { 70 | match self { 71 | PolicyEffect::Allow => write!(f, "ALLOW"), 72 | PolicyEffect::Deny => write!(f, "DENY"), 73 | } 74 | } 75 | } 76 | 77 | #[derive(Debug, Clone)] 78 | pub struct NewPolicy { 79 | pub external_id: Uuid, 80 | pub name: String, 81 | pub description: Option<String>, 82 | pub resource: String, 83 | pub action: String, 84 | pub effect: PolicyEffect, 85 | pub conditions: Option<JsonValue>, 86 | } 87 | 88 | #[allow(dead_code)] 89 | #[derive(Debug, Clone, Serialize, Deserialize)] 90 | pub struct UserGroup { 91 | pub user_id: i64, 92 | pub group_id: i64, 93 | pub assigned_at: DateTime<Utc>, 94 | pub assigned_by: Option<i64>, 95 | } 96 | 97 | #[allow(dead_code)] 98 | #[derive(Debug, Clone, Serialize, Deserialize)] 99 | pub struct UserPolicy { 100 | pub user_id: i64, 101 | pub policy_id: i64, 102 | pub assigned_at: DateTime<Utc>, 103 | pub assigned_by: Option<i64>, 104 | } 105 | 106 | #[allow(dead_code)] 107 | #[derive(Debug, Clone, Serialize, Deserialize)] 108 | pub struct GroupPolicy { 109 | pub group_id: i64, 110 | pub policy_id: i64, 111 | pub assigned_at: DateTime<Utc>, 112 | pub assigned_by: Option<i64>, 113 | } 114 | 115 | #[derive(Debug, Clone, Serialize, Deserialize)] 116 | pub struct PasswordResetToken { 117 | pub token_id: i64, 118 | pub user_id: i64, 119 | pub token: String, 120 | pub expires_at: DateTime<Utc>, 121 | pub used_at: Option<DateTime<Utc>>, 122 | pub created_at: DateTime<Utc>, 123 | } 124 | 125 | #[derive(Debug, Clone)] 126 | pub struct NewPasswordResetToken { 127 | pub user_id: i64, 128 | pub token: String, 129 | pub expires_at: DateTime<Utc>, 130 | } 131 |
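// -----------------------------------------------------------------------------
// [Editor's note] Illustrative sketch, not part of repository/mod.rs: the
// PolicyEffect enum defined above is renamed by serde so that its JSON form and
// its Display output both match the `effect IN ('ALLOW', 'DENY')` CHECK
// constraint from the groups/policies migration.
// -----------------------------------------------------------------------------
fn policy_effect_wire_format() {
    use keyrunes::repository::PolicyEffect;

    // Serialization uses the renamed variants...
    assert_eq!(serde_json::to_string(&PolicyEffect::Allow).unwrap(), "\"ALLOW\"");
    // ...and Display produces the same uppercase spelling stored in the database.
    assert_eq!(PolicyEffect::Deny.to_string(), "DENY");

    let parsed: PolicyEffect = serde_json::from_str("\"DENY\"").unwrap();
    assert_eq!(parsed, PolicyEffect::Deny);
}
// -----------------------------------------------------------------------------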
132 | #[derive(sqlx::FromRow, Debug, Clone)] 133 | #[allow(dead_code)] 134 | pub struct Settings { 135 | pub settings_id: i32, 136 | pub key: String, 137 | pub value: String, 138 | pub description: Option<String>, 139 | pub created_at: DateTime<Utc>, 140 | pub updated_at: DateTime<Utc>, 141 | } 142 | 143 | #[derive(Debug)] 144 | pub struct CreateSettings { 145 | pub key: String, 146 | pub value: String, 147 | pub description: Option<String>, 148 | } 149 | 150 | // User Repository Trait 151 | #[allow(dead_code)] 152 | #[async_trait] 153 | pub trait UserRepository: Send + Sync + 'static { 154 | async fn find_by_email(&self, email: &str) -> Result<Option<User>>; 155 | async fn find_by_username(&self, username: &str) -> Result<Option<User>>; 156 | async fn find_by_id(&self, user_id: i64) -> Result<Option<User>>; 157 | async fn insert_user(&self, new_user: NewUser) -> Result<User>; 158 | async fn update_user_password(&self, user_id: i64, new_password_hash: &str) -> Result<()>; 159 | async fn set_first_login(&self, user_id: i64, first_login: bool) -> Result<()>; 160 | async fn get_user_groups(&self, user_id: i64) -> Result<Vec<Group>>; 161 | async fn get_user_policies(&self, user_id: i64) -> Result<Vec<Policy>>; 162 | async fn get_user_all_policies(&self, user_id: i64) -> Result<Vec<Policy>>; 163 | } 164 | 165 | // Group Repository Trait 166 | #[async_trait] 167 | pub trait GroupRepository: Send + Sync + 'static { 168 | async fn find_by_name(&self, name: &str) -> Result<Option<Group>>; 169 | async fn find_by_id(&self, group_id: i64) -> Result<Option<Group>>; 170 | async fn insert_group(&self, new_group: NewGroup) -> Result<Group>; 171 | async fn list_groups(&self) -> Result<Vec<Group>>; 172 | async fn assign_user_to_group( 173 | &self, 174 | user_id: i64, 175 | group_id: i64, 176 | assigned_by: Option<i64>, 177 | ) -> Result<()>; 178 | async fn assign_user_to_groups( 179 | &self, 180 | user_id: i64, 181 | group_ids: &[i64], 182 | assigned_by: Option<i64>, 183 | ) -> Result<()> { 184 | for group_id in group_ids { 185 | let _ = self 186 | .assign_user_to_group(user_id, *group_id, assigned_by) 187 | .await?; 188 | } 189 | Ok(()) 190 | } 191 | async fn remove_user_from_group(&self, user_id: i64, group_id: i64) -> Result<()>; 192 | async fn get_group_policies(&self, group_id: i64) -> Result<Vec<Policy>>; 193 | } 194 | 195 | // Policy Repository Trait 196 | #[async_trait] 197 | pub trait PolicyRepository: Send + Sync + 'static { 198 | async fn find_by_name(&self, name: &str) -> Result<Option<Policy>>; 199 | async fn find_by_id(&self, policy_id: i64) -> Result<Option<Policy>>; 200 | async fn insert_policy(&self, new_policy: NewPolicy) -> Result<Policy>; 201 | async fn list_policies(&self) -> Result<Vec<Policy>>; 202 | async fn assign_policy_to_user( 203 | &self, 204 | user_id: i64, 205 | policy_id: i64, 206 | assigned_by: Option<i64>, 207 | ) -> Result<()>; 208 | async fn assign_policy_to_group( 209 | &self, 210 | group_id: i64, 211 | policy_id: i64, 212 | assigned_by: Option<i64>, 213 | ) -> Result<()>; 214 | async fn remove_policy_from_user(&self, user_id: i64, policy_id: i64) -> Result<()>; 215 | async fn remove_policy_from_group(&self, group_id: i64, policy_id: i64) -> Result<()>; 216 | } 217 |
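// -----------------------------------------------------------------------------
// [Editor's note] Illustrative sketch, not part of repository/mod.rs: one way
// the Policy records above could be evaluated for a permission check. The
// wildcard handling ("*" and prefix patterns such as "user:*") and the
// deny-overrides rule are assumptions made for illustration; the project's
// actual evaluation lives in services/policy_service.rs, which is not shown in
// this excerpt.
// -----------------------------------------------------------------------------
fn pattern_matches(pattern: &str, value: &str) -> bool {
    // "*" matches anything; "user:*" matches any value starting with "user:".
    pattern == "*"
        || pattern == value
        || pattern
            .strip_suffix('*')
            .map_or(false, |prefix| value.starts_with(prefix))
}

fn is_allowed(policies: &[keyrunes::repository::Policy], resource: &str, action: &str) -> bool {
    use keyrunes::repository::PolicyEffect;

    let mut allowed = false;
    for p in policies {
        if pattern_matches(&p.resource, resource) && pattern_matches(&p.action, action) {
            match p.effect {
                PolicyEffect::Deny => return false, // an explicit DENY always wins
                PolicyEffect::Allow => allowed = true,
            }
        }
    }
    allowed
}
// -----------------------------------------------------------------------------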
218 | // Password Reset Repository Trait 219 | #[async_trait] 220 | pub trait PasswordResetRepository: Send + Sync + 'static { 221 | async fn create_reset_token(&self, token: NewPasswordResetToken) -> Result<PasswordResetToken>; 222 | async fn find_valid_token(&self, token: &str) -> Result<Option<PasswordResetToken>>; 223 | async fn mark_token_used(&self, token_id: i64) -> Result<()>; 224 | async fn cleanup_expired_tokens(&self) -> Result<()>; 225 | } 226 | 227 | #[async_trait] 228 | pub trait SettingsRepository: Send + Sync + 'static { 229 | async fn create_settings(&self, settings: CreateSettings) -> Result<Option<Settings>>; 230 | async fn get_settings_by_key(&self, key: &str) -> Result<Option<Settings>>; 231 | async fn get_all_settings(&self) -> Result<Vec<Settings>>; 232 | async fn update_settings_by_key(&self, key: &str, value: &str) -> Result<()>; 233 | async fn delete_settings_by_key(&self, key: &str) -> Result<()>; 234 | } 235 | 236 | pub mod sqlx_impl; 237 | -------------------------------------------------------------------------------- /src/services/group_service.rs: -------------------------------------------------------------------------------- 1 | use crate::repository::{Group, GroupRepository, NewGroup}; 2 | use anyhow::{Result, anyhow}; 3 | use serde::{Deserialize, Serialize}; 4 | use std::sync::Arc; 5 | use uuid::Uuid; 6 | 7 | #[derive(Debug, Clone, Deserialize)] 8 | pub struct CreateGroupRequest { 9 | pub name: String, 10 | pub description: Option<String>, 11 | } 12 | 13 | #[derive(Debug, Clone, Serialize)] 14 | pub struct GroupResponse { 15 | pub group_id: i64, 16 | pub external_id: Uuid, 17 | pub name: String, 18 | pub description: Option<String>, 19 | pub policies: Vec<PolicyResponse>, 20 | } 21 | 22 | #[derive(Debug, Clone, Serialize)] 23 | pub struct PolicyResponse { 24 | pub policy_id: i64, 25 | pub external_id: Uuid, 26 | pub name: String, 27 | pub description: Option<String>, 28 | pub resource: String, 29 | pub action: String, 30 | pub effect: String, 31 | } 32 | 33 | #[derive(Debug, Clone)] 34 | pub struct GroupService { 35 | pub repo: Arc<dyn GroupRepository>, 36 | } 37 | 38 | impl GroupService { 39 | pub fn new(repo: Arc<dyn GroupRepository>) -> Self { 40 | Self { repo } 41 | } 42 | 43 | pub async fn create_group(&self, req: CreateGroupRequest) -> Result<Group> { 44 | // Check if group already exists 45 | if self.repo.find_by_name(&req.name).await?.is_some() { 46 | return Err(anyhow!("group name already exists")); 47 | } 48 | 49 | let new_group = NewGroup { 50 | external_id: Uuid::new_v4(), 51 | name: req.name, 52 | description: req.description, 53 | }; 54 | 55 | self.repo.insert_group(new_group).await 56 | } 57 | 58 | #[allow(dead_code)] 59 | pub async fn get_group_by_name(&self, name: &str) -> Result<Option<Group>> { 60 | self.repo.find_by_name(name).await 61 | } 62 | 63 | #[allow(dead_code)] 64 | pub async fn get_group_by_id(&self, group_id: i64) -> Result<Option<Group>> { 65 | self.repo.find_by_id(group_id).await 66 | } 67 | 68 | pub async fn list_groups(&self) -> Result<Vec<Group>> { 69 | self.repo.list_groups().await 70 | } 71 | 72 | #[allow(dead_code)] 73 | pub async fn get_group_with_policies(&self, group_id: i64) -> Result<Option<GroupResponse>> { 74 | let group = self.repo.find_by_id(group_id).await?; 75 | 76 | if let Some(group) = group { 77 | let policies = self.repo.get_group_policies(group_id).await?; 78 | let policy_responses: Vec<PolicyResponse> = policies 79 | .into_iter() 80 | .map(|p| PolicyResponse { 81 | policy_id: p.policy_id, 82 | external_id: p.external_id, 83 | name: p.name, 84 | description: p.description, 85 | resource: p.resource, 86 | action: p.action, 87 | effect: p.effect.to_string(), 88 | }) 89 | .collect(); 90 | 91 | Ok(Some(GroupResponse { 92 | group_id: group.group_id, 93 | external_id: group.external_id, 94 | name: group.name, 95 | description: group.description, 96 | policies: policy_responses, 97 | })) 98 | } else { 99 | Ok(None) 100 | } 101 | } 102 | 103 | pub async fn assign_user_to_group( 104 | &self, 105 | user_id: i64, 106 | group_id: i64, 107 | assigned_by: Option<i64>, 108 | ) -> Result<()> { 109 | // Verify group exists 110 | if self.repo.find_by_id(group_id).await?.is_none() { 111 | return Err(anyhow!("group not found")); 112 | } 113 | 114 | self.repo 115 | .assign_user_to_group(user_id, group_id, assigned_by) 116 | .await 117 | } 118 | 119 | pub async fn remove_user_from_group(&self, user_id: i64, group_id: i64) -> Result<()> { 120 |
self.repo.remove_user_from_group(user_id, group_id).await 121 | } 122 | } 123 | 124 | #[cfg(test)] 125 | mod tests { 126 | use super::*; 127 | use crate::repository::{Group, Policy}; 128 | use anyhow::Result; 129 | use async_trait::async_trait; 130 | use chrono::Utc; 131 | use std::sync::{Arc, Mutex}; 132 | 133 | struct MockGroupRepository { 134 | groups: Mutex<Vec<Group>>, 135 | user_groups: Mutex<Vec<(i64, i64)>>, // (user_id, group_id) 136 | } 137 | 138 | impl MockGroupRepository { 139 | fn new() -> Self { 140 | Self { 141 | groups: Mutex::new(Vec::new()), 142 | user_groups: Mutex::new(Vec::new()), 143 | } 144 | } 145 | } 146 | 147 | #[async_trait] 148 | impl GroupRepository for MockGroupRepository { 149 | async fn find_by_name(&self, name: &str) -> Result<Option<Group>> { 150 | let groups = self.groups.lock().unwrap(); 151 | Ok(groups.iter().find(|g| g.name == name).cloned()) 152 | } 153 | 154 | async fn find_by_id(&self, group_id: i64) -> Result<Option<Group>> { 155 | let groups = self.groups.lock().unwrap(); 156 | Ok(groups.iter().find(|g| g.group_id == group_id).cloned()) 157 | } 158 | 159 | async fn insert_group(&self, new_group: NewGroup) -> Result<Group> { 160 | let mut groups = self.groups.lock().unwrap(); 161 | let group = Group { 162 | group_id: (groups.len() + 1) as i64, 163 | external_id: new_group.external_id, 164 | name: new_group.name, 165 | description: new_group.description, 166 | created_at: Utc::now(), 167 | updated_at: Utc::now(), 168 | }; 169 | groups.push(group.clone()); 170 | Ok(group) 171 | } 172 | 173 | async fn list_groups(&self) -> Result<Vec<Group>> { 174 | let groups = self.groups.lock().unwrap(); 175 | Ok(groups.clone()) 176 | } 177 | 178 | async fn assign_user_to_group( 179 | &self, 180 | user_id: i64, 181 | group_id: i64, 182 | _assigned_by: Option<i64>, 183 | ) -> Result<()> { 184 | let mut user_groups = self.user_groups.lock().unwrap(); 185 | user_groups.push((user_id, group_id)); 186 | Ok(()) 187 | } 188 | 189 | async fn remove_user_from_group(&self, user_id: i64, group_id: i64) -> Result<()> { 190 | let mut user_groups = self.user_groups.lock().unwrap(); 191 | user_groups.retain(|(uid, gid)| !(*uid == user_id && *gid == group_id)); 192 | Ok(()) 193 | } 194 | 195 | async fn get_group_policies(&self, _group_id: i64) -> Result<Vec<Policy>> { 196 | Ok(Vec::new()) 197 | } 198 | } 199 | 200 | #[tokio::test] 201 | async fn test_create_group() { 202 | let repo = Arc::new(MockGroupRepository::new()); 203 | let service = GroupService::new(repo); 204 | 205 | let req = CreateGroupRequest { 206 | name: "test_group".to_string(), 207 | description: Some("Test group description".to_string()), 208 | }; 209 | 210 | let group = service.create_group(req).await.unwrap(); 211 | assert_eq!(group.name, "test_group"); 212 | assert_eq!( 213 | group.description, 214 | Some("Test group description".to_string()) 215 | ); 216 | } 217 | 218 | #[tokio::test] 219 | async fn test_create_duplicate_group() { 220 | let repo = Arc::new(MockGroupRepository::new()); 221 | let service = GroupService::new(repo); 222 | 223 | let req = CreateGroupRequest { 224 | name: "duplicate_group".to_string(), 225 | description: None, 226 | }; 227 | 228 | // Create first group 229 | service.create_group(req.clone()).await.unwrap(); 230 | 231 | // Try to create duplicate 232 | let result = service.create_group(req).await; 233 | assert!(result.is_err()); 234 | assert_eq!(result.unwrap_err().to_string(), "group name already exists"); 235 | } 236 | } 237 | --------------------------------------------------------------------------------
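// -----------------------------------------------------------------------------
// [Editor's note] Illustrative sketch, not a file from this repository: wiring
// the GroupService above to the Postgres-backed repository. It assumes that
// PgGroupRepository (imported by src/api/auth.rs) exposes a `new(PgPool)`
// constructor like the PgUserRepository used in the tests; the pool and the
// group data are placeholders.
// -----------------------------------------------------------------------------
async fn create_developers_group(pool: sqlx::PgPool) -> anyhow::Result<()> {
    use std::sync::Arc;
    use keyrunes::repository::sqlx_impl::PgGroupRepository;
    use keyrunes::services::group_service::{CreateGroupRequest, GroupService};

    let service = GroupService::new(Arc::new(PgGroupRepository::new(pool)));

    let group = service
        .create_group(CreateGroupRequest {
            name: "developers".to_string(),
            description: Some("Development team".to_string()),
        })
        .await?;
    assert_eq!(group.name, "developers");

    // Calling create_group again with the same name yields the
    // "group name already exists" error from the service.
    Ok(())
}
// -----------------------------------------------------------------------------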
/src/handler/auth.rs: -------------------------------------------------------------------------------- 1 | use axum::{ 2 | body::Body, 3 | extract::{Extension, Request}, 4 | http::HeaderMap, 5 | middleware::Next, 6 | response::{IntoResponse, Response}, 7 | }; 8 | use std::future::Future; 9 | use std::pin::Pin; 10 | use std::sync::Arc; 11 | 12 | use crate::handler::errors::ErrorResponse; 13 | use crate::services::jwt_service::{Claims, JwtService}; 14 | 15 | #[allow(dead_code)] 16 | #[derive(Clone)] 17 | pub struct AuthenticatedUser { 18 | pub user_id: i64, 19 | pub email: String, 20 | pub username: String, 21 | pub groups: Vec<String>, 22 | } 23 | 24 | impl From<Claims> for AuthenticatedUser { 25 | fn from(claims: Claims) -> Self { 26 | Self { 27 | user_id: claims.sub.parse().unwrap_or(0), 28 | email: claims.email, 29 | username: claims.username, 30 | groups: claims.groups, 31 | } 32 | } 33 | } 34 | 35 | /// Middleware that requires JWT authentication 36 | pub async fn require_auth( 37 | Extension(jwt_service): Extension<Arc<JwtService>>, 38 | headers: HeaderMap, 39 | mut request: Request, 40 | next: Next, 41 | ) -> Response { 42 | let token = match extract_bearer_token(&headers) { 43 | Some(token) => token, 44 | None => { 45 | return ErrorResponse::unauthorized("Missing authorization header").into_response(); 46 | } 47 | }; 48 | 49 | match jwt_service.verify_token(&token) { 50 | Ok(claims) => { 51 | let user = AuthenticatedUser::from(claims); 52 | request.extensions_mut().insert(user); 53 | next.run(request).await 54 | } 55 | Err(_) => ErrorResponse::unauthorized("Invalid or expired token").into_response(), 56 | } 57 | } 58 | 59 | /// Middleware that optionally extracts user from JWT if present 60 | #[allow(dead_code)] 61 | pub async fn optional_auth( 62 | Extension(jwt_service): Extension<Arc<JwtService>>, 63 | headers: HeaderMap, 64 | mut request: Request, 65 | next: Next, 66 | ) -> Response { 67 | if let Some(token) = extract_bearer_token(&headers) 68 | && let Ok(claims) = jwt_service.verify_token(&token) 69 | { 70 | let user = AuthenticatedUser::from(claims); 71 | request.extensions_mut().insert(user); 72 | } 73 | 74 | next.run(request).await 75 | } 76 | 77 | /// Middleware that requires specific groups 78 | #[allow(dead_code)] 79 | pub fn require_groups( 80 | required_groups: Vec<String>, 81 | ) -> impl Clone 82 | + Fn( 83 | Extension<AuthenticatedUser>, 84 | Request, 85 | Next, 86 | ) -> Pin<Box<dyn Future<Output = Response> + Send>> { 87 | let required_groups: Vec<Arc<String>> = required_groups.into_iter().map(Arc::new).collect(); 88 | 89 | move |Extension(user): Extension<AuthenticatedUser>, request: Request, next: Next| { 90 | let required_groups = required_groups.clone(); 91 | 92 | Box::pin(async move { 93 | let has_required_group = required_groups 94 | .iter() 95 | .any(|group| user.groups.iter().any(|ug| ug == &**group)); 96 | 97 | if has_required_group { 98 | next.run(request).await 99 | } else { 100 | ErrorResponse::forbidden("Insufficient permissions - required group not found") 101 | .into_response() 102 | } 103 | }) 104 | } 105 | } 106 | 107 | /// Middleware that requires superadmin group 108 | pub async fn require_superadmin( 109 | Extension(user): Extension<AuthenticatedUser>, 110 | request: Request, 111 | next: Next, 112 | ) -> Response { 113 | if user.groups.contains(&"superadmin".to_string()) { 114 | next.run(request).await 115 | } else { 116 | ErrorResponse::forbidden("Superadmin access required").into_response() 117 | } 118 | } 119 | 120 | /// Extract Bearer token from Authorization header or cookies 121 | /// 122 | /// FIXED: Safe cookie parsing using strip_prefix instead of direct indexing 123 | fn extract_bearer_token(headers: &HeaderMap)
-> Option<String> { 124 | // First try Authorization header 125 | if let Some(auth_header) = headers.get("authorization") 126 | && let Ok(auth_str) = auth_header.to_str() 127 | && auth_str.starts_with("Bearer ") 128 | && auth_str.len() > 7 129 | { 130 | return Some(auth_str[7..].to_string()); 131 | } 132 | 133 | // Then try cookies - SAFE PARSING 134 | if let Some(cookie_header) = headers.get("cookie") 135 | && let Ok(cookie_str) = cookie_header.to_str() 136 | { 137 | for cookie in cookie_str.split(';') { 138 | let cookie = cookie.trim(); 139 | 140 | // Use strip_prefix instead of direct indexing 141 | if let Some(token_value) = cookie.strip_prefix("jwt_token=") { 142 | // Only return if there's actually a value 143 | if !token_value.is_empty() { 144 | return Some(token_value.to_string()); 145 | } 146 | } 147 | } 148 | } 149 | 150 | None 151 | } 152 | 153 | #[cfg(test)] 154 | mod tests { 155 | use super::*; 156 | use axum::http::{HeaderMap, HeaderValue}; 157 | 158 | #[test] 159 | fn test_extract_bearer_token_from_header() { 160 | let mut headers = HeaderMap::new(); 161 | headers.insert("authorization", HeaderValue::from_static("Bearer test123")); 162 | 163 | let token = extract_bearer_token(&headers); 164 | assert_eq!(token, Some("test123".to_string())); 165 | } 166 | 167 | #[test] 168 | fn test_extract_bearer_token_from_cookie() { 169 | let mut headers = HeaderMap::new(); 170 | headers.insert( 171 | "cookie", 172 | HeaderValue::from_static("jwt_token=test123; other=value"), 173 | ); 174 | 175 | let token = extract_bearer_token(&headers); 176 | assert_eq!(token, Some("test123".to_string())); 177 | } 178 | 179 | #[test] 180 | fn test_extract_bearer_token_from_cookie_only() { 181 | let mut headers = HeaderMap::new(); 182 | headers.insert("cookie", HeaderValue::from_static("jwt_token=abc123")); 183 | 184 | let token = extract_bearer_token(&headers); 185 | assert_eq!(token, Some("abc123".to_string())); 186 | } 187 | 188 | #[test] 189 | fn test_extract_bearer_token_empty_cookie() { 190 | let mut headers = HeaderMap::new(); 191 | headers.insert("cookie", HeaderValue::from_static("jwt_token=")); 192 | 193 | let token = extract_bearer_token(&headers); 194 | assert_eq!(token, None); 195 | } 196 | 197 | #[test] 198 | fn test_extract_bearer_token_missing() { 199 | let headers = HeaderMap::new(); 200 | let token = extract_bearer_token(&headers); 201 | assert_eq!(token, None); 202 | } 203 | 204 | #[test] 205 | fn test_extract_bearer_token_invalid_format() { 206 | let mut headers = HeaderMap::new(); 207 | headers.insert("authorization", HeaderValue::from_static("Basic abc123")); 208 | 209 | let token = extract_bearer_token(&headers); 210 | assert_eq!(token, None); 211 | } 212 | 213 | #[test] 214 | fn test_extract_bearer_token_bearer_too_short() { 215 | let mut headers = HeaderMap::new(); 216 | headers.insert("authorization", HeaderValue::from_static("Bearer ")); 217 | 218 | let token = extract_bearer_token(&headers); 219 | assert_eq!(token, None); 220 | } 221 | 222 | #[test] 223 | fn test_authenticated_user_from_claims() { 224 | let claims = Claims { 225 | sub: "123".to_string(), 226 | email: "test@example.com".to_string(), 227 | username: "testuser".to_string(), 228 | groups: vec!["users".to_string(), "admin".to_string()], 229 | exp: 1234567890, 230 | iat: 1234567890, 231 | iss: "keyrunes".to_string(), 232 | }; 233 | 234 | let user = AuthenticatedUser::from(claims); 235 | assert_eq!(user.user_id, 123); 236 | assert_eq!(user.email, "test@example.com"); 237 | assert_eq!(user.username, "testuser"); 238 | assert_eq!(user.groups, vec!["users", "admin"]); 239 | } 240 | } 241 | --------------------------------------------------------------------------------
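// -----------------------------------------------------------------------------
// [Editor's note] Illustrative sketch, not a file from this repository: one way
// the middleware above can be attached to an axum Router. The route path, the
// stub handler, and the layering are placeholders; the crate's real router
// setup is not shown in this excerpt. Layers added later run first, so the
// Extension layer feeds require_auth, which inserts AuthenticatedUser before
// require_superadmin checks it.
// -----------------------------------------------------------------------------
use axum::{middleware, routing::get, Extension, Router};
use std::sync::Arc;
use keyrunes::handler::auth::{require_auth, require_superadmin};
use keyrunes::services::jwt_service::JwtService;

async fn admin_users_stub() -> &'static str {
    "superadmin only"
}

fn admin_router(jwt: Arc<JwtService>) -> Router {
    Router::new()
        .route("/api/admin/users", get(admin_users_stub))
        .route_layer(middleware::from_fn(require_superadmin))
        .route_layer(middleware::from_fn(require_auth))
        .layer(Extension(jwt))
}
// -----------------------------------------------------------------------------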
/src/api/health.rs: -------------------------------------------------------------------------------- 1 | use axum::{Json, extract::Extension, http::StatusCode, response::IntoResponse}; 2 | use chrono::Utc; 3 | use password_hash::rand_core::OsRng; 4 | use serde::Serialize; 5 | use sqlx::{PgPool, Row}; 6 | use std::time::SystemTime; 7 | 8 | #[derive(Serialize)] 9 | pub struct HealthResponse { 10 | pub status: String, 11 | pub timestamp: String, 12 | pub version: String, 13 | pub uptime_seconds: u64, 14 | pub database: DatabaseHealth, 15 | pub services: ServicesHealth, 16 | } 17 | 18 | #[derive(Serialize)] 19 | pub struct DatabaseHealth { 20 | pub status: String, 21 | pub response_time_ms: Option<u64>, 22 | pub active_connections: Option<i32>, 23 | } 24 | 25 | #[derive(Serialize)] 26 | pub struct ServicesHealth { 27 | pub jwt_service: String, 28 | pub password_hashing: String, 29 | } 30 | 31 | static START_TIME: std::sync::OnceLock<SystemTime> = std::sync::OnceLock::new(); 32 | 33 | pub fn init_health_check() { 34 | START_TIME.set(SystemTime::now()).ok(); 35 | } 36 | 37 | // GET /api/health - Health check endpoint 38 | pub async fn health_check(Extension(pool): Extension<PgPool>) -> impl IntoResponse { 39 | let start_time = START_TIME.get().copied().unwrap_or_else(SystemTime::now); 40 | let uptime = SystemTime::now() 41 | .duration_since(start_time) 42 | .unwrap_or_default() 43 | .as_secs(); 44 | 45 | // Check database health 46 | let db_health = check_database_health(&pool).await; 47 | 48 | // Check other services 49 | let services_health = check_services_health(); 50 | 51 | // Determine overall status 52 | let overall_status = if db_health.status == "healthy" 53 | && services_health.jwt_service == "healthy" 54 | && services_health.password_hashing == "healthy" 55 | { 56 | "healthy" 57 | } else { 58 | "unhealthy" 59 | }; 60 | 61 | let response = HealthResponse { 62 | status: overall_status.to_string(), 63 | timestamp: Utc::now().to_rfc3339(), 64 | version: env!("CARGO_PKG_VERSION").to_string(), 65 | uptime_seconds: uptime, 66 | database: db_health, 67 | services: services_health, 68 | }; 69 | 70 | let status_code = if overall_status == "healthy" { 71 | StatusCode::OK 72 | } else { 73 | StatusCode::SERVICE_UNAVAILABLE 74 | }; 75 | 76 | (status_code, Json(response)) 77 | } 78 | 79 | // GET /api/health/ready - Readiness probe 80 | pub async fn readiness_check(Extension(pool): Extension<PgPool>) -> impl IntoResponse { 81 | let db_result = sqlx::query("SELECT 1 as health_check") 82 | .fetch_one(&pool) 83 | .await; 84 | 85 | match db_result { 86 | Ok(_) => ( 87 | StatusCode::OK, 88 | Json(serde_json::json!({ 89 | "status": "ready", 90 | "timestamp": Utc::now().to_rfc3339() 91 | })), 92 | ), 93 | Err(_) => ( 94 | StatusCode::SERVICE_UNAVAILABLE, 95 | Json(serde_json::json!({ 96 | "status": "not_ready", 97 | "timestamp": Utc::now().to_rfc3339(), 98 | "error": "database_connection_failed" 99 | })), 100 | ), 101 | } 102 | } 103 | 104 | // GET /api/health/live - Liveness probe 105 | pub async fn liveness_check() -> impl IntoResponse { 106 | ( 107 | StatusCode::OK, 108 | Json(serde_json::json!({ 109 | "status": "alive", 110 | "timestamp": Utc::now().to_rfc3339(), 111 | "version": env!("CARGO_PKG_VERSION") 112 | })), 113 | ) 114 | } 115 | 116 | async fn check_database_health(pool: &PgPool) -> DatabaseHealth { 117 | let start = SystemTime::now(); 118 | 119 | let query_result =
sqlx::query("SELECT 1 as health_check, count(*) as connection_count FROM pg_stat_activity WHERE state = 'active'") 120 | .fetch_one(pool) 121 | .await; 122 | 123 | let response_time = SystemTime::now() 124 | .duration_since(start) 125 | .unwrap_or_default() 126 | .as_millis() as u64; 127 | 128 | match query_result { 129 | Ok(row) => { 130 | let connection_count: i64 = row.try_get("connection_count").unwrap_or(0); 131 | DatabaseHealth { 132 | status: "healthy".to_string(), 133 | response_time_ms: Some(response_time), 134 | active_connections: Some(connection_count as i32), 135 | } 136 | } 137 | Err(_) => DatabaseHealth { 138 | status: "unhealthy".to_string(), 139 | response_time_ms: Some(response_time), 140 | active_connections: None, 141 | }, 142 | } 143 | } 144 | 145 | fn check_services_health() -> ServicesHealth { 146 | // Test JWT service 147 | let jwt_status = match test_jwt_service() { 148 | Ok(_) => "healthy", 149 | Err(_) => "unhealthy", 150 | }; 151 | 152 | // Test password hashing 153 | let password_status = match test_password_hashing() { 154 | Ok(_) => "healthy", 155 | Err(_) => "unhealthy", 156 | }; 157 | 158 | ServicesHealth { 159 | jwt_service: jwt_status.to_string(), 160 | password_hashing: password_status.to_string(), 161 | } 162 | } 163 | 164 | // Made public for tests 165 | pub fn test_jwt_service() -> Result<(), Box<dyn std::error::Error>> { 166 | use crate::services::jwt_service::JwtService; 167 | 168 | let jwt_service = JwtService::new("0123456789ABCDEF0123456789ABCDEF"); 169 | let token = jwt_service.generate_token(1, "test@example.com", "test", vec![])?; 170 | jwt_service.verify_token(&token)?; 171 | Ok(()) 172 | } 173 | 174 | // Made public for tests 175 | pub fn test_password_hashing() -> Result<(), Box<dyn std::error::Error>> { 176 | use argon2::{Argon2, password_hash::PasswordHasher}; 177 | use password_hash::{PasswordHash, PasswordVerifier, SaltString}; 178 | use std::io; 179 | 180 | let password = "test_password"; 181 | 182 | let salt = SaltString::generate(&mut OsRng); 183 | let argon2 = Argon2::default(); 184 | 185 | let hash = argon2 186 | .hash_password(password.as_bytes(), &salt) 187 | .map_err(|e| Box::new(io::Error::other(format!("password hash error: {}", e))))?; 188 | 189 | let binding = hash.to_string(); 190 | let parsed_hash = PasswordHash::new(&binding).map_err(|e| { 191 | Box::new(io::Error::other(format!( 192 | "password hash parse error: {}", 193 | e 194 | ))) 195 | })?; 196 | 197 | argon2 198 | .verify_password(password.as_bytes(), &parsed_hash) 199 | .map_err(|e| Box::new(io::Error::other(format!("password verify error: {}", e))))?; 200 | 201 | Ok(()) 202 | } 203 | 204 | #[cfg(test)] 205 | mod tests { 206 | use super::*; 207 | 208 | #[test] 209 | fn test_jwt_service_health() { 210 | let result = test_jwt_service(); 211 | assert!(result.is_ok()); 212 | } 213 | 214 | #[test] 215 | fn test_password_hashing_health() { 216 | let result = test_password_hashing(); 217 | assert!(result.is_ok()); 218 | } 219 | 220 | #[tokio::test] 221 | async fn test_services_health() { 222 | let health = check_services_health(); 223 | assert_eq!(health.jwt_service, "healthy"); 224 | assert_eq!(health.password_hashing, "healthy"); 225 | } 226 | 227 | #[test] 228 | fn test_health_response_serialization() { 229 | let health = HealthResponse { 230 | status: "healthy".to_string(), 231 | timestamp: "2025-01-01T00:00:00Z".to_string(), 232 | version: "0.1.0".to_string(), 233 | uptime_seconds: 3600, 234 | database: DatabaseHealth { 235 | status: "healthy".to_string(), 236 | response_time_ms: Some(10), 237 |
active_connections: Some(5), 238 | }, 239 | services: ServicesHealth { 240 | jwt_service: "healthy".to_string(), 241 | password_hashing: "healthy".to_string(), 242 | }, 243 | }; 244 | 245 | let json = serde_json::to_string(&health).unwrap(); 246 | assert!(json.contains("healthy")); 247 | assert!(json.contains("0.1.0")); 248 | } 249 | } 250 | -------------------------------------------------------------------------------- /tests/sqlx_impl_tests.rs: -------------------------------------------------------------------------------- 1 | use keyrunes::repository::sqlx_impl::PgUserRepository; 2 | use keyrunes::repository::{NewUser, UserRepository}; 3 | use sqlx::{PgPool, migrate::Migrator}; 4 | use std::env; 5 | use url::Url; 6 | use uuid::Uuid; 7 | 8 | static MIGRATOR: Migrator = sqlx::migrate!("./migrations"); 9 | 10 | // Setup test database 11 | async fn setup_test_db() -> (PgPool, String) { 12 | let admin_url = env::var("TEST_DATABASE_URL") 13 | .unwrap_or_else(|_| "postgres://postgres:password@localhost:5432/postgres".to_string()); 14 | 15 | let admin_pool = PgPool::connect(&admin_url).await.unwrap(); 16 | 17 | let db_name = format!("test_db_{}", Uuid::new_v4().to_string().replace("-", "_")); 18 | sqlx::query(&format!(r#"CREATE DATABASE "{}""#, db_name)) 19 | .execute(&admin_pool) 20 | .await 21 | .unwrap(); 22 | 23 | let mut url = Url::parse(&admin_url).unwrap(); 24 | url.set_path(&db_name); 25 | let test_db_url = url.as_str().to_string(); 26 | 27 | let pool = PgPool::connect(&test_db_url).await.unwrap(); 28 | 29 | // Run migrations 30 | MIGRATOR.run(&pool).await.unwrap(); 31 | 32 | (pool, db_name) 33 | } 34 | 35 | async fn teardown_test_db(db_name: String) { 36 | let admin_url = env::var("TEST_DATABASE_URL") 37 | .unwrap_or_else(|_| "postgres://postgres:password@localhost:5432/postgres".to_string()); 38 | let admin_pool = PgPool::connect(&admin_url).await.unwrap(); 39 | 40 | // Revoke connections 41 | sqlx::query(&format!( 42 | "REVOKE CONNECT ON DATABASE \"{}\" FROM PUBLIC;", 43 | db_name 44 | )) 45 | .execute(&admin_pool) 46 | .await 47 | .unwrap(); 48 | 49 | // Terminate existing connections 50 | sqlx::query(&format!( 51 | "SELECT pg_terminate_backend(pid) FROM pg_stat_activity WHERE datname='{}';", 52 | db_name 53 | )) 54 | .execute(&admin_pool) 55 | .await 56 | .unwrap(); 57 | 58 | // Drop database 59 | sqlx::query(&format!("DROP DATABASE \"{}\";", db_name)) 60 | .execute(&admin_pool) 61 | .await 62 | .unwrap(); 63 | } 64 | 65 | #[tokio::test] 66 | #[ignore] 67 | async fn test_insert_and_find_user() { 68 | // Setup 69 | let (pool, db_name) = setup_test_db().await; 70 | let repo = PgUserRepository::new(pool.clone()); 71 | 72 | let new_user = NewUser { 73 | external_id: Uuid::new_v4(), 74 | email: "john@test.com".to_string(), 75 | username: "johndoe".to_string(), 76 | password_hash: "hashed_password".to_string(), 77 | first_login: false, // Added missing field 78 | }; 79 | 80 | // Act - Insert user 81 | let user = repo.insert_user(new_user.clone()).await.unwrap(); 82 | 83 | // Assert - Check inserted user 84 | assert_eq!(user.email, new_user.email); 85 | assert_eq!(user.username, new_user.username); 86 | assert_eq!(user.first_login, new_user.first_login); 87 | 88 | // Test find by email 89 | let found_by_email = repo.find_by_email("john@test.com").await.unwrap().unwrap(); 90 | assert_eq!(found_by_email.email, new_user.email); 91 | assert_eq!(found_by_email.username, new_user.username); 92 | 93 | // Test find by username 94 | let found_by_username = 
repo.find_by_username("johndoe").await.unwrap().unwrap(); 95 | assert_eq!(found_by_username.email, new_user.email); 96 | assert_eq!(found_by_username.username, new_user.username); 97 | 98 | // Test find by id 99 | let found_by_id = repo.find_by_id(user.user_id).await.unwrap().unwrap(); 100 | assert_eq!(found_by_id.email, new_user.email); 101 | assert_eq!(found_by_id.username, new_user.username); 102 | 103 | // Cleanup 104 | teardown_test_db(db_name).await; 105 | } 106 | 107 | #[tokio::test] 108 | #[ignore] 109 | async fn test_update_user_password() { 110 | let (pool, db_name) = setup_test_db().await; 111 | let repo = PgUserRepository::new(pool.clone()); 112 | 113 | // Insert a user 114 | let new_user = NewUser { 115 | external_id: Uuid::new_v4(), 116 | email: "password@test.com".to_string(), 117 | username: "passworduser".to_string(), 118 | password_hash: "old_hash".to_string(), 119 | first_login: true, 120 | }; 121 | 122 | let user = repo.insert_user(new_user).await.unwrap(); 123 | 124 | // Update password 125 | repo.update_user_password(user.user_id, "new_hash") 126 | .await 127 | .unwrap(); 128 | 129 | // Verify password was updated 130 | let updated_user = repo.find_by_id(user.user_id).await.unwrap().unwrap(); 131 | assert_eq!(updated_user.password_hash, "new_hash"); 132 | 133 | teardown_test_db(db_name).await; 134 | } 135 | 136 | #[tokio::test] 137 | #[ignore] 138 | async fn test_set_first_login() { 139 | let (pool, db_name) = setup_test_db().await; 140 | let repo = PgUserRepository::new(pool.clone()); 141 | 142 | // Insert a user with first_login = true 143 | let new_user = NewUser { 144 | external_id: Uuid::new_v4(), 145 | email: "firstlogin@test.com".to_string(), 146 | username: "firstloginuser".to_string(), 147 | password_hash: "hash".to_string(), 148 | first_login: true, 149 | }; 150 | 151 | let user = repo.insert_user(new_user).await.unwrap(); 152 | assert!(user.first_login); 153 | 154 | // Set first_login to false 155 | repo.set_first_login(user.user_id, false).await.unwrap(); 156 | 157 | // Verify first_login was updated 158 | let updated_user = repo.find_by_id(user.user_id).await.unwrap().unwrap(); 159 | assert!(!updated_user.first_login); 160 | 161 | teardown_test_db(db_name).await; 162 | } 163 | 164 | #[tokio::test] 165 | #[ignore] 166 | async fn test_duplicate_email() { 167 | let (pool, db_name) = setup_test_db().await; 168 | let repo = PgUserRepository::new(pool.clone()); 169 | 170 | let new_user = NewUser { 171 | external_id: Uuid::new_v4(), 172 | email: "duplicate@test.com".to_string(), 173 | username: "user1".to_string(), 174 | password_hash: "hash".to_string(), 175 | first_login: false, 176 | }; 177 | 178 | // First insert should succeed 179 | repo.insert_user(new_user.clone()).await.unwrap(); 180 | 181 | // Second insert with same email but different username should fail 182 | let duplicate_user = NewUser { 183 | external_id: Uuid::new_v4(), 184 | email: "duplicate@test.com".to_string(), 185 | username: "user2".to_string(), 186 | password_hash: "hash".to_string(), 187 | first_login: false, 188 | }; 189 | 190 | let result = repo.insert_user(duplicate_user).await; 191 | assert!(result.is_err()); 192 | 193 | teardown_test_db(db_name).await; 194 | } 195 | 196 | #[tokio::test] 197 | #[ignore] 198 | async fn test_duplicate_username() { 199 | let (pool, db_name) = setup_test_db().await; 200 | let repo = PgUserRepository::new(pool.clone()); 201 | 202 | let new_user = NewUser { 203 | external_id: Uuid::new_v4(), 204 | email: "user1@test.com".to_string(), 205 | username: "duplicateusername".to_string(), 206 | password_hash: "hash".to_string(), 207 | first_login: false, 208 | }; 209 | 210 | // First insert should succeed 211 | repo.insert_user(new_user.clone()).await.unwrap(); 212 | 213 | // Second insert with same username but different email should fail 214 | let duplicate_user = NewUser { 215 | external_id: Uuid::new_v4(), 216 | email: "user2@test.com".to_string(), 217 | username: "duplicateusername".to_string(), 218 | password_hash: "hash".to_string(), 219 | first_login: false, 220 | }; 221 | 222 | let result = repo.insert_user(duplicate_user).await; 223 | assert!(result.is_err()); 224 | 225 | teardown_test_db(db_name).await; 226 | } 227 | 228 | #[tokio::test] 229 | #[ignore] 230 | async fn test_case_insensitive_email() { 231 | let (pool, db_name) = setup_test_db().await; 232 | let repo = PgUserRepository::new(pool.clone()); 233 | 234 | let new_user = NewUser { 235 | external_id: Uuid::new_v4(), 236 | email: "CaseTest@Test.com".to_string(), 237 | username: "caseuser".to_string(), 238 | password_hash: "hash".to_string(), 239 | first_login: false, 240 | }; 241 | 242 | repo.insert_user(new_user.clone()).await.unwrap(); 243 | 244 | // Should find user with different case email 245 | let found = repo.find_by_email("casetest@test.com").await.unwrap(); 246 | assert!(found.is_some()); 247 | assert_eq!(found.unwrap().username, "caseuser"); 248 | 249 | teardown_test_db(db_name).await; 250 | } 251 | -------------------------------------------------------------------------------- /src/handler/errors.rs: -------------------------------------------------------------------------------- 1 | use axum::extract::Request; 2 | use axum::http::{HeaderMap, StatusCode}; 3 | use axum::response::{Html, IntoResponse, Json, Response}; 4 | use serde::Serialize; 5 | 6 | /// Standard error response structure 7 | #[derive(Debug, Serialize, Clone)] 8 | pub struct ErrorResponse { 9 | pub error: String, 10 | pub message: String, 11 | pub status_code: u16, 12 | } 13 | 14 | #[allow(dead_code)] 15 | impl ErrorResponse { 16 | /// Create a new error response 17 | pub fn new(status: StatusCode, message: impl Into<String>) -> Self { 18 | Self { 19 | error: status 20 | .canonical_reason() 21 | .unwrap_or("Unknown Error") 22 | .to_string(), 23 | message: message.into(), 24 | status_code: status.as_u16(), 25 | } 26 | } 27 | 28 | /// Create a 400 Bad Request error 29 | pub fn bad_request(message: impl Into<String>) -> Self { 30 | Self::new(StatusCode::BAD_REQUEST, message) 31 | } 32 | 33 | /// Create a 401 Unauthorized error 34 | pub fn unauthorized(message: impl Into<String>) -> Self { 35 | Self::new(StatusCode::UNAUTHORIZED, message) 36 | } 37 | 38 | /// Create a 403 Forbidden error 39 | pub fn forbidden(message: impl Into<String>) -> Self { 40 | Self::new(StatusCode::FORBIDDEN, message) 41 | } 42 | 43 | /// Create a 404 Not Found error 44 | pub fn not_found(message: impl Into<String>) -> Self { 45 | Self::new(StatusCode::NOT_FOUND, message) 46 | } 47 | 48 | /// Create a 500 Internal Server Error 49 | pub fn internal_server_error(message: impl Into<String>) -> Self { 50 | Self::new(StatusCode::INTERNAL_SERVER_ERROR, message) 51 | } 52 | } 53 | 54 | impl IntoResponse for ErrorResponse { 55 | fn into_response(self) -> Response { 56 | let status = 57 | StatusCode::from_u16(self.status_code).unwrap_or(StatusCode::INTERNAL_SERVER_ERROR); 58 | 59 | (status, Json(self)).into_response() 60 | } 61 | } 62 | 63 | /// Check if request is from API (wants JSON) or browser (wants HTML) 64 | fn wants_json(headers: &HeaderMap) -> bool { 65 | headers 66 |
.get("accept") 67 | .and_then(|v| v.to_str().ok()) 68 | .map(|accept| accept.contains("application/json") || accept.contains("*/json")) 69 | .unwrap_or(false) 70 | } 71 | 72 | /// Check if the request path is an API route 73 | fn is_api_route(path: &str) -> bool { 74 | path.starts_with("/api/") 75 | } 76 | 77 | /// Smart 404 handler - returns JSON for API routes, HTML for pages 78 | pub async fn handler_404(req: Request) -> impl IntoResponse { 79 | let uri = req.uri().clone(); 80 | let path = uri.path(); 81 | let headers = req.headers().clone(); 82 | 83 | // API routes always return JSON 84 | if is_api_route(path) || wants_json(&headers) { 85 | return ErrorResponse::not_found("The requested resource was not found").into_response(); 86 | } 87 | 88 | // Browser requests get HTML 404 page 89 | let html = r#" 90 | 91 | 92 | 93 | 94 | 95 | 404 - Page Not Found 96 | 175 | 176 | 177 |
178 |
🔍
179 |
404
180 |

Page Not Found

181 |

182 | Oops! The page you're looking for doesn't exist. 183 | It might have been moved or deleted. 184 |

185 | 189 |
190 | 191 | 192 | "#; 193 | 194 | (StatusCode::NOT_FOUND, Html(html)).into_response() 195 | } 196 | 197 | /// Handler for 400 Bad Request errors 198 | #[allow(dead_code)] 199 | pub async fn handler_400() -> impl IntoResponse { 200 | ErrorResponse::bad_request("Bad request") 201 | } 202 | 203 | /// Handler for 401 Unauthorized errors 204 | #[allow(dead_code)] 205 | pub async fn handler_401() -> impl IntoResponse { 206 | ErrorResponse::unauthorized("Unauthorized - Authentication required") 207 | } 208 | 209 | /// Handler for 403 Forbidden errors 210 | #[allow(dead_code)] 211 | pub async fn handler_403() -> impl IntoResponse { 212 | ErrorResponse::forbidden("Forbidden - Insufficient permissions") 213 | } 214 | 215 | /// Handler for 500 Internal Server Error 216 | #[allow(dead_code)] 217 | pub async fn handler_500() -> impl IntoResponse { 218 | ErrorResponse::internal_server_error("Internal server error occurred") 219 | } 220 | 221 | #[cfg(test)] 222 | mod tests { 223 | use super::*; 224 | use axum::body::Body; 225 | use axum::http::Request as HttpRequest; 226 | 227 | #[tokio::test] 228 | async fn test_error_handlers() { 229 | // Test 404 handler 230 | let req = HttpRequest::builder() 231 | .uri("/nonexistent") 232 | .body(Body::empty()) 233 | .unwrap(); 234 | let response = handler_404(req).await.into_response(); 235 | assert_eq!(response.status(), StatusCode::NOT_FOUND); 236 | 237 | // Test 400 handler 238 | let response = handler_400().await.into_response(); 239 | assert_eq!(response.status(), StatusCode::BAD_REQUEST); 240 | 241 | // Test 401 handler 242 | let response = handler_401().await.into_response(); 243 | assert_eq!(response.status(), StatusCode::UNAUTHORIZED); 244 | 245 | // Test 403 handler 246 | let response = handler_403().await.into_response(); 247 | assert_eq!(response.status(), StatusCode::FORBIDDEN); 248 | 249 | // Test 500 handler 250 | let response = handler_500().await.into_response(); 251 | assert_eq!(response.status(), StatusCode::INTERNAL_SERVER_ERROR); 252 | } 253 | #[tokio::test] 254 | async fn test_404_includes_path() { 255 | let req = HttpRequest::builder() 256 | .uri("/api/nonexistent") 257 | .body(Body::empty()) 258 | .unwrap(); 259 | let response = handler_404(req).await.into_response(); 260 | 261 | assert_eq!(response.status(), StatusCode::NOT_FOUND); 262 | 263 | let body_bytes = axum::body::to_bytes(response.into_body(), usize::MAX) 264 | .await 265 | .unwrap(); 266 | let body_str = String::from_utf8(body_bytes.to_vec()).unwrap(); 267 | 268 | assert!(body_str.contains("not found") || body_str.contains("404")); 269 | } 270 | } 271 | -------------------------------------------------------------------------------- /src/api/auth.rs: -------------------------------------------------------------------------------- 1 | use axum::{ 2 | extract::{Extension, Json, Query}, 3 | http::{HeaderMap, StatusCode}, 4 | response::IntoResponse, 5 | }; 6 | use serde::{Deserialize, Serialize}; 7 | use std::sync::Arc; 8 | 9 | use crate::handler::errors::ErrorResponse; 10 | use crate::repository::sqlx_impl::{ 11 | PgGroupRepository, PgPasswordResetRepository, PgSettingsRepository, PgUserRepository, 12 | }; 13 | use crate::services::user_service::{ 14 | ChangePasswordRequest, ForgotPasswordRequest, RegisterRequest, ResetPasswordRequest, 15 | UserService, 16 | }; 17 | 18 | #[derive(Deserialize)] 19 | pub struct RegisterApi { 20 | pub email: String, 21 | pub username: String, 22 | pub password: String, 23 | } 24 | 25 | #[derive(Deserialize)] 26 | pub struct LoginApi { 27 | pub identity: 
String,
    pub password: String,
}

#[derive(Deserialize)]
pub struct RefreshTokenRequest {
    pub token: String,
}

#[derive(Serialize)]
pub struct RefreshTokenResponse {
    pub token: String,
}

#[derive(Deserialize)]
pub struct ForgotPasswordApi {
    pub email: String,
}

#[derive(Serialize)]
pub struct ForgotPasswordResponse {
    pub message: String,
    pub reset_url: String,
}

#[derive(Deserialize)]
pub struct ResetPasswordQuery {
    pub forgot_password: String,
}

#[derive(Deserialize)]
pub struct ResetPasswordApi {
    pub token: String,
    pub new_password: String,
}

#[derive(Serialize)]
pub struct MessageResponse {
    pub message: String,
}

type UserServiceType = UserService<
    PgUserRepository,
    PgGroupRepository,
    PgPasswordResetRepository,
    PgSettingsRepository,
>;

/// POST /api/register
pub async fn register_api(
    Extension(service): Extension<Arc<UserServiceType>>,
    Json(payload): Json<RegisterApi>,
) -> impl IntoResponse {
    let req = RegisterRequest {
        email: payload.email,
        username: payload.username,
        password: payload.password,
        first_login: Some(true),
    };

    match service.register(req).await {
        Ok(auth_response) => (StatusCode::CREATED, Json(auth_response)).into_response(),
        Err(e) => ErrorResponse::bad_request(e.to_string()).into_response(),
    }
}

/// POST /api/login
pub async fn login_api(
    Extension(service): Extension<Arc<UserServiceType>>,
    Json(payload): Json<LoginApi>,
) -> impl IntoResponse {
    match service.login(payload.identity, payload.password).await {
        Ok(auth_response) => (StatusCode::OK, Json(auth_response)).into_response(),
        Err(e) => ErrorResponse::unauthorized(e.to_string()).into_response(),
    }
}

/// POST /api/refresh-token
pub async fn refresh_token_api(
    Extension(service): Extension<Arc<UserServiceType>>,
    Json(payload): Json<RefreshTokenRequest>,
) -> impl IntoResponse {
    match service.refresh_token(&payload.token).await {
        Ok(new_token) => (
            StatusCode::OK,
            Json(RefreshTokenResponse { token: new_token }),
        )
            .into_response(),
        Err(e) => ErrorResponse::unauthorized(e.to_string()).into_response(),
    }
}

/// GET /api/me - Get current user info from JWT token
pub async fn me_api(
    Extension(service): Extension<Arc<UserServiceType>>,
    headers: HeaderMap,
) -> impl IntoResponse {
    let token = match extract_bearer_token(&headers) {
        Some(token) => token,
        None => {
            return ErrorResponse::unauthorized("Missing authorization header").into_response();
        }
    };

    match service.get_user_by_token(&token).await {
        Ok(user) => (StatusCode::OK, Json(user)).into_response(),
        Err(e) => ErrorResponse::unauthorized(e.to_string()).into_response(),
    }
}

/// POST /api/change-password
#[allow(dead_code)]
pub async fn change_password_api(
    Extension(service): Extension<Arc<UserServiceType>>,
    headers: HeaderMap,
    Json(payload): Json<ChangePasswordRequest>,
) -> impl IntoResponse {
    let token = match extract_bearer_token(&headers) {
        Some(token) => token,
        None => {
            return ErrorResponse::unauthorized("Missing authorization header").into_response();
        }
    };

    let user_id = match service.jwt_service.extract_user_id(&token) {
        Ok(id) => id,
        Err(e) => return ErrorResponse::unauthorized(e.to_string()).into_response(),
    };

    match service.change_password(user_id, payload).await {
        Ok(_) => (
            StatusCode::OK,
            Json(MessageResponse {
                message: "Password changed successfully".to_string(),
            }),
        )
            .into_response(),
        Err(e) => ErrorResponse::bad_request(e.to_string()).into_response(),
    }
}

/// POST /api/forgot-password
pub async fn forgot_password_api(
    Extension(service): Extension<Arc<UserServiceType>>,
    Json(payload): Json<ForgotPasswordApi>,
) -> impl IntoResponse {
    let req = ForgotPasswordRequest {
        email: payload.email,
    };

    match service.forgot_password(req).await {
        Ok(token) => {
            let reset_url = format!(
                "{}?forgot_password={}",
                std::env::var("FRONTEND_URL")
                    .unwrap_or_else(|_| "http://localhost:3000/reset-password".to_string()),
                token
            );

            (
                StatusCode::OK,
                Json(ForgotPasswordResponse {
                    message: "Password reset email sent".to_string(),
                    reset_url,
                }),
            )
                .into_response()
        }
        Err(e) => ErrorResponse::bad_request(e.to_string()).into_response(),
    }
}
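// Illustrative example: with FRONTEND_URL unset, the reset_url built above looks like
// "http://localhost:3000/reset-password?forgot_password=<reset-token>"; the
// forgot_password query parameter name matches ResetPasswordQuery, which the
// reset_password_page handler below deserializes.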
/// GET /reset-password?forgot_password=TOKEN - Display reset password form
#[allow(dead_code)]
pub async fn reset_password_page(
    Extension(tmpl): Extension<tera::Tera>,
    Query(params): Query<ResetPasswordQuery>,
) -> impl IntoResponse {
    let mut ctx = tera::Context::new();
    ctx.insert("title", "Reset Password");
    ctx.insert("token", &params.forgot_password);

    match tmpl.render("reset_password.html", &ctx) {
        Ok(body) => (StatusCode::OK, axum::response::Html(body)).into_response(),
        Err(e) => {
            ErrorResponse::internal_server_error(format!("Template error: {}", e)).into_response()
        }
    }
}

/// POST /api/reset-password
pub async fn reset_password_api(
    Extension(service): Extension<Arc<UserServiceType>>,
    Json(payload): Json<ResetPasswordApi>,
) -> impl IntoResponse {
    let req = ResetPasswordRequest {
        token: payload.token,
        new_password: payload.new_password,
    };

    match service.reset_password(req).await {
        Ok(_) => (
            StatusCode::OK,
            Json(MessageResponse {
                message: "Password reset successfully".to_string(),
            }),
        )
            .into_response(),
        Err(e) => ErrorResponse::bad_request(e.to_string()).into_response(),
    }
}

/// Helper function to extract Bearer token from Authorization header
fn extract_bearer_token(headers: &HeaderMap) -> Option<String> {
    let auth_header = headers.get("authorization")?;
    let auth_str = auth_header.to_str().ok()?;

    auth_str.strip_prefix("Bearer ").map(|s| s.to_string())
}

#[cfg(test)]
mod tests {
    use super::*;
    use axum::http::{HeaderMap, HeaderValue};

    #[test]
    fn test_extract_bearer_token() {
        let mut headers = HeaderMap::new();
        headers.insert("authorization", HeaderValue::from_static("Bearer abc123"));

        let token = extract_bearer_token(&headers);
        assert_eq!(token, Some("abc123".to_string()));
    }

    #[test]
    fn test_extract_bearer_token_missing() {
        let headers = HeaderMap::new();
        let token = extract_bearer_token(&headers);
        assert_eq!(token, None);
    }

    #[test]
    fn test_extract_bearer_token_invalid_format() {
        let mut headers = HeaderMap::new();
headers.insert("authorization", HeaderValue::from_static("Basic abc123")); 272 | 273 | let token = extract_bearer_token(&headers); 274 | assert_eq!(token, None); 275 | } 276 | 277 | #[test] 278 | fn test_register_api_payload() { 279 | let payload = RegisterApi { 280 | email: "test@example.com".to_string(), 281 | username: "testuser".to_string(), 282 | password: "password123".to_string(), 283 | }; 284 | 285 | assert_eq!(payload.email, "test@example.com"); 286 | assert_eq!(payload.username, "testuser"); 287 | } 288 | } 289 | -------------------------------------------------------------------------------- /tests/group_service_test.rs: -------------------------------------------------------------------------------- 1 | use anyhow::Result; 2 | use async_trait::async_trait; 3 | use chrono::Utc; 4 | use keyrunes::group_service::{CreateGroupRequest, GroupService}; 5 | use keyrunes::repository::{Group, GroupRepository, NewGroup, Policy}; 6 | use std::sync::{Arc, Mutex}; 7 | use uuid::Uuid; 8 | 9 | struct MockGroupRepository { 10 | groups: Mutex>, 11 | user_groups: Mutex)>>, // (user_id, group_id, assigned_by) 12 | } 13 | 14 | impl MockGroupRepository { 15 | fn new() -> Self { 16 | let groups = Mutex::new(vec![ 17 | Group { 18 | group_id: 1, 19 | external_id: Uuid::new_v4(), 20 | name: "superadmin".to_string(), 21 | description: Some("Superadmin group".to_string()), 22 | created_at: Utc::now(), 23 | updated_at: Utc::now(), 24 | }, 25 | Group { 26 | group_id: 2, 27 | external_id: Uuid::new_v4(), 28 | name: "users".to_string(), 29 | description: Some("Regular users".to_string()), 30 | created_at: Utc::now(), 31 | updated_at: Utc::now(), 32 | }, 33 | ]); 34 | 35 | Self { 36 | groups, 37 | user_groups: Mutex::new(Vec::new()), 38 | } 39 | } 40 | } 41 | 42 | #[async_trait] 43 | impl GroupRepository for MockGroupRepository { 44 | async fn insert_group(&self, new_group: NewGroup) -> Result { 45 | let mut groups = self.groups.lock().unwrap(); 46 | let group = Group { 47 | group_id: (groups.len() + 1) as i64, 48 | external_id: new_group.external_id, 49 | name: new_group.name, 50 | description: new_group.description, 51 | created_at: Utc::now(), 52 | updated_at: Utc::now(), 53 | }; 54 | groups.push(group.clone()); 55 | Ok(group) 56 | } 57 | 58 | async fn find_by_name(&self, name: &str) -> Result> { 59 | let groups = self.groups.lock().unwrap(); 60 | Ok(groups.iter().find(|g| g.name == name).cloned()) 61 | } 62 | 63 | async fn find_by_id(&self, group_id: i64) -> Result> { 64 | let groups = self.groups.lock().unwrap(); 65 | Ok(groups.iter().find(|g| g.group_id == group_id).cloned()) 66 | } 67 | 68 | async fn list_groups(&self) -> Result> { 69 | Ok(self.groups.lock().unwrap().clone()) 70 | } 71 | 72 | async fn assign_user_to_group( 73 | &self, 74 | user_id: i64, 75 | group_id: i64, 76 | assigned_by: Option, 77 | ) -> Result<()> { 78 | let mut user_groups = self.user_groups.lock().unwrap(); 79 | 80 | if user_groups 81 | .iter() 82 | .any(|(uid, gid, _)| *uid == user_id && *gid == group_id) 83 | { 84 | return Err(anyhow::anyhow!("User already assigned to this group")); 85 | } 86 | 87 | user_groups.push((user_id, group_id, assigned_by)); 88 | Ok(()) 89 | } 90 | 91 | async fn remove_user_from_group(&self, user_id: i64, group_id: i64) -> Result<()> { 92 | let mut user_groups = self.user_groups.lock().unwrap(); 93 | user_groups.retain(|(uid, gid, _)| !(*uid == user_id && *gid == group_id)); 94 | Ok(()) 95 | } 96 | 97 | async fn get_group_policies(&self, _group_id: i64) -> Result> { 98 | Ok(Vec::new()) 99 | } 100 | } 
101 | 102 | #[tokio::test] 103 | async fn test_create_group_success() { 104 | let repo = Arc::new(MockGroupRepository::new()); 105 | let service = GroupService::new(repo); 106 | 107 | let req = CreateGroupRequest { 108 | name: "developers".to_string(), 109 | description: Some("Development team".to_string()), 110 | }; 111 | 112 | let result = service.create_group(req).await; 113 | assert!(result.is_ok()); 114 | 115 | let group = result.unwrap(); 116 | assert_eq!(group.name, "developers"); 117 | assert_eq!(group.description, Some("Development team".to_string())); 118 | } 119 | 120 | #[tokio::test] 121 | async fn test_create_group_duplicate_name() { 122 | let repo = Arc::new(MockGroupRepository::new()); 123 | let service = GroupService::new(repo); 124 | 125 | let req = CreateGroupRequest { 126 | name: "superadmin".to_string(), // Already exists in mock 127 | description: Some("Duplicate".to_string()), 128 | }; 129 | 130 | let result = service.create_group(req).await; 131 | assert!(result.is_err()); 132 | assert_eq!(result.unwrap_err().to_string(), "group name already exists"); 133 | } 134 | 135 | #[tokio::test] 136 | async fn test_list_groups() { 137 | let repo = Arc::new(MockGroupRepository::new()); 138 | let service = GroupService::new(repo); 139 | 140 | let result = service.list_groups().await; 141 | assert!(result.is_ok()); 142 | 143 | let groups = result.unwrap(); 144 | assert_eq!(groups.len(), 2); 145 | assert!(groups.iter().any(|g| g.name == "superadmin")); 146 | assert!(groups.iter().any(|g| g.name == "users")); 147 | } 148 | 149 | #[tokio::test] 150 | async fn test_get_group_by_name() { 151 | let repo = Arc::new(MockGroupRepository::new()); 152 | let service = GroupService::new(repo); 153 | 154 | let result = service.get_group_by_name("superadmin").await; 155 | assert!(result.is_ok()); 156 | 157 | let group = result.unwrap(); 158 | assert!(group.is_some()); 159 | assert_eq!(group.unwrap().name, "superadmin"); 160 | } 161 | 162 | #[tokio::test] 163 | async fn test_get_group_by_name_not_found() { 164 | let repo = Arc::new(MockGroupRepository::new()); 165 | let service = GroupService::new(repo); 166 | 167 | let result = service.get_group_by_name("nonexistent").await; 168 | assert!(result.is_ok()); 169 | assert!(result.unwrap().is_none()); 170 | } 171 | 172 | #[tokio::test] 173 | async fn test_get_group_by_id() { 174 | let repo = Arc::new(MockGroupRepository::new()); 175 | let service = GroupService::new(repo); 176 | 177 | let result = service.get_group_by_id(1).await; 178 | assert!(result.is_ok()); 179 | 180 | let group = result.unwrap(); 181 | assert!(group.is_some()); 182 | assert_eq!(group.unwrap().group_id, 1); 183 | } 184 | 185 | #[tokio::test] 186 | async fn test_assign_user_to_group_success() { 187 | let repo = Arc::new(MockGroupRepository::new()); 188 | let service = GroupService::new(repo.clone()); 189 | 190 | let result = service.assign_user_to_group(100, 1, Some(1)).await; 191 | assert!(result.is_ok()); 192 | 193 | let user_groups = repo.user_groups.lock().unwrap(); 194 | assert!( 195 | user_groups 196 | .iter() 197 | .any(|(uid, gid, _)| *uid == 100 && *gid == 1) 198 | ); 199 | } 200 | 201 | #[tokio::test] 202 | async fn test_assign_user_to_group_nonexistent_group() { 203 | let repo = Arc::new(MockGroupRepository::new()); 204 | let service = GroupService::new(repo); 205 | 206 | let result = service.assign_user_to_group(100, 999, Some(1)).await; 207 | assert!(result.is_err()); 208 | assert_eq!(result.unwrap_err().to_string(), "group not found"); 209 | } 210 | 211 | 
#[tokio::test] 212 | async fn test_assign_user_to_group_duplicate() { 213 | let repo = Arc::new(MockGroupRepository::new()); 214 | let service = GroupService::new(repo); 215 | 216 | let result1 = service.assign_user_to_group(100, 1, Some(1)).await; 217 | assert!(result1.is_ok()); 218 | 219 | let result2 = service.assign_user_to_group(100, 1, Some(1)).await; 220 | assert!(result2.is_err()); 221 | assert_eq!( 222 | result2.unwrap_err().to_string(), 223 | "User already assigned to this group" 224 | ); 225 | } 226 | 227 | #[tokio::test] 228 | async fn test_remove_user_from_group() { 229 | let repo = Arc::new(MockGroupRepository::new()); 230 | let service = GroupService::new(repo.clone()); 231 | 232 | service.assign_user_to_group(100, 1, Some(1)).await.unwrap(); 233 | 234 | { 235 | let user_groups = repo.user_groups.lock().unwrap(); 236 | assert_eq!(user_groups.len(), 1); 237 | } 238 | 239 | let result = service.remove_user_from_group(100, 1).await; 240 | assert!(result.is_ok()); 241 | 242 | let user_groups = repo.user_groups.lock().unwrap(); 243 | assert_eq!(user_groups.len(), 0); 244 | } 245 | 246 | #[tokio::test] 247 | async fn test_create_multiple_groups() { 248 | let repo = Arc::new(MockGroupRepository::new()); 249 | let service = GroupService::new(repo); 250 | 251 | let groups = vec![ 252 | ("developers", "Dev team"), 253 | ("qa", "QA team"), 254 | ("support", "Support team"), 255 | ]; 256 | 257 | for (name, desc) in groups { 258 | let req = CreateGroupRequest { 259 | name: name.to_string(), 260 | description: Some(desc.to_string()), 261 | }; 262 | let result = service.create_group(req).await; 263 | assert!(result.is_ok(), "Failed to create group: {}", name); 264 | } 265 | 266 | let all_groups = service.list_groups().await.unwrap(); 267 | assert_eq!(all_groups.len(), 5); // 2 default + 3 new 268 | } 269 | 270 | #[tokio::test] 271 | async fn test_assign_multiple_users_to_group() { 272 | let repo = Arc::new(MockGroupRepository::new()); 273 | let service = GroupService::new(repo.clone()); 274 | 275 | for user_id in 100..105 { 276 | let result = service.assign_user_to_group(user_id, 1, Some(1)).await; 277 | assert!(result.is_ok()); 278 | } 279 | 280 | let user_groups = repo.user_groups.lock().unwrap(); 281 | assert_eq!(user_groups.len(), 5); 282 | } 283 | 284 | #[tokio::test] 285 | async fn test_assign_user_to_multiple_groups() { 286 | let repo = Arc::new(MockGroupRepository::new()); 287 | let service = GroupService::new(repo.clone()); 288 | 289 | let result1 = service.assign_user_to_group(100, 1, Some(1)).await; 290 | let result2 = service.assign_user_to_group(100, 2, Some(1)).await; 291 | 292 | assert!(result1.is_ok()); 293 | assert!(result2.is_ok()); 294 | 295 | let user_groups = repo.user_groups.lock().unwrap(); 296 | assert_eq!(user_groups.len(), 2); 297 | assert!( 298 | user_groups 299 | .iter() 300 | .any(|(uid, gid, _)| *uid == 100 && *gid == 1) 301 | ); 302 | assert!( 303 | user_groups 304 | .iter() 305 | .any(|(uid, gid, _)| *uid == 100 && *gid == 2) 306 | ); 307 | } 308 | --------------------------------------------------------------------------------
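Illustrative wiring sketch (not a file from this repository): one way the handlers above
could be composed into an axum Router. The route paths come from the doc comments in
src/api/auth.rs; the helper name build_api_router, the assumption that the service is
constructed elsewhere, and the exact module paths are assumptions, not upstream code.

use std::sync::Arc;

use axum::routing::{get, post};
use axum::{Extension, Router};
use keyrunes::api::auth::{login_api, me_api, refresh_token_api, register_api};
use keyrunes::handler::errors::handler_404;
use keyrunes::repository::sqlx_impl::{
    PgGroupRepository, PgPasswordResetRepository, PgSettingsRepository, PgUserRepository,
};
use keyrunes::services::user_service::UserService;

// Mirrors the private alias in src/api/auth.rs.
type UserServiceType = UserService<
    PgUserRepository,
    PgGroupRepository,
    PgPasswordResetRepository,
    PgSettingsRepository,
>;

// Hypothetical helper: the service is assumed to be built elsewhere (e.g. in main)
// from a PgPool and the Postgres-backed repositories.
fn build_api_router(service: Arc<UserServiceType>) -> Router {
    Router::new()
        .route("/api/register", post(register_api))
        .route("/api/login", post(login_api))
        .route("/api/refresh-token", post(refresh_token_api))
        .route("/api/me", get(me_api))
        // Unmatched paths fall through to the smart 404 handler:
        // JSON for /api/* or Accept: application/json, HTML otherwise.
        .fallback(handler_404)
        // Handlers pull Arc<UserServiceType> out of this request extension.
        .layer(Extension(service))
}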