├── src
├── domain
│ ├── mod.rs
│ └── user.rs
├── views
│ ├── mod.rs
│ └── admin.rs
├── api
│ ├── mod.rs
│ ├── health.rs
│ └── auth.rs
├── handler
│ ├── mod.rs
│ ├── logging.rs
│ ├── auth.rs
│ └── errors.rs
├── services
│ ├── mod.rs
│ ├── email_service.rs
│ ├── jwt_service.rs
│ └── group_service.rs
├── lib.rs
├── migrations
│ └── V1__create_user_table.sql
└── repository
│ └── mod.rs
├── templates
├── welcome.html
├── login.html
├── register.html
├── errors
│ ├── 400.html
│ ├── 404.html
│ ├── 403.html
│ ├── 500.html
│ └── 503.html
├── base.html
├── dashboard.html
├── forgot_password.html
├── reset_password.html
├── change_password.html
└── mail
│ └── password_reset.html
├── hurl.env
├── .sqlx
├── query-df013c8a6d402586dbd9a5dfcafdf7fe15f96ecb8c6e35fa71af4d376ff2af76.json
├── query-68e38d420430ee0328eb899540e835bb84037ba8df7c19aaeccf6b9055d26460.json
├── query-95bf17c9f82725ffbbd1cc66a495d943c7b2ecb1c51f5263f8c1db6566b947b3.json
├── query-7e0e72ac2d7a63e53ce1d3efa53c622aa340567c1d8b6cacce1bc3fec6561bd7.json
├── query-a91c966bc66d1efec23e3a336fe515fa9149901b2882bbbd6eec5772d215927b.json
├── query-55754e1dd7082a06867e10686bd38c123949e4c4c80d98a006d2999e7b365a78.json
├── query-293633706090984c01cc04f1aa9469a915550204c53953c03b457805e004f229.json
├── query-0c20fbbc3563e75fbff5a132f33e2e1d9e9c43e2f84ebfee5b09ab7801943911.json
├── query-4ef9d78aaf530e01409f4f1fa6d8787b3f65600c3b06aa1f693a5a8bfa2bcc99.json
├── query-679d925b63860400f56ac878dda80f5eff5ad329f3dfa8b7ae48aba490cd8b69.json
├── query-dc64e1d25d9ced3a49130cee99f6edc3f70a4917910cf3b76faefc24ac32159d.json
├── query-eea84cb1cff53062066d8c2a098c902e6a5899580bc39d615360588deeef35a9.json
├── query-95114e2b92fa070e175362d2bae7c825a672dd12bc3c6d537315db42c6b6f6b6.json
├── query-21bc65bcf84123a99bb5f8f42e21dfebb3ba946b48640d2635f2252ffb88601d.json
├── query-d3c5ad665db0ed87d615e1eadbb582c46425e047b0e01481d3b2b03a56e8a714.json
├── query-aa7b4532770146c3773d37d3ed41972203b349ae0723d7e81495edfbd5356abd.json
├── query-e6a21b7872a21babe310bc4b54b7af3a4fba450b17c613dd43842a0321d2b901.json
├── query-376f7eb4ea933656da1a27ba0f9c0cecd63a9a09ddb289824a1df2b38265779a.json
├── query-c8232064c4907dca27ec50b16d8a80a2ed39c31dd06cc4a82afea18996ba7736.json
├── query-ef85f457e7fbcb1aad277198b4e984bc7bde2f596a218260d1c9e0df54b1c851.json
├── query-516045641716ad9a84221b2d032c86990f933aea9a10917cdd84d89ccf35f856.json
├── query-9bb01184647b17d479d5a8f8b7dba53c8a5788be0ce3cf92700d8f704ffe2012.json
├── query-9cba992505d6b80cb356909d3e2360385af579fe6eccd1089c96a613989a1caa.json
├── query-2d5c0aa80bc6ab079d56ae8937c4c6691d01edc8f211245d3006c3125e72df34.json
├── query-5c695b62cccea048108b3f63e130a8a187f22a23e35e9a310f1f188e6a75e5a7.json
├── query-470cb7bce44297b903acf0dc0d25aff88ac8f2ec68cbfbc193d9c16fbcc8a303.json
├── query-501c9ab794cb66daddf6c4ebcef8df2f9f696e0ae0f403551cc0bb9c3cd18cc8.json
├── query-f2934589f15af1085b812e01ba8de1ee7a1015dfc86ea082f8d5a98a10ac0613.json
├── query-2c42062a084c2b179b9f84bfcc808f01694dcd8187589bd9c852b4b7853ec821.json
├── query-521cb02df6a1e3a7aa5d633bdbf446f6aabbffa1c0f0acd423783c6369062b3c.json
├── query-f240852fc5c60f82c7a1412a53fe42db6106195d8b2e20e9bd80d378934b34e7.json
└── query-3742b4721561ed076810a5eb757d0d9b21914813e2e468a102e6fb20cf68ee9e.json
├── hurl
├── auth.hurl
├── check_user_role.hurl
├── run_hurl_tests.sh
├── user_roles.hurl
└── admin.hurl
├── docker-compose.yml
├── .env-example
├── .gitignore
├── Cargo.toml
├── migrations
├── 20250819092538_create_users_table.sql
└── 20250908121827_create_groups_and_policies.sql
├── docker-compose-prod-example.yml
├── CHANGELOG.md
├── Dockerfile
├── CODE_OF_CONDUCT.md
├── cliff.toml
├── .github
└── workflows
│ └── ci.yml
├── CONTRIBUTING.md
├── tests
├── admin_view_test.rs
├── sqlx_impl_tests.rs
└── group_service_test.rs
├── README.md
└── Makefile
/src/domain/mod.rs:
--------------------------------------------------------------------------------
1 | pub mod user;
2 |
--------------------------------------------------------------------------------
/src/views/mod.rs:
--------------------------------------------------------------------------------
1 | pub mod admin;
2 | pub mod auth;
3 |
--------------------------------------------------------------------------------
/src/api/mod.rs:
--------------------------------------------------------------------------------
1 | pub mod admin;
2 | pub mod auth;
3 | pub mod health;
4 |
--------------------------------------------------------------------------------
/src/handler/mod.rs:
--------------------------------------------------------------------------------
1 | pub mod auth;
2 | pub mod errors;
3 | pub mod logging;
4 |
--------------------------------------------------------------------------------
/src/services/mod.rs:
--------------------------------------------------------------------------------
1 | pub mod email_service;
2 | pub mod group_service;
3 | pub mod jwt_service;
4 | pub mod policy_service;
5 | pub mod user_service;
6 |
--------------------------------------------------------------------------------
/src/lib.rs:
--------------------------------------------------------------------------------
1 | pub mod api;
2 | pub mod domain;
3 | pub mod handler;
4 | pub mod repository;
5 | pub mod services;
6 | pub mod views;
7 |
8 | // re-exports for ease
9 | pub use repository::*;
10 | pub use services::*;
11 |
--------------------------------------------------------------------------------
/templates/welcome.html:
--------------------------------------------------------------------------------
1 | {% extends "base.html" %}
2 |
3 | {% block content %}
4 |
5 | Bem-vindo, {{ username }}!
6 | Login realizado com sucesso.
7 |
8 | {% endblock %}
9 |
--------------------------------------------------------------------------------
/hurl.env:
--------------------------------------------------------------------------------
 1 | # Replace with the credentials of a user that belongs to the superadmin group
2 | admin_email=hurl@example.com
3 | admin_username="Hurlmanda Whurler"
4 | admin_password="hurl12345"
5 |
6 | group_user_email=grouphurl@example.com
7 | group_user_username="Phurl Whurlker"
8 | group_user_password="hurl12345"
--------------------------------------------------------------------------------
/.sqlx/query-df013c8a6d402586dbd9a5dfcafdf7fe15f96ecb8c6e35fa71af4d376ff2af76.json:
--------------------------------------------------------------------------------
1 | {
2 | "db_name": "PostgreSQL",
3 | "query": "DELETE FROM password_reset_tokens WHERE expires_at < now()",
4 | "describe": {
5 | "columns": [],
6 | "parameters": {
7 | "Left": []
8 | },
9 | "nullable": []
10 | },
11 | "hash": "df013c8a6d402586dbd9a5dfcafdf7fe15f96ecb8c6e35fa71af4d376ff2af76"
12 | }
13 |
--------------------------------------------------------------------------------
/hurl/auth.hurl:
--------------------------------------------------------------------------------
1 | # Register User
2 | POST http://localhost:3000/api/register
3 | Content-Type: application/json
4 |
5 | {
6 | "email": "{{email}}",
7 | "username": "{{username}}",
8 | "password": "{{password}}"
9 | }
10 |
11 | # Login with this user
12 | POST http://localhost:3000/api/login
13 | Content-Type: application/json
14 |
15 | {
16 | "identity": "{{email}}",
17 | "password": "{{password}}"
18 | }
19 |
--------------------------------------------------------------------------------
/.sqlx/query-68e38d420430ee0328eb899540e835bb84037ba8df7c19aaeccf6b9055d26460.json:
--------------------------------------------------------------------------------
1 | {
2 | "db_name": "PostgreSQL",
3 | "query": "UPDATE password_reset_tokens SET used_at = now() WHERE token_id = $1",
4 | "describe": {
5 | "columns": [],
6 | "parameters": {
7 | "Left": [
8 | "Int8"
9 | ]
10 | },
11 | "nullable": []
12 | },
13 | "hash": "68e38d420430ee0328eb899540e835bb84037ba8df7c19aaeccf6b9055d26460"
14 | }
15 |
--------------------------------------------------------------------------------
/.sqlx/query-95bf17c9f82725ffbbd1cc66a495d943c7b2ecb1c51f5263f8c1db6566b947b3.json:
--------------------------------------------------------------------------------
1 | {
2 | "db_name": "PostgreSQL",
3 | "query": "DELETE FROM user_groups WHERE user_id = $1 AND group_id = $2",
4 | "describe": {
5 | "columns": [],
6 | "parameters": {
7 | "Left": [
8 | "Int8",
9 | "Int8"
10 | ]
11 | },
12 | "nullable": []
13 | },
14 | "hash": "95bf17c9f82725ffbbd1cc66a495d943c7b2ecb1c51f5263f8c1db6566b947b3"
15 | }
16 |
--------------------------------------------------------------------------------
/.sqlx/query-7e0e72ac2d7a63e53ce1d3efa53c622aa340567c1d8b6cacce1bc3fec6561bd7.json:
--------------------------------------------------------------------------------
1 | {
2 | "db_name": "PostgreSQL",
3 | "query": "DELETE FROM user_policies WHERE user_id = $1 AND policy_id = $2",
4 | "describe": {
5 | "columns": [],
6 | "parameters": {
7 | "Left": [
8 | "Int8",
9 | "Int8"
10 | ]
11 | },
12 | "nullable": []
13 | },
14 | "hash": "7e0e72ac2d7a63e53ce1d3efa53c622aa340567c1d8b6cacce1bc3fec6561bd7"
15 | }
16 |
--------------------------------------------------------------------------------
/.sqlx/query-a91c966bc66d1efec23e3a336fe515fa9149901b2882bbbd6eec5772d215927b.json:
--------------------------------------------------------------------------------
1 | {
2 | "db_name": "PostgreSQL",
3 | "query": "DELETE FROM group_policies WHERE group_id = $1 AND policy_id = $2",
4 | "describe": {
5 | "columns": [],
6 | "parameters": {
7 | "Left": [
8 | "Int8",
9 | "Int8"
10 | ]
11 | },
12 | "nullable": []
13 | },
14 | "hash": "a91c966bc66d1efec23e3a336fe515fa9149901b2882bbbd6eec5772d215927b"
15 | }
16 |
--------------------------------------------------------------------------------
/.sqlx/query-55754e1dd7082a06867e10686bd38c123949e4c4c80d98a006d2999e7b365a78.json:
--------------------------------------------------------------------------------
1 | {
2 | "db_name": "PostgreSQL",
3 | "query": "UPDATE users SET first_login = $1, updated_at = now() WHERE user_id = $2",
4 | "describe": {
5 | "columns": [],
6 | "parameters": {
7 | "Left": [
8 | "Bool",
9 | "Int8"
10 | ]
11 | },
12 | "nullable": []
13 | },
14 | "hash": "55754e1dd7082a06867e10686bd38c123949e4c4c80d98a006d2999e7b365a78"
15 | }
16 |
--------------------------------------------------------------------------------
/.sqlx/query-293633706090984c01cc04f1aa9469a915550204c53953c03b457805e004f229.json:
--------------------------------------------------------------------------------
1 | {
2 | "db_name": "PostgreSQL",
3 | "query": "UPDATE users SET password_hash = $1, updated_at = now() WHERE user_id = $2",
4 | "describe": {
5 | "columns": [],
6 | "parameters": {
7 | "Left": [
8 | "Text",
9 | "Int8"
10 | ]
11 | },
12 | "nullable": []
13 | },
14 | "hash": "293633706090984c01cc04f1aa9469a915550204c53953c03b457805e004f229"
15 | }
16 |
--------------------------------------------------------------------------------
/docker-compose.yml:
--------------------------------------------------------------------------------
1 | services:
2 | keyrunes-database:
3 | image: postgres:17
4 | container_name: keyrunes-database
5 | environment:
6 | POSTGRES_USER: ${POSTGRES_USER}
7 | POSTGRES_PASSWORD: ${POSTGRES_PASSWORD}
8 | POSTGRES_DB: ${POSTGRES_DB}
9 | ports:
10 | - "${POSTGRES_PORT:-5432}:5432"
11 | volumes:
12 | - keyrunes_postgres_data:/var/lib/postgresql/data
13 | restart: unless-stopped
14 |
15 | volumes:
16 | keyrunes_postgres_data:
17 |
--------------------------------------------------------------------------------
/.env-example:
--------------------------------------------------------------------------------
1 | POSTGRES_USER=postgres
2 | POSTGRES_PASSWORD=123456
3 | POSTGRES_DB=keyrunes
4 | POSTGRES_PORT=5432
 5 | # NOTE(review): plain dotenv loaders such as dotenvy do not expand ${VAR};
 6 | # use literal values here unless this file is only consumed by docker-compose.
 7 | DATABASE_URL="postgres://${POSTGRES_USER}:${POSTGRES_PASSWORD}@localhost:${POSTGRES_PORT}/${POSTGRES_DB}"
6 | JWT_SECRET=your-super-secret-jwt-key-change-in-production
7 | LOG_LEVEL=info
8 | SMTP_USERNAME=noreply@example.com
9 | SMTP_PASSWORD=your_smtp_password
10 | SMTP_HOST=smtp.gmail.com
11 | SMTP_PORT=587
12 | FROM_EMAIL=noreply@example.com
13 | FROM_NAME=KeyRunes
14 | FRONTEND_URL=http://localhost:3000
15 |
--------------------------------------------------------------------------------
/.sqlx/query-0c20fbbc3563e75fbff5a132f33e2e1d9e9c43e2f84ebfee5b09ab7801943911.json:
--------------------------------------------------------------------------------
1 | {
2 | "db_name": "PostgreSQL",
3 | "query": "INSERT INTO user_groups (user_id, group_id, assigned_by) VALUES ($1, $2, $3) ON CONFLICT DO NOTHING",
4 | "describe": {
5 | "columns": [],
6 | "parameters": {
7 | "Left": [
8 | "Int8",
9 | "Int8",
10 | "Int8"
11 | ]
12 | },
13 | "nullable": []
14 | },
15 | "hash": "0c20fbbc3563e75fbff5a132f33e2e1d9e9c43e2f84ebfee5b09ab7801943911"
16 | }
17 |
--------------------------------------------------------------------------------
/.sqlx/query-4ef9d78aaf530e01409f4f1fa6d8787b3f65600c3b06aa1f693a5a8bfa2bcc99.json:
--------------------------------------------------------------------------------
1 | {
2 | "db_name": "PostgreSQL",
3 | "query": "INSERT INTO group_policies (group_id, policy_id, assigned_by) VALUES ($1, $2, $3) ON CONFLICT DO NOTHING",
4 | "describe": {
5 | "columns": [],
6 | "parameters": {
7 | "Left": [
8 | "Int8",
9 | "Int8",
10 | "Int8"
11 | ]
12 | },
13 | "nullable": []
14 | },
15 | "hash": "4ef9d78aaf530e01409f4f1fa6d8787b3f65600c3b06aa1f693a5a8bfa2bcc99"
16 | }
17 |
--------------------------------------------------------------------------------
/.sqlx/query-679d925b63860400f56ac878dda80f5eff5ad329f3dfa8b7ae48aba490cd8b69.json:
--------------------------------------------------------------------------------
1 | {
2 | "db_name": "PostgreSQL",
3 | "query": "INSERT INTO user_policies (user_id, policy_id, assigned_by) VALUES ($1, $2, $3) ON CONFLICT DO NOTHING",
4 | "describe": {
5 | "columns": [],
6 | "parameters": {
7 | "Left": [
8 | "Int8",
9 | "Int8",
10 | "Int8"
11 | ]
12 | },
13 | "nullable": []
14 | },
15 | "hash": "679d925b63860400f56ac878dda80f5eff5ad329f3dfa8b7ae48aba490cd8b69"
16 | }
17 |
--------------------------------------------------------------------------------
/.sqlx/query-dc64e1d25d9ced3a49130cee99f6edc3f70a4917910cf3b76faefc24ac32159d.json:
--------------------------------------------------------------------------------
1 | {
2 | "db_name": "PostgreSQL",
3 | "query": "SELECT COUNT(*) FROM users",
4 | "describe": {
5 | "columns": [
6 | {
7 | "ordinal": 0,
8 | "name": "count",
9 | "type_info": "Int8"
10 | }
11 | ],
12 | "parameters": {
13 | "Left": []
14 | },
15 | "nullable": [
16 | null
17 | ]
18 | },
19 | "hash": "dc64e1d25d9ced3a49130cee99f6edc3f70a4917910cf3b76faefc24ac32159d"
20 | }
21 |
--------------------------------------------------------------------------------
/.sqlx/query-eea84cb1cff53062066d8c2a098c902e6a5899580bc39d615360588deeef35a9.json:
--------------------------------------------------------------------------------
1 | {
2 | "db_name": "PostgreSQL",
3 | "query": "SELECT COUNT(*) FROM groups",
4 | "describe": {
5 | "columns": [
6 | {
7 | "ordinal": 0,
8 | "name": "count",
9 | "type_info": "Int8"
10 | }
11 | ],
12 | "parameters": {
13 | "Left": []
14 | },
15 | "nullable": [
16 | null
17 | ]
18 | },
19 | "hash": "eea84cb1cff53062066d8c2a098c902e6a5899580bc39d615360588deeef35a9"
20 | }
21 |
--------------------------------------------------------------------------------
/.sqlx/query-95114e2b92fa070e175362d2bae7c825a672dd12bc3c6d537315db42c6b6f6b6.json:
--------------------------------------------------------------------------------
1 | {
2 | "db_name": "PostgreSQL",
3 | "query": "SELECT COUNT(*) FROM policies",
4 | "describe": {
5 | "columns": [
6 | {
7 | "ordinal": 0,
8 | "name": "count",
9 | "type_info": "Int8"
10 | }
11 | ],
12 | "parameters": {
13 | "Left": []
14 | },
15 | "nullable": [
16 | null
17 | ]
18 | },
19 | "hash": "95114e2b92fa070e175362d2bae7c825a672dd12bc3c6d537315db42c6b6f6b6"
20 | }
21 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Generated by Cargo
2 | # will have compiled files and executables
3 | debug/
4 | target/
5 |
6 | # These are backup files generated by rustfmt
7 | **/*.rs.bk
8 |
9 | # MSVC Windows builds of rustc generate these, which store debugging information
10 | *.pdb
11 |
12 | # Generated by cargo mutants
13 | # Contains mutation testing data
14 | **/mutants.out*/
15 |
16 | # RustRover
17 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can
18 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
19 | # and can be added to the global gitignore or merged into this file. For a more nuclear
20 | # option (not recommended) you can uncomment the following to ignore the entire idea folder.
21 | .idea/
 22 | *.env
 23 | # hurl.env is tracked test configuration (see repo tree), not a secret env file
 24 | !hurl.env
 25 |
--------------------------------------------------------------------------------
/templates/login.html:
--------------------------------------------------------------------------------
1 | {% extends "base.html" %}
2 |
3 | {% block content %}
4 |
5 | Login
6 |
7 | {% if error %}
8 | {{ error }}
9 | {% endif %}
10 |
11 |
20 |
21 | Esqueci a senha
22 |
23 |
24 | {% endblock %}
25 |
--------------------------------------------------------------------------------
/templates/register.html:
--------------------------------------------------------------------------------
1 | {% extends "base.html" %}
2 |
3 | {% block content %}
4 |
5 | Registrar
6 |
7 | {% if error %}
8 | {{ error }}
9 | {% endif %}
10 |
11 |
25 |
26 | {% endblock %}
27 |
--------------------------------------------------------------------------------
/templates/errors/400.html:
--------------------------------------------------------------------------------
1 | {% extends "base.html" %}
2 |
3 | {% block content %}
4 |
5 | 400
6 | Bad Request
7 |
8 |
9 | The request could not be processed. Please check your data and try again.
10 |
11 |
12 | {% if error_message %}
13 |
14 | Details: {{ error_message }}
15 |
16 | {% endif %}
17 |
18 |
22 |
23 | {% endblock %}
24 |
--------------------------------------------------------------------------------
/src/migrations/V1__create_user_table.sql:
--------------------------------------------------------------------------------
-- Initial schema: users table with unique external_id/username, a
-- case-insensitive unique e-mail, and an auto-maintained updated_at column.
CREATE EXTENSION IF NOT EXISTS pgcrypto;

CREATE TABLE IF NOT EXISTS users (
    user_id       BIGSERIAL PRIMARY KEY,
    external_id   UUID NOT NULL DEFAULT gen_random_uuid(),
    password_hash TEXT NOT NULL,
    email         VARCHAR(255) NOT NULL,
    username      VARCHAR(50) NOT NULL,
    created_at    TIMESTAMPTZ NOT NULL DEFAULT now(),
    updated_at    TIMESTAMPTZ NOT NULL DEFAULT now()
);

CREATE UNIQUE INDEX IF NOT EXISTS users_external_id_idx ON users (external_id);
CREATE UNIQUE INDEX IF NOT EXISTS users_username_idx ON users (username);
-- Enforces case-insensitive uniqueness of e-mail addresses.
CREATE UNIQUE INDEX IF NOT EXISTS users_email_lower_idx ON users ((lower(email)));

-- Trigger function: keep updated_at current on every row update.
CREATE OR REPLACE FUNCTION set_updated_at()
RETURNS TRIGGER AS $$
BEGIN
    NEW.updated_at = now();
    RETURN NEW;
END;
$$ LANGUAGE plpgsql;

DROP TRIGGER IF EXISTS trg_set_updated_at ON users;
CREATE TRIGGER trg_set_updated_at
BEFORE UPDATE ON users
FOR EACH ROW
-- EXECUTE PROCEDURE is deprecated since PostgreSQL 11; the project targets
-- postgres:17 (docker-compose), so use the modern EXECUTE FUNCTION form.
EXECUTE FUNCTION set_updated_at();
--------------------------------------------------------------------------------
/templates/base.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 | {% if title %}{{ title }} - Keyrunes{% else %}My Keyrunes{% endif %}
7 |
8 |
9 |
10 |
15 |
16 |
17 |
18 | {% block header %}
19 | Keyrunes
20 |
24 |
25 | {% endblock header %}
26 |
27 |
28 |
29 | {% block content %}{% endblock %}
30 |
31 |
32 |
36 |
37 |
38 |
--------------------------------------------------------------------------------
/src/views/admin.rs:
--------------------------------------------------------------------------------
1 | use axum::{
2 | extract::Extension,
3 | response::{Html, IntoResponse},
4 | };
5 | use tera::Tera;
6 |
7 | use crate::handler::auth::AuthenticatedUser;
8 |
9 | pub async fn admin_page(
10 | Extension(user): Extension,
11 | Extension(tera): Extension,
12 | ) -> impl IntoResponse {
13 | // Check if user is superadmin
14 | if !user.groups.contains(&"superadmin".to_string()) {
15 | return Html("403 Forbidden
Superadmin access required
".to_string())
16 | .into_response();
17 | }
18 |
19 | let mut context = tera::Context::new();
20 | context.insert(
21 | "user",
22 | &serde_json::json!({
23 | "user_id": user.user_id,
24 | "username": user.username,
25 | "email": user.email,
26 | "groups": user.groups,
27 | }),
28 | );
29 |
30 | match tera.render("admin.html", &context) {
31 | Ok(html) => Html(html).into_response(),
32 | Err(e) => {
33 | tracing::error!("Template error: {}", e);
34 | Html(format!("Error rendering template
{}
", e)).into_response()
35 | }
36 | }
37 | }
38 |
--------------------------------------------------------------------------------
/.sqlx/query-21bc65bcf84123a99bb5f8f42e21dfebb3ba946b48640d2635f2252ffb88601d.json:
--------------------------------------------------------------------------------
1 | {
2 | "db_name": "PostgreSQL",
3 | "query": "SELECT group_id, external_id, name, description, created_at, updated_at FROM groups ORDER BY name",
4 | "describe": {
5 | "columns": [
6 | {
7 | "ordinal": 0,
8 | "name": "group_id",
9 | "type_info": "Int8"
10 | },
11 | {
12 | "ordinal": 1,
13 | "name": "external_id",
14 | "type_info": "Uuid"
15 | },
16 | {
17 | "ordinal": 2,
18 | "name": "name",
19 | "type_info": "Varchar"
20 | },
21 | {
22 | "ordinal": 3,
23 | "name": "description",
24 | "type_info": "Text"
25 | },
26 | {
27 | "ordinal": 4,
28 | "name": "created_at",
29 | "type_info": "Timestamptz"
30 | },
31 | {
32 | "ordinal": 5,
33 | "name": "updated_at",
34 | "type_info": "Timestamptz"
35 | }
36 | ],
37 | "parameters": {
38 | "Left": []
39 | },
40 | "nullable": [
41 | false,
42 | false,
43 | false,
44 | true,
45 | false,
46 | false
47 | ]
48 | },
49 | "hash": "21bc65bcf84123a99bb5f8f42e21dfebb3ba946b48640d2635f2252ffb88601d"
50 | }
51 |
--------------------------------------------------------------------------------
/.sqlx/query-d3c5ad665db0ed87d615e1eadbb582c46425e047b0e01481d3b2b03a56e8a714.json:
--------------------------------------------------------------------------------
1 | {
2 | "db_name": "PostgreSQL",
3 | "query": "SELECT group_id, external_id, name, description, created_at, updated_at FROM groups WHERE name = $1",
4 | "describe": {
5 | "columns": [
6 | {
7 | "ordinal": 0,
8 | "name": "group_id",
9 | "type_info": "Int8"
10 | },
11 | {
12 | "ordinal": 1,
13 | "name": "external_id",
14 | "type_info": "Uuid"
15 | },
16 | {
17 | "ordinal": 2,
18 | "name": "name",
19 | "type_info": "Varchar"
20 | },
21 | {
22 | "ordinal": 3,
23 | "name": "description",
24 | "type_info": "Text"
25 | },
26 | {
27 | "ordinal": 4,
28 | "name": "created_at",
29 | "type_info": "Timestamptz"
30 | },
31 | {
32 | "ordinal": 5,
33 | "name": "updated_at",
34 | "type_info": "Timestamptz"
35 | }
36 | ],
37 | "parameters": {
38 | "Left": [
39 | "Text"
40 | ]
41 | },
42 | "nullable": [
43 | false,
44 | false,
45 | false,
46 | true,
47 | false,
48 | false
49 | ]
50 | },
51 | "hash": "d3c5ad665db0ed87d615e1eadbb582c46425e047b0e01481d3b2b03a56e8a714"
52 | }
53 |
--------------------------------------------------------------------------------
/.sqlx/query-aa7b4532770146c3773d37d3ed41972203b349ae0723d7e81495edfbd5356abd.json:
--------------------------------------------------------------------------------
1 | {
2 | "db_name": "PostgreSQL",
3 | "query": "SELECT group_id, external_id, name, description, created_at, updated_at FROM groups WHERE group_id = $1",
4 | "describe": {
5 | "columns": [
6 | {
7 | "ordinal": 0,
8 | "name": "group_id",
9 | "type_info": "Int8"
10 | },
11 | {
12 | "ordinal": 1,
13 | "name": "external_id",
14 | "type_info": "Uuid"
15 | },
16 | {
17 | "ordinal": 2,
18 | "name": "name",
19 | "type_info": "Varchar"
20 | },
21 | {
22 | "ordinal": 3,
23 | "name": "description",
24 | "type_info": "Text"
25 | },
26 | {
27 | "ordinal": 4,
28 | "name": "created_at",
29 | "type_info": "Timestamptz"
30 | },
31 | {
32 | "ordinal": 5,
33 | "name": "updated_at",
34 | "type_info": "Timestamptz"
35 | }
36 | ],
37 | "parameters": {
38 | "Left": [
39 | "Int8"
40 | ]
41 | },
42 | "nullable": [
43 | false,
44 | false,
45 | false,
46 | true,
47 | false,
48 | false
49 | ]
50 | },
51 | "hash": "aa7b4532770146c3773d37d3ed41972203b349ae0723d7e81495edfbd5356abd"
52 | }
53 |
--------------------------------------------------------------------------------
/hurl/check_user_role.hurl:
--------------------------------------------------------------------------------
# Log in and capture the user's token and profile fields.
POST http://localhost:3000/api/login
Content-Type: application/json
{
  "identity": "{{email}}",
  "password": "{{password}}"
}
HTTP 200
[Captures]
user_token: jsonpath "$.token"
user_id: jsonpath "$.user.user_id"
username: jsonpath "$.user.username"
user_email: jsonpath "$.user.email"
user_groups: jsonpath "$.user.groups"
[Asserts]
jsonpath "$.token" exists
jsonpath "$.user.user_id" exists
jsonpath "$.user.groups" isCollection
jsonpath "$.user.groups" count >= 1

# Group membership check. `contains` is a predicate, which is only valid in
# an [Asserts] section — the original placed it in [Captures], which does
# not parse. The dangling empty [Asserts] entry is folded into this one.
POST http://localhost:3000/api/login
Content-Type: application/json
{
  "identity": "{{email}}",
  "password": "{{password}}"
}
HTTP 200
[Asserts]
jsonpath "$.user.groups" contains "superadmin"

# The admin dashboard must answer without a server-side (5xx) error.
GET http://localhost:3000/api/admin/dashboard
Authorization: Bearer {{user_token}}
HTTP *
[Asserts]
status >= 200
status < 500

# Final sanity check: the credentials still log in cleanly.
POST http://localhost:3000/api/login
Content-Type: application/json
{
  "identity": "{{email}}",
  "password": "{{password}}"
}
HTTP 200
--------------------------------------------------------------------------------
/templates/errors/404.html:
--------------------------------------------------------------------------------
1 | {% extends "base.html" %}
2 |
3 | {% block content %}
4 |
5 | 404
6 | Page Not Found
7 |
8 |
9 | The page you are looking for does not exist or has been moved. This can happen when a link is broken or the address was typed incorrectly.
10 |
11 |
12 | {% if path %}
13 |
14 | Requested path: {{ path }}
15 |
16 | {% endif %}
17 |
18 |
22 |
23 |
29 |
30 | {% endblock %}
31 |
--------------------------------------------------------------------------------
/templates/errors/403.html:
--------------------------------------------------------------------------------
1 | {% extends "base.html" %}
2 |
3 | {% block content %}
4 |
5 | 403
6 | Access Denied
7 |
8 |
9 | You do not have permission to access this resource. This can happen if you are not authenticated or do not have the necessary permissions.
10 |
11 |
12 | {% if required_permission %}
13 |
14 | Required permission: {{ required_permission }}
15 |
16 | {% endif %}
17 |
18 |
22 |
23 |
24 |
25 | 💡 If you believe you should have access to this resource, please contact the system administrator.
26 |
27 |
28 |
29 | {% endblock %}
30 |
--------------------------------------------------------------------------------
/.sqlx/query-e6a21b7872a21babe310bc4b54b7af3a4fba450b17c613dd43842a0321d2b901.json:
--------------------------------------------------------------------------------
1 | {
2 | "db_name": "PostgreSQL",
3 | "query": "SELECT token_id, user_id, token, expires_at, used_at, created_at \n FROM password_reset_tokens \n WHERE token = $1 AND expires_at > now() AND used_at IS NULL",
4 | "describe": {
5 | "columns": [
6 | {
7 | "ordinal": 0,
8 | "name": "token_id",
9 | "type_info": "Int8"
10 | },
11 | {
12 | "ordinal": 1,
13 | "name": "user_id",
14 | "type_info": "Int8"
15 | },
16 | {
17 | "ordinal": 2,
18 | "name": "token",
19 | "type_info": "Varchar"
20 | },
21 | {
22 | "ordinal": 3,
23 | "name": "expires_at",
24 | "type_info": "Timestamptz"
25 | },
26 | {
27 | "ordinal": 4,
28 | "name": "used_at",
29 | "type_info": "Timestamptz"
30 | },
31 | {
32 | "ordinal": 5,
33 | "name": "created_at",
34 | "type_info": "Timestamptz"
35 | }
36 | ],
37 | "parameters": {
38 | "Left": [
39 | "Text"
40 | ]
41 | },
42 | "nullable": [
43 | false,
44 | false,
45 | false,
46 | false,
47 | true,
48 | false
49 | ]
50 | },
51 | "hash": "e6a21b7872a21babe310bc4b54b7af3a4fba450b17c613dd43842a0321d2b901"
52 | }
53 |
--------------------------------------------------------------------------------
/.sqlx/query-376f7eb4ea933656da1a27ba0f9c0cecd63a9a09ddb289824a1df2b38265779a.json:
--------------------------------------------------------------------------------
1 | {
2 | "db_name": "PostgreSQL",
3 | "query": "INSERT INTO groups (external_id, name, description) \n VALUES ($1, $2, $3) \n RETURNING group_id, external_id, name, description, created_at, updated_at",
4 | "describe": {
5 | "columns": [
6 | {
7 | "ordinal": 0,
8 | "name": "group_id",
9 | "type_info": "Int8"
10 | },
11 | {
12 | "ordinal": 1,
13 | "name": "external_id",
14 | "type_info": "Uuid"
15 | },
16 | {
17 | "ordinal": 2,
18 | "name": "name",
19 | "type_info": "Varchar"
20 | },
21 | {
22 | "ordinal": 3,
23 | "name": "description",
24 | "type_info": "Text"
25 | },
26 | {
27 | "ordinal": 4,
28 | "name": "created_at",
29 | "type_info": "Timestamptz"
30 | },
31 | {
32 | "ordinal": 5,
33 | "name": "updated_at",
34 | "type_info": "Timestamptz"
35 | }
36 | ],
37 | "parameters": {
38 | "Left": [
39 | "Uuid",
40 | "Varchar",
41 | "Text"
42 | ]
43 | },
44 | "nullable": [
45 | false,
46 | false,
47 | false,
48 | true,
49 | false,
50 | false
51 | ]
52 | },
53 | "hash": "376f7eb4ea933656da1a27ba0f9c0cecd63a9a09ddb289824a1df2b38265779a"
54 | }
55 |
--------------------------------------------------------------------------------
/.sqlx/query-c8232064c4907dca27ec50b16d8a80a2ed39c31dd06cc4a82afea18996ba7736.json:
--------------------------------------------------------------------------------
1 | {
2 | "db_name": "PostgreSQL",
3 | "query": "SELECT g.group_id, g.external_id, g.name, g.description, g.created_at, g.updated_at\n FROM groups g\n INNER JOIN user_groups ug ON g.group_id = ug.group_id\n WHERE ug.user_id = $1",
4 | "describe": {
5 | "columns": [
6 | {
7 | "ordinal": 0,
8 | "name": "group_id",
9 | "type_info": "Int8"
10 | },
11 | {
12 | "ordinal": 1,
13 | "name": "external_id",
14 | "type_info": "Uuid"
15 | },
16 | {
17 | "ordinal": 2,
18 | "name": "name",
19 | "type_info": "Varchar"
20 | },
21 | {
22 | "ordinal": 3,
23 | "name": "description",
24 | "type_info": "Text"
25 | },
26 | {
27 | "ordinal": 4,
28 | "name": "created_at",
29 | "type_info": "Timestamptz"
30 | },
31 | {
32 | "ordinal": 5,
33 | "name": "updated_at",
34 | "type_info": "Timestamptz"
35 | }
36 | ],
37 | "parameters": {
38 | "Left": [
39 | "Int8"
40 | ]
41 | },
42 | "nullable": [
43 | false,
44 | false,
45 | false,
46 | true,
47 | false,
48 | false
49 | ]
50 | },
51 | "hash": "c8232064c4907dca27ec50b16d8a80a2ed39c31dd06cc4a82afea18996ba7736"
52 | }
53 |
--------------------------------------------------------------------------------
/.sqlx/query-ef85f457e7fbcb1aad277198b4e984bc7bde2f596a218260d1c9e0df54b1c851.json:
--------------------------------------------------------------------------------
1 | {
2 | "db_name": "PostgreSQL",
3 | "query": "INSERT INTO password_reset_tokens (user_id, token, expires_at) \n VALUES ($1, $2, $3) \n RETURNING token_id, user_id, token, expires_at, used_at, created_at",
4 | "describe": {
5 | "columns": [
6 | {
7 | "ordinal": 0,
8 | "name": "token_id",
9 | "type_info": "Int8"
10 | },
11 | {
12 | "ordinal": 1,
13 | "name": "user_id",
14 | "type_info": "Int8"
15 | },
16 | {
17 | "ordinal": 2,
18 | "name": "token",
19 | "type_info": "Varchar"
20 | },
21 | {
22 | "ordinal": 3,
23 | "name": "expires_at",
24 | "type_info": "Timestamptz"
25 | },
26 | {
27 | "ordinal": 4,
28 | "name": "used_at",
29 | "type_info": "Timestamptz"
30 | },
31 | {
32 | "ordinal": 5,
33 | "name": "created_at",
34 | "type_info": "Timestamptz"
35 | }
36 | ],
37 | "parameters": {
38 | "Left": [
39 | "Int8",
40 | "Varchar",
41 | "Timestamptz"
42 | ]
43 | },
44 | "nullable": [
45 | false,
46 | false,
47 | false,
48 | false,
49 | true,
50 | false
51 | ]
52 | },
53 | "hash": "ef85f457e7fbcb1aad277198b4e984bc7bde2f596a218260d1c9e0df54b1c851"
54 | }
55 |
--------------------------------------------------------------------------------
/Cargo.toml:
--------------------------------------------------------------------------------
1 | [package]
2 | name = "keyrunes"
3 | version = "0.1.0"
4 | edition = "2024"
5 | default-run = "keyrunes"
6 |
7 | [dependencies]
8 | tokio = { version = "1", features = ["full"] }
9 | axum = "0.8"
10 | serde = { version = "1", features = ["derive"] }
11 | serde_json = "1.0"
12 | sqlx = { version = "0.8", features = ["runtime-tokio", "macros", "postgres", "uuid", "chrono"] }
13 | tera = "1.20"
14 | chrono = { version = "0.4", features = ["serde"] }
15 | thiserror = "2.0.17"
16 | tracing = "0.1"
17 | clap = { version = "4", features = ["derive"] }
18 | argon2 = "0.5.3"
19 | password-hash = "0.5"
20 | uuid = { version = "1", features = ["serde", "v4"] }
21 | anyhow = "1.0"
22 | async-trait = "0.1"
23 | rand = "0.9.2"
24 | regex = "1"
25 | tokio-stream = "0.1"
26 | tower-http = { version = "0.6.7", features = ["fs", "cors"] }
27 | url = "2.5.4"
28 | dotenvy = "0.15.7"
29 | tracing-subscriber = { version = "0.3.20", features = ["env-filter"]}
30 | serde_with = "3.14.0"
31 | hex = "0.4.3"
32 | futures = "0.3.31"
33 | secrecy = { version = "0.10.3", features = ["serde"] }
34 | tower = "0.5.2"
35 | lettre = { version = "0.11", features = ["tokio1", "tokio1-native-tls", "smtp-transport", "builder"] }
36 | josekit = "0.10.3"
37 |
38 | [dev-dependencies]
39 | test-case = "3.3.1"
40 | rstest = "0.26.1"
41 | proptest = "1.7.0"
42 | tokio-test = "0.4.4"
43 | fake = { version = "4.4", features = ["derive", "chrono", "uuid"] }
44 | criterion = { version = "0.7", features = ["html_reports"] }
45 | serial_test = "3.2.0"
46 |
--------------------------------------------------------------------------------
/migrations/20250819092538_create_users_table.sql:
--------------------------------------------------------------------------------
-- Needed for gen_random_uuid() on the users.external_id default.
CREATE EXTENSION IF NOT EXISTS pgcrypto;

-- Core account table. external_id is the public-facing identifier;
-- user_id stays internal to the database.
CREATE TABLE IF NOT EXISTS users (
    user_id BIGSERIAL PRIMARY KEY,
    external_id UUID NOT NULL DEFAULT gen_random_uuid(),
    password_hash TEXT NOT NULL,
    email VARCHAR(255) NOT NULL,
    username VARCHAR(50) NOT NULL,
    created_at TIMESTAMPTZ NOT NULL DEFAULT now(),
    updated_at TIMESTAMPTZ NOT NULL DEFAULT now()
);

-- Uniqueness constraints. Email uniqueness is case-insensitive via the
-- functional index on lower(email).
CREATE UNIQUE INDEX IF NOT EXISTS users_external_id_idx ON users (external_id);
CREATE UNIQUE INDEX IF NOT EXISTS users_username_idx ON users (username);
CREATE UNIQUE INDEX IF NOT EXISTS users_email_lower_idx ON users ((lower(email)));

-- Simple key/value application settings.
CREATE TABLE IF NOT EXISTS settings (
    settings_id SERIAL PRIMARY KEY,
    key VARCHAR(100) NOT NULL UNIQUE,
    value TEXT NOT NULL,
    description TEXT,
    created_at TIMESTAMPTZ NOT NULL DEFAULT now(),
    updated_at TIMESTAMPTZ NOT NULL DEFAULT now()
);

-- Seed a default base URL for local development; no-op if already present.
INSERT INTO settings (key, value, description)
VALUES ('BASE_URL', 'http://127.0.0.1:3000', 'base url for local testing')
ON CONFLICT (key) DO NOTHING;

-- Trigger function keeping updated_at current on every row UPDATE.
CREATE OR REPLACE FUNCTION set_updated_at()
RETURNS TRIGGER AS $$
BEGIN
  NEW.updated_at = now();
  RETURN NEW;
END;
$$ LANGUAGE plpgsql;

-- Re-create idempotently so the migration can be re-run safely.
DROP TRIGGER IF EXISTS trg_set_updated_at ON users;
CREATE TRIGGER trg_set_updated_at
BEFORE UPDATE ON users
FOR EACH ROW
EXECUTE PROCEDURE set_updated_at();
--------------------------------------------------------------------------------
/templates/errors/500.html:
--------------------------------------------------------------------------------
1 | {% extends "base.html" %}
2 |
3 | {% block content %}
4 |
5 | 500
6 | Internal Server Error
7 |
8 |
9 | Something went wrong on the server while processing your request. Our team has been notified and is working to resolve the issue.
10 |
11 |
12 | {% if error_id %}
13 |
14 |
Error ID: {{ error_id }}
15 |
16 | Use this ID when contacting support.
17 |
18 |
19 | {% endif %}
20 |
21 |
25 |
26 |
31 |
32 | {% endblock %}
33 |
--------------------------------------------------------------------------------
/templates/dashboard.html:
--------------------------------------------------------------------------------
1 | {% extends "base.html" %}
2 |
3 | {% block header %}
4 | Keyrunes
5 |
6 | {% endblock header %}
7 |
8 | {% block content %}
9 |
10 | Dashboard
11 |
12 | {% if user %}
13 |
14 | Welcome, {{ user.username }}! ({{ user.email }})
15 |
16 |
17 | Your Groups
18 |
19 | {% for group in user.groups %}
20 | - {{ group }}
21 | {% else %}
22 | - No groups assigned
23 | {% endfor %}
24 |
25 |
26 |
27 |
28 | {% if user.first_login %}
29 |
Change Password
30 | {% endif %}
31 |
32 | {% else %}
33 |
34 | You are not logged in. Please
login to continue.
35 |
36 | {% endif %}
37 |
38 |
39 |
52 | {% endblock %}
53 |
--------------------------------------------------------------------------------
/.sqlx/query-516045641716ad9a84221b2d032c86990f933aea9a10917cdd84d89ccf35f856.json:
--------------------------------------------------------------------------------
1 | {
2 | "db_name": "PostgreSQL",
3 | "query": "SELECT user_id, external_id, email, username, password_hash, created_at, first_login, updated_at FROM users WHERE email = $1",
4 | "describe": {
5 | "columns": [
6 | {
7 | "ordinal": 0,
8 | "name": "user_id",
9 | "type_info": "Int8"
10 | },
11 | {
12 | "ordinal": 1,
13 | "name": "external_id",
14 | "type_info": "Uuid"
15 | },
16 | {
17 | "ordinal": 2,
18 | "name": "email",
19 | "type_info": "Varchar"
20 | },
21 | {
22 | "ordinal": 3,
23 | "name": "username",
24 | "type_info": "Varchar"
25 | },
26 | {
27 | "ordinal": 4,
28 | "name": "password_hash",
29 | "type_info": "Text"
30 | },
31 | {
32 | "ordinal": 5,
33 | "name": "created_at",
34 | "type_info": "Timestamptz"
35 | },
36 | {
37 | "ordinal": 6,
38 | "name": "first_login",
39 | "type_info": "Bool"
40 | },
41 | {
42 | "ordinal": 7,
43 | "name": "updated_at",
44 | "type_info": "Timestamptz"
45 | }
46 | ],
47 | "parameters": {
48 | "Left": [
49 | "Text"
50 | ]
51 | },
52 | "nullable": [
53 | false,
54 | false,
55 | false,
56 | false,
57 | false,
58 | false,
59 | false,
60 | false
61 | ]
62 | },
63 | "hash": "516045641716ad9a84221b2d032c86990f933aea9a10917cdd84d89ccf35f856"
64 | }
65 |
--------------------------------------------------------------------------------
/.sqlx/query-9bb01184647b17d479d5a8f8b7dba53c8a5788be0ce3cf92700d8f704ffe2012.json:
--------------------------------------------------------------------------------
1 | {
2 | "db_name": "PostgreSQL",
3 | "query": "SELECT user_id, external_id, email, username, password_hash, first_login, created_at, updated_at FROM users WHERE user_id = $1",
4 | "describe": {
5 | "columns": [
6 | {
7 | "ordinal": 0,
8 | "name": "user_id",
9 | "type_info": "Int8"
10 | },
11 | {
12 | "ordinal": 1,
13 | "name": "external_id",
14 | "type_info": "Uuid"
15 | },
16 | {
17 | "ordinal": 2,
18 | "name": "email",
19 | "type_info": "Varchar"
20 | },
21 | {
22 | "ordinal": 3,
23 | "name": "username",
24 | "type_info": "Varchar"
25 | },
26 | {
27 | "ordinal": 4,
28 | "name": "password_hash",
29 | "type_info": "Text"
30 | },
31 | {
32 | "ordinal": 5,
33 | "name": "first_login",
34 | "type_info": "Bool"
35 | },
36 | {
37 | "ordinal": 6,
38 | "name": "created_at",
39 | "type_info": "Timestamptz"
40 | },
41 | {
42 | "ordinal": 7,
43 | "name": "updated_at",
44 | "type_info": "Timestamptz"
45 | }
46 | ],
47 | "parameters": {
48 | "Left": [
49 | "Int8"
50 | ]
51 | },
52 | "nullable": [
53 | false,
54 | false,
55 | false,
56 | false,
57 | false,
58 | false,
59 | false,
60 | false
61 | ]
62 | },
63 | "hash": "9bb01184647b17d479d5a8f8b7dba53c8a5788be0ce3cf92700d8f704ffe2012"
64 | }
65 |
--------------------------------------------------------------------------------
/.sqlx/query-9cba992505d6b80cb356909d3e2360385af579fe6eccd1089c96a613989a1caa.json:
--------------------------------------------------------------------------------
1 | {
2 | "db_name": "PostgreSQL",
3 | "query": "SELECT user_id, external_id, email, username, password_hash, created_at, first_login, updated_at FROM users WHERE username = $1",
4 | "describe": {
5 | "columns": [
6 | {
7 | "ordinal": 0,
8 | "name": "user_id",
9 | "type_info": "Int8"
10 | },
11 | {
12 | "ordinal": 1,
13 | "name": "external_id",
14 | "type_info": "Uuid"
15 | },
16 | {
17 | "ordinal": 2,
18 | "name": "email",
19 | "type_info": "Varchar"
20 | },
21 | {
22 | "ordinal": 3,
23 | "name": "username",
24 | "type_info": "Varchar"
25 | },
26 | {
27 | "ordinal": 4,
28 | "name": "password_hash",
29 | "type_info": "Text"
30 | },
31 | {
32 | "ordinal": 5,
33 | "name": "created_at",
34 | "type_info": "Timestamptz"
35 | },
36 | {
37 | "ordinal": 6,
38 | "name": "first_login",
39 | "type_info": "Bool"
40 | },
41 | {
42 | "ordinal": 7,
43 | "name": "updated_at",
44 | "type_info": "Timestamptz"
45 | }
46 | ],
47 | "parameters": {
48 | "Left": [
49 | "Text"
50 | ]
51 | },
52 | "nullable": [
53 | false,
54 | false,
55 | false,
56 | false,
57 | false,
58 | false,
59 | false,
60 | false
61 | ]
62 | },
63 | "hash": "9cba992505d6b80cb356909d3e2360385af579fe6eccd1089c96a613989a1caa"
64 | }
65 |
--------------------------------------------------------------------------------
/.sqlx/query-2d5c0aa80bc6ab079d56ae8937c4c6691d01edc8f211245d3006c3125e72df34.json:
--------------------------------------------------------------------------------
1 | {
2 | "db_name": "PostgreSQL",
3 | "query": "\n SELECT u.user_id, u.external_id, u.email, u.username, u.first_login, u.created_at,\n array_agg(g.name) as groups\n FROM users u\n LEFT JOIN user_groups ug ON u.user_id = ug.user_id\n LEFT JOIN groups g ON ug.group_id = g.group_id\n GROUP BY u.user_id\n ORDER BY u.created_at DESC\n ",
4 | "describe": {
5 | "columns": [
6 | {
7 | "ordinal": 0,
8 | "name": "user_id",
9 | "type_info": "Int8"
10 | },
11 | {
12 | "ordinal": 1,
13 | "name": "external_id",
14 | "type_info": "Uuid"
15 | },
16 | {
17 | "ordinal": 2,
18 | "name": "email",
19 | "type_info": "Varchar"
20 | },
21 | {
22 | "ordinal": 3,
23 | "name": "username",
24 | "type_info": "Varchar"
25 | },
26 | {
27 | "ordinal": 4,
28 | "name": "first_login",
29 | "type_info": "Bool"
30 | },
31 | {
32 | "ordinal": 5,
33 | "name": "created_at",
34 | "type_info": "Timestamptz"
35 | },
36 | {
37 | "ordinal": 6,
38 | "name": "groups",
39 | "type_info": "VarcharArray"
40 | }
41 | ],
42 | "parameters": {
43 | "Left": []
44 | },
45 | "nullable": [
46 | false,
47 | false,
48 | false,
49 | false,
50 | false,
51 | false,
52 | null
53 | ]
54 | },
55 | "hash": "2d5c0aa80bc6ab079d56ae8937c4c6691d01edc8f211245d3006c3125e72df34"
56 | }
57 |
--------------------------------------------------------------------------------
/templates/errors/503.html:
--------------------------------------------------------------------------------
1 | {% extends "base.html" %}
2 |
3 | {% block content %}
4 |
5 | 503
6 | Service Temporarily Unavailable
7 |
8 |
9 | The service is temporarily unavailable due to maintenance or overload. Please try again in a few moments.
10 |
11 |
12 | {% if retry_after %}
13 |
14 | Try again in: {{ retry_after }} seconds
15 |
16 | {% endif %}
17 |
18 |
24 |
25 |
26 |
27 | 💡 System Status: You can check the real-time system status on our health check page.
28 |
29 |
30 |
31 |
39 |
40 | {% endblock %}
41 |
--------------------------------------------------------------------------------
/.sqlx/query-5c695b62cccea048108b3f63e130a8a187f22a23e35e9a310f1f188e6a75e5a7.json:
--------------------------------------------------------------------------------
1 | {
2 | "db_name": "PostgreSQL",
3 | "query": "INSERT INTO users (external_id, email, username, password_hash, first_login) VALUES ($1, $2, $3, $4, $5) RETURNING user_id, external_id, email, username, password_hash, first_login, created_at, updated_at",
4 | "describe": {
5 | "columns": [
6 | {
7 | "ordinal": 0,
8 | "name": "user_id",
9 | "type_info": "Int8"
10 | },
11 | {
12 | "ordinal": 1,
13 | "name": "external_id",
14 | "type_info": "Uuid"
15 | },
16 | {
17 | "ordinal": 2,
18 | "name": "email",
19 | "type_info": "Varchar"
20 | },
21 | {
22 | "ordinal": 3,
23 | "name": "username",
24 | "type_info": "Varchar"
25 | },
26 | {
27 | "ordinal": 4,
28 | "name": "password_hash",
29 | "type_info": "Text"
30 | },
31 | {
32 | "ordinal": 5,
33 | "name": "first_login",
34 | "type_info": "Bool"
35 | },
36 | {
37 | "ordinal": 6,
38 | "name": "created_at",
39 | "type_info": "Timestamptz"
40 | },
41 | {
42 | "ordinal": 7,
43 | "name": "updated_at",
44 | "type_info": "Timestamptz"
45 | }
46 | ],
47 | "parameters": {
48 | "Left": [
49 | "Uuid",
50 | "Varchar",
51 | "Varchar",
52 | "Text",
53 | "Bool"
54 | ]
55 | },
56 | "nullable": [
57 | false,
58 | false,
59 | false,
60 | false,
61 | false,
62 | false,
63 | false,
64 | false
65 | ]
66 | },
67 | "hash": "5c695b62cccea048108b3f63e130a8a187f22a23e35e9a310f1f188e6a75e5a7"
68 | }
69 |
--------------------------------------------------------------------------------
/docker-compose-prod-example.yml:
--------------------------------------------------------------------------------
1 | # KeyRunes - Docker Compose Configuration
2 | #
3 | # Instructions:
4 | # 1. Copy this file to docker-compose.yml
5 | # 2. Create a .env file with your configuration
6 | # 3. Run: docker-compose up -d
7 | #
8 | # For more details, see DOCKER_README.md
9 |
10 | services:
11 | keyrunes-database:
12 | image: postgres:17
13 | container_name: keyrunes-database
14 | environment:
15 | POSTGRES_USER: ${POSTGRES_USER:-postgres}
16 | POSTGRES_PASSWORD: ${POSTGRES_PASSWORD}
17 | POSTGRES_DB: ${POSTGRES_DB:-keyrunes}
18 | ports:
19 | - "${POSTGRES_PORT:-5432}:5432"
20 | volumes:
21 | - keyrunes_data:/var/lib/postgresql/data
22 | restart: unless-stopped
23 | healthcheck:
24 | test: ["CMD-SHELL", "pg_isready -U ${POSTGRES_USER:-postgres}"]
25 | interval: 10s
26 | timeout: 5s
27 | retries: 5
28 |
29 | keyrunes:
30 | image: jonatasoli/keyrunes:latest
31 | container_name: keyrunes
32 | depends_on:
33 | keyrunes-database:
34 | condition: service_healthy
35 | environment:
36 | DATABASE_URL: ${DATABASE_URL}
37 | JWT_SECRET: ${JWT_SECRET}
38 | LOG_LEVEL: ${LOG_LEVEL:-info}
39 | SMTP_USERNAME: ${SMTP_USERNAME}
40 | SMTP_PASSWORD: ${SMTP_PASSWORD}
41 | SMTP_HOST: ${SMTP_HOST}
42 | SMTP_PORT: ${SMTP_PORT:-587}
43 | FROM_EMAIL: ${FROM_EMAIL}
44 | FROM_NAME: ${FROM_NAME:-KeyRunes}
45 | FRONTEND_URL: ${FRONTEND_URL:-http://localhost:3000}
46 | ports:
47 | - "3000:3000"
48 | restart: unless-stopped
49 | healthcheck:
50 | test: ["CMD", "curl", "-f", "http://localhost:3000/api/health"]
51 | interval: 30s
52 | timeout: 10s
53 | retries: 3
54 | start_period: 40s
55 |
56 | volumes:
57 | keyrunes_data:
58 | driver: local
59 |
--------------------------------------------------------------------------------
/CHANGELOG.md:
--------------------------------------------------------------------------------
1 | ## [0.1.1] - 2025-11-29
2 |
3 | ### 🚀 Features
4 |
5 | - Add check groups in api and cli
6 |
7 | ### 🐛 Bug Fixes
8 |
9 | - Change jsonwebtoken to josekit and remove sqlx-mysql and rsa
10 |
11 | ### 💼 Other
12 |
13 | - Update libraries
14 | - Add ignore in cargo audit false positive
15 |
16 | ### ⚙️ Miscellaneous Tasks
17 |
18 | - Update changelog
19 | ## [0.1.0] - 2025-11-27
20 |
21 | ### 🚀 Features
22 |
23 | - Merge main repository
24 | - Add dotenvy cargo to read the environment variables
25 | - Add table reset password, groups and policies
26 | - Add forgot password, start new login and groups and policies features
27 | - Add tests
28 | - *(user)* Add admin endpoint for user registration with groups
29 | - Improve logs and tracings and add new pages
30 | - Cli tool for password recovery
31 | - Add forgot password view route
32 |
33 | ### 🐛 Bug Fixes
34 |
35 | - Fix error in register with first login
36 | - Register router for signup and login
37 | - Removed redundant routes
38 | - Register and login nav links removed after login
39 | - Provide database_url
40 | - Redirect to dashboard properly
41 | - Adding default run
42 | - Saved tokens in db and added settings table
43 | - Resolved comments
44 | - Fix merge
45 |
46 | ### 💼 Other
47 |
48 | - Add github actions
49 | - Fix clippy errors
50 |
51 | ### 🚜 Refactor
52 |
53 | - Add tests for settings functionality
54 |
55 | ### 📚 Documentation
56 |
57 | - Add contributors document
58 | - Add code of conduct
59 | - Fixed dev setup with sqlx and docker compose
60 |
61 | ### 🧪 Testing
62 |
63 | - Fix tests and remove tests
64 | - Add ignore tests and fix formatting
65 |
66 | ### ⚙️ Miscellaneous Tasks
67 |
68 | - Add env example file
69 | - Ignore `.env` file
70 | - Add `docker-compose` to wake up the postgres service
71 | - Update README.md
72 | - Add badge in README
73 | - Add changelog
74 |
--------------------------------------------------------------------------------
/.sqlx/query-470cb7bce44297b903acf0dc0d25aff88ac8f2ec68cbfbc193d9c16fbcc8a303.json:
--------------------------------------------------------------------------------
1 | {
2 | "db_name": "PostgreSQL",
3 | "query": "SELECT policy_id, external_id, name, description, resource, action, \n effect as \"effect_str\", conditions, created_at, updated_at \n FROM policies ORDER BY name",
4 | "describe": {
5 | "columns": [
6 | {
7 | "ordinal": 0,
8 | "name": "policy_id",
9 | "type_info": "Int8"
10 | },
11 | {
12 | "ordinal": 1,
13 | "name": "external_id",
14 | "type_info": "Uuid"
15 | },
16 | {
17 | "ordinal": 2,
18 | "name": "name",
19 | "type_info": "Varchar"
20 | },
21 | {
22 | "ordinal": 3,
23 | "name": "description",
24 | "type_info": "Text"
25 | },
26 | {
27 | "ordinal": 4,
28 | "name": "resource",
29 | "type_info": "Varchar"
30 | },
31 | {
32 | "ordinal": 5,
33 | "name": "action",
34 | "type_info": "Varchar"
35 | },
36 | {
37 | "ordinal": 6,
38 | "name": "effect_str",
39 | "type_info": "Varchar"
40 | },
41 | {
42 | "ordinal": 7,
43 | "name": "conditions",
44 | "type_info": "Jsonb"
45 | },
46 | {
47 | "ordinal": 8,
48 | "name": "created_at",
49 | "type_info": "Timestamptz"
50 | },
51 | {
52 | "ordinal": 9,
53 | "name": "updated_at",
54 | "type_info": "Timestamptz"
55 | }
56 | ],
57 | "parameters": {
58 | "Left": []
59 | },
60 | "nullable": [
61 | false,
62 | false,
63 | false,
64 | true,
65 | false,
66 | false,
67 | false,
68 | true,
69 | false,
70 | false
71 | ]
72 | },
73 | "hash": "470cb7bce44297b903acf0dc0d25aff88ac8f2ec68cbfbc193d9c16fbcc8a303"
74 | }
75 |
--------------------------------------------------------------------------------
/.sqlx/query-501c9ab794cb66daddf6c4ebcef8df2f9f696e0ae0f403551cc0bb9c3cd18cc8.json:
--------------------------------------------------------------------------------
1 | {
2 | "db_name": "PostgreSQL",
3 | "query": "SELECT policy_id, external_id, name, description, resource, action, \n effect as \"effect_str\", conditions, created_at, updated_at \n FROM policies WHERE name = $1",
4 | "describe": {
5 | "columns": [
6 | {
7 | "ordinal": 0,
8 | "name": "policy_id",
9 | "type_info": "Int8"
10 | },
11 | {
12 | "ordinal": 1,
13 | "name": "external_id",
14 | "type_info": "Uuid"
15 | },
16 | {
17 | "ordinal": 2,
18 | "name": "name",
19 | "type_info": "Varchar"
20 | },
21 | {
22 | "ordinal": 3,
23 | "name": "description",
24 | "type_info": "Text"
25 | },
26 | {
27 | "ordinal": 4,
28 | "name": "resource",
29 | "type_info": "Varchar"
30 | },
31 | {
32 | "ordinal": 5,
33 | "name": "action",
34 | "type_info": "Varchar"
35 | },
36 | {
37 | "ordinal": 6,
38 | "name": "effect_str",
39 | "type_info": "Varchar"
40 | },
41 | {
42 | "ordinal": 7,
43 | "name": "conditions",
44 | "type_info": "Jsonb"
45 | },
46 | {
47 | "ordinal": 8,
48 | "name": "created_at",
49 | "type_info": "Timestamptz"
50 | },
51 | {
52 | "ordinal": 9,
53 | "name": "updated_at",
54 | "type_info": "Timestamptz"
55 | }
56 | ],
57 | "parameters": {
58 | "Left": [
59 | "Text"
60 | ]
61 | },
62 | "nullable": [
63 | false,
64 | false,
65 | false,
66 | true,
67 | false,
68 | false,
69 | false,
70 | true,
71 | false,
72 | false
73 | ]
74 | },
75 | "hash": "501c9ab794cb66daddf6c4ebcef8df2f9f696e0ae0f403551cc0bb9c3cd18cc8"
76 | }
77 |
--------------------------------------------------------------------------------
/.sqlx/query-f2934589f15af1085b812e01ba8de1ee7a1015dfc86ea082f8d5a98a10ac0613.json:
--------------------------------------------------------------------------------
1 | {
2 | "db_name": "PostgreSQL",
3 | "query": "SELECT policy_id, external_id, name, description, resource, action, \n effect as \"effect_str\", conditions, created_at, updated_at \n FROM policies WHERE policy_id = $1",
4 | "describe": {
5 | "columns": [
6 | {
7 | "ordinal": 0,
8 | "name": "policy_id",
9 | "type_info": "Int8"
10 | },
11 | {
12 | "ordinal": 1,
13 | "name": "external_id",
14 | "type_info": "Uuid"
15 | },
16 | {
17 | "ordinal": 2,
18 | "name": "name",
19 | "type_info": "Varchar"
20 | },
21 | {
22 | "ordinal": 3,
23 | "name": "description",
24 | "type_info": "Text"
25 | },
26 | {
27 | "ordinal": 4,
28 | "name": "resource",
29 | "type_info": "Varchar"
30 | },
31 | {
32 | "ordinal": 5,
33 | "name": "action",
34 | "type_info": "Varchar"
35 | },
36 | {
37 | "ordinal": 6,
38 | "name": "effect_str",
39 | "type_info": "Varchar"
40 | },
41 | {
42 | "ordinal": 7,
43 | "name": "conditions",
44 | "type_info": "Jsonb"
45 | },
46 | {
47 | "ordinal": 8,
48 | "name": "created_at",
49 | "type_info": "Timestamptz"
50 | },
51 | {
52 | "ordinal": 9,
53 | "name": "updated_at",
54 | "type_info": "Timestamptz"
55 | }
56 | ],
57 | "parameters": {
58 | "Left": [
59 | "Int8"
60 | ]
61 | },
62 | "nullable": [
63 | false,
64 | false,
65 | false,
66 | true,
67 | false,
68 | false,
69 | false,
70 | true,
71 | false,
72 | false
73 | ]
74 | },
75 | "hash": "f2934589f15af1085b812e01ba8de1ee7a1015dfc86ea082f8d5a98a10ac0613"
76 | }
77 |
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
# Build stage
FROM rust:latest AS builder

# Install system dependencies
RUN apt-get update && apt-get install -y \
    pkg-config \
    libssl-dev \
    libpq-dev \
    && rm -rf /var/lib/apt/lists/*

# Create app user
RUN useradd -m -u 1001 keyrunes

# Set working directory
WORKDIR /app

# Copy Cargo files for dependency caching
COPY Cargo.toml Cargo.lock ./

# Create dummy main.rs to build dependencies
RUN mkdir src && echo "fn main() {}" > src/main.rs

# Build dependencies (cached layer). Remove the dummy sources AND the
# dummy binary's build artifacts so cargo relinks against the real
# sources in the next build instead of reusing the stale dummy output.
RUN cargo build --release && rm -rf src target/release/deps/keyrunes*

# Copy source code
COPY src ./src
COPY migrations ./migrations
COPY templates ./templates
COPY .sqlx ./.sqlx

# Build the application (offline sqlx metadata, no database needed)
ENV SQLX_OFFLINE=true
RUN cargo build --release

# Runtime stage
FROM debian:bookworm-slim

# Install runtime dependencies
RUN apt-get update && apt-get install -y \
    ca-certificates \
    libpq5 \
    libssl3 \
    curl \
    && rm -rf /var/lib/apt/lists/*

# Create app user
RUN useradd -m -u 1001 keyrunes

# Set working directory
WORKDIR /app

# Copy binaries from builder stage
COPY --from=builder /app/target/release/keyrunes /usr/local/bin/keyrunes
COPY --from=builder /app/target/release/cli /usr/local/bin/keyrunes-cli

# Copy runtime files
COPY --from=builder /app/migrations ./migrations
COPY --from=builder /app/templates ./templates

RUN chown -R keyrunes:keyrunes /app

# Switch to app user
USER keyrunes

# Expose port
EXPOSE 3000

# Health check
HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \
    CMD curl -f http://localhost:3000/api/health || exit 1

# Environment variables
ENV RUST_LOG=info
ENV PORT=3000

# Run the application
CMD ["keyrunes"]
79 |
--------------------------------------------------------------------------------
/.sqlx/query-2c42062a084c2b179b9f84bfcc808f01694dcd8187589bd9c852b4b7853ec821.json:
--------------------------------------------------------------------------------
1 | {
2 | "db_name": "PostgreSQL",
3 | "query": "SELECT p.policy_id, p.external_id, p.name, p.description, p.resource, p.action, \n p.effect as \"effect_str\", p.conditions, p.created_at, p.updated_at\n FROM policies p\n INNER JOIN group_policies gp ON p.policy_id = gp.policy_id\n WHERE gp.group_id = $1",
4 | "describe": {
5 | "columns": [
6 | {
7 | "ordinal": 0,
8 | "name": "policy_id",
9 | "type_info": "Int8"
10 | },
11 | {
12 | "ordinal": 1,
13 | "name": "external_id",
14 | "type_info": "Uuid"
15 | },
16 | {
17 | "ordinal": 2,
18 | "name": "name",
19 | "type_info": "Varchar"
20 | },
21 | {
22 | "ordinal": 3,
23 | "name": "description",
24 | "type_info": "Text"
25 | },
26 | {
27 | "ordinal": 4,
28 | "name": "resource",
29 | "type_info": "Varchar"
30 | },
31 | {
32 | "ordinal": 5,
33 | "name": "action",
34 | "type_info": "Varchar"
35 | },
36 | {
37 | "ordinal": 6,
38 | "name": "effect_str",
39 | "type_info": "Varchar"
40 | },
41 | {
42 | "ordinal": 7,
43 | "name": "conditions",
44 | "type_info": "Jsonb"
45 | },
46 | {
47 | "ordinal": 8,
48 | "name": "created_at",
49 | "type_info": "Timestamptz"
50 | },
51 | {
52 | "ordinal": 9,
53 | "name": "updated_at",
54 | "type_info": "Timestamptz"
55 | }
56 | ],
57 | "parameters": {
58 | "Left": [
59 | "Int8"
60 | ]
61 | },
62 | "nullable": [
63 | false,
64 | false,
65 | false,
66 | true,
67 | false,
68 | false,
69 | false,
70 | true,
71 | false,
72 | false
73 | ]
74 | },
75 | "hash": "2c42062a084c2b179b9f84bfcc808f01694dcd8187589bd9c852b4b7853ec821"
76 | }
77 |
--------------------------------------------------------------------------------
/.sqlx/query-521cb02df6a1e3a7aa5d633bdbf446f6aabbffa1c0f0acd423783c6369062b3c.json:
--------------------------------------------------------------------------------
1 | {
2 | "db_name": "PostgreSQL",
3 | "query": "SELECT p.policy_id, p.external_id, p.name, p.description, p.resource, p.action, \n p.effect as \"effect_str\", p.conditions, p.created_at, p.updated_at\n FROM policies p\n INNER JOIN user_policies up ON p.policy_id = up.policy_id\n WHERE up.user_id = $1",
4 | "describe": {
5 | "columns": [
6 | {
7 | "ordinal": 0,
8 | "name": "policy_id",
9 | "type_info": "Int8"
10 | },
11 | {
12 | "ordinal": 1,
13 | "name": "external_id",
14 | "type_info": "Uuid"
15 | },
16 | {
17 | "ordinal": 2,
18 | "name": "name",
19 | "type_info": "Varchar"
20 | },
21 | {
22 | "ordinal": 3,
23 | "name": "description",
24 | "type_info": "Text"
25 | },
26 | {
27 | "ordinal": 4,
28 | "name": "resource",
29 | "type_info": "Varchar"
30 | },
31 | {
32 | "ordinal": 5,
33 | "name": "action",
34 | "type_info": "Varchar"
35 | },
36 | {
37 | "ordinal": 6,
38 | "name": "effect_str",
39 | "type_info": "Varchar"
40 | },
41 | {
42 | "ordinal": 7,
43 | "name": "conditions",
44 | "type_info": "Jsonb"
45 | },
46 | {
47 | "ordinal": 8,
48 | "name": "created_at",
49 | "type_info": "Timestamptz"
50 | },
51 | {
52 | "ordinal": 9,
53 | "name": "updated_at",
54 | "type_info": "Timestamptz"
55 | }
56 | ],
57 | "parameters": {
58 | "Left": [
59 | "Int8"
60 | ]
61 | },
62 | "nullable": [
63 | false,
64 | false,
65 | false,
66 | true,
67 | false,
68 | false,
69 | false,
70 | true,
71 | false,
72 | false
73 | ]
74 | },
75 | "hash": "521cb02df6a1e3a7aa5d633bdbf446f6aabbffa1c0f0acd423783c6369062b3c"
76 | }
77 |
--------------------------------------------------------------------------------
/src/domain/user.rs:
--------------------------------------------------------------------------------
1 | use std::fmt;
2 |
3 | use regex::Regex;
4 | use secrecy::{ExposeSecret, SecretString};
5 | use serde::Deserialize;
6 |
/// A syntactically validated email address.
///
/// Construct via `TryFrom<&str>` (or serde deserialization), which
/// enforces the validation regex, so a held `Email` is always well-formed.
#[derive(Debug, Clone)]
pub struct Email(String);
9 |
10 | impl TryFrom<&str> for Email {
11 | type Error = anyhow::Error;
12 |
13 | fn try_from(value: &str) -> Result {
14 | let email_re = Regex::new(r"^[\w.+-]+@[\w-]+\.[\w.-]+$").unwrap();
15 | if !email_re.is_match(value) {
16 | anyhow::bail!("invalid email");
17 | }
18 | Ok(Self(value.to_owned()))
19 | }
20 | }
21 |
22 | impl<'de> Deserialize<'de> for Email {
23 | fn deserialize(deserializer: D) -> Result
24 | where
25 | D: serde::Deserializer<'de>,
26 | {
27 | let email = String::deserialize(deserializer)?;
28 | Self::try_from(email.as_str()).map_err(|e| serde::de::Error::custom(e.to_string()))
29 | }
30 | }
31 |
32 | impl fmt::Display for Email {
33 | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
34 | write!(f, "{}", self.0)
35 | }
36 | }
37 |
38 | impl AsRef for Email {
39 | fn as_ref(&self) -> &str {
40 | self.0.as_str()
41 | }
42 | }
43 |
/// A length-checked plaintext password, held in a `SecretString` so the
/// raw value is not exposed accidentally (e.g. via `Debug` output).
/// Construct via `TryFrom<&str>` or serde deserialization.
#[derive(Debug, Clone)]
pub struct Password(SecretString);
46 |
47 | impl TryFrom<&str> for Password {
48 | type Error = anyhow::Error;
49 |
50 | fn try_from(value: &str) -> Result {
51 | if value.len() < 8 {
52 | anyhow::bail!("password too short");
53 | }
54 |
55 | Ok(Self(SecretString::from(value)))
56 | }
57 | }
58 |
59 | impl<'de> Deserialize<'de> for Password {
60 | fn deserialize(deserializer: D) -> Result
61 | where
62 | D: serde::Deserializer<'de>,
63 | {
64 | let password = String::deserialize(deserializer)?;
65 |
66 | Ok(Self::try_from(password.as_str()).map_err(|e| serde::de::Error::custom(e.to_string())))?
67 | }
68 | }
69 |
impl Password {
    /// Exposes the underlying plaintext password.
    ///
    /// Keep the returned slice short-lived (e.g. pass it straight to the
    /// hasher) so the secret does not outlive its use.
    pub fn expose(&self) -> &str {
        self.0.expose_secret()
    }
}
75 |
--------------------------------------------------------------------------------
/.sqlx/query-f240852fc5c60f82c7a1412a53fe42db6106195d8b2e20e9bd80d378934b34e7.json:
--------------------------------------------------------------------------------
1 | {
2 | "db_name": "PostgreSQL",
3 | "query": "INSERT INTO policies (external_id, name, description, resource, action, effect, conditions) \n VALUES ($1, $2, $3, $4, $5, $6, $7) \n RETURNING policy_id, external_id, name, description, resource, action, \n effect as \"effect_str\", conditions, created_at, updated_at",
4 | "describe": {
5 | "columns": [
6 | {
7 | "ordinal": 0,
8 | "name": "policy_id",
9 | "type_info": "Int8"
10 | },
11 | {
12 | "ordinal": 1,
13 | "name": "external_id",
14 | "type_info": "Uuid"
15 | },
16 | {
17 | "ordinal": 2,
18 | "name": "name",
19 | "type_info": "Varchar"
20 | },
21 | {
22 | "ordinal": 3,
23 | "name": "description",
24 | "type_info": "Text"
25 | },
26 | {
27 | "ordinal": 4,
28 | "name": "resource",
29 | "type_info": "Varchar"
30 | },
31 | {
32 | "ordinal": 5,
33 | "name": "action",
34 | "type_info": "Varchar"
35 | },
36 | {
37 | "ordinal": 6,
38 | "name": "effect_str",
39 | "type_info": "Varchar"
40 | },
41 | {
42 | "ordinal": 7,
43 | "name": "conditions",
44 | "type_info": "Jsonb"
45 | },
46 | {
47 | "ordinal": 8,
48 | "name": "created_at",
49 | "type_info": "Timestamptz"
50 | },
51 | {
52 | "ordinal": 9,
53 | "name": "updated_at",
54 | "type_info": "Timestamptz"
55 | }
56 | ],
57 | "parameters": {
58 | "Left": [
59 | "Uuid",
60 | "Varchar",
61 | "Text",
62 | "Varchar",
63 | "Varchar",
64 | "Varchar",
65 | "Jsonb"
66 | ]
67 | },
68 | "nullable": [
69 | false,
70 | false,
71 | false,
72 | true,
73 | false,
74 | false,
75 | false,
76 | true,
77 | false,
78 | false
79 | ]
80 | },
81 | "hash": "f240852fc5c60f82c7a1412a53fe42db6106195d8b2e20e9bd80d378934b34e7"
82 | }
83 |
--------------------------------------------------------------------------------
/templates/forgot_password.html:
--------------------------------------------------------------------------------
1 | {% block content %}
2 |
3 | Esqueci a Senha
4 |
5 | {% if error %}
6 | {{ error }}
7 | {% endif %}
8 |
9 |
10 |
11 |
12 |
18 |
19 |
20 | Voltar ao Login
21 |
22 |
23 |
24 |
63 | {% endblock %}
64 |
--------------------------------------------------------------------------------
/.sqlx/query-3742b4721561ed076810a5eb757d0d9b21914813e2e468a102e6fb20cf68ee9e.json:
--------------------------------------------------------------------------------
1 | {
2 | "db_name": "PostgreSQL",
3 | "query": "SELECT DISTINCT p.policy_id, p.external_id, p.name, p.description, p.resource, p.action, \n p.effect as \"effect_str\", p.conditions, p.created_at, p.updated_at\n FROM policies p\n LEFT JOIN user_policies up ON p.policy_id = up.policy_id AND up.user_id = $1\n LEFT JOIN group_policies gp ON p.policy_id = gp.policy_id\n LEFT JOIN user_groups ug ON gp.group_id = ug.group_id AND ug.user_id = $1\n WHERE up.user_id IS NOT NULL OR ug.user_id IS NOT NULL",
4 | "describe": {
5 | "columns": [
6 | {
7 | "ordinal": 0,
8 | "name": "policy_id",
9 | "type_info": "Int8"
10 | },
11 | {
12 | "ordinal": 1,
13 | "name": "external_id",
14 | "type_info": "Uuid"
15 | },
16 | {
17 | "ordinal": 2,
18 | "name": "name",
19 | "type_info": "Varchar"
20 | },
21 | {
22 | "ordinal": 3,
23 | "name": "description",
24 | "type_info": "Text"
25 | },
26 | {
27 | "ordinal": 4,
28 | "name": "resource",
29 | "type_info": "Varchar"
30 | },
31 | {
32 | "ordinal": 5,
33 | "name": "action",
34 | "type_info": "Varchar"
35 | },
36 | {
37 | "ordinal": 6,
38 | "name": "effect_str",
39 | "type_info": "Varchar"
40 | },
41 | {
42 | "ordinal": 7,
43 | "name": "conditions",
44 | "type_info": "Jsonb"
45 | },
46 | {
47 | "ordinal": 8,
48 | "name": "created_at",
49 | "type_info": "Timestamptz"
50 | },
51 | {
52 | "ordinal": 9,
53 | "name": "updated_at",
54 | "type_info": "Timestamptz"
55 | }
56 | ],
57 | "parameters": {
58 | "Left": [
59 | "Int8"
60 | ]
61 | },
62 | "nullable": [
63 | false,
64 | false,
65 | false,
66 | true,
67 | false,
68 | false,
69 | false,
70 | true,
71 | false,
72 | false
73 | ]
74 | },
75 | "hash": "3742b4721561ed076810a5eb757d0d9b21914813e2e468a102e6fb20cf68ee9e"
76 | }
77 |
--------------------------------------------------------------------------------
/hurl/run_hurl_tests.sh:
--------------------------------------------------------------------------------
#!/bin/bash
#
# Runs every .hurl file in $HURL_DIR against a locally running server.
# Usage: ./run_hurl_tests.sh [--verbose|-v]

set -e

# Colors
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
NC='\033[0m' # No Color

HURL_DIR="hurl"
VERBOSE=false

if [[ "$1" == "--verbose" ]] || [[ "$1" == "-v" ]]; then
    VERBOSE=true
fi

# Credentials consumed by the .hurl files; overridable via the environment.
export admin_email="${ADMIN_EMAIL:-admin@example.com}"
export admin_password="${ADMIN_PASSWORD:-Admin123}"
export group_user_username="${GROUP_USER_USERNAME:-testuser}"
export group_user_password="${GROUP_USER_PASSWORD:-Test123}"

echo -e "${YELLOW}🧪 Running Hurl tests...${NC}"
echo ""

# Fail fast if the server is not reachable.
if ! curl -s http://localhost:3000/api/health > /dev/null 2>&1; then
    echo -e "${RED}❌ Server is not running on http://localhost:3000${NC}"
    echo -e "${YELLOW}💡 Start the server first:${NC}"
    echo "   cargo run"
    exit 1
fi

echo -e "${GREEN}✓ Server is running${NC}"
echo ""

total_files=0
passed_files=0
failed_files=0

for hurl_file in "$HURL_DIR"/*.hurl; do
    # The glob expands to itself when no .hurl files exist; skip non-files.
    if [ ! -f "$hurl_file" ]; then
        continue
    fi

    total_files=$((total_files + 1))
    filename=$(basename "$hurl_file")

    echo -e "${YELLOW}▶ Running $filename${NC}"

    if [ "$VERBOSE" = true ]; then
        if hurl --test --very-verbose "$hurl_file"; then
            echo -e "${GREEN}✓ $filename passed${NC}"
            passed_files=$((passed_files + 1))
        else
            echo -e "${RED}✗ $filename failed${NC}"
            failed_files=$((failed_files + 1))
        fi
    else
        # BUG FIX: previously `hurl ... | grep -v "^$"` was used as the `if`
        # condition, so the pipeline's exit status came from grep — any
        # failing hurl run that printed a non-blank line was counted as a
        # pass. Capture hurl's own exit status first, then filter blank
        # lines for display only.
        hurl_output=$(hurl --test "$hurl_file" 2>&1) && hurl_status=0 || hurl_status=$?
        printf '%s\n' "$hurl_output" | grep -v "^$" || true

        if [ "$hurl_status" -eq 0 ]; then
            echo -e "${GREEN}✓ $filename passed${NC}"
            passed_files=$((passed_files + 1))
        else
            echo -e "${RED}✗ $filename failed${NC}"
            failed_files=$((failed_files + 1))
        fi
    fi

    echo ""
done

# Summary
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
echo -e "${YELLOW}📊 Test Summary:${NC}"
echo "   Total files: $total_files"
echo -e "   ${GREEN}Passed: $passed_files${NC}"
echo -e "   ${RED}Failed: $failed_files${NC}"
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"

if [ "$failed_files" -eq 0 ]; then
    echo -e "${GREEN}✨ All tests passed!${NC}"
    exit 0
else
    echo -e "${RED}💥 Some tests failed${NC}"
    exit 1
fi
86 |
--------------------------------------------------------------------------------
/templates/reset_password.html:
--------------------------------------------------------------------------------
1 | {% extends "base.html" %}
2 |
3 | {% block content %}
4 |
5 | Reset Password
6 |
7 | {% if error %}
8 | {{ error }}
9 | {% endif %}
10 |
11 | {% if success %}
12 | {{ success }}
13 | {% else %}
14 |
26 |
27 |
67 | {% endif %}
68 |
69 | {% endblock %}
70 |
--------------------------------------------------------------------------------
/templates/change_password.html:
--------------------------------------------------------------------------------
1 | {% extends "base.html" %}
2 |
3 | {% block content %}
4 |
5 | Change Password Required
6 |
7 |
8 | First Login Detected!
9 | For security reasons, you must change your password before continuing.
10 |
11 |
12 | {% if error %}
13 | {{ error }}
14 | {% endif %}
15 |
16 |
29 |
30 |
71 |
72 | {% endblock %}
73 |
--------------------------------------------------------------------------------
/templates/mail/password_reset.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 | Password Reset - Keyrunes
7 |
65 |
66 |
67 |
68 |
Password Reset - Keyrunes
69 |
70 |
Hello,
71 |
72 |
You requested a password reset. Click the button below to create a new password:
73 |
74 |
75 | Reset Password
76 |
77 |
78 |
Or copy and paste this link into your browser:
79 |
{{ reset_url }}
80 |
81 |
82 |
Important:
83 |
84 | - This link is valid for 24 hours
85 | - Can only be used once
86 | - If you didn't request this reset, please ignore this email
87 |
88 |
89 |
90 |
94 |
95 |
96 |
97 |
--------------------------------------------------------------------------------
/src/handler/logging.rs:
--------------------------------------------------------------------------------
1 | use axum::{extract::Request, middleware::Next, response::Response};
2 | use std::time::Instant;
3 |
/// Severity levels accepted by `init_logging`.
///
/// `Critical` has no direct `tracing` counterpart; `init_logging` maps it
/// to `LevelFilter::ERROR`.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum LogLevel {
    Info,
    Debug,
    Error,
    Critical,
}
11 |
12 | /// Initialize logging with given log level
13 | pub fn init_logging(level: LogLevel) {
14 | use tracing_subscriber::filter::LevelFilter;
15 |
16 | let filter = match level {
17 | LogLevel::Info => LevelFilter::INFO,
18 | LogLevel::Debug => LevelFilter::DEBUG,
19 | LogLevel::Error => LevelFilter::ERROR,
20 | LogLevel::Critical => LevelFilter::ERROR, // Map Critical to ERROR
21 | };
22 |
23 | tracing_subscriber::fmt()
24 | .with_max_level(filter)
25 | .with_target(false)
26 | .with_thread_ids(false)
27 | .with_file(true)
28 | .with_line_number(true)
29 | .init();
30 | }
31 |
32 | /// Request logging middleware - logs all requests
33 | ///
34 | /// NOTE: This middleware does NOT require ConnectInfo.
35 | /// It logs: method, path, status code, and response time.
36 | pub async fn request_logging_middleware(request: Request, next: Next) -> Response {
37 | let method = request.method().clone();
38 | let uri = request.uri().clone();
39 | let path = uri.path().to_string();
40 |
41 | let start = Instant::now();
42 |
43 | // Process request
44 | let response = next.run(request).await;
45 |
46 | let duration = start.elapsed();
47 | let status = response.status();
48 |
49 | // Log the request
50 | match status.as_u16() {
51 | 200..=299 => {
52 | tracing::info!(
53 | "{} {} - {} - {}ms",
54 | method,
55 | path,
56 | status.as_u16(),
57 | duration.as_millis()
58 | );
59 | }
60 | 400..=499 => {
61 | tracing::warn!(
62 | "{} {} - {} - {}ms",
63 | method,
64 | path,
65 | status.as_u16(),
66 | duration.as_millis()
67 | );
68 | }
69 | 500..=599 => {
70 | tracing::error!(
71 | "{} {} - {} - {}ms",
72 | method,
73 | path,
74 | status.as_u16(),
75 | duration.as_millis()
76 | );
77 | }
78 | _ => {
79 | tracing::debug!(
80 | "{} {} - {} - {}ms",
81 | method,
82 | path,
83 | status.as_u16(),
84 | duration.as_millis()
85 | );
86 | }
87 | }
88 |
89 | response
90 | }
91 |
#[cfg(test)]
mod tests {
    use super::*;

    /// `LogLevel` derives `PartialEq`/`Eq`; sanity-check the comparisons.
    #[test]
    fn test_log_level_equality() {
        assert!(LogLevel::Info == LogLevel::Info);
        assert!(LogLevel::Debug == LogLevel::Debug);
        assert!(LogLevel::Info != LogLevel::Debug);
    }

    /// A bound `LogLevel` value compares equal to its own variant.
    #[test]
    fn test_log_level_debug() {
        let level = LogLevel::Debug;
        assert!(matches!(level, LogLevel::Debug));
    }
}
109 |
--------------------------------------------------------------------------------
/CODE_OF_CONDUCT.md:
--------------------------------------------------------------------------------
1 | # KeyRunes Code of Conduct
2 |
3 | ## Our Pledge
4 |
5 | In the interest of fostering an open and welcoming community, we as contributors and maintainers pledge to make participation in KeyRunes a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, gender identity and expression, level of experience, nationality, personal appearance, race, religion, or sexual identity and orientation.
6 |
7 | We pledge to act and interact in ways that contribute to an open, inclusive, and supportive environment.
8 |
9 | ---
10 |
11 | ## Our Standards
12 |
13 | Examples of behavior that contributes to a positive environment for our community include:
14 |
15 | - Using welcoming and inclusive language.
16 | - Being respectful of differing viewpoints and experiences.
17 | - Gracefully accepting constructive criticism.
18 | - Focusing on what is best for the community.
19 | - Showing empathy toward other community members.
20 |
21 | Examples of unacceptable behavior by participants include:
22 |
23 | - Harassment, intimidation, or discrimination in any form.
24 | - Public or private insults or attacks.
25 | - Trolling, insulting/derogatory comments, or personal or political attacks.
26 | - Publishing others’ private information, such as a physical or electronic address, without explicit permission.
27 | - Other conduct which could reasonably be considered inappropriate in a professional setting.
28 |
29 | ---
30 |
31 | ## Enforcement Responsibilities
32 |
33 | Project maintainers are responsible for clarifying and enforcing our standards of acceptable behavior and will take appropriate and fair corrective action in response to any behavior that violates this Code of Conduct.
34 |
35 | Maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned with this Code of Conduct, or to temporarily or permanently ban any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful.
36 |
37 | ---
38 |
39 | ## Reporting Guidelines
40 |
41 | If you are subject to or witness unacceptable behavior, or have any other concerns, please report it by contacting the project maintainers at **[contact@jonatasoliveira.dev]**. All complaints will be reviewed and investigated promptly and fairly.
42 |
43 | You can also report violations to the broader community by contacting GitHub support if necessary.
44 |
45 | ---
46 |
47 | ## Scope
48 |
49 | This Code of Conduct applies both within project spaces and in public spaces when an individual is representing the project or its community. Examples of representing a project include using an official project e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event.
50 |
51 | ---
52 |
53 | ## Enforcement
54 |
55 | Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting the project team at **[contact@jonatasoliveira.dev]**. All complaints will be reviewed and investigated and will result in a response that is deemed necessary and appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident.
56 |
57 | Project maintainers who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by the community.
58 |
59 | ---
60 |
61 | ## Attribution
62 |
63 | This Code of Conduct is adapted from the [Contributor Covenant](https://www.contributor-covenant.org/), version 2.1, available at https://www.contributor-covenant.org/version/2/1/code_of_conduct.html
64 |
--------------------------------------------------------------------------------
/cliff.toml:
--------------------------------------------------------------------------------
1 | # git-cliff ~ configuration file
2 | # https://git-cliff.org/docs/configuration
3 |
4 |
5 | [changelog]
6 | # A Tera template to be rendered for each release in the changelog.
7 | # See https://keats.github.io/tera/docs/#introduction
8 | body = """
9 | {% if version %}\
10 | ## [{{ version | trim_start_matches(pat="v") }}] - {{ timestamp | date(format="%Y-%m-%d") }}
11 | {% else %}\
12 | ## [unreleased]
13 | {% endif %}\
14 | {% for group, commits in commits | group_by(attribute="group") %}
15 | ### {{ group | striptags | trim | upper_first }}
16 | {% for commit in commits %}
17 | - {% if commit.scope %}*({{ commit.scope }})* {% endif %}\
18 | {% if commit.breaking %}[**breaking**] {% endif %}\
19 | {{ commit.message | upper_first }}\
20 | {% endfor %}
21 | {% endfor %}
22 | """
23 | # Remove leading and trailing whitespaces from the changelog's body.
24 | trim = true
25 | # Render body even when there are no releases to process.
26 | render_always = true
27 | # An array of regex based postprocessors to modify the changelog.
28 | postprocessors = [
29 | # Replace the placeholder with a URL.
30 | #{ pattern = '', replace = "https://github.com/orhun/git-cliff" },
31 | ]
32 | # render body even when there are no releases to process
33 | # render_always = true
34 | # output file path
35 | # output = "test.md"
36 |
37 | [git]
38 | # Parse commits according to the conventional commits specification.
39 | # See https://www.conventionalcommits.org
40 | conventional_commits = true
41 | # Exclude commits that do not match the conventional commits specification.
42 | filter_unconventional = true
43 | # Require all commits to be conventional.
44 | # Takes precedence over filter_unconventional.
45 | require_conventional = false
46 | # Split commits on newlines, treating each line as an individual commit.
47 | split_commits = false
48 | # An array of regex based parsers to modify commit messages prior to further processing.
49 | commit_preprocessors = [
50 | # Replace issue numbers with link templates to be updated in `changelog.postprocessors`.
51 | #{ pattern = '\((\w+\s)?#([0-9]+)\)', replace = "([#${2}](/issues/${2}))"},
52 | # Check spelling of the commit message using https://github.com/crate-ci/typos.
53 | # If the spelling is incorrect, it will be fixed automatically.
54 | #{ pattern = '.*', replace_command = 'typos --write-changes -' },
55 | ]
56 | # Prevent commits that are breaking from being excluded by commit parsers.
57 | protect_breaking_commits = false
58 | # An array of regex based parsers for extracting data from the commit message.
59 | # Assigns commits to groups.
60 | # Optionally sets the commit's scope and can decide to exclude commits from further processing.
61 | commit_parsers = [
62 | { message = "^feat", group = "🚀 Features" },
63 | { message = "^fix", group = "🐛 Bug Fixes" },
64 | { message = "^doc", group = "📚 Documentation" },
65 | { message = "^perf", group = "⚡ Performance" },
66 | { message = "^refactor", group = "🚜 Refactor" },
67 | { message = "^style", group = "🎨 Styling" },
68 | { message = "^test", group = "🧪 Testing" },
69 | { message = "^chore\\(release\\): prepare for", skip = true },
70 | { message = "^chore\\(deps.*\\)", skip = true },
71 | { message = "^chore\\(pr\\)", skip = true },
72 | { message = "^chore\\(pull\\)", skip = true },
73 | { message = "^chore|^ci", group = "⚙️ Miscellaneous Tasks" },
74 | { body = ".*security", group = "🛡️ Security" },
75 | { message = "^revert", group = "◀️ Revert" },
76 | { message = ".*", group = "💼 Other" },
77 | ]
78 | # Exclude commits that are not matched by any commit parser.
79 | filter_commits = false
80 | # An array of link parsers for extracting external references, and turning them into URLs, using regex.
81 | link_parsers = []
82 | # Include only the tags that belong to the current branch.
83 | use_branch_tags = false
84 | # Order releases topologically instead of chronologically.
85 | topo_order = false
86 | # Order commits (not releases) topologically instead of chronologically.
87 | topo_order_commits = true
88 | # Order of commits in each group/release within the changelog.
89 | # Allowed values: newest, oldest
90 | sort_commits = "oldest"
91 | # Process submodules commits
92 | recurse_submodules = false
93 |
--------------------------------------------------------------------------------
/.github/workflows/ci.yml:
--------------------------------------------------------------------------------
1 | name: CI
2 |
3 | on:
4 | push:
5 | branches: [ main, develop ]
6 | pull_request:
7 | branches: [ main, develop ]
8 |
9 | env:
10 | CARGO_TERM_COLOR: always
11 | RUST_BACKTRACE: 1
12 | DATABASE_URL: postgresql://postgres:postgres@localhost:5432/keyrunes_test
13 | JWT_SECRET: test_secret_key_for_ci
14 |
15 | jobs:
16 | test:
17 | name: Test Suite
18 | runs-on: ubuntu-latest
19 |
20 | services:
21 | postgres:
22 | image: postgres:17
23 | env:
24 | POSTGRES_USER: postgres
25 | POSTGRES_PASSWORD: postgres
26 | POSTGRES_DB: postgres
27 | options: >-
28 | --health-cmd pg_isready
29 | --health-interval 10s
30 | --health-timeout 5s
31 | --health-retries 5
32 | ports:
33 | - 5432:5432
34 |
35 | steps:
36 | - name: Checkout code
37 | uses: actions/checkout@v4
38 |
39 | - name: Install Rust toolchain
40 | uses: dtolnay/rust-toolchain@stable
41 | with:
42 | components: rustfmt, clippy
43 |
44 | - name: Cache cargo registry
45 | uses: actions/cache@v4
46 | with:
47 | path: ~/.cargo/registry
48 | key: ${{ runner.os }}-cargo-registry-${{ hashFiles('**/Cargo.lock') }}
49 | restore-keys: |
50 | ${{ runner.os }}-cargo-registry-
51 |
52 | - name: Cache cargo index
53 | uses: actions/cache@v4
54 | with:
55 | path: ~/.cargo/git
56 | key: ${{ runner.os }}-cargo-git-${{ hashFiles('**/Cargo.lock') }}
57 | restore-keys: |
58 | ${{ runner.os }}-cargo-git-
59 |
60 | - name: Cache cargo build
61 | uses: actions/cache@v4
62 | with:
63 | path: target
64 | key: ${{ runner.os }}-cargo-build-target-${{ hashFiles('**/Cargo.lock') }}
65 | restore-keys: |
66 | ${{ runner.os }}-cargo-build-target-
67 |
68 | - name: Install sqlx-cli
69 | run: cargo install sqlx-cli --no-default-features --features postgres
70 |
71 | - name: Create test database
72 | run: |
73 | sqlx database create --database-url $DATABASE_URL
74 |
75 | - name: Run migrations
76 | run: |
77 | sqlx migrate run --database-url $DATABASE_URL
78 |
79 | - name: Check formatting
80 | run: cargo fmt --all -- --check
81 |
82 | - name: Run clippy
83 | run: cargo clippy -- -D warnings
84 |
85 | - name: Build
86 | run: cargo build --verbose
87 |
88 | - name: Run tests
89 | run: cargo test --verbose
90 |
91 | coverage:
92 | name: Code Coverage
93 | runs-on: ubuntu-latest
94 |
95 | services:
96 | postgres:
97 | image: postgres:17
98 | env:
99 | POSTGRES_USER: postgres
100 | POSTGRES_PASSWORD: postgres
101 | POSTGRES_DB: postgres
102 | options: >-
103 | --health-cmd pg_isready
104 | --health-interval 10s
105 | --health-timeout 5s
106 | --health-retries 5
107 | ports:
108 | - 5432:5432
109 |
110 | steps:
111 | - name: Checkout code
112 | uses: actions/checkout@v4
113 |
114 | - name: Install Rust toolchain
115 | uses: dtolnay/rust-toolchain@stable
116 |
117 | - name: Install sqlx-cli
118 | run: cargo install sqlx-cli --no-default-features --features postgres
119 |
120 | - name: Create test database
121 | run: |
122 | sqlx database create --database-url $DATABASE_URL
123 |
124 | - name: Run migrations
125 | run: |
126 | sqlx migrate run --database-url $DATABASE_URL
127 |
128 | - name: Install tarpaulin
129 | run: cargo install cargo-tarpaulin
130 |
131 | - name: Generate coverage
132 | run: cargo tarpaulin --verbose --all-features --workspace --timeout 120 --out Xml
133 |
134 | - name: Upload coverage to Codecov
135 | uses: codecov/codecov-action@v4
136 | with:
137 | token: ${{ secrets.CODECOV_TOKEN }}
138 | fail_ci_if_error: false
139 |
140 | security-audit:
141 | name: Security Audit
142 | runs-on: ubuntu-latest
143 | steps:
144 | - name: Checkout code
145 | uses: actions/checkout@v4
146 |
147 | - name: Run security audit
148 | uses: rustsec/audit-check@v1
149 | with:
150 | token: ${{ secrets.GITHUB_TOKEN }}
151 | ignore: RUSTSEC-2023-0071
152 |
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | # Contributing to KeyRunes
2 |
3 | First off, thank you for considering contributing to KeyRunes! Whether it's code, documentation, bug reports, or feature requests, your help makes KeyRunes better for everyone. This guide will help you get started and make the process smooth for you and the maintainers.
4 |
5 | ---
6 |
7 | ## Table of Contents
8 |
9 | 1. [How to Contribute](#how-to-contribute)
10 | 2. [Reporting Bugs](#reporting-bugs)
11 | 3. [Requesting Features](#requesting-features)
12 | 4. [Development Setup](#development-setup)
13 | 5. [Code Style and Standards](#code-style-and-standards)
14 | 6. [Submitting Pull Requests](#submitting-pull-requests)
15 | 7. [Community Guidelines](#community-guidelines)
16 | 8. [Acknowledgements](#acknowledgements)
17 |
18 | ---
19 |
20 | ## How to Contribute
21 |
22 | There are many ways you can contribute:
23 |
24 | - **Code Contributions**: Fix bugs, implement new features, or improve existing code.
25 | - **Documentation**: Improve README, write guides, or clarify examples.
26 | - **Testing**: Add or improve unit, integration, or end-to-end tests.
27 | - **Feedback**: Report issues, suggest features, or provide performance insights.
28 |
29 | Before contributing code, please check the existing issues to avoid duplication.
30 |
31 | ---
32 |
33 | ## Reporting Bugs
34 |
35 | If you find a bug, please submit an issue with:
36 |
37 | - A clear and descriptive title.
38 | - Steps to reproduce the problem.
39 | - Expected vs actual behavior.
40 | - Relevant environment information (OS, Rust version, DB, etc.).
41 | - Logs or screenshots, if applicable.
42 |
43 | This helps maintainers reproduce and fix the issue faster.
44 |
45 | ---
46 |
47 | ## Requesting Features
48 |
49 | Feature requests should include:
50 |
51 | - A clear description of the feature.
52 | - Why it is needed and how it improves KeyRunes.
53 | - Optional examples or mockups.
54 |
55 | Feature requests are discussed openly and may be implemented collaboratively.
56 |
57 | ---
58 |
59 | ## Development Setup
60 |
61 | Follow these steps to get KeyRunes running locally:
62 |
63 | 1. **Clone the repository:**
64 | ```bash
65 | git clone https://github.com/jonatasoli/keyrunes.git && cd keyrunes
66 | ```
67 |
68 | Start the database and services using Docker Compose:
69 |
70 | Create a file named `.env`. This allows Docker Compose to pick up env vars instead of manually passing them on the CLI every time.
71 | Setup your env variables with values using `.env-example` file listed in the main directory as an example.
72 |
73 | Run the below to create db tables / migrations
74 | ```bash
75 | sqlx migrate run
76 | ```
77 |
78 | Make sure to add the `DATABASE_URL` env var to your `.env` when running the above command. Take note:
79 | you can run the app directly without issue, but an SQL migration error can occur because the `sqlx` command looks for the
80 | database URL env var.
81 |
82 | Run the below
83 | ```bash
84 | docker-compose up
85 | ```
86 |
87 | Run the web application:
88 |
89 | ```bash
90 | cargo run --bin keyrunes
91 | ```
92 |
93 | Run the CLI application:
94 | ```bash
95 | cargo run --bin cli
96 | ```
97 |
98 | Before you run tests, make sure to register a test user with the details below. Some unit tests depend on this data.
99 | Also make sure you have run ``cargo build`` so the binary is available in the release directory.
100 |
101 | ```
102 | username = test
103 | email = test@gmail.com
104 | password = password
105 | ```
106 |
107 | Run tests:
108 | ```bash
109 | cargo test
110 | ```
111 |
112 | ## Code Style and Standards
113 |
114 | Follow Rust community conventions.
115 |
116 | Use meaningful variable and function names.
117 |
118 | Write tests for new features and bug fixes.
119 |
120 | Keep code modular and well-documented.
121 |
122 | Use Clippy to catch warnings:
123 |
124 | ```bash
125 | cargo clippy
126 | ```
127 |
128 | ## Submitting Pull Requests
129 | Fork the repository.
130 |
131 | Create a new branch for your feature/bugfix.
132 |
133 | Write code and tests according to the guidelines.
134 |
135 | Ensure all tests pass.
136 |
137 | Submit a pull request with a clear description of the changes.
138 |
139 | Pull requests are reviewed collaboratively. Be prepared to make changes based on feedback.
140 |
141 | ## Community Guidelines
142 |
143 | - We value respectful and constructive communication. Please follow:
144 |
145 | - Be respectful to all contributors.
146 |
147 | - Provide clear and concise feedback.
148 |
149 | - Stay on-topic and avoid off-topic discussions in issues/PRs.
150 |
151 | - Follow the [Code of Conduct](CODE_OF_CONDUCT.md)
--------------------------------------------------------------------------------
/tests/admin_view_test.rs:
--------------------------------------------------------------------------------
1 | use axum::{
2 | body::Body,
3 | http::{Request, StatusCode},
4 | };
5 | use tower::ServiceExt;
6 |
7 | #[tokio::test]
8 | async fn test_admin_endpoint_structure() {
9 | let endpoints = vec![
10 | "/api/admin/dashboard",
11 | "/api/admin/users",
12 | "/api/admin/user",
13 | "/api/admin/groups",
14 | "/api/admin/policies",
15 | "/api/admin/users/:user_id/groups/:group_id",
16 | "/api/admin/check-permission",
17 | ];
18 |
19 | for endpoint in endpoints {
20 | println!("Endpoint exists: {}", endpoint);
21 | }
22 | }
23 |
#[test]
fn test_check_permission_request_structure() {
    use serde_json::json;

    // Shape of the body POSTed to /api/admin/check-permission.
    let payload = json!({
        "user_id": 1,
        "group_name": "developers",
        "resource": "user:*",
        "action": "read"
    });

    assert!(payload["user_id"].is_number());
    for key in ["group_name", "resource", "action"] {
        assert!(payload[key].is_string());
    }
}

#[test]
fn test_admin_dashboard_response_structure() {
    use serde_json::json;

    // Shape of the dashboard response, including the embedded admin record.
    let body = json!({
        "total_users": 10,
        "total_groups": 3,
        "total_policies": 5,
        "current_admin": {
            "user_id": 1,
            "username": "admin",
            "email": "admin@example.com",
            "groups": ["superadmin"]
        }
    });

    for key in ["total_users", "total_groups", "total_policies"] {
        assert!(body[key].is_number());
    }
    assert!(body["current_admin"]["groups"].is_array());
}

#[test]
fn test_user_list_response_structure() {
    use serde_json::json;

    // The user-list endpoint returns an array of user records.
    let body = json!([
        {
            "user_id": 1,
            "external_id": "550e8400-e29b-41d4-a716-446655440000",
            "email": "user@example.com",
            "username": "testuser",
            "first_login": false,
            "groups": ["users"],
            "created_at": "2025-11-27T10:00:00Z"
        }
    ]);

    assert!(body.is_array());
    let first = &body[0];
    assert!(first["user_id"].is_number());
    assert!(first["email"].is_string());
    assert!(first["groups"].is_array());
}

#[test]
fn test_group_creation_request_structure() {
    use serde_json::json;

    let payload = json!({
        "name": "developers",
        "description": "Development team"
    });

    assert!(payload["name"].is_string());
    // The description is optional: either a string or null.
    let description = &payload["description"];
    assert!(description.is_string() || description.is_null());
}

#[test]
fn test_assign_group_response_structure() {
    use serde_json::json;

    let body = json!({
        "message": "User assigned to group successfully"
    });

    assert_eq!(body["message"], "User assigned to group successfully");
}

#[test]
fn test_permission_check_response_structure() {
    use serde_json::json;

    let body = json!({
        "user_id": 1,
        "group_name": "developers",
        "resource": "user:*",
        "action": "read",
        "has_permission": true
    });

    assert!(body["user_id"].is_number());
    for key in ["group_name", "resource", "action"] {
        assert!(body[key].is_string());
    }
    assert!(body["has_permission"].is_boolean());
}
127 |
// Edge cases around identifiers and permission patterns.
#[test]
fn test_empty_group_name_invalid() {
    let group_name = "";
    assert_eq!(group_name.len(), 0);
}

#[test]
fn test_invalid_user_id() {
    // Negative ids are never valid user identifiers.
    let candidate: i64 = -1;
    assert!(candidate.is_negative());
}

#[test]
fn test_wildcard_resource_patterns() {
    // Supported resource patterns: full wildcard, scoped wildcards, and
    // a concrete self-reference. All must be non-empty.
    let patterns = [
        "*",          // All resources
        "user:*",     // All user resources
        "user:self",  // Own user resource
        "admin:*",    // All admin resources
    ];

    for pattern in patterns.iter() {
        assert!(!pattern.is_empty());
    }
}

#[test]
fn test_action_types() {
    // Every supported action keyword must be a non-empty string.
    for action in ["read", "write", "delete", "update", "*"].iter() {
        assert!(!action.is_empty());
    }
}
163 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # 🛡️ KeyRunes — Modern Access Control Engine in Rust
2 | [](https://github.com/jonatasoli/keyrunes/actions/workflows/ci.yml)
3 |
4 | **KeyRunes** is a high-performance, extensible authorization system designed to compete with and surpass traditional solutions like Keycloak. It brings together advanced access control models such as **RBAC**, **ABAC**, **ReBAC**, and **PBAC**, while offering a great developer experience and enterprise-grade scalability.
5 |
6 | Built for Rust. Inspired by RPG systems. Designed for security-critical platforms.
7 |
8 | > ⚙️ Built for Rust. Inspired by RPG systems. Designed for security-critical platforms.
9 |
10 | ---
11 |
12 | ## Key Features
13 |
14 | ### Advanced Authorization Models
15 |
16 | - **RBAC** (Role-Based Access Control): Global (realm) and per-client roles, including role composition.
17 | - **ABAC** (Attribute-Based Access Control): Policies based on dynamic user/environment attributes (e.g. time, department, device).
18 | - **ReBAC** (Relationship-Based Access Control): Authorization through graph-based relationships (e.g. ownership, collaboration).
19 | - **PBAC** (Policy-Based Access Control): Combine RBAC + ABAC in unified policies.
20 |
21 | ### Scalability & Performance
22 |
23 | - Lightweight **Policy Decision Point (PDP)** with <10ms latency at enterprise scale.
24 | - Optional in-process or external microservice deployment.
25 | - Distributed cache support to reduce calls to external sources (e.g. Keycloak/LDAP).
26 |
27 | ### Developer Experience
28 |
29 | - **Policy-as-Code** using YAML or Rego, versionable via Git.
30 | - CI/CD-ready: Run automated tests for policies.
31 | - Simulate access decisions before deployment with a rich UI.
32 | - SDKs (planned) for Rust, Java, Go, and Python for seamless integration.
33 |
34 | ### Audit & Compliance
35 |
36 | - Complete decision logs with metadata (timestamp, policy, attributes).
37 | - Automated rollback for failed policies in production.
38 | - Compliance reports for standards like HIPAA and PCI.
39 |
40 | ### Integration & Extensibility
41 |
42 | - Federate identities from Keycloak, Okta and others via OIDC.
43 | - Map custom IdP attributes into policies.
44 | - Webhook support for access denial events.
45 | - Plugin system for sourcing attributes from internal systems (CRM, HR).
46 |
47 | ### Multi-Tenant Support
48 |
49 | - Isolated policies and data per tenant.
50 | - Delegated administration (e.g. department leads managing roles).
51 |
52 | ### Real-World Use Cases
53 |
54 | - Hospitals (HIPAA): Role + location + shift access to medical records.
55 | - Banks: Enforce MFA outside corporate network.
56 | - E-commerce: Temporary supplier access.
57 | - IoT: Device-based publish/subscribe permissions.
58 |
59 | ---
60 |
61 | ## 📦 Roadmap (Milestones)
62 |
63 | | Phase | Focus |
64 | |-------|-------|
65 | | MVP | RBAC, Policy-as-Code, SDKs, Keycloak integration |
66 | | V1 | ABAC, ReBAC, Simulators, Attribute Graphs |
67 | | V2 | Multi-tenancy, Audit, Compliance tooling |
68 | | V3 | Edge-case handling, IoT, Delegated access UI |
69 |
70 | ---
71 |
72 | ## 🧪 Quickstart (WIP)
73 |
74 | Follow these steps to quickly start KeyRunes locally:
75 |
76 | ```bash
77 | # 1. Copy environment variables
78 | cp .env-example .env
79 |
80 | # 2. Start the database using Docker
81 | docker-compose up -d
82 |
83 | # 3. Run the web application
84 | cargo run --bin keyrunes
85 |
86 | # 4. Or run the CLI application
87 | cargo run --bin cli
88 |
89 | # 5. Run tests
90 | cargo test
91 |
92 | # 6. Stop the database container when done
93 | docker-compose down
94 | ```
95 | ---
96 |
97 | ## Project Structure (Planned)
98 |
```text
/src
  /core    # Policy engine
  /models  # Roles, attributes, relationships
  /parser  # Policy-as-code parser (YAML/Rego)
  /sdk     # API bindings
/tests
/docs
```
112 |
113 | ---
114 |
115 | ## Contributing
116 |
117 | Contributions are welcome! If you’re interested in:
118 | - Access control systems
119 | - Graph-based security
120 | - High-performance Rust services
121 |
122 | …then feel free to open issues, suggest ideas, or contribute code once we’re live 🚀
123 |
124 | See in [CONTRIBUTING](CONTRIBUTING.md)
125 |
126 | ---
127 |
128 | ## License
129 |
130 | [MIT](LICENSE)
131 | [CODE OF CONDUCT](CODE_OF_CONDUCT.md)
132 |
133 | ---
134 |
## 🧙‍♂️ About the Name
136 |
137 | Just like magical runes control access to forbidden realms in fantasy worlds, **KeyRunes** grants or denies access to sensitive resources: through logic, context, and relationships.
138 |
139 | > 🔒 **Security meets storytelling.**
140 |
--------------------------------------------------------------------------------
/hurl/user_roles.hurl:
--------------------------------------------------------------------------------
# Multi-group user lifecycle test.
# Flow: admin login -> create a `superadmin` user -> also add `users` group ->
# verify both groups are honored -> remove `superadmin` -> expect 403.
# Requires a running server on localhost:3000 and variables from hurl.env.

# Login as the seeded admin and capture a bearer token.
POST http://localhost:3000/api/login
Content-Type: application/json
{
"identity": "{{admin_email}}",
"password": "{{admin_password}}"
}
HTTP 200
[Captures]
admin_token: jsonpath "$.token"

# Create a test user that starts in the `superadmin` group only.
POST http://localhost:3000/api/admin/user
Content-Type: application/json
Authorization: Bearer {{admin_token}}
{
"email": "multiRole_{{newUuid}}@example.com",
"username": "multirole_{{newDate}}",
"password": "{{test_user_password}}",
"groups": ["superadmin"]
}
HTTP 201
[Captures]
test_user_id: jsonpath "$.user_id"
test_user_email: jsonpath "$.email"
[Asserts]
jsonpath "$.groups" count == 1
jsonpath "$.groups[0]" == "superadmin"

# Look up the id of the default `users` group.
GET http://localhost:3000/api/admin/groups
Authorization: Bearer {{admin_token}}
HTTP 200
[Captures]
users_group_id: jsonpath "$[?(@.name=='users')].group_id" nth 0
[Asserts]
jsonpath "$[?(@.name=='users')]" exists

# Add the test user to `users` as a second group.
POST http://localhost:3000/api/admin/users/{{test_user_id}}/groups/{{users_group_id}}
Authorization: Bearer {{admin_token}}
HTTP 200
[Asserts]
jsonpath "$.message" == "User assigned to group successfully"

# The user listing must now report two groups for the test user
# (the filter yields one match whose `groups` array has two entries).
GET http://localhost:3000/api/admin/users
Authorization: Bearer {{admin_token}}
HTTP 200
[Asserts]
jsonpath "$[?(@.user_id=={{test_user_id}})].groups" count == 1
jsonpath "$[?(@.user_id=={{test_user_id}})].groups[0]" count == 2

# Login as the test user; the issued token should carry both groups.
POST http://localhost:3000/api/login
Content-Type: application/json
{
"identity": "{{test_user_email}}",
"password": "{{test_user_password}}"
}
HTTP 200
[Captures]
test_user_token: jsonpath "$.token"
[Asserts]
jsonpath "$.token" exists
jsonpath "$.user.groups" count == 2
jsonpath "$.user.groups" contains "superadmin"
jsonpath "$.user.groups" contains "users"

# Superadmin membership grants access to the admin dashboard.
GET http://localhost:3000/api/admin/dashboard
Authorization: Bearer {{test_user_token}}
HTTP 200
[Asserts]
jsonpath "$.total_users" isInteger
jsonpath "$.total_groups" isInteger
jsonpath "$.current_admin.username" exists
jsonpath "$.current_admin.groups" count == 2
jsonpath "$.current_admin.groups" contains "superadmin"
jsonpath "$.current_admin.groups" contains "users"

# ...and to the admin user listing.
GET http://localhost:3000/api/admin/users
Authorization: Bearer {{test_user_token}}
HTTP 200
[Asserts]
jsonpath "$" isCollection
jsonpath "$[*].user_id" exists

# A user created without an explicit group list lands in `users`.
POST http://localhost:3000/api/admin/user
Content-Type: application/json
Authorization: Bearer {{test_user_token}}
{
"email": "created_by_multirole_{{newUuid}}@example.com",
"username": "created_{{newDate}}",
"password": "Created123"
}
HTTP 201
[Captures]
created_user_id: jsonpath "$.user_id"
[Asserts]
jsonpath "$.user_id" exists
jsonpath "$.groups" contains "users"

# Permission check against the `users` group policies.
POST http://localhost:3000/api/admin/check-permission
Content-Type: application/json
Authorization: Bearer {{test_user_token}}
{
"user_id": {{test_user_id}},
"group_name": "users",
"resource": "user:self",
"action": "read"
}
HTTP 200
[Asserts]
jsonpath "$.user_id" == {{test_user_id}}
jsonpath "$.group_name" == "users"
jsonpath "$.has_permission" isBoolean

# Permission check against the `superadmin` group policies.
POST http://localhost:3000/api/admin/check-permission
Content-Type: application/json
Authorization: Bearer {{test_user_token}}
{
"user_id": {{test_user_id}},
"group_name": "superadmin",
"resource": "admin:*",
"action": "*"
}
HTTP 200
[Asserts]
jsonpath "$.user_id" == {{test_user_id}}
jsonpath "$.group_name" == "superadmin"
jsonpath "$.has_permission" isBoolean

# Look up the `superadmin` group id so it can be revoked.
GET http://localhost:3000/api/admin/groups
Authorization: Bearer {{admin_token}}
HTTP 200
[Captures]
superadmin_group_id: jsonpath "$[?(@.name=='superadmin')].group_id" nth 0

# Revoke superadmin membership from the test user.
DELETE http://localhost:3000/api/admin/users/{{test_user_id}}/groups/{{superadmin_group_id}}
Authorization: Bearer {{admin_token}}
HTTP 200
[Asserts]
jsonpath "$.message" == "User removed from group successfully"

# A fresh login now only carries the `users` group.
POST http://localhost:3000/api/login
Content-Type: application/json
{
"identity": "{{test_user_email}}",
"password": "{{test_user_password}}"
}
HTTP 200
[Captures]
users_only_token: jsonpath "$.token"
[Asserts]
jsonpath "$.user.groups" count == 1
jsonpath "$.user.groups[0]" == "users"

# Without superadmin, admin endpoints must be forbidden.
GET http://localhost:3000/api/admin/users
Authorization: Bearer {{users_only_token}}
HTTP 403
155 |
--------------------------------------------------------------------------------
/src/services/email_service.rs:
--------------------------------------------------------------------------------
1 | use anyhow::{Context, Result};
2 | use lettre::{
3 | AsyncSmtpTransport, AsyncTransport, Message, Tokio1Executor, message::header::ContentType,
4 | transport::smtp::authentication::Credentials,
5 | };
6 | use std::sync::Arc;
7 | use tera::Tera;
8 |
9 | /// Service for sending emails via SMTP
10 | #[derive(Clone)]
11 | pub struct EmailService {
12 | smtp_username: String,
13 | smtp_password: String,
14 | smtp_host: String,
15 | smtp_port: u16,
16 | from_email: String,
17 | from_name: String,
18 | frontend_url: String,
19 | tera: Arc,
20 | }
21 |
22 | impl EmailService {
23 | /// Create a new EmailService from environment variables
24 | pub fn from_env(tera: Arc) -> Result {
25 | let smtp_username =
26 | std::env::var("SMTP_USERNAME").context("SMTP_USERNAME not set in environment")?;
27 | let smtp_password =
28 | std::env::var("SMTP_PASSWORD").context("SMTP_PASSWORD not set in environment")?;
29 | let smtp_host = std::env::var("SMTP_HOST").unwrap_or_else(|_| "smtp.gmail.com".to_string());
30 | let smtp_port = std::env::var("SMTP_PORT")
31 | .unwrap_or_else(|_| "587".to_string())
32 | .parse::()
33 | .context("Invalid SMTP_PORT")?;
34 | let from_email = std::env::var("FROM_EMAIL").unwrap_or_else(|_| smtp_username.clone());
35 | let from_name = std::env::var("FROM_NAME").unwrap_or_else(|_| "KeyRunes".to_string());
36 | let frontend_url =
37 | std::env::var("FRONTEND_URL").unwrap_or_else(|_| "http://localhost:3000".to_string());
38 |
39 | Ok(Self {
40 | smtp_username,
41 | smtp_password,
42 | smtp_host,
43 | smtp_port,
44 | from_email,
45 | from_name,
46 | frontend_url,
47 | tera,
48 | })
49 | }
50 |
51 | /// Send a password reset email
52 | pub async fn send_password_reset_email(&self, to_email: &str, reset_token: &str) -> Result<()> {
53 | let reset_url = format!(
54 | "{}/reset-password?forgot_password={}",
55 | self.frontend_url, reset_token
56 | );
57 |
58 | // Render HTML template using Tera
59 | let mut context = tera::Context::new();
60 | context.insert("reset_url", &reset_url);
61 |
62 | let html_body = self
63 | .tera
64 | .render("mail/password_reset.html", &context)
65 | .context("Failed to render email template")?;
66 |
67 | let email = Message::builder()
68 | .from(format!("{} <{}>", self.from_name, self.from_email).parse()?)
69 | .to(to_email.parse()?)
70 | .subject("Password Reset - Keyrunes")
71 | .header(ContentType::TEXT_HTML)
72 | .body(html_body)?;
73 |
74 | let creds = Credentials::new(self.smtp_username.clone(), self.smtp_password.clone());
75 |
76 | let mailer: AsyncSmtpTransport =
77 | AsyncSmtpTransport::::starttls_relay(&self.smtp_host)?
78 | .credentials(creds)
79 | .port(self.smtp_port)
80 | .build();
81 |
82 | mailer.send(email).await.context("Failed to send email")?;
83 |
84 | tracing::info!("Password reset email sent to {}", to_email);
85 |
86 | Ok(())
87 | }
88 | }
89 |
#[cfg(test)]
mod tests {
    use super::*;
    use serial_test::serial;

    /// Load the real template directory so template-dependent paths are valid.
    fn create_test_tera() -> Arc<Tera> {
        Arc::new(Tera::new("templates/**/*").expect("Failed to load templates"))
    }

    /// Remove every SMTP-related variable so each test starts from a clean
    /// environment; without this, ambient SMTP_HOST/SMTP_PORT/etc. would make
    /// the defaults assertions flaky.
    fn clear_smtp_env() {
        // Mutating the process environment is racy across threads, which is
        // why these tests are `#[serial]`.
        unsafe {
            std::env::remove_var("SMTP_USERNAME");
            std::env::remove_var("SMTP_PASSWORD");
            std::env::remove_var("SMTP_HOST");
            std::env::remove_var("SMTP_PORT");
            std::env::remove_var("FROM_EMAIL");
            std::env::remove_var("FROM_NAME");
            std::env::remove_var("FRONTEND_URL");
        }
    }

    #[test]
    #[serial]
    fn test_email_service_from_env_missing_required() {
        let tera = create_test_tera();

        // SMTP_USERNAME and SMTP_PASSWORD are mandatory, so this must fail.
        clear_smtp_env();

        let result = EmailService::from_env(tera);
        assert!(result.is_err());
    }

    #[test]
    #[serial]
    fn test_email_service_from_env_with_defaults() {
        let tera = create_test_tera();

        // Only the required variables are set; everything else must default.
        clear_smtp_env();
        unsafe {
            std::env::set_var("SMTP_USERNAME", "test@example.com");
            std::env::set_var("SMTP_PASSWORD", "password");
        }

        let service = EmailService::from_env(tera).unwrap();

        assert_eq!(service.smtp_host, "smtp.gmail.com");
        assert_eq!(service.smtp_port, 587);
        assert_eq!(service.from_name, "KeyRunes");
        assert_eq!(service.frontend_url, "http://localhost:3000");

        // Cleanup so later tests see a clean environment.
        clear_smtp_env();
    }
}
138 |
--------------------------------------------------------------------------------
/migrations/20250908121827_create_groups_and_policies.sql:
--------------------------------------------------------------------------------
-- Migration: groups, policies, membership join tables, and password-reset
-- tokens, plus default seed data. Statements are written to be idempotent.

-- Add first_login and reset_password columns to users table
ALTER TABLE users
ADD COLUMN first_login BOOLEAN NOT NULL DEFAULT FALSE,
ADD COLUMN reset_password BOOLEAN NOT NULL DEFAULT FALSE;

-- Add unique constraint on email column for ON CONFLICT to work
-- Note: This is in addition to the existing unique index on lower(email)
DO $$
BEGIN
    IF NOT EXISTS (
        SELECT 1 FROM pg_constraint
        WHERE conname = 'users_email_key' AND conrelid = 'users'::regclass
    ) THEN
        ALTER TABLE users ADD CONSTRAINT users_email_key UNIQUE (email);
    END IF;
END $$;

-- Create groups table
CREATE TABLE IF NOT EXISTS groups (
    group_id BIGSERIAL PRIMARY KEY,
    external_id UUID NOT NULL DEFAULT gen_random_uuid(),
    name VARCHAR(100) NOT NULL UNIQUE,
    description TEXT,
    created_at TIMESTAMPTZ NOT NULL DEFAULT now(),
    updated_at TIMESTAMPTZ NOT NULL DEFAULT now()
);

CREATE UNIQUE INDEX IF NOT EXISTS groups_external_id_idx ON groups (external_id);

-- Create policies table
CREATE TABLE IF NOT EXISTS policies (
    policy_id BIGSERIAL PRIMARY KEY,
    external_id UUID NOT NULL DEFAULT gen_random_uuid(),
    name VARCHAR(100) NOT NULL UNIQUE,
    description TEXT,
    resource VARCHAR(255) NOT NULL,
    action VARCHAR(100) NOT NULL,
    effect VARCHAR(10) NOT NULL CHECK (effect IN ('ALLOW', 'DENY')),
    conditions JSONB,
    created_at TIMESTAMPTZ NOT NULL DEFAULT now(),
    updated_at TIMESTAMPTZ NOT NULL DEFAULT now()
);

CREATE UNIQUE INDEX IF NOT EXISTS policies_external_id_idx ON policies (external_id);
CREATE INDEX IF NOT EXISTS policies_resource_action_idx ON policies (resource, action);

-- Create user_groups table (many-to-many)
CREATE TABLE IF NOT EXISTS user_groups (
    user_id BIGINT NOT NULL REFERENCES users(user_id) ON DELETE CASCADE,
    group_id BIGINT NOT NULL REFERENCES groups(group_id) ON DELETE CASCADE,
    assigned_at TIMESTAMPTZ NOT NULL DEFAULT now(),
    assigned_by BIGINT REFERENCES users(user_id),
    PRIMARY KEY (user_id, group_id)
);

-- Create user_policies table (many-to-many)
CREATE TABLE IF NOT EXISTS user_policies (
    user_id BIGINT NOT NULL REFERENCES users(user_id) ON DELETE CASCADE,
    policy_id BIGINT NOT NULL REFERENCES policies(policy_id) ON DELETE CASCADE,
    assigned_at TIMESTAMPTZ NOT NULL DEFAULT now(),
    assigned_by BIGINT REFERENCES users(user_id),
    PRIMARY KEY (user_id, policy_id)
);

-- Create group_policies table (many-to-many)
CREATE TABLE IF NOT EXISTS group_policies (
    group_id BIGINT NOT NULL REFERENCES groups(group_id) ON DELETE CASCADE,
    policy_id BIGINT NOT NULL REFERENCES policies(policy_id) ON DELETE CASCADE,
    assigned_at TIMESTAMPTZ NOT NULL DEFAULT now(),
    assigned_by BIGINT REFERENCES users(user_id),
    PRIMARY KEY (group_id, policy_id)
);

-- Create password_reset_tokens table
CREATE TABLE IF NOT EXISTS password_reset_tokens (
    token_id BIGSERIAL PRIMARY KEY,
    user_id BIGINT NOT NULL REFERENCES users(user_id) ON DELETE CASCADE,
    token VARCHAR(255) NOT NULL,
    expires_at TIMESTAMPTZ NOT NULL,
    used_at TIMESTAMPTZ,
    created_at TIMESTAMPTZ NOT NULL DEFAULT now()
);

CREATE UNIQUE INDEX IF NOT EXISTS password_reset_tokens_token_idx ON password_reset_tokens (token);
CREATE INDEX IF NOT EXISTS password_reset_tokens_user_id_idx ON password_reset_tokens (user_id);

-- Add triggers for updated_at. Postgres has no CREATE TRIGGER IF NOT EXISTS,
-- so drop first to keep this migration idempotent like the statements above.
DROP TRIGGER IF EXISTS trg_set_updated_at_groups ON groups;
CREATE TRIGGER trg_set_updated_at_groups
BEFORE UPDATE ON groups
FOR EACH ROW
EXECUTE PROCEDURE set_updated_at();

DROP TRIGGER IF EXISTS trg_set_updated_at_policies ON policies;
CREATE TRIGGER trg_set_updated_at_policies
BEFORE UPDATE ON policies
FOR EACH ROW
EXECUTE PROCEDURE set_updated_at();

-- Insert default groups
INSERT INTO groups (name, description) VALUES
    ('superadmin', 'Super administrators with full access'),
    ('users', 'Regular users')
ON CONFLICT (name) DO NOTHING;

-- Insert default policies
INSERT INTO policies (name, description, resource, action, effect) VALUES
    ('full_access', 'Full access to all resources', '*', '*', 'ALLOW'),
    ('read_only', 'Read-only access to user resources', 'user:*', 'read', 'ALLOW'),
    ('user_self_manage', 'Users can manage their own data', 'user:self', '*', 'ALLOW')
ON CONFLICT (name) DO NOTHING;

-- Assign admin user to superadmin group
INSERT INTO user_groups (user_id, group_id)
SELECT u.user_id, g.group_id
FROM users u, groups g
WHERE u.username = 'admin' AND g.name = 'superadmin'
ON CONFLICT DO NOTHING;

-- Assign full_access policy to superadmin group
INSERT INTO group_policies (group_id, policy_id)
SELECT g.group_id, p.policy_id
FROM groups g, policies p
WHERE g.name = 'superadmin' AND p.name = 'full_access'
ON CONFLICT DO NOTHING;

-- Assign read_only and user_self_manage policies to users group
INSERT INTO group_policies (group_id, policy_id)
SELECT g.group_id, p.policy_id
FROM groups g, policies p
WHERE g.name = 'users' AND p.name IN ('read_only', 'user_self_manage')
ON CONFLICT DO NOTHING;
131 |
--------------------------------------------------------------------------------
/hurl/admin.hurl:
--------------------------------------------------------------------------------
# Admin API end-to-end tests.
# Covers: user creation (with/without/invalid groups), group CRUD, membership
# assignment/removal, dashboard, policies, permission checks, and authz failures.
# Requires a running server on localhost:3000 and variables from hurl.env.

# Setup - Create Admin
POST http://localhost:3000/api/login
Content-Type: application/json
{
"identity": "{{admin_email}}",
"password": "{{admin_password}}"
}
HTTP 200
[Captures]
token: jsonpath "$['token']"

# Test - Create user without group
POST http://localhost:3000/api/admin/user
Content-Type: application/json
Authorization: Bearer {{token}}
{
"email": "{{newUuid}}@example.com",
"username": "{{group_user_username}}{{newDate}}",
"password": "{{group_user_password}}"
}
HTTP 201
[Captures]
new_user_id: jsonpath "$.user_id"
[Asserts]
# should belong to user group by default
jsonpath "$.groups" count == 1
jsonpath "$.groups[0]" == "users"

# Test - Create user with invalid group
POST http://localhost:3000/api/admin/user
Content-Type: application/json
Authorization: Bearer {{token}}
{
"email": "{{newUuid}}@example.com",
"username": "{{group_user_username}}{{newDate}}",
"password": "{{group_user_password}}",
"groups": ["invalid"]
}
HTTP 400
# Body must match this exact error text.
`invalid group specified: \`invalid\``

# Test - Create user with valid group
POST http://localhost:3000/api/admin/user
Content-Type: application/json
Authorization: Bearer {{token}}
{
"email": "{{newUuid}}@example.com",
"username": "{{group_user_username}}{{newDate}}",
"password": "{{group_user_password}}",
"groups": ["superadmin"]
}
HTTP 201
[Asserts]
# should have `superadmin` group
jsonpath "$.groups" count == 1
jsonpath "$.groups[0]" == "superadmin"

# Test - List all users
GET http://localhost:3000/api/admin/users
Authorization: Bearer {{token}}
HTTP 200
[Asserts]
jsonpath "$" isCollection
jsonpath "$[*].user_id" exists
jsonpath "$[*].username" exists
jsonpath "$[*].email" exists
jsonpath "$[*].groups" exists

# Test - List all groups
GET http://localhost:3000/api/admin/groups
Authorization: Bearer {{token}}
HTTP 200
[Captures]
superadmin_group_id: jsonpath "$[?(@.name=='superadmin')].group_id" nth 0
users_group_id: jsonpath "$[?(@.name=='users')].group_id" nth 0
[Asserts]
jsonpath "$" isCollection
jsonpath "$[?(@.name=='superadmin')]" exists
jsonpath "$[?(@.name=='users')]" exists

# Test - Create new group
POST http://localhost:3000/api/admin/groups
Content-Type: application/json
Authorization: Bearer {{token}}
{
"name": "developers_{{newDate}}",
"description": "Development team"
}
HTTP 201
[Captures]
new_group_id: jsonpath "$.group_id"
new_group_name: jsonpath "$.name"
[Asserts]
jsonpath "$.name" startsWith "developers_"
jsonpath "$.description" == "Development team"
jsonpath "$.group_id" exists

# Test - Create duplicate group (should fail)
POST http://localhost:3000/api/admin/groups
Content-Type: application/json
Authorization: Bearer {{token}}
{
"name": "{{new_group_name}}",
"description": "Duplicate"
}
HTTP 400

# Test - Assign user to group
POST http://localhost:3000/api/admin/users/{{new_user_id}}/groups/{{new_group_id}}
Authorization: Bearer {{token}}
HTTP 200
[Asserts]
jsonpath "$.message" == "User assigned to group successfully"

# Test - Remove user from group
DELETE http://localhost:3000/api/admin/users/{{new_user_id}}/groups/{{new_group_id}}
Authorization: Bearer {{token}}
HTTP 200
[Asserts]
jsonpath "$.message" == "User removed from group successfully"

# Test - Get admin dashboard
GET http://localhost:3000/api/admin/dashboard
Authorization: Bearer {{token}}
HTTP 200
[Asserts]
jsonpath "$.total_users" isInteger
jsonpath "$.total_groups" isInteger
jsonpath "$.total_policies" isInteger
jsonpath "$.current_admin.username" exists
jsonpath "$.current_admin.groups" includes "superadmin"

# Test - List all policies
GET http://localhost:3000/api/admin/policies
Authorization: Bearer {{token}}
HTTP 200
[Asserts]
jsonpath "$" isCollection
jsonpath "$[*].policy_id" exists
jsonpath "$[*].name" exists

# Test - Check permission (user with superadmin should have full access)
POST http://localhost:3000/api/admin/check-permission
Content-Type: application/json
Authorization: Bearer {{token}}
{
"user_id": {{new_user_id}},
"group_name": "users",
"resource": "user:*",
"action": "read"
}
HTTP 200
[Asserts]
jsonpath "$.user_id" == {{new_user_id}}
jsonpath "$.has_permission" isBoolean

# Test - Access without authentication (should fail)
GET http://localhost:3000/api/admin/users
HTTP 401

# Test - Access with non-superadmin (create regular user first)
POST http://localhost:3000/api/register
Content-Type: application/json
{
"email": "regular_{{newUuid}}@example.com",
"username": "regular_{{newDate}}",
"password": "Regular123"
}
HTTP 201
[Captures]
regular_token: jsonpath "$.token"

# Try to access admin endpoint with regular user
GET http://localhost:3000/api/admin/users
Authorization: Bearer {{regular_token}}
HTTP 403
177 |
--------------------------------------------------------------------------------
/Makefile:
--------------------------------------------------------------------------------
# KeyRunes developer Makefile.
# `make help` lists targets: lines ending in "## ..." are parsed at runtime by
# the help target's grep, so they must keep that exact format.
# Override DATABASE_URL / ADMIN_* via the environment or the command line.

.PHONY: help db-create db-drop db-reset migrate run build test test-unit test-hurl test-all clean dev setup superadmin sqlx-prepare check lint

# Variáveis
DATABASE_URL ?= postgres://postgres_user:pass123@localhost:5432/keyrunes
ADMIN_EMAIL ?= admin@example.com
ADMIN_USERNAME ?= admin
ADMIN_PASSWORD ?= Admin123

help:
	@echo "Commands available:"
	@echo ""
	@grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | sort | awk 'BEGIN {FS = ":.*?## "}; {printf " \033[36m%-20s\033[0m %s\n", $$1, $$2}'
	@echo ""
	@echo "Variáveis de ambiente:"
	@echo " DATABASE_URL=$(DATABASE_URL)"
	@echo " ADMIN_EMAIL=$(ADMIN_EMAIL)"
	@echo " ADMIN_USERNAME=$(ADMIN_USERNAME)"

## Database
db-create: ## Cria o banco de dados
	@echo "📦 Criando banco de dados..."
	sqlx database create

db-drop: ## Remove o banco de dados (CUIDADO: apaga todos os dados!)
	@echo "🗑️ Removendo banco de dados..."
	sqlx database drop -y

db-reset: db-drop db-create migrate ## Reseta o banco (drop + create + migrate)
	@echo "✅ Banco resetado com sucesso!"

migrate: ## Roda as migrations
	@echo "🔄 Rodando migrations..."
	sqlx migrate run
	@echo "✅ Migrations aplicadas!"

migrate-revert: ## Reverte a última migration
	@echo "↩️ Revertendo última migration..."
	sqlx migrate revert

## Build & Run
build: ## Compila o projeto
	@echo "🔨 Compilando..."
	cargo build

build-release: ## Compila em modo release
	@echo "🔨 Compilando release..."
	cargo build --release

run: ## Roda o servidor
	@echo "🚀 Iniciando servidor..."
	cargo run

run-release: ## Roda o servidor em modo release
	@echo "🚀 Iniciando servidor (release)..."
	cargo run --release

dev: ## Roda o servidor com auto-reload (requer cargo-watch)
	@echo "🔥 Modo desenvolvimento com hot-reload..."
	cargo watch -x run

## CLI
cli-superadmin: ## Cria o primeiro superadmin
	@echo "👤 Criando superadmin..."
	cargo run --bin cli -- create-superadmin \
		--email $(ADMIN_EMAIL) \
		--username $(ADMIN_USERNAME) \
		--password $(ADMIN_PASSWORD)
	@echo "✅ Superadmin criado!"

cli-list-groups: ## Lista todos os grupos
	@echo "📋 Listando grupos..."
	cargo run --bin cli -- list-groups

cli-create-group: ## Cria um grupo (uso: make cli-create-group NAME=developers DESC="Dev team")
	@echo "➕ Criando grupo $(NAME)..."
	cargo run --bin cli -- create-group --name $(NAME) --description "$(DESC)"

## Tests
test: ## Roda todos os testes Rust
	@echo "🧪 Rodando testes Rust..."
	cargo test

test-unit: ## Roda apenas testes unitários
	@echo "🧪 Rodando testes unitários..."
	cargo test --lib

test-integration: ## Roda apenas testes de integração
	@echo "🧪 Rodando testes de integração..."
	cargo test --test '*'

test-hurl: ## Roda testes Hurl (requer servidor rodando)
	@echo "🧪 Rodando testes Hurl..."
	@if ! curl -s http://localhost:3000/api/health > /dev/null 2>&1; then \
		echo "❌ Servidor não está rodando! Execute 'make run' primeiro."; \
		exit 1; \
	fi
	./run_hurl_tests.sh

test-hurl-verbose: ## Roda testes Hurl em modo verbose
	@echo "🧪 Rodando testes Hurl (verbose)..."
	./run_hurl_tests.sh --verbose

test-all: test test-hurl ## Roda todos os testes (Rust + Hurl)

## SQLx
sqlx-prepare: ## Prepara SQLx metadata offline
	@echo "📝 Preparando SQLx metadata..."
	cargo sqlx prepare

sqlx-check: ## Verifica se as queries SQLx estão corretas
	@echo "🔍 Verificando queries SQLx..."
	cargo sqlx prepare --check

## Setup completo
setup: db-create migrate cli-superadmin ## Setup completo (cria DB, migrations, superadmin)
	@echo ""
	@echo "✨ Setup completo!"
	@echo ""
	@echo "Próximos passos:"
	@echo " 1. Inicie o servidor: make run"
	@echo " 2. Acesse: http://127.0.0.1:3000/login"
	@echo " 3. Login: $(ADMIN_USERNAME) / $(ADMIN_PASSWORD)"
	@echo " 4. Admin: http://127.0.0.1:3000/admin"
	@echo ""

## Development
check: ## Verifica o código sem compilar
	@echo "🔍 Verificando código..."
	cargo check --all-targets

lint: ## Roda clippy (linter)
	@echo "🧹 Rodando linter..."
	cargo clippy -- -D warnings

fmt: ## Formata o código
	@echo "✨ Formatando código..."
	cargo fmt

fmt-check: ## Verifica formatação sem alterar
	@echo "🔍 Verificando formatação..."
	cargo fmt -- --check

clean: ## Limpa arquivos de build
	@echo "🧹 Limpando..."
	cargo clean

## Docker
docker-up: ## Sobe o Postgres via docker-compose
	@echo "🐳 Subindo Docker..."
	docker-compose up -d
	@echo "⏳ Aguardando Postgres iniciar..."
	@sleep 3
	@echo "✅ Postgres rodando!"

docker-down: ## Para o Postgres
	@echo "🛑 Parando Docker..."
	docker-compose down

docker-reset: docker-down docker-up ## Reseta containers Docker
	@echo "✅ Docker resetado!"

docker-logs: ## Mostra logs do Postgres
	docker-compose logs -f postgres

## Quick commands
fresh-start: docker-reset db-reset setup ## Começa do zero (Docker + DB + Setup)
	@echo ""
	@echo "🎉 Ambiente pronto para desenvolvimento!"
	@echo "Execute: make run"

restart: docker-down docker-up migrate ## Reinicia ambiente de desenvolvimento
	@echo "✅ Ambiente reiniciado!"

## Info
env: ## Mostra variáveis de ambiente
	@echo "DATABASE_URL: $(DATABASE_URL)"
	@echo "ADMIN_EMAIL: $(ADMIN_EMAIL)"
	@echo "ADMIN_USERNAME: $(ADMIN_USERNAME)"
	@echo "ADMIN_PASSWORD: $(ADMIN_PASSWORD)"

status: ## Mostra status do ambiente
	@echo "📊 Status do Ambiente"
	@echo ""
	@echo "Docker:"
	@docker-compose ps 2>/dev/null || echo " ⚠️ Docker não está rodando"
	@echo ""
	@echo "Servidor:"
	@if curl -s http://localhost:3000/api/health > /dev/null 2>&1; then \
		echo " ✅ Servidor rodando (http://localhost:3000)"; \
	else \
		echo " ⚠️ Servidor não está rodando"; \
	fi
	@echo ""
	@echo "Database:"
	@if psql $(DATABASE_URL) -c "SELECT 1" > /dev/null 2>&1; then \
		echo " ✅ Conectado"; \
	else \
		echo " ⚠️ Não conectado"; \
	fi
200 |
--------------------------------------------------------------------------------
/src/services/jwt_service.rs:
--------------------------------------------------------------------------------
1 | use anyhow::{Result, anyhow};
2 | use chrono::{Duration, Utc};
3 | use josekit::jws::HS256;
4 | use josekit::jws::JwsHeader;
5 | use josekit::jwt::{self, JwtPayload};
6 | use serde::{Deserialize, Serialize};
7 | use serde_json::Value;
8 |
9 | #[derive(Debug, Serialize, Deserialize)]
10 | pub struct Claims {
11 | pub sub: String,
12 | pub email: String,
13 | pub username: String,
14 | pub groups: Vec,
15 | pub exp: i64,
16 | pub iat: i64,
17 | pub iss: String,
18 | }
19 |
/// HS256 JWT signer/verifier with a fixed issuer.
#[derive(Clone)]
pub struct JwtService {
    // Raw HMAC key bytes used for HS256 signing and verification.
    // Restores the stripped `Vec<u8>` generic — `new` stores
    // `secret.as_bytes().to_vec()` here.
    secret: Vec<u8>,
    issuer: String,
}
25 |
26 | impl JwtService {
27 | pub fn new(secret: &str) -> Self {
28 | Self {
29 | secret: secret.as_bytes().to_vec(),
30 | issuer: "keyrunes".to_string(),
31 | }
32 | }
33 |
34 | pub fn generate_token(
35 | &self,
36 | user_id: i64,
37 | email: &str,
38 | username: &str,
39 | groups: Vec,
40 | ) -> Result {
41 | let now = Utc::now();
42 | let exp = now + Duration::hours(1);
43 |
44 | let mut payload = JwtPayload::new();
45 | payload.set_claim("sub", Some(Value::String(user_id.to_string())))?;
46 | payload.set_claim("email", Some(Value::String(email.to_string())))?;
47 | payload.set_claim("username", Some(Value::String(username.to_string())))?;
48 | payload.set_claim("groups", Some(serde_json::to_value(&groups)?))?;
49 | payload.set_claim("exp", Some(Value::Number(exp.timestamp().into())))?;
50 | payload.set_claim("iat", Some(Value::Number(now.timestamp().into())))?;
51 | payload.set_claim("iss", Some(Value::String(self.issuer.clone())))?;
52 |
53 | let mut header = JwsHeader::new();
54 | header.set_token_type("JWT");
55 |
56 | let signer = HS256.signer_from_bytes(&self.secret)?;
57 | let token = jwt::encode_with_signer(&payload, &header, &signer)?;
58 |
59 | Ok(token)
60 | }
61 |
62 | pub fn verify_token(&self, token: &str) -> Result {
63 | let verifier = HS256.verifier_from_bytes(&self.secret)?;
64 | let (payload, _header) = jwt::decode_with_verifier(token, &verifier)
65 | .map_err(|e| anyhow!("Failed to decode JWT: {}", e))?;
66 |
67 | let sub = payload
68 | .claim("sub")
69 | .and_then(|v| v.as_str())
70 | .ok_or_else(|| anyhow!("Missing or invalid 'sub' claim"))?
71 | .to_string();
72 | let email = payload
73 | .claim("email")
74 | .and_then(|v| v.as_str())
75 | .ok_or_else(|| anyhow!("Missing or invalid 'email' claim"))?
76 | .to_string();
77 | let username = payload
78 | .claim("username")
79 | .and_then(|v| v.as_str())
80 | .ok_or_else(|| anyhow!("Missing or invalid 'username' claim"))?
81 | .to_string();
82 | let groups = payload
83 | .claim("groups")
84 | .and_then(|v| serde_json::from_value(v.clone()).ok())
85 | .ok_or_else(|| anyhow!("Missing or invalid 'groups' claim"))?;
86 | let exp = payload
87 | .claim("exp")
88 | .and_then(|v| v.as_i64())
89 | .ok_or_else(|| anyhow!("Missing or invalid 'exp' claim"))?;
90 | let iat = payload
91 | .claim("iat")
92 | .and_then(|v| v.as_i64())
93 | .ok_or_else(|| anyhow!("Missing or invalid 'iat' claim"))?;
94 | let iss = payload
95 | .claim("iss")
96 | .and_then(|v| v.as_str())
97 | .ok_or_else(|| anyhow!("Missing or invalid 'iss' claim"))?
98 | .to_string();
99 |
100 | Ok(Claims {
101 | sub,
102 | email,
103 | username,
104 | groups,
105 | exp,
106 | iat,
107 | iss,
108 | })
109 | }
110 |
111 | pub fn refresh_token(&self, token: &str) -> Result {
112 | let claims = self.verify_token(token)?;
113 | self.generate_token(
114 | claims.sub.parse()?,
115 | &claims.email,
116 | &claims.username,
117 | claims.groups,
118 | )
119 | }
120 |
121 | pub fn extract_user_id(&self, token: &str) -> Result {
122 | let claims = self.verify_token(token)?;
123 | claims
124 | .sub
125 | .parse()
126 | .map_err(|e| anyhow!("Invalid user ID in token: {}", e))
127 | }
128 | }
129 |
#[cfg(test)]
mod tests {
    use super::*;
    use std::thread;
    use std::time::Duration as StdDuration;

    /// Shared fixture: a service keyed with a fixed 32-byte test secret.
    fn make_service() -> JwtService {
        JwtService::new("0123456789ABCDEF0123456789ABCDEF")
    }

    #[test]
    fn test_jwt_token_generation_and_verification() {
        let svc = make_service();
        let expected_groups = vec!["users".to_string(), "admin".to_string()];

        let token = svc
            .generate_token(1, "test@example.com", "testuser", expected_groups.clone())
            .unwrap();
        let decoded = svc.verify_token(&token).unwrap();

        assert_eq!(decoded.sub, "1");
        assert_eq!(decoded.email, "test@example.com");
        assert_eq!(decoded.username, "testuser");
        assert_eq!(decoded.groups, expected_groups);
        assert_eq!(decoded.iss, "keyrunes");
    }

    #[test]
    fn test_refresh_token() {
        let svc = make_service();
        let membership = vec!["users".to_string()];

        let first = svc
            .generate_token(1, "test@example.com", "testuser", membership.clone())
            .unwrap();

        // Timestamps have one-second resolution, so wait long enough for
        // the refreshed token to land on a strictly later expiry.
        thread::sleep(StdDuration::from_secs(1));

        let second = svc.refresh_token(&first).unwrap();

        let before = svc.verify_token(&first).unwrap();
        let after = svc.verify_token(&second).unwrap();

        assert_eq!(before.sub, after.sub);
        assert_eq!(before.email, after.email);
        assert!(after.exp > before.exp);
    }

    #[test]
    fn test_extract_user_id() {
        let svc = make_service();

        let token = svc
            .generate_token(42, "test@example.com", "testuser", vec!["users".to_string()])
            .unwrap();

        assert_eq!(svc.extract_user_id(&token).unwrap(), 42);
    }
}
187 |
--------------------------------------------------------------------------------
/src/repository/mod.rs:
--------------------------------------------------------------------------------
1 | use anyhow::Result;
2 | use async_trait::async_trait;
3 | use chrono::{DateTime, Utc};
4 | use serde::{Deserialize, Serialize};
5 | use serde_json::Value as JsonValue;
6 | use uuid::Uuid;
7 |
8 | #[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
9 | pub struct User {
10 | pub user_id: i64,
11 | pub external_id: Uuid,
12 | pub email: String,
13 | pub username: String,
14 | pub password_hash: String,
15 | pub first_login: bool,
16 | pub created_at: DateTime,
17 | pub updated_at: DateTime,
18 | }
19 |
// Insert payload for creating a user; database-generated fields
// (`user_id`, timestamps) are omitted.
#[derive(Debug, Clone)]
pub struct NewUser {
    pub external_id: Uuid,
    pub email: String,
    pub username: String,
    pub password_hash: String,
    pub first_login: bool,
}
28 |
29 | #[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
30 | pub struct Group {
31 | pub group_id: i64,
32 | pub external_id: Uuid,
33 | pub name: String,
34 | pub description: Option,
35 | pub created_at: DateTime,
36 | pub updated_at: DateTime,
37 | }
38 |
39 | #[derive(Debug, Clone)]
40 | pub struct NewGroup {
41 | pub external_id: Uuid,
42 | pub name: String,
43 | pub description: Option,
44 | }
45 |
46 | #[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
47 | pub struct Policy {
48 | pub policy_id: i64,
49 | pub external_id: Uuid,
50 | pub name: String,
51 | pub description: Option,
52 | pub resource: String,
53 | pub action: String,
54 | pub effect: PolicyEffect,
55 | pub conditions: Option,
56 | pub created_at: DateTime,
57 | pub updated_at: DateTime,
58 | }
59 |
// Whether a matching policy grants or denies access.
// Serialized as the uppercase strings used by the database/API.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub enum PolicyEffect {
    #[serde(rename = "ALLOW")]
    Allow,
    #[serde(rename = "DENY")]
    Deny,
}
67 |
68 | impl std::fmt::Display for PolicyEffect {
69 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
70 | match self {
71 | PolicyEffect::Allow => write!(f, "ALLOW"),
72 | PolicyEffect::Deny => write!(f, "DENY"),
73 | }
74 | }
75 | }
76 |
77 | #[derive(Debug, Clone)]
78 | pub struct NewPolicy {
79 | pub external_id: Uuid,
80 | pub name: String,
81 | pub description: Option,
82 | pub resource: String,
83 | pub action: String,
84 | pub effect: PolicyEffect,
85 | pub conditions: Option,
86 | }
87 |
88 | #[allow(dead_code)]
89 | #[derive(Debug, Clone, Serialize, Deserialize)]
90 | pub struct UserGroup {
91 | pub user_id: i64,
92 | pub group_id: i64,
93 | pub assigned_at: DateTime,
94 | pub assigned_by: Option,
95 | }
96 |
97 | #[allow(dead_code)]
98 | #[derive(Debug, Clone, Serialize, Deserialize)]
99 | pub struct UserPolicy {
100 | pub user_id: i64,
101 | pub policy_id: i64,
102 | pub assigned_at: DateTime,
103 | pub assigned_by: Option,
104 | }
105 |
106 | #[allow(dead_code)]
107 | #[derive(Debug, Clone, Serialize, Deserialize)]
108 | pub struct GroupPolicy {
109 | pub group_id: i64,
110 | pub policy_id: i64,
111 | pub assigned_at: DateTime,
112 | pub assigned_by: Option,
113 | }
114 |
115 | #[derive(Debug, Clone, Serialize, Deserialize)]
116 | pub struct PasswordResetToken {
117 | pub token_id: i64,
118 | pub user_id: i64,
119 | pub token: String,
120 | pub expires_at: DateTime,
121 | pub used_at: Option>,
122 | pub created_at: DateTime,
123 | }
124 |
125 | #[derive(Debug, Clone)]
126 | pub struct NewPasswordResetToken {
127 | pub user_id: i64,
128 | pub token: String,
129 | pub expires_at: DateTime,
130 | }
131 |
132 | #[derive(sqlx::FromRow, Debug, Clone)]
133 | #[allow(dead_code)]
134 | pub struct Settings {
135 | pub settings_id: i32,
136 | pub key: String,
137 | pub value: String,
138 | pub description: Option,
139 | pub created_at: DateTime,
140 | pub updated_at: DateTime,
141 | }
142 |
/// Insert payload for creating a setting; database-generated fields omitted.
#[derive(Debug)]
pub struct CreateSettings {
    pub key: String,
    pub value: String,
    pub description: Option<String>,
}
149 |
150 | // User Repository Trait
151 | #[allow(dead_code)]
152 | #[async_trait]
153 | pub trait UserRepository: Send + Sync + 'static {
154 | async fn find_by_email(&self, email: &str) -> Result