├── .github
├── dependabot.yml
└── workflows
│ └── staging-deploy.yml
├── .gitignore
├── Dockerfile
├── LICENSE
├── Makefile
├── README.md
├── app.py
├── base_logger.py
├── docs
├── api_v3.md
├── api_versions.md
├── configurations.md
├── contributing.md
├── features_v2.md
├── grpc.md
├── references.md
├── security.md
└── specifications.md
├── example.env
├── grpc_internal_server.py
├── grpc_server.py
├── migrations
├── __init__.py
├── clean_account_identifiers.py
├── entity_migration_script.py
├── populate_signups.py
├── schema
│ ├── CHANGELOG.md
│ ├── README.md
│ ├── __init__.py
│ ├── run.py
│ ├── v1.0.1.json
│ ├── v1.0.2.json
│ └── v1.1.0.json
└── update_entity_created_date.py
├── protos
└── v1
│ └── vault.proto
├── requirements.txt
├── scripts
├── __init__.py
├── cli.py
├── common.sh
├── logger.sh
├── quick-setup.sh
├── quick-start.sh
└── x25519_keygen.py
├── src
├── api_v3.py
├── crypto.py
├── db.py
├── db_models.py
├── device_id.py
├── entity.py
├── grpc_entity_internal_service.py
├── grpc_entity_service.py
├── long_lived_token.py
├── otp_service.py
├── password_rate_limit.py
├── password_validation.py
├── relaysms_payload.py
├── signups.py
├── tokens.py
├── user_metrics.py
└── utils.py
├── supervisord.conf
├── template.env
└── tests
├── __init__.py
├── grpc_test_cases.sh
├── test_crypto.py
├── test_entity.py
├── test_grpc_entity_service.py
├── test_llt.py
└── test_otp_service.py
/.github/dependabot.yml:
--------------------------------------------------------------------------------
1 | version: 2
2 | updates:
3 | - package-ecosystem: "docker"
4 | directory: "/"
5 | target-branch: "staging"
6 | schedule:
7 | interval: "weekly"
8 | open-pull-requests-limit: 99
9 | labels:
10 | - "dependencies"
11 | - package-ecosystem: "pip"
12 | directory: "/"
13 | target-branch: "staging"
14 | schedule:
15 | interval: "weekly"
16 | open-pull-requests-limit: 99
17 | allow:
18 | - dependency-type: "direct"
19 | - dependency-type: "indirect"
20 | labels:
21 | - "dependencies"
22 | groups:
23 | production-dependencies:
24 | dependency-type: "production"
25 | patterns:
26 | - "*"
27 | development-dependencies:
28 | dependency-type: "development"
29 | patterns:
30 | - "*"
31 |
--------------------------------------------------------------------------------
/.github/workflows/staging-deploy.yml:
--------------------------------------------------------------------------------
1 | name: Staging Server Build Pipeline
2 |
3 | on:
4 | push:
5 | branches:
6 | - staging
7 |
8 | jobs:
9 | deploy:
10 | name: Deploy to Staging Server
11 | runs-on: ubuntu-latest
12 | environment:
13 | name: staging
14 | steps:
15 | - name: Execute Remote SSH Commands
16 |         uses: appleboy/ssh-action@master  # NOTE(review): pin to a tagged release or commit SHA, not a mutable branch
17 | with:
18 | host: ${{ secrets.HOST }}
19 | username: ${{ secrets.USERNAME }}
20 | key: ${{ secrets.KEY }}
21 |           script: |  # NOTE(review): secrets.CLEANUP_CMD is template-expanded into this shell script; verify it cannot be attacker-influenced (script injection)
22 | set -e
23 |
24 | echo "============================"
25 | echo "Updating repository ..."
26 | echo "============================"
27 | if ! assembler clone --branch staging --project vault; then
28 | echo "❌ Error updating repository!"
29 | exit 1
30 | fi
31 | echo "==============================="
32 | echo "✅ Repository update complete"
33 | echo "==============================="
34 |
35 | echo "========================="
36 | echo "Building project ..."
37 | echo "========================="
38 | if ! assembler deploy --project vault --rebuild; then
39 | echo "❌ Error building project!"
40 | exit 1
41 | fi
42 | echo "==========================="
43 | echo "✅ Project build complete"
44 | echo "==========================="
45 |
46 | echo "============================="
47 | echo "Cleaning up staging builds ..."
48 | echo "============================="
49 | if ! ${{ secrets.CLEANUP_CMD }}; then
50 | echo "❌ Error cleaning up builds!"
51 | exit 1
52 | fi
53 | echo "============================="
54 | echo "✅ Cleanup complete"
55 | echo "============================="
56 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | credentials.json
2 | logs/
3 | logos/*.svg
4 | .vscode
5 | # Byte-compiled / optimized / DLL files
6 | __pycache__/
7 | *.py[cod]
8 | *$py.class
9 |
10 | # C extensions
11 | *.so
12 |
13 | # Distribution / packaging
14 | .Python
15 | build/
16 | develop-eggs/
17 | dist/
18 | downloads/
19 | eggs/
20 | .eggs/
21 | lib/
22 | lib64/
23 | parts/
24 | sdist/
25 | var/
26 | wheels/
27 | pip-wheel-metadata/
28 | share/python-wheels/
29 | *.egg-info/
30 | .installed.cfg
31 | *.egg
32 | MANIFEST
33 |
34 | # PyInstaller
35 | # Usually these files are written by a python script from a template
36 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
37 | *.manifest
38 | *.spec
39 |
40 | # Installer logs
41 | pip-log.txt
42 | pip-delete-this-directory.txt
43 |
44 | # Unit test / coverage reports
45 | htmlcov/
46 | .tox/
47 | .nox/
48 | .coverage
49 | .coverage.*
50 | .cache
51 | nosetests.xml
52 | coverage.xml
53 | *.cover
54 | *.py,cover
55 | .hypothesis/
56 | .pytest_cache/
57 |
58 | # Translations
59 | *.mo
60 | *.pot
61 |
62 | # Django stuff:
63 | *.log
64 | local_settings.py
65 | db.sqlite3
66 | db.sqlite3-journal
67 |
68 | # Flask stuff:
69 | instance/
70 | .webassets-cache
71 |
72 | # Scrapy stuff:
73 | .scrapy
74 |
75 | # Sphinx documentation
76 | docs/_build/
77 |
78 | # PyBuilder
79 | target/
80 |
81 | # Jupyter Notebook
82 | .ipynb_checkpoints
83 |
84 | # IPython
85 | profile_default/
86 | ipython_config.py
87 |
88 | # pyenv
89 | .python-version
90 |
91 | # pipenv
92 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
93 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
94 | # having no cross-platform support, pipenv may install dependencies that don't work, or not
95 | # install all needed dependencies.
96 | #Pipfile.lock
97 |
98 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow
99 | __pypackages__/
100 |
101 | # Celery stuff
102 | celerybeat-schedule
103 | celerybeat.pid
104 |
105 | # SageMath parsed files
106 | *.sage.py
107 |
108 | # Environments
109 | .env
110 | .env.*
111 | .venv
112 | env/
113 | venv/
114 | ENV/
115 | env.bak/
116 | venv.bak/
117 |
118 | # Spyder project settings
119 | .spyderproject
120 | .spyproject
121 |
122 | # Rope project settings
123 | .ropeproject
124 |
125 | # mkdocs documentation
126 | /site
127 |
128 | # mypy
129 | .mypy_cache/
130 | .dmypy.json
131 | dmypy.json
132 |
133 | # Pyre type checker
134 | .pyre/
135 |
136 | # protoc
137 | *_pb2_grpc.py*
138 | *_pb2.py*
139 |
140 | # Ignore SQLite database files
141 | *.sqlite
142 | *.sqlite3
143 | *.db
144 |
145 | # Ignore all .proto files
146 | *.proto
147 |
148 | # Except for vault.proto
149 | !vault.proto
150 |
151 | # Platforms info
152 | platforms.json
153 |
154 | # Static public keys info
155 | static_x25519_pub_keys.json
156 | *.key
157 | keystore/
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM python:3.13.4-slim AS base
2 |
3 | WORKDIR /vault
4 |
5 | RUN apt-get update && \
6 | apt-get install -y --no-install-recommends \
7 | build-essential \
8 | apache2 \
9 | apache2-dev \
10 | default-libmysqlclient-dev \
11 | supervisor \
12 | libsqlcipher-dev \
13 | libsqlite3-dev \
14 | git \
15 | curl \
16 | pkg-config && \
17 | apt-get clean && \
18 | rm -rf /var/lib/apt/lists/*
19 |
20 | COPY requirements.txt .
21 | RUN --mount=type=cache,target=/root/.cache/pip \
22 | pip install --disable-pip-version-check --quiet --no-cache-dir -r requirements.txt
23 |
24 | COPY . .
25 |
26 | RUN make build-setup
27 |
28 | COPY supervisord.conf /etc/supervisor/conf.d/supervisord.conf
29 |
30 | FROM base AS production
31 |
32 | ENV MODE=production
33 |
34 | CMD ["supervisord", "-n", "-c", "/etc/supervisor/conf.d/supervisord.conf"]
35 |
36 | FROM base AS development
37 |
38 | ENV MODE=development
39 |
40 | CMD ["supervisord", "-n", "-c", "/etc/supervisor/conf.d/supervisord.conf"]
41 |
--------------------------------------------------------------------------------
/Makefile:
--------------------------------------------------------------------------------
1 | # This program is free software: you can redistribute it under the terms
2 | # of the GNU General Public License, v. 3.0. If a copy of the GNU General
3 | # Public License was not distributed with this file, see <https://www.gnu.org/licenses/>.
4 |
5 | PYTHON := python3
6 | SUPPORTED_PLATFORMS_URL="https://raw.githubusercontent.com/smswithoutborders/SMSWithoutBorders-Publisher/main/resources/platforms.json"
7 |
8 | define log_message
9 | @echo "[$(shell date +'%Y-%m-%d %H:%M:%S')] $(1)"
10 | endef
11 |
12 | start-rest-api:
13 | $(call log_message,[INFO] Starting REST API ...)
14 | @if [ "$$MODE" = "production" ]; then \
15 | echo "[INFO] Running in production mode with SSL"; \
16 | gunicorn -w 4 -b 0.0.0.0:$$SSL_PORT \
17 | --log-level=info \
18 | --access-logfile=- \
19 | --certfile=$$SSL_CERTIFICATE \
20 | --keyfile=$$SSL_KEY \
21 | --threads 15 \
22 | --timeout 30 \
23 | app:app; \
24 | else \
25 | echo "[INFO] Running in development mode without SSL"; \
26 | gunicorn -w 1 -b 0.0.0.0:$$PORT \
27 | --log-level=info \
28 | --access-logfile=- \
29 | --threads 3 \
30 | --timeout 30 \
31 | app:app; \
32 | fi
33 | $(call log_message,[INFO] REST API started successfully.)
34 |
35 | grpc-compile:
36 | $(call log_message,[INFO] Compiling gRPC protos ...)
37 | @$(PYTHON) -m grpc_tools.protoc \
38 | -I./protos/v1 \
39 | --python_out=. \
40 | --pyi_out=. \
41 | --grpc_python_out=. \
42 | ./protos/v1/*.proto
43 | $(call log_message,[INFO] gRPC Compilation complete!)
44 |
45 | grpc-server-start:
46 | $(call log_message,[INFO] Starting gRPC server ...)
47 | @$(PYTHON) -u grpc_server.py
48 | $(call log_message,[INFO] gRPC server started successfully.)
49 |
50 | grpc-internal-server-start:
51 | $(call log_message,[INFO] Starting gRPC internal server ...)
52 | @$(PYTHON) -u grpc_internal_server.py
53 | $(call log_message,[INFO] gRPC internal server started successfully.)
54 |
55 | download-platforms:
56 | $(call log_message,[INFO] Downloading platforms JSON file ...)
57 | @curl -sSL -o platforms.json "$(SUPPORTED_PLATFORMS_URL)"
58 | $(call log_message,[INFO] Platforms JSON file downloaded successfully.)
59 |
60 | create-dummy-user:
61 | $(call log_message,[INFO] Creating dummy user ...)
62 | @$(PYTHON) -m scripts.cli create -n +237123456789
63 | $(call log_message,[INFO] Dummy user created successfully.)
64 |
65 | generate-static-keys:
66 | $(call log_message,[INFO] Generating x25519 static keys ...)
67 | @$(PYTHON) -m scripts.x25519_keygen generate -n 255 -v v1 && \
68 | $(PYTHON) -m scripts.x25519_keygen export --skip-if-exists
69 | $(call log_message,[INFO] x25519 static keys generated successfully.)
70 |
71 | build-setup: grpc-compile download-platforms
72 | runtime-setup: create-dummy-user generate-static-keys
73 |
74 | clean:
75 | $(call log_message,[INFO] Cleaning build, environment, and generated files ...)
76 | @rm -f vault_pb2*
77 | @rm -f vault.db
78 | @rm -f hashing.key
79 | @rm -f encryption.key
80 | @rm -f platforms.json
81 | @rm -f .env
82 | @rm -rf venv/
83 | @rm -rf keystore/
84 | $(call log_message,[INFO] Clean complete.)
85 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # RelaySMS Vault
2 |
3 | RelaySMS Vault is the security core of the RelaySMS ecosystem, responsible for:
4 |
5 | - **Authentication & Authorization:** Managing user access and permissions.
6 | - **Access Token Management:** Secure storage and handling of tokens for supported protocols.
7 | - **Data Security:** Encryption of sensitive data and secure message transmission.
8 |
9 | ## Table of Contents
10 |
11 | 1. [Quick Start](#quick-start)
12 | 2. [System Requirements](#system-requirements)
13 | 3. [Installation](#installation)
14 | 4. [Configuration](#configuration)
15 | 5. [References](#references)
16 | 6. [Contributing](#contributing)
17 | 7. [License](#license)
18 |
19 | ## Quick Start
20 |
21 | > [!NOTE]
22 | >
23 | > Ensure all [system dependencies](#system-requirements) are installed before running setup scripts.
24 |
25 | For development, use the provided scripts:
26 |
27 | ```bash
28 | source scripts/quick-setup.sh && ./scripts/quick-start.sh
29 | ```
30 |
31 | - `quick-setup`:
32 |
33 | - Creates a Python virtual environment (if missing)
34 | - Installs Python dependencies
35 | - Sets up a `.env` file (SQLite by default)
36 | - Exports environment variables
37 | - Generates encryption and hashing keys
38 | - Compiles gRPC protos (via `make grpc-compile`)
39 | - Downloads supported platforms JSON (via `make download-platforms`)
40 | - Creates a dummy user (via `make create-dummy-user`)
41 | - Generates static x25519 keys (via `make generate-static-keys`)
42 |
43 | - `quick-start`:
44 | - Launches the gRPC server, internal gRPC server, and REST server
45 |
46 | > [!WARNING]
47 | >
48 | > This setup is for development only. Do not use in production.
49 |
50 | ## System Requirements
51 |
52 | - **Database:** MySQL (≥ 8.0.28), MariaDB, or SQLite
53 | - **Python:** ≥ 3.8.10
54 | - **Virtual Environments:** Python venv
55 |
56 | ### Ubuntu Dependencies
57 |
58 | ```bash
59 | sudo apt update
60 | sudo apt install python3-dev libmysqlclient-dev apache2 apache2-dev make libapache2-mod-wsgi-py3
61 | ```
62 |
63 | ## Installation
64 |
65 | 1. **Create and activate a virtual environment:**
66 |
67 | ```bash
68 | python3 -m venv venv
69 | source venv/bin/activate
70 | ```
71 |
72 | 2. **Install dependencies:**
73 |
74 | ```bash
75 | pip install -r requirements.txt
76 | ```
77 |
78 | 3. **Compile gRPC protos:**
79 |
80 | ```bash
81 | make grpc-compile
82 | ```
83 |
84 | 4. **Download supported platforms JSON:**
85 |
86 | ```bash
87 | make download-platforms
88 | ```
89 |
90 | 5. **Create a dummy user (for development/testing):**
91 |
92 | ```bash
93 | make create-dummy-user
94 | ```
95 |
96 | 6. **Generate static x25519 keys:**
97 | ```bash
98 | make generate-static-keys
99 | ```
100 |
101 | ## Building and Running with Docker
102 |
103 | RelaySMS Vault provides two Docker stages: **production** and **development**.
104 |
105 | ### Development
106 |
107 | #### Build the Docker Image
108 |
109 | ```bash
110 | docker build --target development -t relaysms-vault:dev .
111 | ```
112 |
113 | #### Prepare Environment Files
114 |
115 | ```bash
116 | cp template.env .env && head -c 32 /dev/urandom | base64 > encryption.key && head -c 32 /dev/urandom | base64 > hashing.key
117 | ```
118 |
119 | > Edit `.env` as needed for your environment.
120 |
121 | #### Run the Container
122 |
123 | > [!TIP]
124 | >
125 | > **To allow external access to the container's gRPC network services, update `GRPC_HOST` from `localhost` to `0.0.0.0` in your `.env` file.**
126 | >
127 | > For example, run:
128 | >
129 | > ```bash
130 | > sed -i 's/^GRPC_HOST=localhost/GRPC_HOST=0.0.0.0/' .env
131 | > ```
132 | >
133 | > This ensures the gRPC server listens on all interfaces and is accessible from outside the container.
134 | >
135 | > **For long-term development, you may want to run the container in detached mode (`-d`) and view logs with:**
136 | >
137 | > ```bash
138 | > docker logs -f <container-name>
139 | > ```
140 |
141 | ```bash
142 | docker run --rm --env-file .env -p 19000:19000 -p 8000:8000 -p 8443:8443 -v $(pwd)/keystore:/vault/keystore -v $(pwd)/encryption.key:/vault/encryption.key -v $(pwd)/hashing.key:/vault/hashing.key relaysms-vault:dev
143 | ```
144 |
145 | > [!TIP]
146 | >
147 | > - To run in detached mode:
148 | > ```bash
149 | > docker run -d --name relaysms-vault-dev --env-file .env -p 19000:19000 -p 8000:8000 -p 8443:8443 -v $(pwd)/keystore:/vault/keystore -v $(pwd)/encryption.key:/vault/encryption.key -v $(pwd)/hashing.key:/vault/hashing.key relaysms-vault:dev
150 | > ```
151 | > Then view logs with:
152 | > ```bash
153 | > docker logs -f relaysms-vault-dev
154 | > ```
155 | > - REST API: `http://localhost:19000` or `https://localhost:19001`
156 | > - gRPC server: `localhost:8000` (plaintext) or `localhost:8001` (SSL)
157 | > - gRPC internal server: `localhost:8443` (plaintext) or `localhost:8444` (SSL)
158 | >
159 | > Expose SSL ports (`19001`, `8001`, `8444`) if you want to test SSL in development.
160 |
161 | ---
162 |
163 | ### Production
164 |
165 | #### Build the Docker Image
166 |
167 | ```bash
168 | docker build --target production -t relaysms-vault:prod .
169 | ```
170 |
171 | #### Prepare Environment Files
172 |
173 | ```bash
174 | cp template.env .env && head -c 32 /dev/urandom | base64 > encryption.key && head -c 32 /dev/urandom | base64 > hashing.key
175 | ```
176 |
177 | > Edit `.env` as needed for your environment.
178 |
179 | #### Run the Container
180 |
181 | > [!TIP]
182 | >
183 | > **To allow external access to the container's gRPC network services, update `GRPC_HOST` from `localhost` to `0.0.0.0` in your `.env` file.**
184 | >
185 | > For example, run:
186 | >
187 | > ```bash
188 | > sed -i 's/^GRPC_HOST=localhost/GRPC_HOST=0.0.0.0/' .env
189 | > ```
190 | >
191 | > This ensures the gRPC server listens on all interfaces and is accessible from outside the container.
192 | >
193 | > **For long-term production use, run in detached mode (`-d`) and view logs with:**
194 | >
195 | > ```bash
196 | > docker logs -f <container-name>
197 | > ```
198 |
199 | ```bash
200 | docker run --rm \
201 | --env-file .env \
202 | -p 19000:19000 -p 19001:19001 \
203 | -p 8000:8000 -p 8001:8001 \
204 | -p 8443:8443 -p 8444:8444 \
205 | -v $(pwd)/keystore:/vault/keystore \
206 | -v $(pwd)/encryption.key:/vault/encryption.key \
207 | -v $(pwd)/hashing.key:/vault/hashing.key \
208 | relaysms-vault:prod
209 | ```
210 |
211 | > [!TIP]
212 | >
213 | > - To run in detached mode:
214 | > ```bash
215 | > docker run -d \
216 | > --name relaysms-vault-prod \
217 | > --env-file .env \
218 | > -p 19000:19000 -p 19001:19001 \
219 | > -p 8000:8000 -p 8001:8001 \
220 | > -p 8443:8443 -p 8444:8444 \
221 | > -v $(pwd)/keystore:/vault/keystore \
222 | > -v $(pwd)/encryption.key:/vault/encryption.key \
223 | > -v $(pwd)/hashing.key:/vault/hashing.key \
224 | > relaysms-vault:prod
225 | > ```
226 | > Then view logs with:
227 | > ```bash
228 | > docker logs -f relaysms-vault-prod
229 | > ```
230 | > - REST API: `https://localhost:19001`
231 | > - gRPC server: `localhost:8001` (SSL)
232 | > - gRPC internal server: `localhost:8444` (SSL)
233 | >
234 | > Plaintext ports (`19000`, `8000`, `8443`) are available for compatibility but SSL is enforced in production.
235 |
236 | ---
237 |
238 | ## Configuration
239 |
240 | Configure via environment variables, either in your shell or a `.env` file.
241 |
242 | **To load from `.env`:**
243 |
244 | ```bash
245 | set -a
246 | source .env
247 | set +a
248 | ```
249 |
250 | **Or set individually:**
251 |
252 | ```bash
253 | export HOST=localhost
254 | export PORT=19000
255 | # etc.
256 | ```
257 |
258 | ### Server
259 |
260 | - `SSL_SERVER_NAME`: SSL certificate server name (default: `localhost`)
261 | - `HOST`: REST server host (default: `localhost`)
262 | - `PORT`: REST server port (default: `19000`)
263 | - `SSL_PORT`: REST SSL port (default: `19001`)
264 | - `SSL_CERTIFICATE`, `SSL_KEY`, `SSL_PEM`: SSL file paths (optional)
265 |
266 | ### gRPC
267 |
268 | - `GRPC_HOST`: gRPC server host (default: `localhost`)
269 | - `GRPC_PORT`: gRPC server port (default: `8000`)
270 | - `GRPC_SSL_PORT`: gRPC SSL port (default: `8001`)
271 | - `GRPC_INTERNAL_PORT`: Internal gRPC port (default: `8443`)
272 | - `GRPC_INTERNAL_SSL_PORT`: Internal gRPC SSL port (default: `8444`)
273 |
274 | ### Security
275 |
276 | - `SHARED_KEY`: Path to 32-byte encryption key (default: `encryption.key`)
277 | - `HASHING_SALT`: Path to 32-byte hashing salt (default: `hashing.key`)
278 |
279 | ### Database
280 |
281 | - `MYSQL_HOST`: MySQL host (default: `127.0.0.1`)
282 | - `MYSQL_USER`: MySQL username
283 | - `MYSQL_PASSWORD`: MySQL password
284 | - `MYSQL_DATABASE`: MySQL database (default: `relaysms_vault`)
285 | - `SQLITE_DATABASE_PATH`: SQLite file path (default: `vault.db`)
286 |
287 | ### Twilio
288 |
289 | - `TWILIO_ACCOUNT_SID`, `TWILIO_AUTH_TOKEN`, `TWILIO_SERVICE_SID`, `TWILIO_PHONE_NUMBER`: Twilio credentials
290 |
291 | ### OTP
292 |
293 | - `MOCK_OTP`: Enable mock OTP for development (`true` by default)
294 |
295 | ### CORS
296 |
297 | - `ORIGINS`: Allowed CORS origins (default: `[]`)
298 |
299 | ### Keystore
300 |
301 | - `KEYSTORE_PATH`: Keystore directory (default: `keystore`)
302 | - `STATIC_X25519_KEYSTORE_PATH`: Static X25519 keystore (default: `keystore/static_x25519`)
303 |
304 | ### Logging
305 |
306 | - `LOG_LEVEL`: Logging level (default: `info`)
307 |
308 | ### Dummy Data
309 |
310 | - `DUMMY_PHONENUMBERS`: Test phone numbers (default: `+237123456789`)
311 | - `DUMMY_PASSWORD`: Test password (default: `dummy_password`)
312 |
313 | ## References
314 |
315 | - [Security](docs/security.md): Vault security details
316 | - [gRPC](docs/grpc.md): gRPC integration and usage
317 | - [Specifications](docs/specifications.md):
318 | - [Long-Lived Tokens (LLTs)](docs/specifications.md#1-long-lived-tokens-llts)
319 | - [Device IDs](docs/specifications.md#2-device-id)
320 | - [Auth Phrase](docs/specifications.md#3-auth-phrase)
321 | - [REST API Resources](docs/api_versions.md):
322 | - [API V3](docs/api_v3.md)
323 |
324 | ## Contributing
325 |
326 | 1. Fork the repository
327 | 2. Create a feature branch: `git checkout -b feature-branch`
328 | 3. Commit your changes
329 | 4. Push to your branch
330 | 5. Open a pull request
331 |
332 | ## License
333 |
334 | Licensed under the GNU General Public License (GPL). See [LICENSE](LICENSE) for details.
335 |
--------------------------------------------------------------------------------
/app.py:
--------------------------------------------------------------------------------
1 | """
2 | This program is free software: you can redistribute it under the terms
3 | of the GNU General Public License, v. 3.0. If a copy of the GNU General
4 | Public License was not distributed with this file, see <https://www.gnu.org/licenses/>.
5 | """
6 |
7 | from flask import Flask
8 | from src.api_v3 import v3_blueprint
9 |
10 | app = Flask(__name__)  # WSGI entry point; served by gunicorn as "app:app" (see Makefile start-rest-api)
11 |
12 | app.register_blueprint(v3_blueprint)  # mounts the /v3 REST API routes
13 |
--------------------------------------------------------------------------------
/base_logger.py:
--------------------------------------------------------------------------------
1 | """Base Logger Module."""
2 |
3 | import os
4 | import logging
5 |
6 | log_level = os.getenv("LOG_LEVEL", "INFO").upper()  # e.g. "DEBUG", "INFO"; normalized to uppercase
7 | numeric_level = getattr(logging, log_level, None)  # level name -> numeric constant; None if unrecognized
8 |
9 | if not isinstance(numeric_level, int):
10 | raise ValueError(f"Invalid log level: {log_level}")  # fail fast on a bad LOG_LEVEL value
11 |
12 | logging.basicConfig(
13 | level=numeric_level, format="%(asctime)s - %(name)s - %(levelname)s - %(message)s"
14 | )
15 |
16 |
17 | def get_logger(name: str = None) -> logging.Logger:
18 | """
19 | Returns a configured logger instance with the specified name.
20 |
21 | Args:
22 | name (str, optional): The name of the logger; None returns the root logger.
23 |
24 | Returns:
25 | logging.Logger: Configured logger instance.
26 | """
27 | return logging.getLogger(name)
28 |
--------------------------------------------------------------------------------
/docs/api_v3.md:
--------------------------------------------------------------------------------
1 | # **API V3 Resource Documentation**
2 |
3 | ## **Base URL**
4 |
5 | ```
6 | https://vault.smswithoutborders.com
7 | ```
8 |
9 | ## **Endpoints**
10 |
11 | ### **1. Get Static x25519 Public Keys**
12 |
13 | Retrieve static x25519 public keys from the system.
14 |
15 | #### **Endpoint**
16 |
17 | ```
18 | GET /v3/keys/static-x25519
19 | ```
20 |
21 | #### **Response**
22 |
23 | **Status Code: 200 OK**
24 |
25 | **Response Body:**
26 |
27 | > The status can be `active`, `inactive`, or `archived`.
28 |
29 | ```json
30 | {
31 | "v1": [
32 | {
33 | "keypair": "pqkZPzH6jqZN5n45RuY91+k5ESxijfYOR+caOALJe1I=",
34 | "kid": 0,
35 | "status": "active"
36 | },
37 | {
38 | "keypair": "gh7eH6GT5l+wXnz0z/nqMu/BhSlyz7FxqFoneaPbS3E=",
39 | "kid": 1,
40 | "status": "active"
41 | }
42 | ],
43 | "v2": [
44 | {
45 | "keypair": "NJuBTMkQzLors6ZCok+ZBDWOXSB1NDhjKGKGchd1q1k=",
46 | "kid": 0,
47 | "status": "active"
48 | },
49 | {
50 | "keypair": "TdKzENsuN6LHlf6XalCEeE32lstF3KoVy1xi443WDAg=",
51 | "kid": 1,
52 | "status": "active"
53 | }
54 | ]
55 | }
56 | ```
57 |
58 | **Status Code: 500 Internal Server Error**
59 |
60 | **Response Body:**
61 |
62 | ```json
63 | {
64 | "error": "Oops! Something went wrong. Please try again later."
65 | }
66 | ```
67 |
68 | ---
69 |
70 | ### **2. Get Signup Metrics**
71 |
72 | Retrieve metrics for user signups within a specified date range, with options for filtering and pagination.
73 |
74 | #### **Endpoint**
75 |
76 | ```
77 | GET /v3/metrics/signup
78 | ```
79 |
80 | #### **Query Parameters**
81 |
82 | | Parameter | Type | Required | Description |
83 | | -------------- | ------ | -------- | ------------------------------------------------------------------------------------ |
84 | | `start_date` | string | Yes | Start date in "YYYY-MM-DD" format. |
85 | | `end_date` | string | Yes | End date in "YYYY-MM-DD" format. |
86 | | `country_code` | string | No | Country code for filtering signup users. |
87 | | `granularity` | string | No | Granularity for date grouping: "day" or "month". Defaults to "day". |
88 | | `group_by` | string | No | Grouping option: "country", "date", or None. Defaults to None (returns total count). |
89 | | `top` | int | No | Limits the number of results returned (overrides `page` and `page_size`). |
90 | | `page` | int | No | Page number for pagination. Defaults to `1`. |
91 | | `page_size` | int | No | Number of records per page. Defaults to `50`. Maximum recommended value is `100`. |
92 |
93 | #### **Response**
94 |
95 | **Status Code: 200 OK**
96 |
97 | **Response Body:**
98 |
99 | ```json
100 | {
101 |   "total_signup_users": 120,
102 |   "total_countries": 2,
104 | "total_signups_from_bridges": 3,
105 | "countries": [
106 | "CM",
107 | "NG"
108 | ],
109 | "pagination": {
110 | "page": 1,
111 | "page_size": 50,
112 | "total_pages": 3,
113 | "total_records": 120
114 | },
115 | "data": [
116 | {
117 | "country_code": "CM",
118 | "signup_users": 30
119 | },
120 | {
121 | "timeframe": "2024-12-01",
122 | "signup_users": 20
123 | },
124 | ...
125 | ]
126 | }
127 | ```
128 |
129 | **Status Code: 400 Bad Request**
130 |
131 | **Response Body (Examples):**
132 |
133 | - Missing required parameters:
134 | ```json
135 | {
136 | "error": "Invalid input parameters. Provide 'start_date' and 'end_date'."
137 | }
138 | ```
139 | - Invalid `page`, `page_size`, or conflicting parameters (e.g., `top` with pagination):
140 | ```json
141 | {
142 | "error": "'top' cannot be used with 'page' or 'page_size'."
143 | }
144 | ```
145 | - Invalid `granularity` value:
146 | ```json
147 | {
148 | "error": "Invalid granularity. Use 'day' or 'month'."
149 | }
150 | ```
151 | - Invalid `group_by` value:
152 | ```json
153 | {
154 | "error": "Invalid group_by value. Use 'country', 'date', or None."
155 | }
156 | ```
157 |
158 | ---
159 |
160 | ### **3. Get Retained User Metrics**
161 |
162 | Retrieve metrics for retained (active) users within a specified date range, with options for filtering and pagination.
163 |
164 | #### **Endpoint**
165 |
166 | ```
167 | GET /v3/metrics/retained
168 | ```
169 |
170 | #### **Query Parameters**
171 |
172 | | Parameter | Type | Required | Description |
173 | | -------------- | ------ | -------- | ------------------------------------------------------------------------------------ |
174 | | `start_date` | string | Yes | Start date in "YYYY-MM-DD" format. |
175 | | `end_date` | string | Yes | End date in "YYYY-MM-DD" format. |
176 | | `country_code` | string | No | Country code for filtering retained users. |
177 | | `granularity` | string | No | Granularity for date grouping: "day" or "month". Defaults to "day". |
178 | | `group_by` | string | No | Grouping option: "country", "date", or None. Defaults to None (returns total count). |
179 | | `top` | int | No | Limits the number of results returned (overrides `page` and `page_size`). |
180 | | `page` | int | No | Page number for pagination. Defaults to `1`. |
181 | | `page_size` | int | No | Number of records per page. Defaults to `50`. Maximum recommended value is `100`. |
182 |
183 | #### **Response**
184 |
185 | **Status Code: 200 OK**
186 |
187 | **Response Body:**
188 |
189 | ```json
190 | {
191 |   "total_retained_users": 75,
192 |   "total_retained_users_with_tokens": 70,
193 |   "total_countries": 2,
195 | "countries": [
196 | "CM",
197 | "NG"
198 | ],
199 | "pagination": {
200 | "page": 1,
201 | "page_size": 50,
202 | "total_pages": 3,
203 | "total_records": 75
204 | },
205 | "data": [
206 | {
207 | "country_code": "CM",
208 | "retained_users": 25
209 | },
210 | {
211 | "timeframe": "2024-12-01",
212 | "retained_users": 15
213 | },
214 | ...
215 | ]
216 | }
217 | ```
218 |
219 | **Status Code: 400 Bad Request**
220 |
221 | **Response Body (Examples):**
222 |
223 | - Missing required parameters:
224 | ```json
225 | {
226 | "error": "Invalid input parameters. Provide 'start_date' and 'end_date'."
227 | }
228 | ```
229 | - Invalid `page`, `page_size`, or conflicting parameters (e.g., `top` with pagination):
230 | ```json
231 | {
232 | "error": "'top' cannot be used with 'page' or 'page_size'."
233 | }
234 | ```
235 | - Invalid `granularity` value:
236 | ```json
237 | {
238 | "error": "Invalid granularity. Use 'day' or 'month'."
239 | }
240 | ```
241 | - Invalid `group_by` value:
242 | ```json
243 | {
244 | "error": "Invalid group_by value. Use 'country', 'date', or None."
245 | }
246 | ```
247 |
--------------------------------------------------------------------------------
/docs/api_versions.md:
--------------------------------------------------------------------------------
1 | # Available API Versions
2 |
3 | - [APIv3](./api_v3.md)
4 |
--------------------------------------------------------------------------------
/docs/configurations.md:
--------------------------------------------------------------------------------
1 | # Configurations
2 |
3 | ## Table of contents
4 |
5 | 1. [Requirements](#requirements)
6 | 2. [Dependencies](#dependencies)
7 | 3. [Installation](#installation)
8 | 4. [Configuration Options](#configuration-options)
9 | 5. [How to use](#how-to-use)
10 | 6. [Docker](#docker)
11 | 7. [Logger](#logger)
12 | 8. [References](#references)
13 |
14 | ## Requirements
15 |
16 | - [MySQL](https://www.mysql.com/) (version >= 8.0.28) ([MariaDB](https://mariadb.org/))
17 | - [Python](https://www.python.org/) (version >= [3.8.10](https://www.python.org/downloads/release/python-3810/))
18 | - [Python Virtual Environments](https://docs.python.org/3/tutorial/venv.html)
19 |
20 | ## Dependencies
21 |
22 | On Ubuntu
23 |
24 | ```bash
25 | $ sudo apt install python3-dev libmysqlclient-dev apache2 apache2-dev make libapache2-mod-wsgi-py3
26 | ```
27 |
28 | ## Linux Environment Variables
29 |
30 | Variables used for the Project:
31 |
32 | - MYSQL_HOST=STRING
33 | - MYSQL_USER=STRING
34 | - MYSQL_PASSWORD=STRING
35 | - MYSQL_DATABASE=STRING
36 | - SHARED_KEY=PATH
37 | - HASHING_SALT=PATH
38 | - HOST=STRING
39 | - PORT=STRING
40 | - SSL_SERVER_NAME=STRING
41 | - SSL_PORT=STRING
42 | - SSL_CERTIFICATE=PATH
43 | - SSL_KEY=PATH
44 | - SSL_PEM=PATH
45 | - ORIGINS=ARRAY
46 | - PLATFORMS_PATH=STRING
47 | - TWILIO_ACCOUNT_SID=STRING
48 | - TWILIO_AUTH_TOKEN=STRING
49 | - TWILIO_SERVICE_SID=STRING
50 | - ENABLE_RECAPTCHA=BOOLEAN
51 | - RECAPTCHA_SECRET_KEY=STRING
52 | - BROADCAST_WHITELIST
53 | - MODE=STRING
54 |
55 | ## Installation
56 |
57 | Install all python packages for SMSWITHOUTBORDERS-BE
58 |
59 | ### Pip
60 |
61 | ```bash
62 | $ python3 -m venv venv
63 | $ . venv/bin/activate
64 | $ pip install -r requirements.txt
65 | ```
66 |
67 | ### Set Keys
68 |
69 | Configure shared-key and hashing-salt
70 |
71 | ```bash
72 | $ MYSQL_HOST= \
73 | MYSQL_USER= \
74 | MYSQL_PASSWORD= \
75 | MYSQL_DATABASE= \
76 | make set-keys
77 | ```
78 |
79 | If running the smswithoutborders-backend docker image use
80 |
81 | ```bash
82 | $ docker exec -it smswithoutborders-backend make set-keys
83 | ```
84 |
85 | > See current shared-key and hashing-salt with the `make get-keys` command
86 |
87 | ### Keys file format
88 |
89 | - Use the SHARED_KEY and HASHING_SALT environment variables to point to your key files.
90 | - Key should be on first line in your key files.
91 | - Key files should end with the suffix `.key`
92 |
93 | > NOTE: SHARED_KEY and HASHING_SALT environment variables must be provided else defaults will be used.
94 |
95 | ### Inject dummy data
96 |
97 | _For testing purposes only!_
98 |
99 | - Fill in all the necessary [environment variables](#linux-environment-variables)
100 |
101 | ```bash
102 | $ MYSQL_HOST= MYSQL_USER= MYSQL_PASSWORD= make dummy-user-inject
103 | ```
104 |
105 | If running the smswithoutborders-backend docker image use
106 |
107 | ```bash
108 | $ docker exec -it smswithoutborders-backend make dummy-user-inject
109 | ```
110 |
111 | Details:
112 |
113 | ```
114 | - User ID = dead3662-5f78-11ed-b8e7-6d06c3aaf3c6
115 | - Password = dummy_password
116 | - Name = dummy_user
117 | - Phone Number = +237123456789
118 | ```
119 |
120 | ### Configuration Options
121 |
122 | Manages access to the SMS without borders centralized resources and services.
123 |
124 | **API**
125 |
126 | 1. **SECURE COOKIE**: Specifies the boolean value for the [Secure Set-Cookie attribute](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Set-Cookie). When truthy, the Secure attribute is set, otherwise it is not. By default, the Secure sessions attribute is set to truthy.
127 | 2. **COOKIE MAXAGE**: Specifies the number (in milliseconds) to use when calculating the [Expires Set-Cookie attribute](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Set-Cookie). This is done by taking the current server time and adding maxAge milliseconds to the value to calculate an Expires datetime. By default, maximum age is set for two hours (7200000 ms).
128 | 3. **ENABLE BLOCKING**: Specifies the boolean value for tracking user failed [authentication](features_v2.md#2-authenticate-an-account) attempts.
129 | 4. **SHORT BLOCK ATTEMPTS**: Specifies the number of failed [authentication](features_v2.md#2-authenticate-an-account) attempts before a short block. Several short blocks result in a long block.
130 | 5. **LONG BLOCK ATTEMPTS**: Specifies the number of failed short block attempts before a long block.
131 | 6. **SHORT BLOCK DURATION**: Specifies the duration (in minutes) of a short block.
132 | 7. **LONG BLOCK DURATION**: Specifies the duration (in minutes) of a long block.
133 |
134 | **OTP**
135 |
136 | A user has four attempts to request an OTP code daily
137 |
138 | 1. **ENABLE_OTP_BLOCKING**: Specifies the boolean value for switching on/off tracking OTP code requests.
139 | 2. **FIRST RESEND DURATION**: Specifies the duration (in milliseconds) for the first OTP request.
140 | 3. **SECOND RESEND DURATION**: Specifies the duration (in milliseconds) for the second OTP request.
141 | 4. **THIRD RESEND DURATION**: Specifies the duration (in milliseconds) for the third OTP request.
142 | 5. **FOURTH RESEND DURATION**: Specifies the duration (in milliseconds) for the fourth OTP request.
143 |
144 | ## How to use
145 |
146 | ### Start Backend API
147 |
148 | **Python**
149 |
150 | ```bash
151 | $ MYSQL_HOST= \
152 | MYSQL_USER= \
153 | MYSQL_PASSWORD= \
154 | MYSQL_DATABASE= \
155 | HOST= \
156 | PORT= \
157 | SSL_SERVER_NAME= \
158 | SSL_PORT= \
159 | SSL_CERTIFICATE= \
160 | SSL_KEY= \
161 | SSL_PEM= \
162 | ORIGINS=[""] \
163 | TWILIO_ACCOUNT_SID= \
164 | TWILIO_AUTH_TOKEN= \
165 | TWILIO_SERVICE_SID= \
166 | ENABLE_RECAPTCHA= \
167 | RECAPTCHA_SECRET_KEY= \
168 | BROADCAST_WHITELIST= \
169 | MODE=production \
170 | python3 server.py
171 | ```
172 |
173 | **MOD_WSGI**
174 |
175 | ```bash
176 | $ MYSQL_HOST= \
177 | MYSQL_USER= \
178 | MYSQL_PASSWORD= \
179 | MYSQL_DATABASE= \
180 | HOST= \
181 | PORT= \
182 | SSL_SERVER_NAME= \
183 | SSL_PORT= \
184 | SSL_CERTIFICATE= \
185 | SSL_KEY= \
186 | SSL_PEM= \
187 | ORIGINS=[""] \
188 | TWILIO_ACCOUNT_SID= \
189 | TWILIO_AUTH_TOKEN= \
190 | TWILIO_SERVICE_SID= \
191 | ENABLE_RECAPTCHA= \
192 | RECAPTCHA_SECRET_KEY= \
193 | BROADCAST_WHITELIST= \
194 | MODE=production \
195 | mod_wsgi-express start-server wsgi_script.py \
196 | --user www-data \
197 | --group www-data \
198 | --port '${PORT}' \
199 | --ssl-certificate-file '${SSL_CERTIFICATE}' \
200 | --ssl-certificate-key-file '${SSL_KEY}' \
201 | --ssl-certificate-chain-file '${SSL_PEM}' \
202 | --https-only \
203 | --server-name '${SSL_SERVER_NAME}' \
204 | --https-port '${SSL_PORT}'
205 | ```
206 |
207 | ## Docker
208 |
209 | ### Build
210 |
211 | Build smswithoutborders-backend development docker image
212 |
213 | ```bash
214 | $ docker build --target development -t smswithoutborders-backend .
215 | ```
216 |
217 | Build smswithoutborders-backend production docker image
218 |
219 | ```bash
220 | $ docker build --target production -t smswithoutborders-backend .
221 | ```
222 |
223 | ### Run
224 |
225 | Run smswithoutborders-backend development docker image. Fill in all the necessary [environment variables](#linux-environment-variables)
226 |
227 | ```bash
228 | $ docker run -d -p 9000:9000 \
229 | --name smswithoutborders-backend \
230 | --env 'MYSQL_HOST=' \
231 | --env 'MYSQL_USER=' \
232 | --env 'MYSQL_PASSWORD=' \
233 | --env 'MYSQL_DATABASE=' \
234 | --env 'HOST=' \
235 | --env 'PORT=' \
236 | --env 'ORIGINS=[""]' \
237 | --env 'TWILIO_ACCOUNT_SID=' \
238 | --env 'TWILIO_AUTH_TOKEN=' \
239 | --env 'TWILIO_SERVICE_SID=' \
240 | --env 'ENABLE_RECAPTCHA=' \
241 | --env 'RECAPTCHA_SECRET_KEY=' \
242 | --env 'BROADCAST_WHITELIST=' \
243 | smswithoutborders-backend
244 | ```
245 |
246 | Run smswithoutborders-backend production docker image. Fill in all the necessary [environment variables](#linux-environment-variables)
247 |
248 | ```bash
249 | $ docker run -d -p 9000:9000 \
250 | --name smswithoutborders-backend \
251 | --env 'MYSQL_HOST=' \
252 | --env 'MYSQL_USER=' \
253 | --env 'MYSQL_PASSWORD=' \
254 | --env 'MYSQL_DATABASE=' \
255 | --env 'HOST=' \
256 | --env 'PORT=' \
257 | --env 'SSL_SERVER_NAME=' \
258 | --env 'SSL_PORT=' \
259 | --env 'SSL_CERTIFICATE=' \
260 | --env 'SSL_KEY=' \
261 | --env 'SSL_PEM=' \
262 | --env 'ORIGINS=[""]' \
263 | --env 'TWILIO_ACCOUNT_SID=' \
264 | --env 'TWILIO_AUTH_TOKEN=' \
265 | --env 'TWILIO_SERVICE_SID=' \
266 | --env 'ENABLE_RECAPTCHA=' \
267 | --env 'RECAPTCHA_SECRET_KEY=' \
268 | --env 'BROADCAST_WHITELIST=' \
269 | --env 'MODE=production' \
270 | smswithoutborders-backend
271 | ```
272 |
273 | > Read in a file of environment variables with `--env-file` command e.g. `docker run -d -p 9000:9000 --name smswithoutborders-backend --env-file myenv.txt smswithoutborders-backend`
274 |
275 | > Mount path to SSL files with volume `-v` command e.g. `docker run -v /host/path/to/certs:/container/path/to/certs -d -p 9000:9000 --name smswithoutborders-backend --env-file myenv.txt smswithoutborders-backend`
276 |
277 | ## Logger
278 |
279 | ### Python
280 |
281 | ```bash
282 | $ python3 server.py --logs=debug
283 | ```
284 |
285 | ### Docker
286 |
287 | Container logs
288 |
289 | ```bash
290 | $ docker logs smswithoutborders-backend
291 | ```
292 |
293 | API logs in container
294 |
295 | ```bash
296 | $ docker exec -it smswithoutborders-backend tail -f
297 | ```
298 |
299 | ## References
300 |
301 | - [SMSWithoutBorders-BE-Publisher](https://github.com/smswithoutborders/SMSWithoutBorders-BE-Publisher)
302 | - [MySQL Docker official image](https://hub.docker.com/_/mysql)
303 | - [MariaDB Docker official image](https://hub.docker.com/_/mariadb)
304 |
--------------------------------------------------------------------------------
/docs/contributing.md:
--------------------------------------------------------------------------------
1 | # Contributing
2 |
3 | When contributing to this repository, please first discuss the change you wish to make at [Issues](https://github.com/smswithoutborders/SMSwithoutborders-BE/issues).
4 |
5 | ## Development process
6 |
7 | ### Prerequisites
8 |
9 | - [MySQL](https://www.mysql.com/) (version >= 8.0.28) ([MariaDB](https://mariadb.org/))
10 | - [Python](https://www.python.org/) (version >= [3.8.10](https://www.python.org/downloads/release/python-3810/))
11 | - [Python Virtual Environments](https://docs.python.org/3/tutorial/venv.html)
12 |
13 | ### Workflow
14 |
15 | We follow [Git Flow](https://guides.github.com/introduction/flow/) for changes to this repository. If you're not familiar with using git and GitHub on the command line, [GitHub Desktop](https://desktop.github.com) is an easier alternative.
16 |
17 | 1. Fork this repo to create a copy hosted on your GitHub account. The Fork button is in the top right of the page.
18 | - If you're a collaborator on the repo you can instead create a branch.
19 | 2. Clone down your copy of this repo onto your local machine: `git clone `
20 | 3. Navigate to the new directory git created: `cd SMSwithoutborders-BE`
21 | 4. See [Configurations steps](configurations.md) to configure your development environment.
22 | 5. Create a new branch for your work based on main: `git checkout -b ` Your branch name should be something descriptive of the changes you wish to make, and can include the issue number this change is associated with. Example: `feature/1234-update-documentation`
23 | 6. Make your changes. When you're ready to apply your changes, push your changed files to your forked repo:
24 | - `git add `
25 | - `git commit -m ""` Your commit message should be descriptive of the changes you made.
26 | - `git push -u origin HEAD` This will push your changes to the branch you created on your forked repo.
27 | 7. Open a Pull Request to the `SMSwithoutborders-BE` repo:
28 | - Navigate to the [SMSwithoutborders-BE](https://github.com/smswithoutborders/SMSwithoutborders-BE) repo
29 | - Select `New pull request`
30 | - Select `Compare across forks`
31 | - Select `base repository: SMSwithoutborders-BE`
32 | - Select `base branch: main`
33 | - Select `head repository: `
34 | - Select `head branch: `
35 | - Select `Create pull request`
36 |
37 | Your pull request will be reviewed and we'll get back to you!
38 |
--------------------------------------------------------------------------------
/docs/security.md:
--------------------------------------------------------------------------------
1 | # Security
2 |
3 | This document outlines the cryptographic methods used in the RelaySMS Vault. All cryptographic operations are defined in the [`crypto.py`](../src/crypto.py) file.
4 |
5 | ## Cryptographic Methods
6 |
7 | ### AES Encryption
8 |
9 | **Advanced Encryption Standard (AES)** is used for secure data storage.
10 |
11 | - **Key Size**: 256 bits (32 bytes)
12 | - **Mode of Operation**: AES-EAX
13 | - **Purpose**: Encrypts and decrypts data at rest.
14 | - **Reference**: [NIST AES Specification](https://www.nist.gov/publications/advanced-encryption-standard-aes)
15 |
16 | ### HMAC for Integrity Verification
17 |
18 | **Hash-based Message Authentication Code (HMAC)** ensures data integrity.
19 |
20 | - **Algorithm**: SHA-512
21 | - **Key Size**: 256 bits (32 bytes)
22 | - **Purpose**: Verifies data authenticity.
23 | - **Reference**: [RFC 2104 - HMAC](https://datatracker.ietf.org/doc/html/rfc2104)
24 |
25 | ### Fernet Encryption
26 |
27 | **Fernet encryption** is used for token encryption.
28 |
29 | - **Key Size**: 256 bits (32 bytes)
30 | - **Purpose**: Encrypts and decrypts identity tokens.
31 | - **Reference**: [Fernet Cryptography Documentation](https://cryptography.io/en/latest/fernet/)
32 |
33 | ### Message Encryption
34 |
35 | **Signal Double Ratchet Algorithm** encrypts and decrypts messages.
36 |
37 | - **Key Exchange**: X25519 public key exchange.
38 | - **Algorithm**: Double Ratchet for message encryption.
39 | - **Purpose**: Secures message transmission.
40 | - **Reference**: [Signal Protocol Documentation](https://signal.org/docs/specifications/doubleratchet/)
41 |
--------------------------------------------------------------------------------
/docs/specifications.md:
--------------------------------------------------------------------------------
1 | # Specifications
2 |
3 | ## Table of Contents
4 |
5 | 1. [Long-Lived Tokens (LLTs)](#1-long-lived-tokens-llts)
6 | - [Purpose](#purpose)
7 | - [Usage](#usage)
8 | - [Generating LLTs](#1-generating-llts)
9 | - [Retrieving the LLT from Ciphertext](#2-retrieving-the-llt-from-ciphertext)
10 | - [Code Example (Python)](#code-example-python)
11 | 2. [Device ID](#2-device-id)
12 | - [Purpose](#purpose-1)
13 | - [Usage](#usage-1)
14 | - [Generating Device ID](#1-generating-device-id)
15 | - [Code Example (Python)](#code-example-python-1)
16 | 3. [Auth Phrase](#3-auth-phrase)
17 |
18 | ## 1. Long-Lived Tokens (LLTs)
19 |
20 | ### Purpose
21 |
22 | Long-Lived Tokens (LLTs) provide a secure mechanism for managing user sessions
23 | and access control within the vault. They authenticate users and grant access
24 | for a predefined duration, reducing the need for frequent logins.
25 |
26 | ### Usage
27 |
28 | #### 1. Generating LLTs
29 |
30 | - **JWT Creation**: A JSON Web Token (JWT) is created containing user
31 | information, signed using the `HS256` algorithm. The secret key used for
32 | signing is the `device_id shared secret key` (URL-safe base64-encoded)
33 | obtained from the `X25519` handshake between the client and the vault.
34 |
35 | - **Payload**: The JWT payload includes:
36 | - `entity_id (eid)`
37 | - `issuer (iss)`
38 | - `issued_at (iat)`
39 | - `expiration_time (exp)`
40 |
41 | - **Encryption**: The generated JWT is symmetrically encrypted using
42 | [Fernet (symmetric encryption)](https://cryptography.io/en/latest/fernet/).
43 | The Fernet key used for encryption is the `device_id shared secret key`
44 | (URL-safe base64-encoded) obtained from the `X25519` handshake between the
45 | client and the vault.
46 |
47 | The two-step process for generating LLTs ensures the JWT is signed and then
48 | encrypted. This encryption protects the token content from unauthorized access.
49 | Even if intercepted, the token cannot be used without the client's device, which
50 | can perform an `X25519` handshake with the vault.
51 |
52 | #### 2. Retrieving the LLT from Ciphertext
53 |
54 | - **Decrypting**: Upon successful authentication, the user obtains an LLT
55 | ciphertext, which must be decrypted to access the plaintext LLT. Decryption is
56 | performed using
57 | [Fernet (symmetric encryption)](https://cryptography.io/en/latest/fernet/).
58 | The Fernet key used is the `device_id shared secret key` (URL-safe
59 | base64-encoded) obtained from the `X25519` handshake between the client and
60 | the vault.
61 | - **Plaintext LLT**: The plaintext LLT is used for subsequent requests to the
62 | vault. This LLT contains user identification information and is signed to
63 | prevent tampering.
64 |
65 | > [!NOTE]
66 | >
67 | > It is recommended not to store the plaintext LLT. Instead, the client should
68 | > decrypt the LLT ciphertext on-demand using the device ID shared secret key
69 | > obtained from the X25519 handshake. This prevents unauthorized access to the
70 | > plaintext LLT, even if the client device is compromised.
71 |
72 | ### Code Example (Python)
73 |
74 | **Generating LLTs**
75 |
76 | ```python
77 | import base64
78 | from cryptography.fernet import Fernet
79 | from jwt import JWT, jwk_from_dict
80 | from jwt.utils import get_int_from_datetime
81 | from datetime import datetime, timedelta
82 |
83 | # The entity ID
84 | eid = 'entity_id'
85 | # Device ID shared secret key obtained from the X25519 handshake
86 | key = b'shared_secret_key'
87 |
88 | # Create the JWT payload
89 | payload = {
90 | "eid": eid,
91 | "iss": "https://smswithoutborders.com",
92 | "iat": get_int_from_datetime(datetime.now()),
93 | "exp": get_int_from_datetime(datetime.now() + timedelta(minutes=5)),
94 | }
95 |
96 | # Create the signing key
97 | signing_key = jwk_from_dict({
98 | "kty": "oct",
99 | "k": base64.urlsafe_b64encode(key).decode("utf-8")
100 | })
101 |
102 | # Encode the JWT
103 | token_obj = JWT()
104 | llt = token_obj.encode(payload, signing_key, alg="HS256")
105 |
106 | # Encrypt the JWT using Fernet
107 | fernet = Fernet(base64.urlsafe_b64encode(key))
108 | llt_ciphertext = fernet.encrypt(llt.encode("utf-8"))
109 |
110 | # Return the encrypted LLT
111 | print(base64.b64encode(llt_ciphertext).decode("utf-8"))
112 | ```
113 |
114 | **Retrieving the LLT from Ciphertext**
115 |
116 | ```python
117 | import base64
118 | from cryptography.fernet import Fernet
119 |
120 | # Obtained from successful authentication
121 | llt_ciphertext = 'encrypted_llt'
122 | # Device ID shared secret key obtained from the X25519 handshake
123 | key = b'shared_secret_key'
124 |
125 | # Decrypt the LLT using Fernet
126 | fernet = Fernet(base64.urlsafe_b64encode(key))
127 | llt_plaintext = fernet.decrypt(base64.b64decode(llt_ciphertext)).decode("utf-8")
128 |
129 | # Return the decrypted LLT
130 | print(llt_plaintext)
131 | ```
132 |
133 | ## 2. Device ID
134 |
135 | ### Purpose
136 |
137 | The device ID is a unique identifier for a device which can be used to identify
138 | an entity other than their phone number. This is useful as entities can use
139 | other phone numbers other than the one used to create their account with an
140 | authenticated device to be able to publish messages with RelaySMS.
141 |
142 | ### Usage
143 |
144 | #### 1. Generating Device ID
145 |
146 | - **Hashing**: An `HMAC` with the `SHA-256` hash algorithm is used to hash a combination of the entity's `phone number` ([E.164 format](https://en.wikipedia.org/wiki/E.164), e.g., +237123456789) and the entity's `device ID public key` (in bytes) used for the `X25519` handshake between the client and the vault. The `device_id` shared secret key obtained from the `X25519` handshake between the client and the vault is then used as the `HMAC` key for hashing the combination `(phone_number + public_key_bytes)`. The resulting bytes of the hash then become the computed device ID.
147 |
148 | > [!NOTE]
149 | >
150 | > It is recommended not to store the computed device ID. Instead, it should be computed on-demand on the authorized device. This prevents unauthorized access to the device ID, even if the client device is compromised.
151 |
152 | ### Code Example (Python)
153 |
154 | ```python
155 | import hmac
156 | import hashlib
157 |
158 | def compute_device_id(secret_key: bytes, phone_number: str, public_key: bytes) -> bytes:
159 | """
160 | Compute a device ID using HMAC and SHA-256.
161 |
162 | Args:
163 | secret_key (bytes): The secret key used for HMAC.
164 | phone_number (str): The phone number to be included in the HMAC input.
165 | public_key (bytes): The public key to be included in the HMAC input.
166 |
167 | Returns:
168 | bytes: The bytes representation of the HMAC digest.
169 | """
170 | # Combine phone number and public key
171 | combined_input = phone_number.encode("utf-8") + public_key
172 | # Compute HMAC with SHA-256
173 | hmac_object = hmac.new(secret_key, combined_input, hashlib.sha256)
174 | # Return bytes representation of HMAC digest
175 | return hmac_object.digest()
176 | ```
177 |
178 | ## 3. Auth Phrase
179 |
180 | ```python
181 | auth_phrase = (
182 | bytes([len(server_publish_pub_key)]) # Length of public key
183 | + server_publish_pub_key # Public key
184 | )
185 | print(base64.b64encode(auth_phrase).decode("utf-8"))
186 | ```
187 |
188 | - **Public Key Length**: The first byte indicates the length of the public key.
189 | - **Public Key**: The actual server's public key.
190 |
191 | The SMS message is formatted as follows:
192 |
193 | ```
194 | RelaySMS Please paste this entire message in your RelaySMS app
195 |
196 | ```
197 |
--------------------------------------------------------------------------------
/example.env:
--------------------------------------------------------------------------------
1 | HOST=localhost
2 | PORT=9000
3 | GRPC_HOST=localhost
4 | GRPC_PORT=8000
5 | GRPC_SSL_PORT=8001
6 | GRPC_INTERNAL_PORT=8443
7 | GRPC_INTERNAL_SSL_PORT=8444
8 | SSL_SERVER_NAME=localhost
9 | SSL_PORT=9001
10 | SSL_CERTIFICATE=
11 | SSL_KEY=
12 | SSL_PEM=
13 | SHARED_KEY=
14 | HASHING_SALT=
15 | BROADCAST_WHITELIST=
16 | MYSQL_HOST=127.0.0.1
17 | MYSQL_USER=
18 | MYSQL_PASSWORD=
19 | MYSQL_DATABASE=smswithoutborders_vault
20 | TWILIO_ACCOUNT_SID=
21 | TWILIO_AUTH_TOKEN=
22 | TWILIO_SERVICE_SID=
23 | MOCK_OTP=false
24 | RECAPTCHA_SECRET_KEY=
25 | ENABLE_RECAPTCHA=
26 | ORIGINS=
27 | GMAIL_CREDENTIALS=
28 | TWITTER_CREDENTIALS=
29 | TELEGRAM_CREDENTIALS=
30 | TELEGRAM_RECORDS=
31 | KEYSTORE_PATH=
--------------------------------------------------------------------------------
/grpc_internal_server.py:
--------------------------------------------------------------------------------
1 | """
2 | This program is free software: you can redistribute it under the terms
3 | of the GNU General Public License, v. 3.0. If a copy of the GNU General
4 | Public License was not distributed with this file, see .
5 | """
6 |
7 | import os
8 | from concurrent import futures
9 |
10 | import grpc
11 | from grpc_interceptor import ServerInterceptor
12 | import vault_pb2_grpc
13 |
14 | from src.utils import get_configs
15 | from src.grpc_entity_internal_service import EntityInternalService
16 | from base_logger import get_logger
17 |
18 | logger = get_logger("vault.grpc.internal.server")
19 |
20 |
class LoggingInterceptor(ServerInterceptor):
    """
    gRPC server interceptor for logging requests.

    Logs one line per RPC: the method name, the protocol label, and
    either "OK" or the gRPC status code name when the handler set
    error details on the context.
    """

    def __init__(self):
        """
        Initialize the LoggingInterceptor.
        """
        # Reuse the module-level logger so all RPC logs share one namespace.
        self.logger = logger
        # Label only; gRPC always runs over HTTP/2.
        self.server_protocol = "HTTP/2.0"

    def intercept(self, method, request_or_iterator, context, method_name):
        """
        Intercept method calls for each incoming RPC.

        Invokes the wrapped handler first, then inspects the context:
        a non-empty details() string signals the handler aborted with an
        error, so the status code name is logged at ERROR level.
        """
        response = method(request_or_iterator, context)
        if context.details():
            self.logger.error(
                "%s %s - %s -",
                method_name,
                self.server_protocol,
                # context.code() reprs like "StatusCode.NOT_FOUND";
                # keep only the name after the dot.
                str(context.code()).split(".")[1],
            )
        else:
            self.logger.info("%s %s - %s -", method_name, self.server_protocol, "OK")
        return response
48 |
49 |
def serve():
    """
    Starts the gRPC internal server and listens for requests.

    In "production" mode the server is served over TLS using the configured
    certificate/key pair; in any other mode it listens on an insecure port.
    Blocks until terminated; a KeyboardInterrupt shuts it down gracefully.

    Raises:
        FileNotFoundError: If the TLS certificate or key file is missing
            in production mode.
        Exception: Re-raised for any other TLS credential loading failure.
    """
    mode = get_configs("MODE", default_value="development")
    hostname = get_configs("GRPC_HOST")

    num_cpu_cores = os.cpu_count()
    # Fixed pool size; the core count is logged for operational visibility only.
    max_workers = 10

    logger.info("Starting server in %s mode...", mode)
    logger.info("Hostname: %s", hostname)
    logger.info("Logical CPU cores available: %s", num_cpu_cores)
    logger.info("gRPC server max workers: %s", max_workers)

    server = grpc.server(
        futures.ThreadPoolExecutor(max_workers=max_workers),
        interceptors=[LoggingInterceptor()],
    )
    vault_pb2_grpc.add_EntityInternalServicer_to_server(EntityInternalService(), server)

    if mode == "production":
        server_certificate = get_configs("SSL_CERTIFICATE")
        private_key = get_configs("SSL_KEY")
        secure_port = get_configs("GRPC_INTERNAL_SSL_PORT")

        logger.info("Secure port: %s", secure_port)
        try:
            with open(server_certificate, "rb") as f:
                server_certificate_data = f.read()
            with open(private_key, "rb") as f:
                private_key_data = f.read()

            server_credentials = grpc.ssl_server_credentials(
                ((private_key_data, server_certificate_data),)
            )
            server.add_secure_port(f"{hostname}:{secure_port}", server_credentials)
            logger.info(
                "TLS is enabled: The server is securely running at %s:%s",
                hostname,
                secure_port,
            )
        except FileNotFoundError as e:
            logger.critical(
                (
                    "Unable to start server: TLS certificate or key file not found: %s. "
                    "Please check your configuration."
                ),
                e,
            )
            raise
        except Exception as e:
            # BUG FIX: the two message strings were previously separated by a
            # comma, making the log "message" a tuple; logging then failed to
            # %-format it (tuple % args raises TypeError inside the logging
            # machinery) and the intended message was never emitted. Adjacent
            # string literals concatenate into a single format string.
            logger.critical(
                (
                    "Unable to start server: Error loading TLS credentials: %s. "
                    "Please check your configuration."
                ),
                e,
            )
            raise
    else:
        port = get_configs("GRPC_INTERNAL_PORT")
        logger.info("Insecure port: %s", port)

        server.add_insecure_port(f"{hostname}:{port}")
        logger.warning(
            "The server is running in insecure mode at %s:%s", hostname, port
        )

    server.start()

    try:
        server.wait_for_termination()
    except KeyboardInterrupt:
        logger.info("Shutting down the server...")
        server.stop(0)
        logger.info("The server has stopped successfully")
127 |
128 |
# Entry point: run the internal gRPC server when executed as a script.
if __name__ == "__main__":
    serve()
131 |
--------------------------------------------------------------------------------
/grpc_server.py:
--------------------------------------------------------------------------------
1 | """
2 | This program is free software: you can redistribute it under the terms
3 | of the GNU General Public License, v. 3.0. If a copy of the GNU General
4 | Public License was not distributed with this file, see .
5 | """
6 |
7 | import os
8 | from concurrent import futures
9 |
10 | import grpc
11 | from grpc_interceptor import ServerInterceptor
12 | import vault_pb2_grpc
13 |
14 | from src.utils import get_configs
15 | from src.grpc_entity_service import EntityService
16 | from base_logger import get_logger
17 |
18 | logger = get_logger("vault.grpc.server")
19 |
20 |
class LoggingInterceptor(ServerInterceptor):
    """
    gRPC server interceptor that logs the outcome of every RPC.
    """

    def __init__(self):
        """Set up the shared logger and the protocol label used in log lines."""
        self.logger = logger
        self.server_protocol = "HTTP/2.0"

    def intercept(self, method, request_or_iterator, context, method_name):
        """
        Run the wrapped handler, log its outcome, and pass the response through.
        """
        response = method(request_or_iterator, context)
        if not context.details():
            self.logger.info("%s %s - %s -", method_name, self.server_protocol, "OK")
            return response
        # context.code() reprs like "StatusCode.NOT_FOUND"; log only the name.
        status_name = str(context.code()).split(".")[1]
        self.logger.error(
            "%s %s - %s -", method_name, self.server_protocol, status_name
        )
        return response
48 |
49 |
def serve():
    """
    Starts the gRPC server and listens for requests.

    In "production" mode the server is served over TLS using the configured
    certificate/key pair; in any other mode it listens on an insecure port.
    Blocks until terminated; a KeyboardInterrupt shuts it down gracefully.

    Raises:
        FileNotFoundError: If the TLS certificate or key file is missing
            in production mode.
        Exception: Re-raised for any other TLS credential loading failure.
    """
    mode = get_configs("MODE", default_value="development")
    hostname = get_configs("GRPC_HOST")

    num_cpu_cores = os.cpu_count()
    # Fixed pool size; the core count is logged for operational visibility only.
    max_workers = 10

    logger.info("Starting server in %s mode...", mode)
    logger.info("Hostname: %s", hostname)
    logger.info("Logical CPU cores available: %s", num_cpu_cores)
    logger.info("gRPC server max workers: %s", max_workers)

    server = grpc.server(
        futures.ThreadPoolExecutor(max_workers=max_workers),
        interceptors=[LoggingInterceptor()],
    )
    vault_pb2_grpc.add_EntityServicer_to_server(EntityService(), server)

    if mode == "production":
        server_certificate = get_configs("SSL_CERTIFICATE", strict=True)
        private_key = get_configs("SSL_KEY", strict=True)
        secure_port = get_configs("GRPC_SSL_PORT", default_value=8443)

        logger.info("Secure port: %s", secure_port)
        try:
            with open(server_certificate, "rb") as f:
                server_certificate_data = f.read()
            with open(private_key, "rb") as f:
                private_key_data = f.read()

            server_credentials = grpc.ssl_server_credentials(
                ((private_key_data, server_certificate_data),)
            )
            server.add_secure_port(f"{hostname}:{secure_port}", server_credentials)
            logger.info(
                "TLS is enabled: The server is securely running at %s:%s",
                hostname,
                secure_port,
            )
        except FileNotFoundError as e:
            logger.critical(
                (
                    "Unable to start server: TLS certificate or key file not found: %s. "
                    "Please check your configuration."
                ),
                e,
            )
            raise
        except Exception as e:
            # BUG FIX: the two message strings were previously separated by a
            # comma, making the log "message" a tuple; logging then failed to
            # %-format it (tuple % args raises TypeError inside the logging
            # machinery) and the intended message was never emitted. Adjacent
            # string literals concatenate into a single format string.
            logger.critical(
                (
                    "Unable to start server: Error loading TLS credentials: %s. "
                    "Please check your configuration."
                ),
                e,
            )
            raise
    else:
        port = get_configs("GRPC_PORT", default_value=8000)
        logger.info("Insecure port: %s", port)

        server.add_insecure_port(f"{hostname}:{port}")
        logger.warning(
            "The server is running in insecure mode at %s:%s", hostname, port
        )

    server.start()

    try:
        server.wait_for_termination()
    except KeyboardInterrupt:
        logger.info("Shutting down the server...")
        server.stop(0)
        logger.info("The server has stopped successfully")
127 |
128 |
# Entry point: run the public gRPC server when executed as a script.
if __name__ == "__main__":
    serve()
131 |
--------------------------------------------------------------------------------
/migrations/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/smswithoutborders/RelaySMS-Vault/10f70b1b0fde90c225789efb804743736728de63/migrations/__init__.py
--------------------------------------------------------------------------------
/migrations/clean_account_identifiers.py:
--------------------------------------------------------------------------------
1 | """
2 | Script to clean and rehash account identifiers in tokens.
3 | """
4 |
5 | import tqdm
6 | from src.db_models import Token
7 | from src.crypto import generate_hmac
8 | from src.utils import decrypt_and_decode, encrypt_and_encode, load_key, get_configs
9 | from base_logger import get_logger
10 |
11 | HASHING_KEY = load_key(get_configs("HASHING_SALT"), 32)
12 |
13 | logger = get_logger("clean.account.identifiers")
14 |
15 |
def fetch_tokens():
    """Fetch all tokens from the database.

    Returns:
        A lazily-evaluated select query over every Token row
        (supports .count() and .iterator(), as used by main()).
    """
    return Token.select()
19 |
20 |
def process_token(token, pbar):
    """
    Process a single token to clean its account identifier if needed.

    Args:
        token: The token object to process.
        pbar: The tqdm progress bar to update.

    Returns:
        tuple: (cleaned, rehashed) flags — (1, rehash_flag) when the
        identifier was cleaned and saved, (0, 0) when no cleaning was
        needed or an error occurred.
    """
    try:
        account_identifier = decrypt_and_decode(token.account_identifier)
        account_identifier_hash = token.account_identifier

        clean_account_identifier = account_identifier.strip('"').strip()

        # Nothing to do when the stored identifier is already clean.
        # BUG FIX: previously this path fell through to a return that
        # referenced the (never assigned) clean hash, raising NameError.
        if account_identifier == clean_account_identifier:
            return 0, 0

        clean_account_identifier_hash = generate_hmac(
            HASHING_KEY, clean_account_identifier
        )
        logger.info(
            "Cleaning raw account identifier: '%r' to cleaned account identifier: '%r'",
            account_identifier,
            clean_account_identifier,
        )

        token.account_identifier = encrypt_and_encode(clean_account_identifier)
        rehashed = int(account_identifier_hash != clean_account_identifier_hash)
        if rehashed:
            token.account_identifier_hash = clean_account_identifier_hash

        token.save()

        # BUG FIX: the original had `return 0, 0` inside `finally`, which
        # unconditionally swallowed every earlier return value, so the
        # function always reported (0, 0) and main()'s totals stayed zero.
        return 1, rehashed
    except Exception:
        logger.error(
            "Error cleaning account_identifier: %s",
            token.account_identifier,
            exc_info=True,
        )
        return 0, 0
    finally:
        # Advance the progress bar exactly once per token, on every path.
        pbar.update(1)
61 |
62 |
def main():
    """Clean the account identifier of every token and report totals."""
    tokens = fetch_tokens()
    cleaned_total = 0
    rehashed_total = 0

    with tqdm.tqdm(total=tokens.count(), desc="Cleaning", unit="tokens") as pbar:
        for token in tokens.iterator():
            cleaned, rehashed = process_token(token, pbar)
            cleaned_total += cleaned
            rehashed_total += rehashed

    logger.info("Total Cleaned account identifiers: %s", cleaned_total)
    logger.info(
        "Total Cleaned account identifiers hash: %s", rehashed_total
    )
80 |
81 |
82 | if __name__ == "__main__":
83 | main()
84 |
--------------------------------------------------------------------------------
/migrations/entity_migration_script.py:
--------------------------------------------------------------------------------
1 | """
2 | Entity Migration Script
3 | """
4 |
5 | import logging
6 | from tqdm import tqdm
7 | from phonenumbers.phonenumberutil import region_code_for_country_code
8 | from src.schemas.users import Users
9 | from src.schemas.usersinfo import UsersInfos
10 | from src.schemas.wallets import Wallets
11 | from src.security.data import Data
12 | from src.utils import generate_eid, encrypt_and_encode
13 | from src.entity import create_entity
14 | from src.tokens import create_entity_token
15 |
# Shared helper for decrypting the legacy (v2) encrypted columns.
data = Data()

# Timestamped INFO-level logging for migration progress and errors.
logging.basicConfig(
    level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s"
)
21 |
22 |
def fetch_users_data():
    """Fetch data from the Users table."""
    users_query = Users.select()
    return users_query
26 |
27 |
def fetch_verified_users_infos_data():
    """Fetch verified user information from the UsersInfos table."""
    is_verified = UsersInfos.status == "verified"
    return UsersInfos.select().where(is_verified)
31 |
32 |
def fetch_users_wallets(user_id):
    """Fetch wallets data for a given user."""
    owned_by_user = Wallets.userId == user_id
    return Wallets.select().where(owned_by_user)
36 |
37 |
def migrate_user_data(user, user_info, wallets):
    """Transform one legacy user (plus wallets) into an Entity with tokens."""
    phone_number_hash = user_info.full_phone_number
    # The legacy country code is stored encrypted as a numeric calling
    # prefix; convert it to an ISO 3166-1 alpha-2 region code.
    dialing_prefix = int(data.decrypt(data=user_info.country_code))
    region_code = region_code_for_country_code(dialing_prefix)

    entity = create_entity(
        eid=generate_eid(phone_number_hash),
        phone_number_hash=phone_number_hash,
        password_hash=user.password,
        country_code=encrypt_and_encode(region_code),
    )

    # Re-encrypt each wallet's identifier/tokens with the new scheme and
    # attach them to the freshly created entity.
    for wallet in wallets:
        decrypted_identifier = data.decrypt(wallet.uniqueId)
        decrypted_tokens = data.decrypt(wallet.token)
        create_entity_token(
            entity,
            platform=wallet.platformId,
            account_identifier=encrypt_and_encode(decrypted_identifier),
            account_identifier_hash=wallet.uniqueIdHash,
            account_tokens=encrypt_and_encode(decrypted_tokens),
        )
64 |
65 |
def migrate_data():
    """Main function to migrate data from Users and UsersInfos to Entity."""
    users_data = fetch_users_data()
    users_infos_data = fetch_verified_users_infos_data()

    total = users_infos_data.count()
    with tqdm(total=total, desc="Migrating", unit="users") as pbar:
        for user_info in users_infos_data:
            try:
                user = users_data.where(Users.id == user_info.userId).get()
                wallets = fetch_users_wallets(user_info.userId)
                migrate_user_data(user, user_info, wallets)
            except Exception:
                logging.error(
                    "Error migrating user: %s", user_info.userId, exc_info=True
                )
            finally:
                # Advance exactly once per user, migrated or failed.
                pbar.update(1)
85 |
86 |
87 | if __name__ == "__main__":
88 | migrate_data()
89 |
--------------------------------------------------------------------------------
/migrations/populate_signups.py:
--------------------------------------------------------------------------------
1 | """
2 | This program is free software: you can redistribute it under the terms
3 | of the GNU General Public License, v. 3.0. If a copy of the GNU General
Public License was not distributed with this file, see <https://www.gnu.org/licenses/>.
5 | """
6 |
7 | from peewee import fn
8 | from playhouse.shortcuts import chunked
9 | from tqdm import tqdm
10 | from src.utils import decrypt_and_decode
11 | from src.db_models import Entity, Signups
12 | from base_logger import get_logger
13 |
logger = get_logger("populate.signups")
# Reuse the Signups model's configured database handle for the
# transaction in main().
database = Signups._meta.database
16 |
17 |
def process_batch(entities_batch):
    """
    Decrypt each entity's country code and shape the batch for insertion.

    Args:
        entities_batch (list): List of Entity objects.

    Returns:
        list: A list of dictionaries ready for insertion into the Signups table.
    """
    return [
        {
            "country_code": decrypt_and_decode(entity.country_code),
            "source": entity.source,
            "date_created": entity.date_created,
        }
        for entity in entities_batch
    ]
38 |
39 |
def main():
    """Populate the Signups table using data from the Entities table."""

    entities_query = Entity.select(
        Entity.country_code,
        fn.IF(Entity.password_hash.is_null(), "bridges", "platforms").alias("source"),
        Entity.date_created,
    )

    total_entities = entities_query.count()
    batch_size = 500
    # Ceiling division so a partial final batch still counts toward the bar.
    batch_count = -(-total_entities // batch_size)

    # One transaction for the whole backfill: either every batch lands
    # or none do.
    with database.atomic():
        for entities_batch in tqdm(
            chunked(entities_query.iterator(), batch_size),
            total=batch_count,
            desc="Processing and Inserting Signups",
        ):
            Signups.insert_many(process_batch(entities_batch)).execute()

    logger.info("Signups table populated successfully.")
63 |
64 |
65 | if __name__ == "__main__":
66 | main()
67 |
--------------------------------------------------------------------------------
/migrations/schema/CHANGELOG.md:
--------------------------------------------------------------------------------
1 | # Changelog
2 |
3 | ## [v1.1.0] - 2025-03-03
4 |
5 | ### Added
6 |
7 | - Added a new column `language` of type `CharField(null=True, constraints=[SQL("DEFAULT 'en'")])` to the `entities` table.
8 |
9 | ## [v1.0.2] - 2024-10-10
10 |
11 | ### Changed
12 |
13 | - Dropped the `NOT NULL` constraint from the `password_hash` column in the `entities` table.
14 |
15 | ### Added
16 |
17 | - Added a new column `is_bridge_enabled` of type `BooleanField()` to the `entities` table.
18 |
19 | ## [v1.0.1] - 2024-09-18
20 |
21 | ### Changed
22 |
23 | - Dropped the index `token_platform_account_identifier_hash` from the `tokens` table.
24 |
25 | ### Added
26 |
27 | - Added a unique composite index on `platform`, `account_identifier_hash`, and `eid` in the `tokens` table.
28 |
--------------------------------------------------------------------------------
/migrations/schema/README.md:
--------------------------------------------------------------------------------
1 | # Database Migration Script
2 |
3 | This script allows you to apply database migrations or initiate rollbacks using a JSON specification file. It uses the Peewee ORM and the Playhouse migrations module for database operations.
4 |
5 | > [!TIP]
6 | >
7 | > For more details on the Peewee ORM and how to perform database migrations using the Playhouse migrations module, refer to the official documentation:
8 | > [Peewee Playhouse Migrations](https://docs.peewee-orm.com/en/latest/peewee/playhouse.html#migrate)
9 |
10 | ## Getting Started
11 |
12 | ### Prerequisites
13 |
14 | - Python 3.10+ installed on your system
15 | - Peewee ORM (`pip install peewee`)
16 |
17 | ### Usage
18 |
19 | To execute database migrations or rollbacks, use the following command:
20 |
21 | ```bash
python3 -m migrations.schema.run <command> <spec_version>
23 | ```
24 |
- `<command>`: The command to execute, either `migrate` or `rollback`.
- `<spec_version>`: The version of the migration specification file you want to apply (e.g., `v1.0.0`).
27 |
28 | #### Example - Migrate to a specific schema version:
29 |
30 | ```bash
31 | python3 -m migrations.schema.run migrate v1.0.0
32 | ```
33 |
34 | #### Example - Rollback (Placeholder, not yet implemented):
35 |
36 | ```bash
37 | python3 -m migrations.schema.run rollback v1.0.0
38 | ```
39 |
40 | ### Spec File Format
41 |
42 | The migration specification file is a JSON file that defines the schema changes to be applied. Here's a sample format:
43 |
44 | ```json
45 | [
46 | {
47 | "action": "add_column",
48 | "table": "users",
49 | "column_name": "age",
50 | "field": "IntegerField()"
51 | },
52 | {
53 | "action": "drop_column",
54 | "table": "posts",
55 | "column_name": "author_id",
56 | "cascade": true
57 | },
58 | {
59 | "action": "rename_column",
60 | "table": "posts",
61 | "old_name": "title",
62 | "new_name": "post_title"
63 | },
64 | {
65 | "action": "add_not_null",
66 | "table": "comments",
67 | "column": "post_id"
68 | },
69 | {
70 | "action": "rename_table",
71 | "old_name": "posts",
72 | "new_name": "articles"
73 | },
74 | {
75 | "action": "add_index",
76 | "table": "articles",
77 | "columns": ["status", "created_at"],
78 | "unique": true
79 | },
80 | {
81 | "action": "drop_index",
82 | "table": "comments",
83 | "index_name": "post_id"
84 | }
85 | ]
86 | ```
87 |
88 | ### Supported Actions
89 |
90 | - `add_column`
91 | - `drop_column`
92 | - `rename_column`
93 | - `add_not_null`
94 | - `drop_not_null`
95 | - `rename_table`
96 | - `add_index`
97 | - `drop_index`
98 |
99 | Each action requires specific parameters, as mentioned in the sample spec file format.
100 |
101 | ### Rollback (Future Feature)
102 |
103 | Currently, the rollback functionality is not implemented. Future updates will support rolling back schema changes.
104 |
--------------------------------------------------------------------------------
/migrations/schema/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/smswithoutborders/RelaySMS-Vault/10f70b1b0fde90c225789efb804743736728de63/migrations/schema/__init__.py
--------------------------------------------------------------------------------
/migrations/schema/run.py:
--------------------------------------------------------------------------------
1 | """
2 | ════════════════════════════════════════════════════════════════════════
3 | Database Migration Tool Using Peewee ORM
4 | ════════════════════════════════════════════════════════════════════════
5 |
6 | Applies schema changes defined in JSON migration spec files.
7 | """
8 |
9 | import os
10 | import logging
11 | import json
12 | import argparse
13 | import ast
14 | import re
15 | from typing import List, Dict, Any
16 |
17 | import peewee
18 | from playhouse.migrate import MySQLMigrator, migrate
19 | from src.db import connect
20 |
logging.basicConfig(level="DEBUG")

# ──────────────────────────────────────────────────────────────────────
# Configuration
# ──────────────────────────────────────────────────────────────────────
# Directory containing the vN.N.N.json migration spec files.
MIGRATION_DIR = os.path.join("migrations", "schema")
SUCCESS = "✅"
FAILED = "❌"

# NOTE(review): connecting at import time means importing this module
# requires a reachable database — confirm that is intended.
db = connect()
migrator = MySQLMigrator(db)

# Whitelist of field constructors that spec files may name in "field"
# strings (see parse_field).
ALLOWED_FIELDS = {
    "CharField": peewee.CharField,
    "BooleanField": peewee.BooleanField,
}

# Whitelist of functions callable inside field argument strings.
ALLOWED_FUNCTIONS = {
    "SQL": peewee.SQL,
}

# Maps spec-file "action" names to the corresponding migrator operations.
ACTIONS = {
    "add_column": migrator.add_column,
    "drop_column": migrator.drop_column,
    "rename_column": migrator.rename_column,
    "add_not_null": migrator.add_not_null,
    "drop_not_null": migrator.drop_not_null,
    "rename_table": migrator.rename_table,
    "add_index": migrator.add_index,
    "drop_index": migrator.drop_index,
}
52 |
53 |
54 | # ──────────────────────────────────────────────────────────────────────
55 | # Helper Functions
56 | # ──────────────────────────────────────────────────────────────────────
def parse_field(field_str: str) -> peewee.Field:
    """Build a Peewee field instance from its string definition.

    Args:
        field_str (str): Field definition (e.g., "CharField(max_length=255)").

    Returns:
        peewee.Field: The corresponding Peewee field instance.

    Raises:
        ValueError: If the definition is malformed, names a field type
            outside ALLOWED_FIELDS, or its arguments fail to parse.
    """
    parsed = re.match(r"(\w+)\((.*)\)", field_str)
    if parsed is None:
        raise ValueError(f"Invalid field format: {field_str}")

    field_type = parsed.group(1)
    field_args = parsed.group(2)
    if field_type not in ALLOWED_FIELDS:
        raise ValueError(f"Unsupported field type: {field_type}")

    try:
        positional, keyword = _parse_arguments(field_args)
        return ALLOWED_FIELDS[field_type](*positional, **keyword)
    except (SyntaxError, ValueError) as e:
        raise ValueError(f"Error parsing field arguments: {field_args}\n\n{e}") from e
80 |
81 |
def _parse_arguments(field_args: str):
    """Parse a field definition's argument string.

    Args:
        field_args (str): The arguments in string format.

    Returns:
        tuple: (list of positional args, dict of keyword args).
    """
    if not field_args:
        return [], {}

    # Wrap the args in a dummy call so ast can parse them as a Call node.
    call_node = ast.parse(f"dummy({field_args})").body[0].value

    if not isinstance(call_node, ast.Call):
        return [], {}

    positional = [_parse_node(arg) for arg in call_node.args]
    keyword = {kw.arg: _parse_node(kw.value) for kw in call_node.keywords}
    return positional, keyword
103 |
104 |
def _parse_node(node):
    """Convert an AST node into its Python value.

    Args:
        node (ast.AST): The AST node to parse.

    Returns:
        Any: Parsed value (e.g., list, int, string, peewee.SQL).
    """
    if isinstance(node, ast.Call):
        return _parse_function_call(node)

    if isinstance(node, ast.List):
        return [_parse_node(element) for element in node.elts]

    # Plain literal (string, number, bool, None, ...).
    return ast.literal_eval(node)
121 |
122 |
def _parse_function_call(node):
    """Evaluate a whitelisted function call found in field arguments.

    Args:
        node (ast.Call): The AST function call node.

    Returns:
        Any: The evaluated function result.

    Raises:
        ValueError: If the function is not allowed.
    """
    func_name = node.func.id if isinstance(node.func, ast.Name) else None
    func = ALLOWED_FUNCTIONS.get(func_name) if func_name is not None else None

    if func is None:
        raise ValueError(f"Disallowed function call: {ast.dump(node)}")

    evaluated_args = [_parse_node(arg) for arg in node.args]
    return func(*evaluated_args)
144 |
145 |
def get_latest_schema_version() -> str:
    """
    Retrieve the latest schema version from the migration directory.

    Returns:
        str: Latest schema version (e.g. "v1.1.0"), or None if no
        migration spec files are found.
    """
    if not os.path.isdir(MIGRATION_DIR):
        print(f"⚠️ Warning: Migration directory not found: {MIGRATION_DIR}")
        return None

    versions = [
        # BUG FIX: rstrip(".json") strips a *character set*, not a
        # suffix — removesuffix() removes exactly ".json".
        file.removesuffix(".json")
        for file in os.listdir(MIGRATION_DIR)
        if file.startswith("v") and file.endswith(".json")
    ]
    if not versions:
        return None

    def version_key(version: str):
        """Numeric sort key: 'v1.10.2' -> (1, 10, 2); non-numeric parts sort low."""
        return tuple(
            int(part) if part.isdigit() else -1
            for part in version.lstrip("v").split(".")
        )

    # BUG FIX: the original used reverse lexicographic sorting, which
    # ranks "v1.9.0" above "v1.10.0"; compare numerically instead.
    return max(versions, key=version_key)
166 |
167 |
def load_spec(spec_version: str) -> List[Dict[str, Any]]:
    """
    Load the migration specification file for the given version.

    Args:
        spec_version (str): Schema version (e.g., "v1.0").

    Returns:
        List[Dict[str, Any]]: Parsed migration operations.

    Raises:
        FileNotFoundError: If the spec file does not exist.
    """
    spec_file_path = os.path.join(MIGRATION_DIR, f"{spec_version}.json")

    if os.path.exists(spec_file_path):
        with open(spec_file_path, encoding="utf-8") as spec_file:
            return json.load(spec_file)

    raise FileNotFoundError(f"Spec file '{spec_file_path}' not found.")
187 |
188 |
189 | # ──────────────────────────────────────────────────────────────────────
190 | # Migration Management Class
191 | # ──────────────────────────────────────────────────────────────────────
class MigrationManager:
    """Runs migration operations and tracks success/failure counts."""

    def __init__(self):
        # Counters reported in the summary after each run.
        self.migrations_done = 0
        self.migrations_failed = 0

    def _apply_operation(self, operation: Dict[str, Any]) -> None:
        """Apply a single spec entry, updating the counters; never raises."""
        print(f"\n🔄 Performing operation: {operation}")

        try:
            action = operation.pop("action")

            if "field" in operation:
                operation["field"] = parse_field(operation["field"])

            if action not in ACTIONS:
                raise ValueError(f"Unsupported action: {action}")

            migrate(ACTIONS[action](**operation))
        except Exception as e:
            self.migrations_failed += 1
            print(f"{FAILED} Operation failed: {operation}\n Error: {e}")
        else:
            self.migrations_done += 1
            print(f"{SUCCESS} Operation successful: {operation}")

    def migrate_operations(self, operations: List[Dict[str, Any]]):
        """
        Execute migration operations.

        Args:
            operations (list): List of migration actions to execute.

        Raises:
            ValueError: If unsupported actions or fields are encountered.
        """
        print("\nMigration Operations:")
        print("══════════════════════════════════════════════════════════════════════")

        for operation in operations:
            self._apply_operation(operation)

        print("\nMigration Summary:")
        print("══════════════════════════════════════════════════════════════════════")
        print(f"{SUCCESS} Completed migrations: {self.migrations_done}")
        print(f"{FAILED} Failed migrations: {self.migrations_failed}")

    def check_and_migrate_schema(self, current_schema_version: str):
        """
        Check the current schema version and run migrations if necessary.

        Args:
            current_schema_version (str): Current version of the schema.
        """
        latest_schema_version = get_latest_schema_version()

        if not latest_schema_version or current_schema_version == latest_schema_version:
            print(f"{SUCCESS} Database schema is up to date.")
            return

        print(
            f"\n🔁 Migration Required: Migrating from version "
            f"{current_schema_version} to {latest_schema_version}"
        )
        self.migrate_operations(load_spec(latest_schema_version))
        print(f"{SUCCESS} Migration to version {latest_schema_version} completed.")
256 |
257 |
258 | # ──────────────────────────────────────────────────────────────────────
259 | # Command-line Interface
260 | # ──────────────────────────────────────────────────────────────────────
def run():
    """Parse command-line arguments and initiate the requested action."""
    parser = argparse.ArgumentParser(
        description="Apply database migrations using a specified schema version."
    )
    parser.add_argument(
        "command", choices=["migrate", "rollback"], help="Command to execute."
    )
    parser.add_argument("spec_version", help="Schema version to apply.")
    args = parser.parse_args()

    print("\nDatabase Schema Migration Tool")
    print("══════════════════════════════════════════════════════════════════════")

    manager = MigrationManager()

    if args.command == "migrate":
        spec = load_spec(args.spec_version)
        manager.migrate_operations(spec)
    elif args.command == "rollback":
        print(f"{FAILED} Rollback feature is not implemented yet.")
287 |
--------------------------------------------------------------------------------
/migrations/schema/v1.0.1.json:
--------------------------------------------------------------------------------
1 | [
2 | {
3 | "action": "drop_index",
4 | "table": "tokens",
5 | "index_name": "token_platform_account_identifier_hash"
6 | },
7 | {
8 | "action": "add_index",
9 | "table": "tokens",
10 | "columns": ["platform", "account_identifier_hash", "eid"],
11 | "unique": true
12 | }
13 | ]
14 |
--------------------------------------------------------------------------------
/migrations/schema/v1.0.2.json:
--------------------------------------------------------------------------------
1 | [
2 | {
3 | "action": "drop_not_null",
4 | "table": "entities",
5 | "column": "password_hash"
6 | },
7 | {
8 | "action": "add_column",
9 | "table": "entities",
10 | "column_name": "is_bridge_enabled",
11 | "field": "BooleanField(default=True)"
12 | }
13 | ]
14 |
--------------------------------------------------------------------------------
/migrations/schema/v1.1.0.json:
--------------------------------------------------------------------------------
1 | [
2 | {
3 | "action": "add_column",
4 | "table": "entities",
5 | "column_name": "language",
6 | "field": "CharField(null=True, constraints=[SQL(\"DEFAULT 'en'\")])"
7 | }
8 | ]
9 |
--------------------------------------------------------------------------------
/migrations/update_entity_created_date.py:
--------------------------------------------------------------------------------
1 | """Script for updating entity 'date_created' for V2 migrated users."""
2 |
3 | import logging
4 | from tqdm import tqdm
5 | from src.schemas.usersinfo import UsersInfos
6 | from src.entity import find_entity
7 |
# Timestamped INFO-level logging for migration progress and errors.
logging.basicConfig(
    level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s"
)
logger = logging.getLogger("migration.script")
12 |
13 |
def fetch_verified_users_infos_data():
    """Fetch verified user information from the UsersInfos table."""
    verified_only = UsersInfos.status == "verified"
    return UsersInfos.select().where(verified_only)
17 |
18 |
def update_created_date(user_info):
    """Update the date_created field for the given user entity."""
    phone_number_hash = user_info.full_phone_number
    legacy_created_at = user_info.createdAt

    try:
        entity = find_entity(phone_number_hash=phone_number_hash)
        if entity:
            entity.date_created = legacy_created_at
            entity.save()
    except Exception as e:
        logger.exception("Error updating user: %s - %s", user_info.userId, str(e))
31 |
32 |
def run():
    """Main function to process all verified users and update the date_created."""
    users_infos_data = fetch_verified_users_infos_data()

    with tqdm(
        total=users_infos_data.count(), desc="Updating", unit="users"
    ) as pbar:
        for user_info in users_infos_data:
            update_created_date(user_info)
            pbar.update(1)
42 |
43 |
44 | if __name__ == "__main__":
45 | run()
46 |
--------------------------------------------------------------------------------
/protos/v1/vault.proto:
--------------------------------------------------------------------------------
1 | syntax = "proto3";
2 |
3 | package vault.v1;
4 |
5 | // Request message for creating an entity.
6 | message CreateEntityRequest {
7 | // The ISO 3166-1 alpha-2 country code of the entity.
8 | string country_code = 1;
9 | // The phone number of the entity.
10 | string phone_number = 2;
11 | // The password of the entity.
12 | string password = 3;
13 | // The client's public key for publishing.
14 | string client_publish_pub_key = 4;
15 | // The client's public key for device identification.
16 | string client_device_id_pub_key = 5;
17 | // The ownership proof response from the client.
18 | string ownership_proof_response = 6;
19 | }
20 |
21 | // Response message for creating an entity.
22 | message CreateEntityResponse {
23 | // Indicates if ownership proof is required.
24 | bool requires_ownership_proof = 1;
25 | // A long-lived token for the authenticated entity.
26 | string long_lived_token = 2;
27 | // The server's public key for publishing.
28 | string server_publish_pub_key = 3;
29 | // The server's public key for device identification.
30 | string server_device_id_pub_key = 4;
31 | // A response message.
32 | string message = 5;
33 | // The next available time to request another proof of ownership.
34 | int32 next_attempt_timestamp = 6;
35 | }
36 |
37 | // Request message for authenticating an entity.
38 | message AuthenticateEntityRequest {
39 | // The phone number of the entity.
40 | string phone_number = 1;
41 | // The password of the entity.
42 | string password = 2;
43 | // The client's public key for publishing.
44 | string client_publish_pub_key = 3;
45 | // The client's public key for device identification.
46 | string client_device_id_pub_key = 4;
47 | // The ownership proof response from the client.
48 | string ownership_proof_response = 5;
49 | }
50 |
51 | // Response message for authenticating an entity.
52 | message AuthenticateEntityResponse {
53 | // Indicates if ownership proof is required.
54 | bool requires_ownership_proof = 1;
55 | // A long-lived token for the authenticated entity.
56 | string long_lived_token = 2;
57 | // The server's public key for publishing.
58 | string server_publish_pub_key = 3;
59 | // The server's public key for device identification.
60 | string server_device_id_pub_key = 4;
61 | // A response message.
62 | string message = 5;
63 | // The next available time to request another proof of ownership.
64 | int32 next_attempt_timestamp = 6;
65 | // indicates if user must reset their password
66 | bool requires_password_reset = 7;
67 | }
68 |
69 | // Request message for listing entity's stored tokens.
70 | message ListEntityStoredTokensRequest {
71 | // The long-lived token of the authenticated entity.
72 | string long_lived_token = 1;
73 | // Indicates if the token should be removed from the cloud and sent to the device.
74 | bool migrate_to_device = 2;
75 | }
76 |
77 | // Response message for listing entity's stored tokens.
78 | message ListEntityStoredTokensResponse {
79 | // The list of stored tokens.
80 | repeated Token stored_tokens = 1;
81 | // A response message.
82 | string message = 2;
83 | }
84 |
// Represents a token.
message Token {
  // The platform associated with the token.
  string platform = 1;
  // The unique identifier of the account associated with the token.
  string account_identifier = 2;
  // Access and refresh tokens.
  // NOTE(review): the map's type parameters were missing ("map account_tokens"),
  // which is invalid proto3 — restored as map<string, string>; confirm
  // against the generated code.
  map<string, string> account_tokens = 3;
  // Indicates if the token is already stored on the device.
  bool is_stored_on_device = 4;
}
96 |
97 | // Request message for storing an entity's token.
98 | message StoreEntityTokenRequest {
99 | // The long-lived token of the authenticated entity.
100 | string long_lived_token = 1;
101 | // The OAuth2 token to be stored (JSON string).
102 | string token = 2;
103 | // The platform associated with the token.
104 | string platform = 3;
105 | // The identifier of the account associated with the token.
106 | string account_identifier = 4;
107 | }
108 |
109 | // Response message for storing an entity's token.
110 | message StoreEntityTokenResponse {
111 | // A response message.
112 | string message = 1;
113 | // Indicates whether the operation was successful.
114 | bool success = 2;
115 | }
116 |
117 | // Request message for getting entity access token.
118 | message GetEntityAccessTokenRequest {
119 | // Device ID for identifying the requesting device.
120 | string device_id = 1;
121 | // The long-lived token of the authenticated entity.
122 | string long_lived_token = 2;
123 | // The platform associated with the token.
124 | string platform = 3;
125 | // The identifier of the account associated with the token.
126 | string account_identifier = 4;
127 | // The phone number of the entity.
128 | string phone_number = 5;
129 | }
130 |
131 | // Response message for getting entity access token.
132 | message GetEntityAccessTokenResponse {
133 | // Entity access token (JSON string).
134 | string token = 1;
135 | // A response message.
136 | string message = 2;
137 | // Indicates whether the operation was successful.
138 | bool success = 3;
139 | }
140 |
141 | // Request message for decrypting payload.
142 | message DecryptPayloadRequest {
143 | // Device ID for identifying the requesting device.
144 | string device_id = 1;
145 | // Encrypted payload that needs to be decrypted.
146 | string payload_ciphertext = 2;
147 | // The phone number of the entity.
148 | string phone_number = 3;
149 | }
150 |
151 | message DecryptPayloadResponse {
152 | // Decrypted plaintext payload.
153 | string payload_plaintext = 1;
154 | // A response message.
155 | string message = 2;
156 | // Indicates whether the operation was successful.
157 | bool success = 3;
158 | // The ISO 3166-1 alpha-2 country code of the entity.
159 | string country_code = 4;
160 | }
161 |
162 | // Request message for encrypting payload.
163 | message EncryptPayloadRequest {
164 | // Device ID for identifying the requesting device.
165 | string device_id = 1;
166 | // Plaintext payload to be encrypted.
167 | string payload_plaintext = 2;
168 | // The phone number of the entity.
169 | string phone_number = 3;
170 | }
171 |
172 | // Response message for encrypting payload.
173 | message EncryptPayloadResponse {
174 | // Encrypted payload.
175 | string payload_ciphertext = 1;
176 | // A response message.
177 | string message = 2;
178 | // Indicates whether the operation was successful.
179 | bool success = 3;
180 | }
181 |
182 | // Request message for updating an entity's token.
183 | message UpdateEntityTokenRequest {
184 | // Device ID for identifying the requesting device.
185 | string device_id = 1;
186 | // The OAuth2 token to be stored (JSON string).
187 | string token = 2;
188 | // The platform associated with the token.
189 | string platform = 3;
190 | // The identifier of the account associated with the token.
191 | string account_identifier = 4;
192 | // The phone number of the entity.
193 | string phone_number = 5;
194 | }
195 |
196 | // Response message for updating an entity's token.
197 | message UpdateEntityTokenResponse {
198 | // A response message.
199 | string message = 1;
200 | // Indicates whether the operation was successful.
201 | bool success = 2;
202 | }
203 |
204 | // Request message for deleting an entity's token.
205 | message DeleteEntityTokenRequest {
206 | // The long-lived token of the authenticated entity.
207 | string long_lived_token = 1;
208 | // The platform associated with the token.
209 | string platform = 2;
210 | // The identifier of the account associated with the token.
211 | string account_identifier = 3;
212 | }
213 |
214 | // Response message for deleting an entity's token.
215 | message DeleteEntityTokenResponse {
216 | // A response message.
217 | string message = 1;
218 | // Indicates whether the operation was successful.
219 | bool success = 2;
220 | }
221 |
222 | // Request message for deleting an entity.
223 | message DeleteEntityRequest {
224 | // The long-lived token of the authenticated entity.
225 | string long_lived_token = 1;
226 | }
227 |
228 | // Response message for deleting an entity.
229 | message DeleteEntityResponse {
230 | // A response message.
231 | string message = 1;
232 | // Indicates whether the operation was successful.
233 | bool success = 2;
234 | }
235 |
236 | // Request message for resetting an entity's password.
237 | message ResetPasswordRequest {
238 | // The phone number of the entity.
239 | string phone_number = 1;
240 | // The new password of the entity.
241 | string new_password = 2;
242 | // The client's public key for publishing.
243 | string client_publish_pub_key = 3;
244 | // The client's public key for device identification.
245 | string client_device_id_pub_key = 4;
246 | // The ownership proof response from the client.
247 | string ownership_proof_response = 5;
248 | }
249 |
250 | // Response message for resetting an entity's password.
251 | message ResetPasswordResponse {
252 | // Indicates if ownership proof is required.
253 | bool requires_ownership_proof = 1;
254 | // A long-lived token for the authenticated entity.
255 | string long_lived_token = 2;
256 | // The server's public key for publishing.
257 | string server_publish_pub_key = 3;
258 | // The server's public key for device identification.
259 | string server_device_id_pub_key = 4;
260 | // A response message.
261 | string message = 5;
262 | // The next available time to request another proof of ownership.
263 | int32 next_attempt_timestamp = 6;
264 | }
265 |
266 | // Request message for updating an entity's password.
267 | message UpdateEntityPasswordRequest {
268 | // The long-lived token of the authenticated entity.
269 | string long_lived_token = 1;
270 | // The current password of the entity.
271 | string current_password = 2;
272 | // The new password of the entity.
273 | string new_password = 3;
274 | }
275 |
276 | // Response message for updating an entity's password.
277 | message UpdateEntityPasswordResponse {
278 | // A response message.
279 | string message = 1;
280 | // Indicates whether the operation was successful.
281 | bool success = 2;
282 | }
283 |
284 | // Request message for creating a bridge entity.
285 | message CreateBridgeEntityRequest {
286 | // The ISO 3166-1 alpha-2 country code of the entity.
287 | string country_code = 1;
288 | // The phone number of the entity.
289 | string phone_number = 2;
290 | // The client's public key for publishing.
291 | string client_publish_pub_key = 3;
292 | // The ownership proof response from the client.
293 | string ownership_proof_response = 4;
294 | // The server's public key identifier.
295 | string server_pub_key_identifier = 5;
296 | // The server's public key version.
297 | string server_pub_key_version = 6;
298 | // The preferred language of the entity in ISO 639-1 format.
299 | string language = 7;
300 | }
301 |
302 | // Response message for creating a bridge entity.
303 | message CreateBridgeEntityResponse {
304 | // A response message.
305 | string message = 1;
306 | // Indicates whether the operation was successful.
307 | bool success = 2;
308 | }
309 |
// Request message for authenticating a bridge entity.
message AuthenticateBridgeEntityRequest {
  // The phone number of the entity.
  string phone_number = 1;
  // The preferred language of the entity in ISO 639-1 format.
  string language = 2;
}

// Response message for authenticating a bridge entity.
message AuthenticateBridgeEntityResponse {
  // A response message.
  string message = 1;
  // Indicates whether the operation was successful.
  bool success = 2;
  // The preferred language of the entity in ISO 639-1 format.
  string language = 3;
}
326 |
// Service for managing entities.
service Entity {
  // Creates an entity.
  rpc CreateEntity(CreateEntityRequest) returns (CreateEntityResponse);
  // Authenticates an entity.
  rpc AuthenticateEntity(AuthenticateEntityRequest) returns (AuthenticateEntityResponse);
  // Lists all stored access tokens for an entity.
  rpc ListEntityStoredTokens(ListEntityStoredTokensRequest) returns (ListEntityStoredTokensResponse);
  // Deletes an entity.
  rpc DeleteEntity(DeleteEntityRequest) returns (DeleteEntityResponse);
  // Resets an entity's password.
  rpc ResetPassword(ResetPasswordRequest) returns (ResetPasswordResponse);
  // Updates an entity's password.
  rpc UpdateEntityPassword(UpdateEntityPasswordRequest) returns (UpdateEntityPasswordResponse);
}

// Service for managing entities internally.
service EntityInternal {
  // Stores a token for an entity.
  rpc StoreEntityToken(StoreEntityTokenRequest) returns (StoreEntityTokenResponse);
  // Get an entity's access token.
  rpc GetEntityAccessToken(GetEntityAccessTokenRequest) returns (GetEntityAccessTokenResponse);
  // Decrypt payload.
  rpc DecryptPayload(DecryptPayloadRequest) returns (DecryptPayloadResponse);
  // Encrypt payload.
  rpc EncryptPayload(EncryptPayloadRequest) returns (EncryptPayloadResponse);
  // Updates an entity's access token.
  rpc UpdateEntityToken(UpdateEntityTokenRequest) returns (UpdateEntityTokenResponse);
  // Deletes an entity's access token.
  rpc DeleteEntityToken(DeleteEntityTokenRequest) returns (DeleteEntityTokenResponse);
  // Creates a bridge entity.
  rpc CreateBridgeEntity(CreateBridgeEntityRequest) returns (CreateBridgeEntityResponse);
  // Authenticates a bridge entity.
  rpc AuthenticateBridgeEntity(AuthenticateBridgeEntityRequest) returns (AuthenticateBridgeEntityResponse);
}
362 |
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | asgiref==3.8.1
2 | Flask==3.1.1
3 | Flask-Cors==6.0.0
4 | grpc-interceptor==0.15.4
5 | grpcio==1.72.1
6 | grpcio-testing==1.72.1
7 | grpcio-tools==1.72.1
8 | gunicorn==23.0.0
9 | jwt==1.3.1
10 | mysqlclient==2.2.7
11 | mysql-connector-python==9.3.0
12 | nest-asyncio==1.6.0
13 | peewee==3.18.1
14 | phonenumbers==9.0.7
15 | pycryptodome==3.23.0
16 | pytest==8.4.0
17 | requests==2.32.4
18 | smswithoutborders_libsig @ git+https://github.com/smswithoutborders/lib_signal_double_ratchet_python.git@v0.1.5
19 | tqdm==4.67.1
20 | twilio==9.6.2
21 |
--------------------------------------------------------------------------------
/scripts/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/smswithoutborders/RelaySMS-Vault/10f70b1b0fde90c225789efb804743736728de63/scripts/__init__.py
--------------------------------------------------------------------------------
/scripts/cli.py:
--------------------------------------------------------------------------------
1 | """Vault CLI"""
2 |
3 | import sys
4 | import argparse
5 | from src.crypto import generate_hmac
6 | from src.entity import find_entity, create_entity
7 | from src.utils import (
8 | get_configs,
9 | generate_eid,
10 | encrypt_and_encode,
11 | clear_keystore,
12 | load_key,
13 | )
14 | from base_logger import get_logger
15 |
logger = get_logger("vault.cli")

# 32-byte HMAC key derived from the configured hashing salt.
HASHING_KEY = load_key(get_configs("HASHING_SALT"), 32)
# Comma-separated allow-list of phone numbers this CLI may create.
DUMMY_PHONENUMBERS = get_configs(
    "DUMMY_PHONENUMBERS", default_value="+237123456789"
).split(",")
# Password assigned to dummy entities created via this CLI.
DUMMY_PASSWORD = get_configs("DUMMY_PASSWORD", default_value="dummy_password")
23 |
24 |
def create(phonenumber, password, country_code):
    """Create an Entity (for dummy entities only)."""

    # Refuse anything that is not an explicitly allowed dummy number.
    if phonenumber not in DUMMY_PHONENUMBERS:
        logger.error("Entity phone number must be a dummy phone number.")
        sys.exit(1)

    hashed_phone = generate_hmac(HASHING_KEY, phonenumber)

    # Bail out early when an entity already exists for this number.
    if find_entity(phone_number_hash=hashed_phone):
        logger.info("Entity with this phone number already exists.")
        sys.exit(0)

    entity_id = generate_eid(hashed_phone)
    hashed_password = generate_hmac(HASHING_KEY, password)
    encrypted_country = encrypt_and_encode(country_code)

    # Remove any stale key material associated with this entity id.
    clear_keystore(entity_id)

    create_entity(
        eid=entity_id,
        phone_number_hash=hashed_phone,
        password_hash=hashed_password,
        country_code=encrypted_country,
    )

    logger.info("Entity created successfully")
    sys.exit(0)
56 |
57 |
def main():
    """Entry function.

    Parses CLI arguments and dispatches to the requested sub-command.
    Prints usage and exits non-zero when no sub-command is given
    (previously this exited silently with status 0).
    """

    parser = argparse.ArgumentParser(description="Vault CLI")
    subparsers = parser.add_subparsers(dest="command", description="Expected commands")
    create_parser = subparsers.add_parser("create", help="Creates an entity.")
    create_parser.add_argument(
        "-n", "--phonenumber", type=str, help="Entity's phone number.", required=True
    )
    args = parser.parse_args()

    if args.command == "create":
        create(phonenumber=args.phonenumber, password=DUMMY_PASSWORD, country_code="CM")
    else:
        # No sub-command supplied: show usage instead of doing nothing.
        parser.print_help()
        sys.exit(1)
71 |
72 |
# Run the CLI only when executed directly (not when imported).
if __name__ == "__main__":
    main()
75 |
--------------------------------------------------------------------------------
/scripts/common.sh:
--------------------------------------------------------------------------------
#!/bin/bash

# Resolve this script's directory even when sourced via a symlink.
# All expansions are quoted so paths containing spaces do not word-split.
SCRIPT_DIR=$(dirname "$(readlink -f "${BASH_SOURCE[0]}")")
source "${SCRIPT_DIR}/logger.sh" || exit 1
5 |
# load_env FILE — export every variable defined in FILE into the environment.
# Skips with a warning when FILE does not exist.
load_env() {
    local env_file=$1

    # Quote the path: unquoted it would word-split/glob (ShellCheck SC2086).
    if [ -f "$env_file" ]; then
        logger INFO "Loading environment variables from $env_file."
        set -a
        source "$env_file"
        set +a
    else
        logger WARNING "Environment file not found at $env_file. Skipping loading."
    fi
}
18 |
# generate_base64_key OUTPUT_PATH — write a random 32-byte base64 key to OUTPUT_PATH.
# Returns 1 on a missing argument or on generation failure.
generate_base64_key() {
    local output_path=$1

    if [ -z "$output_path" ]; then
        logger ERROR "Output file path is required."
        return 1
    fi

    # Test the command directly instead of inspecting $? afterwards.
    if openssl rand -base64 32 >"$output_path"; then
        logger INFO "32-byte base64 key generated and saved to $output_path."
    else
        logger ERROR "Failed to generate base64 key."
        return 1
    fi
}
35 |
--------------------------------------------------------------------------------
/scripts/logger.sh:
--------------------------------------------------------------------------------
#!/bin/bash

# logger LEVEL MESSAGE
# Prints a timestamped, color-coded log line tagged with the calling script's
# filename. NOTE: this function shadows the system logger(1) utility in any
# script that sources this file.
logger() {
    local level=$1
    local message=$2
    local timestamp
    timestamp=$(date +"%Y-%m-%d %H:%M:%S")
    # BASH_SOURCE[1] is the file that called this function. Declare and
    # assign separately so the command's exit status is not masked (SC2155),
    # and quote expansions so paths with spaces do not word-split (SC2086).
    local source
    source=$(basename "$(readlink -f "${BASH_SOURCE[1]}")")

    case $level in
    INFO)
        echo -e "\033[0;32m[$timestamp] [$source] [INFO] $message\033[0m"
        ;;
    WARNING)
        echo -e "\033[0;33m[$timestamp] [$source] [WARNING] $message\033[0m"
        ;;
    ERROR)
        echo -e "\033[0;31m[$timestamp] [$source] [ERROR] $message\033[0m"
        ;;
    *)
        echo -e "\033[0;37m[$timestamp] [$source] [UNKNOWN] $message\033[0m"
        ;;
    esac
}
25 |
--------------------------------------------------------------------------------
/scripts/quick-setup.sh:
--------------------------------------------------------------------------------
#!/bin/bash

# Script for quick setup.
# Use for development purposes only.
#
# Creates a virtualenv, installs requirements, materializes .env from the
# template, loads it, generates key material, and runs the make setup targets.

SCRIPT_DIR=$(dirname "$(readlink -f "${BASH_SOURCE[0]}")")
PARENT_DIR=$(dirname "$SCRIPT_DIR")
source "${SCRIPT_DIR}/logger.sh" || exit 1
source "${SCRIPT_DIR}/common.sh" || exit 1

VENV_PATH="${PARENT_DIR}/venv"
ENV_FILE_PATH="${PARENT_DIR}/.env"
ENV_TEMPLATE_PATH="${PARENT_DIR}/template.env"
REQUIREMENTS_FILE_PATH="${PARENT_DIR}/requirements.txt"

if [ -d "$VENV_PATH" ]; then
    logger INFO "Virtual environment already exists at $VENV_PATH. Skipping creation."
else
    logger INFO "Creating virtual environment at $VENV_PATH."
    python3 -m venv "$VENV_PATH" || {
        logger ERROR "Failed to create virtual environment."
        exit 1
    }
fi
source "$VENV_PATH/bin/activate" || {
    logger ERROR "Failed to activate virtual environment."
    exit 1
}
if [ -f "$REQUIREMENTS_FILE_PATH" ]; then
    logger INFO "Installing requirements from $REQUIREMENTS_FILE_PATH."
    pip install -r "$REQUIREMENTS_FILE_PATH" || {
        logger ERROR "Failed to install requirements."
        exit 1
    }
else
    logger WARNING "Requirements file not found at $REQUIREMENTS_FILE_PATH. Skipping installation."
fi
if [ ! -f "$ENV_FILE_PATH" ]; then
    logger INFO "Creating .env file at $ENV_FILE_PATH."
    cp "$ENV_TEMPLATE_PATH" "$ENV_FILE_PATH" || {
        logger ERROR "Failed to create .env file."
        exit 1
    }
else
    logger INFO ".env file already exists at $ENV_FILE_PATH. Skipping creation."
fi
load_env "$ENV_FILE_PATH"
logger WARNING "Run 'deactivate' to exit the virtual environment."

# SHARED_KEY and HASHING_SALT are expected to come from the loaded .env.
# Quoting matters here: with an unset variable the previous unquoted
# `[ ! -f $SHARED_KEY ]` collapsed to `[ ! -f ]` and silently skipped
# key generation; quoted, the empty path is caught by generate_base64_key.
if [ ! -f "$SHARED_KEY" ]; then
    logger INFO "Shared key file not found at $SHARED_KEY. Generating new keys."
    generate_base64_key "$SHARED_KEY" || {
        logger ERROR "Failed to generate shared key."
        exit 1
    }
else
    logger INFO "Shared key file already exists at $SHARED_KEY. Skipping key generation."
fi

if [ ! -f "$HASHING_SALT" ]; then
    logger INFO "Hashing salt file not found at $HASHING_SALT. Generating new salt."
    generate_base64_key "$HASHING_SALT" || {
        logger ERROR "Failed to generate hashing salt."
        exit 1
    }
else
    logger INFO "Hashing salt file already exists at $HASHING_SALT. Skipping salt generation."
fi

logger INFO "Running 'make build-setup'."
make build-setup || {
    logger ERROR "'make build-setup' failed."
    exit 1
}

logger INFO "Running 'make runtime-setup'."
make runtime-setup || {
    logger ERROR "'make runtime-setup' failed."
    exit 1
}

logger INFO "Quick setup completed successfully."
--------------------------------------------------------------------------------
/scripts/quick-start.sh:
--------------------------------------------------------------------------------
#!/bin/bash

# Start all Vault services in parallel (development convenience script).
# Kill every process in this script's process group on exit, so Ctrl-C
# tears down all three background servers together.
trap "kill 0" EXIT

make grpc-server-start &
make grpc-internal-server-start &
make start-rest-api &

# Block until all background servers exit.
wait
10 |
--------------------------------------------------------------------------------
/scripts/x25519_keygen.py:
--------------------------------------------------------------------------------
1 | """
2 | This program is free software: you can redistribute it under the terms
3 | of the GNU General Public License, v. 3.0. If a copy of the GNU General
4 | Public License was not distributed with this file, see .
5 | """
6 |
7 | import os
8 | import base64
9 | import argparse
10 | import uuid
11 | import json
12 | import random
13 |
14 | from smswithoutborders_libsig.keypairs import x25519
15 | from base_logger import get_logger
16 | from src.utils import get_configs, load_key
17 | from src.db_models import StaticKeypairs
18 | from src.crypto import encrypt_aes, decrypt_aes
19 |
logger = get_logger("static.x25519.keygen")

# 32-byte AES key (from SHARED_KEY) used to encrypt serialized keypairs at rest.
ENCRYPTION_KEY = load_key(get_configs("SHARED_KEY", strict=True), 32)
# Root directory holding the static x25519 keystore databases.
STATIC_KEYSTORE_PATH = get_configs("STATIC_X25519_KEYSTORE_PATH", strict=True)
# Default JSON file that exported public keys are written to.
DEFAULT_EXPORT_PATH = os.path.join(STATIC_KEYSTORE_PATH, "exported_public_keys.json")
25 |
26 |
def generate_keypair(kid: int, keystore_path: str, version: str) -> None:
    """Generates a keypair, encrypts it, and stores it in the database.

    Args:
        kid: Numeric key identifier recorded with the keypair.
        keystore_path: Directory in which the keystore DB file is created.
        version: Version label stored alongside the keypair (e.g. "v1").
    """
    # Pre-initialize so the rollback path in the except block cannot raise
    # NameError when the failure happens before the path is assigned.
    keystore_db_path = None
    try:
        keystore_db_path = os.path.join(keystore_path, f"{uuid.uuid4()}.db")
        keypair_obj = x25519(keystore_db_path)
        keypair_obj.init()

        # Encrypt the serialized keypair before persisting it.
        keypair_ciphertext = encrypt_aes(
            ENCRYPTION_KEY, keypair_obj.serialize(), is_bytes=True
        )

        StaticKeypairs.create_keypair(
            kid=kid, keypair_bytes=keypair_ciphertext, status="active", version=version
        )
        logger.debug("Successfully generated and stored keypair %d.", kid)

    except Exception as e:
        logger.exception("Failed to generate keypair %d: %s", kid, e)

        # Best-effort rollback: remove the orphaned keystore file, if any.
        if keystore_db_path and os.path.exists(keystore_db_path):
            try:
                os.remove(keystore_db_path)
                logger.info(
                    "Rolled back and deleted keystore file: %s", keystore_db_path
                )
            except Exception as rollback_error:
                logger.error(
                    "Failed to delete keystore file %s: %s",
                    keystore_db_path,
                    rollback_error,
                )
58 |
59 |
def generate_keypairs(count: int, version: str) -> None:
    """Generates and stores multiple keypairs."""
    # Version labels must follow the "v<N>" convention.
    if not version.startswith("v"):
        logger.error("version must start with 'v'. e.g v1.")
        return

    target_dir = os.path.join(STATIC_KEYSTORE_PATH, version)

    # Refuse to touch a directory that already holds keystore files.
    if os.path.exists(target_dir) and os.listdir(target_dir):
        logger.info(
            "Keypair generation skipped: '%s' is not empty. To overwrite the content, "
            "delete the directory or use a different one.",
            target_dir,
        )
        return

    os.makedirs(target_dir, exist_ok=True)

    for kid in range(count):
        generate_keypair(kid, target_dir, version)

    logger.info("Successfully generated %d keypairs.", count)
81 |
82 |
def export_public_keys_to_file(file_path: str, yes: bool, skip_if_exists: bool) -> None:
    """Exports all public keys grouped by version to a specified JSON file.

    Args:
        file_path: Destination JSON path; falls back to DEFAULT_EXPORT_PATH
            when falsy.
        yes: When True, overwrite an existing file without prompting.
        skip_if_exists: When True, skip the export if the file already exists
            (checked before the interactive prompt).

    Raises:
        Exception: Re-raises any failure after logging it.
    """
    file_path = file_path or DEFAULT_EXPORT_PATH
    try:
        dir_path = os.path.dirname(file_path)
        if dir_path:
            os.makedirs(dir_path, exist_ok=True)

        if os.path.exists(file_path):
            if skip_if_exists:
                logger.info("Export skipped: %s already exists.", file_path)
                return
            if not yes:
                # Interactive confirmation before clobbering an existing export.
                confirm = (
                    input(
                        f"{file_path} already exists. Do you want to replace it? (y/N): "
                    )
                    .strip()
                    .lower()
                )
                if confirm != "y":
                    logger.info("Export aborted by user.")
                    return

        active_keypairs = StaticKeypairs.get_keypairs(status="active")

        # Decrypt each stored keypair and extract its base64-encoded public key.
        public_keys = [
            {
                "kid": keypair.kid,
                "keypair": base64.b64encode(
                    x25519()
                    .deserialize(
                        decrypt_aes(
                            ENCRYPTION_KEY, keypair.keypair_bytes, is_bytes=True
                        )
                    )
                    .get_public_key()
                ).decode("utf-8"),
                "status": keypair.status,
                "version": keypair.version,
            }
            for keypair in active_keypairs
        ]

        with open(file_path, "w", encoding="utf-8") as f:
            json.dump(public_keys, f, indent=2)

        logger.info("Public keys exported successfully to %s.", file_path)

    except Exception as e:
        logger.exception("Failed to export public keys: %s", e)
        raise
135 |
136 |
def test_keypairs() -> None:
    """Performs a key agreement test using two randomly selected keypairs."""
    try:
        keypairs = StaticKeypairs.get_keypairs(status="active")
        if len(keypairs) < 2:
            raise ValueError("Not enough keypairs for key agreement test.")

        first, second = random.sample(keypairs, 2)

        def _load(record):
            # Decrypt the stored blob and rebuild the keypair object.
            return x25519().deserialize(
                decrypt_aes(ENCRYPTION_KEY, record.keypair_bytes, is_bytes=True)
            )

        pair_a = _load(first)
        pair_b = _load(second)

        # Both sides must derive the same shared secret.
        secret_ab = pair_a.agree(pair_b.get_public_key())
        secret_ba = pair_b.agree(pair_a.get_public_key())

        if secret_ab == secret_ba:
            logger.info("Key agreement successful: shared secret matches.")
        else:
            logger.error("Key agreement failed: shared secrets do not match.")

    except Exception as e:
        logger.exception("Key agreement test failed: %s", e)
        raise
164 |
165 |
def main() -> None:
    """CLI entry point for key management."""
    parser = argparse.ArgumentParser(description="x25519 Key Management CLI")
    subparser = parser.add_subparsers(dest="command", required=True)

    generate_parser = subparser.add_parser("generate", help="Generate keypairs")
    generate_parser.add_argument(
        "-n", "--number", type=int, default=255, help="Number of keypairs to generate"
    )
    generate_parser.add_argument(
        "-v", "--version", type=str, required=True, help="Keypair version"
    )

    export_parser = subparser.add_parser("export", help="Export public keys")
    export_parser.add_argument(
        "-f", "--file", type=str, default=None, help="Path to save the public keys file"
    )
    export_parser.add_argument(
        "-y", "--yes", action="store_true", help="Overwrite the file without prompting"
    )
    export_parser.add_argument(
        "--skip-if-exists", action="store_true", help="Skip export if file exists"
    )

    subparser.add_parser("test", help="Test key agreement")

    args = parser.parse_args()

    # Dispatch on the selected sub-command ('required=True' guarantees one).
    if args.command == "generate":
        generate_keypairs(args.number, args.version)
    elif args.command == "export":
        export_public_keys_to_file(args.file, args.yes, args.skip_if_exists)
    else:
        test_keypairs()
203 |
204 |
# Run the key-management CLI only when executed directly.
if __name__ == "__main__":
    main()
207 |
--------------------------------------------------------------------------------
/src/api_v3.py:
--------------------------------------------------------------------------------
1 | """
2 | This program is free software: you can redistribute it under the terms
3 | of the GNU General Public License, v. 3.0. If a copy of the GNU General
4 | Public License was not distributed with this file, see .
5 | """
6 |
7 | import os
8 | import json
9 | from datetime import datetime
10 | import calendar
11 |
12 | from flask import Blueprint, request, jsonify
13 | from flask_cors import CORS
14 | from werkzeug.exceptions import BadRequest, NotFound
15 |
16 | from phonenumbers import geocoder
17 |
18 | from src.db import connect
19 | from src.entity import fetch_all_entities
20 | from src.utils import (
21 | decrypt_and_decode,
22 | validate_metrics_args,
23 | filter_dict,
24 | get_configs,
25 | )
26 | from src.user_metrics import get_signup_users, get_retained_users
27 | from base_logger import get_logger
28 |
v3_blueprint = Blueprint("v3", __name__, url_prefix="/v3")
CORS(v3_blueprint)

# Shared database handle; opened/closed per request via the hooks below.
database = connect()

logger = get_logger(__name__)

# Location of the exported static x25519 public keys served by this API.
STATIC_KEYSTORE_PATH = get_configs("STATIC_X25519_KEYSTORE_PATH", strict=True)
DEFAULT_EXPORT_PATH = os.path.join(STATIC_KEYSTORE_PATH, "exported_public_keys.json")
38 |
39 |
def set_security_headers(response):
    """Set security headers for each response."""
    # (header, value) pairs applied to every outgoing response.
    security_headers = (
        ("Strict-Transport-Security", "max-age=63072000; includeSubdomains"),
        ("X-Content-Type-Options", "nosniff"),
        ("Content-Security-Policy", "script-src 'self'; object-src 'self'"),
        ("Referrer-Policy", "strict-origin-when-cross-origin"),
        ("Cache-Control", "no-cache"),
        (
            "Permissions-Policy",
            "accelerometer=(), ambient-light-sensor=(), autoplay=(), battery=(), camera=(), "
            "clipboard-read=(), clipboard-write=(), cross-origin-isolated=(), display-capture=(), "
            "document-domain=(), encrypted-media=(), execution-while-not-rendered=(), "
            "execution-while-out-of-viewport=(), fullscreen=(), gamepad=(), geolocation=(), "
            "gyroscope=(), magnetometer=(), microphone=(), midi=(), navigation-override=(), "
            "payment=(), picture-in-picture=(), publickey-credentials-get=(), screen-wake-lock=(), "
            "speaker=(), speaker-selection=(), sync-xhr=(), usb=(), web-share=(), "
            "xr-spatial-tracking=()",
        ),
    )

    for header_name, header_value in security_headers:
        response.headers[header_name] = header_value

    return response
64 |
65 |
@v3_blueprint.before_request
def _db_connect():
    """Connect to the database before processing the request."""
    # reuse_if_open avoids "connection already open" errors when the
    # connection is still alive from a previous hook.
    database.connect(reuse_if_open=True)
70 |
71 |
@v3_blueprint.teardown_request
def _db_close(response):
    """Close the database connection after processing the request."""
    # Teardown runs even when the view raised, so the connection never leaks.
    database.close()
    return response
77 |
78 |
@v3_blueprint.after_request
def after_request(response):
    """Set security headers after each request."""
    # Delegate to the shared helper so every v3 response carries the headers.
    return set_security_headers(response)
84 |
85 |
def fetch_entities_by_month(result, start, end):
    """Fetch entities grouped by month.

    Args:
        result: Accumulator dict holding "countries", "total_users" and
            "total_countries" (see get_entities_analysis).
        start: Requested start date. NOTE(review): currently unused — the
            query lower bound is always datetime.min; confirm this is intended.
        end: Requested end date; expanded to the last second of its month.

    Returns:
        The updated result dict.
    """
    new_start = datetime.min
    # Extend 'end' to the final day of its month at 23:59:59.
    new_end = datetime(
        end.year, end.month, calendar.monthrange(end.year, end.month)[1], 23, 59, 59
    )

    entities = fetch_all_entities(date_range=(new_start, new_end))

    for entity in entities:
        entity_date_created = entity.date_created
        month_name = calendar.month_name[entity_date_created.month]

        result = update_result_by_time(result, entity_date_created, month_name)
        result = update_countries(result, entity)

    result["total_users"] = len(entities)
    result["total_countries"] = len(result["countries"])

    return result
106 |
107 |
def fetch_entities_by_day(result, start, end):
    """Fetch entities grouped by day.

    Args:
        result: Accumulator dict holding "countries", "total_users" and
            "total_countries" (see get_entities_analysis).
        start: Requested start date. NOTE(review): currently unused — the
            query lower bound is always datetime.min; confirm this is intended.
        end: Requested end date; expanded to 23:59:59 of that day.

    Returns:
        The updated result dict.
    """
    new_start = datetime.min
    new_end = datetime(end.year, end.month, end.day, 23, 59, 59)

    entities = fetch_all_entities(date_range=(new_start, new_end))

    for entity in entities:
        entity_date_created = entity.date_created
        # "%c" is a locale-dependent full timestamp — NOTE(review): confirm
        # this is the intended bucket key for per-day grouping.
        day_name = entity_date_created.strftime("%c")

        result = update_result_by_time(result, entity_date_created, day_name)
        result = update_countries(result, entity)

    result["total_users"] = len(entities)
    result["total_countries"] = len(result["countries"])

    return result
126 |
127 |
def update_result_by_time(result, entity_date_created, time_name):
    """Helper to update the result dictionary with time-based data.

    Buckets are [time_name, count] pairs stored per year key; the count for
    an existing bucket is incremented, otherwise a new bucket is appended.
    """
    year_key = str(entity_date_created.year)
    buckets = result.setdefault(year_key, [])

    for bucket in buckets:
        if bucket[0] == time_name:
            bucket[1] += 1
            break
    else:
        buckets.append([time_name, 1])

    return result
143 |
144 |
def update_countries(result, entity):
    """Helper to update the result dictionary with country-based data.

    Entries in result["countries"] are [country_name, region_code, count];
    the count is incremented for a known country, otherwise a new entry
    is appended.
    """
    region_code = decrypt_and_decode(entity.country_code)
    # NOTE(review): _region_display_name is a private phonenumbers API and
    # may change without notice; consider a public alternative.
    country_name = geocoder._region_display_name(region_code, "en")

    if any(country_name in x for x in result["countries"]):
        for x in result["countries"]:
            if x[0] == country_name and x[1] == region_code:
                x[2] += 1
    else:
        result["countries"].append([country_name, region_code, 1])

    return result
158 |
159 |
@v3_blueprint.route("/entities", methods=["GET"])
def get_entities_analysis():
    """Retrieve analysis of entities.

    Query params:
        start, end: Required dates in YYYY-MM-DD format.
        format: "month" (default) or "day" grouping granularity.

    Returns:
        JSON dict with totals and per-period counts, HTTP 200.

    Raises:
        BadRequest: On missing or malformed parameters.
    """
    start = request.args.get("start")
    end = request.args.get("end")
    _format = request.args.get("format", "month")

    if not start or not end:
        raise BadRequest("Invalid input parameters. Provide 'start', 'end'.")

    try:
        start = datetime.strptime(start, "%Y-%m-%d").date()
        end = datetime.strptime(end, "%Y-%m-%d").date()
    except ValueError as exc:
        # Previously a malformed date propagated as an unhandled 500.
        raise BadRequest("Dates must be in 'YYYY-MM-DD' format.") from exc

    if start > end:
        raise BadRequest("'start' date cannot be after 'end' date.")

    result = {"total_users": 0, "total_countries": 0, "countries": []}

    if _format == "month":
        result = fetch_entities_by_month(result, start, end)
    elif _format == "day":
        result = fetch_entities_by_day(result, start, end)
    else:
        raise BadRequest("Invalid format. Expected 'month' or 'day'.")

    logger.info("Successfully fetched entities data.")
    return jsonify(result), 200
187 |
188 |
@v3_blueprint.route("/metrics/signup", methods=["GET"])
def signup_users():
    """Endpoint to retrieve signup user data based on specified filters and grouping."""

    args = request.args
    filters = args.to_dict()

    group_by = args.get("group_by", type=str)
    top = args.get("top", type=int)
    # Pagination only applies to grouped queries that are not top-N limited.
    paginate = group_by is not None and top is None

    page = args.get("page", type=int, default=1) if paginate else None
    page_size = args.get("page_size", type=int, default=50) if paginate else None

    options = {
        "granularity": args.get("granularity", type=str, default="day"),
        "top": top,
        "page": page,
        "page_size": page_size,
        "group_by": group_by,
    }

    combined_options = {**filters, **options}

    try:
        # Validate only the arguments validate_metrics_args understands,
        # then run the query; both raise ValueError on bad input.
        validate_metrics_args(
            **filter_dict(
                combined_options,
                include_only=("start_date", "end_date", "top", "page", "page_size"),
            )
        )
        result = get_signup_users(filters=filters, group_by=group_by, options=options)
    except ValueError as e:
        raise BadRequest(str(e)) from e

    logger.info("Successfully fetched signup metrics.")
    return jsonify(result)
228 |
229 |
@v3_blueprint.route("/metrics/retained", methods=["GET"])
def retained_users():
    """Endpoint to retrieve retained user data based on specified filters and grouping."""

    args = request.args
    filters = args.to_dict()

    group_by = args.get("group_by", type=str)
    top = args.get("top", type=int)
    # Pagination only applies to grouped queries that are not top-N limited.
    paginate = group_by is not None and top is None

    page = args.get("page", type=int, default=1) if paginate else None
    page_size = args.get("page_size", type=int, default=50) if paginate else None

    options = {
        "granularity": args.get("granularity", type=str, default="day"),
        "top": top,
        "page": page,
        "page_size": page_size,
        "group_by": group_by,
    }

    combined_options = {**filters, **options}

    try:
        # Validate only the arguments validate_metrics_args understands,
        # then run the query; both raise ValueError on bad input.
        validate_metrics_args(
            **filter_dict(
                combined_options,
                include_only=("start_date", "end_date", "top", "page", "page_size"),
            )
        )
        result = get_retained_users(filters=filters, group_by=group_by, options=options)
    except ValueError as e:
        raise BadRequest(str(e)) from e

    logger.info("Successfully fetched retained metrics.")
    return jsonify(result)
269 |
270 |
@v3_blueprint.route("/keys/static-x25519", methods=["GET"])
def fetch_static_x25519_keys():
    """Fetch static x25519 public keys from a JSON file."""

    # Map filesystem/parse failures onto the API's HTTP error types.
    try:
        with open(DEFAULT_EXPORT_PATH, "r", encoding="utf-8") as key_file:
            static_keys = json.load(key_file)
    except FileNotFoundError as exc:
        logger.error("File not found: %s", DEFAULT_EXPORT_PATH)
        raise NotFound("Static public keys not found") from exc
    except json.JSONDecodeError as exc:
        logger.error("Invalid JSON format in file: %s", DEFAULT_EXPORT_PATH)
        raise BadRequest("Invalid key file format") from exc

    logger.info("Successfully retrieved static x25519 public keys.")
    return jsonify(static_keys)
287 |
288 |
@v3_blueprint.errorhandler(BadRequest)
@v3_blueprint.errorhandler(NotFound)
def handle_bad_request_error(error):
    """Handle BadRequest errors."""
    logger.error(error.description)
    payload = {"error": error.description}
    return jsonify(payload), error.code
295 |
296 |
@v3_blueprint.errorhandler(Exception)
def handle_generic_error(error):
    """Handle generic errors."""
    # Log the full traceback but never leak internals to the client.
    logger.exception(error)
    body = jsonify({"error": "Oops! Something went wrong. Please try again later."})
    return body, 500
305 |
--------------------------------------------------------------------------------
/src/crypto.py:
--------------------------------------------------------------------------------
1 | """
2 | Cryptographic functions.
3 | """
4 |
5 | import hmac
6 | import hashlib
7 | from Crypto.Cipher import AES
8 | from cryptography.fernet import Fernet
9 | from base_logger import get_logger
10 |
11 | logger = get_logger(__name__)
12 |
13 |
def encrypt_aes(key, plaintext, is_bytes=False):
    """
    Encrypts a plaintext string or bytes using AES-256 encryption.

    Args:
        key (bytes): The encryption key (must be 32 bytes long).
        plaintext (str or bytes): The plaintext to be encrypted.
        is_bytes (bool): If True, plaintext is treated as bytes; otherwise, it's encoded as UTF-8.

    Returns:
        bytes: nonce (16) + tag (16) + ciphertext, concatenated.
    """
    if len(key) != 32:
        raise ValueError("AES-256 key must be 32 bytes long")

    if not isinstance(plaintext, (str, bytes)):
        raise TypeError("Plaintext must be either a string or bytes")

    # Strings are UTF-8 encoded unless the caller declared raw bytes.
    if isinstance(plaintext, str) and not is_bytes:
        plaintext = plaintext.encode("utf-8")

    logger.debug("Encrypting plaintext using AES-256...")
    cipher = AES.new(key, AES.MODE_EAX)
    ciphertext, tag = cipher.encrypt_and_digest(plaintext)

    # Layout matches what decrypt_aes expects: nonce | tag | ciphertext.
    return cipher.nonce + tag + ciphertext
40 |
41 |
def decrypt_aes(key, ciphertext, is_bytes=False):
    """
    Decrypts a ciphertext string or bytes using AES-256 decryption.

    Args:
        key (bytes): The decryption key (must be 32 bytes long).
        ciphertext (bytes): The encrypted ciphertext (nonce + tag + ciphertext).
        is_bytes (bool): If True, returns decrypted bytes; otherwise, returns a decoded string.

    Returns:
        str or bytes: The decrypted plaintext (either as a string or bytes).
    """
    if len(key) != 32:
        raise ValueError("AES-256 key must be 32 bytes long")

    if not isinstance(ciphertext, bytes):
        raise TypeError("Ciphertext must be in bytes")

    logger.debug("Decrypting ciphertext using AES-256...")
    # Split the layout produced by encrypt_aes: nonce | tag | body.
    nonce, tag, body = ciphertext[:16], ciphertext[16:32], ciphertext[32:]

    cipher = AES.new(key, AES.MODE_EAX, nonce=nonce)
    plaintext = cipher.decrypt_and_verify(body, tag)

    return plaintext if is_bytes else plaintext.decode("utf-8")
71 |
72 |
def generate_hmac(key, message):
    """
    Compute the HMAC-SHA512 of a message.

    Args:
        key (bytes): 32-byte HMAC key.
        message (str): Message to authenticate (UTF-8 encoded internally).

    Returns:
        str: Hex-encoded HMAC digest.

    Raises:
        ValueError: If the key is not exactly 32 bytes long.
    """
    if len(key) != 32:
        raise ValueError("HMAC key must be 32 bytes long")

    logger.debug("Generating HMAC for the message...")
    digest = hmac.new(key, message.encode("utf-8"), hashlib.sha512)
    return digest.hexdigest()
89 |
90 |
def verify_hmac(key, message, hmac_to_verify):
    """
    Check a message's HMAC against an expected value in constant time.

    Args:
        key (bytes): 32-byte HMAC key.
        message (str): Message whose HMAC is to be verified.
        hmac_to_verify (str): Expected hex-encoded HMAC.

    Returns:
        bool: True if the HMAC matches, False otherwise.

    Raises:
        ValueError: If the key is not exactly 32 bytes long.
    """
    if len(key) != 32:
        raise ValueError("HMAC key must be 32 bytes long")

    logger.debug("Verifying HMAC for the message...")
    expected = generate_hmac(key, message)
    # compare_digest avoids leaking match position via timing.
    return hmac.compare_digest(expected, hmac_to_verify)
109 |
110 |
def encrypt_fernet(key, plaintext):
    """
    Encrypt a string with Fernet symmetric encryption.

    Args:
        key (bytes): A Fernet-compatible key (urlsafe base64, 32 bytes decoded).
        plaintext (str): The plaintext string to encrypt.

    Returns:
        bytes: The Fernet token (ciphertext).
    """
    logger.debug("Encrypting plaintext using Fernet encryption...")
    return Fernet(key).encrypt(plaintext.encode("utf-8"))
125 |
126 |
def decrypt_fernet(key, ciphertext):
    """
    Decrypt a Fernet token back to a string.

    Args:
        key (bytes): A Fernet-compatible key (urlsafe base64, 32 bytes decoded).
        ciphertext (bytes): The Fernet token to decrypt.

    Returns:
        str: The decrypted plaintext, decoded as UTF-8.
    """
    logger.debug("Decrypting ciphertext using Fernet encryption...")
    return Fernet(key).decrypt(ciphertext).decode("utf-8")
141 |
--------------------------------------------------------------------------------
/src/db.py:
--------------------------------------------------------------------------------
1 | """Module for connecting to a database."""
2 |
3 | from peewee import Database, DatabaseError, MySQLDatabase, SqliteDatabase
4 | from playhouse.shortcuts import ReconnectMixin
5 | from src.utils import ensure_database_exists, get_configs
6 | from base_logger import get_logger
7 |
logger = get_logger(__name__)

# Database settings resolved once at import time via get_configs().
# "mode" selects the backend inside connect() below; missing env vars
# surface here as falsy values (checked by is_mysql_config_complete()).
DATABASE_CONFIGS = {
    "mode": get_configs("MODE", default_value="development"),
    "mysql": {
        "database": get_configs("MYSQL_DATABASE"),
        "host": get_configs("MYSQL_HOST"),
        "password": get_configs("MYSQL_PASSWORD"),
        "user": get_configs("MYSQL_USER"),
    },
    "sqlite": {
        "database_path": get_configs("SQLITE_DATABASE_PATH"),
    },
}
22 |
23 |
class ReconnectMySQLDatabase(ReconnectMixin, MySQLDatabase):
    """
    A MySQLDatabase with automatic reconnection capability.

    Combines playhouse's ReconnectMixin with peewee's MySQLDatabase so
    that a dropped connection is transparently re-established on the
    next query instead of raising. No additional behavior is defined.
    """
32 |
33 |
def is_mysql_config_complete() -> bool:
    """
    Report whether every required MySQL setting has a truthy value.

    Returns:
        bool: True when database, host, password, and user are all set.
    """
    logger.debug("Checking if MySQL configuration is complete...")
    config = DATABASE_CONFIGS["mysql"]
    return all(
        config.get(field) for field in ("database", "host", "password", "user")
    )
45 |
46 |
def connect() -> Database:
    """
    Open a database connection appropriate for the configured mode.

    - 'testing': no connection is made; returns None.
    - 'development': MySQL when fully configured, otherwise SQLite fallback.
    - any other mode (e.g. production): MySQL.

    Returns:
        Database: The connected database object, or None in testing mode.
    """
    mode = DATABASE_CONFIGS["mode"]
    logger.debug("Database connection mode: %s", mode)

    if mode == "testing":
        logger.debug("Mode is 'testing'. No database connection will be made.")
        return None

    if mode != "development":
        return connect_to_mysql()

    if is_mysql_config_complete():
        return connect_to_mysql()

    logger.warning(
        "MySQL configuration is incomplete. Falling back to SQLite database."
    )
    return connect_to_sqlite()
79 |
80 |
@ensure_database_exists(
    DATABASE_CONFIGS["mysql"]["host"],
    DATABASE_CONFIGS["mysql"]["user"],
    DATABASE_CONFIGS["mysql"]["password"],
    DATABASE_CONFIGS["mysql"]["database"],
)
def connect_to_mysql() -> ReconnectMySQLDatabase:
    """
    Connect to the configured MySQL database.

    Returns:
        ReconnectMySQLDatabase: Connected MySQL handle with auto-reconnect.

    Raises:
        DatabaseError: If the connection attempt fails.
    """
    mysql_config = DATABASE_CONFIGS["mysql"]
    logger.debug(
        "Attempting to connect to MySQL database '%s' at '%s'...",
        mysql_config["database"],
        mysql_config["host"],
    )
    try:
        connection = ReconnectMySQLDatabase(
            mysql_config["database"],
            user=mysql_config["user"],
            password=mysql_config["password"],
            host=mysql_config["host"],
        )
        connection.connect()
        return connection
    except DatabaseError as error:
        logger.error(
            "Failed to connect to MySQL database '%s' at '%s': %s",
            mysql_config["database"],
            mysql_config["host"],
            error,
        )
        raise error
119 |
120 |
def connect_to_sqlite() -> SqliteDatabase:
    """
    Connect to the SQLite database configured via SQLITE_DATABASE_PATH.

    Returns:
        SqliteDatabase: The connected SQLite database object.

    Raises:
        DatabaseError: If the connection attempt fails.
    """
    db_path = DATABASE_CONFIGS["sqlite"]["database_path"]
    logger.debug("Attempting to connect to SQLite database at '%s'...", db_path)
    try:
        connection = SqliteDatabase(db_path)
        connection.connect()
        return connection
    except DatabaseError as error:
        logger.error("Failed to connect to SQLite database at '%s': %s", db_path, error)
        raise error
140 |
--------------------------------------------------------------------------------
/src/db_models.py:
--------------------------------------------------------------------------------
1 | """
2 | This program is free software: you can redistribute it under the terms
3 | of the GNU General Public License, v. 3.0. If a copy of the GNU General
4 | Public License was not distributed with this file, see .
5 | """
6 |
7 | import datetime
8 | from enum import Enum
9 | from peewee import (
10 | Model,
11 | CharField,
12 | TextField,
13 | DateTimeField,
14 | IntegerField,
15 | UUIDField,
16 | ForeignKeyField,
17 | BlobField,
18 | BooleanField,
19 | SQL,
20 | )
21 | from src.db import connect
22 | from src.utils import create_tables, get_configs
23 |
# Shared peewee database handle used by every model's Meta below.
# May be None in testing mode (see src/db.py connect()).
database = connect()
25 |
26 |
class Entity(Model):
    """Model representing Entities Table."""

    # Primary identifier for the entity.
    eid = UUIDField(primary_key=True)
    # Hashed phone number; unique per the index declared in Meta.
    phone_number_hash = CharField()
    password_hash = CharField(null=True)
    country_code = CharField()
    # Device identifier; unique per the index declared in Meta.
    device_id = CharField(null=True)
    client_publish_pub_key = TextField(null=True)
    client_device_id_pub_key = TextField(null=True)
    publish_keypair = BlobField(null=True)
    device_id_keypair = BlobField(null=True)
    server_state = BlobField(null=True)
    is_bridge_enabled = BooleanField(default=True)
    # Default language set both at ORM level and in the table DDL.
    language = CharField(null=True, default="en", constraints=[SQL("DEFAULT 'en'")])
    date_created = DateTimeField(default=datetime.datetime.now)

    class Meta:
        """Meta class to define database connection."""

        database = database
        table_name = "entities"
        # (fields, unique) pairs: both indexes are unique.
        indexes = (
            (("phone_number_hash",), True),
            (("device_id",), True),
        )
53 |
54 |
class OTPRateLimit(Model):
    """Model representing OTP Rate Limits Table."""

    # One row per phone number (unique index below); see otp_service.py
    # for how attempt_count maps onto RATE_LIMIT_WINDOWS.
    phone_number = CharField()
    attempt_count = IntegerField(default=0)
    # When the current rate-limit window expires.
    date_expires = DateTimeField(null=True)
    date_created = DateTimeField(default=datetime.datetime.now)

    class Meta:
        """Meta class to define database connection."""

        database = database
        table_name = "otp_rate_limit"
        indexes = ((("phone_number",), True),)
69 |
70 |
class Token(Model):
    """Model representing Tokens Table."""

    # Owning entity; backref exposes entity.tokens.
    eid = ForeignKeyField(Entity, backref="tokens", column_name="eid")
    platform = CharField()
    account_identifier_hash = CharField()
    account_identifier = CharField()
    account_tokens = TextField()
    date_created = DateTimeField(default=datetime.datetime.now)

    class Meta:
        """Meta class to define database connection."""

        database = database
        table_name = "tokens"
        # A token is unique per (platform, account, entity) combination.
        indexes = ((("platform", "account_identifier_hash", "eid"), True),)
87 |
88 |
class PasswordRateLimit(Model):
    """Model representing Password Rate Limits Table."""

    # Entity being rate limited for password attempts.
    eid = ForeignKeyField(Entity, backref="password_rate_limit", column_name="eid")
    attempt_count = IntegerField(default=0)
    # When the current rate-limit window expires.
    date_expires = DateTimeField(null=True)
    date_created = DateTimeField(default=datetime.datetime.now)

    class Meta:
        """Meta class to define database connection."""

        database = database
        table_name = "password_rate_limit"
102 |
103 |
class OTP(Model):
    """Model representing OTP Table."""

    phone_number = CharField()
    otp_code = CharField(max_length=10)
    # Number of failed verification attempts against this code.
    attempt_count = IntegerField(default=0)
    date_expires = DateTimeField()
    is_verified = BooleanField(default=False)
    date_created = DateTimeField(default=datetime.datetime.now)

    class Meta:
        """Meta class to define database connection."""

        database = database
        table_name = "otp"
        # Non-unique lookup indexes for retrieval and expiry sweeps.
        indexes = (
            (("phone_number",), False),
            (("date_expires",), False),
        )

    def is_expired(self):
        """Check if the OTP is expired (date_expires is in the past)."""
        return datetime.datetime.now() > self.date_expires

    def reset_attempt_count(self):
        """Reset the attempt count for the OTP and persist the change."""
        self.attempt_count = 0
        self.save()

    def increment_attempt_count(self):
        """Increment the attempt count for the OTP and persist the change."""
        self.attempt_count += 1
        self.save()
137 |
138 |
class Signups(Model):
    """Model representing Signup Attempts."""

    country_code = CharField()
    # Origin of the signup attempt (e.g. which client/channel); exact
    # values are set by callers — see migrations/populate_signups.py.
    source = CharField()
    date_created = DateTimeField(default=datetime.datetime.now)

    class Meta:
        """Meta class to define database connection."""

        database = database
        table_name = "signups"
151 |
152 |
class KeypairStatus(Enum):
    """
    Enum representing the status of a keypair.

    Values are stored as strings in StaticKeypairs.status.
    """

    ACTIVE = "active"
    INACTIVE = "inactive"
    ARCHIVED = "archived"
161 |
162 |
class StaticKeypairs(Model):
    """Model representing static x25519 keypairs."""

    # Keypair ID; unique together with status and version (see Meta).
    kid = IntegerField()
    # Serialized keypair material.
    keypair_bytes = BlobField()
    # Restricted to KeypairStatus values (active/inactive/archived).
    status = CharField(
        choices=[(status.value, status.value) for status in KeypairStatus]
    )
    date_last_used = DateTimeField(null=True)
    date_created = DateTimeField(default=datetime.datetime.now)
    # Incremented on every successful get_keypair() lookup.
    usage_count = IntegerField(default=0)
    version = CharField()

    class Meta:
        """Meta class to define database connection."""

        database = database
        table_name = "static_keypairs"
        indexes = ((("kid", "status", "version"), True),)

    @staticmethod
    def _validate_status(status):
        """Raise ValueError when `status` is not a valid KeypairStatus value.

        Uses the public Enum iteration API rather than the private
        `_value2member_map_` attribute.
        """
        allowed = [s.value for s in KeypairStatus]
        if status not in allowed:
            raise ValueError(f"Invalid status: {status}. Allowed: {allowed}")

    @classmethod
    def create_keypair(cls, kid, keypair_bytes, status, version):
        """Creates and stores a new keypair safely.

        Raises:
            ValueError: If `status` is not a valid KeypairStatus value.
        """
        cls._validate_status(status)
        with database.atomic():
            return cls.create(
                kid=kid, keypair_bytes=keypair_bytes, status=status, version=version
            )

    @classmethod
    def get_keypairs(cls, **criteria):
        """Retrieves keypairs based on dynamic filtering criteria."""
        query = cls.select()
        for field, value in criteria.items():
            query = query.where(getattr(cls, field) == value)
        return list(query)

    @classmethod
    def get_keypair(cls, kid, version):
        """Retrieves a keypair by its ID and version.

        Side effect: on a hit, increments usage_count and stamps
        date_last_used inside a transaction.
        """
        keypair = cls.get_or_none(cls.kid == kid, cls.version == version)
        if keypair:
            with database.atomic():
                keypair.usage_count += 1
                keypair.date_last_used = datetime.datetime.now()
                keypair.save(only=["date_last_used", "usage_count"])
        return keypair

    @classmethod
    def keypair_exists(cls, kid):
        """Checks if a keypair exists by ID."""
        return cls.select().where(cls.kid == kid).exists()

    @classmethod
    def update_status(cls, kid, status):
        """Updates the status of a keypair safely.

        Raises:
            ValueError: If `status` is not a valid KeypairStatus value.
        """
        cls._validate_status(status)
        with database.atomic():
            return cls.update(status=status).where(cls.kid == kid).execute()

    @classmethod
    def delete_keypair(cls, kid):
        """Deletes a keypair safely.

        Returns:
            The delete_instance() result when the keypair exists, else None.
        """
        with database.atomic():
            keypair = cls.get_or_none(cls.kid == kid)
            if keypair:
                return keypair.delete_instance()
            return None
238 |
239 |
# Auto-create tables at import time except when MODE is 'testing'
# (where connect() returned None and there is no database to write to).
if get_configs("MODE", default_value="development") in ("production", "development"):
    create_tables(
        [Entity, OTPRateLimit, Token, PasswordRateLimit, OTP, Signups, StaticKeypairs]
    )
244 |
--------------------------------------------------------------------------------
/src/device_id.py:
--------------------------------------------------------------------------------
1 | """
2 | Module for handling Device ID.
3 | """
4 |
5 | import hmac
6 | import hashlib
7 | from base_logger import get_logger
8 |
# Module-level logger for device ID computation.
logger = get_logger(__name__)
10 |
11 |
def compute_device_id(secret_key: bytes, phone_number: str, public_key: bytes) -> str:
    """
    Compute a device ID using HMAC and SHA-256.

    Args:
        secret_key (bytes): The secret key used for HMAC.
        phone_number (str): The phone number included in the HMAC input.
        public_key (bytes): The public key included in the HMAC input.

    Returns:
        str: The hex-encoded HMAC-SHA256 digest.
            (Fixed: the function returns hexdigest(), a str — the previous
            annotation and docstring incorrectly claimed bytes.)

    Raises:
        Exception: Re-raises any error encountered during computation.
    """
    try:
        logger.debug("Starting computation of device ID...")
        combined_input = phone_number.encode("utf-8") + public_key
        hmac_object = hmac.new(secret_key, combined_input, hashlib.sha256)
        return hmac_object.hexdigest()
    except Exception as e:
        logger.exception("Error computing device ID: %s", e)
        # Bare `raise` preserves the original traceback.
        raise
32 |
--------------------------------------------------------------------------------
/src/entity.py:
--------------------------------------------------------------------------------
1 | """
2 | Entity Controllers
3 | """
4 |
5 | from peewee import DoesNotExist
6 | from src.db_models import Entity
7 | from base_logger import get_logger
8 |
logger = get_logger(__name__)
# Peewee database handle shared by all entity queries (taken from the model).
database = Entity._meta.database
11 |
12 |
def create_entity(eid, phone_number_hash, password_hash, country_code, **kwargs):
    """
    Create a new entity.

    Args:
        eid (str): The ID of the entity.
        phone_number_hash (str): The hashed phone number of the entity.
        password_hash (str): The hashed password of the entity.
        country_code (str): The entity's country code.
        **kwargs: Additional Entity fields to set on creation
            (e.g. device_id, client_publish_pub_key, client_device_id_pub_key).

    Returns:
        Entity: The created entity object.
    """
    # Wrap in a transaction so a partial insert never persists.
    with database.atomic():
        entity_data = {
            "eid": eid,
            "phone_number_hash": phone_number_hash,
            "password_hash": password_hash,
            "country_code": country_code,
            **kwargs,
        }
        logger.debug("Creating a new entity...")
        entity = Entity.create(**entity_data)
        return entity
38 |
39 |
def find_entity(**search_criteria):
    """
    Look up a single entity matching the given field criteria.

    Args:
        **search_criteria: Field-name/value pairs to match against Entity.

    Returns:
        Entity or None: The matching entity if one exists, else None.
    """
    logger.debug("Finding an entity based on the specified criteria...")
    with database.connection_context():
        try:
            match = Entity.get(**search_criteria)
        except DoesNotExist:
            logger.debug("Entity is not found...")
            return None
        logger.debug("Entity is found...")
        return match
60 |
61 |
def fetch_all_entities(
    filters=None, date_range=None, truncate_by=None, return_json=False
):
    """
    Fetch all entities with optional filters, date range, and date truncation.

    Args:
        filters (dict, optional): Mapping of Entity field names to the values
            to filter by (equality match). Defaults to None.
        date_range (tuple, optional): (start_date, end_date) datetimes used to
            filter the 'date_created' field.
        truncate_by (str, optional): 'day' or 'month' — when given, the date
            filter is applied to the truncated 'date_created'. Any other value
            falls back to filtering on the raw timestamp.
        return_json (bool, optional): If True, return a list of dicts;
            otherwise return a list of model instances (default).

    Returns:
        list: All entities matching the criteria, ordered by date_created asc.
    """
    filters = filters or {}
    logger.debug(
        "Fetching all entities with filters: %s, date_range: %s, truncate_by: %s",
        filters,
        date_range,
        truncate_by,
    )

    with database.connection_context():
        query = Entity.select()
        conditions = [
            getattr(Entity, field) == value for field, value in filters.items()
        ]

        if date_range:
            start_date, end_date = date_range
            # Collapse the duplicated day/month branches: both only differ
            # in the truncation unit passed to truncate().
            if truncate_by in ("day", "month"):
                conditions.append(
                    Entity.date_created.truncate(truncate_by).between(
                        start_date, end_date
                    )
                )
            else:
                conditions.append(Entity.date_created.between(start_date, end_date))

        if conditions:
            query = query.where(*conditions)

        query = query.order_by(Entity.date_created.asc())

        total_records = query.count()
        logger.debug("Found %d entities", total_records)

        return list(query.dicts()) if return_json else list(query.execute())
121 |
--------------------------------------------------------------------------------
/src/long_lived_token.py:
--------------------------------------------------------------------------------
1 | """
2 | Module for generating and verifying Long-Lived Tokens (LLTs).
3 | """
4 |
5 | from datetime import datetime, timedelta
6 | import base64
7 |
8 | from jwt import JWT, jwk_from_dict
9 | from jwt.utils import get_int_from_datetime
10 |
11 | from src.crypto import encrypt_fernet
12 | from src.utils import convert_to_fernet_key
13 | from base_logger import get_logger
14 |
# Module-level logger for long-lived token operations.
logger = get_logger(__name__)
16 |
17 |
def generate_llt(eid, key):
    """
    Generate a Long-Lived Token (LLT) for the given entity ID (eid).

    The JWT is signed with HS256, prefixed with the eid, Fernet-encrypted,
    and finally base64-encoded for transport.

    Args:
        eid (str): The entity ID for which the LLT is generated.
        key (bytes): The key used for signing and encryption.

    Returns:
        str: Base64 encoded, Fernet-encrypted LLT.
    """
    logger.debug("Generating payload for the long-lived token...")
    claims = {
        "eid": eid,
        "iss": "https://smswithoutborders.com",
        "iat": get_int_from_datetime(datetime.now()),
        # Effectively non-expiring: valid for ~10 years.
        "exp": get_int_from_datetime(datetime.now() + timedelta(days=3650)),
    }

    logger.debug("Creating signing key...")
    encoded_key = base64.urlsafe_b64encode(key).decode("utf-8")
    signing_key = jwk_from_dict({"kty": "oct", "k": encoded_key})

    logger.debug("Encoding the long-lived token...")
    llt = JWT().encode(claims, signing_key, alg="HS256")

    logger.debug("Encrypting the long-lived token...")
    ciphertext = encrypt_fernet(convert_to_fernet_key(key), f"{eid}:{llt}")

    return base64.b64encode(ciphertext).decode("utf-8")
51 |
52 |
def verify_llt(llt, key):
    """
    Verify the integrity and authenticity of a Long-Lived Token (LLT).

    Args:
        llt (str): The JWT portion of the LLT to be verified.
        key (bytes): The key used for signing.

    Returns:
        tuple: A tuple containing two items:
            - dict or None: The decoded payload of the LLT if valid, None otherwise.
            - str or None: Error message if LLT is invalid or expired, None if valid.
    """
    try:
        logger.debug("Decoding the long-lived token...")
        signing_key = jwk_from_dict(
            {"kty": "oct", "k": base64.urlsafe_b64encode(key).decode("utf-8")}
        )
        payload = JWT().decode(llt, signing_key, algorithms=["HS256"])
        return payload, None

    except Exception as error:
        logger.error("Error verifying long-lived token: %s", error)
        # Return a string to honor the documented contract; previously the
        # raw exception object was returned instead of an error message.
        return None, str(error)
78 |
--------------------------------------------------------------------------------
/src/otp_service.py:
--------------------------------------------------------------------------------
1 | """OTP Service Module."""
2 |
3 | import datetime
4 | import random
5 |
6 | from twilio.rest import Client
7 | from twilio.base.exceptions import TwilioRestException
8 | from src.db_models import OTPRateLimit, OTP
9 | from src.utils import get_configs
10 | from base_logger import get_logger
11 |
logger = get_logger(__name__)

# Twilio credentials and service identifiers, read from configuration.
TWILIO_ACCOUNT_SID = get_configs("TWILIO_ACCOUNT_SID")
TWILIO_AUTH_TOKEN = get_configs("TWILIO_AUTH_TOKEN")
TWILIO_SERVICE_SID = get_configs("TWILIO_SERVICE_SID")
TWILIO_PHONE_NUMBER = get_configs("TWILIO_PHONE_NUMBER")
# MOCK_OTP: config string coerced to bool; only the literal "true"
# (case-insensitive) enables mock mode; unset means disabled.
MOCK_OTP = get_configs("MOCK_OTP")
MOCK_OTP = MOCK_OTP.lower() == "true" if MOCK_OTP is not None else False
# Comma-separated list of phone numbers that always use the mock OTP path.
DUMMY_PHONENUMBERS = get_configs(
    "DUMMY_PHONENUMBER", default_value="+237123456789"
).split(",")

# Escalating rate-limit windows: the Nth request moves the number into
# window N-1 (duration in minutes, count = cumulative attempts).
RATE_LIMIT_WINDOWS = [
    {"duration": 2, "count": 1},  # 2 minute window
    {"duration": 5, "count": 2},  # 5 minute window
    {"duration": 15, "count": 3},  # 15 minute window
    {"duration": 1440, "count": 4},  # 24 hour window
]
30 |
31 |
def is_rate_limited(phone_number):
    """
    Check whether a phone number has exceeded the OTP request rate limit.

    Args:
        phone_number (str): The phone number to check.

    Returns:
        bool: True if the number is currently rate limited, False otherwise.
    """
    logger.debug("Checking rate limit for phone number...")
    now = datetime.datetime.now()
    rate_limit = OTPRateLimit.get_or_none(OTPRateLimit.phone_number == phone_number)

    if rate_limit is None:
        return False

    # Purge expired, fully-exhausted records before evaluating.
    clean_rate_limit_store(phone_number)

    # Locate the window matching the current attempt count (-1 when the
    # count falls outside all configured windows).
    window_index = -1
    for i, window in enumerate(RATE_LIMIT_WINDOWS):
        if window["count"] == rate_limit.attempt_count:
            window_index = i
            break

    if rate_limit.date_expires >= now:
        logger.info(
            "Rate limit exceeded in %s-minute window.",
            RATE_LIMIT_WINDOWS[window_index]["duration"],
        )
        return True
    return False
65 |
66 |
def send_otp(phone_number, message_body=None):
    """
    Send a One-Time Password (OTP) to the given phone number.

    Args:
        phone_number (str): Recipient phone number in E.164 format (e.g., "+1234567890").
        message_body (str, optional): Custom message body for the OTP.

    Returns:
        tuple:
            - bool: True if the OTP was sent successfully, False otherwise.
            - str: Human-readable result message.
            - int or None: OTP expiry as a Unix timestamp on success; else None.
    """
    logger.debug("Sending OTP to phone number...")
    if is_rate_limited(phone_number):
        return False, "Too many OTP requests. Please wait and try again later.", None

    # Mock mode and dummy numbers both short-circuit to the mock sender.
    if MOCK_OTP or phone_number in DUMMY_PHONENUMBERS:
        success, message = mock_send_otp()
    else:
        success, message = twilio_send_otp(phone_number, message_body)

    expires = None
    if success:
        otp = increment_rate_limit(phone_number)
        expires = int(otp.date_expires.timestamp())

    return success, message, expires
100 |
101 |
def verify_otp(phone_number, otp, use_twilio=True):
    """
    Verify the provided OTP for the given phone number.

    Args:
        phone_number (str): The phone number to verify the OTP for.
        otp (str): The OTP to verify.
        use_twilio (bool, optional): Whether to verify via Twilio (default True)
            or via the in-app OTP store.

    Returns:
        tuple:
            - bool: True if the OTP was verified successfully.
            - str: Result message.
    """
    logger.debug("Verifying OTP for phone number...")
    if not OTPRateLimit.get_or_none(OTPRateLimit.phone_number == phone_number):
        return (
            False,
            "OTP not initiated. Please request a new OTP before attempting to verify.",
        )

    # Mock mode and dummy numbers both short-circuit to the mock verifier.
    if MOCK_OTP or phone_number in DUMMY_PHONENUMBERS:
        success, message = mock_verify_otp(otp)
    elif use_twilio:
        success, message = twilio_verify_otp(phone_number, otp)
    else:
        success, message = verify_inapp_otp(phone_number, otp)

    if success:
        clear_rate_limit(phone_number)

    return success, message
137 |
138 |
def twilio_send_otp(phone_number, message_body=None):
    """
    Send an OTP via Twilio: the Messages API when a custom body is given,
    otherwise the Verify service (which generates the code itself).

    Args:
        phone_number (str): Recipient phone number in E.164 format (e.g., "+1234567890").
        message_body (str, optional): Custom message body for the OTP.

    Returns:
        tuple:
            - bool: True if the message was sent successfully, False otherwise.
            - str: A detailed message indicating the result.
    """
    client = Client(TWILIO_ACCOUNT_SID, TWILIO_AUTH_TOKEN)

    try:
        if message_body:
            sms = client.messages.create(
                body=message_body,
                from_=TWILIO_PHONE_NUMBER,
                to=phone_number,
            )
            status = sms.status
        else:
            verification = client.verify.v2.services(
                TWILIO_SERVICE_SID
            ).verifications.create(to=phone_number, channel="sms")
            status = verification.status

        if status not in ("accepted", "pending", "queued"):
            logger.error("Failed to send OTP. Twilio status: %s", status)
            return (
                False,
                "Failed to send OTP. Please ensure your phone number is correct and try again later.",
            )

        logger.info("OTP sent successfully.")
        return True, "OTP sent successfully. Please check your phone for the code."
    except TwilioRestException as e:
        logger.error("Twilio error while sending OTP: %s", e)
        return (False, "Failed to send OTP. Please try again later.")
180 |
181 |
def twilio_verify_otp(phone_number, otp):
    """
    Verify an OTP via the Twilio Verify service.

    Args:
        phone_number (str): The phone number to verify the OTP for.
        otp (str): The OTP to verify.

    Returns:
        tuple:
            - bool: True if the OTP was verified successfully.
            - str: Result message.
    """
    client = Client(TWILIO_ACCOUNT_SID, TWILIO_AUTH_TOKEN)

    try:
        check = client.verify.v2.services(
            TWILIO_SERVICE_SID
        ).verification_checks.create(to=phone_number, code=otp)

        if check.status == "approved":
            logger.info("OTP verified successfully.")
            return True, "OTP verified successfully."

        # "pending" after a check means the code did not match.
        if check.status == "pending":
            logger.error("Incorrect OTP provided.")
            return False, "Incorrect OTP. Please double-check the code and try again."

        logger.warning("Unexpected OTP verification status: %s", check.status)
        return (False, "Failed to verify OTP. Please try again later.")
    except TwilioRestException as e:
        logger.error("Twilio error while verifying OTP: %s", e)

        if e.status == 400:
            return False, "Incorrect OTP. Please double-check the code and try again."

        if e.status == 404:
            # Twilio deletes verifications once approved/expired.
            return False, "OTP verification expired. Please request a new code."

        logger.warning("Unexpected OTP verification status: %s", e.status)
        return (False, "Failed to verify OTP. Please try again later.")
225 |
226 |
def mock_send_otp():
    """
    Simulate sending an OTP (used in mock mode and for dummy phone numbers).

    Returns:
        tuple:
            - bool: Always True.
            - str: Success message mirroring the real sender.
    """
    logger.info("Mock OTP sent to phone number.")
    return True, "OTP sent successfully. Please check your phone for the code."
238 |
239 |
def mock_verify_otp(otp):
    """
    Simulate OTP verification; only the fixed code "123456" is accepted.

    Args:
        otp (str): The OTP code to verify.

    Returns:
        tuple:
            - bool: True when the code matches the mock value.
            - str: Result message mirroring the real verifier.
    """
    if otp != "123456":
        logger.warning("Incorrect OTP provided.")
        return False, "Incorrect OTP. Please double-check the code and try again."

    logger.info("Mock OTP verified successfully.")
    return True, "OTP verified successfully."
258 |
259 |
def clean_rate_limit_store(phone_number):
    """
    Delete expired rate-limit records that have exhausted every window.

    Only records whose expiry has passed AND whose attempt count reached the
    final window's count are removed; earlier windows keep escalating.

    Args:
        phone_number (str): The phone number to clean up records for.
    """
    logger.debug("Cleaning up expired rate limit records for phone number...")
    now = datetime.datetime.now()

    deleted = (
        OTPRateLimit.delete()
        .where(
            OTPRateLimit.phone_number == phone_number,
            OTPRateLimit.date_expires < now,
            OTPRateLimit.attempt_count >= RATE_LIMIT_WINDOWS[-1]["count"],
        )
        .execute()
    )

    if deleted:
        logger.info("Successfully cleaned up expired rate limit records.")
282 |
283 |
def increment_rate_limit(phone_number):
    """
    Increment the rate limit counter for the provided phone number.

    On the first attempt a record is created, seeded with the first window's
    count and expiry. On later attempts the counter is incremented and the
    expiry is re-stamped from the window whose "count" exactly matches the
    new attempt count.

    Args:
        phone_number (str): The phone number to increment the rate limit counter for.

    Returns:
        OTPRateLimit: The updated or created OTP rate limit record.
    """
    logger.debug("Incrementing rate limit for phone number...")
    current_time = datetime.datetime.now()

    rate_limit, created = OTPRateLimit.get_or_create(
        phone_number=phone_number,
        defaults={
            "date_expires": current_time
            + datetime.timedelta(minutes=RATE_LIMIT_WINDOWS[0]["duration"]),
            "attempt_count": RATE_LIMIT_WINDOWS[0]["count"],
        },
    )

    if not created:
        rate_limit.attempt_count += 1
        # Locate the window whose threshold equals the new attempt count.
        # Defaults to -1 when no window matches, so the indexing below
        # resolves to RATE_LIMIT_WINDOWS[-1] (the last/longest window) via
        # Python's negative indexing.
        index = next(
            (
                i
                for i, window in enumerate(RATE_LIMIT_WINDOWS)
                if window["count"] == rate_limit.attempt_count
            ),
            -1,
        )

        # NOTE(review): attempt counts that fall between thresholds also get
        # the last window's duration — confirm this fallback is intended.
        rate_limit.date_expires = current_time + datetime.timedelta(
            minutes=RATE_LIMIT_WINDOWS[index]["duration"]
        )
        rate_limit.save()

    logger.info(
        "Rate limit incremented for phone number. Attempts: %d, Expires at: %s",
        rate_limit.attempt_count,
        rate_limit.date_expires,
    )

    return rate_limit
329 |
330 |
def clear_rate_limit(phone_number):
    """
    Unconditionally remove every OTP rate-limit record for a phone number.

    Args:
        phone_number (str): The phone number to clear the rate limit counter for.
    """
    logger.debug("Clearing rate limit for phone number...")
    query = OTPRateLimit.delete().where(OTPRateLimit.phone_number == phone_number)
    query.execute()

    logger.info("Rate limit cleared for phone number.")
342 |
343 |
def generate_otp(length=6):
    """
    Generate a random numeric OTP of the specified length.

    Uses the ``secrets`` module (CSPRNG) instead of ``random``: OTPs are
    security-sensitive and ``random``'s Mersenne Twister output is
    predictable to an attacker who observes enough values.

    Args:
        length (int): The number of digits in the OTP. Must be >= 1.
            Defaults to 6.

    Returns:
        str: The generated OTP — exactly ``length`` digits, no leading zero
        (matches the original numeric range 10**(length-1) .. 10**length - 1).

    Raises:
        ValueError: If ``length`` is less than 1.
    """
    import secrets  # local import: keeps the module's top-level imports unchanged

    if length < 1:
        raise ValueError("OTP length must be at least 1")

    lower = 10 ** (length - 1)  # smallest length-digit number, e.g. 100000
    span = 10**length - lower  # count of length-digit numbers
    return str(lower + secrets.randbelow(span))
355 |
356 |
def create_inapp_otp(phone_number, exp_time=1051200):
    """
    Create (or refresh) an unverified in-app OTP for a phone number.

    If an unverified OTP record already exists for the number, a new code is
    generated and its expiry and attempt counter are reset; otherwise a fresh
    record is created.

    Args:
        phone_number (str): The phone number for which the OTP will be generated.
        exp_time (int): OTP lifetime in minutes. Defaults to 1051200.

    Returns:
        tuple:
            - str: A message describing the result of the OTP generation attempt.
            - tuple:
                - str: The OTP code.
                - int: The expiration time as a Unix timestamp (seconds since epoch).
    """
    expiry = datetime.datetime.now() + datetime.timedelta(minutes=exp_time)

    otp_entry, created = OTP.get_or_create(
        phone_number=phone_number,
        is_verified=False,
        defaults={
            "otp_code": generate_otp(),
            "date_expires": expiry,
            "attempt_count": 0,
        },
    )

    if not created:
        # Reuse the existing unverified row: issue a fresh code and restart
        # both the expiry clock and the attempt counter.
        otp_entry.otp_code = generate_otp()
        otp_entry.date_expires = expiry
        otp_entry.attempt_count = 0
        otp_entry.is_verified = False
        otp_entry.save()

    return "OTP created successfully.", (
        otp_entry.otp_code,
        int(otp_entry.date_expires.timestamp()),
    )
394 |
395 |
def verify_inapp_otp(phone_number, otp_code):
    """
    Verify an in-app OTP for a phone number.

    Checks the pending (unverified) OTP record first; when none exists, falls
    back to checking whether this exact code was already verified earlier.

    Args:
        phone_number (str): The phone number for which the OTP was generated.
        otp_code (str): The OTP code entered for verification.

    Returns:
        tuple:
            - bool: Indicates whether the OTP verification was successful.
            - str: A message describing the result of the OTP verification attempt.
    """
    pending = OTP.get_or_none(
        OTP.phone_number == phone_number,
        ~(OTP.is_verified),
    )

    if pending is None:
        # No pending OTP — accept the code only if it was verified before.
        already_verified = OTP.get_or_none(
            OTP.phone_number == phone_number,
            OTP.is_verified,
            OTP.otp_code == otp_code,
        )
        if already_verified:
            return True, "OTP is already verified for this phone number."
        return False, "No OTP record found for this phone number."

    if pending.is_expired():
        return False, "The OTP has expired. Please request a new one."

    if pending.otp_code != otp_code:
        # Record the failed attempt on the OTP row.
        pending.increment_attempt_count()
        return False, "Incorrect OTP. Please try again."

    pending.is_verified = True
    pending.save()

    return True, "OTP verified successfully!"
435 |
--------------------------------------------------------------------------------
/src/password_rate_limit.py:
--------------------------------------------------------------------------------
1 | """Password Rate Limit Module."""
2 |
3 | import datetime
4 | from src.db_models import PasswordRateLimit
5 | from base_logger import get_logger
6 |
7 | logger = get_logger(__name__)
8 |
# Escalating back-off windows for password attempts: once the attempt count
# reaches a window's "count", attempts are rejected until that window's
# "duration" (in minutes) has elapsed.
RATE_LIMIT_WINDOWS = [
    {"duration": 2, "count": 3},  # 3 attempts in 2 minutes
    {"duration": 15, "count": 5},  # 5 attempts in 15 minutes
    {"duration": 60, "count": 7},  # 7 attempts in 60 minutes
    {"duration": 1440, "count": 9},  # 9 attempts in 24 hours
]

# Threshold attempt counts in ascending window order: [3, 5, 7, 9].
ATTEMPT_COUNTS = [window["count"] for window in RATE_LIMIT_WINDOWS]
17 |
18 |
def is_rate_limited(eid):
    """
    Determine whether an entity is currently blocked from password attempts.

    Expired records are cleaned up first; the entity counts as rate limited
    when its attempt count sits exactly on one of the configured window
    thresholds and that window has not yet expired.

    Args:
        eid (str): The entity to check.

    Returns:
        bool: True if the entity is rate limited, False otherwise.
    """
    logger.debug("Checking rate limit for entity...")
    now = datetime.datetime.now()
    record = PasswordRateLimit.get_or_none(PasswordRateLimit.eid == eid)

    if record is None:
        return False

    clean_rate_limit_store(eid)

    if record.attempt_count not in ATTEMPT_COUNTS:
        return False

    if record.date_expires < now:
        return False

    window = ATTEMPT_COUNTS.index(record.attempt_count)
    logger.info(
        "Rate limit exceeded for entity in %s-minute window.",
        RATE_LIMIT_WINDOWS[window]["duration"],
    )
    return True
46 |
47 |
def register_password_attempt(eid):
    """
    Record a password attempt for the provided entity.

    Creates the rate-limit row on the first attempt; afterwards increments
    the counter and, whenever the counter lands exactly on a configured
    threshold, stamps the expiry for that window.

    Args:
        eid (str): The entity to register the attempt for.
    """
    logger.debug("Registering password attempt for entity...")
    now = datetime.datetime.now()

    record, created = PasswordRateLimit.get_or_create(
        eid=eid,
        defaults={"attempt_count": 1},
    )

    if not created:
        record.attempt_count += 1

        if record.attempt_count in ATTEMPT_COUNTS:
            window = ATTEMPT_COUNTS.index(record.attempt_count)
            record.date_expires = now + datetime.timedelta(
                minutes=RATE_LIMIT_WINDOWS[window]["duration"]
            )

        record.save()

    logger.info(
        "Registered password attempt for entity. Current attempt count: %d.",
        record.attempt_count,
    )
78 |
79 |
def clean_rate_limit_store(eid):
    """
    Delete the entity's rate-limit record once its back-off has fully expired.

    A record is removed only when its expiry time has passed AND its attempt
    count has reached the final (largest) window threshold.

    Args:
        eid (str): The entity to clean up rate limit records for.
    """
    logger.debug("Cleaning up expired rate limit records for entity...")
    now = datetime.datetime.now()
    final_window_count = RATE_LIMIT_WINDOWS[-1]["count"]

    deleted = (
        PasswordRateLimit.delete()
        .where(
            PasswordRateLimit.eid == eid,
            PasswordRateLimit.date_expires < now,
            PasswordRateLimit.attempt_count >= final_window_count,
        )
        .execute()
    )

    if deleted:
        logger.info("Cleaned up expired rate limit records for entity.")
102 |
103 |
def clear_rate_limit(eid):
    """
    Unconditionally remove the entity's rate-limit record, resetting its counter.

    Args:
        eid (str): The entity to clear the rate limit counter for.
    """
    logger.debug("Clearing rate limit for entity...")
    deleted = (
        PasswordRateLimit.delete().where(PasswordRateLimit.eid == eid).execute()
    )

    if deleted:
        logger.info("Cleared rate limit for entity.")
118 |
--------------------------------------------------------------------------------
/src/password_validation.py:
--------------------------------------------------------------------------------
1 | """
2 | Module for validating password strength and checking if passwords
3 | have been compromised using the Have I Been Pwned API.
4 | """
5 |
6 | import hashlib
7 | import requests
8 | from base_logger import get_logger
9 |
10 | logger = get_logger(__name__)
11 |
12 |
def validate_password_strength(password):
    """
    Validate password strength and check for known compromises.

    Local checks (length, lowercase, uppercase, digit, special character)
    run first; only a password that passes all of them is checked against
    the Have I Been Pwned range API using the k-anonymity scheme — only the
    first five characters of the SHA-1 hash leave this machine.

    Args:
        password (str): Password to validate.

    Returns:
        str or None: None if the password is acceptable, otherwise a message
        describing the first failed check.
    """
    # Fix: the original kept a dead `invalid_password` list and an
    # always-true `if not invalid_password:` guard around the breach check;
    # both removed. The docstring's claimed "Password validation failed: "
    # prefix was never produced and has been dropped from the contract.
    if len(password) < 8:
        return "Password must be at least 8 characters long"

    if not any(c.islower() for c in password):
        return "Password must include at least one lowercase letter (a-z)"

    if not any(c.isupper() for c in password):
        return "Password must include at least one uppercase letter (A-Z)"

    if not any(c.isdigit() for c in password):
        return "Password must include at least one number (0-9)"

    if not any(c in "!@#$%^&*()_+-=" for c in password):
        return (
            "Password must include at least one special character from the "
            "following set: !@#$%^&*()_+-="
        )

    # k-anonymity lookup: send only the 5-char hash prefix, compare the
    # remaining 35 characters locally against the returned candidates.
    password_hash = hashlib.sha1(password.encode("utf-8")).hexdigest().upper()
    prefix, suffix = password_hash[:5], password_hash[5:]
    url = f"https://api.pwnedpasswords.com/range/{prefix}"

    try:
        response = requests.get(url, timeout=5)
        if response.ok:
            for line in response.text.splitlines():
                if line.split(":")[0] == suffix:
                    return (
                        "This password has been found in a data breach and should not be used. "
                        "Please choose a different password."
                    )
        else:
            # Best-effort check: a failed lookup is logged but does not
            # reject the password.
            logger.error(
                "Failed to check password against the Have I Been Pwned database"
            )
    except requests.RequestException as e:
        logger.error(
            "Error checking password against Have I Been Pwned database: %s", e
        )

    return None
70 |
--------------------------------------------------------------------------------
/src/relaysms_payload.py:
--------------------------------------------------------------------------------
1 | """
2 | Module for handling encryption, decryption, encoding, and decoding of RelaySMS payloads.
3 | """
4 |
5 | import base64
6 | import struct
7 | from smswithoutborders_libsig.ratchets import Ratchets, States, HEADERS
8 | from base_logger import get_logger
9 |
10 | logger = get_logger(__name__)
11 |
12 |
def decrypt_payload(
    server_state, publish_keypair, ratchet_header, encrypted_content, **kwargs
):
    """
    Decrypts a RelaySMS payload using a double-ratchet session.

    Args:
        server_state (bytes or None): Serialized server-side ratchet state.
            If None, a fresh state is initialized (bob_init) from the
            publish shared key and the keypair below.
        publish_keypair (object): Server keypair used when initializing a
            new ratchet state.
        ratchet_header (bytes): Serialized ratchet header.
        encrypted_content (bytes): Encrypted content to be decrypted.
        kwargs (dict): Additional keyword arguments:
            - publish_shared_key (bytes): Shared key; used only when a new
              state is initialized.
            - publish_pub_key (bytes): Public key passed as associated data
              (AD) to the decryption.

    Returns:
        tuple:
            - plaintext: Decrypted content.
            - state: Updated ratchet state object (not re-serialized here).
            - error (Exception or None): The exception raised during
              decryption, or None on success; on error the first two
              elements are None.
    """
    publish_shared_key = kwargs.get("publish_shared_key")
    publish_pub_key = kwargs.get("publish_pub_key")

    try:
        if not server_state:
            state = States()
            logger.debug("Initializing ratchet...")
            Ratchets.bob_init(state, publish_shared_key, publish_keypair)
        else:
            logger.debug("Deserializing state...")
            state = States.deserialize(server_state)

        logger.debug("Deserializing header...")
        header = HEADERS.deserialize(ratchet_header)
        logger.debug("Decrypting content...")
        plaintext = Ratchets.decrypt(
            state=state, header=header, ciphertext=encrypted_content, AD=publish_pub_key
        )
        return plaintext, state, None
    except Exception as e:
        return None, None, e
56 |
57 |
def encrypt_payload(server_state, client_publish_pub_key, content):
    """
    Encrypts content into a RelaySMS payload.

    Args:
        server_state (bytes): Serialized server-side ratchet state; must be
            truthy (already initialized), otherwise a ValueError is raised
            and returned in the error slot.
        client_publish_pub_key (bytes): Client public key passed as
            associated data (AD) to the encryption.
        content (str): Plaintext content; UTF-8 encoded before encryption.

    Returns:
        tuple:
            - header (bytes): Serialized ratchet header.
            - content_ciphertext (bytes): Encrypted content.
            - state: Updated ratchet state object (not re-serialized here).
            - error (Exception or None): The exception raised, or None on
              success; on error the first three elements are None.
    """
    try:
        if not server_state:
            raise ValueError("Server state is not initialized.")

        logger.debug("Deserializing state...")
        state = States.deserialize(server_state)
        logger.debug("Encrypting content...")
        header, content_ciphertext = Ratchets.encrypt(
            state=state, data=content.encode("utf-8"), AD=client_publish_pub_key
        )
        return header.serialize(), content_ciphertext, state, None
    except Exception as e:
        return None, None, None, e
87 |
88 |
89 | def decode_relay_sms_payload(content):
90 | """
91 | Decode a RelaySMS payload from a base64-encoded string.
92 |
93 | Args:
94 | content (str): Base64-encoded string representing the payload.
95 |
96 | Returns:
97 | tuple:
98 | - header (bytes): Ratchet header.
99 | - encrypted_content (bytes): Encrypted payload.
100 | - error (Exception or None)
101 | """
102 | try:
103 | logger.debug("Unpacking payload....")
104 | payload = base64.b64decode(content)
105 | len_header = struct.unpack(".
5 | """
6 |
7 | from src.db_models import Signups
8 | from base_logger import get_logger
9 |
10 | logger = get_logger(__name__)
11 | database = Signups._meta.database
12 |
13 |
def create_record(country_code, source):
    """
    Persist a new signup record inside a transaction.

    Args:
        country_code (str): The country code of the signup.
        source (str): The source of the signup (e.g., "bridges" or "platforms").

    Returns:
        Signups: The newly created signup row.
    """
    with database.atomic():
        record = Signups.create(country_code=country_code, source=source)

    logger.info("Signup record created successfully")
    return record
30 |
--------------------------------------------------------------------------------
/src/tokens.py:
--------------------------------------------------------------------------------
1 | """
2 | Entity's Tokens Controllers
3 | """
4 |
5 | from playhouse.shortcuts import model_to_dict
6 | from peewee import DoesNotExist
7 | from src.db_models import Token
8 | from src.utils import remove_none_values
9 | from base_logger import get_logger
10 |
11 | logger = get_logger(__name__)
12 | database = Token._meta.database
13 |
14 |
def create_entity_token(
    entity,
    platform,
    account_identifier_hash,
    account_identifier,
    account_tokens,
    **kwargs,
):
    """
    Create a new token row for an entity inside a transaction.

    Args:
        entity (Entity): The entity associated with the token.
        platform (str): The platform name.
        account_identifier_hash (str): The hashed account identifier.
        account_identifier (str): The account identifier.
        account_tokens (str): The token data.
        **kwargs: Additional model fields to set on the token.

    Returns:
        Token: The created token object.
    """
    with database.atomic():
        logger.debug("Creating a new token...")
        return Token.create(
            eid=entity,
            platform=platform,
            account_identifier_hash=account_identifier_hash,
            account_identifier=account_identifier,
            account_tokens=account_tokens,
            **kwargs,
        )
49 |
50 |
def fetch_entity_tokens(
    entity, fetch_all=False, fields=None, return_json=False, **search_criteria
):
    """
    Fetch tokens associated with the given entity.

    Args:
        entity (Entity): The entity whose tokens are fetched.
        fetch_all (bool, optional): If True, ignore search criteria and fetch
            every token belonging to the entity. Defaults to False.
        fields (list[str] or None, optional): Names of Token fields to
            select; all fields when None.
        return_json (bool, optional): If True, return a list of dicts with
            None values stripped; otherwise return Token model instances.
        **search_criteria: Field/value pairs to filter by (None values are
            ignored).

    Returns:
        list[dict] or list[Token]: The matching tokens.
    """
    logger.debug("Fetching tokens for the specified entity...")

    with database.atomic():
        query = entity.tokens

        if search_criteria and not fetch_all:
            filters = [
                getattr(Token, field) == value
                for field, value in search_criteria.items()
                if value is not None
            ]
            query = query.where(*filters)
            logger.debug("Applying search criteria to the query...")

        if fields:
            selected = [getattr(Token, field) for field in fields]
            query = query.select(*selected)
            logger.debug("Applying field selection to the query...")

        tokens = list(query.execute())
        logger.debug("Executing token fetch query...")

        if not return_json:
            return tokens

        return remove_none_values([model_to_dict(token) for token in tokens])
102 |
103 |
def find_token(**search_criteria):
    """
    Find a single token based on search criteria.

    Args:
        **search_criteria: Field/value pairs identifying the token.

    Returns:
        Token or None: The token object if found, otherwise None.
    """
    logger.debug("Finding a token based on the specified criteria...")
    with database.connection_context():
        try:
            match = Token.get(**search_criteria)
        except DoesNotExist:
            logger.debug("Token is not found...")
            return None

        logger.debug("Token is found...")
        return match
124 |
125 |
def update_entity_tokens(entity, update_fields, **search_criteria):
    """
    Update the entity's tokens, optionally narrowed by search criteria.

    Args:
        entity (Entity): The entity associated with the tokens.
        update_fields (dict): Field/value pairs to write.
        **search_criteria: Additional field/value filters (None values are
            ignored).

    Returns:
        int: The number of rows updated.
    """
    logger.debug("Updating tokens for the specified entity...")
    with database.atomic():
        query = Token.update(**update_fields).where(Token.eid == entity)

        filters = [
            getattr(Token, field) == value
            for field, value in search_criteria.items()
            if value is not None
        ]
        if filters:
            query = query.where(*filters)
            logger.debug("Applying search criteria: %s", search_criteria)

        rows_updated = query.execute()
        logger.debug("Number of tokens updated: %s", rows_updated)

        return rows_updated
157 |
--------------------------------------------------------------------------------
/src/user_metrics.py:
--------------------------------------------------------------------------------
1 | """
2 | This program is free software: you can redistribute it under the terms
3 | of the GNU General Public License, v. 3.0. If a copy of the GNU General
Public License was not distributed with this file, see <https://www.gnu.org/licenses/>.
5 | """
6 |
7 | from collections import defaultdict
8 | from peewee import fn
9 | from src.db_models import Entity, Signups, Token
10 | from src.utils import decrypt_and_decode
11 |
12 |
def get_signup_users(filters=None, group_by=None, options=None):
    """Retrieve signup user data based on specified filters and grouping.

    Args:
        filters (dict, optional): Filtering options:
            - start_date (str): Start date for filtering records (YYYY-MM-DD).
            - end_date (str): End date for filtering records (YYYY-MM-DD).
            - country_code (str): Country code to filter results by.
        group_by (str, optional): How the data is grouped:
            - "country": Group results by country.
            - "date": Group results by date.
            - None: No grouping; returns totals only.
        options (dict, optional): Additional options:
            - granularity (str): Date grouping granularity ("day" or "month").
            - top (int): Number of top results to return (mutually exclusive
              with page/page_size).
            - page (int): Current page for paginated results (default 1).
            - page_size (int): Number of records per page (default 50).

    Returns:
        dict: {
            "data": grouped rows (shape depends on ``group_by``),
            "pagination": pagination details ({} when ``group_by`` is None
                or when ``top`` is used),
            "total_signup_users": total matching signups,
            "total_countries": number of distinct countries,
            "total_signups_from_bridges": signups whose source is "bridges",
            "countries": list of distinct country codes,
        }

    Raises:
        ValueError: On invalid group_by/granularity, non-positive top/page/
            page_size, or incompatible option combinations.
    """
    filters = filters or {}
    options = options or {}

    start_date = filters.get("start_date")
    end_date = filters.get("end_date")
    country_code = filters.get("country_code")

    granularity = options.get("granularity", "day")
    top = options.get("top")
    # Fix: read page/page_size WITHOUT eager defaults. Defaulting them to
    # 1/50 before validation made the mutual-exclusion checks below always
    # raise whenever 'top' was used or group_by was None. Defaults are
    # applied after validation instead.
    page = options.get("page")
    page_size = options.get("page_size")

    if group_by not in {None, "country", "date"}:
        raise ValueError("Invalid group_by value. Use 'country', 'date', or None.")

    if group_by == "date" and granularity not in {"day", "month"}:
        raise ValueError("Invalid granularity. Use 'day' or 'month'.")

    if top is not None:
        if not isinstance(top, int) or top <= 0:
            raise ValueError("'top' must be a positive integer.")
        if page is not None or page_size is not None:
            raise ValueError("'top' cannot be used with 'page' or 'page_size'.")

    if page is not None and (not isinstance(page, int) or page <= 0):
        raise ValueError("'page' must be a positive integer.")

    if page_size is not None and (not isinstance(page_size, int) or page_size <= 0):
        raise ValueError("'page_size' must be a positive integer.")

    if group_by is None and (
        top is not None or page is not None or page_size is not None
    ):
        raise ValueError(
            "Pagination ('top', 'page', 'page_size') is not allowed when group_by is None."
        )

    # Apply page-based defaults only when 'top' is not in effect.
    if top is None:
        page = page if page is not None else 1
        page_size = page_size if page_size is not None else 50

    query = Signups.select()
    if start_date:
        query = query.where(Signups.date_created >= start_date)
    if end_date:
        query = query.where(Signups.date_created <= end_date)
    if country_code:
        query = query.where(Signups.country_code == country_code)

    total_signup_users = query.select(fn.COUNT(Signups.id)).scalar()
    total_countries = query.select(fn.COUNT(fn.DISTINCT(Signups.country_code))).scalar()
    total_signups_from_bridges = (
        query.select(fn.COUNT(Signups.id)).where(Signups.source == "bridges").scalar()
    )

    countries_query = query.select(Signups.country_code.distinct())
    countries = [row.country_code for row in countries_query]

    if group_by is None:
        return {
            "data": [],
            "pagination": {},
            "total_signup_users": total_signup_users,
            "total_countries": total_countries,
            "total_signups_from_bridges": total_signups_from_bridges,
            "countries": countries,
        }

    if group_by == "country":
        query = (
            query.select(
                Signups.country_code, fn.COUNT(Signups.id).alias("signup_users")
            )
            .group_by(Signups.country_code)
            .order_by(fn.COUNT(Signups.id).desc())
        )
    else:  # group_by == "date"
        timeframe = Signups.date_created.truncate(granularity).alias("timeframe")
        query = (
            query.select(timeframe, fn.COUNT(Signups.id).alias("signup_users"))
            .group_by(timeframe)
            .order_by(timeframe.desc())
        )

    total_records = query.count()

    if top:
        query = query.limit(top)
    else:
        offset = (page - 1) * page_size
        query = query.limit(page_size).offset(offset)

    result = [
        (
            {
                "country_code": row.country_code,
                "signup_users": row.signup_users,
            }
            if group_by == "country"
            else {
                "timeframe": str(row.timeframe.date()),
                "signup_users": row.signup_users,
            }
        )
        for row in query
    ]

    # No page-based pagination to report when 'top' is used (mirrors
    # get_retained_users, which gates on "and not top").
    pagination = (
        {
            "page": page,
            "page_size": page_size,
            "total_pages": (total_records + page_size - 1) // page_size,
            "total_records": total_records,
        }
        if not top
        else {}
    )

    return {
        "data": result,
        "pagination": pagination,
        "total_signup_users": total_signup_users,
        "total_countries": total_countries,
        "total_signups_from_bridges": total_signups_from_bridges,
        "countries": countries,
    }
172 |
173 |
def get_retained_users(filters=None, group_by=None, options=None):
    """Retrieve retained user data based on specified filters and grouping.

    Retained users are Entity rows; the "with tokens" total additionally
    requires a joined Token row. Country codes are stored encrypted, so
    country filtering and country grouping decrypt each row in batches.

    Args:
        filters (dict, optional): Filtering options:
            - start_date (str): Start date for filtering records (YYYY-MM-DD).
            - end_date (str): End date for filtering records (YYYY-MM-DD).
            - country_code (str): Country code to filter results by.
        group_by (str, optional): "country", "date", or None (totals only).
        options (dict, optional): Additional options:
            - granularity (str): "day" or "month" for date grouping.
            - top (int): Number of top results to return (mutually exclusive
              with page/page_size).
            - page (int): Current page for paginated results (default 1).
            - page_size (int): Number of records per page (default 50).
            - batch_size (int): Batch size for decrypting rows (default 500).

    Returns:
        dict: data, pagination (empty when ``group_by`` is None or ``top``
        is used), total_retained_users, total_retained_users_with_tokens,
        total_countries, countries.

    Raises:
        ValueError: On invalid group_by/granularity, non-positive numeric
            options, or incompatible option combinations.
    """
    filters = filters or {}
    options = options or {}

    start_date = filters.get("start_date")
    end_date = filters.get("end_date")
    country_code = filters.get("country_code")

    granularity = options.get("granularity", "day")
    top = options.get("top")
    # Fix: read page/page_size WITHOUT eager defaults. Defaulting them to
    # 1/50 before validation made the mutual-exclusion checks below always
    # raise whenever 'top' was used or group_by was None. Defaults are
    # applied after validation instead.
    page = options.get("page")
    page_size = options.get("page_size")
    batch_size = options.get("batch_size", 500)

    if group_by not in {None, "country", "date"}:
        raise ValueError("Invalid group_by value. Use 'country', 'date', or None.")

    if group_by == "date" and granularity not in {"day", "month"}:
        raise ValueError("Invalid granularity. Use 'day' or 'month'.")

    if top is not None:
        if not isinstance(top, int) or top <= 0:
            raise ValueError("'top' must be a positive integer.")
        if page is not None or page_size is not None:
            raise ValueError("'top' cannot be used with 'page' or 'page_size'.")

    if page is not None and (not isinstance(page, int) or page <= 0):
        raise ValueError("'page' must be a positive integer.")

    if page_size is not None and (not isinstance(page_size, int) or page_size <= 0):
        raise ValueError("'page_size' must be a positive integer.")

    if group_by is None and (
        top is not None or page is not None or page_size is not None
    ):
        raise ValueError(
            "Pagination ('top', 'page', 'page_size') is not allowed when group_by is None."
        )

    # Apply page-based defaults only when 'top' is not in effect.
    if top is None:
        page = page if page is not None else 1
        page_size = page_size if page_size is not None else 50

    query = Entity.select()
    if start_date:
        query = query.where(Entity.date_created >= start_date)
    if end_date:
        query = query.where(Entity.date_created <= end_date)

    total_retained_users_with_tokens = (
        query.select(fn.COUNT(fn.DISTINCT(Entity.eid))).join(Token).scalar()
    )

    def decrypt_and_filter_generator(query, country_code, batch_size):
        # Stream rows batch-by-batch, decrypting each country code and
        # yielding only rows matching the (optional) country filter.
        batch_number = 1
        while True:
            batch_query = query.paginate(batch_number, batch_size)
            batch_results = list(batch_query)
            if not batch_results:
                break
            for row in batch_results:
                decrypted_code = decrypt_and_decode(row.country_code)
                if country_code is None or decrypted_code == country_code:
                    yield row, decrypted_code
            batch_number += 1

    decrypted_rows = decrypt_and_filter_generator(query, country_code, batch_size)

    total_retained_users = 0
    unique_countries = set()
    country_aggregates = defaultdict(int)

    for _, decrypted_country in decrypted_rows:
        total_retained_users += 1
        unique_countries.add(decrypted_country)
        if group_by == "country":
            country_aggregates[decrypted_country] += 1

    total_countries = len(unique_countries)
    result = []
    total_records = 0

    if group_by == "country":
        result = sorted(
            [
                {"country_code": k, "retained_users": v}
                for k, v in country_aggregates.items()
            ],
            key=lambda x: x["retained_users"],
            reverse=True,
        )
        total_records = len(result)
        if top:
            result = result[:top]
        else:
            start_idx = (page - 1) * page_size
            end_idx = start_idx + page_size
            result = result[start_idx:end_idx]

    elif group_by == "date":
        # NOTE(review): date grouping aggregates in SQL and does not apply
        # the decrypted country_code filter above — confirm whether country
        # filtering should also constrain this branch.
        timeframe = Entity.date_created.truncate(granularity).alias("timeframe")
        query = (
            query.select(timeframe, fn.COUNT(Entity.eid).alias("retained_users"))
            .group_by(timeframe)
            .order_by(timeframe.desc())
        )

        total_records = query.count()
        query = query.limit(top) if top else query.paginate(page, page_size)
        result = [
            {
                "timeframe": str(row.timeframe.date()),
                "retained_users": row.retained_users,
            }
            for row in query
        ]

    elif group_by is None:
        result = []

    pagination = (
        {
            "page": page,
            "page_size": page_size,
            "total_pages": (total_records + page_size - 1) // page_size,
            "total_records": total_records,
        }
        if group_by is not None and not top
        else {}
    )

    return {
        "data": result,
        "pagination": pagination,
        "total_retained_users": total_retained_users,
        "total_retained_users_with_tokens": total_retained_users_with_tokens,
        "total_countries": total_countries,
        "countries": list(unique_countries),
    }
344 |
--------------------------------------------------------------------------------
/supervisord.conf:
--------------------------------------------------------------------------------
; Supervisor configuration: runs the one-shot runtime setup task and three
; long-lived servers (REST API, public gRPC, internal gRPC) in one container.
; All program output is forwarded to the container's stdout (/dev/fd/1).

[supervisord]
childlogdir=/var/log/supervisor/
; Run in the foreground so supervisord can act as the container's main process.
nodaemon=true
; Disable supervisord's own log file; program output goes to stdout instead.
logfile=/dev/null
logfile_maxbytes=0
user=root

; One-shot setup step: started once at boot and never restarted.
[program:runtime_setup]
command=make runtime-setup
autostart=true
autorestart=false
; startsecs=0: the process may exit immediately and still count as started.
startsecs=0
; Only exit code 0 is treated as an expected (successful) exit.
exitcodes=0
; Send stdout (and stderr, via redirect_stderr) to the container's stdout.
stdout_logfile=/dev/fd/1
stdout_logfile_maxbytes=0
redirect_stderr=true

; REST API server; restarted automatically whenever it exits.
[program:rest_server]
command=make start-rest-api
autostart=true
autorestart=true
stdout_logfile=/dev/fd/1
stdout_logfile_maxbytes=0
redirect_stderr=true

; Public-facing gRPC server; restarted automatically whenever it exits.
[program:grpc_server]
command=make grpc-server-start
autostart=true
autorestart=true
stdout_logfile=/dev/fd/1
stdout_logfile_maxbytes=0
redirect_stderr=true

; Internal gRPC server; restarted automatically whenever it exits.
[program:grpc_internal_server]
command=make grpc-internal-server-start
autostart=true
autorestart=true
stdout_logfile=/dev/fd/1
stdout_logfile_maxbytes=0
redirect_stderr=true

--------------------------------------------------------------------------------
/template.env:
--------------------------------------------------------------------------------
# Template environment file: copy to .env and fill in the blank values.
# Comments are full-line only; do not append inline comments to values.

# Server Configuration
SSL_SERVER_NAME=localhost
HOST=localhost
# Plain-HTTP and TLS listen ports for the REST API.
PORT=19000
SSL_PORT=19001
# Paths to TLS certificate/key/PEM files (left empty in this template).
SSL_CERTIFICATE=
SSL_KEY=
SSL_PEM=

# gRPC Configuration
# Public gRPC server ports (plaintext and TLS).
GRPC_HOST=localhost
GRPC_PORT=8000
GRPC_SSL_PORT=8001
# Internal gRPC server ports (plaintext and TLS).
GRPC_INTERNAL_PORT=8443
GRPC_INTERNAL_SSL_PORT=8444

# Security Keys
# File names of key material used for encryption and hashing.
# NOTE(review): presumably resolved relative to the app's working directory
# or keystore — confirm against the loading code.
SHARED_KEY=encryption.key
HASHING_SALT=hashing.key

# Database Configuration
# MySQL connection settings; user/password intentionally blank in template.
MYSQL_HOST=127.0.0.1
MYSQL_USER=
MYSQL_PASSWORD=
MYSQL_DATABASE=relaysms_vault
# Path to the SQLite database file (appears to be a fallback/local option).
SQLITE_DATABASE_PATH=vault.db

# Twilio Configuration
# Credentials for sending OTPs via Twilio; blank in template.
TWILIO_ACCOUNT_SID=
TWILIO_AUTH_TOKEN=
TWILIO_SERVICE_SID=
TWILIO_PHONE_NUMBER=

# OTP
# When true, OTP delivery is mocked (no real SMS sent) — presumably for
# local development; verify in src/otp_service.py.
MOCK_OTP=true

# CORS
# Allowed CORS origins; looks like a JSON-style list — confirm the parser.
ORIGINS=[]

# Keystore
# Directories holding keystore material, including static X25519 keys.
KEYSTORE_PATH=keystore
STATIC_X25519_KEYSTORE_PATH=keystore/static_x25519

# Logging
# Log verbosity (e.g. debug/info/warning — confirm accepted values).
LOG_LEVEL=info

# Dummy Data
# Test account credentials (comma-separated numbers? confirm against usage).
DUMMY_PHONENUMBERS=+237123456789
DUMMY_PASSWORD=dummy_password

--------------------------------------------------------------------------------
/tests/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/smswithoutborders/RelaySMS-Vault/10f70b1b0fde90c225789efb804743736728de63/tests/__init__.py
--------------------------------------------------------------------------------
/tests/grpc_test_cases.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | if ! command -v grpcurl &>/dev/null; then
4 | echo "Error: grpcurl is not installed. Please install grpcurl and try again."
5 | exit 1
6 | fi
7 |
8 | GRPC_SERVER="localhost:8000"
9 | PROTO_FILE="../protos/v1/vault.proto"
10 |
11 | client_publish_pub_key="ND/VGAu7SjxWvcEH7zSctDqnfEG6YibqBWSXmjbFYFQ="
12 | client_device_id_pub_key="ND/VGAu7SjxWvcEH7zSctDqnfEG6YibqBWSXmjbFYFQ="
13 |
14 | read -rp "Enter phone number: " phone_number
15 | read -rsp "Enter password: " password
16 | echo
17 |
18 | echo "Testing AuthenticateEntity..."
19 |
20 | grpcurl -plaintext \
21 | -d @ \
22 | -proto "$PROTO_FILE" \
23 | "$GRPC_SERVER" "vault.v1.Entity/AuthenticateEntity" <