├── .env ├── .github ├── dependabot.yml └── workflows │ └── staging-deploy.yml ├── .gitignore ├── Dockerfile ├── LICENSE ├── Makefile ├── README.md ├── apache.conf ├── docker-compose.yml ├── docs ├── api_v3.md ├── gateway_clients_cli.md └── reliability_tests_cli.md ├── gc_cli.py ├── logutils.py ├── main.py ├── mccmnc.py ├── migrations ├── README.md ├── run.py └── v0.1.1.json ├── requirements.txt ├── rt_cli.py ├── sockets ├── .gitignore ├── Dockerfile ├── ip_grap.py ├── requirements.txt └── sync_sockets.py ├── src ├── __init__.py ├── aes.py ├── api_v2.py ├── api_v3.py ├── bridge_server_grpc_client.py ├── db.py ├── ftp_server.py ├── gateway_clients.py ├── grpc_publisher_client.py ├── imap_listener.py ├── keypairs.py ├── main.py ├── models.py ├── notifications.py ├── payload_service.py ├── process_incoming_messages.py ├── publisher.py ├── reliability_tests.py ├── rmq_broker.py ├── router.py ├── rsa.py ├── sync.py ├── users.py ├── users_entity.py ├── utest.py └── utils.py ├── supervisord.conf └── test ├── .gitignore ├── Handshake.sh ├── useless_private_key.key └── useless_public_key.pub /.env: -------------------------------------------------------------------------------- 1 | MYSQL_USER=root 2 | 3 | MYSQL_DATABASE=SMSWithoutBorders_Database_1 4 | 5 | FRONT_END_PORT=80 6 | 7 | BACKEND_SERVER_PORT=9001 8 | 9 | BACKEND_SERVER_PORT_SSL=9000 10 | 11 | GATEWAY_SERVER_SYNC_SOCKET_PORT=15001 12 | 13 | GATEWAY_SERVER_PORT=15000 14 | 15 | PUBLISHER_PORT=13000 16 | 17 | PATH_FRONT_END=repos/front-end/ 18 | 19 | PATH_BACK_END=repos/back-end/ 20 | 21 | PATH_GATEWAY_SERVER=repos/gateway-server/ 22 | 23 | PATH_PUBLISHER=repos/publisher/ 24 | 25 | PATH_RMQ=repos/rabbitmq/ 26 | 27 | ENABLE_RECAPTCHA=false 28 | 29 | SSL_PATH=ssl/ 30 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | - package-ecosystem: "docker" 4 | 
directory: "/" 5 | target-branch: "staging" 6 | schedule: 7 | interval: "weekly" 8 | open-pull-requests-limit: 99 9 | labels: 10 | - "dependencies" 11 | - package-ecosystem: "pip" 12 | directory: "/" 13 | target-branch: "staging" 14 | schedule: 15 | interval: "weekly" 16 | open-pull-requests-limit: 99 17 | allow: 18 | - dependency-type: "direct" 19 | - dependency-type: "indirect" 20 | labels: 21 | - "dependencies" 22 | groups: 23 | production-dependencies: 24 | dependency-type: "production" 25 | patterns: 26 | - "*" 27 | development-dependencies: 28 | dependency-type: "development" 29 | patterns: 30 | - "*" 31 | -------------------------------------------------------------------------------- /.github/workflows/staging-deploy.yml: -------------------------------------------------------------------------------- 1 | name: Staging Server Build Pipeline 2 | 3 | on: 4 | push: 5 | branches: 6 | - staging 7 | 8 | jobs: 9 | deploy: 10 | name: Deploy to Staging Server 11 | runs-on: ubuntu-latest 12 | environment: 13 | name: staging 14 | steps: 15 | - name: Execute Remote SSH Commands 16 | uses: appleboy/ssh-action@master 17 | with: 18 | host: ${{ secrets.HOST }} 19 | username: ${{ secrets.USERNAME }} 20 | key: ${{ secrets.KEY }} 21 | script: | 22 | set -e 23 | 24 | echo "============================" 25 | echo "Updating repository ..." 26 | echo "============================" 27 | if ! assembler clone --branch staging --project gateway_server; then 28 | echo "❌ Error updating repository!" 29 | exit 1 30 | fi 31 | echo "===============================" 32 | echo "✅ Repository update complete" 33 | echo "===============================" 34 | 35 | echo "=========================" 36 | echo "Building project ..." 37 | echo "=========================" 38 | if ! assembler deploy --project gateway_server; then 39 | echo "❌ Error building project!" 
40 | exit 1 41 | fi 42 | echo "===========================" 43 | echo "✅ Project build complete" 44 | echo "===========================" 45 | 46 | echo "=============================" 47 | echo "Cleaning up staging builds ..." 48 | echo "=============================" 49 | if ! ${{ secrets.CLEANUP_CMD }}; then 50 | echo "❌ Error cleaning up builds!" 51 | exit 1 52 | fi 53 | echo "=============================" 54 | echo "✅ Cleanup complete" 55 | echo "=============================" 56 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | config.ini 2 | default.js 3 | venv/* 4 | __pycache__/* 5 | */__pycache__/* 6 | .idea/* 7 | *.sw* 8 | */*.sw* 9 | security/*.pem 10 | **/__pycache__/* 11 | tmp/* 12 | *.pem 13 | **.pem 14 | data 15 | *.sh 16 | repos/ 17 | mccmnc.json 18 | # Byte-compiled / optimized / DLL files 19 | __pycache__/ 20 | *.py[cod] 21 | *$py.class 22 | 23 | # C extensions 24 | *.so 25 | 26 | # Distribution / packaging 27 | .Python 28 | build/ 29 | develop-eggs/ 30 | dist/ 31 | downloads/ 32 | eggs/ 33 | .eggs/ 34 | lib/ 35 | lib64/ 36 | parts/ 37 | sdist/ 38 | var/ 39 | wheels/ 40 | share/python-wheels/ 41 | *.egg-info/ 42 | .installed.cfg 43 | *.egg 44 | MANIFEST 45 | 46 | # PyInstaller 47 | # Usually these files are written by a python script from a template 48 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
49 | *.manifest 50 | *.spec 51 | 52 | # Installer logs 53 | pip-log.txt 54 | pip-delete-this-directory.txt 55 | 56 | # Unit test / coverage reports 57 | htmlcov/ 58 | .tox/ 59 | .nox/ 60 | .coverage 61 | .coverage.* 62 | .cache 63 | nosetests.xml 64 | coverage.xml 65 | *.cover 66 | *.py,cover 67 | .hypothesis/ 68 | .pytest_cache/ 69 | cover/ 70 | 71 | # Translations 72 | *.mo 73 | *.pot 74 | 75 | # Django stuff: 76 | *.log 77 | local_settings.py 78 | db.sqlite3 79 | db.sqlite3-journal 80 | 81 | # Flask stuff: 82 | instance/ 83 | .webassets-cache 84 | 85 | # Scrapy stuff: 86 | .scrapy 87 | 88 | # Sphinx documentation 89 | docs/_build/ 90 | 91 | # PyBuilder 92 | .pybuilder/ 93 | target/ 94 | 95 | # Jupyter Notebook 96 | .ipynb_checkpoints 97 | 98 | # IPython 99 | profile_default/ 100 | ipython_config.py 101 | 102 | # pyenv 103 | # For a library or package, you might want to ignore these files since the code is 104 | # intended to run in multiple environments; otherwise, check them in: 105 | # .python-version 106 | 107 | # pipenv 108 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 109 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 110 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 111 | # install all needed dependencies. 112 | #Pipfile.lock 113 | 114 | # poetry 115 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. 116 | # This is especially recommended for binary packages to ensure reproducibility, and is more 117 | # commonly ignored for libraries. 118 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control 119 | #poetry.lock 120 | 121 | # pdm 122 | # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. 
123 | #pdm.lock 124 | # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it 125 | # in version control. 126 | # https://pdm.fming.dev/latest/usage/project/#working-with-version-control 127 | .pdm.toml 128 | .pdm-python 129 | .pdm-build/ 130 | 131 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm 132 | __pypackages__/ 133 | 134 | # Celery stuff 135 | celerybeat-schedule 136 | celerybeat.pid 137 | 138 | # SageMath parsed files 139 | *.sage.py 140 | 141 | # Environments 142 | .env 143 | .env.* 144 | .venv 145 | env/ 146 | venv/ 147 | ENV/ 148 | env.bak/ 149 | venv.bak/ 150 | 151 | # Spyder project settings 152 | .spyderproject 153 | .spyproject 154 | 155 | # Rope project settings 156 | .ropeproject 157 | 158 | # mkdocs documentation 159 | /site 160 | 161 | # mypy 162 | .mypy_cache/ 163 | .dmypy.json 164 | dmypy.json 165 | 166 | # Pyre type checker 167 | .pyre/ 168 | 169 | # pytype static type analyzer 170 | .pytype/ 171 | 172 | # Cython debug symbols 173 | cython_debug/ 174 | 175 | # PyCharm 176 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can 177 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore 178 | # and can be added to the global gitignore or merged into this file. For a more nuclear 179 | # option (not recommended) you can uncomment the following to ignore the entire idea folder. 
180 | #.idea/ 181 | 182 | # protoc 183 | *_pb2_grpc.py* 184 | *_pb2.py* 185 | 186 | # Ignore SQLite database files 187 | *.sqlite 188 | *.sqlite3 189 | *.db 190 | 191 | # Ignore all .proto files 192 | *.proto 193 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM python:3.13.4-slim AS base 2 | 3 | WORKDIR /gateway_server 4 | 5 | RUN apt-get update && \ 6 | apt-get install -y --no-install-recommends \ 7 | build-essential \ 8 | apache2 \ 9 | apache2-dev \ 10 | default-libmysqlclient-dev \ 11 | supervisor \ 12 | git \ 13 | curl \ 14 | pkg-config && \ 15 | apt-get clean && \ 16 | rm -rf /var/lib/apt/lists/* 17 | 18 | COPY requirements.txt . 19 | RUN --mount=type=cache,target=/root/.cache/pip \ 20 | pip install --disable-pip-version-check --quiet --no-cache-dir -r requirements.txt 21 | 22 | COPY . . 23 | COPY supervisord.conf /etc/supervisor/conf.d/supervisord.conf 24 | 25 | ENV MODE=production 26 | CMD ["supervisord", "-n", "-c", "/etc/supervisor/conf.d/supervisord.conf"] 27 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | python=python3 2 | PROTO_DIR=protos/v1 3 | CURRENT_BRANCH=$(shell git branch --show-current) 4 | 5 | define log_message 6 | @echo "[$(shell date +'%Y-%m-%d %H:%M:%S')] - $1" 7 | endef 8 | 9 | define download-proto 10 | $(call log_message,INFO - Downloading $(PROTO_URL) to $@ ...) 11 | @mkdir -p $(dir $@) && \ 12 | curl -o $@ -L $(PROTO_URL) 13 | $(call log_message,INFO - $@ downloaded successfully!) 
14 | endef 15 | 16 | $(PROTO_DIR)/%.proto: 17 | $(eval PROTO_URL := $(PROTO_URL)) 18 | $(call download-proto) 19 | 20 | setup: grpc-compile start-rest-api 21 | 22 | publisher-proto: 23 | @rm -f "$(PROTO_DIR)/publisher.proto" 24 | @$(MAKE) PROTO_URL=https://raw.githubusercontent.com/smswithoutborders/RelaySMS-Publisher/$(CURRENT_BRANCH)/protos/v1/publisher.proto \ 25 | $(PROTO_DIR)/publisher.proto 26 | 27 | bridge-proto: 28 | @rm -f "$(PROTO_DIR)/bridge.proto" 29 | @$(MAKE) PROTO_URL=https://raw.githubusercontent.com/smswithoutborders/RelaySMS-Bridge-Server/$(CURRENT_BRANCH)/protos/v1/bridge.proto \ 30 | $(PROTO_DIR)/bridge.proto 31 | 32 | grpc-compile: publisher-proto bridge-proto 33 | $(call log_message,INFO - Compiling gRPC protos ...) 34 | @$(python) -m grpc_tools.protoc \ 35 | -I$(PROTO_DIR) \ 36 | --python_out=. \ 37 | --pyi_out=. \ 38 | --grpc_python_out=. \ 39 | $(PROTO_DIR)/*.proto 40 | $(call log_message,INFO - gRPC Compilation complete!) 41 | 42 | start-rest-api: 43 | @(\ 44 | echo "[$(shell date +'%Y-%m-%d %H:%M:%S')] - INFO - Starting REST API with TLS ..." 
&& \ 45 | gunicorn -w 4 -b 0.0.0.0:'${SSL_PORT}' \ 46 | --log-level=info \ 47 | --access-logfile=- \ 48 | --certfile='${SSL_CERTIFICATE}' \ 49 | --keyfile='${SSL_KEY}' \ 50 | --threads 15 \ 51 | --timeout 30 \ 52 | main:app; \ 53 | ) 54 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # SMSWithoutBorders Gateway Server 2 | 3 | ## API References 4 | 5 | - [API Version 3](/docs/api_v3.md) 6 | 7 | ## Requirements 8 | 9 | - [MySQL](https://www.mysql.com/) (version >= 8.0.28) 10 | ([MariaDB](https://mariadb.org/)) 11 | - [Python](https://www.python.org/) (version >= 12 | [3.8.10](https://www.python.org/downloads/release/python-3810/)) 13 | - [Python Virtual Environments](https://docs.python.org/3/tutorial/venv.html) 14 | 15 | ## Dependencies 16 | 17 | On Ubuntu, install the following dependencies: 18 | 19 | ```bash 20 | sudo apt install python3-dev libmysqlclient-dev apache2 apache2-dev make libapache2-mod-wsgi-py3 21 | ``` 22 | 23 | > [!NOTE] 24 | > The gateway server has strong dependencies on the 25 | > [Backend](https://github.com/smswithoutborders/SMSwithoutborders-BE) 26 | > User Databases.
27 | 28 | ## Linux Environment Variables 29 | 30 | Variables used for the Project: 31 | 32 | - MYSQL_HOST 33 | - MYSQL_USER 34 | - MYSQL_PASSWORD 35 | - MYSQL_DATABASE 36 | - SHARED_KEY 37 | - HASHING_SALT 38 | - ORIGINS 39 | - HOST 40 | - PORT 41 | - RMQ_HOST 42 | - RABBITMQ_DEFAULT_USER 43 | - RABBITMQ_DEFAULT_PASS 44 | - IMAP_SERVER 45 | - IMAP_PORT 46 | - IMAP_USERNAME 47 | - IMAP_PASSWORD 48 | - MAIL_FOLDER 49 | - FTP_USERNAME 50 | - FTP_PASSWORD 51 | - FTP_IP_ADDRESS 52 | - FTP_PORT 53 | - FTP_PASSIVE_PORTS 54 | - FTP_READ_LIMIT 55 | - FTP_WRITE_LIMIT 56 | - FTP_MAX_CON 57 | - FTP_MAX_CON_PER_IP 58 | - FTP_DIRECTORY 59 | - DEKU_CLOUD_URL 60 | - DEKU_CLOUD_PROJECT_REF 61 | - DEKU_CLOUD_SERVICE_ID 62 | - DEKU_CLOUD_ACCOUNT_SID 63 | - DEKU_CLOUD_AUTH_TOKEN 64 | - SSL_CERTIFICATE 65 | - SSL_KEY 66 | 67 | ## Installation 68 | 69 | ### Clone the Repository 70 | 71 | Clone the SMSWithoutBorders Gateway Server repository from GitHub: 72 | 73 | ```bash 74 | git clone https://github.com/smswithoutborders/SMSWithoutBorders-Gateway-Server.git 75 | cd SMSWithoutBorders-Gateway-Server 76 | ``` 77 | 78 | Install all Python packages: 79 | 80 | ### Pip 81 | 82 | ```bash 83 | python3 -m venv venv 84 | source venv/bin/activate 85 | pip install -r requirements.txt 86 | ``` 87 | 88 | ### Build and Run with Docker 89 | 90 | 1. **Build Docker Image:** 91 | 92 | Ensure you have Docker installed on your system. Then, navigate to the root 93 | directory of the cloned repository and run the following command to build the 94 | Docker image: 95 | 96 | ```bash 97 | docker build -t smswithoutborders-gateway-server . 98 | ``` 99 | 100 | Replace `smswithoutborders-gateway-server` with your desired image name. 101 | 102 | 2. 
**Run Docker Container:** 103 | 104 | After the image is built, run a Docker container using the following command: 105 | 106 | ```bash 107 | docker run -d -p 5000:5000 --name gateway-server smswithoutborders-gateway-server 108 | ``` 109 | 110 | Adjust the port mapping (`-p`) and container name (`--name`) as needed. 111 | 112 | 3. **Verify Container:** 113 | 114 | Verify that the container is running by checking its status: 115 | 116 | ```bash 117 | docker ps 118 | ``` 119 | 120 | This should display the running containers, including the SMSWithoutBorders 121 | Gateway Server container. 122 | 123 | ## Running 124 | 125 | For quicker development, you can integrate the 126 | [BE Dependencies](https://github.com/smswithoutborders/SMSwithoutborders-BE) 127 | databases. 128 | 129 | > In cases where the BE Database and Gateway server share the same database: 130 | 131 | ```bash 132 | MYSQL_HOST=host \ 133 | MYSQL_PORT=port \ 134 | MYSQL_USERNAME=username \ 135 | MYSQL_DATABASE=dbname \ 136 | flask --debug --app src.main run 137 | ``` 138 | 139 | > In cases where the BE Database and Gateway server don't share the same 140 | > database: 141 | 142 | ```bash 143 | MYSQL_HOST=host \ 144 | MYSQL_PORT=port \ 145 | MYSQL_USERNAME=username \ 146 | MYSQL_DATABASE=dbname \ 147 | MYSQL_BE_HOST=host \ 148 | MYSQL_BE_PORT=port \ 149 | MYSQL_BE_USERNAME=username \ 150 | MYSQL_BE_DATABASE=dbname \ 151 | flask --debug --app src.main run 152 | ``` 153 | 154 | ## Use cases 155 | 156 | **Synchronization** 157 | 158 | Synchronization prepares the app for secured conversation using shared keys. 159 | 160 | **Synchronization flow** 161 | 162 | 1. Begin by requesting a new session: `GET //sync/users/` 163 | 164 | This returns a URL string, which can be connected to by websocket clients. 165 | Users can begin communicating with this returned URL or scan them through the 166 | QR scan function in the app. 
The frequency of change of the synchronization 167 | URLs depends on the configuration settings (defaults = 15 seconds). 168 | 169 | The total number of changes per frequency can be changed (defaults = 3 170 | times). 171 | 172 | Response: 173 | 174 | - `200`: session created 175 | - `500`: some error occurred, check debug logs 176 | 177 | 2. Once a sync URL is connected and begins processing, the websocket sends a 178 | pause text `201- pause`. The user begins authenticating themselves and adding 179 | their security policies to their record on the server. 180 | 181 | 3. Once the user has performed the necessary handshake and the information 182 | exchange has begun, the websocket sends an acknowledgment text `200- ack`. 183 | 184 | **Reliability Tests** 185 | 186 | The Reliability Tests CLI (`rt_cli`) can be used 187 | to trigger and view reliability tests for gateway clients. Refer to the 188 | [Reliability Tests CLI documentation](/docs/reliability_tests_cli.md) for usage and 189 | installation instructions. 190 | 191 | **Gateway Clients Management** 192 | 193 | The Gateway Clients CLI (`gc_cli`) provides 194 | functionality to create, view, and update gateway client records. Check 195 | out the [Gateway Clients CLI documentation](/docs/gateway_clients_cli.md) for more 196 | details on usage and installation. 
197 | 198 | ## Testing 199 | 200 | - Testing [Users model](gateway_server/users.py): 201 | 202 | ```bash 203 | python -m unittest gateway_server/test/UTestUsers.py 204 | ``` 205 | 206 | - Testing [WebSockets](gateway_server/sessions_websocket.py): 207 | 208 | Install [websocat](https://github.com/vi/websocat) and 209 | [jq](https://stedolan.github.io/jq/): 210 | 211 | _Manjaro:_ 212 | 213 | ```bash 214 | sudo pacman -S websocat jq 215 | ``` 216 | 217 | Test websocket: 218 | 219 | ```bash 220 | websocat ws://localhost:6996/v2/sync/init/111/000 221 | ``` 222 | 223 | - Testing [RSA Encryption/Decryption](test/security_rsa.py): This will require 224 | pem files. Copy them into the test/ directory to allow the test run. 225 | 226 | ```bash 227 | python -m unittest test/security_rsa.py 228 | ``` 229 | 230 | - Testing [Entire Handshake process](test/handshake.py): This will require pem 231 | files. Copy them into the test/ directory to allow the test run. 232 | 233 | ```bash 234 | ./test/handshake.sh 235 | ``` 236 | 237 | ## Scripts 238 | 239 | ### FTP Server 240 | 241 | ```bash 242 | MYSQL_HOST= \ 243 | MYSQL_USER= \ 244 | MYSQL_PASSWORD= \ 245 | MYSQL_DATABASE= \ 246 | FTP_USERNAME= \ 247 | FTP_PASSWORD= \ 248 | FTP_IP_ADDRESS= \ 249 | FTP_PORT= \ 250 | FTP_PASSIVE_PORTS= \ 251 | FTP_READ_LIMIT= \ 252 | FTP_WRITE_LIMIT= \ 253 | FTP_MAX_CON= \ 254 | FTP_MAX_CON_PER_IP= \ 255 | FTP_DIRECTORY= \ 256 | SSL_CERTIFICATE= \ 257 | SSL_KEY= \ 258 | python3 -m src.ftp_server 259 | ``` 260 | 261 | ### IMAP Listener 262 | 263 | ```bash 264 | MYSQL_HOST= \ 265 | MYSQL_USER= \ 266 | MYSQL_PASSWORD= \ 267 | MYSQL_DATABASE= \ 268 | IMAP_SERVER= \ 269 | IMAP_PORT= \ 270 | IMAP_USERNAME= \ 271 | IMAP_PASSWORD= \ 272 | MAIL_FOLDER= \ 273 | SSL_CERTIFICATE= \ 274 | SSL_KEY= \ 275 | python3 -m src.imap_listener 276 | ``` 277 | -------------------------------------------------------------------------------- /apache.conf: 
-------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | Order deny,allow 5 | Deny from all 6 | 7 | = 2.4> 8 | Require all granted 9 | 10 | 11 | Allow from localhost 12 | 13 | 14 | 15 | -------------------------------------------------------------------------------- /docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: '3.9' 2 | 3 | services: 4 | rmq: 5 | build: 6 | context: ./${PATH_RMQ} 7 | dockerfile: Dockerfile 8 | ports: 9 | - "15671:15671" 10 | - "15672:15672" 11 | - "5671:5671" 12 | - "5672:5672" 13 | environment: 14 | listeners.ssl.default = 5671 15 | 16 | ssl_options.cacertfile=${SSL_PEM:?err} 17 | ssl_options.certfile=${SSL_CERTIFICATE:?err} 18 | ssl_options.keyfile=${SSL_KEY:?err} 19 | ssl_options.verify=verify_peer 20 | ssl_options.fail_if_no_peer_cert=true 21 | 22 | mysql: 23 | image: mariadb:10.5 24 | ports: 25 | - "3307:3306" 26 | restart: on-failure 27 | command: --default-authentication-plugin=mysql_native_password 28 | environment: 29 | MYSQL_ROOT_PASSWORD: ${MYSQL_PASSWORD:?err} 30 | MYSQL_HOST: 127.0.0.1 31 | healthcheck: 32 | test: "mysql -u${MYSQL_USER} -p${MYSQL_PASSWORD:?err} -e \"show databases\"" 33 | interval: 1s 34 | retries: 120 35 | 36 | gateway-server: 37 | 38 | depends_on: 39 | mysql: 40 | condition: service_healthy 41 | volumes: 42 | - "${SSL_FILE_PATH:?err}:${SSL_FILE_PATH}" 43 | ports: 44 | - "${GATEWAY_SERVER_SSL_PORT:?err}:${GATEWAY_SERVER_SSL_PORT}" 45 | build: . 
46 | environment: 47 | - HOST=${GATEWAY_SERVER_HOST:?err} 48 | - PORT=${GATEWAY_SERVER_PORT:?err} 49 | 50 | - SSL_PORT=${GATEWAY_SERVER_SSL_PORT:?err} 51 | - SSL_CERTIFICATE=${SSL_CERTIFICATE:?err} 52 | - SSL_KEY=${SSL_KEY:?err} 53 | - SSL_PEM=${SSL_PEM:?err} 54 | 55 | 56 | - MYSQL_HOST=mysql 57 | - MYSQL_USER=${MYSQL_USER:?err} 58 | - MYSQL_PASSWORD=${MYSQL_PASSWORD:?err} 59 | - MYSQL_DATABASE=${MYSQL_DATABASE:?err} 60 | - SOCK_PORT=${GATEWAY_SERVER_SYNC_SOCKET_PORT:?err} 61 | - RSA_PR_KEY=${SSL_KEY:?err} 62 | 63 | - MYSQL_BE_HOST=mysql 64 | - MYSQL_BE_PASSWORD=${MYSQL_PASSWORD:?err} 65 | - MYSQL_BE_DATABASE=${MYSQL_DATABASE:?err} 66 | 67 | - RMQ_HOST=${RMQ_HOST:-rmq} 68 | - RMQ_SSL=${RMQ_SSL:-false} 69 | 70 | # sync_sockets: 71 | # depends_on: 72 | # - gateway_server 73 | # ports: 74 | # - "15001:15001" 75 | # build: ./sockets/ 76 | # environment: 77 | # HOST: ${HOST} 78 | # PORT: ${PORT} 79 | # SOCK_PORT: ${SOCK_PORT} 80 | # SOCK_HOST: ${SOCK_HOST} 81 | -------------------------------------------------------------------------------- /docs/api_v3.md: -------------------------------------------------------------------------------- 1 | # API v3 Documentation 2 | 3 | ## Table of Contents 4 | 5 | - [Base URL](#base-url) 6 | - [Security Headers](#security-headers) 7 | - [Endpoints](#endpoints) 8 | - [Get Gateway Clients](#get-gateway-clients) 9 | - [Get Tests for a Gateway Client](#get-tests-for-a-gateway-client) 10 | - [Get All Countries](#get-all-countries) 11 | - [Get Operators for a Country](#get-operators-for-a-country) 12 | 13 | ## Base URL 14 | 15 | All endpoints in this API have the base URL: `/v3` 16 | 17 | ### Security Headers 18 | 19 | All responses from this API include the following security headers: 20 | 21 | - `Strict-Transport-Security`: Ensures that browsers will only connect to the 22 | server over HTTPS. 23 | - `X-Content-Type-Options`: Prevents browsers from MIME-sniffing a response away 24 | from the declared content type.
25 | - `Content-Security-Policy`: Helps prevent XSS attacks by restricting the 26 | sources of content that can be loaded on a web page. 27 | - `Referrer-Policy`: Specifies how much referrer information should be included 28 | with requests. 29 | - `Cache-Control`: Directs caches not to store the response. 30 | - `Permissions-Policy`: Defines the permissions the site requires to function 31 | correctly. 32 | 33 | ## Endpoints 34 | 35 | ### Get Gateway Clients 36 | 37 | ```http 38 | GET /v3/clients?country=cameroon&operator=operator_name&protocols=https,smtp,ftp&last_published_date=2024-05-27:00:00Z&per_page=20&page=2 39 | ``` 40 | 41 | #### Description 42 | 43 | Get gateway clients with optional filters. 44 | 45 | #### Parameters 46 | 47 | - `country` (optional): Filter by country. 48 | - `operator` (optional): Filter by operator. 49 | - `protocols` (optional): Filter by protocols. 50 | - `last_published_date` (optional): Filter by last published date. Format: 51 | YYYY-MM-DD 52 | - `page` (optional): Page number for pagination (default: 1). 53 | - `per_page` (optional): Number of results per page (default: 10). 54 | 55 | #### Response 56 | 57 | ```json 58 | [ 59 | { 60 | "country": "Cameroon", 61 | "last_published_date": 1714846064, 62 | "msisdn": "+xxxxxxxxx", 63 | "operator": "OPERATOR", 64 | "operator_code": "xxxxxx", 65 | "protocols": ["https", "smtp", "ftp"], 66 | "reliability": "0.00" 67 | } 68 | ] 69 | ``` 70 | 71 | > [!NOTE] 72 | > 73 | > - `last_published_date` field is in 74 | > [unix time](https://en.wikipedia.org/wiki/Unix_time). 75 | > - `reliability` field represents the reliability of the gateway client as a 76 | > percentage. 77 | 78 | #### Errors 79 | 80 | - `400 Bad Request`: If the request is malformed. 81 | - `500 Internal Server Error`: If an unexpected error occurs. 82 | 83 | #### Additional Headers 84 | 85 | - `X-Total-Count`: Total number of records. 86 | - `X-Page`: Current page number. 87 | - `X-Per-Page`: Number of records per page. 
88 | - `Link`: Provides links for pagination. Refer to GitHub's 89 | [comprehensive documentation](https://docs.github.com/en/rest/using-the-rest-api/using-pagination-in-the-rest-api?apiVersion=2022-11-28#using-link-headers) 90 | on how to use link headers. 91 | 92 | ### Get Tests for a Gateway Client 93 | 94 | ```http 95 | GET /v3/clients//tests?per_page=20&page=2 96 | ``` 97 | 98 | #### Description 99 | 100 | Get reliability tests for a specific gateway client with optional filters. 101 | 102 | #### Parameters 103 | 104 | - `page` (optional): Page number for pagination (default: 1). 105 | - `per_page` (optional): Number of results per page (default: 10). 106 | 107 | #### Response 108 | 109 | ```json 110 | { 111 | "data": [ 112 | { 113 | "id": 11, 114 | "msisdn": "+xxxxxxxxx", 115 | "sms_received_time": 1747691895, 116 | "sms_routed_time": 1747691895, 117 | "sms_sent_time": 1747691895, 118 | "start_time": 1747691894, 119 | "status": "success" 120 | }, 121 | { 122 | "id": 10, 123 | "msisdn": "+xxxxxxxxx", 124 | "sms_received_time": 1747691894, 125 | "sms_routed_time": 1747691894, 126 | "sms_sent_time": 1747691894, 127 | "start_time": 1747691893, 128 | "status": "success" 129 | } 130 | // ...more test objects... 131 | ], 132 | "total_failed": "1", 133 | "total_records": 11, 134 | "total_success": "10" 135 | } 136 | ``` 137 | 138 | > [!NOTE] 139 | > 140 | > - `sms_received_time`, `sms_routed_time`, `sms_sent_time`, and `start_time` 141 | > fields are in [unix time](https://en.wikipedia.org/wiki/Unix_time). 142 | > - `status` field for the tests has two values: `"success"` or `"timedout"`. 143 | 144 | #### Errors 145 | 146 | - `400 Bad Request`: If the request is malformed. 147 | - `404 Not Found`: If the requested resource is not found. 148 | - `500 Internal Server Error`: If an unexpected error occurs. 149 | 150 | #### Additional Headers 151 | 152 | - `X-Total-Count`: Total number of records. 153 | - `X-Page`: Current page number. 
154 | - `X-Per-Page`: Number of records per page. 155 | - `Link`: Provides links for pagination. Refer to GitHub's 156 | [comprehensive documentation](https://docs.github.com/en/rest/using-the-rest-api/using-pagination-in-the-rest-api?apiVersion=2022-11-28#using-link-headers) 157 | on how to use link headers. 158 | 159 | ### Get All Countries 160 | 161 | ```http 162 | GET /v3/clients/countries 163 | ``` 164 | 165 | #### Description 166 | 167 | Get all countries for gateway clients. 168 | 169 | #### Response 170 | 171 | ```json 172 | ["Cameroon", "Example Country"] 173 | ``` 174 | 175 | #### Errors 176 | 177 | - `500 Internal Server Error`: If an unexpected error occurs. 178 | 179 | ### Get Operators for a Country 180 | 181 | ```http 182 | GET /v3/clients//operators 183 | ``` 184 | 185 | #### Description 186 | 187 | Get all operators for a specific country. 188 | 189 | #### Parameters 190 | 191 | - `country`: Country name. 192 | 193 | #### Response 194 | 195 | ```json 196 | ["Operator A", "Operator B"] 197 | ``` 198 | 199 | #### Errors 200 | 201 | - `400 Bad Request`: If the country parameter is missing. 202 | - `500 Internal Server Error`: If an unexpected error occurs. 203 | 204 | ### Publish Payload 205 | 206 | ```http 207 | POST /v3/publish 208 | ``` 209 | 210 | #### Description 211 | 212 | Publishes content payload to either the **bridge server** or directly to the **publisher**, depending on the content of the payload. 213 | 214 | #### Request Body 215 | 216 | ```json 217 | { 218 | "text": "base64_encoded_payload", 219 | "MSISDN": "+237123456789" 220 | } 221 | ``` 222 | 223 | - **text**: The Base64-encoded payload to be published. 224 | 225 | - If the first byte of the decoded payload is `0`, the system will treat it as a **bridge server** payload. The remaining bytes (after the first byte) will be forwarded to the bridge server. 
226 | 227 | ```python 228 | publish_payload = bytes([0]) + b"encrypted content" 229 | base64_encoded_payload = base64.b64encode(publish_payload).decode("utf-8") 230 | ``` 231 | 232 | - If the first byte is not `0`, the system will treat it as a regular payload and send the entire payload directly to the **publisher**. 233 | 234 | - **MSISDN** or **address**: Required field specifying the sender's phone number. 235 | 236 | #### Response 237 | 238 | ```json 239 | { 240 | "publisher_response": "response message from publisher or bridge" 241 | } 242 | ``` 243 | 244 | - **publisher_response**: Returns the response message based on the type of publishing (bridge server or publisher). 245 | 246 | #### Errors 247 | 248 | - `400 Bad Request`: If the request is malformed. 249 | - `500 Internal Server Error`: If an unexpected error occurs. 250 | -------------------------------------------------------------------------------- /docs/gateway_clients_cli.md: -------------------------------------------------------------------------------- 1 | # Gateway Clients CLI 2 | 3 | The Gateway Clients CLI provides functionalities to manage gateway clients. It 4 | interacts with a database to perform CRU (Create, Read, Update) operations on 5 | client data. 6 | 7 | ## Prerequisites 8 | 9 | - [Python](https://www.python.org/) (version >= 10 | [3.8.10](https://www.python.org/downloads/release/python-3810/)) 11 | 12 | Ensure the following environment variables are set: 13 | 14 | - `MYSQL_HOST`: The hostname or IP address of the MySQL server. 15 | - `MYSQL_USER`: The MySQL user with appropriate privileges to access the 16 | database. 17 | - `MYSQL_PASSWORD`: The password for the MySQL user. 18 | - `MYSQL_DATABASE`: The name of the MySQL database where the gateway client 19 | records will be stored. 20 | 21 | ## Installation 22 | 23 | 1. 
**Set up Virtual Environment**: 24 | 25 | Create and activate a virtual environment to manage project dependencies: 26 | 27 | ```bash 28 | python -m venv venv 29 | source venv/bin/activate # On macOS and Linux 30 | venv\Scripts\activate # On Windows 31 | ``` 32 | 33 | 2. **Install Dependencies**: 34 | 35 | Install required Python dependencies using `pip`: 36 | 37 | ```bash 38 | pip install -r requirements.txt 39 | ``` 40 | 41 | ## Usage 42 | 43 | ### Create 44 | 45 | To create a new gateway client, use the following command: 46 | 47 | ```bash 48 | python gc_cli.py create --msisdn MSISDN --protocols PROTOCOLS 49 | ``` 50 | 51 | - `--msisdn MSISDN`: Specify the MSISDN (Mobile Station International Subscriber 52 | Directory Number) of the client. 53 | - `--protocols PROTOCOLS`: Specify the protocol(s) of the client, separated by 54 | commas. 55 | 56 | ### View 57 | 58 | To view details of existing gateway client(s), use the following command: 59 | 60 | ```bash 61 | python gc_cli.py view [--msisdn MSISDN] 62 | ``` 63 | 64 | - `--msisdn MSISDN`: (Optional) Specify the MSISDN of the client to view. If not 65 | provided, details of all clients will be displayed. 66 | 67 | ### Update 68 | 69 | To update details of an existing gateway client, use the following command: 70 | 71 | ```bash 72 | python gc_cli.py update --msisdn MSISDN [--country COUNTRY] [--operator OPERATOR] [--protocols PROTOCOLS] 73 | ``` 74 | 75 | - `--msisdn MSISDN`: Specify the MSISDN of the client to update. 76 | - `--country COUNTRY`: (Optional) Specify the new country value for the client. 77 | - `--operator OPERATOR`: (Optional) Specify the new operator value for the 78 | client. 79 | - `--protocols PROTOCOLS`: (Optional) Specify the new protocol(s) value for the 80 | client, separated by commas. 
81 | -------------------------------------------------------------------------------- /docs/reliability_tests_cli.md: -------------------------------------------------------------------------------- 1 | # Reliability Tests CLI 2 | 3 | This CLI (Command Line Interface) tool provides functionalities to trigger and 4 | view reliability tests for gateway clients. 5 | 6 | ## Prerequisites 7 | 8 | - [Python](https://www.python.org/) (version >= 9 | [3.8.10](https://www.python.org/downloads/release/python-3810/)) 10 | 11 | Ensure the following environment variables are set: 12 | 13 | - `DEKU_CLOUD_URL`: URL for Deku Cloud service. 14 | - `DEKU_CLOUD_PROJECT_REF`: Project reference for Deku Cloud. 15 | - `DEKU_CLOUD_SERVICE_ID`: Service ID for Deku Cloud. 16 | - `DEKU_CLOUD_ACCOUNT_SID`: Account SID for Deku Cloud. 17 | - `DEKU_CLOUD_AUTH_TOKEN`: Authentication token for Deku Cloud. 18 | - `SHARED_KEY_FILE`: Path to the file containing the shared encryption key. 19 | - `MYSQL_HOST`: The hostname or IP address of the MySQL server. 20 | - `MYSQL_USER`: The MySQL user with appropriate privileges to access the 21 | database. 22 | - `MYSQL_PASSWORD`: The password for the MySQL user. 23 | - `MYSQL_DATABASE`: The name of the MySQL database where the reliability tests 24 | records will be stored. 25 | 26 | > [!NOTE] 27 | > 28 | > - To use the Reliability Tests CLI, you need to set up credentials for 29 | > accessing the Deku Cloud service. If you're not familiar with Deku Cloud or 30 | > need guidance on obtaining credentials, refer to the 31 | > [Deku Cloud tutorials](https://staging.smswithoutborders.com:3000/#/tutorial) 32 | > for detailed instructions. 33 | > - The Reliability Tests CLI depends on the availability of gateway clients. 34 | > Make sure to have gateway clients set up before triggering reliability 35 | > tests. For information on setting up gateway clients, refer to the 36 | > [Gateway Clients CLI documentation](gateway_clients_cli.md). 
37 | 38 | ## Installation 39 | 40 | 1. **Set up Virtual Environment**: 41 | 42 | Create and activate a virtual environment to manage project dependencies: 43 | 44 | ```bash 45 | python -m venv venv 46 | source venv/bin/activate # On macOS and Linux 47 | venv\Scripts\activate # On Windows 48 | ``` 49 | 50 | 2. **Install Dependencies**: 51 | 52 | Install required Python dependencies using `pip`: 53 | 54 | ```bash 55 | pip install -r requirements.txt 56 | ``` 57 | 58 | ## Usage 59 | 60 | ### Starting Tests 61 | 62 | To start reliability tests for a specific MSISDN or for all MSISDNs, use the 63 | following command: 64 | 65 | ```bash 66 | python rt_cli.py start [--msisdn MSISDN] [--all] 67 | ``` 68 | 69 | - `--msisdn MSISDN`: Specify the MSISDN for which tests are to be started. 70 | - `--all`: Start tests for all MSISDNs. 71 | 72 | ### Viewing Test Data 73 | 74 | To view test data for a specific MSISDN or for all test data in the database, 75 | use the following command: 76 | 77 | ```bash 78 | python rt_cli.py view [--msisdn MSISDN] 79 | ``` 80 | 81 | - `--msisdn MSISDN`: Specify the MSISDN for which test data is to be viewed. 82 | 83 | ## Examples 84 | 85 | ### Starting Tests 86 | 87 | Start tests for a specific MSISDN: 88 | 89 | ```bash 90 | python rt_cli.py start --msisdn +1234567890 91 | ``` 92 | 93 | Start tests for all MSISDNs: 94 | 95 | ```bash 96 | python rt_cli.py start --all 97 | ``` 98 | 99 | ### Viewing Test Data 100 | 101 | View test data for a specific MSISDN: 102 | 103 | ```bash 104 | python rt_cli.py view --msisdn +1234567890 105 | ``` 106 | 107 | View all test data: 108 | 109 | ```bash 110 | python rt_cli.py view 111 | ``` 112 | 113 | ## Setting up Linux Cron Jobs 114 | 115 | To automate the execution of reliability tests at regular intervals using cron 116 | jobs, follow these steps: 117 | 118 | 1. Open the crontab file using the command: 119 | 120 | ```bash 121 | crontab -e 122 | ``` 123 | 124 | 2. 
Add a new cron job entry to execute the reliability tests script. For 125 | example, to run the tests every day at 2:00 AM, add the following line: 126 | 127 | ```bash 128 | 0 2 * * * /usr/bin/python /path/to/rt_cli.py start --all >> /path/to/logfile.log 2>&1 129 | ``` 130 | 131 | > [!NOTE] 132 | > 133 | > Replace `/usr/bin/python` with the path to your Python interpreter, 134 | > `/path/to/rt_cli.py` with the actual path to your script, and 135 | > `/path/to/logfile.log` with the path where you want to store the log output. 136 | 137 | 3. Save and exit the crontab file. The cron job will now be scheduled to run at 138 | the specified time. 139 | 140 | > [!NOTE] 141 | > 142 | > - Ensure that the Python interpreter path and script path are correctly 143 | > specified in the cron job entry. 144 | > - Verify the cron job execution and check the log file for any errors or 145 | > issues. 146 | > - Adjust the cron job schedule as needed based on your testing requirements. 147 | -------------------------------------------------------------------------------- /gc_cli.py: -------------------------------------------------------------------------------- 1 | """Gateway Clients CLI""" 2 | 3 | import argparse 4 | import logging 5 | import phonenumbers 6 | from phonenumbers import carrier, geocoder 7 | from playhouse.shortcuts import model_to_dict 8 | from src.models import GatewayClients 9 | from mccmnc import find_matches, update 10 | 11 | logging.basicConfig( 12 | level=logging.INFO, 13 | format="%(asctime)s - %(name)s - %(levelname)s - %(message)s", 14 | datefmt="%Y-%m-%d %H:%M:%S", 15 | ) 16 | logger = logging.getLogger("[GC CLI]") 17 | 18 | 19 | def get_plmn(country_code, operator, refresh=False): 20 | """ 21 | Get PLMN (Public Land Mobile Network) information based on country code and operator. 22 | 23 | Args: 24 | country_code (int): The country code. 25 | operator (str): The operator name. 26 | refresh (bool, optional): Whether to force a refresh of PLMN data. 
Defaults to False. 27 | 28 | Returns: 29 | dict: PLMN information. 30 | """ 31 | if refresh: 32 | update() 33 | 34 | network = operator.split()[0].lower() 35 | 36 | try: 37 | plmn = find_matches(user_cc=country_code, user_network=network) 38 | except FileNotFoundError: 39 | update() 40 | plmn = find_matches(user_cc=country_code, user_network=network) 41 | 42 | return list(plmn.keys())[0] 43 | 44 | 45 | def get_operator_information(msisdn): 46 | """ 47 | Get country and operator information from MSISDN 48 | (Mobile Station International Subscriber Directory Number). 49 | 50 | Args: 51 | msisdn (str): The MSISDN of the client. 52 | 53 | Returns: 54 | tuple: A tuple containing country, operator, and operator code. 55 | - country (str): The country name. 56 | - operator (str): The operator name. 57 | - operator_code (str): The PLMN (Public Land Mobile Network) code. 58 | """ 59 | try: 60 | number = phonenumbers.parse(msisdn, None) 61 | country = geocoder.description_for_number(number, "en") 62 | country_code = number.country_code 63 | operator = carrier.name_for_number(number, "en") or "N/A" 64 | operator_code = get_plmn(country_code, operator) if operator != "N/A" else "N/A" 65 | return country, operator, operator_code 66 | # pylint: disable=W0718 67 | except Exception: 68 | logger.exception("Failed to parse MSISDN.") 69 | return None, None, None 70 | 71 | 72 | def create_client(msisdn, protocols): 73 | """ 74 | Create a new gateway client. 75 | 76 | Args: 77 | msisdn (str): The MSISDN of the client. 78 | protocols (str): The protocol(s) of the client (comma separated). 79 | 80 | Returns: 81 | None 82 | """ 83 | try: 84 | country, operator, operator_code = get_operator_information(msisdn) 85 | 86 | if not all((country, operator, operator_code)): 87 | logger.error( 88 | "Failed to retrieve complete operator information for the provided MSISDN." 
89 | ) 90 | if not country: 91 | logger.error("Country information is missing.") 92 | if not operator: 93 | logger.error("Operator information is missing.") 94 | if not operator_code: 95 | logger.error("Operator code information is missing.") 96 | return 97 | 98 | # pylint: disable=W0212,E1101 99 | with GatewayClients._meta.database.atomic(): 100 | client = GatewayClients.create( 101 | msisdn=msisdn, 102 | country=country, 103 | operator=operator, 104 | operator_code=operator_code, 105 | protocols=protocols, 106 | ) 107 | 108 | logger.info("Client created successfully.") 109 | 110 | print("-" * 60) 111 | print(f"{'Client Details':=^60}") 112 | for key, value in model_to_dict(client).items(): 113 | print(f"{key.upper()}: {value}") 114 | # pylint: disable=W0718 115 | except Exception: 116 | logger.error("Failed to create client.", exc_info=True) 117 | 118 | 119 | def view_client(msisdn=None): 120 | """ 121 | View gateway client(s). 122 | 123 | Args: 124 | msisdn (str, optional): The MSISDN of the client to view. If None, 125 | all clients will be displayed. 126 | 127 | Returns: 128 | None 129 | """ 130 | # pylint: disable=W0212,E1101 131 | with GatewayClients._meta.database.atomic(): 132 | try: 133 | query = GatewayClients.select().dicts() 134 | 135 | if msisdn: 136 | query = query.where(GatewayClients.msisdn == msisdn).dicts() 137 | 138 | if not query: 139 | logger.info("No clients found.") 140 | return 141 | 142 | print(f"{'Clients':=^60}") 143 | for test in query: 144 | print("-" * 60) 145 | for key, value in test.items(): 146 | print(f"{key.upper()}: {value}") 147 | 148 | # pylint: disable=W0718 149 | except Exception: 150 | logger.error("Failed to get client(s).", exc_info=True) 151 | 152 | 153 | def update_client(msisdn, country=None, operator=None, protocols=None): 154 | """ 155 | Update an existing gateway client. 156 | 157 | Args: 158 | msisdn (str): The MSISDN of the client to update. 159 | country (str, optional): The new country value for the client. 
160 | operator (str, optional): The new operator value for the client. 161 | protocols (str, optional): The new protocol(s) value for the client (comma separated). 162 | 163 | Returns: 164 | None 165 | """ 166 | # pylint: disable=W0212,E1101 167 | with GatewayClients._meta.database.atomic(): 168 | try: 169 | client = GatewayClients.get_or_none(msisdn=msisdn) 170 | if client: 171 | if country: 172 | client.country = country 173 | 174 | if operator: 175 | client.operator = operator 176 | 177 | if protocols: 178 | client.protocols = protocols 179 | 180 | client.save() 181 | logger.info("Client updated successfully.") 182 | else: 183 | logger.info("No client found with MSISDN: %s", msisdn) 184 | # pylint: disable=W0718 185 | except Exception: 186 | logger.error("Failed to update record.", exc_info=True) 187 | 188 | 189 | def delete_client(msisdn): 190 | """ 191 | Delete an existing gateway client. 192 | 193 | Args: 194 | msisdn (str): The MSISDN of the client to delete. 195 | """ 196 | with GatewayClients._meta.database.atomic(): 197 | try: 198 | client = GatewayClients.get_or_none(msisdn=msisdn) 199 | if client: 200 | client.delete_instance() 201 | logger.info("Client deleted successfully.") 202 | else: 203 | logger.info("No client found with MSISDN: %s", msisdn) 204 | # pylint: disable=W0718 205 | except Exception: 206 | logger.exception("Failed to delete record.") 207 | 208 | 209 | def main(): 210 | """ 211 | Parse command line arguments and execute corresponding actions. 
212 | """ 213 | parser = argparse.ArgumentParser(description="Gateway Clients CLI") 214 | parser.add_argument( 215 | "action", 216 | choices=["create", "view", "update", "delete"], 217 | help="Action to perform", 218 | ) 219 | parser.add_argument("--msisdn", help="MSISDN of the client") 220 | parser.add_argument("--country", help="Country of the client") 221 | parser.add_argument("--operator", help="Operator of the client") 222 | parser.add_argument( 223 | "--protocols", help="Protocol(s) of the client (comma separated)" 224 | ) 225 | 226 | args = parser.parse_args() 227 | 228 | if not args.action: 229 | parser.error("Please specify an action to perform (create, view, update)") 230 | 231 | if args.action == "create": 232 | if not all([args.msisdn, args.protocols]): 233 | parser.error( 234 | "For 'create' action, all arguments are required: --msisdn, --protocols" 235 | ) 236 | elif args.action == "update": 237 | if not args.msisdn: 238 | parser.error(f"For '{args.action}' action, --msisdn is required") 239 | 240 | if args.action == "create": 241 | create_client(args.msisdn, args.protocols) 242 | elif args.action == "view": 243 | view_client(args.msisdn) 244 | elif args.action == "update": 245 | update_client( 246 | args.msisdn, 247 | args.country, 248 | args.operator, 249 | args.protocols, 250 | ) 251 | elif args.action == "delete": 252 | delete_client(args.msisdn) 253 | 254 | 255 | if __name__ == "__main__": 256 | main() 257 | -------------------------------------------------------------------------------- /logutils.py: -------------------------------------------------------------------------------- 1 | """ 2 | A module for handling application logging. 3 | 4 | This program is free software: you can redistribute it under the terms 5 | of the GNU General Public License, v. 3.0. If a copy of the GNU General 6 | Public License was not distributed with this file, see . 
7 | """ 8 | 9 | import os 10 | import logging 11 | 12 | LOG_LEVEL = os.getenv("LOG_LEVEL", "INFO").upper() 13 | numeric_level = getattr(logging, LOG_LEVEL, None) 14 | 15 | if not isinstance(numeric_level, int): 16 | raise ValueError(f"Invalid log level: {LOG_LEVEL}") 17 | 18 | logging.basicConfig( 19 | level=numeric_level, format="%(asctime)s - %(name)s - %(levelname)s - %(message)s" 20 | ) 21 | 22 | 23 | def get_logger(name: str = None) -> logging.Logger: 24 | """Retrieves a logger instance configured with the specified name. 25 | 26 | Args: 27 | name (str, optional): The name of the logger. If None, the root logger is 28 | returned. 29 | 30 | Returns: 31 | logging.logger: A configured logger instance. 32 | """ 33 | return logging.getLogger(name) 34 | -------------------------------------------------------------------------------- /main.py: -------------------------------------------------------------------------------- 1 | """ 2 | Initializes a Flask app and registers API blueprints. 3 | """ 4 | 5 | from flask import Flask 6 | from src.api_v2 import v2_blueprint 7 | from src.api_v3 import v3_blueprint 8 | 9 | app = Flask(__name__) 10 | 11 | app.register_blueprint(v2_blueprint) 12 | app.register_blueprint(v3_blueprint) 13 | -------------------------------------------------------------------------------- /mccmnc.py: -------------------------------------------------------------------------------- 1 | """ 2 | A module for matching Mobile Country Codes (MCC) and Mobile Network Codes (MNC) 3 | against a JSON dataset scraped from https://www.mcc-mnc.com/. 4 | 5 | References: 6 | - Original codebase: https://github.com/jbjulia/mcc-mnc 7 | This module is based on the code from the GitHub repository linked above. 8 | It provides functionality to match MCCs and MNCs against a JSON dataset 9 | containing Public Land Mobile Network (PLMN) information scraped from the 10 | MCC-MNC website. 
11 | """ 12 | 13 | import json 14 | import os 15 | import sys 16 | from urllib.error import URLError 17 | from urllib.request import urlopen 18 | 19 | from bs4 import BeautifulSoup 20 | from tqdm import tqdm 21 | 22 | MCC_MNC_URL = "https://www.mcc-mnc.com/" 23 | JSON_PATH = os.path.join(os.path.dirname(__file__), "mccmnc.json") 24 | 25 | 26 | def find_matches( 27 | user_cc=None, user_mcc=None, user_mnc=None, user_plmn=None, user_network=None 28 | ): 29 | """ 30 | Match the given criteria against the JSON data. 31 | 32 | Args: 33 | user_cc (str, optional): User's desired Country Code (CC). 34 | user_mcc (str, optional): User's desired Mobile Country Code (MCC). 35 | user_mnc (str, optional): User's desired Mobile Network Code (MNC). 36 | user_plmn (str, optional): User's desired Public Land Mobile Network (PLMN). 37 | user_network (str, optional): User's desired Network. 38 | 39 | Returns: 40 | dict: Dictionary of matching PLMNs with their details. 41 | """ 42 | match_list = {} 43 | 44 | with open(JSON_PATH, "r", encoding="utf-8") as json_file: 45 | json_data = json.load(json_file) 46 | 47 | for plmn, details in json_data.items(): 48 | if user_plmn and user_plmn != plmn: 49 | continue 50 | if user_cc and str(user_cc) != details["CC"]: 51 | continue 52 | if user_mcc and str(user_mcc) != details["MCC"]: 53 | continue 54 | if user_mnc and str(user_mnc) != details["MNC"]: 55 | continue 56 | if user_network and user_network != details["NETWORK"].lower(): 57 | continue 58 | match_list[plmn] = details 59 | 60 | return match_list 61 | 62 | 63 | def update(): 64 | """ 65 | Update the JSON data by scraping the MCC-MNC website. 
66 | 67 | Returns: 68 | None 69 | """ 70 | try: 71 | with urlopen(MCC_MNC_URL) as raw: 72 | print(f"Decoding raw HTML from {MCC_MNC_URL}") 73 | soup = BeautifulSoup(raw, features="html.parser") 74 | 75 | if os.path.exists(JSON_PATH): 76 | print(f"Removing old JSON dictionary {JSON_PATH}.") 77 | os.remove(JSON_PATH) 78 | 79 | print(f"Creating new JSON dictionary {JSON_PATH}.") 80 | json_data = {} 81 | table = soup.find("table") 82 | rows = table.find_all("tr")[1:] # Skip the header 83 | total_rows = len(rows) 84 | progress_bar = tqdm( 85 | total=total_rows, 86 | bar_format="{l_bar}{bar}| {n_fmt}/{total_fmt}", 87 | colour="blue", 88 | ) 89 | 90 | for i, row in enumerate(rows, start=1): 91 | cols = row.find_all("td") 92 | mcc = cols[0].text 93 | mnc = cols[1].text 94 | plmn = mcc + mnc # MCC + MNC 95 | json_data[plmn] = { 96 | "MCC": mcc, 97 | "MNC": mnc, 98 | "ISO": cols[2].text, 99 | "COUNTRY": cols[3].text, 100 | "CC": cols[4].text, 101 | "NETWORK": cols[5].text.strip() if cols[5].text else "unknown", 102 | } 103 | progress_bar.set_description(f"Processing row {i}/{total_rows}") 104 | progress_bar.update(1) 105 | 106 | progress_bar.close() 107 | 108 | with open(JSON_PATH, "w+", encoding="utf-8") as json_file: 109 | print(f"\nSaving JSON dictionary to {JSON_PATH}.") 110 | json.dump(json_data, json_file, indent=4, sort_keys=True) 111 | 112 | except URLError as e: 113 | print(f"Error downloading file: {e}") 114 | sys.exit(1) 115 | -------------------------------------------------------------------------------- /migrations/README.md: -------------------------------------------------------------------------------- 1 | # Database Migration Script 2 | 3 | This script allows you to apply database migrations using a JSON specification 4 | file. It uses the Peewee ORM and the Playhouse migrations module for database 5 | operations. 
6 | 7 | ## Getting Started 8 | 9 | ### Prerequisites 10 | 11 | - Python 3.x installed on your system 12 | - Peewee ORM (`pip install peewee`) 13 | 14 | ### Usage 15 | 16 | ```bash 17 | python3 -m migrations.run 18 | ``` 19 | 20 | Replace `` with the version of the migration specification file 21 | you want to apply. 22 | 23 | For example: 24 | 25 | ```bash 26 | python3 -m migrations.run v1.0.0 27 | ``` 28 | 29 | ### Spec File Format 30 | 31 | The migration specification file is a JSON file that defines the schema changes 32 | to be applied. Here's a sample format: 33 | 34 | ```json 35 | [ 36 | { 37 | "action": "add_column", 38 | "table": "users", 39 | "column_name": "age", 40 | "field": "IntegerField()" 41 | }, 42 | { 43 | "action": "drop_column", 44 | "table": "posts", 45 | "column_name": "author_id", 46 | "cascade": true 47 | }, 48 | { 49 | "action": "rename_column", 50 | "table": "posts", 51 | "old_name": "title", 52 | "new_name": "post_title" 53 | }, 54 | { 55 | "action": "add_not_null", 56 | "table": "comments", 57 | "column": "post_id" 58 | }, 59 | { 60 | "action": "rename_table", 61 | "old_name": "posts", 62 | "new_name": "articles" 63 | }, 64 | { 65 | "action": "add_index", 66 | "table": "articles", 67 | "columns": ["status", "created_at"], 68 | "unique": true 69 | }, 70 | { 71 | "action": "drop_index", 72 | "table": "comments", 73 | "index_name": "post_id" 74 | } 75 | ] 76 | ``` 77 | 78 | ### Supported Actions 79 | 80 | - `add_column` 81 | - `drop_column` 82 | - `rename_column` 83 | - `add_not_null` 84 | - `drop_not_null` 85 | - `rename_table` 86 | - `add_index` 87 | - `drop_index` 88 | 89 | Each action requires specific parameters as mentioned in the sample spec file 90 | format. 91 | -------------------------------------------------------------------------------- /migrations/run.py: -------------------------------------------------------------------------------- 1 | """ 2 | Database migration tool using peewee ORM. 
3 | 4 | Applies schema changes defined in JSON spec file. 5 | """ 6 | 7 | import os 8 | import json 9 | import argparse 10 | import logging 11 | 12 | import peewee 13 | from playhouse.migrate import MySQLMigrator, migrate 14 | 15 | from src.db import connect 16 | 17 | logging.basicConfig( 18 | level=logging.INFO, 19 | format="%(asctime)s - %(name)s - %(levelname)s - %(message)s", 20 | datefmt="%Y-%m-%d %H:%M:%S", 21 | ) 22 | logger = logging.getLogger("[DB MIGRATOR]") 23 | 24 | 25 | db = connect() 26 | 27 | migrator = MySQLMigrator(db) 28 | MIGRATION_DIR = "migrations" 29 | 30 | ACTIONS = { 31 | "add_column": migrator.add_column, 32 | "drop_column": migrator.drop_column, 33 | "rename_column": migrator.rename_column, 34 | "add_not_null": migrator.add_not_null, 35 | "drop_not_null": migrator.drop_not_null, 36 | "rename_table": migrator.rename_table, 37 | "add_index": migrator.add_index, 38 | "drop_index": migrator.drop_index, 39 | } 40 | 41 | ALLOWED_FIELDS = ["CharField", "DecimalField"] 42 | 43 | PENDING = "⏳" 44 | SUCCESS = "✅" 45 | FAILED = "❌" 46 | 47 | 48 | def parse_field(field_str): 49 | """Parse a field string from spec into a Field instance.""" 50 | field_type = field_str.split("(")[0] 51 | if field_type not in ALLOWED_FIELDS: 52 | raise ValueError(f"Unsupported field: {field_type}") 53 | return eval("peewee." + field_str) 54 | 55 | 56 | def migrate_operations(operations): 57 | """ 58 | Execute migration operations. 
59 | 60 | Args: 61 | operations (list): Migration actions to run 62 | 63 | Raises: 64 | MigrationError: On any migration failure 65 | """ 66 | migrations_done = 0 67 | migrations_failed = 0 68 | 69 | for operation in operations: 70 | print("============================================\n") 71 | print(f"Performing operation: {operation}", end="") 72 | print(f" {PENDING}", end="\b") 73 | 74 | try: 75 | action = operation.pop("action") 76 | 77 | if operation.get("field"): 78 | operation["field"] = parse_field(operation["field"]) 79 | 80 | if action not in ACTIONS: 81 | raise ValueError(f"Unsupported action: {action}") 82 | 83 | migrate(ACTIONS[action](**operation)) 84 | 85 | migrations_done += 1 86 | print(f"{SUCCESS}") 87 | print("\n============================================\n") 88 | except Exception as error: 89 | print(f"{FAILED}") 90 | print(f"Error: {error}") 91 | print("\n============================================\n") 92 | migrations_failed += 1 93 | 94 | print(f"{SUCCESS} Completed migrations : {migrations_done}") 95 | print(f"{FAILED} Failed migrations : {migrations_failed}") 96 | 97 | 98 | def check_and_migrate_schema(current_schema_version): 99 | """Check schema version and migrate if necessary.""" 100 | latest_schema_version = get_latest_schema_version() 101 | 102 | if current_schema_version != latest_schema_version: 103 | logger.info( 104 | "Migration required. 
Migrating to latest schema version: %s", 105 | latest_schema_version, 106 | ) 107 | spec = load_spec(latest_schema_version) 108 | migrate_operations(spec) 109 | logger.info("Migration completed.") 110 | else: 111 | logger.info("Database schema is up to date.") 112 | 113 | 114 | def get_latest_schema_version(): 115 | """Get the latest schema version.""" 116 | if not os.path.isdir(MIGRATION_DIR): 117 | return None 118 | 119 | migration_files = [ 120 | file 121 | for file in os.listdir(MIGRATION_DIR) 122 | if file.startswith("v") and file.endswith(".json") 123 | ] 124 | versions = sorted(migration_files, reverse=True) 125 | 126 | return versions[0].rstrip(".json") if versions else None 127 | 128 | 129 | def load_spec(spec_version): 130 | """Load and return the JSON spec.""" 131 | spec_file_path = os.path.join(MIGRATION_DIR, f"{spec_version}.json") 132 | 133 | if not os.path.exists(spec_file_path): 134 | raise FileNotFoundError(f"Spec file '{spec_file_path}' not found.") 135 | 136 | with open(spec_file_path, encoding="utf-8") as f: 137 | return json.load(f) 138 | 139 | 140 | def main(): 141 | """Main function to parse arguments and initiate migration.""" 142 | parser = argparse.ArgumentParser( 143 | usage="python3 -m migrations [-h] spec_version", 144 | description="Apply database migrations", 145 | ) 146 | parser.add_argument("spec_version", help="spec version to apply") 147 | args = parser.parse_args() 148 | spec = load_spec(args.spec_version) 149 | migrate_operations(spec) 150 | 151 | 152 | if __name__ == "__main__": 153 | main() 154 | -------------------------------------------------------------------------------- /migrations/v0.1.1.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "action": "add_column", 4 | "table": "gateway_clients", 5 | "column_name": "reliability", 6 | "field": "DecimalField(max_digits=5, decimal_places=2, default=0.00)" 7 | } 8 | ] 
-------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | beautifulsoup4==4.13.4 2 | bleach==6.2.0 3 | email-reply-parser==0.5.12 4 | Flask==3.1.1 5 | Flask-Cors==6.0.0 6 | grpcio==1.72.1 7 | grpcio-testing==1.72.1 8 | grpcio-tools==1.72.1 9 | gunicorn==23.0.0 10 | idna==3.10 11 | imap-tools==1.10.0 12 | peewee==3.18.1 13 | phonenumbers==9.0.7 14 | pika==1.3.2 15 | pycryptodome==3.23.0 16 | pycryptodomex==3.23.0 17 | pyftpdlib==2.0.1 18 | pymysql==1.1.1 19 | pyOpenSSL==25.1.0 20 | requests==2.32.4 21 | tqdm==4.67.1 22 | webencodings==0.5.1 23 | websocket==0.2.1 24 | websockets==15.0.1 25 | -------------------------------------------------------------------------------- /rt_cli.py: -------------------------------------------------------------------------------- 1 | """Reliability Tests CLI""" 2 | 3 | import os 4 | import logging 5 | import json 6 | import base64 7 | import argparse 8 | import requests 9 | 10 | from src.models import ReliabilityTests 11 | from src import aes, gateway_clients, reliability_tests 12 | 13 | DEKU_CLOUD_URL = os.environ.get("DEKU_CLOUD_URL") 14 | DEKU_CLOUD_PROJECT_REF = os.environ.get("DEKU_CLOUD_PROJECT_REF") 15 | DEKU_CLOUD_SERVICE_ID = os.environ.get("DEKU_CLOUD_SERVICE_ID") 16 | DEKU_CLOUD_ACCOUNT_SID = os.environ.get("DEKU_CLOUD_ACCOUNT_SID") 17 | DEKU_CLOUD_AUTH_TOKEN = os.environ.get("DEKU_CLOUD_AUTH_TOKEN") 18 | 19 | SHARED_KEY_FILE = os.environ.get("SHARED_KEY") 20 | 21 | logging.basicConfig( 22 | level=logging.INFO, 23 | format="%(asctime)s - %(name)s - %(levelname)s - %(message)s", 24 | datefmt="%Y-%m-%d %H:%M:%S", 25 | ) 26 | logger = logging.getLogger("[RT CLI]") 27 | 28 | # pylint: disable=W0718,E1101,W0212 29 | 30 | 31 | def update_gateway_client_reliability_score(_, msisdn: str): 32 | """ 33 | Update reliability score for a gateway client. 34 | 35 | Args: 36 | msisdn: MSISDN of the gateway client. 
37 | 38 | Returns: 39 | True if the reliability score was updated successfully, False otherwise. 40 | """ 41 | reliability_score = reliability_tests.calculate_reliability_score_for_client(msisdn) 42 | 43 | if reliability_score == 0.0: 44 | logger.warning( 45 | "Reliability score for gateway client with MSISDN %s is %s", 46 | msisdn, 47 | reliability_score, 48 | ) 49 | 50 | if not gateway_clients.update_by_msisdn(msisdn, {"reliability": reliability_score}): 51 | logger.error( 52 | "Failed to update gateway client reliability score with MSISDN: %s", msisdn 53 | ) 54 | return None 55 | 56 | return True 57 | 58 | 59 | def make_deku_api_call(test_data, mock=False): 60 | """Make an API call to Deku Cloud to send test data. 61 | 62 | Args: 63 | test_data: The test data containing 'msisdn' and 'base64 encoded ciphertext' 64 | mock (bool): Whether to mock the API call or not. 65 | 66 | Returns: 67 | int or None: HTTP status code of the API call or None if failed. 68 | """ 69 | msisdn, payload = test_data 70 | 71 | if mock: 72 | logger.info("Mocking API call to Deku Cloud.") 73 | logger.info("MSISDN: %s", msisdn) 74 | logger.info("PAYLOAD: %s", payload) 75 | return 200 76 | 77 | if ( 78 | not DEKU_CLOUD_URL 79 | or not DEKU_CLOUD_ACCOUNT_SID 80 | or not DEKU_CLOUD_AUTH_TOKEN 81 | or not DEKU_CLOUD_PROJECT_REF 82 | or not DEKU_CLOUD_SERVICE_ID 83 | ): 84 | logger.error("Deku Cloud environment variables are not set.") 85 | return None 86 | 87 | data = {"sid": "", "to": msisdn, "body": payload} 88 | auth = (DEKU_CLOUD_ACCOUNT_SID, DEKU_CLOUD_AUTH_TOKEN) 89 | url = f"{DEKU_CLOUD_URL}/v1/projects/{DEKU_CLOUD_PROJECT_REF}/services/{DEKU_CLOUD_SERVICE_ID}" 90 | 91 | try: 92 | response = requests.post(url, json=data, auth=auth, timeout=10) 93 | response.raise_for_status() 94 | return response.status_code 95 | except requests.exceptions.RequestException: 96 | logger.error("Failed to make API call to Deku Cloud.", exc_info=True) 97 | return None 98 | 99 | 100 | def 
encrypt_payload(payload): 101 | """Encrypts test payload using AES encryption. 102 | 103 | Args: 104 | payload (bytes): The test payload to be encrypted. 105 | 106 | Returns: 107 | str or None: The base64 encoded ciphertext if successful, None otherwise. 108 | """ 109 | if not SHARED_KEY_FILE: 110 | logger.error("SHARED_KEY_FILE environment variable not set.") 111 | return None 112 | 113 | with open(SHARED_KEY_FILE, "r", encoding="utf-8") as f: 114 | encryption_key = f.readline().strip()[:32] 115 | 116 | if not encryption_key: 117 | logger.error("Encryption key is empty or invalid.") 118 | return None 119 | 120 | try: 121 | ciphertext = aes.AESCipher.encrypt(shared_key=encryption_key, data=payload) 122 | return base64.b64encode(ciphertext).decode("utf-8") 123 | except Exception: 124 | logger.error("Failed to encrypt payload.", exc_info=True) 125 | return None 126 | 127 | 128 | def create_test_payload(test_data): 129 | """Creates a test payload and encrypts it. 130 | 131 | Args: 132 | test_data (dict): Test data containing 'id' and 'msisdn'. 133 | 134 | Returns: 135 | tuple or None: Tuple containing MSISDN and base64 encoded ciphertext 136 | of the encrypted payload, or None if creation failed. 137 | """ 138 | test_payload = {"test_id": test_data["id"], "msisdn": test_data["msisdn"]} 139 | test_ciphertext = encrypt_payload(payload=bytes(json.dumps(test_payload), "utf-8")) 140 | 141 | if not test_ciphertext: 142 | logger.error( 143 | "Failed to create test payload for MSISDN: %s", 144 | test_data["msisdn"], 145 | ) 146 | return None 147 | 148 | return test_data["msisdn"], test_ciphertext 149 | 150 | 151 | def start_tests(msisdn=None, all_tests=False, mock_api=False): 152 | """Start reliability tests for specified MSISDN or all MSISDNs. 153 | 154 | Args: 155 | msisdn (str, optional): MSISDN for which tests are to be started. 156 | all_tests (bool, optional): Flag to indicate if tests are to be 157 | started for all MSISDNs. 
def view_test_data(msisdn=None):
    """View test data for specified MSISDN or all test data in the database.

    Args:
        msisdn (str, optional): MSISDN for which test data is to be viewed.
            If omitted, every test in the database is shown.
    """
    with ReliabilityTests._meta.database.atomic():
        try:
            # Fix: previously a full-table get_all() query was always executed
            # and then discarded whenever an MSISDN was supplied. Run only the
            # query whose result is actually displayed.
            if msisdn:
                tests, _ = reliability_tests.get_tests_for_client(msisdn)
            else:
                tests = reliability_tests.get_all()["data"]

            if not tests:
                logger.info("No tests found.")
                return

            print(f"{'Tests':=^60}")
            for test in tests:
                print("-" * 60)
                for key, value in test.items():
                    print(f"{key.upper()}: {value}")

        except Exception:
            logger.error("Failed to get test(s).", exc_info=True)
def get_private_ip() -> str:
    """Return the IP address of an external interface.

    Used when binding to 0.0.0.0 (or ::1) to show a more useful URL.
    Adapted from werkzeug's serving module:
    https://github.com/pallets/werkzeug/blob/a44c1d76689ae6608d1783ac628127150826c809/src/werkzeug/serving.py#L925

    Connecting a UDP socket does not transmit any packets; it merely lets
    the OS pick the outgoing interface, whose address we then read back.
    """
    family = socket.AF_INET
    # Arbitrary private address used only to select a route.
    # probe_host = "fd31:f903:5ab5:1::1" if family == socket.AF_INET6 else "10.253.155.219"
    probe_host = "10.253.155.219"

    with socket.socket(family, socket.SOCK_DGRAM) as probe:
        try:
            probe.connect((probe_host, 58162))
        except OSError:
            # No route available; fall back to loopback.
            return "::1" if family == socket.AF_INET6 else "127.0.0.1"

        return probe.getsockname()[0]  # type: ignore
24 | """ 25 | state = '__RUN__' 26 | def __init__(self, websocket): 27 | self.websocket = websocket 28 | # self.state = 'run' 29 | 30 | def get_socket(self): 31 | return self.websocket 32 | 33 | def __init__(self, host: str, port: str, 34 | gateway_server_host: str, gateway_server_port: str, ssl_context = None): 35 | """ 36 | """ 37 | self.host = host 38 | self.port = port 39 | self.ssl_context = ssl_context 40 | 41 | self.gateway_server_port = gateway_server_port 42 | self.gateway_server_host = gateway_server_host 43 | 44 | self.refresh_limit = 3 45 | self.time_to_refresh = 10 46 | 47 | self.gateway_server_protocol = "http" if not ssl_context else "https" 48 | self.gateway_server_protocol_mobile = "app" if not ssl_context else "apps" 49 | 50 | self.__valid_sessions = {} 51 | 52 | async def construct_websocket_object(self): 53 | """ 54 | read for prod: 55 | https://websockets.readthedocs.io/en/stable/reference/server.html 56 | """ 57 | logging.debug("[*] HOST %s", self.host) 58 | logging.debug("[*] PORT %s", self.port) 59 | async with websockets.serve( 60 | ws_handler = self.active_sessions, 61 | host = self.host, 62 | port = self.port, 63 | ssl= self.ssl_context): 64 | 65 | await asyncio.Future() 66 | 67 | def __get_sessions_url__(self, user_id: str): 68 | """ 69 | TODO: use session_id for something important 70 | like verifying the integrity of the connection 71 | """ 72 | session_id = uuid.uuid4().hex 73 | 74 | sessions_protocol = f"%s://{self.gateway_server_host}:{self.gateway_server_port}/" \ 75 | f"v2/sync/users/{user_id}/sessions/{session_id}/" 76 | 77 | api_handshake_url = sessions_protocol % ( self.gateway_server_protocol) 78 | 79 | mobile_url = sessions_protocol % ( self.gateway_server_protocol_mobile) 80 | 81 | return api_handshake_url, mobile_url 82 | 83 | async def __active_session__(self, 84 | client_socket_connection: websockets.WebSocketServerProtocol, 85 | user_id: str): 86 | """ 87 | """ 88 | client_socket = 
self.ClientWebsocket(client_socket_connection) 89 | 90 | session_change_counter = 0 91 | 92 | while( session_change_counter < self.refresh_limit): 93 | self.__persistent_connections[user_id] = client_socket 94 | 95 | api_handshake_url, mobile_url = self.__get_sessions_url__(user_id=user_id) 96 | 97 | synchronization_request = { 98 | "qr_url": api_handshake_url, 99 | "mobile_url": mobile_url 100 | } 101 | 102 | try: 103 | await self.__persistent_connections[user_id].get_socket().send( 104 | json.dumps(synchronization_request)) 105 | except Exception as error: 106 | raise error 107 | 108 | await asyncio.sleep(self.time_to_refresh) 109 | 110 | client_state = self.__persistent_connections[user_id].state 111 | 112 | if client_state == '__PAUSE__': 113 | await asyncio.sleep(self.session_paused_timeout) 114 | 115 | if client_state == "__ACK__": 116 | logging.debug("connection has been acked, closing") 117 | break 118 | 119 | session_change_counter += 1 120 | 121 | 122 | async def __process_new_client_connection__(self, 123 | client_socket_connection: websockets.WebSocketServerProtocol, 124 | user_id: str): 125 | """ 126 | """ 127 | try: 128 | await self.__active_session__(client_socket_connection = client_socket_connection, 129 | user_id=user_id) 130 | 131 | except Exception as error: 132 | raise error 133 | 134 | else: 135 | try: 136 | await self.__persistent_connections[user_id].get_socket().close() 137 | except Exception as error: 138 | raise error 139 | 140 | 141 | async def __process_pause_connection__(self, user_id: str): 142 | """ 143 | """ 144 | self.__persistent_connections[user_id].state = '__PAUSE__' 145 | try: 146 | await self.__persistent_connections[user_id].get_socket().send("201- pause") 147 | except Exception as error: 148 | raise error 149 | 150 | 151 | @classmethod 152 | async def pause_connection(cls, user_id: str): 153 | """ 154 | """ 155 | try: 156 | await cls.__process_ack_connection__(user_id = user_id) 157 | except Exception as error: 158 | 
logging.exception(error) 159 | 160 | 161 | async def __process_ack_connection__(cls, user_id: str): 162 | """ 163 | """ 164 | self.__persistent_connections[user_id].state = '__ACK__' 165 | try: 166 | await self.__persistent_connections[user_id].get_socket().send("200- ack") 167 | await self.__persistent_connections[user_id].get_socket().close() 168 | del self.__persistent_connections[user_id] 169 | except Exception as error: 170 | raise error 171 | 172 | @classmethod 173 | async def ack_connection(cls, user_id: str): 174 | """ 175 | """ 176 | try: 177 | await cls.__process_ack_connection__(user_id=user_id) 178 | except Exception as error: 179 | logging.exception(error) 180 | 181 | 182 | def __verify_url_path__(self, path): 183 | """ 184 | """ 185 | split_path = path.split('/') 186 | 187 | if len(split_path) < 4: 188 | raise Exception("Invalid init path request") 189 | 190 | user_id = split_path[-1] 191 | 192 | return user_id 193 | 194 | 195 | async def active_sessions(self, 196 | client_socket_connection: websockets.WebSocketServerProtocol, 197 | path: str) -> None: 198 | """Websocket connection required for synchronizing users. 199 | 200 | Once a client is connected, this begins streaming a series of urls after set durations to the client. 
def get_host(host: str) -> str:
    """Resolve the host address the server should use.

    Empty or None falls back to localhost; the wildcard address 0.0.0.0 is
    replaced with the machine's private IP so that generated URLs are
    reachable from other hosts.
    """
    if not host:
        return "127.0.0.1"

    if host == "0.0.0.0":
        return ip_grap.get_private_ip()

    return host
def main() -> None:
    """Entry point: read configuration from the environment and start the
    sync socket server, with TLS when SSL file paths are provided.

    Reads PORT, HOST, SSL_KEY, SSL_CRT, SSL_PEM, GATEWAY_SERVER_HOST and
    GATEWAY_SERVER_[SSL_]PORT from the environment.
    """
    global PORT, HOST, GATEWAY_SERVER_HOST, GATEWAY_SERVER_PORT

    PORT = os.environ.get("PORT")
    HOST = os.environ.get("HOST")
    # HOST = "127.0.0.1" if not HOST else HOST
    # NOTE(review): any non-empty HOST value is overridden to 0.0.0.0 here,
    # so the HOST env var only selects between loopback and all-interfaces.
    # The commented-out line above suggests the original intent was to honour
    # the configured value — confirm whether this override is deliberate
    # (e.g. a Docker binding hack) before changing it.
    HOST = "127.0.0.1" if not HOST else "0.0.0.0"


    SSL_KEY_FILEPATH = os.environ.get("SSL_KEY")
    SSL_CRT_FILEPATH = os.environ.get("SSL_CRT")
    SSL_PEM_FILEPATH = os.environ.get("SSL_PEM")

    logging.debug("SSL_KEY_FILEPATH: %s", SSL_KEY_FILEPATH)
    logging.debug("SSL_CRT_FILEPATH: %s", SSL_CRT_FILEPATH)
    logging.debug("SSL_PEM_FILEPATH: %s", SSL_PEM_FILEPATH)

    GATEWAY_SERVER_HOST = os.environ["GATEWAY_SERVER_HOST"]
    # TLS mode requires all three SSL files; otherwise fall back to plain WS.
    if(SSL_KEY_FILEPATH and SSL_CRT_FILEPATH and SSL_PEM_FILEPATH):
        GATEWAY_SERVER_PORT = os.environ["GATEWAY_SERVER_SSL_PORT"]
        main_tls(ssl_key_filepath=SSL_KEY_FILEPATH,
                ssl_crt_filepath=SSL_CRT_FILEPATH,
                ssl_pem_filepath=SSL_PEM_FILEPATH)
    else:
        GATEWAY_SERVER_PORT = os.environ["GATEWAY_SERVER_PORT"]
        main_no_tls()
class AESCipher:
    """AES-CBC helper built on PyCryptodome.

    The static methods take the raw key string directly; callers are
    responsible for supplying a key whose UTF-8 encoding is a valid AES key
    length (16/24/32 bytes — callers in this project slice to 32).
    """

    def __init__(self, key: str):
        # NOTE(review): self.key (an MD5 digest of the password) is never
        # used by encrypt/decrypt below, which are static and take the key
        # explicitly — confirm whether this constructor path is still needed.
        # MD5-based key derivation is also weak by modern standards.
        password = key.encode('utf-8')
        self.key = md5(password).digest()

    @staticmethod
    def encrypt(shared_key: str, data: bytes) -> bytes:
        """Encrypt `data` with AES-CBC under `shared_key`.

        Returns the random IV prepended to the PKCS#7-padded ciphertext.
        """
        shared_key = shared_key.encode("utf-8")
        vector = get_random_bytes(AES.block_size)
        encryption_cipher = AES.new(shared_key, AES.MODE_CBC, vector)
        return vector + encryption_cipher.encrypt(pad(data, AES.block_size))

    @staticmethod
    def decrypt(iv: bytes, shared_key: str, data: bytes) -> bytes:
        """Decrypt AES-CBC `data` using `shared_key` and the given `iv`,
        removing PKCS#7 padding.
        """
        decryption_cipher = AES.new(
            shared_key.encode('utf-8'),
            AES.MODE_CBC,
            iv)
        return unpad(decryption_cipher.decrypt(data), AES.block_size)
# NOTE(review): the view function takes `platform`, but the route rule has no
# corresponding placeholder — it looks like a "<platform>" URL variable was
# lost (angle brackets stripped during extraction). As written, Flask would
# raise a TypeError when serving this route. Confirm the rule should be
# "/sms/platform/<platform>".
@v2_blueprint.route("/sms/platform/", methods=["POST"])
def publish_relaysms_payload(platform):
    """Publishes RelaySMS Payload.

    Decodes the JSON request body and forwards it to the publisher over the
    "http" transport. Raises BadRequest (handled by the blueprint's error
    handler) when decoding/publishing reports an error.
    """

    request_data = request.json
    publisher_response, err = decode_and_publish(request_data, "http")

    if err:
        raise BadRequest(err)

    return jsonify({"publisher_response": publisher_response})
logger.exception(error) 91 | return ( 92 | jsonify({"error": "Oops! Something went wrong. Please try again later."}), 93 | 500, 94 | ) 95 | -------------------------------------------------------------------------------- /src/api_v3.py: -------------------------------------------------------------------------------- 1 | """API V3 Blueprint""" 2 | 3 | import logging 4 | from datetime import datetime 5 | 6 | from flask import Blueprint, request, jsonify 7 | from flask_cors import CORS 8 | from werkzeug.exceptions import BadRequest, NotFound 9 | 10 | from src import gateway_clients, reliability_tests 11 | from src.db import connect 12 | from src.utils import build_link_header 13 | from src.payload_service import decode_and_publish 14 | from src.models import ReliabilityTests, GatewayClients 15 | 16 | v3_blueprint = Blueprint("v3", __name__, url_prefix="/v3") 17 | CORS(v3_blueprint, expose_headers=["X-Total-Count", "X-Page", "X-Per-Page", "Link"]) 18 | 19 | database = connect() 20 | 21 | logger = logging.getLogger(__name__) 22 | 23 | 24 | def set_security_headers(response): 25 | """Set security headers for each response.""" 26 | security_headers = { 27 | "Strict-Transport-Security": "max-age=63072000; includeSubdomains", 28 | "X-Content-Type-Options": "nosniff", 29 | "Content-Security-Policy": "script-src 'self'; object-src 'self'", 30 | "Referrer-Policy": "strict-origin-when-cross-origin", 31 | "Cache-Control": "no-cache", 32 | "Permissions-Policy": ( 33 | "accelerometer=(), ambient-light-sensor=(), autoplay=(), battery=(), camera=(), " 34 | "clipboard-read=(), clipboard-write=(), cross-origin-isolated=(), display-capture=(), " 35 | "document-domain=(), encrypted-media=(), execution-while-not-rendered=(), " 36 | "execution-while-out-of-viewport=(), fullscreen=(), gamepad=(), geolocation=(), " 37 | "gyroscope=(), magnetometer=(), microphone=(), midi=(), navigation-override=(), " 38 | "payment=(), picture-in-picture=(), publickey-credentials-get=(), screen-wake-lock=(), 
" 39 | "speaker=(), speaker-selection=(), sync-xhr=(), usb=(), web-share=(), " 40 | "xr-spatial-tracking=()" 41 | ), 42 | } 43 | 44 | for header, value in security_headers.items(): 45 | response.headers[header] = value 46 | 47 | return response 48 | 49 | 50 | @v3_blueprint.before_request 51 | def _db_connect(): 52 | """Connect to the database before processing the request.""" 53 | database.connect(reuse_if_open=True) 54 | 55 | 56 | @v3_blueprint.teardown_request 57 | def _db_close(response): 58 | """Close the database connection after processing the request.""" 59 | database.close() 60 | return response 61 | 62 | 63 | @v3_blueprint.after_request 64 | def after_request(response): 65 | """Set security headers after each request.""" 66 | response = set_security_headers(response) 67 | return response 68 | 69 | 70 | @v3_blueprint.route("/clients", methods=["GET"]) 71 | def get_gateway_clients(): 72 | """Get gateway clients with optional filters""" 73 | 74 | filters = { 75 | "country": request.args.get("country") or None, 76 | "operator": request.args.get("operator") or None, 77 | "protocols": request.args.get("protocols") or None, 78 | "last_published_date": request.args.get("last_published_date") or None, 79 | } 80 | 81 | try: 82 | page = int(request.args.get("page") or 1) 83 | per_page = int(request.args.get("per_page") or 10) 84 | 85 | if page < 1 or per_page < 1: 86 | raise ValueError 87 | except ValueError as exc: 88 | raise BadRequest( 89 | "Invalid page or per_page parameter. Must be positive integers." 90 | ) from exc 91 | 92 | last_published_date_str = filters.get("last_published_date") 93 | if last_published_date_str: 94 | try: 95 | filters["last_published_date"] = datetime.fromisoformat( 96 | last_published_date_str 97 | ) 98 | except ValueError as exc: 99 | raise BadRequest( 100 | "Invalid last_published_date. " 101 | "Please provide a valid ISO format datetime (YYYY-MM-DD)." 
@v3_blueprint.route("/clients/<msisdn>/tests", methods=["GET", "POST"])
def manage_gateway_client_tests(msisdn):
    """Manage reliability tests for a specific gateway client:
    GET to fetch tests, POST to start a new test.

    Args:
        msisdn (str): MSISDN of the gateway client.

    Returns:
        flask.Response: Paginated test records (GET) or the newly created
            test's id and start time (POST).

    Raises:
        BadRequest: On invalid pagination or datetime filter parameters.
        NotFound: When no gateway client matches the MSISDN (POST).
    """
    # Expire stale "running" tests before reporting or creating new ones.
    reliability_tests.update_timed_out_tests_status(check_interval=10)

    if request.method == "GET":
        try:
            page = int(request.args.get("page", 1))
            per_page = int(request.args.get("per_page", 10))
            if page < 1 or per_page < 1:
                raise ValueError
        except ValueError as exc:
            # Chain the original error (previously raised without `from`).
            raise BadRequest("Page and per_page must be positive integers.") from exc

        filters = {"msisdn": msisdn}
        status = request.args.get("status")
        if status:
            filters["status"] = status

        # Both bounds filter on start_time: __gte for the window start,
        # __lte for the window end.
        for key, param, filter_key in [
            ("start_time", request.args.get("start_time"), "start_time__gte"),
            ("end_time", request.args.get("end_time"), "start_time__lte"),
        ]:
            if param:
                try:
                    filters[filter_key] = datetime.fromisoformat(param)
                except ValueError as exc:
                    raise BadRequest(
                        f"Invalid {key} format. Use ISO format (YYYY-MM-DDTHH:MM:SS)."
                    ) from exc

        results = reliability_tests.get_all(filters, page, per_page)

        response = jsonify(results)
        response.headers["X-Total-Count"] = str(results["total_records"])
        response.headers["X-Page"] = str(page)
        response.headers["X-Per-Page"] = str(per_page)
        link_header = build_link_header(
            request.base_url, page, per_page, results["total_records"]
        )
        if link_header:
            response.headers["Link"] = link_header
        return response

    # POST: start a new reliability test for this client.
    gateway_client = GatewayClients.get_or_none(msisdn=msisdn)
    if not gateway_client:
        # Fix: previously returned HTTP 200 with an error body; a missing
        # resource should be a 404 (rendered by the NotFound error handler).
        raise NotFound("Gateway client not found")

    new_test = ReliabilityTests.create(
        msisdn=gateway_client,
        status="pending",
        sms_sent_time=None,
        sms_received_time=None,
        sms_routed_time=None,
    )

    return jsonify(
        {
            "message": "Test started successfully.",
            "test_id": int(new_test.id),
            "test_start_time": int(new_test.start_time.timestamp()),
        }
    )
jsonify({"publisher_response": publisher_response}) 213 | 214 | 215 | @v3_blueprint.errorhandler(BadRequest) 216 | @v3_blueprint.errorhandler(NotFound) 217 | def handle_bad_request_error(error): 218 | """Handle BadRequest errors.""" 219 | logger.error(error.description) 220 | return jsonify({"error": error.description}), error.code 221 | 222 | 223 | @v3_blueprint.errorhandler(Exception) 224 | def handle_generic_error(error): 225 | """Handle generic errors.""" 226 | logger.exception(error) 227 | return ( 228 | jsonify({"error": "Oops! Something went wrong. Please try again later."}), 229 | 500, 230 | ) 231 | -------------------------------------------------------------------------------- /src/bridge_server_grpc_client.py: -------------------------------------------------------------------------------- 1 | """Bridge Server gRPC Client.""" 2 | 3 | import functools 4 | import grpc 5 | 6 | import bridge_pb2 7 | import bridge_pb2_grpc 8 | 9 | from src.utils import get_configs 10 | from logutils import get_logger 11 | 12 | logger = get_logger(__name__) 13 | 14 | 15 | def get_channel(): 16 | """Get the appropriate gRPC channel based on the mode. 17 | 18 | Returns: 19 | grpc.Channel: The gRPC channel. 
20 | """ 21 | mode = get_configs("MODE", default_value="development") 22 | hostname = get_configs("BRIDGE_GRPC_HOST") 23 | port = get_configs("BRIDGE_GRPC_PORT") 24 | secure_port = get_configs("BRIDGE_GRPC_SSL_PORT") 25 | 26 | if mode == "production": 27 | logger.info("Connecting to bridge gRPC server at %s:%s", hostname, secure_port) 28 | credentials = grpc.ssl_channel_credentials() 29 | logger.info("Using secure channel for gRPC communication") 30 | return grpc.secure_channel(f"{hostname}:{secure_port}", credentials) 31 | 32 | logger.info("Connecting to bridge gRPC server at %s:%s", hostname, port) 33 | logger.warning("Using insecure channel for gRPC communication") 34 | return grpc.insecure_channel(f"{hostname}:{port}") 35 | 36 | 37 | def grpc_call(): 38 | """Decorator to handle gRPC calls.""" 39 | 40 | def decorator(func): 41 | @functools.wraps(func) 42 | def wrapper(*args, **kwargs): 43 | try: 44 | channel = get_channel() 45 | 46 | with channel as conn: 47 | kwargs["stub"] = bridge_pb2_grpc.EntityServiceStub(conn) 48 | return func(*args, **kwargs) 49 | except grpc.RpcError as e: 50 | return None, e 51 | except Exception as e: 52 | raise e 53 | 54 | return wrapper 55 | 56 | return decorator 57 | 58 | 59 | @grpc_call() 60 | def publish_bridge_content(content, phone_number, **kwargs): 61 | """ 62 | Publishes bridge content. 63 | 64 | Args: 65 | content (str): The content to be published. 66 | phone_number (str): The phone number associated with the bridge entity. 67 | **kwargs: 68 | - stub (object): The gRPC client stub for making requests. 69 | 70 | Returns: 71 | tuple: 72 | - response (object): The bridge server's response. 73 | - error (Exception or None): None if successful, otherwise the encountered exception. 
74 | """ 75 | stub = kwargs["stub"] 76 | 77 | request = bridge_pb2.PublishContentRequest( 78 | content=content, metadata={"From": phone_number} 79 | ) 80 | response = stub.PublishContent(request) 81 | logger.info("Content published successfully.") 82 | return response, None 83 | -------------------------------------------------------------------------------- /src/db.py: -------------------------------------------------------------------------------- 1 | """Module for connecting to a database.""" 2 | 3 | import os 4 | import logging 5 | from peewee import DatabaseError, MySQLDatabase 6 | from playhouse.shortcuts import ReconnectMixin 7 | from src.utils import ensure_database_exists 8 | 9 | logger = logging.getLogger(__name__) 10 | 11 | MYSQL_DATABASE = os.environ.get("MYSQL_DATABASE") 12 | MYSQL_HOST = os.environ.get("MYSQL_HOST") 13 | MYSQL_PASSWORD = os.environ.get("MYSQL_PASSWORD") 14 | MYSQL_USER = os.environ.get("MYSQL_USER") 15 | 16 | 17 | class ReconnectMySQLDatabase(ReconnectMixin, MySQLDatabase): 18 | """ 19 | A custom MySQLDatabase class with automatic reconnection capability. 20 | 21 | This class inherits from both ReconnectMixin and MySQLDatabase 22 | to provide automatic reconnection functionality in case the database 23 | connection is lost. 24 | """ 25 | 26 | 27 | def connect(): 28 | """ 29 | Connects to the database. 30 | 31 | Returns: 32 | Database: The connected database object. 33 | 34 | Raises: 35 | DatabaseError: If failed to connect to the database. 36 | """ 37 | return connect_to_mysql() 38 | 39 | 40 | @ensure_database_exists(MYSQL_HOST, MYSQL_USER, MYSQL_PASSWORD, MYSQL_DATABASE) 41 | def connect_to_mysql(): 42 | """ 43 | Connects to the MySQL database. 44 | 45 | Returns: 46 | ReconnectMySQLDatabase: The connected MySQL database object with reconnection capability. 47 | 48 | Raises: 49 | DatabaseError: If failed to connect to the database. 
50 | """ 51 | try: 52 | db = ReconnectMySQLDatabase( 53 | MYSQL_DATABASE, 54 | user=MYSQL_USER, 55 | password=MYSQL_PASSWORD, 56 | host=MYSQL_HOST, 57 | ) 58 | logger.debug("Connected to MySQL database successfully.") 59 | return db 60 | except DatabaseError as error: 61 | logger.error("Failed to connect to MySQL database: %s", error) 62 | raise error 63 | -------------------------------------------------------------------------------- /src/ftp_server.py: -------------------------------------------------------------------------------- 1 | """FTP Server Module""" 2 | 3 | import os 4 | import logging 5 | from OpenSSL import SSL 6 | from pyftpdlib.authorizers import DummyAuthorizer 7 | from pyftpdlib.servers import FTPServer 8 | from pyftpdlib.handlers import FTPHandler, TLS_FTPHandler, TLS_DTPHandler 9 | from src.payload_service import decode_and_publish 10 | 11 | logging.basicConfig( 12 | level=logging.INFO, 13 | format="%(asctime)s - %(name)s - %(levelname)s - %(message)s", 14 | datefmt="%Y-%m-%d %H:%M:%S", 15 | ) 16 | logger = logging.getLogger("[FTP SERVER]") 17 | 18 | FTP_USERNAME = os.environ["FTP_USERNAME"] 19 | FTP_PASSWORD = os.environ["FTP_PASSWORD"] 20 | FTP_IP_ADDRESS = os.environ["FTP_IP_ADDRESS"] 21 | FTP_PORT = int(os.environ.get("FTP_PORT", 9909)) 22 | FTP_MAX_CON = int(os.environ.get("FTP_MAX_CON", 256)) 23 | FTP_MAX_CON_PER_IP = int(os.environ.get("FTP_MAX_CON_PER_IP", 5)) 24 | FTP_PASSIVE_PORTS = [int(p) for p in os.environ["FTP_PASSIVE_PORTS"].split("-")] 25 | FTP_DIRECTORY = os.environ["FTP_DIRECTORY"] 26 | SSL_CERTIFICATE = os.environ["SSL_CERTIFICATE"] 27 | SSL_KEY = os.environ["SSL_KEY"] 28 | 29 | 30 | def file_received(_, file): 31 | """Handle file received event. 32 | 33 | Args: 34 | _: Instance of FTPHandler (not used). 35 | file (str): The name of the received file. 
36 | """ 37 | try: 38 | with open(file, "r", encoding="utf-8") as f: 39 | content = f.read() 40 | 41 | publisher_response, err = decode_and_publish(content, "ftp") 42 | 43 | if err: 44 | logger.error(err) 45 | os.remove(file) 46 | logger.info("Deleted file %s due to error", file) 47 | return 48 | 49 | logger.info({"publisher_response": publisher_response}) 50 | os.remove(file) 51 | 52 | except Exception as exc: 53 | logger.error("Failed to process file '%s': %s", file, exc, exc_info=True) 54 | 55 | 56 | def create_ssl_context(certfile, keyfile): 57 | """Create an SSL context. 58 | 59 | Args: 60 | certfile (str): Path to the SSL certificate file. 61 | keyfile (str): Path to the SSL private key file. 62 | 63 | Returns: 64 | SSLContext: SSL context. 65 | """ 66 | context = SSL.Context(SSL.TLS_SERVER_METHOD) 67 | context.use_certificate_file(certfile) 68 | context.use_privatekey_file(keyfile) 69 | return context 70 | 71 | 72 | def main(): 73 | """ 74 | Main function to start the FTP server. 75 | """ 76 | if os.path.exists(SSL_CERTIFICATE) and os.path.exists(SSL_KEY): 77 | logger.info("SSL credentials found. Running in production mode.") 78 | ssl_context = create_ssl_context(SSL_CERTIFICATE, SSL_KEY) 79 | handler = TLS_FTPHandler 80 | handler.ssl_context = ssl_context 81 | handler.tls_control_required = True 82 | handler.tls_data_required = True 83 | else: 84 | logger.info("No valid SSL credentials found. 
Running in development mode.") 85 | handler = FTPHandler 86 | 87 | authorizer = DummyAuthorizer() 88 | authorizer.add_user(FTP_USERNAME, FTP_PASSWORD, FTP_DIRECTORY, perm="w") 89 | 90 | address = (FTP_IP_ADDRESS, FTP_PORT) 91 | server = FTPServer(address, handler) 92 | 93 | server.max_cons = FTP_MAX_CON 94 | server.max_cons_per_ip = FTP_MAX_CON_PER_IP 95 | 96 | dtp_handler = TLS_DTPHandler 97 | 98 | handler.authorizer = authorizer 99 | handler.banner = "SmsWithoutBorders FTP Server" 100 | handler.passive_ports = range(FTP_PASSIVE_PORTS[0], FTP_PASSIVE_PORTS[1]) 101 | handler.permit_privileged_ports = True 102 | 103 | handler.on_file_received = file_received 104 | handler.dtp_handler = dtp_handler 105 | 106 | server.serve_forever() 107 | 108 | 109 | if __name__ == "__main__": 110 | main() 111 | -------------------------------------------------------------------------------- /src/gateway_clients.py: -------------------------------------------------------------------------------- 1 | """Gateway Clients Controllers""" 2 | 3 | import logging 4 | import datetime 5 | 6 | from peewee import fn, DoesNotExist 7 | 8 | from src.models import GatewayClients 9 | 10 | logger = logging.getLogger(__name__) 11 | 12 | 13 | database = GatewayClients._meta.database 14 | 15 | 16 | def get_all(filters=None, page=None, per_page=None) -> tuple: 17 | """Get all gateway clients according to the filters, pagination. 18 | 19 | Args: 20 | filters (dict, optional): A dictionary containing filtering criteria. 21 | page (int, optional): Page number for pagination. 22 | per_page (int, optional): Number of records per page for pagination. 23 | 24 | Returns: 25 | tuple: A tuple containing a list of dictionaries containing client data and total_records. 
26 | """ 27 | results = [] 28 | query = GatewayClients.select().dicts() 29 | 30 | if filters: 31 | conditions = [] 32 | for key, value in filters.items(): 33 | if value is not None: 34 | if key == "country": 35 | conditions.append( 36 | fn.lower(getattr(GatewayClients, key)) == value.lower() 37 | ) 38 | elif key in ("protocols", "operator"): 39 | conditions.append( 40 | fn.lower(getattr(GatewayClients, key)).contains(value.lower()) 41 | ) 42 | elif key == "last_published_date": 43 | conditions.append( 44 | getattr(GatewayClients, key).truncate("day") == value 45 | ) 46 | else: 47 | conditions.append(getattr(GatewayClients, key) == value) 48 | 49 | if conditions: 50 | query = query.where(*conditions).dicts() 51 | 52 | total_records = query.count() or 0 53 | 54 | if page is not None and per_page is not None: 55 | query = query.paginate(page, per_page) 56 | 57 | for client in query: 58 | client = { 59 | field: ( 60 | int(value.timestamp()) 61 | if isinstance(value, datetime.datetime) 62 | else value 63 | ) 64 | for field, value in client.items() 65 | } 66 | 67 | client["protocols"] = ( 68 | client.get("protocols", "").split(",") if client.get("protocols") else [] 69 | ) 70 | 71 | results.append(client) 72 | 73 | return results, total_records 74 | 75 | 76 | def get_by_msisdn(msisdn: str) -> dict: 77 | """Retrieve a gateway client by its MSISDN. 78 | 79 | Args: 80 | msisdn (str): The MSISDN of the gateway client to retrieve. 81 | 82 | Returns: 83 | dict: A dictionary containing client data if a matching client is found, 84 | or None if no client with the provided MSISDN exists. 
85 | """ 86 | client = ( 87 | GatewayClients.select() 88 | .where(GatewayClients.msisdn == msisdn) 89 | .dicts() 90 | .get_or_none() 91 | ) 92 | 93 | if not client: 94 | return None 95 | 96 | client = { 97 | field: int(value.timestamp()) if isinstance(value, datetime.datetime) else value 98 | for field, value in client.items() 99 | } 100 | 101 | client["protocols"] = client.get("protocols", "").split(",") 102 | 103 | return client 104 | 105 | 106 | def update_by_msisdn(msisdn: str, fields: dict) -> bool: 107 | """Update a gateway client by its MSISDN. 108 | 109 | Args: 110 | msisdn (str): The MSISDN of the gateway client to update. 111 | fields (dict): A dictionary containing the fields to update 112 | along with their new values. 113 | 114 | Returns: 115 | bool: True if the client is updated successfully, False otherwise. 116 | """ 117 | try: 118 | client = GatewayClients.get(GatewayClients.msisdn == msisdn) 119 | 120 | with database.atomic(): 121 | for field, value in fields.items(): 122 | setattr(client, field, value) 123 | client.save() 124 | 125 | return True 126 | 127 | except DoesNotExist: 128 | return False 129 | 130 | 131 | def get_all_countries() -> list: 132 | """Retrieve a list of all unique countries from the gateway clients. 133 | 134 | Returns: 135 | list: A list containing names of all unique countries. 136 | """ 137 | countries = ( 138 | GatewayClients.select(GatewayClients.country) 139 | .distinct() 140 | .where(GatewayClients.country.is_null(False)) 141 | .tuples() 142 | ) 143 | return [country[0] for country in countries] 144 | 145 | 146 | def get_operators_for_country(country: str) -> list: 147 | """Retrieve a list of all unique operators for a specific country. 148 | 149 | Args: 150 | country (str): The name of the country for which operators are 151 | to be retrieved. 152 | 153 | Returns: 154 | list: A list containing names of all unique operators for the 155 | specified country. 
156 | """ 157 | operators = ( 158 | GatewayClients.select(GatewayClients.operator) 159 | .distinct() 160 | .where( 161 | (GatewayClients.country == country) 162 | & (GatewayClients.operator.is_null(False)) 163 | ) 164 | .tuples() 165 | ) 166 | return [operator[0] for operator in operators] 167 | -------------------------------------------------------------------------------- /src/grpc_publisher_client.py: -------------------------------------------------------------------------------- 1 | """Publisher gRPC Client""" 2 | 3 | import functools 4 | import grpc 5 | 6 | import publisher_pb2 7 | import publisher_pb2_grpc 8 | 9 | from src.utils import get_configs 10 | from logutils import get_logger 11 | 12 | logger = get_logger(__name__) 13 | 14 | 15 | def get_channel(): 16 | """Get the appropriate gRPC channel based on the mode. 17 | 18 | Returns: 19 | grpc.Channel: The gRPC channel. 20 | """ 21 | mode = get_configs("MODE", default_value="development") 22 | hostname = get_configs("PUBLISHER_GRPC_HOST") 23 | port = get_configs("PUBLISHER_GRPC_PORT") 24 | secure_port = get_configs("PUBLISHER_GRPC_SSL_PORT") 25 | 26 | if mode == "production": 27 | logger.info( 28 | "Connecting to publisher gRPC server at %s:%s", hostname, secure_port 29 | ) 30 | credentials = grpc.ssl_channel_credentials() 31 | logger.info("Using secure channel for gRPC communication") 32 | return grpc.secure_channel(f"{hostname}:{secure_port}", credentials) 33 | 34 | logger.info("Connecting to publisher gRPC server at %s:%s", hostname, port) 35 | logger.warning("Using insecure channel for gRPC communication") 36 | return grpc.insecure_channel(f"{hostname}:{port}") 37 | 38 | 39 | def grpc_call(func): 40 | """Decorator to handle gRPC calls.""" 41 | 42 | @functools.wraps(func) 43 | def wrapper(*args, **kwargs): 44 | try: 45 | channel = get_channel() 46 | 47 | with channel as conn: 48 | kwargs["stub"] = publisher_pb2_grpc.PublisherStub(conn) 49 | return func(*args, **kwargs) 50 | except grpc.RpcError as e: 51 
| return None, e 52 | except Exception as e: 53 | raise e 54 | 55 | return wrapper 56 | 57 | 58 | @grpc_call 59 | def publish_content(content, sender, **kwargs): 60 | """Request for publishing message to a target platform""" 61 | stub = kwargs["stub"] 62 | date = kwargs["date"] 63 | date_sent = kwargs["date_sent"] 64 | request = publisher_pb2.PublishContentRequest( 65 | content=content, 66 | metadata={ 67 | "From": sender, 68 | "Date": date, 69 | "Date_sent": date_sent, 70 | }, 71 | ) 72 | 73 | response = stub.PublishContent(request) 74 | return response, None 75 | -------------------------------------------------------------------------------- /src/imap_listener.py: -------------------------------------------------------------------------------- 1 | """Module to listen for incoming emails via IMAP, process them, and publish encrypted data.""" 2 | 3 | import os 4 | import ssl 5 | import logging 6 | 7 | import time 8 | import socket 9 | import imaplib 10 | import traceback 11 | 12 | from imap_tools import ( 13 | AND, 14 | MailBox, 15 | MailboxLoginError, 16 | MailboxLogoutError, 17 | ) 18 | from email_reply_parser import EmailReplyParser 19 | from src.payload_service import decode_and_publish 20 | 21 | IMAP_SERVER = os.environ["IMAP_SERVER"] 22 | IMAP_PORT = int(os.environ.get("IMAP_PORT", 993)) 23 | IMAP_USERNAME = os.environ["IMAP_USERNAME"] 24 | IMAP_PASSWORD = os.environ["IMAP_PASSWORD"] 25 | MAIL_FOLDER = os.environ.get("MAIL_FOLDER", "INBOX") 26 | SSL_CERTIFICATE = os.environ["SSL_CERTIFICATE"] 27 | SSL_KEY = os.environ["SSL_KEY"] 28 | SMTP_ALLOWED_EMAIL_ADDRESSES = set( 29 | os.environ.get("SMTP_ALLOWED_EMAIL_ADDRESSES", "").split(",") 30 | ) 31 | 32 | logging.basicConfig( 33 | level=logging.INFO, 34 | format="%(asctime)s - %(name)s - %(levelname)s - %(message)s", 35 | datefmt="%Y-%m-%d %H:%M:%S", 36 | ) 37 | logger = logging.getLogger("[IMAP LISTENER]") 38 | 39 | 40 | def delete_email(mailbox, email_uid): 41 | """ 42 | Delete an email from the mailbox. 
def delete_email(mailbox, email_uid):
    """
    Delete an email from the mailbox.

    Args:
        mailbox (imap_tools.MailBox): The logged-in mailbox connection.
        email_uid (str): The UID of the email to be deleted; falsy UIDs
            are silently skipped.

    Raises:
        Exception: If there's an error while deleting the email.
    """
    try:
        if email_uid:
            mailbox.delete(email_uid)
            logger.info("Successfully deleted email %s", email_uid)
    except Exception as e:
        logger.error("Error deleting email %s: %s", email_uid, e)
        raise


def process_incoming_email(mailbox, email):
    """
    Process an incoming email.

    Extracts the reply body, enforces the SMTP_ALLOWED_EMAIL_ADDRESSES
    allow-list, publishes the payload, and deletes the email afterwards
    (both on success and on publish error, to avoid reprocessing).

    Args:
        mailbox (imap_tools.MailBox): The logged-in mailbox connection.
        email (imap_tools.MailMessage): An object representing the email message.
    """
    # Strip quoted history/signature so only the newest reply text remains.
    body = EmailReplyParser.parse_reply(email.text)
    email_uid = email.uid
    from_email = email.from_

    try:
        if not from_email:
            logger.warning("No valid 'From' found. Discarding email.")
            delete_email(mailbox, email_uid)
            return

        # Allow-list check: only configured senders may publish via SMTP.
        if from_email not in SMTP_ALLOWED_EMAIL_ADDRESSES:
            logger.warning("Dropping email from unauthorized sender: %s", from_email)
            delete_email(mailbox, email_uid)
            return

        publisher_response, err = decode_and_publish(body, "smtp")

        if err:
            logger.error(err)
            delete_email(mailbox, email_uid)
            return

        logger.info({"publisher_response": publisher_response})
        delete_email(mailbox, email_uid)

    except Exception as e:
        # Email is left in place here; it will be seen again on the next fetch.
        logger.exception("Error processing email %s: %s", email_uid, e)


def main():
    """
    Main function to run the email processing loop.

    Maintains a long-lived IMAP connection, using IDLE to wait for new
    mail and re-connecting proactively before the typical ~30 minute
    server-side session limit. Network errors trigger a reconnect after
    a one-minute back-off.
    """
    ssl_context = ssl.create_default_context()
    ssl_context.load_cert_chain(certfile=SSL_CERTIFICATE, keyfile=SSL_KEY)

    done = False
    while not done:
        connection_start_time = time.monotonic()
        connection_live_time = 0.0
        try:
            with MailBox(IMAP_SERVER, IMAP_PORT, ssl_context=ssl_context).login(
                IMAP_USERNAME, IMAP_PASSWORD, MAIL_FOLDER
            ) as mailbox:
                logger.info(
                    "Connected to mailbox %s on %s", IMAP_SERVER, time.asctime()
                )
                # Re-login before 29 minutes to dodge server session timeouts.
                while connection_live_time < 29 * 60:
                    try:
                        # Block up to 20s waiting for IDLE notifications.
                        responses = mailbox.idle.wait(timeout=20)
                        if responses:
                            logger.debug("IMAP IDLE responses: %s", responses)

                        # Fetch everything still in the folder; processed
                        # emails are deleted, so "ALL" only re-yields
                        # messages that previously failed.
                        for msg in mailbox.fetch(
                            criteria="ALL",
                            bulk=50,
                            mark_seen=False,
                        ):
                            process_incoming_email(mailbox, msg)

                    except KeyboardInterrupt:
                        logger.info("Received KeyboardInterrupt, exiting...")
                        done = True
                        break
                    connection_live_time = time.monotonic() - connection_start_time
        except (
            TimeoutError,
            ConnectionError,
            imaplib.IMAP4.abort,
            MailboxLoginError,
            MailboxLogoutError,
            socket.herror,
            socket.gaierror,
            socket.timeout,
        ) as e:
            logger.error("Error occurred: %s", e)
            logger.error(traceback.format_exc())
            logger.info("Reconnecting in a minute...")
            time.sleep(60)


if __name__ == "__main__":
    main()


class Security:
    """Keypair management placeholder (no implementation yet)."""

    def generate_keypair(self) -> None:
        """Generate a keypair. TODO: not implemented."""

    def has_keypair(self) -> None:
        """Report whether a keypair exists. TODO: not implemented."""
#!/usr/bin/env python3

# Use this for IDEs to check data types
# https://docs.python.org/3/library/typing.html

import threading
import requests
from flask import (
    Flask,
    request,
    jsonify,
    Response,
    copy_current_request_context,
    redirect,
    url_for,
)
from flask_cors import CORS, cross_origin

from src import sync, rsa, aes, publisher, rmq_broker, notifications
from src.process_incoming_messages import (
    process_data,
    process_test,
    DecryptError,
    UserNotFoundError,
    SharedKeyError,
    InvalidDataError,
)
from src.api_v3 import v3_blueprint

from sockets import ip_grap

from src.users import Users

from src.users_entity import UsersEntity

import os
import json
import logging
# NOTE(review): `threading` is imported twice (also at the top of the file).
import threading
import base64
import bleach

from SwobBackendPublisher import MySQL, Lib
from SwobBackendPublisher.exceptions import UserDoesNotExist, DuplicateUsersExist


__api_version_number = 2

HOST = os.environ.get("HOST")
SOCK_PORT = os.environ.get("SOCK_PORT")
RSA_PR_KEY = os.environ.get("RSA_PR_KEY")
SHARED_KEY_FILE = os.environ.get("SHARED_KEY")

# Required for BE-Publisher Lib.
# Each MYSQL_BE_* setting falls back to the corresponding MYSQL_* value;
# the fallback uses os.environ[...] so a missing pair fails fast at import.
MYSQL_BE_HOST = (
    os.environ["MYSQL_HOST"]
    if not os.environ.get("MYSQL_BE_HOST")
    else os.environ.get("MYSQL_BE_HOST")
)

MYSQL_BE_USER = (
    os.environ["MYSQL_USER"]
    if not os.environ.get("MYSQL_BE_USER")
    else os.environ.get("MYSQL_BE_USER")
)

MYSQL_BE_PASSWORD = (
    os.environ["MYSQL_PASSWORD"]
    if not os.environ.get("MYSQL_BE_PASSWORD")
    else os.environ.get("MYSQL_BE_PASSWORD")
)
MYSQL_BE_DATABASE = (
    os.environ["MYSQL_DATABASE"]
    if not os.environ.get("MYSQL_BE_DATABASE")
    else os.environ.get("MYSQL_BE_DATABASE")
)

# Required for storing user encryption information.
MYSQL_HOST = (
    "127.0.0.1" if not os.environ.get("MYSQL_HOST") else os.environ.get("MYSQL_HOST")
)
MYSQL_USER = (
    "root" if not os.environ.get("MYSQL_USER") else os.environ.get("MYSQL_USER")
)

# Password and database name are mandatory -- KeyError at import if unset.
MYSQL_PASSWORD = os.environ["MYSQL_PASSWORD"]
MYSQL_DATABASE = os.environ["MYSQL_DATABASE"]

# Database creations
# Back-end publisher database (read via the SwobBackendPublisher Lib).
usersBEPUB = UsersEntity(
    mysql_host=MYSQL_BE_HOST,
    mysql_user=MYSQL_BE_USER,
    mysql_password=MYSQL_BE_PASSWORD,
    mysql_database=MYSQL_BE_DATABASE,
)

BEPubLib = Lib(usersBEPUB.db)

# Gateway-server's own users database (shared keys, public keys, ...).
usersEntity = UsersEntity(
    mysql_host=MYSQL_HOST,
    mysql_user=MYSQL_USER,
    mysql_password=MYSQL_PASSWORD,
    mysql_database=MYSQL_DATABASE,
)

users = Users(usersEntity)

# Best-effort schema creation; failure is logged, not fatal.
try:
    users.create_database_and_tables__()
except Exception as error:
    logging.exception(error)


# RMQ creations
rmq_connection, rmq_channel = publisher.init_rmq_connections()

# create notifications exchanges (best-effort; failure is logged).
try:
    notifications.create_exchange(channel=rmq_channel)
except Exception as error:
    logging.exception(error)

# Flask creations
app = Flask(__name__)
# Loads this module's UPPERCASE globals (HOST, SOCK_PORT, ...) into app.config.
app.config.from_object(__name__)
app.register_blueprint(v3_blueprint)

# CORS(
#     app,
#     resources={r"/*": {
#         "origins": json.loads(os.environ.get("ORIGINS"))}},
#     supports_credentials=True,
# )

# NOTE(review): json.loads(None) raises if ORIGINS is unset -- presumably
# ORIGINS is always configured in deployment; verify.
CORS(
    app,
    origins=json.loads(os.environ.get("ORIGINS")),
    supports_credentials=True,
)

# @app.before_request
# def after_request_func():
#     response = Response()
#     response.headers['Access-Control-Allow-Origin'] = "https://smswithoutborders.com"
#
#     return response
"max-age=63072000; includeSubdomains" 153 | ) 154 | response.headers["X-Content-Type-Options"] = "nosniff" 155 | response.headers["Content-Security-Policy"] = "script-src 'self'; object-src 'self'" 156 | response.headers["Referrer-Policy"] = "strict-origin-when-cross-origin" 157 | response.headers["Cache-Control"] = "no-cache" 158 | response.headers["Permissions-Policy"] = ( 159 | "accelerometer=(), ambient-light-sensor=(), autoplay=(), battery=(), camera=(), clipboard-read=(), clipboard-write=(), cross-origin-isolated=(), display-capture=(), document-domain=(), encrypted-media=(), execution-while-not-rendered=(), execution-while-out-of-viewport=(), fullscreen=(), gamepad=(), geolocation=(), gyroscope=(), magnetometer=(), microphone=(), midi=(), navigation-override=(), payment=(), picture-in-picture=(), publickey-credentials-get=(), screen-wake-lock=(), speaker=(), speaker-selection=(), sync-xhr=(), usb=(), web-share=(), xr-spatial-tracking=()" 160 | ) 161 | return response 162 | 163 | 164 | @app.route("/v%s/sync/users/" % (__api_version_number), methods=["GET"]) 165 | def get_sync_url(user_id: str): 166 | """ 167 | TODO: validate user_id before having it in production 168 | """ 169 | user_id = bleach.clean(user_id) 170 | 171 | try: 172 | port = app.config["SOCK_PORT"] 173 | 174 | # print(dir(app.env["HOST"])) 175 | # TODO: does not work well with docker 176 | # host = socket_sessions.get_host(app.config["HOST"]) 177 | host = request.host.split(":")[0] 178 | sockets_url = sync.get_sockets_sessions_url( 179 | user_id=user_id, host=host, port=SOCK_PORT 180 | ) 181 | except Exception as error: 182 | app.logger.exception(error) 183 | return "", 500 184 | else: 185 | return sockets_url, 200 186 | 187 | 188 | @app.route("/v%s/sync/users" % (__api_version_number), methods=["DELETE"]) 189 | def refresh_users_shared_key(): 190 | """ """ 191 | try: 192 | data = json.loads(request.data, strict=False) 193 | except Exception as error: 194 | logging.exception(error) 195 | 196 | 
return "poorly formed json", 400 197 | 198 | if not "msisdn_hashed" in data: 199 | return "missing msisdn", 400 200 | 201 | SHARED_KEY = None 202 | with open(SHARED_KEY_FILE, "r") as f: 203 | SHARED_KEY = f.readline().strip()[:32] 204 | 205 | msisdn_hash = data["msisdn_hashed"] 206 | 207 | # msisdn_hash = base64.b64decode(msisdn_hash) 208 | iv = msisdn_hash[:16].encode("utf8") 209 | msisdn_hash = bytes.fromhex(msisdn_hash[16:]) 210 | 211 | try: 212 | msisdn_hash = aes.AESCipher.decrypt( 213 | data=msisdn_hash, iv=iv, shared_key=SHARED_KEY 214 | ) 215 | except Exception as error: 216 | app.logger.exception(error) 217 | return "failed to decrypt", 403 218 | else: 219 | try: 220 | user = users.find(msisdn_hash=msisdn_hash) 221 | 222 | users.delete(user) 223 | except Exception as error: 224 | logging.exception(error) 225 | return "", 500 226 | 227 | return "OK", 200 228 | 229 | 230 | @app.route("/v%s/sync/users/verification" % (__api_version_number), methods=["POST"]) 231 | @cross_origin(origins="*") 232 | def verify_user_shared_key(): 233 | """ 234 | - encrypt user shared key 235 | - compare input shared key against encrypted copy 236 | """ 237 | try: 238 | data = json.loads(request.data, strict=False) 239 | except Exception as error: 240 | logging.exception(error) 241 | 242 | return "poorly formed json", 400 243 | else: 244 | if not "msisdn" in data: 245 | return "missing msisdn", 400 246 | if not "msisdn_signature" in data: 247 | return "missing signature", 400 248 | 249 | try: 250 | mgf1ParameterSpec = ( 251 | data["mgf1ParameterSpec"] if "mgf1ParameterSpec" in data else "sha1" 252 | ) 253 | # mgf1ParameterSpec = 'sha1' 254 | 255 | hashingAlgorithm = ( 256 | data["hashingAlgorithm"] if "hashingAlgorithm" in data else "sha256" 257 | ) 258 | # hashingAlgorithm = 'sha256' 259 | 260 | decrypted_msisdn = rsa.SecurityRSA.decrypt( 261 | data["msisdn"], 262 | private_key_filepath=RSA_PR_KEY, 263 | mgf1ParameterSpec=mgf1ParameterSpec, 264 | 
hashingAlgorithm=hashingAlgorithm, 265 | ) 266 | 267 | app.logger.debug("%s", decrypted_msisdn) 268 | 269 | except Exception as error: 270 | app.logger.exception(error) 271 | return "error with decryption", 403 272 | else: 273 | user = users.find(msisdn_hash=decrypted_msisdn) 274 | 275 | if not user.shared_key: 276 | return "no shared key for user", 403 277 | 278 | if not user.public_key: 279 | return "no public key for user", 403 280 | 281 | try: 282 | rsa.SecurityRSA.sign( 283 | message=decrypted_msisdn, 284 | signature=base64.b64decode(data["msisdn_signature"]), 285 | public_key=user.public_key, 286 | ) 287 | except (ValueError, TypeError) as error: 288 | return "unknown signature request", 403 289 | except Exception as error: 290 | app.logger.exception(error) 291 | return "signing check error", 400 292 | else: 293 | user_shared_key = user.shared_key 294 | user_public_key = user.public_key 295 | 296 | # mgf1ParameterSpec = user.mgf1ParameterSpec 297 | # logging.debug("user mgf param: %s", mgf1ParameterSpec) 298 | 299 | hashingAlgorithm = user.hashingAlgorithm 300 | 301 | mgf1ParameterSpec = "sha1" 302 | encrypted_shared_key = rsa.SecurityRSA.encrypt_with_key( 303 | data=user_shared_key, 304 | public_key=user_public_key, 305 | mgf1ParameterSpec=mgf1ParameterSpec, 306 | hashingAlgorithm=hashingAlgorithm, 307 | ) 308 | 309 | encrypted_shared_key = base64.b64encode(encrypted_shared_key) 310 | logging.debug("encrypted_key: %s", encrypted_shared_key) 311 | 312 | return ( 313 | jsonify({"shared_key": encrypted_shared_key.decode("utf-8")}), 314 | 200, 315 | ) 316 | 317 | 318 | @app.route( 319 | "/v%s/sync/users//sessions//" % (__api_version_number), 320 | methods=["POST"], 321 | ) 322 | def get_users_platforms(user_id: str, session_id: str): 323 | """ """ 324 | global rmq_connection, rmq_channel 325 | 326 | user_id = bleach.clean(user_id) 327 | session_id = bleach.clean(session_id) 328 | 329 | try: 330 | data = json.loads(request.data, strict=False) 331 | except 
# Fix: the route was missing the <user_id>/<session_id> converters, so Flask
# could never bind the view arguments (TypeError -> 500 at request time).
@app.route(
    "/v%s/sync/users/<user_id>/sessions/<session_id>/" % (__api_version_number),
    methods=["POST"],
)
def get_users_platforms(user_id: str, session_id: str):
    """Exchange keys with the client and return their stored platforms.

    Decrypts the supplied password, resolves the user via the back-end
    publisher, stores a fresh shared key plus the client's public key,
    and returns the encrypted shared key with the user's platforms.
    """
    global rmq_connection, rmq_channel

    user_id = bleach.clean(user_id)
    # session_id is sanitized but otherwise unused beyond the URL match.
    session_id = bleach.clean(session_id)

    try:
        data = json.loads(request.data, strict=False)
    except Exception as error:
        logging.exception(error)
        return "poorly formed json", 400

    if "password" not in data:
        return "missing password", 400
    if "public_key" not in data:
        return "missing public key", 400

    # TODO: update the ios app to have mgf1ParameterSpec and the android
    # app to have hashingAlgorithm so these defaults become unnecessary.
    mgf1ParameterSpec = data.get("mgf1ParameterSpec", "sha1")
    hashingAlgorithm = data.get("hashingAlgorithm", "sha256")

    try:
        user_password = data["password"]
        user_public_key = data["public_key"]

        app.logger.debug("mgf1ParameterSpec: %s", mgf1ParameterSpec)
        app.logger.debug("hashingAlgorithm: %s", hashingAlgorithm)

        decrypted_password = rsa.SecurityRSA.decrypt(
            user_password,
            private_key_filepath=RSA_PR_KEY,
            mgf1ParameterSpec=mgf1ParameterSpec,
            hashingAlgorithm=hashingAlgorithm,
        )
    except Exception as error:
        app.logger.exception(error)
        return "error with decryption", 400

    try:
        user_msisdn_hash = BEPubLib.get_phone_number_hash_from_id(
            user_id=user_id, password=str(decrypted_password, "utf-8")
        )
    except (UserDoesNotExist, DuplicateUsersExist) as error:
        logging.exception(error)
        return "", 403

    user_msisdn_hash = user_msisdn_hash["phoneNumber_hash"]
    try:
        user = users.find(msisdn_hash=user_msisdn_hash)
    except Exception as error:
        app.logger.exception(error)
        return "", 403

    # Persist a fresh shared key together with the client's key material.
    user_shared_key = sync.generate_shared_key()

    user.id = user_id
    user.public_key = user_public_key
    user.msisdn_hash = user_msisdn_hash
    user.shared_key = user_shared_key
    user.mgf1ParameterSpec = mgf1ParameterSpec
    user.hashingAlgorithm = hashingAlgorithm

    try:
        users.commit(user)
    except Exception as error:
        logging.exception(error)
        return "", 500

    try:
        user_platforms = BEPubLib.get_user_platforms_from_id(user_id=user_id)

        # The client may specify a separate MGF1 spec for decrypting the
        # shared key we send back.
        mgf1ParameterSpec = data.get("mgf1ParameterSpec_dec", "sha1")
        encrypted_shared_key = rsa.SecurityRSA.encrypt_with_key(
            data=user_shared_key,
            public_key=user_public_key,
            mgf1ParameterSpec=mgf1ParameterSpec,
            hashingAlgorithm=hashingAlgorithm,
        )
    # TODO: customize exception just in case issue with encrypting for user
    except Exception as error:
        logging.exception(error)
        return "", 500

    b64_encoded_shared_key = base64.b64encode(encrypted_shared_key)

    # Best-effort: create the user's notification queue; failures are
    # logged but do not fail the request.
    try:
        if not publisher.not_active_connection(rmq_connection):
            rmq_connection, rmq_channel = publisher.init_rmq_connections()

        notifications.create_users_notifications(
            rmq_host=os.environ.get("RMQ_HOST"),
            channel=rmq_channel,
            queue_name=user_msisdn_hash,
            user_name=user_msisdn_hash,
            password=b64_encoded_shared_key.decode("utf-8"),
        )
    except Exception as error:
        logging.exception(error)

    return (
        jsonify(
            {
                "msisdn_hash": user_msisdn_hash,
                "shared_key": b64_encoded_shared_key.decode("utf-8"),
                "user_platforms": user_platforms,
            }
        ),
        200,
    )


# Fix: the route was missing the <platform> converter required to bind the
# view's `platform` argument.
@app.route("/sms/platform/<platform>", methods=["POST"])
@cross_origin(origins="*")
def incoming_sms_routing(platform):
    """Process an incoming SMS payload and publish it to RabbitMQ."""
    global rmq_connection, rmq_channel

    platform = bleach.clean(platform)

    data = request.data

    def forward_request(target_url, payload):
        # Best-effort mirror of the payload to the v3 endpoint.
        try:
            response = requests.post(target_url, json=payload, timeout=30)
            app.logger.info("Forwarded request response: %s", response.text)
        except requests.RequestException as e:
            app.logger.error("Request forwarding error: %s", str(e))

    target_url = url_for("v3.publish_relaysms_payload", _external=True)
    app.logger.info("Background request forwarding to: %s", target_url)

    thread = threading.Thread(target=forward_request, args=(target_url, request.json))
    thread.start()

    try:
        if process_test(data):
            return "published!", 200

        processed_data = process_data(data, BEPubLib, users)

        app.logger.debug("Encrypted data: %s", processed_data)

        if not publisher.not_active_connection(rmq_connection):
            rmq_connection, rmq_channel = publisher.init_rmq_connections()

        publisher.publish(channel=rmq_channel, data=processed_data)

        return "published!", 200

    except (
        DecryptError,
        UserNotFoundError,
        UserDoesNotExist,
        DuplicateUsersExist,
    ) as err:
        return str(err), 403

    except SharedKeyError as err:
        return str(err), 401

    except InvalidDataError as err:
        return str(err), 400

    except Exception as err:
        logging.exception(err)
        return "Internal Server Error", 500


def logging_after_request(response):
    """Trace-level logging hook for outgoing responses."""
    logging.debug(response.response)
    return response


app.after_request(logging_after_request)
3.0. If a copy of the GNU General 4 | Public License was not distributed with this file, see . 5 | """ 6 | 7 | import datetime 8 | from peewee import Model, CharField, DateTimeField, DecimalField, ForeignKeyField 9 | from src.db import connect 10 | 11 | database = connect() 12 | 13 | 14 | class GatewayClients(Model): 15 | """Model representing Gateway Clients.""" 16 | 17 | msisdn = CharField(primary_key=True) 18 | country = CharField() 19 | operator = CharField() 20 | operator_code = CharField() 21 | protocols = CharField() 22 | reliability = DecimalField(max_digits=5, decimal_places=2, default=0.00) 23 | last_published_date = DateTimeField(default=datetime.datetime.now) 24 | 25 | class Meta: 26 | """Meta class to define database connection.""" 27 | 28 | database = database 29 | table_name = "gateway_clients" 30 | 31 | 32 | class ReliabilityTests(Model): 33 | """Model representing Gateway Clients Reliability Tests.""" 34 | 35 | start_time = DateTimeField(default=datetime.datetime.now) 36 | sms_sent_time = DateTimeField(null=True) 37 | sms_received_time = DateTimeField(null=True) 38 | sms_routed_time = DateTimeField(null=True) 39 | status = CharField(default="pending") 40 | msisdn = ForeignKeyField( 41 | GatewayClients, column_name="msisdn", backref="reliability_tests" 42 | ) 43 | 44 | class Meta: 45 | """Meta class to define database connection.""" 46 | 47 | database = database 48 | table_name = "reliability_tests" 49 | 50 | 51 | database.create_tables([GatewayClients, ReliabilityTests], safe=True) 52 | -------------------------------------------------------------------------------- /src/notifications.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | import pika 4 | import logging 5 | import base64 6 | from src import rmq_broker 7 | 8 | default_queue_name = "notifications-smswithoutborders-" 9 | default_exchange_name = "notifications-smswithoutborders-exchange" 10 | 11 | def 
def decode_and_publish(payload, request_origin=None):
    """Decodes and publishes the RelaySMS payload based on the request origin.

    Args:
        payload (str or dict): The incoming request payload containing the data.
        request_origin (str): The origin of the request, either 'http', 'smtp' or 'ftp'.

    Returns:
        tuple: A message and an error (if any). Exactly one element is None.
    """

    if isinstance(payload, str):
        try:
            payload = json.loads(payload)
        except json.JSONDecodeError:
            return None, "Invalid JSON format"

    encoded_content = payload.get("text")
    # Payloads may carry the sender under either key depending on transport.
    sender_id = payload.get("MSISDN") or payload.get("address")
    date = payload.get("date")
    date_sent = payload.get("date_sent")

    if not encoded_content:
        return None, "Missing required field: text"
    if not sender_id:
        return None, "Missing required field: address or MSISDN"
    if not date:
        return None, "Missing required field: date"
    if not date_sent:
        return None, "Missing required field: date_sent"

    try:
        decoded_bytes = base64.b64decode(encoded_content)
    except (ValueError, TypeError):
        return None, "Invalid Base64-encoded payload"

    # Fix: a payload that decodes to zero bytes (e.g. whitespace-only text,
    # which b64decode silently skips) previously raised IndexError on
    # decoded_bytes[0] below.
    if not decoded_bytes:
        return None, "Invalid Base64-encoded payload"

    # A leading 0x00 marker byte designates a bridge payload.
    is_bridge_request = decoded_bytes[0] == 0

    if (
        is_bridge_request
        and request_origin == "http"
        and DISABLE_BRIDGE_PAYLOADS_OVER_HTTP
    ):
        logger.warning("✖ Bridge payloads over HTTP are disabled.")
        return None, "Bridge payloads over HTTP are restricted."

    if is_bridge_request:
        # Strip the marker byte and re-encode the remainder for the bridge.
        bridge_content = base64.b64encode(decoded_bytes[1:]).decode("utf-8")
        publish_response, publish_error = publish_bridge_content(
            content=bridge_content, phone_number=sender_id
        )
    else:
        publish_response, publish_error = publish_content(
            content=encoded_content,
            sender=sender_id,
            # Timestamps presumably arrive in milliseconds; downstream wants
            # seconds — TODO confirm with the sending clients.
            date=str(int(date) // 1000) if date else date,
            date_sent=str(int(date_sent) // 1000) if date_sent else date_sent,
        )

    if publish_error:
        logger.error("✖ gRPC error: %s", publish_error.code())
        return None, publish_error.details()

    if not publish_response.success:
        logger.error("✖ gRPC error: %s", publish_response.message)
        return None, publish_response.message

    logger.info(
        "✔ Payload published successfully from request origin: %s", request_origin
    )
    return (
        publish_response.message
        if is_bridge_request
        else publish_response.publisher_response
    ), None
def parse_json_data(data):
    """
    Parse JSON data.

    Args:
        data (str): JSON data to parse. strict=False permits control
            characters (tabs, newlines) inside string values, which the
            SMS transport can deliver.

    Returns:
        dict: Parsed JSON data.

    Raises:
        InvalidDataError: If JSON parsing fails.
    """
    try:
        return json.loads(data, strict=False)
    # ValueError covers json.JSONDecodeError; TypeError covers non-string
    # input. Previously this caught bare Exception and logged via the root
    # logger instead of this module's logger.
    except (ValueError, TypeError) as err:
        logger.error("Failed to parse JSON data: %s", err)
        raise InvalidDataError(
            "Invalid JSON data format. Please check your input."
        ) from err
89 | """ 90 | try: 91 | encrypted_bytes = base64.b64decode(encrypted_text) 92 | iv = encrypted_bytes[:16] 93 | ciphertext = encrypted_bytes[16:] 94 | 95 | if encoding_type == "base64": 96 | ciphertext = base64.b64decode(ciphertext) 97 | 98 | decrypted_text = aes.AESCipher.decrypt( 99 | data=ciphertext, iv=iv, shared_key=shared_key 100 | ) 101 | return str(decrypted_text, "utf-8") 102 | except Exception as err: 103 | logger.error( 104 | "Failed to decrypt the text%s", 105 | " using " + encoding_type if encoding_type else "", 106 | ) 107 | raise DecryptError("Failed to decrypt the text") from err 108 | 109 | 110 | def process_data(data, be_pub_lib, users): 111 | """ 112 | Process incoming data. 113 | 114 | Args: 115 | data (str): Incoming data in JSON format. 116 | be_pub_lib: Backend Publishing library. 117 | users: User database. 118 | 119 | Returns: 120 | str: Processed and encrypted data. 121 | 122 | Raises: 123 | Exception: If any error occurs during processing. 124 | """ 125 | try: 126 | data = parse_json_data(data) 127 | validate_data(data) 128 | 129 | user_msisdn = data.get("MSISDN") or data.get("address") 130 | user_msisdn_hash = be_pub_lib.hasher(data=user_msisdn) 131 | user = users.find(msisdn_hash=user_msisdn_hash) 132 | 133 | if not user: 134 | logger.error("User not found: %s", user_msisdn_hash) 135 | raise UserNotFoundError("User not found") 136 | 137 | shared_key = user.shared_key 138 | 139 | if not shared_key: 140 | logging.error("no shared key for user, strange") 141 | raise SharedKeyError("Shared key error") 142 | 143 | decrypted_text = decrypt_text(data["text"], shared_key, "base64") 144 | 145 | platform_letter = decrypted_text[0] 146 | platform_name = be_pub_lib.get_platform_name_from_letter( 147 | platform_letter=platform_letter 148 | )["platform_name"] 149 | 150 | data = be_pub_lib.get_grant_from_platform_name( 151 | phone_number=user_msisdn, platform_name=platform_name 152 | ) 153 | data["data"] = decrypted_text 154 | data["platform_name"] = 
platform_name 155 | 156 | shared_key = os.environ["PUBLISHER_ENCRYPTION_KEY"][:32] 157 | 158 | # Padding just in case shorter than required key size 159 | if len(shared_key) < 32: 160 | shared_key += "0" * (32 - len(shared_key)) 161 | 162 | data = json.dumps(data).encode("utf-8") 163 | data = aes.AESCipher.encrypt(shared_key=shared_key, data=data) 164 | data = base64.b64encode(data) 165 | 166 | return str(data, "utf-8") 167 | 168 | except Exception as error: 169 | raise error 170 | 171 | 172 | def process_test(data): 173 | """ 174 | Process incoming test data. 175 | 176 | Args: 177 | data (str): Incoming data in JSON format. 178 | 179 | Returns: 180 | bool: True if successful, False otherwise. 181 | 182 | Raises: 183 | Exception: If any error occurs during processing. 184 | """ 185 | try: 186 | data = parse_json_data(data) 187 | validate_data(data) 188 | 189 | with open(SHARED_KEY_FILE, "r", encoding="utf-8") as f: 190 | encryption_key = f.readline().strip()[:32] 191 | 192 | plaintext = decrypt_text(data["text"], encryption_key) 193 | decrypted_test_data = parse_json_data(plaintext) 194 | 195 | test_id = decrypted_test_data.get("test_id") 196 | test_msisdn = decrypted_test_data.get("msisdn") 197 | 198 | if not test_id or not test_msisdn: 199 | logger.error("Test data is incomplete.") 200 | return False 201 | 202 | reliability_tests.update_timed_out_tests_status() 203 | 204 | date_sent = int(data["date_sent"]) / 1000 205 | date = int(data["date"]) / 1000 206 | 207 | fields = { 208 | "status": "success", 209 | "sms_routed_time": datetime.now(), 210 | "sms_sent_time": datetime.fromtimestamp(date_sent), 211 | "sms_received_time": datetime.fromtimestamp(date), 212 | } 213 | criteria = { 214 | "sms_routed_time": "is_null", 215 | "msisdn": test_msisdn, 216 | "status": "running", 217 | } 218 | updated_tests = reliability_tests.update_test_for_client( 219 | test_id, fields, criteria 220 | ) 221 | 222 | if updated_tests == 0: 223 | logger.error("No running test record found 
for MSISDN %s.", test_msisdn) 224 | return False 225 | 226 | reliability_score = reliability_tests.calculate_reliability_score_for_client( 227 | test_msisdn 228 | ) 229 | gateway_clients.update_by_msisdn( 230 | test_msisdn, {"reliability": reliability_score} 231 | ) 232 | 233 | return True 234 | 235 | except DecryptError: 236 | logger.info("Skipping test check ...") 237 | return False 238 | except Exception as error: 239 | logger.error("An error occurred during test data processing: %s", error) 240 | return False 241 | -------------------------------------------------------------------------------- /src/publisher.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | import os 4 | import json 5 | import pika 6 | import logging 7 | 8 | from src import rmq_broker 9 | 10 | def init_rmq_connections(): 11 | """ 12 | TODO: transform all env to args 13 | """ 14 | try: 15 | logging.debug("RMQ host: %s", os.environ.get("RMQ_HOST")) 16 | 17 | host = os.environ.get("RMQ_HOST") \ 18 | if os.environ.get("RMQ_HOST") else "127.0.0.1" 19 | 20 | tls_rmq = True \ 21 | if os.environ.get("RMQ_SSL") and os.environ.get("RMQ_SSL") == "true" else False 22 | 23 | logging.debug("ENV TLS RMQ: %s", os.environ.get("RMQ_SSL")) 24 | 25 | logging.debug("TLS RMQ: %s", tls_rmq) 26 | 27 | logging.debug("RMQ DEFAULT USER: %s", os.environ.get("RABBITMQ_DEFAULT_USER")) 28 | 29 | rmq_connection: pika.BlockingConnection = rmq_broker.get_rmq_connection( 30 | user=os.environ.get("RABBITMQ_DEFAULT_USER"), 31 | password=os.environ.get("RABBITMQ_DEFAULT_PASS"), 32 | ssl_crt = os.environ.get("SSL_CERTIFICATE"), 33 | ssl_key=os.environ.get("SSL_KEY"), 34 | ssl_pem=os.environ.get("SSL_PEM"), 35 | tls_rmq=tls_rmq, 36 | ca_ssl_host=os.environ.get("HOST"), 37 | host=host) 38 | 39 | except Exception as error: 40 | raise error 41 | 42 | else: 43 | channel = rmq_broker.create_rmq_channel(connection=rmq_connection) 44 | 
rmq_broker.create_rmq_exchange(channel=channel) 45 | 46 | return rmq_connection, channel 47 | 48 | return None, None 49 | 50 | 51 | def publish(channel: pika.channel.Channel, data: str) -> None: 52 | """ 53 | """ 54 | try: 55 | channel.basic_publish( 56 | exchange=rmq_broker.default_exchange_name, 57 | routing_key=rmq_broker.default_routing_key, 58 | body=data, 59 | properties=pika.BasicProperties( 60 | delivery_mode=2, # make message persistent 61 | ), 62 | ) 63 | except Exception as error: 64 | raise error 65 | 66 | # def not_active_connection(channel: pika.channel.Channel) -> bool: 67 | def not_active_connection(connection: pika.BlockingConnection) -> bool: 68 | """ 69 | TODO: 70 | - Check if channel is closed 71 | """ 72 | # connection: pika.BlockingConnection = channel.connection 73 | return connection.is_closed 74 | 75 | -------------------------------------------------------------------------------- /src/reliability_tests.py: -------------------------------------------------------------------------------- 1 | """Reliability Tests Controllers""" 2 | 3 | import logging 4 | import datetime 5 | 6 | from playhouse.shortcuts import model_to_dict 7 | from peewee import DoesNotExist, fn, Case, DQ 8 | 9 | from src.models import ReliabilityTests 10 | from src import gateway_clients 11 | 12 | logger = logging.getLogger(__name__) 13 | 14 | # pylint: disable=W0718,E1101,W0212 15 | 16 | database = ReliabilityTests._meta.database 17 | 18 | 19 | class PreCommitError(Exception): 20 | """Custom exception for pre-commit function failures.""" 21 | 22 | 23 | def get_all( 24 | filters: dict = None, 25 | page: int = None, 26 | per_page: int = None, 27 | order_desc: bool = True, 28 | ) -> dict: 29 | """Get all reliability tests according to filters and pagination.""" 30 | results = [] 31 | dq_filters = [] 32 | 33 | if filters: 34 | for key, value in filters.items(): 35 | if value is not None: 36 | dq_filters.append(DQ(**{key: value})) 37 | 38 | with database.atomic(): 39 | query = 
def get_tests_for_client(
    msisdn: str,
    filters: dict = None,
    page: int = None,
    per_page: int = None,
) -> tuple:
    """Fetch reliability tests that belong to a single gateway client.

    Args:
        msisdn (str): The MSISDN identifying the gateway client.
        filters (dict, optional): Extra filtering criteria; the client's
            MSISDN is always enforced on top of these.
        page (int, optional): Page number for pagination.
        per_page (int, optional): Number of records per page.

    Returns:
        tuple: (records, total_count) where records is a list of dicts
            with test data, or (None, None) when no gateway client exists
            for the given MSISDN.
    """
    if not gateway_clients.get_by_msisdn(msisdn):
        return None, None

    criteria = filters or {}
    criteria["msisdn"] = msisdn

    outcome = get_all(criteria, page, per_page)

    return outcome["data"], outcome["total_records"]
def create_test_for_client(
    msisdn: str, status: str, pre_commit_funcs: list = None
) -> dict:
    """Create a reliability test for a specific gateway client.

    Args:
        msisdn (str): The MSISDN of the client.
        status (str): The test status.
        pre_commit_funcs (list, optional): A list of tuples where each tuple
            contains a function and its arguments to execute before committing
            the transaction.

    Returns:
        dict: A dictionary with test data. Returns None if the same test
            already exists.

    Example:
        # Define pre-commit functions
        def example_pre_commit_func(test_data, arg1, arg2):
            logger.info("Using test data: %s", test_data)

            # Example of rolling back transaction based on a condition
            if some_condition:
                return None

            # Otherwise, proceed with the transaction
            logger.info("Pre-commit executed with args: %s, %s", arg1, arg2)

        def another_pre_commit_func(prev_return, arg3):
            logger.info("Using previous return value: %s", prev_return)

            # Example of additional operations before committing
            logger.info("Another pre-commit executed with arg: %s", arg3)

        # Define arguments for pre-commit functions
        arg1 = "value1"
        arg2 = "value2"
        arg3 = "value3"

        # Define pre-commit functions with arguments
        pre_commit_funcs = [
            (example_pre_commit_func, (arg1, arg2)),
            (another_pre_commit_func, (arg3,))
        ]

        # Call create_test_for_client with pre-commit functions
        create_test_for_client("1234567890", "running", pre_commit_funcs)
    """
    # Duplicate guard: never create a second test for the same MSISDN in
    # the same status.
    existing_test, _ = get_tests_for_client(msisdn, filters={"status": status})

    if existing_test:
        logger.error(
            "Test not created for MSISDN: %s with status: %s, as it already exists",
            msisdn,
            status,
        )
        return None

    with database.atomic() as transaction:
        try:
            new_test = ReliabilityTests.create(msisdn=msisdn, status=status)
            # recurse=False: serialize the row without following the
            # foreign key back to the gateway client.
            new_test_data = model_to_dict(new_test, False)
            if pre_commit_funcs:
                # Each pre-commit function receives the previous function's
                # return value as its first argument (the new test data for
                # the first one). A None return aborts the whole creation.
                prev_return = new_test_data
                for func, args in pre_commit_funcs:
                    args = (prev_return,) + args
                    prev_return = func(*args)

                    if prev_return is None:
                        raise PreCommitError(
                            f"Pre-commit function '{func.__name__}' failed"
                        )

            logger.info("Test created for MSISDN: %s with status: %s", msisdn, status)
            return new_test_data

        except PreCommitError as e:
            # A pre-commit hook vetoed the creation: undo the insert.
            transaction.rollback()
            logger.error(str(e))
            return None

        except Exception:
            # Any other failure also rolls back; log with traceback.
            transaction.rollback()
            logger.error(
                "Failed to create test for MSISDN: %s with status: %s",
                msisdn,
                status,
                exc_info=True,
            )
            return None
def update_test_for_client(test_id: int, fields: dict, criteria: dict = None) -> int:
    """
    Update a reliability test with specified fields based on given criteria.

    Args:
        test_id (int): The ID of the test to be updated.
        fields (dict): A dictionary containing the fields and their new
            values to update.
        criteria (dict, optional): A dictionary containing filtering criteria
            to identify the test to update. A value of "is_null" asserts the
            column is NULL. Defaults to None.

    Returns:
        int: The number of rows updated in the database.
    """
    try:
        with database.atomic():
            query = ReliabilityTests.update(**fields).where(
                ReliabilityTests.id == test_id
            )

            # Fix: work on a copy so the caller's dict is never mutated
            # (the old code deleted 'id' from the caller's criteria).
            # 'id' is dropped because the target row is already pinned by
            # test_id above.
            criteria = dict(criteria) if criteria else {}
            criteria.pop("id", None)

            for key, value in criteria.items():
                if value == "is_null":
                    query = query.where(getattr(ReliabilityTests, key).is_null())
                else:
                    query = query.where(getattr(ReliabilityTests, key) == value)

            updated_count = query.execute()
            logger.info("Updated %d rows for test with ID '%d'", updated_count, test_id)
            return updated_count
    except DoesNotExist:
        # Kept defensively; an UPDATE normally just returns 0 rows.
        logger.error("Test with ID '%d' does not exist.", test_id)
        return 0
def calculate_reliability_score_for_client(msisdn: str) -> float:
    """
    Calculate the reliability score for a gateway client based on successful SMS routing.

    Args:
        msisdn (str): The MSISDN of the client.

    Returns:
        float: Reliability percentage rounded to two decimal places.

    Notes:
        The score is the share of this client's tests that both finished
        with a 'success' status and were routed within 180 seconds
        (3 minutes) of being received by the system, expressed as a
        percentage of all tests recorded for the client.
    """
    client_tests = ReliabilityTests.select().where(ReliabilityTests.msisdn == msisdn)
    total_tests = client_tests.count()

    if not total_tests:
        return 0.0

    routing_delay = (
        ReliabilityTests.sms_routed_time.to_timestamp()
        - ReliabilityTests.sms_received_time.to_timestamp()
    )

    successful_tests = (
        ReliabilityTests.select()
        .where(
            ReliabilityTests.msisdn == msisdn,
            ReliabilityTests.status == "success",
            (~ReliabilityTests.sms_routed_time.is_null()),
            (routing_delay <= 180),
        )
        .count()
    )

    return round((successful_tests / total_tests) * 100, 2)
def add_user(user_name: str, password: str,
        rmq_host: str='127.0.0.1', rmq_port: str='15672') -> None:
    """Provision a RabbitMQ user through the management HTTP API.

    Creates (or updates) the user with the 'monitoring' tag, then grants
    it configure/write/read permissions on the default vhost ('%2F').

    Args:
        user_name (str): Name of the RabbitMQ user to create.
        password (str): Password for the new user.
        rmq_host (str): Hostname of the RabbitMQ management API.
        rmq_port (str): Port of the RabbitMQ management API.

    Raises:
        requests.HTTPError: If either API call is rejected.
    """
    # Both calls authenticate as the broker's admin account.
    admin_auth = (os.environ.get("RABBITMQ_DEFAULT_USER"),
                  os.environ.get("RABBITMQ_DEFAULT_PASS"))

    add_user_url = f"http://{rmq_host}:{rmq_port}/api/users/{user_name}"

    add_user_data = {
        "password": password,
        "tags": "monitoring"
    }

    add_user_response = requests.put(url=add_user_url, json=add_user_data,
                                     auth=admin_auth)

    if add_user_response.status_code not in [201, 204]:
        logging.error("Failed to add new user")
        add_user_response.raise_for_status()
        return

    logging.debug("[*] New user added")
    logging.debug("[*] User tag set")

    set_permissions_url = f"http://{rmq_host}:{rmq_port}/api/permissions/%2F/{user_name}"

    # NOTE: permissions are deliberately wide open. A stricter per-user
    # alternative, if isolation is ever required, is the pattern
    # "^({default_exchange_name}|{user_name}_.*)$" for all three scopes.
    # (The old values were spurious f-strings with no placeholders.)
    set_permissions_data = {
        "configure": ".*",
        "write": ".*",
        "read": ".*"
    }

    set_permissions_response = requests.put(url=set_permissions_url,
                                            json=set_permissions_data,
                                            auth=admin_auth)

    if set_permissions_response.status_code not in [201, 204]:
        logging.error("Failed to set user privilege")
        set_permissions_response.raise_for_status()
        return

    logging.debug("[*] User privilege set")
| """ 88 | if not queue_name: 89 | queue_name = default_queue_name 90 | 91 | if not exchange_name: 92 | exchange_name = default_exchange_name 93 | 94 | if not routing_key: 95 | routing_key = default_routing_key 96 | 97 | channel.queue_declare(queue_name, durable=durable) 98 | channel.queue_bind( 99 | queue=queue_name, 100 | exchange=exchange_name, 101 | routing_key=routing_key) 102 | 103 | logging.debug("queue created successfully") 104 | 105 | def create_rmq_channel(connection: pika.BlockingConnection) -> pika.channel.Channel: 106 | """ 107 | """ 108 | channel = connection.channel() 109 | logging.debug("channel creates successfully") 110 | 111 | create_queue(channel=channel) 112 | return channel 113 | 114 | def create_rmq_exchange( 115 | channel: pika.channel.Channel, 116 | exchange_name: str=None, 117 | exchange_type: str="topic") -> None: 118 | """ 119 | """ 120 | if not exchange_name: 121 | exchange_name = default_exchange_name 122 | 123 | channel.exchange_declare( 124 | exchange=exchange_name, 125 | exchange_type=exchange_type, 126 | durable=True) 127 | 128 | def get_rmq_connection( 129 | user: str=None, 130 | password: str=None, 131 | ssl_crt: str=None, 132 | ssl_key: str=None, 133 | ssl_pem: str=None, 134 | tls_rmq: bool=False, 135 | connection_name: str=default_connection_name, 136 | heartbeat: int = 30, 137 | blocked_connection_timeout: int=300, 138 | host: str='127.0.0.1', 139 | ca_ssl_host: str ='localhost', 140 | ssl_port: str="5671", 141 | port: str="5672") -> pika.BlockingConnection: 142 | """ 143 | - If using docker-compose network, unless certificate signed with 144 | service name it will fail to verify certificates. 145 | 146 | - If connecting to external host set: tls_rmq = True - would allow 147 | for using SSL. 
148 | """ 149 | client_properties = {'connection_name' : connection_name} 150 | 151 | credentials=pika.PlainCredentials(user, password) 152 | 153 | conn_params = None 154 | 155 | if(ssl_crt and ssl_key and ssl_pem and tls_rmq): 156 | logging.debug("Connectin securely to %s", host) 157 | 158 | # ssl_context = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER) 159 | # ssl_context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT) 160 | 161 | ssl_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1) 162 | 163 | ssl_context = ssl.create_default_context() 164 | ssl_context.load_cert_chain(certfile=ssl_crt, 165 | keyfile=ssl_key) 166 | ssl_context.load_verify_locations(ssl_pem) 167 | 168 | ssl_options = pika.SSLOptions(ssl_context, ca_ssl_host) 169 | conn_params = pika.ConnectionParameters( 170 | host=host, 171 | port=ssl_port, 172 | ssl_options=ssl_options, 173 | heartbeat=heartbeat, 174 | credentials=credentials, 175 | blocked_connection_timeout=blocked_connection_timeout, 176 | client_properties=client_properties) 177 | else: 178 | conn_params = pika.ConnectionParameters( 179 | host=host, 180 | port=port, 181 | heartbeat=heartbeat, 182 | blocked_connection_timeout=blocked_connection_timeout, 183 | credentials=credentials, 184 | client_properties=client_properties) 185 | 186 | connection = pika.BlockingConnection(conn_params) 187 | 188 | return connection 189 | -------------------------------------------------------------------------------- /src/router.py: -------------------------------------------------------------------------------- 1 | 2 | def sms_incoming(platform): 3 | """Receive inbound messages from Webhooks. 4 | Given that this URL is unique, only seeders can have the required key to route to them 5 | TODO: 6 | - Add platform security with secret keys at url levels 7 | """ 8 | 9 | if not platform: 10 | return 'no platform provided', 500 11 | app.logger.debug('incoming sms for platform %s', platform) 12 | 13 | if platform == 'twilio': 14 | """Receives Form Data. 
15 | """ 16 | From = request.values.get('From', None) 17 | To = request.values.get('To', None) 18 | FromCountry = request.values.get('FromCountry', None) 19 | NumSegments = request.values.get('NumSegments', None) 20 | Body = request.values.get('Body', None) 21 | 22 | app.logger.debug("From: %s", From) 23 | app.logger.debug("Body: %s", Body) 24 | 25 | else: 26 | """Receives JSON Data. 27 | """ 28 | try: 29 | data = json.loads(request.data) 30 | except Exception as error: 31 | logging.exception(error) 32 | return 'invalid data type, json expected', 500 33 | else: 34 | if not 'text' in data: 35 | return 'missing key - text', 400 36 | 37 | if not 'MSISDN' in data: 38 | return 'missing key - MSISDN', 400 39 | 40 | Body = data['text'] 41 | MSISDN = data['MSISDN'] 42 | 43 | app.logger.debug("MSISDN: %s", MSISDN) 44 | app.logger.debug("Body: %s", Body) 45 | 46 | try: 47 | decrypted_message = process_publisher(MSISDN=MSISDN, body=Body) 48 | app.logger.debug("Decrypted message: %s", decrypted_message) 49 | 50 | if decrypted_message is None: 51 | return 'message cannot be published', 200 52 | else: 53 | try: 54 | publish(MSISDN=MSISDN, message=decrypted_message) 55 | except Exception as error: 56 | logging.exception(error) 57 | raise error 58 | else: 59 | return 'message published successfully', 200 60 | except Exception as error: 61 | logging.exception(error) 62 | return '', 500 63 | return 'cannot process request', 400 64 | 65 | def publish(MSISDN: str, message: bytes) -> None: 66 | """ 67 | bytes required because that will keep using this endpoint intentional. 
68 | """ 69 | publisher_endpoint = __gateway_confs['publisher']['endpoint'] 70 | publisher_port = int(__gateway_confs['publisher']['port']) 71 | publisher_url = "http://localhost:%d%s" % (publisher_port, publisher_endpoint) 72 | logging.debug("publishing to: %s", publisher_url) 73 | 74 | request = requests.Session() 75 | response = request.post( 76 | publisher_url, 77 | json={"MSISDN": MSISDN, "message": str(message, 'utf-8')}) 78 | 79 | response.raise_for_status() 80 | 81 | 82 | return True, request 83 | 84 | def process_publisher(MSISDN: str, body: str) -> str: 85 | """ 86 | """ 87 | 88 | # TODO: sanitize Body and MSISDN 89 | try: 90 | iv, encrypted_message = gateway_server.process_message_for_publishing( 91 | message=body) 92 | except base64.binascii.Error as error: 93 | app.logger.exception(error) 94 | except Exception as error: 95 | app.logger.exception(error) 96 | return '', 500 97 | else: 98 | app.logger.debug("iv: %s", iv) 99 | app.logger.debug("encrypted_message: %s", encrypted_message) 100 | 101 | try: 102 | user_id = user_management_api_get_user_id(MSISDN=MSISDN) 103 | except requests.exceptions.HTTPError as error: 104 | app.logger.debug("Not an app user") 105 | raise error 106 | except Exception as error: 107 | raise error 108 | else: 109 | app.logger.debug("User ID: %s", user_id) 110 | 111 | user = Users(user_id) 112 | shared_key = user.get_shared_key() 113 | shared_key = shared_key[0][0] 114 | app.logger.debug("Shared key: %s", shared_key) 115 | 116 | try: 117 | decrypted_message = gateway_server.decrypt_message( 118 | iv=iv, shared_key=shared_key, message=encrypted_message) 119 | except Exception as error: 120 | app.logger.exception(error) 121 | return '', 500 122 | else: 123 | return decrypted_message 124 | 125 | return False 126 | 127 | 128 | def user_management_api_get_user_id(MSISDN: str) -> str: 129 | """ 130 | """ 131 | auth_id=__gateway_confs['dev_api']['auth_id'] 132 | auth_key=__gateway_confs['dev_api']['auth_key'] 133 | try: 134 | state, 
request = dev_backend_authenticate_user( 135 | auth_id=auth_id, auth_key=auth_key) 136 | except Exception as error: 137 | raise error 138 | else: 139 | app.logger.debug("%s %s", state, request) 140 | try: 141 | api_response = user_management_api_request_user_id( 142 | request=request, MSISDN=MSISDN) 143 | except Exception as error: 144 | raise error 145 | else: 146 | """ 147 | """ 148 | user_id = api_response['user_id'] 149 | return user_id 150 | 151 | 152 | def user_management_api_request_user_id( 153 | request: requests.Session, MSISDN: str) -> dict: 154 | """Request for the user's tokens. 155 | 156 | Args: 157 | Request (requests.Session): authenticated sessions from dev BE. 158 | 159 | MSISDN (str): phone number of the user token is requested for. 160 | 161 | Returns: 162 | json_response (dict) 163 | """ 164 | 165 | backend_publisher_endpoint = __gateway_confs['backend_publisher']['endpoint'] 166 | backend_publisher_port = int(__gateway_confs['backend_publisher']['port']) 167 | backend_publisher_api_decrypted_tokens_request_url = "http://localhost:%d%s" % ( 168 | backend_publisher_port, backend_publisher_endpoint) 169 | 170 | response = request.post( 171 | backend_publisher_api_decrypted_tokens_request_url, 172 | json={"phone_number": MSISDN}, cookies=request.cookies.get_dict()) 173 | 174 | response.raise_for_status() 175 | 176 | return response.json() 177 | 178 | def dev_backend_authenticate_user(auth_id: str, auth_key: str) -> tuple: 179 | """ 180 | """ 181 | dev_backend_api_auth_url = __gateway_confs['dev_api']['authentication_url'] 182 | logging.debug("dev_backed_api_auth_url: %s", dev_backend_authenticate_user) 183 | 184 | request = requests.Session() 185 | response = request.post( 186 | dev_backend_api_auth_url, 187 | json={"auth_key": auth_key, "auth_id": auth_id}) 188 | 189 | response.raise_for_status() 190 | 191 | 192 | return True, request 193 | -------------------------------------------------------------------------------- /src/rsa.py: 
#!/usr/bin/env python3
"""RSA helpers: keypair generation, OAEP encryption/decryption, PSS verification."""

from Crypto.PublicKey import RSA
from Crypto.Cipher import AES, PKCS1_OAEP
# BUG FIX: hashes and pss previously came from Cryptodome.* while the rest
# came from Crypto.*, requiring BOTH pycryptodome and pycryptodomex to be
# installed. Unified on Crypto.* (pycryptodome) — confirm the deployment
# installs pycryptodome, not pycryptodomex.
from Crypto.Hash import SHA512, SHA256, SHA1
from Crypto.Signature import pss
from Crypto import Random

import base64


class SecurityRSA:
    """RSA-OAEP encrypt/decrypt and RSA-PSS signature verification helpers."""

    def generate_keypair(self, keysize: int = 2048) -> tuple:
        """Generate an RSA keypair, store it on the instance and return it.

        Args:
            keysize (int): RSA modulus size in bits (default 2048).

        Returns:
            tuple: (public_key (bytes), private_key (bytes)), PEM-encoded.
        """
        key = RSA.generate(keysize)
        self.private_key = key.export_key()
        self.public_key = key.publickey().exportKey()
        return self.public_key, self.private_key

    @staticmethod
    def generate_keypair_write(
            private_key_filepath: str = "private.pem",
            public_key_filepath: str = "public.pem",
            keysize: int = 2048) -> tuple:
        """Generate a keypair and write both halves to PEM files.

        Args:
            private_key_filepath (str): where the private key is written.
            public_key_filepath (str): where the public key is written.
            keysize (int): RSA modulus size in bits (default 2048).

        Returns:
            tuple: (public_key (bytes), private_key (bytes)).
        """
        securityRSA = SecurityRSA()
        # BUG FIX: keysize was previously accepted but never passed through,
        # so a caller asking for e.g. 4096 silently got 2048.
        public_key, private_key = securityRSA.generate_keypair(keysize)

        # Context managers guarantee the files are closed even on write errors.
        with open(private_key_filepath, "wb") as file_out:
            file_out.write(private_key)
        with open(public_key_filepath, "wb") as file_out:
            file_out.write(public_key)

        return public_key, private_key

    @staticmethod
    def encrypt_with_key(data: str, public_key: str, mgf1ParameterSpec: str = 'sha1',
                         hashingAlgorithm: str = 'sha256') -> bytes:
        """Encrypt `data` with a PEM public key using RSA-OAEP.

        Args:
            data (str): plaintext; UTF-8 encoded before encryption.
            public_key (str): PEM-encoded public key.
            mgf1ParameterSpec (str): 'sha1' for SHA1 MGF1, anything else -> SHA256.
            hashingAlgorithm (str): 'sha1' for SHA1 OAEP hash, anything else -> SHA256.

        Returns:
            bytes: the ciphertext.
        """
        mgf1_hash = SHA1 if mgf1ParameterSpec == 'sha1' else SHA256
        oaep_hash = SHA1 if hashingAlgorithm == 'sha1' else SHA256

        cipher = PKCS1_OAEP.new(
            key=RSA.importKey(public_key),
            hashAlgo=oaep_hash.new(),
            mgfunc=lambda x, y: pss.MGF1(x, y, mgf1_hash))

        return cipher.encrypt(bytes(data, 'utf-8'))

    @staticmethod
    def decrypt(data: str,
                private_key_filepath: str = "private.pem",
                mgf1ParameterSpec: str = 'sha1',
                hashingAlgorithm: str = 'sha256') -> bytes:
        """Base64-decode `data` and decrypt it with the private key on disk.

        Args:
            data (str): Base64-encoded ciphertext.
            private_key_filepath (str): path to the PEM private key.
            mgf1ParameterSpec (str): 'sha1' for SHA1 MGF1, anything else -> SHA256.
            hashingAlgorithm (str): 'sha1' for SHA1 OAEP hash, anything else -> SHA256.

        Returns:
            bytes: the plaintext.
        """
        with open(private_key_filepath) as fd:
            private_key = RSA.import_key(fd.read())

        mgf1_hash = SHA1 if mgf1ParameterSpec == 'sha1' else SHA256
        oaep_hash = SHA1 if hashingAlgorithm == 'sha1' else SHA256

        cipher = PKCS1_OAEP.new(
            key=private_key,
            hashAlgo=oaep_hash.new(),
            mgfunc=lambda x, y: pss.MGF1(x, y, mgf1_hash))

        return cipher.decrypt(base64.b64decode(data))

    def _decrypt(self, data: bytes) -> bytes:
        """Decrypt `data` with this instance's private key (OAEP-SHA256, MGF1-SHA1)."""
        cipher = PKCS1_OAEP.new(
            key=RSA.importKey(self.private_key),
            hashAlgo=SHA256.new(), mgfunc=lambda x, y: pss.MGF1(x, y, SHA1))
        return cipher.decrypt(data)

    def _encrypt(self, data: str) -> bytes:
        """Encrypt `data` with this instance's public key (OAEP-SHA256, MGF1-SHA1).

        (Docstring previously said "Decrypt"; this method encrypts.)
        """
        cipher = PKCS1_OAEP.new(
            key=RSA.importKey(self.public_key),
            hashAlgo=SHA256.new(), mgfunc=lambda x, y: pss.MGF1(x, y, SHA1))
        return cipher.encrypt(bytes(data, 'utf-8'))

    @staticmethod
    def sign(message: str, signature: str, public_key: str) -> bool:
        """Verify a SHA512 RSA-PSS signature.

        NOTE(review): despite the name, this VERIFIES a signature — it never
        signs. Name kept for caller compatibility.

        Raises:
            ValueError: if the signature does not match (pss.verify raises
            rather than returning False).
        """
        key = RSA.importKey(public_key)
        h = SHA512.new(message)
        verifier = pss.new(key)
        return verifier.verify(h, signature)
17 | """ 18 | 19 | return secrets.token_hex(nbytes=keysize) 20 | 21 | def tls_available() -> bool: 22 | """ 23 | """ 24 | if not os.environ.get("SSL_KEY") or not os.environ.get("SSL_CERTIFICATE"): 25 | return False 26 | 27 | return True 28 | 29 | def get_sockets_sessions_url(user_id: str, host: str, port: str) -> str: 30 | """ 31 | """ 32 | try: 33 | # user = Users(user_id) 34 | user = None 35 | except Exception as error: 36 | raise error 37 | else: 38 | try: 39 | websocket_protocol = "ws" if not tls_available() else "wss" 40 | synchronization_initialization_url = "%s://%s:%s/v%s/sync/init/%s" % ( 41 | websocket_protocol, 42 | host, 43 | port, 44 | __api_version_number, 45 | user_id) 46 | 47 | return synchronization_initialization_url 48 | 49 | except Exception as error: 50 | # logging.exception(error) 51 | raise error 52 | 53 | 54 | def sessions_public_key_exchange(user_id, session_id): 55 | """Generates a shared for the user attached to this session. 56 | Args: 57 | user_id (str): User ID provided when the user logs in 58 | session_id (str): Unique ID as has been provided by the websocket connections. The use of this is to keep 59 | the user safe; changing the QR code during generated stops using expired QR codes during the sync process. 
60 | 61 | Returns: {}, int 62 | 63 | TODO: 64 | - Extract public key from body 65 | - Store public_key key against session 66 | - return own public key and user_id 67 | """ 68 | 69 | try: 70 | # data = json.loads(request.data, strict=False) 71 | data = None 72 | except Exception as error: 73 | raise error 74 | else: 75 | """ 76 | if not 'public_key' in data: 77 | return 'missing public key', 400 78 | 79 | app.logger.debug("Requesting __PAUSE__") 80 | gateway_server.websocket_message(message='__PAUSE__', 81 | user_id = user_id, session_id = session_id) 82 | 83 | user_public_key = data['public_key'] 84 | # TODO: validate is valid public key 85 | 86 | with open(gateway_server.public_key_filepath, 'r') as public_key_fd: 87 | gateway_server_public_key = public_key_fd.read() 88 | """ 89 | 90 | try: 91 | # user = Users(user_id) 92 | user = None 93 | except Exception as error: 94 | logging.exception(error) 95 | else: 96 | try: 97 | """ 98 | TODO: 99 | - Check for other criterias here, for example - 100 | - does session already have a public key? 101 | """ 102 | verification_url = '/v%s/sync/users/%s/sessions/%s' % \ 103 | (__api_version_number, user_id, session_id) 104 | 105 | """ 106 | if user.update_public_key( 107 | session_id = session_id, public_key=user_public_key) > 0: 108 | 109 | return jsonify( 110 | { "verification_url": verification_url 111 | }), 200 112 | """ 113 | 114 | return verification_url 115 | 116 | except Exception as error: 117 | raise error 118 | 119 | 120 | def sessions_user_update(user_id, session_id): 121 | """Updates the current session for user. 122 | Uses users ID and session ID to update current user's session on the users record DB. 
123 | 124 | Args: 125 | user_id (str): User ID provided when the user logs in 126 | session_id (str): Unique ID as has been provided by the websocket connections 127 | 128 | Returns: str, int 129 | 130 | TODO: 131 | """ 132 | # logging.debug("updating user session from - %s to - %s", session_id, new_session_id) 133 | try: 134 | user = Users(user_id) 135 | except Exception as error: 136 | logging.exception(error) 137 | else: 138 | new_session_id = user.update_current_session(session_id) 139 | return new_session_id, 200 140 | 141 | return '', 500 142 | 143 | 144 | def sessions_user_fetch(user_id: str, session_id: str, user_public_key: str, password: str): 145 | """Authenticates and fetches information to populate the usser's app. 146 | Authenticating users happen at the BE user management API which can be configured in the config routes. 147 | Args: 148 | user_id (str): User ID provided when the user logs in 149 | session_id (str): Unique ID as has been provided by the websocket connections 150 | Body: 151 | password (str): User password encrypted with server public key 152 | 153 | Returns: {}, int 154 | """ 155 | 156 | path_to_private_key = os.environ.get("RSA_PR_KEY") 157 | 158 | decrypted_password = rsa.decrypt(password, path_to_private_key) 159 | 160 | user_platforms = None 161 | shared_key = None 162 | encrypted_shared_key = rsa.encrypt(shared_key, user_public_key) 163 | 164 | user = Users(user_id) 165 | user.update_shared_key(shared_key) 166 | 167 | return encrypted_shared_key, user_platforms 168 | -------------------------------------------------------------------------------- /src/users.py: -------------------------------------------------------------------------------- 1 | import logging 2 | 3 | from src.users_entity import UsersEntity 4 | 5 | import mysql.connector 6 | from mysql.connector import errorcode 7 | 8 | logger = logging.getLogger(__name__) 9 | logger.setLevel(logging.DEBUG) 10 | 11 | 12 | class User: 13 | 14 | id = None 15 | public_key = None 16 
import logging

from src.users_entity import UsersEntity

import mysql.connector
from mysql.connector import errorcode

logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)


class User:
    """Plain record object for one gateway-server user row."""

    # Populated by Users.find(); all None until then.
    id = None
    public_key = None
    msisdn_hash = None
    shared_key = None
    mgf1ParameterSpec = None
    hashingAlgorithm = None


class Users(User):
    """MySQL-backed store for gateway-server user records."""

    TABLES = {}
    TABLE_NAME = "gateway_server_users"

    TABLES[TABLE_NAME] = (
        f"CREATE TABLE `{TABLE_NAME}` ("
        " `msisdn_hash` varchar(256) NOT NULL,"
        " `shared_key` text NOT NULL,"
        " `public_key` text NOT NULL,"
        " `mgf1ParameterSpec` text NOT NULL,"
        " `hashingAlgorithm` text NOT NULL,"
        " `date` datetime DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,"
        " PRIMARY KEY (`msisdn_hash`)"
        ") ENGINE=InnoDB"
    )

    def __init__(self, userEntity: UsersEntity) -> None:
        """Open a MySQL connection using the entity's credentials.

        Args:
            userEntity (UsersEntity): carries MySQL host/user/password/database.
        """
        self.userEntity = userEntity
        self.__connect__()

    def __connect__(self):
        """(Re)establish the MySQL connection with autocommit enabled."""
        self.connection = mysql.connector.connect(
            host=self.userEntity.MYSQL_HOST,
            user=self.userEntity.MYSQL_USER,
            database=self.userEntity.MYSQL_DATABASE,
            password=self.userEntity.MYSQL_PASSWORD,
            charset="utf8mb4",
            collation="utf8mb4_unicode_ci",
        )
        self.connection.autocommit = True

    def __get_cursor__(self, buffered=None, dictionary=None):
        """Return a cursor, reconnecting first if the connection dropped."""
        if not self.connection.is_connected():
            self.__connect__()
        return self.connection.cursor(buffered=buffered, dictionary=dictionary)

    def __create_database__(self):
        """Create the configured database; tolerate 'already exists'."""
        cursor = self.__get_cursor__()
        try:
            cursor.execute(
                f"CREATE DATABASE {self.userEntity.MYSQL_DATABASE} "
                "DEFAULT CHARACTER SET 'utf8mb4' COLLATE 'utf8mb4_unicode_ci'"
            )
        except mysql.connector.Error as err:
            if err.errno == errorcode.ER_DB_CREATE_EXISTS:
                logger.warning(
                    "Database [%s] creation: already exist",
                    self.userEntity.MYSQL_DATABASE,
                )
            else:
                raise
        finally:
            cursor.close()

    def __create_tables__(self):
        """Create all tables in TABLES; tolerate 'already exists'."""
        cursor = self.__get_cursor__()

        for table_name, table_description in self.TABLES.items():
            try:
                cursor.execute(table_description)
            # BUG FIX: previously caught (mysql.connector.Error, Exception);
            # a generic Exception has no `.errno`, so the errno check itself
            # raised AttributeError and masked the real error.
            except mysql.connector.Error as err:
                if err.errno == errorcode.ER_TABLE_EXISTS_ERROR:
                    logger.warning(
                        "User table[%s] populate: already exist.", table_name
                    )
                else:
                    raise
            else:
                logger.info("User table[%s] populate: OK.", table_name)

        cursor.close()

    def create_database_and_tables__(self) -> None:
        """Create the database, then its tables; propagate any failure."""
        self.__create_database__()
        self.__create_tables__()

    def store_shared_key(self, shared_key: str) -> None:
        """Stub — not implemented yet.

        TODO: shared key should be encrypted when stored.
        """

    def commit(self, user: User) -> None:
        """Insert the user row, or update it if msisdn_hash already exists."""
        cursor = self.__get_cursor__()

        insert_query = (
            f"INSERT INTO {self.TABLE_NAME} "
            "(public_key, shared_key, msisdn_hash, mgf1ParameterSpec, hashingAlgorithm) "
            "VALUES (%s, %s, %s, %s, %s) ON DUPLICATE KEY UPDATE "
            "public_key = VALUES(public_key), "
            "shared_key = VALUES(shared_key), "
            "mgf1ParameterSpec = VALUES(mgf1ParameterSpec), "
            "hashingAlgorithm = VALUES(hashingAlgorithm)"
        )

        try:
            cursor.execute(
                insert_query,
                (
                    user.public_key,
                    user.shared_key,
                    user.msisdn_hash,
                    user.mgf1ParameterSpec,
                    user.hashingAlgorithm,
                ),
            )
            # Redundant while autocommit is on, but kept as an explicit
            # safety net should autocommit ever be disabled.
            self.connection.commit()
        finally:
            cursor.close()

    def delete(self, user: User) -> None:
        """Delete the row matching the user's msisdn_hash."""
        cursor = self.__get_cursor__()

        delete_query = f"DELETE FROM {self.TABLE_NAME} WHERE msisdn_hash = %s"

        try:
            cursor.execute(delete_query, (user.msisdn_hash,))
            self.connection.commit()
        finally:
            cursor.close()

    def find(self, msisdn_hash: str):
        """Fetch a user row by MSISDN hash.

        Returns:
            User: populated when a row matches, otherwise an empty User.
        """
        if not msisdn_hash:
            return User()

        cursor = self.__get_cursor__(buffered=True, dictionary=True)
        query = (
            "SELECT public_key, shared_key, msisdn_hash, mgf1ParameterSpec, hashingAlgorithm "
            f"FROM {self.TABLE_NAME} WHERE msisdn_hash = %s"
        )
        try:
            cursor.execute(query, (msisdn_hash,))
            user = User()
            for row in cursor:
                user.public_key = row["public_key"]
                user.shared_key = row["shared_key"]
                user.msisdn_hash = row["msisdn_hash"]
                user.mgf1ParameterSpec = row["mgf1ParameterSpec"]
                user.hashingAlgorithm = row["hashingAlgorithm"]
            return user
        finally:
            # Single close point — the original closed the cursor twice
            # (inside the else branch and again in finally).
            cursor.close()


# ---------------------------------------------------------------- users_entity

import os

from SwobBackendPublisher import MySQL, Lib
from SwobBackendPublisher.exceptions import (
    PlatformDoesNotExist,
    UserDoesNotExist,
    DuplicateUsersExist,
    InvalidDataError
)


class UsersEntity:
    """Holds MySQL credentials plus a SwobBackendPublisher connector."""

    def __init__(self, mysql_host, mysql_user, mysql_password, mysql_database):
        self.MYSQL_HOST = mysql_host
        self.MYSQL_USER = mysql_user
        self.MYSQL_PASSWORD = mysql_password
        self.MYSQL_DATABASE = mysql_database

        self.db = MySQL.connector(
            database=self.MYSQL_DATABASE,
            user=self.MYSQL_USER,
            password=self.MYSQL_PASSWORD,
            host=self.MYSQL_HOST)


if __name__ == "__main__":
    logging.basicConfig(level='DEBUG')
    try:
        host = os.environ["MYSQL_HOST"]
        user = os.environ["MYSQL_USER"]
        password = os.environ["MYSQL_PASSWORD"]
        database = os.environ["MYSQL_DATABASE"]
    except Exception as error:
        logging.exception(error)
    else:
        try:
            usersEntity = UsersEntity(mysql_host=host, mysql_user=user,
                                      mysql_password=password, mysql_database=database)
        except Exception as error:
            logging.exception(error)
        else:
            # BUG FIX: previously referenced `usersEntity.dbConnector`, an
            # attribute that is never defined (the connector is stored as
            # `self.db`), so this always raised AttributeError.
            # NOTE(review): assumes the connector exposes decrypt() —
            # confirm against the SwobBackendPublisher API.
            usersEntity.db.decrypt(phone_number="000000", platform_name="gmail")
"""Utility module"""

import os
import logging
from functools import wraps
from urllib.parse import urlparse, urljoin

logger = logging.getLogger(__name__)


def ensure_database_exists(host, user, password, database_name):
    """
    Decorator that ensures a MySQL database exists before executing a function.

    Args:
        host (str): The host address of the MySQL server.
        user (str): The username for connecting to the MySQL server.
        password (str): The password for connecting to the MySQL server.
        database_name (str): The name of the database to ensure existence.
            Must come from trusted configuration — identifiers cannot be
            bound as query parameters, so this value is interpolated.

    Returns:
        function: Decorated function.
    """

    def decorator(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            # Imported lazily so this module's pure helpers remain usable
            # where pymysql is not installed.
            import pymysql

            # BUG FIX: `connection` was referenced in `finally` even when
            # pymysql.connect() itself raised, turning a connection error
            # into an unrelated NameError.
            connection = None
            try:
                connection = pymysql.connect(
                    host=host,
                    user=user,
                    password=password,
                    charset="utf8mb4",
                    collation="utf8mb4_unicode_ci",
                )
                with connection.cursor() as cursor:
                    # Backtick-quote the identifier; plain concatenation left
                    # the statement open to malformed/hostile names.
                    cursor.execute(
                        f"CREATE DATABASE IF NOT EXISTS `{database_name}`"
                    )

                logger.debug(
                    "Database %s created successfully (if it didn't exist)",
                    database_name,
                )

            except pymysql.MySQLError as error:
                logger.error("Failed to create database: %s", error)

            finally:
                if connection is not None:
                    connection.close()

            return func(*args, **kwargs)

        return wrapper

    return decorator


def create_tables(models):
    """
    Creates tables for the given models if they don't
    exist in their specified database.

    Args:
        models(list): A list of Peewee Model classes.
    """
    # Imported lazily for the same reason as pymysql above.
    from peewee import DatabaseError

    if not models:
        logger.warning("No models provided for table creation.")
        return

    try:
        # Group the models by the database they are bound to so each
        # database gets a single atomic create pass.
        databases = {}
        for model in models:
            database = model._meta.database
            if database not in databases:
                databases[database] = []
            databases[database].append(model)

        for database, db_models in databases.items():
            with database.atomic():
                existing_tables = set(database.get_tables())
                tables_to_create = [
                    model
                    for model in db_models
                    if model._meta.table_name not in existing_tables
                ]

                if tables_to_create:
                    database.create_tables(tables_to_create)
                    logger.info(
                        "Created tables: %s",
                        [model._meta.table_name for model in tables_to_create],
                    )
                else:
                    logger.debug("No new tables to create.")

    except DatabaseError as e:
        logger.error("An error occurred while creating tables: %s", e)


def build_link_header(base_url, page, per_page, total_records):
    """Builds a Link header for pagination.

    Args:
        base_url (str): The base URL of the resource (query string, if any,
            is stripped before links are built).
        page (int): The current page number.
        per_page (int): The number of records per page.
        total_records (int): The total number of records matching the query.

    Returns:
        str: A string representing the Link header with pagination links,
        in the order: first, prev, self, last, next (as applicable).
    """
    last_page = max(1, (total_records - 1) // per_page + 1)
    url_components = urlparse(base_url)
    base_url = (
        url_components.scheme + "://" + url_components.netloc + url_components.path
    )
    links = []

    if page > 1:
        links.append(
            f'<{urljoin(base_url, f"?page=1&per_page={per_page}")}>; rel="first"'
        )
        links.append(
            f'<{urljoin(base_url, f"?page={page - 1}&per_page={per_page}")}>; rel="prev"'
        )

    links.append(
        f'<{urljoin(base_url, f"?page={page}&per_page={per_page}")}>; rel="self"'
    )

    if page < last_page:
        links.append(
            f'<{urljoin(base_url, f"?page={last_page}&per_page={per_page}")}>; rel="last"'
        )
        links.append(
            f'<{urljoin(base_url, f"?page={page + 1}&per_page={per_page}")}>; rel="next"'
        )

    return ", ".join(links)


def get_configs(config_name, strict=False, default_value=None):
    """
    Retrieves the value of a configuration from the environment variables.

    Args:
        config_name (str): The name of the configuration to retrieve.
        strict (bool): If True, raises an error if the configuration
            is not found. Default is False.
        default_value (str): The default value to return if the configuration
            is not found and strict is False. Default is None.

    Returns:
        str: The value of the configuration, or default_value if not found
        and strict is False.

    Raises:
        KeyError: If the configuration is not found and strict is True.
        ValueError: If the configuration value is empty and strict is True.
    """
    try:
        value = (
            os.environ[config_name]
            if strict
            else os.environ.get(config_name) or default_value
        )
        if strict and (value is None or value.strip() == ""):
            raise ValueError(f"Configuration '{config_name}' is missing or empty.")
        return value
    except KeyError as error:
        logger.error(
            "Configuration '%s' not found in environment variables: %s",
            config_name,
            error,
        )
        raise
    except ValueError as error:
        logger.error("Configuration '%s' is empty: %s", config_name, error)
        raise
x509 -in /tmp/server_pubkey.pub -pubkey -noout 5 | # 6 | # Usage: ./handshake.sh 7 | 8 | server_public_key=$1 9 | echo "Server public key - $server_public_key" 10 | 11 | public_key_file="useless_public_key.pub" 12 | 13 | private_key_filepath="useless_private_key.key" 14 | 15 | public_key=$(cat $public_key_file) 16 | 17 | user_id="dead3662-5f78-11ed-b8e7-6d06c3aaf3c6" 18 | 19 | password="dummy_password" 20 | 21 | MSISDN="+237123456789" 22 | 23 | echo "Starting handshake..." 24 | # verification_url="http://127.0.0.1:5000/v2/sync/users/dead3662-5f78-11ed-b8e7-6d06c3aaf3c6/sessions/000/" 25 | verification_url="https://staging.smswithoutborders.com:15000/v2/sync/users/${user_id}/sessions/000/" 26 | messaging_url="https://staging.smswithoutborders.com:15000/sms/platform/gateway-server" 27 | 28 | #echo "public_key - $public_key" 29 | #echo "user_id - $user_id" 30 | #echo "password - $password" 31 | #echo "verification url - $verification_url" 32 | 33 | # request_body="{\"public_key\":\"$public_key\", \"password\":\"{}\", \"mgf1ParameterSpec\":\"sha256\", \"mgf1ParameterSpec_dec\":\"sha256\"}" 34 | 35 | email=$2 36 | subject="Afkanerd - SMSWithoutBorders state of things" 37 | body="Hi!\nThis is test on $( date ), is intended to see if SMSWithoutBorders can now publish!\n\nMany thanks, Afkanerd" 38 | email_content="g:${email}:::${subject}:${body}" 39 | 40 | tmp_email_content_file=/tmp/email_content.txt 41 | echo $email_content > $tmp_email_content_file 42 | 43 | iv=$((1234567890123456 + $RANDOM % 4)) 44 | 45 | echo "Email content:- $email_content" 46 | 47 | encrypted_password=$( echo "$password" | tr -d '\n' | \ 48 | openssl pkeyutl -encrypt -inkey $server_public_key -pubin \ 49 | -pkeyopt rsa_padding_mode:oaep \ 50 | -pkeyopt rsa_oaep_md:sha256 \ 51 | -pkeyopt rsa_mgf1_md:sha256 | \ 52 | base64 -w 0 ) 53 | echo "- Encrypted password: $encrypted_password" 54 | 55 | encrypted_shared_key=$( curl -s -X POST \ 56 | -H "Content-Type: application/json" \ 57 | -d 
"{\"public_key\":\"$public_key\", \"password\":\"$encrypted_password\", \"mgf1ParameterSpec\":\"sha256\"}" \ 58 | "$verification_url" | \ 59 | jq -cr '.shared_key' ) 60 | echo "- Encrypted shared key: $encrypted_shared_key" 61 | 62 | decrypted_shared_key=$( echo $encrypted_shared_key | \ 63 | base64 --decode | \ 64 | openssl pkeyutl -decrypt -inkey $private_key_filepath \ 65 | -pkeyopt rsa_padding_mode:oaep \ 66 | -pkeyopt rsa_oaep_md:sha256 \ 67 | -pkeyopt rsa_mgf1_md:sha1 ) 68 | echo "- Decrypted shared key: $decrypted_shared_key" 69 | echo "- Iv: $iv" 70 | 71 | shared_key_hex=$( echo $decrypted_shared_key | od -A n -t x1 | sed -z 's/[ \n]*//g' | sed -z 's/0a$//g' ) 72 | iv_hex=$( echo $iv | od -A n -t x1 | sed -z 's/[ \n]*//g' | sed -z 's/0a$//g' ) 73 | 74 | echo "- Shared key hex: $shared_key_hex" 75 | echo "- Iv hex: $iv_hex" 76 | 77 | 78 | encrypted_content=$( echo $email_content | \ 79 | openssl enc -aes-256-cbc -e -iv "$iv_hex" -K "$shared_key_hex" -in $tmp_email_content_file -a ) 80 | encrypted_content="${iv}${encrypted_content}" 81 | encrypted_content_b64=$( echo $encrypted_content | base64 -w 0 ) 82 | 83 | echo "- Encrypted content: $encrypted_content_b64" 84 | 85 | curl -X POST \ 86 | -H "Content-Type: application/json" \ 87 | -d "{\"text\":\"$encrypted_content_b64\", \"MSISDN\":\"$MSISDN\"}" \ 88 | "$messaging_url" 89 | -------------------------------------------------------------------------------- /test/useless_private_key.key: -------------------------------------------------------------------------------- 1 | -----BEGIN RSA PRIVATE KEY----- 2 | MIIEowIBAAKCAQEAxulsY0ACNWSivfctzFePohbp2Tsq2yvkMVgSb2qGRcYD1hTp 3 | apSwPXJIvCwBUcEKDpTwwAKsCXUVq2opWk8pd0U/4HXqnHstqe1Hga+oZKe4Q7xe 4 | jXTGrFKTTNRT90tub1RQD7egtJS0bmOvWJKzsBv1gCnoJiN9et5ZamzQo3DC28dx 5 | hU6e0Ocxd2Lv5eTQ2ZwOz9oX/hfrzvpzZARyLIib2QqpwCFJ2D+lTLVCzigCBAXp 6 | hyvtLUhookPFujZuMQ+lvJZDaUInp5MYAsW2auu0wjLpta0PIKJT7ZbJnseR4Npn 7 | 9I7fVYyct/sYNU50J29KtNUmkw0lDn+JTGO2rQIDAQABAoIBACWbpqGRTZmYwGhY 8 
| XrFe8Mo9uNMYq76qqsdlln64TlDM4pbMd2FuYXAMtpHVZB53+BXPtUEogCx9la5G 9 | MTFg/D8PuccPpF8XrweM1FNByf3V9DHiYKIwLQwRxMXm80UzXtrvvv9pwZnC7+Zb 10 | NKDzg8PNVXp48eqcFTJw7ckAb+Ymq155YBcRIsVmUwN9eXY1hWhcCkmmjotMFh6y 11 | DK+t4NiCCA6MaCGOigi1G/VtaFo5R8QeimldF/9skTf4n64UxtTxgJg/gYpGdb4q 12 | R46On718KdGqNwkHNd6GUTfxKH0tusKuBATnfPVW2SVeFmI5t2VtPnZuk2UP3vrU 13 | ISyZMmECgYEA2cu7LX1OmdBXOZYkoCRRtEbwDUahktOklRCWtPQymMPDy16/lKPX 14 | 4RYAr9v6hl+duwyryaNXzXbcBTIz5Iw4j1vI5Fzs6REV6wjqw8/Itxs+m/k9mPHq 15 | o24W/4lVvrCM7Z46bAxBsLwsMM/JzJvbEb9TfaD4a0Pwd7sLNpwXMKUCgYEA6c2x 16 | WXJamNXQhdGn0u8cKImiWCZYMCxGuZYeliJqROAaunEm5HosXpbA2yZIiAEB8OiN 17 | k/s542ph4TnviHNMDN6mMbVGb7tx4b5UP3BoPPAv2UIw/PuT/9tdaE16koQnwm+w 18 | eh8580RIDZwOGHWFSK6L58b23s+HsiAXSk5nR2kCgYBhKW9ektle60AOc7nxK2K7 19 | zy3Gbm0kaICy84yYO5za4/LUJnfHYvSq+LrjuXphOgTO7o8wzEseYVk0hDn8SsXs 20 | xqCvp/kU3MdvbDH3mxnK/j+HK8DcG4h20mU2KNl00aR9WSptC2a+5qO8Puai/iDU 21 | IQupLwfEKrH7aBlPioEZ9QKBgQDPw6e78rCrbCW3JQ3blfwR0ezgJILtnArlVI+t 22 | nruMpRFbaL3E/wR6scCDfkuqACs3Z5CYVgGlI54DRTakrMCCF0Dcn1gOZiXXEwlO 23 | rpcSv+XlTLq3tOBZ1xf0XhV0KdgjIplSjHk0whK9dVXZSmW8ps9QkjBC42yUT4wi 24 | zfVdMQKBgAWyHAzoC1g5AxWaJQljiq6/xUXlyWAgvWiU3V7JpGNlcEJhSPChDWEQ 25 | 8X5zIi4vBR0cD2YOyEirgYNYOzoKA96HTa9r0VzSEYIgZH03edQ6vH1W3F8Inyy8 26 | CJHhPa5D0Cd6wsPFhM+deBrNP9hJeU4ocWv8unD7wyVXIxMBSXhc 27 | -----END RSA PRIVATE KEY----- 28 | -------------------------------------------------------------------------------- /test/useless_public_key.pub: -------------------------------------------------------------------------------- 1 | -----BEGIN PUBLIC KEY----- 2 | MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAxulsY0ACNWSivfctzFeP 3 | ohbp2Tsq2yvkMVgSb2qGRcYD1hTpapSwPXJIvCwBUcEKDpTwwAKsCXUVq2opWk8p 4 | d0U/4HXqnHstqe1Hga+oZKe4Q7xejXTGrFKTTNRT90tub1RQD7egtJS0bmOvWJKz 5 | sBv1gCnoJiN9et5ZamzQo3DC28dxhU6e0Ocxd2Lv5eTQ2ZwOz9oX/hfrzvpzZARy 6 | LIib2QqpwCFJ2D+lTLVCzigCBAXphyvtLUhookPFujZuMQ+lvJZDaUInp5MYAsW2 7 | auu0wjLpta0PIKJT7ZbJnseR4Npn9I7fVYyct/sYNU50J29KtNUmkw0lDn+JTGO2 8 | 
rQIDAQAB 9 | -----END PUBLIC KEY----- 10 | --------------------------------------------------------------------------------