├── .dockerignore ├── .github └── workflows │ ├── docker-hub-publish.yml │ └── python-publish.yml ├── .gitignore ├── DOWNLOAD_EXAMPLES.md ├── Dockerfile ├── Jenkinsfile ├── README.md ├── __init__.py ├── apkeep_linux ├── apkeep_macos ├── mdast_cli ├── __init__.py ├── cr_report_generator.py ├── distribution_systems │ ├── __init__.py │ ├── appgallery.py │ ├── appstore.py │ ├── appstore_client │ │ ├── __init__.py │ │ ├── schemas │ │ │ ├── __init__.py │ │ │ ├── store_authenticate_req.py │ │ │ ├── store_authenticate_resp.py │ │ │ ├── store_buyproduct_req.py │ │ │ ├── store_buyproduct_resp.py │ │ │ ├── store_download_req.py │ │ │ └── store_download_resp.py │ │ └── store.py │ ├── base.py │ ├── firebase.py │ ├── google_play.py │ ├── google_play_apkeep.py │ ├── nexus.py │ ├── nexus2.py │ ├── rumarket.py │ └── rustore.py ├── helpers │ ├── __init__.py │ ├── const.py │ ├── exit_codes.py │ ├── file_utils.py │ ├── helpers.py │ ├── logging_utils.py │ └── platform_utils.py └── mdast_scan.py ├── mdast_cli_core ├── __init__.py ├── api.py ├── base.py └── token.py ├── requirements.txt ├── setup.cfg └── setup.py /.dockerignore: -------------------------------------------------------------------------------- 1 | ### Example user template template 2 | ### Example user template 3 | 4 | 5 | # IntelliJ project files 6 | .idea 7 | *.iml 8 | out 9 | gen 10 | ### Python template 11 | # Byte-compiled / optimized / DLL files 12 | __pycache__/ 13 | *.py[cod] 14 | *$py.class 15 | 16 | # C extensions 17 | *.so 18 | 19 | # Distribution / packaging 20 | .Python 21 | build/ 22 | develop-eggs/ 23 | dist/ 24 | downloads/ 25 | eggs/ 26 | .eggs/ 27 | lib/ 28 | lib64/ 29 | parts/ 30 | sdist/ 31 | var/ 32 | wheels/ 33 | share/python-wheels/ 34 | *.egg-info/ 35 | .installed.cfg 36 | *.egg 37 | MANIFEST 38 | 39 | # PyInstaller 40 | # Usually these files are written by a python script from a template 41 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
42 | *.manifest 43 | *.spec 44 | 45 | # Installer logs 46 | pip-log.txt 47 | pip-delete-this-directory.txt 48 | 49 | # Unit test / coverage reports 50 | htmlcov/ 51 | .tox/ 52 | .nox/ 53 | .coverage 54 | .coverage.* 55 | .cache 56 | nosetests.xml 57 | coverage.xml 58 | *.cover 59 | *.py,cover 60 | .hypothesis/ 61 | .pytest_cache/ 62 | cover/ 63 | 64 | # Translations 65 | *.mo 66 | *.pot 67 | 68 | # Django stuff: 69 | *.log 70 | local_settings.py 71 | db.sqlite3 72 | db.sqlite3-journal 73 | 74 | # Flask stuff: 75 | instance/ 76 | .webassets-cache 77 | 78 | # Scrapy stuff: 79 | .scrapy 80 | 81 | # Sphinx documentation 82 | docs/_build/ 83 | 84 | # PyBuilder 85 | .pybuilder/ 86 | target/ 87 | 88 | # Jupyter Notebook 89 | .ipynb_checkpoints 90 | 91 | # IPython 92 | profile_default/ 93 | ipython_config.py 94 | 95 | # pyenv 96 | # For a library or package, you might want to ignore these files since the code is 97 | # intended to run in multiple environments; otherwise, check them in: 98 | # .python-version 99 | 100 | # pipenv 101 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 102 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 103 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 104 | # install all needed dependencies. 105 | #Pipfile.lock 106 | 107 | # PEP 582; used by e.g. 
github.com/David-OConnor/pyflow 108 | __pypackages__/ 109 | 110 | # Celery stuff 111 | celerybeat-schedule 112 | celerybeat.pid 113 | 114 | # SageMath parsed files 115 | *.sage.py 116 | 117 | # Environments 118 | .env 119 | .venv 120 | env/ 121 | venv/ 122 | ENV/ 123 | env.bak/ 124 | venv.bak/ 125 | 126 | # Spyder project settings 127 | .spyderproject 128 | .spyproject 129 | 130 | # Rope project settings 131 | .ropeproject 132 | 133 | # mkdocs documentation 134 | /site 135 | 136 | # mypy 137 | .mypy_cache/ 138 | .dmypy.json 139 | dmypy.json 140 | 141 | # Pyre type checker 142 | .pyre/ 143 | 144 | # pytype static type analyzer 145 | .pytype/ 146 | 147 | # Cython debug symbols 148 | cython_debug/ 149 | 150 | /.github/ 151 | /build/ 152 | /downloaded_apps/ 153 | /mdast_cli/downloaded_apps/ 154 | /README_DOCKER.md 155 | /mdast_cli.egg-info/ 156 | -------------------------------------------------------------------------------- /.github/workflows/docker-hub-publish.yml: -------------------------------------------------------------------------------- 1 | name: Publish mdast_cli image to Docker Hub 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | 8 | jobs: 9 | build-n-publish-docker: 10 | name: Docker Hub - Publish mdast_cli 11 | runs-on: ubuntu-22.04 12 | 13 | steps: 14 | - uses: actions/checkout@v3 15 | 16 | - name: docker login 17 | env: 18 | DOCKER_USER: ${{secrets.DOCKER_USER}} 19 | DOCKER_PASSWORD: ${{secrets.DOCKER_PASSWORD}} 20 | run: 21 | docker login -u $DOCKER_USER -p $DOCKER_PASSWORD 22 | 23 | - name: Build the Docker image 24 | 25 | run: docker build . 
--file Dockerfile -t mobilesecurity/mdast_cli:2025.9.0 -t mobilesecurity/mdast_cli:latest 26 | 27 | 28 | - name: Docker Hub push latest image 29 | run: docker push mobilesecurity/mdast_cli:latest 30 | 31 | - name: Docker Hub push tagged image 32 | 33 | run: docker push mobilesecurity/mdast_cli:2025.9.0 34 | 35 | 36 | 37 | -------------------------------------------------------------------------------- /.github/workflows/python-publish.yml: -------------------------------------------------------------------------------- 1 | name: Publish mdast_cli 🐍 distributions to PyPI 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | 8 | jobs: 9 | build-n-publish: 10 | name: Build and publish mdast_cli 🐍 distributions to PyPI 11 | runs-on: ubuntu-22.04 12 | steps: 13 | - uses: actions/checkout@master 14 | 15 | - name: Set up Python 3.9 16 | uses: actions/setup-python@v1 17 | with: 18 | python-version: 3.9 19 | 20 | # - name: Linter flake8 21 | # uses: py-actions/flake8@v2.2.1 22 | # with: 23 | # exclude: "build,dist,check_mdast_cli,googleplay_pb2.py" 24 | # max-line-length: "120" 25 | # 26 | # - name: Linter isort 27 | # uses: isort/isort-action@v1.1.0 28 | # with: 29 | # configuration: "--skip ./docker --skip ./venv --skip ./check_mdast_cli --line-length 120 --check-only" 30 | # requirements-files: "requirements.txt" 31 | 32 | - name: Install pypa/build 33 | run: >- 34 | python -m 35 | pip install 36 | build 37 | --user 38 | 39 | - name: Build a binary wheel and a source tarball 40 | run: >- 41 | python -m 42 | build 43 | --sdist 44 | --wheel 45 | --outdir dist/ 46 | . 
47 | 48 | - name: Publish distribution 📦 to Test PyPI 49 | uses: pypa/gh-action-pypi-publish@master 50 | with: 51 | user: __token__ 52 | password: ${{ secrets.TEST_PYPI_API_CLI_TOKEN }} 53 | repository_url: https://test.pypi.org/legacy/ 54 | 55 | - name: Publish distribution 📦 to PyPI 56 | if: github.event_name == 'push' 57 | uses: pypa/gh-action-pypi-publish@master 58 | with: 59 | user: __token__ 60 | password: ${{ secrets.PYPI_API_CLI_TOKEN }} -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # These are some examples of commonly ignored file patterns. 2 | # You should customize this list as applicable to your project. 3 | # Learn more about .gitignore: 4 | # https://www.atlassian.com/git/tutorials/saving-changes/gitignore 5 | 6 | # Node artifact files 7 | node_modules/ 8 | dist/ 9 | 10 | # venv 11 | venv/ 12 | 13 | # Compiled Java class files 14 | *.class 15 | 16 | # Compiled Python bytecode 17 | *.py[cod] 18 | __pycache__/ 19 | *.pyc 20 | 21 | # Log files 22 | *.log 23 | 24 | # Package files 25 | *.jar 26 | 27 | # Maven 28 | target/ 29 | dist/ 30 | 31 | build/ 32 | 33 | # JetBrains IDE 34 | .idea/ 35 | 36 | # Unit test reports 37 | TEST*.xml 38 | 39 | # Generated by MacOS 40 | .DS_Store 41 | 42 | # Generated by Windows 43 | Thumbs.db 44 | 45 | # Applications 46 | *.app 47 | *.exe 48 | *.war 49 | *.apk 50 | 51 | 52 | # Large media files 53 | *.mp4 54 | *.tiff 55 | *.avi 56 | *.flv 57 | *.mov 58 | *.wmv 59 | 60 | downloaded_apps/ 61 | 62 | venv!/mdast_cli.egg-info/ 63 | 64 | config 65 | !/README_DOCKER.md 66 | README_DOCKER.md 67 | mdast_cli.egg-info/ 68 | 69 | *.pkl 70 | 71 | # Python package metadata 72 | *.egg-info/ 73 | .pytest_cache/ 74 | .mypy_cache/ 75 | .ruff_cache/ 76 | 77 | # IDE 78 | .vscode/ 79 | *.swp 80 | *.swo 81 | *~ 82 | 83 | # OS 84 | .DS_Store 85 | Thumbs.db 86 | 87 | # Application sessions (may contain sensitive 
data) 88 | appstore_sessions/ -------------------------------------------------------------------------------- /DOWNLOAD_EXAMPLES.md: -------------------------------------------------------------------------------- 1 | # Примеры команд для скачивания приложений 2 | 3 | Этот файл содержит примеры команд для тестирования скачивания приложений из всех поддерживаемых distribution systems. 4 | 5 | ## Общий формат 6 | 7 | Все примеры используют флаг `--download_only` (или `-d`) для скачивания без запуска сканирования. 8 | 9 | --- 10 | 11 | ## 1. Local File (Локальный файл) 12 | 13 | Скачивание не требуется - используется существующий файл. 14 | 15 | ```bash 16 | # Просто проверка существования файла 17 | mdast_cli -d \ 18 | --distribution_system file \ 19 | --file_path /path/to/your/app.apk 20 | ``` 21 | 22 | **Пример с реальным файлом:** 23 | ```bash 24 | mdast_cli -d \ 25 | --distribution_system file \ 26 | --file_path ./my_app.apk 27 | ``` 28 | 29 | --- 30 | 31 | ## 2. Google Play 32 | 33 | ### С использованием OAuth2 токена (первый запуск) 34 | 35 | ```bash 36 | mdast_cli -d \ 37 | --distribution_system google_play \ 38 | --google_play_package_name com.instagram.android \ 39 | --google_play_email your.email@gmail.com \ 40 | --google_play_oauth2_token "YOUR_OAUTH2_TOKEN" \ 41 | --google_play_file_name instagram_latest 42 | ``` 43 | 44 | ### С использованием AAS токена (последующие запуски) 45 | 46 | ```bash 47 | mdast_cli -d \ 48 | --distribution_system google_play \ 49 | --google_play_package_name com.instagram.android \ 50 | --google_play_email your.email@gmail.com \ 51 | --google_play_aas_token "YOUR_AAS_TOKEN" \ 52 | --google_play_file_name instagram_latest 53 | ``` 54 | 55 | ### С прокси 56 | 57 | ```bash 58 | mdast_cli -d \ 59 | --distribution_system google_play \ 60 | --google_play_package_name com.whatsapp \ 61 | --google_play_email your.email@gmail.com \ 62 | --google_play_aas_token "YOUR_AAS_TOKEN" \ 63 | --google_play_proxy 
"socks5://username:password@proxy.example.com:1080" 64 | ``` 65 | 66 | ### Популярные пакеты для тестирования 67 | 68 | ```bash 69 | # Instagram 70 | mdast_cli -d \ 71 | --distribution_system google_play \ 72 | --google_play_package_name com.instagram.android \ 73 | --google_play_email your.email@gmail.com \ 74 | --google_play_aas_token "YOUR_AAS_TOKEN" 75 | 76 | # WhatsApp 77 | mdast_cli -d \ 78 | --distribution_system google_play \ 79 | --google_play_package_name com.whatsapp \ 80 | --google_play_email your.email@gmail.com \ 81 | --google_play_aas_token "YOUR_AAS_TOKEN" 82 | 83 | # Telegram 84 | mdast_cli -d \ 85 | --distribution_system google_play \ 86 | --google_play_package_name org.telegram.messenger \ 87 | --google_play_email your.email@gmail.com \ 88 | --google_play_aas_token "YOUR_AAS_TOKEN" 89 | 90 | # VK 91 | mdast_cli -d \ 92 | --distribution_system google_play \ 93 | --google_play_package_name com.vkontakte.android \ 94 | --google_play_email your.email@gmail.com \ 95 | --google_play_aas_token "YOUR_AAS_TOKEN" 96 | ``` 97 | 98 | --- 99 | 100 | ## 3. 
AppStore 101 | 102 | ### С использованием App ID 103 | 104 | ```bash 105 | mdast_cli -d \ 106 | --distribution_system appstore \ 107 | --appstore_app_id YOUR_APP_ID \ 108 | --appstore_apple_id your.email@icloud.com \ 109 | --appstore_password "YOUR_PASSWORD" \ 110 | --appstore_2FA YOUR_2FA_CODE \ 111 | --appstore_file_name my_app 112 | ``` 113 | 114 | ### С использованием Bundle ID 115 | 116 | ```bash 117 | mdast_cli -d \ 118 | --distribution_system appstore \ 119 | --appstore_bundle_id com.example.myapp \ 120 | --appstore_apple_id your.email@icloud.com \ 121 | --appstore_password "YOUR_PASSWORD" \ 122 | --appstore_2FA YOUR_2FA_CODE \ 123 | --appstore_file_name my_app 124 | ``` 125 | 126 | ### Примеры популярных приложений 127 | 128 | ```bash 129 | # Instagram (App ID: 389801252) 130 | mdast_cli -d \ 131 | --distribution_system appstore \ 132 | --appstore_app_id 389801252 \ 133 | --appstore_apple_id your.email@icloud.com \ 134 | --appstore_password "YOUR_PASSWORD" \ 135 | --appstore_2FA YOUR_2FA_CODE 136 | 137 | # WhatsApp (App ID: 310633997) 138 | mdast_cli -d \ 139 | --distribution_system appstore \ 140 | --appstore_app_id 310633997 \ 141 | --appstore_apple_id your.email@icloud.com \ 142 | --appstore_password "YOUR_PASSWORD" \ 143 | --appstore_2FA YOUR_2FA_CODE 144 | ``` 145 | 146 | --- 147 | 148 | ## 4. 
Firebase 149 | 150 | ### Android приложение (APK) 151 | 152 | ```bash 153 | mdast_cli -d \ 154 | --distribution_system firebase \ 155 | --firebase_project_number YOUR_PROJECT_NUMBER \ 156 | --firebase_app_id "YOUR_FIREBASE_APP_ID" \ 157 | --firebase_account_json_path /path/to/service_account.json \ 158 | --firebase_file_extension apk \ 159 | --firebase_file_name my_android_app 160 | ``` 161 | 162 | ### iOS приложение (IPA) 163 | 164 | ```bash 165 | mdast_cli -d \ 166 | --distribution_system firebase \ 167 | --firebase_project_number YOUR_PROJECT_NUMBER \ 168 | --firebase_app_id "YOUR_FIREBASE_APP_ID" \ 169 | --firebase_account_json_path /path/to/service_account.json \ 170 | --firebase_file_extension ipa \ 171 | --firebase_file_name my_ios_app 172 | ``` 173 | 174 | ### Без указания имени файла (используется версия из Firebase) 175 | 176 | ```bash 177 | mdast_cli -d \ 178 | --distribution_system firebase \ 179 | --firebase_project_number YOUR_PROJECT_NUMBER \ 180 | --firebase_app_id "YOUR_FIREBASE_APP_ID" \ 181 | --firebase_account_json_path ./service_account.json \ 182 | --firebase_file_extension apk 183 | ``` 184 | 185 | --- 186 | 187 | ## 5. Nexus Repository (Nexus 3.x) 188 | 189 | ```bash 190 | mdast_cli -d \ 191 | --distribution_system nexus \ 192 | --nexus_url https://nexus.example.com \ 193 | --nexus_login YOUR_NEXUS_USERNAME \ 194 | --nexus_password YOUR_NEXUS_PASSWORD \ 195 | --nexus_repo_name releases \ 196 | --nexus_group_id com.example \ 197 | --nexus_artifact_id myapp \ 198 | --nexus_version 1.0.0 199 | ``` 200 | 201 | ### Пример с локальным Nexus 202 | 203 | ```bash 204 | mdast_cli -d \ 205 | --distribution_system nexus \ 206 | --nexus_url http://localhost:8081 \ 207 | --nexus_login YOUR_NEXUS_USERNAME \ 208 | --nexus_password YOUR_NEXUS_PASSWORD \ 209 | --nexus_repo_name maven-releases \ 210 | --nexus_group_id com.mycompany \ 211 | --nexus_artifact_id mobile-app \ 212 | --nexus_version 2.5.1 213 | ``` 214 | 215 | --- 216 | 217 | ## 6. 
Nexus2 Repository 218 | 219 | ```bash 220 | mdast_cli -d \ 221 | --distribution_system nexus2 \ 222 | --nexus2_url http://nexus:8081/nexus/ \ 223 | --nexus2_login YOUR_NEXUS_USERNAME \ 224 | --nexus2_password YOUR_NEXUS_PASSWORD \ 225 | --nexus2_repo_name releases \ 226 | --nexus2_group_id com.example \ 227 | --nexus2_artifact_id myapp \ 228 | --nexus2_version 1.337 \ 229 | --nexus2_extension apk \ 230 | --nexus2_file_name my_app_from_nexus2 231 | ``` 232 | 233 | ### Пример с кастомным именем файла 234 | 235 | ```bash 236 | mdast_cli -d \ 237 | --distribution_system nexus2 \ 238 | --nexus2_url http://nexus.example.com:8081/nexus/ \ 239 | --nexus2_login YOUR_NEXUS_USERNAME \ 240 | --nexus2_password YOUR_NEXUS_PASSWORD \ 241 | --nexus2_repo_name snapshots \ 242 | --nexus2_group_id org.myproject \ 243 | --nexus2_artifact_id android-app \ 244 | --nexus2_version 1.0.0-SNAPSHOT \ 245 | --nexus2_extension apk \ 246 | --nexus2_file_name production_build 247 | ``` 248 | 249 | --- 250 | 251 | ## 7. RuStore 252 | 253 | ```bash 254 | mdast_cli -d \ 255 | --distribution_system rustore \ 256 | --rustore_package_name ru.example.app 257 | ``` 258 | 259 | ### Примеры популярных приложений из RuStore 260 | 261 | ```bash 262 | # VK 263 | mdast_cli -d \ 264 | --distribution_system rustore \ 265 | --rustore_package_name com.vkontakte.android 266 | 267 | # Яндекс.Браузер 268 | mdast_cli -d \ 269 | --distribution_system rustore \ 270 | --rustore_package_name com.yandex.browser 271 | 272 | # Сбербанк Онлайн 273 | mdast_cli -d \ 274 | --distribution_system rustore \ 275 | --rustore_package_name ru.sberbank.sberbankid 276 | ``` 277 | 278 | --- 279 | 280 | ## 8. 
AppGallery (Huawei) 281 | 282 | ```bash 283 | mdast_cli -d \ 284 | --distribution_system appgallery \ 285 | --appgallery_app_id C101184875 \ 286 | --appgallery_file_name huawei_app 287 | ``` 288 | 289 | ### Примеры популярных приложений 290 | 291 | ```bash 292 | # Instagram в AppGallery 293 | mdast_cli -d \ 294 | --distribution_system appgallery \ 295 | --appgallery_app_id C101184875 \ 296 | --appgallery_file_name instagram_huawei 297 | 298 | # WhatsApp в AppGallery 299 | mdast_cli -d \ 300 | --distribution_system appgallery \ 301 | --appgallery_app_id C100000001 \ 302 | --appgallery_file_name whatsapp_huawei 303 | ``` 304 | 305 | **Примечание:** App ID можно найти на странице приложения в AppGallery в URL. 306 | 307 | --- 308 | 309 | ## 9. RuMarket 310 | 311 | ```bash 312 | mdast_cli -d \ 313 | --distribution_system rumarket \ 314 | --rumarket_package_name com.example.app 315 | ``` 316 | 317 | ### Примеры 318 | 319 | ```bash 320 | # Любое приложение по package name 321 | mdast_cli -d \ 322 | --distribution_system rumarket \ 323 | --rumarket_package_name ru.play.market.app 324 | ``` 325 | 326 | --- 327 | 328 | ## Полезные опции 329 | 330 | ### Указание кастомной директории для скачивания 331 | 332 | ```bash 333 | mdast_cli -d \ 334 | --distribution_system google_play \ 335 | --google_play_package_name com.example.app \ 336 | --google_play_email your.email@gmail.com \ 337 | --google_play_aas_token "YOUR_TOKEN" \ 338 | --download_path /custom/path/to/downloads 339 | ``` 340 | 341 | ### Проверка вывода (для CI/CD) 342 | 343 | ```bash 344 | # Команда выведет DOWNLOAD_PATH=/path/to/app.apk 345 | mdast_cli -d \ 346 | --distribution_system google_play \ 347 | --google_play_package_name com.example.app \ 348 | --google_play_email your.email@gmail.com \ 349 | --google_play_aas_token "YOUR_TOKEN" \ 350 | | grep DOWNLOAD_PATH 351 | ``` 352 | 353 | ### Использование переменных окружения 354 | 355 | ```bash 356 | # Установите переменные 357 | export 
GOOGLE_PLAY_EMAIL="your.email@gmail.com" 358 | export GOOGLE_PLAY_AAS_TOKEN="your_token_here" 359 | 360 | # Используйте в команде 361 | mdast_cli -d \ 362 | --distribution_system google_play \ 363 | --google_play_package_name com.example.app \ 364 | --google_play_email "$GOOGLE_PLAY_EMAIL" \ 365 | --google_play_aas_token "$GOOGLE_PLAY_AAS_TOKEN" 366 | ``` 367 | 368 | --- 369 | 370 | ## Тестирование всех систем 371 | 372 | ### Скрипт для тестирования всех distribution systems 373 | 374 | ```bash 375 | #!/bin/bash 376 | set -e 377 | 378 | DOWNLOAD_DIR="./test_downloads" 379 | mkdir -p "$DOWNLOAD_DIR" 380 | 381 | echo "Testing all distribution systems..." 382 | 383 | # 1. Local File 384 | echo "1. Testing Local File..." 385 | mdast_cli -d \ 386 | --distribution_system file \ 387 | --file_path ./test.apk \ 388 | --download_path "$DOWNLOAD_DIR" || echo " ⚠️ Local file test skipped (file not found)" 389 | 390 | # 2. Google Play 391 | echo "2. Testing Google Play..." 392 | if [ -n "$GOOGLE_PLAY_AAS_TOKEN" ]; then 393 | mdast_cli -d \ 394 | --distribution_system google_play \ 395 | --google_play_package_name com.instagram.android \ 396 | --google_play_email "$GOOGLE_PLAY_EMAIL" \ 397 | --google_play_aas_token "$GOOGLE_PLAY_AAS_TOKEN" \ 398 | --download_path "$DOWNLOAD_DIR" && echo " ✅ Google Play OK" 399 | else 400 | echo " ⚠️ Google Play skipped (no token)" 401 | fi 402 | 403 | # 3. AppStore 404 | echo "3. Testing AppStore..." 405 | if [ -n "$APPSTORE_PASSWORD" ]; then 406 | mdast_cli -d \ 407 | --distribution_system appstore \ 408 | --appstore_app_id YOUR_APP_ID \ 409 | --appstore_apple_id "$APPSTORE_APPLE_ID" \ 410 | --appstore_password "$APPSTORE_PASSWORD" \ 411 | --appstore_2FA "$APPSTORE_2FA" \ 412 | --download_path "$DOWNLOAD_DIR" && echo " ✅ AppStore OK" 413 | else 414 | echo " ⚠️ AppStore skipped (no credentials)" 415 | fi 416 | 417 | # 4. Firebase 418 | echo "4. Testing Firebase..." 
419 | if [ -f "$FIREBASE_SERVICE_ACCOUNT" ]; then 420 | mdast_cli -d \ 421 | --distribution_system firebase \ 422 | --firebase_project_number "$FIREBASE_PROJECT_NUMBER" \ 423 | --firebase_app_id "$FIREBASE_APP_ID" \ 424 | --firebase_account_json_path "$FIREBASE_SERVICE_ACCOUNT" \ 425 | --firebase_file_extension apk \ 426 | --download_path "$DOWNLOAD_DIR" && echo " ✅ Firebase OK" 427 | else 428 | echo " ⚠️ Firebase skipped (no service account)" 429 | fi 430 | 431 | # 5. Nexus 432 | echo "5. Testing Nexus..." 433 | if [ -n "$NEXUS_URL" ]; then 434 | mdast_cli -d \ 435 | --distribution_system nexus \ 436 | --nexus_url "$NEXUS_URL" \ 437 | --nexus_login "$NEXUS_LOGIN" \ 438 | --nexus_password "$NEXUS_PASSWORD" \ 439 | --nexus_repo_name "$NEXUS_REPO_NAME" \ 440 | --nexus_group_id "$NEXUS_GROUP_ID" \ 441 | --nexus_artifact_id "$NEXUS_ARTIFACT_ID" \ 442 | --nexus_version "$NEXUS_VERSION" \ 443 | --download_path "$DOWNLOAD_DIR" && echo " ✅ Nexus OK" 444 | else 445 | echo " ⚠️ Nexus skipped (no configuration)" 446 | fi 447 | 448 | # 6. Nexus2 449 | echo "6. Testing Nexus2..." 450 | if [ -n "$NEXUS2_URL" ]; then 451 | mdast_cli -d \ 452 | --distribution_system nexus2 \ 453 | --nexus2_url "$NEXUS2_URL" \ 454 | --nexus2_login "$NEXUS2_LOGIN" \ 455 | --nexus2_password "$NEXUS2_PASSWORD" \ 456 | --nexus2_repo_name "$NEXUS2_REPO_NAME" \ 457 | --nexus2_group_id "$NEXUS2_GROUP_ID" \ 458 | --nexus2_artifact_id "$NEXUS2_ARTIFACT_ID" \ 459 | --nexus2_version "$NEXUS2_VERSION" \ 460 | --nexus2_extension apk \ 461 | --download_path "$DOWNLOAD_DIR" && echo " ✅ Nexus2 OK" 462 | else 463 | echo " ⚠️ Nexus2 skipped (no configuration)" 464 | fi 465 | 466 | # 7. RuStore 467 | echo "7. Testing RuStore..." 468 | mdast_cli -d \ 469 | --distribution_system rustore \ 470 | --rustore_package_name com.vkontakte.android \ 471 | --download_path "$DOWNLOAD_DIR" && echo " ✅ RuStore OK" 472 | 473 | # 8. AppGallery 474 | echo "8. Testing AppGallery..." 
475 | mdast_cli -d \ 476 | --distribution_system appgallery \ 477 | --appgallery_app_id C101184875 \ 478 | --download_path "$DOWNLOAD_DIR" && echo " ✅ AppGallery OK" 479 | 480 | # 9. RuMarket 481 | echo "9. Testing RuMarket..." 482 | mdast_cli -d \ 483 | --distribution_system rumarket \ 484 | --rumarket_package_name com.example.app \ 485 | --download_path "$DOWNLOAD_DIR" && echo " ✅ RuMarket OK" 486 | 487 | echo "" 488 | echo "✅ Testing completed! Check $DOWNLOAD_DIR for downloaded files" 489 | ``` 490 | 491 | --- 492 | 493 | ## Docker примеры 494 | 495 | ### Google Play через Docker 496 | 497 | ```bash 498 | docker run -it \ 499 | -v $(pwd)/downloads:/mdast/downloads \ 500 | mobilesecurity/mdast_cli:latest \ 501 | -d \ 502 | --distribution_system google_play \ 503 | --google_play_package_name com.instagram.android \ 504 | --google_play_email your.email@gmail.com \ 505 | --google_play_aas_token "YOUR_TOKEN" \ 506 | --download_path /mdast/downloads 507 | ``` 508 | 509 | ### Firebase через Docker 510 | 511 | ```bash 512 | docker run -it \ 513 | -v $(pwd)/service_account.json:/mdast/service_account.json \ 514 | -v $(pwd)/downloads:/mdast/downloads \ 515 | mobilesecurity/mdast_cli:latest \ 516 | -d \ 517 | --distribution_system firebase \ 518 | --firebase_project_number YOUR_PROJECT_NUMBER \ 519 | --firebase_app_id "YOUR_FIREBASE_APP_ID" \ 520 | --firebase_account_json_path /mdast/service_account.json \ 521 | --firebase_file_extension apk \ 522 | --download_path /mdast/downloads 523 | ``` 524 | 525 | --- 526 | 527 | ## Отладка 528 | 529 | ### Включение подробных логов 530 | 531 | ```bash 532 | # Python logging level 533 | export PYTHONUNBUFFERED=1 534 | 535 | mdast_cli -d \ 536 | --distribution_system google_play \ 537 | --google_play_package_name com.example.app \ 538 | --google_play_email your.email@gmail.com \ 539 | --google_play_aas_token "YOUR_TOKEN" \ 540 | 2>&1 | tee download.log 541 | ``` 542 | 543 | ### Проверка только синтаксиса команды 544 | 545 | ```bash 
546 | # Проверка без выполнения (если поддерживается) 547 | mdast_cli --help | grep -A 5 "google_play" 548 | ``` 549 | 550 | --- 551 | 552 | ## Примечания 553 | 554 | 1. **Безопасность**: Никогда не коммитьте токены, пароли или ключи в репозиторий 555 | 2. **Переменные окружения**: Используйте переменные окружения для чувствительных данных 556 | 3. **Таймауты**: Большие файлы могут требовать больше времени для скачивания 557 | 4. **Права доступа**: Убедитесь, что у вас есть права на запись в `--download_path` 558 | 5. **Сеть**: Некоторые distribution systems могут требовать стабильного интернет-соединения 559 | 560 | --- 561 | 562 | ## Быстрая справка 563 | 564 | ```bash 565 | # Показать все доступные опции 566 | mdast_cli --help 567 | 568 | # Показать help для конкретной distribution system 569 | mdast_cli --distribution_system google_play --help 2>&1 | head -20 570 | ``` 571 | 572 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM python:3.9-slim 2 | 3 | WORKDIR /mdast_cli 4 | 5 | COPY ./ /mdast_cli 6 | 7 | # Make apkeep_linux executable (if it exists) 8 | RUN if [ -f /mdast_cli/apkeep_linux ]; then chmod +x /mdast_cli/apkeep_linux; fi 9 | 10 | RUN pip install -r requirements.txt 11 | 12 | ENV PYTHONPATH "${PYTHONPATH}:/mdast_cli" 13 | 14 | ENTRYPOINT ["python3", "mdast_cli/mdast_scan.py"] -------------------------------------------------------------------------------- /Jenkinsfile: -------------------------------------------------------------------------------- 1 | pipeline { 2 | environment { 3 | registry = "mobilesecurity/mdast_cli" 4 | registryCredential = 'dockerhub_mobilesecurity' 5 | dockerImage = '' 6 | } 7 | agent { label 'master' } 8 | stages { 9 | stage('Cloning Git') { 10 | steps { 11 | git branch: 'main', url: 'https://github.com/Dynamic-Mobile-Security/mdast-cli.git' 12 | } 13 | } 14 | stage('Building image') { 15 | 
steps { 16 | script { 17 | dockerImage = docker.build registry + ":latest" 18 | } 19 | } 20 | } 21 | stage('Deploy image') { 22 | steps { 23 | script { 24 | docker.withRegistry( '', registryCredential ) { 25 | dockerImage.push() 26 | } 27 | } 28 | } 29 | } 30 | stage('Cleaning up') { 31 | steps { 32 | sh "docker rmi $registry:latest" 33 | } 34 | } 35 | } 36 | } -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Mobile DAST CI/CD 2 | 3 | **Python script for automating security analysis of mobile applications.** 4 | 5 | [![Docker Hub](https://img.shields.io/docker/v/mobilesecurity/mdast_cli?label=docker%20hub)](https://hub.docker.com/repository/docker/mobilesecurity/mdast_cli) 6 | [![PyPi](https://img.shields.io/pypi/v/mdast_cli?color=3)](https://pypi.org/project/mdast-cli/) 7 | ![GitHub issues](https://img.shields.io/github/issues-raw/Dynamic-Mobile-Security/mdast-cli) 8 | ![GitHub pull requests](https://img.shields.io/github/issues-pr-raw/Dynamic-Mobile-Security/mdast-cli) 9 | 10 | ## Overview 11 | 12 | This script is designed to integrate mobile applications' security analysis into the continuous development process (CI/CD). It supports downloading applications from various distribution systems and performing dynamic security analysis (DAST). 13 | 14 | ### Key Features 15 | 16 | - **Multiple Distribution Systems**: Support for 9+ distribution platforms (Google Play, AppStore, Firebase, Nexus, etc.) 17 | - **Download Only Mode**: Download applications without scanning for testing or manual analysis 18 | - **Automated Scanning**: Full integration with DAST scanning platform 19 | - **Multiple Report Formats**: Generate JSON and PDF reports 20 | - **CI/CD Ready**: Designed for seamless integration into CI/CD pipelines 21 | - **Docker Support**: Pre-built Docker images for easy deployment 22 | 23 | ### What It Does 24 | 25 | 1. 
**Downloads** mobile applications (APK/IPA) from supported distribution systems 26 | 2. **Uploads** applications to the DAST scanning platform 27 | 3. **Executes** security scans (manual or automated with testcases) 28 | 4. **Generates** detailed security reports in JSON and/or PDF formats 29 | 5. **Returns** structured output suitable for CI/CD integration 30 | 31 | --- 32 | 33 | ## Table of Contents 34 | 35 | * [Installation](#installation) 36 | * [Docker (Recommended)](#docker-recommended) 37 | * [PyPI Package](#pypi-package) 38 | * [From Source](#from-source) 39 | * [Quick Start](#quick-start) 40 | * [Usage Modes](#usage-modes) 41 | * [Download Only Mode](#download-only-mode) 42 | * [Scan Mode](#scan-mode) 43 | * [Distribution Systems](#distribution-systems) 44 | * [Local File](#local-file) 45 | * [Google Play](#google-play) 46 | * [AppStore](#appstore) 47 | * [Firebase](#firebase) 48 | * [Nexus Repository](#nexus-repository) 49 | * [Nexus2 Repository](#nexus2-repository) 50 | * [RuStore](#rustore) 51 | * [AppGallery](#appgallery) 52 | * [RuMarket](#rumarket) 53 | * [Scan Configuration](#scan-configuration) 54 | * [Scan Types](#scan-types) 55 | * [Architecture Selection](#architecture-selection) 56 | * [Profile Management](#profile-management) 57 | * [Reports](#reports) 58 | * [JSON Summary Report](#json-summary-report) 59 | * [PDF Report](#pdf-report) 60 | * [CR Report](#cr-report) 61 | * [Advanced Features](#advanced-features) 62 | * [Non-blocking Scans](#non-blocking-scans) 63 | * [Long-running Scans](#long-running-scans) 64 | * [Appium Integration](#appium-integration) 65 | * [Troubleshooting](#troubleshooting) 66 | * [Exit Codes](#exit-codes) 67 | * [Examples](#examples) 68 | 69 | --- 70 | 71 | ## Installation 72 | 73 | ### Docker (Recommended) 74 | 75 | The easiest way to use `mdast_cli` is via Docker: 76 | 77 | ```bash 78 | # Pull the latest image 79 | docker pull mobilesecurity/mdast_cli:latest 80 | 81 | # Run with volume mounts for files and reports 82 
| docker run -it \ 83 | -v /path/to/apps:/mdast/files \ 84 | -v /path/to/reports:/mdast/report \ 85 | mobilesecurity/mdast_cli:latest \ 86 | --distribution_system file \ 87 | --file_path /mdast/files/app.apk \ 88 | --url "https://saas.mobile.appsec.world" \ 89 | --company_id 1 \ 90 | --token "YOUR_TOKEN" \ 91 | --profile_id 1 92 | ``` 93 | 94 | **Benefits:** 95 | - No Python environment setup required 96 | - All dependencies pre-installed 97 | - Consistent execution environment 98 | - Easy CI/CD integration 99 | 100 | ### PyPI Package 101 | 102 | Install from PyPI using pip: 103 | 104 | ```bash 105 | pip install mdast_cli 106 | ``` 107 | 108 | After installation, you can use the `mdast_cli` command directly: 109 | 110 | ```bash 111 | mdast_cli --help 112 | ``` 113 | 114 | **Note:** You may need to install additional system dependencies depending on your distribution system choice (e.g., `apkeep` for Google Play). 115 | 116 | ### From Source 117 | 118 | **Requirements:** 119 | - Python 3.9 or higher (3.9, 3.10, 3.11, 3.12 supported) 120 | 121 | Clone the repository and install dependencies: 122 | 123 | ```bash 124 | git clone https://github.com/Dynamic-Mobile-Security/mdast-cli.git 125 | cd mdast-cli 126 | pip install -r requirements.txt 127 | ``` 128 | 129 | Then run the script directly: 130 | 131 | ```bash 132 | python3 mdast_cli/mdast_scan.py --help 133 | ``` 134 | 135 | --- 136 | 137 | ## Quick Start 138 | 139 | ### Download an Application 140 | 141 | ```bash 142 | mdast_cli -d \ 143 | --distribution_system google_play \ 144 | --google_play_package_name com.example.app \ 145 | --google_play_email user@example.com \ 146 | --google_play_aas_token "YOUR_AAS_TOKEN" 147 | ``` 148 | 149 | ### Run a Security Scan 150 | 151 | ```bash 152 | mdast_cli \ 153 | --distribution_system file \ 154 | --file_path app.apk \ 155 | --url "https://saas.mobile.appsec.world" \ 156 | --company_id 1 \ 157 | --token "YOUR_TOKEN" \ 158 | --profile_id 1 \ 159 | --testcase_id 4 \ 160 | 
--summary_report_json_file_name report.json 161 | ``` 162 | 163 | --- 164 | 165 | ## Usage Modes 166 | 167 | ### Download Only Mode 168 | 169 | Use the `--download_only` (or `-d`) flag to download applications without scanning: 170 | 171 | ```bash 172 | mdast_cli -d --distribution_system google_play --google_play_package_name com.example.app ... 173 | ``` 174 | 175 | **Output:** 176 | - Downloads the application to the specified `--download_path` (default: `downloaded_apps/`) 177 | - Prints `DOWNLOAD_PATH=/path/to/app.apk` for CI/CD parsing 178 | - Exits immediately after download 179 | 180 | **Use Cases:** 181 | - Testing download functionality 182 | - Manual application analysis 183 | - CI/CD pipelines that handle scanning separately 184 | 185 | ### Scan Mode 186 | 187 | Without `--download_only`, the script will: 188 | 1. Download the application (if not using `file` distribution) 189 | 2. Upload to the DAST platform 190 | 3. Start a security scan 191 | 4. Wait for completion (unless `--nowait` is set) 192 | 5. Generate reports (if specified) 193 | 194 | --- 195 | 196 | ## Distribution Systems 197 | 198 | ### Local File 199 | 200 | Use a local APK/IPA file for scanning. 
201 | 202 | **Required Parameters:** 203 | - `--distribution_system file` 204 | - `--file_path ` - Absolute or relative path to the application file 205 | 206 | **Example:** 207 | ```bash 208 | mdast_cli \ 209 | --distribution_system file \ 210 | --file_path /path/to/app.apk \ 211 | --url "https://saas.mobile.appsec.world" \ 212 | --company_id 1 \ 213 | --token "YOUR_TOKEN" \ 214 | --profile_id 1 215 | ``` 216 | 217 | **Docker Example:** 218 | ```bash 219 | docker run -it \ 220 | -v /host/path/to/apps:/mdast/files \ 221 | -v /host/path/to/reports:/mdast/report \ 222 | mobilesecurity/mdast_cli:latest \ 223 | --distribution_system file \ 224 | --file_path /mdast/files/app.apk \ 225 | --url "https://saas.mobile.appsec.world" \ 226 | --company_id 1 \ 227 | --token "YOUR_TOKEN" \ 228 | --summary_report_json_file_name /mdast/report/report.json 229 | ``` 230 | 231 | --- 232 | 233 | ### Google Play 234 | 235 | Download applications from Google Play Store using `apkeep`. 236 | 237 | **Prerequisites:** 238 | 1. Install `apkeep`: 239 | ```bash 240 | # Using Rust (requires Rust toolchain) 241 | cargo install apkeep 242 | 243 | # Or download prebuilt binary from: 244 | # https://github.com/EFForg/apkeep/releases 245 | # Place binary in your PATH 246 | ``` 247 | 248 | 2. 
Obtain authentication: 249 | - **Option A**: OAuth2 token (recommended for first-time setup) 250 | - **Option B**: AAS token (reuse from previous runs) 251 | 252 | **Required Parameters:** 253 | - `--distribution_system google_play` 254 | - `--google_play_package_name ` - Package name (e.g., `com.instagram.android`) 255 | - `--google_play_email ` - Google account email 256 | - **Either:** 257 | - `--google_play_oauth2_token ` - OAuth2 token to fetch AAS automatically 258 | - `--google_play_aas_token ` - Direct AAS token (from previous run) 259 | 260 | **Optional Parameters:** 261 | - `--google_play_file_name ` - Custom filename for downloaded app 262 | - `--google_play_proxy ` - Proxy configuration (e.g., `socks5://user:pass@host:port`) 263 | 264 | **Getting Package Name:** 265 | - Visit the app page on Google Play 266 | - Package name is visible in the URL: `play.google.com/store/apps/details?id=` 267 | - Or check the app's page source 268 | 269 | **Authentication Flow:** 270 | 271 | 1. **First Run (OAuth2):** 272 | ```bash 273 | mdast_cli -d \ 274 | --distribution_system google_play \ 275 | --google_play_package_name com.example.app \ 276 | --google_play_email user@example.com \ 277 | --google_play_oauth2_token "ya29.a0AVvZ..." 278 | ``` 279 | - The script will fetch an AAS token automatically 280 | - **Save the AAS token from logs** for future runs 281 | - Look for `AAS token: aas_et/...` in the output 282 | 283 | 2. 
**Subsequent Runs (AAS Token):** 284 | ```bash 285 | mdast_cli -d \ 286 | --distribution_system google_play \ 287 | --google_play_package_name com.example.app \ 288 | --google_play_email user@example.com \ 289 | --google_play_aas_token "aas_et/AKppINZUCsnVs80yu3k4ZpiApuOlHlOnxSlwNNMOPjomkWDDbNi1SKd0PRTbOFSS6TNLQFlY70SIrUoxUnababWUcBXuhuVdpmrVUvff5etUCWqToxpRkHV8jf4RLcwX56AMkGhlslqrY4hrAH28-yCyOf9FFeLnhCo9p3ydbRrT5at3Le3Tnc-0CPILroJ_NldfLpDeQvBcj2BM_wBM-Tc" 290 | ``` 291 | 292 | **Important Notes:** 293 | - Use a **temporary Google account with 2FA disabled** (recommended) 294 | - Split APKs are automatically packaged into a ZIP archive 295 | - Python 3.9 or higher required (3.9, 3.10, 3.11, 3.12 supported) 296 | - AAS tokens are long-lived but may expire; keep OAuth2 token as backup 297 | 298 | **Example with Scan:** 299 | ```bash 300 | mdast_cli \ 301 | --distribution_system google_play \ 302 | --google_play_package_name com.instagram.android \ 303 | --google_play_email user@example.com \ 304 | --google_play_aas_token "YOUR_AAS_TOKEN" \ 305 | --google_play_file_name instagram_latest \ 306 | --url "https://saas.mobile.appsec.world" \ 307 | --company_id 1 \ 308 | --token "YOUR_TOKEN" \ 309 | --profile_id 1 \ 310 | --testcase_id 4 \ 311 | --summary_report_json_file_name report.json 312 | ``` 313 | 314 | --- 315 | 316 | ### AppStore 317 | 318 | Download iOS applications (.ipa) from the App Store. 
319 | 320 | **Prerequisites:** 321 | - iTunes account with valid credentials 322 | - 2FA enabled on the Apple ID 323 | - Application ID or Bundle ID 324 | 325 | **Required Parameters:** 326 | - `--distribution_system appstore` 327 | - **Either:** 328 | - `--appstore_app_id ` - Application ID from App Store URL 329 | - `--appstore_bundle_id ` - Bundle identifier 330 | - `--appstore_apple_id ` - iTunes account email 331 | - `--appstore_password ` - iTunes account password 332 | - `--appstore_2FA ` - 6-digit 2FA code 333 | 334 | **Optional Parameters:** 335 | - `--appstore_file_name ` - Custom filename for downloaded IPA 336 | 337 | **Getting App ID:** 338 | 1. Visit the app page in App Store (web or app) 339 | 2. Extract ID from URL: `apps.apple.com/app/id{APP_ID}` 340 | 3. Example: URL contains `id398129933` → use `398129933` 341 | 342 | **2FA Setup (First Time):** 343 | 1. Run the script with email and password 344 | 2. You'll receive a 2FA code on your device 345 | 3. Use the code with `--appstore_2FA` 346 | 4. **Save the combined password+2FA** format for 6 months: `password2FA` (e.g., `P@ssword742877`) 347 | 348 | **Deprecated Parameter:** 349 | - `--appstore_password2FA` - Will be removed on 01.05.2023. Use separate `--appstore_password` and `--appstore_2FA` instead. 
350 | 351 | **Example:** 352 | ```bash 353 | mdast_cli \ 354 | --distribution_system appstore \ 355 | --appstore_app_id 564177498 \ 356 | --appstore_apple_id user@icloud.com \ 357 | --appstore_password "YourPassword" \ 358 | --appstore_2FA 123456 \ 359 | --appstore_file_name my_app \ 360 | --url "https://saas.mobile.appsec.world" \ 361 | --company_id 2 \ 362 | --token "YOUR_TOKEN" \ 363 | --profile_id 1246 \ 364 | --architecture_id 3 365 | ``` 366 | 367 | **Troubleshooting:** 368 | - **"Wrong Apple ID" error**: Contact support to coordinate the Apple ID for AppStore integration 369 | - **Session expired**: Re-authenticate and save the new 2FA code 370 | - **Login errors**: Ensure 2FA is enabled and code is current (6-digit format) 371 | 372 | **Note:** This integration uses [ipatool](https://github.com/majd/ipatool) - thanks to all contributors! 373 | 374 | --- 375 | 376 | ### Firebase 377 | 378 | Download applications from Firebase App Distribution. 379 | 380 | **Prerequisites:** 381 | 1. Firebase project with App Distribution enabled 382 | 2. Service Account with `cloud-platform` scope 383 | 3. JSON key file for the Service Account 384 | 385 | **Required Parameters:** 386 | - `--distribution_system firebase` 387 | - `--firebase_project_number ` - Project number (integer) 388 | - `--firebase_app_id ` - Application ID (format: `1:PROJECT:android:APP_ID`) 389 | - `--firebase_account_json_path ` - Path to Service Account JSON key file 390 | - `--firebase_file_extension ` - File extension: `apk` or `ipa` 391 | 392 | **Optional Parameters:** 393 | - `--firebase_file_name ` - Custom filename (defaults to version name) 394 | 395 | **Finding Project Number:** 396 | 1. Go to [Firebase Console](https://console.firebase.google.com/) 397 | 2. Select your project 398 | 3. Click Settings (gear icon) → Project settings 399 | 4. Find "Project number" in the General tab 400 | 401 | **Finding App ID:** 402 | 1. In Project settings, scroll to "Your apps" 403 | 2. 
Find your app and copy the App ID 404 | 3. Format: `1:PROJECT_NUMBER:android:APP_ID` or `1:PROJECT_NUMBER:ios:APP_ID` 405 | 406 | **Creating Service Account:** 407 | 1. Go to [Google Cloud Console](https://console.cloud.google.com/) 408 | 2. Navigate to **IAM & Admin** → **Service accounts** 409 | 3. Create a new Service Account or use existing 410 | 4. Grant the `Cloud Platform` scope (`/auth/cloud-platform`) 411 | 5. Create and download a JSON key file 412 | 6. Save the file securely (keep it out of version control!) 413 | 414 | **Example:** 415 | ```bash 416 | mdast_cli -d \ 417 | --distribution_system firebase \ 418 | --firebase_project_number 1231231337 \ 419 | --firebase_app_id "1:1337:android:123123" \ 420 | --firebase_account_json_path /path/to/service_account.json \ 421 | --firebase_file_extension apk \ 422 | --firebase_file_name my_app 423 | ``` 424 | 425 | **Security Note:** Never commit Service Account JSON files to version control. Use environment variables or secure secret management in CI/CD. 426 | 427 | --- 428 | 429 | ### Nexus Repository 430 | 431 | Download applications from Nexus Repository Manager 3.x (Maven repository). 
432 | 433 | **Required Parameters:** 434 | - `--distribution_system nexus` 435 | - `--nexus_url ` - Nexus server URL (e.g., `https://nexus.example.com`) 436 | - `--nexus_login ` - Nexus username 437 | - `--nexus_password ` - Nexus password 438 | - `--nexus_repo_name ` - Repository name 439 | - `--nexus_group_id ` - Maven group ID 440 | - `--nexus_artifact_id ` - Maven artifact ID 441 | - `--nexus_version ` - Application version 442 | 443 | **Maven Coordinates:** 444 | The script uses standard Maven coordinates to locate artifacts: 445 | - Format: `group_id:artifact_id:version` 446 | - Example: `com.example:myapp:1.0.0` 447 | 448 | **Uploading to Nexus:** 449 | See these gists for uploading apps to Nexus: 450 | - [Android APK upload script](https://gist.github.com/Dynamic-Mobile-Security/9730e8eaa1b5d5f7f21e28beb63561a8) 451 | - [iOS IPA upload script](https://gist.github.com/Dynamic-Mobile-Security/66daaf526e0109636d8bcdc21fd10779) 452 | 453 | **Example:** 454 | ```bash 455 | mdast_cli -d \ 456 | --distribution_system nexus \ 457 | --nexus_url https://nexus.example.com \ 458 | --nexus_login myuser \ 459 | --nexus_password mypass \ 460 | --nexus_repo_name releases \ 461 | --nexus_group_id com.example \ 462 | --nexus_artifact_id myapp \ 463 | --nexus_version 1.0.0 464 | ``` 465 | 466 | --- 467 | 468 | ### Nexus2 Repository 469 | 470 | Download applications from Nexus Repository Manager 2.x. 
471 | 472 | **Required Parameters:** 473 | - `--distribution_system nexus2` 474 | - `--nexus2_url ` - Nexus2 server URL 475 | - `--nexus2_login ` - Nexus2 username 476 | - `--nexus2_password ` - Nexus2 password 477 | - `--nexus2_repo_name ` - Repository name 478 | - `--nexus2_group_id ` - Maven group ID 479 | - `--nexus2_artifact_id ` - Maven artifact ID 480 | - `--nexus2_version ` - Application version 481 | - `--nexus2_extension ` - File extension (e.g., `apk`, `ipa`) 482 | 483 | **Optional Parameters:** 484 | - `--nexus2_file_name ` - Custom filename 485 | 486 | **Example:** 487 | ```bash 488 | mdast_cli -d \ 489 | --distribution_system nexus2 \ 490 | --nexus2_url http://nexus:8081/nexus/ \ 491 | --nexus2_login admin \ 492 | --nexus2_password admin123 \ 493 | --nexus2_repo_name releases \ 494 | --nexus2_group_id com.example \ 495 | --nexus2_artifact_id myapp \ 496 | --nexus2_version 1.337 \ 497 | --nexus2_extension apk \ 498 | --nexus2_file_name my_app 499 | ``` 500 | 501 | --- 502 | 503 | ### RuStore 504 | 505 | Download Android applications from [RuStore](https://rustore.ru/) (Russian app store). 506 | 507 | **Required Parameters:** 508 | - `--distribution_system rustore` 509 | - `--rustore_package_name ` - Package name (e.g., `ru.example.app`) 510 | 511 | **Getting Package Name:** 512 | - Visit the app page on RuStore 513 | - Package name is typically visible in the URL or app details 514 | 515 | **Example:** 516 | ```bash 517 | mdast_cli -d \ 518 | --distribution_system rustore \ 519 | --rustore_package_name ru.example.app 520 | ``` 521 | 522 | --- 523 | 524 | ### AppGallery 525 | 526 | Download applications from [Huawei AppGallery](https://appgallery.huawei.com/). 527 | 528 | **Required Parameters:** 529 | - `--distribution_system appgallery` 530 | - `--appgallery_app_id ` - Application ID from AppGallery 531 | 532 | **Optional Parameters:** 533 | - `--appgallery_file_name ` - Custom filename 534 | 535 | **Getting App ID:** 536 | 1. 
Visit the app page in AppGallery 537 | 2. Extract ID from URL: `appgallery.huawei.com/app/{APP_ID}` 538 | 3. Example: URL contains `C101184875` → use `C101184875` 539 | 540 | **Example:** 541 | ```bash 542 | mdast_cli -d \ 543 | --distribution_system appgallery \ 544 | --appgallery_app_id C123456789 \ 545 | --appgallery_file_name huawei_app 546 | ``` 547 | 548 | --- 549 | 550 | ### RuMarket 551 | 552 | Download Android applications from [RuMarket](https://ruplay.market/apps/) (Russian app store). 553 | 554 | **Required Parameters:** 555 | - `--distribution_system rumarket` 556 | - `--rumarket_package_name ` - Package name 557 | 558 | **Example:** 559 | ```bash 560 | mdast_cli -d \ 561 | --distribution_system rumarket \ 562 | --rumarket_package_name com.example.app 563 | ``` 564 | 565 | --- 566 | 567 | ## Scan Configuration 568 | 569 | ### Scan Types 570 | 571 | #### Manual Scan (Default) 572 | 573 | When `--testcase_id` is **not** specified: 574 | - Application is installed on the device 575 | - Application is launched automatically 576 | - Waits 30 seconds for user interaction 577 | - Application is stopped 578 | - Security analysis is performed 579 | 580 | **Use Case:** Quick security checks, initial assessments 581 | 582 | **Example:** 583 | ```bash 584 | mdast_cli \ 585 | --distribution_system file \ 586 | --file_path app.apk \ 587 | --url "https://saas.mobile.appsec.world" \ 588 | --company_id 1 \ 589 | --token "YOUR_TOKEN" \ 590 | --profile_id 1 591 | # No --testcase_id = manual scan 592 | ``` 593 | 594 | #### Automated Scan with Testcase 595 | 596 | When `--testcase_id` is specified: 597 | - Previously recorded testcase is replayed 598 | - All recorded user interactions are executed 599 | - Comprehensive security analysis is performed 600 | 601 | **Use Case:** Deep security analysis, regression testing, CI/CD integration 602 | 603 | **Example:** 604 | ```bash 605 | mdast_cli \ 606 | --distribution_system file \ 607 | --file_path app.apk \ 608 | --url 
"https://saas.mobile.appsec.world" \ 609 | --company_id 1 \ 610 | --token "YOUR_TOKEN" \ 611 | --profile_id 1 \ 612 | --testcase_id 4 # Automated scan with testcase #4 613 | ``` 614 | 615 | ### Architecture Selection 616 | 617 | **Parameter:** `--architecture_id ` 618 | 619 | Select the target architecture/OS version for scanning: 620 | - If not specified, defaults to Android 11 or iOS 14 (depending on file type) 621 | - Use specific architecture ID for testing on different OS versions 622 | - Available architectures depend on your DAST platform configuration 623 | 624 | **Example:** 625 | ```bash 626 | mdast_cli ... --architecture_id 5 # Use architecture ID 5 627 | ``` 628 | 629 | ### Profile Management 630 | 631 | **Parameter:** `--profile_id ` (optional) 632 | 633 | - If **not specified**: A new profile is created automatically 634 | - If **specified**: Uses existing profile with the given ID 635 | - Profiles contain device configuration, app settings, and scan parameters 636 | 637 | **Auto-create in Existing Project:** 638 | ```bash 639 | mdast_cli ... --project_id 10 # Create profile in project #10 640 | ``` 641 | 642 | --- 643 | 644 | ## Reports 645 | 646 | ### JSON Summary Report 647 | 648 | Generate a structured JSON report with scan summary and statistics. 649 | 650 | **Parameter:** `--summary_report_json_file_name ` 651 | 652 | **Output Format:** 653 | - Total number of vulnerabilities 654 | - Vulnerability breakdown by severity 655 | - Scan metadata (timestamp, duration, etc.) 
656 | - Application information 657 | 658 | **Example:** 659 | ```bash 660 | mdast_cli \ 661 | --distribution_system file \ 662 | --file_path app.apk \ 663 | --url "https://saas.mobile.appsec.world" \ 664 | --company_id 1 \ 665 | --token "YOUR_TOKEN" \ 666 | --profile_id 1 \ 667 | --summary_report_json_file_name scan_results.json 668 | ``` 669 | 670 | **Use Case:** CI/CD integration, automated reporting, data analysis 671 | 672 | ### PDF Report 673 | 674 | Generate a detailed PDF report with full scan results. 675 | 676 | **Parameter:** `--pdf_report_file_name ` 677 | 678 | **Output Format:** 679 | - Detailed vulnerability descriptions 680 | - Screenshots and evidence 681 | - Remediation recommendations 682 | - Executive summary 683 | 684 | **Example:** 685 | ```bash 686 | mdast_cli \ 687 | --distribution_system file \ 688 | --file_path app.apk \ 689 | --url "https://saas.mobile.appsec.world" \ 690 | --company_id 1 \ 691 | --token "YOUR_TOKEN" \ 692 | --profile_id 1 \ 693 | --pdf_report_file_name detailed_report.pdf 694 | ``` 695 | 696 | **Use Case:** Compliance reporting, stakeholder presentations, documentation 697 | 698 | ### CR Report 699 | 700 | Generate a CR (Change Request) report in HTML format. 
701 | 702 | **Required Parameters (when `--cr_report` is set):** 703 | - `--cr_report` - Enable CR report generation 704 | - `--stingray_login ` - Stingray platform login 705 | - `--stingray_password ` - Stingray platform password 706 | 707 | **Optional Parameters:** 708 | - `--organization_name ` - Organization name (default: "ООО Стингрей Технолоджиз") 709 | - `--engineer_name ` - Engineer name 710 | - `--controller_name ` - Controller name 711 | - `--use_ldap` - Use LDAP authentication 712 | - `--authority_server_id ` - Authority server ID 713 | - `--cr_report_path ` - Output file path (default: `stingray-CR-report.html`) 714 | 715 | **Example:** 716 | ```bash 717 | mdast_cli \ 718 | --distribution_system file \ 719 | --file_path app.apk \ 720 | --url "https://saas.mobile.appsec.world" \ 721 | --company_id 1 \ 722 | --token "YOUR_TOKEN" \ 723 | --profile_id 1337 \ 724 | --architecture_id 3 \ 725 | --cr_report \ 726 | --stingray_login user@example.com \ 727 | --stingray_password "password" \ 728 | --organization_name "My Company" \ 729 | --engineer_name "John Doe" \ 730 | --controller_name "Jane Smith" \ 731 | --cr_report_path custom-report.html 732 | ``` 733 | 734 | --- 735 | 736 | ## Advanced Features 737 | 738 | ### Non-blocking Scans 739 | 740 | Use `--nowait` (or `-nw`) to start a scan and exit immediately without waiting for completion. 741 | 742 | **Use Case:** 743 | - Long-running scans 744 | - Fire-and-forget scenarios 745 | - CI/CD pipelines that poll for results separately 746 | 747 | **Example:** 748 | ```bash 749 | mdast_cli \ 750 | --distribution_system file \ 751 | --file_path app.apk \ 752 | --url "https://saas.mobile.appsec.world" \ 753 | --company_id 1 \ 754 | --token "YOUR_TOKEN" \ 755 | --profile_id 1 \ 756 | --nowait # Exit immediately after starting scan 757 | ``` 758 | 759 | **Note:** Reports will not be generated when using `--nowait`. Poll the API separately for results. 
760 | 761 | ### Long-running Scans 762 | 763 | Use `--long_wait` to extend the maximum wait time to 1 week (instead of default timeout). 764 | 765 | **Use Case:** 766 | - Very long testcases 767 | - Deep analysis scenarios 768 | - Extended monitoring periods 769 | 770 | **Example:** 771 | ```bash 772 | mdast_cli \ 773 | --distribution_system file \ 774 | --file_path app.apk \ 775 | --url "https://saas.mobile.appsec.world" \ 776 | --company_id 1 \ 777 | --token "YOUR_TOKEN" \ 778 | --profile_id 1 \ 779 | --testcase_id 4 \ 780 | --long_wait # Wait up to 1 week for completion 781 | ``` 782 | 783 | ### Appium Integration 784 | 785 | Use `--appium_script_path` to provide a custom Appium script for automated testing. 786 | 787 | **Parameter:** `--appium_script_path ` 788 | 789 | **Use Case:** 790 | - Custom test automation 791 | - Integration with existing Appium test suites 792 | - Advanced interaction scenarios 793 | 794 | **Example:** 795 | ```bash 796 | mdast_cli \ 797 | --distribution_system file \ 798 | --file_path app.apk \ 799 | --url "https://saas.mobile.appsec.world" \ 800 | --company_id 1 \ 801 | --token "YOUR_TOKEN" \ 802 | --profile_id 1 \ 803 | --appium_script_path /path/to/appium_script.py 804 | ``` 805 | 806 | ### Download Path Configuration 807 | 808 | **Parameter:** `--download_path ` (or `-p `) 809 | 810 | Specify where downloaded applications should be saved. 
811 | 812 | - **Default:** `downloaded_apps/` 813 | - Can be absolute or relative path 814 | - Directory is created automatically if it doesn't exist 815 | 816 | **Example:** 817 | ```bash 818 | mdast_cli -d \ 819 | --distribution_system google_play \ 820 | --google_play_package_name com.example.app \ 821 | --google_play_email user@example.com \ 822 | --google_play_aas_token "TOKEN" \ 823 | --download_path /custom/path/to/apps 824 | ``` 825 | 826 | --- 827 | 828 | ## Troubleshooting 829 | 830 | ### Common Issues 831 | 832 | #### Google Play Authentication Errors 833 | 834 | **Problem:** `DF-DFERH-01` error or authentication failures 835 | 836 | **Solutions:** 837 | 1. Ensure `apkeep` is installed and in PATH: `apkeep --version` 838 | 2. Verify OAuth2 token is valid and not expired 839 | 3. Use a temporary Google account with 2FA disabled 840 | 4. Check that AAS token format is correct (starts with `aas_et/`) 841 | 842 | #### AppStore Login Issues 843 | 844 | **Problem:** "Wrong Apple ID" or login failures 845 | 846 | **Solutions:** 847 | 1. Ensure 2FA is enabled on the Apple ID 848 | 2. Use the 6-digit 2FA code (not the longer backup code) 849 | 3. Format password+2FA correctly: `password2FA` (e.g., `P@ssword742877`) 850 | 4. Contact support if Apple ID needs to be whitelisted 851 | 5. Re-authenticate if session expired (sessions last ~6 months) 852 | 853 | #### Firebase Service Account Errors 854 | 855 | **Problem:** Authentication or permission errors 856 | 857 | **Solutions:** 858 | 1. Verify Service Account JSON file path is correct 859 | 2. Ensure Service Account has `cloud-platform` scope enabled 860 | 3. Check that Service Account has access to Firebase App Distribution 861 | 4. Verify project number and app ID format 862 | 5. Ensure JSON file is valid (not corrupted) 863 | 864 | #### Network/Timeout Issues 865 | 866 | **Problem:** Downloads fail or time out 867 | 868 | **Solutions:** 869 | 1. Check network connectivity 870 | 2. 
Verify distribution system URLs are accessible 871 | 3. Use `--google_play_proxy` for Google Play if behind firewall 872 | 4. Increase timeout values (if configurable) 873 | 5. Check firewall/proxy settings 874 | 875 | #### File Not Found Errors 876 | 877 | **Problem:** Application file not found after download 878 | 879 | **Solutions:** 880 | 1. Check `--download_path` directory permissions 881 | 2. Verify disk space is available 882 | 3. Check file system permissions 883 | 4. Review download logs for errors 884 | 5. Ensure distribution system returned valid file 885 | 886 | ### Getting Help 887 | 888 | 1. **Check Logs:** Review console output for detailed error messages 889 | 2. **Verify Parameters:** Use `--help` to see all available options 890 | 3. **Test Download Only:** Use `-d` flag to isolate download issues 891 | 4. **Contact Support:** Reach out with: 892 | - Full command used 893 | - Error messages 894 | - Distribution system and parameters (redact sensitive data) 895 | - Log output 896 | 897 | --- 898 | 899 | ## Exit Codes 900 | 901 | The script uses standardized exit codes for CI/CD integration: 902 | 903 | | Code | Constant | Description | 904 | |------|----------|-------------| 905 | | 0 | `SUCCESS` | Operation completed successfully | 906 | | 1 | `INVALID_ARGS` | Invalid command-line arguments | 907 | | 2 | `AUTH_ERROR` | Authentication failed | 908 | | 3 | `DOWNLOAD_FAILED` | Application download failed | 909 | | 4 | `NETWORK_ERROR` | Network/connection error | 910 | | 5 | `SCAN_FAILED` | Scan execution or upload failed | 911 | 912 | **Example CI/CD Usage:** 913 | ```bash 914 | #!/bin/bash 915 | mdast_cli --distribution_system file --file_path app.apk ... 916 | EXIT_CODE=$? 
917 | 918 | if [ $EXIT_CODE -eq 0 ]; then 919 | echo "Scan completed successfully" 920 | elif [ $EXIT_CODE -eq 3 ]; then 921 | echo "Download failed - check distribution system" 922 | exit 1 923 | else 924 | echo "Scan failed with code $EXIT_CODE" 925 | exit 1 926 | fi 927 | ``` 928 | 929 | --- 930 | 931 | ## Examples 932 | 933 | ### Complete CI/CD Pipeline Example 934 | 935 | ```bash 936 | #!/bin/bash 937 | set -e 938 | 939 | # Download application 940 | mdast_cli -d \ 941 | --distribution_system google_play \ 942 | --google_play_package_name com.example.app \ 943 | --google_play_email ci@example.com \ 944 | --google_play_aas_token "$GOOGLE_PLAY_AAS_TOKEN" \ 945 | --download_path ./artifacts 946 | 947 | # Extract download path from output 948 | DOWNLOAD_PATH=$(mdast_cli -d ... 2>&1 | grep "DOWNLOAD_PATH=" | cut -d'=' -f2) 949 | 950 | # Run security scan 951 | mdast_cli \ 952 | --distribution_system file \ 953 | --file_path "$DOWNLOAD_PATH" \ 954 | --url "$DAST_URL" \ 955 | --company_id "$COMPANY_ID" \ 956 | --token "$DAST_TOKEN" \ 957 | --profile_id "$PROFILE_ID" \ 958 | --testcase_id "$TESTCASE_ID" \ 959 | --summary_report_json_file_name ./reports/scan_results.json \ 960 | --pdf_report_file_name ./reports/scan_results.pdf 961 | 962 | # Check results 963 | if [ -f ./reports/scan_results.json ]; then 964 | CRITICAL_COUNT=$(jq '.vulnerabilities.critical' ./reports/scan_results.json) 965 | if [ "$CRITICAL_COUNT" -gt 0 ]; then 966 | echo "Critical vulnerabilities found!" 
967 | exit 1 968 | fi 969 | fi 970 | ``` 971 | 972 | ### Docker Compose Example 973 | 974 | ```yaml 975 | version: '3.8' 976 | services: 977 | mdast-scan: 978 | image: mobilesecurity/mdast_cli:latest 979 | volumes: 980 | - ./apps:/mdast/files 981 | - ./reports:/mdast/report 982 | environment: 983 | - DAST_URL=https://saas.mobile.appsec.world 984 | - COMPANY_ID=1 985 | - TOKEN=${DAST_TOKEN} 986 | command: 987 | - --distribution_system 988 | - file 989 | - --file_path 990 | - /mdast/files/app.apk 991 | - --url 992 | - ${DAST_URL} 993 | - --company_id 994 | - ${COMPANY_ID} 995 | - --token 996 | - ${TOKEN} 997 | - --profile_id 998 | - "1" 999 | - --summary_report_json_file_name 1000 | - /mdast/report/results.json 1001 | ``` 1002 | 1003 | ### Multi-Distribution Example 1004 | 1005 | ```bash 1006 | # Test multiple distribution systems 1007 | for DIST in google_play appstore firebase; do 1008 | echo "Testing $DIST..." 1009 | mdast_cli -d \ 1010 | --distribution_system "$DIST" \ 1011 | --download_path "./downloads/$DIST" \ 1012 | # ... distribution-specific parameters 1013 | done 1014 | ``` 1015 | 1016 | --- 1017 | 1018 | ## Additional Resources 1019 | 1020 | - **Docker Hub:** https://hub.docker.com/r/mobilesecurity/mdast_cli 1021 | - **PyPI Package:** https://pypi.org/project/mdast-cli/ 1022 | - **GitHub Repository:** https://github.com/Dynamic-Mobile-Security/mdast-cli 1023 | - **apkeep (Google Play):** https://github.com/EFForg/apkeep 1024 | - **ipatool (AppStore):** https://github.com/majd/ipatool 1025 | 1026 | --- 1027 | 1028 | ## License 1029 | 1030 | See LICENSE file for details. 1031 | 1032 | --- 1033 | 1034 | ## Support 1035 | 1036 | For issues, questions, or contributions, please visit the GitHub repository or contact support. 1037 | 1038 | **Note:** This documentation is maintained alongside the codebase. For the latest information, always refer to the version-specific documentation or the `--help` command output. 
1039 | -------------------------------------------------------------------------------- /__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Dynamic-Mobile-Security/mdast-cli/804bffe421bfc66bf37e562f088a5e4b78b139b9/__init__.py -------------------------------------------------------------------------------- /apkeep_linux: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Dynamic-Mobile-Security/mdast-cli/804bffe421bfc66bf37e562f088a5e4b78b139b9/apkeep_linux -------------------------------------------------------------------------------- /apkeep_macos: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Dynamic-Mobile-Security/mdast-cli/804bffe421bfc66bf37e562f088a5e4b78b139b9/apkeep_macos -------------------------------------------------------------------------------- /mdast_cli/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Dynamic-Mobile-Security/mdast-cli/804bffe421bfc66bf37e562f088a5e4b78b139b9/mdast_cli/__init__.py -------------------------------------------------------------------------------- /mdast_cli/distribution_systems/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Dynamic-Mobile-Security/mdast-cli/804bffe421bfc66bf37e562f088a5e4b78b139b9/mdast_cli/distribution_systems/__init__.py -------------------------------------------------------------------------------- /mdast_cli/distribution_systems/appgallery.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import os 3 | import time 4 | 5 | import requests 6 | from tqdm import tqdm 7 | 8 | from mdast_cli.helpers.file_utils import ensure_download_dir, cleanup_file 9 | 10 | logger = 
def get_app_info(app_id, timeout=30):
    """Fetch application metadata from the Huawei AppGallery web API.

    Args:
        app_id: AppGallery application id (e.g. ``C101184875``).
        timeout: per-request network timeout in seconds. Added so a stalled
            connection cannot hang the CLI indefinitely (requests has no
            default timeout); default keeps the call backward-compatible.

    Returns:
        dict with keys ``integration_type``, ``icon_url``, ``md5``, ``name``,
        ``package_name``, ``target_sdk``, ``version_code``, ``version_name``
        and ``file_size``.

    Raises:
        RuntimeError: on a non-200 response or when the application cannot
            be found in the store listing.
    """
    # The store API requires an 'Interface-Code' header composed of a
    # server-issued code plus a timestamp with its decimal point stripped.
    get_interface_code_resp = requests.post(
        'https://web-drru.hispace.dbankcloud.ru/webedge/getInterfaceCode',
        timeout=timeout)
    if get_interface_code_resp.status_code != 200:
        raise RuntimeError('Appgallery - Cannot get interface code, connection error')
    interface_code = get_interface_code_resp.json()
    timestamp = str(time.time()).replace('.', '')
    headers = {
        'Interface-Code': f'{interface_code}_{timestamp}'
    }
    req = requests.get(f'https://web-drru.hispace.dbankcloud.ru/uowap/index?'
                       f'method=internal.getTabDetail&uri=app%7C{app_id}',
                       headers=headers, timeout=timeout)
    if req.status_code != 200:
        raise RuntimeError(f'Appgallery - Failed to get application info. '
                           f'Request return status code: {req.status_code}')
    resp_info = req.json()

    # Walk the tab-detail layout looking for the entry whose appid matches.
    for element in resp_info.get('layoutData', []):
        for item in element.get('dataList', []):
            if 'package' not in item:
                continue
            if app_id != item.get('appid'):
                continue

            app_info = {
                'integration_type': 'appgallery',
                'icon_url': item.get('icon'),
                'md5': item.get('md5'),
                'name': item.get('name'),
                'package_name': item.get('package'),
                'target_sdk': item.get('targetSDK'),
                'version_code': item.get('versionCode'),
                'version_name': item.get('versionName'),
                'file_size': item.get('size'),
            }

            logger.info(f"Appgallery - Successfully found app with id: {app_id}, "
                        f"package name: {app_info['package_name']},"
                        f" version:{app_info['version_name']},"
                        f" name: {app_info['name']}")
            return app_info

    raise RuntimeError(f'Appgallery - Cannot find application {app_id}')
def appgallery_download_app(app_id, download_path, file_name=None):
    """Download an APK from AppGallery into *download_path*.

    Args:
        app_id: AppGallery application id.
        download_path: directory that will receive the APK (created if missing).
        file_name: optional basename (without extension) for the saved file;
            defaults to '<package_name>-<version_name>'.

    Returns:
        Path of the downloaded .apk file.

    Raises:
        RuntimeError: on HTTP failure or when writing the file fails.
    """
    logger.info(f'Appgallery - Try to download application with id {app_id}')
    app_info = get_app_info(app_id)
    # Use a context manager so the streamed connection is always released
    # (previously the response object was never closed).
    with requests.get(f'https://appgallery.cloud.huawei.com/appdl/{app_id}', stream=True) as r:
        if r.status_code != 200:
            raise RuntimeError(f'Appgallery - Failed to download application. '
                               f'Something goes wrong. Request return status code: {r.status_code}')

        ensure_download_dir(download_path)
        logger.info(f'Appgallery - Creating directory {download_path} for downloading app from Appgallery')

        if file_name is None:
            file_path = f"{download_path}/{app_info['package_name']}-{app_info['version_name']}.apk"
        else:
            file_path = f"{download_path}/{file_name}.apk"

        try:
            total_size = int(r.headers.get('content-length', 0))
            chunk_size = 512 * 1024
            with open(file_path, 'wb') as f:
                with tqdm(total=total_size, unit='B', unit_scale=True, unit_divisor=1024,
                          desc=f"AppGallery - Downloading {app_info.get('name', app_id)}",
                          disable=total_size == 0) as pbar:
                    for chunk in r.iter_content(chunk_size=chunk_size):
                        if chunk:
                            f.write(chunk)
                            pbar.update(len(chunk))
        except Exception as e:
            # Cleanup partial file on error, then re-raise with context.
            cleanup_file(file_path)
            raise RuntimeError(f'Appgallery - Failed to write downloaded file: {e}') from e

    logger.info(f'Appgallery - Apk was downloaded from Appgallery to {file_path}')

    return file_path
def download_file(url, download_path, file_path):
    """Stream *url* into *file_path*, creating *download_path* first.

    Args:
        url: direct download URL for the .ipa payload.
        download_path: directory that will receive the file (created if missing).
        file_path: full destination path of the downloaded file.

    Returns:
        file_path, once the download has finished.

    Raises:
        RuntimeError: on a non-200 response or any write failure
            (the partial file is removed before re-raising).
    """
    # NOTE(review): verify=False disables TLS certificate validation for the
    # Apple CDN download — confirm this is required before tightening it.
    with requests.get(url, stream=True, verify=False) as r:
        if r.status_code != 200:
            # Fixed: the original message contained a stray trailing '"'.
            raise RuntimeError(f'Failed to download application. Request return status code: {r.status_code}')
        ensure_download_dir(download_path)
        logger.info(f'Creating directory {download_path} for downloading app from AppStore')
        try:
            total_size = int(r.headers.get('content-length', 0))
            chunk_size = 1 * 1024 * 1024
            with open(file_path, 'wb') as f:
                with tqdm(total=total_size, unit='B', unit_scale=True, unit_divisor=1024,
                          desc='AppStore - Downloading application',
                          disable=total_size == 0) as pbar:
                    for chunk in r.iter_content(chunk_size=chunk_size):
                        if chunk:
                            f.write(chunk)
                            pbar.update(len(chunk))
            if os.path.exists(file_path):
                logger.info('Application successfully downloaded')
            else:
                logger.info('Failed to download application. '
                            'Seems like something is wrong with your file path or app file is broken')
        except Exception as e:
            # Cleanup partial file on error, then re-raise with context.
            cleanup_file(file_path)
            raise RuntimeError(f'AppStore - Failed to write downloaded file: {e}') from e

    return file_path
class AppStore(object):
    """
    Downloads applications (.ipa files) from the Apple App Store.

    Authenticates with an Apple ID + 2FA password via StoreClient and caches
    the authenticated session on disk (pickle) so later runs can skip login.
    """

    def __init__(self, appstore_apple_id, appstore_password2FA):
        self.apple_id = appstore_apple_id
        self.pass2FA = appstore_password2FA
        sess = requests.Session()
        self.store = StoreClient(sess)
        self.login_by_session = False

        # Configure retry strategy compatible with both old and new urllib3
        # versions: in urllib3 1.26.0+ `method_whitelist` was renamed to
        # `allowed_methods`, so detect the installed version first.
        retry_kwargs = {
            'total': 8,
            'connect': 4,
            'read': 2,
            'backoff_factor': 0.3,
            'status_forcelist': [500, 502, 503, 504],
        }

        urllib3_version = tuple(map(int, urllib3.__version__.split('.')[:2]))
        methods = ['GET', 'POST', 'PUT', 'DELETE', 'HEAD', 'OPTIONS', 'TRACE']

        if urllib3_version >= (1, 26):
            # urllib3 >= 1.26.0 uses allowed_methods
            retry_kwargs['allowed_methods'] = set(methods)
        else:
            # urllib3 < 1.26.0 uses method_whitelist
            retry_kwargs['method_whitelist'] = methods

        retry_strategy = Retry(**retry_kwargs)

        sess.mount("https://", HTTPAdapter(max_retries=retry_strategy))
        sess.mount("http://", HTTPAdapter(max_retries=retry_strategy))

    # NOTE(review): lru_cache on a method keys on `self` and keeps the
    # instance alive for the cache's lifetime (ruff B019). Here it also
    # deduplicates repeated login(force) calls on the same instance, which
    # get_app_info/download_app appear to rely on — confirm before removing.
    @lru_cache
    def login(self, force=False):
        """Authenticate against iTunes, reusing an on-disk session if present.

        Args:
            force: drop any cached session directory and log in from scratch.

        Raises:
            RuntimeError: when authentication fails (bad credentials or an
                expired 2FA code).
        """
        self.login_by_session = False
        session_cache = os.path.join('appstore_sessions/', self.apple_id.split("@")[0].replace(".", ""))
        if force and os.path.exists(session_cache):
            shutil.rmtree(session_cache)

        if os.path.exists(f'{session_cache}/session.pkl'):
            logger.info(f'Trying to load session for {self.apple_id} iTunes account')
            with open(f'{session_cache}/session.pkl', "rb") as file:
                try:
                    # Session files are produced locally by this tool; never
                    # unpickle session files from untrusted sources.
                    self.store = pickle.load(file)
                    self.login_by_session = True
                    logger.info(f'Loaded session for {self.apple_id}')
                except Exception:
                    shutil.rmtree(session_cache)
                    logger.info('Session was corrupted, deleting it')

        if self.login_by_session:
            return

        try:
            logger.info('Logging into iTunes')
            self.store.authenticate(self.apple_id, self.pass2FA)
            logger.info(f'Successfully logged in as {self.store.account_name}')

            ensure_download_dir(session_cache)
            with open(f'{session_cache}/session.pkl', "wb") as file:
                pickle.dump(self.store, file)
            logger.info(f'Dumped session for {self.apple_id}')
        except StoreException as e:
            raise RuntimeError(f'Failed to download application. Seems like your credentials are incorrect '
                               f'or your 2FA code expired. Message: {e.req} {e.err_msg} {e.err_type}')

    def get_app_info(self, app_id=None, bundle_id=None, country='US'):
        """Look up App Store metadata by application id or bundle id.

        Returns:
            dict with integration_type, appstore_id, package_name,
            version_name, min_sdk_version, file_size and icon_url keys.

        Raises:
            ValueError: when neither app_id nor bundle_id is given.
            RuntimeError: when the lookup fails or the app is not found.
        """
        if not app_id and not bundle_id:
            # Fixed: previously raised a bare string, which itself raises
            # TypeError ("exceptions must derive from BaseException").
            raise ValueError('One of properties ApplicationID or BundleID should be set')

        self.login(True)
        resp_info = self.store.find_app(app_id=app_id, bundle_id=bundle_id, country=country).json()
        try:
            app_info = resp_info['results'][0]
        except KeyError:
            if 'errorMessage' in resp_info:
                raise RuntimeError(f'App Store - error: {resp_info["errorMessage"]}')
            else:
                raise RuntimeError(f'App Store - error: {resp_info}')
        except IndexError:
            raise RuntimeError('App Store - Application not found') from None
        return {
            'integration_type': 'app_store',
            'appstore_id': app_info['trackId'],
            'package_name': app_info['bundleId'],
            'version_name': app_info['version'],
            'min_sdk_version': app_info['minimumOsVersion'],
            'file_size': app_info['fileSizeBytes'],
            'icon_url': app_info['artworkUrl100']
        }

    def _download_app_int(self, download_path, app_id=None, bundle_id=None, country='US', file_name=None):
        """Purchase (if needed) and download one app; return (file_path, md5).

        When only *bundle_id* is given, the numeric app_id is resolved first
        via the iTunes search API.
        """
        if not app_id:
            logger.info(f'Trying to find app in App Store with bundle id {bundle_id}')
            found_by_bundle_resp = self.store.find_app(bundle_id=bundle_id, country=country)
            resp_info = found_by_bundle_resp.json()
            if found_by_bundle_resp.status_code != 200 or resp_info['resultCount'] != 1:
                raise RuntimeError('Application with your bundle id not found, probably you enter invalid bundle')

            app_info = resp_info['results'][0]
            logger.info(f'Successfully found application by bundle id ({bundle_id}) '
                        f'with name: "{app_info["trackName"]}", version: {app_info["version"]},'
                        f' app_id: {app_info["trackId"]}')
            app_id = app_info["trackId"]

        logger.info(f'Trying to purchase app with id {app_id}')
        purchase_resp = self.store.purchase(app_id)
        # 200 = purchased now, 500 = already owned by this account; any other
        # status falls through and fails below when download info is empty.
        if purchase_resp.status_code == 200:
            logger.info(f'App was successfully purchased for {self.apple_id} account')
        elif purchase_resp.status_code == 500:
            logger.info(f'This app was purchased before for {self.apple_id} account')
        logger.info(f'Retrieving download info for app with id: {app_id}')
        download_resp = self.store.download(app_id)
        if not download_resp.songList:
            raise RuntimeError('Failed to get app download info! Check your parameters')

        downloaded_app_info = download_resp.songList[0]

        app_name = downloaded_app_info.metadata.bundleDisplayName
        app_id = downloaded_app_info.songId
        app_bundle_id = downloaded_app_info.metadata.softwareVersionBundleId
        app_version = downloaded_app_info.metadata.bundleShortVersionString
        md5 = downloaded_app_info.md5

        logger.info(
            f'Downloading app is {app_name} ({app_bundle_id}) with app_id {app_id} and version {app_version}')

        if not file_name:
            file_name = '%s-%s.ipa' % (app_name, app_version)
        else:
            file_name = '%s-%s.ipa' % (file_name, app_version)

        file_path = os.path.join(download_path, file_name)
        logger.info(f'Downloading ipa to {file_path}')
        download_file(downloaded_app_info.URL, download_path, file_path)

        # Patch the downloaded IPA in place: inject iTunesMetadata.plist and
        # the DRM sinf blobs so the package is bound to this Apple ID.
        try:
            with zipfile.ZipFile(file_path, 'a', compression=zipfile.ZIP_DEFLATED) as ipa_file:
                logger.info('AppStore - Creating iTunesMetadata.plist with metadata info')
                metadata = downloaded_app_info.metadata.as_dict()
                metadata["apple-id"] = self.apple_id
                metadata["userName"] = self.apple_id
                ipa_file.writestr("iTunesMetadata.plist", plistlib.dumps(metadata))

                # Find the app directory under Payload/, breaking as soon as
                # it is found to keep memory usage down on huge archives.
                logger.info('AppStore - Searching for Payload directory in IPA')
                appContentDir = None
                try:
                    for name in ipa_file.namelist():
                        if name.startswith('Payload/') and len(name.strip('/').split('/')) == 2:
                            appContentDir = name.rstrip('/')
                            logger.info(f'AppStore - Found Payload directory: {appContentDir}')
                            break
                except MemoryError:
                    raise RuntimeError('AppStore - Out of memory while processing IPA file. IPA file may be too large.')

                if not appContentDir:
                    raise RuntimeError('AppStore - Failed to find Payload directory in IPA file')

                # Read only the (small) manifest that lists the sinf paths.
                manifest_path = appContentDir + '/SC_Info/Manifest.plist'
                try:
                    scManifestData = ipa_file.read(manifest_path)
                except KeyError:
                    raise RuntimeError(f'AppStore - Failed to find {manifest_path} in IPA file')

                scManifest = plistlib.loads(scManifestData)

                sinfs = {c.id: c.sinf for c in downloaded_app_info.sinfs}
                logger.info(f'AppStore - Writing {len(scManifest["SinfPaths"])} sinf files to IPA')
                for i, sinfPath in enumerate(scManifest['SinfPaths']):
                    ipa_file.writestr(appContentDir + '/' + sinfPath, sinfs[i])
        except MemoryError as e:
            cleanup_file(file_path)
            raise RuntimeError(f'AppStore - Out of memory while processing IPA file: {e}. '
                               f'Try increasing available memory or processing on a machine with more RAM.')

        return file_path, md5

    def download_app(self, download_path, app_id=None, bundle_id=None, country='US', file_name=None):
        """Download an app, retrying once with a forced fresh login.

        Returns:
            (file_path, md5) of the downloaded .ipa, or (None, None) if the
            cached-session attempt failed and the retry was not reached.
        """
        file_path, md5 = None, None
        for force in (False, True):  # try first time with possible stored session, second time with forced login
            try:
                self.login(force=force)
                file_path, md5 = self._download_app_int(download_path, app_id, bundle_id, country, file_name)
                break
            except StoreException as e:
                if not self.login_by_session:  # login by credentials, still with error
                    raise RuntimeError(f'Failed to download application. Seems like your app_id does not exist '
                                       f'or you did not purchase this paid app from apple account before. '
                                       f'Message: {e.req} {e.err_msg} {e.err_type}')

        return file_path, md5
_AUTH_FIELDS = ("appleId", "attempt", "createSession", "guid", "password", "rmp", "why")


def _str_field(name):
    """Build a property that stores *name* and rejects non-str assignments."""
    attr = f"_{name}"

    def getter(self):
        return getattr(self, attr)

    def setter(self, value):
        if not isinstance(value, str):
            raise TypeError(f"{name} must be str")
        setattr(self, attr, value)

    return property(getter, setter)


class StoreAuthenticateReq:
    """Request payload for the iTunes Store authenticate endpoint.

    All fields are plain strings; assignment through the properties is
    type-checked, while the constructor accepts None (unset) for any field.
    """

    _types_map = {name: {"type": str, "subtype": None} for name in _AUTH_FIELDS}
    _formats_map = {}
    _validations_map = {name: {"required": True} for name in _AUTH_FIELDS}

    def __init__(
        self,
        appleId: str = None,
        attempt: str = None,
        createSession: str = None,
        guid: str = None,
        password: str = None,
        rmp: str = None,
        why: str = None,
    ):
        # Assign storage attributes directly so that None (the default) is
        # accepted at construction time; the property setters are stricter.
        self._appleId = appleId
        self._attempt = attempt
        self._createSession = createSession
        self._guid = guid
        self._password = password
        self._rmp = rmp
        self._why = why

    appleId = _str_field("appleId")
    attempt = _str_field("attempt")
    createSession = _str_field("createSession")
    guid = _str_field("guid")
    password = _str_field("password")
    rmp = _str_field("rmp")
    why = _str_field("why")

    @staticmethod
    def from_dict(d):
        """Build a request from a dict; unknown keys are ignored.

        The generated original guarded each value on hasattr(str, "from_dict"),
        which is never true, so values are taken verbatim here.
        """
        return StoreAuthenticateReq(**{k: d[k] for k in _AUTH_FIELDS if k in d})

    def as_dict(self):
        """Return only the fields that are set (not None), as a plain dict."""
        return {
            name: getattr(self, f"_{name}")
            for name in _AUTH_FIELDS
            if getattr(self, f"_{name}") is not None
        }

    def __repr__(self):
        # Fixed: the generated __repr__ formatted an empty string literal
        # ("".format(...)) and therefore always returned ''.
        parts = []
        for name in _AUTH_FIELDS:
            value = getattr(self, f"_{name}")
            if isinstance(value, bytes):
                value = value[:20]
            parts.append(f"{name}={limitedRepr(value)}")
        return "<StoreAuthenticateReq {}>".format(" ".join(parts))
"appExtVrsId": { 38 | "required": True, 39 | }, 40 | "guid": { 41 | "required": True, 42 | }, 43 | "hasBeenAuthedForBuy": { 44 | "required": False, 45 | }, 46 | "isInApp": { 47 | "required": False, 48 | }, 49 | "kbsync": { 50 | "required": True, 51 | }, 52 | "machineName": { 53 | "required": False, 54 | }, 55 | "mtApp": { 56 | "required": False, 57 | }, 58 | "mtClientId": { 59 | "required": False, 60 | }, 61 | "mtEventTime": { 62 | "required": False, 63 | }, 64 | "mtPageId": { 65 | "required": False, 66 | }, 67 | "mtPageType": { 68 | "required": False, 69 | }, 70 | "mtPrevPage": { 71 | "required": False, 72 | }, 73 | "mtRequestId": { 74 | "required": False, 75 | }, 76 | "mtTopic": { 77 | "required": False, 78 | }, 79 | "needDiv": { 80 | "required": False, 81 | }, 82 | "pg": { 83 | "required": False, 84 | }, 85 | "price": { 86 | "required": True, 87 | }, 88 | "pricingParameters": { 89 | "required": True, 90 | }, 91 | "productType": { 92 | "required": True, 93 | }, 94 | "salableAdamId": { 95 | "required": True, 96 | }, 97 | "hasAskedToFulfillPreorder": { 98 | "required": False, 99 | }, 100 | "buyWithoutAuthorization": { 101 | "required": False, 102 | }, 103 | "hasDoneAgeCheck": { 104 | "required": False, 105 | }, 106 | } 107 | 108 | def __init__( 109 | self, 110 | ageCheck: str = None, 111 | appExtVrsId: str = None, 112 | guid: str = None, 113 | hasBeenAuthedForBuy: str = None, 114 | isInApp: str = None, 115 | kbsync: str = None, 116 | machineName: str = None, 117 | mtApp: str = None, 118 | mtClientId: str = None, 119 | mtEventTime: str = None, 120 | mtPageId: str = None, 121 | mtPageType: str = None, 122 | mtPrevPage: str = None, 123 | mtRequestId: str = None, 124 | mtTopic: str = None, 125 | needDiv: str = None, 126 | pg: str = None, 127 | price: str = None, 128 | pricingParameters: str = None, 129 | productType: str = None, 130 | salableAdamId: str = None, 131 | hasAskedToFulfillPreorder: str = None, 132 | buyWithoutAuthorization: str = None, 133 | 
hasDoneAgeCheck: str = None, 134 | ): 135 | pass 136 | self.__ageCheck = ageCheck 137 | self.__appExtVrsId = appExtVrsId 138 | self.__guid = guid 139 | self.__hasBeenAuthedForBuy = hasBeenAuthedForBuy 140 | self.__isInApp = isInApp 141 | self.__kbsync = kbsync 142 | self.__machineName = machineName 143 | self.__mtApp = mtApp 144 | self.__mtClientId = mtClientId 145 | self.__mtEventTime = mtEventTime 146 | self.__mtPageId = mtPageId 147 | self.__mtPageType = mtPageType 148 | self.__mtPrevPage = mtPrevPage 149 | self.__mtRequestId = mtRequestId 150 | self.__mtTopic = mtTopic 151 | self.__needDiv = needDiv 152 | self.__pg = pg 153 | self.__price = price 154 | self.__pricingParameters = pricingParameters 155 | self.__productType = productType 156 | self.__salableAdamId = salableAdamId 157 | self.__hasAskedToFulfillPreorder = hasAskedToFulfillPreorder 158 | self.__buyWithoutAuthorization = buyWithoutAuthorization 159 | self.__hasDoneAgeCheck = hasDoneAgeCheck 160 | 161 | def _get_ageCheck(self): 162 | return self.__ageCheck 163 | 164 | def _set_ageCheck(self, value): 165 | if value is not None and not isinstance(value, str): 166 | raise TypeError("ageCheck must be str") 167 | 168 | self.__ageCheck = value 169 | 170 | ageCheck = property(_get_ageCheck, _set_ageCheck) 171 | 172 | def _get_appExtVrsId(self): 173 | return self.__appExtVrsId 174 | 175 | def _set_appExtVrsId(self, value): 176 | if not isinstance(value, str): 177 | raise TypeError("appExtVrsId must be str") 178 | 179 | self.__appExtVrsId = value 180 | 181 | appExtVrsId = property(_get_appExtVrsId, _set_appExtVrsId) 182 | 183 | def _get_guid(self): 184 | return self.__guid 185 | 186 | def _set_guid(self, value): 187 | if not isinstance(value, str): 188 | raise TypeError("guid must be str") 189 | 190 | self.__guid = value 191 | 192 | guid = property(_get_guid, _set_guid) 193 | 194 | def _get_hasBeenAuthedForBuy(self): 195 | return self.__hasBeenAuthedForBuy 196 | 197 | def _set_hasBeenAuthedForBuy(self, value): 
198 | if value is not None and not isinstance(value, str): 199 | raise TypeError("hasBeenAuthedForBuy must be str") 200 | 201 | self.__hasBeenAuthedForBuy = value 202 | 203 | hasBeenAuthedForBuy = property(_get_hasBeenAuthedForBuy, _set_hasBeenAuthedForBuy) 204 | 205 | def _get_isInApp(self): 206 | return self.__isInApp 207 | 208 | def _set_isInApp(self, value): 209 | if value is not None and not isinstance(value, str): 210 | raise TypeError("isInApp must be str") 211 | 212 | self.__isInApp = value 213 | 214 | isInApp = property(_get_isInApp, _set_isInApp) 215 | 216 | def _get_kbsync(self): 217 | return self.__kbsync 218 | 219 | def _set_kbsync(self, value): 220 | if not isinstance(value, str): 221 | raise TypeError("kbsync must be str") 222 | 223 | self.__kbsync = value 224 | 225 | kbsync = property(_get_kbsync, _set_kbsync) 226 | 227 | def _get_machineName(self): 228 | return self.__machineName 229 | 230 | def _set_machineName(self, value): 231 | if value is not None and not isinstance(value, str): 232 | raise TypeError("machineName must be str") 233 | 234 | self.__machineName = value 235 | 236 | machineName = property(_get_machineName, _set_machineName) 237 | 238 | def _get_mtApp(self): 239 | return self.__mtApp 240 | 241 | def _set_mtApp(self, value): 242 | if value is not None and not isinstance(value, str): 243 | raise TypeError("mtApp must be str") 244 | 245 | self.__mtApp = value 246 | 247 | mtApp = property(_get_mtApp, _set_mtApp) 248 | 249 | def _get_mtClientId(self): 250 | return self.__mtClientId 251 | 252 | def _set_mtClientId(self, value): 253 | if value is not None and not isinstance(value, str): 254 | raise TypeError("mtClientId must be str") 255 | 256 | self.__mtClientId = value 257 | 258 | mtClientId = property(_get_mtClientId, _set_mtClientId) 259 | 260 | def _get_mtEventTime(self): 261 | return self.__mtEventTime 262 | 263 | def _set_mtEventTime(self, value): 264 | if value is not None and not isinstance(value, str): 265 | raise 
TypeError("mtEventTime must be str") 266 | 267 | self.__mtEventTime = value 268 | 269 | mtEventTime = property(_get_mtEventTime, _set_mtEventTime) 270 | 271 | def _get_mtPageId(self): 272 | return self.__mtPageId 273 | 274 | def _set_mtPageId(self, value): 275 | if value is not None and not isinstance(value, str): 276 | raise TypeError("mtPageId must be str") 277 | 278 | self.__mtPageId = value 279 | 280 | mtPageId = property(_get_mtPageId, _set_mtPageId) 281 | 282 | def _get_mtPageType(self): 283 | return self.__mtPageType 284 | 285 | def _set_mtPageType(self, value): 286 | if value is not None and not isinstance(value, str): 287 | raise TypeError("mtPageType must be str") 288 | 289 | self.__mtPageType = value 290 | 291 | mtPageType = property(_get_mtPageType, _set_mtPageType) 292 | 293 | def _get_mtPrevPage(self): 294 | return self.__mtPrevPage 295 | 296 | def _set_mtPrevPage(self, value): 297 | if value is not None and not isinstance(value, str): 298 | raise TypeError("mtPrevPage must be str") 299 | 300 | self.__mtPrevPage = value 301 | 302 | mtPrevPage = property(_get_mtPrevPage, _set_mtPrevPage) 303 | 304 | def _get_mtRequestId(self): 305 | return self.__mtRequestId 306 | 307 | def _set_mtRequestId(self, value): 308 | if value is not None and not isinstance(value, str): 309 | raise TypeError("mtRequestId must be str") 310 | 311 | self.__mtRequestId = value 312 | 313 | mtRequestId = property(_get_mtRequestId, _set_mtRequestId) 314 | 315 | def _get_mtTopic(self): 316 | return self.__mtTopic 317 | 318 | def _set_mtTopic(self, value): 319 | if value is not None and not isinstance(value, str): 320 | raise TypeError("mtTopic must be str") 321 | 322 | self.__mtTopic = value 323 | 324 | mtTopic = property(_get_mtTopic, _set_mtTopic) 325 | 326 | def _get_needDiv(self): 327 | return self.__needDiv 328 | 329 | def _set_needDiv(self, value): 330 | if value is not None and not isinstance(value, str): 331 | raise TypeError("needDiv must be str") 332 | 333 | self.__needDiv = 
value 334 | 335 | needDiv = property(_get_needDiv, _set_needDiv) 336 | 337 | def _get_pg(self): 338 | return self.__pg 339 | 340 | def _set_pg(self, value): 341 | if value is not None and not isinstance(value, str): 342 | raise TypeError("pg must be str") 343 | 344 | self.__pg = value 345 | 346 | pg = property(_get_pg, _set_pg) 347 | 348 | def _get_price(self): 349 | return self.__price 350 | 351 | def _set_price(self, value): 352 | if not isinstance(value, str): 353 | raise TypeError("price must be str") 354 | 355 | self.__price = value 356 | 357 | price = property(_get_price, _set_price) 358 | 359 | def _get_pricingParameters(self): 360 | return self.__pricingParameters 361 | 362 | def _set_pricingParameters(self, value): 363 | if not isinstance(value, str): 364 | raise TypeError("pricingParameters must be str") 365 | 366 | self.__pricingParameters = value 367 | 368 | pricingParameters = property(_get_pricingParameters, _set_pricingParameters) 369 | 370 | def _get_productType(self): 371 | return self.__productType 372 | 373 | def _set_productType(self, value): 374 | if not isinstance(value, str): 375 | raise TypeError("productType must be str") 376 | 377 | self.__productType = value 378 | 379 | productType = property(_get_productType, _set_productType) 380 | 381 | def _get_salableAdamId(self): 382 | return self.__salableAdamId 383 | 384 | def _set_salableAdamId(self, value): 385 | if not isinstance(value, str): 386 | raise TypeError("salableAdamId must be str") 387 | 388 | self.__salableAdamId = value 389 | 390 | salableAdamId = property(_get_salableAdamId, _set_salableAdamId) 391 | 392 | def _get_hasAskedToFulfillPreorder(self): 393 | return self.__hasAskedToFulfillPreorder 394 | 395 | def _set_hasAskedToFulfillPreorder(self, value): 396 | if value is not None and not isinstance(value, str): 397 | raise TypeError("hasAskedToFulfillPreorder must be str") 398 | 399 | self.__hasAskedToFulfillPreorder = value 400 | 401 | hasAskedToFulfillPreorder = property( 402 | 
_get_hasAskedToFulfillPreorder, _set_hasAskedToFulfillPreorder 403 | ) 404 | 405 | def _get_buyWithoutAuthorization(self): 406 | return self.__buyWithoutAuthorization 407 | 408 | def _set_buyWithoutAuthorization(self, value): 409 | if value is not None and not isinstance(value, str): 410 | raise TypeError("buyWithoutAuthorization must be str") 411 | 412 | self.__buyWithoutAuthorization = value 413 | 414 | buyWithoutAuthorization = property( 415 | _get_buyWithoutAuthorization, _set_buyWithoutAuthorization 416 | ) 417 | 418 | def _get_hasDoneAgeCheck(self): 419 | return self.__hasDoneAgeCheck 420 | 421 | def _set_hasDoneAgeCheck(self, value): 422 | if value is not None and not isinstance(value, str): 423 | raise TypeError("hasDoneAgeCheck must be str") 424 | 425 | self.__hasDoneAgeCheck = value 426 | 427 | hasDoneAgeCheck = property(_get_hasDoneAgeCheck, _set_hasDoneAgeCheck) 428 | 429 | @staticmethod 430 | def from_dict(d): 431 | v = {} 432 | if "ageCheck" in d: 433 | v["ageCheck"] = ( 434 | str.from_dict(d["ageCheck"]) 435 | if hasattr(str, "from_dict") 436 | else d["ageCheck"] 437 | ) 438 | if "appExtVrsId" in d: 439 | v["appExtVrsId"] = ( 440 | str.from_dict(d["appExtVrsId"]) 441 | if hasattr(str, "from_dict") 442 | else d["appExtVrsId"] 443 | ) 444 | if "guid" in d: 445 | v["guid"] = ( 446 | str.from_dict(d["guid"]) if hasattr(str, "from_dict") else d["guid"] 447 | ) 448 | if "hasBeenAuthedForBuy" in d: 449 | v["hasBeenAuthedForBuy"] = ( 450 | str.from_dict(d["hasBeenAuthedForBuy"]) 451 | if hasattr(str, "from_dict") 452 | else d["hasBeenAuthedForBuy"] 453 | ) 454 | if "isInApp" in d: 455 | v["isInApp"] = ( 456 | str.from_dict(d["isInApp"]) 457 | if hasattr(str, "from_dict") 458 | else d["isInApp"] 459 | ) 460 | if "kbsync" in d: 461 | v["kbsync"] = ( 462 | str.from_dict(d["kbsync"]) if hasattr(str, "from_dict") else d["kbsync"] 463 | ) 464 | if "machineName" in d: 465 | v["machineName"] = ( 466 | str.from_dict(d["machineName"]) 467 | if hasattr(str, 
"from_dict") 468 | else d["machineName"] 469 | ) 470 | if "mtApp" in d: 471 | v["mtApp"] = ( 472 | str.from_dict(d["mtApp"]) if hasattr(str, "from_dict") else d["mtApp"] 473 | ) 474 | if "mtClientId" in d: 475 | v["mtClientId"] = ( 476 | str.from_dict(d["mtClientId"]) 477 | if hasattr(str, "from_dict") 478 | else d["mtClientId"] 479 | ) 480 | if "mtEventTime" in d: 481 | v["mtEventTime"] = ( 482 | str.from_dict(d["mtEventTime"]) 483 | if hasattr(str, "from_dict") 484 | else d["mtEventTime"] 485 | ) 486 | if "mtPageId" in d: 487 | v["mtPageId"] = ( 488 | str.from_dict(d["mtPageId"]) 489 | if hasattr(str, "from_dict") 490 | else d["mtPageId"] 491 | ) 492 | if "mtPageType" in d: 493 | v["mtPageType"] = ( 494 | str.from_dict(d["mtPageType"]) 495 | if hasattr(str, "from_dict") 496 | else d["mtPageType"] 497 | ) 498 | if "mtPrevPage" in d: 499 | v["mtPrevPage"] = ( 500 | str.from_dict(d["mtPrevPage"]) 501 | if hasattr(str, "from_dict") 502 | else d["mtPrevPage"] 503 | ) 504 | if "mtRequestId" in d: 505 | v["mtRequestId"] = ( 506 | str.from_dict(d["mtRequestId"]) 507 | if hasattr(str, "from_dict") 508 | else d["mtRequestId"] 509 | ) 510 | if "mtTopic" in d: 511 | v["mtTopic"] = ( 512 | str.from_dict(d["mtTopic"]) 513 | if hasattr(str, "from_dict") 514 | else d["mtTopic"] 515 | ) 516 | if "needDiv" in d: 517 | v["needDiv"] = ( 518 | str.from_dict(d["needDiv"]) 519 | if hasattr(str, "from_dict") 520 | else d["needDiv"] 521 | ) 522 | if "pg" in d: 523 | v["pg"] = str.from_dict(d["pg"]) if hasattr(str, "from_dict") else d["pg"] 524 | if "price" in d: 525 | v["price"] = ( 526 | str.from_dict(d["price"]) if hasattr(str, "from_dict") else d["price"] 527 | ) 528 | if "pricingParameters" in d: 529 | v["pricingParameters"] = ( 530 | str.from_dict(d["pricingParameters"]) 531 | if hasattr(str, "from_dict") 532 | else d["pricingParameters"] 533 | ) 534 | if "productType" in d: 535 | v["productType"] = ( 536 | str.from_dict(d["productType"]) 537 | if hasattr(str, "from_dict") 538 | 
else d["productType"] 539 | ) 540 | if "salableAdamId" in d: 541 | v["salableAdamId"] = ( 542 | str.from_dict(d["salableAdamId"]) 543 | if hasattr(str, "from_dict") 544 | else d["salableAdamId"] 545 | ) 546 | if "hasAskedToFulfillPreorder" in d: 547 | v["hasAskedToFulfillPreorder"] = ( 548 | str.from_dict(d["hasAskedToFulfillPreorder"]) 549 | if hasattr(str, "from_dict") 550 | else d["hasAskedToFulfillPreorder"] 551 | ) 552 | if "buyWithoutAuthorization" in d: 553 | v["buyWithoutAuthorization"] = ( 554 | str.from_dict(d["buyWithoutAuthorization"]) 555 | if hasattr(str, "from_dict") 556 | else d["buyWithoutAuthorization"] 557 | ) 558 | if "hasDoneAgeCheck" in d: 559 | v["hasDoneAgeCheck"] = ( 560 | str.from_dict(d["hasDoneAgeCheck"]) 561 | if hasattr(str, "from_dict") 562 | else d["hasDoneAgeCheck"] 563 | ) 564 | return StoreBuyproductReq(**v) 565 | 566 | def as_dict(self): 567 | d = {} 568 | if self.__ageCheck is not None: 569 | d["ageCheck"] = ( 570 | self.__ageCheck.as_dict() 571 | if hasattr(self.__ageCheck, "as_dict") 572 | else self.__ageCheck 573 | ) 574 | if self.__appExtVrsId is not None: 575 | d["appExtVrsId"] = ( 576 | self.__appExtVrsId.as_dict() 577 | if hasattr(self.__appExtVrsId, "as_dict") 578 | else self.__appExtVrsId 579 | ) 580 | if self.__guid is not None: 581 | d["guid"] = ( 582 | self.__guid.as_dict() 583 | if hasattr(self.__guid, "as_dict") 584 | else self.__guid 585 | ) 586 | if self.__hasBeenAuthedForBuy is not None: 587 | d["hasBeenAuthedForBuy"] = ( 588 | self.__hasBeenAuthedForBuy.as_dict() 589 | if hasattr(self.__hasBeenAuthedForBuy, "as_dict") 590 | else self.__hasBeenAuthedForBuy 591 | ) 592 | if self.__isInApp is not None: 593 | d["isInApp"] = ( 594 | self.__isInApp.as_dict() 595 | if hasattr(self.__isInApp, "as_dict") 596 | else self.__isInApp 597 | ) 598 | if self.__kbsync is not None: 599 | d["kbsync"] = ( 600 | self.__kbsync.as_dict() 601 | if hasattr(self.__kbsync, "as_dict") 602 | else self.__kbsync 603 | ) 604 | if 
self.__machineName is not None: 605 | d["machineName"] = ( 606 | self.__machineName.as_dict() 607 | if hasattr(self.__machineName, "as_dict") 608 | else self.__machineName 609 | ) 610 | if self.__mtApp is not None: 611 | d["mtApp"] = ( 612 | self.__mtApp.as_dict() 613 | if hasattr(self.__mtApp, "as_dict") 614 | else self.__mtApp 615 | ) 616 | if self.__mtClientId is not None: 617 | d["mtClientId"] = ( 618 | self.__mtClientId.as_dict() 619 | if hasattr(self.__mtClientId, "as_dict") 620 | else self.__mtClientId 621 | ) 622 | if self.__mtEventTime is not None: 623 | d["mtEventTime"] = ( 624 | self.__mtEventTime.as_dict() 625 | if hasattr(self.__mtEventTime, "as_dict") 626 | else self.__mtEventTime 627 | ) 628 | if self.__mtPageId is not None: 629 | d["mtPageId"] = ( 630 | self.__mtPageId.as_dict() 631 | if hasattr(self.__mtPageId, "as_dict") 632 | else self.__mtPageId 633 | ) 634 | if self.__mtPageType is not None: 635 | d["mtPageType"] = ( 636 | self.__mtPageType.as_dict() 637 | if hasattr(self.__mtPageType, "as_dict") 638 | else self.__mtPageType 639 | ) 640 | if self.__mtPrevPage is not None: 641 | d["mtPrevPage"] = ( 642 | self.__mtPrevPage.as_dict() 643 | if hasattr(self.__mtPrevPage, "as_dict") 644 | else self.__mtPrevPage 645 | ) 646 | if self.__mtRequestId is not None: 647 | d["mtRequestId"] = ( 648 | self.__mtRequestId.as_dict() 649 | if hasattr(self.__mtRequestId, "as_dict") 650 | else self.__mtRequestId 651 | ) 652 | if self.__mtTopic is not None: 653 | d["mtTopic"] = ( 654 | self.__mtTopic.as_dict() 655 | if hasattr(self.__mtTopic, "as_dict") 656 | else self.__mtTopic 657 | ) 658 | if self.__needDiv is not None: 659 | d["needDiv"] = ( 660 | self.__needDiv.as_dict() 661 | if hasattr(self.__needDiv, "as_dict") 662 | else self.__needDiv 663 | ) 664 | if self.__pg is not None: 665 | d["pg"] = ( 666 | self.__pg.as_dict() if hasattr(self.__pg, "as_dict") else self.__pg 667 | ) 668 | if self.__price is not None: 669 | d["price"] = ( 670 | self.__price.as_dict() 
671 | if hasattr(self.__price, "as_dict") 672 | else self.__price 673 | ) 674 | if self.__pricingParameters is not None: 675 | d["pricingParameters"] = ( 676 | self.__pricingParameters.as_dict() 677 | if hasattr(self.__pricingParameters, "as_dict") 678 | else self.__pricingParameters 679 | ) 680 | if self.__productType is not None: 681 | d["productType"] = ( 682 | self.__productType.as_dict() 683 | if hasattr(self.__productType, "as_dict") 684 | else self.__productType 685 | ) 686 | if self.__salableAdamId is not None: 687 | d["salableAdamId"] = ( 688 | self.__salableAdamId.as_dict() 689 | if hasattr(self.__salableAdamId, "as_dict") 690 | else self.__salableAdamId 691 | ) 692 | if self.__hasAskedToFulfillPreorder is not None: 693 | d["hasAskedToFulfillPreorder"] = ( 694 | self.__hasAskedToFulfillPreorder.as_dict() 695 | if hasattr(self.__hasAskedToFulfillPreorder, "as_dict") 696 | else self.__hasAskedToFulfillPreorder 697 | ) 698 | if self.__buyWithoutAuthorization is not None: 699 | d["buyWithoutAuthorization"] = ( 700 | self.__buyWithoutAuthorization.as_dict() 701 | if hasattr(self.__buyWithoutAuthorization, "as_dict") 702 | else self.__buyWithoutAuthorization 703 | ) 704 | if self.__hasDoneAgeCheck is not None: 705 | d["hasDoneAgeCheck"] = ( 706 | self.__hasDoneAgeCheck.as_dict() 707 | if hasattr(self.__hasDoneAgeCheck, "as_dict") 708 | else self.__hasDoneAgeCheck 709 | ) 710 | return d 711 | 712 | def __repr__(self): 713 | return ""\ 718 | .format(limitedRepr( 719 | self.__ageCheck[:20] 720 | if isinstance(self.__ageCheck, bytes) 721 | else self.__ageCheck 722 | ), limitedRepr( 723 | self.__appExtVrsId[:20] 724 | if isinstance(self.__appExtVrsId, bytes) 725 | else self.__appExtVrsId 726 | ), limitedRepr( 727 | self.__guid[:20] if isinstance(self.__guid, bytes) else self.__guid 728 | ), limitedRepr( 729 | self.__hasBeenAuthedForBuy[:20] 730 | if isinstance(self.__hasBeenAuthedForBuy, bytes) 731 | else self.__hasBeenAuthedForBuy 732 | ), limitedRepr( 733 | 
self.__isInApp[:20] 734 | if isinstance(self.__isInApp, bytes) 735 | else self.__isInApp 736 | ), limitedRepr( 737 | self.__kbsync[:20] 738 | if isinstance(self.__kbsync, bytes) 739 | else self.__kbsync 740 | ), limitedRepr( 741 | self.__machineName[:20] 742 | if isinstance(self.__machineName, bytes) 743 | else self.__machineName 744 | ), limitedRepr( 745 | self.__mtApp[:20] if isinstance(self.__mtApp, bytes) else self.__mtApp 746 | ), limitedRepr( 747 | self.__mtClientId[:20] 748 | if isinstance(self.__mtClientId, bytes) 749 | else self.__mtClientId 750 | ), limitedRepr( 751 | self.__mtEventTime[:20] 752 | if isinstance(self.__mtEventTime, bytes) 753 | else self.__mtEventTime 754 | ), limitedRepr( 755 | self.__mtPageId[:20] 756 | if isinstance(self.__mtPageId, bytes) 757 | else self.__mtPageId 758 | ), limitedRepr( 759 | self.__mtPageType[:20] 760 | if isinstance(self.__mtPageType, bytes) 761 | else self.__mtPageType 762 | ), limitedRepr( 763 | self.__mtPrevPage[:20] 764 | if isinstance(self.__mtPrevPage, bytes) 765 | else self.__mtPrevPage 766 | ), limitedRepr( 767 | self.__mtRequestId[:20] 768 | if isinstance(self.__mtRequestId, bytes) 769 | else self.__mtRequestId 770 | ), limitedRepr( 771 | self.__mtTopic[:20] 772 | if isinstance(self.__mtTopic, bytes) 773 | else self.__mtTopic 774 | ), limitedRepr( 775 | self.__needDiv[:20] 776 | if isinstance(self.__needDiv, bytes) 777 | else self.__needDiv 778 | ), limitedRepr( 779 | self.__pg[:20] 780 | if isinstance(self.__pg, bytes) 781 | else self.__pg 782 | ), limitedRepr( 783 | self.__price[:20] 784 | if isinstance(self.__price, bytes) 785 | else self.__price 786 | ), limitedRepr( 787 | self.__pricingParameters[:20] 788 | if isinstance(self.__pricingParameters, bytes) 789 | else self.__pricingParameters 790 | ), limitedRepr( 791 | self.__productType[:20] 792 | if isinstance(self.__productType, bytes) 793 | else self.__productType 794 | ), limitedRepr( 795 | self.__salableAdamId[:20] 796 | if 
class StoreDownloadReq:
    """Request payload for the volumeStoreDownloadProduct endpoint.

    All fields are plain strings; ``appExtVrsId`` is optional (may be None).
    """

    _types_map = {
        "creditDisplay": {"type": str, "subtype": None},
        "guid": {"type": str, "subtype": None},
        "salableAdamId": {"type": str, "subtype": None},
        "appExtVrsId": {"type": str, "subtype": None},
    }
    _formats_map = {}
    _validations_map = {
        "creditDisplay": {"required": True},
        "guid": {"required": True},
        "salableAdamId": {"required": True},
        "appExtVrsId": {"required": False},
    }

    # Canonical field order shared by from_dict / as_dict / __repr__.
    _FIELDS = ("creditDisplay", "guid", "salableAdamId", "appExtVrsId")

    def __init__(
        self,
        creditDisplay: str = None,
        guid: str = None,
        salableAdamId: str = None,
        appExtVrsId: str = None,
    ):
        # Assigns the private attributes directly, so constructor arguments
        # bypass the setter validation (original behavior, kept as-is).
        # The stray `pass` the generator emitted here was removed.
        self.__creditDisplay = creditDisplay
        self.__guid = guid
        self.__salableAdamId = salableAdamId
        self.__appExtVrsId = appExtVrsId

    def _get_creditDisplay(self):
        return self.__creditDisplay

    def _set_creditDisplay(self, value):
        if not isinstance(value, str):
            raise TypeError("creditDisplay must be str")
        self.__creditDisplay = value

    creditDisplay = property(_get_creditDisplay, _set_creditDisplay)

    def _get_guid(self):
        return self.__guid

    def _set_guid(self, value):
        if not isinstance(value, str):
            raise TypeError("guid must be str")
        self.__guid = value

    guid = property(_get_guid, _set_guid)

    def _get_salableAdamId(self):
        return self.__salableAdamId

    def _set_salableAdamId(self, value):
        if not isinstance(value, str):
            raise TypeError("salableAdamId must be str")
        self.__salableAdamId = value

    salableAdamId = property(_get_salableAdamId, _set_salableAdamId)

    def _get_appExtVrsId(self):
        return self.__appExtVrsId

    def _set_appExtVrsId(self, value):
        # Optional field: None is accepted.
        if value is not None and not isinstance(value, str):
            raise TypeError("appExtVrsId must be str")
        self.__appExtVrsId = value

    appExtVrsId = property(_get_appExtVrsId, _set_appExtVrsId)

    @staticmethod
    def from_dict(d):
        """Build a StoreDownloadReq from a plain dict, ignoring unknown keys.

        The generated ``hasattr(str, "from_dict")`` guard was always False,
        so values are passed through unchanged.
        """
        return StoreDownloadReq(
            **{k: d[k] for k in StoreDownloadReq._FIELDS if k in d}
        )

    def as_dict(self):
        """Serialize all non-None fields to a plain dict (nested via as_dict)."""
        d = {}
        for name in self._FIELDS:
            value = getattr(self, "_StoreDownloadReq__" + name)
            if value is not None:
                d[name] = value.as_dict() if hasattr(value, "as_dict") else value
        return d

    def __repr__(self):
        # NOTE(review): the original format string was destroyed in extraction
        # ('return "".format(...)' discards every argument); reconstructed in
        # the generated "<ClassName field=value ...>" style.
        parts = []
        for name in self._FIELDS:
            value = getattr(self, "_StoreDownloadReq__" + name)
            if isinstance(value, bytes):
                value = value[:20]  # original truncated bytes before repr
            parts.append("{}={}".format(name, limitedRepr(value)))
        return "<StoreDownloadReq {}>".format(" ".join(parts))
class StoreException(Exception):
    """Raised when an App Store endpoint rejects or fails a request."""

    def __init__(self, req, err_msg, err_type=None):
        self.req = req
        self.err_msg = err_msg
        self.err_type = err_type
        super().__init__(
            "Store %s error: %s" % (self.req, self.err_msg) if not self.err_type else
            "Store %s error: %s, errorType: %s" % (self.req, self.err_msg, self.err_type)
        )


class StoreClient(object):
    """Thin client for Apple's private MZFinance/MZBuy store endpoints."""

    # Cap for the manual 302-following loop in authenticate(); the original
    # `while True` could hang forever on a redirect cycle.
    _MAX_AUTH_REDIRECTS = 10

    def __init__(self, sess: requests.Session, guid: str = None):
        self.sess = sess
        self.guid = guid  # device GUID; derived from the Apple ID if absent
        self.dsid = None
        self.store_front = None
        self.account_name = None

    def authenticate(self, appleId, password):
        """Sign in with an Apple ID and password.

        On success, installs the DSID, storefront and password token as
        session headers and records the account holder's name.

        Raises:
            StoreException: if the store rejects the sign-in or the redirect
                chain exceeds the cap.
        """
        if not self.guid:
            self.guid = self._generateGuid(appleId)
        req = StoreAuthenticateReq(appleId=appleId, password=password, attempt='4', createSession="true",
                                   guid=self.guid, rmp='0', why='signIn')
        url = "https://p46-buy.itunes.apple.com/WebObjects/MZFinance.woa/wa/authenticate?guid=%s" % self.guid
        # Follow 302s manually so the plist body is re-POSTed at each hop.
        for _ in range(self._MAX_AUTH_REDIRECTS):
            # NOTE(review): verify=False disables TLS certificate validation.
            # Kept as-is for these private Apple endpoints, but this should be
            # revisited — it exposes credentials to MITM.
            r = self.sess.post(url,
                               headers={
                                   "Accept": "*/*",
                                   "Content-Type": "application/x-www-form-urlencoded",
                                   "User-Agent":
                                       "Configurator/2.0 (Macintosh; OS X 10.12.6; 16G29) AppleWebKit/2603.3.8",
                               },
                               data=plistlib.dumps(req.as_dict()), allow_redirects=False,
                               verify=False)
            if r.status_code == 302:
                url = r.headers['Location']
                continue
            break
        else:
            raise StoreException("authenticate", "too many redirects")
        resp = StoreAuthenticateResp.from_dict(plistlib.loads(r.content))
        if not resp.m_allowed:
            raise StoreException("authenticate", resp.customerMessage, resp.failureType)

        self.sess.headers['X-Dsid'] = self.sess.headers['iCloud-Dsid'] = str(resp.download_queue_info.dsid)
        self.sess.headers['X-Apple-Store-Front'] = r.headers.get('x-set-apple-store-front')
        self.sess.headers['X-Token'] = resp.passwordToken

        self.account_name = resp.accountInfo.address.firstName + " " + resp.accountInfo.address.lastName
        return resp

    def find_app(self, app_id=None, bundle_id=None, country="US"):
        """Look up an app by numeric id or bundle id via the public lookup API."""
        return self.sess.get("https://itunes.apple.com/lookup?",
                             params={
                                 "bundleId": bundle_id,
                                 "id": app_id,
                                 "term": None,
                                 "country": country,
                                 "limit": 1,
                                 "media": "software",
                             },
                             headers={
                                 "Content-Type": "application/x-www-form-urlencoded",
                             },
                             verify=False)

    def purchase(self, app_id, productType='C'):
        """Issue a zero-price buyProduct request (acquire a free app)."""
        url = "https://buy.itunes.apple.com/WebObjects/MZBuy.woa/wa/buyProduct"
        req = StoreBuyproductReq(
            guid=self.guid,
            salableAdamId=str(app_id),
            appExtVrsId='0',
            price='0',
            productType=productType,
            pricingParameters='STDQ',
            hasAskedToFulfillPreorder='true',
            buyWithoutAuthorization='true',
            hasDoneAgeCheck='true',
        )
        payload = req.as_dict()

        return self.sess.post(url,
                              headers={
                                  "Content-Type": "application/x-apple-plist",
                                  "User-Agent": "Configurator/2.15 (Macintosh; OS X 11.0.0; 16G29) "
                                                "AppleWebKit/2603.3.8",
                              },
                              data=plistlib.dumps(payload),
                              verify=False)

    def download(self, app_id, app_ver_id=""):
        """Request download metadata for an owned app.

        Raises:
            StoreException: if the store cancels the purchase batch.
        """
        req = StoreDownloadReq(creditDisplay="", guid=self.guid, salableAdamId=app_id, appExtVrsId=app_ver_id)
        r = self.sess.post("https://p25-buy.itunes.apple.com/WebObjects/MZFinance.woa/wa/volumeStoreDownloadProduct",
                           params={
                               "guid": self.guid
                           },
                           headers={
                               "Content-Type": "application/x-www-form-urlencoded",
                               "User-Agent": "Configurator/2.0 (Macintosh; OS X 10.12.6; 16G29) AppleWebKit/2603.3.8",
                           },
                           data=plistlib.dumps(req.as_dict()),
                           verify=False)

        resp = StoreDownloadResp.from_dict(plistlib.loads(r.content))
        if resp.cancel_purchase_batch:
            raise StoreException("volumeStoreDownloadProduct", resp.customerMessage, resp.failureType)
        return resp

    def _generateGuid(self, appleId):
        """Derive a stable 12-hex-digit GUID from the Apple ID via sha1."""
        DEFAULT_GUID = '123C2941396B'
        GUID_DEFAULT_PREFIX = 2
        GUID_SEED = 'STINGRAY'
        GUID_POS = 10

        h = hashlib.sha1((GUID_SEED + appleId + GUID_SEED).encode("utf-8")).hexdigest()
        # Keep the first two chars of the default GUID, fill the rest from a
        # fixed slice of the hash so the same Apple ID always maps to the
        # same GUID.
        defaultPart = DEFAULT_GUID[:GUID_DEFAULT_PREFIX]
        hashPart = h[GUID_POS: GUID_POS + (len(DEFAULT_GUID) - GUID_DEFAULT_PREFIX)]
        guid = (defaultPart + hashPart).upper()
        return guid
class BaseDownloader(ABC):
    """Common contract for distribution-system downloaders.

    Every distribution system implements this interface, which keeps call
    sites interchangeable and makes the downloaders easier to test.
    """

    @abstractmethod
    def download_app(self, download_path: str, **kwargs) -> str:
        """Fetch the application and return the path of the saved file.

        Args:
            download_path: Target directory for the downloaded artifact.
            **kwargs: Parameters specific to the distribution system.

        Returns:
            Filesystem path of the downloaded application file.

        Raises:
            RuntimeError: when the download cannot be completed.
        """

    @abstractmethod
    def get_app_info(self, **kwargs) -> Dict[str, Any]:
        """Return metadata describing the application.

        Args:
            **kwargs: Parameters specific to the distribution system
                (for example ``package_name`` or ``app_id``).

        Returns:
            A dictionary with keys such as ``integration_type``,
            ``package_name``, ``version_name``, ``version_code``,
            ``file_size`` and ``icon_url``, plus any system-specific extras.

        Raises:
            RuntimeError: when the metadata cannot be retrieved.
        """
def get_app_info(project_number, app_id, account_info):
    """Return info about the latest Firebase App Distribution release.

    Args:
        project_number: Firebase project number.
        app_id: Firebase application id.
        account_info: Service-account dict or path (passed to get_token).

    Raises:
        RuntimeError: on HTTP failure or an unexpected response structure.
    """
    try:
        token = get_token(account_info)
        headers = {'Authorization': f'Bearer {token}'}
        last_release_info_resp = requests.get(
            f'https://firebaseappdistribution.googleapis.com/v1/projects/{project_number}/apps/'
            f'{app_id}/releases?pageSize=1',
            headers=headers,
            timeout=HTTP_REQUEST_TIMEOUT)
        if last_release_info_resp.status_code != 200:
            raise RuntimeError(f'Firebase - Failed to get application info. Status: {last_release_info_resp.status_code}, '
                               f'Response: {last_release_info_resp.text[:500]}')
        release = last_release_info_resp.json()['releases'][0]
    except (KeyError, IndexError) as e:
        # IndexError (empty "releases" list) previously fell through to the
        # generic handler with a vague message; treat it like a missing key.
        raise RuntimeError(f'Firebase - Failed to get application info: unexpected response structure: {e}')
    except RuntimeError:
        # Already a well-formed error (e.g. the non-200 branch above); the
        # original broad `except Exception` re-wrapped it into a duplicated
        # "Failed to get application info: Store..." message.
        raise
    except Exception as e:
        raise RuntimeError(f'Firebase - Failed to get application info: {e}')

    logger.info(f"Firebase - found release {release['name']} with version - {release['displayVersion']}")
    return {
        'integration_type': 'firebase',
        'app_name': release['name'],
        'version_code': release['buildVersion'],
        'version_name': release['displayVersion'],
        'create_time': release['createTime'],
        'download_link': release['binaryDownloadUri']
    }


def firebase_download_app(download_path, project_number, app_id, account_info, file_name=None,
                          file_extension='apk'):
    """Download the latest release binary from Firebase App Distribution.

    Args:
        download_path: Directory the file is written into (created if needed).
        project_number: Firebase project number.
        app_id: Firebase application id.
        account_info: Service-account dict or path.
        file_name: Optional base name; defaults to the release version name.
        file_extension: Artifact extension, 'apk' by default.

    Returns:
        The path the file was written to.

    Raises:
        RuntimeError: on HTTP failure or a write error (partial file removed).
    """
    logger.info(f'Firebase - Try to download {file_extension} from latest release in project - '
                f'{project_number} with app id {app_id}')
    app_info = get_app_info(project_number, app_id, account_info)
    app_download_link = app_info['download_link']
    app_file_resp = requests.get(app_download_link, allow_redirects=True, stream=True, timeout=HTTP_DOWNLOAD_TIMEOUT)
    if app_file_resp.status_code != 200:
        raise RuntimeError(f'Firebase - Failed to download application, status code: {app_file_resp.status_code}')

    if file_name is None:
        file_name = app_info['version_name']

    path_to_file = f'{download_path}/{file_name}.{file_extension}'

    # Log before creating the directory (the original logged "Creating
    # directory" after it had already been created).
    logger.info(f'Firebase - Creating directory {download_path} for downloading app from Firebase')
    ensure_download_dir(download_path)

    # Stream to disk in chunks so large binaries are never held in memory.
    try:
        total_size = int(app_file_resp.headers.get('content-length', 0))
        chunk_size = 8192
        with open(path_to_file, 'wb') as file:
            with tqdm(total=total_size, unit='B', unit_scale=True, unit_divisor=1024,
                      desc=f'Firebase - Downloading {file_name}.{file_extension}',
                      disable=total_size == 0) as pbar:
                for chunk in app_file_resp.iter_content(chunk_size=chunk_size):
                    if chunk:
                        file.write(chunk)
                        pbar.update(len(chunk))
    except Exception as e:
        # Remove the partial file so a broken artifact is never scanned.
        cleanup_file(path_to_file)
        raise RuntimeError(f'Firebase - Failed to write downloaded file: {e}')

    if os.path.exists(path_to_file):
        logger.info(f'Firebase - Application successfully downloaded to {path_to_file}')
    else:
        # A missing artifact is an error; the original logged this at INFO.
        logger.error('Firebase - Failed to download application. '
                     'Seems like something is wrong with your file path or app file is broken')

    return path_to_file
async def fetch_aas_token(email: str, oauth2_token: str, timeout_sec: int) -> str:
    """Exchange a Google OAuth2 token for an AAS token via the apkeep binary.

    Args:
        email: Google account e-mail address.
        oauth2_token: OAuth2 token for that account.
        timeout_sec: Hard timeout for the apkeep subprocess.

    Returns:
        The AAS token string.

    Raises:
        RuntimeError: on timeout, a non-zero apkeep exit code, or when the
            token cannot be found/parsed in apkeep's output.
    """
    apkeep_path = get_apkeep_binary_path()
    logger.debug(f'Google Play - using apkeep binary: {apkeep_path}')

    redacted_email: Optional[Mapping[str, object]] = redact({'email': email})
    logger.info(f'Google Play - fetching AAS token via OAuth2 for account {(redacted_email or {}).get("email")}')
    proc = await asyncio.create_subprocess_exec(
        apkeep_path,
        '-e', email,
        '--oauth-token', oauth2_token,
        stdout=asyncio.subprocess.PIPE,
        stderr=asyncio.subprocess.PIPE,
    )
    try:
        stdout_b, stderr_b = await asyncio.wait_for(proc.communicate(), timeout=timeout_sec)
    except asyncio.TimeoutError:
        proc.kill()
        logger.error('gp:token_fetch_timeout')
        raise RuntimeError('Google Play: timeout while executing apkeep for token fetch')

    output = (stdout_b or b'').decode(errors='ignore') + '\n' + (stderr_b or b'').decode(errors='ignore')
    # Mask the token line before anything is logged or surfaced in errors.
    sanitized_output_text = '\n'.join(
        'AAS Token: ***' if line.strip().startswith('AAS Token: ') else line
        for line in output.splitlines()
    )
    logger.debug(f'Google Play - full apkeep output when fetching token: {sanitized_output_text}')

    if proc.returncode != 0:
        raise RuntimeError(sanitized_output_text.strip() or 'Google Play: apkeep returned non-zero exit code')

    token_line = next(
        (line.strip() for line in output.splitlines() if line.strip().startswith('AAS Token: ')),
        None,
    )
    if not token_line:
        raise RuntimeError(sanitized_output_text.strip() or 'Google Play: AAS token not found in apkeep output')
    parsed = token_line.split('AAS Token: ', 1)
    if len(parsed) != 2 or not parsed[1].strip():
        raise RuntimeError('Google Play: failed to parse AAS token from apkeep output')

    token_value = parsed[1].strip()
    # SECURITY: the original logged the raw token at DEBUG level
    # (``AAS token (DEBUG): {token_value}``), leaking the very secret the
    # sanitization above deliberately masks. Never log the token value.
    logger.info('Google Play - AAS token successfully obtained')
    return token_value
'google-play', 86 | '-e', email, 87 | '-o', 'split_apk=true,locale=ru_RU', 88 | '-t', aas_token, 89 | download_dir, 90 | stdout=asyncio.subprocess.PIPE, 91 | stderr=asyncio.subprocess.PIPE, 92 | ) 93 | try: 94 | stdout_b, stderr_b = await asyncio.wait_for(proc.communicate(), timeout=timeout_sec) 95 | except asyncio.TimeoutError: 96 | proc.kill() 97 | logger.error(f'Google Play - download timeout exceeded, package: {package_name}') 98 | raise RuntimeError('Google Play: timeout while executing apkeep for download') 99 | 100 | output = (stdout_b or b'').decode(errors='ignore') + '\n' + (stderr_b or b'').decode(errors='ignore') 101 | sanitized_output = [] 102 | for line in output.splitlines(): 103 | if line.strip().startswith('AAS Token: '): 104 | sanitized_output.append('AAS Token: ***') 105 | else: 106 | sanitized_output.append(line) 107 | sanitized_output_text = '\n'.join(sanitized_output) 108 | logger.debug(f'Google Play - apkeep output during download (fragment): {sanitized_output_text[:1000]} (package: {package_name})') 109 | logger.debug(f'Google Play - full apkeep output during download (package: {package_name}): {sanitized_output_text}') 110 | 111 | if proc.returncode != 0: 112 | raise RuntimeError(sanitized_output_text.strip() or 'apkeep returned non-zero exit code') 113 | 114 | if f'{package_name} downloaded successfully!' 
not in output: 115 | logger.warning(f'Google Play - success marker not found in output, continuing artifact check (package: {package_name})') 116 | 117 | split_dir = os.path.join(download_dir, package_name) 118 | single_apk = os.path.join(download_dir, f'{package_name}.apk') 119 | if os.path.isdir(split_dir): 120 | # Rename main APK to base-master.apk before zipping 121 | original_base_apk = os.path.join(split_dir, f'{package_name}.apk') 122 | renamed_base_apk = os.path.join(split_dir, 'base-master.apk') 123 | try: 124 | if os.path.exists(original_base_apk): 125 | if os.path.exists(renamed_base_apk): 126 | try: 127 | os.remove(renamed_base_apk) 128 | except Exception: 129 | pass 130 | os.replace(original_base_apk, renamed_base_apk) 131 | logger.info(f'Google Play - renamed base APK: {original_base_apk} → {renamed_base_apk} (package: {package_name})') 132 | except Exception as ex: 133 | logger.warning(f'Google Play - failed to rename base APK: {ex} (package: {package_name})') 134 | 135 | zip_base = os.path.join(download_dir, f'{package_name}') 136 | try: 137 | archive_path = shutil.make_archive(zip_base, 'zip', split_dir) 138 | logger.info(f'Google Play - artifact ready: {archive_path} (package: {package_name})') 139 | try: 140 | shutil.rmtree(split_dir) 141 | logger.debug(f'Google Play - temporary split directory removed: {split_dir} (package: {package_name})') 142 | except Exception as ex: 143 | logger.warning('gp:cleanup_split_dir_failed', extra={'package': package_name, 'dir': split_dir, 'error': str(ex)}) 144 | return archive_path 145 | except Exception as ex: 146 | logger.error(f'Google Play - failed to archive split APKs: {ex} (package: {package_name})') 147 | raise RuntimeError(f'Google Play: failed to archive split APKs: {ex}') 148 | if os.path.isfile(single_apk): 149 | # Keep single APK as the final artifact (no zipping) 150 | logger.info(f'Google Play - artifact ready: {single_apk} (package: {package_name})') 151 | return single_apk 152 | 153 | # Try to 
infer artifact path(s) from apkeep output (absolute .apk/.apks paths) 154 | candidate_paths = set() 155 | try: 156 | for match in re.findall(r'(/[^ \n"]+\.(?:apks?|zip))', output): 157 | candidate_paths.add(match) 158 | except Exception: 159 | pass 160 | if candidate_paths: 161 | logger.debug(f'Google Play - candidates from apkeep output: {list(candidate_paths)[:10]} (package: {package_name})') 162 | for path in candidate_paths: 163 | if os.path.isdir(path): 164 | # Directory with splits; zip it 165 | original_base_apk = os.path.join(path, f'{package_name}.apk') 166 | renamed_base_apk = os.path.join(path, 'base-master.apk') 167 | try: 168 | if os.path.exists(original_base_apk): 169 | if os.path.exists(renamed_base_apk): 170 | try: 171 | os.remove(renamed_base_apk) 172 | except Exception: 173 | pass 174 | os.replace(original_base_apk, renamed_base_apk) 175 | logger.info(f'Google Play - renamed base APK: {original_base_apk} → {renamed_base_apk} (package: {package_name})') 176 | except Exception as ex: 177 | logger.warning(f'Google Play - failed to rename base APK: {ex} (package: {package_name})') 178 | try: 179 | archive_path = shutil.make_archive(path, 'zip', path) 180 | logger.info(f'Google Play - artifact ready: {archive_path} (package: {package_name})') 181 | try: 182 | shutil.rmtree(path) 183 | logger.debug(f'Google Play - temporary split directory removed: {path} (package: {package_name})') 184 | except Exception as ex: 185 | logger.warning('gp:cleanup_split_dir_failed', extra={'package': package_name, 'dir': path, 'error': str(ex)}) 186 | return archive_path 187 | except Exception as ex: 188 | logger.error(f'Google Play - failed to archive split APKs: {ex} (package: {package_name})') 189 | elif os.path.isfile(path): 190 | # If apkeep produced an .apks file, repackage to .zip and remove source 191 | if path.endswith('.apks'): 192 | zip_path = os.path.splitext(path)[0] + '.zip' 193 | try: 194 | with zipfile.ZipFile(zip_path, 'w', 
compression=zipfile.ZIP_DEFLATED) as zf: 195 | zf.write(path, arcname=os.path.basename(path)) 196 | logger.info(f'Google Play - artifact ready: {zip_path} (package: {package_name})') 197 | try: 198 | os.remove(path) 199 | logger.debug(f'Google Play - temporary .apks removed: {path} (package: {package_name})') 200 | except Exception as ex: 201 | logger.debug(f'Google Play - failed to remove temporary .apks: {path}, error: {ex} (package: {package_name})') 202 | return zip_path 203 | except Exception as ex: 204 | logger.error(f'Google Play - failed to repackage .apks to .zip: {ex} (package: {package_name})') 205 | elif path.endswith('.apk'): 206 | # Keep single APK as the final artifact 207 | logger.info(f'Google Play - artifact ready: {path} (package: {package_name})') 208 | return path 209 | elif path.endswith('.zip'): 210 | logger.info(f'Google Play - artifact ready: {path} (package: {package_name})') 211 | return path 212 | 213 | # Fallback: scan download_dir for recent files/dirs matching the package 214 | try: 215 | candidates = [] 216 | for root, dirs, files in os.walk(download_dir): 217 | for f in files: 218 | if f.startswith(package_name) and (f.endswith('.apk') or f.endswith('.apks') or f.endswith('.zip')): 219 | candidates.append(os.path.join(root, f)) 220 | for d in dirs: 221 | if d.startswith(package_name): 222 | candidates.append(os.path.join(root, d)) 223 | if candidates: 224 | logger.debug(f'Google Play - found candidates: {candidates[:10]} (package: {package_name})') 225 | # Prefer .apks/.zip, then split dir, then .apk 226 | for ext in ('.zip',): 227 | for c in candidates: 228 | if c.endswith(ext) and os.path.isfile(c): 229 | logger.info('gp:artifact_ready', extra={'package': package_name, 'artifact': c}) 230 | return c 231 | # Repack .apks files to .zip 232 | for c in candidates: 233 | if c.endswith('.apks') and os.path.isfile(c): 234 | zip_path = os.path.splitext(c)[0] + '.zip' 235 | try: 236 | with zipfile.ZipFile(zip_path, 'w', 
compression=zipfile.ZIP_DEFLATED) as zf: 237 | zf.write(c, arcname=os.path.basename(c)) 238 | logger.info(f'Google Play - artifact ready: {zip_path} (package: {package_name})') 239 | try: 240 | os.remove(c) 241 | logger.debug(f'Google Play - temporary .apks removed: {c} (package: {package_name})') 242 | except Exception as ex: 243 | logger.debug(f'Google Play - failed to remove temporary .apks: {c}, error: {ex} (package: {package_name})') 244 | return zip_path 245 | except Exception as ex: 246 | logger.error('gp:repack_apks_failed', extra={'package': package_name, 'error': str(ex)}) 247 | # split dir 248 | for c in candidates: 249 | if os.path.isdir(c): 250 | original_base_apk = os.path.join(c, f'{package_name}.apk') 251 | renamed_base_apk = os.path.join(c, 'base-master.apk') 252 | try: 253 | if os.path.exists(original_base_apk): 254 | if os.path.exists(renamed_base_apk): 255 | try: 256 | os.remove(renamed_base_apk) 257 | except Exception: 258 | pass 259 | os.replace(original_base_apk, renamed_base_apk) 260 | logger.info('gp:rename_base_apk', extra={'package': package_name, 'from': original_base_apk, 'to': renamed_base_apk}) 261 | except Exception as ex: 262 | logger.warning('gp:rename_base_apk_failed', extra={'package': package_name, 'error': str(ex)}) 263 | try: 264 | archive_path = shutil.make_archive(c, 'zip', c) 265 | logger.info(f'Google Play - artifact ready: {archive_path} (package: {package_name})') 266 | try: 267 | shutil.rmtree(c) 268 | logger.debug(f'Google Play - temporary split directory removed: {c} (package: {package_name})') 269 | except Exception as ex: 270 | logger.debug(f'Google Play - failed to remove temporary split directory: {c}, error: {ex} (package: {package_name})') 271 | return archive_path 272 | except Exception as ex: 273 | logger.error(f'Google Play - failed to archive split APKs: {ex} (package: {package_name})') 274 | # single apk (keep as-is) 275 | for c in candidates: 276 | if c.endswith('.apk') and os.path.isfile(c): 277 | 
logger.info(f'Google Play - artifact ready: {c} (package: {package_name})') 278 | return c 279 | except Exception as ex: 280 | logger.debug(f'Google Play - failed to scan directory for candidates: {ex} (package: {package_name})') 281 | 282 | logger.error(f'Google Play - artifact not found after successful download (package: {package_name})') 283 | raise RuntimeError('Google Play: download reports success but artifact not found') 284 | 285 | 286 | def _ensure_dir_exists(path: str) -> None: 287 | try: 288 | os.makedirs(path, exist_ok=True) 289 | except Exception as ex: 290 | logger.error('gp:mkdir_failed', extra={'dir': path, 'error': str(ex)}) 291 | raise 292 | 293 | 294 | def _configure_logging(level: str) -> None: 295 | numeric_level = getattr(logging, level.upper(), logging.INFO) 296 | logging.basicConfig( 297 | level=numeric_level, 298 | format='%(asctime)s %(levelname)s %(name)s %(message)s', 299 | stream=sys.stdout, 300 | ) 301 | logger.debug(f'Google Play - logging configured (level: {level})') 302 | 303 | 304 | async def _run_cli( 305 | email: str, 306 | package_name: str, 307 | oauth2_token: Optional[str], 308 | aas_token: Optional[str], 309 | ) -> int: 310 | red_email: Optional[Mapping[str, object]] = redact({'email': email}) 311 | logger.info(f'Google Play - start: package {package_name}, email {(red_email or {}).get("email")}, ' 312 | f'OAuth2={bool(oauth2_token)}, AAS={bool(aas_token)}, download directory "{DEFAULT_DOWNLOAD_DIR}"') 313 | _ensure_dir_exists(DEFAULT_DOWNLOAD_DIR) 314 | 315 | try: 316 | token_to_use = aas_token 317 | if not token_to_use: 318 | logger.info(f'Google Play - AAS token not provided, will be fetched via OAuth2 (package: {package_name})') 319 | token_to_use = await fetch_aas_token(email=email, oauth2_token=oauth2_token or '', timeout_sec=DEFAULT_TIMEOUT_SEC) 320 | logger.info(f'Google Play - AAS token obtained (package: {package_name})') 321 | else: 322 | logger.debug(f'Google Play - AAS token (DEBUG): {token_to_use}') 323 | 324 
| artifact = await download_app( 325 | download_dir=DEFAULT_DOWNLOAD_DIR, 326 | package_name=package_name, 327 | email=email, 328 | aas_token=token_to_use or '', 329 | timeout_sec=DEFAULT_TIMEOUT_SEC, 330 | ) 331 | logger.info(f'Google Play - success: artifact {artifact} (package: {package_name})') 332 | return 0 333 | except Exception as ex: 334 | logger.exception(f'Google Play - error: {ex} (package: {package_name})') 335 | return 1 336 | 337 | 338 | def _parse_args(argv: List[str]) -> argparse.Namespace: 339 | parser = argparse.ArgumentParser(description='Google Play downloader (apkeep-based)') 340 | parser.add_argument('--email', required=True, help='Google account email') 341 | parser.add_argument('--package', required=True, help='Android package name') 342 | group = parser.add_mutually_exclusive_group(required=True) 343 | group.add_argument('--oauth2-token', help='OAuth2 token to fetch AAS token') 344 | group.add_argument('--aas-token', help='Already obtained AAS token; skips fetch') 345 | return parser.parse_args(argv) 346 | 347 | 348 | def main(argv: Optional[List[str]] = None) -> int: 349 | args = _parse_args(argv if argv is not None else sys.argv[1:]) 350 | _configure_logging(DEFAULT_LOG_LEVEL) 351 | 352 | if not args.aas_token and not args.oauth2_token: 353 | logger.error('gp:args_missing_token') 354 | return 2 355 | 356 | try: 357 | exit_code = asyncio.run( 358 | _run_cli( 359 | email=args.email, 360 | package_name=args.package, 361 | oauth2_token=args.oauth2_token, 362 | aas_token=args.aas_token, 363 | ) 364 | ) 365 | return exit_code 366 | except KeyboardInterrupt: 367 | logger.warning('gp:cli_interrupted') 368 | return 130 369 | 370 | 371 | if __name__ == '__main__': 372 | sys.exit(main()) 373 | 374 | 375 | -------------------------------------------------------------------------------- /mdast_cli/distribution_systems/nexus.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import os 3 | from 
base64 import b64encode 4 | 5 | import requests 6 | import urllib3 7 | from tqdm import tqdm 8 | 9 | from mdast_cli.helpers.file_utils import ensure_download_dir, cleanup_file 10 | 11 | urllib3.disable_warnings() 12 | 13 | logger = logging.getLogger(__name__) 14 | 15 | 16 | class NexusRepository(object): 17 | nexus_url = None 18 | 19 | def __init__(self, nexus_url, login, password): 20 | self.nexus_url = nexus_url if not nexus_url.endswith('/') else nexus_url[:-1] 21 | self.login = b64encode(str.encode(login)) 22 | self.password = b64encode(str.encode(password)) 23 | self.session = None 24 | 25 | def connect(self): 26 | if self.session: # we already connected 27 | return 28 | 29 | self.session = requests.session() 30 | 31 | api_url = self.nexus_url + '/service/rapture/session' 32 | body = { 33 | 'username': self.login, 34 | 'password': self.password 35 | } 36 | 37 | json_response = self.session.post(api_url, data=body, verify=False) 38 | if json_response.status_code == 403: 39 | logger.error('NexusRepo: Incorrect authentication credentials') 40 | elif json_response.status_code == 500: 41 | logger.error("NexusRepo: Nexus Repo server error 500 during authentication") 42 | 43 | def search_component(self, repo_name, group_id, artifact_id, version): 44 | self.connect() 45 | search_url = f"{self.nexus_url}/service/rest/v1/search?repository={repo_name}&name={artifact_id}" \ 46 | f"&version={version}&group={group_id}" 47 | json_response = self.session.get(search_url, verify=False) 48 | component_search_result = json_response.json().get('items', {}) 49 | if component_search_result: 50 | logger.info(f'NexusRepo: Search length: {len(component_search_result)}') 51 | logger.info(f'NexusRepo: Successfully find component: {component_search_result}') 52 | return component_search_result[-1] 53 | else: 54 | logger.info(f'NexusRepo: Unable to find component in repository - {repo_name}, ' 55 | f'name - {artifact_id}, version - {version}&group_id={group_id}') 56 | return None 57 | 58 | 
def download_app(self, download_path, repo_name, group_id, artifact_id, version): 59 | self.connect() 60 | download_url = '' 61 | file_name = '' 62 | component_search_result = self.search_component(repo_name, group_id, artifact_id, version) 63 | for asset in component_search_result.get('assets', {}): 64 | if asset.get('contentType', '') in ('application/vnd.android.package-archive', 'application/x-itunes-ipa'): 65 | download_url = asset.get('downloadUrl') 66 | file_name = download_url.split('/')[-1] if download_url.split('/')[ 67 | -1] != '' else f'{group_id}-{version}.apk' 68 | break 69 | if not download_url: 70 | logger.error(f'NexusRepo: Unable to find download URL: {len(component_search_result)}') 71 | response = self.session.get(download_url, allow_redirects=True, stream=True) 72 | if response.status_code != 200: 73 | raise RuntimeError(f'NexusRepo: Failed to download application. ' 74 | f'Request return status code: {response.status_code}') 75 | 76 | path_to_save = os.path.join(download_path, file_name) 77 | 78 | ensure_download_dir(download_path) 79 | 80 | try: 81 | total_size = int(response.headers.get('content-length', 0)) 82 | chunk_size = 512 * 1024 83 | with open(path_to_save, 'wb') as file: 84 | with tqdm(total=total_size, unit='B', unit_scale=True, unit_divisor=1024, 85 | desc=f"Nexus - Downloading {file_name}", 86 | disable=total_size == 0) as pbar: 87 | for chunk in response.iter_content(chunk_size=chunk_size): 88 | if chunk: 89 | file.write(chunk) 90 | pbar.update(len(chunk)) 91 | except Exception as e: 92 | # Cleanup partial file on error 93 | cleanup_file(path_to_save) 94 | raise RuntimeError(f'Nexus - Failed to write downloaded file: {e}') 95 | 96 | return path_to_save 97 | -------------------------------------------------------------------------------- /mdast_cli/distribution_systems/nexus2.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import os 3 | from base64 import b64encode 4 | 5 
import requests
import urllib3
from tqdm import tqdm

from mdast_cli.helpers.file_utils import ensure_download_dir, cleanup_file

# Silence urllib3 warnings (e.g. for self-signed Nexus instances; the sibling
# nexus.py client disables TLS verification).
urllib3.disable_warnings()

logger = logging.getLogger(__name__)


def basic_auth(username, password):
    """Build an HTTP Basic 'Authorization' header value for username/password."""
    token = b64encode(f"{username}:{password}".encode('utf-8')).decode("ascii")
    return f'Basic {token}'


class Nexus2Repository(object):
    """Client for downloading Maven artifacts (e.g. APK/IPA) from a Nexus 2 repository."""

    nexus_url = None

    def __init__(self, nexus_url, login, password):
        # Normalize base URL (no trailing slash); credentials are kept in clear
        # and sent via HTTP Basic auth on each request.
        self.nexus_url = nexus_url if not nexus_url.endswith('/') else nexus_url[:-1]
        self.login = str(login)
        self.password = str(password)

    def download_app(self, download_path, repo_name, group_id, artifact_id, version, extension, file_name=''):
        """Download a Maven artifact via the Nexus 2 'artifact/maven/content' API.

        Returns the saved file path ({artifact_id}-{version}.{extension} unless
        file_name is given). Raises RuntimeError on non-200 status or write failure.
        """
        download_url = f'{self.nexus_url}/service/local/artifact/maven/content?r={repo_name}&g={group_id}&a=' \
                       f'{artifact_id}&v={version}&p={extension}'
        if file_name == '':
            file_name = f'{artifact_id}-{version}.{extension}'
        # Browser-like headers; presumably to satisfy fronting proxies/WAFs —
        # TODO(review): confirm they are actually required.
        headers = {
            'DNT': '1',
            'Upgrade-Insecure-Requests': '1',
            'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) '
                          'Chrome/107.0.0.0 Safari/537.36',
            'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;'
                      'q=0.8,application/signed-exchange;v=b3;q=0.9',
            'Accept-Encoding': 'gzip, deflate',
            'Accept-Language': 'en-US,en;q=0.9,ru;q=0.8',
            'Authorization': basic_auth(self.login, self.password),
            'Connection': 'close'
        }
        # NOTE(review): no timeout is set on this request — a stalled server
        # blocks indefinitely; confirm whether a timeout should apply.
        response = requests.get(download_url, headers=headers, stream=True)
        if response.status_code != 200:
            raise RuntimeError(f'NexusRepo: Failed to download application. '
                               f'Request return status code: {response.status_code}')
        logger.info(f'Nexus 2 - Downloading app from repo - {repo_name}, with group - {group_id},'
                    f' artifact - {artifact_id}, version - {version} and extension - {extension}')
        path_to_save = os.path.join(download_path, file_name)

        ensure_download_dir(download_path)
        logger.info(f'Nexus 2 - Creating directory {download_path} for downloading app from Nexus 2')

        try:
            total_size = int(response.headers.get('content-length', 0))
            chunk_size = 512 * 1024
            with open(path_to_save, 'wb') as f:
                with tqdm(total=total_size, unit='B', unit_scale=True, unit_divisor=1024,
                          desc=f"Nexus2 - Downloading {file_name}",
                          disable=total_size == 0) as pbar:
                    for chunk in response.iter_content(chunk_size=chunk_size):
                        if chunk:
                            f.write(chunk)
                            pbar.update(len(chunk))
        except Exception as e:
            # Cleanup partial file on error
            cleanup_file(path_to_save)
            raise RuntimeError(f'Nexus2 - Failed to write downloaded file: {e}')

        logger.info('Nexus 2 - Application was successfully downloaded!')

        return path_to_save
--------------------------------------------------------------------------------
/mdast_cli/distribution_systems/rumarket.py:
--------------------------------------------------------------------------------
import logging
import os

import requests
from tqdm import tqdm

from mdast_cli.helpers.file_utils import ensure_download_dir, cleanup_file

logger = logging.getLogger(__name__)


def get_app_info(package_name):
    """Fetch RuMarket metadata for package_name and build a download descriptor dict."""
    req = requests.get(f'https://store-api.ruplay.market/api/v1/app/getApp/{package_name}')
    if req.status_code == 200:
        app_info = req.json()['data']
        logger.info(f"Rumarket - Successfully found app with package name: {package_name},"
                    f" version:{app_info['latestApk']['versionName']}, company: {app_info['author']['name']}")
    else:
        raise RuntimeError(f'Rumarket - Failed to get application info. Request return status code: {req.status_code}')

    # app_info is only bound on the 200 branch; the raise above guarantees it
    # is defined here.
    return {
        'integration_type': 'rumarket',
        'download_url': f"https://cdn.ruplay.market/data/apks/{app_info['latestApk']['name']}",
        'package_name': app_info['packageName'],
        'version_name': app_info['latestApk']['versionName'],
        'version_code': app_info['latestApk']['versionCode'],
        'min_sdk_version': app_info['latestApk']['minSdkVersion'],
        'target_sdk_version': app_info['latestApk']['targetSdkVersion'],
        'file_size': app_info['latestApk']['size'],
        'icon_url': app_info['iconUrl']
    }


def rumarket_download_app(package_name, download_path):
    """Download the latest APK for package_name from RuMarket into download_path.

    Returns the saved file path; raises RuntimeError on 401 or write failure.
    """
    app_info = get_app_info(package_name)
    logger.info(f'Rumarket - Start downloading application {package_name}')
    r = requests.get(app_info['download_url'], stream=True)
    # NOTE(review): only 401 is treated as fatal here; any other non-200
    # status falls through and its body is written to disk — confirm intended.
    if r.status_code == 401:
        raise RuntimeError(f'Rumarket - Failed to download application. '
                           f'Something goes wrong. Request return status code: {r.status_code}')

    file_path = f"{download_path}/{app_info['package_name']}-{app_info['version_name']}.apk"

    ensure_download_dir(download_path)
    logger.info(f'Rumarket - Creating directory {download_path} for downloading app from Rumarket')

    try:
        total_size = int(r.headers.get('content-length', 0))
        chunk_size = 512 * 1024
        with open(file_path, 'wb') as f:
            with tqdm(total=total_size, unit='B', unit_scale=True, unit_divisor=1024,
                      desc=f"RuMarket - Downloading {package_name}",
                      disable=total_size == 0) as pbar:
                for chunk in r.iter_content(chunk_size=chunk_size):
                    if chunk:
                        f.write(chunk)
                        pbar.update(len(chunk))
    except Exception as e:
        # Cleanup partial file on error
        cleanup_file(file_path)
        raise RuntimeError(f'Rumarket - Failed to write downloaded file: {e}')

    logger.info(f'Rumarket - Apk was downloaded from rumarket to {file_path}')

    return file_path
--------------------------------------------------------------------------------
/mdast_cli/distribution_systems/rustore.py:
--------------------------------------------------------------------------------
import logging
import os
import zipfile

import requests
from tqdm import tqdm

from mdast_cli.helpers.file_utils import ensure_download_dir, cleanup_file

logger = logging.getLogger(__name__)


def get_app_info(package_name):
    """
    Process RuStore application metadata and obtain a direct download URL.
    Changes compared to the original implementation:
    - Explicit headers (User-Agent, Accept) and timeouts added to all HTTP calls.
    - The structure of JSON responses is validated ('body' and 'apkUrl' presence is checked).
    - Fixed a bug where the POST status was checked against the previous GET response.
    - Detailed error messages with status code and a response fragment are provided.
    """
    common_headers = {
        'User-Agent': 'mdast-cli/1.0 (+https://stingray-tech.ru)',
        'Accept': 'application/json'
    }

    req = requests.get(
        f'https://backapi.rustore.ru/applicationData/overallInfo/{package_name}',
        headers=common_headers,
        timeout=30
    )
    if req.status_code == 200:
        body = req.json()
        if 'body' not in body:
            raise RuntimeError('Rustore - Invalid response for overallInfo: missing body field')
        body_info = body['body']
        logger.info(f"Rustore - Successfully found app with package name: {package_name},"
                    f" version:{body_info['versionName']}, company: {body_info['companyName']}")
    else:
        raise RuntimeError(
            f"Rustore - Failed to get application info. Status: {req.status_code}, body: {req.text[:500]}"
        )

    headers = {
        'Content-Type': 'application/json; charset=utf-8',
        **common_headers
    }
    body = {
        'appId': body_info['appId'],
        'firstInstall': True
    }
    download_link_resp = requests.post(
        'https://backapi.rustore.ru/applicationData/download-link',
        headers=headers,
        json=body,
        timeout=30
    )
    if download_link_resp.status_code == 200:
        dl_json = download_link_resp.json()
        if 'body' not in dl_json or 'apkUrl' not in dl_json['body']:
            raise RuntimeError(
                f"Rustore - Invalid response for download-link: {download_link_resp.text[:500]}"
            )
        download_link = dl_json['body']['apkUrl']
    else:
        raise RuntimeError(
            f"Rustore - Failed to get application download link. Status: {download_link_resp.status_code}, "
            f"body: {download_link_resp.text[:500]}"
        )

    return {
        'integration_type': 'rustore',
        'download_url': download_link,
        'package_name': body_info['packageName'],
        'version_name': body_info['versionName'],
        'version_code': body_info['versionCode'],
        'min_sdk_version': body_info['minSdkVersion'],
        'max_sdk_version': body_info['maxSdkVersion'],
        'target_sdk_version': body_info['targetSdkVersion'],
        'file_size': body_info['fileSize'],
        'icon_url': body_info['iconUrl']
    }


def rustore_download_app(package_name, download_path):
    """
    Download an APK from RuStore supporting both direct APK links and ZIP containers.

    Changes compared to the original implementation:
    - Streams the download into a temporary file to prevent partial writes
    - Detects ZIP by URL or Content-Type and extracts the embedded APK
    - Validates the final artifact is a ZIP container (APK format) before returning
    - Uses ensure_download_dir() and os.path.join for cross-platform paths
    - Writes via temporary files
    """
    app_info = get_app_info(package_name)
    logger.info('Rustore - Start downloading application')

    download_headers = {
        'User-Agent': 'mdast-cli/1.0 (+https://stingray-tech.ru)',
        'Accept': '*/*'
    }

    r = requests.get(
        app_info['download_url'],
        headers=download_headers,
        stream=True,
        allow_redirects=True,
        timeout=120
    )
    if r.status_code != 200:
        raise RuntimeError(
            f"Rustore - Failed to download application. Status: {r.status_code}, "
            f"content-type: {r.headers.get('Content-Type')}, body: {r.text[:500]}"
        )

    ensure_download_dir(download_path)
    file_path = os.path.join(download_path, f"{app_info['package_name']}-{app_info['version_name']}.apk")
    tmp_download_path = file_path + '.download'
    tmp_apk_path = file_path + '.part'

    # Save network payload to a temp file first to avoid creating a locked/partial target file
    try:
        total_size = int(r.headers.get('content-length', 0))
        chunk_size = 512 * 1024
        with open(tmp_download_path, 'wb') as f:
            with tqdm(total=total_size, unit='B', unit_scale=True, unit_divisor=1024,
                      desc=f"RuStore - Downloading {package_name}",
                      disable=total_size == 0) as pbar:
                for chunk in r.iter_content(chunk_size=chunk_size):
                    if chunk:
                        f.write(chunk)
                        pbar.update(len(chunk))
    except Exception as e:
        # Cleanup partial file on error
        cleanup_file(tmp_download_path)
        raise RuntimeError(f'Rustore - Failed to write downloaded file: {e}')

    content_type = r.headers.get('Content-Type', '')
    url_looks_like_zip = app_info['download_url'].endswith('.zip')

    if url_looks_like_zip or 'zip' in content_type.lower():
        # Response is a zip archive containing an apk. Extract the apk inside.
        try:
            with zipfile.ZipFile(tmp_download_path, 'r') as zip_ref:
                file_list = zip_ref.namelist()
                # Prefer the first .apk entry; otherwise fall back to the
                # first archive member.
                apk_candidates = [p for p in file_list if p.lower().endswith('.apk')]
                target_in_zip = apk_candidates[0] if apk_candidates else file_list[0]
                with zip_ref.open(target_in_zip) as source_file:
                    with open(tmp_apk_path, 'wb') as target_file:
                        target_file.write(source_file.read())
            logger.info('Rustore - Extracted apk from zip package')
        except zipfile.BadZipFile:
            raise RuntimeError('Rustore - Downloaded file reported as zip, but it is not a valid zip')
        finally:
            # The container is no longer needed once extraction has run.
            try:
                os.remove(tmp_download_path)
            except OSError:
                pass
        working_path = tmp_apk_path
    else:
        # Treat as direct APK; the temp download is the working APK file.
        working_path = tmp_download_path

    # Validate that resulting file looks like an APK (which is a zip file)
    if not zipfile.is_zipfile(working_path):
        # Read small prefix for diagnostics
        try:
            with open(working_path, 'rb') as f:
                head = f.read(64)
        except Exception:
            head = b''
        # Cleanup invalid file before raising error
        cleanup_file(working_path)
        raise RuntimeError(
            f"Rustore - Downloaded file is not a valid APK/ZIP. Content-Type: {content_type}, "
            f"first-bytes: {head[:16]}"
        )

    # Atomically place the APK to the final path (Linux; no Windows lock retries needed)
    os.replace(working_path, file_path)

    logger.info(f'Rustore - Apk was downloaded from rustore to {file_path}')

    return file_path
--------------------------------------------------------------------------------
/mdast_cli/helpers/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Dynamic-Mobile-Security/mdast-cli/804bffe421bfc66bf37e562f088a5e4b78b139b9/mdast_cli/helpers/__init__.py
--------------------------------------------------------------------------------
/mdast_cli/helpers/const.py:
--------------------------------------------------------------------------------
class DastState:
    # Numeric scan lifecycle states; DastStateDict below maps them back to names.
    CREATED = 0
    STARTING = 1
    STARTED = 2
    ANALYZING = 3
    SUCCESS = 4
    FAILED = 5
    STOPPING = 6
    RECALCULATING = 7
    INTERRUPTING = 8
    INITIALIZING = 9
    CANCELLED = 10
    CANCELLING = 11


# Reverse mapping of DastState values to human-readable names.
DastStateDict = {
    0: "CREATED",
    1: "STARTING",
    2: "STARTED",
    3: "ANALYZING",
    4: "SUCCESS",
    5: "FAILED",
    6: "STOPPING",
    7: "RECALCULATING",
    8: "INTERRUPTING",
    9: "INITIALIZING",
    10: "CANCELLED",
    11: "CANCELLING"
}

# File extensions accepted as Android application artifacts.
ANDROID_EXTENSIONS = ['.apk', '.apks', '.zip', '.aab']

# Default architecture names for auto-selection
DEFAULT_ANDROID_ARCHITECTURE = 'Android 11'
DEFAULT_IOS_ARCHITECTURE = 'iOS 14'

# Timeout constants (in seconds)
TRY = 360
LONG_TRY = 20160
END_SCAN_TIMEOUT = 30
SLEEP_TIMEOUT = 10

# HTTP timeout constants
HTTP_REQUEST_TIMEOUT = 30
HTTP_DOWNLOAD_TIMEOUT = 300
--------------------------------------------------------------------------------
/mdast_cli/helpers/exit_codes.py:
--------------------------------------------------------------------------------
"""
Exit codes for mdast_cli application.
Following Unix conventions and providing clear error categorization.
"""
from enum import IntEnum


class ExitCode(IntEnum):
    """Standard exit codes for the application."""
    SUCCESS = 0
    """Operation completed successfully."""

    INVALID_ARGS = 2
    """Invalid command-line arguments provided."""

    DOWNLOAD_FAILED = 4
    """Failed to download application from distribution system."""

    SCAN_FAILED = 5
    """Failed to create or execute scan."""

    NETWORK_ERROR = 6
    """Network-related error (timeout, connection failed, etc.)."""

    AUTH_ERROR = 7
    """Authentication or authorization error."""

    # Declared last but given value 1 so unexpected errors map to the
    # conventional generic failure status.
    INTERNAL_ERROR = 1
    """Internal application error (default for unexpected errors)."""

--------------------------------------------------------------------------------
/mdast_cli/helpers/file_utils.py:
--------------------------------------------------------------------------------
"""
Common file and directory utilities for distribution systems.
"""
import logging
import os
import shutil
from pathlib import Path
from typing import Optional

logger = logging.getLogger(__name__)


def ensure_download_dir(download_path: str) -> None:
    """
    Ensure download directory exists, create if it doesn't.

    Args:
        download_path: Path to the download directory

    Raises:
        OSError: If directory cannot be created
    """
    try:
        os.makedirs(download_path, exist_ok=True)
    except OSError as e:
        logger.error(f'Failed to create download directory {download_path}: {e}')
        raise


def cleanup_file(file_path: str) -> None:
    """
    Safely remove a file, logging but not raising on failure.
33 | 34 | Args: 35 | file_path: Path to file to remove 36 | """ 37 | try: 38 | if os.path.exists(file_path): 39 | os.remove(file_path) 40 | logger.debug(f'Cleaned up file: {file_path}') 41 | except Exception as e: 42 | logger.warning(f'Failed to cleanup file {file_path}: {e}') 43 | 44 | 45 | def cleanup_directory(dir_path: str) -> None: 46 | """ 47 | Safely remove a directory and all its contents, logging but not raising on failure. 48 | 49 | Args: 50 | dir_path: Path to directory to remove 51 | """ 52 | try: 53 | if os.path.exists(dir_path) and os.path.isdir(dir_path): 54 | shutil.rmtree(dir_path) 55 | logger.debug(f'Cleaned up directory: {dir_path}') 56 | except Exception as e: 57 | logger.warning(f'Failed to cleanup directory {dir_path}: {e}') 58 | 59 | 60 | def safe_path_join(base: str, *parts: str) -> str: 61 | """ 62 | Safely join path parts, preventing path traversal. 63 | 64 | Args: 65 | base: Base directory path 66 | *parts: Additional path components 67 | 68 | Returns: 69 | Joined path 70 | 71 | Raises: 72 | ValueError: If path traversal is detected 73 | """ 74 | base_path = Path(base).resolve() 75 | full_path = base_path.joinpath(*parts).resolve() 76 | 77 | # Check that resolved path is still within base directory 78 | try: 79 | full_path.relative_to(base_path) 80 | except ValueError: 81 | raise ValueError(f'Path traversal detected: {full_path} is outside {base_path}') 82 | 83 | return str(full_path) 84 | 85 | 86 | def get_file_size_mb(file_path: str) -> Optional[float]: 87 | """ 88 | Get file size in megabytes. 
89 | 90 | Args: 91 | file_path: Path to file 92 | 93 | Returns: 94 | File size in MB, or None if file doesn't exist 95 | """ 96 | try: 97 | size_bytes = os.path.getsize(file_path) 98 | return size_bytes / (1024 * 1024) 99 | except OSError: 100 | return None 101 | 102 | -------------------------------------------------------------------------------- /mdast_cli/helpers/helpers.py: -------------------------------------------------------------------------------- 1 | import hashlib 2 | import os 3 | 4 | 5 | def get_app_path(test_app_path): 6 | my_path = os.path.abspath(os.path.dirname(__file__)) 7 | path = os.path.join(my_path, test_app_path) 8 | return path 9 | 10 | 11 | def check_app_md5(file_path): 12 | with open(f'{file_path}', "rb") as f: 13 | file_hash = hashlib.md5() 14 | while chunk := f.read(8192): 15 | file_hash.update(chunk) 16 | return file_hash.hexdigest() 17 | -------------------------------------------------------------------------------- /mdast_cli/helpers/logging_utils.py: -------------------------------------------------------------------------------- 1 | from collections.abc import Mapping 2 | from typing import Any, Optional 3 | 4 | 5 | def redact(data: Optional[Mapping[str, Any]]) -> Mapping[str, Any]: 6 | if not data: 7 | return {} 8 | result = dict(data) 9 | email = result.get('email') 10 | if isinstance(email, str) and '@' in email: 11 | name, _, domain = email.partition('@') 12 | if name: 13 | masked = (name[0] + '***') if len(name) > 1 else '*' 14 | result['email'] = f'{masked}@{domain}' 15 | else: 16 | result['email'] = '***' 17 | return result 18 | 19 | 20 | -------------------------------------------------------------------------------- /mdast_cli/helpers/platform_utils.py: -------------------------------------------------------------------------------- 1 | """Platform detection and binary path utilities.""" 2 | import os 3 | import platform 4 | import shutil 5 | from pathlib import Path 6 | 7 | 8 | def is_macos() -> bool: 9 | """Check if 
def is_macos() -> bool:
    """Check if running on macOS."""
    return platform.system() == 'Darwin'


def is_linux() -> bool:
    """Check if running on Linux."""
    return platform.system() == 'Linux'


def get_apkeep_binary_path() -> str:
    """Locate the apkeep downloader binary.

    Lookup order:
      1. Platform-specific binary shipped in the project root
         (``apkeep_macos`` / ``apkeep_linux``) — made executable on the fly.
      2. A system-wide ``apkeep`` found on PATH.

    Returns:
        Absolute path to the apkeep binary.

    Raises:
        RuntimeError: If no binary can be found for this platform.
    """
    repo_root = Path(__file__).parent.parent.parent
    if is_macos():
        bundled = repo_root / 'apkeep_macos'
    elif is_linux():
        bundled = repo_root / 'apkeep_linux'
    else:
        bundled = None

    if bundled is not None and bundled.exists() and bundled.is_file():
        # Ensure the execute bit is set before handing the path to callers.
        os.chmod(bundled, 0o755)
        return str(bundled.absolute())

    on_path = shutil.which('apkeep')
    if on_path:
        return on_path

    platform_name = 'macOS' if is_macos() else 'Linux' if is_linux() else platform.system()
    local_binary_name = 'apkeep_macos' if is_macos() else 'apkeep_linux' if is_linux() else 'apkeep'
    raise RuntimeError(
        f'Google Play: apkeep binary not found. '
        f'Expected local binary: {local_binary_name} in project root, '
        f'or system binary "apkeep" in PATH. '
        f'Platform: {platform_name}'
    )
# --- mdast_cli_core/api.py ---
import json

import requests

from .base import mDastBase


class mDastAPI(mDastBase):
    """
    Class for interact with mDast system through REST API
    (username/password authentication yielding a Bearer token).
    """

    def __init__(self, base_url, username, password):
        super().__init__(base_url)
        self.headers = {}
        self.username = username
        self.password = password
        self.current_context = {}

        # Authenticate immediately and cache the current-user context.
        self._auth()
        self._current_context()

    def _auth(self):
        """POST credentials to /login/ and install the Bearer token header."""
        self.headers['Content-Type'] = 'application/json'
        credentials = {'username': self.username, 'password': self.password}

        # NOTE(review): TLS verification is disabled on every request in this
        # client (verify=False) — confirm this is intentional for the target
        # deployments.
        resp = requests.post(f'{self.url}/login/',
                             headers=self.headers,
                             data=json.dumps(credentials, indent=4),
                             verify=False)

        # NOTE(review): no status-code check here — a rejected login surfaces
        # as a KeyError on 'access'; verify this is the intended failure mode.
        body = resp.json()
        self.headers['Authorization'] = 'Bearer {0}'.format(body['access'])

    def login(self, username, password):
        """Re-authenticate as *username* and refresh the cached user context.

        Returns the raw /login/ response; on HTTP 200 the Authorization
        header and current_context are updated as a side effect.
        """
        self.headers = {'Content-Type': 'application/json'}
        credentials = {'username': username, 'password': password}
        self.username = username
        self.password = password
        resp = requests.post(f'{self.url}/login/',
                             headers=self.headers,
                             data=json.dumps(credentials, indent=4),
                             verify=False)

        if resp.status_code == 200:
            self.set_headers(resp.json()['access'])
            ctx_resp = requests.get(f'{self.url}/currentuser/',
                                    headers=self.headers,
                                    verify=False)
            self.current_context = ctx_resp.json()
        return resp

    def _current_context(self):
        """Fetch /currentuser/ and cache it in self.current_context."""
        ctx_resp = requests.get(f'{self.url}/currentuser/',
                                headers=self.headers,
                                verify=False)
        self.current_context = ctx_resp.json()

    def set_headers(self, access_token):
        """Install a Bearer *access_token* into the shared headers dict."""
        self.headers['Authorization'] = 'Bearer {0}'.format(access_token)


# --- mdast_cli_core/base.py ---
import json
import os
from abc import abstractmethod

import requests


class mDastBase:
    """Shared REST plumbing for the mDast clients (token- and password-based)."""

    def __init__(self, base_url):
        self.headers = {}
        self.current_context = {}
        self.url = base_url

    @abstractmethod
    def set_headers(self, ci_token):
        """Install the authorization header; implemented by subclasses."""
        pass

    def get_current_user_info(self):
        """GET /currentuser/ and cache the result in current_context."""
        resp = requests.get(f'{self.url}/currentuser/',
                            headers=self.headers,
                            verify=False)
        self.current_context = resp.json()
        return resp

    def set_user_language(self, login, lang):
        """PATCH the current user's UI language; asserts HTTP 200.

        NOTE(review): `assert` is stripped under `python -O`; an explicit
        status check would be more robust — confirm before changing.
        """
        payload = {
            'username': login,
            'language': lang
        }
        resp = requests.patch(f'{self.url}/currentuser/',
                              headers=self.headers,
                              data=json.dumps(payload),
                              verify=False)
        assert resp.status_code == 200

    def get_projects(self):
        """List projects for the caller's own organization."""
        return requests.get(f'{self.url}/organizations/{self.current_context["company"]}/projects/',
                            headers=self.headers,
                            verify=False)

    def get_projects_by_org_id(self, org_id):
        """List projects for an arbitrary organization."""
        return requests.get(f'{self.url}/organizations/{org_id}/projects/',
                            headers=self.headers,
                            verify=False)

    def create_project(self, project_info, arch_type):
        """Create a project in the caller's organization."""
        payload = {
            'name': project_info['name'],
            'description': project_info['description'],
            'architecture_type': arch_type
        }
        return requests.post(f'{self.url}/organizations/{self.current_context["company"]}/projects/',
                             headers=self.headers,
                             data=json.dumps(payload),
                             verify=False)
arch_type 51 | } 52 | return requests.post(f'{self.url}/organizations/{self.current_context["company"]}/projects/', 53 | headers=self.headers, 54 | data=json.dumps(data), 55 | verify=False) 56 | 57 | def create_project_for_organization(self, org_id, project_info, arch_type): 58 | data = { 59 | 'name': project_info['name'], 60 | 'description': project_info['description'], 61 | 'architecture_type': arch_type 62 | } 63 | return requests.post(f'{self.url}/organizations/{org_id}/projects/', 64 | headers=self.headers, 65 | data=json.dumps(data), 66 | verify=False) 67 | 68 | def get_users(self): 69 | return requests.get(f'{self.url}/organizations/{self.current_context["company"]}/users', 70 | headers=self.headers, 71 | verify=False) 72 | 73 | def get_users_by_org(self, org_id): 74 | return requests.get(f'{self.url}/organizations/{org_id}/users', 75 | headers=self.headers, 76 | verify=False) 77 | 78 | def get_user_info(self, user_id): 79 | return requests.get(f'{self.url}/users/{user_id}/', 80 | headers=self.headers, 81 | verify=False) 82 | 83 | def create_user(self, org_id, user_data, role): 84 | data = { 85 | 'username': user_data['username'], 86 | 'password': user_data['password'], 87 | 'role': role 88 | } 89 | return requests.post(f'{self.url}/organizations/{org_id}/users/', 90 | headers=self.headers, 91 | data=json.dumps(data), 92 | verify=False) 93 | 94 | def create_user_by_superadmin(self, user_id, username, org_id, role, is_active, change_password, 95 | accepted_eula, accepted_confidential): 96 | data = { 97 | 'username': username, 98 | 'role': role, 99 | 'is_active': is_active, 100 | 'change_password': change_password, 101 | 'org_id': org_id, 102 | 'accepted_eula': accepted_eula, 103 | 'accepted_confidential': accepted_confidential 104 | } 105 | return requests.put(f'{self.url}/users/{user_id}/', headers=self.headers, data=json.dumps(data), verify=False) 106 | 107 | def delete_user(self, user_id): 108 | return requests.delete(f'{self.url}/users/{user_id}/', 109 | 
headers=self.headers, 110 | verify=False) 111 | 112 | def update_username(self, user_id, username): 113 | data = { 114 | 'username': username 115 | } 116 | return requests.patch(f'{self.url}/users/{user_id}/', 117 | headers=self.headers, 118 | data=json.dumps(data), 119 | verify=False) 120 | 121 | def patch_user(self, user_id, username, role, is_active, change_password, accepted_eula, accepted_confidential): 122 | data = { 123 | 'username': username, 124 | 'role': role, 125 | 'is_active': is_active, 126 | 'change_password': change_password, 127 | 'accepted_eula': accepted_eula, 128 | 'accepted_confidential': accepted_confidential 129 | } 130 | return requests.patch(f'{self.url}/users/{user_id}/', headers=self.headers, data=json.dumps(data), verify=False) 131 | 132 | def accept_agreements(self, accept_eula, accept_confidential): 133 | data = { 134 | 'accepted_eula': accept_eula, 135 | 'accepted_confidential': accept_confidential 136 | } 137 | return requests.patch(f'{self.url}/currentuser/accept_agreements/', 138 | headers=self.headers, 139 | data=json.dumps(data), 140 | verify=False) 141 | 142 | def change_user_organisation(self, user_data, new_org_id): 143 | data = { 144 | 'username': user_data['username'], 145 | 'company': new_org_id 146 | } 147 | user_id = user_data['id'] 148 | return requests.patch(f'{self.url}/users/{user_id}/', 149 | headers=self.headers, 150 | data=json.dumps(data), 151 | verify=False) 152 | 153 | def get_project(self, project_id): 154 | return requests.get(f'{self.url}/projects/{project_id}/', 155 | headers=self.headers, 156 | verify=False) 157 | 158 | def delete_project(self, project_id): 159 | return requests.delete(f'{self.url}/projects/{project_id}/', 160 | headers=self.headers, 161 | verify=False) 162 | 163 | def get_profiles_for_project(self, project_id): 164 | return requests.get(f'{self.url}/projects/{project_id}/profiles/', 165 | headers=self.headers, 166 | verify=False) 167 | 168 | def get_profile(self, profile_id): 169 | return 
requests.get(f'{self.url}/profiles/{profile_id}/', 170 | headers=self.headers, 171 | verify=False) 172 | 173 | def get_profile_settings(self, profile_id): 174 | return requests.get(f'{self.url}/profiles/{profile_id}/settings/', 175 | headers=self.headers, 176 | verify=False) 177 | 178 | def create_profile_for_project(self, project_id, profile_info): 179 | data = { 180 | 'name': profile_info['name'], 181 | 'description': profile_info['description'], 182 | 'project': project_id 183 | } 184 | return requests.post(f'{self.url}/projects/{project_id}/profiles/', 185 | headers=self.headers, 186 | data=json.dumps(data), 187 | verify=False) 188 | 189 | def update_testcase_for_project(self, project_id, test_case_id, updated_test_case_info): 190 | data = { 191 | 'name': updated_test_case_info['name'], 192 | 'description': updated_test_case_info['description'], 193 | 'project': project_id 194 | } 195 | return requests.patch(f'{self.url}/testcases/{test_case_id}/', 196 | headers=self.headers, 197 | data=json.dumps(data), 198 | verify=False) 199 | 200 | def update_profile_for_project(self, project_id, profile_id, updated_profile_info): 201 | data = { 202 | 'name': updated_profile_info['name'], 203 | 'description': updated_profile_info['description'], 204 | 'project': project_id 205 | } 206 | return requests.patch(f'{self.url}/profiles/{profile_id}/', 207 | headers=self.headers, 208 | data=json.dumps(data), 209 | verify=False) 210 | 211 | def update_project(self, project_id, updated_project_info): 212 | data = { 213 | 'name': updated_project_info['name'], 214 | 'description': updated_project_info['description'], 215 | 'project': project_id 216 | } 217 | return requests.patch(f'{self.url}/projects/{project_id}/', 218 | headers=self.headers, 219 | data=json.dumps(data), 220 | verify=False) 221 | 222 | def delete_profile(self, profile_id): 223 | return requests.delete(f'{self.url}/profiles/{profile_id}/', 224 | headers=self.headers, 225 | verify=False) 226 | 227 | def 
get_all_scans(self): 228 | return requests.get(f'{self.url}/organizations/{self.current_context["company"]}/dasts/', 229 | headers=self.headers, 230 | verify=False) 231 | 232 | def get_all_scans_by_org_id(self, org_id): 233 | return requests.get(f'{self.url}/organizations/{org_id}/dasts/', 234 | headers=self.headers, 235 | verify=False) 236 | 237 | def get_all_started_scans_by_org_id(self, org_id): 238 | return requests.get(f'{self.url}/organizations/{org_id}/dasts/?state=2&ordering=state&page_size=10000', 239 | headers=self.headers, 240 | verify=False) 241 | 242 | def get_all_created_scans_by_org_id(self, org_id): 243 | return requests.get(f'{self.url}/organizations/{org_id}/dasts/?state=0&ordering=state&page_size=10000', 244 | headers=self.headers, 245 | verify=False) 246 | 247 | def get_all_scans_by_org_id_filter_profile(self, org_id, profile_id): 248 | return requests.get(f'{self.url}/organizations/{org_id}/dasts/?profile={profile_id}', 249 | headers=self.headers, 250 | verify=False) 251 | 252 | def get_scan_info(self, scan_id): 253 | return requests.get(f'{self.url}/dasts/{scan_id}/', 254 | headers=self.headers, 255 | verify=False) 256 | 257 | def get_architectures(self): 258 | return requests.get(f'{self.url}/architectures/', 259 | headers=self.headers, 260 | verify=False) 261 | 262 | def get_architecture_types(self): 263 | return requests.get(f'{self.url}/architecture_types/', 264 | headers=self.headers, 265 | verify=False) 266 | 267 | def get_testcases(self): 268 | return requests.get(f'{self.url}/organizations/{self.current_context["company"]}/testcases/', 269 | headers=self.headers, 270 | verify=False) 271 | 272 | def get_testcases_for_project(self, project_id): 273 | return requests.get(f'{self.url}/projects/{project_id}/testcases/', 274 | headers=self.headers, 275 | verify=False) 276 | 277 | def create_testcase(self, project_id, testcase_info, app_id, arch_id): 278 | data = { 279 | 'name': testcase_info['name'], 280 | 'description': 
testcase_info['description'], 281 | 'application_id': app_id, 282 | 'architecture_id': arch_id 283 | } 284 | return requests.post(f'{self.url}/projects/{project_id}/testcases/', 285 | headers=self.headers, 286 | data=json.dumps(data), 287 | verify=False) 288 | 289 | def get_testcase(self, testcase_id): 290 | return requests.get(f'{self.url}/testcases/{testcase_id}/', 291 | headers=self.headers, 292 | verify=False) 293 | 294 | def start_testcase(self, testcase_id): 295 | return requests.post(f'{self.url}/testcases/{testcase_id}/start/', 296 | headers=self.headers, 297 | verify=False) 298 | 299 | def stop_testcase(self, testcase_id): 300 | return requests.post(f'{self.url}/testcases/{testcase_id}/stop/', 301 | headers=self.headers, 302 | verify=False) 303 | 304 | def delete_testcase(self, testcase_id): 305 | return requests.delete(f'{self.url}/testcases/{testcase_id}/', 306 | headers=self.headers, 307 | verify=False) 308 | 309 | def upload_application(self, path, architecture_type): 310 | headers_multipart = {'Authorization': self.headers['Authorization']} 311 | multipart_form_data = { 312 | 'file': (os.path.split(path)[-1], open(path, 'rb')) 313 | } 314 | return requests.post(f'{self.url}/organizations/{self.current_context["company"]}/applications/', 315 | headers=headers_multipart, 316 | files=multipart_form_data, 317 | data={'architecture_type': architecture_type}, 318 | verify=False) 319 | 320 | def create_manual_scan(self, project_id, profile_id, app_id, arch_id): 321 | data = { 322 | 'application_id': app_id, 323 | 'architecture_id': arch_id, 324 | 'type': 0 325 | } 326 | if project_id: 327 | data['project_id'] = project_id 328 | if profile_id: 329 | data['profile_id'] = profile_id 330 | return requests.post(f'{self.url}/organizations/{self.current_context["company"]}/dasts/', 331 | headers=self.headers, 332 | data=json.dumps(data), 333 | verify=False) 334 | 335 | def create_auto_scan(self, project_id, profile_id, app_id, arch_id, test_case_id): 336 | data = { 
337 | 'application_id': app_id, 338 | 'architecture_id': arch_id, 339 | 'test_case_id': test_case_id, 340 | 'type': 1 341 | } 342 | if project_id: 343 | data['project_id'] = project_id 344 | if profile_id: 345 | data['profile_id'] = profile_id 346 | return requests.post(f'{self.url}/organizations/{self.current_context["company"]}/dasts/', 347 | headers=self.headers, 348 | data=json.dumps(data), 349 | verify=False) 350 | 351 | def create_appium_scan(self, project_id, profile_id, app_id, arch_id, appium_script_path): 352 | data = { 353 | 'application_id': app_id, 354 | 'architecture_id': arch_id, 355 | 'type': 2 356 | } 357 | headers = { 358 | 'Authorization': self.headers['Authorization'] 359 | } 360 | 361 | if project_id: 362 | data['project_id'] = project_id 363 | if profile_id: 364 | data['profile_id'] = profile_id 365 | with open(appium_script_path, 'rb') as f: 366 | files = {'script': f} 367 | return requests.post(f'{self.url}/organizations/{self.current_context["company"]}/dasts/', 368 | headers=headers, 369 | data=data, 370 | files=files, 371 | verify=False) 372 | 373 | def create_manual_scan_autocreate_profile(self, app_id, arch_id): 374 | data = { 375 | 'application_id': app_id, 376 | 'architecture_id': arch_id, 377 | 'type': 0 378 | } 379 | return requests.post(f'{self.url}/organizations/{self.current_context["company"]}/dasts/', 380 | headers=self.headers, 381 | data=json.dumps(data), 382 | verify=False) 383 | 384 | def start_scan(self, dast_id): 385 | """ 386 | Start automated scan through REST API 387 | :return: scan info resp(dict) 388 | """ 389 | return requests.post(f'{self.url}/dasts/{dast_id}/start/', 390 | headers=self.headers, 391 | verify=False) 392 | 393 | def stop_scan(self, scan_id): 394 | """ 395 | Get scan status from current scan Id 396 | :param scan_id: Scan ID to get status 397 | :return: 398 | """ 399 | return requests.post(f'{self.url}/dasts/{scan_id}/stop/', 400 | headers=self.headers, 401 | verify=False) 402 | 403 | def 
get_scan_issues(self, scan_id): 404 | return requests.get(f'{self.url}/dasts/{scan_id}/issues/', 405 | headers=self.headers, 406 | verify=False) 407 | 408 | def get_engines(self): 409 | return requests.get(f'{self.url}/organizations/{self.current_context["company"]}/engines/', 410 | headers=self.headers, 411 | verify=False) 412 | 413 | def engine_create(self, name, architecture): 414 | data = { 415 | "name": name, 416 | "architecture": architecture 417 | } 418 | return requests.post(f'{self.url}/organizations/{self.current_context["company"]}/engines/', 419 | headers=self.headers, 420 | data=json.dumps(data), 421 | verify=False) 422 | 423 | def get_engine(self, engine_id): 424 | return requests.get(f'{self.url}/engines/{engine_id}/', 425 | headers=self.headers, 426 | verify=False) 427 | 428 | def engine_management(self, engine_id, action): 429 | return requests.get(f'{self.url}/engines/{engine_id}/{action}/', 430 | headers=self.headers, 431 | verify=False) 432 | 433 | def engine_delete(self, engine_id): 434 | return requests.delete(f'{self.url}/engines/{engine_id}/', 435 | headers=self.headers, 436 | verify=False) 437 | 438 | def get_organizations(self): 439 | return requests.get(f'{self.url}/organizations/', 440 | headers=self.headers, 441 | verify=False) 442 | 443 | def get_organization(self, organization_id): 444 | return requests.get(f'{self.url}/organizations/{organization_id}/', 445 | headers=self.headers, 446 | verify=False) 447 | 448 | def create_organization(self, org_info): 449 | data = { 450 | 'name': org_info['name'], 451 | 'description': org_info['description'] 452 | } 453 | return requests.post(f'{self.url}/organizations/', 454 | headers=self.headers, 455 | data=json.dumps(data), 456 | verify=False) 457 | 458 | def update_organization(self, org_info, org_id): 459 | data = { 460 | 'name': org_info['name'], 461 | 'description': org_info['description'] 462 | } 463 | return requests.patch(f'{self.url}/organizations/{org_id}/', 464 | headers=self.headers, 
465 | data=json.dumps(data), 466 | verify=False) 467 | 468 | def delete_organization(self, org_id): 469 | return requests.delete(f'{self.url}/organizations/{org_id}/', 470 | headers=self.headers, 471 | verify=False) 472 | 473 | def get_project_rules(self, project_id): 474 | return requests.get(f'{self.url}/projects/{project_id}/rules/', 475 | headers=self.headers, 476 | verify=False) 477 | 478 | def get_project_rule_expressions(self, project_id): 479 | return requests.get(f'{self.url}/projects/{project_id}/rule_expressions/', 480 | headers=self.headers, 481 | verify=False) 482 | 483 | def get_project_rule_modules(self, project_id): 484 | return requests.get(f'{self.url}/projects/{project_id}/rule_modules/', 485 | headers=self.headers, 486 | verify=False) 487 | 488 | def get_organization_rules(self): 489 | return requests.get(f'{self.url}/organizations/{self.current_context["company"]}/rules/', 490 | headers=self.headers, 491 | verify=False) 492 | 493 | def get_organization_rules_by_id(self, org_id): 494 | return requests.get(f'{self.url}/organizations/{org_id}/rules/', 495 | headers=self.headers, 496 | verify=False) 497 | 498 | def get_organization_rule_expressions(self): 499 | return requests.get(f'{self.url}/organizations/{self.current_context["company"]}/rule_expressions/', 500 | headers=self.headers, 501 | verify=False) 502 | 503 | def get_organization_rule_modules(self): 504 | return requests.get(f'{self.url}/organizations/{self.current_context["company"]}/rule_modules/', 505 | headers=self.headers, 506 | verify=False) 507 | 508 | def get_organization_injections(self): 509 | return requests.get(f'{self.url}/organizations/{self.current_context["company"]}/injections', 510 | headers=self.headers, 511 | verify=False) 512 | 513 | def get_project_injections(self, project_id): 514 | return requests.get(f'{self.url}/projects/{project_id}/injections', 515 | headers=self.headers, 516 | verify=False) 517 | 518 | def get_requirement_groups(self): 519 | return 
requests.get(f'{self.url}/requirement_groups/', 520 | headers=self.headers, 521 | verify=False) 522 | 523 | def get_organization_requirements_groups(self): 524 | return requests.get(f'{self.url}/organizations/{self.current_context["company"]}/requirement_groups/', 525 | headers=self.headers, 526 | verify=False) 527 | 528 | def get_profile_requirements_groups(self, profile_id): 529 | return requests.get(f'{self.url}/profiles/{profile_id}/requirement_groups/', 530 | headers=self.headers, 531 | verify=False) 532 | 533 | def get_ci_token(self): 534 | return requests.get(f'{self.url}/organizations/{self.current_context["company"]}/ci_token/info', 535 | headers=self.headers, 536 | verify=False) 537 | 538 | def renew_token(self): 539 | return requests.get(f'{self.url}/organizations/{self.current_context["company"]}/ci_token/renew', 540 | headers=self.headers, 541 | verify=False) 542 | 543 | def replace_token(self): 544 | return requests.get(f'{self.url}/organizations/{self.current_context["company"]}/ci_token/replace', 545 | headers=self.headers, 546 | verify=False) 547 | 548 | def get_rules(self): 549 | return requests.get(f'{self.url}/rules/', 550 | headers=self.headers, 551 | verify=False) 552 | 553 | def get_settings(self): 554 | return requests.get(f'{self.url}/settings/', 555 | headers=self.headers, 556 | verify=False) 557 | 558 | def change_password(self, old_pass, new_pass): 559 | data = { 560 | 'password': old_pass, 561 | 'new_password': new_pass 562 | } 563 | return requests.put(f'{self.url}/currentuser/change_password/', 564 | headers=self.headers, 565 | data=json.dumps(data), 566 | verify=False) 567 | 568 | def change_password_by_admin(self, admin_password, new_user_password, user_id): 569 | data = { 570 | 'password': admin_password, 571 | 'new_password': new_user_password 572 | } 573 | return requests.put(f'{self.url}/users/{user_id}/change_password/', 574 | headers=self.headers, 575 | data=json.dumps(data), 576 | verify=False) 577 | 578 | def 
add_rule_to_organization(self, org_id, rule_data, is_used): 579 | data = { 580 | 'is_used': is_used, 581 | 'name': rule_data['name'], 582 | 'description': rule_data['description'] 583 | } 584 | return requests.post(f'{self.url}/organizations/{org_id}/rules/', 585 | headers=self.headers, 586 | data=json.dumps(data), 587 | verify=False) 588 | 589 | def change_rule(self, rule_id, new_rule_data): 590 | data = { 591 | 'name': new_rule_data['name'], 592 | 'description': new_rule_data['description'] 593 | } 594 | return requests.patch(f'{self.url}/rules/{rule_id}/', 595 | headers=self.headers, 596 | data=json.dumps(data), 597 | verify=False) 598 | 599 | def delete_rule(self, rule_id): 600 | return requests.delete(f'{self.url}/rules/{rule_id}/', 601 | headers=self.headers, 602 | verify=False) 603 | 604 | def download_report(self, dast_id): 605 | report = requests.get(f'{self.url}/dasts/{dast_id}/report/', 606 | allow_redirects=True, 607 | headers=self.headers, 608 | verify=False) 609 | return report 610 | 611 | def download_scan_json_result(self, dast_id): 612 | report = requests.get(f'{self.url}/dasts/{dast_id}/report/?output=json', 613 | allow_redirects=True, 614 | headers=self.headers, 615 | verify=False) 616 | return report 617 | 618 | def get_dast_logs(self, dast_id): 619 | return requests.get(f'{self.url}/dasts/{dast_id}/log/', 620 | headers=self.headers, 621 | verify=False) 622 | 623 | def get_dasts_for_org_project(self, org_id, project_id): 624 | return requests.get(f'{self.url}/organizations/{org_id}/dasts/?project={project_id}', 625 | headers=self.headers, 626 | verify=False) 627 | 628 | def get_dast_issues(self, dast_id): 629 | return requests.get(f'{self.url}/dasts/{dast_id}/issues/?page_size=1000', 630 | headers=self.headers, 631 | verify=False) 632 | 633 | def download_dast_logs(self, dast_id): 634 | logs = requests.get(f'{self.url}/dasts/{dast_id}/log/download/', 635 | allow_redirects=True, 636 | headers=self.headers, 637 | verify=False) 638 | return logs 
639 | 640 | def get_testcase_logs(self, testcase_id): 641 | return requests.get(f'{self.url}/testcases/{testcase_id}/log/', 642 | headers=self.headers, 643 | verify=False) 644 | 645 | def download_testcase_logs(self, testcase_id): 646 | logs = requests.get(f'{self.url}/testcases/{testcase_id}/log/download/', 647 | allow_redirects=True, 648 | headers=self.headers, 649 | verify=False) 650 | return logs 651 | 652 | def check_app_md5(self, org_id, md5): 653 | resp = requests.get(f'{self.url}/organizations/{org_id}/applications/?md5={md5}', 654 | allow_redirects=True, 655 | headers=self.headers, 656 | verify=False) 657 | return resp 658 | 659 | def get_issue_info(self, issue_id): 660 | return requests.get(f'{self.url}/dast_issues/{issue_id}/', 661 | headers=self.headers, 662 | verify=False) 663 | 664 | def get_localization_issue_data_keys(self): 665 | return requests.get(f'{self.url}/localization/issue_data_keys', 666 | headers=self.headers, 667 | verify=False) 668 | 669 | def get_modules(self): 670 | return requests.get(f'{self.url}/modules/', 671 | headers=self.headers, 672 | verify=False) 673 | -------------------------------------------------------------------------------- /mdast_cli_core/token.py: -------------------------------------------------------------------------------- 1 | from .base import mDastBase 2 | 3 | 4 | class mDastToken(mDastBase): 5 | """ 6 | Class for interact with mDast system through ci/cd token 7 | """ 8 | 9 | def __init__(self, base_url, ci_token, company_id): 10 | super().__init__(base_url) 11 | self.company_id = company_id 12 | self.current_context = {"company": company_id} 13 | self.headers = {'Authorization': 'Token {0}'.format(ci_token), 14 | 'Content-Type': 'application/json'} 15 | 16 | def set_headers(self, ci_token): 17 | self.headers = {'Authorization': 'Token {0}'.format(ci_token)} 18 | -------------------------------------------------------------------------------- /requirements.txt: 
-------------------------------------------------------------------------------- 1 | altgraph==0.17 2 | beautifulsoup4==4.10.0 3 | cachetools==4.1.1 4 | certifi>=2023.7.22 5 | cffi==1.15.0 6 | chardet>=3.0.4 7 | charset-normalizer==2.0.12 8 | cryptography>=37.0.2 9 | google==3.0.0 10 | google-auth>=2.23.0 11 | idna>=3.7 12 | pyasn1==0.4.8 13 | pyasn1-modules==0.2.8 14 | pycparser==2.21 15 | pyparsing==3.0.7 16 | pytz>=2021.1 17 | requests>=2.23.0 18 | rsa==4.8 19 | six>=1.15.0 20 | soupsieve==2.3.1 21 | uritemplate==3.0.1 22 | urllib3>=1.26.13 23 | tqdm>=4.66.0 24 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [egg_info] 2 | tag_build = 3 | tag_date = 0 -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | from setuptools import find_packages, setup 2 | 3 | with open("README.md", "r") as fh: 4 | long_description = fh.read() 5 | 6 | setup( 7 | name="mdast_cli", 8 | 9 | version='2025.11.2', 10 | 11 | python_requires='>=3.9', 12 | 13 | author="Dynamic-Mobile-Security", 14 | description="Dynamic-Mobile-Security", 15 | long_description=long_description, 16 | long_description_content_type="text/markdown", 17 | url="https://github.com/Dynamic-Mobile-Security/mdast-cli", 18 | packages=find_packages(), 19 | include_package_data=True, 20 | package_data={'': ['device.properties']}, 21 | install_requires=[ 22 | 'altgraph==0.17', 23 | 'beautifulsoup4==4.10.0', 24 | 'cachetools==4.1.1', 25 | 'certifi>=2023.7.22', 26 | 'cffi==1.15.0', 27 | 'chardet>=3.0.4', 28 | 'charset-normalizer==2.0.12', 29 | 'cryptography>=37.0.2', 30 | 'google==3.0.0', 31 | 'google-auth>=2.23.0', 32 | 'idna>=3.7', 33 | 'pyasn1==0.4.8', 34 | 'pyasn1-modules==0.2.8', 35 | 'pycparser==2.21', 36 | 'pyparsing==3.0.7', 37 | 'pytz>=2021.1', 38 | 
'requests>=2.23.0', 39 | 'rsa==4.8', 40 | 'six>=1.15.0', 41 | 'soupsieve==2.3.1', 42 | 'uritemplate==3.0.1', 43 | 'urllib3>=1.26.13', 44 | 'tqdm>=4.66.0' 45 | ], 46 | entry_points={ 47 | 'console_scripts': [ 48 | 'mdast_cli=mdast_cli.mdast_scan:main' 49 | ] 50 | }, 51 | classifiers=[ 52 | "Programming Language :: Python :: 3", 53 | "Programming Language :: Python :: 3.9", 54 | "Programming Language :: Python :: 3.10", 55 | "Programming Language :: Python :: 3.11", 56 | "Programming Language :: Python :: 3.12", 57 | "License :: OSI Approved :: MIT License", 58 | "Operating System :: Unix", 59 | ], 60 | ) 61 | --------------------------------------------------------------------------------