├── .darglint ├── .devcontainer ├── Dockerfile └── devcontainer.json ├── .flake8 ├── .gitattributes ├── .github ├── CODEOWNERS ├── dependabot.yml ├── release-drafter.yml └── workflows │ ├── release.yml │ └── tests.yml ├── .gitignore ├── .pre-commit-config.yaml ├── .vscode ├── settings.default.json └── tasks.json ├── CONTRIBUTING.md ├── LICENSE.txt ├── README.md ├── codecov.yml ├── mypy.ini ├── pyproject.toml ├── requirements_dev.txt ├── scripts ├── clean.sh ├── common.sh ├── publish.sh └── setup ├── setup.cfg ├── src └── synology_dsm │ ├── __init__.py │ ├── api │ ├── __init__.py │ ├── core │ │ ├── __init__.py │ │ ├── external_usb.py │ │ ├── security.py │ │ ├── share.py │ │ ├── system.py │ │ ├── upgrade.py │ │ └── utilization.py │ ├── download_station │ │ ├── __init__.py │ │ └── task.py │ ├── dsm │ │ ├── __init__.py │ │ ├── information.py │ │ └── network.py │ ├── file_station │ │ ├── __init__.py │ │ └── models.py │ ├── photos │ │ ├── __init__.py │ │ └── model.py │ ├── storage │ │ ├── __init__.py │ │ └── storage.py │ ├── surveillance_station │ │ ├── __init__.py │ │ ├── camera.py │ │ └── const.py │ └── virtual_machine_manager │ │ ├── __init__.py │ │ └── guest.py │ ├── const.py │ ├── exceptions.py │ ├── helpers.py │ ├── py.typed │ └── synology_dsm.py └── tests ├── __init__.py ├── __snapshots__ └── test_synology_dsm_7.ambr ├── api_data ├── __init__.py ├── dsm_5 │ ├── __init__.py │ ├── const_5_api_auth.py │ ├── const_5_api_info.py │ ├── core │ │ ├── __init__.py │ │ └── const_5_core_utilization.py │ ├── dsm │ │ ├── __init__.py │ │ ├── const_5_dsm_info.py │ │ └── const_5_dsm_network.py │ └── storage │ │ ├── __init__.py │ │ └── const_5_storage_storage.py ├── dsm_6 │ ├── __init__.py │ ├── const_6_api_auth.py │ ├── const_6_api_info.py │ ├── core │ │ ├── __init__.py │ │ ├── const_6_core_security.py │ │ ├── const_6_core_share.py │ │ ├── const_6_core_system.py │ │ ├── const_6_core_upgrade.py │ │ └── const_6_core_utilization.py │ ├── download_station │ │ ├── __init__.py │ │ ├── const_6_download_station_info.py │ │ ├── const_6_download_station_stat.py │ │ └── const_6_download_station_task.py │ ├── dsm │ │ ├── __init__.py │ │ ├── const_6_dsm_info.py │ │ └── const_6_dsm_network.py │ ├── storage │ │ ├── __init__.py │ │ └── const_6_storage_storage.py │ └── surveillance_station │ │ ├── __init__.py │ │ ├── const_6_api_info.py │ │ ├── const_6_surveillance_station_camera.py │ │ └── const_6_surveillance_station_home_mode.py └── dsm_7 │ ├── __init__.py │ ├── const_7_api_auth.py │ ├── const_7_api_info.py │ ├── core │ ├── __init__.py │ ├── const_7_core_external_usb.py │ └── const_7_core_upgrade.py │ ├── dsm │ ├── __init__.py │ └── const_7_dsm_info.py │ ├── file_station │ ├── __init__.py │ └── const_7_file_station.py │ ├── photos │ ├── __init__.py │ └── const_7_photo.py │ └── virtual_machine_manager │ ├── __init__.py │ └── const_7_vmm.py ├── conftest.py ├── const.py ├── test_synology_dsm.py ├── test_synology_dsm_5.py ├── test_synology_dsm_6.py └── test_synology_dsm_7.py /.darglint: -------------------------------------------------------------------------------- 1 | [darglint] 2 | strictness = short 3 | -------------------------------------------------------------------------------- /.devcontainer/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM mcr.microsoft.com/vscode/devcontainers/python:3.9-bookworm 2 | 3 | # install test requirements 4 | COPY requirements*.txt /tmp/pip-tmp/ 5 | RUN pip3 --disable-pip-version-check --no-cache-dir install -r 
/tmp/pip-tmp/requirements_dev.txt \ 6 | && rm -rf /tmp/pip-tmp 7 | 8 | # Set the default shell to bash instead of sh 9 | ENV SHELL /bin/bash 10 | -------------------------------------------------------------------------------- /.devcontainer/devcontainer.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "Python Synology DSM API", 3 | "context": "..", 4 | "dockerFile": "Dockerfile", 5 | "postCreateCommand": "scripts/setup", 6 | "containerEnv": { "DEVCONTAINER": "1" }, 7 | "runArgs": ["-e", "GIT_EDITOR=code --wait"], 8 | "customizations": { 9 | "vscode": { 10 | "extensions": [ 11 | "ms-python.vscode-pylance", 12 | "ms-python.black-formatter", 13 | "ms-python.pylint", 14 | "ms-python.mypy-type-checker", 15 | "visualstudioexptteam.vscodeintellicode", 16 | "esbenp.prettier-vscode" 17 | ], 18 | // Please keep this file in sync with settings in .vscode/settings.default.json 19 | "settings": { 20 | "python.pythonPath": "/usr/local/bin/python", 21 | "python.testing.pytestArgs": ["tests"], 22 | "python.testing.unittestEnabled": false, 23 | "python.testing.pytestEnabled": true, 24 | "mypy-type-checker.args": ["--config-file=pyproject.toml"], 25 | "editor.formatOnPaste": false, 26 | "editor.formatOnSave": true, 27 | "editor.formatOnType": true, 28 | "files.trimTrailingWhitespace": true, 29 | "terminal.integrated.profiles.linux": { 30 | "zsh": { 31 | "path": "/usr/bin/zsh" 32 | } 33 | }, 34 | "terminal.integrated.defaultProfile.linux": "zsh" 35 | } 36 | } 37 | } 38 | } 39 | -------------------------------------------------------------------------------- /.flake8: -------------------------------------------------------------------------------- 1 | [flake8] 2 | select = B,B9,C,D,DAR,E,F,N,RST,S,W 3 | # Some rules are ignore on top of the standard ones. 
4 | # C901 (complexity) will be processed in a dedicated PR 5 | # DARxxx (documentation in docstrings) will be processed in a dedicated PR 6 | # Final target is: 7 | # ignore = E203,E501,RST201,RST203,RST301,W503 8 | ignore = E203,E501,RST201,RST203,RST301,W503,C901,DAR101,DAR201,N818 9 | max-line-length = 80 10 | max-complexity = 10 11 | docstring-convention = google 12 | per-file-ignores = 13 | tests/*:S101,S105 14 | tests/**/const_*.py:B950 15 | src/synology_dsm/const.py:B950 16 | -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | * text=auto eol=lf 2 | -------------------------------------------------------------------------------- /.github/CODEOWNERS: -------------------------------------------------------------------------------- 1 | * @mib1185 2 | 3 | .github/* @oncleben31 4 | .darglint @oncleben31 5 | .flake8 @oncleben31 6 | 7 | */surveillance_station/* @shenxn 8 | src/synology_dsm/api/core/share.py @Gestas 9 | src/synology_dsm/api/core/system.py @mib1185 10 | src/synology_dsm/api/core/upgrade.py @mib1185 11 | tests/api_data/dsm_6/core/const_6_core_share.py @Gestas 12 | tests/api_data/dsm_6/core/const_6_core_system.py @mib1185 13 | tests/api_data/dsm_6/core/const_6_core_upgrade.py @mib1185 14 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | - package-ecosystem: github-actions 4 | directory: "/" 5 | schedule: 6 | interval: monthly 7 | groups: 8 | gh-actions: 9 | patterns: ["*"] 10 | - package-ecosystem: pip 11 | directory: "/" 12 | schedule: 13 | interval: monthly 14 | groups: 15 | pip-dependencies: 16 | patterns: ["*"] 17 | -------------------------------------------------------------------------------- /.github/release-drafter.yml: -------------------------------------------------------------------------------- 1 | categories: 2 | - title: ":boom: Breaking Changes" 3 | label: "breaking" 4 | - title: ":rocket: Features" 5 | label: "enhancement" 6 | - title: ":fire: Removals and Deprecations" 7 | label: "removal" 8 | - title: ":beetle: Fixes" 9 | labels: 10 | - "bug" 11 | - "bugfix" 12 | - title: ":racehorse: Performance" 13 | label: "performance" 14 | - title: ":rotating_light: Testing" 15 | label: "testing" 16 | - title: ":construction_worker: Continuous Integration" 17 | label: "ci" 18 | - title: ":books: Documentation" 19 | label: "documentation" 20 | - title: ":hammer: Refactoring" 21 | label: "refactoring" 22 | - title: ":lipstick: Style" 23 | label: "style" 24 | - title: ":package: Dependencies" 25 | collapse-after: 1 26 | labels: 27 | - "dependencies" 28 | - "build" 29 | template: | 30 | ## Changes 31 | 32 | $CHANGES 33 | 34 | --- 35 | You like my work, want to support me? 
36 | Buy Me A Coffee 37 | -------------------------------------------------------------------------------- /.github/workflows/release.yml: -------------------------------------------------------------------------------- 1 | name: Release 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | - master 8 | 9 | jobs: 10 | release: 11 | name: Release 12 | runs-on: ubuntu-latest 13 | permissions: 14 | contents: write 15 | id-token: write 16 | pull-requests: read 17 | steps: 18 | - name: Check out the repository 19 | uses: actions/checkout@v4.2.2 20 | with: 21 | fetch-depth: 2 22 | 23 | - name: Set up Python 24 | uses: actions/setup-python@v5.6.0 25 | with: 26 | python-version: "3.12" 27 | 28 | - name: Install build dependencies 29 | run: | 30 | python -m pip install --upgrade pip 31 | pip install setuptools wheel build 32 | 33 | - name: Check if there is a parent commit 34 | id: check-parent-commit 35 | run: | 36 | echo "::set-output name=sha::$(git rev-parse --verify --quiet HEAD^)" 37 | 38 | - name: Detect and tag new version 39 | id: check-version 40 | if: steps.check-parent-commit.outputs.sha 41 | uses: salsify/action-detect-and-tag-new-version@v2.0.3 42 | with: 43 | version-command: | 44 | sed -n 's/version = \"\(.*\)\"/\1/p' pyproject.toml 45 | 46 | - name: Bump version for developmental release 47 | if: "! steps.check-version.outputs.tag" 48 | run: | 49 | sed "s/version = \"\(.*\)\"/version = \"\1\.dev\.`date +%Y%m%d%H%M%S`\"/" -i pyproject.toml 50 | 51 | - name: Build package 52 | run: | 53 | python -m build . --wheel 54 | 55 | - name: Publish package on PyPI 56 | if: steps.check-version.outputs.tag 57 | uses: pypa/gh-action-pypi-publish@v1.12.4 58 | with: 59 | user: __token__ 60 | password: ${{ secrets.PYPI_TOKEN }} 61 | 62 | - name: Publish package on TestPyPI 63 | if: "! steps.check-version.outputs.tag" 64 | uses: pypa/gh-action-pypi-publish@v1.12.4 65 | with: 66 | user: __token__ 67 | password: ${{ secrets.TEST_PYPI_TOKEN }} 68 | repository-url: https://test.pypi.org/legacy/ 69 | 70 | - name: Publish release notes 71 | uses: release-drafter/release-drafter@v6.1.0 72 | with: 73 | publish: ${{ steps.check-version.outputs.tag != '' }} 74 | tag: ${{ steps.check-version.outputs.tag }} 75 | env: 76 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 77 | 78 | analyze: 79 | name: Analyze 80 | if: ${{ !contains(github.ref, 'dependabot') }} 81 | runs-on: ubuntu-latest 82 | needs: release 83 | permissions: 84 | actions: read 85 | contents: read 86 | security-events: write 87 | 88 | strategy: 89 | fail-fast: false 90 | 91 | steps: 92 | - name: Checkout repository 93 | uses: actions/checkout@v4.2.2 94 | 95 | # Initializes the CodeQL tools for scanning. 96 | - name: Initialize CodeQL 97 | uses: github/codeql-action/init@v3 98 | with: 99 | languages: python 100 | setup-python-dependencies: true 101 | 102 | - name: Perform CodeQL Analysis 103 | uses: github/codeql-action/analyze@v3 104 | -------------------------------------------------------------------------------- /.github/workflows/tests.yml: -------------------------------------------------------------------------------- 1 | name: Tests 2 | 3 | on: 4 | push: 5 | branches: 6 | - master 7 | pull_request: ~ 8 | 9 | jobs: 10 | pre-commit: 11 | runs-on: ubuntu-latest 12 | steps: 13 | - name: Check out the repository 14 | uses: actions/checkout@v4.2.2 15 | 16 | - name: Set up Python 3.13 17 | uses: actions/setup-python@v5.6.0 18 | with: 19 | python-version: "3.13" 20 | 21 | - name: Install dependencies 22 | run: | 23 | pip install -r requirements_dev.txt -e . 
24 | 25 | - name: Compute pre-commit cache key 26 | id: pre-commit-cache 27 | shell: python 28 | run: | 29 | import hashlib 30 | import sys 31 | 32 | python = "py{}.{}".format(*sys.version_info[:2]) 33 | payload = sys.version.encode() + sys.executable.encode() 34 | digest = hashlib.sha256(payload).hexdigest() 35 | result = "${{ runner.os }}-{}-{}-pre-commit".format(python, digest[:8]) 36 | 37 | print("::set-output name=result::{}".format(result)) 38 | 39 | - name: Restore pre-commit cache 40 | uses: actions/cache@v4.2.3 41 | with: 42 | path: ~/.cache/pre-commit 43 | key: ${{ steps.pre-commit-cache.outputs.result }}-${{ hashFiles('.pre-commit-config.yaml') }} 44 | restore-keys: | 45 | ${{ steps.pre-commit-cache.outputs.result }}- 46 | 47 | - name: Run pre-commit 48 | run: | 49 | pre-commit run --all-files --show-diff-on-failure 50 | 51 | tests: 52 | name: Python ${{ matrix.python-version }} 53 | runs-on: ubuntu-latest 54 | strategy: 55 | fail-fast: false 56 | matrix: 57 | include: 58 | - python-version: "3.9" 59 | - python-version: "3.10" 60 | - python-version: "3.11" 61 | - python-version: "3.12" 62 | - python-version: "3.13" 63 | 64 | steps: 65 | - name: Check out the repository 66 | uses: actions/checkout@v4.2.2 67 | 68 | - name: Set up Python ${{ matrix.python-version }} 69 | uses: actions/setup-python@v5.6.0 70 | with: 71 | python-version: ${{ matrix.python-version }} 72 | 73 | - name: Install dependencies 74 | run: | 75 | pip install -r requirements_dev.txt -e . 76 | 77 | - name: Run pytest 78 | run: | 79 | coverage run --parallel -m pytest 80 | 81 | - name: Upload coverage data 82 | if: matrix.python-version == '3.10' 83 | uses: "actions/upload-artifact@v4.6.2" 84 | with: 85 | include-hidden-files: true 86 | name: coverage-data 87 | path: ".coverage.*" 88 | 89 | coverage: 90 | runs-on: ubuntu-latest 91 | needs: tests 92 | steps: 93 | - name: Check out the repository 94 | uses: actions/checkout@v4.2.2 95 | 96 | - name: Set up Python 3.13 97 | uses: actions/setup-python@v5.6.0 98 | with: 99 | python-version: "3.13" 100 | 101 | - name: Install dependencies 102 | run: | 103 | pip install coverage[toml] 104 | 105 | - name: Download coverage data 106 | uses: actions/download-artifact@v4.3.0 107 | with: 108 | name: coverage-data 109 | 110 | - name: Combine coverage data and display human readable report 111 | run: | 112 | coverage combine 113 | coverage report 114 | 115 | - name: Create coverage report 116 | run: | 117 | coverage xml 118 | 119 | - name: Upload coverage report 120 | uses: codecov/codecov-action@v5.4.3 121 | 122 | build: 123 | runs-on: ubuntu-latest 124 | needs: tests 125 | steps: 126 | - name: Check out the repository 127 | uses: actions/checkout@v4.2.2 128 | with: 129 | fetch-depth: 2 130 | 131 | - name: Set up Python 132 | uses: actions/setup-python@v5.6.0 133 | with: 134 | python-version: "3.13" 135 | 136 | - name: Install build dependencies 137 | run: | 138 | python -m pip install --upgrade pip 139 | pip install setuptools wheel build 140 | 141 | - name: Build package 142 | run: | 143 | python -m build . --wheel 144 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # From cookiecutter-hypermodern-python 2 | .mypy_cache/ 3 | /.coverage 4 | /.nox/ 5 | /.python-version 6 | /.pytype/ 7 | /dist/ 8 | /docs/_build/ 9 | /src/*.egg-info/ 10 | __pycache__/ 11 | 12 | # Following are kept for not anoying current developers. 
Could be remove in 13 | # a future package release. 14 | 15 | # Python 16 | *.py[cod] 17 | 18 | # C extensions 19 | *.so 20 | 21 | # Packages 22 | *.egg 23 | *.egg-info 24 | dist 25 | build 26 | eggs 27 | .eggs 28 | parts 29 | bin 30 | var 31 | sdist 32 | develop-eggs 33 | .installed.cfg 34 | lib 35 | lib64 36 | pip-wheel-metadata 37 | 38 | # Logs 39 | *.log 40 | pip-log.txt 41 | 42 | # Unit test / coverage reports 43 | .coverage 44 | .tox 45 | coverage.xml 46 | nosetests.xml 47 | htmlcov/ 48 | test-reports/ 49 | test-results.xml 50 | test-output.xml 51 | 52 | # Translations 53 | *.mo 54 | 55 | # Mac OS X 56 | .DS_Store 57 | .AppleDouble 58 | .LSOverride 59 | Icon 60 | 61 | # Windows Explorer 62 | desktop.ini 63 | 64 | # Visual Studio Code 65 | .vscode/* 66 | !.vscode/cSpell.json 67 | !.vscode/extensions.json 68 | !.vscode/settings.default.json 69 | !.vscode/tasks.json 70 | 71 | # IntelliJ IDEA 72 | .idea 73 | *.iml 74 | 75 | # Sublime text 76 | *.sublime-project 77 | *.sublime-workspace 78 | 79 | # Mr Developer 80 | .mr.developer.cfg 81 | .project 82 | .pydevproject 83 | 84 | 85 | # Virtual env 86 | .venv 87 | 88 | node_modules 89 | package-lock.json 90 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | repos: 2 | - repo: local 3 | hooks: 4 | - id: black 5 | name: black 6 | entry: black 7 | language: system 8 | types: [python] 9 | require_serial: true 10 | - id: check-added-large-files 11 | name: Check for added large files 12 | entry: check-added-large-files 13 | language: system 14 | - id: check-toml 15 | name: Check Toml 16 | entry: check-toml 17 | language: system 18 | types: [toml] 19 | - id: check-yaml 20 | name: Check Yaml 21 | entry: check-yaml 22 | language: system 23 | types: [yaml] 24 | - id: end-of-file-fixer 25 | name: Fix End of Files 26 | entry: end-of-file-fixer 27 | language: system 28 | types: [text] 29 | stages: [pre-commit, pre-push, manual] 30 | - id: flake8 31 | name: flake8 32 | entry: flake8 33 | language: system 34 | types: [python] 35 | require_serial: true 36 | - id: trailing-whitespace 37 | name: Trim Trailing Whitespace 38 | entry: trailing-whitespace-fixer 39 | language: system 40 | types: [text] 41 | stages: [pre-commit, pre-push, manual] 42 | - id: pylint 43 | name: pylint 44 | entry: pylint 45 | language: system 46 | types: [python] 47 | files: ^src/.+\.py$ 48 | args: ["-sn"] 49 | - id: mypy 50 | name: mypy 51 | entry: mypy 52 | language: system 53 | types: [python] 54 | files: ^src/.+\.py$ 55 | - repo: https://github.com/pre-commit/mirrors-prettier 56 | rev: v2.2.1 57 | hooks: 58 | - id: prettier 59 | - repo: https://github.com/PyCQA/isort 60 | rev: 5.12.0 61 | hooks: 62 | - id: isort 63 | -------------------------------------------------------------------------------- /.vscode/settings.default.json: -------------------------------------------------------------------------------- 1 | { 2 | "python.testing.pytestArgs": ["tests"], 3 | "python.testing.unittestEnabled": false, 4 | "python.testing.pytestEnabled": true, 5 | "mypy-type-checker.args": ["--config-file=pyproject.toml"] 6 | } 7 | -------------------------------------------------------------------------------- /.vscode/tasks.json: -------------------------------------------------------------------------------- 1 | { 2 | // See https://go.microsoft.com/fwlink/?LinkId=733558 3 | // for the documentation about the tasks.json format 4 | "version": "2.0.0", 5 | 
"tasks": [ 6 | { 7 | "label": "pytest", 8 | "type": "shell", 9 | "command": "pytest tests/", 10 | "group": { 11 | "kind": "test", 12 | "isDefault": true 13 | } 14 | }, 15 | { 16 | "label": "coverage", 17 | "type": "shell", 18 | "command": "coverage run --parallel -m pytest ; coverage combine ; coverage report", 19 | "group": { 20 | "kind": "test", 21 | "isDefault": false 22 | } 23 | }, 24 | { 25 | "label": "reinstall dependencies", 26 | "type": "shell", 27 | "command": "pip3 --disable-pip-version-check --no-cache-dir install -r requirements_dev.txt -e .", 28 | "group": { 29 | "kind": "build", 30 | "isDefault": true 31 | } 32 | } 33 | ] 34 | } 35 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributor Guide 2 | 3 | Thank you for your interest in improving this project. 4 | This project is open-source under the [MIT license](https://opensource.org/licenses/MIT) and 5 | welcomes contributions in the form of bug reports, feature requests, and pull requests. 6 | 7 | Here is a list of important resources for contributors: 8 | 9 | - [Source Code](https://github.com/mib1185/py-synologydsm-api) 10 | - [Documentation](https://github.com/mib1185/py-synologydsm-api#readme) 11 | - [Issue Tracker](https://github.com/mib1185/py-synologydsm-api/issues) 12 | 13 | ## How to report a bug 14 | 15 | Report bugs on the [Issue Tracker](https://github.com/mib1185/py-synologydsm-api/issues). 16 | 17 | When filing an issue, make sure to answer these questions: 18 | 19 | - Which operating system and Python version are you using? 20 | - Which version of this project are you using? 21 | - What did you do? 22 | - What did you expect to see? 23 | - What did you see instead? 24 | 25 | The best way to get your bug fixed is to provide a test case, 26 | and/or steps to reproduce the issue. 27 | 28 | ## How to request a feature 29 | 30 | Request features on the [Issue Tracker](https://github.com/mib1185/py-synologydsm-api/issues). 31 | 32 | ## How to set up your development environment 33 | 34 | This project use a [Visual Studio Code Dev Container](https://code.visualstudio.com/docs/devcontainers/containers). 35 | This approach will create a preconfigured development environment with all the tools you need. 36 | 37 | ## How to test the project 38 | 39 | Unit tests are located in the `tests` directory, and are written using the [pytest](https://pytest.readthedocs.io/) testing framework. 40 | Run the full test suite: 41 | 42 | ```bash 43 | $ pytest tests 44 | ``` 45 | 46 | There is also a Visual Studio Code task available to run the full test suite or you can use the testing feature from Visual Studio Code. 47 | 48 | ## How to submit changes 49 | 50 | Open a [pull request](https://github.com/mib1185/py-synologydsm-api/pulls) to submit changes to this project. 51 | 52 | Your pull request needs to meet the following guidelines for acceptance: 53 | 54 | - The test suite must pass without errors and warnings. 55 | - Include unit tests. This project maintains 100% code coverage. 56 | - If your changes add functionality, update the documentation accordingly. 57 | 58 | Feel free to submit early, though—we can always iterate on this. 59 | 60 | To run linting and code formatting checks before commiting your change, pre-commit as a Git hook is installed within the [Visual Studio Code Dev Container](https://code.visualstudio.com/docs/devcontainers/containers). 
61 | 62 | It is recommended to open an issue before starting work on anything. 63 | This will allow a chance to talk it over with the owners and validate your approach. 64 | -------------------------------------------------------------------------------- /LICENSE.txt: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | Copyright (c) 2020 mib1185 3 | 4 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: 5 | 6 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 7 | 8 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 9 | -------------------------------------------------------------------------------- /codecov.yml: -------------------------------------------------------------------------------- 1 | comment: false 2 | coverage: 3 | status: 4 | project: 5 | default: 6 | target: "75" 7 | patch: 8 | default: 9 | target: "100" 10 | -------------------------------------------------------------------------------- /mypy.ini: -------------------------------------------------------------------------------- 1 | [mypy] 2 | show_error_codes = true 3 | follow_imports = silent 4 | ignore_missing_imports = true 5 | strict_equality = true 6 | no_implicit_optional = true 7 | warn_incomplete_stub = true 8 | warn_redundant_casts = true 9 | warn_unused_configs = true 10 | warn_unused_ignores = true 11 | enable_error_code = ignore-without-code 12 | disable_error_code = annotation-unchecked 13 | strict_concatenate = false 14 | check_untyped_defs = true 15 | disallow_incomplete_defs = true 16 | disallow_subclassing_any = true 17 | disallow_untyped_calls = true 18 | disallow_untyped_decorators = true 19 | disallow_untyped_defs = true 20 | warn_return_any = true 21 | warn_unreachable = true 22 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ["setuptools>=61.0"] 3 | build-backend = "setuptools.build_meta" 4 | 5 | [project] 6 | name = "py-synologydsm-api" 7 | version = "2.7.3" 8 | description = "Python API for communication with Synology DSM" 9 | authors = [ 10 | {name="Michael Brussk (mib1185)"}, 11 | {name="Quentin POLLET (Quentame)"}, 12 | {name="FG van Zeelst (ProtoThis)"} 13 | ] 14 | license = {text = "MIT"} 15 | readme = "README.md" 16 | classifiers = [ 17 | "Development Status :: 5 - Production/Stable", 18 | "Intended Audience :: Developers", 19 | "License :: OSI Approved :: MIT License", 20 | "Operating System :: OS Independent", 21 | "Programming Language :: Python :: 3.9", 22 | "Programming Language :: Python :: 3.10", 23 | 
"Programming Language :: Python :: 3.11", 24 | "Programming Language :: Python :: 3.12", 25 | "Programming Language :: Python :: 3.13", 26 | "Topic :: Software Development :: Libraries", 27 | ] 28 | keywords=["synology-dsm", "synology"] 29 | requires-python = ">=3.9" 30 | dependencies = ["aiohttp", "aiofiles", "awesomeversion"] 31 | 32 | [project.urls] 33 | Changelog = "https://github.com/mib1185/py-synologydsm-api/releases" 34 | Repository = "https://github.com/mib1185/py-synologydsm-api" 35 | Documentation = "https://github.com/mib1185/py-synologydsm-api#readme" 36 | 37 | [project.scripts] 38 | synologydsm-api = "synology_dsm.__main__:main" 39 | 40 | [tool.setuptools.package-data] 41 | synology_dsm = ["py.typed", "*.py", "**/*.py"] 42 | 43 | [tool.coverage.paths] 44 | source = ["src", "*/site-packages"] 45 | 46 | [tool.coverage.run] 47 | branch = true 48 | source = ["synology_dsm"] 49 | 50 | [tool.coverage.report] 51 | show_missing = true 52 | fail_under = 75 53 | 54 | [tool.isort] 55 | profile = "black" 56 | src_paths = ["src", "tests"] 57 | 58 | [tool.mypy] 59 | exclude = ["tests/"] 60 | 61 | [tool.pylint.MAIN] 62 | ignore = ["tests"] 63 | 64 | [tool.pylint.BASIC] 65 | class-const-naming-style = "any" 66 | good-names = [ 67 | "id", 68 | ] 69 | 70 | [tool.pylint."MESSAGES CONTROL"] 71 | disable = [ 72 | "too-few-public-methods", 73 | "too-many-arguments", 74 | "too-many-branches", 75 | "too-many-instance-attributes", 76 | "too-many-locals", 77 | "too-many-public-methods", 78 | "too-many-positional-arguments", 79 | "too-many-statements", 80 | "too-many-return-statements", 81 | ] 82 | -------------------------------------------------------------------------------- /requirements_dev.txt: -------------------------------------------------------------------------------- 1 | aiohttp 2 | aiofiles 3 | types-aiofiles 4 | awesomeversion 5 | black==25.1.0 6 | coverage[toml] 7 | flake8-bandit==4.1.1 8 | flake8-bugbear==24.12.12 9 | flake8-docstrings==1.7.0 10 | flake8-rst-docstrings==0.3.1 11 | flake8==7.2.0 12 | mypy==1.16.0 13 | pep8-naming==0.15.1 14 | pre-commit-hooks==5.0.0 15 | pre-commit==4.2.0 16 | Pygments==2.19.1 17 | pylint==3.3.7 18 | pytest-asyncio==0.26.0 19 | pytest==8.3.5 20 | reorder-python-imports==3.15.0 21 | syrupy 22 | -------------------------------------------------------------------------------- /scripts/clean.sh: -------------------------------------------------------------------------------- 1 | ./scripts/common.sh 2 | 3 | # Clean 4 | rm -r .tox 5 | rm -r build 6 | rm -r dist 7 | rm -r python_synology.egg-info 8 | rm -r src/python_synology.egg-info 9 | -------------------------------------------------------------------------------- /scripts/common.sh: -------------------------------------------------------------------------------- 1 | # Be in right place 2 | if [ ! -f setup.py ]; then 3 | cd .. 
4 | fi 5 | -------------------------------------------------------------------------------- /scripts/publish.sh: -------------------------------------------------------------------------------- 1 | # Publish the library 2 | # https://pypi.org/project/synologydsm-api 3 | # Publish documentation here: https://packaging.python.org/tutorials/packaging-projects/ 4 | 5 | ./scripts/common.sh 6 | ./scripts/clean.sh 7 | 8 | # Install/update dependencies 9 | python3 -m pip install --user --upgrade setuptools wheel 10 | python3 -m pip install --user --upgrade twine 11 | 12 | # Build 13 | python3 setup.py sdist bdist_wheel 14 | 15 | # Push to PyPi 16 | python3 -m twine upload dist/* 17 | # python3 -m twine upload --repository testpypi dist/* 18 | 19 | # Enter credentials manually :P 20 | -------------------------------------------------------------------------------- /scripts/setup: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # Setups the repository. 3 | 4 | # Stop on errors 5 | set -e 6 | 7 | cd "$(dirname "$0")/.." 8 | 9 | # Add default vscode settings if not existing 10 | SETTINGS_FILE=./.vscode/settings.json 11 | SETTINGS_TEMPLATE_FILE=./.vscode/settings.default.json 12 | if [ ! -f "$SETTINGS_FILE" ]; then 13 | echo "Copy $SETTINGS_TEMPLATE_FILE to $SETTINGS_FILE." 14 | cp "$SETTINGS_TEMPLATE_FILE" "$SETTINGS_FILE" 15 | fi 16 | 17 | # install git pre-commit hook 18 | pre-commit install 19 | 20 | # install from source 21 | pip install -e . 22 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [metadata] 2 | url = https://github.com/mib1185/py-synologydsm-api#readme 3 | -------------------------------------------------------------------------------- /src/synology_dsm/__init__.py: -------------------------------------------------------------------------------- 1 | """The synologydsm-api library.""" 2 | 3 | from .synology_dsm import SynologyDSM 4 | 5 | __all__ = ["SynologyDSM"] 6 | -------------------------------------------------------------------------------- /src/synology_dsm/api/__init__.py: -------------------------------------------------------------------------------- 1 | """Synology API models.""" 2 | 3 | from __future__ import annotations 4 | 5 | from typing import TYPE_CHECKING, Generic, TypeVar 6 | 7 | if TYPE_CHECKING: 8 | from synology_dsm import SynologyDSM 9 | 10 | 11 | _DataT = TypeVar("_DataT") 12 | 13 | 14 | class SynoBaseApi(Generic[_DataT]): 15 | """Base api class.""" 16 | 17 | def __init__(self, dsm: "SynologyDSM") -> None: 18 | """Constructor method.""" 19 | self._dsm = dsm 20 | self._data: _DataT = {} # type: ignore[assignment] 21 | -------------------------------------------------------------------------------- /src/synology_dsm/api/core/__init__.py: -------------------------------------------------------------------------------- 1 | """Synology Core API models.""" 2 | -------------------------------------------------------------------------------- /src/synology_dsm/api/core/external_usb.py: -------------------------------------------------------------------------------- 1 | """DSM external USB device data.""" 2 | 3 | from __future__ import annotations 4 | 5 | from typing import TypedDict, cast 6 | 7 | from synology_dsm.api import SynoBaseApi 8 | from synology_dsm.helpers import SynoFormatHelper 9 | 10 | ExternalUsbDevicePartitionDataType = TypedDict( 11 | 
"ExternalUsbDevicePartitionDataType", 12 | { 13 | "dev_fstype": str, 14 | "filesystem": str, 15 | "name_id": str, 16 | "partition_title": str, 17 | "share_name": str, 18 | "status": str, 19 | "total_size_mb": "int | str", 20 | "used_size_mb": "int | None", 21 | }, 22 | total=False, 23 | ) 24 | ExternalUsbDeviceDataType = TypedDict( 25 | "ExternalUsbDeviceDataType", 26 | { 27 | "dev_id": str, 28 | "dev_title": str, 29 | "dev_type": str, 30 | "formatable": bool, 31 | "partitions": "dict[str, SynoUSBStoragePartition]", 32 | "producer": str, 33 | "product": "str | None", 34 | "progress": str, 35 | "status": str, 36 | "total_size_mb": int, 37 | }, 38 | total=False, 39 | ) 40 | 41 | 42 | class SynoCoreExternalUSB(SynoBaseApi["dict[str, SynoCoreExternalUSBDevice]"]): 43 | """Class for external USB storage devices.""" 44 | 45 | API_KEY = "SYNO.Core.ExternalDevice.Storage.USB" 46 | REQUEST_DATA = {"additional": '["all"]'} 47 | 48 | async def update(self) -> None: 49 | """Updates external USB storage device data.""" 50 | self._data: dict[str, SynoCoreExternalUSBDevice] = {} 51 | raw_data = await self._dsm.post(self.API_KEY, "list", data=self.REQUEST_DATA) 52 | if isinstance(raw_data, dict) and (data := raw_data.get("data")) is not None: 53 | for device in data["devices"]: 54 | self._data[device["dev_id"]] = SynoCoreExternalUSBDevice(device) 55 | 56 | # Root 57 | @property 58 | def get_devices(self) -> dict[str, SynoCoreExternalUSBDevice]: 59 | """Gets all external USB storage devices.""" 60 | return self._data 61 | 62 | def get_device(self, device_id: str) -> SynoCoreExternalUSBDevice | None: 63 | """Returns a specific external USB storage device.""" 64 | return self._data.get(device_id) 65 | 66 | 67 | class SynoCoreExternalUSBDevice: 68 | """A representation of an external USB device.""" 69 | 70 | def __init__(self, data: dict): 71 | """Initialize a external USB device.""" 72 | partitions: dict[str, SynoUSBStoragePartition] = {} 73 | for partition in data["partitions"]: 74 | partitions[partition["name_id"]] = SynoUSBStoragePartition(partition) 75 | self._data = cast(ExternalUsbDeviceDataType, {**data, "partitions": partitions}) 76 | 77 | @property 78 | def device_id(self) -> str: 79 | """Return id of the device.""" 80 | return self._data["dev_id"] 81 | 82 | @property 83 | def device_name(self) -> str: 84 | """The title of the external USB storage device.""" 85 | return self._data["dev_title"] 86 | 87 | @property 88 | def device_type(self) -> str: 89 | """The type of the external USB storage device.""" 90 | return self._data["dev_type"] 91 | 92 | def device_size_total(self, human_readable: bool = False) -> str | int: 93 | """Total size of the external USB storage device.""" 94 | return_data = SynoFormatHelper.megabytes_to_bytes( 95 | int(self._data["total_size_mb"]) 96 | ) 97 | if human_readable: 98 | return SynoFormatHelper.bytes_to_readable(return_data) 99 | return return_data 100 | 101 | @property 102 | def device_status(self) -> str: 103 | """The status of the external USB storage device.""" 104 | return self._data["status"] 105 | 106 | @property 107 | def device_formatable(self) -> bool: 108 | """Whether the external USB storage device can be formatted.""" 109 | return self._data["formatable"] 110 | 111 | @property 112 | def device_progress(self) -> str: 113 | """The progress the external USB storage device.""" 114 | return self._data["progress"] 115 | 116 | @property 117 | def device_product_name(self) -> str | None: 118 | """The product name of the external USB storage device.""" 119 | return 
self._data.get("product") 120 | 121 | @property 122 | def device_manufacturer(self) -> str: 123 | """The producer name of the external USB storage device.""" 124 | return self._data["producer"] 125 | 126 | # Partition 127 | @property 128 | def device_partitions(self) -> dict[str, SynoUSBStoragePartition]: 129 | """Returns all partitions of the external USB storage device.""" 130 | return self._data["partitions"] 131 | 132 | def get_device_partition(self, partition_id: str) -> SynoUSBStoragePartition | None: 133 | """Returns a partition of the external USB storage device.""" 134 | return self._data["partitions"].get(partition_id) 135 | 136 | def partitions_all_size_total( 137 | self, human_readable: bool = False 138 | ) -> str | int | None: 139 | """Total size of all parititions of the external USB storage device.""" 140 | partitions = self._data["partitions"] 141 | if not partitions: 142 | return None 143 | 144 | size_total = 0 145 | for partition in partitions.values(): 146 | partition_size = partition.partition_size_total() 147 | # Partitions may be reported without a size 148 | if isinstance(partition_size, int): 149 | size_total += partition_size 150 | 151 | if human_readable: 152 | return SynoFormatHelper.bytes_to_readable(size_total) 153 | return size_total 154 | 155 | def partitions_all_size_used( 156 | self, human_readable: bool = False 157 | ) -> str | int | None: 158 | """Total size used of all partitions of the external USB storage device.""" 159 | partitions = self._data["partitions"] 160 | if not partitions: 161 | return None 162 | 163 | size_used = 0 164 | for partition in partitions.values(): 165 | partition_used = partition.partition_size_used() 166 | # Partitions may be reported without a size 167 | if isinstance(partition_used, int): 168 | size_used += partition_used 169 | 170 | if human_readable: 171 | return SynoFormatHelper.bytes_to_readable(size_used) 172 | return size_used 173 | 174 | @property 175 | def partitions_all_percentage_used(self) -> float | None: 176 | """Used size in percentage for all partitions of the USB storage device.""" 177 | size_total = self.partitions_all_size_total() 178 | size_used = self.partitions_all_size_used() 179 | 180 | if ( 181 | isinstance(size_used, int) 182 | and size_used >= 0 183 | and isinstance(size_total, int) 184 | and size_total > 0 185 | ): 186 | return round((float(size_used) / float(size_total)) * 100.0, 1) 187 | return None 188 | 189 | 190 | class SynoUSBStoragePartition: 191 | """A representation of a parition of an external USB storage device.""" 192 | 193 | def __init__(self, data: ExternalUsbDevicePartitionDataType): 194 | """Initialize a partition object of an external USB storage device.""" 195 | self._data = data 196 | 197 | @property 198 | def fstype(self) -> str: 199 | """Return the dev_fstype for the partition.""" 200 | return self._data["dev_fstype"] 201 | 202 | @property 203 | def filesystem(self) -> str: 204 | """Return the filesystem for the partition.""" 205 | return self._data["filesystem"] 206 | 207 | @property 208 | def name_id(self) -> str: 209 | """Return the name_id for the partition.""" 210 | return self._data["name_id"] 211 | 212 | @property 213 | def partition_title(self) -> str: 214 | """Return the title for the partition.""" 215 | return self._data["partition_title"] 216 | 217 | @property 218 | def share_name(self) -> str: 219 | """Return the share name for the partition.""" 220 | return self._data["share_name"] 221 | 222 | @property 223 | def status(self) -> str: 224 | """Return the status for 
the partition.""" 225 | return self._data["status"] 226 | 227 | def partition_size_total(self, human_readable: bool = False) -> int | str | None: 228 | """Total size of the partition.""" 229 | # API returns property as empty string if a partition has no size 230 | size_total = self._data["total_size_mb"] 231 | if not isinstance(size_total, int): 232 | return None 233 | size_total = SynoFormatHelper.megabytes_to_bytes(size_total) 234 | if human_readable: 235 | return SynoFormatHelper.bytes_to_readable(size_total) 236 | return size_total 237 | 238 | def partition_size_used(self, human_readable: bool = False) -> int | str | None: 239 | """Used size of the partition.""" 240 | # API does not return property if a partition has no size 241 | size_used = self._data.get("used_size_mb") 242 | if not isinstance(size_used, int): 243 | return None 244 | size_used = SynoFormatHelper.megabytes_to_bytes(size_used) 245 | if human_readable: 246 | return SynoFormatHelper.bytes_to_readable(size_used) 247 | return size_used 248 | 249 | @property 250 | def partition_percentage_used(self) -> float | None: 251 | """Used size in percentage of the partition.""" 252 | size_total = self.partition_size_total() 253 | size_used = self.partition_size_used() 254 | if ( 255 | isinstance(size_used, int) 256 | and size_used >= 0 257 | and isinstance(size_total, int) 258 | and size_total > 0 259 | ): 260 | return round((float(size_used) / float(size_total)) * 100.0, 1) 261 | return None 262 | 263 | @property 264 | def is_mounted(self) -> bool: 265 | """Is the partition formatted.""" 266 | return self._data["share_name"] != "" 267 | 268 | @property 269 | def is_supported(self) -> bool: 270 | """Is the partition formatted.""" 271 | return self._data["filesystem"] != "" 272 | -------------------------------------------------------------------------------- /src/synology_dsm/api/core/security.py: -------------------------------------------------------------------------------- 1 | """DSM Security data.""" 2 | 3 | from __future__ import annotations 4 | 5 | from typing import TypedDict 6 | 7 | from synology_dsm.api import SynoBaseApi 8 | 9 | SecurityCategory = TypedDict( 10 | "SecurityCategory", 11 | { 12 | "category": str, 13 | "fail": "dict[str, int]", 14 | "failSeverity": str, 15 | "progress": int, 16 | "runningItem": str, 17 | "total": int, 18 | "waitNum": int, 19 | }, 20 | ) 21 | 22 | 23 | class SecurityDataType(TypedDict): 24 | """Data type.""" 25 | 26 | items: dict[str, SecurityCategory] 27 | lastScanTime: str # noqa: N815 28 | startTime: str # noqa: N815 29 | success: bool 30 | sysProgress: int # noqa: N815 31 | sysStatus: str # noqa: N815 32 | 33 | 34 | class SynoCoreSecurity(SynoBaseApi[SecurityDataType]): 35 | """Class containing Security data.""" 36 | 37 | API_KEY = "SYNO.Core.SecurityScan.Status" 38 | 39 | async def update(self) -> None: 40 | """Updates security data.""" 41 | raw_data = await self._dsm.get(self.API_KEY, "system_get") 42 | if isinstance(raw_data, dict) and (data := raw_data.get("data")) is not None: 43 | self._data = data 44 | 45 | @property 46 | def checks(self) -> dict[str, SecurityCategory]: 47 | """Gets the checklist by check category.""" 48 | return self._data["items"] 49 | 50 | @property 51 | def last_scan_time(self) -> str: 52 | """Gets the last scan time.""" 53 | return self._data["lastScanTime"] 54 | 55 | @property 56 | def start_time(self) -> str: 57 | """Gets the start time (if in progress).""" 58 | return self._data["startTime"] 59 | 60 | @property 61 | def success(self) -> bool: 62 | """Gets 
the last scan success.""" 63 | return self._data["success"] 64 | 65 | @property 66 | def progress(self) -> int: 67 | """Gets the scan progress. 68 | 69 | Returns: 100 if finished 70 | """ 71 | return self._data["sysProgress"] 72 | 73 | @property 74 | def status(self) -> str: 75 | """Gets the last scan status. 76 | 77 | Possible values: safe, danger, info, outOfDate, risk, warning. 78 | """ 79 | return self._data["sysStatus"] 80 | 81 | @property 82 | def status_by_check(self) -> dict[str, str]: 83 | """Gets the last scan status per check.""" 84 | status = {} 85 | for category in self.checks: 86 | status[category] = self.checks[category]["failSeverity"] 87 | return status 88 | -------------------------------------------------------------------------------- /src/synology_dsm/api/core/share.py: -------------------------------------------------------------------------------- 1 | """Shared Folders data.""" 2 | 3 | from __future__ import annotations 4 | 5 | from typing import TypedDict 6 | 7 | from synology_dsm.api import SynoBaseApi 8 | from synology_dsm.helpers import SynoFormatHelper 9 | 10 | Share = TypedDict( 11 | "Share", 12 | { 13 | "uuid": str, 14 | "name": str, 15 | "vol_path": str, 16 | "enable_recycle_bin": bool, 17 | "share_quota_used": float, 18 | }, 19 | total=False, 20 | ) 21 | 22 | 23 | class ShareDataType(TypedDict): 24 | """Data type.""" 25 | 26 | shares: list[Share] 27 | 28 | 29 | class SynoCoreShare(SynoBaseApi[ShareDataType]): 30 | """Class containing Share data.""" 31 | 32 | API_KEY = "SYNO.Core.Share" 33 | # Syno supports two methods to retrieve resource details, GET and POST. 34 | # GET returns a limited set of keys. With POST the same keys as GET 35 | # are returned plus any keys listed in the "additional" parameter. 36 | # NOTE: The value of the additional key must be a string. 
37 | REQUEST_DATA = { 38 | "additional": '["hidden","encryption","is_aclmode","unite_permission",' 39 | '"is_support_acl","is_sync_share","is_force_readonly","force_readonly_reason",' 40 | '"recyclebin","is_share_moving","is_cluster_share","is_exfat_share",' 41 | '"is_cold_storage_share","support_snapshot","share_quota",' 42 | '"enable_share_compress","enable_share_cow","include_cold_storage_share",' 43 | '"is_cold_storage_share"]', 44 | "shareType": "all", 45 | } 46 | 47 | async def update(self) -> None: 48 | """Updates share data.""" 49 | raw_data = await self._dsm.post(self.API_KEY, "list", data=self.REQUEST_DATA) 50 | if isinstance(raw_data, dict) and (data := raw_data.get("data")) is not None: 51 | self._data = data 52 | 53 | @property 54 | def shares(self) -> list[Share]: 55 | """Gets all shares.""" 56 | return self._data["shares"] 57 | 58 | @property 59 | def shares_uuids(self) -> list[str]: 60 | """Return (internal) share ids.""" 61 | shares = [] 62 | for share in self.shares: 63 | shares.append(share["uuid"]) 64 | return shares 65 | 66 | def get_share(self, share_uuid: str) -> Share: 67 | """Returns a specific share by uuid..""" 68 | for share in self.shares: 69 | if share["uuid"] == share_uuid: 70 | return share 71 | return {} 72 | 73 | def share_name(self, share_uuid: str) -> str: 74 | """Return the name of this share.""" 75 | return self.get_share(share_uuid)["name"] 76 | 77 | def share_path(self, share_uuid: str) -> str: 78 | """Return the volume path of this share.""" 79 | return self.get_share(share_uuid)["vol_path"] 80 | 81 | def share_recycle_bin(self, share_uuid: str) -> bool: 82 | """Is the recycle bin enabled for this share?""" 83 | return self.get_share(share_uuid)["enable_recycle_bin"] 84 | 85 | def share_size(self, share_uuid: str, human_readable: bool = False) -> int | str: 86 | """Total size of share.""" 87 | share_size_mb = self.get_share(share_uuid)["share_quota_used"] 88 | # Share size is returned in MB so we convert it. 
89 | share_size_bytes = SynoFormatHelper.megabytes_to_bytes(share_size_mb) 90 | if human_readable: 91 | return SynoFormatHelper.bytes_to_readable(share_size_bytes) 92 | return share_size_bytes 93 | -------------------------------------------------------------------------------- /src/synology_dsm/api/core/system.py: -------------------------------------------------------------------------------- 1 | """DSM System data and actions.""" 2 | 3 | from __future__ import annotations 4 | 5 | from typing import Any, TypedDict 6 | 7 | from synology_dsm.api import SynoBaseApi 8 | 9 | 10 | class SystemDataType(TypedDict): 11 | """Data type.""" 12 | 13 | cpu_clock_speed: int 14 | cpu_cores: str 15 | cpu_family: str 16 | cpu_series: str 17 | enabled_ntp: bool 18 | ntp_server: str 19 | firmware_ver: str 20 | model: str 21 | ram_size: int 22 | serial: str 23 | sys_temp: int 24 | time: str 25 | time_zone: str 26 | time_zone_desc: str 27 | up_time: str 28 | usb_dev: list[dict[str, Any]] 29 | 30 | 31 | class SynoCoreSystem(SynoBaseApi[SystemDataType]): 32 | """Class containing System data and actions.""" 33 | 34 | API_KEY = "SYNO.Core.System" 35 | 36 | async def update(self) -> None: 37 | """Updates System data.""" 38 | raw_data = await self._dsm.get(self.API_KEY, "info") 39 | if isinstance(raw_data, dict) and (data := raw_data.get("data")) is not None: 40 | self._data = data 41 | 42 | # 43 | # get information 44 | # 45 | @property 46 | def cpu_clock_speed(self) -> int: 47 | """Gets System CPU clock speed.""" 48 | return self._data["cpu_clock_speed"] 49 | 50 | @property 51 | def cpu_cores(self) -> str: 52 | """Gets System CPU cores.""" 53 | return self._data["cpu_cores"] 54 | 55 | @property 56 | def cpu_family(self) -> str: 57 | """Gets System CPU family.""" 58 | return self._data["cpu_family"] 59 | 60 | @property 61 | def cpu_series(self) -> str: 62 | """Gets System CPU series.""" 63 | return self._data["cpu_series"] 64 | 65 | @property 66 | def enabled_ntp(self) -> bool: 67 | """Gets System NTP state.""" 68 | return self._data["enabled_ntp"] 69 | 70 | @property 71 | def ntp_server(self) -> str: 72 | """Gets System NTP server.""" 73 | return self._data["ntp_server"] 74 | 75 | @property 76 | def firmware_ver(self) -> str: 77 | """Gets System firmware version.""" 78 | return self._data["firmware_ver"] 79 | 80 | @property 81 | def model(self) -> str: 82 | """Gets System model.""" 83 | return self._data["model"] 84 | 85 | @property 86 | def ram_size(self) -> int: 87 | """Gets System ram size.""" 88 | return self._data["ram_size"] 89 | 90 | @property 91 | def serial(self) -> str: 92 | """Gets System serial number.""" 93 | return self._data["serial"] 94 | 95 | @property 96 | def sys_temp(self) -> int: 97 | """Gets System temperature.""" 98 | return self._data["sys_temp"] 99 | 100 | @property 101 | def time(self) -> str: 102 | """Gets System time.""" 103 | return self._data["time"] 104 | 105 | @property 106 | def time_zone(self) -> str: 107 | """Gets System time zone.""" 108 | return self._data["time_zone"] 109 | 110 | @property 111 | def time_zone_desc(self) -> str: 112 | """Gets System time zone description.""" 113 | return self._data["time_zone_desc"] 114 | 115 | @property 116 | def up_time(self) -> str: 117 | """Gets System uptime.""" 118 | return self._data["up_time"] 119 | 120 | @property 121 | def usb_dev(self) -> list: 122 | """Gets System connected usb devices.""" 123 | return self._data["usb_dev"] 124 | 125 | # 126 | # do system actions 127 | # 128 | async def shutdown(self) -> None: 129 | """Shutdown 
NAS.""" 130 | await self._dsm.get( 131 | self.API_KEY, 132 | "shutdown", 133 | {"local": "true"}, 134 | max_version=1, # shutdown method is only available on api version 1 135 | ) 136 | 137 | async def reboot(self) -> None: 138 | """Reboot NAS.""" 139 | await self._dsm.get( 140 | self.API_KEY, 141 | "reboot", 142 | max_version=1, # reboot method is only available on api version 1 143 | ) 144 | -------------------------------------------------------------------------------- /src/synology_dsm/api/core/upgrade.py: -------------------------------------------------------------------------------- 1 | """DSM Upgrade data and actions.""" 2 | 3 | from __future__ import annotations 4 | 5 | from typing import TypedDict 6 | 7 | from synology_dsm.api import SynoBaseApi 8 | 9 | 10 | class UpgradeDataType(TypedDict, total=False): 11 | """Data type.""" 12 | 13 | available: bool 14 | version: str 15 | version_details: dict 16 | reboot: str 17 | restart: str 18 | 19 | 20 | class SynoCoreUpgrade(SynoBaseApi[UpgradeDataType]): 21 | """Class containing upgrade data and actions.""" 22 | 23 | API_KEY = "SYNO.Core.Upgrade" 24 | API_SERVER_KEY = API_KEY + ".Server" 25 | 26 | async def update(self) -> None: 27 | """Updates Upgrade data.""" 28 | raw_data = await self._dsm.get(self.API_SERVER_KEY, "check") 29 | if isinstance(raw_data, dict) and (data := raw_data.get("data")) is not None: 30 | self._data = data.get("update", data) 31 | 32 | @property 33 | def update_available(self) -> bool: 34 | """Gets available update info.""" 35 | return self._data["available"] 36 | 37 | @property 38 | def available_version(self) -> str | None: 39 | """Gets available verion info.""" 40 | return self._data.get("version") 41 | 42 | @property 43 | def available_version_details(self) -> dict | None: 44 | """Gets details about available verion.""" 45 | return self._data.get("version_details") 46 | 47 | @property 48 | def reboot_needed(self) -> str | None: 49 | """Gets info if reboot is needed.""" 50 | return self._data.get("reboot") 51 | 52 | @property 53 | def service_restarts(self) -> str | None: 54 | """Gets info if services are restarted.""" 55 | return self._data.get("restart") 56 | -------------------------------------------------------------------------------- /src/synology_dsm/api/core/utilization.py: -------------------------------------------------------------------------------- 1 | """DSM Utilization data.""" 2 | 3 | from __future__ import annotations 4 | 5 | from typing import TypedDict 6 | 7 | from synology_dsm.api import SynoBaseApi 8 | from synology_dsm.helpers import SynoFormatHelper 9 | 10 | CpuUtilization = TypedDict( 11 | "CpuUtilization", 12 | { 13 | "15min_load": int, 14 | "1min_load": int, 15 | "5min_load": int, 16 | "device": str, 17 | "other_load": int, 18 | "system_load": int, 19 | "user_load": int, 20 | }, 21 | ) 22 | 23 | MemoryUtilization = TypedDict( 24 | "MemoryUtilization", 25 | { 26 | "avail_real": int, 27 | "avail_swap": int, 28 | "buffer": int, 29 | "cached": int, 30 | "device": str, 31 | "memory_size": int, 32 | "real_usage": int, 33 | "si_disk": int, 34 | "so_disk": int, 35 | "swap_usage": int, 36 | "total_real": int, 37 | "total_swap": int, 38 | }, 39 | ) 40 | 41 | NetworkUtilization = TypedDict( 42 | "NetworkUtilization", 43 | { 44 | "device": str, 45 | "rx": int, 46 | "tx": int, 47 | }, 48 | ) 49 | 50 | 51 | class UtilizationDataType(TypedDict, total=False): 52 | """Data type.""" 53 | 54 | cpu: CpuUtilization 55 | memory: MemoryUtilization 56 | network: list[NetworkUtilization] 57 | 58 | 59 | class 
SynoCoreUtilization(SynoBaseApi[UtilizationDataType]): 60 | """Class containing Utilization data.""" 61 | 62 | API_KEY = "SYNO.Core.System.Utilization" 63 | 64 | async def update(self) -> None: 65 | """Updates utilization data.""" 66 | raw_data = await self._dsm.get(self.API_KEY, "get") 67 | if isinstance(raw_data, dict) and (data := raw_data.get("data")) is not None: 68 | self._data = data 69 | 70 | @property 71 | def cpu(self) -> CpuUtilization: 72 | """Gets CPU utilization.""" 73 | return self._data["cpu"] 74 | 75 | @property 76 | def cpu_other_load(self) -> int: 77 | """Other percentage of the total CPU load.""" 78 | return self.cpu["other_load"] 79 | 80 | @property 81 | def cpu_user_load(self) -> int: 82 | """User percentage of the total CPU load.""" 83 | return self.cpu["user_load"] 84 | 85 | @property 86 | def cpu_system_load(self) -> int: 87 | """System percentage of the total CPU load.""" 88 | return self.cpu["system_load"] 89 | 90 | @property 91 | def cpu_total_load(self) -> int: 92 | """Total CPU load for Synology DSM.""" 93 | system_load = self.cpu_system_load 94 | user_load = self.cpu_user_load 95 | other_load = self.cpu_other_load 96 | 97 | return system_load + user_load + other_load 98 | 99 | @property 100 | def cpu_1min_load(self) -> int: 101 | """Average CPU load past minute.""" 102 | return self.cpu["1min_load"] 103 | 104 | @property 105 | def cpu_5min_load(self) -> int: 106 | """Average CPU load past 5 minutes.""" 107 | return self.cpu["5min_load"] 108 | 109 | @property 110 | def cpu_15min_load(self) -> int: 111 | """Average CPU load past 15 minutes.""" 112 | return self.cpu["15min_load"] 113 | 114 | @property 115 | def memory(self) -> MemoryUtilization: 116 | """Gets memory utilization.""" 117 | return self._data["memory"] 118 | 119 | @property 120 | def memory_real_usage(self) -> int: 121 | """Real Memory usage from Synology DSM.""" 122 | return self.memory["real_usage"] 123 | 124 | def memory_size(self, human_readable: bool = False) -> int | str: 125 | """Total memory size of Synology DSM.""" 126 | return_data = self.memory["memory_size"] * 1024 127 | if human_readable: 128 | return SynoFormatHelper.bytes_to_readable(return_data) 129 | return return_data 130 | 131 | def memory_available_swap(self, human_readable: bool = False) -> int | str: 132 | """Total available memory swap.""" 133 | # Memory is actually returned in KB's so multiply before converting 134 | return_data = self.memory["avail_swap"] * 1024 135 | if human_readable: 136 | return SynoFormatHelper.bytes_to_readable(return_data) 137 | return return_data 138 | 139 | def memory_cached(self, human_readable: bool = False) -> int | str: 140 | """Total cached memory.""" 141 | # Memory is actually returned in KB's so multiply before converting 142 | return_data = self.memory["cached"] * 1024 143 | if human_readable: 144 | return SynoFormatHelper.bytes_to_readable(return_data) 145 | return return_data 146 | 147 | def memory_available_real(self, human_readable: bool = False) -> int | str: 148 | """Real available memory.""" 149 | # Memory is actually returned in KB's so multiply before converting 150 | return_data = self.memory["avail_real"] * 1024 151 | if human_readable: 152 | return SynoFormatHelper.bytes_to_readable(return_data) 153 | return return_data 154 | 155 | def memory_total_real(self, human_readable: bool = False) -> int | str: 156 | """Total available real memory.""" 157 | # Memory is actually returned in KB's so multiply before converting 158 | return_data = self.memory["total_real"] * 1024 159 | if 
human_readable: 160 | return SynoFormatHelper.bytes_to_readable(return_data) 161 | return return_data 162 | 163 | def memory_total_swap(self, human_readable: bool = False) -> int | str: 164 | """Total swap memory.""" 165 | # Memory is actually returned in KB's so multiply before converting 166 | return_data = self.memory["total_swap"] * 1024 167 | if human_readable: 168 | return SynoFormatHelper.bytes_to_readable(return_data) 169 | return return_data 170 | 171 | @property 172 | def network(self) -> list[NetworkUtilization]: 173 | """Gets network utilization.""" 174 | return self._data["network"] 175 | 176 | def _get_network(self, network_id: str) -> NetworkUtilization | None: 177 | """Function to get specific network (eth0, total, etc).""" 178 | for network in self.network: 179 | if network["device"] == network_id: 180 | return network 181 | return None 182 | 183 | def network_up(self, human_readable: bool = False) -> int | str | None: 184 | """Total upload speed being used.""" 185 | if (network := self._get_network("total")) is not None: 186 | return_data = network["tx"] 187 | if human_readable: 188 | return SynoFormatHelper.bytes_to_readable(return_data) 189 | return return_data 190 | return None 191 | 192 | def network_down(self, human_readable: bool = False) -> int | str | None: 193 | """Total download speed being used.""" 194 | if (network := self._get_network("total")) is not None: 195 | return_data = network["rx"] 196 | if human_readable: 197 | return SynoFormatHelper.bytes_to_readable(return_data) 198 | return return_data 199 | return None 200 | -------------------------------------------------------------------------------- /src/synology_dsm/api/download_station/__init__.py: -------------------------------------------------------------------------------- 1 | """Synology DownloadStation API wrapper.""" 2 | 3 | from __future__ import annotations 4 | 5 | from synology_dsm.api import SynoBaseApi 6 | 7 | from .task import SynoDownloadTask 8 | 9 | 10 | class SynoDownloadStation(SynoBaseApi["dict[str, SynoDownloadTask]"]): 11 | """An implementation of a Synology DownloadStation.""" 12 | 13 | API_KEY = "SYNO.DownloadStation.*" 14 | INFO_API_KEY = "SYNO.DownloadStation.Info" 15 | STAT_API_KEY = "SYNO.DownloadStation.Statistic" 16 | TASK_API_KEY = "SYNO.DownloadStation.Task" 17 | REQUEST_DATA = { 18 | "additional": "detail,file" 19 | } # Can contain: detail, transfer, file, tracker, peer 20 | 21 | async def update(self) -> None: 22 | """Update tasks from API.""" 23 | self._data = {} 24 | raw_data = await self._dsm.get(self.TASK_API_KEY, "List", self.REQUEST_DATA) 25 | if not isinstance(raw_data, dict) or (data := raw_data.get("data")) is None: 26 | return 27 | 28 | for task_data in data["tasks"]: 29 | if task_data["id"] in self._data: 30 | self._data[task_data["id"]].update(task_data) 31 | else: 32 | self._data[task_data["id"]] = SynoDownloadTask(task_data) 33 | 34 | # Global 35 | async def get_info(self) -> dict | None: 36 | """Return general informations about the Download Station instance.""" 37 | raw_data = await self._dsm.get(self.INFO_API_KEY, "GetInfo") 38 | if isinstance(raw_data, dict): 39 | return raw_data 40 | return None 41 | 42 | async def get_config(self) -> dict | None: 43 | """Return configuration about the Download Station instance.""" 44 | raw_data = await self._dsm.get(self.INFO_API_KEY, "GetConfig") 45 | if isinstance(raw_data, dict): 46 | return raw_data 47 | return None 48 | 49 | async def get_stat(self) -> dict | None: 50 | """Return statistic about the Download 
Station instance.""" 51 | raw_data = await self._dsm.get(self.STAT_API_KEY, "GetInfo") 52 | if isinstance(raw_data, dict): 53 | return raw_data 54 | return None 55 | 56 | # Downloads 57 | def get_all_tasks(self) -> list[SynoDownloadTask]: 58 | """Return a list of tasks.""" 59 | return list(self._data.values()) 60 | 61 | def get_task(self, task_id: str) -> SynoDownloadTask | None: 62 | """Return task matching task_id.""" 63 | return self._data.get(task_id) 64 | 65 | async def create( 66 | self, 67 | uri: str | list[str], 68 | unzip_password: str | None = None, 69 | destination: str | None = None, 70 | ) -> dict | None: 71 | """Create a new task (uri accepts HTTP/FTP/magnet/ED2K links).""" 72 | res = await self._dsm.post( 73 | self.TASK_API_KEY, 74 | "Create", 75 | { 76 | "uri": ",".join(uri) if isinstance(uri, list) else uri, 77 | "unzip_password": unzip_password, 78 | "destination": destination, 79 | }, 80 | ) 81 | await self.update() 82 | if isinstance(res, dict): 83 | return res 84 | return None 85 | 86 | async def pause(self, task_id: str | list[str]) -> dict | None: 87 | """Pause a download task.""" 88 | res = await self._dsm.get( 89 | self.TASK_API_KEY, 90 | "Pause", 91 | {"id": ",".join(task_id) if isinstance(task_id, list) else task_id}, 92 | ) 93 | await self.update() 94 | if isinstance(res, dict): 95 | return res 96 | return None 97 | 98 | async def resume(self, task_id: str | list[str]) -> dict | None: 99 | """Resume a paused download task.""" 100 | res = await self._dsm.get( 101 | self.TASK_API_KEY, 102 | "Resume", 103 | {"id": ",".join(task_id) if isinstance(task_id, list) else task_id}, 104 | ) 105 | await self.update() 106 | if isinstance(res, dict): 107 | return res 108 | return None 109 | 110 | async def delete( 111 | self, task_id: str | list[str], force_complete: bool = False 112 | ) -> dict | None: 113 | """Delete a download task.""" 114 | res = await self._dsm.get( 115 | self.TASK_API_KEY, 116 | "Delete", 117 | { 118 | "id": ",".join(task_id) if isinstance(task_id, list) else task_id, 119 | "force_complete": force_complete, 120 | }, 121 | ) 122 | await self.update() 123 | if isinstance(res, dict): 124 | return res 125 | return None 126 | -------------------------------------------------------------------------------- /src/synology_dsm/api/download_station/task.py: -------------------------------------------------------------------------------- 1 | """DownloadStation task.""" 2 | 3 | from __future__ import annotations 4 | 5 | from typing import TypedDict 6 | 7 | SynoDownloadTaskType = TypedDict( 8 | "SynoDownloadTaskType", 9 | { 10 | "additional": dict, 11 | "id": str, 12 | "size": int, 13 | "status": str, 14 | "title": str, 15 | "type": str, 16 | "username": str, 17 | "status_extra": dict, 18 | }, 19 | total=False, 20 | ) 21 | 22 | 23 | class SynoDownloadTask: 24 | """An representation of a Synology DownloadStation task.""" 25 | 26 | def __init__(self, data: SynoDownloadTaskType): 27 | """Initialize a Download Station task.""" 28 | self._data: SynoDownloadTaskType = data 29 | 30 | def update(self, data: SynoDownloadTaskType) -> None: 31 | """Update the task.""" 32 | self._data = data 33 | 34 | @property 35 | def id(self) -> str: 36 | """Return id of the task.""" 37 | return self._data["id"] 38 | 39 | @property 40 | def title(self) -> str: 41 | """Return title of the task.""" 42 | return self._data["title"] 43 | 44 | @property 45 | def type(self) -> str: 46 | """Return type of the task (bt, nzb, http(s), ftp, emule).""" 47 | return self._data["type"] 48 | 49 | @property 
50 | def username(self) -> str: 51 | """Return username of the task.""" 52 | return self._data["username"] 53 | 54 | @property 55 | def size(self) -> int: 56 | """Return size of the task.""" 57 | return self._data["size"] 58 | 59 | @property 60 | def status(self) -> str: 61 | """Return status of the task. 62 | 63 | Possible values: waiting, downloading, paused, finishing, finished, 64 | hash_checking, seeding, filehosting_waiting, extracting, error 65 | """ 66 | return self._data["status"] 67 | 68 | @property 69 | def status_extra(self) -> dict | None: 70 | """Return status_extra of the task.""" 71 | return self._data.get("status_extra") 72 | 73 | @property 74 | def additional(self) -> dict: 75 | """Return additional data of the task.""" 76 | return self._data["additional"] 77 | -------------------------------------------------------------------------------- /src/synology_dsm/api/dsm/__init__.py: -------------------------------------------------------------------------------- 1 | """Synology DSM API models.""" 2 | -------------------------------------------------------------------------------- /src/synology_dsm/api/dsm/information.py: -------------------------------------------------------------------------------- 1 | """DSM Information data.""" 2 | 3 | from __future__ import annotations 4 | 5 | import re 6 | from typing import TypedDict 7 | 8 | from awesomeversion import AwesomeVersion 9 | 10 | from synology_dsm.api import SynoBaseApi 11 | from synology_dsm.exceptions import SynologyDSMException 12 | 13 | 14 | class DsmInformationDataType(TypedDict, total=False): 15 | """Data type.""" 16 | 17 | model: str 18 | ram: int 19 | serial: str 20 | temperature: int 21 | temperature_warn: bool 22 | uptime: int 23 | version: str 24 | version_string: str 25 | 26 | 27 | class SynoDSMInformation(SynoBaseApi[DsmInformationDataType]): 28 | """Class containing Information data.""" 29 | 30 | API_KEY = "SYNO.DSM.Info" 31 | 32 | async def update(self) -> None: 33 | """Updates information data.""" 34 | raw_data = await self._dsm.get(self.API_KEY, "getinfo") 35 | if isinstance(raw_data, dict) and (data := raw_data.get("data")) is not None: 36 | self._data = data 37 | 38 | @property 39 | def model(self) -> str: 40 | """Model of the NAS.""" 41 | return self._data["model"] 42 | 43 | @property 44 | def ram(self) -> int: 45 | """RAM of the NAS (in MB).""" 46 | return self._data["ram"] 47 | 48 | @property 49 | def serial(self) -> str: 50 | """Serial of the NAS.""" 51 | return self._data["serial"] 52 | 53 | @property 54 | def temperature(self) -> int | None: 55 | """Temperature of the NAS.""" 56 | return self._data.get("temperature") 57 | 58 | @property 59 | def temperature_warn(self) -> bool: 60 | """Temperature warning of the NAS.""" 61 | # some very old nas may not provide this attribute 62 | return self._data.get("temperature_warn", False) 63 | 64 | @property 65 | def uptime(self) -> int: 66 | """Uptime of the NAS.""" 67 | return self._data["uptime"] 68 | 69 | @property 70 | def version(self) -> str: 71 | """Version of the NAS (build version).""" 72 | return self._data["version"] 73 | 74 | @property 75 | def version_string(self) -> str: 76 | """Version of the NAS.""" 77 | return self._data["version_string"] 78 | 79 | @property 80 | def awesome_version(self) -> AwesomeVersion: 81 | """Awesome version representation.""" 82 | pattern = ( 83 | r"DSM (?P\d+)\.(?P\d+)" 84 | r"(\.(?P\d+))?-(?P\d+)" 85 | r"( Update (?P\d+))?" 
86 | ) 87 | match = re.match(pattern, self.version_string) 88 | if not match: 89 | raise SynologyDSMException( 90 | api=self.API_KEY, 91 | code=0, 92 | details=f"Could not parse version string {self.version_string}", 93 | ) 94 | parts = match.groupdict() 95 | version = f"{parts['major']}.{parts['minor']}.{parts['micro'] or '0'}" 96 | if (smallfixnumber := parts.get("smallfixnumber")) is not None: 97 | version += f".{smallfixnumber}" 98 | 99 | return AwesomeVersion(version) 100 | -------------------------------------------------------------------------------- /src/synology_dsm/api/dsm/network.py: -------------------------------------------------------------------------------- 1 | """DSM Network data.""" 2 | 3 | from __future__ import annotations 4 | 5 | from typing import TypedDict 6 | 7 | from synology_dsm.api import SynoBaseApi 8 | 9 | InterfaceIp = TypedDict("InterfaceIp", {"address": str, "netmask": str}) 10 | InterfaceIpv6 = TypedDict( 11 | "InterfaceIpv6", {"address": str, "prefix_length": int, "scope": str} 12 | ) 13 | 14 | NetworkInterface = TypedDict( 15 | "NetworkInterface", 16 | { 17 | "id": str, 18 | "ip": "list[InterfaceIp]", 19 | "ipv6": "list[InterfaceIpv6]", 20 | "mac": str, 21 | "type": str, 22 | }, 23 | total=False, 24 | ) 25 | 26 | 27 | class DsmNetworkDataType(TypedDict, total=False): 28 | """Data type.""" 29 | 30 | dns: list[str] 31 | gateway: str 32 | hostname: str 33 | interfaces: list[NetworkInterface] 34 | workgroup: str 35 | 36 | 37 | class SynoDSMNetwork(SynoBaseApi[DsmNetworkDataType]): 38 | """Class containing Network data.""" 39 | 40 | API_KEY = "SYNO.DSM.Network" 41 | 42 | async def update(self) -> None: 43 | """Updates network data.""" 44 | raw_data = await self._dsm.get(self.API_KEY, "list") 45 | if isinstance(raw_data, dict) and (data := raw_data.get("data")) is not None: 46 | self._data = data 47 | 48 | @property 49 | def dns(self) -> list[str]: 50 | """DNS of the NAS.""" 51 | return self._data["dns"] 52 | 53 | @property 54 | def gateway(self) -> str: 55 | """Gateway of the NAS.""" 56 | return self._data["gateway"] 57 | 58 | @property 59 | def hostname(self) -> str: 60 | """Host name of the NAS.""" 61 | return self._data["hostname"] 62 | 63 | @property 64 | def interfaces(self) -> list[NetworkInterface]: 65 | """Interfaces of the NAS.""" 66 | return self._data["interfaces"] 67 | 68 | def interface(self, eth_id: str) -> NetworkInterface | None: 69 | """Interface of the NAS.""" 70 | for interface in self.interfaces: 71 | if interface["id"] == eth_id: 72 | return interface 73 | return None 74 | 75 | @property 76 | def macs(self) -> list[str]: 77 | """List of MACs of the NAS.""" 78 | macs: list[str] = [] 79 | for interface in self.interfaces: 80 | if (mac := interface.get("mac")) is not None: 81 | macs.append(mac) 82 | return macs 83 | 84 | @property 85 | def workgroup(self) -> str: 86 | """Workgroup of the NAS.""" 87 | return self._data["workgroup"] 88 | -------------------------------------------------------------------------------- /src/synology_dsm/api/file_station/__init__.py: -------------------------------------------------------------------------------- 1 | """Synology FileStation API wrapper.""" 2 | 3 | from __future__ import annotations 4 | 5 | from collections.abc import AsyncIterator 6 | from io import BufferedReader 7 | 8 | import aiofiles 9 | from aiohttp import StreamReader 10 | 11 | from synology_dsm.api import SynoBaseApi 12 | 13 | from .models import ( 14 | SynoFileAdditionalOwner, 15 | SynoFileFile, 16 | SynoFileFileAdditional, 17 | 
SynoFileFileAdditionalPermission, 18 | SynoFileFileAdditionalTime, 19 | SynoFileSharedFolder, 20 | SynoFileSharedFolderAdditional, 21 | SynoFileSharedFolderAdditionalPermission, 22 | SynoFileSharedFolderAdditionalVolumeStatus, 23 | ) 24 | 25 | 26 | class SynoFileStation(SynoBaseApi): 27 | """An implementation of a Synology FileStation.""" 28 | 29 | API_KEY = "SYNO.FileStation.*" 30 | LIST_API_KEY = "SYNO.FileStation.List" 31 | DOWNLOAD_API_KEY = "SYNO.FileStation.Download" 32 | UPLOAD_API_KEY = "SYNO.FileStation.Upload" 33 | DELETE_API_KEY = "SYNO.FileStation.Delete" 34 | 35 | async def get_shared_folders( 36 | self, offset: int = 0, limit: int = 100, only_writable: bool = False 37 | ) -> list[SynoFileSharedFolder] | None: 38 | """Get a list of all shared folders.""" 39 | raw_data = await self._dsm.get( 40 | self.LIST_API_KEY, 41 | "list_share", 42 | { 43 | "offset": offset, 44 | "limit": limit, 45 | "onlywritable": only_writable, 46 | "additional": ( 47 | '["real_path","owner","time","perm",' 48 | '"mount_point_type","sync_share","volume_status"]' 49 | ), 50 | }, 51 | ) 52 | if not isinstance(raw_data, dict) or (data := raw_data.get("data")) is None: 53 | return None 54 | 55 | shared_folders: list[SynoFileSharedFolder] = [] 56 | for folder in data["shares"]: 57 | if (additional := folder.get("additional")) is not None: 58 | shared_folders.append( 59 | SynoFileSharedFolder( 60 | SynoFileSharedFolderAdditional( 61 | additional["mount_point_type"], 62 | SynoFileAdditionalOwner(**additional["owner"]), 63 | SynoFileSharedFolderAdditionalPermission( 64 | **additional["perm"] 65 | ), 66 | SynoFileSharedFolderAdditionalVolumeStatus( 67 | **additional["volume_status"], 68 | ), 69 | ), 70 | folder["isdir"], 71 | folder["name"], 72 | folder["path"], 73 | ) 74 | ) 75 | else: 76 | shared_folders.append( 77 | SynoFileSharedFolder( 78 | None, folder["isdir"], folder["name"], folder["path"] 79 | ) 80 | ) 81 | 82 | return shared_folders 83 | 84 | async def get_files( 85 | self, path: str, offset: int = 0, limit: int = 100 86 | ) -> list[SynoFileFile] | None: 87 | """Get a list of all files in a folder.""" 88 | raw_data = await self._dsm.get( 89 | self.LIST_API_KEY, 90 | "list", 91 | { 92 | "offset": offset, 93 | "limit": limit, 94 | "folder_path": path, 95 | "additional": ( 96 | '["real_path","owner","time","perm",' 97 | '"mount_point_type","type","size"]' 98 | ), 99 | }, 100 | ) 101 | if not isinstance(raw_data, dict) or (data := raw_data.get("data")) is None: 102 | return None 103 | 104 | files: list[SynoFileFile] = [] 105 | for file in data["files"]: 106 | if (additional := file.get("additional")) is not None: 107 | files.append( 108 | SynoFileFile( 109 | SynoFileFileAdditional( 110 | additional["mount_point_type"], 111 | SynoFileAdditionalOwner(**additional["owner"]), 112 | SynoFileFileAdditionalPermission(**additional["perm"]), 113 | additional["real_path"], 114 | additional["size"], 115 | SynoFileFileAdditionalTime(**additional["time"]), 116 | additional["type"], 117 | ), 118 | file["isdir"], 119 | file["name"], 120 | file["path"], 121 | ) 122 | ) 123 | else: 124 | files.append( 125 | SynoFileFile(None, file["isdir"], file["name"], file["path"]) 126 | ) 127 | 128 | return files 129 | 130 | async def upload_file( 131 | self, 132 | path: str, 133 | filename: str, 134 | source: bytes | BufferedReader | AsyncIterator[bytes] | str, 135 | create_parents: bool = False, 136 | ) -> bool | None: 137 | """Upload a file to a folder from eather a local source_file or content.""" 138 | if isinstance(source, str): 
139 | source = open(source, "rb") 140 | 141 | raw_data = await self._dsm.post( 142 | self.UPLOAD_API_KEY, 143 | "upload", 144 | path=path, 145 | filename=filename, 146 | content=source, 147 | create_parents=create_parents, 148 | ) 149 | if not isinstance(raw_data, dict): 150 | return None 151 | return raw_data.get("success") 152 | 153 | async def download_file( 154 | self, path: str, filename: str, target_file: str | None = None 155 | ) -> StreamReader | bool | None: 156 | """Download a file to local target_file or returns an async StreamReader.""" 157 | response_content = await self._dsm.get( 158 | self.DOWNLOAD_API_KEY, 159 | "download", 160 | {"path": f"{path}/{filename}", "mode": "download"}, 161 | raw_response_content=True, 162 | ) 163 | if not isinstance(response_content, StreamReader): 164 | return None 165 | 166 | if target_file: 167 | async with aiofiles.open(target_file, "wb") as fh: 168 | async for data, _ in response_content.iter_chunks(): 169 | await fh.write(data) 170 | return True 171 | 172 | return response_content 173 | 174 | async def delete_file(self, path: str, filename: str) -> bool | None: 175 | """Delete a file.""" 176 | raw_data = await self._dsm.get( 177 | self.DELETE_API_KEY, 178 | "delete", 179 | {"path": f"{path}/{filename}", "recursive": False}, 180 | ) 181 | if not isinstance(raw_data, dict): 182 | return None 183 | return raw_data.get("success") 184 | -------------------------------------------------------------------------------- /src/synology_dsm/api/file_station/models.py: -------------------------------------------------------------------------------- 1 | """Data models for Synology FileStation Module.""" 2 | 3 | from __future__ import annotations 4 | 5 | from dataclasses import dataclass 6 | 7 | # ------------------------------------- 8 | # generic additional data 9 | # ------------------------------------- 10 | 11 | 12 | @dataclass 13 | class SynoFileAdditionalOwner: 14 | """Representation of an Synology FileStation additionl owner data.""" 15 | 16 | gid: int 17 | group: str 18 | uid: int 19 | user: str 20 | 21 | 22 | # ------------------------------------- 23 | # shared folder 24 | # ------------------------------------- 25 | 26 | 27 | @dataclass 28 | class SynoFileSharedFolderAdditionalPermission: 29 | """Representation of an Synology FileStation additionl permission data.""" 30 | 31 | acl: dict 32 | acl_enable: bool 33 | adv_right: dict 34 | is_acl_mode: bool 35 | is_share_readonly: bool 36 | posix: int 37 | share_right: str 38 | 39 | 40 | @dataclass 41 | class SynoFileSharedFolderAdditionalVolumeStatus: 42 | """Representation of an Synology FileStation additionl permission data.""" 43 | 44 | freespace: int 45 | totalspace: int 46 | readonly: bool 47 | 48 | 49 | @dataclass 50 | class SynoFileSharedFolderAdditional: 51 | """Representation of an Synology FileStation Shared Folder additionl data.""" 52 | 53 | mount_point_type: str 54 | owner: SynoFileAdditionalOwner 55 | perm: SynoFileSharedFolderAdditionalPermission 56 | volume_status: SynoFileSharedFolderAdditionalVolumeStatus 57 | 58 | 59 | @dataclass 60 | class SynoFileSharedFolder: 61 | """Representation of an Synology FileStation Shared Folder.""" 62 | 63 | additional: SynoFileSharedFolderAdditional | None 64 | is_dir: bool 65 | name: str 66 | path: str 67 | 68 | 69 | # ------------------------------------- 70 | # file 71 | # ------------------------------------- 72 | 73 | 74 | @dataclass 75 | class SynoFileFileAdditionalPermission: 76 | """Representation of an Synology FileStation additionl 
permission data.""" 77 | 78 | acl: dict 79 | is_acl_mode: bool 80 | posix: int 81 | 82 | 83 | @dataclass 84 | class SynoFileFileAdditionalTime: 85 | """Representation of an Synology FileStation additionl permission data.""" 86 | 87 | atime: int 88 | ctime: int 89 | crtime: int 90 | mtime: int 91 | 92 | 93 | @dataclass 94 | class SynoFileFileAdditional: 95 | """Representation of an Synology FileStation File additionl data.""" 96 | 97 | mount_point_type: str 98 | owner: SynoFileAdditionalOwner 99 | perm: SynoFileFileAdditionalPermission 100 | real_path: str 101 | size: int 102 | time: SynoFileFileAdditionalTime 103 | type: str 104 | 105 | 106 | @dataclass 107 | class SynoFileFile: 108 | """Representation of an Synology FileStation File.""" 109 | 110 | additional: SynoFileFileAdditional | None 111 | is_dir: bool 112 | name: str 113 | path: str 114 | -------------------------------------------------------------------------------- /src/synology_dsm/api/photos/__init__.py: -------------------------------------------------------------------------------- 1 | """Synology Photos API wrapper.""" 2 | 3 | from __future__ import annotations 4 | 5 | from synology_dsm.api import SynoBaseApi 6 | 7 | from .model import SynoPhotosAlbum, SynoPhotosItem 8 | 9 | 10 | class SynoPhotos(SynoBaseApi): 11 | """An implementation of a Synology Photos.""" 12 | 13 | API_KEY = "SYNO.Foto.*" 14 | BROWSE_ALBUMS_API_KEY = "SYNO.Foto.Browse.Album" 15 | BROWSE_ITEM_API_KEY = "SYNO.Foto.Browse.Item" 16 | DOWNLOAD_API_KEY = "SYNO.Foto.Download" 17 | DOWNLOAD_FOTOTEAM_API_KEY = "SYNO.FotoTeam.Download" 18 | SEARCH_API_KEY = "SYNO.Foto.Search.Search" 19 | THUMBNAIL_API_KEY = "SYNO.Foto.Thumbnail" 20 | THUMBNAIL_FOTOTEAM_API_KEY = "SYNO.FotoTeam.Thumbnail" 21 | BROWSE_ITEM_FOTOTEAM_API_KEY = "SYNO.FotoTeam.Browse.Item" 22 | 23 | async def get_albums( 24 | self, offset: int = 0, limit: int = 100 25 | ) -> list[SynoPhotosAlbum] | None: 26 | """Get a list of all albums.""" 27 | albums: list[SynoPhotosAlbum] = [] 28 | raw_data = await self._dsm.get( 29 | self.BROWSE_ALBUMS_API_KEY, 30 | "list", 31 | {"offset": offset, "limit": limit, "category": "normal_share_with_me"}, 32 | ) 33 | if not isinstance(raw_data, dict) or (data := raw_data.get("data")) is None: 34 | return None 35 | 36 | for album in data["list"]: 37 | albums.append( 38 | SynoPhotosAlbum( 39 | album["id"], 40 | album["name"], 41 | album["item_count"], 42 | album["passphrase"], 43 | ) 44 | ) 45 | return albums 46 | 47 | def _raw_data_to_items( # noqa: S107 48 | self, raw_data: dict, passphrase: str = "" 49 | ) -> list[SynoPhotosItem] | None: 50 | """Parse the raw data response to a list of photo items.""" 51 | items: list[SynoPhotosItem] = [] 52 | if (data := raw_data.get("data")) is None: 53 | return None 54 | 55 | for item in data["list"]: 56 | if item["additional"]["thumbnail"]["xl"] == "ready": 57 | size = "xl" 58 | elif item["additional"]["thumbnail"]["m"] == "ready": 59 | size = "m" 60 | else: 61 | size = "sm" 62 | 63 | items.append( 64 | SynoPhotosItem( 65 | item["id"], 66 | item["type"], 67 | item["filename"], 68 | item["filesize"], 69 | item["additional"]["thumbnail"]["cache_key"], 70 | size, 71 | item["owner_user_id"] == 0, 72 | passphrase, 73 | ) 74 | ) 75 | return items 76 | 77 | async def get_items_from_album( 78 | self, album: SynoPhotosAlbum, offset: int = 0, limit: int = 100 79 | ) -> list[SynoPhotosItem] | None: 80 | """Get a list of all items from given album.""" 81 | params = { 82 | "offset": offset, 83 | "limit": limit, 84 | "additional": 
'["thumbnail"]', 85 | } 86 | if album.passphrase: 87 | params["passphrase"] = album.passphrase 88 | else: 89 | params["album_id"] = album.album_id 90 | 91 | raw_data = await self._dsm.get( 92 | self.BROWSE_ITEM_API_KEY, 93 | "list", 94 | params, 95 | ) 96 | if not isinstance(raw_data, dict): 97 | return None 98 | return self._raw_data_to_items(raw_data, album.passphrase) 99 | 100 | async def get_items_from_shared_space( 101 | self, offset: int = 0, limit: int = 100 102 | ) -> list[SynoPhotosItem] | None: 103 | """Get a list of all items from the shared space.""" 104 | raw_data = await self._dsm.get( 105 | self.BROWSE_ITEM_FOTOTEAM_API_KEY, 106 | "list", 107 | { 108 | "offset": offset, 109 | "limit": limit, 110 | "additional": '["thumbnail"]', 111 | }, 112 | ) 113 | if not isinstance(raw_data, dict): 114 | return None 115 | return self._raw_data_to_items(raw_data) 116 | 117 | async def get_items_from_search( 118 | self, search_string: str, offset: int = 0, limit: int = 100 119 | ) -> list[SynoPhotosItem] | None: 120 | """Get a list of all items matching the keyword.""" 121 | raw_data = await self._dsm.get( 122 | self.SEARCH_API_KEY, 123 | "list_item", 124 | { 125 | "keyword": search_string, 126 | "offset": offset, 127 | "limit": limit, 128 | "additional": '["thumbnail"]', 129 | }, 130 | ) 131 | if not isinstance(raw_data, dict): 132 | return None 133 | return self._raw_data_to_items(raw_data) 134 | 135 | async def download_item(self, item: SynoPhotosItem) -> bytes | None: 136 | """Download the given item.""" 137 | download_api = self.DOWNLOAD_API_KEY 138 | if item.is_shared: 139 | download_api = self.DOWNLOAD_FOTOTEAM_API_KEY 140 | 141 | params = { 142 | "unit_id": f"[{item.item_id}]", 143 | "cache_key": item.thumbnail_cache_key, 144 | } 145 | 146 | if item.passphrase: 147 | params["passphrase"] = item.passphrase 148 | 149 | raw_data = await self._dsm.get( 150 | download_api, 151 | "download", 152 | params, 153 | ) 154 | if isinstance(raw_data, bytes): 155 | return raw_data 156 | return None 157 | 158 | async def download_item_thumbnail(self, item: SynoPhotosItem) -> bytes | None: 159 | """Download the given items thumbnail.""" 160 | download_api = self.THUMBNAIL_API_KEY 161 | if item.is_shared: 162 | download_api = self.THUMBNAIL_FOTOTEAM_API_KEY 163 | 164 | params = { 165 | "id": item.item_id, 166 | "cache_key": item.thumbnail_cache_key, 167 | "size": item.thumbnail_size, 168 | "type": "unit", 169 | } 170 | 171 | if item.passphrase: 172 | params["passphrase"] = item.passphrase 173 | 174 | raw_data = await self._dsm.get( 175 | download_api, 176 | "get", 177 | params, 178 | ) 179 | if isinstance(raw_data, bytes): 180 | return raw_data 181 | return None 182 | 183 | async def get_item_thumbnail_url(self, item: SynoPhotosItem) -> str: 184 | """Get the url of given items thumbnail.""" 185 | download_api = self.THUMBNAIL_API_KEY 186 | if item.is_shared: 187 | download_api = self.THUMBNAIL_FOTOTEAM_API_KEY 188 | 189 | params = { 190 | "id": item.item_id, 191 | "cache_key": item.thumbnail_cache_key, 192 | "size": item.thumbnail_size, 193 | "type": "unit", 194 | } 195 | 196 | if item.passphrase: 197 | params["passphrase"] = item.passphrase 198 | 199 | return await self._dsm.generate_url( 200 | download_api, 201 | "get", 202 | params, 203 | ) 204 | -------------------------------------------------------------------------------- /src/synology_dsm/api/photos/model.py: -------------------------------------------------------------------------------- 1 | """Data models for Synology Photos Module.""" 2 | 
3 | from __future__ import annotations 4 | 5 | from dataclasses import dataclass 6 | 7 | 8 | @dataclass 9 | class SynoPhotosAlbum: 10 | """Representation of an Synology Photos Album.""" 11 | 12 | album_id: int 13 | name: str 14 | item_count: int 15 | passphrase: str 16 | 17 | 18 | @dataclass 19 | class SynoPhotosItem: 20 | """Representation of an Synology Photos Item.""" 21 | 22 | item_id: int 23 | item_type: str 24 | file_name: str 25 | file_size: str 26 | thumbnail_cache_key: str 27 | thumbnail_size: str 28 | is_shared: bool 29 | passphrase: str 30 | -------------------------------------------------------------------------------- /src/synology_dsm/api/storage/__init__.py: -------------------------------------------------------------------------------- 1 | """Synology Storage API models.""" 2 | -------------------------------------------------------------------------------- /src/synology_dsm/api/storage/storage.py: -------------------------------------------------------------------------------- 1 | """DSM Storage data.""" 2 | 3 | from __future__ import annotations 4 | 5 | from typing import TypedDict, cast 6 | 7 | from synology_dsm.api import SynoBaseApi 8 | from synology_dsm.helpers import SynoFormatHelper 9 | 10 | 11 | class SynoStorageDisk(TypedDict, total=False): 12 | """Synology Storage Disk.""" 13 | 14 | id: str 15 | name: str 16 | device: str 17 | firm: str 18 | diskType: str # noqa: N815 19 | smart_status: str 20 | status: str 21 | exceed_bad_sector_thr: bool 22 | below_remain_life_thr: bool 23 | temp: int 24 | model: str 25 | vendor: str 26 | size_total: int 27 | 28 | 29 | SynoStoragePoolChild = TypedDict( 30 | "SynoStoragePoolChild", {"id": str, "size": dict}, total=False 31 | ) 32 | 33 | 34 | class SynoStoragePool(TypedDict, total=False): 35 | """Synology Storage Pool.""" 36 | 37 | disks: list[str] 38 | pool_child: list[SynoStoragePoolChild] 39 | 40 | 41 | SynoStorageVolumeSize = TypedDict( 42 | "SynoStorageVolumeSize", 43 | { 44 | "free_inode": str, 45 | "total": str, 46 | "total_device": str, 47 | "total_inode": str, 48 | "used": str, 49 | }, 50 | ) 51 | 52 | 53 | class SynoStorageVolume(TypedDict, total=False): 54 | """Synology Storage Volume.""" 55 | 56 | id: str 57 | device_type: str 58 | size: SynoStorageVolumeSize 59 | status: str 60 | fs_type: str 61 | 62 | 63 | class StorageDataType(TypedDict, total=False): 64 | """Synology Storage Data type.""" 65 | 66 | disks: list[SynoStorageDisk] 67 | env: dict 68 | storagePools: list[SynoStoragePool] # noqa: N815 69 | volumes: list[SynoStorageVolume] 70 | 71 | 72 | class SynoStorage(SynoBaseApi[StorageDataType]): 73 | """Class containing Storage data.""" 74 | 75 | API_KEY = "SYNO.Storage.CGI.Storage" 76 | 77 | async def update(self) -> None: 78 | """Updates storage data.""" 79 | raw_data = await self._dsm.get(self.API_KEY, "load_info") 80 | if isinstance(raw_data, dict): 81 | self._data = cast(StorageDataType, raw_data) 82 | if (data := raw_data.get("data")) is not None: 83 | self._data = data 84 | 85 | # Root 86 | @property 87 | def disks(self) -> list[SynoStorageDisk]: 88 | """Gets all (internal) disks.""" 89 | return self._data.get("disks", []) 90 | 91 | @property 92 | def env(self) -> dict | None: 93 | """Gets storage env.""" 94 | return self._data.get("env") 95 | 96 | @property 97 | def storage_pools(self) -> list[SynoStoragePool]: 98 | """Gets all storage pools.""" 99 | return self._data.get("storagePools", []) 100 | 101 | @property 102 | def volumes(self) -> list[SynoStorageVolume]: 103 | """Gets all volumes.""" 104 | 
return self._data.get("volumes", []) 105 | 106 | # Volume 107 | @property 108 | def volumes_ids(self) -> list[str]: 109 | """Returns volumes ids.""" 110 | volumes: list[str] = [] 111 | for volume in self.volumes: 112 | volumes.append(volume["id"]) 113 | return volumes 114 | 115 | def get_volume(self, volume_id: str) -> SynoStorageVolume | None: 116 | """Returns a specific volume.""" 117 | for volume in self.volumes: 118 | if volume["id"] == volume_id: 119 | return volume 120 | return None 121 | 122 | def volume_status(self, volume_id: str) -> str | None: 123 | """Status of the volume (normal, degraded, etc).""" 124 | if volume := self.get_volume(volume_id): 125 | return volume.get("status") 126 | return None 127 | 128 | def volume_device_type(self, volume_id: str) -> str | None: 129 | """Returns the volume type (RAID1, RAID2, etc).""" 130 | if volume := self.get_volume(volume_id): 131 | return volume.get("device_type") 132 | return None 133 | 134 | def volume_size_total( 135 | self, volume_id: str, human_readable: bool = False 136 | ) -> int | str | None: 137 | """Total size of volume.""" 138 | if (volume := self.get_volume(volume_id)) is None or ( 139 | size := volume.get("size") 140 | ) is None: 141 | return None 142 | return_data = int(size["total"]) 143 | if human_readable: 144 | return SynoFormatHelper.bytes_to_readable(return_data) 145 | return return_data 146 | 147 | def volume_size_used( 148 | self, volume_id: str, human_readable: bool = False 149 | ) -> int | str | None: 150 | """Total used size in volume.""" 151 | if (volume := self.get_volume(volume_id)) is None or ( 152 | size := volume.get("size") 153 | ) is None: 154 | return None 155 | return_data = int(size["used"]) 156 | if human_readable: 157 | return SynoFormatHelper.bytes_to_readable(return_data) 158 | return return_data 159 | 160 | def volume_percentage_used(self, volume_id: str) -> float | None: 161 | """Total used size in percentage for volume.""" 162 | if (volume := self.get_volume(volume_id)) is None or ( 163 | size := volume.get("size") 164 | ) is None: 165 | return None 166 | total = int(size["total"]) 167 | used = int(size["used"]) 168 | return round((float(used) / float(total)) * 100.0, 1) 169 | 170 | def volume_disk_temp_avg(self, volume_id: str) -> float | None: 171 | """Average temperature of all disks making up the volume.""" 172 | total_temp = 0 173 | total_disks = 0 174 | disks = self._get_disks_for_volume(volume_id) 175 | for disk in disks: 176 | if disk_temp := self.disk_temp(disk["id"]): 177 | total_disks += 1 178 | total_temp += disk_temp 179 | 180 | if total_temp > 0 and total_disks > 0: 181 | return round(total_temp / total_disks, 0) 182 | return None 183 | 184 | def volume_disk_temp_max(self, volume_id: str) -> int | None: 185 | """Maximum temperature of all disks making up the volume.""" 186 | disks = self._get_disks_for_volume(volume_id) 187 | if not disks: 188 | return None 189 | 190 | disk_temps: list[int] = [0] 191 | for disk in disks: 192 | if disk_temp := self.disk_temp(disk["id"]): 193 | disk_temps.append(disk_temp) 194 | return max(disk_temps) 195 | 196 | # Disk 197 | @property 198 | def disks_ids(self) -> list[str]: 199 | """Returns (internal) disks ids.""" 200 | disks: list[str] = [] 201 | for disk in self.disks: 202 | disks.append(disk["id"]) 203 | return disks 204 | 205 | def get_disk(self, disk_id: str) -> SynoStorageDisk | None: 206 | """Returns a specific disk.""" 207 | for disk in self.disks: 208 | if disk["id"] == disk_id: 209 | return disk 210 | return None 211 | 212 | def 
_get_disks_for_volume(self, volume_id: str) -> list[SynoStorageDisk]: 213 | """Returns a list of disk for a specific volume.""" 214 | disks: list[SynoStorageDisk] = [] 215 | for pool in self.storage_pools: 216 | if pool.get("deploy_path") == volume_id: 217 | # RAID disk redundancy 218 | for disk_id in pool["disks"]: 219 | if disk := self.get_disk(disk_id): 220 | disks.append(disk) 221 | 222 | if pool.get("pool_child"): 223 | # SHR disk redundancy 224 | for pool_child in pool["pool_child"]: 225 | if pool_child["id"] != volume_id: 226 | continue 227 | for disk_id in pool["disks"]: 228 | if disk := self.get_disk(disk_id): 229 | disks.append(disk) 230 | 231 | return disks 232 | 233 | def disk_name(self, disk_id: str) -> str | None: 234 | """The name of this disk.""" 235 | if disk := self.get_disk(disk_id): 236 | return disk.get("name") 237 | return None 238 | 239 | def disk_device(self, disk_id: str) -> str | None: 240 | """The mount point of this disk.""" 241 | if disk := self.get_disk(disk_id): 242 | return disk.get("device") 243 | return None 244 | 245 | def disk_smart_status(self, disk_id: str) -> str | None: 246 | """Status of disk according to S.M.A.R.T).""" 247 | if disk := self.get_disk(disk_id): 248 | return disk.get("smart_status") 249 | return None 250 | 251 | def disk_status(self, disk_id: str) -> str | None: 252 | """Status of disk.""" 253 | if disk := self.get_disk(disk_id): 254 | return disk.get("status") 255 | return None 256 | 257 | def disk_exceed_bad_sector_thr(self, disk_id: str) -> bool | None: 258 | """Checks if disk has exceeded maximum bad sector threshold.""" 259 | if disk := self.get_disk(disk_id): 260 | return disk.get("exceed_bad_sector_thr") 261 | return None 262 | 263 | def disk_below_remain_life_thr(self, disk_id: str) -> bool | None: 264 | """Checks if disk has fallen below minimum life threshold.""" 265 | if disk := self.get_disk(disk_id): 266 | return disk.get("below_remain_life_thr") 267 | return None 268 | 269 | def disk_temp(self, disk_id: str) -> int | None: 270 | """Returns the temperature of the disk.""" 271 | if disk := self.get_disk(disk_id): 272 | return disk.get("temp") 273 | return None 274 | -------------------------------------------------------------------------------- /src/synology_dsm/api/surveillance_station/camera.py: -------------------------------------------------------------------------------- 1 | """SurveillanceStation camera.""" 2 | 3 | from __future__ import annotations 4 | 5 | from typing import TypedDict 6 | 7 | from .const import MOTION_DETECTION_DISABLED, RECORDING_STATUS 8 | 9 | SynoCameraData = TypedDict( 10 | "SynoCameraData", 11 | { 12 | "enabled": bool, 13 | "fps": int, 14 | "id": int, 15 | "MDParam": dict, 16 | "model": str, 17 | "name": str, 18 | "recStatus": int, 19 | "resolution": str, 20 | }, 21 | total=False, 22 | ) 23 | 24 | SynoCameraLifeViewData = TypedDict( 25 | "SynoCameraLifeViewData", 26 | { 27 | "mjpegHttpPath": str, 28 | "multicstPath": str, 29 | "mxpegHttpPath": str, 30 | "rtspOverHttpPath": str, 31 | "rtspPath": str, 32 | }, 33 | total=False, 34 | ) 35 | 36 | 37 | class SynoCamera: 38 | """An representation of a Synology SurveillanceStation camera.""" 39 | 40 | def __init__( 41 | self, data: SynoCameraData, live_view_data: SynoCameraLifeViewData | None = None 42 | ) -> None: 43 | """Initialize a Surveillance Station camera.""" 44 | self._data: SynoCameraData = data 45 | self.live_view = SynoCameraLiveView(live_view_data) 46 | self._motion_detection_enabled: bool | None = None 47 | 48 | def update(self, data: 
SynoCameraData) -> None: 49 | """Update the camera.""" 50 | self._data = data 51 | 52 | def update_motion_detection(self, data: SynoCameraData) -> None: 53 | """Update the camera motion detection.""" 54 | self._motion_detection_enabled = ( 55 | MOTION_DETECTION_DISABLED != data["MDParam"]["source"] 56 | ) 57 | 58 | @property 59 | def id(self) -> int: 60 | """Return id of the camera.""" 61 | return self._data["id"] 62 | 63 | @property 64 | def name(self) -> str: 65 | """Return name of the camera.""" 66 | return self._data["name"] 67 | 68 | @property 69 | def model(self) -> str: 70 | """Return model of the camera.""" 71 | return self._data["model"] 72 | 73 | @property 74 | def resolution(self) -> str: 75 | """Return resolution of the camera.""" 76 | return self._data["resolution"] 77 | 78 | @property 79 | def fps(self) -> int: 80 | """Return FPS of the camera.""" 81 | return self._data["fps"] 82 | 83 | @property 84 | def is_enabled(self) -> bool: 85 | """Return true if camera is enabled.""" 86 | return self._data["enabled"] 87 | 88 | @property 89 | def is_motion_detection_enabled(self) -> bool | None: 90 | """Return true if motion detection is enabled.""" 91 | return self._motion_detection_enabled 92 | 93 | @property 94 | def is_recording(self) -> bool: 95 | """Return true if camera is recording.""" 96 | return self._data["recStatus"] in RECORDING_STATUS 97 | 98 | 99 | class SynoCameraLiveView: 100 | """An representation of a Synology SurveillanceStation camera live view.""" 101 | 102 | def __init__(self, data: SynoCameraLifeViewData | None): 103 | """Initialize a Surveillance Station camera live view.""" 104 | if data is not None: 105 | self._data = data 106 | else: 107 | self._data = {} 108 | 109 | def update(self, data: SynoCameraLifeViewData) -> None: 110 | """Update the camera live view.""" 111 | self._data = data 112 | 113 | @property 114 | def mjpeg_http(self) -> str | None: 115 | """Return the mjpeg stream (over http) path of the camera.""" 116 | return self._data.get("mjpegHttpPath") 117 | 118 | @property 119 | def multicast(self) -> str | None: 120 | """Return the multi-cast path of the camera.""" 121 | return self._data.get("multicstPath") 122 | 123 | @property 124 | def mxpeg_http(self) -> str | None: 125 | """Return the mxpeg stream path of the camera.""" 126 | return self._data.get("mxpegHttpPath") 127 | 128 | @property 129 | def rtsp_http(self) -> str | None: 130 | """Return the RTSP stream (over http) path of the camera.""" 131 | return self._data.get("rtspOverHttpPath") 132 | 133 | @property 134 | def rtsp(self) -> str | None: 135 | """Return the RTSP stream path of the camera.""" 136 | return self._data.get("rtspPath") 137 | -------------------------------------------------------------------------------- /src/synology_dsm/api/surveillance_station/const.py: -------------------------------------------------------------------------------- 1 | """Synology SurveillanceStation API constants.""" 2 | 3 | RECORDING_STATUS = [ 4 | 1, # Continue recording schedule 5 | 2, # Motion detect recording schedule 6 | 3, # Digital input recording schedule 7 | 4, # Digital input recording schedule 8 | 5, # Manual recording schedule 9 | ] 10 | MOTION_DETECTION_DISABLED = -1 11 | MOTION_DETECTION_BY_CAMERA = 0 12 | MOTION_DETECTION_BY_SURVEILLANCE = 1 13 | 14 | SNAPSHOT_SIZE_ICON = 1 15 | SNAPSHOT_SIZE_FULL = 2 16 | 17 | SNAPSHOT_PROFILE_HIGH_QUALITY = 0 18 | SNAPSHOT_PROFILE_BALANCED = 1 19 | SNAPSHOT_PROFILE_LOW_BANDWIDTH = 2 20 | 
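Usage sketch (illustrative only, not part of the library source): the SurveillanceStation constants above pair with the SynoCamera wrapper defined in camera.py. The camera payload below is made up for the example; real data of this shape is returned by the DSM API, while the class, constants, and properties are exactly those shown above.

    from synology_dsm.api.surveillance_station.camera import SynoCamera
    from synology_dsm.api.surveillance_station.const import MOTION_DETECTION_BY_CAMERA

    # Hypothetical payload shaped like SynoCameraData; values are invented for illustration.
    camera_data = {
        "id": 1,
        "name": "Driveway",
        "model": "Example-Cam",
        "resolution": "1920x1080",
        "fps": 30,
        "enabled": True,
        "recStatus": 2,  # "motion detect recording schedule" -> listed in RECORDING_STATUS
        "MDParam": {"source": MOTION_DETECTION_BY_CAMERA},
    }

    camera = SynoCamera(camera_data)  # live_view_data may be omitted (defaults to None)
    camera.update_motion_detection(camera_data)

    assert camera.is_recording                  # recStatus 2 is in RECORDING_STATUS
    assert camera.is_motion_detection_enabled   # source differs from MOTION_DETECTION_DISABLED

In the library itself the SurveillanceStation API wrapper feeds these objects; the sketch only traces how recStatus and MDParam map onto the is_recording and is_motion_detection_enabled properties.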
-------------------------------------------------------------------------------- /src/synology_dsm/api/virtual_machine_manager/__init__.py: -------------------------------------------------------------------------------- 1 | """Synology Virtual Machine Manager API models.""" 2 | 3 | from __future__ import annotations 4 | 5 | from synology_dsm.api import SynoBaseApi 6 | 7 | from .guest import SynoVmmGuest 8 | 9 | 10 | class SynoVirtualMachineManager(SynoBaseApi["dict[str, SynoVmmGuest]"]): 11 | """Class containing Virtual Machine Guests.""" 12 | 13 | API_KEY = "SYNO.Virtualization.*" 14 | GUEST_API_KEY = "SYNO.Virtualization.Guest" 15 | ACTION_API_KEY = "SYNO.Virtualization.Guest.Action" 16 | 17 | async def update(self) -> None: 18 | """Updates Virtual Machine Manager data.""" 19 | raw_data = await self._dsm.get(self.GUEST_API_KEY, "list") 20 | print(raw_data) 21 | if not isinstance(raw_data, dict) or (data := raw_data.get("data")) is None: 22 | return 23 | 24 | for guest in data["guests"]: 25 | if guest["guest_id"] in self._data: 26 | self._data[guest["guest_id"]].update(guest) 27 | else: 28 | self._data[guest["guest_id"]] = SynoVmmGuest(guest) 29 | 30 | def get_all_guests(self) -> list[SynoVmmGuest]: 31 | """Return a list of all vmm guests.""" 32 | return list(self._data.values()) 33 | 34 | def get_guest(self, guest_id: str) -> SynoVmmGuest | None: 35 | """Return vmm guest by guest_id.""" 36 | return self._data.get(guest_id) 37 | 38 | async def _guest_action(self, guest_id: str, action: str) -> bool | None: 39 | raw_data = await self._dsm.post( 40 | self.ACTION_API_KEY, 41 | "pwr_ctl", 42 | { 43 | "guest_id": guest_id, 44 | "action": action, 45 | }, 46 | ) 47 | if ( 48 | isinstance(raw_data, dict) 49 | and (result := raw_data.get("success")) is not None 50 | ): 51 | return bool(result) 52 | return None 53 | 54 | async def guest_poweron(self, guest_id: str) -> bool | None: 55 | """Power on a vmm guest.""" 56 | return await self._guest_action(guest_id, "poweron") 57 | 58 | async def guest_poweroff(self, guest_id: str) -> bool | None: 59 | """Power off a vmm guest.""" 60 | return await self._guest_action(guest_id, "poweroff") 61 | 62 | async def guest_shutdown(self, guest_id: str) -> bool | None: 63 | """Graceful shutdown a vmm guest.""" 64 | return await self._guest_action(guest_id, "shutdown") 65 | 66 | async def guest_restart(self, guest_id: str) -> bool | None: 67 | """Graceful restart a vmm guest.""" 68 | return await self._guest_action(guest_id, "reboot") 69 | -------------------------------------------------------------------------------- /src/synology_dsm/api/virtual_machine_manager/guest.py: -------------------------------------------------------------------------------- 1 | """VirtualMachineManager guest.""" 2 | 3 | from __future__ import annotations 4 | 5 | from typing import TypedDict, Union 6 | 7 | SynoVmmGuestData = TypedDict( 8 | "SynoVmmGuestData", 9 | { 10 | "autorun": int, 11 | "desc": str, 12 | "guest_id": str, 13 | "name": str, 14 | "ram_used": int, 15 | "status": str, 16 | "vcpu_num": int, 17 | "vcpu_usage": Union[str, int], # empty str when offline 18 | "vram_size": int, 19 | }, 20 | total=False, 21 | ) 22 | 23 | 24 | class SynoVmmGuest: 25 | """An representation of a Synology Virtual Machine Manager guest.""" 26 | 27 | def __init__(self, data: SynoVmmGuestData) -> None: 28 | """Initialize a Virtual Machine Manager guest.""" 29 | self._data: SynoVmmGuestData = data 30 | 31 | def update(self, data: SynoVmmGuestData) -> None: 32 | """Update the vmm guest.""" 33 | 
self._data = data 34 | 35 | @property 36 | def autorun(self) -> bool: 37 | """Return autorun of the vmm guest.""" 38 | return bool(self._data["autorun"]) 39 | 40 | @property 41 | def description(self) -> str: 42 | """Return description of the vmm guest.""" 43 | return self._data["desc"] 44 | 45 | @property 46 | def guest_id(self) -> str: 47 | """Return guest_id of the vmm guest.""" 48 | return self._data["guest_id"] 49 | 50 | @property 51 | def name(self) -> str: 52 | """Return name of the vmm guest.""" 53 | return self._data["name"] 54 | 55 | @property 56 | def status(self) -> str: 57 | """Return status of the vmm guest.""" 58 | return self._data["status"] 59 | 60 | @property 61 | def host_cpu_usage(self) -> int: 62 | """Return host cpu usage in one thousandth of the vmm guest.""" 63 | if isinstance(self._data["vcpu_usage"], str): 64 | return 0 65 | return self._data["vcpu_usage"] 66 | 67 | @property 68 | def host_ram_usage(self) -> int: 69 | """Return host ram usage in KiByte of the vmm guest.""" 70 | return self._data["ram_used"] 71 | 72 | @property 73 | def vcpu_num(self) -> int: 74 | """Return number of vcpu of the vmm guest.""" 75 | return self._data["vcpu_num"] 76 | 77 | @property 78 | def vram_size(self) -> int: 79 | """Return size of vram in KiByte of the vmm guest.""" 80 | return self._data["vram_size"] 81 | -------------------------------------------------------------------------------- /src/synology_dsm/const.py: -------------------------------------------------------------------------------- 1 | """Library constants.""" 2 | 3 | from typing import Final 4 | 5 | # APIs 6 | API_INFO: Final = "SYNO.API.Info" 7 | API_AUTH: Final = "SYNO.API.Auth" 8 | 9 | # Parameters to be masked on debug output 10 | SENSITIV_PARAMS: Final = ["account", "passwd", "_sid", "SynoToken", "device_id"] 11 | 12 | # SYNO.* 13 | ERROR_COMMON: Final = { 14 | 100: "Unknown error", 15 | 101: "No parameter API, method, or version", 16 | 102: "API does not exist", 17 | 103: "API method does not exist", 18 | 104: "API version not supported", 19 | 105: "Insufficient user privilege", 20 | 106: "Session timeout", 21 | 107: "Session interrupted by duplicate login", 22 | 114: "Missing required parameters", 23 | 117: "Unknown internal error", 24 | 120: "Invalid parameter", 25 | 160: "Insufficient application privilege", 26 | } 27 | 28 | # SYNO.API.Auth 29 | ERROR_AUTH: Final = { 30 | 400: "Invalid credentials", 31 | 401: "Guest or disabled account", 32 | 402: "Permission denied", 33 | 403: "One time password not specified", 34 | 404: "One time password authenticate failed", 35 | 405: "App portal incorrect", 36 | 406: "One time password code enforced", 37 | 407: "Max Tries (if auto blocking is set to true)", 38 | 408: "Password Expired Can not Change", 39 | 409: "Password Expired", 40 | 410: "Password must change (when first time use or after reset password by admin)", 41 | 411: "Account Locked (when account max try exceed)", 42 | } 43 | 44 | # SYNO.DownloadStation[2].BTSearch 45 | ERROR_DOWNLOAD_SEARCH: Final = { 46 | 400: "Unknown error", 47 | 401: "Invalid parameter", 48 | 402: "Parse the user setting failed", 49 | 403: "Get category failed", 50 | 404: "Get the search result from DB failed", 51 | 405: "Get the user setting failed", 52 | } 53 | # SYNO.DownloadStation[2].Task 54 | ERROR_DOWNLOAD_TASK: Final = { 55 | 400: "File upload failed", 56 | 401: "Max number of tasks reached", 57 | 402: "Destination denied", 58 | 403: "Destination does not exist", 59 | 404: "Invalid task id", 60 | 405: "Invalid task action", 61 | 
406: "No default destination", 62 | 407: "Set destination failed", 63 | 408: "File does not exist", 64 | } 65 | 66 | # SYNO.FileStation.* 67 | ERROR_FILE: Final = { 68 | 400: "Invalid parameter of file operation", 69 | 401: "Unknown error of file operation", 70 | 402: "System is too busy", 71 | 403: "Invalid user does this file operation", 72 | 404: "Invalid group does this file operation", 73 | 405: "Invalid user and group does this file operation", 74 | 406: "Can’t get user/group information from the account server Operation not permitted", 75 | 407: "Operation not permitted", 76 | 408: "No such file or directory", 77 | 409: "Non-supported file system", 78 | 410: "Failed to connect internet-based file system (ex: CIFS)", 79 | 411: "Read-only file system", 80 | 412: "Filename too long in the non-encrypted file system", 81 | 413: "Filename too long in the encrypted file system", 82 | 414: "File already exists", 83 | 415: "Disk quota exceeded", 84 | 416: "No space left on device", 85 | 417: "Input/output error", 86 | 418: "Illegal name or path", 87 | 419: "Illegal file name", 88 | 420: "Illegal file name on FAT file system", 89 | 421: "Device or resource busy", 90 | 599: "No such task of the file operation", 91 | 900: "Failed to delete file(s)/folder(s). More information in object", 92 | 1000: "Failed to copy files/folders. More information in object", 93 | 1001: "Failed to move files/folders. More information in object", 94 | 1002: "An error occurred at the destination. More information in object", 95 | 1003: "Cannot overwrite or skip the existing file because no overwrite parameter is given", 96 | 1004: "File cannot overwrite a folder with the same name, or folder cannot overwrite a file with the same name", # pylint: disable=line-too-long 97 | 1006: "Cannot copy/move file/folder with special characters to a FAT32 file system", 98 | 1007: "Cannot copy/move a file bigger than 4G to a FAT32 file system", 99 | 1100: "Failed to create a folder. More information in object", 100 | 1101: "The number of folders to the parent folder would exceed the system limitation", 101 | 1300: "Failed to compress files/folders", 102 | 1301: "Cannot create the archive because the given archive name is too long", 103 | 1400: "Failed to extract files", 104 | 1401: "Cannot open the file as archive", 105 | 1402: "Failed to read archive data error", 106 | 1403: "Wrong archive password", 107 | 1404: "Failed to get the file and dir list in an archive", 108 | 1405: "Failed to find the item ID in an archive file", 109 | 1200: "Failed to rename it. 
More information in object", 110 | 1800: "There is no Content-Length information in the HTTP header or the received size doesn’t match the value of Content-Length information in the HTTP header", # pylint: disable=line-too-long 111 | 1801: "Wait too long, no date can be received from client (Default maximum wait time is 3600 seconds)", # pylint: disable=line-too-long 112 | 1802: "No filename information in the last part of file content", 113 | 1803: "Upload connection is cancelled", 114 | 1804: "Failed to upload too big file to FAT file system", 115 | 1805: "Can’t overwrite or skip the existed file, if no overwrite parameter is given", 116 | 2000: "Sharing link does not exist", 117 | 2001: "Cannot generate sharing link because too many sharing links exist", 118 | 2002: "Failed to access sharing links", 119 | } 120 | 121 | # SYNO.SurveillanceStation.* 122 | ERROR_SURVEILLANCE: Final = { 123 | 400: "Execution failed", 124 | 401: "Invalid parameter", 125 | 402: "Camera disabled", 126 | 403: "Insufficient license", 127 | 404: "Codec activation failed", 128 | 405: "CMS server connection failed", 129 | 407: "CMS closed", 130 | 412: "Need to add license", 131 | 413: "Reach the maximum of platform", 132 | 414: "Some events not exist", 133 | 415: "Message connect failed", 134 | 417: "Test connection error", 135 | 418: "Object/VisualStation ID does not exist", 136 | 419: "VisualStation name repetition", 137 | 439: "Too many items selected", 138 | 446: "Task path already exist", 139 | 522: "Original task is migrating", 140 | 534: "Exceed name length limitation", 141 | } 142 | 143 | # SYNO.Virtualization.* 144 | ERROR_VIRTUALIZATION: Final = { 145 | 400: "Unknown error", 146 | 401: "Bad parameter", 147 | 402: "Operation failed", 148 | 403: "Name conflict", 149 | 404: "The number of iSCSI LUNs has reached the system limit", 150 | 500: "Note: vdisk is based on iSCSI LUN, which is also limited by the system", 151 | 501: "The cluster is frozen. More than half of the hosts are offline", 152 | 600: "The cluster is in the incompatible mode. Please upgrade to a compatible DSM version and try again", # pylint: disable=line-too-long 153 | 601: "The cluster is not ready", 154 | 700: "The host is offline", 155 | 900: "The storage is in invalid", 156 | 901: "Failed to set a host to a virtual machine", 157 | 902: "The virtual machine does not have a host", 158 | 903: "Can't shutdown the guest, it is not running", 159 | 904: "Can't power off the guest, it is not running", 160 | 905: "Can't restart the guest, it is not running", 161 | 906: "MAC conflict", 162 | 907: "Failed to create virtual machine because the selected image is not found", 163 | 908: "The status of virtual machine is offline", 164 | 909: "Failed to power on a virtual machine due to insufficient CPU threads for reservation on the host", # pylint: disable=line-too-long 165 | 910: "Failed to power on the virtual machine because there is no corresponding networking on the host", # pylint: disable=line-too-long 166 | 911: "Only the VirtIO hard disk controller can be used to boot the virtual machine remotely. Virtual machines with UEFI enabled cannot be powered on remotely", # pylint: disable=line-too-long 167 | 939: "Guest already running", 168 | 1000: "Cannot find task_id", 169 | 1001: "Need Virtual Machine Manager Pro", 170 | 1400: "The result of image creating is partial success", 171 | 1600: "The virtual machine has been successfully edited. 
However, errors occurred while reserving the memory or CPU on the HA hosts", # pylint: disable=line-too-long 172 | } 173 | -------------------------------------------------------------------------------- /src/synology_dsm/exceptions.py: -------------------------------------------------------------------------------- 1 | """Library exceptions.""" 2 | 3 | from __future__ import annotations 4 | 5 | from .const import ( 6 | API_AUTH, 7 | ERROR_AUTH, 8 | ERROR_COMMON, 9 | ERROR_DOWNLOAD_SEARCH, 10 | ERROR_DOWNLOAD_TASK, 11 | ERROR_FILE, 12 | ERROR_SURVEILLANCE, 13 | ERROR_VIRTUALIZATION, 14 | ) 15 | 16 | 17 | class SynologyDSMException(Exception): 18 | """Generic Synology DSM exception.""" 19 | 20 | def __init__(self, api: str | None, code: int, details: str | None = None) -> None: 21 | """Constructor method.""" 22 | reason = ERROR_COMMON.get(code) 23 | if api and not reason: 24 | if api == API_AUTH: 25 | reason = ERROR_AUTH.get(code) 26 | elif "SYNO.DownloadStation" in api: 27 | if "BTSearch" in api: 28 | reason = ERROR_DOWNLOAD_SEARCH.get(code) 29 | elif "Task" in api: 30 | reason = ERROR_DOWNLOAD_TASK.get(code) 31 | elif "SYNO.FileStation" in api: 32 | reason = ERROR_FILE.get(code) 33 | elif "SYNO.SurveillanceStation" in api: 34 | reason = ERROR_SURVEILLANCE.get(code) 35 | elif "SYNO.Virtualization" in api: 36 | reason = ERROR_VIRTUALIZATION.get(code) 37 | if not reason: 38 | reason = "Unknown" 39 | 40 | error_message = {"api": api, "code": code, "reason": reason, "details": details} 41 | super().__init__(error_message) 42 | 43 | 44 | class SynologyDSMNotLoggedInException(SynologyDSMException): 45 | """Not logged in exception.""" 46 | 47 | def __init__(self) -> None: 48 | """Constructor method.""" 49 | super().__init__(None, -1, "Not logged in. You have to do login() first.") 50 | 51 | 52 | # Request 53 | class SynologyDSMRequestException(SynologyDSMException): 54 | """Request exception.""" 55 | 56 | def __init__(self, exception: Exception) -> None: 57 | """Constructor method.""" 58 | ex_class = exception.__class__.__name__ 59 | if not exception.args: 60 | super().__init__(None, -1, ex_class) 61 | return 62 | ex_reason = exception.args[0] 63 | if hasattr(exception.args[0], "reason"): 64 | ex_reason = exception.args[0].reason 65 | super().__init__(None, -1, f"{ex_class} = {ex_reason}") 66 | 67 | 68 | # API 69 | class SynologyDSMAPINotExistsException(SynologyDSMException): 70 | """API not exists exception.""" 71 | 72 | def __init__(self, api: str) -> None: 73 | """Constructor method.""" 74 | super().__init__(api, -2, f"API {api} does not exists") 75 | 76 | 77 | class SynologyDSMAPIErrorException(SynologyDSMException): 78 | """API returns an error exception.""" 79 | 80 | def __init__(self, api: str, code: int, details: str) -> None: 81 | """Constructor method.""" 82 | super().__init__(api, code, details) 83 | 84 | 85 | # Login 86 | class SynologyDSMLoginFailedException(SynologyDSMException): 87 | """Failed to login exception.""" 88 | 89 | def __init__(self, code: int, details: str | None = None) -> None: 90 | """Constructor method.""" 91 | super().__init__(API_AUTH, code, details) 92 | 93 | 94 | class SynologyDSMLoginInvalidException(SynologyDSMLoginFailedException): 95 | """Invalid password & not admin account exception.""" 96 | 97 | def __init__(self, username: str) -> None: 98 | """Constructor method.""" 99 | message = f"Invalid password or not admin account: {username}" 100 | super().__init__(400, message) 101 | 102 | 103 | class 
SynologyDSMLoginDisabledAccountException(SynologyDSMLoginFailedException): 104 | """Guest & disabled account exception.""" 105 | 106 | def __init__(self, username: str) -> None: 107 | """Constructor method.""" 108 | message = f"Guest or disabled account: {username}" 109 | super().__init__(401, message) 110 | 111 | 112 | class SynologyDSMLoginPermissionDeniedException(SynologyDSMLoginFailedException): 113 | """No access to login exception.""" 114 | 115 | def __init__(self, username: str) -> None: 116 | """Constructor method.""" 117 | message = f"Permission denied for account: {username}" 118 | super().__init__(402, message) 119 | 120 | 121 | class SynologyDSMLogin2SARequiredException(SynologyDSMLoginFailedException): 122 | """2SA required to login exception.""" 123 | 124 | def __init__(self, username: str) -> None: 125 | """Constructor method.""" 126 | message = f"Two-step authentication required for account: {username}" 127 | super().__init__(403, message) 128 | 129 | 130 | class SynologyDSMLogin2SAFailedException(SynologyDSMLoginFailedException): 131 | """2SA code failed exception.""" 132 | 133 | def __init__(self) -> None: 134 | """Constructor method.""" 135 | message = "Two-step authentication failed, retry with a new pass code" 136 | super().__init__(404, message) 137 | 138 | 139 | class SynologyDSMLogin2SAForcedException(SynologyDSMLoginFailedException): 140 | """2SA force to setup exception.""" 141 | 142 | def __init__(self, username: str) -> None: 143 | """Constructor method.""" 144 | message = ( 145 | f"Two-step authentication forced to be setuped for account: {username}" 146 | ) 147 | super().__init__(406, message) 148 | -------------------------------------------------------------------------------- /src/synology_dsm/helpers.py: -------------------------------------------------------------------------------- 1 | """Helpers.""" 2 | 3 | from __future__ import annotations 4 | 5 | 6 | class SynoFormatHelper: 7 | """Class containing various formatting functions.""" 8 | 9 | @staticmethod 10 | def bytes_to_readable(num: int) -> str: 11 | """Converts bytes to a human readable format.""" 12 | if num < 512: 13 | return "0 Kb" 14 | if num < 1024: 15 | return "1 Kb" 16 | 17 | work_num = float(num) 18 | for unit in ["", "Kb", "Mb", "Gb", "Tb", "Pb", "Eb", "Zb"]: 19 | if abs(work_num) < 1024.0: 20 | return f"{round(work_num, 1)}{unit}" 21 | work_num /= 1024.0 22 | return f"{round(work_num, 1)}Yb" 23 | 24 | @staticmethod 25 | def bytes_to_megabytes(num: int) -> float: 26 | """Converts bytes to megabytes.""" 27 | var_mb = num / 1024.0 / 1024.0 28 | 29 | return round(var_mb, 1) 30 | 31 | @staticmethod 32 | def bytes_to_gigabytes(num: int) -> float: 33 | """Converts bytes to gigabytes.""" 34 | var_gb = num / 1024.0 / 1024.0 / 1024.0 35 | 36 | return round(var_gb, 1) 37 | 38 | @staticmethod 39 | def bytes_to_terrabytes(num: int) -> float: 40 | """Converts bytes to terrabytes.""" 41 | var_tb = num / 1024.0 / 1024.0 / 1024.0 / 1024.0 42 | 43 | return round(var_tb, 1) 44 | 45 | @staticmethod 46 | def megabytes_to_bytes(num: int | float) -> int: 47 | """Converts megabytes to bytes.""" 48 | var_bytes = num * 1024.0 * 1024.0 49 | 50 | return int(round(var_bytes, 0)) 51 | -------------------------------------------------------------------------------- /src/synology_dsm/py.typed: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mib1185/py-synologydsm-api/4b7df11340ff9e40dc426f424ed443e9f8602a88/src/synology_dsm/py.typed 
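Editor's note (illustrative sketch, not a file from this repository): the exception classes above resolve a raw SYNO.* error code into a readable reason by consulting the error tables from const.py, checking ERROR_COMMON first and then the table matching the API namespace. The example below assumes code 402 is not also present in ERROR_COMMON, so the lookup falls through to ERROR_SURVEILLANCE ("Camera disabled"); the API name and code are taken from the tables shown earlier purely for illustration.

from synology_dsm.exceptions import SynologyDSMAPIErrorException

try:
    # Simulate a Surveillance Station call that came back with error code 402.
    raise SynologyDSMAPIErrorException("SYNO.SurveillanceStation.Camera", 402, None)
except SynologyDSMAPIErrorException as err:
    # The base constructor packs api / code / resolved reason / details into the args.
    print(err.args[0])
    # {'api': 'SYNO.SurveillanceStation.Camera', 'code': 402,
    #  'reason': 'Camera disabled', 'details': None}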
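Editor's note (illustrative only): the SynoFormatHelper conversions above are plain base-1024 arithmetic rounded to one decimal place. The byte count below is an arbitrary example value, not data from this repository.

from synology_dsm.helpers import SynoFormatHelper

raw_bytes = 2_644_859  # arbitrary example size in bytes

print(SynoFormatHelper.bytes_to_readable(raw_bytes))   # '2.5Mb'
print(SynoFormatHelper.bytes_to_megabytes(raw_bytes))  # 2.5 (megabytes, one decimal)
print(SynoFormatHelper.bytes_to_gigabytes(raw_bytes))  # 0.0
print(SynoFormatHelper.megabytes_to_bytes(1.5))        # 1572864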
-------------------------------------------------------------------------------- /tests/__snapshots__/test_synology_dsm_7.ambr: -------------------------------------------------------------------------------- 1 | # serializer version: 1 2 | # name: TestSynologyDSM7.test_external_usb 3 | dict({ 4 | 'sdq': SynoCoreExternalUSBDevice( 5 | device_formatable=True, 6 | device_id='sdq', 7 | device_manufacturer='Genesys Logic, Inc.', 8 | device_name='SD Card 1', 9 | device_partitions=dict({ 10 | 'sdq1': SynoUSBStoragePartition( 11 | filesystem='FAT32', 12 | fstype='vfat', 13 | is_mounted=True, 14 | is_supported=True, 15 | name_id='sdq1', 16 | partition_percentage_used=4.0, 17 | partition_title='SD Card 1 Partition 1', 18 | share_name='SDKarte', 19 | status='normal', 20 | ), 21 | }), 22 | device_product_name=None, 23 | device_progress='', 24 | device_status='normal', 25 | device_type='sdCard', 26 | partitions_all_percentage_used=4.0, 27 | ), 28 | 'sds': SynoCoreExternalUSBDevice( 29 | device_formatable=True, 30 | device_id='sds', 31 | device_manufacturer='Iomega Corp.', 32 | device_name='USB Disk 1', 33 | device_partitions=dict({ 34 | 'sds1': SynoUSBStoragePartition( 35 | filesystem='ntfs', 36 | fstype='ntfs', 37 | is_mounted=True, 38 | is_supported=True, 39 | name_id='sds1', 40 | partition_percentage_used=90.4, 41 | partition_title='USB Disk 1 Partition 1', 42 | share_name='usbshare1', 43 | status='usbbackup', 44 | ), 45 | }), 46 | device_product_name='Prestige Portable Hard Drive', 47 | device_progress='', 48 | device_status='usbbackup', 49 | device_type='usbDisk', 50 | partitions_all_percentage_used=90.4, 51 | ), 52 | 'usb1': SynoCoreExternalUSBDevice( 53 | device_formatable=True, 54 | device_id='usb1', 55 | device_manufacturer='PNY', 56 | device_name='USB Disk 1', 57 | device_partitions=dict({ 58 | 'usb1p1': SynoUSBStoragePartition( 59 | filesystem='ntfs', 60 | fstype='ntfs', 61 | is_mounted=True, 62 | is_supported=True, 63 | name_id='usb1p1', 64 | partition_percentage_used=1.0, 65 | partition_title='USB Disk 1 Partition 1', 66 | share_name='usbshare1-1', 67 | status='normal', 68 | ), 69 | 'usb1p2': SynoUSBStoragePartition( 70 | filesystem='FAT32', 71 | fstype='vfat', 72 | is_mounted=True, 73 | is_supported=True, 74 | name_id='usb1p2', 75 | partition_percentage_used=0.0, 76 | partition_title='USB Disk 1 Partition 2', 77 | share_name='usbshare1-2', 78 | status='normal', 79 | ), 80 | }), 81 | device_product_name='Flash Drive', 82 | device_progress='', 83 | device_status='normal', 84 | device_type='usbDisk', 85 | partitions_all_percentage_used=0.5, 86 | ), 87 | 'usb2': SynoCoreExternalUSBDevice( 88 | device_formatable=True, 89 | device_id='usb2', 90 | device_manufacturer='Western Digital Technologies, Inc.', 91 | device_name='USB Disk 2', 92 | device_partitions=dict({ 93 | 'usb2p1': SynoUSBStoragePartition( 94 | filesystem='ext4', 95 | fstype='ext4', 96 | is_mounted=True, 97 | is_supported=True, 98 | name_id='usb2p1', 99 | partition_percentage_used=72.0, 100 | partition_title='USB Disk 2 Partition 1', 101 | share_name='usbshare2', 102 | status='normal', 103 | ), 104 | }), 105 | device_product_name='easystore 264D', 106 | device_progress='', 107 | device_status='normal', 108 | device_type='usbDisk', 109 | partitions_all_percentage_used=72.0, 110 | ), 111 | 'usb3': SynoCoreExternalUSBDevice( 112 | device_formatable=True, 113 | device_id='usb3', 114 | device_manufacturer='SanDisk Corp.', 115 | device_name='USB Disk 3', 116 | device_partitions=dict({ 117 | 'usb3p1': SynoUSBStoragePartition( 118 | 
filesystem='FAT32', 119 | fstype='vfat', 120 | is_mounted=True, 121 | is_supported=True, 122 | name_id='usb3p1', 123 | partition_percentage_used=5.1, 124 | partition_title='USB Disk 3 Partition 1', 125 | share_name='usbshare3-1', 126 | status='normal', 127 | ), 128 | 'usb3p2': SynoUSBStoragePartition( 129 | filesystem='hfsplus', 130 | fstype='hfsplus', 131 | is_mounted=True, 132 | is_supported=True, 133 | name_id='usb3p2', 134 | partition_percentage_used=11.9, 135 | partition_title='USB Disk 3 Partition 2', 136 | share_name='usbshare3-2', 137 | status='normal', 138 | ), 139 | }), 140 | device_product_name=' SanDisk 3.2Gen1', 141 | device_progress='', 142 | device_status='normal', 143 | device_type='usbDisk', 144 | partitions_all_percentage_used=11.9, 145 | ), 146 | 'usb4': SynoCoreExternalUSBDevice( 147 | device_formatable=True, 148 | device_id='usb4', 149 | device_manufacturer='PNY', 150 | device_name='USB Disk 4', 151 | device_partitions=dict({ 152 | 'usb4p1': SynoUSBStoragePartition( 153 | filesystem='exfat', 154 | fstype='exfat', 155 | is_mounted=True, 156 | is_supported=True, 157 | name_id='usb4p1', 158 | partition_percentage_used=0.0, 159 | partition_title='USB Disk 4 Partition 1', 160 | share_name='usbshare4', 161 | status='normal', 162 | ), 163 | }), 164 | device_product_name='USB 3.0 FD', 165 | device_progress='', 166 | device_status='normal', 167 | device_type='usbDisk', 168 | partitions_all_percentage_used=0.0, 169 | ), 170 | 'usb5': SynoCoreExternalUSBDevice( 171 | device_formatable=True, 172 | device_id='usb5', 173 | device_manufacturer='SanDisk Corp.', 174 | device_name='USB Disk 5', 175 | device_partitions=dict({ 176 | 'usb5p1': SynoUSBStoragePartition( 177 | filesystem='FAT32', 178 | fstype='vfat', 179 | is_mounted=True, 180 | is_supported=True, 181 | name_id='usb5p1', 182 | partition_percentage_used=0.0, 183 | partition_title='USB Disk 5 Partition 1', 184 | share_name='usbshare5-1', 185 | status='normal', 186 | ), 187 | 'usb5p2': SynoUSBStoragePartition( 188 | filesystem='FAT32', 189 | fstype='vfat', 190 | is_mounted=True, 191 | is_supported=True, 192 | name_id='usb5p2', 193 | partition_percentage_used=84.4, 194 | partition_title='USB Disk 5 Partition 2', 195 | share_name='usbshare5-2', 196 | status='normal', 197 | ), 198 | }), 199 | device_product_name='Cruzer Titanium+', 200 | device_progress='', 201 | device_status='normal', 202 | device_type='usbDisk', 203 | partitions_all_percentage_used=0.7, 204 | ), 205 | 'usb6': SynoCoreExternalUSBDevice( 206 | device_formatable=True, 207 | device_id='usb6', 208 | device_manufacturer='Western Digital Technologies, Inc.', 209 | device_name='USB Disk 6', 210 | device_partitions=dict({ 211 | 'usb6p1': SynoUSBStoragePartition( 212 | filesystem='ntfs', 213 | fstype='ntfs', 214 | is_mounted=True, 215 | is_supported=True, 216 | name_id='usb6p1', 217 | partition_percentage_used=28.5, 218 | partition_title='USB Disk 6 Partition 1', 219 | share_name='usbshare6', 220 | status='normal', 221 | ), 222 | }), 223 | device_product_name='My Passport Essential (WDBACY)', 224 | device_progress='', 225 | device_status='normal', 226 | device_type='usbDisk', 227 | partitions_all_percentage_used=28.5, 228 | ), 229 | 'usb8': SynoCoreExternalUSBDevice( 230 | device_formatable=True, 231 | device_id='usb8', 232 | device_manufacturer='Genesys Logic, Inc.', 233 | device_name='USB Disk 8', 234 | device_partitions=dict({ 235 | 'usb8': SynoUSBStoragePartition( 236 | filesystem='', 237 | fstype='unknown', 238 | is_mounted=False, 239 | is_supported=False, 240 | 
name_id='usb8', 241 | partition_percentage_used=None, 242 | partition_title='USB Disk 8', 243 | share_name='', 244 | status='normal', 245 | ), 246 | }), 247 | device_product_name='All-in-One Cardreader', 248 | device_progress='', 249 | device_status='normal', 250 | device_type='usbDisk', 251 | partitions_all_percentage_used=None, 252 | ), 253 | }) 254 | # --- 255 | -------------------------------------------------------------------------------- /tests/api_data/__init__.py: -------------------------------------------------------------------------------- 1 | """APIs raw data constants.""" 2 | -------------------------------------------------------------------------------- /tests/api_data/dsm_5/__init__.py: -------------------------------------------------------------------------------- 1 | """DSM 5 datas.""" 2 | 3 | from .const_5_api_auth import ( 4 | DSM_5_AUTH_LOGIN, 5 | DSM_5_AUTH_LOGIN_2SA, 6 | DSM_5_AUTH_LOGIN_2SA_OTP, 7 | ) 8 | from .const_5_api_info import DSM_5_API_INFO 9 | from .core.const_5_core_utilization import DSM_5_CORE_UTILIZATION 10 | from .dsm.const_5_dsm_info import DSM_5_DSM_INFORMATION 11 | from .dsm.const_5_dsm_network import DSM_5_DSM_NETWORK 12 | from .storage.const_5_storage_storage import ( 13 | DSM_5_STORAGE_STORAGE_DS410J_RAID5_4DISKS_1VOL, 14 | ) 15 | 16 | __all__ = [ 17 | "DSM_5_AUTH_LOGIN", 18 | "DSM_5_AUTH_LOGIN_2SA", 19 | "DSM_5_AUTH_LOGIN_2SA_OTP", 20 | "DSM_5_API_INFO", 21 | "DSM_5_CORE_UTILIZATION", 22 | "DSM_5_DSM_INFORMATION", 23 | "DSM_5_DSM_NETWORK", 24 | "DSM_5_STORAGE_STORAGE_DS410J_RAID5_4DISKS_1VOL", 25 | ] 26 | -------------------------------------------------------------------------------- /tests/api_data/dsm_5/const_5_api_auth.py: -------------------------------------------------------------------------------- 1 | """DSM 5 SYNO.API.Auth data.""" 2 | 3 | from tests.const import DEVICE_TOKEN, ERROR_AUTH_OTP_NOT_SPECIFIED, SESSION_ID 4 | 5 | # No synotoken for an unknown reason 6 | DSM_5_AUTH_LOGIN = { 7 | "data": {"is_portal_port": False, "sid": SESSION_ID}, 8 | "success": True, 9 | } 10 | DSM_5_AUTH_LOGIN_2SA = ERROR_AUTH_OTP_NOT_SPECIFIED 11 | DSM_5_AUTH_LOGIN_2SA_OTP = { 12 | "data": {"did": DEVICE_TOKEN, "is_portal_port": False, "sid": SESSION_ID}, 13 | "success": True, 14 | } 15 | -------------------------------------------------------------------------------- /tests/api_data/dsm_5/core/__init__.py: -------------------------------------------------------------------------------- 1 | """DSM 5 SYNO.Core.* datas.""" 2 | -------------------------------------------------------------------------------- /tests/api_data/dsm_5/core/const_5_core_utilization.py: -------------------------------------------------------------------------------- 1 | """DSM 5 SYNO.Core.System.Utilization data.""" 2 | 3 | DSM_5_CORE_UTILIZATION = { 4 | "data": { 5 | "cpu": { 6 | "15min_load": 53, 7 | "1min_load": 57, 8 | "5min_load": 56, 9 | "device": "System", 10 | "other_load": 63, 11 | "system_load": 10, 12 | "user_load": 27, 13 | }, 14 | "disk": { 15 | "disk": [ 16 | { 17 | "device": "sda", 18 | "display_name": "Disk 1", 19 | "read_access": 21, 20 | "read_byte": 645529, 21 | "type": "internal", 22 | "utilization": 46, 23 | "write_access": 4, 24 | "write_byte": 86220, 25 | }, 26 | { 27 | "device": "sdb", 28 | "display_name": "Disk 2", 29 | "read_access": 23, 30 | "read_byte": 711338, 31 | "type": "internal", 32 | "utilization": 33, 33 | "write_access": 4, 34 | "write_byte": 95641, 35 | }, 36 | { 37 | "device": "sdc", 38 | "display_name": "Disk 3", 39 | "read_access": 21, 
40 | "read_byte": 786841, 41 | "type": "internal", 42 | "utilization": 31, 43 | "write_access": 5, 44 | "write_byte": 99874, 45 | }, 46 | { 47 | "device": "sdd", 48 | "display_name": "Disk 4", 49 | "read_access": 21, 50 | "read_byte": 729907, 51 | "type": "internal", 52 | "utilization": 32, 53 | "write_access": 4, 54 | "write_byte": 76663, 55 | }, 56 | { 57 | "device": "sdq", 58 | "display_name": "USB Disk 1", 59 | "read_access": 0, 60 | "read_byte": 0, 61 | "type": "usb", 62 | "utilization": 0, 63 | "write_access": 0, 64 | "write_byte": 0, 65 | }, 66 | ], 67 | "total": { 68 | "device": "total", 69 | "read_access": 86, 70 | "read_byte": 2873615, 71 | "utilization": 28, 72 | "write_access": 17, 73 | "write_byte": 358398, 74 | }, 75 | }, 76 | "memory": { 77 | "avail_real": 8188, 78 | "avail_swap": 1933436, 79 | "buffer": 3700, 80 | "cached": 25636, 81 | "device": "Memory", 82 | "memory_size": 131072, 83 | "real_usage": 68, 84 | "si_disk": 5, 85 | "so_disk": 3, 86 | "swap_usage": 7, 87 | "total_real": 118464, 88 | "total_swap": 2097080, 89 | }, 90 | "network": [ 91 | {"device": "total", "rx": 1680, "tx": 553}, 92 | {"device": "eth0", "rx": 1680, "tx": 553}, 93 | ], 94 | "space": { 95 | "lun": [], 96 | "total": { 97 | "device": "total", 98 | "read_access": 261, 99 | "read_byte": 1069875, 100 | "utilization": 100, 101 | "write_access": 51, 102 | "write_byte": 208896, 103 | }, 104 | "volume": [ 105 | { 106 | "device": "md2", 107 | "display_name": "volume1", 108 | "read_access": 261, 109 | "read_byte": 1069875, 110 | "utilization": 100, 111 | "write_access": 51, 112 | "write_byte": 208896, 113 | } 114 | ], 115 | }, 116 | "time": 1586592505, 117 | }, 118 | "success": True, 119 | } 120 | -------------------------------------------------------------------------------- /tests/api_data/dsm_5/dsm/__init__.py: -------------------------------------------------------------------------------- 1 | """DSM 5 SYNO.DSM.* datas.""" 2 | -------------------------------------------------------------------------------- /tests/api_data/dsm_5/dsm/const_5_dsm_info.py: -------------------------------------------------------------------------------- 1 | """DSM 5 SYNO.DSM.Info data.""" 2 | 3 | DSM_5_DSM_INFORMATION_DS410J = { 4 | "data": { 5 | "codepage": "enu", 6 | "model": "DS410j", 7 | "ram": 128, 8 | "serial": "A3G7N00628", 9 | "temperature": 52, 10 | "temperature_warn": False, 11 | "time": "Mon Apr 13 18:26:27 2020", 12 | "uptime": 7077254, 13 | "version": "5967", 14 | "version_string": "DSM 5.2-5967 Update 9", 15 | }, 16 | "success": True, 17 | } 18 | 19 | DSM_5_DSM_INFORMATION_DS3615XS = { 20 | "data": { 21 | "codepage": "rus", 22 | "model": "DS3615xs", 23 | "ram": 6144, 24 | "serial": "B3J4N01003", 25 | "temperature": 40, 26 | "time": "Sat Apr 25 20:21:57 2020", 27 | "uptime": 3897, 28 | "version": "5967", 29 | "version_string": "DSM 5.2-5967 Update 9", 30 | }, 31 | "success": True, 32 | } 33 | 34 | DSM_5_DSM_INFORMATION = DSM_5_DSM_INFORMATION_DS3615XS 35 | -------------------------------------------------------------------------------- /tests/api_data/dsm_5/dsm/const_5_dsm_network.py: -------------------------------------------------------------------------------- 1 | """DSM 5 SYNO.DSM.Network data.""" 2 | 3 | DSM_5_DSM_NETWORK = { 4 | "data": { 5 | "dns": ["192.168.1.1"], 6 | "gateway": "192.168.1.1", 7 | "hostname": "HOME-NAS", 8 | "interfaces": [ 9 | { 10 | "id": "eth0", 11 | "ip": [{"address": "192.168.1.10", "netmask": "255.255.255.0"}], 12 | "mac": "XX-XX-XX-XX-XX-XX", 13 | "type": "lan", 14 | } 15 | ], 16 
| "workgroup": "WORKGROUP", 17 | }, 18 | "success": True, 19 | } 20 | -------------------------------------------------------------------------------- /tests/api_data/dsm_5/storage/__init__.py: -------------------------------------------------------------------------------- 1 | """DSM 5 SYNO.Storage.* datas.""" 2 | -------------------------------------------------------------------------------- /tests/api_data/dsm_6/__init__.py: -------------------------------------------------------------------------------- 1 | """DSM 6 datas.""" 2 | 3 | from .const_6_api_auth import ( 4 | DSM_6_AUTH_LOGIN, 5 | DSM_6_AUTH_LOGIN_2SA, 6 | DSM_6_AUTH_LOGIN_2SA_OTP, 7 | ) 8 | from .const_6_api_info import DSM_6_API_INFO 9 | from .core.const_6_core_security import ( 10 | DSM_6_CORE_SECURITY, 11 | DSM_6_CORE_SECURITY_UPDATE_OUTOFDATE, 12 | ) 13 | from .core.const_6_core_share import DSM_6_CORE_SHARE 14 | from .core.const_6_core_system import ( 15 | DSM_6_CORE_SYSTEM_DS218_PLAY, 16 | DSM_6_CORE_SYSTEM_DS918_PLUS, 17 | ) 18 | from .core.const_6_core_upgrade import DSM_6_CORE_UPGRADE_FALSE, DSM_6_CORE_UPGRADE_TRUE 19 | from .core.const_6_core_utilization import ( 20 | DSM_6_CORE_UTILIZATION, 21 | DSM_6_CORE_UTILIZATION_ERROR_1055, 22 | ) 23 | from .download_station.const_6_download_station_info import ( 24 | DSM_6_DOWNLOAD_STATION_INFO_CONFIG, 25 | DSM_6_DOWNLOAD_STATION_INFO_INFO, 26 | ) 27 | from .download_station.const_6_download_station_stat import ( 28 | DSM_6_DOWNLOAD_STATION_STAT_INFO, 29 | ) 30 | from .download_station.const_6_download_station_task import ( 31 | DSM_6_DOWNLOAD_STATION_TASK_LIST, 32 | ) 33 | from .dsm.const_6_dsm_info import DSM_6_DSM_INFORMATION 34 | from .dsm.const_6_dsm_network import DSM_6_DSM_NETWORK_2LAN_1PPPOE 35 | from .storage.const_6_storage_storage import ( 36 | DSM_6_STORAGE_STORAGE_DS213_PLUS_SHR1_2DISKS_2VOLS, 37 | DSM_6_STORAGE_STORAGE_DS918_PLUS_RAID5_3DISKS_1VOL, 38 | DSM_6_STORAGE_STORAGE_DS1515_PLUS_SHR2_10DISKS_1VOL_WITH_EXPANSION, 39 | DSM_6_STORAGE_STORAGE_DS1819_PLUS_SHR2_8DISKS_1VOL, 40 | ) 41 | from .surveillance_station.const_6_api_info import ( 42 | DSM_6_API_INFO as DSM_6_API_INFO_SURVEILLANCE_STATION, 43 | ) 44 | from .surveillance_station.const_6_surveillance_station_camera import ( 45 | DSM_6_SURVEILLANCE_STATION_CAMERA_EVENT_MD_PARAM_SAVE, 46 | DSM_6_SURVEILLANCE_STATION_CAMERA_EVENT_MOTION_ENUM, 47 | DSM_6_SURVEILLANCE_STATION_CAMERA_GET_LIVE_VIEW_PATH, 48 | DSM_6_SURVEILLANCE_STATION_CAMERA_LIST, 49 | ) 50 | from .surveillance_station.const_6_surveillance_station_home_mode import ( 51 | DSM_6_SURVEILLANCE_STATION_HOME_MODE_GET_INFO, 52 | DSM_6_SURVEILLANCE_STATION_HOME_MODE_SWITCH, 53 | ) 54 | 55 | __all__ = [ 56 | "DSM_6_AUTH_LOGIN", 57 | "DSM_6_AUTH_LOGIN_2SA", 58 | "DSM_6_AUTH_LOGIN_2SA_OTP", 59 | "DSM_6_API_INFO", 60 | "DSM_6_CORE_SECURITY", 61 | "DSM_6_CORE_SECURITY_UPDATE_OUTOFDATE", 62 | "DSM_6_CORE_SHARE", 63 | "DSM_6_CORE_SYSTEM_DS218_PLAY", 64 | "DSM_6_CORE_SYSTEM_DS918_PLUS", 65 | "DSM_6_CORE_UPGRADE_FALSE", 66 | "DSM_6_CORE_UPGRADE_TRUE", 67 | "DSM_6_CORE_UTILIZATION", 68 | "DSM_6_CORE_UTILIZATION_ERROR_1055", 69 | "DSM_6_DOWNLOAD_STATION_INFO_CONFIG", 70 | "DSM_6_DOWNLOAD_STATION_INFO_INFO", 71 | "DSM_6_DOWNLOAD_STATION_STAT_INFO", 72 | "DSM_6_DOWNLOAD_STATION_TASK_LIST", 73 | "DSM_6_DSM_INFORMATION", 74 | "DSM_6_DSM_NETWORK_2LAN_1PPPOE", 75 | "DSM_6_STORAGE_STORAGE_DS1515_PLUS_SHR2_10DISKS_1VOL_WITH_EXPANSION", 76 | "DSM_6_STORAGE_STORAGE_DS1819_PLUS_SHR2_8DISKS_1VOL", 77 | "DSM_6_STORAGE_STORAGE_DS213_PLUS_SHR1_2DISKS_2VOLS", 78 | 
"DSM_6_STORAGE_STORAGE_DS918_PLUS_RAID5_3DISKS_1VOL", 79 | "DSM_6_API_INFO_SURVEILLANCE_STATION", 80 | "DSM_6_SURVEILLANCE_STATION_CAMERA_EVENT_MD_PARAM_SAVE", 81 | "DSM_6_SURVEILLANCE_STATION_CAMERA_EVENT_MOTION_ENUM", 82 | "DSM_6_SURVEILLANCE_STATION_CAMERA_GET_LIVE_VIEW_PATH", 83 | "DSM_6_SURVEILLANCE_STATION_CAMERA_LIST", 84 | "DSM_6_SURVEILLANCE_STATION_HOME_MODE_GET_INFO", 85 | "DSM_6_SURVEILLANCE_STATION_HOME_MODE_SWITCH", 86 | ] 87 | -------------------------------------------------------------------------------- /tests/api_data/dsm_6/const_6_api_auth.py: -------------------------------------------------------------------------------- 1 | """DSM 6 SYNO.API.Auth data.""" 2 | 3 | from tests.const import ( 4 | DEVICE_TOKEN, 5 | ERROR_AUTH_OTP_NOT_SPECIFIED, 6 | SESSION_ID, 7 | SYNO_TOKEN, 8 | ) 9 | 10 | DSM_6_AUTH_LOGIN = { 11 | "data": {"is_portal_port": False, "sid": SESSION_ID, "synotoken": SYNO_TOKEN}, 12 | "success": True, 13 | } 14 | DSM_6_AUTH_LOGIN_2SA = ERROR_AUTH_OTP_NOT_SPECIFIED 15 | DSM_6_AUTH_LOGIN_2SA_OTP = { 16 | "data": { 17 | "did": DEVICE_TOKEN, 18 | "is_portal_port": False, 19 | "sid": SESSION_ID, 20 | "synotoken": SYNO_TOKEN, 21 | }, 22 | "success": True, 23 | } 24 | 25 | DSM_6_AUTH_LOGOUT = {"success": True} 26 | -------------------------------------------------------------------------------- /tests/api_data/dsm_6/core/__init__.py: -------------------------------------------------------------------------------- 1 | """DSM 6 SYNO.Core.* datas.""" 2 | -------------------------------------------------------------------------------- /tests/api_data/dsm_6/core/const_6_core_security.py: -------------------------------------------------------------------------------- 1 | """DSM 6 SYNO.Core.SecurityScan.Status data.""" 2 | 3 | DSM_6_CORE_SECURITY = { 4 | "data": { 5 | "items": { 6 | "malware": { 7 | "category": "malware", 8 | "fail": { 9 | "danger": 0, 10 | "info": 0, 11 | "outOfDate": 0, 12 | "risk": 0, 13 | "warning": 0, 14 | }, 15 | "failSeverity": "safe", 16 | "progress": 100, 17 | "runningItem": "", 18 | "total": 3, 19 | "waitNum": 0, 20 | }, 21 | "network": { 22 | "category": "network", 23 | "fail": { 24 | "danger": 0, 25 | "info": 0, 26 | "outOfDate": 0, 27 | "risk": 0, 28 | "warning": 0, 29 | }, 30 | "failSeverity": "safe", 31 | "progress": 100, 32 | "runningItem": "", 33 | "total": 4, 34 | "waitNum": 0, 35 | }, 36 | "securitySetting": { 37 | "category": "securitySetting", 38 | "fail": { 39 | "danger": 0, 40 | "info": 0, 41 | "outOfDate": 0, 42 | "risk": 0, 43 | "warning": 0, 44 | }, 45 | "failSeverity": "safe", 46 | "progress": 100, 47 | "runningItem": "", 48 | "total": 0, 49 | "waitNum": 0, 50 | }, 51 | "systemCheck": { 52 | "category": "systemCheck", 53 | "fail": { 54 | "danger": 0, 55 | "info": 0, 56 | "outOfDate": 0, 57 | "risk": 0, 58 | "warning": 0, 59 | }, 60 | "failSeverity": "safe", 61 | "progress": 100, 62 | "runningItem": "", 63 | "total": 6, 64 | "waitNum": 0, 65 | }, 66 | "update": { 67 | "category": "update", 68 | "fail": { 69 | "danger": 0, 70 | "info": 0, 71 | "outOfDate": 0, 72 | "risk": 0, 73 | "warning": 0, 74 | }, 75 | "failSeverity": "safe", 76 | "progress": 100, 77 | "runningItem": "", 78 | "total": 4, 79 | "waitNum": 0, 80 | }, 81 | "userInfo": { 82 | "category": "userInfo", 83 | "fail": { 84 | "danger": 0, 85 | "info": 0, 86 | "outOfDate": 0, 87 | "risk": 0, 88 | "warning": 0, 89 | }, 90 | "failSeverity": "safe", 91 | "progress": 100, 92 | "runningItem": "", 93 | "total": 6, 94 | "waitNum": 0, 95 | }, 96 | }, 97 | "lastScanTime": 
"1588298442", 98 | "startTime": "", 99 | "success": True, 100 | "sysProgress": 100, 101 | "sysStatus": "safe", 102 | }, 103 | "success": True, 104 | } 105 | 106 | DSM_6_CORE_SECURITY_UPDATE_OUTOFDATE = { 107 | "data": { 108 | "items": { 109 | "malware": { 110 | "category": "malware", 111 | "fail": { 112 | "danger": 0, 113 | "info": 0, 114 | "outOfDate": 0, 115 | "risk": 0, 116 | "warning": 0, 117 | }, 118 | "failSeverity": "safe", 119 | "progress": 100, 120 | "runningItem": "", 121 | "total": 3, 122 | "waitNum": 0, 123 | }, 124 | "network": { 125 | "category": "network", 126 | "fail": { 127 | "danger": 0, 128 | "info": 0, 129 | "outOfDate": 0, 130 | "risk": 0, 131 | "warning": 0, 132 | }, 133 | "failSeverity": "safe", 134 | "progress": 100, 135 | "runningItem": "", 136 | "total": 4, 137 | "waitNum": 0, 138 | }, 139 | "securitySetting": { 140 | "category": "securitySetting", 141 | "fail": { 142 | "danger": 0, 143 | "info": 0, 144 | "outOfDate": 0, 145 | "risk": 0, 146 | "warning": 0, 147 | }, 148 | "failSeverity": "safe", 149 | "progress": 100, 150 | "runningItem": "", 151 | "total": 0, 152 | "waitNum": 0, 153 | }, 154 | "systemCheck": { 155 | "category": "systemCheck", 156 | "fail": { 157 | "danger": 0, 158 | "info": 0, 159 | "outOfDate": 0, 160 | "risk": 0, 161 | "warning": 0, 162 | }, 163 | "failSeverity": "safe", 164 | "progress": 100, 165 | "runningItem": "", 166 | "total": 6, 167 | "waitNum": 0, 168 | }, 169 | "update": { 170 | "category": "update", 171 | "fail": { 172 | "danger": 0, 173 | "info": 0, 174 | "outOfDate": 1, 175 | "risk": 0, 176 | "warning": 0, 177 | }, 178 | "failSeverity": "outOfDate", 179 | "progress": 100, 180 | "runningItem": "", 181 | "total": 4, 182 | "waitNum": 0, 183 | }, 184 | "userInfo": { 185 | "category": "userInfo", 186 | "fail": { 187 | "danger": 0, 188 | "info": 0, 189 | "outOfDate": 0, 190 | "risk": 0, 191 | "warning": 0, 192 | }, 193 | "failSeverity": "safe", 194 | "progress": 100, 195 | "runningItem": "", 196 | "total": 6, 197 | "waitNum": 0, 198 | }, 199 | }, 200 | "lastScanTime": "1590717640", 201 | "startTime": "", 202 | "success": True, 203 | "sysProgress": 100, 204 | "sysStatus": "outOfDate", 205 | }, 206 | "success": True, 207 | } 208 | -------------------------------------------------------------------------------- /tests/api_data/dsm_6/core/const_6_core_share.py: -------------------------------------------------------------------------------- 1 | """DSM 6 SYNO.Core.Share data.""" 2 | 3 | DSM_6_CORE_SHARE = { 4 | "data": { 5 | "shares": [ 6 | { 7 | "desc": "Docker Containers", 8 | "enable_recycle_bin": False, 9 | "enable_share_compress": False, 10 | "enable_share_cow": True, 11 | "enc_auto_mount": False, 12 | "encryption": 0, 13 | "force_readonly_reason": "", 14 | "hidden": True, 15 | "is_aclmode": True, 16 | "is_block_snap_action": False, 17 | "is_cluster_share": False, 18 | "is_cold_storage_share": False, 19 | "is_exfat_share": False, 20 | "is_force_readonly": False, 21 | "is_share_moving": False, 22 | "is_support_acl": True, 23 | "is_sync_share": False, 24 | "is_usb_share": False, 25 | "name": "docker", 26 | "quota_value": 0, 27 | "recycle_bin_admin_only": False, 28 | "share_quota_used": 0, 29 | "support_action": 511, 30 | "support_snapshot": True, 31 | "task_id": "", 32 | "unite_permission": False, 33 | "uuid": "78egut02-b5b1-4933-adt8-a9208526d234", 34 | "vol_path": "/volume1", 35 | }, 36 | { 37 | "desc": "", 38 | "enable_recycle_bin": True, 39 | "enable_share_compress": False, 40 | "enable_share_cow": True, 41 | "enc_auto_mount": False, 42 
| "encryption": 0, 43 | "force_readonly_reason": "", 44 | "hidden": False, 45 | "is_aclmode": True, 46 | "is_block_snap_action": False, 47 | "is_cluster_share": False, 48 | "is_cold_storage_share": False, 49 | "is_exfat_share": False, 50 | "is_force_readonly": False, 51 | "is_share_moving": False, 52 | "is_support_acl": True, 53 | "is_sync_share": False, 54 | "is_usb_share": False, 55 | "name": "test_share", 56 | "quota_value": 0, 57 | "recycle_bin_admin_only": False, 58 | "share_quota_used": 36146658672640.0, 59 | "support_action": 511, 60 | "support_snapshot": True, 61 | "task_id": "", 62 | "unite_permission": False, 63 | "uuid": "2ee6c06a-8766-48b5-013d-63b18652a393", 64 | "vol_path": "/volume1", 65 | }, 66 | { 67 | "desc": "user home", 68 | "enable_recycle_bin": False, 69 | "enable_share_compress": False, 70 | "enable_share_cow": True, 71 | "enc_auto_mount": False, 72 | "encryption": 0, 73 | "force_readonly_reason": "", 74 | "hidden": False, 75 | "is_aclmode": True, 76 | "is_block_snap_action": False, 77 | "is_cluster_share": False, 78 | "is_cold_storage_share": False, 79 | "is_exfat_share": False, 80 | "is_force_readonly": False, 81 | "is_share_moving": False, 82 | "is_support_acl": True, 83 | "is_sync_share": False, 84 | "is_usb_share": False, 85 | "name": "homes", 86 | "quota_value": 0, 87 | "recycle_bin_admin_only": False, 88 | "share_quota_used": 0.015625, 89 | "support_action": 511, 90 | "support_snapshot": True, 91 | "task_id": "", 92 | "unite_permission": False, 93 | "uuid": "2b829t90-9512-4236-qqe0-d4133e9992d0", 94 | "vol_path": "/volume1", 95 | }, 96 | { 97 | "desc": "Log volume", 98 | "enable_recycle_bin": True, 99 | "enable_share_compress": True, 100 | "enable_share_cow": True, 101 | "enc_auto_mount": True, 102 | "encryption": 0, 103 | "force_readonly_reason": "", 104 | "hidden": True, 105 | "is_aclmode": True, 106 | "is_block_snap_action": False, 107 | "is_cluster_share": False, 108 | "is_cold_storage_share": False, 109 | "is_exfat_share": False, 110 | "is_force_readonly": False, 111 | "is_share_moving": False, 112 | "is_support_acl": True, 113 | "is_sync_share": False, 114 | "is_usb_share": False, 115 | "name": "logs", 116 | "quota_value": 0, 117 | "recycle_bin_admin_only": True, 118 | "share_quota_used": 947.28515625, 119 | "support_action": 511, 120 | "support_snapshot": True, 121 | "task_id": "", 122 | "unite_permission": False, 123 | "uuid": "b9876507-6880-4wes-8d61-6c984c0813ty", 124 | "vol_path": "/volume2", 125 | }, 126 | { 127 | "desc": "VMs", 128 | "enable_recycle_bin": False, 129 | "enable_share_compress": False, 130 | "enable_share_cow": True, 131 | "enc_auto_mount": False, 132 | "encryption": 0, 133 | "force_readonly_reason": "", 134 | "hidden": False, 135 | "is_aclmode": True, 136 | "is_block_snap_action": False, 137 | "is_cluster_share": False, 138 | "is_cold_storage_share": False, 139 | "is_exfat_share": False, 140 | "is_force_readonly": False, 141 | "is_share_moving": False, 142 | "is_support_acl": True, 143 | "is_sync_share": False, 144 | "is_usb_share": False, 145 | "name": "Virtual_Machines", 146 | "quota_value": 0, 147 | "recycle_bin_admin_only": False, 148 | "share_quota_used": 33911668, 149 | "support_action": 511, 150 | "support_snapshot": True, 151 | "task_id": "", 152 | "unite_permission": False, 153 | "uuid": "5416f693-04tt-4re2-b8e4-f6b18731689b", 154 | "vol_path": "/volume3", 155 | }, 156 | ], 157 | "total": 5, 158 | }, 159 | "success": True, 160 | } 161 | -------------------------------------------------------------------------------- 
/tests/api_data/dsm_6/core/const_6_core_system.py: -------------------------------------------------------------------------------- 1 | """DSM 6 SYNO.Core.System data.""" 2 | 3 | DSM_6_CORE_SYSTEM_DS918_PLUS = { 4 | "data": { 5 | "cpu_clock_speed": 1500, 6 | "cpu_cores": "4", 7 | "cpu_family": "Celeron", 8 | "cpu_series": "J3455", 9 | "cpu_vendor": "INTEL", 10 | "enabled_ntp": True, 11 | "firmware_date": "2020/07/08", 12 | "firmware_ver": "DSM 6.2.3-25426 Update 2", 13 | "model": "DS918+", 14 | "ntp_server": "time.google.com", 15 | "ram_size": 4096, 16 | "sata_dev": [], 17 | "serial": "1920PDN001501", 18 | "support_esata": "yes", 19 | "sys_temp": 40, 20 | "sys_tempwarn": False, 21 | "systempwarn": False, 22 | "temperature_warning": False, 23 | "time": "2020-10-19 23:33:52", 24 | "time_zone": "Brussels", 25 | "time_zone_desc": "(GMT+01:00) Brussels, Copenhagen, Madrid, Paris", 26 | "up_time": "75:12:9", 27 | "usb_dev": [ 28 | { 29 | "cls": "hub", 30 | "pid": "0612", 31 | "producer": "Genesys Logic, Inc.", 32 | "product": "Hub", 33 | "rev": "92.23", 34 | "vid": "05e3", 35 | }, 36 | { 37 | "cls": "other", 38 | "pid": "1790", 39 | "producer": "ASIX Electronics Corp.", 40 | "product": "AX88179 Gigabit Ethernet", 41 | "rev": "1.00", 42 | "vid": "0b95", 43 | }, 44 | { 45 | "cls": "hub", 46 | "pid": "0610", 47 | "producer": "Genesys Logic, Inc.", 48 | "product": "4-port hub", 49 | "rev": "92.23", 50 | "vid": "05e3", 51 | }, 52 | { 53 | "cls": "other", 54 | "pid": "0200", 55 | "producer": "Sigma Designs, Inc.", 56 | "product": "Aeotec Z-Stick Gen5 (ZW090) - UZB", 57 | "rev": "0.00", 58 | "vid": "0658", 59 | }, 60 | { 61 | "cls": "ups", 62 | "pid": "0002", 63 | "producer": "American Power Conversion", 64 | "product": "Uninterruptible Power Supply", 65 | "rev": "1.06", 66 | "vid": "051d", 67 | }, 68 | ], 69 | }, 70 | "success": True, 71 | } 72 | 73 | DSM_6_CORE_SYSTEM_DS218_PLAY = { 74 | "data": { 75 | "cpu_clock_speed": 1400, 76 | "cpu_cores": "4", 77 | "cpu_family": "RTD1296", 78 | "cpu_series": "SoC", 79 | "cpu_vendor": "Realtek", 80 | "enabled_ntp": True, 81 | "firmware_date": "2020/07/14", 82 | "firmware_ver": "DSM 6.2.3-25426 Update 2", 83 | "model": "DS218play", 84 | "ntp_server": "pool.ntp.org", 85 | "ram_size": 1024, 86 | "serial": "123456abcdefg", 87 | "support_esata": "no", 88 | "sys_temp": 40, 89 | "sys_tempwarn": False, 90 | "systempwarn": False, 91 | "temperature_warning": False, 92 | "time": "2020-10-16 20:26:58", 93 | "time_zone": "Amsterdam", 94 | "time_zone_desc": "(GMT+01:00) Amsterdam, Berlin, Rome, Stockholm, Vienna", 95 | "up_time": "289:31:54", 96 | "usb_dev": [ 97 | { 98 | "cls": "disk", 99 | "pid": "2621", 100 | "producer": "Western Digital Technologies, Inc.", 101 | "product": "Elements 2621", 102 | "rev": "10.26", 103 | "vid": "1058", 104 | } 105 | ], 106 | }, 107 | "success": True, 108 | } 109 | -------------------------------------------------------------------------------- /tests/api_data/dsm_6/core/const_6_core_upgrade.py: -------------------------------------------------------------------------------- 1 | """DSM 6 SYNO.Core.Upgrade data.""" 2 | 3 | DSM_6_CORE_UPGRADE_FALSE = {"data": {"update": {"available": False}}, "success": True} 4 | DSM_6_CORE_UPGRADE_TRUE = { 5 | "data": { 6 | "update": { 7 | "available": True, 8 | "reboot": "now", 9 | "restart": "some", 10 | "type": "nano", 11 | "version": "DSM 6.2.3-25426 Update 2", 12 | "version_details": { 13 | "buildnumber": 25426, 14 | "major": 6, 15 | "micro": 3, 16 | "minor": 2, 17 | "nano": 2, 18 | "os_name": "DSM", 19 | 
}, 20 | } 21 | }, 22 | "success": True, 23 | } 24 | -------------------------------------------------------------------------------- /tests/api_data/dsm_6/core/const_6_core_utilization.py: -------------------------------------------------------------------------------- 1 | """DSM 6 SYNO.Core.System.Utilization data.""" 2 | 3 | DSM_6_CORE_UTILIZATION_ERROR_1055 = { 4 | "error": { 5 | "code": 1055, 6 | "errors": { 7 | "err_key": "", 8 | "err_line": 883, 9 | "err_msg": "Transmition failed.", 10 | "err_session": "", 11 | }, 12 | }, 13 | "success": False, 14 | } 15 | 16 | DSM_6_CORE_UTILIZATION = { 17 | "data": { 18 | "cpu": { 19 | "15min_load": 51, 20 | "1min_load": 37, 21 | "5min_load": 33, 22 | "device": "System", 23 | "other_load": 3, 24 | "system_load": 2, 25 | "user_load": 4, 26 | }, 27 | "disk": { 28 | "disk": [ 29 | { 30 | "device": "sdc", 31 | "display_name": "Drive 3", 32 | "read_access": 3, 33 | "read_byte": 55261, 34 | "type": "internal", 35 | "utilization": 12, 36 | "write_access": 15, 37 | "write_byte": 419425, 38 | }, 39 | { 40 | "device": "sda", 41 | "display_name": "Drive 1", 42 | "read_access": 3, 43 | "read_byte": 63905, 44 | "type": "internal", 45 | "utilization": 8, 46 | "write_access": 14, 47 | "write_byte": 414795, 48 | }, 49 | { 50 | "device": "sdb", 51 | "display_name": "Drive 2", 52 | "read_access": 3, 53 | "read_byte": 55891, 54 | "type": "internal", 55 | "utilization": 10, 56 | "write_access": 15, 57 | "write_byte": 415658, 58 | }, 59 | ], 60 | "total": { 61 | "device": "total", 62 | "read_access": 9, 63 | "read_byte": 175057, 64 | "utilization": 10, 65 | "write_access": 44, 66 | "write_byte": 1249878, 67 | }, 68 | }, 69 | "lun": [], 70 | "memory": { 71 | "avail_real": 156188, 72 | "avail_swap": 4146316, 73 | "buffer": 15172, 74 | "cached": 2764756, 75 | "device": "Memory", 76 | "memory_size": 4194304, 77 | "real_usage": 24, 78 | "si_disk": 0, 79 | "so_disk": 0, 80 | "swap_usage": 6, 81 | "total_real": 3867268, 82 | "total_swap": 4415404, 83 | }, 84 | "network": [ 85 | {"device": "total", "rx": 109549, "tx": 45097}, 86 | {"device": "eth0", "rx": 109549, "tx": 45097}, 87 | {"device": "eth1", "rx": 0, "tx": 0}, 88 | ], 89 | "space": { 90 | "total": { 91 | "device": "total", 92 | "read_access": 1, 93 | "read_byte": 27603, 94 | "utilization": 1, 95 | "write_access": 23, 96 | "write_byte": 132496, 97 | }, 98 | "volume": [ 99 | { 100 | "device": "md2", 101 | "display_name": "volume1", 102 | "read_access": 1, 103 | "read_byte": 27603, 104 | "utilization": 1, 105 | "write_access": 23, 106 | "write_byte": 132496, 107 | } 108 | ], 109 | }, 110 | "time": 1585503221, 111 | }, 112 | "success": True, 113 | } 114 | -------------------------------------------------------------------------------- /tests/api_data/dsm_6/download_station/__init__.py: -------------------------------------------------------------------------------- 1 | """DSM 6 SYNO.DownloadStation.* datas.""" 2 | -------------------------------------------------------------------------------- /tests/api_data/dsm_6/download_station/const_6_download_station_info.py: -------------------------------------------------------------------------------- 1 | """DSM 6 SYNO.DownloadStation.Info data.""" 2 | 3 | DSM_6_DOWNLOAD_STATION_INFO_INFO = { 4 | "data": {"is_manager": True, "version": 3543, "version_string": "3.8-3543"}, 5 | "success": True, 6 | } 7 | 8 | DSM_6_DOWNLOAD_STATION_INFO_CONFIG = { 9 | "data": { 10 | "bt_max_download": 0, 11 | "bt_max_upload": 800, 12 | "default_destination": "downloads", 13 | 
"emule_default_destination": None, 14 | "emule_enabled": False, 15 | "emule_max_download": 0, 16 | "emule_max_upload": 20, 17 | "ftp_max_download": 0, 18 | "http_max_download": 0, 19 | "nzb_max_download": 0, 20 | "unzip_service_enabled": False, 21 | }, 22 | "success": True, 23 | } 24 | -------------------------------------------------------------------------------- /tests/api_data/dsm_6/download_station/const_6_download_station_stat.py: -------------------------------------------------------------------------------- 1 | """DSM 6 SYNO.DownloadStation.Statistic data.""" 2 | 3 | DSM_6_DOWNLOAD_STATION_STAT_INFO = { 4 | "data": {"speed_download": 89950232, "speed_upload": 0}, 5 | "success": True, 6 | } 7 | -------------------------------------------------------------------------------- /tests/api_data/dsm_6/dsm/__init__.py: -------------------------------------------------------------------------------- 1 | """DSM 6 SYNO.DSM.* datas.""" 2 | -------------------------------------------------------------------------------- /tests/api_data/dsm_6/dsm/const_6_dsm_info.py: -------------------------------------------------------------------------------- 1 | """DSM 6 SYNO.DSM.Info data.""" 2 | 3 | DSM_6_DSM_INFORMATION_DS213_PLUS = { 4 | "data": { 5 | "codepage": "enu", 6 | "model": "DS213+", 7 | "ram": 512, 8 | "serial": "XXXXXXXXXXX", 9 | "temperature": 30, 10 | "temperature_warn": False, 11 | "time": "Thu Apr 30 14:57:35 2020", 12 | "uptime": 3258607, 13 | "version": "24922", 14 | "version_string": "DSM 6.2.2-24922 Update 4", 15 | }, 16 | "success": True, 17 | } 18 | 19 | DSM_6_DSM_INFORMATION_DS918_PLUS = { 20 | "data": { 21 | "codepage": "fre", 22 | "model": "DS918+", 23 | "ram": 4096, 24 | "serial": "1920PDN001501", 25 | "temperature": 40, 26 | "temperature_warn": False, 27 | "time": "Sun Mar 29 19:33:41 2020", 28 | "uptime": 155084, 29 | "version": "24922", 30 | "version_string": "DSM 6.2.2-24922 Update 4", 31 | }, 32 | "success": True, 33 | } 34 | 35 | DSM_6_DSM_INFORMATION = DSM_6_DSM_INFORMATION_DS918_PLUS 36 | -------------------------------------------------------------------------------- /tests/api_data/dsm_6/dsm/const_6_dsm_network.py: -------------------------------------------------------------------------------- 1 | """DSM 6 SYNO.DSM.Network data.""" 2 | 3 | DSM_6_DSM_NETWORK_1LAN = { 4 | "data": { 5 | "dns": ["192.168.0.35"], 6 | "gateway": "192.168.0.254", 7 | "hostname": "NAS_[NAME]", 8 | "interfaces": [ 9 | { 10 | "id": "eth0", 11 | "ip": [{"address": "192.168.0.35", "netmask": "255.255.255.0"}], 12 | "ipv6": [ 13 | { 14 | "address": "2a01:e35:2434:d420:211:32ff:fea6:ca59", 15 | "prefix_length": 64, 16 | "scope": "global", 17 | }, 18 | { 19 | "address": "fe80::211:32ff:fea6:ca59", 20 | "prefix_length": 64, 21 | "scope": "link", 22 | }, 23 | ], 24 | "mac": "00-11-32-XX-XX-59", 25 | "type": "lan", 26 | }, 27 | { 28 | "id": "eth1", 29 | "ip": [{"address": "169.254.158.209", "netmask": "255.255.0.0"}], 30 | "mac": "00-11-32-XX-XX-5A", 31 | "type": "lan", 32 | }, 33 | ], 34 | "workgroup": "WORKGROUP", 35 | }, 36 | "success": True, 37 | } 38 | 39 | DSM_6_DSM_NETWORK_2LAN_1PPPOE = { 40 | "data": { 41 | "dns": ["192.168.0.35"], 42 | "gateway": "192.168.0.254", 43 | "hostname": "NAS_[NAME]", 44 | "interfaces": [ 45 | { 46 | "id": "eth0", 47 | "ip": [{"address": "192.168.5.10", "netmask": "255.255.255.0"}], 48 | "ipv6": [ 49 | { 50 | "address": "2001:b211:317c:147e:211:32ff:fe5d:fd11", 51 | "prefix_length": 64, 52 | "scope": "global", 53 | }, 54 | { 55 | "address": 
"fe80::211:32ff:fe5d:fd11", 56 | "prefix_length": 64, 57 | "scope": "link", 58 | }, 59 | ], 60 | "mac": "00-11-32-XX-XX-11", 61 | "type": "lan", 62 | }, 63 | { 64 | "id": "eth1", 65 | "ip": [{"address": "192.168.1.100", "netmask": "255.255.255.0"}], 66 | "ipv6": [ 67 | { 68 | "address": "2001:b011:300c:176c:211:11ff:fe5d:fd12", 69 | "prefix_length": 64, 70 | "scope": "global", 71 | }, 72 | { 73 | "address": "fe80::211:31ff:ff5d:fd12", 74 | "prefix_length": 64, 75 | "scope": "link", 76 | }, 77 | ], 78 | "mac": "00-11-32-XX-XX-12", 79 | "type": "lan", 80 | }, 81 | { 82 | "id": "ppp0", 83 | "ip": [{"address": "114.45.2.158", "netmask": "255.255.255.255"}], 84 | "type": "pppoe", 85 | }, 86 | ], 87 | "workgroup": "WORKGROUP", 88 | }, 89 | "success": True, 90 | } 91 | -------------------------------------------------------------------------------- /tests/api_data/dsm_6/storage/__init__.py: -------------------------------------------------------------------------------- 1 | """DSM 6 SYNO.Storage.* datas.""" 2 | -------------------------------------------------------------------------------- /tests/api_data/dsm_6/surveillance_station/__init__.py: -------------------------------------------------------------------------------- 1 | """DSM 6 SYNO.SurveillanceStation.* datas.""" 2 | -------------------------------------------------------------------------------- /tests/api_data/dsm_6/surveillance_station/const_6_surveillance_station_home_mode.py: -------------------------------------------------------------------------------- 1 | """DSM 6 SYNO.API.SurveillanceStation.HomeMode data.""" 2 | 3 | DSM_6_SURVEILLANCE_STATION_HOME_MODE_GET_INFO = { 4 | "data": { 5 | "actrule_on": False, 6 | "actrules": "-1", 7 | "cameras": "-1", 8 | "custom1_det": 1, 9 | "custom1_di": 1, 10 | "custom2_det": 1, 11 | "custom2_di": 1, 12 | "geo_delay_time": 60, 13 | "geo_lat": 12.00000, 14 | "geo_lng": -12.00000, 15 | "geo_radius": 100, 16 | "io_modules": "", 17 | "last_update_time": 0, 18 | "mode_schedule": "000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", 19 | "mode_schedule_next_time": -1, 20 | "mode_schedule_on": True, 21 | "notify_event_list": [ 22 | {"eventGroupType": 2, "eventType": 3, "filter": 4}, 23 | {"eventGroupType": 2, "eventType": 4, "filter": 4}, 24 | {"eventGroupType": 2, "eventType": 5, "filter": 0}, 25 | {"eventGroupType": 2, "eventType": 6, "filter": 0}, 26 | {"eventGroupType": 2, "eventType": 7, "filter": 0}, 27 | {"eventGroupType": 2, "eventType": 10, "filter": 0}, 28 | {"eventGroupType": 2, "eventType": 11, "filter": 4}, 29 | {"eventGroupType": 2, "eventType": 12, "filter": 0}, 30 | {"eventGroupType": 2, "eventType": 13, "filter": 0}, 31 | {"eventGroupType": 2, "eventType": 14, "filter": 0}, 32 | {"eventGroupType": 2, "eventType": 15, "filter": 0}, 33 | {"eventGroupType": 1, "eventType": 28, "filter": 4}, 34 | {"eventGroupType": 1, "eventType": 29, "filter": 4}, 35 | {"eventGroupType": 1, "eventType": 32, "filter": 4}, 36 | {"eventGroupType": 1, "eventType": 33, "filter": 4}, 37 | {"eventGroupType": 1, "eventType": 34, "filter": 4}, 38 | {"eventGroupType": 8, "eventType": 35, "filter": 0}, 39 | {"eventGroupType": 8, "eventType": 36, "filter": 0}, 40 | {"eventGroupType": 8, "eventType": 37, "filter": 0}, 41 | 
{"eventGroupType": 8, "eventType": 38, "filter": 0}, 42 | {"eventGroupType": 8, "eventType": 39, "filter": 0}, 43 | {"eventGroupType": 8, "eventType": 40, "filter": 0}, 44 | {"eventGroupType": 8, "eventType": 41, "filter": 0}, 45 | {"eventGroupType": 8, "eventType": 42, "filter": 0}, 46 | {"eventGroupType": 8, "eventType": 43, "filter": 0}, 47 | {"eventGroupType": 8, "eventType": 44, "filter": 0}, 48 | {"eventGroupType": 1, "eventType": 45, "filter": 0}, 49 | {"eventGroupType": 2, "eventType": 62, "filter": 0}, 50 | {"eventGroupType": 2, "eventType": 63, "filter": 0}, 51 | {"eventGroupType": 2, "eventType": 64, "filter": 0}, 52 | {"eventGroupType": 2, "eventType": 65, "filter": 0}, 53 | {"eventGroupType": 2, "eventType": 66, "filter": 0}, 54 | {"eventGroupType": 12, "eventType": 82, "filter": 0}, 55 | {"eventGroupType": 12, "eventType": 83, "filter": 0}, 56 | {"eventGroupType": 1, "eventType": 90, "filter": 0}, 57 | ], 58 | "notify_on": True, 59 | "on": True, 60 | "onetime_disable_on": False, 61 | "onetime_disable_time": 0, 62 | "onetime_enable_on": False, 63 | "onetime_enable_time": 0, 64 | "reason": 2, 65 | "rec_sch_custom_det_app_list": [ 66 | {"custom1_app_det": 0, "custom2_app_det": 0}, 67 | {"custom1_app_det": 0, "custom2_app_det": 0}, 68 | ], 69 | "rec_schedule": "111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111", 70 | "rec_schedule_on": False, 71 | "stream_profile": "1,1,1,1,1,1", 72 | "streaming_on": False, 73 | "wifi_ssid": "SSID", 74 | }, 75 | "success": True, 76 | } 77 | 78 | DSM_6_SURVEILLANCE_STATION_HOME_MODE_SWITCH = {"success": True} 79 | -------------------------------------------------------------------------------- /tests/api_data/dsm_7/__init__.py: -------------------------------------------------------------------------------- 1 | """DSM 7 datas.""" 2 | 3 | from .const_7_api_auth import ( 4 | DSM_7_AUTH_LOGIN, 5 | DSM_7_AUTH_LOGIN_2SA, 6 | DSM_7_AUTH_LOGIN_2SA_OTP, 7 | ) 8 | from .const_7_api_info import DSM_7_API_INFO 9 | from .core.const_7_core_external_usb import ( 10 | DSM_7_CORE_EXTERNAL_USB_DS1821_PLUS_EXTERNAL_USB, 11 | DSM_7_CORE_EXTERNAL_USB_DS1821_PLUS_NO_EXTERNAL_USB, 12 | ) 13 | from .core.const_7_core_upgrade import DSM_7_CORE_UPGRADE_FALSE, DSM_7_CORE_UPGRADE_TRUE 14 | from .dsm.const_7_dsm_info import DSM_7_DSM_INFORMATION 15 | from .file_station.const_7_file_station import ( 16 | DSM_7_FILE_STATION_FILES, 17 | DSM_7_FILE_STATION_FOLDERS, 18 | ) 19 | from .photos.const_7_photo import ( 20 | DSM_7_FOTO_ALBUMS, 21 | DSM_7_FOTO_ITEMS, 22 | DSM_7_FOTO_ITEMS_SEARCHED, 23 | DSM_7_FOTO_ITEMS_SHARED_ALBUM, 24 | DSM_7_FOTO_SHARED_ITEMS, 25 | ) 26 | from .virtual_machine_manager.const_7_vmm import DSM_7_VMM_GUESTS 27 | 28 | __all__ = [ 29 | "DSM_7_AUTH_LOGIN", 30 | "DSM_7_AUTH_LOGIN_2SA", 31 | "DSM_7_AUTH_LOGIN_2SA_OTP", 32 | "DSM_7_API_INFO", 33 | "DSM_7_CORE_EXTERNAL_USB_DS1821_PLUS_EXTERNAL_USB", 34 | "DSM_7_CORE_EXTERNAL_USB_DS1821_PLUS_NO_EXTERNAL_USB", 35 | "DSM_7_CORE_UPGRADE_FALSE", 36 | "DSM_7_CORE_UPGRADE_TRUE", 37 | "DSM_7_DSM_INFORMATION", 38 | "DSM_7_FILE_STATION_FILES", 39 | "DSM_7_FILE_STATION_FOLDERS", 40 | "DSM_7_FOTO_ALBUMS", 41 | "DSM_7_FOTO_ITEMS", 42 | "DSM_7_FOTO_ITEMS_SHARED_ALBUM", 43 | "DSM_7_FOTO_ITEMS_SEARCHED", 44 | "DSM_7_FOTO_SHARED_ITEMS", 45 | 
"DSM_7_VMM_GUESTS", 46 | ] 47 | -------------------------------------------------------------------------------- /tests/api_data/dsm_7/const_7_api_auth.py: -------------------------------------------------------------------------------- 1 | """DSM 7 SYNO.API.Auth data.""" 2 | 3 | from tests.const import ( 4 | DEVICE_TOKEN, 5 | ERROR_AUTH_OTP_NOT_SPECIFIED, 6 | SESSION_ID, 7 | SYNO_TOKEN, 8 | ) 9 | 10 | DSM_7_AUTH_LOGIN = { 11 | "data": {"is_portal_port": False, "sid": SESSION_ID, "synotoken": SYNO_TOKEN}, 12 | "success": True, 13 | } 14 | DSM_7_AUTH_LOGIN_2SA = ERROR_AUTH_OTP_NOT_SPECIFIED 15 | DSM_7_AUTH_LOGIN_2SA_OTP = { 16 | "data": { 17 | "device_id": DEVICE_TOKEN, 18 | "is_portal_port": False, 19 | "sid": SESSION_ID, 20 | "synotoken": SYNO_TOKEN, 21 | }, 22 | "success": True, 23 | } 24 | 25 | DSM_7_AUTH_LOGOUT = {"success": True} 26 | -------------------------------------------------------------------------------- /tests/api_data/dsm_7/core/__init__.py: -------------------------------------------------------------------------------- 1 | """DSM 7 SYNO.Core.* datas.""" 2 | -------------------------------------------------------------------------------- /tests/api_data/dsm_7/core/const_7_core_upgrade.py: -------------------------------------------------------------------------------- 1 | """DSM 7 SYNO.Core.Upgrade data.""" 2 | 3 | DSM_7_CORE_UPGRADE_FALSE = {"data": {"update": {"available": False}}, "success": True} 4 | DSM_7_CORE_UPGRADE_TRUE = { 5 | "data": { 6 | "update": { 7 | "available": True, 8 | "reboot": "now", 9 | "restart": "some", 10 | "type": "nano", 11 | "version": "7.0.1-42218 Update 3", 12 | "version_details": { 13 | "buildnumber": 42218, 14 | "major": 7, 15 | "micro": 1, 16 | "minor": 0, 17 | "nano": 3, 18 | "os_name": "DSM", 19 | }, 20 | } 21 | }, 22 | "success": True, 23 | } 24 | -------------------------------------------------------------------------------- /tests/api_data/dsm_7/dsm/__init__.py: -------------------------------------------------------------------------------- 1 | """DSM 7 SYNO.DSM.* datas.""" 2 | -------------------------------------------------------------------------------- /tests/api_data/dsm_7/dsm/const_7_dsm_info.py: -------------------------------------------------------------------------------- 1 | """DSM 7 SYNO.DSM.Info data.""" 2 | 3 | DSM_7_DSM_INFORMATION_DS213_PLUS = { 4 | "data": { 5 | "codepage": "enu", 6 | "model": "DS213+", 7 | "ram": 512, 8 | "serial": "XXXXXXXXXXX", 9 | "temperature": 30, 10 | "temperature_warn": False, 11 | "time": "Thu Apr 30 14:57:35 2020", 12 | "uptime": 3258607, 13 | "version": "24922", 14 | "version_string": "DSM 7.0-41222", 15 | }, 16 | "success": True, 17 | } 18 | 19 | DSM_7_DSM_INFORMATION_DS918_PLUS = { 20 | "data": { 21 | "codepage": "fre", 22 | "model": "DS918+", 23 | "ram": 4096, 24 | "serial": "1920PDN001501", 25 | "temperature": 40, 26 | "temperature_warn": False, 27 | "time": "Sun Mar 29 19:33:41 2020", 28 | "uptime": 155084, 29 | "version": "24922", 30 | "version_string": "DSM 7.0-41222", 31 | }, 32 | "success": True, 33 | } 34 | 35 | DSM_7_DSM_INFORMATION = DSM_7_DSM_INFORMATION_DS918_PLUS 36 | -------------------------------------------------------------------------------- /tests/api_data/dsm_7/file_station/__init__.py: -------------------------------------------------------------------------------- 1 | """DSM 7 SYNO.FileStation.* datas.""" 2 | -------------------------------------------------------------------------------- /tests/api_data/dsm_7/file_station/const_7_file_station.py: 
-------------------------------------------------------------------------------- 1 | """DSM 7 SYNO.FileStation.List data.""" 2 | 3 | DSM_7_FILE_STATION_FOLDERS = { 4 | "data": { 5 | "offset": 0, 6 | "shares": [ 7 | { 8 | "additional": { 9 | "mount_point_type": "", 10 | "owner": {"gid": 0, "group": "root", "uid": 0, "user": "root"}, 11 | "perm": { 12 | "acl": { 13 | "append": True, 14 | "del": True, 15 | "exec": True, 16 | "read": True, 17 | "write": True, 18 | }, 19 | "acl_enable": True, 20 | "adv_right": { 21 | "disable_download": False, 22 | "disable_list": False, 23 | "disable_modify": False, 24 | }, 25 | "is_acl_mode": True, 26 | "is_share_readonly": False, 27 | "posix": 777, 28 | "share_right": "RW", 29 | }, 30 | "real_path": "/volume1/backup", 31 | "sync_share": False, 32 | "time": { 33 | "atime": 1736098543, 34 | "crtime": 1669466095, 35 | "ctime": 1736083030, 36 | "mtime": 1733848814, 37 | }, 38 | "volume_status": { 39 | "freespace": 1553335107584, 40 | "readonly": False, 41 | "totalspace": 3821146505216, 42 | }, 43 | }, 44 | "isdir": True, 45 | "name": "backup", 46 | "path": "/backup", 47 | }, 48 | { 49 | "additional": { 50 | "mount_point_type": "", 51 | "owner": { 52 | "gid": 100, 53 | "group": "users", 54 | "uid": 1046, 55 | "user": "hass", 56 | }, 57 | "perm": { 58 | "acl": { 59 | "append": True, 60 | "del": True, 61 | "exec": True, 62 | "read": True, 63 | "write": True, 64 | }, 65 | "acl_enable": True, 66 | "adv_right": { 67 | "disable_download": False, 68 | "disable_list": False, 69 | "disable_modify": False, 70 | }, 71 | "is_acl_mode": True, 72 | "is_share_readonly": False, 73 | "posix": 777, 74 | "share_right": "RW", 75 | }, 76 | "real_path": "/volume1/homes/hass", 77 | "sync_share": False, 78 | "time": { 79 | "atime": 1736109199, 80 | "crtime": 1643818781, 81 | "ctime": 1736109071, 82 | "mtime": 1736109071, 83 | }, 84 | "volume_status": { 85 | "freespace": 1553335107584, 86 | "readonly": False, 87 | "totalspace": 3821146505216, 88 | }, 89 | }, 90 | "isdir": True, 91 | "name": "home", 92 | "path": "/home", 93 | }, 94 | ], 95 | "total": 2, 96 | }, 97 | "success": True, 98 | } 99 | DSM_7_FILE_STATION_FILES = { 100 | "data": { 101 | "files": [ 102 | { 103 | "additional": { 104 | "mount_point_type": "", 105 | "owner": { 106 | "gid": 105733, 107 | "group": "SynologyPhotos", 108 | "uid": 1046, 109 | "user": "hass", 110 | }, 111 | "perm": { 112 | "acl": { 113 | "append": True, 114 | "del": True, 115 | "exec": True, 116 | "read": True, 117 | "write": True, 118 | }, 119 | "is_acl_mode": True, 120 | "posix": 711, 121 | }, 122 | "real_path": "/volume1/homes/hass/Photos", 123 | "size": 50, 124 | "time": { 125 | "atime": 1735700476, 126 | "crtime": 1723653032, 127 | "ctime": 1723653464, 128 | "mtime": 1723653464, 129 | }, 130 | "type": "", 131 | }, 132 | "isdir": True, 133 | "name": "Photos", 134 | "path": "/home/Photos", 135 | }, 136 | { 137 | "additional": { 138 | "mount_point_type": "", 139 | "owner": { 140 | "gid": 100, 141 | "group": "users", 142 | "uid": 1046, 143 | "user": "hass", 144 | }, 145 | "perm": { 146 | "acl": { 147 | "append": True, 148 | "del": True, 149 | "exec": True, 150 | "read": True, 151 | "write": True, 152 | }, 153 | "is_acl_mode": True, 154 | "posix": 711, 155 | }, 156 | "real_path": "/volume1/homes/hass/3e57d06c.tar", 157 | "size": 1660753920, 158 | "time": { 159 | "atime": 1736105132, 160 | "crtime": 1736105128, 161 | "ctime": 1736105132, 162 | "mtime": 1736105132, 163 | }, 164 | "type": "TAR", 165 | }, 166 | "isdir": False, 167 | "name": "3e57d06c.tar", 168 
| "path": "/home/3e57d06c.tar", 169 | }, 170 | ], 171 | "offset": 0, 172 | "total": 2, 173 | }, 174 | "success": True, 175 | } 176 | -------------------------------------------------------------------------------- /tests/api_data/dsm_7/photos/__init__.py: -------------------------------------------------------------------------------- 1 | """DSM 7 SYNO.Foto.* datas.""" 2 | -------------------------------------------------------------------------------- /tests/api_data/dsm_7/photos/const_7_photo.py: -------------------------------------------------------------------------------- 1 | """DSM 7 SYNO.Foto.Browse.Album data.""" 2 | 3 | DSM_7_FOTO_ALBUMS = { 4 | "data": { 5 | "list": [ 6 | { 7 | "condition": {"folder_filter": [597], "user_id": 1}, 8 | "create_time": 1674514932, 9 | "end_time": 1640958550, 10 | "id": 4, 11 | "item_count": 3, 12 | "name": "Album1", 13 | "owner_user_id": 1, 14 | "passphrase": "", 15 | "shared": False, 16 | "sort_by": "default", 17 | "sort_direction": "default", 18 | "start_time": 1601653233, 19 | "type": "condition", 20 | "version": 197920, 21 | }, 22 | { 23 | "cant_migrate_condition": {}, 24 | "condition": {}, 25 | "create_time": 1668690757, 26 | "end_time": 1668538602, 27 | "freeze_album": False, 28 | "id": 1, 29 | "item_count": 1, 30 | "name": "Album2", 31 | "owner_user_id": 1, 32 | "passphrase": "", 33 | "shared": False, 34 | "sort_by": "default", 35 | "sort_direction": "default", 36 | "start_time": 1668538602, 37 | "temporary_shared": False, 38 | "type": "normal", 39 | "version": 195694, 40 | }, 41 | { 42 | "cant_migrate_condition": {}, 43 | "condition": {}, 44 | "create_time": 1718658534, 45 | "end_time": 1719075481, 46 | "freeze_album": False, 47 | "id": 3, 48 | "item_count": 1, 49 | "name": "Album3", 50 | "owner_user_id": 7, 51 | "passphrase": "NiXlv1i2N", 52 | "shared": False, 53 | "sort_by": "default", 54 | "sort_direction": "default", 55 | "start_time": 1659724703, 56 | "temporary_shared": False, 57 | "type": "normal", 58 | "version": 102886, 59 | }, 60 | ] 61 | }, 62 | "success": True, 63 | } 64 | 65 | DSM_7_FOTO_ITEMS = { 66 | "success": True, 67 | "data": { 68 | "list": [ 69 | { 70 | "id": 29807, 71 | "filename": "20221115_185642.jpg", 72 | "filesize": 2644859, 73 | "time": 1668538602, 74 | "indexed_time": 1668564550862, 75 | "owner_user_id": 1, 76 | "folder_id": 597, 77 | "type": "photo", 78 | "additional": { 79 | "thumbnail": { 80 | "m": "ready", 81 | "xl": "ready", 82 | "preview": "broken", 83 | "sm": "ready", 84 | "cache_key": "29807_1668560967", 85 | "unit_id": 29807, 86 | } 87 | }, 88 | }, 89 | { 90 | "id": 29808, 91 | "filename": "20221115_185643.jpg", 92 | "filesize": 2644859, 93 | "time": 1668538602, 94 | "indexed_time": 1668564550862, 95 | "owner_user_id": 0, 96 | "folder_id": 597, 97 | "type": "photo", 98 | "additional": { 99 | "thumbnail": { 100 | "m": "ready", 101 | "xl": "notready", 102 | "preview": "broken", 103 | "sm": "ready", 104 | "cache_key": "29808_1668560967", 105 | "unit_id": 29808, 106 | } 107 | }, 108 | }, 109 | { 110 | "id": 29809, 111 | "filename": "20221115_185644.jpg", 112 | "filesize": 2644859, 113 | "time": 1668538602, 114 | "indexed_time": 1668564550862, 115 | "owner_user_id": 1, 116 | "folder_id": 597, 117 | "type": "photo", 118 | "additional": { 119 | "thumbnail": { 120 | "m": "notready", 121 | "xl": "notready", 122 | "preview": "broken", 123 | "sm": "ready", 124 | "cache_key": "29809_1668560967", 125 | "unit_id": 29809, 126 | } 127 | }, 128 | }, 129 | ] 130 | }, 131 | } 132 | 133 | DSM_7_FOTO_ITEMS_SHARED_ALBUM = { 
134 | "success": True, 135 | "data": { 136 | "list": [ 137 | { 138 | "id": 29807, 139 | "filename": "20221115_185645.jpg", 140 | "filesize": 2644859, 141 | "time": 1668538602, 142 | "indexed_time": 1668564550862, 143 | "owner_user_id": 7, 144 | "folder_id": 597, 145 | "type": "photo", 146 | "additional": { 147 | "thumbnail": { 148 | "m": "ready", 149 | "xl": "ready", 150 | "preview": "broken", 151 | "sm": "ready", 152 | "cache_key": "29810_1668560967", 153 | "unit_id": 29807, 154 | } 155 | }, 156 | }, 157 | ] 158 | }, 159 | } 160 | 161 | DSM_7_FOTO_SHARED_ITEMS = { 162 | "success": True, 163 | "data": { 164 | "list": [ 165 | { 166 | "id": 77, 167 | "filename": "shared_1.jpg", 168 | "filesize": 1404758, 169 | "time": 1627062628, 170 | "indexed_time": 1628329471168, 171 | "owner_user_id": 0, 172 | "folder_id": 17, 173 | "type": "photo", 174 | "additional": { 175 | "thumbnail": { 176 | "m": "ready", 177 | "xl": "ready", 178 | "preview": "broken", 179 | "sm": "ready", 180 | "cache_key": "77_1628323785", 181 | "unit_id": 77, 182 | } 183 | }, 184 | }, 185 | { 186 | "id": 490, 187 | "filename": "shared_2.jpg", 188 | "filesize": 888192, 189 | "time": 1627062618, 190 | "indexed_time": 1628329516646, 191 | "owner_user_id": 0, 192 | "folder_id": 37, 193 | "type": "photo", 194 | "additional": { 195 | "thumbnail": { 196 | "m": "ready", 197 | "xl": "ready", 198 | "preview": "broken", 199 | "sm": "ready", 200 | "cache_key": "490_1628323817", 201 | "unit_id": 490, 202 | } 203 | }, 204 | }, 205 | { 206 | "id": 96, 207 | "filename": "shared_3.jpg", 208 | "filesize": 4903571, 209 | "time": 1626987559, 210 | "indexed_time": 1628329472531, 211 | "owner_user_id": 0, 212 | "folder_id": 18, 213 | "type": "photo", 214 | "additional": { 215 | "thumbnail": { 216 | "m": "ready", 217 | "xl": "ready", 218 | "preview": "broken", 219 | "sm": "ready", 220 | "cache_key": "96_1628323786", 221 | "unit_id": 96, 222 | } 223 | }, 224 | }, 225 | ] 226 | }, 227 | } 228 | 229 | DSM_7_FOTO_ITEMS_SEARCHED = { 230 | "success": True, 231 | "data": { 232 | "list": [ 233 | { 234 | "id": 12340, 235 | "filename": "search_1.jpg", 236 | "filesize": 2644859, 237 | "time": 1668538602, 238 | "indexed_time": 1668564550862, 239 | "owner_user_id": 1, 240 | "folder_id": 597, 241 | "type": "photo", 242 | "additional": { 243 | "thumbnail": { 244 | "m": "ready", 245 | "xl": "ready", 246 | "preview": "broken", 247 | "sm": "ready", 248 | "cache_key": "12340_1668560967", 249 | "unit_id": 12340, 250 | } 251 | }, 252 | }, 253 | { 254 | "id": 12341, 255 | "filename": "search_2.jpg", 256 | "filesize": 2644859, 257 | "time": 1668538602, 258 | "indexed_time": 1668564550862, 259 | "owner_user_id": 1, 260 | "folder_id": 597, 261 | "type": "photo", 262 | "additional": { 263 | "thumbnail": { 264 | "m": "ready", 265 | "xl": "ready", 266 | "preview": "broken", 267 | "sm": "ready", 268 | "cache_key": "12341_1668560967", 269 | "unit_id": 12341, 270 | } 271 | }, 272 | }, 273 | ] 274 | }, 275 | } 276 | -------------------------------------------------------------------------------- /tests/api_data/dsm_7/virtual_machine_manager/__init__.py: -------------------------------------------------------------------------------- 1 | """DSM 7 SYNO.Virtualization.* datas.""" 2 | -------------------------------------------------------------------------------- /tests/api_data/dsm_7/virtual_machine_manager/const_7_vmm.py: -------------------------------------------------------------------------------- 1 | """DSM 7 SYNO.Virtualization.Guest data.""" 2 | 3 | DSM_7_VMM_GUESTS = { 4 | 
"data": { 5 | "canHA": False, 6 | "canMove": True, 7 | "guests": [ 8 | { 9 | "autorun": 0, 10 | "birth_info": {"desc": "", "from": "vmm", "method": "create"}, 11 | "boot_from": "disk", 12 | "can_ha": True, 13 | "cpu_passthru": True, 14 | "cpu_pin_num": 0, 15 | "cpu_weight": 8, 16 | "desc": "vDSM test", 17 | "dsm_version": "DSM 7.2.1-69057 Update 5", 18 | "guest_id": "a39d0628-380e-42f8-8cb6-a00d6b930fa0", 19 | "ha_status": "", 20 | "ha_status_detail": "", 21 | "ha_status_type": "", 22 | "host_id": "f0310cdf-fa88-47d3-9e9e-be36fb8e121b", 23 | "host_name": "KrosseKrabbe", 24 | "host_ram_size": 6291456, 25 | "http_port": 5000, 26 | "https_enable": False, 27 | "https_port": 5001, 28 | "https_redirect": False, 29 | "hyperv_enlighten": False, 30 | "ip": "", 31 | "is_acting": False, 32 | "is_general_vm": False, 33 | "is_online": False, 34 | "is_replica_support": True, 35 | "is_rs_install": True, 36 | "iso_images": ["unmounted", "unmounted"], 37 | "kb_layout": "Default", 38 | "name": "vdsm", 39 | "ram_used": 0, 40 | "repo_id": "3a69ac31-004c-4b02-9b36-609099bc9be6", 41 | "repo_name": "KrosseKrabbe - VM Storage 1", 42 | "snap_num": 0, 43 | "status": "shutdown", 44 | "status_desc": "", 45 | "status_type": "healthy", 46 | "usb_device_name": None, 47 | "usbs": ["unmounted", "unmounted", "unmounted", "unmounted"], 48 | "use_ovmf": False, 49 | "vcpu_num": 1, 50 | "vcpu_usage": "", 51 | "vdisk_num": 3, 52 | "video_card": "cirrus", 53 | "vram_size": 1048576, 54 | }, 55 | { 56 | "autorun": 1, 57 | "birth_info": {"desc": "", "from": "vmm", "method": "create"}, 58 | "boot_from": "disk", 59 | "can_ha": True, 60 | "cpu_passthru": True, 61 | "cpu_pin_num": 0, 62 | "cpu_weight": 256, 63 | "desc": "", 64 | "dsm_version": "", 65 | "guest_id": "2b4ec8c8-2bec-4daa-b36d-1a47b639254f", 66 | "ha_status": "", 67 | "ha_status_detail": "", 68 | "ha_status_type": "", 69 | "has_agent": False, 70 | "host_id": "f0310cdf-fa88-47d3-9e9e-be36fb8e121b", 71 | "host_name": "KrosseKrabbe", 72 | "host_net_ifs": ["f0310cdf-fa88-47d3-9e9e-be36fb8e121b_bond0"], 73 | "host_ram_size": 6291456, 74 | "http_port": 5000, 75 | "https_enable": False, 76 | "https_port": 5001, 77 | "https_redirect": False, 78 | "hyperv_enlighten": True, 79 | "ip": "", 80 | "is_acting": False, 81 | "is_general_vm": True, 82 | "is_online": True, 83 | "is_replica_support": True, 84 | "is_rs_install": True, 85 | "iso_images": ["unmounted", "unmounted"], 86 | "kb_layout": "Default", 87 | "max_disk_latency": 0, 88 | "name": "lnx_test", 89 | "ram_used": 1169544, 90 | "repo_id": "3a69ac31-004c-4b02-9b36-609099bc9be6", 91 | "repo_name": "KrosseKrabbe - VM Storage 1", 92 | "snap_num": 0, 93 | "status": "running", 94 | "status_desc": "", 95 | "status_type": "healthy", 96 | "total_disk_iops": 0, 97 | "total_disk_throughput": 0, 98 | "total_net_receive": 0, 99 | "total_net_send": 0, 100 | "usb_device_name": None, 101 | "usbs": ["unmounted", "unmounted", "unmounted", "unmounted"], 102 | "use_ovmf": False, 103 | "vcpu_num": 1, 104 | "vcpu_usage": 25, 105 | "vdisk_num": 1, 106 | "video_card": "vmvga", 107 | "vram_size": 1048576, 108 | }, 109 | ], 110 | "is_freeze": False, 111 | }, 112 | "success": True, 113 | } 114 | -------------------------------------------------------------------------------- /tests/conftest.py: -------------------------------------------------------------------------------- 1 | """Main conftest.""" 2 | 3 | import pytest 4 | 5 | from synology_dsm.synology_dsm import SynologyDSM 6 | 7 | from . 
import ( 8 | VALID_HOST, 9 | VALID_HTTPS, 10 | VALID_PASSWORD, 11 | VALID_PORT, 12 | VALID_USER, 13 | SynologyDSMMock, 14 | ) 15 | 16 | 17 | @pytest.fixture 18 | def api() -> SynologyDSM: 19 | """Return a mock DSM 6 API.""" 20 | return SynologyDSMMock( 21 | None, 22 | VALID_HOST, 23 | VALID_PORT, 24 | VALID_USER, 25 | VALID_PASSWORD, 26 | VALID_HTTPS, 27 | ) 28 | 29 | 30 | @pytest.fixture( 31 | params=[5, 6], 32 | ids=["DSM 5", "DSM 6"], 33 | ) 34 | def dsm(request, api) -> SynologyDSM: 35 | """Return a mock DSM 5 or 6 API.""" 36 | api.dsm_version = request.param 37 | return api 38 | 39 | 40 | @pytest.fixture 41 | def dsm_5(api) -> SynologyDSM: 42 | """Return a mock DSM 5 API.""" 43 | api.dsm_version = 5 44 | return api 45 | 46 | 47 | @pytest.fixture 48 | def dsm_6(api) -> SynologyDSM: 49 | """Alias for the api fixture.""" 50 | return api 51 | 52 | 53 | @pytest.fixture 54 | def dsm_7(api) -> SynologyDSM: 55 | """Return a mock DSM 7 API.""" 56 | api.dsm_version = 7 57 | return api 58 | -------------------------------------------------------------------------------- /tests/const.py: -------------------------------------------------------------------------------- 1 | """Test constants.""" 2 | 3 | # API test data are located in 4 | # `tests/api_data/dsm_[dsm_major_version]` 5 | # Data constant names should follow the pattern 6 | # "DSM_[dsm_version]_[API_KEY]" 7 | # for failed responses, append "_FAILED" 8 | 9 | SESSION_ID = "session_id" 10 | SYNO_TOKEN = "Syñ0_T0k€ñ" # noqa: S105 11 | DEVICE_TOKEN = "Dév!cè_T0k€ñ" # noqa: S105 12 | UNIQUE_KEY = "1x2X3x!_UK" 13 | 14 | # Common API error code 15 | ERROR_UNKNOWN = {"error": {"code": 100}, "success": False} 16 | ERROR_INVALID_PARAMETERS = {"error": {"code": 101}, "success": False} 17 | ERROR_API_NOT_EXISTS = {"error": {"code": 102}, "success": False} 18 | ERROR_API_METHOD_NOT_EXISTS = {"error": {"code": 103}, "success": False} 19 | ERROR_API_VERSION_NOT_SUPPORTED = {"error": {"code": 104}, "success": False} 20 | ERROR_INSUFFICIENT_USER_PRIVILEGE = {"error": {"code": 105}, "success": False} 21 | ERROR_CONNECTION_TIME_OUT = {"error": {"code": 106}, "success": False} 22 | ERROR_MULTIPLE_LOGIN_DETECTED = {"error": {"code": 107}, "success": False} 23 | 24 | # Auth API error code 25 | ERROR_AUTH_INVALID_CREDENTIALS = {"error": {"code": 400}, "success": False} 26 | ERROR_AUTH_GUEST_OR_DISABLED_ACCOUNT = {"error": {"code": 401}, "success": False} 27 | ERROR_AUTH_PERMISSION_DENIED = {"error": {"code": 402}, "success": False} 28 | ERROR_AUTH_OTP_NOT_SPECIFIED = {"error": {"code": 403}, "success": False} 29 | ERROR_AUTH_OTP_AUTHENTICATE_FAILED = {"error": {"code": 404}, "success": False} 30 | ERROR_AUTH_INCORRECT_APP_PORTAL = {"error": {"code": 405}, "success": False} 31 | ERROR_AUTH_OTP_CODE_ENFORCED = {"error": {"code": 406}, "success": False} 32 | ERROR_AUTH_MAX_TRIES = {"error": {"code": 407}, "success": False} 33 | -------------------------------------------------------------------------------- /tests/test_synology_dsm_5.py: -------------------------------------------------------------------------------- 1 | """Synology DSM tests.""" 2 | 3 | # pylint: disable=protected-access 4 | import pytest 5 | 6 | from synology_dsm.const import API_AUTH 7 | from synology_dsm.exceptions import SynologyDSMLogin2SARequiredException 8 | 9 | from .
import ( 10 | VALID_HOST, 11 | VALID_HTTPS, 12 | VALID_OTP, 13 | VALID_PASSWORD, 14 | VALID_PORT, 15 | VALID_USER_2SA, 16 | SynologyDSMMock, 17 | ) 18 | from .const import DEVICE_TOKEN, SESSION_ID 19 | 20 | 21 | class TestSynologyDSM5: 22 | """SynologyDSM 5test cases.""" 23 | 24 | @pytest.mark.asyncio 25 | async def test_login(self, dsm_5): 26 | """Test login.""" 27 | assert await dsm_5.login() 28 | assert dsm_5.apis.get(API_AUTH) 29 | assert dsm_5._session_id == SESSION_ID 30 | assert dsm_5._syno_token is None 31 | 32 | @pytest.mark.asyncio 33 | async def test_login_2sa(self): 34 | """Test login with 2SA.""" 35 | dsm_5 = SynologyDSMMock( 36 | None, 37 | VALID_HOST, 38 | VALID_PORT, 39 | VALID_USER_2SA, 40 | VALID_PASSWORD, 41 | VALID_HTTPS, 42 | ) 43 | dsm_5.dsm_version = 5 44 | with pytest.raises(SynologyDSMLogin2SARequiredException): 45 | await dsm_5.login() 46 | await dsm_5.login(VALID_OTP) 47 | 48 | assert dsm_5._session_id == SESSION_ID 49 | assert dsm_5._syno_token is None 50 | assert dsm_5._device_token == DEVICE_TOKEN 51 | assert dsm_5.device_token == DEVICE_TOKEN 52 | 53 | @pytest.mark.asyncio 54 | async def test_login_2sa_new_session(self): 55 | """Test login with 2SA and a new session with granted device.""" 56 | dsm_5 = SynologyDSMMock( 57 | None, 58 | VALID_HOST, 59 | VALID_PORT, 60 | VALID_USER_2SA, 61 | VALID_PASSWORD, 62 | VALID_HTTPS, 63 | device_token=DEVICE_TOKEN, 64 | ) 65 | dsm_5.dsm_version = 5 66 | assert await dsm_5.login() 67 | 68 | assert dsm_5._session_id == SESSION_ID 69 | assert dsm_5._syno_token is None 70 | assert dsm_5._device_token == DEVICE_TOKEN 71 | assert dsm_5.device_token == DEVICE_TOKEN 72 | 73 | @pytest.mark.asyncio 74 | async def test_information(self, dsm_5): 75 | """Test information.""" 76 | assert await dsm_5.login() 77 | assert dsm_5.information 78 | await dsm_5.information.update() 79 | assert dsm_5.information.model == "DS3615xs" 80 | assert dsm_5.information.ram == 6144 81 | assert dsm_5.information.serial == "B3J4N01003" 82 | assert dsm_5.information.temperature == 40 83 | assert not dsm_5.information.temperature_warn 84 | assert dsm_5.information.uptime == 3897 85 | assert dsm_5.information.version == "5967" 86 | assert dsm_5.information.version_string == "DSM 5.2-5967 Update 9" 87 | assert dsm_5.information.awesome_version == "5.2.0.9" 88 | 89 | @pytest.mark.asyncio 90 | async def test_network(self, dsm_5): 91 | """Test network.""" 92 | assert await dsm_5.login() 93 | assert dsm_5.network 94 | await dsm_5.network.update() 95 | assert dsm_5.network.dns 96 | assert dsm_5.network.gateway 97 | assert dsm_5.network.hostname 98 | assert dsm_5.network.interfaces 99 | assert dsm_5.network.interface("eth0") 100 | assert dsm_5.network.interface("eth1") is None 101 | assert dsm_5.network.macs 102 | assert dsm_5.network.workgroup 103 | 104 | @pytest.mark.asyncio 105 | async def test_storage(self, dsm_5): 106 | """Test storage roots.""" 107 | assert await dsm_5.login() 108 | assert dsm_5.storage 109 | await dsm_5.storage.update() 110 | assert dsm_5.storage.disks 111 | assert dsm_5.storage.env 112 | assert dsm_5.storage.storage_pools == [] 113 | assert dsm_5.storage.volumes 114 | 115 | @pytest.mark.asyncio 116 | async def test_storage_volumes(self, dsm_5): 117 | """Test storage volumes.""" 118 | assert await dsm_5.login() 119 | await dsm_5.storage.update() 120 | # Basics 121 | assert dsm_5.storage.volumes_ids 122 | for volume_id in dsm_5.storage.volumes_ids: 123 | if volume_id == "test_volume": 124 | continue 125 | assert 
dsm_5.storage.volume_status(volume_id) 126 | assert dsm_5.storage.volume_device_type(volume_id) 127 | assert dsm_5.storage.volume_size_total(volume_id) 128 | assert dsm_5.storage.volume_size_total(volume_id, True) 129 | assert dsm_5.storage.volume_size_used(volume_id) 130 | assert dsm_5.storage.volume_size_used(volume_id, True) 131 | assert dsm_5.storage.volume_percentage_used(volume_id) 132 | assert ( 133 | dsm_5.storage.volume_disk_temp_avg(volume_id) is None 134 | ) # because of empty storagePools 135 | assert ( 136 | dsm_5.storage.volume_disk_temp_max(volume_id) is None 137 | ) # because of empty storagePools 138 | 139 | # Existing volume 140 | assert dsm_5.storage.volume_status("volume_1") == "normal" 141 | assert dsm_5.storage.volume_device_type("volume_1") == "raid_5" 142 | assert dsm_5.storage.volume_size_total("volume_1") == 8846249701376 143 | assert dsm_5.storage.volume_size_total("volume_1", True) == "8.0Tb" 144 | assert dsm_5.storage.volume_size_used("volume_1") == 5719795761152 145 | assert dsm_5.storage.volume_size_used("volume_1", True) == "5.2Tb" 146 | assert dsm_5.storage.volume_percentage_used("volume_1") == 64.7 147 | assert ( 148 | dsm_5.storage.volume_disk_temp_avg("volume_1") is None 149 | ) # because of empty storagePools 150 | assert ( 151 | dsm_5.storage.volume_disk_temp_max("volume_1") is None 152 | ) # because of empty storagePools 153 | 154 | # Non existing volume 155 | assert not dsm_5.storage.volume_status("not_a_volume") 156 | assert not dsm_5.storage.volume_device_type("not_a_volume") 157 | assert not dsm_5.storage.volume_size_total("not_a_volume") 158 | assert not dsm_5.storage.volume_size_total("not_a_volume", True) 159 | assert not dsm_5.storage.volume_size_used("not_a_volume") 160 | assert not dsm_5.storage.volume_size_used("not_a_volume", True) 161 | assert not dsm_5.storage.volume_percentage_used("not_a_volume") 162 | assert not dsm_5.storage.volume_disk_temp_avg("not_a_volume") 163 | assert not dsm_5.storage.volume_disk_temp_max("not_a_volume") 164 | 165 | # Test volume 166 | assert dsm_5.storage.volume_status("test_volume") is None 167 | assert dsm_5.storage.volume_device_type("test_volume") is None 168 | assert dsm_5.storage.volume_size_total("test_volume") is None 169 | assert dsm_5.storage.volume_size_total("test_volume", True) is None 170 | assert dsm_5.storage.volume_size_used("test_volume") is None 171 | assert dsm_5.storage.volume_size_used("test_volume", True) is None 172 | assert dsm_5.storage.volume_percentage_used("test_volume") is None 173 | assert dsm_5.storage.volume_disk_temp_avg("test_volume") is None 174 | assert dsm_5.storage.volume_disk_temp_max("test_volume") is None 175 | 176 | @pytest.mark.asyncio 177 | async def test_storage_disks(self, dsm_5): 178 | """Test storage disks.""" 179 | assert await dsm_5.login() 180 | await dsm_5.storage.update() 181 | # Basics 182 | assert dsm_5.storage.disks_ids 183 | for disk_id in dsm_5.storage.disks_ids: 184 | if disk_id == "test_disk": 185 | continue 186 | assert "Disk" in dsm_5.storage.disk_name(disk_id) 187 | assert "/dev/" in dsm_5.storage.disk_device(disk_id) 188 | if disk_id == "sda": 189 | assert dsm_5.storage.disk_smart_status(disk_id) == "90%" 190 | else: 191 | assert dsm_5.storage.disk_smart_status(disk_id) == "safe" 192 | assert dsm_5.storage.disk_status(disk_id) == "normal" 193 | assert not dsm_5.storage.disk_exceed_bad_sector_thr(disk_id) 194 | assert not dsm_5.storage.disk_below_remain_life_thr(disk_id) 195 | assert dsm_5.storage.disk_temp(disk_id) 196 | 197 | # Non existing 
disk 198 | assert not dsm_5.storage.disk_name("not_a_disk") 199 | assert not dsm_5.storage.disk_device("not_a_disk") 200 | assert not dsm_5.storage.disk_smart_status("not_a_disk") 201 | assert not dsm_5.storage.disk_status("not_a_disk") 202 | assert not dsm_5.storage.disk_exceed_bad_sector_thr("not_a_disk") 203 | assert not dsm_5.storage.disk_below_remain_life_thr("not_a_disk") 204 | assert not dsm_5.storage.disk_temp("not_a_disk") 205 | 206 | # Test disk 207 | assert dsm_5.storage.disk_name("test_disk") is None 208 | assert dsm_5.storage.disk_device("test_disk") is None 209 | assert dsm_5.storage.disk_smart_status("test_disk") is None 210 | assert dsm_5.storage.disk_status("test_disk") is None 211 | assert dsm_5.storage.disk_exceed_bad_sector_thr("test_disk") is None 212 | assert dsm_5.storage.disk_below_remain_life_thr("test_disk") is None 213 | assert dsm_5.storage.disk_temp("test_disk") is None 214 | --------------------------------------------------------------------------------
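For orientation, here is a minimal sketch of how the fixtures from tests/conftest.py and the naming conventions from tests/const.py would combine in a DSM 7 test case. It is illustrative only and not part of the repository contents above: the class and test names are hypothetical, and it assumes the DSM 7 mock answers login() and exposes apis the same way the DSM 5/6 mocks exercised in tests/test_synology_dsm_5.py do.

import pytest

from synology_dsm.const import API_AUTH


class TestSynologyDSM7Sketch:
    """Hypothetical example, not the project's own DSM 7 test module."""

    @pytest.mark.asyncio
    async def test_login(self, dsm_7):
        """Log in against the DSM 7 mock provided by the dsm_7 fixture."""
        assert await dsm_7.login()
        assert dsm_7.apis.get(API_AUTH)

    # Any new API payload backing such a test would live under
    # tests/api_data/dsm_7/ and be named DSM_7_[API_KEY], with "_FAILED"
    # appended for an error response, per the convention in tests/const.py.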