├── .gitattributes ├── .darglint ├── src └── synology_dsm │ ├── api │ ├── __init__.py │ ├── core │ │ ├── __init__.py │ │ ├── upgrade.py │ │ ├── security.py │ │ ├── share.py │ │ ├── system.py │ │ └── utilization.py │ ├── dsm │ │ ├── __init__.py │ │ ├── information.py │ │ └── network.py │ ├── storage │ │ ├── __init__.py │ │ └── storage.py │ ├── surveillance_station │ │ ├── const.py │ │ ├── camera.py │ │ └── __init__.py │ └── download_station │ │ ├── task.py │ │ └── __init__.py │ ├── __init__.py │ ├── helpers.py │ ├── exceptions.py │ ├── const.py │ └── synology_dsm.py ├── tests ├── api_data │ ├── __init__.py │ ├── dsm_5 │ │ ├── core │ │ │ ├── __init__.py │ │ │ └── const_5_core_utilization.py │ │ ├── dsm │ │ │ ├── __init__.py │ │ │ ├── const_5_dsm_network.py │ │ │ └── const_5_dsm_info.py │ │ ├── storage │ │ │ ├── __init__.py │ │ │ └── const_5_storage_storage.py │ │ ├── const_5_api_auth.py │ │ └── __init__.py │ └── dsm_6 │ │ ├── core │ │ ├── __init__.py │ │ ├── const_6_core_upgrade.py │ │ ├── const_6_core_system.py │ │ ├── const_6_core_utilization.py │ │ ├── const_6_core_share.py │ │ └── const_6_core_security.py │ │ ├── dsm │ │ ├── __init__.py │ │ ├── const_6_dsm_info.py │ │ └── const_6_dsm_network.py │ │ ├── storage │ │ └── __init__.py │ │ ├── download_station │ │ ├── __init__.py │ │ ├── const_6_download_station_stat.py │ │ ├── const_6_download_station_info.py │ │ └── const_6_download_station_task.py │ │ ├── surveillance_station │ │ ├── __init__.py │ │ └── const_6_surveillance_station_home_mode.py │ │ ├── const_6_api_auth.py │ │ └── __init__.py ├── const.py ├── __init__.py └── test_synology_dsm_5.py ├── scripts ├── common.sh ├── clean.sh └── publish.sh ├── .github ├── workflows │ ├── constraints.txt │ ├── release.yml │ └── tests.yml ├── CODEOWNERS └── release-drafter.yml ├── codecov.yml ├── .flake8 ├── mypy.ini ├── LICENSE.txt ├── .gitignore ├── .pre-commit-config.yaml ├── pyproject.toml ├── CONTRIBUTING.rst ├── noxfile.py └── README.rst /.gitattributes: -------------------------------------------------------------------------------- 1 | * text=auto eol=lf 2 | -------------------------------------------------------------------------------- /.darglint: -------------------------------------------------------------------------------- 1 | [darglint] 2 | strictness = short 3 | -------------------------------------------------------------------------------- /src/synology_dsm/api/__init__.py: -------------------------------------------------------------------------------- 1 | """Synology API models.""" 2 | -------------------------------------------------------------------------------- /tests/api_data/__init__.py: -------------------------------------------------------------------------------- 1 | """APIs raw data constants.""" 2 | -------------------------------------------------------------------------------- /src/synology_dsm/api/core/__init__.py: -------------------------------------------------------------------------------- 1 | """Synology Core API models.""" 2 | -------------------------------------------------------------------------------- /src/synology_dsm/api/dsm/__init__.py: -------------------------------------------------------------------------------- 1 | """Synology DSM API models.""" 2 | -------------------------------------------------------------------------------- /tests/api_data/dsm_5/core/__init__.py: -------------------------------------------------------------------------------- 1 | """DSM 5 SYNO.Core.* datas.""" 2 | 
-------------------------------------------------------------------------------- /tests/api_data/dsm_5/dsm/__init__.py: -------------------------------------------------------------------------------- 1 | """DSM 5 SYNO.DSM.* datas.""" 2 | -------------------------------------------------------------------------------- /tests/api_data/dsm_6/core/__init__.py: -------------------------------------------------------------------------------- 1 | """DSM 6 SYNO.Core.* datas.""" 2 | -------------------------------------------------------------------------------- /tests/api_data/dsm_6/dsm/__init__.py: -------------------------------------------------------------------------------- 1 | """DSM 6 SYNO.DSM.* datas.""" 2 | -------------------------------------------------------------------------------- /tests/api_data/dsm_5/storage/__init__.py: -------------------------------------------------------------------------------- 1 | """DSM 5 SYNO.Storage.* datas.""" 2 | -------------------------------------------------------------------------------- /tests/api_data/dsm_6/storage/__init__.py: -------------------------------------------------------------------------------- 1 | """DSM 6 SYNO.Storage.* datas.""" 2 | -------------------------------------------------------------------------------- /src/synology_dsm/api/storage/__init__.py: -------------------------------------------------------------------------------- 1 | """Synology Storage API models.""" 2 | -------------------------------------------------------------------------------- /scripts/common.sh: -------------------------------------------------------------------------------- 1 | # Be in right place 2 | if [ ! -f setup.py ]; then 3 | cd .. 4 | fi 5 | -------------------------------------------------------------------------------- /tests/api_data/dsm_6/download_station/__init__.py: -------------------------------------------------------------------------------- 1 | """DSM 6 SYNO.DownloadStation.* datas.""" 2 | -------------------------------------------------------------------------------- /tests/api_data/dsm_6/surveillance_station/__init__.py: -------------------------------------------------------------------------------- 1 | """DSM 6 SYNO.SurveillanceStation.* datas.""" 2 | -------------------------------------------------------------------------------- /.github/workflows/constraints.txt: -------------------------------------------------------------------------------- 1 | pip==20.2.4 2 | nox==2020.8.22 3 | nox-poetry==0.5.0 4 | poetry==1.1.4 5 | virtualenv==20.1.0 6 | -------------------------------------------------------------------------------- /src/synology_dsm/__init__.py: -------------------------------------------------------------------------------- 1 | """The python-synology library.""" 2 | from .synology_dsm import SynologyDSM 3 | 4 | __all__ = ["SynologyDSM"] 5 | -------------------------------------------------------------------------------- /scripts/clean.sh: -------------------------------------------------------------------------------- 1 | ./scripts/common.sh 2 | 3 | # Clean 4 | rm -r .tox 5 | rm -r build 6 | rm -r dist 7 | rm -r python_synology.egg-info 8 | rm -r src/python_synology.egg-info 9 | -------------------------------------------------------------------------------- /codecov.yml: -------------------------------------------------------------------------------- 1 | comment: false 2 | coverage: 3 | status: 4 | project: 5 | default: 6 | target: "80" 7 | patch: 8 | default: 9 | target: "100" 10 | 
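# Coverage gates: overall project coverage must stay at or above 80%, and the lines changed in a pull request ("patch") must be fully covered.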
-------------------------------------------------------------------------------- /tests/api_data/dsm_6/download_station/const_6_download_station_stat.py: -------------------------------------------------------------------------------- 1 | """DSM 6 SYNO.DownloadStation.Statistic data.""" 2 | 3 | DSM_6_DOWNLOAD_STATION_STAT_INFO = { 4 | "data": {"speed_download": 89950232, "speed_upload": 0}, 5 | "success": True, 6 | } 7 | -------------------------------------------------------------------------------- /.github/CODEOWNERS: -------------------------------------------------------------------------------- 1 | @Quentame 2 | 3 | .github/* @oncleben31 4 | .darglint @oncleben31 5 | .flake8 @oncleben31 6 | 7 | */surveillance_station/* @shenxn 8 | src/synology_dsm/api/core/share.py @Gestas 9 | src/synology_dsm/api/core/system.py @mib1185 10 | src/synology_dsm/api/core/upgrade.py @mib1185 11 | tests/api_data/dsm_6/core/const_6_core_share.py @Gestas 12 | tests/api_data/dsm_6/core/const_6_core_system.py @mib1185 13 | tests/api_data/dsm_6/core/const_6_core_upgrade.py @mib1185 14 | -------------------------------------------------------------------------------- /src/synology_dsm/api/surveillance_station/const.py: -------------------------------------------------------------------------------- 1 | """Synology SurveillanceStation API constants.""" 2 | 3 | RECORDING_STATUS = [ 4 | 1, # Continue recording schedule 5 | 2, # Motion detect recording schedule 6 | 3, # Digital input recording schedule 7 | 4, # Digital input recording schedule 8 | 5, # Manual recording schedule 9 | ] 10 | MOTION_DETECTION_DISABLED = -1 11 | MOTION_DETECTION_BY_CAMERA = 0 12 | MOTION_DETECTION_BY_SURVEILLANCE = 1 13 | 14 | SNAPSHOT_SIZE_ICON = 1 15 | SNAPSHOT_SIZE_FULL = 2 16 | -------------------------------------------------------------------------------- /tests/api_data/dsm_5/const_5_api_auth.py: -------------------------------------------------------------------------------- 1 | """DSM 5 SYNO.API.Auth data.""" 2 | from tests.const import DEVICE_TOKEN 3 | from tests.const import ERROR_AUTH_OTP_NOT_SPECIFIED 4 | from tests.const import SESSION_ID 5 | 6 | # No synotoken for an unknown reason 7 | DSM_5_AUTH_LOGIN = { 8 | "data": {"is_portal_port": False, "sid": SESSION_ID}, 9 | "success": True, 10 | } 11 | DSM_5_AUTH_LOGIN_2SA = ERROR_AUTH_OTP_NOT_SPECIFIED 12 | DSM_5_AUTH_LOGIN_2SA_OTP = { 13 | "data": {"did": DEVICE_TOKEN, "is_portal_port": False, "sid": SESSION_ID}, 14 | "success": True, 15 | } 16 | -------------------------------------------------------------------------------- /.flake8: -------------------------------------------------------------------------------- 1 | [flake8] 2 | select = B,B9,C,D,DAR,E,F,N,RST,S,W 3 | # Some rules are ignore on top of the standard ones. 
4 | # C901 (complexity) will be processed in a dedicated PR 5 | # DARxxx (documentation in docstrings) will be processed in a dedicated PR 6 | # Final target is: 7 | # ignore = E203,E501,RST201,RST203,RST301,W503 8 | ignore = E203,E501,RST201,RST203,RST301,W503, C901, DAR101, DAR201 9 | max-line-length = 80 10 | max-complexity = 10 11 | docstring-convention = google 12 | per-file-ignores = 13 | tests/*:S101 14 | tests/**/const_*.py:B950 15 | src/synology_dsm/const.py:B950 16 | -------------------------------------------------------------------------------- /scripts/publish.sh: -------------------------------------------------------------------------------- 1 | # Publish the library 2 | # https://pypi.org/project/python-synology 3 | # Publish documentation here: https://packaging.python.org/tutorials/packaging-projects/ 4 | 5 | ./scripts/common.sh 6 | ./scripts/clean.sh 7 | 8 | # Install/update dependencies 9 | python3 -m pip install --user --upgrade setuptools wheel 10 | python3 -m pip install --user --upgrade twine 11 | 12 | # Build 13 | python3 setup.py sdist bdist_wheel 14 | 15 | # Push to PyPi 16 | python3 -m twine upload dist/* 17 | # python3 -m twine upload --repository testpypi dist/* 18 | 19 | # Enter credentials manually :P 20 | -------------------------------------------------------------------------------- /mypy.ini: -------------------------------------------------------------------------------- 1 | [mypy] 2 | check_untyped_defs = True 3 | disallow_any_generics = True 4 | disallow_incomplete_defs = True 5 | disallow_subclassing_any = True 6 | disallow_untyped_calls = True 7 | disallow_untyped_decorators = True 8 | disallow_untyped_defs = True 9 | no_implicit_optional = True 10 | no_implicit_reexport = True 11 | pretty = True 12 | show_column_numbers = True 13 | show_error_codes = True 14 | show_error_context = True 15 | strict_equality = True 16 | warn_redundant_casts = True 17 | warn_return_any = True 18 | warn_unreachable = True 19 | warn_unused_configs = True 20 | warn_unused_ignores = True 21 | -------------------------------------------------------------------------------- /tests/api_data/dsm_5/dsm/const_5_dsm_network.py: -------------------------------------------------------------------------------- 1 | """DSM 5 SYNO.DSM.Network data.""" 2 | 3 | DSM_5_DSM_NETWORK = { 4 | "data": { 5 | "dns": ["192.168.1.1"], 6 | "gateway": "192.168.1.1", 7 | "hostname": "HOME-NAS", 8 | "interfaces": [ 9 | { 10 | "id": "eth0", 11 | "ip": [{"address": "192.168.1.10", "netmask": "255.255.255.0"}], 12 | "mac": "XX-XX-XX-XX-XX-XX", 13 | "type": "lan", 14 | } 15 | ], 16 | "workgroup": "WORKGROUP", 17 | }, 18 | "success": True, 19 | } 20 | -------------------------------------------------------------------------------- /tests/api_data/dsm_6/const_6_api_auth.py: -------------------------------------------------------------------------------- 1 | """DSM 6 SYNO.API.Auth data.""" 2 | from tests.const import DEVICE_TOKEN 3 | from tests.const import ERROR_AUTH_OTP_NOT_SPECIFIED 4 | from tests.const import SESSION_ID 5 | from tests.const import SYNO_TOKEN 6 | 7 | 8 | DSM_6_AUTH_LOGIN = { 9 | "data": {"is_portal_port": False, "sid": SESSION_ID, "synotoken": SYNO_TOKEN}, 10 | "success": True, 11 | } 12 | DSM_6_AUTH_LOGIN_2SA = ERROR_AUTH_OTP_NOT_SPECIFIED 13 | DSM_6_AUTH_LOGIN_2SA_OTP = { 14 | "data": { 15 | "did": DEVICE_TOKEN, 16 | "is_portal_port": False, 17 | "sid": SESSION_ID, 18 | "synotoken": SYNO_TOKEN, 19 | }, 20 | "success": True, 21 | } 22 | 23 | DSM_6_AUTH_LOGOUT = {"success": True} 24 | 
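# Note: DSM_6_AUTH_LOGIN_2SA reuses the "OTP not specified" error payload (code 403) to model the first login attempt with 2-step verification enabled; DSM_6_AUTH_LOGIN_2SA_OTP is the follow-up success response once the one-time password is supplied, including the device token ("did").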
-------------------------------------------------------------------------------- /tests/api_data/dsm_6/core/const_6_core_upgrade.py: -------------------------------------------------------------------------------- 1 | """DSM 6 SYNO.Core.Upgrade data.""" 2 | 3 | DSM_6_CORE_UPGRADE_FALSE = {"data": {"update": {"available": False}}, "success": True} 4 | DSM_6_CORE_UPGRADE_TRUE = { 5 | "data": { 6 | "update": { 7 | "available": True, 8 | "reboot": "now", 9 | "restart": "some", 10 | "type": "nano", 11 | "version": "DSM 6.2.3-25426 Update 2", 12 | "version_details": { 13 | "buildnumber": 25426, 14 | "major": 6, 15 | "micro": 3, 16 | "minor": 2, 17 | "nano": 2, 18 | "os_name": "DSM", 19 | }, 20 | } 21 | }, 22 | "success": True, 23 | } 24 | -------------------------------------------------------------------------------- /tests/api_data/dsm_6/download_station/const_6_download_station_info.py: -------------------------------------------------------------------------------- 1 | """DSM 6 SYNO.DownloadStation.Info data.""" 2 | 3 | DSM_6_DOWNLOAD_STATION_INFO_INFO = { 4 | "data": {"is_manager": True, "version": 3543, "version_string": "3.8-3543"}, 5 | "success": True, 6 | } 7 | 8 | DSM_6_DOWNLOAD_STATION_INFO_CONFIG = { 9 | "data": { 10 | "bt_max_download": 0, 11 | "bt_max_upload": 800, 12 | "default_destination": "downloads", 13 | "emule_default_destination": None, 14 | "emule_enabled": False, 15 | "emule_max_download": 0, 16 | "emule_max_upload": 20, 17 | "ftp_max_download": 0, 18 | "http_max_download": 0, 19 | "nzb_max_download": 0, 20 | "unzip_service_enabled": False, 21 | }, 22 | "success": True, 23 | } 24 | -------------------------------------------------------------------------------- /tests/api_data/dsm_5/__init__.py: -------------------------------------------------------------------------------- 1 | """DSM 5 datas.""" 2 | from .const_5_api_auth import DSM_5_AUTH_LOGIN 3 | from .const_5_api_auth import DSM_5_AUTH_LOGIN_2SA 4 | from .const_5_api_auth import DSM_5_AUTH_LOGIN_2SA_OTP 5 | from .const_5_api_info import DSM_5_API_INFO 6 | from .core.const_5_core_utilization import DSM_5_CORE_UTILIZATION 7 | from .dsm.const_5_dsm_info import DSM_5_DSM_INFORMATION 8 | from .dsm.const_5_dsm_network import DSM_5_DSM_NETWORK 9 | from .storage.const_5_storage_storage import ( 10 | DSM_5_STORAGE_STORAGE_DS410J_RAID5_4DISKS_1VOL, 11 | ) 12 | 13 | __all__ = [ 14 | "DSM_5_AUTH_LOGIN", 15 | "DSM_5_AUTH_LOGIN_2SA", 16 | "DSM_5_AUTH_LOGIN_2SA_OTP", 17 | "DSM_5_API_INFO", 18 | "DSM_5_CORE_UTILIZATION", 19 | "DSM_5_DSM_INFORMATION", 20 | "DSM_5_DSM_NETWORK", 21 | "DSM_5_STORAGE_STORAGE_DS410J_RAID5_4DISKS_1VOL", 22 | ] 23 | -------------------------------------------------------------------------------- /.github/release-drafter.yml: -------------------------------------------------------------------------------- 1 | categories: 2 | - title: ":boom: Breaking Changes" 3 | label: "breaking" 4 | - title: ":rocket: Features" 5 | label: "enhancement" 6 | - title: ":fire: Removals and Deprecations" 7 | label: "removal" 8 | - title: ":beetle: Fixes" 9 | label: "bug" 10 | - title: ":racehorse: Performance" 11 | label: "performance" 12 | - title: ":rotating_light: Testing" 13 | label: "testing" 14 | - title: ":construction_worker: Continuous Integration" 15 | label: "ci" 16 | - title: ":books: Documentation" 17 | label: "documentation" 18 | - title: ":hammer: Refactoring" 19 | label: "refactoring" 20 | - title: ":lipstick: Style" 21 | label: "style" 22 | - title: ":package: Dependencies" 23 | labels: 24 | - "dependencies" 
25 | - "build" 26 | template: | 27 | ## Changes 28 | 29 | $CHANGES 30 | -------------------------------------------------------------------------------- /tests/api_data/dsm_5/dsm/const_5_dsm_info.py: -------------------------------------------------------------------------------- 1 | """DSM 5 SYNO.DSM.Info data.""" 2 | 3 | DSM_5_DSM_INFORMATION_DS410J = { 4 | "data": { 5 | "codepage": "enu", 6 | "model": "DS410j", 7 | "ram": 128, 8 | "serial": "A3G7N00628", 9 | "temperature": 52, 10 | "temperature_warn": False, 11 | "time": "Mon Apr 13 18:26:27 2020", 12 | "uptime": 7077254, 13 | "version": "5967", 14 | "version_string": "DSM 5.2-5967 Update 9", 15 | }, 16 | "success": True, 17 | } 18 | 19 | DSM_5_DSM_INFORMATION_DS3615XS = { 20 | "data": { 21 | "codepage": "rus", 22 | "model": "DS3615xs", 23 | "ram": 6144, 24 | "serial": "B3J4N01003", 25 | "temperature": 40, 26 | "time": "Sat Apr 25 20:21:57 2020", 27 | "uptime": 3897, 28 | "version": "5967", 29 | "version_string": "DSM 5.2-5967 Update 9", 30 | }, 31 | "success": True, 32 | } 33 | 34 | DSM_5_DSM_INFORMATION = DSM_5_DSM_INFORMATION_DS3615XS 35 | -------------------------------------------------------------------------------- /tests/api_data/dsm_6/dsm/const_6_dsm_info.py: -------------------------------------------------------------------------------- 1 | """DSM 6 SYNO.DSM.Info data.""" 2 | 3 | DSM_6_DSM_INFORMATION_DS213_PLUS = { 4 | "data": { 5 | "codepage": "enu", 6 | "model": "DS213+", 7 | "ram": 512, 8 | "serial": "XXXXXXXXXXX", 9 | "temperature": 30, 10 | "temperature_warn": False, 11 | "time": "Thu Apr 30 14:57:35 2020", 12 | "uptime": 3258607, 13 | "version": "24922", 14 | "version_string": "DSM 6.2.2-24922 Update 4", 15 | }, 16 | "success": True, 17 | } 18 | 19 | DSM_6_DSM_INFORMATION_DS918_PLUS = { 20 | "data": { 21 | "codepage": "fre", 22 | "model": "DS918+", 23 | "ram": 4096, 24 | "serial": "1920PDN001501", 25 | "temperature": 40, 26 | "temperature_warn": False, 27 | "time": "Sun Mar 29 19:33:41 2020", 28 | "uptime": 155084, 29 | "version": "24922", 30 | "version_string": "DSM 6.2.2-24922 Update 4", 31 | }, 32 | "success": True, 33 | } 34 | 35 | DSM_6_DSM_INFORMATION = DSM_6_DSM_INFORMATION_DS918_PLUS 36 | -------------------------------------------------------------------------------- /LICENSE.txt: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | Copyright (c) 2016 ProtoThis 3 | 4 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: 5 | 6 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 7 | 8 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
9 | -------------------------------------------------------------------------------- /src/synology_dsm/api/core/upgrade.py: -------------------------------------------------------------------------------- 1 | """DSM Upgrade data and actions.""" 2 | 3 | 4 | class SynoCoreUpgrade: 5 | """Class containing upgrade data and actions.""" 6 | 7 | API_KEY = "SYNO.Core.Upgrade" 8 | API_SERVER_KEY = API_KEY + ".Server" 9 | 10 | def __init__(self, dsm): 11 | """Constructor method.""" 12 | self._dsm = dsm 13 | self._data = {} 14 | 15 | def update(self): 16 | """Updates Upgrade data.""" 17 | raw_data = self._dsm.get(self.API_SERVER_KEY, "check") 18 | if raw_data: 19 | self._data = raw_data["data"].get("update", raw_data["data"]) 20 | 21 | @property 22 | def update_available(self): 23 | """Gets available update info.""" 24 | return self._data.get("available") 25 | 26 | @property 27 | def available_version(self): 28 | """Gets available verion info.""" 29 | return self._data.get("version") 30 | 31 | @property 32 | def reboot_needed(self): 33 | """Gets info if reboot is needed.""" 34 | return self._data.get("reboot") 35 | 36 | @property 37 | def service_restarts(self): 38 | """Gets info if services are restarted.""" 39 | return self._data.get("restart") 40 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # From cookiecutter-hypermodern-python 2 | .mypy_cache/ 3 | /.coverage 4 | /.nox/ 5 | /.python-version 6 | /.pytype/ 7 | /dist/ 8 | /docs/_build/ 9 | /src/*.egg-info/ 10 | __pycache__/ 11 | 12 | # Following are kept for not anoying current developers. Could be remove in 13 | # a future package release. 14 | 15 | # Python 16 | *.py[cod] 17 | 18 | # C extensions 19 | *.so 20 | 21 | # Packages 22 | *.egg 23 | *.egg-info 24 | dist 25 | build 26 | eggs 27 | .eggs 28 | parts 29 | bin 30 | var 31 | sdist 32 | develop-eggs 33 | .installed.cfg 34 | lib 35 | lib64 36 | pip-wheel-metadata 37 | 38 | # Logs 39 | *.log 40 | pip-log.txt 41 | 42 | # Unit test / coverage reports 43 | .coverage 44 | .tox 45 | coverage.xml 46 | nosetests.xml 47 | htmlcov/ 48 | test-reports/ 49 | test-results.xml 50 | test-output.xml 51 | 52 | # Translations 53 | *.mo 54 | 55 | # Mac OS X 56 | .DS_Store 57 | .AppleDouble 58 | .LSOverride 59 | Icon 60 | 61 | # Windows Explorer 62 | desktop.ini 63 | 64 | # Visual Studio Code 65 | .vscode/* 66 | !.vscode/cSpell.json 67 | !.vscode/extensions.json 68 | !.vscode/tasks.json 69 | 70 | # IntelliJ IDEA 71 | .idea 72 | *.iml 73 | 74 | # Sublime text 75 | *.sublime-project 76 | *.sublime-workspace 77 | 78 | # Mr Developer 79 | .mr.developer.cfg 80 | .project 81 | .pydevproject 82 | -------------------------------------------------------------------------------- /src/synology_dsm/helpers.py: -------------------------------------------------------------------------------- 1 | """Helpers.""" 2 | 3 | 4 | class SynoFormatHelper: 5 | """Class containing various formatting functions.""" 6 | 7 | @staticmethod 8 | def bytes_to_readable(num): 9 | """Converts bytes to a human readable format.""" 10 | if num < 512: 11 | return "0 Kb" 12 | elif num < 1024: 13 | return "1 Kb" 14 | 15 | for unit in ["", "Kb", "Mb", "Gb", "Tb", "Pb", "Eb", "Zb"]: 16 | if abs(num) < 1024.0: 17 | return "%3.1f%s" % (num, unit) 18 | num /= 1024.0 19 | return "%.1f%s" % (num, "Yb") 20 | 21 | @staticmethod 22 | def bytes_to_megabytes(num): 23 | """Converts bytes to megabytes.""" 24 | var_mb = num / 1024.0 / 
1024.0 25 | 26 | return round(var_mb, 1) 27 | 28 | @staticmethod 29 | def bytes_to_gigabytes(num): 30 | """Converts bytes to gigabytes.""" 31 | var_gb = num / 1024.0 / 1024.0 / 1024.0 32 | 33 | return round(var_gb, 1) 34 | 35 | @staticmethod 36 | def bytes_to_terrabytes(num): 37 | """Converts bytes to terrabytes.""" 38 | var_tb = num / 1024.0 / 1024.0 / 1024.0 / 1024.0 39 | 40 | return round(var_tb, 1) 41 | 42 | @staticmethod 43 | def megabytes_to_bytes(num): 44 | """Converts megabytes to bytes.""" 45 | var_bytes = num * 1024.0 * 1024.0 46 | 47 | return round(var_bytes, 1) 48 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | repos: 2 | - repo: local 3 | hooks: 4 | - id: black 5 | name: black 6 | entry: black 7 | language: system 8 | types: [python] 9 | require_serial: true 10 | - id: check-added-large-files 11 | name: Check for added large files 12 | entry: check-added-large-files 13 | language: system 14 | - id: check-toml 15 | name: Check Toml 16 | entry: check-toml 17 | language: system 18 | types: [toml] 19 | - id: check-yaml 20 | name: Check Yaml 21 | entry: check-yaml 22 | language: system 23 | types: [yaml] 24 | - id: end-of-file-fixer 25 | name: Fix End of Files 26 | entry: end-of-file-fixer 27 | language: system 28 | types: [text] 29 | stages: [commit, push, manual] 30 | - id: flake8 31 | name: flake8 32 | entry: flake8 33 | language: system 34 | types: [python] 35 | require_serial: true 36 | - id: reorder-python-imports 37 | name: Reorder python imports 38 | entry: reorder-python-imports 39 | language: system 40 | types: [python] 41 | args: [--application-directories=src] 42 | - id: trailing-whitespace 43 | name: Trim Trailing Whitespace 44 | entry: trailing-whitespace-fixer 45 | language: system 46 | types: [text] 47 | stages: [commit, push, manual] 48 | - repo: https://github.com/prettier/prettier 49 | rev: 2.1.2 50 | hooks: 51 | - id: prettier 52 | -------------------------------------------------------------------------------- /tests/const.py: -------------------------------------------------------------------------------- 1 | """Test constants.""" 2 | # API test data are localized in 3 | # `tests/api_data/dsm_[dsm_major_version]` 4 | # Data constant names should be like : 5 | # "DSM_[dsm_version]_[API_KEY]" 6 | # if data failed, add "_FAILED" 7 | 8 | SESSION_ID = "session_id" 9 | SYNO_TOKEN = "Syñ0_T0k€ñ" 10 | DEVICE_TOKEN = "Dév!cè_T0k€ñ" 11 | UNIQUE_KEY = "1x2X3x!_UK" 12 | 13 | # Common API error code 14 | ERROR_UNKNOWN = {"error": {"code": 100}, "success": False} 15 | ERROR_INVALID_PARAMETERS = {"error": {"code": 101}, "success": False} 16 | ERROR_API_NOT_EXISTS = {"error": {"code": 102}, "success": False} 17 | ERROR_API_METHOD_NOT_EXISTS = {"error": {"code": 103}, "success": False} 18 | ERROR_API_VERSION_NOT_SUPPORTED = {"error": {"code": 104}, "success": False} 19 | ERROR_INSUFFICIENT_USER_PRIVILEGE = {"error": {"code": 105}, "success": False} 20 | ERROR_CONNECTION_TIME_OUT = {"error": {"code": 106}, "success": False} 21 | ERROR_MULTIPLE_LOGIN_DETECTED = {"error": {"code": 107}, "success": False} 22 | 23 | # Auth API error code 24 | ERROR_AUTH_INVALID_CREDENTIALS = {"error": {"code": 400}, "success": False} 25 | ERROR_AUTH_GUEST_OR_DISABLED_ACCOUNT = {"error": {"code": 401}, "success": False} 26 | ERROR_AUTH_PERMISSION_DENIED = {"error": {"code": 402}, "success": False} 27 | ERROR_AUTH_OTP_NOT_SPECIFIED = {"error": {"code": 
403}, "success": False} 28 | ERROR_AUTH_OTP_AUTHENTICATE_FAILED = {"error": {"code": 404}, "success": False} 29 | ERROR_AUTH_INCORRECT_APP_PORTAL = {"error": {"code": 405}, "success": False} 30 | ERROR_AUTH_OTP_CODE_ENFORCED = {"error": {"code": 406}, "success": False} 31 | ERROR_AUTH_MAX_TRIES = {"error": {"code": 407}, "success": False} 32 | -------------------------------------------------------------------------------- /src/synology_dsm/api/download_station/task.py: -------------------------------------------------------------------------------- 1 | """DownloadStation task.""" 2 | 3 | 4 | class SynoDownloadTask: 5 | """An representation of a Synology DownloadStation task.""" 6 | 7 | def __init__(self, data): 8 | """Initialize a Download Station task.""" 9 | self._data = data 10 | 11 | def update(self, data): 12 | """Update the task.""" 13 | self._data = data 14 | 15 | @property 16 | def id(self): 17 | """Return id of the task.""" 18 | return self._data["id"] 19 | 20 | @property 21 | def title(self): 22 | """Return title of the task.""" 23 | return self._data["title"] 24 | 25 | @property 26 | def type(self): 27 | """Return type of the task (bt, nzb, http(s), ftp, emule).""" 28 | return self._data["type"] 29 | 30 | @property 31 | def username(self): 32 | """Return username of the task.""" 33 | return self._data["username"] 34 | 35 | @property 36 | def size(self): 37 | """Return size of the task.""" 38 | return self._data["size"] 39 | 40 | @property 41 | def status(self): 42 | """Return status of the task. 43 | 44 | Possible values: waiting, downloading, paused, finishing, finished, 45 | hash_checking, seeding, filehosting_waiting, extracting, error 46 | """ 47 | return self._data["status"] 48 | 49 | @property 50 | def status_extra(self): 51 | """Return status_extra of the task.""" 52 | return self._data.get("status_extra") 53 | 54 | @property 55 | def additional(self): 56 | """Return additional data of the task.""" 57 | return self._data["additional"] 58 | -------------------------------------------------------------------------------- /src/synology_dsm/api/dsm/information.py: -------------------------------------------------------------------------------- 1 | """DSM Information data.""" 2 | 3 | 4 | class SynoDSMInformation: 5 | """Class containing Information data.""" 6 | 7 | API_KEY = "SYNO.DSM.Info" 8 | 9 | def __init__(self, dsm): 10 | """Constructor methods.""" 11 | self._dsm = dsm 12 | self._data = {} 13 | 14 | def update(self): 15 | """Updates information data.""" 16 | raw_data = self._dsm.get(self.API_KEY, "getinfo") 17 | if raw_data: 18 | self._data = raw_data["data"] 19 | 20 | @property 21 | def model(self): 22 | """Model of the NAS.""" 23 | return self._data.get("model") 24 | 25 | @property 26 | def ram(self): 27 | """RAM of the NAS (in MB).""" 28 | return self._data.get("ram") 29 | 30 | @property 31 | def serial(self): 32 | """Serial of the NAS.""" 33 | return self._data.get("serial") 34 | 35 | @property 36 | def temperature(self): 37 | """Temperature of the NAS.""" 38 | return self._data.get("temperature") 39 | 40 | @property 41 | def temperature_warn(self): 42 | """Temperature warning of the NAS.""" 43 | return self._data.get("temperature_warn") 44 | 45 | @property 46 | def uptime(self): 47 | """Uptime of the NAS.""" 48 | return self._data.get("uptime") 49 | 50 | @property 51 | def version(self): 52 | """Version of the NAS (build version).""" 53 | return self._data.get("version") 54 | 55 | @property 56 | def version_string(self): 57 | """Version of the NAS.""" 58 | return 
self._data.get("version_string") 59 | -------------------------------------------------------------------------------- /src/synology_dsm/api/dsm/network.py: -------------------------------------------------------------------------------- 1 | """DSM Network data.""" 2 | 3 | 4 | class SynoDSMNetwork: 5 | """Class containing Network data.""" 6 | 7 | API_KEY = "SYNO.DSM.Network" 8 | 9 | def __init__(self, dsm): 10 | """Constructor method.""" 11 | self._dsm = dsm 12 | self._data = {} 13 | 14 | def update(self): 15 | """Updates network data.""" 16 | raw_data = self._dsm.get(self.API_KEY, "list") 17 | if raw_data: 18 | self._data = raw_data["data"] 19 | 20 | @property 21 | def dns(self): 22 | """DNS of the NAS.""" 23 | return self._data.get("dns") 24 | 25 | @property 26 | def gateway(self): 27 | """Gateway of the NAS.""" 28 | return self._data.get("gateway") 29 | 30 | @property 31 | def hostname(self): 32 | """Host name of the NAS.""" 33 | return self._data.get("hostname") 34 | 35 | @property 36 | def interfaces(self): 37 | """Interfaces of the NAS.""" 38 | return self._data.get("interfaces", []) 39 | 40 | def interface(self, eth_id): 41 | """Interface of the NAS.""" 42 | for interface in self.interfaces: 43 | if interface["id"] == eth_id: 44 | return interface 45 | return None 46 | 47 | @property 48 | def macs(self): 49 | """MACs of the NAS.""" # noqa: D403 50 | macs = [] 51 | for interface in self.interfaces: 52 | if interface.get("mac"): 53 | macs.append(interface["mac"]) 54 | return macs 55 | 56 | @property 57 | def workgroup(self): 58 | """Workgroup of the NAS.""" 59 | return self._data.get("workgroup") 60 | -------------------------------------------------------------------------------- /src/synology_dsm/api/core/security.py: -------------------------------------------------------------------------------- 1 | """DSM Security data.""" 2 | 3 | 4 | class SynoCoreSecurity: 5 | """Class containing Security data.""" 6 | 7 | API_KEY = "SYNO.Core.SecurityScan.Status" 8 | 9 | def __init__(self, dsm): 10 | """Constructor method.""" 11 | self._dsm = dsm 12 | self._data = {} 13 | 14 | def update(self): 15 | """Updates security data.""" 16 | raw_data = self._dsm.get(self.API_KEY, "system_get") 17 | if raw_data: 18 | self._data = raw_data["data"] 19 | 20 | @property 21 | def checks(self): 22 | """Gets the checklist by check category.""" 23 | return self._data.get("items", {}) 24 | 25 | @property 26 | def last_scan_time(self): 27 | """Gets the last scan time.""" 28 | return self._data.get("lastScanTime") 29 | 30 | @property 31 | def start_time(self): 32 | """Gets the start time (if in progress).""" 33 | return self._data.get("startTime") 34 | 35 | @property 36 | def success(self): 37 | """Gets the last scan success.""" 38 | return self._data.get("success") 39 | 40 | @property 41 | def progress(self): 42 | """Gets the scan progress. 43 | 44 | Returns: 100 if finished 45 | """ 46 | return self._data.get("sysProgress") 47 | 48 | @property 49 | def status(self): 50 | """Gets the last scan status. 51 | 52 | Possible values: safe, danger, info, outOfDate, risk, warning. 
53 | """ 54 | return self._data.get("sysStatus") 55 | 56 | @property 57 | def status_by_check(self): 58 | """Gets the last scan status per check.""" 59 | status = {} 60 | for category in self.checks: 61 | status[category] = self.checks[category]["failSeverity"] 62 | return status 63 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.poetry] 2 | name = "python-synology" 3 | version = "1.0.0" 4 | description = "Python API for communication with Synology DSM" 5 | authors = ["Quentin POLLET (Quentame)", "FG van Zeelst (ProtoThis)"] 6 | license = "MIT" 7 | readme = "README.rst" 8 | homepage = "https://github.com/ProtoThis/python-synology" 9 | repository = "https://github.com/ProtoThis/python-synology" 10 | documentation = "https://python-synology.readthedocs.io" 11 | classifiers = [ 12 | "Development Status :: 5 - Production/Stable", 13 | "Intended Audience :: Developers", 14 | "License :: OSI Approved :: MIT License", 15 | "Operating System :: OS Independent", 16 | "Programming Language :: Python", 17 | "Programming Language :: Python :: 3 :: Only", 18 | "Programming Language :: Python :: 3", 19 | "Programming Language :: Python :: 3.7", 20 | "Programming Language :: Python :: 3.8", 21 | "Programming Language :: Python :: 3.9", 22 | "Topic :: Software Development :: Libraries", 23 | ] 24 | keywords=["synology-dsm", "synology"] 25 | packages = [ 26 | { include = "synology_dsm", from = "src" }, 27 | ] 28 | 29 | [tool.poetry.urls] 30 | Changelog = "https://github.com/ProtoThis/python-synology/releases" 31 | 32 | [tool.poetry.dependencies] 33 | python = "^3.7.0" 34 | requests = "^2.24.0" 35 | urllib3 = "^1.25.10" 36 | 37 | [tool.poetry.dev-dependencies] 38 | pytest = "^6.1.2" 39 | coverage = {extras = ["toml"], version = "^5.3"} 40 | safety = "^1.9.0" 41 | mypy = "^0.790" 42 | typeguard = "^2.9.1" 43 | xdoctest = {extras = ["colors"], version = "^0.15.0"} 44 | sphinx = "^3.3.1" 45 | sphinx-autobuild = "^2020.9.1" 46 | pre-commit = "^2.8.2" 47 | flake8 = "^3.8.4" 48 | black = "^20.8b1" 49 | flake8-bandit = "^2.1.2" 50 | flake8-bugbear = "^20.1.4" 51 | flake8-docstrings = "^1.5.0" 52 | flake8-rst-docstrings = "^0.0.14" 53 | pep8-naming = "^0.11.1" 54 | darglint = "^1.5.5" 55 | reorder-python-imports = "^2.3.6" 56 | pre-commit-hooks = "^3.3.0" 57 | sphinx-rtd-theme = "^0.5.0" 58 | Pygments = "^2.7.2" 59 | 60 | [tool.poetry.scripts] 61 | python-synology = "synology_dsm.__main__:main" 62 | 63 | [tool.coverage.paths] 64 | source = ["src", "*/site-packages"] 65 | 66 | [tool.coverage.run] 67 | branch = true 68 | source = ["synology_dsm"] 69 | 70 | [tool.coverage.report] 71 | show_missing = true 72 | fail_under = 80 73 | 74 | [build-system] 75 | requires = ["poetry>=0.12"] 76 | build-backend = "poetry.masonry.api" 77 | -------------------------------------------------------------------------------- /.github/workflows/release.yml: -------------------------------------------------------------------------------- 1 | name: Release 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | - master 8 | 9 | jobs: 10 | release: 11 | name: Release 12 | runs-on: ubuntu-latest 13 | steps: 14 | - name: Check out the repository 15 | uses: actions/checkout@v2.3.4 16 | with: 17 | fetch-depth: 2 18 | 19 | - name: Set up Python 20 | uses: actions/setup-python@v2.1.4 21 | with: 22 | python-version: "3.9" 23 | 24 | - name: Upgrade pip 25 | run: | 26 | pip install 
--constraint=.github/workflows/constraints.txt pip 27 | pip --version 28 | 29 | - name: Install Poetry 30 | run: | 31 | pip install --constraint=.github/workflows/constraints.txt poetry 32 | poetry --version 33 | 34 | - name: Check if there is a parent commit 35 | id: check-parent-commit 36 | run: | 37 | echo "::set-output name=sha::$(git rev-parse --verify --quiet HEAD^)" 38 | 39 | - name: Detect and tag new version 40 | id: check-version 41 | if: steps.check-parent-commit.outputs.sha 42 | uses: salsify/action-detect-and-tag-new-version@v2.0.1 43 | with: 44 | version-command: | 45 | bash -o pipefail -c "poetry version | awk '{ print \$2 }'" 46 | 47 | - name: Bump version for developmental release 48 | if: "! steps.check-version.outputs.tag" 49 | run: | 50 | poetry version patch && 51 | version=$(poetry version | awk '{ print $2 }') && 52 | poetry version $version.dev.$(date +%s) 53 | 54 | - name: Build package 55 | run: | 56 | poetry build --ansi 57 | 58 | - name: Publish package on PyPI 59 | if: steps.check-version.outputs.tag 60 | uses: pypa/gh-action-pypi-publish@v1.4.1 61 | with: 62 | user: __token__ 63 | password: ${{ secrets.PYPI_TOKEN }} 64 | 65 | - name: Publish package on TestPyPI 66 | if: "! steps.check-version.outputs.tag" 67 | uses: pypa/gh-action-pypi-publish@v1.4.1 68 | with: 69 | user: __token__ 70 | password: ${{ secrets.TEST_PYPI_TOKEN }} 71 | repository_url: https://test.pypi.org/legacy/ 72 | 73 | - name: Publish release notes 74 | uses: release-drafter/release-drafter@v5.12.1 75 | with: 76 | publish: ${{ steps.check-version.outputs.tag != '' }} 77 | tag: ${{ steps.check-version.outputs.tag }} 78 | env: 79 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 80 | -------------------------------------------------------------------------------- /src/synology_dsm/api/core/share.py: -------------------------------------------------------------------------------- 1 | """Shared Folders data.""" 2 | from synology_dsm.helpers import SynoFormatHelper 3 | 4 | 5 | class SynoCoreShare: 6 | """Class containing Share data.""" 7 | 8 | API_KEY = "SYNO.Core.Share" 9 | # Syno supports two methods to retrieve resource details, GET and POST. 10 | # GET returns a limited set of keys. With POST the same keys as GET 11 | # are returned plus any keys listed in the "additional" parameter. 12 | # NOTE: The value of the additional key must be a string. 
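    # REQUEST_DATA below is sent as the POST body by update(); "additional" lists the extra keys to return, JSON-formatted but passed as one plain string, per the note above.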
13 | REQUEST_DATA = { 14 | "additional": '["hidden","encryption","is_aclmode","unite_permission",' 15 | '"is_support_acl","is_sync_share","is_force_readonly","force_readonly_reason",' 16 | '"recyclebin","is_share_moving","is_cluster_share","is_exfat_share",' 17 | '"is_cold_storage_share","support_snapshot","share_quota",' 18 | '"enable_share_compress","enable_share_cow","include_cold_storage_share",' 19 | '"is_cold_storage_share"]', 20 | "shareType": "all", 21 | } 22 | 23 | def __init__(self, dsm): 24 | """Constructor method.""" 25 | self._dsm = dsm 26 | self._data = {} 27 | 28 | def update(self): 29 | """Updates share data.""" 30 | raw_data = self._dsm.post(self.API_KEY, "list", data=self.REQUEST_DATA) 31 | if raw_data: 32 | self._data = raw_data["data"] 33 | 34 | @property 35 | def shares(self): 36 | """Gets all shares.""" 37 | return self._data.get("shares", []) 38 | 39 | @property 40 | def shares_uuids(self): 41 | """Return (internal) share ids.""" 42 | shares = [] 43 | for share in self.shares: 44 | shares.append(share["uuid"]) 45 | return shares 46 | 47 | def get_share(self, share_uuid): 48 | """Returns a specific share by uuid..""" 49 | for share in self.shares: 50 | if share["uuid"] == share_uuid: 51 | return share 52 | return {} 53 | 54 | def share_name(self, share_uuid): 55 | """Return the name of this share.""" 56 | return self.get_share(share_uuid).get("name") 57 | 58 | def share_path(self, share_uuid): 59 | """Return the volume path of this share.""" 60 | return self.get_share(share_uuid).get("vol_path") 61 | 62 | def share_recycle_bin(self, share_uuid): 63 | """Is the recycle bin enabled for this share?""" 64 | return self.get_share(share_uuid).get("enable_recycle_bin") 65 | 66 | def share_size(self, share_uuid, human_readable=False): 67 | """Total size of share.""" 68 | share_size_mb = self.get_share(share_uuid).get("share_quota_used") 69 | # Share size is returned in MB so we convert it. 
70 | share_size_bytes = SynoFormatHelper.megabytes_to_bytes(share_size_mb) 71 | if human_readable: 72 | return SynoFormatHelper.bytes_to_readable(share_size_bytes) 73 | return share_size_bytes 74 | -------------------------------------------------------------------------------- /tests/api_data/dsm_6/dsm/const_6_dsm_network.py: -------------------------------------------------------------------------------- 1 | """DSM 6 SYNO.DSM.Network data.""" 2 | 3 | DSM_6_DSM_NETWORK_1LAN = { 4 | "data": { 5 | "dns": ["192.168.0.35"], 6 | "gateway": "192.168.0.254", 7 | "hostname": "NAS_[NAME]", 8 | "interfaces": [ 9 | { 10 | "id": "eth0", 11 | "ip": [{"address": "192.168.0.35", "netmask": "255.255.255.0"}], 12 | "ipv6": [ 13 | { 14 | "address": "2a01:e35:2434:d420:211:32ff:fea6:ca59", 15 | "prefix_length": 64, 16 | "scope": "global", 17 | }, 18 | { 19 | "address": "fe80::211:32ff:fea6:ca59", 20 | "prefix_length": 64, 21 | "scope": "link", 22 | }, 23 | ], 24 | "mac": "00-11-32-XX-XX-59", 25 | "type": "lan", 26 | }, 27 | { 28 | "id": "eth1", 29 | "ip": [{"address": "169.254.158.209", "netmask": "255.255.0.0"}], 30 | "mac": "00-11-32-XX-XX-5A", 31 | "type": "lan", 32 | }, 33 | ], 34 | "workgroup": "WORKGROUP", 35 | }, 36 | "success": True, 37 | } 38 | 39 | DSM_6_DSM_NETWORK_2LAN_1PPPOE = { 40 | "data": { 41 | "dns": ["192.168.0.35"], 42 | "gateway": "192.168.0.254", 43 | "hostname": "NAS_[NAME]", 44 | "interfaces": [ 45 | { 46 | "id": "eth0", 47 | "ip": [{"address": "192.168.5.10", "netmask": "255.255.255.0"}], 48 | "ipv6": [ 49 | { 50 | "address": "2001:b211:317c:147e:211:32ff:fe5d:fd11", 51 | "prefix_length": 64, 52 | "scope": "global", 53 | }, 54 | { 55 | "address": "fe80::211:32ff:fe5d:fd11", 56 | "prefix_length": 64, 57 | "scope": "link", 58 | }, 59 | ], 60 | "mac": "00-11-32-XX-XX-11", 61 | "type": "lan", 62 | }, 63 | { 64 | "id": "eth1", 65 | "ip": [{"address": "192.168.1.100", "netmask": "255.255.255.0"}], 66 | "ipv6": [ 67 | { 68 | "address": "2001:b011:300c:176c:211:11ff:fe5d:fd12", 69 | "prefix_length": 64, 70 | "scope": "global", 71 | }, 72 | { 73 | "address": "fe80::211:31ff:ff5d:fd12", 74 | "prefix_length": 64, 75 | "scope": "link", 76 | }, 77 | ], 78 | "mac": "00-11-32-XX-XX-12", 79 | "type": "lan", 80 | }, 81 | { 82 | "id": "ppp0", 83 | "ip": [{"address": "114.45.2.158", "netmask": "255.255.255.255"}], 84 | "type": "pppoe", 85 | }, 86 | ], 87 | "workgroup": "WORKGROUP", 88 | }, 89 | "success": True, 90 | } 91 | -------------------------------------------------------------------------------- /src/synology_dsm/api/surveillance_station/camera.py: -------------------------------------------------------------------------------- 1 | """SurveillanceStation camera.""" 2 | from .const import MOTION_DETECTION_DISABLED 3 | from .const import RECORDING_STATUS 4 | 5 | 6 | class SynoCamera: 7 | """An representation of a Synology SurveillanceStation camera.""" 8 | 9 | def __init__(self, data, live_view_data=None): 10 | """Initialize a Surveillance Station camera.""" 11 | self._data = data 12 | self.live_view = SynoCameraLiveView(live_view_data) 13 | self._motion_detection_enabled = None 14 | 15 | def update(self, data): 16 | """Update the camera.""" 17 | self._data = data 18 | 19 | def update_motion_detection(self, data): 20 | """Update the camera motion detection.""" 21 | self._motion_detection_enabled = ( 22 | MOTION_DETECTION_DISABLED != data["MDParam"]["source"] 23 | ) 24 | 25 | @property 26 | def id(self): 27 | """Return id of the camera.""" 28 | return self._data["id"] 29 | 30 | @property 31 | def 
name(self): 32 | """Return name of the camera.""" 33 | return self._data["name"] 34 | 35 | @property 36 | def model(self): 37 | """Return model of the camera.""" 38 | return self._data["model"] 39 | 40 | @property 41 | def resolution(self): 42 | """Return resolution of the camera.""" 43 | return self._data["resolution"] 44 | 45 | @property 46 | def fps(self): 47 | """Return FPS of the camera.""" 48 | return self._data["fps"] 49 | 50 | @property 51 | def is_enabled(self): 52 | """Return true if camera is enabled.""" 53 | return self._data["enabled"] 54 | 55 | @property 56 | def is_motion_detection_enabled(self): 57 | """Return true if motion detection is enabled.""" 58 | return self._motion_detection_enabled 59 | 60 | @property 61 | def is_recording(self): 62 | """Return true if camera is recording.""" 63 | return self._data["recStatus"] in RECORDING_STATUS 64 | 65 | 66 | class SynoCameraLiveView: 67 | """An representation of a Synology SurveillanceStation camera live view.""" 68 | 69 | def __init__(self, data): 70 | """Initialize a Surveillance Station camera live view.""" 71 | self.update(data) 72 | 73 | def update(self, data): 74 | """Update the camera live view.""" 75 | self._data = data 76 | 77 | @property 78 | def mjpeg_http(self): 79 | """Return the mjpeg stream (over http) path of the camera.""" 80 | return self._data["mjpegHttpPath"] 81 | 82 | @property 83 | def multicast(self): 84 | """Return the multi-cast path of the camera.""" 85 | return self._data["multicstPath"] 86 | 87 | @property 88 | def mxpeg_http(self): 89 | """Return the mxpeg stream path of the camera.""" 90 | return self._data["mxpegHttpPath"] 91 | 92 | @property 93 | def rtsp_http(self): 94 | """Return the RTSP stream (over http) path of the camera.""" 95 | return self._data["rtspOverHttpPath"] 96 | 97 | @property 98 | def rtsp(self): 99 | """Return the RTSP stream path of the camera.""" 100 | return self._data["rtspPath"] 101 | -------------------------------------------------------------------------------- /CONTRIBUTING.rst: -------------------------------------------------------------------------------- 1 | Contributor Guide 2 | ================= 3 | 4 | Thank you for your interest in improving this project. 5 | This project is open-source under the `MIT license`_ and 6 | welcomes contributions in the form of bug reports, feature requests, and pull requests. 7 | 8 | Here is a list of important resources for contributors: 9 | 10 | - `Source Code`_ 11 | - `Documentation`_ 12 | - `Issue Tracker`_ 13 | - `Code of Conduct`_ 14 | 15 | .. _MIT license: https://opensource.org/licenses/MIT 16 | .. _Source Code: https://github.com/ProtoThis/python-synology 17 | .. _Documentation: https://python-synology.readthedocs.io/ 18 | .. _Issue Tracker: https://github.com/ProtoThis/python-synology/issues 19 | 20 | How to report a bug 21 | ------------------- 22 | 23 | Report bugs on the `Issue Tracker`_. 24 | 25 | When filing an issue, make sure to answer these questions: 26 | 27 | - Which operating system and Python version are you using? 28 | - Which version of this project are you using? 29 | - What did you do? 30 | - What did you expect to see? 31 | - What did you see instead? 32 | 33 | The best way to get your bug fixed is to provide a test case, 34 | and/or steps to reproduce the issue. 35 | 36 | 37 | How to request a feature 38 | ------------------------ 39 | 40 | Request features on the `Issue Tracker`_. 
41 | 42 | 43 | How to set up your development environment 44 | ------------------------------------------ 45 | 46 | You need Python 3.7+ and the following tools: 47 | 48 | - Poetry_ 49 | - Nox_ 50 | - nox-poetry_ 51 | 52 | Install the package with development requirements: 53 | 54 | .. code:: console 55 | 56 | $ poetry install 57 | 58 | You can now run an interactive Python session: 59 | 60 | .. code:: console 61 | 62 | $ poetry run python 63 | 64 | .. _Poetry: https://python-poetry.org/ 65 | .. _Nox: https://nox.thea.codes/ 66 | .. _nox-poetry: https://nox-poetry.readthedocs.io/ 67 | 68 | 69 | How to test the project 70 | ----------------------- 71 | 72 | Run the full test suite: 73 | 74 | .. code:: console 75 | 76 | $ nox 77 | 78 | List the available Nox sessions: 79 | 80 | .. code:: console 81 | 82 | $ nox --list-sessions 83 | 84 | You can also run a specific Nox session. 85 | For example, invoke the unit test suite like this: 86 | 87 | .. code:: console 88 | 89 | $ nox --session=tests 90 | 91 | Unit tests are located in the ``tests`` directory, 92 | and are written using the pytest_ testing framework. 93 | 94 | .. _pytest: https://pytest.readthedocs.io/ 95 | 96 | 97 | How to submit changes 98 | --------------------- 99 | 100 | Open a `pull request`_ to submit changes to this project. 101 | 102 | Your pull request needs to meet the following guidelines for acceptance: 103 | 104 | - The Nox test suite must pass without errors and warnings. 105 | - Include unit tests. This project maintains 100% code coverage. 106 | - If your changes add functionality, update the documentation accordingly. 107 | 108 | Feel free to submit early, though—we can always iterate on this. 109 | 110 | To run linting and code formatting checks before committing your change, you can install pre-commit as a Git hook by running the following command: 111 | 112 | .. code:: console 113 | 114 | $ nox --session=pre-commit -- install 115 | 116 | It is recommended to open an issue before starting work on anything. 117 | This will allow a chance to talk it over with the owners and validate your approach. 118 | 119 | .. _pull request: https://github.com/ProtoThis/python-synology/pulls 120 | .. github-only 121 | .. 
_Code of Conduct: CODE_OF_CONDUCT.rst 122 | -------------------------------------------------------------------------------- /src/synology_dsm/api/core/system.py: -------------------------------------------------------------------------------- 1 | """DSM System data and actions.""" 2 | 3 | 4 | class SynoCoreSystem: 5 | """Class containing System data and actions.""" 6 | 7 | API_KEY = "SYNO.Core.System" 8 | 9 | def __init__(self, dsm): 10 | """Constructor method.""" 11 | self._dsm = dsm 12 | self._data = {} 13 | 14 | def update(self): 15 | """Updates System data.""" 16 | raw_data = self._dsm.get(self.API_KEY, "info") 17 | if raw_data: 18 | self._data = raw_data["data"] 19 | 20 | # 21 | # get information 22 | # 23 | @property 24 | def cpu_clock_speed(self): 25 | """Gets System CPU clock speed.""" 26 | return self._data.get("cpu_clock_speed") 27 | 28 | @property 29 | def cpu_cores(self): 30 | """Gets System CPU cores.""" 31 | return self._data.get("cpu_cores") 32 | 33 | @property 34 | def cpu_family(self): 35 | """Gets System CPU family.""" 36 | return self._data.get("cpu_family") 37 | 38 | @property 39 | def cpu_series(self): 40 | """Gets System CPU series.""" 41 | return self._data.get("cpu_series") 42 | 43 | @property 44 | def enabled_ntp(self): 45 | """Gets System NTP state.""" 46 | return self._data.get("enabled_ntp") 47 | 48 | @property 49 | def ntp_server(self): 50 | """Gets System NTP server.""" 51 | return self._data.get("ntp_server") 52 | 53 | @property 54 | def firmware_ver(self): 55 | """Gets System firmware version.""" 56 | return self._data.get("firmware_ver") 57 | 58 | @property 59 | def model(self): 60 | """Gets System model.""" 61 | return self._data.get("model") 62 | 63 | @property 64 | def ram_size(self): 65 | """Gets System ram size.""" 66 | return self._data.get("ram_size") 67 | 68 | @property 69 | def serial(self): 70 | """Gets System serial number.""" 71 | return self._data.get("serial") 72 | 73 | @property 74 | def sys_temp(self): 75 | """Gets System temperature.""" 76 | return self._data.get("sys_temp") 77 | 78 | @property 79 | def time(self): 80 | """Gets System time.""" 81 | return self._data.get("time") 82 | 83 | @property 84 | def time_zone(self): 85 | """Gets System time zone.""" 86 | return self._data.get("time_zone") 87 | 88 | @property 89 | def time_zone_desc(self): 90 | """Gets System time zone description.""" 91 | return self._data.get("time_zone_desc") 92 | 93 | @property 94 | def up_time(self): 95 | """Gets System uptime.""" 96 | return self._data.get("up_time") 97 | 98 | @property 99 | def usb_dev(self): 100 | """Gets System connected usb devices.""" 101 | return self._data.get("usb_dev", []) 102 | 103 | # 104 | # do system actions 105 | # 106 | def shutdown(self): 107 | """Shutdown NAS.""" 108 | res = self._dsm.get( 109 | self.API_KEY, 110 | "shutdown", 111 | max_version=1, # shutdown method is only available on api version 1 112 | ) 113 | return res 114 | 115 | def reboot(self): 116 | """Reboot NAS.""" 117 | res = self._dsm.get( 118 | self.API_KEY, 119 | "reboot", 120 | max_version=1, # reboot method is only available on api version 1 121 | ) 122 | return res 123 | -------------------------------------------------------------------------------- /src/synology_dsm/api/download_station/__init__.py: -------------------------------------------------------------------------------- 1 | """Synology DownloadStation API wrapper.""" 2 | from .task import SynoDownloadTask 3 | 4 | 5 | class SynoDownloadStation: 6 | """An implementation of a Synology 
DownloadStation.""" 7 | 8 | API_KEY = "SYNO.DownloadStation.*" 9 | INFO_API_KEY = "SYNO.DownloadStation.Info" 10 | STAT_API_KEY = "SYNO.DownloadStation.Statistic" 11 | TASK_API_KEY = "SYNO.DownloadStation.Task" 12 | 13 | def __init__(self, dsm): 14 | """Initialize a Download Station.""" 15 | self._dsm = dsm 16 | self._tasks_by_id = {} 17 | self.additionals = [ 18 | "detail", 19 | "file", 20 | ] # Can contain: detail, transfer, file, tracker, peer 21 | 22 | def update(self): 23 | """Update tasks from API.""" 24 | self._tasks_by_id = {} 25 | list_data = self._dsm.get( 26 | self.TASK_API_KEY, "List", {"additional": ",".join(self.additionals)} 27 | )["data"] 28 | for task_data in list_data["tasks"]: 29 | if task_data["id"] in self._tasks_by_id: 30 | self._tasks_by_id[task_data["id"]].update(task_data) 31 | else: 32 | self._tasks_by_id[task_data["id"]] = SynoDownloadTask(task_data) 33 | 34 | # Global 35 | def get_info(self): 36 | """Return general informations about the Download Station instance.""" 37 | return self._dsm.get(self.INFO_API_KEY, "GetInfo") 38 | 39 | def get_config(self): 40 | """Return configuration about the Download Station instance.""" 41 | return self._dsm.get(self.INFO_API_KEY, "GetConfig") 42 | 43 | def get_stat(self): 44 | """Return statistic about the Download Station instance.""" 45 | return self._dsm.get(self.STAT_API_KEY, "GetInfo") 46 | 47 | # Downloads 48 | def get_all_tasks(self): 49 | """Return a list of tasks.""" 50 | return self._tasks_by_id.values() 51 | 52 | def get_task(self, task_id): 53 | """Return task matching task_id.""" 54 | return self._tasks_by_id[task_id] 55 | 56 | def create(self, uri, unzip_password=None, destination=None): 57 | """Create a new task (uri accepts HTTP/FTP/magnet/ED2K links).""" 58 | res = self._dsm.post( 59 | self.TASK_API_KEY, 60 | "Create", 61 | { 62 | "uri": ",".join(uri) if isinstance(uri, list) else uri, 63 | "unzip_password": unzip_password, 64 | "destination": destination, 65 | }, 66 | ) 67 | self.update() 68 | return res 69 | 70 | def pause(self, task_id): 71 | """Pause a download task.""" 72 | res = self._dsm.get( 73 | self.TASK_API_KEY, 74 | "Pause", 75 | {"id": ",".join(task_id) if isinstance(task_id, list) else task_id}, 76 | ) 77 | self.update() 78 | return res 79 | 80 | def resume(self, task_id): 81 | """Resume a paused download task.""" 82 | res = self._dsm.get( 83 | self.TASK_API_KEY, 84 | "Resume", 85 | {"id": ",".join(task_id) if isinstance(task_id, list) else task_id}, 86 | ) 87 | self.update() 88 | return res 89 | 90 | def delete(self, task_id, force_complete=False): 91 | """Delete a download task.""" 92 | res = self._dsm.get( 93 | self.TASK_API_KEY, 94 | "Delete", 95 | { 96 | "id": ",".join(task_id) if isinstance(task_id, list) else task_id, 97 | "force_complete": force_complete, 98 | }, 99 | ) 100 | self.update() 101 | return res 102 | -------------------------------------------------------------------------------- /tests/api_data/dsm_6/core/const_6_core_system.py: -------------------------------------------------------------------------------- 1 | """DSM 6 SYNO.Core.System data.""" 2 | 3 | DSM_6_CORE_SYSTEM_DS918_PLUS = { 4 | "data": { 5 | "cpu_clock_speed": 1500, 6 | "cpu_cores": "4", 7 | "cpu_family": "Celeron", 8 | "cpu_series": "J3455", 9 | "cpu_vendor": "INTEL", 10 | "enabled_ntp": True, 11 | "firmware_date": "2020/07/08", 12 | "firmware_ver": "DSM 6.2.3-25426 Update 2", 13 | "model": "DS918+", 14 | "ntp_server": "time.google.com", 15 | "ram_size": 4096, 16 | "sata_dev": [], 17 | "serial": "1920PDN001501", 
18 | "support_esata": "yes", 19 | "sys_temp": 40, 20 | "sys_tempwarn": False, 21 | "systempwarn": False, 22 | "temperature_warning": False, 23 | "time": "2020-10-19 23:33:52", 24 | "time_zone": "Brussels", 25 | "time_zone_desc": "(GMT+01:00) Brussels, Copenhagen, Madrid, Paris", 26 | "up_time": "75:12:9", 27 | "usb_dev": [ 28 | { 29 | "cls": "hub", 30 | "pid": "0612", 31 | "producer": "Genesys Logic, Inc.", 32 | "product": "Hub", 33 | "rev": "92.23", 34 | "vid": "05e3", 35 | }, 36 | { 37 | "cls": "other", 38 | "pid": "1790", 39 | "producer": "ASIX Electronics Corp.", 40 | "product": "AX88179 Gigabit Ethernet", 41 | "rev": "1.00", 42 | "vid": "0b95", 43 | }, 44 | { 45 | "cls": "hub", 46 | "pid": "0610", 47 | "producer": "Genesys Logic, Inc.", 48 | "product": "4-port hub", 49 | "rev": "92.23", 50 | "vid": "05e3", 51 | }, 52 | { 53 | "cls": "other", 54 | "pid": "0200", 55 | "producer": "Sigma Designs, Inc.", 56 | "product": "Aeotec Z-Stick Gen5 (ZW090) - UZB", 57 | "rev": "0.00", 58 | "vid": "0658", 59 | }, 60 | { 61 | "cls": "ups", 62 | "pid": "0002", 63 | "producer": "American Power Conversion", 64 | "product": "Uninterruptible Power Supply", 65 | "rev": "1.06", 66 | "vid": "051d", 67 | }, 68 | ], 69 | }, 70 | "success": True, 71 | } 72 | 73 | DSM_6_CORE_SYSTEM_DS218_PLAY = { 74 | "data": { 75 | "cpu_clock_speed": 1400, 76 | "cpu_cores": "4", 77 | "cpu_family": "RTD1296", 78 | "cpu_series": "SoC", 79 | "cpu_vendor": "Realtek", 80 | "enabled_ntp": True, 81 | "firmware_date": "2020/07/14", 82 | "firmware_ver": "DSM 6.2.3-25426 Update 2", 83 | "model": "DS218play", 84 | "ntp_server": "pool.ntp.org", 85 | "ram_size": 1024, 86 | "serial": "123456abcdefg", 87 | "support_esata": "no", 88 | "sys_temp": 40, 89 | "sys_tempwarn": False, 90 | "systempwarn": False, 91 | "temperature_warning": False, 92 | "time": "2020-10-16 20:26:58", 93 | "time_zone": "Amsterdam", 94 | "time_zone_desc": "(GMT+01:00) Amsterdam, Berlin, Rome, Stockholm, Vienna", 95 | "up_time": "289:31:54", 96 | "usb_dev": [ 97 | { 98 | "cls": "disk", 99 | "pid": "2621", 100 | "producer": "Western Digital Technologies, Inc.", 101 | "product": "Elements 2621", 102 | "rev": "10.26", 103 | "vid": "1058", 104 | } 105 | ], 106 | }, 107 | "success": True, 108 | } 109 | -------------------------------------------------------------------------------- /tests/api_data/dsm_6/core/const_6_core_utilization.py: -------------------------------------------------------------------------------- 1 | """DSM 6 SYNO.Core.System.Utilization data.""" 2 | 3 | DSM_6_CORE_UTILIZATION_ERROR_1055 = { 4 | "error": { 5 | "code": 1055, 6 | "errors": { 7 | "err_key": "", 8 | "err_line": 883, 9 | "err_msg": "Transmition failed.", 10 | "err_session": "", 11 | }, 12 | }, 13 | "success": False, 14 | } 15 | 16 | DSM_6_CORE_UTILIZATION = { 17 | "data": { 18 | "cpu": { 19 | "15min_load": 51, 20 | "1min_load": 37, 21 | "5min_load": 33, 22 | "device": "System", 23 | "other_load": 3, 24 | "system_load": 2, 25 | "user_load": 4, 26 | }, 27 | "disk": { 28 | "disk": [ 29 | { 30 | "device": "sdc", 31 | "display_name": "Drive 3", 32 | "read_access": 3, 33 | "read_byte": 55261, 34 | "type": "internal", 35 | "utilization": 12, 36 | "write_access": 15, 37 | "write_byte": 419425, 38 | }, 39 | { 40 | "device": "sda", 41 | "display_name": "Drive 1", 42 | "read_access": 3, 43 | "read_byte": 63905, 44 | "type": "internal", 45 | "utilization": 8, 46 | "write_access": 14, 47 | "write_byte": 414795, 48 | }, 49 | { 50 | "device": "sdb", 51 | "display_name": "Drive 2", 52 | "read_access": 3, 53 | 
"read_byte": 55891, 54 | "type": "internal", 55 | "utilization": 10, 56 | "write_access": 15, 57 | "write_byte": 415658, 58 | }, 59 | ], 60 | "total": { 61 | "device": "total", 62 | "read_access": 9, 63 | "read_byte": 175057, 64 | "utilization": 10, 65 | "write_access": 44, 66 | "write_byte": 1249878, 67 | }, 68 | }, 69 | "lun": [], 70 | "memory": { 71 | "avail_real": 156188, 72 | "avail_swap": 4146316, 73 | "buffer": 15172, 74 | "cached": 2764756, 75 | "device": "Memory", 76 | "memory_size": 4194304, 77 | "real_usage": 24, 78 | "si_disk": 0, 79 | "so_disk": 0, 80 | "swap_usage": 6, 81 | "total_real": 3867268, 82 | "total_swap": 4415404, 83 | }, 84 | "network": [ 85 | {"device": "total", "rx": 109549, "tx": 45097}, 86 | {"device": "eth0", "rx": 109549, "tx": 45097}, 87 | {"device": "eth1", "rx": 0, "tx": 0}, 88 | ], 89 | "space": { 90 | "total": { 91 | "device": "total", 92 | "read_access": 1, 93 | "read_byte": 27603, 94 | "utilization": 1, 95 | "write_access": 23, 96 | "write_byte": 132496, 97 | }, 98 | "volume": [ 99 | { 100 | "device": "md2", 101 | "display_name": "volume1", 102 | "read_access": 1, 103 | "read_byte": 27603, 104 | "utilization": 1, 105 | "write_access": 23, 106 | "write_byte": 132496, 107 | } 108 | ], 109 | }, 110 | "time": 1585503221, 111 | }, 112 | "success": True, 113 | } 114 | -------------------------------------------------------------------------------- /tests/api_data/dsm_6/__init__.py: -------------------------------------------------------------------------------- 1 | """DSM 6 datas.""" 2 | from .const_6_api_auth import DSM_6_AUTH_LOGIN 3 | from .const_6_api_auth import DSM_6_AUTH_LOGIN_2SA 4 | from .const_6_api_auth import DSM_6_AUTH_LOGIN_2SA_OTP 5 | from .const_6_api_info import DSM_6_API_INFO 6 | from .core.const_6_core_security import DSM_6_CORE_SECURITY 7 | from .core.const_6_core_security import DSM_6_CORE_SECURITY_UPDATE_OUTOFDATE 8 | from .core.const_6_core_share import DSM_6_CORE_SHARE 9 | from .core.const_6_core_system import DSM_6_CORE_SYSTEM_DS218_PLAY 10 | from .core.const_6_core_system import DSM_6_CORE_SYSTEM_DS918_PLUS 11 | from .core.const_6_core_upgrade import DSM_6_CORE_UPGRADE_FALSE 12 | from .core.const_6_core_upgrade import DSM_6_CORE_UPGRADE_TRUE 13 | from .core.const_6_core_utilization import DSM_6_CORE_UTILIZATION 14 | from .core.const_6_core_utilization import DSM_6_CORE_UTILIZATION_ERROR_1055 15 | from .download_station.const_6_download_station_info import ( 16 | DSM_6_DOWNLOAD_STATION_INFO_CONFIG, 17 | ) 18 | from .download_station.const_6_download_station_info import ( 19 | DSM_6_DOWNLOAD_STATION_INFO_INFO, 20 | ) 21 | from .download_station.const_6_download_station_stat import ( 22 | DSM_6_DOWNLOAD_STATION_STAT_INFO, 23 | ) 24 | from .download_station.const_6_download_station_task import ( 25 | DSM_6_DOWNLOAD_STATION_TASK_LIST, 26 | ) 27 | from .dsm.const_6_dsm_info import DSM_6_DSM_INFORMATION 28 | from .dsm.const_6_dsm_network import DSM_6_DSM_NETWORK_2LAN_1PPPOE 29 | from .storage.const_6_storage_storage import ( 30 | DSM_6_STORAGE_STORAGE_DS1515_PLUS_SHR2_10DISKS_1VOL_WITH_EXPANSION, 31 | ) 32 | from .storage.const_6_storage_storage import ( 33 | DSM_6_STORAGE_STORAGE_DS1819_PLUS_SHR2_8DISKS_1VOL, 34 | ) 35 | from .storage.const_6_storage_storage import ( 36 | DSM_6_STORAGE_STORAGE_DS213_PLUS_SHR1_2DISKS_2VOLS, 37 | ) 38 | from .storage.const_6_storage_storage import ( 39 | DSM_6_STORAGE_STORAGE_DS918_PLUS_RAID5_3DISKS_1VOL, 40 | ) 41 | from .surveillance_station.const_6_api_info import ( 42 | DSM_6_API_INFO as 
DSM_6_API_INFO_SURVEILLANCE_STATION, 43 | ) 44 | from .surveillance_station.const_6_surveillance_station_camera import ( 45 | DSM_6_SURVEILLANCE_STATION_CAMERA_EVENT_MD_PARAM_SAVE, 46 | ) 47 | from .surveillance_station.const_6_surveillance_station_camera import ( 48 | DSM_6_SURVEILLANCE_STATION_CAMERA_EVENT_MOTION_ENUM, 49 | ) 50 | from .surveillance_station.const_6_surveillance_station_camera import ( 51 | DSM_6_SURVEILLANCE_STATION_CAMERA_GET_LIVE_VIEW_PATH, 52 | ) 53 | from .surveillance_station.const_6_surveillance_station_camera import ( 54 | DSM_6_SURVEILLANCE_STATION_CAMERA_LIST, 55 | ) 56 | from .surveillance_station.const_6_surveillance_station_home_mode import ( 57 | DSM_6_SURVEILLANCE_STATION_HOME_MODE_GET_INFO, 58 | ) 59 | from .surveillance_station.const_6_surveillance_station_home_mode import ( 60 | DSM_6_SURVEILLANCE_STATION_HOME_MODE_SWITCH, 61 | ) 62 | 63 | __all__ = [ 64 | "DSM_6_AUTH_LOGIN", 65 | "DSM_6_AUTH_LOGIN_2SA", 66 | "DSM_6_AUTH_LOGIN_2SA_OTP", 67 | "DSM_6_API_INFO", 68 | "DSM_6_CORE_SECURITY", 69 | "DSM_6_CORE_SECURITY_UPDATE_OUTOFDATE", 70 | "DSM_6_CORE_SHARE", 71 | "DSM_6_CORE_SYSTEM_DS218_PLAY", 72 | "DSM_6_CORE_SYSTEM_DS918_PLUS", 73 | "DSM_6_CORE_UPGRADE_FALSE", 74 | "DSM_6_CORE_UPGRADE_TRUE", 75 | "DSM_6_CORE_UTILIZATION", 76 | "DSM_6_CORE_UTILIZATION_ERROR_1055", 77 | "DSM_6_DOWNLOAD_STATION_INFO_CONFIG", 78 | "DSM_6_DOWNLOAD_STATION_INFO_INFO", 79 | "DSM_6_DOWNLOAD_STATION_STAT_INFO", 80 | "DSM_6_DOWNLOAD_STATION_TASK_LIST", 81 | "DSM_6_DSM_INFORMATION", 82 | "DSM_6_DSM_NETWORK_2LAN_1PPPOE", 83 | "DSM_6_STORAGE_STORAGE_DS1515_PLUS_SHR2_10DISKS_1VOL_WITH_EXPANSION", 84 | "DSM_6_STORAGE_STORAGE_DS1819_PLUS_SHR2_8DISKS_1VOL", 85 | "DSM_6_STORAGE_STORAGE_DS213_PLUS_SHR1_2DISKS_2VOLS", 86 | "DSM_6_STORAGE_STORAGE_DS918_PLUS_RAID5_3DISKS_1VOL", 87 | "DSM_6_API_INFO_SURVEILLANCE_STATION", 88 | "DSM_6_SURVEILLANCE_STATION_CAMERA_EVENT_MD_PARAM_SAVE", 89 | "DSM_6_SURVEILLANCE_STATION_CAMERA_EVENT_MOTION_ENUM", 90 | "DSM_6_SURVEILLANCE_STATION_CAMERA_GET_LIVE_VIEW_PATH", 91 | "DSM_6_SURVEILLANCE_STATION_CAMERA_LIST", 92 | "DSM_6_SURVEILLANCE_STATION_HOME_MODE_GET_INFO", 93 | "DSM_6_SURVEILLANCE_STATION_HOME_MODE_SWITCH", 94 | ] 95 | -------------------------------------------------------------------------------- /tests/api_data/dsm_5/core/const_5_core_utilization.py: -------------------------------------------------------------------------------- 1 | """DSM 5 SYNO.Core.System.Utilization data.""" 2 | 3 | DSM_5_CORE_UTILIZATION = { 4 | "data": { 5 | "cpu": { 6 | "15min_load": 53, 7 | "1min_load": 57, 8 | "5min_load": 56, 9 | "device": "System", 10 | "other_load": 63, 11 | "system_load": 10, 12 | "user_load": 27, 13 | }, 14 | "disk": { 15 | "disk": [ 16 | { 17 | "device": "sda", 18 | "display_name": "Disk 1", 19 | "read_access": 21, 20 | "read_byte": 645529, 21 | "type": "internal", 22 | "utilization": 46, 23 | "write_access": 4, 24 | "write_byte": 86220, 25 | }, 26 | { 27 | "device": "sdb", 28 | "display_name": "Disk 2", 29 | "read_access": 23, 30 | "read_byte": 711338, 31 | "type": "internal", 32 | "utilization": 33, 33 | "write_access": 4, 34 | "write_byte": 95641, 35 | }, 36 | { 37 | "device": "sdc", 38 | "display_name": "Disk 3", 39 | "read_access": 21, 40 | "read_byte": 786841, 41 | "type": "internal", 42 | "utilization": 31, 43 | "write_access": 5, 44 | "write_byte": 99874, 45 | }, 46 | { 47 | "device": "sdd", 48 | "display_name": "Disk 4", 49 | "read_access": 21, 50 | "read_byte": 729907, 51 | "type": "internal", 52 | "utilization": 32, 53 | "write_access": 
4, 54 | "write_byte": 76663, 55 | }, 56 | { 57 | "device": "sdq", 58 | "display_name": "USB Disk 1", 59 | "read_access": 0, 60 | "read_byte": 0, 61 | "type": "usb", 62 | "utilization": 0, 63 | "write_access": 0, 64 | "write_byte": 0, 65 | }, 66 | ], 67 | "total": { 68 | "device": "total", 69 | "read_access": 86, 70 | "read_byte": 2873615, 71 | "utilization": 28, 72 | "write_access": 17, 73 | "write_byte": 358398, 74 | }, 75 | }, 76 | "memory": { 77 | "avail_real": 8188, 78 | "avail_swap": 1933436, 79 | "buffer": 3700, 80 | "cached": 25636, 81 | "device": "Memory", 82 | "memory_size": 131072, 83 | "real_usage": 68, 84 | "si_disk": 5, 85 | "so_disk": 3, 86 | "swap_usage": 7, 87 | "total_real": 118464, 88 | "total_swap": 2097080, 89 | }, 90 | "network": [ 91 | {"device": "total", "rx": 1680, "tx": 553}, 92 | {"device": "eth0", "rx": 1680, "tx": 553}, 93 | ], 94 | "space": { 95 | "lun": [], 96 | "total": { 97 | "device": "total", 98 | "read_access": 261, 99 | "read_byte": 1069875, 100 | "utilization": 100, 101 | "write_access": 51, 102 | "write_byte": 208896, 103 | }, 104 | "volume": [ 105 | { 106 | "device": "md2", 107 | "display_name": "volume1", 108 | "read_access": 261, 109 | "read_byte": 1069875, 110 | "utilization": 100, 111 | "write_access": 51, 112 | "write_byte": 208896, 113 | } 114 | ], 115 | }, 116 | "time": 1586592505, 117 | }, 118 | "success": True, 119 | } 120 | -------------------------------------------------------------------------------- /tests/api_data/dsm_6/surveillance_station/const_6_surveillance_station_home_mode.py: -------------------------------------------------------------------------------- 1 | """DSM 6 SYNO.API.SurveillanceStation.HomeMode data.""" 2 | 3 | DSM_6_SURVEILLANCE_STATION_HOME_MODE_GET_INFO = { 4 | "data": { 5 | "actrule_on": False, 6 | "actrules": "-1", 7 | "cameras": "-1", 8 | "custom1_det": 1, 9 | "custom1_di": 1, 10 | "custom2_det": 1, 11 | "custom2_di": 1, 12 | "geo_delay_time": 60, 13 | "geo_lat": 12.00000, 14 | "geo_lng": -12.00000, 15 | "geo_radius": 100, 16 | "io_modules": "", 17 | "last_update_time": 0, 18 | "mode_schedule": "000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", 19 | "mode_schedule_next_time": -1, 20 | "mode_schedule_on": True, 21 | "notify_event_list": [ 22 | {"eventGroupType": 2, "eventType": 3, "filter": 4}, 23 | {"eventGroupType": 2, "eventType": 4, "filter": 4}, 24 | {"eventGroupType": 2, "eventType": 5, "filter": 0}, 25 | {"eventGroupType": 2, "eventType": 6, "filter": 0}, 26 | {"eventGroupType": 2, "eventType": 7, "filter": 0}, 27 | {"eventGroupType": 2, "eventType": 10, "filter": 0}, 28 | {"eventGroupType": 2, "eventType": 11, "filter": 4}, 29 | {"eventGroupType": 2, "eventType": 12, "filter": 0}, 30 | {"eventGroupType": 2, "eventType": 13, "filter": 0}, 31 | {"eventGroupType": 2, "eventType": 14, "filter": 0}, 32 | {"eventGroupType": 2, "eventType": 15, "filter": 0}, 33 | {"eventGroupType": 1, "eventType": 28, "filter": 4}, 34 | {"eventGroupType": 1, "eventType": 29, "filter": 4}, 35 | {"eventGroupType": 1, "eventType": 32, "filter": 4}, 36 | {"eventGroupType": 1, "eventType": 33, "filter": 4}, 37 | {"eventGroupType": 1, "eventType": 34, "filter": 4}, 38 | {"eventGroupType": 8, "eventType": 35, "filter": 0}, 39 | {"eventGroupType": 8, 
"eventType": 36, "filter": 0}, 40 | {"eventGroupType": 8, "eventType": 37, "filter": 0}, 41 | {"eventGroupType": 8, "eventType": 38, "filter": 0}, 42 | {"eventGroupType": 8, "eventType": 39, "filter": 0}, 43 | {"eventGroupType": 8, "eventType": 40, "filter": 0}, 44 | {"eventGroupType": 8, "eventType": 41, "filter": 0}, 45 | {"eventGroupType": 8, "eventType": 42, "filter": 0}, 46 | {"eventGroupType": 8, "eventType": 43, "filter": 0}, 47 | {"eventGroupType": 8, "eventType": 44, "filter": 0}, 48 | {"eventGroupType": 1, "eventType": 45, "filter": 0}, 49 | {"eventGroupType": 2, "eventType": 62, "filter": 0}, 50 | {"eventGroupType": 2, "eventType": 63, "filter": 0}, 51 | {"eventGroupType": 2, "eventType": 64, "filter": 0}, 52 | {"eventGroupType": 2, "eventType": 65, "filter": 0}, 53 | {"eventGroupType": 2, "eventType": 66, "filter": 0}, 54 | {"eventGroupType": 12, "eventType": 82, "filter": 0}, 55 | {"eventGroupType": 12, "eventType": 83, "filter": 0}, 56 | {"eventGroupType": 1, "eventType": 90, "filter": 0}, 57 | ], 58 | "notify_on": True, 59 | "on": True, 60 | "onetime_disable_on": False, 61 | "onetime_disable_time": 0, 62 | "onetime_enable_on": False, 63 | "onetime_enable_time": 0, 64 | "reason": 2, 65 | "rec_sch_custom_det_app_list": [ 66 | {"custom1_app_det": 0, "custom2_app_det": 0}, 67 | {"custom1_app_det": 0, "custom2_app_det": 0}, 68 | ], 69 | "rec_schedule": "111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111", 70 | "rec_schedule_on": False, 71 | "stream_profile": "1,1,1,1,1,1", 72 | "streaming_on": False, 73 | "wifi_ssid": "SSID", 74 | }, 75 | "success": True, 76 | } 77 | 78 | DSM_6_SURVEILLANCE_STATION_HOME_MODE_SWITCH = {"success": True} 79 | -------------------------------------------------------------------------------- /src/synology_dsm/exceptions.py: -------------------------------------------------------------------------------- 1 | """Library exceptions.""" 2 | from .const import API_AUTH 3 | from .const import ERROR_AUTH 4 | from .const import ERROR_COMMON 5 | from .const import ERROR_DOWNLOAD_SEARCH 6 | from .const import ERROR_DOWNLOAD_TASK 7 | from .const import ERROR_FILE 8 | from .const import ERROR_SURVEILLANCE 9 | from .const import ERROR_VIRTUALIZATION 10 | 11 | 12 | class SynologyDSMException(Exception): 13 | """Generic Synology DSM exception.""" 14 | 15 | def __init__(self, api, code, details=None): 16 | """Constructor method.""" 17 | reason = ERROR_COMMON.get(code) 18 | if api and not reason: 19 | if api == API_AUTH: 20 | reason = ERROR_AUTH.get(code) 21 | elif "SYNO.DownloadStation" in api: 22 | if "BTSearch" in api: 23 | reason = ERROR_DOWNLOAD_SEARCH.get(code) 24 | elif "Task" in api: 25 | reason = ERROR_DOWNLOAD_TASK.get(code) 26 | elif "SYNO.FileStation" in api: 27 | reason = ERROR_FILE.get(code) 28 | elif "SYNO.SurveillanceStation" in api: 29 | reason = ERROR_SURVEILLANCE.get(code) 30 | elif "SYNO.Virtualization" in api: 31 | reason = ERROR_VIRTUALIZATION.get(code) 32 | if not reason: 33 | reason = "Unknown" 34 | 35 | error_message = {"api": api, "code": code, "reason": reason, "details": details} 36 | super().__init__(error_message) 37 | 38 | 39 | # Request 40 | class SynologyDSMRequestException(SynologyDSMException): 41 | """Request exception.""" 42 | 43 | def __init__(self, 
exception): 44 | """Constructor method.""" 45 | ex_class = exception.__class__.__name__ 46 | ex_reason = exception.args[0] 47 | if hasattr(exception.args[0], "reason"): 48 | ex_reason = exception.args[0].reason 49 | super().__init__(None, -1, f"{ex_class} = {ex_reason}") 50 | 51 | 52 | # API 53 | class SynologyDSMAPINotExistsException(SynologyDSMException): 54 | """API does not exist exception.""" 55 | 56 | def __init__(self, api): 57 | """Constructor method.""" 58 | super().__init__(api, -2, f"API {api} does not exist") 59 | 60 | 61 | class SynologyDSMAPIErrorException(SynologyDSMException): 62 | """API returns an error exception.""" 63 | 64 | def __init__(self, api, code, details): 65 | """Constructor method.""" 66 | super().__init__(api, code, details) 67 | 68 | 69 | # Login 70 | class SynologyDSMLoginFailedException(SynologyDSMException): 71 | """Failed to login exception.""" 72 | 73 | def __init__(self, code, details=None): 74 | """Constructor method.""" 75 | super().__init__(API_AUTH, code, details) 76 | 77 | 78 | class SynologyDSMLoginInvalidException(SynologyDSMLoginFailedException): 79 | """Invalid password & not admin account exception.""" 80 | 81 | def __init__(self, username): 82 | """Constructor method.""" 83 | message = f"Invalid password or not admin account: {username}" 84 | super().__init__(400, message) 85 | 86 | 87 | class SynologyDSMLoginDisabledAccountException(SynologyDSMLoginFailedException): 88 | """Guest & disabled account exception.""" 89 | 90 | def __init__(self, username): 91 | """Constructor method.""" 92 | message = f"Guest or disabled account: {username}" 93 | super().__init__(401, message) 94 | 95 | 96 | class SynologyDSMLoginPermissionDeniedException(SynologyDSMLoginFailedException): 97 | """No access to login exception.""" 98 | 99 | def __init__(self, username): 100 | """Constructor method.""" 101 | message = f"Permission denied for account: {username}" 102 | super().__init__(402, message) 103 | 104 | 105 | class SynologyDSMLogin2SARequiredException(SynologyDSMLoginFailedException): 106 | """2SA required to login exception.""" 107 | 108 | def __init__(self, username): 109 | """Constructor method.""" 110 | message = f"Two-step authentication required for account: {username}" 111 | super().__init__(403, message) 112 | 113 | 114 | class SynologyDSMLogin2SAFailedException(SynologyDSMLoginFailedException): 115 | """2SA code failed exception.""" 116 | 117 | def __init__(self): 118 | """Constructor method.""" 119 | message = "Two-step authentication failed, retry with a new pass code" 120 | super().__init__(404, message) 121 | -------------------------------------------------------------------------------- /.github/workflows/tests.yml: -------------------------------------------------------------------------------- 1 | name: Tests 2 | 3 | on: 4 | - push 5 | - pull_request 6 | 7 | jobs: 8 | tests: 9 | name: ${{ matrix.session }} ${{ matrix.python-version }} / ${{ matrix.os }} 10 | runs-on: ${{ matrix.os }} 11 | strategy: 12 | fail-fast: false 13 | matrix: 14 | include: 15 | # Commented sessions will be activated in the future 16 | - { python-version: 3.9, os: ubuntu-latest, session: "pre-commit" } 17 | - { python-version: 3.9, os: ubuntu-latest, session: "safety" } 18 | # - { python-version: 3.9, os: ubuntu-latest, session: "mypy" } 19 | # - { python-version: 3.8, os: ubuntu-latest, session: "mypy" } 20 | # - { python-version: 3.7, os: ubuntu-latest, session: "mypy" } 21 | - { python-version: 3.9, os: ubuntu-latest, session: "tests" } 22 | - { python-version: 3.8, os: 
ubuntu-latest, session: "tests" } 23 | - { python-version: 3.7, os: ubuntu-latest, session: "tests" } 24 | - { python-version: 3.9, os: windows-latest, session: "tests" } 25 | - { python-version: 3.9, os: macos-latest, session: "tests" } 26 | # - { python-version: 3.9, os: ubuntu-latest, session: "typeguard" } 27 | # - { python-version: 3.9, os: ubuntu-latest, session: "xdoctest" } 28 | # - { python-version: 3.8, os: ubuntu-latest, session: "docs-build" } 29 | 30 | env: 31 | NOXSESSION: ${{ matrix.session }} 32 | 33 | steps: 34 | - name: Check out the repository 35 | uses: actions/checkout@v2.3.4 36 | 37 | - name: Set up Python ${{ matrix.python-version }} 38 | uses: actions/setup-python@v2.1.4 39 | with: 40 | python-version: ${{ matrix.python-version }} 41 | 42 | - name: Upgrade pip 43 | run: | 44 | pip install --constraint=.github/workflows/constraints.txt pip 45 | pip --version 46 | 47 | - name: Install Poetry 48 | run: | 49 | pip install --constraint=.github/workflows/constraints.txt poetry 50 | poetry --version 51 | 52 | - name: Install Nox 53 | run: | 54 | pip install --constraint=.github/workflows/constraints.txt nox nox-poetry 55 | nox --version 56 | 57 | - name: Compute pre-commit cache key 58 | if: matrix.session == 'pre-commit' 59 | id: pre-commit-cache 60 | shell: python 61 | run: | 62 | import hashlib 63 | import sys 64 | 65 | python = "py{}.{}".format(*sys.version_info[:2]) 66 | payload = sys.version.encode() + sys.executable.encode() 67 | digest = hashlib.sha256(payload).hexdigest() 68 | result = "${{ runner.os }}-{}-{}-pre-commit".format(python, digest[:8]) 69 | 70 | print("::set-output name=result::{}".format(result)) 71 | 72 | - name: Restore pre-commit cache 73 | uses: actions/cache@v2.1.3 74 | if: matrix.session == 'pre-commit' 75 | with: 76 | path: ~/.cache/pre-commit 77 | key: ${{ steps.pre-commit-cache.outputs.result }}-${{ hashFiles('.pre-commit-config.yaml') }} 78 | restore-keys: | 79 | ${{ steps.pre-commit-cache.outputs.result }}- 80 | 81 | - name: Run Nox 82 | run: | 83 | nox --force-color --python=${{ matrix.python-version }} 84 | 85 | - name: Upload coverage data 86 | if: always() && matrix.session == 'tests' 87 | uses: "actions/upload-artifact@v2.2.1" 88 | with: 89 | name: coverage-data 90 | path: ".coverage.*" 91 | 92 | - name: Upload documentation 93 | if: matrix.session == 'docs-build' 94 | uses: actions/upload-artifact@v2.2.1 95 | with: 96 | name: docs 97 | path: docs/_build 98 | 99 | coverage: 100 | runs-on: ubuntu-latest 101 | needs: tests 102 | steps: 103 | - name: Check out the repository 104 | uses: actions/checkout@v2.3.4 105 | 106 | - name: Set up Python 3.9 107 | uses: actions/setup-python@v2.1.4 108 | with: 109 | python-version: 3.9 110 | 111 | - name: Upgrade pip 112 | run: | 113 | pip install --constraint=.github/workflows/constraints.txt pip 114 | pip --version 115 | 116 | - name: Install Poetry 117 | run: | 118 | pip install --constraint=.github/workflows/constraints.txt poetry 119 | poetry --version 120 | 121 | - name: Install Nox 122 | run: | 123 | pip install --constraint=.github/workflows/constraints.txt nox nox-poetry 124 | nox --version 125 | 126 | - name: Download coverage data 127 | uses: actions/download-artifact@v2.0.6 128 | with: 129 | name: coverage-data 130 | 131 | - name: Combine coverage data and display human readable report 132 | run: | 133 | nox --force-color --session=coverage 134 | 135 | - name: Create coverage report 136 | run: | 137 | nox --force-color --session=coverage -- xml 138 | 139 | - name: Upload coverage report 
140 | uses: codecov/codecov-action@v1.0.15 141 | -------------------------------------------------------------------------------- /src/synology_dsm/api/surveillance_station/__init__.py: -------------------------------------------------------------------------------- 1 | """Synology SurveillanceStation API wrapper.""" 2 | from .camera import SynoCamera 3 | from .const import MOTION_DETECTION_BY_SURVEILLANCE 4 | from .const import MOTION_DETECTION_DISABLED 5 | 6 | 7 | class SynoSurveillanceStation: 8 | """An implementation of a Synology SurveillanceStation.""" 9 | 10 | API_KEY = "SYNO.SurveillanceStation.*" 11 | INFO_API_KEY = "SYNO.SurveillanceStation.Info" 12 | CAMERA_API_KEY = "SYNO.SurveillanceStation.Camera" 13 | CAMERA_EVENT_API_KEY = "SYNO.SurveillanceStation.Camera.Event" 14 | HOME_MODE_API_KEY = "SYNO.SurveillanceStation.HomeMode" 15 | SNAPSHOT_API_KEY = "SYNO.SurveillanceStation.SnapShot" 16 | 17 | def __init__(self, dsm): 18 | """Initialize a Surveillance Station.""" 19 | self._dsm = dsm 20 | self._cameras_by_id = {} 21 | 22 | def update(self): 23 | """Update cameras and motion settings with latest from API.""" 24 | self._cameras_by_id = {} 25 | list_data = self._dsm.get(self.CAMERA_API_KEY, "List", max_version=7)["data"] 26 | for camera_data in list_data["cameras"]: 27 | if camera_data["id"] in self._cameras_by_id: 28 | self._cameras_by_id[camera_data["id"]].update(camera_data) 29 | else: 30 | self._cameras_by_id[camera_data["id"]] = SynoCamera(camera_data) 31 | 32 | for camera_id in self._cameras_by_id: 33 | self._cameras_by_id[camera_id].update_motion_detection( 34 | self._dsm.get( 35 | self.CAMERA_EVENT_API_KEY, "MotionEnum", {"camId": camera_id} 36 | )["data"] 37 | ) 38 | 39 | if not self._cameras_by_id: 40 | return 41 | 42 | live_view_datas = self._dsm.get( 43 | self.CAMERA_API_KEY, 44 | "GetLiveViewPath", 45 | {"idList": ",".join(str(k) for k in self._cameras_by_id)}, 46 | )["data"] 47 | for live_view_data in live_view_datas: 48 | self._cameras_by_id[live_view_data["id"]].live_view.update(live_view_data) 49 | 50 | # Global 51 | def get_info(self): 52 | """Return general informations about the Surveillance Station instance.""" 53 | return self._dsm.get(self.INFO_API_KEY, "GetInfo") 54 | 55 | # Camera 56 | def get_all_cameras(self): 57 | """Return a list of cameras.""" 58 | return self._cameras_by_id.values() 59 | 60 | def get_camera(self, camera_id): 61 | """Return camera matching camera_id.""" 62 | return self._cameras_by_id[camera_id] 63 | 64 | def get_camera_live_view_path(self, camera_id, video_format=None): 65 | """Return camera live view path matching camera_id. 66 | 67 | Args: 68 | camera_id: ID of the camera we want to get the live view path. 69 | video_format: mjpeg_http | multicast | mxpeg_http | rtsp_http | rtsp. 
70 | """ 71 | if video_format: 72 | return getattr(self._cameras_by_id[camera_id].live_view, video_format) 73 | return self._cameras_by_id[camera_id].live_view 74 | 75 | def get_camera_image(self, camera_id): 76 | """Return bytes of camera image for camera matching camera_id.""" 77 | return self._dsm.get( 78 | self.CAMERA_API_KEY, "GetSnapshot", {"id": camera_id, "cameraId": camera_id} 79 | ) 80 | 81 | def enable_camera(self, camera_id): 82 | """Enable camera(s) - accepts a single ID or several, e.g. 1 or 1,2,3.""" 83 | return self._dsm.get(self.CAMERA_API_KEY, "Enable", {"idList": camera_id})[ 84 | "success" 85 | ] 86 | 87 | def disable_camera(self, camera_id): 88 | """Disable camera(s) - accepts a single ID or several, e.g. 1 or 1,2,3.""" 89 | return self._dsm.get(self.CAMERA_API_KEY, "Disable", {"idList": camera_id})[ 90 | "success" 91 | ] 92 | 93 | # Snapshot 94 | def capture_camera_image(self, camera_id, save=True): 95 | """Capture a snapshot for camera matching camera_id.""" 96 | return self._dsm.get( 97 | self.SNAPSHOT_API_KEY, 98 | "TakeSnapshot", 99 | { 100 | "camId": camera_id, 101 | "blSave": int(save), # API requires an integer instead of a boolean 102 | }, 103 | ) 104 | 105 | def download_snapshot(self, snapshot_id, snapshot_size): 106 | """Download snapshot image binary for a given snapshot_id. 107 | 108 | Args: 109 | snapshot_id: ID of the snapshot we want to download. 110 | snapshot_size: SNAPSHOT_SIZE_ICON | SNAPSHOT_SIZE_FULL. 111 | """ 112 | return self._dsm.get( 113 | self.SNAPSHOT_API_KEY, 114 | "LoadSnapshot", 115 | {"id": snapshot_id, "imgSize": snapshot_size}, 116 | ) 117 | 118 | # Motion 119 | def is_motion_detection_enabled(self, camera_id): 120 | """Return motion setting matching camera_id.""" 121 | return self._cameras_by_id[camera_id].is_motion_detection_enabled 122 | 123 | def enable_motion_detection(self, camera_id): 124 | """Enable motion detection for camera matching camera_id.""" 125 | return self._dsm.get( 126 | self.CAMERA_EVENT_API_KEY, 127 | "MDParamSave", 128 | {"camId": camera_id, "source": MOTION_DETECTION_BY_SURVEILLANCE}, 129 | ) 130 | 131 | def disable_motion_detection(self, camera_id): 132 | """Disable motion detection for camera matching camera_id.""" 133 | return self._dsm.get( 134 | self.CAMERA_EVENT_API_KEY, 135 | "MDParamSave", 136 | {"camId": camera_id, "source": MOTION_DETECTION_DISABLED}, 137 | ) 138 | 139 | # Home mode 140 | def get_home_mode_status(self): 141 | """Get the state of Home Mode.""" 142 | return self._dsm.get(self.HOME_MODE_API_KEY, "GetInfo")["data"]["on"] 143 | 144 | def set_home_mode(self, state): 145 | """Set the state of Home Mode (state: bool).""" 146 | return self._dsm.get( 147 | self.HOME_MODE_API_KEY, "Switch", {"on": str(state).lower()} 148 | )["success"] 149 | -------------------------------------------------------------------------------- /noxfile.py: -------------------------------------------------------------------------------- 1 | """Nox sessions.""" 2 | import shutil 3 | import sys 4 | from pathlib import Path 5 | from textwrap import dedent 6 | 7 | import nox 8 | import nox_poetry.patch 9 | from nox.sessions import Session 10 | 11 | 12 | package = "synology_dsm" 13 | python_versions = ["3.9", "3.8", "3.7"] 14 | # Some sessions are commented out as the code needs more work before they pass. 15 | # They will be activated in a future release. 
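# Note: the tuple assigned below only sets which sessions a bare "nox" invocation runs by default; the commented-out sessions are still defined further down in this file and can be run explicitly, e.g. "nox --session=mypy", once the code passes them.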
16 | nox.options.sessions = ( 17 | "pre-commit", 18 | "safety", 19 | # "mypy", 20 | "tests", 21 | # "typeguard", 22 | # "xdoctest", 23 | # "docs-build", 24 | ) 25 | 26 | 27 | def activate_virtualenv_in_precommit_hooks(session: Session) -> None: 28 | """Activate virtualenv in hooks installed by pre-commit. 29 | 30 | This function patches git hooks installed by pre-commit to activate the 31 | session's virtual environment. This allows pre-commit to locate hooks in 32 | that environment when invoked from git. 33 | 34 | Args: 35 | session: The Session object. 36 | """ 37 | if session.bin is None: 38 | return 39 | 40 | virtualenv = session.env.get("VIRTUAL_ENV") 41 | if virtualenv is None: 42 | return 43 | 44 | hookdir = Path(".git") / "hooks" 45 | if not hookdir.is_dir(): 46 | return 47 | 48 | for hook in hookdir.iterdir(): 49 | if hook.name.endswith(".sample") or not hook.is_file(): 50 | continue 51 | 52 | text = hook.read_text() 53 | bindir = repr(session.bin)[1:-1] # strip quotes 54 | if not ( 55 | Path("A") == Path("a") and bindir.lower() in text.lower() or bindir in text 56 | ): 57 | continue 58 | 59 | lines = text.splitlines() 60 | if not (lines[0].startswith("#!") and "python" in lines[0].lower()): 61 | continue 62 | 63 | header = dedent( 64 | f"""\ 65 | import os 66 | os.environ["VIRTUAL_ENV"] = {virtualenv!r} 67 | os.environ["PATH"] = os.pathsep.join(( 68 | {session.bin!r}, 69 | os.environ.get("PATH", ""), 70 | )) 71 | """ 72 | ) 73 | 74 | lines.insert(1, header) 75 | hook.write_text("\n".join(lines)) 76 | 77 | 78 | @nox.session(name="pre-commit", python="3.9") 79 | def precommit(session: Session) -> None: 80 | """Lint using pre-commit.""" 81 | args = session.posargs or ["run", "--all-files", "--show-diff-on-failure"] 82 | session.install( 83 | "black", 84 | "darglint", 85 | "flake8", 86 | "flake8-bandit", 87 | "flake8-bugbear", 88 | "flake8-docstrings", 89 | "flake8-rst-docstrings", 90 | "pep8-naming", 91 | "pre-commit", 92 | "pre-commit-hooks", 93 | "reorder-python-imports", 94 | ) 95 | session.run("pre-commit", *args) 96 | if args and args[0] == "install": 97 | activate_virtualenv_in_precommit_hooks(session) 98 | 99 | 100 | @nox.session(python="3.9") 101 | def safety(session: Session) -> None: 102 | """Scan dependencies for insecure packages.""" 103 | requirements = nox_poetry.export_requirements(session) 104 | session.install("safety") 105 | session.run("safety", "check", f"--file={requirements}", "--bare") 106 | 107 | 108 | @nox.session(python=python_versions) 109 | def mypy(session: Session) -> None: 110 | """Type-check using mypy.""" 111 | args = session.posargs or ["src", "tests"] 112 | session.install(".") 113 | session.install("mypy", "pytest") 114 | session.run("mypy", *args) 115 | if not session.posargs: 116 | session.run("mypy", f"--python-executable={sys.executable}", "noxfile.py") 117 | 118 | 119 | @nox.session(python=python_versions) 120 | def tests(session: Session) -> None: 121 | """Run the test suite.""" 122 | session.install(".") 123 | session.install("coverage[toml]", "pytest", "pygments") 124 | try: 125 | session.run("coverage", "run", "--parallel", "-m", "pytest", *session.posargs) 126 | finally: 127 | if session.interactive: 128 | session.notify("coverage") 129 | 130 | 131 | @nox.session 132 | def coverage(session: Session) -> None: 133 | """Produce the coverage report.""" 134 | # Do not use session.posargs unless this is the only session. 
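# Positional arguments (e.g. "xml") are only forwarded to coverage when this is the sole selected session, as in the CI step "nox --force-color --session=coverage -- xml"; otherwise the posargs likely belong to another queued session (such as pytest flags passed to "tests"), so the session falls back to a plain "coverage report".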
135 | has_args = session.posargs and len(session._runner.manifest) == 1 136 | args = session.posargs if has_args else ["report"] 137 | 138 | session.install("coverage[toml]") 139 | 140 | if not has_args and any(Path().glob(".coverage.*")): 141 | session.run("coverage", "combine") 142 | 143 | session.run("coverage", *args) 144 | 145 | 146 | @nox.session(python=python_versions) 147 | def typeguard(session: Session) -> None: 148 | """Runtime type checking using Typeguard.""" 149 | session.install(".") 150 | session.install("pytest", "typeguard", "pygments") 151 | session.run("pytest", f"--typeguard-packages={package}", *session.posargs) 152 | 153 | 154 | @nox.session(python=python_versions) 155 | def xdoctest(session: Session) -> None: 156 | """Run examples with xdoctest.""" 157 | args = session.posargs or ["all"] 158 | session.install(".") 159 | session.install("xdoctest[colors]") 160 | session.run("python", "-m", "xdoctest", package, *args) 161 | 162 | 163 | @nox.session(name="docs-build", python="3.8") 164 | def docs_build(session: Session) -> None: 165 | """Build the documentation.""" 166 | args = session.posargs or ["docs", "docs/_build"] 167 | session.install(".") 168 | session.install("sphinx", "sphinx-rtd-theme") 169 | 170 | build_dir = Path("docs", "_build") 171 | if build_dir.exists(): 172 | shutil.rmtree(build_dir) 173 | 174 | session.run("sphinx-build", *args) 175 | 176 | 177 | @nox.session(python="3.8") 178 | def docs(session: Session) -> None: 179 | """Build and serve the documentation with live reloading on file changes.""" 180 | args = session.posargs or ["--open-browser", "docs", "docs/_build"] 181 | session.install(".") 182 | session.install("sphinx", "sphinx-autobuild", "sphinx-rtd-theme") 183 | 184 | build_dir = Path("docs", "_build") 185 | if build_dir.exists(): 186 | shutil.rmtree(build_dir) 187 | 188 | session.run("sphinx-autobuild", *args) 189 | -------------------------------------------------------------------------------- /src/synology_dsm/api/core/utilization.py: -------------------------------------------------------------------------------- 1 | """DSM Utilization data.""" 2 | from synology_dsm.helpers import SynoFormatHelper 3 | 4 | 5 | class SynoCoreUtilization: 6 | """Class containing Utilization data.""" 7 | 8 | API_KEY = "SYNO.Core.System.Utilization" 9 | 10 | def __init__(self, dsm): 11 | """Constructor method.""" 12 | self._dsm = dsm 13 | self._data = {} 14 | 15 | def update(self): 16 | """Updates utilization data.""" 17 | raw_data = self._dsm.get(self.API_KEY, "get") 18 | if raw_data: 19 | self._data = raw_data["data"] 20 | 21 | @property 22 | def cpu(self): 23 | """Gets CPU utilization.""" 24 | return self._data.get("cpu", {}) 25 | 26 | @property 27 | def cpu_other_load(self): 28 | """Other percentage of the total CPU load.""" 29 | return self.cpu.get("other_load") 30 | 31 | @property 32 | def cpu_user_load(self): 33 | """User percentage of the total CPU load.""" 34 | return self.cpu.get("user_load") 35 | 36 | @property 37 | def cpu_system_load(self): 38 | """System percentage of the total CPU load.""" 39 | return self.cpu.get("system_load") 40 | 41 | @property 42 | def cpu_total_load(self): 43 | """Total CPU load for Synology DSM.""" 44 | system_load = self.cpu_system_load 45 | user_load = self.cpu_user_load 46 | other_load = self.cpu_other_load 47 | 48 | if system_load is not None and user_load is not None and other_load is not None: 49 | return system_load + user_load + other_load 50 | return None 51 | 52 | @property 53 | def cpu_1min_load(self): 54 | 
"""Average CPU load past minute.""" 55 | return self.cpu.get("1min_load") 56 | 57 | @property 58 | def cpu_5min_load(self): 59 | """Average CPU load past 5 minutes.""" 60 | return self.cpu.get("5min_load") 61 | 62 | @property 63 | def cpu_15min_load(self): 64 | """Average CPU load past 15 minutes.""" 65 | return self.cpu.get("15min_load") 66 | 67 | @property 68 | def memory(self): 69 | """Gets memory utilization.""" 70 | return self._data.get("memory") 71 | 72 | @property 73 | def memory_real_usage(self): 74 | """Real Memory usage from Synology DSM.""" 75 | if self.memory: 76 | return str(self._data["memory"]["real_usage"]) 77 | return None 78 | 79 | def memory_size(self, human_readable=False): 80 | """Total memory size of Synology DSM.""" 81 | if self.memory: 82 | # Memory is actually returned in KB's so multiply before converting 83 | return_data = int(self._data["memory"]["memory_size"]) * 1024 84 | if human_readable: 85 | return SynoFormatHelper.bytes_to_readable(return_data) 86 | return return_data 87 | return None 88 | 89 | def memory_available_swap(self, human_readable=False): 90 | """Total available memory swap.""" 91 | if self.memory: 92 | # Memory is actually returned in KB's so multiply before converting 93 | return_data = int(self._data["memory"]["avail_swap"]) * 1024 94 | if human_readable: 95 | return SynoFormatHelper.bytes_to_readable(return_data) 96 | return return_data 97 | return None 98 | 99 | def memory_cached(self, human_readable=False): 100 | """Total cached memory.""" 101 | if self.memory: 102 | # Memory is actually returned in KB's so multiply before converting 103 | return_data = int(self._data["memory"]["cached"]) * 1024 104 | if human_readable: 105 | return SynoFormatHelper.bytes_to_readable(return_data) 106 | return return_data 107 | return None 108 | 109 | def memory_available_real(self, human_readable=False): 110 | """Real available memory.""" 111 | if self.memory: 112 | # Memory is actually returned in KB's so multiply before converting 113 | return_data = int(self._data["memory"]["avail_real"]) * 1024 114 | if human_readable: 115 | return SynoFormatHelper.bytes_to_readable(return_data) 116 | return return_data 117 | return None 118 | 119 | def memory_total_real(self, human_readable=False): 120 | """Total available real memory.""" 121 | if self.memory: 122 | # Memory is actually returned in KB's so multiply before converting 123 | return_data = int(self._data["memory"]["total_real"]) * 1024 124 | if human_readable: 125 | return SynoFormatHelper.bytes_to_readable(return_data) 126 | return return_data 127 | return None 128 | 129 | def memory_total_swap(self, human_readable=False): 130 | """Total swap memory.""" 131 | if self.memory: 132 | # Memory is actually returned in KB's so multiply before converting 133 | return_data = int(self._data["memory"]["total_swap"]) * 1024 134 | if human_readable: 135 | return SynoFormatHelper.bytes_to_readable(return_data) 136 | return return_data 137 | return None 138 | 139 | @property 140 | def network(self): 141 | """Gets network utilization.""" 142 | return self._data.get("network", []) 143 | 144 | def _get_network(self, network_id): 145 | """Function to get specific network (eth0, total, etc).""" 146 | for network in self.network: 147 | if network["device"] == network_id: 148 | return network 149 | return None 150 | 151 | def network_up(self, human_readable=False): 152 | """Total upload speed being used.""" 153 | network = self._get_network("total") 154 | if network: 155 | return_data = int(network["tx"]) 156 | if 
human_readable: 157 | return SynoFormatHelper.bytes_to_readable(return_data) 158 | return return_data 159 | return None 160 | 161 | def network_down(self, human_readable=False): 162 | """Total download speed being used.""" 163 | network = self._get_network("total") 164 | if network: 165 | return_data = int(network["rx"]) 166 | if human_readable: 167 | return SynoFormatHelper.bytes_to_readable(return_data) 168 | return return_data 169 | return None 170 | -------------------------------------------------------------------------------- /tests/api_data/dsm_6/core/const_6_core_share.py: -------------------------------------------------------------------------------- 1 | """DSM 6 SYNO.Core.Share data.""" 2 | 3 | DSM_6_CORE_SHARE = { 4 | "data": { 5 | "shares": [ 6 | { 7 | "desc": "Docker Containers", 8 | "enable_recycle_bin": False, 9 | "enable_share_compress": False, 10 | "enable_share_cow": True, 11 | "enc_auto_mount": False, 12 | "encryption": 0, 13 | "force_readonly_reason": "", 14 | "hidden": True, 15 | "is_aclmode": True, 16 | "is_block_snap_action": False, 17 | "is_cluster_share": False, 18 | "is_cold_storage_share": False, 19 | "is_exfat_share": False, 20 | "is_force_readonly": False, 21 | "is_share_moving": False, 22 | "is_support_acl": True, 23 | "is_sync_share": False, 24 | "is_usb_share": False, 25 | "name": "docker", 26 | "quota_value": 0, 27 | "recycle_bin_admin_only": False, 28 | "share_quota_used": 0, 29 | "support_action": 511, 30 | "support_snapshot": True, 31 | "task_id": "", 32 | "unite_permission": False, 33 | "uuid": "78egut02-b5b1-4933-adt8-a9208526d234", 34 | "vol_path": "/volume1", 35 | }, 36 | { 37 | "desc": "", 38 | "enable_recycle_bin": True, 39 | "enable_share_compress": False, 40 | "enable_share_cow": True, 41 | "enc_auto_mount": False, 42 | "encryption": 0, 43 | "force_readonly_reason": "", 44 | "hidden": False, 45 | "is_aclmode": True, 46 | "is_block_snap_action": False, 47 | "is_cluster_share": False, 48 | "is_cold_storage_share": False, 49 | "is_exfat_share": False, 50 | "is_force_readonly": False, 51 | "is_share_moving": False, 52 | "is_support_acl": True, 53 | "is_sync_share": False, 54 | "is_usb_share": False, 55 | "name": "test_share", 56 | "quota_value": 0, 57 | "recycle_bin_admin_only": False, 58 | "share_quota_used": 36146658672640.0, 59 | "support_action": 511, 60 | "support_snapshot": True, 61 | "task_id": "", 62 | "unite_permission": False, 63 | "uuid": "2ee6c06a-8766-48b5-013d-63b18652a393", 64 | "vol_path": "/volume1", 65 | }, 66 | { 67 | "desc": "user home", 68 | "enable_recycle_bin": False, 69 | "enable_share_compress": False, 70 | "enable_share_cow": True, 71 | "enc_auto_mount": False, 72 | "encryption": 0, 73 | "force_readonly_reason": "", 74 | "hidden": False, 75 | "is_aclmode": True, 76 | "is_block_snap_action": False, 77 | "is_cluster_share": False, 78 | "is_cold_storage_share": False, 79 | "is_exfat_share": False, 80 | "is_force_readonly": False, 81 | "is_share_moving": False, 82 | "is_support_acl": True, 83 | "is_sync_share": False, 84 | "is_usb_share": False, 85 | "name": "homes", 86 | "quota_value": 0, 87 | "recycle_bin_admin_only": False, 88 | "share_quota_used": 0.015625, 89 | "support_action": 511, 90 | "support_snapshot": True, 91 | "task_id": "", 92 | "unite_permission": False, 93 | "uuid": "2b829t90-9512-4236-qqe0-d4133e9992d0", 94 | "vol_path": "/volume1", 95 | }, 96 | { 97 | "desc": "Log volume", 98 | "enable_recycle_bin": True, 99 | "enable_share_compress": True, 100 | "enable_share_cow": True, 101 | "enc_auto_mount": True, 102 | 
"encryption": 0, 103 | "force_readonly_reason": "", 104 | "hidden": True, 105 | "is_aclmode": True, 106 | "is_block_snap_action": False, 107 | "is_cluster_share": False, 108 | "is_cold_storage_share": False, 109 | "is_exfat_share": False, 110 | "is_force_readonly": False, 111 | "is_share_moving": False, 112 | "is_support_acl": True, 113 | "is_sync_share": False, 114 | "is_usb_share": False, 115 | "name": "logs", 116 | "quota_value": 0, 117 | "recycle_bin_admin_only": True, 118 | "share_quota_used": 947.28515625, 119 | "support_action": 511, 120 | "support_snapshot": True, 121 | "task_id": "", 122 | "unite_permission": False, 123 | "uuid": "b9876507-6880-4wes-8d61-6c984c0813ty", 124 | "vol_path": "/volume2", 125 | }, 126 | { 127 | "desc": "VMs", 128 | "enable_recycle_bin": False, 129 | "enable_share_compress": False, 130 | "enable_share_cow": True, 131 | "enc_auto_mount": False, 132 | "encryption": 0, 133 | "force_readonly_reason": "", 134 | "hidden": False, 135 | "is_aclmode": True, 136 | "is_block_snap_action": False, 137 | "is_cluster_share": False, 138 | "is_cold_storage_share": False, 139 | "is_exfat_share": False, 140 | "is_force_readonly": False, 141 | "is_share_moving": False, 142 | "is_support_acl": True, 143 | "is_sync_share": False, 144 | "is_usb_share": False, 145 | "name": "Virtual_Machines", 146 | "quota_value": 0, 147 | "recycle_bin_admin_only": False, 148 | "share_quota_used": 33911668, 149 | "support_action": 511, 150 | "support_snapshot": True, 151 | "task_id": "", 152 | "unite_permission": False, 153 | "uuid": "5416f693-04tt-4re2-b8e4-f6b18731689b", 154 | "vol_path": "/volume3", 155 | }, 156 | ], 157 | "total": 5, 158 | }, 159 | "success": True, 160 | } 161 | -------------------------------------------------------------------------------- /tests/api_data/dsm_6/core/const_6_core_security.py: -------------------------------------------------------------------------------- 1 | """DSM 6 SYNO.Core.SecurityScan.Status data.""" 2 | 3 | DSM_6_CORE_SECURITY = { 4 | "data": { 5 | "items": { 6 | "malware": { 7 | "category": "malware", 8 | "fail": { 9 | "danger": 0, 10 | "info": 0, 11 | "outOfDate": 0, 12 | "risk": 0, 13 | "warning": 0, 14 | }, 15 | "failSeverity": "safe", 16 | "progress": 100, 17 | "runningItem": "", 18 | "total": 3, 19 | "waitNum": 0, 20 | }, 21 | "network": { 22 | "category": "network", 23 | "fail": { 24 | "danger": 0, 25 | "info": 0, 26 | "outOfDate": 0, 27 | "risk": 0, 28 | "warning": 0, 29 | }, 30 | "failSeverity": "safe", 31 | "progress": 100, 32 | "runningItem": "", 33 | "total": 4, 34 | "waitNum": 0, 35 | }, 36 | "securitySetting": { 37 | "category": "securitySetting", 38 | "fail": { 39 | "danger": 0, 40 | "info": 0, 41 | "outOfDate": 0, 42 | "risk": 0, 43 | "warning": 0, 44 | }, 45 | "failSeverity": "safe", 46 | "progress": 100, 47 | "runningItem": "", 48 | "total": 0, 49 | "waitNum": 0, 50 | }, 51 | "systemCheck": { 52 | "category": "systemCheck", 53 | "fail": { 54 | "danger": 0, 55 | "info": 0, 56 | "outOfDate": 0, 57 | "risk": 0, 58 | "warning": 0, 59 | }, 60 | "failSeverity": "safe", 61 | "progress": 100, 62 | "runningItem": "", 63 | "total": 6, 64 | "waitNum": 0, 65 | }, 66 | "update": { 67 | "category": "update", 68 | "fail": { 69 | "danger": 0, 70 | "info": 0, 71 | "outOfDate": 0, 72 | "risk": 0, 73 | "warning": 0, 74 | }, 75 | "failSeverity": "safe", 76 | "progress": 100, 77 | "runningItem": "", 78 | "total": 4, 79 | "waitNum": 0, 80 | }, 81 | "userInfo": { 82 | "category": "userInfo", 83 | "fail": { 84 | "danger": 0, 85 | "info": 0, 86 | 
"outOfDate": 0, 87 | "risk": 0, 88 | "warning": 0, 89 | }, 90 | "failSeverity": "safe", 91 | "progress": 100, 92 | "runningItem": "", 93 | "total": 6, 94 | "waitNum": 0, 95 | }, 96 | }, 97 | "lastScanTime": "1588298442", 98 | "startTime": "", 99 | "success": True, 100 | "sysProgress": 100, 101 | "sysStatus": "safe", 102 | }, 103 | "success": True, 104 | } 105 | 106 | DSM_6_CORE_SECURITY_UPDATE_OUTOFDATE = { 107 | "data": { 108 | "items": { 109 | "malware": { 110 | "category": "malware", 111 | "fail": { 112 | "danger": 0, 113 | "info": 0, 114 | "outOfDate": 0, 115 | "risk": 0, 116 | "warning": 0, 117 | }, 118 | "failSeverity": "safe", 119 | "progress": 100, 120 | "runningItem": "", 121 | "total": 3, 122 | "waitNum": 0, 123 | }, 124 | "network": { 125 | "category": "network", 126 | "fail": { 127 | "danger": 0, 128 | "info": 0, 129 | "outOfDate": 0, 130 | "risk": 0, 131 | "warning": 0, 132 | }, 133 | "failSeverity": "safe", 134 | "progress": 100, 135 | "runningItem": "", 136 | "total": 4, 137 | "waitNum": 0, 138 | }, 139 | "securitySetting": { 140 | "category": "securitySetting", 141 | "fail": { 142 | "danger": 0, 143 | "info": 0, 144 | "outOfDate": 0, 145 | "risk": 0, 146 | "warning": 0, 147 | }, 148 | "failSeverity": "safe", 149 | "progress": 100, 150 | "runningItem": "", 151 | "total": 0, 152 | "waitNum": 0, 153 | }, 154 | "systemCheck": { 155 | "category": "systemCheck", 156 | "fail": { 157 | "danger": 0, 158 | "info": 0, 159 | "outOfDate": 0, 160 | "risk": 0, 161 | "warning": 0, 162 | }, 163 | "failSeverity": "safe", 164 | "progress": 100, 165 | "runningItem": "", 166 | "total": 6, 167 | "waitNum": 0, 168 | }, 169 | "update": { 170 | "category": "update", 171 | "fail": { 172 | "danger": 0, 173 | "info": 0, 174 | "outOfDate": 1, 175 | "risk": 0, 176 | "warning": 0, 177 | }, 178 | "failSeverity": "outOfDate", 179 | "progress": 100, 180 | "runningItem": "", 181 | "total": 4, 182 | "waitNum": 0, 183 | }, 184 | "userInfo": { 185 | "category": "userInfo", 186 | "fail": { 187 | "danger": 0, 188 | "info": 0, 189 | "outOfDate": 0, 190 | "risk": 0, 191 | "warning": 0, 192 | }, 193 | "failSeverity": "safe", 194 | "progress": 100, 195 | "runningItem": "", 196 | "total": 6, 197 | "waitNum": 0, 198 | }, 199 | }, 200 | "lastScanTime": "1590717640", 201 | "startTime": "", 202 | "success": True, 203 | "sysProgress": 100, 204 | "sysStatus": "outOfDate", 205 | }, 206 | "success": True, 207 | } 208 | -------------------------------------------------------------------------------- /src/synology_dsm/api/storage/storage.py: -------------------------------------------------------------------------------- 1 | """DSM Storage data.""" 2 | from synology_dsm.helpers import SynoFormatHelper 3 | 4 | 5 | class SynoStorage: 6 | """Class containing Storage data.""" 7 | 8 | API_KEY = "SYNO.Storage.CGI.Storage" 9 | 10 | def __init__(self, dsm): 11 | """Constructor method.""" 12 | self._dsm = dsm 13 | self._data = {} 14 | 15 | def update(self): 16 | """Updates storage data.""" 17 | raw_data = self._dsm.get(self.API_KEY, "load_info") 18 | if raw_data: 19 | self._data = raw_data 20 | if raw_data.get("data"): 21 | self._data = raw_data["data"] 22 | 23 | # Root 24 | @property 25 | def disks(self): 26 | """Gets all (internal) disks.""" 27 | return self._data.get("disks", []) 28 | 29 | @property 30 | def env(self): 31 | """Gets storage env.""" 32 | return self._data.get("env") 33 | 34 | @property 35 | def storage_pools(self): 36 | """Gets all storage pools.""" 37 | return self._data.get("storagePools", []) 38 | 39 | @property 40 
| def volumes(self): 41 | """Gets all volumes.""" 42 | return self._data.get("volumes", []) 43 | 44 | # Volume 45 | @property 46 | def volumes_ids(self): 47 | """Returns volumes ids.""" 48 | volumes = [] 49 | for volume in self.volumes: 50 | volumes.append(volume["id"]) 51 | return volumes 52 | 53 | def get_volume(self, volume_id): 54 | """Returns a specific volume.""" 55 | for volume in self.volumes: 56 | if volume["id"] == volume_id: 57 | return volume 58 | return {} 59 | 60 | def volume_status(self, volume_id): 61 | """Status of the volume (normal, degraded, etc).""" 62 | return self.get_volume(volume_id).get("status") 63 | 64 | def volume_device_type(self, volume_id): 65 | """Returns the volume type (RAID1, RAID2, etc).""" 66 | return self.get_volume(volume_id).get("device_type") 67 | 68 | def volume_size_total(self, volume_id, human_readable=False): 69 | """Total size of volume.""" 70 | volume = self.get_volume(volume_id) 71 | if volume.get("size"): 72 | return_data = int(volume["size"]["total"]) 73 | if human_readable: 74 | return SynoFormatHelper.bytes_to_readable(return_data) 75 | return return_data 76 | return None 77 | 78 | def volume_size_used(self, volume_id, human_readable=False): 79 | """Total used size in volume.""" 80 | volume = self.get_volume(volume_id) 81 | if volume.get("size"): 82 | return_data = int(volume["size"]["used"]) 83 | if human_readable: 84 | return SynoFormatHelper.bytes_to_readable(return_data) 85 | return return_data 86 | return None 87 | 88 | def volume_percentage_used(self, volume_id): 89 | """Total used size in percentage for volume.""" 90 | volume = self.get_volume(volume_id) 91 | if volume.get("size"): 92 | total = int(volume["size"]["total"]) 93 | used = int(volume["size"]["used"]) 94 | 95 | if used and used > 0 and total and total > 0: 96 | return round((float(used) / float(total)) * 100.0, 1) 97 | return None 98 | 99 | def volume_disk_temp_avg(self, volume_id): 100 | """Average temperature of all disks making up the volume.""" 101 | vol_disks = self._get_disks_for_volume(volume_id) 102 | if vol_disks: 103 | total_temp = 0 104 | total_disks = 0 105 | 106 | for vol_disk in vol_disks: 107 | disk_temp = self.disk_temp(vol_disk["id"]) 108 | if disk_temp: 109 | total_disks += 1 110 | total_temp += disk_temp 111 | 112 | if total_temp > 0 and total_disks > 0: 113 | return round(total_temp / total_disks, 0) 114 | return None 115 | 116 | def volume_disk_temp_max(self, volume_id): 117 | """Maximum temperature of all disks making up the volume.""" 118 | vol_disks = self._get_disks_for_volume(volume_id) 119 | if vol_disks: 120 | max_temp = 0 121 | 122 | for vol_disk in vol_disks: 123 | disk_temp = self.disk_temp(vol_disk["id"]) 124 | if disk_temp and disk_temp > max_temp: 125 | max_temp = disk_temp 126 | return max_temp 127 | return None 128 | 129 | # Disk 130 | @property 131 | def disks_ids(self): 132 | """Returns (internal) disks ids.""" 133 | disks = [] 134 | for disk in self.disks: 135 | disks.append(disk["id"]) 136 | return disks 137 | 138 | def get_disk(self, disk_id): 139 | """Returns a specific disk.""" 140 | for disk in self.disks: 141 | if disk["id"] == disk_id: 142 | return disk 143 | return {} 144 | 145 | def _get_disks_for_volume(self, volume_id): 146 | """Returns a list of disk for a specific volume.""" 147 | disks = [] 148 | for pool in self.storage_pools: 149 | 150 | if pool.get("deploy_path") == volume_id: 151 | # RAID disk redundancy 152 | for disk_id in pool["disks"]: 153 | disks.append(self.get_disk(disk_id)) 154 | 155 | if 
pool.get("pool_child"): 156 | # SHR disk redundancy 157 | for pool_child in pool.get("pool_child"): 158 | if pool_child["id"] == volume_id: 159 | for disk_id in pool["disks"]: 160 | disks.append(self.get_disk(disk_id)) 161 | 162 | return disks 163 | 164 | def disk_name(self, disk_id): 165 | """The name of this disk.""" 166 | return self.get_disk(disk_id).get("name") 167 | 168 | def disk_device(self, disk_id): 169 | """The mount point of this disk.""" 170 | return self.get_disk(disk_id).get("device") 171 | 172 | def disk_smart_status(self, disk_id): 173 | """Status of disk according to S.M.A.R.T).""" 174 | return self.get_disk(disk_id).get("smart_status") 175 | 176 | def disk_status(self, disk_id): 177 | """Status of disk.""" 178 | return self.get_disk(disk_id).get("status") 179 | 180 | def disk_exceed_bad_sector_thr(self, disk_id): 181 | """Checks if disk has exceeded maximum bad sector threshold.""" 182 | return self.get_disk(disk_id).get("exceed_bad_sector_thr") 183 | 184 | def disk_below_remain_life_thr(self, disk_id): 185 | """Checks if disk has fallen below minimum life threshold.""" 186 | return self.get_disk(disk_id).get("below_remain_life_thr") 187 | 188 | def disk_temp(self, disk_id): 189 | """Returns the temperature of the disk.""" 190 | return self.get_disk(disk_id).get("temp") 191 | -------------------------------------------------------------------------------- /src/synology_dsm/const.py: -------------------------------------------------------------------------------- 1 | """Library constants.""" 2 | # APIs 3 | API_INFO = "SYNO.API.Info" 4 | API_AUTH = "SYNO.API.Auth" 5 | 6 | # SYNO.* 7 | ERROR_COMMON = { 8 | 100: "Unknown error", 9 | 101: "No parameter API, method, or version", 10 | 102: "API does not exist", 11 | 103: "API method does not exist", 12 | 104: "API version not supported", 13 | 105: "Insufficient user privilege", 14 | 106: "Session timeout", 15 | 107: "Session interrupted by duplicate login", 16 | 114: "Missing required parameters", 17 | 117: "Unknown internal error", 18 | 120: "Invalid parameter", 19 | 160: "Insufficient application privilege", 20 | } 21 | 22 | # SYNO.API.Auth 23 | ERROR_AUTH = { 24 | 400: "Invalid credentials", 25 | 401: "Guest or disabled account", 26 | 402: "Permission denied", 27 | 403: "One time password not specified", 28 | 404: "One time password authenticate failed", 29 | 405: "App portal incorrect", 30 | 406: "One time password code enforced", 31 | 407: "Max Tries (if auto blocking is set to true)", 32 | 408: "Password Expired Can not Change", 33 | 409: "Password Expired", 34 | 410: "Password must change (when first time use or after reset password by admin)", 35 | 411: "Account Locked (when account max try exceed)", 36 | } 37 | 38 | # SYNO.DownloadStation[2].BTSearch 39 | ERROR_DOWNLOAD_SEARCH = { 40 | 400: "Unknown error", 41 | 401: "Invalid parameter", 42 | 402: "Parse the user setting failed", 43 | 403: "Get category failed", 44 | 404: "Get the search result from DB failed", 45 | 405: "Get the user setting failed", 46 | } 47 | # SYNO.DownloadStation[2].Task 48 | ERROR_DOWNLOAD_TASK = { 49 | 400: "File upload failed", 50 | 401: "Max number of tasks reached", 51 | 402: "Destination denied", 52 | 403: "Destination does not exist", 53 | 404: "Invalid task id", 54 | 405: "Invalid task action", 55 | 406: "No default destination", 56 | 407: "Set destination failed", 57 | 408: "File does not exist", 58 | } 59 | 60 | # SYNO.FileStation.* 61 | ERROR_FILE = { 62 | 400: "Invalid parameter of file operation", 63 | 401: "Unknown error of file 
operation", 64 | 402: "System is too busy", 65 | 403: "Invalid user does this file operation", 66 | 404: "Invalid group does this file operation", 67 | 405: "Invalid user and group does this file operation", 68 | 406: "Can’t get user/group information from the account server Operation not permitted", 69 | 407: "Operation not permitted", 70 | 408: "No such file or directory", 71 | 409: "Non-supported file system", 72 | 410: "Failed to connect internet-based file system (ex: CIFS)", 73 | 411: "Read-only file system", 74 | 412: "Filename too long in the non-encrypted file system", 75 | 413: "Filename too long in the encrypted file system", 76 | 414: "File already exists", 77 | 415: "Disk quota exceeded", 78 | 416: "No space left on device", 79 | 417: "Input/output error", 80 | 418: "Illegal name or path", 81 | 419: "Illegal file name", 82 | 420: "Illegal file name on FAT file system", 83 | 421: "Device or resource busy", 84 | 599: "No such task of the file operation", 85 | 900: "Failed to delete file(s)/folder(s). More information in object", 86 | 1000: "Failed to copy files/folders. More information in object", 87 | 1001: "Failed to move files/folders. More information in object", 88 | 1002: "An error occurred at the destination. More information in object", 89 | 1003: "Cannot overwrite or skip the existing file because no overwrite parameter is given", 90 | 1004: "File cannot overwrite a folder with the same name, or folder cannot overwrite a file with the same name", 91 | 1006: "Cannot copy/move file/folder with special characters to a FAT32 file system", 92 | 1007: "Cannot copy/move a file bigger than 4G to a FAT32 file system", 93 | 1100: "Failed to create a folder. More information in object", 94 | 1101: "The number of folders to the parent folder would exceed the system limitation", 95 | 1300: "Failed to compress files/folders", 96 | 1301: "Cannot create the archive because the given archive name is too long", 97 | 1400: "Failed to extract files", 98 | 1401: "Cannot open the file as archive", 99 | 1402: "Failed to read archive data error", 100 | 1403: "Wrong archive password", 101 | 1404: "Failed to get the file and dir list in an archive", 102 | 1405: "Failed to find the item ID in an archive file", 103 | 1200: "Failed to rename it. 
More information in object", 104 | 1800: "There is no Content-Length information in the HTTP header or the received size doesn’t match the value of Content-Length information in the HTTP header", 105 | 1801: "Wait too long, no date can be received from client (Default maximum wait time is 3600 seconds)", 106 | 1802: "No filename information in the last part of file content", 107 | 1803: "Upload connection is cancelled", 108 | 1804: "Failed to upload too big file to FAT file system", 109 | 1805: "Can’t overwrite or skip the existed file, if no overwrite parameter is given", 110 | 2000: "Sharing link does not exist", 111 | 2001: "Cannot generate sharing link because too many sharing links exist", 112 | 2002: "Failed to access sharing links", 113 | } 114 | 115 | # SYNO.SurveillanceStation.* 116 | ERROR_SURVEILLANCE = { 117 | 400: "Execution failed", 118 | 401: "Invalid parameter", 119 | 402: "Camera disabled", 120 | 403: "Insufficient license", 121 | 404: "Codec activation failed", 122 | 405: "CMS server connection failed", 123 | 407: "CMS closed", 124 | 412: "Need to add license", 125 | 413: "Reach the maximum of platform", 126 | 414: "Some events not exist", 127 | 415: "Message connect failed", 128 | 417: "Test connection error", 129 | 418: "Object/VisualStation ID does not exist", 130 | 419: "VisualStation name repetition", 131 | 439: "Too many items selected", 132 | 446: "Task path already exist", 133 | 522: "Original task is migrating", 134 | 534: "Exceed name length limitation", 135 | } 136 | 137 | # SYNO.Virtualization.* 138 | ERROR_VIRTUALIZATION = { 139 | 400: "Unknown error", 140 | 401: "Bad parameter", 141 | 402: "Operation failed", 142 | 403: "Name conflict", 143 | 404: "The number of iSCSI LUNs has reached the system limit", 144 | 500: "Note: vdisk is based on iSCSI LUN, which is also limited by the system", 145 | 501: "The cluster is frozen. More than half of the hosts are offline", 146 | 600: "The cluster is in the incompatible mode. Please upgrade to a compatible DSM version and try again", 147 | 601: "The cluster is not ready", 148 | 700: "The host is offline", 149 | 900: "The storage is in invalid", 150 | 901: "Failed to set a host to a virtual machine", 151 | 902: "The virtual machine does not have a host", 152 | 903: "Failed to power on a virtual machine due to insufficient CPU threads", 153 | 904: "Failed to power on a virtual machine due to insufficient memory", 154 | 905: "The status of virtual machine is online", 155 | 906: "MAC conflict", 156 | 907: "Failed to create virtual machine because the selected image is not found", 157 | 908: "The status of virtual machine is offline", 158 | 909: "Failed to power on a virtual machine due to insufficient CPU threads for reservation on the host", 159 | 910: "Failed to power on the virtual machine because there is no corresponding networking on the host", 160 | 911: "Only the VirtIO hard disk controller can be used to boot the virtual machine remotely. Virtual machines with UEFI enabled cannot be powered on remotely", 161 | 1000: "Cannot find task_id", 162 | 1001: "Need Virtual Machine Manager Pro", 163 | 1400: "The result of image creating is partial success", 164 | 1600: "The virtual machine has been successfully edited. 
However, errors occurred while reserving the memory or CPU on the HA hosts", 165 | } 166 | -------------------------------------------------------------------------------- /tests/api_data/dsm_5/storage/const_5_storage_storage.py: -------------------------------------------------------------------------------- 1 | """DSM 5 SYNO.Storage.CGI.Storage data.""" 2 | from tests.const import UNIQUE_KEY 3 | 4 | DSM_5_STORAGE_STORAGE_DS410J_RAID5_4DISKS_1VOL = { 5 | "disks": [ 6 | {"id": "test_disk"}, 7 | { 8 | "container": { 9 | "order": 0, 10 | "str": "DS410j", 11 | "supportPwrBtnDisable": False, 12 | "type": "internal", 13 | }, 14 | "device": "/dev/sdd", 15 | "disable_secera": False, 16 | "diskType": "SATA", 17 | "erase_time": 374, 18 | "firm": "SC60", 19 | "has_system": True, 20 | "id": "sdd", 21 | "is4Kn": False, 22 | "isSsd": False, 23 | "is_erasing": False, 24 | "longName": "Disk 4", 25 | "model": "ST3000VN007-2E4166 ", 26 | "name": "Disk 4", 27 | "num_id": 4, 28 | "order": 4, 29 | "portType": "normal", 30 | "serial": "Z73095S2", 31 | "size_total": "3000592982016", 32 | "smart_status": "safe", 33 | "status": "normal", 34 | "support": False, 35 | "temp": 42, 36 | "used_by": "volume_1", 37 | "vendor": "Seagate", 38 | }, 39 | { 40 | "container": { 41 | "order": 0, 42 | "str": "DS410j", 43 | "supportPwrBtnDisable": False, 44 | "type": "internal", 45 | }, 46 | "device": "/dev/sdc", 47 | "disable_secera": False, 48 | "diskType": "SATA", 49 | "erase_time": 410, 50 | "firm": "80.00A80", 51 | "has_system": True, 52 | "id": "sdc", 53 | "is4Kn": False, 54 | "isSsd": False, 55 | "is_erasing": False, 56 | "longName": "Disk 3", 57 | "model": "WD30EZRZ-00Z5HB0 ", 58 | "name": "Disk 3", 59 | "num_id": 3, 60 | "order": 3, 61 | "portType": "normal", 62 | "serial": "WD-WCC4N0TEJ4F0", 63 | "size_total": "3000592982016", 64 | "smart_status": "safe", 65 | "status": "normal", 66 | "support": False, 67 | "temp": 42, 68 | "used_by": "volume_1", 69 | "vendor": "WDC ", 70 | }, 71 | { 72 | "container": { 73 | "order": 0, 74 | "str": "DS410j", 75 | "supportPwrBtnDisable": False, 76 | "type": "internal", 77 | }, 78 | "device": "/dev/sdb", 79 | "disable_secera": False, 80 | "diskType": "SATA", 81 | "erase_time": 408, 82 | "firm": "82.00A82", 83 | "has_system": True, 84 | "id": "sdb", 85 | "is4Kn": False, 86 | "isSsd": False, 87 | "is_erasing": False, 88 | "longName": "Disk 2", 89 | "model": "WD30EFRX-68EUZN0 ", 90 | "name": "Disk 2", 91 | "num_id": 2, 92 | "order": 2, 93 | "portType": "normal", 94 | "serial": "WD-WCC4N6LSVCVX", 95 | "size_total": "3000592982016", 96 | "smart_status": "safe", 97 | "status": "normal", 98 | "support": False, 99 | "temp": 43, 100 | "used_by": "volume_1", 101 | "vendor": "WDC ", 102 | }, 103 | { 104 | "container": { 105 | "order": 0, 106 | "str": "DS410j", 107 | "supportPwrBtnDisable": False, 108 | "type": "internal", 109 | }, 110 | "device": "/dev/sda", 111 | "disable_secera": False, 112 | "diskType": "SATA", 113 | "erase_time": 0, 114 | "firm": "82.00A82", 115 | "has_system": True, 116 | "id": "sda", 117 | "is4Kn": False, 118 | "isSsd": False, 119 | "is_erasing": False, 120 | "longName": "Disk 1", 121 | "model": "WD30EFRX-68N32N0 ", 122 | "name": "Disk 1", 123 | "num_id": 1, 124 | "order": 1, 125 | "portType": "normal", 126 | "serial": "WD-WCC7K5YA5H40", 127 | "size_total": "3000592982016", 128 | "smart_status": "90%", 129 | "status": "normal", 130 | "support": False, 131 | "temp": 44, 132 | "used_by": "volume_1", 133 | "vendor": "WDC ", 134 | }, 135 | ], 136 | "env": { 137 | "batchtask": 
{"max_task": 64, "remain_task": 64}, 138 | "bay_number": "4", 139 | "ebox": [], 140 | "fs_acting": False, 141 | "is_space_actioning": False, 142 | "isns": {"address": "", "enabled": False}, 143 | "isns_server": "", 144 | "max_fs_bytes": "17592181850112", 145 | "max_fs_bytes_high_end": "219902325555200", 146 | "model_name": "DS410j", 147 | "ram_enough_for_fs_high_end": False, 148 | "ram_size": 0, 149 | "ram_size_required": 32, 150 | "settingSwap": False, 151 | "showpooltab": False, 152 | "status": {"system_crashed": False, "system_need_repair": False}, 153 | "support": {"ebox": False, "raid_cross": False, "sysdef": True}, 154 | "unique_key": UNIQUE_KEY, 155 | }, 156 | "hotSpares": [], 157 | "iscsiLuns": [ 158 | { 159 | "can_do": { 160 | "data_scrubbing": True, 161 | "delete": True, 162 | "expand_by_disk": 1, 163 | "migrate": {"to_raid5+spare": "1-1", "to_raid6": 1}, 164 | }, 165 | "id": "iscsilun_LUN-1", 166 | "is_actioning": False, 167 | "iscsi_lun": { 168 | "blkNum": "19614744", 169 | "device_type": "file", 170 | "lid": 1, 171 | "location": "volume_1", 172 | "mapped_targets": [1], 173 | "name": "LUN-1", 174 | "restored_time": "0", 175 | "rootpath": "/volume1", 176 | "size": "10737418240", 177 | "thin_provision": False, 178 | "uuid": "fcf3a450-681c-06cb-fbb9-0400bdbe0780", 179 | "vaai_extent_size": "0", 180 | "vaai_support": False, 181 | }, 182 | "num_id": 1, 183 | "progress": {"percent": "-1", "step": "none"}, 184 | "status": "normal", 185 | } 186 | ], 187 | "iscsiTargets": [ 188 | { 189 | "auth": {"mutual_username": "", "type": "none", "username": ""}, 190 | "data_chksum": 0, 191 | "enabled": True, 192 | "hdr_chksum": 0, 193 | "iqn": "iqn.2000-01.com.synology:DiskStation.name", 194 | "mapped_logical_unit_number": [0], 195 | "mapped_luns": [1], 196 | "masking": [ 197 | {"iqn": "iqn.2000-01.com.synology:default.acl", "permission": "rw"} 198 | ], 199 | "multi_sessions": False, 200 | "name": "Target-1", 201 | "num_id": 1, 202 | "recv_seg_bytes": 262144, 203 | "remote": [], 204 | "send_seg_bytes": 4096, 205 | "status": "online", 206 | "tid": 1, 207 | } 208 | ], 209 | "ports": [], 210 | "storagePools": [], 211 | "success": True, 212 | "volumes": [ 213 | {"id": "test_volume"}, 214 | { 215 | "can_do": { 216 | "data_scrubbing": True, 217 | "delete": True, 218 | "expand_by_disk": 1, 219 | "migrate": {"to_raid5+spare": "1-1", "to_raid6": 1}, 220 | }, 221 | "container": "internal", 222 | "device_type": "raid_5", 223 | "disk_failure_number": 0, 224 | "disks": ["sda", "sdb", "sdc", "sdd"], 225 | "eppool_used": "10042748928", 226 | "fs_type": "ext3", 227 | "id": "volume_1", 228 | "is_acting": False, 229 | "is_actioning": False, 230 | "is_inode_full": False, 231 | "is_writable": True, 232 | "max_fs_size": "17592181850112", 233 | "maximal_disk_size": "0", 234 | "minimal_disk_size": "2995767869440", 235 | "num_id": 1, 236 | "pool_path": "", 237 | "progress": {"percent": "-1", "step": "none"}, 238 | "size": { 239 | "free_inode": "547237217", 240 | "total": "8846249701376", 241 | "total_device": "8987275100160", 242 | "total_inode": "548544512", 243 | "used": "5719795761152", 244 | }, 245 | "space_path": "/dev/md2", 246 | "spares": [], 247 | "ssd_trim": {"support": "not support"}, 248 | "status": "normal", 249 | "suggestions": [], 250 | "timebackup": True, 251 | "used_by_gluster": False, 252 | "vol_path": "/volume1", 253 | "vspace_can_do": { 254 | "drbd": { 255 | "resize": { 256 | "can_do": False, 257 | "errCode": 53504, 258 | "stopService": False, 259 | } 260 | }, 261 | "flashcache": { 262 | "apply": { 263 
| "can_do": False, 264 | "errCode": 53504, 265 | "stopService": False, 266 | }, 267 | "remove": { 268 | "can_do": False, 269 | "errCode": 53504, 270 | "stopService": False, 271 | }, 272 | "resize": { 273 | "can_do": False, 274 | "errCode": 53504, 275 | "stopService": False, 276 | }, 277 | }, 278 | "snapshot": { 279 | "resize": {"can_do": True, "errCode": 0, "stopService": False} 280 | }, 281 | }, 282 | }, 283 | ], 284 | } 285 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | ========== 2 | Deprecated 3 | ========== 4 | Use now: https://github.com/hacf-fr/synologydsm-api 5 | 6 | 7 | =========================== 8 | Python API for Synology DSM 9 | =========================== 10 | 11 | .. image:: https://travis-ci.org/ProtoThis/python-synology.svg?branch=master 12 | :target: https://travis-ci.org/ProtoThis/python-synology 13 | 14 | .. image:: https://img.shields.io/pypi/v/python-synology.svg 15 | :alt: Library version 16 | :target: https://pypi.org/project/python-synology 17 | 18 | .. image:: https://img.shields.io/pypi/pyversions/python-synology.svg 19 | :alt: Supported versions 20 | :target: https://pypi.org/project/python-synology 21 | 22 | .. image:: https://pepy.tech/badge/python-synology 23 | :alt: Downloads 24 | :target: https://pypi.org/project/python-synology 25 | 26 | .. image:: https://img.shields.io/badge/code%20style-black-000000.svg 27 | :alt: Formated with Black 28 | :target: https://github.com/psf/black 29 | 30 | 31 | Installation 32 | ============ 33 | 34 | .. code-block:: bash 35 | 36 | [sudo] pip install python-synology 37 | 38 | 39 | Usage 40 | ===== 41 | 42 | You can import the module as `synology_dsm`. 43 | 44 | 45 | Constructor 46 | ----------- 47 | 48 | .. code-block:: python 49 | 50 | SynologyDSM( 51 | dsm_ip, 52 | dsm_port, 53 | username, 54 | password, 55 | use_https=False, 56 | verify_ssl=False, 57 | timeout=None, 58 | device_token=None, 59 | debugmode=False, 60 | ) 61 | 62 | ``device_token`` should be added when using a two-step authentication account, otherwise DSM will ask to login with a One Time Password (OTP) and requests will fail (see the login section for more details). 63 | 64 | Default ``timeout`` is 10 seconds. 65 | 66 | 67 | Login 68 | ------ 69 | 70 | The library automatically login at first request, but you better use the ``login()`` function separately to authenticate. 71 | 72 | It will return a boolean if it successed or faild to authenticate to DSM. 73 | 74 | If your account need a two-step authentication (2SA), ``login()`` will raise ``SynologyDSMLogin2SARequiredException``. 75 | Call the function again with a One Time Password (OTP) as parameter, like ``login("123456")`` (better to be a string to handle first zero). 76 | Store the ``device_token`` property so that you do not need to reconnect with password the next time you open a new ``SynologyDSM`` session. 77 | 78 | 79 | Code exemple 80 | ------------ 81 | 82 | Every API has an ``update()`` function that is needed to get the first data, then the data is cached and updated at the next ``update()`` call. 83 | 84 | The ``SynologyDSM`` class can also ``update()`` all APIs at once. 85 | 86 | .. 
.. code-block:: python 87 | 88 | from synology_dsm import SynologyDSM 89 | 90 | print("Creating Valid API") 91 | api = SynologyDSM("", "", "", "") 92 | 93 | print("=== Information ===") 94 | api.information.update() 95 | print("Model: " + str(api.information.model)) 96 | print("RAM: " + str(api.information.ram) + " MB") 97 | print("Serial number: " + str(api.information.serial)) 98 | print("Temperature: " + str(api.information.temperature) + " °C") 99 | print("Temp. warning: " + str(api.information.temperature_warn)) 100 | print("Uptime: " + str(api.information.uptime)) 101 | print("Full DSM version: " + str(api.information.version_string)) 102 | print("--") 103 | 104 | print("=== Utilisation ===") 105 | api.utilisation.update() 106 | print("CPU Load: " + str(api.utilisation.cpu_total_load) + " %") 107 | print("Memory Use: " + str(api.utilisation.memory_real_usage) + " %") 108 | print("Net Up: " + str(api.utilisation.network_up())) 109 | print("Net Down: " + str(api.utilisation.network_down())) 110 | print("--") 111 | 112 | print("=== Storage ===") 113 | api.storage.update() 114 | for volume_id in api.storage.volumes_ids: 115 | print("ID: " + str(volume_id)) 116 | print("Status: " + str(api.storage.volume_status(volume_id))) 117 | print("% Used: " + str(api.storage.volume_percentage_used(volume_id)) + " %") 118 | print("--") 119 | 120 | for disk_id in api.storage.disks_ids: 121 | print("ID: " + str(disk_id)) 122 | print("Name: " + str(api.storage.disk_name(disk_id))) 123 | print("S-Status: " + str(api.storage.disk_smart_status(disk_id))) 124 | print("Status: " + str(api.storage.disk_status(disk_id))) 125 | print("Temp: " + str(api.storage.disk_temp(disk_id))) 126 | print("--") 127 | 128 | print("=== Shared Folders ===") 129 | api.share.update() 130 | for share_uuid in api.share.shares_uuids: 131 | print("Share name: " + str(api.share.share_name(share_uuid))) 132 | print("Share path: " + str(api.share.share_path(share_uuid))) 133 | print("Space used: " + str(api.share.share_size(share_uuid, human_readable=True))) 134 | print("Recycle Bin Enabled: " + str(api.share.share_recycle_bin(share_uuid))) 135 | print("--") 136 | 137 | 138 | Download Station usage 139 | -------------------------- 140 | 141 | .. code-block:: python 142 | 143 | from synology_dsm import SynologyDSM 144 | 145 | api = SynologyDSM("", "", "", "") 146 | 147 | if "SYNO.DownloadStation.Info" in api.apis: 148 | 149 | api.download_station.get_info() 150 | api.download_station.get_config() 151 | 152 | # The download list will be updated after each of the following functions: 153 | # You must have write permission on the (default) destination directory, otherwise you will get a 403 or 406 error 154 | api.download_station.create("http://commondatastorage.googleapis.com/gtv-videos-bucket/sample/BigBuckBunny.mp4") 155 | api.download_station.pause("dbid_1") 156 | # Like the other functions, you can either pass a str or a list 157 | api.download_station.resume(["dbid_1", "dbid_2"]) 158 | api.download_station.delete("dbid_3") 159 | 160 | # Manual update 161 | api.download_station.update() 162 | 163 | 164 | Surveillance Station usage 165 | -------------------------- 166 | 167 |
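Surveillance Station is an optional package, so (as with Download Station above) you may want to check ``api.apis`` before using it; the key below is an assumption mirroring the library's camera API and may differ on your setup:

.. code-block:: python

    # Assumed API key -- check what your NAS actually reports in api.apis
    if "SYNO.SurveillanceStation.Camera" in api.apis:
        surveillance = api.surveillance_station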
.. code-block:: python 168 | 169 | from synology_dsm import SynologyDSM 170 | 171 | api = SynologyDSM("", "", "", "") 172 | surveillance = api.surveillance_station 173 | surveillance.update() # First update is required 174 | 175 | # Returns a list of cached cameras available 176 | cameras = surveillance.get_all_cameras() 177 | 178 | # Assuming there's at least one camera, get the first camera_id 179 | camera_id = cameras[0].camera_id 180 | 181 | # Returns cached camera object by camera_id 182 | camera = surveillance.get_camera(camera_id) 183 | 184 | # Returns whether motion detection is enabled (cached) 185 | motion_setting = camera.is_motion_detection_enabled 186 | 187 | # Returns the camera image as bytes 188 | surveillance.get_camera_image(camera_id) 189 | 190 | # Updates all cameras/motion settings and caches them 191 | surveillance.update() 192 | 193 | # Gets Home Mode status 194 | home_mode_status = surveillance.get_home_mode_status() 195 | 196 | # Sets Home Mode - True is on, False is off 197 | surveillance.set_home_mode(True) 198 | 199 | 200 | System usage 201 | -------------------------- 202 | 203 | .. code-block:: python 204 | 205 | from synology_dsm import SynologyDSM 206 | 207 | api = SynologyDSM("", "", "", "") 208 | system = api.system 209 | 210 | # Reboot NAS 211 | system.reboot() 212 | 213 | # Shutdown NAS 214 | system.shutdown() 215 | 216 | # Manually update system information 217 | system.update() 218 | 219 | # Get CPU information 220 | system.cpu_clock_speed 221 | system.cpu_cores 222 | system.cpu_family 223 | system.cpu_series 224 | 225 | # Get NTP settings 226 | system.enabled_ntp 227 | system.ntp_server 228 | 229 | # Get system information 230 | system.firmware_ver 231 | system.model 232 | system.ram_size 233 | system.serial 234 | system.sys_temp 235 | system.time 236 | system.time_zone 237 | system.time_zone_desc 238 | system.up_time 239 | 240 | # Get list of all connected USB devices 241 | system.usb_dev 242 | 243 | 244 | Upgrade usage 245 | -------------------------- 246 | 247 | .. code-block:: python 248 | 249 | from synology_dsm import SynologyDSM 250 | 251 | api = SynologyDSM("", "", "", "") 252 | upgrade = api.upgrade 253 | 254 | # Manually update upgrade information 255 | upgrade.update() 256 | 257 | # Check if a DSM update is available 258 | if upgrade.update_available: 259 | ...  # do something, e.g. notify the user
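        # For example (illustrative), surface the pending version:
        print("DSM update available:", upgrade.available_version)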
260 | 261 | # get available version string (return None if no update available) 262 | upgrade.available_version 263 | 264 | # get need of reboot (return None if no update available) 265 | upgrade.reboot_needed 266 | 267 | # get need of service restarts (return None if no update available) 268 | upgrade.service_restarts 269 | 270 | 271 | Credits / Special Thanks 272 | ======================== 273 | - https://github.com/florianeinfalt 274 | - https://github.com/tchellomello 275 | - https://github.com/Quentame (Multiple API addition & tests) 276 | - https://github.com/aaska (DSM 5 tests) 277 | - https://github.com/chemelli74 (2SA tests) 278 | - https://github.com/snjoetw (Surveillance Station library) 279 | - https://github.com/shenxn (Surveillance Station tests) 280 | - https://github.com/Gestas (Shared Folders) 281 | 282 | Found Synology API "documentation" on this repo : https://github.com/kwent/syno/tree/master/definitions 283 | 284 | 285 | Official references 286 | =================== 287 | 288 | - `Calendar API documentation (2015-2019) `_ 289 | 290 | - `Download Station API documentation (2012-2014) `_ 291 | 292 | - `File Station API documentation (2013-2019) `_ 293 | 294 | - `Surveillance Station API documentation (2012-2020) `_ 295 | 296 | - `Virtual Machine Manager API documentation (2015-2019) `_ 297 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- 1 | """Library tests.""" 2 | from json import JSONDecodeError 3 | from urllib.parse import urlencode 4 | 5 | from requests.exceptions import ConnectionError as ConnError 6 | from requests.exceptions import RequestException 7 | from requests.exceptions import SSLError 8 | 9 | from .api_data.dsm_5 import DSM_5_API_INFO 10 | from .api_data.dsm_5 import DSM_5_AUTH_LOGIN 11 | from .api_data.dsm_5 import DSM_5_AUTH_LOGIN_2SA 12 | from .api_data.dsm_5 import DSM_5_AUTH_LOGIN_2SA_OTP 13 | from .api_data.dsm_5 import DSM_5_CORE_UTILIZATION 14 | from .api_data.dsm_5 import DSM_5_DSM_INFORMATION 15 | from .api_data.dsm_5 import DSM_5_DSM_NETWORK 16 | from .api_data.dsm_5 import DSM_5_STORAGE_STORAGE_DS410J_RAID5_4DISKS_1VOL 17 | from .api_data.dsm_6 import DSM_6_API_INFO 18 | from .api_data.dsm_6 import DSM_6_API_INFO_SURVEILLANCE_STATION 19 | from .api_data.dsm_6 import DSM_6_AUTH_LOGIN 20 | from .api_data.dsm_6 import DSM_6_AUTH_LOGIN_2SA 21 | from .api_data.dsm_6 import DSM_6_AUTH_LOGIN_2SA_OTP 22 | from .api_data.dsm_6 import DSM_6_CORE_SECURITY 23 | from .api_data.dsm_6 import DSM_6_CORE_SECURITY_UPDATE_OUTOFDATE 24 | from .api_data.dsm_6 import DSM_6_CORE_SHARE 25 | from .api_data.dsm_6 import DSM_6_CORE_SYSTEM_DS918_PLUS 26 | from .api_data.dsm_6 import DSM_6_CORE_UPGRADE_TRUE 27 | from .api_data.dsm_6 import DSM_6_CORE_UTILIZATION 28 | from .api_data.dsm_6 import DSM_6_CORE_UTILIZATION_ERROR_1055 29 | from .api_data.dsm_6 import DSM_6_DOWNLOAD_STATION_INFO_CONFIG 30 | from .api_data.dsm_6 import DSM_6_DOWNLOAD_STATION_INFO_INFO 31 | from .api_data.dsm_6 import DSM_6_DOWNLOAD_STATION_STAT_INFO 32 | from .api_data.dsm_6 import DSM_6_DOWNLOAD_STATION_TASK_LIST 33 | from .api_data.dsm_6 import DSM_6_DSM_INFORMATION 34 | from .api_data.dsm_6 import DSM_6_DSM_NETWORK_2LAN_1PPPOE 35 | from .api_data.dsm_6 import ( 36 | DSM_6_STORAGE_STORAGE_DS1515_PLUS_SHR2_10DISKS_1VOL_WITH_EXPANSION, 37 | ) 38 | from .api_data.dsm_6 import DSM_6_STORAGE_STORAGE_DS1819_PLUS_SHR2_8DISKS_1VOL 39 | from .api_data.dsm_6 import 
DSM_6_STORAGE_STORAGE_DS213_PLUS_SHR1_2DISKS_2VOLS 40 | from .api_data.dsm_6 import DSM_6_STORAGE_STORAGE_DS918_PLUS_RAID5_3DISKS_1VOL 41 | from .api_data.dsm_6 import DSM_6_SURVEILLANCE_STATION_CAMERA_EVENT_MD_PARAM_SAVE 42 | from .api_data.dsm_6 import DSM_6_SURVEILLANCE_STATION_CAMERA_EVENT_MOTION_ENUM 43 | from .api_data.dsm_6 import DSM_6_SURVEILLANCE_STATION_CAMERA_GET_LIVE_VIEW_PATH 44 | from .api_data.dsm_6 import DSM_6_SURVEILLANCE_STATION_CAMERA_LIST 45 | from .api_data.dsm_6 import DSM_6_SURVEILLANCE_STATION_HOME_MODE_GET_INFO 46 | from .api_data.dsm_6 import DSM_6_SURVEILLANCE_STATION_HOME_MODE_SWITCH 47 | from .const import DEVICE_TOKEN 48 | from .const import ERROR_AUTH_INVALID_CREDENTIALS 49 | from .const import ERROR_AUTH_MAX_TRIES 50 | from .const import ERROR_AUTH_OTP_AUTHENTICATE_FAILED 51 | from .const import ERROR_INSUFFICIENT_USER_PRIVILEGE 52 | from synology_dsm import SynologyDSM 53 | from synology_dsm.api.core.security import SynoCoreSecurity 54 | from synology_dsm.api.core.share import SynoCoreShare 55 | from synology_dsm.api.core.system import SynoCoreSystem 56 | from synology_dsm.api.core.upgrade import SynoCoreUpgrade 57 | from synology_dsm.api.core.utilization import SynoCoreUtilization 58 | from synology_dsm.api.download_station import SynoDownloadStation 59 | from synology_dsm.api.dsm.information import SynoDSMInformation 60 | from synology_dsm.api.dsm.network import SynoDSMNetwork 61 | from synology_dsm.api.storage.storage import SynoStorage 62 | from synology_dsm.api.surveillance_station import SynoSurveillanceStation 63 | from synology_dsm.const import API_AUTH 64 | from synology_dsm.const import API_INFO 65 | from synology_dsm.exceptions import SynologyDSMRequestException 66 | 67 | API_SWITCHER = { 68 | 5: { 69 | "API_INFO": DSM_5_API_INFO, 70 | "AUTH_LOGIN": DSM_5_AUTH_LOGIN, 71 | "AUTH_LOGIN_2SA": DSM_5_AUTH_LOGIN_2SA, 72 | "AUTH_LOGIN_2SA_OTP": DSM_5_AUTH_LOGIN_2SA_OTP, 73 | "DSM_INFORMATION": DSM_5_DSM_INFORMATION, 74 | "DSM_NETWORK": DSM_5_DSM_NETWORK, 75 | "CORE_UTILIZATION": DSM_5_CORE_UTILIZATION, 76 | "STORAGE_STORAGE": { 77 | "RAID": DSM_5_STORAGE_STORAGE_DS410J_RAID5_4DISKS_1VOL, 78 | }, 79 | }, 80 | 6: { 81 | "API_INFO": DSM_6_API_INFO, 82 | "AUTH_LOGIN": DSM_6_AUTH_LOGIN, 83 | "AUTH_LOGIN_2SA": DSM_6_AUTH_LOGIN_2SA, 84 | "AUTH_LOGIN_2SA_OTP": DSM_6_AUTH_LOGIN_2SA_OTP, 85 | "DSM_INFORMATION": DSM_6_DSM_INFORMATION, 86 | "DSM_NETWORK": DSM_6_DSM_NETWORK_2LAN_1PPPOE, 87 | "CORE_SECURITY": DSM_6_CORE_SECURITY, 88 | "CORE_SHARE": DSM_6_CORE_SHARE, 89 | "CORE_SYSTEM": DSM_6_CORE_SYSTEM_DS918_PLUS, 90 | "CORE_UTILIZATION": DSM_6_CORE_UTILIZATION, 91 | "CORE_UPGRADE": DSM_6_CORE_UPGRADE_TRUE, 92 | "STORAGE_STORAGE": { 93 | "RAID": DSM_6_STORAGE_STORAGE_DS918_PLUS_RAID5_3DISKS_1VOL, 94 | "SHR1": DSM_6_STORAGE_STORAGE_DS213_PLUS_SHR1_2DISKS_2VOLS, 95 | "SHR2": DSM_6_STORAGE_STORAGE_DS1819_PLUS_SHR2_8DISKS_1VOL, 96 | "SHR2_EXPANSION": DSM_6_STORAGE_STORAGE_DS1515_PLUS_SHR2_10DISKS_1VOL_WITH_EXPANSION, # noqa: B950 97 | }, 98 | }, 99 | } 100 | 101 | 102 | VALID_HOST = "nas.mywebsite.me" 103 | VALID_PORT = "443" 104 | VALID_HTTPS = True 105 | VALID_VERIFY_SSL = True 106 | VALID_USER = "valid_user" 107 | VALID_USER_2SA = "valid_user_2sa" 108 | VALID_PASSWORD = "valid_password" 109 | VALID_OTP = "123456" 110 | 111 | USER_MAX_TRY = "user_max" 112 | 113 | 114 | class SynologyDSMMock(SynologyDSM): 115 | """Mocked SynologyDSM.""" 116 | 117 | API_URI = "api=" 118 | 119 | def __init__( 120 | self, 121 | dsm_ip, 122 | dsm_port, 123 | username, 124 | password, 125 
| use_https=False, 126 | verify_ssl=False, 127 | timeout=None, 128 | device_token=None, 129 | debugmode=False, 130 | ): 131 | """Constructor method.""" 132 | SynologyDSM.__init__( 133 | self, 134 | dsm_ip, 135 | dsm_port, 136 | username, 137 | password, 138 | use_https, 139 | verify_ssl, 140 | timeout, 141 | device_token, 142 | debugmode, 143 | ) 144 | 145 | self.verify_ssl = verify_ssl 146 | self.dsm_version = 6 # 5 or 6 147 | self.disks_redundancy = "RAID" # RAID or SHR[number][_EXPANSION] 148 | self.error = False 149 | self.with_surveillance = False 150 | 151 | def _execute_request(self, method, url, params, **kwargs): 152 | url += urlencode(params or {}) 153 | 154 | if "no_internet" in url: 155 | raise SynologyDSMRequestException( 156 | ConnError( 157 | ": Failed to establish a new connection: " 159 | "[Errno 8] nodename nor servname provided, or not known" 160 | ) 161 | ) 162 | 163 | if VALID_HOST not in url: 164 | raise SynologyDSMRequestException( 165 | ConnError( 166 | ":" 167 | " Failed to establish a new connection: [Errno 8] nodename " 168 | "nor servname provided, or not known" 169 | ) 170 | ) 171 | 172 | if VALID_PORT not in url and "https" not in url: 173 | raise SynologyDSMRequestException( 174 | JSONDecodeError("Expecting value", "document", 0) 175 | ) 176 | 177 | if VALID_PORT not in url: 178 | raise SynologyDSMRequestException( 179 | SSLError( 180 | "[SSL: WRONG_VERSION_NUMBER] wrong version number (_ssl.c:1076)" 181 | ) 182 | ) 183 | 184 | if "https" not in url: 185 | raise SynologyDSMRequestException(RequestException("Bad request")) 186 | 187 | if not self.verify_ssl: 188 | raise SynologyDSMRequestException( 189 | SSLError(f"hostname '192.168.0.35' doesn't match '{VALID_HOST}'") 190 | ) 191 | 192 | if API_INFO in url: 193 | if self.with_surveillance: 194 | return DSM_6_API_INFO_SURVEILLANCE_STATION 195 | return API_SWITCHER[self.dsm_version]["API_INFO"] 196 | 197 | if API_AUTH in url: 198 | if VALID_USER_2SA in url and VALID_PASSWORD in url: 199 | if "otp_code" not in url and "device_id" not in url: 200 | return API_SWITCHER[self.dsm_version]["AUTH_LOGIN_2SA"] 201 | 202 | if "device_id" in url and DEVICE_TOKEN in url: 203 | return API_SWITCHER[self.dsm_version]["AUTH_LOGIN"] 204 | 205 | if "otp_code" in url: 206 | if VALID_OTP in url: 207 | return API_SWITCHER[self.dsm_version]["AUTH_LOGIN_2SA_OTP"] 208 | return ERROR_AUTH_OTP_AUTHENTICATE_FAILED 209 | 210 | if VALID_USER in url and VALID_PASSWORD in url: 211 | return API_SWITCHER[self.dsm_version]["AUTH_LOGIN"] 212 | 213 | if USER_MAX_TRY in url: 214 | return ERROR_AUTH_MAX_TRIES 215 | 216 | return ERROR_AUTH_INVALID_CREDENTIALS 217 | 218 | if self.API_URI in url: 219 | if not self._session_id: 220 | return ERROR_INSUFFICIENT_USER_PRIVILEGE 221 | 222 | if SynoCoreSecurity.API_KEY in url: 223 | if self.error: 224 | return DSM_6_CORE_SECURITY_UPDATE_OUTOFDATE 225 | return API_SWITCHER[self.dsm_version]["CORE_SECURITY"] 226 | 227 | if SynoCoreShare.API_KEY in url: 228 | return API_SWITCHER[self.dsm_version]["CORE_SHARE"] 229 | 230 | if SynoCoreSystem.API_KEY in url: 231 | if SynoCoreUtilization.API_KEY in url: 232 | if self.error: 233 | return DSM_6_CORE_UTILIZATION_ERROR_1055 234 | return API_SWITCHER[self.dsm_version]["CORE_UTILIZATION"] 235 | return API_SWITCHER[self.dsm_version]["CORE_SYSTEM"] 236 | 237 | if SynoCoreUpgrade.API_KEY in url: 238 | return API_SWITCHER[self.dsm_version]["CORE_UPGRADE"] 239 | 240 | if SynoDSMInformation.API_KEY in url: 241 | return API_SWITCHER[self.dsm_version]["DSM_INFORMATION"] 242 | 
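            # The remaining branches follow the same pattern: match each
            # SYNO.* API key against the requested URL and return the
            # matching canned payload from API_SWITCHER or the DSM 6 constants.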
243 | if SynoDSMNetwork.API_KEY in url: 244 | return API_SWITCHER[self.dsm_version]["DSM_NETWORK"] 245 | 246 | if SynoDownloadStation.INFO_API_KEY in url: 247 | if "GetInfo" in url: 248 | return DSM_6_DOWNLOAD_STATION_INFO_INFO 249 | if "GetConfig" in url: 250 | return DSM_6_DOWNLOAD_STATION_INFO_CONFIG 251 | if SynoDownloadStation.STAT_API_KEY in url: 252 | if "GetInfo" in url: 253 | return DSM_6_DOWNLOAD_STATION_STAT_INFO 254 | if SynoDownloadStation.TASK_API_KEY in url: 255 | if "List" in url: 256 | return DSM_6_DOWNLOAD_STATION_TASK_LIST 257 | 258 | if SynoStorage.API_KEY in url: 259 | return API_SWITCHER[self.dsm_version]["STORAGE_STORAGE"][ 260 | self.disks_redundancy 261 | ] 262 | 263 | if SynoSurveillanceStation.CAMERA_API_KEY in url: 264 | if "GetLiveViewPath" in url: 265 | return DSM_6_SURVEILLANCE_STATION_CAMERA_GET_LIVE_VIEW_PATH 266 | if "List" in url: 267 | assert params["version"] == 7 268 | return DSM_6_SURVEILLANCE_STATION_CAMERA_LIST 269 | if "MDParamSave" in url: 270 | return DSM_6_SURVEILLANCE_STATION_CAMERA_EVENT_MD_PARAM_SAVE 271 | if "MotionEnum" in url: 272 | return DSM_6_SURVEILLANCE_STATION_CAMERA_EVENT_MOTION_ENUM 273 | 274 | if SynoSurveillanceStation.HOME_MODE_API_KEY in url: 275 | if "GetInfo" in url: 276 | return DSM_6_SURVEILLANCE_STATION_HOME_MODE_GET_INFO 277 | if "Switch" in url: 278 | return DSM_6_SURVEILLANCE_STATION_HOME_MODE_SWITCH 279 | 280 | if ( 281 | "SYNO.FileStation.Upload" in url 282 | and "upload" in url 283 | and "file_already_exists" in kwargs["files"]["file"] 284 | ): 285 | return {"error": {"code": 1805}, "success": False} 286 | 287 | if ( 288 | "SYNO.DownloadStation2.Task" in url 289 | and "create" in url 290 | and "test_not_exists" in url 291 | ): 292 | return {"error": {"code": 408}, "success": False} 293 | 294 | return {"success": False} 295 | 296 | return None 297 | -------------------------------------------------------------------------------- /tests/api_data/dsm_6/download_station/const_6_download_station_task.py: -------------------------------------------------------------------------------- 1 | """DSM 6 SYNO.DownloadStation.Task data.""" 2 | 3 | DSM_6_DOWNLOAD_STATION_TASK_LIST = { 4 | "data": { 5 | "offset": 0, 6 | "tasks": [ 7 | { 8 | "additional": { 9 | "detail": { 10 | "completed_time": 0, 11 | "connected_leechers": 0, 12 | "connected_peers": 0, 13 | "connected_seeders": 0, 14 | "create_time": 1550089068, 15 | "destination": "Folder/containing/downloads", 16 | "seedelapsed": 0, 17 | "started_time": 1592549339, 18 | "total_peers": 0, 19 | "total_pieces": 1239, 20 | "unzip_password": "", 21 | "uri": "magnet:?xt=urn:btih:1234ABCD1234ABCD1234ABCD1234ABCD1234ABCD&dn=1234", 22 | "waiting_seconds": 0, 23 | }, 24 | "file": [ 25 | { 26 | "filename": "INFO.nfo", 27 | "index": 0, 28 | "priority": "low", 29 | "size": 1335, 30 | "size_downloaded": 0, 31 | "wanted": True, 32 | }, 33 | { 34 | "filename": "My super movie 2 2022.mkv", 35 | "index": 1, 36 | "priority": "normal", 37 | "size": 1591087515, 38 | "size_downloaded": 0, 39 | "wanted": True, 40 | }, 41 | { 42 | "filename": "My super movie 2 sample.mkv", 43 | "index": 2, 44 | "priority": "normal", 45 | "size": 2754524, 46 | "size_downloaded": 0, 47 | "wanted": False, 48 | }, 49 | { 50 | "filename": "My super movie 2021.mkv", 51 | "index": 3, 52 | "priority": "normal", 53 | "size": 1155085341, 54 | "size_downloaded": 0, 55 | "wanted": True, 56 | }, 57 | { 58 | "filename": "My super movie 2021 sample.mkv", 59 | "index": 4, 60 | "priority": "normal", 61 | "size": 4359701, 62 | 
"size_downloaded": 0, 63 | "wanted": False, 64 | }, 65 | { 66 | "filename": "My super movie 3 2023.mkv", 67 | "index": 5, 68 | "priority": "normal", 69 | "size": 1288819263, 70 | "size_downloaded": 0, 71 | "wanted": True, 72 | }, 73 | { 74 | "filename": "My super movie 3 sample.mkv", 75 | "index": 6, 76 | "priority": "normal", 77 | "size": 3077684, 78 | "size_downloaded": 0, 79 | "wanted": False, 80 | }, 81 | { 82 | "filename": "My super movie 4 2031.mkv", 83 | "index": 7, 84 | "priority": "normal", 85 | "size": 1149397942, 86 | "size_downloaded": 0, 87 | "wanted": True, 88 | }, 89 | { 90 | "filename": "My super movie 4 sample.mkv", 91 | "index": 8, 92 | "priority": "normal", 93 | "size": 2023179, 94 | "size_downloaded": 0, 95 | "wanted": False, 96 | }, 97 | ], 98 | }, 99 | "id": "dbid_86", 100 | "size": 5196586484, 101 | "status": "downloading", 102 | "title": "My super movie Complete 2021-2031", 103 | "type": "bt", 104 | "username": "Test_useR", 105 | }, 106 | { 107 | "additional": { 108 | "detail": { 109 | "completed_time": 0, 110 | "connected_leechers": 0, 111 | "connected_peers": 0, 112 | "connected_seeders": 0, 113 | "create_time": 1551214114, 114 | "destination": "Folder/containing/downloads", 115 | "seedelapsed": 0, 116 | "started_time": 1592549348, 117 | "total_peers": 0, 118 | "total_pieces": 948, 119 | "unzip_password": "", 120 | "uri": "magnet:?xt=urn:btih:1234ABCD1234ABCD1234ABCD1234ABCD1234ABCD&dn=1234", 121 | "waiting_seconds": 0, 122 | }, 123 | "file": [ 124 | { 125 | "filename": "Blade Swipper 1984.mkv", 126 | "index": 0, 127 | "priority": "normal", 128 | "size": 1986298376, 129 | "size_downloaded": 1602519560, 130 | "wanted": True, 131 | } 132 | ], 133 | }, 134 | "id": "dbid_164", 135 | "size": 1986298376, 136 | "status": "downloading", 137 | "title": "Blade Swipper 1984.mkv", 138 | "type": "bt", 139 | "username": "Test_useR", 140 | }, 141 | { 142 | "additional": { 143 | "detail": { 144 | "completed_time": 0, 145 | "connected_leechers": 0, 146 | "connected_peers": 50, 147 | "connected_seeders": 50, 148 | "create_time": 1585435581, 149 | "destination": "Folder/containing/downloads", 150 | "seedelapsed": 0, 151 | "started_time": 1592549349, 152 | "total_peers": 0, 153 | "total_pieces": 0, 154 | "unzip_password": "", 155 | "uri": "magnet:?xt=urn:btih:1234ABCD1234ABCD1234ABCD1234ABCD1234ABCD&dn=1234", 156 | "waiting_seconds": 0, 157 | }, 158 | "file": [], 159 | }, 160 | "id": "dbid_486", 161 | "size": 0, 162 | "status": "downloading", 163 | "title": "The falling State", 164 | "type": "bt", 165 | "username": "Test_useR", 166 | }, 167 | { 168 | "additional": { 169 | "detail": { 170 | "completed_time": 0, 171 | "connected_leechers": 0, 172 | "connected_peers": 1, 173 | "connected_seeders": 1, 174 | "create_time": 1591562665, 175 | "destination": "Folder/containing/downloads", 176 | "seedelapsed": 0, 177 | "started_time": 1591563597, 178 | "total_peers": 0, 179 | "total_pieces": 1494, 180 | "unzip_password": "", 181 | "uri": "magnet:?xt=urn:btih:1234ABCD1234ABCD1234ABCD1234ABCD1234ABCD&dn=1234", 182 | "waiting_seconds": 0, 183 | } 184 | }, 185 | "id": "dbid_518", 186 | "size": 391580448, 187 | "status": "paused", 188 | "title": "Welcome to the North.mkv", 189 | "type": "bt", 190 | "username": "Test_useR", 191 | }, 192 | { 193 | "additional": { 194 | "detail": { 195 | "completed_time": 1591565351, 196 | "connected_leechers": 0, 197 | "connected_peers": 0, 198 | "connected_seeders": 0, 199 | "create_time": 1591563606, 200 | "destination": "Folder/containing/downloads", 201 | 
"seedelapsed": 172800, 202 | "started_time": 1592601577, 203 | "total_peers": 0, 204 | "total_pieces": 5466, 205 | "unzip_password": "", 206 | "uri": "magnet:?xt=urn:btih:1234ABCD1234ABCD1234ABCD1234ABCD1234ABCD&dn=1234", 207 | "waiting_seconds": 0, 208 | } 209 | }, 210 | "id": "dbid_522", 211 | "size": 5731285821, 212 | "status": "finished", 213 | "title": "Birds of Pokémon.mkv", 214 | "type": "bt", 215 | "username": "Test_useR", 216 | }, 217 | { 218 | "additional": { 219 | "detail": { 220 | "completed_time": 1591566799, 221 | "connected_leechers": 0, 222 | "connected_peers": 0, 223 | "connected_seeders": 0, 224 | "create_time": 1591566523, 225 | "destination": "Folder/containing/downloads", 226 | "seedelapsed": 0, 227 | "started_time": 1591566696, 228 | "total_peers": 0, 229 | "total_pieces": 0, 230 | "unzip_password": "", 231 | "uri": "https://1fichier.com/?1234ABCD1234ABCD1234&af=22123", 232 | "waiting_seconds": 0, 233 | } 234 | }, 235 | "id": "dbid_531", 236 | "size": 2811892495, 237 | "status": "finished", 238 | "title": "1234ABCD1234ABCD1234", 239 | "type": "https", 240 | "username": "Test_useR", 241 | }, 242 | { 243 | "additional": { 244 | "detail": { 245 | "completed_time": 0, 246 | "connected_leechers": 0, 247 | "connected_peers": 0, 248 | "connected_seeders": 0, 249 | "create_time": 1591566903, 250 | "destination": "Folder/containing/downloads", 251 | "seedelapsed": 0, 252 | "started_time": 0, 253 | "total_peers": 0, 254 | "total_pieces": 0, 255 | "unzip_password": "", 256 | "uri": "https://1fichier.com/?123ABC123ABC123ABC12", 257 | "waiting_seconds": 0, 258 | } 259 | }, 260 | "id": "dbid_533", 261 | "size": 0, 262 | "status": "error", 263 | "status_extra": {"error_detail": "unknown"}, 264 | "title": "?123ABC123ABC123ABC12", 265 | "type": "https", 266 | "username": "Test_useR", 267 | }, 268 | { 269 | "additional": { 270 | "detail": { 271 | "completed_time": 0, 272 | "connected_leechers": 0, 273 | "connected_peers": 0, 274 | "connected_seeders": 0, 275 | "create_time": 1592605687, 276 | "destination": "Folder/containing/downloads", 277 | "seedelapsed": 0, 278 | "started_time": 1592605731, 279 | "total_peers": 0, 280 | "total_pieces": 0, 281 | "unzip_password": "", 282 | "uri": "https://1fichier.com/?123ABC123ABC123ABC12", 283 | "waiting_seconds": 0, 284 | } 285 | }, 286 | "id": "dbid_549", 287 | "size": 0, 288 | "status": "error", 289 | "status_extra": {"error_detail": "broken_link"}, 290 | "title": "123ABC123ABC123ABC12", 291 | "type": "https", 292 | "username": "Test_useR", 293 | }, 294 | ], 295 | "total": 8, 296 | }, 297 | "success": True, 298 | } 299 | -------------------------------------------------------------------------------- /tests/test_synology_dsm_5.py: -------------------------------------------------------------------------------- 1 | """Synology DSM tests.""" 2 | from unittest import TestCase 3 | 4 | from . import SynologyDSMMock 5 | from . import VALID_HOST 6 | from . import VALID_HTTPS 7 | from . import VALID_OTP 8 | from . import VALID_PASSWORD 9 | from . import VALID_PORT 10 | from . import VALID_USER 11 | from . import VALID_USER_2SA 12 | from . 
import VALID_VERIFY_SSL 13 | from .const import DEVICE_TOKEN 14 | from .const import SESSION_ID 15 | from synology_dsm.const import API_AUTH 16 | from synology_dsm.const import API_INFO 17 | from synology_dsm.exceptions import SynologyDSMAPIErrorException 18 | from synology_dsm.exceptions import SynologyDSMAPINotExistsException 19 | from synology_dsm.exceptions import SynologyDSMLogin2SAFailedException 20 | from synology_dsm.exceptions import SynologyDSMLogin2SARequiredException 21 | from synology_dsm.exceptions import SynologyDSMLoginInvalidException 22 | from synology_dsm.exceptions import SynologyDSMRequestException 23 | 24 | 25 | class TestSynologyDSM(TestCase): 26 | """SynologyDSM test cases.""" 27 | 28 | api = None 29 | 30 | def setUp(self): 31 | """Context initialisation called for all tests.""" 32 | self.api = SynologyDSMMock( 33 | VALID_HOST, 34 | VALID_PORT, 35 | VALID_USER, 36 | VALID_PASSWORD, 37 | VALID_HTTPS, 38 | VALID_VERIFY_SSL, 39 | ) 40 | self.api.dsm_version = 5 41 | 42 | def test_init(self): 43 | """Test init.""" 44 | assert self.api.username 45 | assert self.api._base_url 46 | assert not self.api.apis.get(API_AUTH) 47 | assert not self.api._session_id 48 | 49 | def test_connection_failed(self): 50 | """Test failed connection.""" 51 | api = SynologyDSMMock( 52 | "no_internet", 53 | VALID_PORT, 54 | VALID_USER, 55 | VALID_PASSWORD, 56 | VALID_HTTPS, 57 | VALID_VERIFY_SSL, 58 | ) 59 | api.dsm_version = 5 60 | with self.assertRaises(SynologyDSMRequestException): 61 | assert not api.login() 62 | assert not api.apis.get(API_AUTH) 63 | assert not api._session_id 64 | 65 | api = SynologyDSMMock( 66 | "host", 67 | VALID_PORT, 68 | VALID_USER, 69 | VALID_PASSWORD, 70 | VALID_HTTPS, 71 | VALID_VERIFY_SSL, 72 | ) 73 | api.dsm_version = 5 74 | with self.assertRaises(SynologyDSMRequestException): 75 | assert not api.login() 76 | assert not api.apis.get(API_AUTH) 77 | assert not api._session_id 78 | 79 | api = SynologyDSMMock( 80 | VALID_HOST, 0, VALID_USER, VALID_PASSWORD, VALID_HTTPS, VALID_VERIFY_SSL 81 | ) 82 | api.dsm_version = 5 83 | with self.assertRaises(SynologyDSMRequestException): 84 | assert not api.login() 85 | assert not api.apis.get(API_AUTH) 86 | assert not api._session_id 87 | 88 | api = SynologyDSMMock( 89 | VALID_HOST, 90 | VALID_PORT, 91 | VALID_USER, 92 | VALID_PASSWORD, 93 | False, 94 | VALID_VERIFY_SSL, 95 | ) 96 | api.dsm_version = 5 97 | with self.assertRaises(SynologyDSMRequestException): 98 | assert not api.login() 99 | assert not api.apis.get(API_AUTH) 100 | assert not api._session_id 101 | 102 | def test_login(self): 103 | """Test login.""" 104 | assert self.api.login() 105 | assert self.api.apis.get(API_AUTH) 106 | assert self.api._session_id == SESSION_ID 107 | assert self.api._syno_token is None 108 | 109 | def test_login_failed(self): 110 | """Test failed login.""" 111 | api = SynologyDSMMock( 112 | VALID_HOST, 113 | VALID_PORT, 114 | "user", 115 | VALID_PASSWORD, 116 | VALID_HTTPS, 117 | VALID_VERIFY_SSL, 118 | ) 119 | api.dsm_version = 5 120 | with self.assertRaises(SynologyDSMLoginInvalidException): 121 | assert not api.login() 122 | assert api.apis.get(API_AUTH) 123 | assert not api._session_id 124 | 125 | api = SynologyDSMMock( 126 | VALID_HOST, 127 | VALID_PORT, 128 | VALID_USER, 129 | "pass", 130 | VALID_HTTPS, 131 | VALID_VERIFY_SSL, 132 | ) 133 | api.dsm_version = 5 134 | with self.assertRaises(SynologyDSMLoginInvalidException): 135 | assert not api.login() 136 | assert api.apis.get(API_AUTH) 137 | assert not api._session_id 138 | 139 | def 
test_login_2sa(self): 140 | """Test login with 2SA.""" 141 | api = SynologyDSMMock( 142 | VALID_HOST, 143 | VALID_PORT, 144 | VALID_USER_2SA, 145 | VALID_PASSWORD, 146 | VALID_HTTPS, 147 | VALID_VERIFY_SSL, 148 | ) 149 | api.dsm_version = 5 150 | with self.assertRaises(SynologyDSMLogin2SARequiredException): 151 | api.login() 152 | api.login(VALID_OTP) 153 | 154 | assert api._session_id == SESSION_ID 155 | assert api._syno_token is None 156 | assert api._device_token == DEVICE_TOKEN 157 | assert api.device_token == DEVICE_TOKEN 158 | 159 | def test_login_2sa_new_session(self): 160 | """Test login with 2SA and a new session with granted device.""" 161 | api = SynologyDSMMock( 162 | VALID_HOST, 163 | VALID_PORT, 164 | VALID_USER_2SA, 165 | VALID_PASSWORD, 166 | VALID_HTTPS, 167 | VALID_VERIFY_SSL, 168 | device_token=DEVICE_TOKEN, 169 | ) 170 | api.dsm_version = 5 171 | assert api.login() 172 | 173 | assert api._session_id == SESSION_ID 174 | assert api._syno_token is None 175 | assert api._device_token == DEVICE_TOKEN 176 | assert api.device_token == DEVICE_TOKEN 177 | 178 | def test_login_2sa_failed(self): 179 | """Test failed login with 2SA.""" 180 | api = SynologyDSMMock( 181 | VALID_HOST, 182 | VALID_PORT, 183 | VALID_USER_2SA, 184 | VALID_PASSWORD, 185 | VALID_HTTPS, 186 | VALID_VERIFY_SSL, 187 | ) 188 | api.dsm_version = 5 189 | with self.assertRaises(SynologyDSMLogin2SARequiredException): 190 | api.login() 191 | with self.assertRaises(SynologyDSMLogin2SAFailedException): 192 | api.login(888888) 193 | 194 | assert api._session_id is None 195 | assert api._syno_token is None 196 | assert api._device_token is None 197 | 198 | def test_request_get(self): 199 | """Test get request.""" 200 | assert self.api.get(API_INFO, "query") 201 | assert self.api.get(API_AUTH, "login") 202 | assert self.api.get("SYNO.DownloadStation2.Task", "list") 203 | assert self.api.get(API_AUTH, "logout") 204 | 205 | def test_request_get_failed(self): 206 | """Test failed get request.""" 207 | with self.assertRaises(SynologyDSMAPINotExistsException): 208 | assert self.api.get("SYNO.Virtualization.API.Task.Info", "list") 209 | 210 | def test_request_post(self): 211 | """Test post request.""" 212 | assert self.api.post( 213 | "SYNO.FileStation.Upload", 214 | "upload", 215 | params={"dest_folder_path": "/upload/test", "create_parents": True}, 216 | files={"file": "open('file.txt','rb')"}, 217 | ) 218 | 219 | assert self.api.post( 220 | "SYNO.DownloadStation2.Task", 221 | "create", 222 | params={ 223 | "uri": "ftps://192.0.0.1:21/test/test.zip", 224 | "username": "admin", 225 | "password": "1234", 226 | }, 227 | ) 228 | 229 | def test_request_post_failed(self): 230 | """Test failed post request.""" 231 | with self.assertRaises(SynologyDSMAPIErrorException): 232 | assert self.api.post( 233 | "SYNO.FileStation.Upload", 234 | "upload", 235 | params={"dest_folder_path": "/upload/test", "create_parents": True}, 236 | files={"file": "open('file_already_exists.txt','rb')"}, 237 | ) 238 | 239 | with self.assertRaises(SynologyDSMAPIErrorException): 240 | assert self.api.post( 241 | "SYNO.DownloadStation2.Task", 242 | "create", 243 | params={ 244 | "uri": "ftps://192.0.0.1:21/test/test_not_exists.zip", 245 | "username": "admin", 246 | "password": "1234", 247 | }, 248 | ) 249 | 250 | def test_information(self): 251 | """Test information.""" 252 | assert self.api.information 253 | self.api.information.update() 254 | assert self.api.information.model == "DS3615xs" 255 | assert self.api.information.ram == 6144 256 | assert 
self.api.information.serial == "B3J4N01003" 257 | assert self.api.information.temperature == 40 258 | assert not self.api.information.temperature_warn 259 | assert self.api.information.uptime == 3897 260 | assert self.api.information.version == "5967" 261 | assert self.api.information.version_string == "DSM 5.2-5967 Update 9" 262 | 263 | def test_network(self): 264 | """Test network.""" 265 | assert self.api.network 266 | self.api.network.update() 267 | assert self.api.network.dns 268 | assert self.api.network.gateway 269 | assert self.api.network.hostname 270 | assert self.api.network.interfaces 271 | assert self.api.network.interface("eth0") 272 | assert self.api.network.interface("eth1") is None 273 | assert self.api.network.macs 274 | assert self.api.network.workgroup 275 | 276 | def test_utilisation(self): 277 | """Test utilization.""" 278 | assert self.api.utilisation 279 | self.api.utilisation.update() 280 | 281 | def test_utilisation_cpu(self): 282 | """Test utilization CPU.""" 283 | self.api.utilisation.update() 284 | assert self.api.utilisation.cpu 285 | assert self.api.utilisation.cpu_other_load 286 | assert self.api.utilisation.cpu_user_load 287 | assert self.api.utilisation.cpu_system_load 288 | assert self.api.utilisation.cpu_total_load 289 | assert self.api.utilisation.cpu_1min_load 290 | assert self.api.utilisation.cpu_5min_load 291 | assert self.api.utilisation.cpu_15min_load 292 | 293 | def test_utilisation_memory(self): 294 | """Test utilization memory.""" 295 | self.api.utilisation.update() 296 | assert self.api.utilisation.memory 297 | assert self.api.utilisation.memory_real_usage 298 | assert self.api.utilisation.memory_size() 299 | assert self.api.utilisation.memory_size(True) 300 | assert self.api.utilisation.memory_available_swap() 301 | assert self.api.utilisation.memory_available_swap(True) 302 | assert self.api.utilisation.memory_cached() 303 | assert self.api.utilisation.memory_cached(True) 304 | assert self.api.utilisation.memory_available_real() 305 | assert self.api.utilisation.memory_available_real(True) 306 | assert self.api.utilisation.memory_total_real() 307 | assert self.api.utilisation.memory_total_real(True) 308 | assert self.api.utilisation.memory_total_swap() 309 | assert self.api.utilisation.memory_total_swap(True) 310 | 311 | def test_utilisation_network(self): 312 | """Test utilization network.""" 313 | self.api.utilisation.update() 314 | assert self.api.utilisation.network 315 | assert self.api.utilisation.network_up() 316 | assert self.api.utilisation.network_up(True) 317 | assert self.api.utilisation.network_down() 318 | assert self.api.utilisation.network_down(True) 319 | 320 | def test_storage(self): 321 | """Test storage roots.""" 322 | assert self.api.storage 323 | self.api.storage.update() 324 | assert self.api.storage.disks 325 | assert self.api.storage.env 326 | assert self.api.storage.storage_pools == [] 327 | assert self.api.storage.volumes 328 | 329 | def test_storage_volumes(self): 330 | """Test storage volumes.""" 331 | self.api.storage.update() 332 | # Basics 333 | assert self.api.storage.volumes_ids 334 | for volume_id in self.api.storage.volumes_ids: 335 | if volume_id == "test_volume": 336 | continue 337 | assert self.api.storage.volume_status(volume_id) 338 | assert self.api.storage.volume_device_type(volume_id) 339 | assert self.api.storage.volume_size_total(volume_id) 340 | assert self.api.storage.volume_size_total(volume_id, True) 341 | assert self.api.storage.volume_size_used(volume_id) 342 | assert 
self.api.storage.volume_size_used(volume_id, True) 343 | assert self.api.storage.volume_percentage_used(volume_id) 344 | assert ( 345 | self.api.storage.volume_disk_temp_avg(volume_id) is None 346 | ) # because of empty storagePools 347 | assert ( 348 | self.api.storage.volume_disk_temp_max(volume_id) is None 349 | ) # because of empty storagePools 350 | 351 | # Existing volume 352 | assert self.api.storage.volume_status("volume_1") == "normal" 353 | assert self.api.storage.volume_device_type("volume_1") == "raid_5" 354 | assert self.api.storage.volume_size_total("volume_1") == 8846249701376 355 | assert self.api.storage.volume_size_total("volume_1", True) == "8.0Tb" 356 | assert self.api.storage.volume_size_used("volume_1") == 5719795761152 357 | assert self.api.storage.volume_size_used("volume_1", True) == "5.2Tb" 358 | assert self.api.storage.volume_percentage_used("volume_1") == 64.7 359 | assert ( 360 | self.api.storage.volume_disk_temp_avg("volume_1") is None 361 | ) # because of empty storagePools 362 | assert ( 363 | self.api.storage.volume_disk_temp_max("volume_1") is None 364 | ) # because of empty storagePools 365 | 366 | # Non existing volume 367 | assert not self.api.storage.volume_status("not_a_volume") 368 | assert not self.api.storage.volume_device_type("not_a_volume") 369 | assert not self.api.storage.volume_size_total("not_a_volume") 370 | assert not self.api.storage.volume_size_total("not_a_volume", True) 371 | assert not self.api.storage.volume_size_used("not_a_volume") 372 | assert not self.api.storage.volume_size_used("not_a_volume", True) 373 | assert not self.api.storage.volume_percentage_used("not_a_volume") 374 | assert not self.api.storage.volume_disk_temp_avg("not_a_volume") 375 | assert not self.api.storage.volume_disk_temp_max("not_a_volume") 376 | 377 | # Test volume 378 | assert self.api.storage.volume_status("test_volume") is None 379 | assert self.api.storage.volume_device_type("test_volume") is None 380 | assert self.api.storage.volume_size_total("test_volume") is None 381 | assert self.api.storage.volume_size_total("test_volume", True) is None 382 | assert self.api.storage.volume_size_used("test_volume") is None 383 | assert self.api.storage.volume_size_used("test_volume", True) is None 384 | assert self.api.storage.volume_percentage_used("test_volume") is None 385 | assert self.api.storage.volume_disk_temp_avg("test_volume") is None 386 | assert self.api.storage.volume_disk_temp_max("test_volume") is None 387 | 388 | def test_storage_disks(self): 389 | """Test storage disks.""" 390 | self.api.storage.update() 391 | # Basics 392 | assert self.api.storage.disks_ids 393 | for disk_id in self.api.storage.disks_ids: 394 | if disk_id == "test_disk": 395 | continue 396 | assert "Disk" in self.api.storage.disk_name(disk_id) 397 | assert "/dev/" in self.api.storage.disk_device(disk_id) 398 | if disk_id == "sda": 399 | assert self.api.storage.disk_smart_status(disk_id) == "90%" 400 | else: 401 | assert self.api.storage.disk_smart_status(disk_id) == "safe" 402 | assert self.api.storage.disk_status(disk_id) == "normal" 403 | assert not self.api.storage.disk_exceed_bad_sector_thr(disk_id) 404 | assert not self.api.storage.disk_below_remain_life_thr(disk_id) 405 | assert self.api.storage.disk_temp(disk_id) 406 | 407 | # Non existing disk 408 | assert not self.api.storage.disk_name("not_a_disk") 409 | assert not self.api.storage.disk_device("not_a_disk") 410 | assert not self.api.storage.disk_smart_status("not_a_disk") 411 | assert not 
self.api.storage.disk_status("not_a_disk") 412 | assert not self.api.storage.disk_exceed_bad_sector_thr("not_a_disk") 413 | assert not self.api.storage.disk_below_remain_life_thr("not_a_disk") 414 | assert not self.api.storage.disk_temp("not_a_disk") 415 | 416 | # Test disk 417 | assert self.api.storage.disk_name("test_disk") is None 418 | assert self.api.storage.disk_device("test_disk") is None 419 | assert self.api.storage.disk_smart_status("test_disk") is None 420 | assert self.api.storage.disk_status("test_disk") is None 421 | assert self.api.storage.disk_exceed_bad_sector_thr("test_disk") is None 422 | assert self.api.storage.disk_below_remain_life_thr("test_disk") is None 423 | assert self.api.storage.disk_temp("test_disk") is None 424 | -------------------------------------------------------------------------------- /src/synology_dsm/synology_dsm.py: -------------------------------------------------------------------------------- 1 | """Class to interact with Synology DSM.""" 2 | import socket 3 | from json import JSONDecodeError 4 | from urllib.parse import quote 5 | 6 | import urllib3 7 | from requests import Session 8 | from requests.exceptions import RequestException 9 | 10 | from .api.core.security import SynoCoreSecurity 11 | from .api.core.share import SynoCoreShare 12 | from .api.core.system import SynoCoreSystem 13 | from .api.core.upgrade import SynoCoreUpgrade 14 | from .api.core.utilization import SynoCoreUtilization 15 | from .api.download_station import SynoDownloadStation 16 | from .api.dsm.information import SynoDSMInformation 17 | from .api.dsm.network import SynoDSMNetwork 18 | from .api.storage.storage import SynoStorage 19 | from .api.surveillance_station import SynoSurveillanceStation 20 | from .const import API_AUTH 21 | from .const import API_INFO 22 | from .exceptions import SynologyDSMAPIErrorException 23 | from .exceptions import SynologyDSMAPINotExistsException 24 | from .exceptions import SynologyDSMLogin2SAFailedException 25 | from .exceptions import SynologyDSMLogin2SARequiredException 26 | from .exceptions import SynologyDSMLoginDisabledAccountException 27 | from .exceptions import SynologyDSMLoginFailedException 28 | from .exceptions import SynologyDSMLoginInvalidException 29 | from .exceptions import SynologyDSMLoginPermissionDeniedException 30 | from .exceptions import SynologyDSMRequestException 31 | 32 | 33 | class SynologyDSM: 34 | """Class containing the main Synology DSM functions.""" 35 | 36 | DSM_5_WEIRD_URL_API = [ 37 | SynoStorage.API_KEY, 38 | ] 39 | 40 | def __init__( 41 | self, 42 | dsm_ip: str, 43 | dsm_port: int, 44 | username: str, 45 | password: str, 46 | use_https: bool = False, 47 | verify_ssl: bool = False, 48 | timeout: int = None, 49 | device_token: str = None, 50 | debugmode: bool = False, 51 | ): 52 | """Constructor method.""" 53 | self.username = username 54 | self._password = password 55 | self._timeout = timeout or 10 56 | self._debugmode = debugmode 57 | self._verify = verify_ssl & use_https 58 | 59 | # Session 60 | self._session = Session() 61 | self._session.verify = self._verify 62 | 63 | # Login 64 | self._session_id = None 65 | self._syno_token = None 66 | self._device_token = device_token 67 | 68 | # Services 69 | self._apis = { 70 | "SYNO.API.Info": {"maxVersion": 1, "minVersion": 1, "path": "query.cgi"} 71 | } 72 | self._download = None 73 | self._information = None 74 | self._network = None 75 | self._security = None 76 | self._share = None 77 | self._storage = None 78 | self._surveillance = None 79 | self._system 
= None 80 | self._utilisation = None 81 | self._upgrade = None 82 | 83 | # Build variables 84 | if use_https: 85 | if not verify_ssl: 86 | # https://urllib3.readthedocs.io/en/latest/advanced-usage.html#ssl-warnings # noqa: B950 87 | # disable SSL warnings due to the auto-generated cert 88 | urllib3.disable_warnings() 89 | 90 | self._base_url = f"https://{dsm_ip}:{dsm_port}" 91 | else: 92 | self._base_url = f"http://{dsm_ip}:{dsm_port}" 93 | 94 | def _debuglog(self, message: str): 95 | """Outputs the message if debug mode is enabled.""" 96 | if self._debugmode: 97 | print("DEBUG: " + message) 98 | 99 | def _is_weird_api_url(self, api: str) -> bool: 100 | """Returns True if the API URL is not common. 101 | 102 | Common template is nas_base_url/webapi/path?params 103 | Only handles DSM 5 for now. 104 | """ 105 | return ( 106 | api in self.DSM_5_WEIRD_URL_API 107 | and self._information 108 | and self._information.version 109 | and int(self._information.version) < 7321 # < DSM 6 110 | ) 111 | 112 | def _build_url(self, api: str) -> str: 113 | if self._is_weird_api_url(api): 114 | if api == SynoStorage.API_KEY: 115 | return ( 116 | f"{self._base_url}/webman/modules/StorageManager/" 117 | f"storagehandler.cgi?" 118 | ) 119 | 120 | return f"{self._base_url}/webapi/{self.apis[api]['path']}?" 121 | 122 | def discover_apis(self): 123 | """Retrieves available API information from the NAS.""" 124 | if self._apis.get(API_AUTH): 125 | return 126 | self._apis = self.get(API_INFO, "query")["data"] 127 | 128 | @property 129 | def apis(self): 130 | """Gets available API information from the NAS.""" 131 | return self._apis 132 | 133 | def login(self, otp_code: str = None) -> bool: 134 | """Create a logged-in session.""" 135 | # First reset the session 136 | self._debuglog("Creating new session") 137 | self._session = Session() 138 | self._session.verify = self._verify 139 | 140 | params = { 141 | "account": self.username, 142 | "passwd": self._password, 143 | # "enable_syno_token": "yes", 144 | "enable_device_token": "yes", 145 | "device_name": socket.gethostname(), 146 | "format": "sid", 147 | } 148 | 149 | if otp_code: 150 | params["otp_code"] = otp_code 151 | if self._device_token: 152 | params["device_id"] = self._device_token 153 | 154 | # Request login 155 | result = self.get(API_AUTH, "login", params) 156 | 157 | # Handle errors 158 | if result.get("error"): 159 | switcher = { 160 | 400: SynologyDSMLoginInvalidException(self.username), 161 | 401: SynologyDSMLoginDisabledAccountException(self.username), 162 | 402: SynologyDSMLoginPermissionDeniedException(self.username), 163 | 403: SynologyDSMLogin2SARequiredException(self.username), 164 | 404: SynologyDSMLogin2SAFailedException(), 165 | } 166 | raise switcher.get( 167 | result["error"]["code"], 168 | SynologyDSMLoginFailedException(result["error"]["code"], self.username), 169 | ) 170 | 171 | # Parse result if valid 172 | self._session_id = result["data"]["sid"] 173 | if result["data"].get("synotoken"): 174 | # Not available on API version < 3 175 | self._syno_token = result["data"]["synotoken"] 176 | if result["data"].get("did"): 177 | # Not available on API version < 6 and the device token is given once 178 | # per device_name 179 | self._device_token = result["data"]["did"] 180 | self._debuglog("Authentication successful, token: " + str(self._session_id)) 181 | 182 | if not self._information: 183 | self._information = SynoDSMInformation(self) 184 | self._information.update() 185 | 186 | return result["success"] 187 | 188 | def logout(self) -> bool: 189 | """Log out of 
the session.""" 190 | result = self.get(API_AUTH, "logout") 191 | self._session = None 192 | return result["success"] 193 | 194 | @property 195 | def device_token(self) -> str: 196 | """Gets the device token. 197 | 198 | Used to remember the 2SA access was granted on this device. 199 | """ 200 | return self._device_token 201 | 202 | def get(self, api: str, method: str, params: dict = None, **kwargs): 203 | """Handles API GET request.""" 204 | return self._request("GET", api, method, params, **kwargs) 205 | 206 | def post(self, api: str, method: str, params: dict = None, **kwargs): 207 | """Handles API POST request.""" 208 | return self._request("POST", api, method, params, **kwargs) 209 | 210 | def _request( 211 | self, 212 | request_method: str, 213 | api: str, 214 | method: str, 215 | params: dict = None, 216 | retry_once: bool = True, 217 | **kwargs, 218 | ): 219 | """Handles API request.""" 220 | # Discover existing APIs 221 | if api != API_INFO: 222 | self.discover_apis() 223 | 224 | # Check if logged 225 | if not self._session_id and api not in [API_AUTH, API_INFO]: 226 | self.login() 227 | 228 | # Build request params 229 | if not params: 230 | params = {} 231 | params["api"] = api 232 | params["version"] = 1 233 | 234 | if not self._is_weird_api_url(api): 235 | # Check if API is available 236 | if not self.apis.get(api): 237 | raise SynologyDSMAPINotExistsException(api) 238 | params["version"] = self.apis[api]["maxVersion"] 239 | max_version = kwargs.pop("max_version", None) 240 | if max_version and params["version"] > max_version: 241 | params["version"] = max_version 242 | 243 | params["method"] = method 244 | 245 | if api == SynoStorage.API_KEY: 246 | params["action"] = method 247 | if self._session_id: 248 | params["_sid"] = self._session_id 249 | if self._syno_token: 250 | params["SynoToken"] = self._syno_token 251 | 252 | url = self._build_url(api) 253 | 254 | # Request data 255 | response = self._execute_request(request_method, url, params, **kwargs) 256 | self._debuglog("Request Method: " + request_method) 257 | self._debuglog("Successful returned data") 258 | self._debuglog("API: " + api) 259 | self._debuglog("RESPONSE: " + str(response)) 260 | 261 | # Handle data errors 262 | if isinstance(response, dict) and response.get("error") and api != API_AUTH: 263 | self._debuglog("Session error: " + str(response["error"]["code"])) 264 | if response["error"]["code"] == 119 and retry_once: 265 | # Session ID not valid 266 | # see https://github.com/aerialls/synology-srm/pull/3 267 | self._session_id = None 268 | self._syno_token = None 269 | self._device_token = None 270 | return self._request(request_method, api, method, params, False) 271 | raise SynologyDSMAPIErrorException( 272 | api, response["error"]["code"], response["error"].get("errors") 273 | ) 274 | 275 | return response 276 | 277 | def _execute_request(self, method: str, url: str, params: dict, **kwargs): 278 | """Function to execute and handle a request.""" 279 | # Execute Request 280 | try: 281 | if method == "GET": 282 | encoded_params = "&".join( 283 | f"{key}={quote(str(value))}" for key, value in params.items() 284 | ) 285 | response = self._session.get( 286 | url, params=encoded_params, timeout=self._timeout, **kwargs 287 | ) 288 | elif method == "POST": 289 | data = {} 290 | data.update(params) 291 | data.update(kwargs.pop("data", {})) 292 | data["mimeType"] = "application/json" 293 | kwargs["data"] = data 294 | self._debuglog("POST data: " + str(data)) 295 | 296 | response = self._session.post( 297 | url, 
params=params, timeout=self._timeout, **kwargs 298 | ) 299 | 300 | self._debuglog("Request url: " + response.url) 301 | self._debuglog("Request status_code: " + str(response.status_code)) 302 | self._debuglog("Request headers: " + str(response.headers)) 303 | 304 | if response.status_code == 200: 305 | # We got a DSM response 306 | content_type = response.headers.get("Content-Type", "").split(";")[0] 307 | 308 | if content_type in [ 309 | "application/json", 310 | "text/json", 311 | "text/plain", # Can happen with some APIs 312 | ]: 313 | return response.json() 314 | 315 | return response.content 316 | 317 | # We got a 400, 401 or 404 ... 318 | raise RequestException(response) 319 | 320 | except (RequestException, JSONDecodeError) as exp: 321 | raise SynologyDSMRequestException(exp) from exp 322 | 323 | def update(self, with_information: bool = False, with_network: bool = False): 324 | """Updates the various instantiated modules.""" 325 | if self._download: 326 | self._download.update() 327 | 328 | if self._information and with_information: 329 | self._information.update() 330 | 331 | if self._network and with_network: 332 | self._network.update() 333 | 334 | if self._security: 335 | self._security.update() 336 | 337 | if self._utilisation: 338 | self._utilisation.update() 339 | 340 | if self._storage: 341 | self._storage.update() 342 | 343 | if self._share: 344 | self._share.update() 345 | 346 | if self._surveillance: 347 | self._surveillance.update() 348 | 349 | if self._system: 350 | self._system.update() 351 | 352 | if self._upgrade: 353 | self._upgrade.update() 354 | 355 | def reset(self, api: any) -> bool: 356 | """Reset an API to avoid fetching it on update.""" 357 | if isinstance(api, str): 358 | if api in ("information", SynoDSMInformation.API_KEY): 359 | return False 360 | if hasattr(self, "_" + api): 361 | setattr(self, "_" + api, None) 362 | return True 363 | if api == SynoCoreSecurity.API_KEY: 364 | self._security = None 365 | return True 366 | if api == SynoCoreShare.API_KEY: 367 | self._share = None 368 | return True 369 | if api == SynoCoreSystem.API_KEY: 370 | self._system = None 371 | return True 372 | if api == SynoCoreUpgrade.API_KEY: 373 | self._upgrade = None 374 | return True 375 | if api == SynoCoreUtilization.API_KEY: 376 | self._utilisation = None 377 | return True 378 | if api == SynoDownloadStation.API_KEY: 379 | self._download = None 380 | return True 381 | if api == SynoStorage.API_KEY: 382 | self._storage = None 383 | return True 384 | if api == SynoSurveillanceStation.API_KEY: 385 | self._surveillance = None 386 | return True 387 | if isinstance(api, SynoCoreSecurity): 388 | self._security = None 389 | return True 390 | if isinstance(api, SynoCoreShare): 391 | self._share = None 392 | return True 393 | if isinstance(api, SynoCoreSystem): 394 | self._system = None 395 | return True 396 | if isinstance(api, SynoCoreUpgrade): 397 | self._upgrade = None 398 | return True 399 | if isinstance(api, SynoCoreUtilization): 400 | self._utilisation = None 401 | return True 402 | if isinstance(api, SynoDownloadStation): 403 | self._download = None 404 | return True 405 | if isinstance(api, SynoStorage): 406 | self._storage = None 407 | return True 408 | if isinstance(api, SynoSurveillanceStation): 409 | self._surveillance = None 410 | return True 411 | return False 412 | 413 | @property 414 | def download_station(self) -> SynoDownloadStation: 415 | """Gets NAS DownloadStation.""" 416 | if not self._download: 417 | self._download = SynoDownloadStation(self) 418 | return 
self._download 419 | 420 | @property 421 | def information(self) -> SynoDSMInformation: 422 | """Gets NAS information.""" 423 | if not self._information: 424 | self._information = SynoDSMInformation(self) 425 | return self._information 426 | 427 | @property 428 | def network(self) -> SynoDSMNetwork: 429 | """Gets NAS network information.""" 430 | if not self._network: 431 | self._network = SynoDSMNetwork(self) 432 | return self._network 433 | 434 | @property 435 | def security(self) -> SynoCoreSecurity: 436 | """Gets NAS security information.""" 437 | if not self._security: 438 | self._security = SynoCoreSecurity(self) 439 | return self._security 440 | 441 | @property 442 | def share(self) -> SynoCoreShare: 443 | """Gets NAS shares information.""" 444 | if not self._share: 445 | self._share = SynoCoreShare(self) 446 | return self._share 447 | 448 | @property 449 | def storage(self) -> SynoStorage: 450 | """Gets NAS storage information.""" 451 | if not self._storage: 452 | self._storage = SynoStorage(self) 453 | return self._storage 454 | 455 | @property 456 | def surveillance_station(self) -> SynoSurveillanceStation: 457 | """Gets NAS SurveillanceStation.""" 458 | if not self._surveillance: 459 | self._surveillance = SynoSurveillanceStation(self) 460 | return self._surveillance 461 | 462 | @property 463 | def system(self) -> SynoCoreSystem: 464 | """Gets NAS system information.""" 465 | if not self._system: 466 | self._system = SynoCoreSystem(self) 467 | return self._system 468 | 469 | @property 470 | def upgrade(self) -> SynoCoreUpgrade: 471 | """Gets NAS upgrade information.""" 472 | if not self._upgrade: 473 | self._upgrade = SynoCoreUpgrade(self) 474 | return self._upgrade 475 | 476 | @property 477 | def utilisation(self) -> SynoCoreUtilization: 478 | """Gets NAS utilisation information.""" 479 | if not self._utilisation: 480 | self._utilisation = SynoCoreUtilization(self) 481 | return self._utilisation 482 | --------------------------------------------------------------------------------
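The listing above closes out SynologyDSM: discover_apis() and login() establish the session, each property lazily instantiates its module on first access, update() refreshes only the modules already instantiated, and reset() drops a module so update() stops fetching it. The short sketch below ties these pieces together; it is illustrative only and not part of the repository, and the host, port, credentials and the disk identifier "sda" are placeholder assumptions::

    from synology_dsm import SynologyDSM
    from synology_dsm.exceptions import SynologyDSMLoginFailedException
    from synology_dsm.exceptions import SynologyDSMRequestException

    # Placeholder NAS address and account -- replace with real values.
    api = SynologyDSM("192.168.1.10", 5001, "admin", "secret", use_https=True)

    try:
        api.login()  # may raise SynologyDSMLogin2SARequiredException if 2SA is enabled
    except (SynologyDSMLoginFailedException, SynologyDSMRequestException) as err:
        raise SystemExit(f"Could not authenticate against the NAS: {err}")

    # login() already populated the information module, so version is available.
    print("DSM version:", api.information.version)

    # First access instantiates the storage module; update() then refreshes it.
    storage = api.storage
    api.update(with_information=True)
    print("Disk temperature:", storage.disk_temp("sda"))  # "sda" is a hypothetical disk id

    api.logout()

Because update() only touches instantiated modules, callers pay only for the APIs they actually read.
--------------------------------------------------------------------------------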