├── .github ├── ISSUE_TEMPLATE │ └── config.yml ├── dependabot.yml ├── release-drafter.yml └── workflows │ ├── pr-labels.yaml │ ├── release-drafter.yml │ ├── release.yml │ └── test.yml ├── .gitignore ├── .pre-commit-config.yaml ├── DEVELOPMENT.md ├── Dockerfile ├── Dockerfile.dev ├── LICENSE ├── README.md ├── dashboard ├── .gitignore ├── README.md ├── note.md ├── package-lock.json ├── package.json ├── public │ └── index.html ├── rollup.config.mjs ├── script │ ├── build │ ├── develop │ └── setup ├── src │ ├── client │ │ ├── client-context.ts │ │ ├── client.ts │ │ ├── connection.ts │ │ ├── exceptions.ts │ │ └── models │ │ │ ├── descriptions.ts │ │ │ ├── model.ts │ │ │ └── node.ts │ ├── components │ │ ├── dialog-box │ │ │ ├── dialog-box.ts │ │ │ └── show-dialog-box.ts │ │ ├── dialogs │ │ │ ├── acl │ │ │ │ └── model.ts │ │ │ ├── binding │ │ │ │ ├── model.ts │ │ │ │ ├── node-binding-dialog.ts │ │ │ │ └── show-node-binding-dialog.ts │ │ │ └── commission-node-dialog │ │ │ │ ├── commission-node-dialog.ts │ │ │ │ ├── commission-node-existing.ts │ │ │ │ ├── commission-node-thread.ts │ │ │ │ ├── commission-node-wifi.ts │ │ │ │ └── show-commission-node-dialog.ts │ │ └── ha-svg-icon.ts │ ├── entrypoint │ │ └── main.ts │ ├── pages │ │ ├── components │ │ │ ├── context.ts │ │ │ ├── footer.ts │ │ │ ├── header.ts │ │ │ ├── node-details.ts │ │ │ └── server-details.ts │ │ ├── matter-cluster-view.ts │ │ ├── matter-dashboard-app.ts │ │ ├── matter-endpoint-view.ts │ │ ├── matter-node-view.ts │ │ └── matter-server-view.ts │ └── util │ │ ├── clone_class.ts │ │ ├── fire_event.ts │ │ ├── prevent_default.ts │ │ └── routing.ts └── tsconfig.json ├── docker-entrypoint.sh ├── docs ├── docker.md ├── matter_logo.svg ├── os_requirements.md └── websockets_api.md ├── main.py ├── matter_server ├── __init__.py ├── client │ ├── __init__.py │ ├── client.py │ ├── connection.py │ ├── exceptions.py │ └── models │ │ ├── __init__.py │ │ ├── device_types.py │ │ └── node.py ├── common │ ├── __init__.py │ ├── const.py │ ├── custom_clusters.py │ ├── errors.py │ ├── helpers │ │ ├── api.py │ │ ├── json.py │ │ ├── logger.py │ │ └── util.py │ └── models.py ├── py.typed └── server │ ├── __init__.py │ ├── __main__.py │ ├── client_handler.py │ ├── const.py │ ├── device_controller.py │ ├── helpers │ ├── __init__.py │ ├── attributes.py │ ├── custom_web_runner.py │ ├── paa_certificates.py │ └── utils.py │ ├── ota │ ├── __init__.py │ ├── dcl.py │ └── provider.py │ ├── sdk.py │ ├── server.py │ ├── stack.py │ ├── storage.py │ └── vendor_info.py ├── pyproject.toml ├── scripts ├── beautify_diagnostics.py ├── example.py ├── generate_descriptions.py ├── generate_devices.py ├── run-in-env.sh └── setup.sh └── tests ├── __init__.py ├── common ├── __init__.py ├── test_parser.py └── test_utils.py ├── fixtures ├── __init__.py ├── nodes │ ├── _fake_template │ ├── contact-sensor-example-app.json │ ├── fake-bridge-two-light.json │ ├── fake-temperature-sensor.json │ ├── fake_color_temperature_light.json │ ├── fake_extended_color_light.json │ ├── fake_flow_sensor.json │ ├── fake_humidity_sensor.json │ ├── fake_light_sensor.json │ ├── fake_pressure_sensor.json │ ├── lighting-example-app.json │ └── onoff-switch-example-app.json └── nodes_in_ha │ ├── contact-sensor-example-app.json │ ├── fake-bridge-two-light.json │ ├── fake-temperature-sensor.json │ ├── fake_color_temperature_light.json │ ├── fake_extended_color_light.json │ ├── fake_flow_sensor.json │ ├── fake_humidity_sensor.json │ ├── fake_light_sensor.json │ ├── fake_pressure_sensor.json │ ├── 
lighting-example-app.json │ └── onoff-switch-example-app.json ├── ruff.toml ├── server ├── __init__.py ├── ota │ ├── fixtures │ │ ├── 4442-67-197888.json │ │ ├── 4442-67-197910.json │ │ ├── 4442-67-198340.json │ │ ├── 4442-67.json │ │ ├── 4447-8194-1000.json │ │ ├── 4447-8194-1011-invalid.json │ │ ├── 4447-8194-1011-valid.json │ │ └── 4447-8194.json │ └── test_dcl.py └── test_server.py └── test_device_controller.py /.github/ISSUE_TEMPLATE/config.yml: -------------------------------------------------------------------------------- 1 | blank_issues_enabled: true 2 | contact_links: 3 | - name: I have an issue or bug regarding using Matter with Home Assistant 4 | url: https://github.com/home-assistant/core/issues?q=is:issue+is:open+label:%22integration:+matter%22 5 | about: This is the issue tracker for the Matter server library. Unless you are an application developer utilizing this library, please do not post issues directly here. Instead report them at our Home Assistant core project. 6 | - name: I'm looking for documentation how to setup Matter in Home Assistant 7 | url: https://www.home-assistant.io/integrations/matter/ 8 | about: We have extensive documentation for Matter on our website. 9 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | # To get started with Dependabot version updates, you'll need to specify which 2 | # package ecosystems to update and where the package manifests are located. 3 | # Please see the documentation for all configuration options: 4 | # https://help.github.com/github/administering-a-repository/configuration-options-for-dependency-updates 5 | 6 | version: 2 7 | updates: 8 | - package-ecosystem: "github-actions" 9 | directory: "/" 10 | schedule: 11 | interval: daily 12 | open-pull-requests-limit: 10 13 | - package-ecosystem: "pip" 14 | directory: "/" # Location of package manifests 15 | schedule: 16 | interval: "weekly" 17 | open-pull-requests-limit: 10 18 | -------------------------------------------------------------------------------- /.github/release-drafter.yml: -------------------------------------------------------------------------------- 1 | name-template: '$RESOLVED_VERSION' 2 | tag-template: '$RESOLVED_VERSION' 3 | change-template: '- #$NUMBER - $TITLE (@$AUTHOR)' 4 | categories: 5 | - title: "⚠ Breaking Changes" 6 | labels: 7 | - 'breaking-change' 8 | - title: '⬆️ Dependencies and CI' 9 | collapse-after: 1 10 | labels: 11 | - 'dependencies' 12 | - 'ci' 13 | template: | 14 | ## What’s Changed 15 | 16 | $CHANGES 17 | version-resolver: 18 | major: 19 | labels: 20 | - 'breaking-change' 21 | minor: 22 | labels: 23 | - 'new-feature' 24 | - 'enhancement' 25 | default: patch 26 | -------------------------------------------------------------------------------- /.github/workflows/pr-labels.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | name: PR Labels 3 | 4 | # yamllint disable-line rule:truthy 5 | on: 6 | pull_request: 7 | types: 8 | - synchronize 9 | - labeled 10 | - unlabeled 11 | branches: 12 | - main 13 | 14 | jobs: 15 | pr_labels: 16 | name: Verify 17 | runs-on: ubuntu-latest 18 | steps: 19 | - name: 🏷 Verify PR has a valid label 20 | uses: ludeeus/action-require-labels@1.1.0 21 | with: 22 | labels: >- 23 | breaking-change, bugfix, refactor, new-feature, maintenance, ci, dependencies, docs 24 | 
-------------------------------------------------------------------------------- /.github/workflows/release-drafter.yml: -------------------------------------------------------------------------------- 1 | name: Release Drafter 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | 8 | jobs: 9 | update_release_draft: 10 | runs-on: ubuntu-latest 11 | steps: 12 | # Drafts your next Release notes as Pull Requests are merged into "master" 13 | - uses: release-drafter/release-drafter@v6.1.0 14 | env: 15 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 16 | -------------------------------------------------------------------------------- /.github/workflows/release.yml: -------------------------------------------------------------------------------- 1 | name: Publish releases 2 | 3 | on: 4 | release: 5 | types: [published] 6 | env: 7 | PYTHON_VERSION: "3.12" 8 | NODE_VERSION: "18.x" 9 | 10 | jobs: 11 | build-and-publish-pypi: 12 | name: Builds and publishes releases to PyPI 13 | runs-on: ubuntu-latest 14 | outputs: 15 | version: ${{ steps.vars.outputs.tag }} 16 | steps: 17 | - uses: actions/checkout@v4.2.2 18 | - name: Get tag 19 | id: vars 20 | run: echo "tag=${GITHUB_REF#refs/*/}" >> $GITHUB_OUTPUT 21 | - name: Validate version number 22 | run: >- 23 | if [[ "${{ github.event.release.prerelease }}" == "true" ]]; then 24 | if ! [[ "${{ steps.vars.outputs.tag }}" =~ "b" ]]; then 25 | echo "Pre-release: Tag is missing beta suffix (${{ steps.vars.outputs.tag }})" 26 | exit 1 27 | fi 28 | else 29 | if [[ "${{ steps.vars.outputs.tag }}" =~ "b" ]]; then 30 | echo "Release: Tag must not have a beta suffix (${{ steps.vars.outputs.tag }})" 31 | exit 1 32 | fi 33 | fi 34 | - name: Set up Python ${{ env.PYTHON_VERSION }} 35 | uses: actions/setup-python@v5.6.0 36 | with: 37 | python-version: ${{ env.PYTHON_VERSION }} 38 | - name: Set up Node ${{ env.NODE_VERSION }} 39 | uses: actions/setup-node@v4 40 | with: 41 | node-version: ${{ env.NODE_VERSION }} 42 | - name: Install build 43 | run: >- 44 | pip install build tomli tomli-w 45 | - name: Set Python project version from tag 46 | shell: python 47 | run: |- 48 | import tomli 49 | import tomli_w 50 | 51 | with open("pyproject.toml", "rb") as f: 52 | pyproject = tomli.load(f) 53 | 54 | pyproject["project"]["version"] = "${{ steps.vars.outputs.tag }}" 55 | 56 | with open("pyproject.toml", "wb") as f: 57 | tomli_w.dump(pyproject, f) 58 | - name: Build dashboard 59 | run: | 60 | pip install -e . 
61 | dashboard/script/setup 62 | dashboard/script/build 63 | - name: Build python package 64 | run: >- 65 | python3 -m build 66 | - name: Publish release to PyPI 67 | uses: pypa/gh-action-pypi-publish@v1.12.4 68 | with: 69 | user: __token__ 70 | password: ${{ secrets.PYPI_TOKEN }} 71 | - name: Wait for PyPI 72 | run: sleep 300 73 | build-and-push-container-image: 74 | name: Builds and pushes the Matter Server container to ghcr.io 75 | runs-on: ubuntu-latest 76 | permissions: 77 | packages: write 78 | needs: build-and-publish-pypi 79 | steps: 80 | - uses: actions/checkout@v4.2.2 81 | - name: Log in to the GitHub container registry 82 | uses: docker/login-action@v3.4.0 83 | with: 84 | registry: ghcr.io 85 | username: ${{ github.repository_owner }} 86 | password: ${{ secrets.GITHUB_TOKEN }} 87 | - name: Set up Docker Buildx 88 | uses: docker/setup-buildx-action@v3.10.0 89 | - name: Version number for tags 90 | id: tags 91 | shell: bash 92 | run: |- 93 | patch=${GITHUB_REF#refs/*/} 94 | echo "patch=${patch}" >> $GITHUB_OUTPUT 95 | echo "minor=${patch%.*}" >> $GITHUB_OUTPUT 96 | echo "major=${patch%.*.*}" >> $GITHUB_OUTPUT 97 | - name: Build and Push release 98 | uses: docker/build-push-action@v6.18.0 99 | if: github.event.release.prerelease == false 100 | with: 101 | context: . 102 | platforms: linux/amd64,linux/arm64 103 | file: Dockerfile 104 | tags: |- 105 | ghcr.io/${{ github.repository_owner }}/python-matter-server:${{ steps.tags.outputs.patch }}, 106 | ghcr.io/${{ github.repository_owner }}/python-matter-server:${{ steps.tags.outputs.minor }}, 107 | ghcr.io/${{ github.repository_owner }}/python-matter-server:${{ steps.tags.outputs.major }}, 108 | ghcr.io/${{ github.repository_owner }}/python-matter-server:stable 109 | push: true 110 | build-args: "PYTHON_MATTER_SERVER=${{ needs.build-and-publish-pypi.outputs.version }}" 111 | - name: Build and Push pre-release 112 | uses: docker/build-push-action@v6.18.0 113 | if: github.event.release.prerelease == true 114 | with: 115 | context: . 
116 | platforms: linux/amd64,linux/arm64 117 | file: Dockerfile 118 | tags: |- 119 | ghcr.io/${{ github.repository_owner }}/python-matter-server:${{ steps.tags.outputs.patch }}, 120 | ghcr.io/${{ github.repository_owner }}/python-matter-server:beta 121 | push: true 122 | build-args: "PYTHON_MATTER_SERVER=${{ needs.build-and-publish-pypi.outputs.version }}" 123 | -------------------------------------------------------------------------------- /.github/workflows/test.yml: -------------------------------------------------------------------------------- 1 | # This workflow will install Python dependencies, run tests and lint 2 | # For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions 3 | 4 | name: Test 5 | 6 | on: 7 | push: 8 | branches: [main] 9 | pull_request: 10 | branches: [main] 11 | 12 | jobs: 13 | lint: 14 | runs-on: ubuntu-latest 15 | continue-on-error: true 16 | 17 | steps: 18 | - name: Check out code from GitHub 19 | uses: actions/checkout@v4.2.2 20 | - name: Set up Python 21 | uses: actions/setup-python@v5.6.0 22 | with: 23 | python-version: "3.12" 24 | - name: Install dependencies 25 | run: | 26 | sudo apt-get update 27 | python -m pip install --upgrade pip 28 | pip install -e .[server] 29 | pip install -e .[test] 30 | - name: Lint/test with pre-commit 31 | run: SKIP=no-commit-to-branch pre-commit run --all-files 32 | 33 | test: 34 | runs-on: ubuntu-latest 35 | continue-on-error: true 36 | strategy: 37 | fail-fast: false 38 | matrix: 39 | python-version: 40 | - "3.12" 41 | - "3.13" 42 | 43 | steps: 44 | - name: Check out code from GitHub 45 | uses: actions/checkout@v4.2.2 46 | - name: Set up Python ${{ matrix.python-version }} 47 | uses: actions/setup-python@v5.6.0 48 | with: 49 | python-version: ${{ matrix.python-version }} 50 | - name: Install dependencies 51 | run: | 52 | sudo apt-get update 53 | sudo apt-get install -y libgirepository1.0-dev 54 | python -m pip install --upgrade pip 55 | pip install -e .[server] 56 | pip install -e .[test] 57 | - name: Pytest 58 | run: pytest --durations 10 --cov-report term-missing --cov=matter_server --cov-report=xml tests/ 59 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | share/python-wheels/ 24 | *.egg-info/ 25 | .installed.cfg 26 | *.egg 27 | MANIFEST 28 | 29 | # PyInstaller 30 | # Usually these files are written by a python script from a template 31 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
32 | *.manifest 33 | *.spec 34 | 35 | # Installer logs 36 | pip-log.txt 37 | pip-delete-this-directory.txt 38 | 39 | # Unit test / coverage reports 40 | htmlcov/ 41 | .tox/ 42 | .nox/ 43 | .coverage 44 | .coverage.* 45 | .cache 46 | nosetests.xml 47 | coverage.xml 48 | *.cover 49 | *.py,cover 50 | .hypothesis/ 51 | .pytest_cache/ 52 | cover/ 53 | 54 | # Translations 55 | *.mo 56 | *.pot 57 | 58 | # Django stuff: 59 | *.log 60 | local_settings.py 61 | db.sqlite3 62 | db.sqlite3-journal 63 | 64 | # Flask stuff: 65 | instance/ 66 | .webassets-cache 67 | 68 | # Scrapy stuff: 69 | .scrapy 70 | 71 | # Sphinx documentation 72 | docs/_build/ 73 | 74 | # PyBuilder 75 | .pybuilder/ 76 | target/ 77 | 78 | # Jupyter Notebook 79 | .ipynb_checkpoints 80 | 81 | # IPython 82 | profile_default/ 83 | ipython_config.py 84 | 85 | # pyenv 86 | # For a library or package, you might want to ignore these files since the code is 87 | # intended to run in multiple environments; otherwise, check them in: 88 | # .python-version 89 | 90 | # pipenv 91 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 92 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 93 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 94 | # install all needed dependencies. 95 | #Pipfile.lock 96 | 97 | # poetry 98 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. 99 | # This is especially recommended for binary packages to ensure reproducibility, and is more 100 | # commonly ignored for libraries. 101 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control 102 | #poetry.lock 103 | 104 | # pdm 105 | # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. 106 | #pdm.lock 107 | # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it 108 | # in version control. 109 | # https://pdm.fming.dev/#use-with-ide 110 | .pdm.toml 111 | 112 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm 113 | __pypackages__/ 114 | 115 | # Celery stuff 116 | celerybeat-schedule 117 | celerybeat.pid 118 | 119 | # SageMath parsed files 120 | *.sage.py 121 | 122 | # Environments 123 | .env 124 | .venv 125 | env/ 126 | venv/ 127 | ENV/ 128 | env.bak/ 129 | venv.bak/ 130 | 131 | # Spyder project settings 132 | .spyderproject 133 | .spyproject 134 | 135 | # Rope project settings 136 | .ropeproject 137 | 138 | # mkdocs documentation 139 | /site 140 | 141 | # mypy 142 | .mypy_cache/ 143 | .dmypy.json 144 | dmypy.json 145 | 146 | # Pyre type checker 147 | .pyre/ 148 | 149 | # pytype static type analyzer 150 | .pytype/ 151 | 152 | # Cython debug symbols 153 | cython_debug/ 154 | 155 | # PyCharm 156 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can 157 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore 158 | # and can be added to the global gitignore or merged into this file. For a more nuclear 159 | # option (not recommended) you can uncomment the following to ignore the entire idea folder. 
160 | #.idea/ 161 | 162 | # Visual Studio Code 163 | .vscode/* 164 | 165 | .DS_Store 166 | credentials/ 167 | venv39/ 168 | 169 | # ruff 170 | .ruff_cache/ 171 | 172 | # prebuilt dashboard files 173 | matter_server/dashboard/ 174 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | # See https://pre-commit.com for more information 2 | # See https://pre-commit.com/hooks.html for more hooks 3 | repos: 4 | - repo: local 5 | hooks: 6 | - id: ruff-check 7 | name: 🐶 Ruff Linter 8 | language: system 9 | types: [python] 10 | entry: scripts/run-in-env.sh ruff check --fix 11 | require_serial: true 12 | stages: [pre-commit, pre-push, manual] 13 | - id: ruff-format 14 | name: 🐶 Ruff Formatter 15 | language: system 16 | types: [python] 17 | entry: scripts/run-in-env.sh ruff format 18 | require_serial: true 19 | stages: [pre-commit, pre-push, manual] 20 | - id: check-ast 21 | name: 🐍 Check Python AST 22 | language: system 23 | types: [python] 24 | entry: scripts/run-in-env.sh check-ast 25 | - id: check-case-conflict 26 | name: 🔠 Check for case conflicts 27 | language: system 28 | entry: scripts/run-in-env.sh check-case-conflict 29 | - id: check-docstring-first 30 | name: ℹ️ Check docstring is first 31 | language: system 32 | types: [python] 33 | entry: scripts/run-in-env.sh check-docstring-first 34 | - id: check-executables-have-shebangs 35 | name: 🧐 Check that executables have shebangs 36 | language: system 37 | types: [text, executable] 38 | entry: scripts/run-in-env.sh check-executables-have-shebangs 39 | stages: [pre-commit, pre-push, manual] 40 | - id: check-json 41 | name: { Check JSON files 42 | language: system 43 | types: [json] 44 | entry: scripts/run-in-env.sh check-json 45 | - id: check-merge-conflict 46 | name: 💥 Check for merge conflicts 47 | language: system 48 | types: [text] 49 | entry: scripts/run-in-env.sh check-merge-conflict 50 | - id: check-symlinks 51 | name: 🔗 Check for broken symlinks 52 | language: system 53 | types: [symlink] 54 | entry: scripts/run-in-env.sh check-symlinks 55 | - id: check-toml 56 | name: ✅ Check TOML files 57 | language: system 58 | types: [toml] 59 | entry: scripts/run-in-env.sh check-toml 60 | - id: codespell 61 | name: ✅ Check code for common misspellings 62 | language: system 63 | types: [text] 64 | entry: scripts/run-in-env.sh codespell 65 | - id: detect-private-key 66 | name: 🕵️ Detect Private Keys 67 | language: system 68 | types: [text] 69 | entry: scripts/run-in-env.sh detect-private-key 70 | - id: end-of-file-fixer 71 | name: ⮐ Fix End of Files 72 | language: system 73 | types: [text] 74 | entry: scripts/run-in-env.sh end-of-file-fixer 75 | stages: [pre-commit, pre-push, manual] 76 | - id: no-commit-to-branch 77 | name: 🛑 Don't commit to main branch 78 | language: system 79 | entry: scripts/run-in-env.sh no-commit-to-branch 80 | pass_filenames: false 81 | always_run: true 82 | args: 83 | - --branch=main 84 | - id: pylint 85 | name: 🌟 Starring code with pylint 86 | language: system 87 | types: [python] 88 | entry: scripts/run-in-env.sh pylint matter_server/ tests/ 89 | require_serial: true 90 | pass_filenames: false 91 | - id: trailing-whitespace 92 | name: ✄ Trim Trailing Whitespace 93 | language: system 94 | types: [text] 95 | entry: scripts/run-in-env.sh trailing-whitespace-fixer 96 | stages: [pre-commit, pre-push, manual] 97 | - id: mypy 98 | name: mypy 99 | entry: scripts/run-in-env.sh mypy 100 | language: 
script 101 | types: [python] 102 | require_serial: true 103 | files: ^(matter_server|pylint)/.+\.py$ 104 | -------------------------------------------------------------------------------- /DEVELOPMENT.md: -------------------------------------------------------------------------------- 1 | # Setting up your development environment 2 | 3 | **For enabling Matter support within Home Assistant, please refer to the Home Assistant documentation. These instructions are for development only!** 4 | 5 | Development is only possible on a (recent) Linux or macOS machine. Other operating systems are **not supported**. See [here](docs/os_requirements.md) for a full list of requirements for the OS and network, especially if you plan on communicating with Thread-based devices. 6 | 7 | - Download/clone the repo to your local machine. 8 | - Set up the development environment: `scripts/setup.sh` 9 | - Create the `/data` directory if it does not exist, with permissions for the user running the python-matter-server. 10 | 11 | ## Start Matter server 12 | 13 | You can check out the [example script](/scripts/example.py) in the scripts folder for generic directions on how to run the client and server. 14 | 15 | - To run the server in `info` log-level, you can run: `python -m matter_server.server` 16 | - To start the server in `debug` log-level, you can run: `python -m matter_server.server --log-level debug` 17 | - To start the server with the SDK in `progress` log-level, you can run: `python -m matter_server.server --log-level-sdk progress`. This will display more information from the Matter SDK (C++) side of the Matter Server. 18 | 19 | Use `--help` to get a list of possible log levels and other command line arguments. 20 | 21 | The server runs a Matter Controller and includes all logic for storing node information, interviews and subscriptions. To interact with this controller we've created a small WebSocket API with an RPC-like interface. The library contains a client as a reference implementation, which in turn is used by Home Assistant. Splitting the server from the client allows multiple consumers to communicate with the same Matter fabric, and lets the fabric keep running while a consumer (e.g. Home Assistant) is down. 22 | 23 | If you happen to get `OSError: [Errno 105] No buffer space available.`, increase the IPv4 group limits with: 24 | ``` 25 | echo "net.ipv4.igmp_max_memberships=1024" | sudo tee -a /etc/sysctl.d/local.conf 26 | sudo service procps force-reload 27 | ``` 28 | 29 | ## Python client library only 30 | 31 | There is also a Python client library hosted in this repository (used by Home Assistant), which consumes the WebSocket API published by the server. 32 | 33 | The client library has a dependency on the chip/matter clusters package, which contains all (Cluster) models and is OS/platform independent. The server library depends on the Matter Core SDK (still named CHIP), which is architecture- and OS-specific. We build (and publish) wheels for Linux (amd64 and aarch64) to PyPI, but for other platforms (like macOS) you will need to build those wheels yourself using the exact same version of the SDK as we use for the clusters package.
Take a look at our build script for directions: https://github.com/home-assistant-libs/chip-wheels/blob/main/.github/workflows/build.yaml 34 | 35 | To only install the client part: `pip install python-matter-server` 36 | 37 | ## Websocket commands 38 | 39 | [Websocket documentation](/docs/websockets_api.md) 40 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM python:3.12-slim-bookworm 2 | 3 | # Set shell 4 | SHELL ["/bin/bash", "-o", "pipefail", "-c"] 5 | 6 | WORKDIR /app 7 | 8 | RUN \ 9 | set -x \ 10 | && apt-get update \ 11 | && apt-get install -y --no-install-recommends \ 12 | curl \ 13 | libuv1 \ 14 | zlib1g \ 15 | libjson-c5 \ 16 | libnl-3-200 \ 17 | libnl-route-3-200 \ 18 | unzip \ 19 | gdb \ 20 | iputils-ping \ 21 | iproute2 \ 22 | && apt-get purge -y --auto-remove \ 23 | && rm -rf \ 24 | /var/lib/apt/lists/* \ 25 | /usr/src/* 26 | 27 | ARG PYTHON_MATTER_SERVER 28 | 29 | ENV chip_example_url "https://github.com/home-assistant-libs/matter-linux-ota-provider/releases/download/2025.5.0" 30 | ARG TARGETPLATFORM 31 | 32 | RUN \ 33 | set -x \ 34 | && echo "${TARGETPLATFORM}" \ 35 | && if [ "${TARGETPLATFORM}" = "linux/amd64" ]; then \ 36 | curl -Lo /usr/local/bin/chip-ota-provider-app "${chip_example_url}/chip-ota-provider-app-x86-64"; \ 37 | elif [ "${TARGETPLATFORM}" = "linux/arm64" ]; then \ 38 | curl -Lo /usr/local/bin/chip-ota-provider-app "${chip_example_url}/chip-ota-provider-app-aarch64"; \ 39 | else \ 40 | exit 1; \ 41 | fi \ 42 | && chmod +x /usr/local/bin/chip-ota-provider-app 43 | 44 | # hadolint ignore=DL3013 45 | RUN \ 46 | pip3 install --no-cache-dir "python-matter-server[server]==${PYTHON_MATTER_SERVER}" 47 | 48 | VOLUME ["/data"] 49 | EXPOSE 5580 50 | 51 | ENTRYPOINT [ "matter-server" ] 52 | CMD [ "--storage-path", "/data", "--paa-root-cert-dir", "/data/credentials" ] 53 | -------------------------------------------------------------------------------- /Dockerfile.dev: -------------------------------------------------------------------------------- 1 | FROM python:3.11-slim-bullseye 2 | 3 | # Set shell 4 | SHELL ["/bin/bash", "-o", "pipefail", "-c"] 5 | 6 | RUN \ 7 | set -x \ 8 | && apt-get update \ 9 | && apt-get install -y --no-install-recommends \ 10 | libuv1 \ 11 | zlib1g \ 12 | libjson-c5 \ 13 | libnl-3-200 \ 14 | libnl-route-3-200 \ 15 | unzip \ 16 | libcairo2 \ 17 | gdb \ 18 | curl 19 | 20 | RUN \ 21 | curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.39.7/install.sh | bash \ 22 | && export NVM_DIR="$HOME/.nvm" \ 23 | && [ -s "$NVM_DIR/nvm.sh" ] && \. "$NVM_DIR/nvm.sh" \ 24 | && nvm install 18 25 | 26 | RUN \ 27 | apt-get purge -y --auto-remove \ 28 | && rm -rf \ 29 | /var/lib/apt/lists/* \ 30 | /usr/src/* 31 | 32 | WORKDIR /app 33 | COPY . 
./ 34 | 35 | WORKDIR /app/dashboard/script 36 | RUN ./setup 37 | RUN ./build 38 | 39 | WORKDIR /app 40 | 41 | # hadolint ignore=DL3013 42 | RUN \ 43 | pip3 install -U pip && \ 44 | pip3 install --no-cache-dir .[server] 45 | 46 | VOLUME ["/data"] 47 | EXPOSE 5580 48 | ENTRYPOINT ["./docker-entrypoint.sh"] 49 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Open Home Foundation Matter Server 2 | 3 | ![Matter Logo](docs/matter_logo.svg) 4 | 5 | The Open Home Foundation Matter Server is an [officially certified](https://csa-iot.org/csa_product/open-home-foundation-matter-server/) Software Component to create a Matter controller. It serves as the foundation to provide Matter support to [Home Assistant](https://home-assistant.io), but its universal approach makes it suitable for use in other projects too. 6 | 7 | This project implements a Matter Controller Server over WebSockets using the 8 | [official Matter (formerly CHIP) SDK](https://github.com/project-chip/connectedhomeip) 9 | as a base and provides both a server and client implementation. 10 | 11 | The Open Home Foundation Matter Server software component is funded by [Nabu Casa](https://www.nabucasa.com/) (a member of the CSA) and donated to [The Open Home Foundation](https://www.openhomefoundation.org/). 12 | 13 | ## Support 14 | 15 | For developers making use of this component or contributing to it: use the issue tracker within this repository and/or reach out on Discord. 16 | 17 | For users of Home Assistant, seek support in the official Home Assistant support channels. 18 | 19 | - The Home Assistant [Community Forum](https://community.home-assistant.io/). 20 | - The Home Assistant [Discord Chat Server](https://discord.gg/c5DvZ4e). 21 | - Join [the /r/homeassistant subreddit on Reddit](https://reddit.com/r/homeassistant). 22 | 23 | - If you experience issues using Matter with Home Assistant, please open an issue 24 | report in the [Home Assistant Core repository](https://github.com/home-assistant/core/issues/new/choose). 25 | 26 | Please do not create Home Assistant end-user support issues in the issue tracker of this repository. 27 | 28 | ## Development 29 | 30 | Want to help out with development, testing, and/or documentation? Great! As both this project and Matter keep evolving, there will be a lot to improve. Reach out to us on Discord if you want to help out. 31 | 32 | [Development documentation](DEVELOPMENT.md) 33 | 34 | ## Installation / Running the Matter Server 35 | 36 | - End users of Home Assistant: refer to the [Home Assistant documentation](https://www.home-assistant.io/integrations/matter/) for how to run Matter in Home Assistant using the official Matter Server add-on, which is based on this project. 37 | 38 | - For running the server and/or client in your development environment, see the [Development documentation](DEVELOPMENT.md). 39 | 40 | - For running the Matter Server as a standalone Docker container, see our instructions [here](docs/docker.md). 41 | 42 | > [!NOTE] 43 | > Both Matter and this implementation are in an early state and features may be missing or could be improved. See our [development notes](#development) for how you can help out with development and/or testing.
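
To give a feel for how a consumer talks to the server described above, here is a minimal sketch using the Python client library that ships in this repository. It is illustrative only: the import path, constructor signature and method names are assumptions based on this repository's client package (see `scripts/example.py` and `docs/websockets_api.md` for authoritative usage) and may differ between releases.

```python
# Minimal sketch, not authoritative: class/method names and the WebSocket URL/path
# are assumptions based on matter_server.client in this repository; verify against
# scripts/example.py for your installed version.
import asyncio

import aiohttp

from matter_server.client.client import MatterClient  # assumed import path


async def main() -> None:
    async with aiohttp.ClientSession() as session:
        # A locally started server (python -m matter_server.server) listens on port 5580.
        client = MatterClient("ws://localhost:5580/ws", session)
        await client.connect()

        # Listen in the background and wait until the initial node data has arrived.
        ready = asyncio.Event()
        listen_task = asyncio.create_task(client.start_listening(ready))
        await ready.wait()

        # Print basic info for every commissioned node known to the server.
        for node in client.get_nodes():
            print(node.node_id, node.available)

        listen_task.cancel()


if __name__ == "__main__":
    asyncio.run(main())
```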
44 | -------------------------------------------------------------------------------- /dashboard/.gitignore: -------------------------------------------------------------------------------- 1 | node_modules 2 | dist 3 | -------------------------------------------------------------------------------- /dashboard/README.md: -------------------------------------------------------------------------------- 1 | # Matter Dashboard 2 | 3 | This is the dashboard for the Python Matter Server project. It is meant to be used for debugging and testing. 4 | 5 | ## Development 6 | 7 | Install the dependencies: 8 | 9 | ```bash 10 | script/setup 11 | ``` 12 | 13 | NOTE: The python matter server (dependencies) also need to be present in order for the setup to succeed. 14 | 15 | Run the development server: 16 | 17 | ```bash 18 | script/develop 19 | ``` 20 | 21 | The dashboard will be available at [http://localhost:5010](http://localhost:5010). When you open it from localhost, it will ask you for your websocket server URL. 22 | 23 | The websocket URL of the Home Assistant add-on will be something like `ws://homeassistant.local:5580`. If you are running the Python Matter Server locally, it will be `ws://localhost:5580`. 24 | 25 | If you want to use the dashboard with the Python Matter Server Home Assistant add-on, you need to configure it to make the WebSocket server available on the network. Go to the [add-on info page](https://my.home-assistant.io/redirect/supervisor_addon/?addon=core_matter_server), click on Configuration. Under "Network", show disabled ports and enter the port you want to use for the WebSocket server (e.g. 5580). Then, click "save" and restart the add-on when prompted. 26 | 27 | ## Production build 28 | 29 | The production build is generated when you run 30 | 31 | ```bash 32 | script/build 33 | ``` 34 | 35 | The production build is generated into the matter_server folder, to be picked up by 36 | the webserver of the python matter server. 37 | -------------------------------------------------------------------------------- /dashboard/note.md: -------------------------------------------------------------------------------- 1 | ``` 2 |
5 | ${"privilege:" + entry.privilege} 6 | ${"authMode:" + entry.authMode} 7 | ${"fabricIndex:" + entry.fabricIndex} 8 | 9 | subjects:${JSON.stringify(entry.subjects)} 10 | targets:${JSON.stringify(entry.targets)}
11 | 12 | ``` 13 | -------------------------------------------------------------------------------- /dashboard/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "matter-dashboard", 3 | "version": "1.0.0", 4 | "description": "Dashboard for Python Matter Server", 5 | "repository": "https://github.com/home-assistant-libs/python-matter-server/", 6 | "author": "Home Assistant maintainers", 7 | "license": "Apache-2.0", 8 | "scripts": { 9 | "prepublishOnly": "script/build" 10 | }, 11 | "devDependencies": { 12 | "@babel/preset-env": "^7.23.9", 13 | "@rollup/plugin-babel": "^6.0.4", 14 | "@rollup/plugin-commonjs": "^25.0.7", 15 | "@rollup/plugin-json": "^6.1.0", 16 | "@rollup/plugin-node-resolve": "^15.2.3", 17 | "@rollup/plugin-terser": "^0.4.4", 18 | "@rollup/plugin-typescript": "^11.1.6", 19 | "prettier": "^3.2.5", 20 | "rollup": "^4.10.0", 21 | "serve": "^14.2.1", 22 | "typescript": "^5.3.3" 23 | }, 24 | "dependencies": { 25 | "@lit/context": "^1.1.2", 26 | "@material/web": "^1.2.0", 27 | "@mdi/js": "^7.4.47", 28 | "lit": "^3.1.2", 29 | "memoize-one": "^6.0.0", 30 | "tslib": "^2.6.2" 31 | } 32 | } 33 | -------------------------------------------------------------------------------- /dashboard/public/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | Python Matter Server 7 | 8 | 38 | 39 | 40 | 41 | -------------------------------------------------------------------------------- /dashboard/rollup.config.mjs: -------------------------------------------------------------------------------- 1 | import nodeResolve from "@rollup/plugin-node-resolve"; 2 | import json from "@rollup/plugin-json"; 3 | import terser from "@rollup/plugin-terser"; 4 | import babel from "@rollup/plugin-babel"; 5 | import commonjs from "@rollup/plugin-commonjs"; 6 | 7 | const config = { 8 | input: "dist/entrypoint/main.js", 9 | output: { 10 | dir: "dist/web/js", 11 | format: "module", 12 | }, 13 | preserveEntrySignatures: false, 14 | plugins: [ 15 | commonjs(), 16 | nodeResolve({ 17 | browser: true, 18 | preferBuiltins: false, 19 | }), 20 | babel({ 21 | babelHelpers: "bundled", 22 | presets: [ 23 | [ 24 | "@babel/preset-env", 25 | { 26 | targets: { 27 | chrome: "84", 28 | }, 29 | }, 30 | ], 31 | ], 32 | }), 33 | json(), 34 | ], 35 | }; 36 | 37 | if (process.env.NODE_ENV === "production") { 38 | config.plugins.push( 39 | terser({ 40 | ecma: 2019, 41 | toplevel: true, 42 | format: { 43 | comments: false, 44 | }, 45 | }) 46 | ); 47 | } 48 | 49 | export default config; 50 | -------------------------------------------------------------------------------- /dashboard/script/build: -------------------------------------------------------------------------------- 1 | #!/bin/bash -i 2 | 3 | # Stop on errors 4 | set -e 5 | 6 | cd "$(dirname "$0")/.." 7 | 8 | rm -rf dist 9 | rm -rf ../matter_server/dashboard 10 | NODE_ENV=production npm exec -- tsc 11 | NODE_ENV=production npm exec -- rollup -c 12 | cp -r public/* dist/web 13 | mv dist/web ../matter_server/dashboard 14 | -------------------------------------------------------------------------------- /dashboard/script/develop: -------------------------------------------------------------------------------- 1 | #!/bin/bash -i 2 | 3 | # Stop on errors 4 | set -e 5 | 6 | cd "$(dirname "$0")/.." 
7 | 8 | rm -rf dist 9 | mkdir -p dist/web 10 | cp -r public/* dist/web 11 | 12 | # Quit all background tasks when script exits 13 | trap "kill 0" EXIT 14 | 15 | # Run tsc once as rollup expects those files 16 | npm exec -- tsc || true 17 | 18 | npm exec -- serve -p 5010 dist/web & 19 | npm exec -- tsc --watch & 20 | npm exec -- rollup -c --watch 21 | -------------------------------------------------------------------------------- /dashboard/script/setup: -------------------------------------------------------------------------------- 1 | #!/bin/bash -i 2 | 3 | # Stop on errors 4 | set -e 5 | 6 | cd "$(dirname "$0")/.." 7 | 8 | # Install node modules 9 | npm install 10 | 11 | # generate descriptions file (from python source) 12 | cd ".." 13 | python3 scripts/generate_descriptions.py 14 | -------------------------------------------------------------------------------- /dashboard/src/client/client-context.ts: -------------------------------------------------------------------------------- 1 | import { createContext } from "@lit/context"; 2 | import type { MatterClient } from "./client"; 3 | 4 | export const clientContext = createContext('client'); 5 | -------------------------------------------------------------------------------- /dashboard/src/client/connection.ts: -------------------------------------------------------------------------------- 1 | import { CommandMessage, ServerInfoMessage } from "./models/model"; 2 | 3 | export class Connection { 4 | public serverInfo?: ServerInfoMessage = undefined; 5 | 6 | private socket?: WebSocket; 7 | 8 | constructor(public ws_server_url: string) { 9 | this.ws_server_url = ws_server_url; 10 | } 11 | 12 | get connected() { 13 | return this.socket?.readyState === WebSocket.OPEN; 14 | } 15 | 16 | async connect(onMessage: (msg: Record) => void, onConnectionLost: () => void) { 17 | if (this.socket) { 18 | throw new Error("Already connected"); 19 | } 20 | 21 | console.debug("Trying to connect"); 22 | 23 | return new Promise((resolve, reject) => { 24 | this.socket = new WebSocket(this.ws_server_url); 25 | 26 | this.socket.onopen = () => { 27 | console.log("WebSocket Connected"); 28 | }; 29 | 30 | this.socket.onclose = (event) => { 31 | console.log( 32 | `WebSocket Closed: Code=${event.code}, Reason=${event.reason}` 33 | ); 34 | onConnectionLost(); 35 | }; 36 | 37 | this.socket.onerror = (error) => { 38 | console.error("WebSocket Error: ", error); 39 | console.dir(error); 40 | reject(new Error("WebSocket Error")); 41 | }; 42 | 43 | this.socket.onmessage = (event: MessageEvent) => { 44 | const data = JSON.parse(event.data); 45 | console.log("WebSocket OnMessage", data); 46 | if (!this.serverInfo) { 47 | this.serverInfo = data; 48 | resolve(undefined); 49 | return; 50 | } 51 | onMessage(data); 52 | }; 53 | }); 54 | } 55 | 56 | disconnect() { 57 | if (this.socket) { 58 | this.socket.close(); 59 | this.socket = undefined; 60 | } 61 | } 62 | 63 | sendMessage(message: CommandMessage): void { 64 | if (!this.socket) { 65 | throw new Error("Not connected"); 66 | } 67 | console.log("WebSocket send message", message); 68 | this.socket.send(JSON.stringify(message)); 69 | } 70 | } 71 | 72 | export default Connection; 73 | -------------------------------------------------------------------------------- /dashboard/src/client/exceptions.ts: -------------------------------------------------------------------------------- 1 | export class MatterError extends Error {} 2 | 3 | export class InvalidServerVersion extends MatterError {} 4 | 
-------------------------------------------------------------------------------- /dashboard/src/client/models/node.ts: -------------------------------------------------------------------------------- 1 | export class MatterNode { 2 | node_id: number; 3 | date_commissioned: string; // Dates will be strings in JSON 4 | last_interview: string; 5 | interview_version: number; 6 | available: boolean; 7 | is_bridge: boolean; 8 | attributes: { [key: string]: any }; 9 | attribute_subscriptions: Array<[number | null, number | null, number | null]>; 10 | 11 | constructor(public data: Record) { 12 | this.node_id = data.node_id; 13 | this.date_commissioned = data.date_commissioned; 14 | this.last_interview = data.last_interview; 15 | this.interview_version = data.interview_version; 16 | this.available = data.available; 17 | this.is_bridge = data.is_bridge; 18 | this.attributes = data.attributes; 19 | this.attribute_subscriptions = data.attribute_subscriptions; 20 | } 21 | 22 | get nodeLabel(): string { 23 | const label = this.attributes["0/40/5"]; 24 | if (!label) return ''; 25 | if (label.includes("\u0000\u0000")) return ''; 26 | return label.trim(); 27 | } 28 | 29 | get vendorName(): string { 30 | return this.attributes["0/40/1"]; 31 | } 32 | 33 | get productName(): string { 34 | return this.attributes["0/40/3"]; 35 | } 36 | 37 | get serialNumber(): string { 38 | return this.attributes["0/40/15"]; 39 | } 40 | 41 | get updateState(): number | undefined { 42 | return this.attributes["0/42/2"]; 43 | } 44 | 45 | get updateStateProgress(): number | undefined { 46 | return this.attributes["0/42/3"]; 47 | } 48 | 49 | update(data: Record): MatterNode { 50 | return new MatterNode({ ...this.data, ...data }); 51 | } 52 | } 53 | -------------------------------------------------------------------------------- /dashboard/src/components/dialog-box/dialog-box.ts: -------------------------------------------------------------------------------- 1 | import "@material/web/button/text-button"; 2 | import "@material/web/dialog/dialog"; 3 | import { html, LitElement } from "lit"; 4 | import { customElement, property } from "lit/decorators.js"; 5 | import { preventDefault } from "../../util/prevent_default"; 6 | import type { PromptDialogBoxParams } from "./show-dialog-box"; 7 | @customElement("dialox-box") 8 | export class DialogBox extends LitElement { 9 | @property({ attribute: false }) public params!: PromptDialogBoxParams; 10 | 11 | @property({ attribute: false }) public dialogResult!: ( 12 | result: boolean 13 | ) => void; 14 | 15 | @property() public type!: "alert" | "prompt"; 16 | 17 | protected render() { 18 | const params = this.params; 19 | return html` 20 | 21 | ${params.title ? html`
${params.title} ` : ""} 22 | ${params.text ? html` ${params.text} ` : ""} 23 | 24 | ${this.type === "prompt" 25 | ? html` 26 | 27 | ${params.cancelText || "Cancel"} 28 | 29 | ` 30 | : ""} 31 | 32 | ${params.confirmText || "OK"} 33 | 34 | 35 |
36 | `; 37 | } 38 | 39 | private _cancel() { 40 | this._setResult(false); 41 | } 42 | 43 | private _confirm() { 44 | this._setResult(true); 45 | } 46 | 47 | _setResult(result: boolean) { 48 | this.dialogResult(result); 49 | this.shadowRoot!.querySelector("md-dialog")!.close(); 50 | } 51 | 52 | private _handleClosed() { 53 | this.parentElement!.removeChild(this); 54 | } 55 | } 56 | 57 | declare global { 58 | interface HTMLElementTagNameMap { 59 | "dialox-box": DialogBox; 60 | } 61 | } 62 | -------------------------------------------------------------------------------- /dashboard/src/components/dialog-box/show-dialog-box.ts: -------------------------------------------------------------------------------- 1 | import type { TemplateResult } from "lit"; 2 | 3 | interface BaseDialogBoxParams { 4 | confirmText?: string; 5 | text: string | TemplateResult; 6 | title: string; 7 | } 8 | 9 | export interface PromptDialogBoxParams extends BaseDialogBoxParams { 10 | cancelText?: string; 11 | } 12 | 13 | const showDialogBox = async ( 14 | type: "alert" | "prompt", 15 | dialogParams: PromptDialogBoxParams 16 | ) => { 17 | await import("./dialog-box"); 18 | return new Promise((resolve) => { 19 | const dialog = document.createElement("dialox-box"); 20 | dialog.params = dialogParams; 21 | dialog.dialogResult = resolve; 22 | dialog.type = type; 23 | document.body.appendChild(dialog); 24 | }); 25 | }; 26 | 27 | export const showAlertDialog = (dialogParams: BaseDialogBoxParams) => 28 | showDialogBox("alert", dialogParams); 29 | 30 | export const showPromptDialog = (dialogParams: BaseDialogBoxParams) => 31 | showDialogBox("prompt", dialogParams); 32 | -------------------------------------------------------------------------------- /dashboard/src/components/dialogs/acl/model.ts: -------------------------------------------------------------------------------- 1 | 2 | export type AccessControlEntryRawInput = { 3 | "1": number; 4 | "2": number; 5 | "3": number[]; 6 | "4": null; 7 | "254": number; 8 | }; 9 | 10 | export type AccessControlTargetStruct = { 11 | cluster: number | undefined; 12 | endpoint: number | undefined; 13 | deviceType: number | undefined; 14 | }; 15 | 16 | export type AccessControlEntryStruct = { 17 | privilege: number; 18 | authMode: number; 19 | subjects: number[]; 20 | targets: AccessControlTargetStruct[] | undefined; 21 | fabricIndex: number; 22 | }; 23 | 24 | export class AccessControlTargetTransformer { 25 | private static readonly KEY_MAPPING: { 26 | [inputKey: string]: keyof AccessControlTargetStruct; 27 | } = { 28 | "0": "cluster", 29 | "1": "endpoint", 30 | "2": "deviceType", 31 | }; 32 | 33 | public static transform(input: any): AccessControlTargetStruct { 34 | if (!input || typeof input !== "object") { 35 | throw new Error("Invalid input: expected an object"); 36 | } 37 | 38 | const result: Partial = {}; 39 | const keyMapping = AccessControlTargetTransformer.KEY_MAPPING; 40 | 41 | for (const key in input) { 42 | if (key in keyMapping) { 43 | const mappedKey = keyMapping[key]; 44 | if (mappedKey) { 45 | const value = input[key]; 46 | if (value === undefined) continue; 47 | result[mappedKey] = value; 48 | } 49 | } 50 | } 51 | return result as AccessControlTargetStruct; 52 | } 53 | } 54 | 55 | export class AccessControlEntryDataTransformer { 56 | private static readonly KEY_MAPPING: { 57 | [inputKey: string]: keyof AccessControlEntryStruct; 58 | } = { 59 | "1": "privilege", 60 | "2": "authMode", 61 | "3": "subjects", 62 | "4": "targets", 63 | "254": "fabricIndex", 64 | }; 65 | 66 | 
public static transform(input: any): AccessControlEntryStruct { 67 | if (!input || typeof input !== "object") { 68 | throw new Error("Invalid input: expected an object"); 69 | } 70 | 71 | const result: Partial = {}; 72 | const keyMapping = AccessControlEntryDataTransformer.KEY_MAPPING; 73 | 74 | for (const key in input) { 75 | if (key in keyMapping) { 76 | const mappedKey = keyMapping[key]; 77 | if (mappedKey) { 78 | const value = input[key]; 79 | if (value === undefined) continue; 80 | if (mappedKey === "subjects") { 81 | result[mappedKey] = Array.isArray(value) ? value : undefined; 82 | } else if (mappedKey === "targets") { 83 | if (Array.isArray(value)) { 84 | const _targets = Object.values(value).map((val) => 85 | AccessControlTargetTransformer.transform(val), 86 | ); 87 | result[mappedKey] = _targets; 88 | } else { 89 | result[mappedKey] = undefined; 90 | } 91 | } else { 92 | result[mappedKey] = value; 93 | } 94 | } 95 | } 96 | } 97 | 98 | if ( 99 | result.privilege === undefined || 100 | result.authMode === undefined || 101 | result.subjects === undefined || 102 | result.fabricIndex === undefined 103 | ) { 104 | throw new Error("Missing required fields in AccessControlEntryStruct"); 105 | } 106 | 107 | return result as AccessControlEntryStruct; 108 | } 109 | } 110 | -------------------------------------------------------------------------------- /dashboard/src/components/dialogs/binding/model.ts: -------------------------------------------------------------------------------- 1 | export type InputType = { 2 | [key: string]: number | number[] | undefined; 3 | }; 4 | 5 | export interface BindingEntryStruct { 6 | node: number; 7 | group: number | undefined; 8 | endpoint: number; 9 | cluster: number | undefined; 10 | fabricIndex: number | undefined; 11 | } 12 | 13 | export class BindingEntryDataTransformer { 14 | private static readonly KEY_MAPPING: { 15 | [inputKey: string]: keyof BindingEntryStruct; 16 | } = { 17 | "1": "node", 18 | "3": "endpoint", 19 | "4": "cluster", 20 | "254": "fabricIndex", 21 | }; 22 | 23 | public static transform(input: any): BindingEntryStruct { 24 | if (!input || typeof input !== "object") { 25 | throw new Error("Invalid input: expected an object"); 26 | } 27 | 28 | const result: Partial = {}; 29 | const keyMapping = BindingEntryDataTransformer.KEY_MAPPING; 30 | 31 | for (const key in input) { 32 | if (key in keyMapping) { 33 | const mappedKey = keyMapping[key]; 34 | if (mappedKey) { 35 | const value = input[key]; 36 | if (value === undefined) { 37 | continue; 38 | } 39 | if (mappedKey === "fabricIndex") { 40 | result[mappedKey] = value === undefined ? 
undefined : Number(value); 41 | } else if (mappedKey === "node" || mappedKey === "endpoint") { 42 | result[mappedKey] = Number(value); 43 | } else { 44 | result[mappedKey] = value as BindingEntryStruct[typeof mappedKey]; 45 | } 46 | } 47 | } 48 | } 49 | 50 | // Validate required fields 51 | if ( 52 | result.node === undefined || 53 | result.endpoint === undefined || 54 | result.fabricIndex === undefined 55 | ) { 56 | throw new Error("Missing required fields in BindingEntryStruct"); 57 | } 58 | 59 | return result as BindingEntryStruct; 60 | } 61 | } 62 | -------------------------------------------------------------------------------- /dashboard/src/components/dialogs/binding/show-node-binding-dialog.ts: -------------------------------------------------------------------------------- 1 | import { MatterClient } from "../../../client/client"; 2 | import { MatterNode } from "../../../client/models/node"; 3 | 4 | export const showNodeBindingDialog = async ( 5 | client: MatterClient, 6 | node: MatterNode, 7 | endpoint: number, 8 | ) => { 9 | await import("./node-binding-dialog"); 10 | const dialog = document.createElement("node-binding-dialog"); 11 | dialog.client = client; 12 | dialog.node = node; 13 | dialog.endpoint = endpoint; 14 | document 15 | .querySelector("matter-dashboard-app") 16 | ?.renderRoot.appendChild(dialog); 17 | }; 18 | -------------------------------------------------------------------------------- /dashboard/src/components/dialogs/commission-node-dialog/commission-node-dialog.ts: -------------------------------------------------------------------------------- 1 | import "@material/web/button/text-button"; 2 | import "@material/web/dialog/dialog"; 3 | import "@material/web/list/list"; 4 | import "@material/web/list/list-item"; 5 | import { html, LitElement } from "lit"; 6 | import { customElement, property, state } from "lit/decorators.js"; 7 | import { MatterNode } from "../../../client/models/node"; 8 | import { preventDefault } from "../../../util/prevent_default"; 9 | import { MatterClient } from "../../../client/client"; 10 | 11 | @customElement("commission-node-dialog") 12 | export class ComissionNodeDialog extends LitElement { 13 | 14 | @property({ attribute: false }) public client!: MatterClient; 15 | 16 | @state() private _mode?: "wifi" | "thread" | "existing"; 17 | 18 | protected render() { 19 | return html` 20 | 21 |
Commission node 22 | 23 | ${!this._mode 24 | ? html` 25 | Commission new WiFi device 28 | Commission new Thread device 31 | Commission existing device 34 | ` 35 | : this._mode === "wifi" 36 | ? html`` 37 | : this._mode === "thread" 38 | ? html`` 39 | : html``} 40 | 41 | 42 | Cancel 43 | 44 |
45 | `; 46 | } 47 | 48 | private _commissionWifi() { 49 | if (!this.client.serverInfo.bluetooth_enabled) { 50 | return; 51 | } 52 | import("./commission-node-wifi"); 53 | this._mode = "wifi"; 54 | } 55 | 56 | private _commissionThread() { 57 | if (!this.client.serverInfo.bluetooth_enabled) { 58 | return; 59 | } 60 | import("./commission-node-thread"); 61 | this._mode = "thread"; 62 | } 63 | 64 | private _commissionExisting() { 65 | import("./commission-node-existing"); 66 | this._mode = "existing"; 67 | } 68 | 69 | private _nodeCommissioned(ev: CustomEvent) { 70 | window.location.href = `#node/${ev.detail.node_id}`; 71 | this._close(); 72 | } 73 | 74 | private _close() { 75 | this.shadowRoot!.querySelector("md-dialog")!.close(); 76 | } 77 | 78 | private _handleClosed() { 79 | this.parentNode!.removeChild(this); 80 | } 81 | } 82 | 83 | declare global { 84 | interface HTMLElementTagNameMap { 85 | "commission-node-dialog": ComissionNodeDialog; 86 | } 87 | 88 | interface HASSDomEvents { 89 | "node-commissioned": MatterNode; 90 | } 91 | } 92 | -------------------------------------------------------------------------------- /dashboard/src/components/dialogs/commission-node-dialog/commission-node-existing.ts: -------------------------------------------------------------------------------- 1 | import { consume } from "@lit/context"; 2 | import "@material/web/textfield/outlined-text-field"; 3 | import "@material/web/progress/circular-progress"; 4 | import type { MdOutlinedTextField } from "@material/web/textfield/outlined-text-field"; 5 | import { LitElement, html, nothing } from "lit"; 6 | import { customElement, property, query, state } from "lit/decorators.js"; 7 | import { MatterClient } from "../../../client/client"; 8 | import { clientContext } from "../../../client/client-context"; 9 | import { fireEvent } from "../../../util/fire_event"; 10 | 11 | @customElement("commission-node-existing") 12 | export class CommissionNodeExisting extends LitElement { 13 | @consume({ context: clientContext, subscribe: true }) 14 | @property({ attribute: false }) 15 | public client!: MatterClient; 16 | 17 | @state() 18 | private _loading: boolean = false; 19 | 20 | @query("md-outlined-text-field[label='Share code']") 21 | private _pairingCodeField!: MdOutlinedTextField; 22 | 23 | protected render() { 24 | return html` 25 | 26 |
27 |
28 | Commission${this._loading ? html`` : nothing}`; 31 | } 32 | 33 | private async _commissionNode() { 34 | this._loading = true; 35 | try { 36 | const node = await this.client.commissionWithCode(this._pairingCodeField.value, true); 37 | fireEvent(this, "node-commissioned", node); 38 | } catch (err) { 39 | alert(`Error commissioning node: ${(err as Error).message}`); 40 | } finally { 41 | this._loading = false; 42 | } 43 | } 44 | } 45 | -------------------------------------------------------------------------------- /dashboard/src/components/dialogs/commission-node-dialog/commission-node-thread.ts: -------------------------------------------------------------------------------- 1 | import { consume } from "@lit/context"; 2 | import "@material/web/textfield/outlined-text-field"; 3 | import "@material/web/progress/circular-progress"; 4 | import type { MdOutlinedTextField } from "@material/web/textfield/outlined-text-field"; 5 | import { LitElement, html, nothing } from "lit"; 6 | import { customElement, property, query, state } from "lit/decorators.js"; 7 | import { MatterClient } from "../../../client/client"; 8 | import { clientContext } from "../../../client/client-context"; 9 | import { fireEvent } from "../../../util/fire_event"; 10 | 11 | @customElement("commission-node-thread") 12 | export class CommissionNodeThread extends LitElement { 13 | @consume({ context: clientContext, subscribe: true }) 14 | @property({ attribute: false }) 15 | public client!: MatterClient; 16 | 17 | @state() 18 | private _loading: boolean = false; 19 | 20 | @query("md-outlined-text-field[label='Thread dataset']") 21 | private _datasetField!: MdOutlinedTextField; 22 | @query("md-outlined-text-field[label='Pairing code']") 23 | private _pairingCodeField!: MdOutlinedTextField; 24 | 25 | protected render() { 26 | if (!this.client.serverInfo.thread_credentials_set) { 27 | return html` 28 | 29 |
30 |
31 | Set Thread Dataset${this._loading ? html`` : nothing}`; 34 | } 35 | return html` 36 | 37 |
38 |
39 | Commission${this._loading ? html`` : nothing}`; 42 | } 43 | 44 | private async _setThreadDataset() { 45 | const dataset = this._datasetField.value; 46 | if (!dataset) { 47 | alert("Dataset is required"); 48 | return; 49 | } 50 | this._loading = true; 51 | try { 52 | await this.client.setThreadOperationalDataset(dataset); 53 | } catch (err) { 54 | alert(`Error setting Thread dataset: ${(err as Error).message}`); 55 | } finally { 56 | this._loading = false; 57 | } 58 | 59 | } 60 | 61 | private async _commissionNode() { 62 | this._loading = true; 63 | try { 64 | const node = await this.client.commissionWithCode(this._pairingCodeField.value, false); 65 | fireEvent(this, "node-commissioned", node); 66 | } catch (err) { 67 | alert(`Error commissioning node: ${(err as Error).message}`); 68 | } finally { 69 | this._loading = false; 70 | } 71 | } 72 | } 73 | -------------------------------------------------------------------------------- /dashboard/src/components/dialogs/commission-node-dialog/commission-node-wifi.ts: -------------------------------------------------------------------------------- 1 | import { consume } from "@lit/context"; 2 | import "@material/web/textfield/outlined-text-field"; 3 | import "@material/web/progress/circular-progress"; 4 | import type { MdOutlinedTextField } from "@material/web/textfield/outlined-text-field"; 5 | import { LitElement, html, nothing } from "lit"; 6 | import { customElement, property, query, state } from "lit/decorators.js"; 7 | import { MatterClient } from "../../../client/client"; 8 | import { clientContext } from "../../../client/client-context"; 9 | import { fireEvent } from "../../../util/fire_event"; 10 | 11 | @customElement("commission-node-wifi") 12 | export class CommissionNodeWifi extends LitElement { 13 | @consume({ context: clientContext, subscribe: true }) 14 | @property({ attribute: false }) 15 | public client!: MatterClient; 16 | 17 | @state() 18 | private _loading: boolean = false; 19 | 20 | @query("md-outlined-text-field[label='SSID']") 21 | private _ssidField!: MdOutlinedTextField; 22 | @query("md-outlined-text-field[label='Password']") 23 | private _passwordField!: MdOutlinedTextField; 24 | @query("md-outlined-text-field[label='Pairing code']") 25 | private _pairingCodeField!: MdOutlinedTextField; 26 | 27 | protected render() { 28 | if (!this.client.serverInfo.wifi_credentials_set) { 29 | return html` 30 | 31 | 32 | 33 |
34 |
35 | Set WiFi Credentials${this._loading ? html`` : nothing}`; 38 | } 39 | return html` 40 | 41 |
42 |
43 | Commission${this._loading ? html`` : nothing}`; 46 | } 47 | 48 | private _setWifiCredentials() { 49 | const ssid = this._ssidField.value; 50 | if (!ssid) { 51 | alert("SSID is required"); 52 | return; 53 | } 54 | const password = this._passwordField.value; 55 | if (!password) { 56 | alert("Password is required"); 57 | return; 58 | } 59 | this._loading = true; 60 | try { 61 | this.client.setWifiCredentials(ssid, password); 62 | } catch (err) { 63 | alert(`Error setting WiFi credentials: \n${(err as Error).message}`); 64 | } finally { 65 | this._loading = false; 66 | } 67 | } 68 | 69 | private async _commissionNode() { 70 | try { 71 | if (!this._pairingCodeField.value) { 72 | alert("Pairing code is required"); 73 | return; 74 | } 75 | this._loading = true; 76 | const node = await this.client.commissionWithCode(this._pairingCodeField.value, false); 77 | fireEvent(this, "node-commissioned", node); 78 | } catch (err) { 79 | alert(`Error commissioning node: \n${(err as Error).message}`); 80 | } finally { 81 | this._loading = false; 82 | } 83 | } 84 | } 85 | -------------------------------------------------------------------------------- /dashboard/src/components/dialogs/commission-node-dialog/show-commission-node-dialog.ts: -------------------------------------------------------------------------------- 1 | import { MatterClient } from "../../../client/client"; 2 | 3 | 4 | export const showCommissionNodeDialog = async (client: MatterClient 5 | ) => { 6 | await import("./commission-node-dialog"); 7 | const dialog = document.createElement("commission-node-dialog"); 8 | dialog.client = client; 9 | document.querySelector("matter-dashboard-app")?.renderRoot.appendChild(dialog); 10 | } 11 | -------------------------------------------------------------------------------- /dashboard/src/components/ha-svg-icon.ts: -------------------------------------------------------------------------------- 1 | import { 2 | css, 3 | CSSResultGroup, 4 | LitElement, 5 | nothing, 6 | svg, 7 | SVGTemplateResult, 8 | } from "lit"; 9 | import { customElement, property } from "lit/decorators.js"; 10 | 11 | @customElement("ha-svg-icon") 12 | export class HaSvgIcon extends LitElement { 13 | @property() public path?: string; 14 | 15 | @property() public secondaryPath?: string; 16 | 17 | @property() public viewBox?: string; 18 | 19 | protected render(): SVGTemplateResult { 20 | return svg` 21 | `; 41 | } 42 | 43 | static get styles(): CSSResultGroup { 44 | return css` 45 | :host { 46 | display: var(--ha-icon-display, inline-flex); 47 | align-items: center; 48 | justify-content: center; 49 | position: relative; 50 | vertical-align: middle; 51 | fill: var(--icon-primary-color, currentcolor); 52 | width: var(--mdc-icon-size, 24px); 53 | height: var(--mdc-icon-size, 24px); 54 | } 55 | svg { 56 | width: 100%; 57 | height: 100%; 58 | pointer-events: none; 59 | display: block; 60 | } 61 | path.primary-path { 62 | opacity: var(--icon-primary-opactity, 1); 63 | } 64 | path.secondary-path { 65 | fill: var(--icon-secondary-color, currentcolor); 66 | opacity: var(--icon-secondary-opactity, 0.5); 67 | } 68 | `; 69 | } 70 | } 71 | declare global { 72 | interface HTMLElementTagNameMap { 73 | "ha-svg-icon": HaSvgIcon; 74 | } 75 | } 76 | -------------------------------------------------------------------------------- /dashboard/src/entrypoint/main.ts: -------------------------------------------------------------------------------- 1 | import { MatterClient } from "../client/client"; 2 | 3 | async function main() { 4 | 
import("../pages/matter-dashboard-app"); 5 | 6 | let url = ""; 7 | 8 | // Detect if we're running in the (production) webserver included in the matter server or not. 9 | const isProductionServer = location.href.includes(":5580") || location.href.includes("hassio_ingress") || location.href.includes("/api/ingress/"); 10 | 11 | if (!isProductionServer) { 12 | // development server, ask for url to matter server 13 | let storageUrl = localStorage.getItem("matterURL"); 14 | if (!storageUrl) { 15 | storageUrl = prompt( 16 | "Enter Websocket URL to a running Matter Server", 17 | "ws://localhost:5580/ws" 18 | ); 19 | if (!storageUrl) { 20 | alert("Unable to connect without URL"); 21 | return; 22 | } 23 | localStorage.setItem("matterURL", storageUrl); 24 | } 25 | url = storageUrl; 26 | } 27 | else { 28 | // assume production server running inside the matter server 29 | // Turn httpX url into wsX url and append "/ws" 30 | let baseUrl = window.location.origin + window.location.pathname; 31 | if (baseUrl.endsWith('/')) { baseUrl = baseUrl.slice(0, -1); } 32 | url = baseUrl.replace('http', 'ws') + '/ws'; 33 | console.log(`Connecting to Matter Server API using url: ${url}`); 34 | } 35 | 36 | const client = new MatterClient(url, isProductionServer); 37 | 38 | const dashboard = document.createElement("matter-dashboard-app"); 39 | dashboard.client = client; 40 | document.body.append(dashboard); 41 | } 42 | 43 | main(); 44 | -------------------------------------------------------------------------------- /dashboard/src/pages/components/context.ts: -------------------------------------------------------------------------------- 1 | 2 | import { createContext } from "@lit/context"; 3 | 4 | // export const bindingContext = createContext(""); 5 | export const bindingContext = createContext("binding"); 6 | -------------------------------------------------------------------------------- /dashboard/src/pages/components/footer.ts: -------------------------------------------------------------------------------- 1 | import { LitElement, css, html } from "lit"; 2 | import { customElement } from "lit/decorators.js"; 3 | 4 | 5 | @customElement("dashboard-footer") 6 | export class DashboardFooter extends LitElement { 7 | 8 | protected render() { 9 | return html` 10 | 14 | `; 15 | } 16 | 17 | static styles = css` 18 | .footer { 19 | padding: 16px; 20 | text-align: center; 21 | font-size: 0.8em; 22 | color: var(--md-sys-color-on-surface); 23 | display: flex; 24 | flex-direction: column; 25 | position: relative; 26 | clear: both; 27 | } 28 | 29 | .footer a { 30 | color: var(--md-sys-color-on-surface); 31 | } 32 | `; 33 | 34 | } 35 | -------------------------------------------------------------------------------- /dashboard/src/pages/components/header.ts: -------------------------------------------------------------------------------- 1 | import "@material/web/iconbutton/icon-button"; 2 | import { LitElement, css, html, nothing } from "lit"; 3 | import { customElement, property } from "lit/decorators.js"; 4 | import "@material/web/list/list"; 5 | import "@material/web/list/list-item"; 6 | import "@material/web/divider/divider"; 7 | import "@material/web/button/outlined-button"; 8 | import "../../components/ha-svg-icon"; 9 | import { mdiArrowLeft, mdiLogout } from "@mdi/js"; 10 | import { MatterClient } from "../../client/client"; 11 | 12 | 13 | interface HeaderAction { 14 | label: string; 15 | icon: string; 16 | action: void; 17 | } 18 | 19 | @customElement("dashboard-header") 20 | export class DashboardHeader extends 
LitElement { 21 | 22 | @property() public backButton?: string; 23 | @property() public actions?: HeaderAction[]; 24 | 25 | public client?: MatterClient; 26 | 27 | protected render() { 28 | 29 | return html` 30 |
31 | 32 | 33 | ${this.backButton ? html` 34 | 35 | 36 | 37 | 38 | ` : ''} 39 | 40 |
${this.title || ''}
41 |
42 |
43 | ${this.actions?.map((action) => { 44 | return html` 45 | 46 | 47 | 48 | ` 49 | })} 50 | 51 | ${this.client?.isProduction 52 | ? nothing 53 | : html` 54 | 55 | 56 | 57 | `} 58 |
59 |
60 | `; 61 | } 62 | 63 | static styles = css` 64 | 65 | .header { 66 | background-color: var(--md-sys-color-primary); 67 | color: var(--md-sys-color-on-primary); 68 | --icon-primary-color: var(--md-sys-color-on-primary); 69 | font-weight: 400; 70 | display: flex; 71 | align-items: center; 72 | padding-left: 18px; 73 | padding-right: 8px; 74 | height: 48px; 75 | } 76 | 77 | md-icon-button { 78 | margin-right: 8px; 79 | } 80 | 81 | .flex { 82 | flex: 1; 83 | } 84 | 85 | `; 86 | 87 | } 88 | -------------------------------------------------------------------------------- /dashboard/src/pages/components/server-details.ts: -------------------------------------------------------------------------------- 1 | import "@material/web/button/filled-button"; 2 | import "@material/web/button/outlined-button"; 3 | import "@material/web/button/text-button"; 4 | import "@material/web/divider/divider"; 5 | import "@material/web/iconbutton/icon-button"; 6 | import "@material/web/list/list"; 7 | import "@material/web/list/list-item"; 8 | import { mdiFile, mdiPlus } from "@mdi/js"; 9 | import { LitElement, css, html, nothing } from "lit"; 10 | import { customElement } from "lit/decorators.js"; 11 | import { MatterClient } from "../../client/client"; 12 | import { 13 | showAlertDialog, 14 | showPromptDialog, 15 | } from "../../components/dialog-box/show-dialog-box"; 16 | import { showCommissionNodeDialog } from "../../components/dialogs/commission-node-dialog/show-commission-node-dialog"; 17 | import "../../components/ha-svg-icon"; 18 | 19 | @customElement("server-details") 20 | export class ServerDetails extends LitElement { 21 | public client?: MatterClient; 22 | 23 | protected render() { 24 | if (!this.client) return html``; 25 | 26 | return html` 27 | 28 | 29 |
30 | Python Matter Server ${this.client.isProduction ? "" : `(${this.client.serverBaseAddress})`} 31 | ${ 32 | this.client.connection.connected 33 | ? nothing 34 | : html`OFFLINE` 35 | } 36 |
37 |
38 | 39 |
40 |
FabricId:
${this.client.serverInfo.fabric_id} 41 |
42 |
43 |
Compressed FabricId:
${this.client.serverInfo.compressed_fabric_id} 44 |
45 |
46 |
SDK Wheels Version:
${this.client.serverInfo.sdk_version} 47 |
48 |
49 |
Schema Version:
${this.client.serverInfo.schema_version} 50 |
51 |
52 |
Node count:
${Object.keys(this.client.nodes).length} 53 |
54 |
55 | 56 | 57 | Commission node 58 | Import node 59 | 60 |
61 | 62 | 68 | 69 | `; 70 | } 71 | 72 | private _commissionNode() { 73 | console.log(this.client); 74 | showCommissionNodeDialog(this.client!); 75 | } 76 | 77 | private async _uploadDiagnosticsDumpFile() { 78 | if ( 79 | !(await showPromptDialog({ 80 | title: "Add test node", 81 | text: "Do you want to add a test node from a diagnostics dump ?", 82 | confirmText: "Select file", 83 | })) 84 | ) { 85 | return; 86 | } 87 | // @ts-ignore:next-line 88 | const fileElem = this.renderRoot.getElementById( 89 | "fileElem" 90 | ) as HTMLInputElement; 91 | fileElem!.click(); 92 | } 93 | 94 | private _onFileInput = (event: Event) => { 95 | const fileElem = event.target as HTMLInputElement; 96 | if (fileElem.files!.length > 0) { 97 | const selectedFile = fileElem.files![0]; 98 | var reader = new FileReader(); 99 | reader.readAsText(selectedFile, "UTF-8"); 100 | reader.onload = async () => { 101 | try { 102 | await this.client!.importTestNode(reader.result?.toString() || ""); 103 | } catch (err: any) { 104 | showAlertDialog({ 105 | title: "Failed to import test node", 106 | text: err.message, 107 | }); 108 | } 109 | }; 110 | } 111 | event.preventDefault(); 112 | }; 113 | 114 | static styles = css` 115 | .btn { 116 | --md-outlined-button-container-shape: 0px; 117 | } 118 | 119 | .left { 120 | width: 30%; 121 | display: inline-table; 122 | } 123 | .whitespace { 124 | height: 15px; 125 | } 126 | `; 127 | } 128 | -------------------------------------------------------------------------------- /dashboard/src/pages/matter-cluster-view.ts: -------------------------------------------------------------------------------- 1 | import "@material/web/divider/divider"; 2 | import "@material/web/iconbutton/icon-button"; 3 | import "@material/web/list/list"; 4 | import "@material/web/list/list-item"; 5 | import { LitElement, css, html } from "lit"; 6 | import { customElement, property } from "lit/decorators.js"; 7 | import { MatterClient } from "../client/client"; 8 | import { clusters } from "../client/models/descriptions"; 9 | import { MatterNode } from "../client/models/node"; 10 | import { showAlertDialog } from "../components/dialog-box/show-dialog-box"; 11 | import "../components/ha-svg-icon"; 12 | import "../pages/components/node-details"; 13 | import { provide } from "@lit/context"; 14 | import { bindingContext } from "./components/context"; 15 | 16 | declare global { 17 | interface HTMLElementTagNameMap { 18 | "matter-cluster-view": MatterClusterView; 19 | } 20 | } 21 | 22 | function clusterAttributes( 23 | attributes: { [key: string]: any }, 24 | endpoint: number, 25 | cluster: number 26 | ) { 27 | // extract unique clusters from the node attributes, as (sorted) array 28 | return Object.keys(attributes) 29 | .filter((key) => key.startsWith(`${endpoint}/${cluster}`)) 30 | .map((key) => { 31 | const attributeKey = Number(key.split("/")[2]); 32 | return { key: attributeKey, value: attributes[key] }; 33 | }, []); 34 | } 35 | 36 | @customElement("matter-cluster-view") 37 | class MatterClusterView extends LitElement { 38 | public client!: MatterClient; 39 | 40 | @property() 41 | public node?: MatterNode; 42 | 43 | @provide({ context: bindingContext }) 44 | @property() 45 | public endpoint!: number; 46 | 47 | @property() 48 | public cluster?: number; 49 | 50 | render() { 51 | if (!this.node || this.endpoint == undefined || this.cluster == undefined) { 52 | return html` 53 |

Node, endpoint or cluster not found!

54 | 55 | `; 56 | } 57 | 58 | return html` 59 | 64 | 65 | 66 |
67 | 68 |
69 | 70 | 71 |
72 | 73 | 74 |
75 | Attributes of 77 | ${clusters[this.cluster]?.label || "Custom/Unknown Cluster"} 78 | Cluster on Endpoint ${this.endpoint} 80 |
81 |
82 | ClusterId ${this.cluster} (0x00${this.cluster.toString(16)}) 83 |
84 |
85 | ${clusterAttributes( 86 | this.node.attributes, 87 | this.endpoint, 88 | this.cluster 89 | ).map((attribute) => { 90 | return html` 91 | 92 |
93 | ${clusters[this.cluster!]?.attributes[attribute.key]?.label || 94 | "Custom/Unknown Attribute"} 95 |
96 |
97 | AttributeId: ${attribute.key} 98 | (0x00${attribute.key.toString(16)}) - Value type: 99 | ${clusters[this.cluster!]?.attributes[attribute.key]?.type || 100 | "unknown"} 101 |
102 |
103 | ${JSON.stringify(attribute.value).length > 20 104 | ? html`` 111 | : JSON.stringify(attribute.value)} 112 |
113 |
114 | 115 | `; 116 | })} 117 |
118 |
119 | `; 120 | } 121 | 122 | private async _showAttributeValue(value: any) { 123 | showAlertDialog({ 124 | title: "Attribute value", 125 | text: JSON.stringify(value), 126 | }); 127 | } 128 | 129 | private _goBack() { 130 | history.back(); 131 | } 132 | 133 | static styles = css` 134 | :host { 135 | display: block; 136 | background-color: var(--md-sys-color-background); 137 | } 138 | 139 | .header { 140 | background-color: var(--md-sys-color-primary); 141 | color: var(--md-sys-color-on-primary); 142 | --icon-primary-color: var(--md-sys-color-on-primary); 143 | font-weight: 400; 144 | display: flex; 145 | align-items: center; 146 | padding-right: 8px; 147 | height: 48px; 148 | } 149 | 150 | md-icon-button { 151 | margin-right: 8px; 152 | } 153 | 154 | .flex { 155 | flex: 1; 156 | } 157 | 158 | .container { 159 | padding: 16px; 160 | max-width: 95%; 161 | margin: 0 auto; 162 | } 163 | 164 | .status { 165 | color: var(--danger-color); 166 | font-weight: bold; 167 | font-size: 0.8em; 168 | } 169 | `; 170 | } 171 | -------------------------------------------------------------------------------- /dashboard/src/pages/matter-dashboard-app.ts: -------------------------------------------------------------------------------- 1 | import { ContextProvider } from "@lit/context"; 2 | import { LitElement, PropertyValueMap, html } from "lit"; 3 | import { customElement, state } from "lit/decorators.js"; 4 | import { MatterClient } from "../client/client"; 5 | import { clientContext } from "../client/client-context"; 6 | import { MatterError } from "../client/exceptions"; 7 | import { clone } from "../util/clone_class"; 8 | import type { Route } from "../util/routing"; 9 | import "./matter-cluster-view"; 10 | import "./matter-endpoint-view"; 11 | import "./matter-node-view"; 12 | import "./matter-server-view"; 13 | 14 | declare global { 15 | interface HTMLElementTagNameMap { 16 | "matter-dashboard-app": MatterDashboardApp; 17 | } 18 | } 19 | 20 | @customElement("matter-dashboard-app") 21 | class MatterDashboardApp extends LitElement { 22 | @state() private _route: Route = { 23 | prefix: "", 24 | path: [], 25 | }; 26 | 27 | public client!: MatterClient; 28 | 29 | @state() 30 | private _state: "connecting" | "connected" | "error" | "disconnected" = "connecting"; 31 | 32 | private _error: string | undefined; 33 | 34 | private provider = new ContextProvider(this, { context: clientContext, initialValue: this.client }); 35 | 36 | protected firstUpdated( 37 | _changedProperties: PropertyValueMap | Map 38 | ): void { 39 | super.firstUpdated(_changedProperties); 40 | this.client.startListening().then( 41 | () => { 42 | this._state = "connected"; 43 | this.client.addEventListener("nodes_changed", () => { 44 | this.requestUpdate(); 45 | this.provider.setValue(clone(this.client)); 46 | }); 47 | this.client.addEventListener("server_info_updated", () => { 48 | this.provider.setValue(clone(this.client)); 49 | }); 50 | this.client.addEventListener("connection_lost", () => { 51 | this._state = "disconnected"; 52 | }); 53 | }, 54 | (err: MatterError) => { 55 | this._state = "error"; 56 | this._error = err.message; 57 | } 58 | ) 59 | 60 | // Handle history changes 61 | const updateRoute = () => { 62 | const pathParts = location.hash.substring(1).split("/"); 63 | this._route = { 64 | prefix: pathParts.length == 1 ? "" : pathParts[0], 65 | path: pathParts.length == 1 ? 
pathParts : pathParts.slice(1), 66 | }; 67 | }; 68 | window.addEventListener("hashchange", updateRoute); 69 | updateRoute(); 70 | } 71 | 72 | render() { 73 | if (this._state === "connecting") { 74 | return html`

Connecting...

`; 75 | } 76 | if (this._state === "disconnected") { 77 | return html`

Connection lost

`; 78 | } 79 | if (this._state === "error") { 80 | return html` 81 |

Error: ${this._error}

82 | 83 | `; 84 | } 85 | if (this._route.prefix === "node" && this._route.path.length == 3) { 86 | // cluster level 87 | return html` 88 | 94 | `; 95 | } 96 | if (this._route.prefix === "node" && this._route.path.length == 2) { 97 | // endpoint level 98 | return html` 99 | 104 | `; 105 | } 106 | if (this._route.prefix === "node") { 107 | // node level 108 | return html` 109 | 113 | `; 114 | } 115 | // root level: server overview 116 | return html``; 121 | } 122 | } 123 | -------------------------------------------------------------------------------- /dashboard/src/pages/matter-endpoint-view.ts: -------------------------------------------------------------------------------- 1 | import "@material/web/iconbutton/icon-button"; 2 | import { LitElement, css, html } from "lit"; 3 | import { guard } from 'lit/directives/guard.js'; 4 | import { customElement, property } from "lit/decorators.js"; 5 | import "@material/web/list/list"; 6 | import "@material/web/list/list-item"; 7 | import "@material/web/divider/divider"; 8 | import { MatterClient } from "../client/client"; 9 | import "../components/ha-svg-icon"; 10 | import { mdiChevronRight } from "@mdi/js"; 11 | import { MatterNode } from "../client/models/node"; 12 | import { DeviceType, clusters, device_types } from "../client/models/descriptions"; 13 | 14 | declare global { 15 | interface HTMLElementTagNameMap { 16 | "matter-endpoint-view": MatterEndpointView; 17 | } 18 | } 19 | 20 | function getUniqueClusters(node: MatterNode, endpoint: Number) { 21 | return Array.from(new Set(Object.keys(node!.attributes) 22 | .filter(key => key.startsWith(`${endpoint.toString()}/`)) 23 | .map(key => Number(key.split("/")[1])))) 24 | .sort((a, b) => { return a - b }); 25 | } 26 | 27 | export function getEndpointDeviceTypes(node: MatterNode, endpoint: Number): DeviceType[] { 28 | const rawValues: Record[] | undefined = node.attributes[`${endpoint}/29/0`]; 29 | if (!rawValues) return []; 30 | return rawValues.map((rawValue) => { return device_types[rawValue["0"] || rawValue["deviceType"]] }) 31 | } 32 | 33 | @customElement("matter-endpoint-view") 34 | class MatterEndpointView extends LitElement { 35 | public client!: MatterClient; 36 | 37 | @property() 38 | public node?: MatterNode; 39 | 40 | @property() 41 | public endpoint?: number; 42 | 43 | render() { 44 | 45 | if (!this.node || this.endpoint == undefined) { 46 | return html` 47 |

Node or endpoint not found!

48 | 53 | `; 54 | } 55 | 56 | return html` 57 | 62 | 63 | 64 |
65 | 69 |
70 | 71 | 72 |
73 | 74 | 75 |
76 | Clusters on Endpoint ${this.endpoint} 77 |
78 |
79 | Device Type(s): ${getEndpointDeviceTypes(this.node, this.endpoint).map(deviceType => { return deviceType.label }).join(" / ")} 80 |
81 |
82 | ${guard([this.node?.attributes.length], () => getUniqueClusters(this.node!, this.endpoint!).map((cluster) => { 83 | return html` 84 | 85 |
86 | ${clusters[cluster]?.label || 'Custom/Unknown Cluster'} 87 |
88 |
89 | ClusterId ${cluster} (0x00${cluster.toString(16)}) 90 |
91 | 92 |
93 | `; 94 | }))} 95 |
96 |
97 | `; 98 | } 99 | 100 | private _goBack() { 101 | history.back(); 102 | } 103 | 104 | static styles = css` 105 | :host { 106 | display: block; 107 | background-color: var(--md-sys-color-background); 108 | } 109 | 110 | .header { 111 | background-color: var(--md-sys-color-primary); 112 | color: var(--md-sys-color-on-primary); 113 | --icon-primary-color: var(--md-sys-color-on-primary); 114 | font-weight: 400; 115 | display: flex; 116 | align-items: center; 117 | padding-right: 8px; 118 | height: 48px; 119 | } 120 | 121 | md-icon-button { 122 | margin-right: 8px; 123 | } 124 | 125 | .flex { 126 | flex: 1; 127 | } 128 | 129 | .container { 130 | padding: 16px; 131 | max-width: 95%; 132 | margin: 0 auto; 133 | } 134 | 135 | .status { 136 | color: var(--danger-color); 137 | font-weight: bold; 138 | font-size: 0.8em; 139 | } 140 | `; 141 | } 142 | -------------------------------------------------------------------------------- /dashboard/src/pages/matter-node-view.ts: -------------------------------------------------------------------------------- 1 | import "@material/web/iconbutton/icon-button"; 2 | import { LitElement, css, html } from "lit"; 3 | import { guard } from 'lit/directives/guard.js'; 4 | import { customElement, property } from "lit/decorators.js"; 5 | import "@material/web/list/list"; 6 | import "@material/web/list/list-item"; 7 | import "@material/web/divider/divider"; 8 | import { MatterClient } from "../client/client"; 9 | import "../components/ha-svg-icon"; 10 | import "./components/header"; 11 | import "./components/node-details"; 12 | import { mdiChevronRight } from "@mdi/js"; 13 | import { MatterNode } from "../client/models/node"; 14 | import { getEndpointDeviceTypes } from "./matter-endpoint-view"; 15 | 16 | declare global { 17 | interface HTMLElementTagNameMap { 18 | "matter-node-view": MatterNodeView; 19 | } 20 | } 21 | 22 | function getUniqueEndpoints(node: MatterNode) { 23 | // extract unique endpoints from the node attributes, as (sorted) array 24 | return Array.from(new Set(Object.keys(node!.attributes).map(key => Number(key.split("/")[0])))).sort((a, b) => { return a - b }); 25 | } 26 | 27 | @customElement("matter-node-view") 28 | class MatterNodeView extends LitElement { 29 | public client!: MatterClient; 30 | 31 | @property() 32 | public node?: MatterNode; 33 | 34 | render() { 35 | 36 | if (!this.node) { 37 | return html` 38 |

Node not found!

39 | 44 | `; 45 | } 46 | 47 | return html` 48 | 53 | 54 | 55 |
56 | 60 |
61 | 62 | 63 |
64 | 65 | 66 |
67 | Endpoints 68 |
69 |
70 | ${guard([this.node?.attributes.length], () => getUniqueEndpoints(this.node!).map((endPointId) => { 71 | return html` 72 | 73 |
74 | Endpoint ${endPointId} 75 |
76 |
77 | Device Type(s): ${getEndpointDeviceTypes(this.node!, endPointId).map(deviceType => { return deviceType.label }).join(" / ")} 78 |
79 | 80 |
81 | `; 82 | }))} 83 |
84 |
85 | 86 | 87 | `; 88 | } 89 | 90 | private _goBack() { 91 | history.back(); 92 | } 93 | 94 | static styles = css` 95 | 96 | :host { 97 | display: flex; 98 | background-color: var(--md-sys-color-background); 99 | box-sizing: border-box; 100 | flex-direction: column; 101 | min-height: 100vh; 102 | } 103 | 104 | .container { 105 | padding: 16px; 106 | max-width: 95%; 107 | margin: 0 auto; 108 | width: 100%; 109 | } 110 | 111 | @media (max-width: 600px) { 112 | .container { 113 | padding: 16px 0; 114 | } 115 | } 116 | 117 | .status { 118 | color: var(--danger-color); 119 | font-weight: bold; 120 | font-size: 0.8em; 121 | } 122 | `; 123 | } 124 | -------------------------------------------------------------------------------- /dashboard/src/pages/matter-server-view.ts: -------------------------------------------------------------------------------- 1 | import "@material/web/iconbutton/icon-button"; 2 | import "@material/web/divider/divider"; 3 | import "@material/web/list/list"; 4 | import "@material/web/list/list-item"; 5 | import { LitElement, css, html, nothing } from "lit"; 6 | import { customElement, property } from "lit/decorators.js"; 7 | import { MatterClient } from "../client/client"; 8 | import "../components/ha-svg-icon"; 9 | import "./components/header"; 10 | import "./components/server-details"; 11 | import "./components/footer"; 12 | import { mdiChevronRight } from "@mdi/js"; 13 | import memoizeOne from "memoize-one"; 14 | 15 | declare global { 16 | interface HTMLElementTagNameMap { 17 | "matter-server-view": MatterServerView; 18 | } 19 | } 20 | 21 | @customElement("matter-server-view") 22 | class MatterServerView extends LitElement { 23 | public client!: MatterClient; 24 | 25 | @property() 26 | public nodes!: MatterClient["nodes"]; 27 | 28 | private nodeEntries = memoizeOne((nodes: this["nodes"]) => 29 | Object.entries(nodes) 30 | ); 31 | 32 | render() { 33 | const nodes = this.nodeEntries(this.nodes); 34 | 35 | return html` 36 | 37 | 41 | 42 | 43 |
44 | 47 |
48 | 49 | 50 |
51 | 52 | 53 |
54 | Nodes 55 |
56 |
57 | ${nodes.map(([id, node]) => { 58 | return html` 59 | 60 |
61 | Node ${node.node_id} 62 | ${node.available 63 | ? "" 64 | : html`OFFLINE`} 65 |
66 |
${node.nodeLabel ? `${node.nodeLabel} | ` : nothing} ${node.vendorName} | ${node.productName}
67 | 68 |
69 | `; 70 | })} 71 |
72 |
73 | 74 | `; 75 | } 76 | 77 | 78 | 79 | 80 | 81 | static styles = css` 82 | :host { 83 | display: flex; 84 | background-color: var(--md-sys-color-background); 85 | box-sizing: border-box; 86 | flex-direction: column; 87 | } 88 | 89 | .container { 90 | padding: 16px; 91 | max-width: 95%; 92 | margin: 0 auto; 93 | flex: 1; 94 | width: 100%; 95 | } 96 | 97 | @media (max-width: 600px) { 98 | .container { 99 | padding: 16px 0; 100 | } 101 | } 102 | 103 | span[slot="start"] { 104 | width: 40px; 105 | text-align: center; 106 | } 107 | 108 | .status { 109 | color: var(--danger-color); 110 | font-weight: bold; 111 | font-size: 0.8em; 112 | } 113 | 114 | `; 115 | } 116 | -------------------------------------------------------------------------------- /dashboard/src/util/clone_class.ts: -------------------------------------------------------------------------------- 1 | export const clone = (orig: any) => Object.assign(Object.create(Object.getPrototypeOf(orig)), orig) 2 | -------------------------------------------------------------------------------- /dashboard/src/util/fire_event.ts: -------------------------------------------------------------------------------- 1 | // Polymer legacy event helpers used courtesy of the Polymer project. 2 | // 3 | // Copyright (c) 2017 The Polymer Authors. All rights reserved. 4 | // 5 | // Redistribution and use in source and binary forms, with or without 6 | // modification, are permitted provided that the following conditions are 7 | // met: 8 | // 9 | // * Redistributions of source code must retain the above copyright 10 | // notice, this list of conditions and the following disclaimer. 11 | // * Redistributions in binary form must reproduce the above 12 | // copyright notice, this list of conditions and the following disclaimer 13 | // in the documentation and/or other materials provided with the 14 | // distribution. 15 | // * Neither the name of Google Inc. nor the names of its 16 | // contributors may be used to endorse or promote products derived from 17 | // this software without specific prior written permission. 18 | // 19 | // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 20 | // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 21 | // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR 22 | // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT 23 | // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 24 | // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT 25 | // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 26 | // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 27 | // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 28 | // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 29 | // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 30 | 31 | declare global { 32 | // eslint-disable-next-line 33 | interface HASSDomEvents {} 34 | } 35 | 36 | export type ValidHassDomEvent = keyof HASSDomEvents; 37 | 38 | export interface HASSDomEvent extends Event { 39 | detail: T; 40 | } 41 | 42 | /** 43 | * Dispatches a custom event with an optional detail value. 44 | * 45 | * @param {string} type Name of event type. 46 | * @param {*=} detail Detail value containing event-specific 47 | * payload. 
48 | * @param {{ bubbles: (boolean|undefined), 49 | * cancelable: (boolean|undefined), 50 | * composed: (boolean|undefined) }=} 51 | * options Object specifying options. These may include: 52 | * `bubbles` (boolean, defaults to `true`), 53 | * `cancelable` (boolean, defaults to false), and 54 | * `node` on which to fire the event (HTMLElement, defaults to `this`). 55 | * @return {Event} The new event that was fired. 56 | */ 57 | export const fireEvent = ( 58 | node: HTMLElement | Window, 59 | type: HassEvent, 60 | detail?: HASSDomEvents[HassEvent], 61 | options?: { 62 | bubbles?: boolean; 63 | cancelable?: boolean; 64 | composed?: boolean; 65 | } 66 | ) => { 67 | options = options || {}; 68 | // @ts-ignore 69 | detail = detail === null || detail === undefined ? {} : detail; 70 | const event = new Event(type, { 71 | bubbles: options.bubbles === undefined ? true : options.bubbles, 72 | cancelable: Boolean(options.cancelable), 73 | composed: options.composed === undefined ? true : options.composed, 74 | }); 75 | (event as any).detail = detail; 76 | node.dispatchEvent(event); 77 | return event; 78 | }; 79 | -------------------------------------------------------------------------------- /dashboard/src/util/prevent_default.ts: -------------------------------------------------------------------------------- 1 | export const preventDefault = (ev: Event) => ev.preventDefault(); 2 | -------------------------------------------------------------------------------- /dashboard/src/util/routing.ts: -------------------------------------------------------------------------------- 1 | export interface Route { 2 | prefix: string; 3 | path: string[]; 4 | } 5 | -------------------------------------------------------------------------------- /dashboard/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "lib": ["es2019", "dom"], 4 | "target": "es2019", 5 | "module": "es2020", 6 | "moduleResolution": "node", 7 | "resolveJsonModule": true, 8 | "outDir": "dist", 9 | "declaration": true, 10 | "experimentalDecorators": true, 11 | "noFallthroughCasesInSwitch": true, 12 | "noImplicitReturns": true, 13 | "noUnusedLocals": true, 14 | "forceConsistentCasingInFileNames": true, 15 | "strict": true, 16 | "skipLibCheck": true, 17 | "importHelpers": true 18 | }, 19 | "include": [ 20 | "src/*", 21 | "src/entrypoint/main.ts", 22 | "src/pages/matter-dashboard-app.ts" 23 | ] 24 | } 25 | -------------------------------------------------------------------------------- /docker-entrypoint.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | set -e 3 | 4 | set -- matter-server --storage-path /data "$@" 5 | 6 | echo "Starting server:" "$@" 7 | exec "$@" 8 | -------------------------------------------------------------------------------- /docs/docker.md: -------------------------------------------------------------------------------- 1 | # Running Matter Server in Docker 2 | 3 | For testing/evaluation purposes or as a guideline towards other application developers that want to run the Matter Server, we do provide an [official Docker container image](https://github.com/home-assistant-libs/python-matter-server/pkgs/container/python-matter-server). Just make sure that the underlying operating system on which you intend to run the docker container matches the [requirements needed for Matter and Thread](os_requirements.md) so better not attempt to run it on a specific purpose operating system such as a NAS. 
4 | 5 | > [!NOTE] **Attention Home Assistant users:** The docker image is provided as-is and without official support (due to all the complex requirements to the underlying host/OS). Use it at your own risk if you know what you're doing. 6 | 7 | We strongly recommend using Home Assistant OS along with the official Matter 8 | Server add-on to use Matter with Home Assistant. The Matter integration 9 | automatically installs the Matter Server as an add-on. Please refer to the 10 | [Home Assistant documentation](https://www.home-assistant.io/integrations/matter/). 11 | 12 | Home Assistant OS has been tested and tuned to be used with Matter and Thread, 13 | which makes this combination the best tested and largely worry free 14 | environment. 15 | 16 | If you still prefer a self-managed container installation, you might experience 17 | communication issues with Matter devices, especially Thread based devices. 18 | This is mostly because the container installation uses host networking, and 19 | relies on the networking managed by your operating system. 20 | 21 | ## Running the Matter Server using container image 22 | 23 | With the following command you can run the Matter Server in a container using 24 | Docker. The Matter network data (fabric information) are stored in a newly 25 | created directory `data` in the current directory. Adjust the command to 26 | choose another location instead. 27 | 28 | ``` 29 | mkdir data 30 | docker run -d \ 31 | --name matter-server \ 32 | --restart=unless-stopped \ 33 | --security-opt apparmor=unconfined \ 34 | -v $(pwd)/data:/data \ 35 | --network=host \ 36 | ghcr.io/home-assistant-libs/python-matter-server:stable 37 | ``` 38 | 39 | > [!NOTE] 40 | > The container has a default command line set (see Dockerfile). If you intend to pass additional arguments, you have to pass the default command line as well. 41 | 42 | To use local commissioning with Bluetooth, make sure to pass the D-Bus socket as well: 43 | 44 | ```sh 45 | docker run -d \ 46 | --name matter-server \ 47 | --restart=unless-stopped \ 48 | --security-opt apparmor=unconfined \ 49 | -v $(pwd)/data:/data \ 50 | -v /run/dbus:/run/dbus:ro \ 51 | --network=host \ 52 | ghcr.io/home-assistant-libs/python-matter-server:stable --storage-path /data --paa-root-cert-dir /data/credentials --bluetooth-adapter 0 53 | ``` 54 | 55 | ## Running using Docker compose 56 | 57 | ```yaml 58 | services: 59 | # python-matter-server 60 | matter-server: 61 | image: ghcr.io/home-assistant-libs/python-matter-server:stable 62 | container_name: matter-server 63 | restart: unless-stopped 64 | # Required for mDNS to work correctly 65 | network_mode: host 66 | security_opt: 67 | # Needed for Bluetooth via dbus 68 | - apparmor:unconfined 69 | volumes: 70 | # Create an .env file that sets the USERDIR environment variable. 
71 | - ${USERDIR:-$HOME}/docker/matter-server/data:/data/ 72 | # Required for Bluetooth via D-Bus 73 | - /run/dbus:/run/dbus:ro 74 | # If you adjust command line, make sure to pass the default CMD arguments too: 75 | #command: --storage-path /data --paa-root-cert-dir /data/credentials --bluetooth-adapter 0 76 | ``` 77 | -------------------------------------------------------------------------------- /docs/os_requirements.md: -------------------------------------------------------------------------------- 1 | # OS Requirements 2 | 3 | Matter is based on IPv6 link-local multicast protocols and thus running the Matter Server (or developing it) is not as straightforward as other applications, mostly due to the bad shape of IPv6 support in various Linux distributions, let alone the IPv6 Neighbor 4 | Discovery Protocol, which is required for Thread. 5 | 6 | ## Networking 7 | 8 | Matter uses link-local multicast protocols which do not work across different LANs or VLANs, so it is best to either use a completely flat network or to ensure that the machine running the Matter Server is on the same (v)LAN as the devices, any border routers, and the phone/device used for commissioning. 9 | 10 | The host network interface needs IPv6 support enabled. 11 | 12 | Be aware of any (semi) professional networking gear such as Unifi or Omada which have options to filter multicast traffic, often called something like "Multicast optimizations" or something along those lines. Disable such features; they are helpful in a high-density enterprise network, but they break domestic protocols that rely on multicast, like Matter, AirPlay, etc. 13 | 14 | Also, do not enable any mDNS forwarders on the network (the option is called mDNS on Unifi, for example) as they tend to corrupt or severely hinder the Matter packets on the network. 15 | 16 | In some cases it is known that IGMP/MLD snooping implementations on network gear may help or hinder Matter traffic. Experiment with these options if you have network equipment that offers them. 17 | 18 | As a general rule of thumb, if you use standard, home-user-oriented network equipment, you have the highest rate of success with Matter. 19 | 20 | ## Operating system 21 | 22 | The only supported operating systems for developing or running the Matter Server are (recent) versions of (64-bit) MacOS and a very recent distribution (including kernel) of Linux. Running it on a non-64-bit architecture or another operating system (even WSL) is not supported. 23 | 24 | For a MacOS (development) environment, things will work fine out of the box from MacOS 14 or higher (arm-based CPU). In combination with a Python venv, this makes for the recommended development environment for working on the Matter codebase. 25 | 26 | For a Linux operating system, keep the following recommendations in mind: 27 | 28 | > your host must process ICMPv6 Router Advertisements. See the [openthread.io 29 | > Bidirectional IPv6 Connectivity code labs](https://openthread.io/codelabs/openthread-border-router#6) 30 | > on how to set up your host correctly. Note that NetworkManager has its own ICMPv6 31 | > Router Advertisement processing. A recent version of NetworkManager is 32 | > necessary, and there are still known issues (see NetworkManager issue 33 | > [#1232](https://gitlab.freedesktop.org/NetworkManager/NetworkManager/-/issues/1232)). 34 | 35 | The Home Assistant Operating System 10 and newer correctly processes ICMPv6 Router Advertisements.
The Matter Server is provided as an add-on to that operating system, thus including all the required fixes. 36 | 37 | ### Requirements to communicate with Thread devices through Thread border routers 38 | 39 | For communication through Thread border routers that are not running on the same 40 | host as the Matter Controller server to work, IPv6 routing needs to be set up 41 | properly. IPv6 routing is largely set up automatically through the IPv6 Neighbor 42 | Discovery Protocol, specifically the Route Information Options (RIO). However, 43 | whether IPv6 Neighbor Discovery RIOs are processed, and processed correctly, depends on the network 44 | management software your system is using. There may be bugs and caveats in 45 | processing these Route Information Options. 46 | 47 | In general, make sure the kernel option `CONFIG_IPV6_ROUTER_PREF` is enabled and 48 | that IPv6 forwarding is disabled (sysctl variable `net.ipv6.conf.all.forwarding`). 49 | If IPv6 forwarding is enabled, the Linux kernel doesn't employ reachability 50 | probing (RFC 4191), which can lead to longer outages (up to 30 minutes) until 51 | network changes are detected. 52 | 53 | If you are using NetworkManager, make sure to use at least NetworkManager 1.42. 54 | Previous versions lose track of routes, and stale routes can lead to unreachable 55 | Thread devices. All currently released NetworkManager versions can't handle 56 | multiple routes to the same network properly. This means that if you have multiple 57 | Thread border routers, the fallback won't work immediately (see [NetworkManager 58 | issue #1232](https://gitlab.freedesktop.org/NetworkManager/NetworkManager/-/issues/1232)). 59 | 60 | We currently don't have experience with systemd-networkd. It seems to have its 61 | own IPv6 Neighbor Discovery Protocol handling. 62 | 63 | If you don't use NetworkManager or systemd-networkd, you can use the kernel's 64 | IPv6 Neighbor Discovery Protocol handling. 65 | 66 | Make sure the kernel option `CONFIG_IPV6_ROUTE_INFO` is enabled and the 67 | following sysctl variables are set: 68 | 69 | ```sh 70 | sysctl -w net.ipv6.conf.wlan0.accept_ra=1 71 | sysctl -w net.ipv6.conf.wlan0.accept_ra_rt_info_max_plen=64 72 | ``` 73 | 74 | If your system has IPv6 forwarding enabled (not recommended, see above), you'll 75 | have to use `2` for the accept_ra variable. See also the [Thread Border Router - Bidirectional IPv6 Connectivity and DNS-Based Service Discovery codelab](https://openthread.io/codelabs/openthread-border-router#6). 76 | -------------------------------------------------------------------------------- /docs/websockets_api.md: -------------------------------------------------------------------------------- 1 | # Websocket documentation 2 | 3 | This list is not intended to be complete; for a complete overview, see the client implementation. 4 | 5 | ## Websocket commands 6 | 7 | Here are the most frequently used commands: 8 | 9 | **Set WiFi credentials** 10 | 11 | Inform the controller about the WiFi credentials it needs to send when commissioning a new device. 12 | 13 | ```json 14 | { 15 | "message_id": "1", 16 | "command": "set_wifi_credentials", 17 | "args": { 18 | "ssid": "wifi-name-here", 19 | "credentials": "wifi-password-here" 20 | } 21 | } 22 | ``` 23 | 24 | **Set Thread dataset** 25 | 26 | Inform the controller about the Thread credentials it needs to use when commissioning a new device.
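The dataset value is normally exported from an existing Thread border router, and the exact procedure depends on your border router. As a rough sketch (assuming a host running the standard OpenThread Border Router tooling, with its `ot-ctl` utility available), the active operational dataset can be printed as the hex TLV string that the `dataset` argument below expects:

```sh
# Sketch, assuming an OpenThread Border Router host with the ot-ctl utility installed:
# prints the active Thread operational dataset as hex TLVs (the value to pass as "dataset").
ot-ctl dataset active -x
```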
27 | 28 | ```json 29 | { 30 | "message_id": "1", 31 | "command": "set_thread_dataset", 32 | "args": { 33 | "dataset": "put-credentials-here" 34 | } 35 | } 36 | ``` 37 | 38 | **Commission with code** 39 | 40 | Commission a new device. For WiFi or Thread based devices, the credentials need to be set upfront, otherwise, commissioning will fail. Supports both QR-code syntax (MT:...) and manual pairing code as string. 41 | The controller will use bluetooth for the commissioning of wireless devices. If the machine running the Python Matter Server controller lacks Bluetooth support, commissioning will only work for devices already connected to the network (by cable or another controller). 42 | 43 | Matter QR-code 44 | 45 | ```json 46 | { 47 | "message_id": "2", 48 | "command": "commission_with_code", 49 | "args": { 50 | "code": "MT:Y.ABCDEFG123456789" 51 | } 52 | } 53 | ``` 54 | 55 | Manual pairing code 56 | 57 | ```json 58 | { 59 | "message_id": "2", 60 | "command": "commission_with_code", 61 | "args": { 62 | "code": "35325335079", 63 | "network_only": true 64 | } 65 | } 66 | ``` 67 | 68 | **Open Commissioning window** 69 | 70 | Open a commissioning window to commission a device present on this controller to another. 71 | Returns code to use as discriminator. 72 | 73 | ```json 74 | { 75 | "message_id": "2", 76 | "command": "open_commissioning_window", 77 | "args": { 78 | "node_id": 1 79 | } 80 | } 81 | ``` 82 | 83 | **Get Nodes** 84 | 85 | Get all nodes already commissioned on the controller. 86 | 87 | ```json 88 | { 89 | "message_id": "2", 90 | "command": "get_nodes" 91 | } 92 | ``` 93 | 94 | **Get Node** 95 | 96 | Get info of a single Node. 97 | 98 | ```json 99 | { 100 | "message_id": "2", 101 | "command": "get_node", 102 | "args": { 103 | "node_id": 1 104 | } 105 | } 106 | ``` 107 | 108 | **Start listening** 109 | 110 | When the start_listening command is issued, the server will dump all existing nodes. From that moment on all events (including node attribute changes) will be forwarded. 111 | 112 | ```json 113 | { 114 | "message_id": "3", 115 | "command": "start_listening" 116 | } 117 | ``` 118 | 119 | **Read an attribute** 120 | 121 | Here is an example of reading `OnOff` attribute on a switch (OnOff cluster) 122 | 123 | ```json 124 | { 125 | "message_id": "read", 126 | "command": "read_attribute", 127 | "args": { 128 | "node_id": 1, 129 | "attribute_path": "1/6/0" 130 | } 131 | } 132 | ``` 133 | 134 | **Write an attribute** 135 | 136 | Here is an example of writing `OnTime` attribute on a switch (OnOff cluster) 137 | 138 | ```json 139 | { 140 | "message_id": "write", 141 | "command": "write_attribute", 142 | "args": { 143 | "node_id": 1, 144 | "attribute_path": "1/6/16385", 145 | "value": 10 146 | } 147 | } 148 | ``` 149 | 150 | **Send a command** 151 | 152 | Here is an example of turning on a switch (OnOff cluster) 153 | 154 | ```json 155 | { 156 | "message_id": "example", 157 | "command": "device_command", 158 | "args": { 159 | "endpoint_id": 1, 160 | "node_id": 1, 161 | "payload": {}, 162 | "cluster_id": 6, 163 | "command_name": "On" 164 | } 165 | } 166 | ``` 167 | 168 | **Python script to send a command** 169 | 170 | Because we use the datamodels of the Matter SDK, this is a little bit more involved. 
171 | Here is an example of turning on a switch: 172 | 173 | ```python 174 | import json 175 | 176 | # Import the CHIP clusters 177 | from chip.clusters import Objects as clusters 178 | 179 | # Import the ability to turn objects into dictionaries, and vice-versa 180 | from matter_server.common.helpers.util import dataclass_from_dict,dataclass_to_dict 181 | 182 | command = clusters.OnOff.Commands.On() 183 | payload = dataclass_to_dict(command) 184 | 185 | 186 | message = { 187 | "message_id": "example", 188 | "command": "device_command", 189 | "args": { 190 | "endpoint_id": 1, 191 | "node_id": 1, 192 | "payload": payload, 193 | "cluster_id": command.cluster_id, 194 | "command_name": "On" 195 | } 196 | } 197 | 198 | print(json.dumps(message, indent=2)) 199 | ``` 200 | 201 | You can also provide parameters for the cluster commands. Here's how to change the brightness for example: 202 | 203 | ```python 204 | command = clusters.LevelControl.Commands.MoveToLevelWithOnOff( 205 | level=int(value), # provide a percentage 206 | transitionTime=0, # in seconds 207 | ) 208 | ``` 209 | -------------------------------------------------------------------------------- /main.py: -------------------------------------------------------------------------------- 1 | """Run the Matter Server.""" 2 | 3 | import sys 4 | 5 | from matter_server.server.__main__ import main 6 | 7 | if __name__ == "__main__": 8 | sys.exit(main()) 9 | -------------------------------------------------------------------------------- /matter_server/__init__.py: -------------------------------------------------------------------------------- 1 | """Provide the Matter Server including both a client and server.""" 2 | -------------------------------------------------------------------------------- /matter_server/client/__init__.py: -------------------------------------------------------------------------------- 1 | """Client for the MatterServer.""" 2 | 3 | from .client import MatterClient 4 | 5 | __all__ = ["MatterClient"] 6 | -------------------------------------------------------------------------------- /matter_server/client/exceptions.py: -------------------------------------------------------------------------------- 1 | """Client-specific Exceptions for matter-server library.""" 2 | 3 | from __future__ import annotations 4 | 5 | 6 | class MatterClientException(Exception): 7 | """Generic Matter exception.""" 8 | 9 | 10 | class TransportError(MatterClientException): 11 | """Exception raised to represent transport errors.""" 12 | 13 | def __init__(self, message: str, error: Exception | None = None) -> None: 14 | """Initialize a transport error.""" 15 | super().__init__(message) 16 | self.error = error 17 | 18 | 19 | class ConnectionClosed(TransportError): 20 | """Exception raised when the connection is closed.""" 21 | 22 | 23 | class CannotConnect(TransportError): 24 | """Exception raised when failed to connect the client.""" 25 | 26 | def __init__(self, error: Exception) -> None: 27 | """Initialize a cannot connect error.""" 28 | super().__init__(f"{error}", error) 29 | 30 | 31 | class ConnectionFailed(TransportError): 32 | """Exception raised when an established connection fails.""" 33 | 34 | def __init__(self, error: Exception | None = None) -> None: 35 | """Initialize a connection failed error.""" 36 | if error is None: 37 | super().__init__("Connection failed.") 38 | return 39 | super().__init__(f"{error}", error) 40 | 41 | 42 | class NotConnected(MatterClientException): 43 | """Exception raised when not connected to client.""" 44 | 
45 | 46 | class InvalidState(MatterClientException): 47 | """Exception raised when data gets into an invalid state.""" 48 | 49 | 50 | class InvalidMessage(MatterClientException): 51 | """Exception raised when an invalid message is received.""" 52 | 53 | 54 | class InvalidServerVersion(MatterClientException): 55 | """Exception raised when connected to a server with an incompatible version.""" 56 | 57 | 58 | class ServerVersionTooOld(InvalidServerVersion): 59 | """Exception raised when connected to a server which is too old to support this client.""" 60 | 61 | 62 | class ServerVersionTooNew(InvalidServerVersion): 63 | """Exception raised when connected to a server which is too new for this client.""" 64 | -------------------------------------------------------------------------------- /matter_server/client/models/__init__.py: -------------------------------------------------------------------------------- 1 | """Client-only models for the Python Matter Server library.""" 2 | -------------------------------------------------------------------------------- /matter_server/common/__init__.py: -------------------------------------------------------------------------------- 1 | """Provide common files for the Matter Server.""" 2 | -------------------------------------------------------------------------------- /matter_server/common/const.py: -------------------------------------------------------------------------------- 1 | """Constants that are shared between server and client.""" 2 | 3 | # schema version is used to determine compatibility between server and client 4 | # bump schema if we add new features and/or make other (breaking) changes 5 | SCHEMA_VERSION = 11 6 | 7 | 8 | VERBOSE_LOG_LEVEL = 5 9 | -------------------------------------------------------------------------------- /matter_server/common/errors.py: -------------------------------------------------------------------------------- 1 | """Matter Exceptions.""" 2 | 3 | from __future__ import annotations 4 | 5 | # mapping from error_code to Exception class 6 | ERROR_MAP: dict[int, type] = {} 7 | 8 | 9 | class MatterError(Exception): 10 | """Generic Matter exception.""" 11 | 12 | error_code = 0 13 | 14 | def __init_subclass__(cls, *args, **kwargs) -> None: # type: ignore[no-untyped-def] 15 | """Register a subclass.""" 16 | super().__init_subclass__(*args, **kwargs) 17 | ERROR_MAP[cls.error_code] = cls 18 | 19 | 20 | class UnknownError(MatterError): 21 | """Error raised when an unknown/invalid command is requested.""" 22 | 23 | error_code = 0  # to map all generic errors 24 | 25 | 26 | class NodeCommissionFailed(MatterError): 27 | """Error raised when commissioning of a device failed.""" 28 | 29 | error_code = 1 30 | 31 | 32 | class NodeInterviewFailed(MatterError): 33 | """Error raised when interview of a device failed.""" 34 | 35 | error_code = 2 36 | 37 | 38 | class NodeNotReady(MatterError): 39 | """Error raised when performing an action on a node that has not been fully added.""" 40 | 41 | error_code = 3 42 | 43 | 44 | class NodeNotResolving(MatterError): 45 | """Error raised when no CASE session could be established.""" 46 | 47 | error_code = 4 48 | 49 | 50 | class NodeNotExists(MatterError): 51 | """Error raised when performing an action on a node that does not exist.""" 52 | 53 | error_code = 5 54 | 55 | 56 | class VersionMismatch(MatterError): 57 | """Issue raised when SDK version mismatches.""" 58 | 59 | error_code = 6 60 | 61 | 62 | class SDKStackError(MatterError): 63 | """Generic SDK stack error.""" 64 | 65 | error_code = 7 66 | 67 | 68 | class 
InvalidArguments(MatterError): 69 | """Error raised when there are invalid arguments provided for a command.""" 70 | 71 | error_code = 8 72 | 73 | 74 | class InvalidCommand(MatterError): 75 | """Error raised when there an unknown/invalid command is requested.""" 76 | 77 | error_code = 9 78 | 79 | 80 | class UpdateCheckError(MatterError): 81 | """Error raised when there was an error during searching for updates.""" 82 | 83 | error_code = 10 84 | 85 | 86 | class UpdateError(MatterError): 87 | """Error raised when there was an error during applying updates.""" 88 | 89 | error_code = 11 90 | 91 | 92 | def exception_from_error_code(error_code: int) -> type[MatterError]: 93 | """Return correct Exception class from error_code.""" 94 | return ERROR_MAP.get(error_code, MatterError) 95 | -------------------------------------------------------------------------------- /matter_server/common/helpers/api.py: -------------------------------------------------------------------------------- 1 | """Several helpers for the WebSockets API.""" 2 | 3 | from __future__ import annotations 4 | 5 | from collections.abc import Callable, Coroutine 6 | from dataclasses import MISSING, dataclass 7 | import inspect 8 | from typing import Any, TypeVar, get_type_hints 9 | 10 | from matter_server.common.helpers.util import parse_value 11 | 12 | _F = TypeVar("_F", bound=Callable[..., Any]) 13 | 14 | 15 | @dataclass 16 | class APICommandHandler: 17 | """Model for an API command handler.""" 18 | 19 | command: str 20 | signature: inspect.Signature 21 | type_hints: dict[str, Any] 22 | target: Callable[..., Coroutine[Any, Any, Any]] 23 | 24 | @classmethod 25 | def parse( 26 | cls, command: str, func: Callable[..., Coroutine[Any, Any, Any]] 27 | ) -> "APICommandHandler": 28 | """Parse APICommandHandler by providing a function.""" 29 | return APICommandHandler( 30 | command=command, 31 | signature=inspect.signature(func), 32 | type_hints=get_type_hints(func), 33 | target=func, 34 | ) 35 | 36 | 37 | def api_command(command: str) -> Callable[[_F], _F]: 38 | """Decorate a function as API route/command.""" 39 | 40 | def decorate(func: _F) -> _F: 41 | func.api_cmd = command # type: ignore[attr-defined] 42 | return func 43 | 44 | return decorate 45 | 46 | 47 | def parse_arguments( 48 | func_sig: inspect.Signature, 49 | func_types: dict[str, Any], 50 | args: dict | None = None, 51 | strict: bool = False, 52 | ) -> dict[str, Any]: 53 | """Parse (and convert) incoming arguments to correct types.""" 54 | if args is None: 55 | args = {} 56 | final_args = {} 57 | # ignore extra args if not strict 58 | if strict: 59 | for key, value in args.items(): 60 | if key not in func_sig.parameters: 61 | raise KeyError(f"Invalid parameter: '{key}'") 62 | # parse arguments to correct type 63 | for name, param in func_sig.parameters.items(): 64 | value = args.get(name) 65 | if param.default is inspect.Parameter.empty: 66 | default = MISSING 67 | else: 68 | default = param.default 69 | final_args[name] = parse_value(name, value, func_types[name], default) 70 | return final_args 71 | -------------------------------------------------------------------------------- /matter_server/common/helpers/json.py: -------------------------------------------------------------------------------- 1 | """Helpers to work with (de)serializing of json.""" 2 | 3 | from base64 import b64encode 4 | from typing import Any 5 | 6 | from chip.clusters.Types import Nullable 7 | from chip.tlv import float32, uint 8 | import orjson 9 | 10 | JSON_ENCODE_EXCEPTIONS = (TypeError, 
ValueError) 11 | JSON_DECODE_EXCEPTIONS = (orjson.JSONDecodeError,) 12 | 13 | 14 | def json_encoder_default(obj: Any) -> Any: 15 | """Convert Special objects. 16 | 17 | Hand other objects to the original method. 18 | """ 19 | # pylint: disable=too-many-return-statements 20 | if getattr(obj, "do_not_serialize", None): 21 | return None 22 | if isinstance(obj, (set, tuple)): 23 | return list(obj) 24 | if isinstance(obj, float32): 25 | return float(obj) 26 | if isinstance(obj, uint): 27 | return int(obj) 28 | if isinstance(obj, Nullable): 29 | return None 30 | if isinstance(obj, bytes): 31 | return b64encode(obj).decode("utf-8") 32 | if isinstance(obj, Exception): 33 | return str(obj) 34 | if type(obj) is type: # pylint: disable=unidiomatic-typecheck 35 | return f"{obj.__module__}.{obj.__qualname__}" 36 | raise TypeError 37 | 38 | 39 | def json_dumps(data: Any) -> str: 40 | """Dump json string.""" 41 | return orjson.dumps( 42 | data, 43 | option=orjson.OPT_NON_STR_KEYS | orjson.OPT_INDENT_2, 44 | default=json_encoder_default, 45 | ).decode("utf-8") 46 | 47 | 48 | json_loads = orjson.loads 49 | -------------------------------------------------------------------------------- /matter_server/common/helpers/logger.py: -------------------------------------------------------------------------------- 1 | """Logger related helpers.""" 2 | 3 | import logging 4 | from typing import cast 5 | 6 | import coloredlogs 7 | from coloredlogs import ColoredFormatter 8 | 9 | 10 | class MatterFormatter(ColoredFormatter): # type: ignore[misc] 11 | """Custom formatter for Matter project.""" 12 | 13 | def __init__( 14 | self, 15 | fmt: str, 16 | node_fmt: str, 17 | datefmt: str, 18 | style: str = coloredlogs.DEFAULT_FORMAT_STYLE, 19 | level_styles: dict | None = None, 20 | field_styles: dict | None = None, 21 | ): 22 | """Initialize the Matter specific log formatter.""" 23 | super().__init__(fmt, datefmt, style, level_styles, field_styles) 24 | self._node_style = logging.PercentStyle(self.colorize_format(node_fmt, style)) 25 | 26 | def format(self, record: logging.LogRecord) -> str: 27 | """Format the log record.""" 28 | original_style = self._style # type: ignore[has-type] 29 | if hasattr(record, "node"): 30 | self._style = self._node_style 31 | result = super().format(record) 32 | self._style = original_style 33 | return cast(str, result) 34 | 35 | 36 | class MatterNodeFilter(logging.Filter): 37 | """Filter for Matter project to filter by node.""" 38 | 39 | def __init__(self, node: set[int], name: str = ""): 40 | """Initialize the filter.""" 41 | super().__init__(name) 42 | self.node = node 43 | 44 | def filter(self, record: logging.LogRecord) -> bool: 45 | """Filter the log record.""" 46 | if not hasattr(record, "node"): 47 | return True 48 | 49 | # Always display warnings and above 50 | if record.levelno >= logging.WARNING: 51 | return True 52 | return record.node in self.node 53 | -------------------------------------------------------------------------------- /matter_server/py.typed: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/home-assistant-libs/python-matter-server/9a006602b966507ae5cf72397530a33867835e92/matter_server/py.typed -------------------------------------------------------------------------------- /matter_server/server/__init__.py: -------------------------------------------------------------------------------- 1 | """Implementation of a Websocket-based Matter proxy (using CHIP SDK).""" 2 | 3 | from .server import MatterServer 4 | 
5 | __all__ = ["MatterServer"] 6 | -------------------------------------------------------------------------------- /matter_server/server/const.py: -------------------------------------------------------------------------------- 1 | """Server-only constants for the Python Matter Server.""" 2 | 3 | import pathlib 4 | from typing import Final 5 | 6 | # The minimum schema version (of a client) the server can support 7 | MIN_SCHEMA_VERSION = 9 8 | 9 | # schema version of our data model 10 | # only bump if the format of the data in MatterNodeData changed 11 | # and a full re-interview is mandatory 12 | DATA_MODEL_SCHEMA_VERSION = 6 13 | 14 | # Keep default location inherited from early version of the Python 15 | # bindings. 16 | DEFAULT_PAA_ROOT_CERTS_DIR: Final[pathlib.Path] = ( 17 | pathlib.Path(__file__) 18 | .parent.resolve() 19 | .parent.resolve() 20 | .parent.resolve() 21 | .joinpath("credentials/development/paa-root-certs") 22 | ) 23 | 24 | DEFAULT_OTA_PROVIDER_DIR: Final[pathlib.Path] = pathlib.Path().cwd().joinpath("updates") 25 | -------------------------------------------------------------------------------- /matter_server/server/helpers/__init__.py: -------------------------------------------------------------------------------- 1 | """Helpers/utils for the Matter Server.""" 2 | 3 | DCL_PRODUCTION_URL = "https://on.dcl.csa-iot.org" 4 | DCL_TEST_URL = "https://on.test-net.dcl.csa-iot.org" 5 | -------------------------------------------------------------------------------- /matter_server/server/helpers/attributes.py: -------------------------------------------------------------------------------- 1 | """Helpers to manage Cluster attributes.""" 2 | 3 | from typing import Any 4 | 5 | from ...common.helpers.util import create_attribute_path 6 | 7 | 8 | def parse_attributes_from_read_result( 9 | raw_tlv_attributes: dict[int, dict[int, dict[int, Any]]], 10 | ) -> dict[str, Any]: 11 | """Parse attributes from ReadResult's TLV Attributes.""" 12 | result = {} 13 | # prefer raw tlv attributes as it requires less parsing back and forth 14 | for endpoint_id, clusters in raw_tlv_attributes.items(): 15 | for cluster_id, attribute in clusters.items(): 16 | for attribute_id, attr_value in attribute.items(): 17 | # we are only interested in the raw values and let the client 18 | # match back from the id's to the correct cluster/attribute classes 19 | # attributes are stored in form of AttributePath: 20 | # ENDPOINT/CLUSTER_ID/ATTRIBUTE_ID 21 | attribute_path = create_attribute_path( 22 | endpoint_id, cluster_id, attribute_id 23 | ) 24 | result[attribute_path] = attr_value 25 | return result 26 | -------------------------------------------------------------------------------- /matter_server/server/helpers/custom_web_runner.py: -------------------------------------------------------------------------------- 1 | """Multiple host capable aiohttp Site.""" 2 | 3 | from __future__ import annotations 4 | 5 | import asyncio 6 | from typing import TYPE_CHECKING 7 | 8 | from aiohttp import web 9 | from yarl import URL 10 | 11 | if TYPE_CHECKING: 12 | from ssl import SSLContext 13 | 14 | 15 | class MultiHostTCPSite(web.BaseSite): 16 | """Multiple host capable aiohttp Site. 17 | 18 | Vanilla TCPSite accepts only str as host. However, the underlying asyncio's 19 | create_server() implementation does take a list of strings to bind to multiple 20 | host IP's. To support multiple server_host entries (e.g. to enable dual-stack 21 | explicitly), we would like to pass an array of strings. 
22 | """ 23 | 24 | __slots__ = ("_host", "_hosturl", "_port", "_reuse_address", "_reuse_port") 25 | 26 | def __init__( 27 | self, 28 | runner: web.BaseRunner, 29 | host: None | str | list[str], 30 | port: int, 31 | *, 32 | ssl_context: SSLContext | None = None, 33 | backlog: int = 128, 34 | reuse_address: bool | None = None, 35 | reuse_port: bool | None = None, 36 | ) -> None: 37 | """Initialize HomeAssistantTCPSite.""" 38 | super().__init__( 39 | runner, 40 | ssl_context=ssl_context, 41 | backlog=backlog, 42 | ) 43 | self._host = host 44 | self._port = port 45 | self._reuse_address = reuse_address 46 | self._reuse_port = reuse_port 47 | 48 | @property 49 | def name(self) -> str: 50 | """Return server URL.""" 51 | scheme = "https" if self._ssl_context else "http" 52 | host = self._host[0] if isinstance(self._host, list) else "0.0.0.0" 53 | return str(URL.build(scheme=scheme, host=host, port=self._port)) 54 | 55 | async def start(self) -> None: 56 | """Start server.""" 57 | await super().start() 58 | loop = asyncio.get_running_loop() 59 | server = self._runner.server 60 | assert server is not None 61 | self._server = await loop.create_server( 62 | server, 63 | self._host, 64 | self._port, 65 | ssl=self._ssl_context, 66 | backlog=self._backlog, 67 | reuse_address=self._reuse_address, 68 | reuse_port=self._reuse_port, 69 | ) 70 | -------------------------------------------------------------------------------- /matter_server/server/helpers/utils.py: -------------------------------------------------------------------------------- 1 | """Utils for Matter server.""" 2 | 3 | import asyncio 4 | from contextlib import suppress 5 | import platform 6 | 7 | import async_timeout 8 | 9 | PLATFORM_MAC = platform.system() == "Darwin" 10 | 11 | 12 | async def ping_ip(ip_address: str, timeout: int = 2, attempts: int = 1) -> bool: # noqa: ASYNC109 timeout parameter required for native ping timeout 13 | """Ping given (IPv4 or IPv6) IP-address.""" 14 | is_ipv6 = ":" in ip_address 15 | if is_ipv6 and PLATFORM_MAC: 16 | # macos does not have support for -W (timeout) on ping6 ?! 
17 | cmd = f"ping6 -c 1 {ip_address}" 18 | elif is_ipv6: 19 | cmd = f"ping -6 -c 1 -W {timeout} {ip_address}" 20 | else: 21 | cmd = f"ping -c 1 -W {timeout} {ip_address}" 22 | while attempts: 23 | attempts -= 1 24 | try: 25 | # we add an additional timeout here as safeguard and to account for the fact 26 | # that macos does not seem to have timeout on ping6 27 | async with async_timeout.timeout(timeout + 2): 28 | success = (await check_output(cmd))[0] == 0 29 | if success or not attempts: 30 | return success 31 | except asyncio.TimeoutError: 32 | pass 33 | # sleep between attempts 34 | await asyncio.sleep(10) 35 | return False 36 | 37 | 38 | async def check_output(shell_cmd: str) -> tuple[int | None, bytes]: 39 | """Run shell subprocess and return output.""" 40 | proc = await asyncio.create_subprocess_shell( 41 | shell_cmd, 42 | stderr=asyncio.subprocess.STDOUT, 43 | stdout=asyncio.subprocess.PIPE, 44 | ) 45 | try: 46 | stdout, _ = await proc.communicate() 47 | except asyncio.CancelledError: 48 | with suppress(ProcessLookupError): 49 | proc.terminate() 50 | await proc.communicate() 51 | raise 52 | return (proc.returncode, stdout) 53 | -------------------------------------------------------------------------------- /matter_server/server/ota/__init__.py: -------------------------------------------------------------------------------- 1 | """OTA implementation for the Matter Server.""" 2 | 3 | import asyncio 4 | import json 5 | from logging import LoggerAdapter 6 | from pathlib import Path 7 | 8 | from matter_server.common.models import UpdateSource 9 | from matter_server.server.ota import dcl 10 | 11 | MatterProduct = tuple[int, int] 12 | 13 | _local_updates: dict[MatterProduct, dict[int | str, dict]] = {} 14 | 15 | 16 | async def load_local_updates(ota_provider_dir: Path) -> None: 17 | """Load updates from locally stored json files.""" 18 | 19 | def _load_update(ota_provider_dir: Path) -> None: 20 | if not ota_provider_dir.exists(): 21 | return 22 | for update_file in ota_provider_dir.glob("*.json"): 23 | with open(update_file) as f: 24 | update = json.load(f) 25 | model_version = update["modelVersion"] 26 | model_key = (model_version["vid"], model_version["pid"]) 27 | update_dict = _local_updates.get(model_key, {}) 28 | # Store by string or integer, this allows update by both 29 | update_dict[model_version["softwareVersion"]] = model_version 30 | update_dict[model_version["softwareVersionString"]] = model_version 31 | _local_updates[model_key] = update_dict 32 | 33 | await asyncio.get_running_loop().run_in_executor( 34 | None, _load_update, ota_provider_dir 35 | ) 36 | 37 | 38 | async def check_for_update( 39 | logger: LoggerAdapter, 40 | vid: int, 41 | pid: int, 42 | current_software_version: int, 43 | requested_software_version: int | str | None = None, 44 | ) -> tuple[UpdateSource, dict] | tuple[None, None]: 45 | """Check for software updates.""" 46 | if local_updates := _local_updates.get((vid, pid)): 47 | logger.info("Local updates found for this device") 48 | if requested_software_version is None: 49 | # Use integer version to reliably determine absolute latest version 50 | versions = filter( 51 | lambda version: isinstance(version, int), local_updates.keys() 52 | ) 53 | return UpdateSource.LOCAL, local_updates[max(versions)] 54 | if requested_software_version in local_updates: 55 | return UpdateSource.LOCAL, local_updates[requested_software_version] 56 | 57 | if dcl_update := await dcl.check_for_update( 58 | logger, vid, pid, current_software_version, requested_software_version 59 | 
): 60 | return UpdateSource.MAIN_NET_DCL, dcl_update 61 | return None, None 62 | -------------------------------------------------------------------------------- /matter_server/server/ota/dcl.py: -------------------------------------------------------------------------------- 1 | """Handle OTA software version endpoints of the DCL.""" 2 | 3 | from http import HTTPStatus 4 | import logging 5 | from typing import Any, cast 6 | 7 | from aiohttp import ClientError, ClientSession 8 | 9 | from matter_server.common.errors import UpdateCheckError 10 | from matter_server.server.helpers import DCL_PRODUCTION_URL 11 | 12 | 13 | async def _get_software_versions(session: ClientSession, vid: int, pid: int) -> Any: 14 | """Get the list of available software versions for a product from the DCL.""" 15 | # fetch the list of software versions for this vendor/product 16 | async with session.get(f"/dcl/model/versions/{vid}/{pid}") as response: 17 | if response.status == HTTPStatus.NOT_FOUND: 18 | return None 19 | response.raise_for_status() 20 | return await response.json() 21 | 22 | 23 | async def _get_software_version( 24 | session: ClientSession, vid: int, pid: int, software_version: int 25 | ) -> Any: 26 | """Get the details of a specific software version from the DCL.""" 27 | # fetch the model details for the given software version 28 | async with session.get( 29 | f"/dcl/model/versions/{vid}/{pid}/{software_version}" 30 | ) as response: 31 | response.raise_for_status() 32 | return await response.json() 33 | 34 | 35 | async def _check_update_version( 36 | session: ClientSession, 37 | vid: int, 38 | pid: int, 39 | current_software_version: int, 40 | requested_software_version: int, 41 | requested_software_version_string: str | None = None, 42 | ) -> None | dict: 43 | version_res: dict = await _get_software_version( 44 | session, vid, pid, requested_software_version 45 | ) 46 | if not isinstance(version_res, dict): 47 | raise TypeError("Unexpected DCL response.") 48 | 49 | if "modelVersion" not in version_res: 50 | raise ValueError("Unexpected DCL response.") 51 | 52 | version_candidate: dict = cast(dict, version_res["modelVersion"]) 53 | 54 | # If we are looking for a specific version by string, check if it matches 55 | if ( 56 | requested_software_version_string is not None 57 | and version_candidate["softwareVersionString"] 58 | != requested_software_version_string 59 | ): 60 | return None 61 | 62 | if version_candidate["softwareVersionValid"] is False: 63 | return None 64 | 65 | if version_candidate["otaUrl"].strip() == "": 66 | return None 67 | 68 | # Check minApplicableSoftwareVersion/maxApplicableSoftwareVersion 69 | min_sw_version = version_candidate["minApplicableSoftwareVersion"] 70 | max_sw_version = version_candidate["maxApplicableSoftwareVersion"] 71 | if ( 72 | current_software_version < min_sw_version 73 | or current_software_version > max_sw_version 74 | ): 75 | return None 76 | 77 | return version_candidate 78 | 79 | 80 | async def check_for_update( 81 | logger: logging.LoggerAdapter, 82 | vid: int, 83 | pid: int, 84 | current_software_version: int, 85 | requested_software_version: int | str | None = None, 86 | ) -> None | dict: 87 | """Check if there is a software update available on the DCL.""" 88 | try: 89 | async with ClientSession( 90 | base_url=DCL_PRODUCTION_URL, raise_for_status=False 91 | ) as session: 92 | # If a specific version as integer is requested, just fetch it (and hope it exists) 93 | if isinstance(requested_software_version, int): 94 | return await _check_update_version( 95 | session, 96 | vid, 97 | pid, 98 | 
current_software_version, 99 | requested_software_version, 100 | ) 101 | 102 | # Get all versions and check each one of them. 103 | versions = await _get_software_versions(session, vid, pid) 104 | if versions is None: 105 | logger.info( 106 | "There is no update information for this device on the DCL." 107 | ) 108 | return None 109 | 110 | all_software_versions: list[int] = versions["modelVersions"][ 111 | "softwareVersions" 112 | ] 113 | newer_software_versions = [ 114 | version 115 | for version in all_software_versions 116 | if version > current_software_version 117 | ] 118 | 119 | # Check if there is a newer software version available, no downgrade possible 120 | if not newer_software_versions: 121 | return None 122 | 123 | # Check if latest firmware is applicable, and backtrack from there 124 | for version in sorted(newer_software_versions, reverse=True): 125 | if version_candidate := await _check_update_version( 126 | session, 127 | vid, 128 | pid, 129 | current_software_version, 130 | version, 131 | requested_software_version, 132 | ): 133 | return version_candidate 134 | logger.debug("Software version %d not applicable.", version) 135 | return None 136 | 137 | except (ClientError, TimeoutError) as err: 138 | raise UpdateCheckError( 139 | f"Fetching software versions from DCL for device with vendor id {vid} product id {pid} failed." 140 | ) from err 141 | -------------------------------------------------------------------------------- /matter_server/server/vendor_info.py: -------------------------------------------------------------------------------- 1 | """Fetches vendor info from the CSA.""" 2 | 3 | from __future__ import annotations 4 | 5 | import logging 6 | from typing import TYPE_CHECKING 7 | 8 | from aiohttp import ClientError, ClientSession 9 | 10 | from ..common.helpers.api import api_command 11 | from ..common.helpers.util import dataclass_from_dict, dataclass_to_dict 12 | from ..common.models import APICommand, VendorInfo as VendorInfoModel 13 | 14 | if TYPE_CHECKING: 15 | from .server import MatterServer 16 | 17 | LOGGER = logging.getLogger(__name__) 18 | PRODUCTION_URL = "https://on.dcl.csa-iot.org" 19 | DATA_KEY_VENDOR_INFO = "vendor_info" 20 | 21 | 22 | TEST_VENDOR = VendorInfoModel( 23 | vendor_id=65521, 24 | vendor_name="Test", 25 | company_legal_name="Test", 26 | company_preferred_name="Test", 27 | vendor_landing_page_url="https://csa-iot.org", 28 | creator="", 29 | ) 30 | NABUCASA_VENDOR = VendorInfoModel( 31 | vendor_id=4939, 32 | vendor_name="Nabu Casa", 33 | company_legal_name="Nabu Casa Inc.", 34 | company_preferred_name="Nabu Casa", 35 | vendor_landing_page_url="https://nabucasa.com/", 36 | creator="", 37 | ) 38 | 39 | 40 | class VendorInfo: 41 | """Fetches vendor info from the CSA and handles api calls to get it.""" 42 | 43 | def __init__(self, server: MatterServer): 44 | """Initialize the vendor info.""" 45 | self._data: dict[int, VendorInfoModel] = { 46 | # add test vendor ID 47 | TEST_VENDOR.vendor_id: TEST_VENDOR, 48 | # add nabucasa vendor while we're not yet certified 49 | NABUCASA_VENDOR.vendor_id: NABUCASA_VENDOR, 50 | } 51 | self._server = server 52 | 53 | async def start(self) -> None: 54 | """Async initialize the vendor info.""" 55 | self._load_vendors() 56 | await self._fetch_vendors() 57 | self._save_vendors() 58 | 59 | def _load_vendors(self) -> None: 60 | """Load vendor info from storage.""" 61 | LOGGER.info("Loading vendor info from storage.") 62 | vendor_count = 0 63 | data = self._server.storage.get(DATA_KEY_VENDOR_INFO, {}) 64 | for 
vendor_id, vendor_info in data.items(): 65 | self._data[vendor_id] = dataclass_from_dict(VendorInfoModel, vendor_info) 66 | vendor_count += 1 67 | LOGGER.info("Loaded %s vendors from storage.", vendor_count) 68 | 69 | async def _fetch_vendors(self) -> None: 70 | """Fetch the vendor names from the CSA.""" 71 | LOGGER.info("Fetching the latest vendor info from DCL.") 72 | vendors: dict[int, VendorInfoModel] = {} 73 | try: 74 | async with ClientSession(raise_for_status=True) as session: 75 | page_token: str | None = "" 76 | while page_token is not None: 77 | async with session.get( 78 | f"{PRODUCTION_URL}/dcl/vendorinfo/vendors", 79 | params={"pagination.key": page_token}, 80 | ) as response: 81 | data = await response.json() 82 | for vendorinfo in data["vendorInfo"]: 83 | vendors[vendorinfo["vendorID"]] = VendorInfoModel( 84 | vendor_id=vendorinfo["vendorID"], 85 | vendor_name=vendorinfo["vendorName"], 86 | company_legal_name=vendorinfo["companyLegalName"], 87 | company_preferred_name=vendorinfo[ 88 | "companyPreferredName" 89 | ], 90 | vendor_landing_page_url=vendorinfo[ 91 | "vendorLandingPageURL" 92 | ], 93 | creator=vendorinfo["creator"], 94 | ) 95 | page_token = data.get("pagination", {}).get("next_key", None) 96 | except ClientError as err: 97 | LOGGER.error("Unable to fetch vendor info from DCL: %s", err) 98 | else: 99 | LOGGER.info("Fetched %s vendors from DCL.", len(vendors)) 100 | 101 | self._data.update(vendors) 102 | 103 | def _save_vendors(self) -> None: 104 | """Save vendor info to storage.""" 105 | LOGGER.info("Saving vendor info to storage.") 106 | self._server.storage.set( 107 | DATA_KEY_VENDOR_INFO, 108 | { 109 | vendor_id: dataclass_to_dict(vendor_info) 110 | for vendor_id, vendor_info in self._data.items() 111 | }, 112 | ) 113 | 114 | @api_command(APICommand.GET_VENDOR_NAMES) 115 | async def get_vendor_names( 116 | self, filter_vendors: list[int] | None = None 117 | ) -> dict[int, str]: 118 | """Get a map of vendor ids to vendor names.""" 119 | if filter_vendors: 120 | vendors: dict[int, str] = {} 121 | for vendor_id in filter_vendors: 122 | if vendor_id in filter_vendors and vendor_id in self._data: 123 | vendors[vendor_id] = self._data[vendor_id].vendor_name 124 | return vendors 125 | 126 | return { 127 | vendor_id: vendor_info.vendor_name 128 | for vendor_id, vendor_info in self._data.items() 129 | } 130 | -------------------------------------------------------------------------------- /scripts/beautify_diagnostics.py: -------------------------------------------------------------------------------- 1 | """Script to beautify diagnostics output.""" 2 | 3 | import json 4 | import sys 5 | 6 | from chip.clusters.ClusterObjects import ALL_ATTRIBUTES, ALL_CLUSTERS 7 | import yaml 8 | 9 | from matter_server.client.models.device_types import ALL_TYPES 10 | 11 | 12 | def main(): 13 | """Run the script.""" 14 | if len(sys.argv) != 2: 15 | print("Usage: {} ".format(sys.argv[0])) 16 | sys.exit(1) 17 | 18 | with open(sys.argv[1]) as f: 19 | data = json.load(f) 20 | 21 | if "node" in data["data"]: 22 | nodes = [data["data"]["node"]] 23 | else: 24 | nodes = data["data"]["server"]["nodes"] 25 | 26 | for node in nodes: 27 | process_node(node) 28 | 29 | yaml.safe_dump(data, sys.stdout, indent=2, sort_keys=False) 30 | 31 | 32 | def process_node(node): 33 | """Process a node.""" 34 | endpoints = {} 35 | cluster_warn = set() 36 | 37 | for attr_path, value in node["attributes"].items(): 38 | endpoint_id, cluster_id, attr_id = attr_path.split("/") 39 | cluster_id = int(cluster_id) 40 | 
endpoint_id = int(endpoint_id) 41 | attr_id = int(attr_id) 42 | 43 | if cluster_id in ALL_CLUSTERS: 44 | cluster_name = f"{ALL_CLUSTERS[cluster_id].__name__} ({cluster_id} / 0x{cluster_id:04x})" 45 | else: 46 | if cluster_id not in cluster_warn: 47 | print("Unknown cluster ID: {}".format(cluster_id)) 48 | cluster_warn.add(cluster_id) 49 | cluster_name = f"{cluster_id} (unknown)" 50 | 51 | if cluster_id in ALL_ATTRIBUTES and attr_id in ALL_ATTRIBUTES[cluster_id]: 52 | attr_name = f"{ALL_ATTRIBUTES[cluster_id][attr_id].__name__} ({attr_id} / 0x{attr_id:04x})" 53 | else: 54 | if cluster_id not in cluster_warn: 55 | print( 56 | "Unknown attribute ID: {} in cluster {} ({})".format( 57 | attr_id, cluster_name, cluster_id 58 | ) 59 | ) 60 | attr_name = f"{attr_id} (unknown)" 61 | 62 | if endpoint_id not in endpoints: 63 | endpoints[endpoint_id] = {} 64 | 65 | if cluster_name not in endpoints[endpoint_id]: 66 | endpoints[endpoint_id][cluster_name] = {} 67 | 68 | endpoints[endpoint_id][cluster_name][attr_name] = value 69 | 70 | # Augment device types 71 | for endpoint in endpoints.values(): 72 | if not (descriptor_cls := endpoint.get("Descriptor (29 / 0x001d)")): 73 | continue 74 | 75 | if not (device_types := descriptor_cls.get("DeviceTypeList (0 / 0x0000)")): 76 | continue 77 | 78 | for device_type in device_types: 79 | device_type_id = device_type["deviceType"] 80 | if device_type_id in ALL_TYPES: 81 | device_type_name = ALL_TYPES[device_type_id].__name__ 82 | else: 83 | device_type_name = f"{device_type} (unknown)" 84 | 85 | device_type["name"] = device_type_name 86 | device_type["hex"] = f"0x{device_type_id:04x}" 87 | 88 | node["attributes"] = { 89 | f"Endpoint {endpoint_id}": clusters 90 | for endpoint_id, clusters in endpoints.items() 91 | } 92 | 93 | 94 | main() 95 | -------------------------------------------------------------------------------- /scripts/example.py: -------------------------------------------------------------------------------- 1 | """Example script to test the Matter server and client.""" 2 | 3 | import argparse 4 | import asyncio 5 | import logging 6 | import os 7 | from pathlib import Path 8 | 9 | import aiohttp 10 | from aiorun import run 11 | import coloredlogs 12 | 13 | from matter_server.client.client import MatterClient 14 | from matter_server.server.server import MatterServer 15 | 16 | logging.basicConfig(level=logging.DEBUG) 17 | _LOGGER = logging.getLogger(__name__) 18 | 19 | DEFAULT_VENDOR_ID = 0xFFF1 20 | DEFAULT_FABRIC_ID = 1 21 | DEFAULT_PORT = 5580 22 | DEFAULT_URL = f"http://127.0.0.1:{DEFAULT_PORT}/ws" 23 | DEFAULT_STORAGE_PATH = os.path.join(Path.home(), ".matter_server") 24 | 25 | 26 | # Get parsed passed in arguments. 27 | parser = argparse.ArgumentParser(description="Matter Server Example.") 28 | parser.add_argument( 29 | "--storage-path", 30 | type=str, 31 | default=DEFAULT_STORAGE_PATH, 32 | help=f"Storage path to keep persistent data, defaults to {DEFAULT_STORAGE_PATH}", 33 | ) 34 | parser.add_argument( 35 | "--port", 36 | type=int, 37 | default=DEFAULT_PORT, 38 | help=f"TCP Port on which to run the Matter WebSockets Server, defaults to {DEFAULT_PORT}", 39 | ) 40 | parser.add_argument( 41 | "--log-level", 42 | type=str, 43 | default="info", 44 | help="Provide logging level. 
Example --log-level debug, default=info, possible=(critical, error, warning, info, debug)", 45 | ) 46 | parser.add_argument( 47 | "--primary-interface", 48 | type=str, 49 | default=None, 50 | help="Primary network interface for link-local addresses (optional).", 51 | ) 52 | 53 | args = parser.parse_args() 54 | 55 | 56 | if __name__ == "__main__": 57 | # configure logging 58 | logging.basicConfig(level=args.log_level.upper()) 59 | coloredlogs.install(level=args.log_level.upper()) 60 | 61 | # make sure storage path exists 62 | if not os.path.isdir(args.storage_path): 63 | os.mkdir(args.storage_path) 64 | 65 | # Init server 66 | server = MatterServer( 67 | args.storage_path, 68 | DEFAULT_VENDOR_ID, 69 | DEFAULT_FABRIC_ID, 70 | int(args.port), 71 | args.primary_interface, 72 | ) 73 | 74 | async def run_matter(): 75 | """Run the Matter server and client.""" 76 | # start Matter Server 77 | await server.start() 78 | 79 | # run the client 80 | url = f"http://127.0.0.1:{args.port}/ws" 81 | async with aiohttp.ClientSession() as session: 82 | async with MatterClient(url, session) as client: 83 | # start listening 84 | asyncio.create_task(client.start_listening()) 85 | # allow the client to initialize 86 | await asyncio.sleep(10) 87 | # dump full node info on random (available) node 88 | for node in client.get_nodes(): 89 | if not node.available: 90 | continue 91 | print() 92 | print(node) 93 | res = await client.node_diagnostics(node.node_id) 94 | print(res) 95 | print() 96 | break 97 | 98 | async def handle_stop(loop: asyncio.AbstractEventLoop): 99 | """Handle server stop.""" 100 | await server.stop() 101 | 102 | # run the server 103 | run(run_matter(), shutdown_callback=handle_stop) 104 | -------------------------------------------------------------------------------- /scripts/generate_descriptions.py: -------------------------------------------------------------------------------- 1 | """Generate descriptions.json for the dashboard.""" 2 | 3 | # NOTE: we need to do a wildcard import from models.clusters to include all 4 | # custom clusters in the output defined in that file 5 | # pylint: disable=wildcard-import, unused-wildcard-import, invalid-name 6 | import pathlib 7 | from typing import Any, Final 8 | 9 | from chip.clusters.ClusterObjects import ( 10 | ALL_ATTRIBUTES as SDK_ALL_ATTRIBUTES, 11 | ALL_CLUSTERS as SDK_ALL_CLUSTERS, 12 | Cluster, 13 | ClusterAttributeDescriptor, 14 | ) 15 | 16 | from matter_server.client.models.device_types import ( 17 | ALL_TYPES as DEVICE_TYPES, 18 | DeviceType, 19 | ) 20 | from matter_server.common.custom_clusters import ( 21 | ALL_CUSTOM_ATTRIBUTES, 22 | ALL_CUSTOM_CLUSTERS, 23 | ) 24 | from matter_server.common.helpers.json import json_dumps 25 | 26 | OUTPUT_FILE: Final[pathlib.Path] = ( 27 | pathlib.Path(__file__) 28 | .parent.resolve() 29 | .parent.resolve() 30 | .joinpath("dashboard/src/client/models/descriptions.ts") 31 | ) 32 | 33 | ALL_CLUSTERS = {**SDK_ALL_CLUSTERS, **ALL_CUSTOM_CLUSTERS} 34 | ALL_ATTRIBUTES = SDK_ALL_ATTRIBUTES 35 | for cluster_id, attributes in ALL_CUSTOM_ATTRIBUTES.items(): 36 | ALL_ATTRIBUTES[cluster_id] = attributes 37 | 38 | 39 | def generate_device_type_description(device_type: DeviceType) -> dict[str, Any]: 40 | """Generate a (human readable) description for a device type as dict.""" 41 | return { 42 | "id": device_type.device_type, 43 | "label": (device_type.__doc__ or device_type.__name__).replace(".", ""), 44 | "clusters": {x.id for x in device_type.clusters}, 45 | } 46 | 47 | 48 | def create_pretty_name_for_type(_type: type) 
-> str: 49 | """Create pretty name for Python type annotation.""" 50 | return ( 51 | str(_type) 52 | .replace("<", "") 53 | .replace(">", "") 54 | .replace("typing.", "") 55 | .replace("chip.tlv.", "") 56 | .replace("chip.clusters.Types.", "") 57 | .replace("chip.clusters.Objects.", "") 58 | .replace("class", "") 59 | .replace("'", "") 60 | .strip() 61 | ) 62 | 63 | 64 | def generate_attribute_description( 65 | attribute: ClusterAttributeDescriptor, 66 | ) -> dict[str, Any]: 67 | """Generate a (human readable) description for a ClusterAttribute as dict.""" 68 | return { 69 | "id": attribute.attribute_id, 70 | "cluster_id": attribute.cluster_id, 71 | "label": attribute.attribute_type.Label or attribute.__name__, 72 | "type": create_pretty_name_for_type(attribute.attribute_type.Type), 73 | } 74 | 75 | 76 | def generate_cluster_description(cluster: Cluster) -> dict[str, Any]: 77 | """Generate a (human readable) description for a Cluster as dict.""" 78 | return { 79 | "id": cluster.id, 80 | "label": cluster.__name__, 81 | "attributes": { 82 | attribute_id: generate_attribute_description(attribute) 83 | for attribute_id, attribute in ALL_ATTRIBUTES[cluster.id].items() 84 | }, 85 | } 86 | 87 | 88 | device_types = { 89 | dev_type_id: generate_device_type_description(dev_type) 90 | for dev_type_id, dev_type in DEVICE_TYPES.items() 91 | } 92 | 93 | clusters = { 94 | cluster_id: generate_cluster_description(cluster) 95 | for cluster_id, cluster in ALL_CLUSTERS.items() 96 | } 97 | 98 | output = """ 99 | /* Descriptions for SDK Objects. This file is auto generated, do not edit. */ 100 | 101 | export interface DeviceType { 102 | id: number; 103 | label: string; 104 | clusters: number[]; 105 | } 106 | 107 | export interface ClusterAttributeDescription { 108 | id: number; 109 | cluster_id: number; 110 | label: string; 111 | type: string; 112 | } 113 | 114 | export interface ClusterDescription { 115 | id: number; 116 | label: string; 117 | attributes: { [attribute_id: string]: ClusterAttributeDescription } 118 | } 119 | 120 | """ 121 | output += f""" 122 | export const device_types: Record = {json_dumps(device_types)} 123 | 124 | export const clusters: Record = {json_dumps(clusters)} 125 | 126 | """ 127 | 128 | OUTPUT_FILE.write_text(output, encoding="utf-8") 129 | -------------------------------------------------------------------------------- /scripts/generate_devices.py: -------------------------------------------------------------------------------- 1 | """Generate device types from matter-devices.xml.""" 2 | 3 | import pathlib 4 | import urllib.request 5 | 6 | import black 7 | import xmltodict 8 | 9 | REPO_ROOT = pathlib.Path(__file__).parent.parent 10 | 11 | DEVICE_XML = ( 12 | "https://raw.githubusercontent.com/project-chip/connectedhomeip" 13 | "/master/src/app/zap-templates/zcl/data-model/chip/matter-devices.xml" 14 | ) 15 | 16 | OUTPUT_PYTHON = REPO_ROOT / "matter_server/client/models/device_types.py" 17 | 18 | 19 | def gen_cls_name(name: str): 20 | """Generate a class name from a cluster name.""" 21 | # Convert uppercase words to title case 22 | name = "".join( 23 | # Don't mess up wifi name 24 | part if part == "WiFi" else part[0].upper() + part[1:].lower() 25 | for part in name.split(" ") 26 | ) 27 | 28 | new_name = [] 29 | 30 | next_upper = False 31 | for char in name: 32 | if char in ("-", "/"): 33 | next_upper = True 34 | continue 35 | elif char in ("."): 36 | continue 37 | elif next_upper: 38 | char = char.upper() 39 | next_upper = False 40 | 41 | new_name.append(char) 42 | 43 | return 
"".join(new_name) 44 | 45 | 46 | def main(): 47 | """Generate device types from matter-devices.xml.""" 48 | with urllib.request.urlopen(DEVICE_XML) as response: # noqa: S310 49 | xml_data = response.read().decode("utf-8") 50 | data = xmltodict.parse(xml_data) 51 | output = [ 52 | ''' 53 | """ 54 | Definitions for all known Device types. 55 | 56 | This file is auto generated from `zcl/data-model/chip/matter-devices.xml` 57 | Do not override! 58 | """ 59 | from __future__ import annotations 60 | 61 | import typing 62 | 63 | from chip.clusters import Objects as all_clusters 64 | 65 | ALL_TYPES: dict[int, type["DeviceType"]] = {} 66 | 67 | 68 | 69 | class DeviceType: 70 | """Base class for Matter device types.""" 71 | 72 | device_type: int 73 | clusters: set[type[all_clusters.Cluster]] 74 | 75 | def __init_subclass__(cls, *, device_type: int, **kwargs: typing.Any) -> None: 76 | """Register a subclass.""" 77 | super().__init_subclass__(**kwargs) 78 | cls.device_type = device_type 79 | ALL_TYPES[device_type] = cls 80 | 81 | def __hash__(self) -> int: 82 | """Return unique hash for this object.""" 83 | return self.device_type 84 | 85 | ''' 86 | ] 87 | 88 | for device in data["configurator"]["deviceType"]: 89 | name = device["typeName"] 90 | 91 | print(name, device["deviceId"]["#text"]) 92 | 93 | clusters = device["clusters"]["include"] 94 | if not isinstance(clusters, list): 95 | clusters = [clusters] 96 | 97 | for cluster in clusters: 98 | print(cluster["@cluster"]) 99 | 100 | print() 101 | 102 | if not clusters: 103 | output_clusters = "set()" 104 | else: 105 | output_clusters = ( 106 | "{" 107 | + ",".join( 108 | f"all_clusters.{gen_cls_name(cluster['@cluster'])}" 109 | for cluster in clusters 110 | if ( 111 | # It's a server cluster 112 | cluster["@server"] == "true" 113 | # It's optional server cluster 114 | or cluster["@serverLocked"] == "false" 115 | ) 116 | ) 117 | + ",}" # extra comma to force black to do a cluster per line 118 | ) 119 | 120 | output.append( 121 | """ 122 | 123 | class {cls_name}(DeviceType, device_type={device_id}): 124 | \"""{device_name}.\""" 125 | 126 | clusters = {output_clusters} 127 | 128 | 129 | """.format( 130 | device_name=name, 131 | cls_name=gen_cls_name(name), 132 | device_id=device["deviceId"]["#text"], 133 | output_clusters=output_clusters, 134 | ) 135 | ) 136 | 137 | formatted = black.format_str("\n\n".join(output), mode=black.Mode()) 138 | OUTPUT_PYTHON.write_text(formatted) 139 | 140 | 141 | if __name__ == "__main__": 142 | main() 143 | -------------------------------------------------------------------------------- /scripts/run-in-env.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env sh 2 | set -eu 3 | 4 | # Activate pyenv and virtualenv if present, then run the specified command 5 | 6 | # pyenv, pyenv-virtualenv 7 | if [ -s .python-version ]; then 8 | PYENV_VERSION=$(head -n 1 .python-version) 9 | export PYENV_VERSION 10 | fi 11 | 12 | # other common virtualenvs 13 | my_path=$(git rev-parse --show-toplevel) 14 | 15 | for venv in venv .venv .; do 16 | if [ -f "${my_path}/${venv}/bin/activate" ]; then 17 | . "${my_path}/${venv}/bin/activate" 18 | break 19 | fi 20 | done 21 | 22 | exec "$@" 23 | -------------------------------------------------------------------------------- /scripts/setup.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # Setups the development environment. 
3 | 4 | # Stop on errors 5 | set -e 6 | 7 | cd "$(dirname "$0")/.." 8 | 9 | env_name=${1:-".venv"} 10 | 11 | if [ -d "$env_name" ]; then 12 | echo "Virtual environment '$env_name' already exists." 13 | else 14 | echo "Creating Virtual environment..." 15 | python -m venv .venv 16 | fi 17 | echo "Activating virtual environment..." 18 | source .venv/bin/activate 19 | 20 | echo "Installing development dependencies..." 21 | 22 | pip install -e ".[server]" 23 | pip install -e ".[test]" 24 | pre-commit install 25 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- 1 | """Tests.""" 2 | -------------------------------------------------------------------------------- /tests/common/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/home-assistant-libs/python-matter-server/9a006602b966507ae5cf72397530a33867835e92/tests/common/__init__.py -------------------------------------------------------------------------------- /tests/common/test_parser.py: -------------------------------------------------------------------------------- 1 | """Test parser functions that converts the incoming json from API into dataclass models.""" 2 | 3 | from dataclasses import dataclass 4 | import datetime 5 | from enum import Enum, IntEnum 6 | from typing import Optional, Union 7 | 8 | from chip.clusters.Types import Nullable, NullValue 9 | import pytest 10 | 11 | from matter_server.common.helpers.util import dataclass_from_dict, parse_value 12 | 13 | 14 | class MatterIntEnum(IntEnum): 15 | """Basic Matter Test IntEnum""" 16 | 17 | A = 0 18 | B = 1 19 | C = 2 20 | 21 | 22 | class MatterEnum(Enum): 23 | """Basic Matter Test Enum""" 24 | 25 | A = "a" 26 | B = "b" 27 | C = "c" 28 | 29 | 30 | @dataclass 31 | class BasicModelChild: 32 | """Basic test model.""" 33 | 34 | a: int 35 | b: str 36 | c: str 37 | d: Optional[int] 38 | 39 | 40 | @dataclass 41 | class BasicModel: 42 | """Basic test model.""" 43 | 44 | a: int 45 | b: float 46 | c: str 47 | d: Optional[int] 48 | e: BasicModelChild 49 | f: datetime.datetime 50 | g: MatterEnum 51 | h: MatterIntEnum 52 | i: str = "default" 53 | 54 | 55 | def test_dataclass_from_dict(): 56 | """Test dataclass from dict parsing.""" 57 | raw = { 58 | "a": 1, 59 | "b": 1.0, 60 | "c": "hello", 61 | "d": 1, 62 | "e": {"a": 2, "b": "test", "c": "test", "d": None}, 63 | "f": "2022-12-09T06:58:00Z", 64 | "g": "a", 65 | "h": 2, 66 | } 67 | res = dataclass_from_dict(BasicModel, raw) 68 | # test the basic values 69 | assert isinstance(res, BasicModel) 70 | assert res.a == 1 71 | assert res.b == 1.0 72 | assert res.d == 1 73 | # test recursive parsing 74 | assert isinstance(res.e, BasicModelChild) 75 | # test default value 76 | assert res.i == "default" 77 | # test int gets converted to float 78 | raw["b"] = 2 79 | res = dataclass_from_dict(BasicModel, raw) 80 | assert res.b == 2.0 81 | # test datetime string 82 | assert isinstance(res.f, datetime.datetime) 83 | assert res.f.month == 12 84 | assert res.f.day == 9 85 | # test parse (valid) MatterEnum 86 | assert res.g == MatterEnum.A 87 | # test parse (valid) MatterIntEnum 88 | assert res.h == MatterIntEnum.C 89 | # test parse invalid enum value returns raw value 90 | raw2 = {**raw} 91 | raw2["h"] = 5 92 | res2 = dataclass_from_dict(BasicModel, raw2) 93 | assert res2.h == 5 94 | # test string doesn't match int 95 | with pytest.raises(TypeError): 96 | 
raw2 = {**raw} 97 | raw2["a"] = "blah" 98 | dataclass_from_dict(BasicModel, raw2) 99 | # test missing key result in keyerror 100 | with pytest.raises(KeyError): 101 | raw2 = {**raw} 102 | del raw2["a"] 103 | dataclass_from_dict(BasicModel, raw2, strict=True) 104 | # test extra keys silently ignored in non-strict mode 105 | raw2 = {**raw} 106 | raw2["extrakey"] = "something" 107 | dataclass_from_dict(BasicModel, raw2, strict=False) 108 | # test extra keys not silently ignored in strict mode 109 | with pytest.raises(KeyError): 110 | dataclass_from_dict(BasicModel, raw2, strict=True) 111 | # test NOCStruct.noc edge case 112 | res = parse_value("NOCStruct.noc", 5, bytes) 113 | assert res == b"" 114 | 115 | 116 | def test_parse_value(): 117 | """Test special cases in the parse_value helper.""" 118 | # test None value which is allowed 119 | assert parse_value("test", None, int, allow_none=True) is None 120 | # test unexpected None value 121 | with pytest.raises(KeyError): 122 | parse_value("test", None, int, allow_none=False) 123 | # test sdk Nullable type 124 | assert parse_value("test", None, Nullable) is None 125 | assert parse_value("test", None, Nullable, allow_sdk_types=True) == NullValue 126 | assert ( 127 | parse_value( 128 | "test", None, Union[int, Nullable], allow_none=False, allow_sdk_types=True 129 | ) 130 | == NullValue 131 | ) 132 | assert ( 133 | parse_value( 134 | "test", 135 | None, 136 | Union[None, int, Nullable], 137 | allow_none=False, 138 | allow_sdk_types=True, 139 | ) 140 | == NullValue 141 | ) 142 | -------------------------------------------------------------------------------- /tests/common/test_utils.py: -------------------------------------------------------------------------------- 1 | """Test the util functions.""" 2 | 3 | from matter_server.common.helpers.util import convert_ip_address, convert_mac_address 4 | 5 | 6 | def test_convert_functions() -> None: 7 | """Test convert_ip_address and convert_mac_address util.""" 8 | assert convert_ip_address("wKgBNA==") == "192.168.1.52" 9 | assert ( 10 | convert_ip_address("KgARtxIxhABW70T//kmvxg==", True) 11 | == "2a00:11b7:1231:8400:56ef:44ff:fe49:afc6" 12 | ) 13 | assert convert_mac_address("ji4yiD/r91c=") == "8e:2e:32:88:3f:eb:f7:57" 14 | -------------------------------------------------------------------------------- /tests/fixtures/__init__.py: -------------------------------------------------------------------------------- 1 | """Text fixtures for Matter server tests.""" 2 | 3 | import pathlib 4 | 5 | FIXTURES_ROOT = pathlib.Path(__file__).parent 6 | NODE_FIXTURES_ROOT = FIXTURES_ROOT / "nodes" 7 | NODE_IN_HA_FIXTURES_ROOT = FIXTURES_ROOT / "nodes_in_ha" 8 | -------------------------------------------------------------------------------- /tests/fixtures/nodes/_fake_template: -------------------------------------------------------------------------------- 1 | { 2 | "attributes": { 3 | "0": { 4 | "Descriptor": { 5 | "deviceTypeList": [ 6 | { 7 | "type": 22, 8 | "revision": 1, 9 | "_type": "chip.clusters.Objects.Descriptor.Structs.DeviceTypeStruct" 10 | } 11 | ], 12 | "serverList": [29, 37, 40, 48, 49, 50, 51, 60, 62, 64, 65], 13 | "clientList": [], 14 | "partsList": [9, 10], 15 | "generatedCommandList": [], 16 | "acceptedCommandList": [], 17 | "attributeList": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], 18 | "featureMap": 0, 19 | "clusterRevision": 1, 20 | "_type": "chip.clusters.Objects.Descriptor" 21 | }, 22 | "Basic": { 23 | "dataModelRevision": 0, 24 | "vendorName": "Mock Vendor", 25 | "vendorID": 1234, 26 | 
"productName": "Mock Device", 27 | "productID": 2, 28 | "nodeLabel": "My Mock Device", 29 | "location": "nl", 30 | "hardwareVersion": 123, 31 | "hardwareVersionString": "TEST_VERSION", 32 | "softwareVersion": 12345, 33 | "softwareVersionString": "123.4.5", 34 | "manufacturingDate": null, 35 | "partNumber": null, 36 | "productURL": null, 37 | "productLabel": null, 38 | "serialNumber": null, 39 | "localConfigDisabled": null, 40 | "reachable": null, 41 | "uniqueID": "mock-device-id", 42 | "capabilityMinima": null, 43 | "generatedCommandList": [], 44 | "acceptedCommandList": [], 45 | "attributeList": [ 46 | 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 18, 65528, 65529, 65531, 65532, 47 | 65533 48 | ], 49 | "featureMap": 0, 50 | "clusterRevision": 3, 51 | "_type": "chip.clusters.Objects.Basic" 52 | } 53 | }, 54 | "9": { 55 | "Descriptor": { 56 | "deviceTypeList": [ 57 | { 58 | "type": DEVICE_TYPE, 59 | "revision": 1, 60 | "_type": "chip.clusters.Objects.Descriptor.Structs.DeviceTypeStruct" 61 | } 62 | ], 63 | "serverList": [6, 29, 57, 768, 8, 40], 64 | "clientList": [], 65 | "partsList": [], 66 | "generatedCommandList": [], 67 | "acceptedCommandList": [], 68 | "attributeList": [0, 1, 2, 3, 65528, 65529, 65531, 65533], 69 | "featureMap": null, 70 | "clusterRevision": 1, 71 | "_type": "chip.clusters.Objects.Descriptor" 72 | }, 73 | CLUSTER_DEF 74 | } 75 | }, 76 | "events": [], 77 | "node_id": 4338 78 | } 79 | -------------------------------------------------------------------------------- /tests/fixtures/nodes/fake-bridge-two-light.json: -------------------------------------------------------------------------------- 1 | { 2 | "attributes": { 3 | "0": { 4 | "Descriptor": { 5 | "deviceTypeList": [ 6 | { 7 | "type": 22, 8 | "revision": 1, 9 | "_type": "chip.clusters.Objects.Descriptor.Structs.DeviceTypeStruct" 10 | }, 11 | { 12 | "type": 14, 13 | "revision": 1, 14 | "_type": "chip.clusters.Objects.Descriptor.Structs.DeviceTypeStruct" 15 | } 16 | ], 17 | "serverList": [29, 37, 40, 48, 49, 50, 51, 60, 62, 64, 65], 18 | "clientList": [], 19 | "partsList": [9, 10], 20 | "generatedCommandList": [], 21 | "acceptedCommandList": [], 22 | "attributeList": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], 23 | "featureMap": 0, 24 | "clusterRevision": 1, 25 | "_type": "chip.clusters.Objects.Descriptor" 26 | }, 27 | "Basic": { 28 | "dataModelRevision": 0, 29 | "vendorName": "Mock Vendor", 30 | "vendorID": 1234, 31 | "productName": "Mock Bridge", 32 | "productID": 2, 33 | "nodeLabel": "My Mock Bridge", 34 | "location": "nl", 35 | "hardwareVersion": 123, 36 | "hardwareVersionString": "TEST_VERSION", 37 | "softwareVersion": 12345, 38 | "softwareVersionString": "123.4.5", 39 | "manufacturingDate": null, 40 | "partNumber": null, 41 | "productURL": null, 42 | "productLabel": null, 43 | "serialNumber": null, 44 | "localConfigDisabled": null, 45 | "reachable": null, 46 | "uniqueID": "mock-hub-id", 47 | "capabilityMinima": null, 48 | "generatedCommandList": [], 49 | "acceptedCommandList": [], 50 | "attributeList": [ 51 | 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 18, 65528, 65529, 65531, 65532, 52 | 65533 53 | ], 54 | "featureMap": 0, 55 | "clusterRevision": 3, 56 | "_type": "chip.clusters.Objects.Basic" 57 | } 58 | }, 59 | "9": { 60 | "OnOff": { 61 | "onOff": true, 62 | "globalSceneControl": true, 63 | "onTime": 0, 64 | "offWaitTime": 0, 65 | "startUpOnOff": 0, 66 | "generatedCommandList": [], 67 | "acceptedCommandList": [0, 1, 2, 64, 65, 66], 68 | "attributeList": [ 69 | 0, 16384, 16385, 16386, 16387, 65528, 65529, 65531, 65532, 65533 70 
| ], 71 | "featureMap": 1, 72 | "clusterRevision": 4, 73 | "_type": "chip.clusters.Objects.OnOff" 74 | }, 75 | "Descriptor": { 76 | "deviceTypeList": [ 77 | { 78 | "type": 256, 79 | "revision": 1, 80 | "_type": "chip.clusters.Objects.Descriptor.Structs.DeviceTypeStruct" 81 | }, 82 | { 83 | "type": 19, 84 | "revision": 1, 85 | "_type": "chip.clusters.Objects.Descriptor.Structs.DeviceTypeStruct" 86 | } 87 | ], 88 | "serverList": [6, 29, 57, 768, 8, 40], 89 | "clientList": [], 90 | "partsList": [], 91 | "generatedCommandList": [], 92 | "acceptedCommandList": [], 93 | "attributeList": [0, 1, 2, 3, 65528, 65529, 65531, 65533], 94 | "featureMap": null, 95 | "clusterRevision": 1, 96 | "_type": "chip.clusters.Objects.Descriptor" 97 | }, 98 | "BridgedDeviceBasic": { 99 | "nodeLabel": "Kitchen Ceiling", 100 | "reachable": true, 101 | "vendorID": 1234, 102 | "softwareVersionString": "67.8.9", 103 | "softwareVersion": 6789, 104 | "vendorName": "Mock Vendor", 105 | "productName": "Mock Light", 106 | "uniqueID": "mock-id-kitchen-ceiling", 107 | "generatedCommandList": [], 108 | "acceptedCommandList": [], 109 | "attributeList": [ 110 | 5, 17, 2, 4, 10, 9, 1, 3, 18, 65528, 65529, 65531, 65532, 65533 111 | ], 112 | "_type": "chip.clusters.Objects.BridgedDeviceBasic" 113 | } 114 | }, 115 | "10": { 116 | "OnOff": { 117 | "onOff": false, 118 | "globalSceneControl": true, 119 | "onTime": 0, 120 | "offWaitTime": 0, 121 | "startUpOnOff": 0, 122 | "generatedCommandList": [], 123 | "acceptedCommandList": [0, 1, 2, 64, 65, 66], 124 | "attributeList": [ 125 | 0, 16384, 16385, 16386, 16387, 65528, 65529, 65531, 65532, 65533 126 | ], 127 | "featureMap": 1, 128 | "clusterRevision": 4, 129 | "_type": "chip.clusters.Objects.OnOff" 130 | }, 131 | "Descriptor": { 132 | "deviceTypeList": [ 133 | { 134 | "type": 256, 135 | "revision": 1, 136 | "_type": "chip.clusters.Objects.Descriptor.Structs.DeviceTypeStruct" 137 | }, 138 | { 139 | "type": 19, 140 | "revision": 1, 141 | "_type": "chip.clusters.Objects.Descriptor.Structs.DeviceTypeStruct" 142 | } 143 | ], 144 | "serverList": [6, 29, 57, 768, 40], 145 | "clientList": [], 146 | "partsList": [], 147 | "generatedCommandList": [], 148 | "acceptedCommandList": [], 149 | "attributeList": [0, 1, 2, 3, 65528, 65529, 65531, 65533], 150 | "featureMap": null, 151 | "clusterRevision": 1, 152 | "_type": "chip.clusters.Objects.Descriptor" 153 | }, 154 | "BridgedDeviceBasic": { 155 | "nodeLabel": "Living Room Ceiling", 156 | "reachable": true, 157 | "vendorID": 1234, 158 | "softwareVersionString": "1.49.1", 159 | "softwareVersion": 19988481, 160 | "vendorName": "Mock Vendor", 161 | "productName": "Mock Light", 162 | "uniqueID": "mock-id-living-room-ceiling", 163 | "generatedCommandList": [], 164 | "acceptedCommandList": [], 165 | "attributeList": [ 166 | 5, 17, 2, 4, 10, 9, 1, 3, 18, 65528, 65529, 65531, 65532, 65533 167 | ], 168 | "_type": "chip.clusters.Objects.BridgedDeviceBasic" 169 | } 170 | } 171 | }, 172 | "events": [], 173 | "node_id": 4338 174 | } 175 | -------------------------------------------------------------------------------- /tests/fixtures/nodes/fake-temperature-sensor.json: -------------------------------------------------------------------------------- 1 | { 2 | "attributes": { 3 | "0": { 4 | "Descriptor": { 5 | "deviceTypeList": [ 6 | { 7 | "type": 22, 8 | "revision": 1, 9 | "_type": "chip.clusters.Objects.Descriptor.Structs.DeviceTypeStruct" 10 | } 11 | ], 12 | "serverList": [29, 37, 40, 48, 49, 50, 51, 60, 62, 64, 65], 13 | "clientList": [], 14 | "partsList": [9, 
10], 15 | "generatedCommandList": [], 16 | "acceptedCommandList": [], 17 | "attributeList": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], 18 | "featureMap": 0, 19 | "clusterRevision": 1, 20 | "_type": "chip.clusters.Objects.Descriptor" 21 | }, 22 | "Basic": { 23 | "dataModelRevision": 0, 24 | "vendorName": "Mock Vendor", 25 | "vendorID": 1234, 26 | "productName": "Mock Device", 27 | "productID": 2, 28 | "nodeLabel": "My Mock Device", 29 | "location": "nl", 30 | "hardwareVersion": 123, 31 | "hardwareVersionString": "TEST_VERSION", 32 | "softwareVersion": 12345, 33 | "softwareVersionString": "123.4.5", 34 | "manufacturingDate": null, 35 | "partNumber": null, 36 | "productURL": null, 37 | "productLabel": null, 38 | "serialNumber": null, 39 | "localConfigDisabled": null, 40 | "reachable": null, 41 | "uniqueID": "mock-device-id", 42 | "capabilityMinima": null, 43 | "generatedCommandList": [], 44 | "acceptedCommandList": [], 45 | "attributeList": [ 46 | 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 18, 65528, 65529, 65531, 65532, 47 | 65533 48 | ], 49 | "featureMap": 0, 50 | "clusterRevision": 3, 51 | "_type": "chip.clusters.Objects.Basic" 52 | } 53 | }, 54 | "9": { 55 | "Descriptor": { 56 | "deviceTypeList": [ 57 | { 58 | "type": 770, 59 | "revision": 1, 60 | "_type": "chip.clusters.Objects.Descriptor.Structs.DeviceTypeStruct" 61 | } 62 | ], 63 | "serverList": [6, 29, 57, 768, 8, 40], 64 | "clientList": [], 65 | "partsList": [], 66 | "generatedCommandList": [], 67 | "acceptedCommandList": [], 68 | "attributeList": [0, 1, 2, 3, 65528, 65529, 65531, 65533], 69 | "featureMap": null, 70 | "clusterRevision": 1, 71 | "_type": "chip.clusters.Objects.Descriptor" 72 | }, 73 | "TemperatureMeasurement": { 74 | "measuredValue": 2100, 75 | "minMeasuredValue": null, 76 | "maxMeasuredValue": null, 77 | "tolerance": 0, 78 | "generatedCommandList": [], 79 | "acceptedCommandList": [], 80 | "attributeList": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], 81 | "featureMap": 0, 82 | "clusterRevision": 4, 83 | "_type": "chip.clusters.Objects.TemperatureMeasurement" 84 | } 85 | } 86 | }, 87 | "events": [], 88 | "node_id": 4338 89 | } 90 | -------------------------------------------------------------------------------- /tests/fixtures/nodes/fake_color_temperature_light.json: -------------------------------------------------------------------------------- 1 | { 2 | "attributes": { 3 | "0": { 4 | "Descriptor": { 5 | "deviceTypeList": [ 6 | { 7 | "type": 22, 8 | "revision": 1, 9 | "_type": "chip.clusters.Objects.Descriptor.Structs.DeviceTypeStruct" 10 | } 11 | ], 12 | "serverList": [29, 37, 40, 48, 49, 50, 51, 60, 62, 64, 65], 13 | "clientList": [], 14 | "partsList": [9, 10], 15 | "generatedCommandList": [], 16 | "acceptedCommandList": [], 17 | "attributeList": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], 18 | "featureMap": 0, 19 | "clusterRevision": 1, 20 | "_type": "chip.clusters.Objects.Descriptor" 21 | }, 22 | "Basic": { 23 | "dataModelRevision": 0, 24 | "vendorName": "Mock Vendor", 25 | "vendorID": 1234, 26 | "productName": "Mock Device", 27 | "productID": 2, 28 | "nodeLabel": "My Mock Device", 29 | "location": "nl", 30 | "hardwareVersion": 123, 31 | "hardwareVersionString": "TEST_VERSION", 32 | "softwareVersion": 12345, 33 | "softwareVersionString": "123.4.5", 34 | "manufacturingDate": null, 35 | "partNumber": null, 36 | "productURL": null, 37 | "productLabel": null, 38 | "serialNumber": null, 39 | "localConfigDisabled": null, 40 | "reachable": null, 41 | "uniqueID": "mock-device-id", 42 | "capabilityMinima": null, 43 
| "generatedCommandList": [], 44 | "acceptedCommandList": [], 45 | "attributeList": [ 46 | 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 18, 65528, 65529, 65531, 65532, 47 | 65533 48 | ], 49 | "featureMap": 0, 50 | "clusterRevision": 3, 51 | "_type": "chip.clusters.Objects.Basic" 52 | } 53 | }, 54 | "9": { 55 | "Descriptor": { 56 | "deviceTypeList": [ 57 | { 58 | "type": 268, 59 | "revision": 1, 60 | "_type": "chip.clusters.Objects.Descriptor.Structs.DeviceTypeStruct" 61 | } 62 | ], 63 | "serverList": [6, 29, 57, 768, 8, 40], 64 | "clientList": [], 65 | "partsList": [], 66 | "generatedCommandList": [], 67 | "acceptedCommandList": [], 68 | "attributeList": [0, 1, 2, 3, 65528, 65529, 65531, 65533], 69 | "featureMap": null, 70 | "clusterRevision": 1, 71 | "_type": "chip.clusters.Objects.Descriptor" 72 | }, 73 | "OnOff": { 74 | "onOff": true, 75 | "globalSceneControl": true, 76 | "onTime": 0, 77 | "offWaitTime": 0, 78 | "startUpOnOff": null, 79 | "generatedCommandList": [], 80 | "acceptedCommandList": [0, 1, 2, 64, 65, 66], 81 | "attributeList": [ 82 | 0, 16384, 16385, 16386, 16387, 65528, 65529, 65531, 65532, 65533 83 | ], 84 | "featureMap": 1, 85 | "clusterRevision": 4, 86 | "_type": "chip.clusters.Objects.OnOff" 87 | }, 88 | "LevelControl": { 89 | "currentLevel": 4, 90 | "remainingTime": 0, 91 | "minLevel": 1, 92 | "maxLevel": 254, 93 | "currentFrequency": 0, 94 | "minFrequency": 0, 95 | "maxFrequency": 0, 96 | "options": 0, 97 | "onOffTransitionTime": 0, 98 | "onLevel": null, 99 | "onTransitionTime": 0, 100 | "offTransitionTime": 0, 101 | "defaultMoveRate": 50, 102 | "startUpCurrentLevel": null, 103 | "generatedCommandList": [], 104 | "acceptedCommandList": [0, 1, 2, 3, 4, 5, 6, 7], 105 | "attributeList": [ 106 | 0, 1, 2, 3, 4, 5, 6, 15, 16, 17, 18, 19, 20, 16384, 65528, 65529, 107 | 65531, 65532, 65533 108 | ], 109 | "featureMap": 3, 110 | "clusterRevision": 5, 111 | "_type": "chip.clusters.Objects.LevelControl" 112 | } 113 | } 114 | }, 115 | "events": [], 116 | "node_id": 4338 117 | } 118 | -------------------------------------------------------------------------------- /tests/fixtures/nodes/fake_extended_color_light.json: -------------------------------------------------------------------------------- 1 | { 2 | "attributes": { 3 | "0": { 4 | "Descriptor": { 5 | "deviceTypeList": [ 6 | { 7 | "type": 22, 8 | "revision": 1, 9 | "_type": "chip.clusters.Objects.Descriptor.Structs.DeviceTypeStruct" 10 | } 11 | ], 12 | "serverList": [29, 37, 40, 48, 49, 50, 51, 60, 62, 64, 65], 13 | "clientList": [], 14 | "partsList": [9, 10], 15 | "generatedCommandList": [], 16 | "acceptedCommandList": [], 17 | "attributeList": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], 18 | "featureMap": 0, 19 | "clusterRevision": 1, 20 | "_type": "chip.clusters.Objects.Descriptor" 21 | }, 22 | "Basic": { 23 | "dataModelRevision": 0, 24 | "vendorName": "Mock Vendor", 25 | "vendorID": 1234, 26 | "productName": "Mock Device", 27 | "productID": 2, 28 | "nodeLabel": "My Mock Device", 29 | "location": "nl", 30 | "hardwareVersion": 123, 31 | "hardwareVersionString": "TEST_VERSION", 32 | "softwareVersion": 12345, 33 | "softwareVersionString": "123.4.5", 34 | "manufacturingDate": null, 35 | "partNumber": null, 36 | "productURL": null, 37 | "productLabel": null, 38 | "serialNumber": null, 39 | "localConfigDisabled": null, 40 | "reachable": null, 41 | "uniqueID": "mock-device-id", 42 | "capabilityMinima": null, 43 | "generatedCommandList": [], 44 | "acceptedCommandList": [], 45 | "attributeList": [ 46 | 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 18, 
65528, 65529, 65531, 65532, 47 | 65533 48 | ], 49 | "featureMap": 0, 50 | "clusterRevision": 3, 51 | "_type": "chip.clusters.Objects.Basic" 52 | } 53 | }, 54 | "9": { 55 | "Descriptor": { 56 | "deviceTypeList": [ 57 | { 58 | "type": 269, 59 | "revision": 1, 60 | "_type": "chip.clusters.Objects.Descriptor.Structs.DeviceTypeStruct" 61 | } 62 | ], 63 | "serverList": [6, 29, 57, 768, 8, 40], 64 | "clientList": [], 65 | "partsList": [], 66 | "generatedCommandList": [], 67 | "acceptedCommandList": [], 68 | "attributeList": [0, 1, 2, 3, 65528, 65529, 65531, 65533], 69 | "featureMap": null, 70 | "clusterRevision": 1, 71 | "_type": "chip.clusters.Objects.Descriptor" 72 | }, 73 | "OnOff": { 74 | "onOff": true, 75 | "globalSceneControl": true, 76 | "onTime": 0, 77 | "offWaitTime": 0, 78 | "startUpOnOff": null, 79 | "generatedCommandList": [], 80 | "acceptedCommandList": [0, 1, 2, 64, 65, 66], 81 | "attributeList": [ 82 | 0, 16384, 16385, 16386, 16387, 65528, 65529, 65531, 65532, 65533 83 | ], 84 | "featureMap": 1, 85 | "clusterRevision": 4, 86 | "_type": "chip.clusters.Objects.OnOff" 87 | }, 88 | "LevelControl": { 89 | "currentLevel": 4, 90 | "remainingTime": 0, 91 | "minLevel": 1, 92 | "maxLevel": 254, 93 | "currentFrequency": 0, 94 | "minFrequency": 0, 95 | "maxFrequency": 0, 96 | "options": 0, 97 | "onOffTransitionTime": 0, 98 | "onLevel": null, 99 | "onTransitionTime": 0, 100 | "offTransitionTime": 0, 101 | "defaultMoveRate": 50, 102 | "startUpCurrentLevel": null, 103 | "generatedCommandList": [], 104 | "acceptedCommandList": [0, 1, 2, 3, 4, 5, 6, 7], 105 | "attributeList": [ 106 | 0, 1, 2, 3, 4, 5, 6, 15, 16, 17, 18, 19, 20, 16384, 65528, 65529, 107 | 65531, 65532, 65533 108 | ], 109 | "featureMap": 3, 110 | "clusterRevision": 5, 111 | "_type": "chip.clusters.Objects.LevelControl" 112 | } 113 | } 114 | }, 115 | "events": [], 116 | "node_id": 4338 117 | } 118 | -------------------------------------------------------------------------------- /tests/fixtures/nodes/fake_flow_sensor.json: -------------------------------------------------------------------------------- 1 | { 2 | "attributes": { 3 | "0": { 4 | "Descriptor": { 5 | "deviceTypeList": [ 6 | { 7 | "type": 22, 8 | "revision": 1, 9 | "_type": "chip.clusters.Objects.Descriptor.Structs.DeviceTypeStruct" 10 | } 11 | ], 12 | "serverList": [29, 37, 40, 48, 49, 50, 51, 60, 62, 64, 65], 13 | "clientList": [], 14 | "partsList": [9, 10], 15 | "generatedCommandList": [], 16 | "acceptedCommandList": [], 17 | "attributeList": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], 18 | "featureMap": 0, 19 | "clusterRevision": 1, 20 | "_type": "chip.clusters.Objects.Descriptor" 21 | }, 22 | "Basic": { 23 | "dataModelRevision": 0, 24 | "vendorName": "Mock Vendor", 25 | "vendorID": 1234, 26 | "productName": "Mock Device", 27 | "productID": 2, 28 | "nodeLabel": "My Mock Device", 29 | "location": "nl", 30 | "hardwareVersion": 123, 31 | "hardwareVersionString": "TEST_VERSION", 32 | "softwareVersion": 12345, 33 | "softwareVersionString": "123.4.5", 34 | "manufacturingDate": null, 35 | "partNumber": null, 36 | "productURL": null, 37 | "productLabel": null, 38 | "serialNumber": null, 39 | "localConfigDisabled": null, 40 | "reachable": null, 41 | "uniqueID": "mock-device-id", 42 | "capabilityMinima": null, 43 | "generatedCommandList": [], 44 | "acceptedCommandList": [], 45 | "attributeList": [ 46 | 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 18, 65528, 65529, 65531, 65532, 47 | 65533 48 | ], 49 | "featureMap": 0, 50 | "clusterRevision": 3, 51 | "_type": 
"chip.clusters.Objects.Basic" 52 | } 53 | }, 54 | "9": { 55 | "Descriptor": { 56 | "deviceTypeList": [ 57 | { 58 | "type": 774, 59 | "revision": 1, 60 | "_type": "chip.clusters.Objects.Descriptor.Structs.DeviceTypeStruct" 61 | } 62 | ], 63 | "serverList": [6, 29, 57, 768, 8, 40], 64 | "clientList": [], 65 | "partsList": [], 66 | "generatedCommandList": [], 67 | "acceptedCommandList": [], 68 | "attributeList": [0, 1, 2, 3, 65528, 65529, 65531, 65533], 69 | "featureMap": null, 70 | "clusterRevision": 1, 71 | "_type": "chip.clusters.Objects.Descriptor" 72 | }, 73 | "FlowMeasurement": { 74 | "measuredValue": 2, 75 | "minMeasuredValue": 0, 76 | "maxMeasuredValue": 0, 77 | "tolerance": 0, 78 | "generatedCommandList": [], 79 | "acceptedCommandList": [], 80 | "attributeList": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], 81 | "featureMap": 0, 82 | "clusterRevision": 3, 83 | "_type": "chip.clusters.Objects.FlowMeasurement" 84 | } 85 | } 86 | }, 87 | "events": [], 88 | "node_id": 4338 89 | } 90 | -------------------------------------------------------------------------------- /tests/fixtures/nodes/fake_humidity_sensor.json: -------------------------------------------------------------------------------- 1 | { 2 | "attributes": { 3 | "0": { 4 | "Descriptor": { 5 | "deviceTypeList": [ 6 | { 7 | "type": 22, 8 | "revision": 1, 9 | "_type": "chip.clusters.Objects.Descriptor.Structs.DeviceTypeStruct" 10 | } 11 | ], 12 | "serverList": [29, 37, 40, 48, 49, 50, 51, 60, 62, 64, 65], 13 | "clientList": [], 14 | "partsList": [9, 10], 15 | "generatedCommandList": [], 16 | "acceptedCommandList": [], 17 | "attributeList": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], 18 | "featureMap": 0, 19 | "clusterRevision": 1, 20 | "_type": "chip.clusters.Objects.Descriptor" 21 | }, 22 | "Basic": { 23 | "dataModelRevision": 0, 24 | "vendorName": "Mock Vendor", 25 | "vendorID": 1234, 26 | "productName": "Mock Device", 27 | "productID": 2, 28 | "nodeLabel": "My Mock Device", 29 | "location": "nl", 30 | "hardwareVersion": 123, 31 | "hardwareVersionString": "TEST_VERSION", 32 | "softwareVersion": 12345, 33 | "softwareVersionString": "123.4.5", 34 | "manufacturingDate": null, 35 | "partNumber": null, 36 | "productURL": null, 37 | "productLabel": null, 38 | "serialNumber": null, 39 | "localConfigDisabled": null, 40 | "reachable": null, 41 | "uniqueID": "mock-device-id", 42 | "capabilityMinima": null, 43 | "generatedCommandList": [], 44 | "acceptedCommandList": [], 45 | "attributeList": [ 46 | 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 18, 65528, 65529, 65531, 65532, 47 | 65533 48 | ], 49 | "featureMap": 0, 50 | "clusterRevision": 3, 51 | "_type": "chip.clusters.Objects.Basic" 52 | } 53 | }, 54 | "9": { 55 | "Descriptor": { 56 | "deviceTypeList": [ 57 | { 58 | "type": 775, 59 | "revision": 1, 60 | "_type": "chip.clusters.Objects.Descriptor.Structs.DeviceTypeStruct" 61 | } 62 | ], 63 | "serverList": [6, 29, 57, 768, 8, 40], 64 | "clientList": [], 65 | "partsList": [], 66 | "generatedCommandList": [], 67 | "acceptedCommandList": [], 68 | "attributeList": [0, 1, 2, 3, 65528, 65529, 65531, 65533], 69 | "featureMap": null, 70 | "clusterRevision": 1, 71 | "_type": "chip.clusters.Objects.Descriptor" 72 | }, 73 | "RelativeHumidityMeasurement": { 74 | "measuredValue": 3000, 75 | "minMeasuredValue": 0, 76 | "maxMeasuredValue": 10000, 77 | "tolerance": 0, 78 | "generatedCommandList": [], 79 | "acceptedCommandList": [], 80 | "attributeList": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], 81 | "featureMap": 0, 82 | "clusterRevision": 3, 83 | 
"_type": "chip.clusters.Objects.RelativeHumidityMeasurement" 84 | } 85 | } 86 | }, 87 | "events": [], 88 | "node_id": 4338 89 | } 90 | -------------------------------------------------------------------------------- /tests/fixtures/nodes/fake_light_sensor.json: -------------------------------------------------------------------------------- 1 | { 2 | "attributes": { 3 | "0": { 4 | "Descriptor": { 5 | "deviceTypeList": [ 6 | { 7 | "type": 22, 8 | "revision": 1, 9 | "_type": "chip.clusters.Objects.Descriptor.Structs.DeviceTypeStruct" 10 | } 11 | ], 12 | "serverList": [29, 37, 40, 48, 49, 50, 51, 60, 62, 64, 65], 13 | "clientList": [], 14 | "partsList": [9, 10], 15 | "generatedCommandList": [], 16 | "acceptedCommandList": [], 17 | "attributeList": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], 18 | "featureMap": 0, 19 | "clusterRevision": 1, 20 | "_type": "chip.clusters.Objects.Descriptor" 21 | }, 22 | "Basic": { 23 | "dataModelRevision": 0, 24 | "vendorName": "Mock Vendor", 25 | "vendorID": 1234, 26 | "productName": "Mock Device", 27 | "productID": 2, 28 | "nodeLabel": "My Mock Device", 29 | "location": "nl", 30 | "hardwareVersion": 123, 31 | "hardwareVersionString": "TEST_VERSION", 32 | "softwareVersion": 12345, 33 | "softwareVersionString": "123.4.5", 34 | "manufacturingDate": null, 35 | "partNumber": null, 36 | "productURL": null, 37 | "productLabel": null, 38 | "serialNumber": null, 39 | "localConfigDisabled": null, 40 | "reachable": null, 41 | "uniqueID": "mock-device-id", 42 | "capabilityMinima": null, 43 | "generatedCommandList": [], 44 | "acceptedCommandList": [], 45 | "attributeList": [ 46 | 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 18, 65528, 65529, 65531, 65532, 47 | 65533 48 | ], 49 | "featureMap": 0, 50 | "clusterRevision": 3, 51 | "_type": "chip.clusters.Objects.Basic" 52 | } 53 | }, 54 | "9": { 55 | "Descriptor": { 56 | "deviceTypeList": [ 57 | { 58 | "type": 262, 59 | "revision": 1, 60 | "_type": "chip.clusters.Objects.Descriptor.Structs.DeviceTypeStruct" 61 | } 62 | ], 63 | "serverList": [6, 29, 57, 768, 8, 40], 64 | "clientList": [], 65 | "partsList": [], 66 | "generatedCommandList": [], 67 | "acceptedCommandList": [], 68 | "attributeList": [0, 1, 2, 3, 65528, 65529, 65531, 65533], 69 | "featureMap": null, 70 | "clusterRevision": 1, 71 | "_type": "chip.clusters.Objects.Descriptor" 72 | }, 73 | "IlluminanceMeasurement": { 74 | "measuredValue": 1000, 75 | "minMeasuredValue": 1, 76 | "maxMeasuredValue": 65534, 77 | "tolerance": 0, 78 | "lightSensorType": null, 79 | "generatedCommandList": [], 80 | "acceptedCommandList": [], 81 | "attributeList": [0, 1, 2, 3, 4, 65528, 65529, 65531, 65532, 65533], 82 | "featureMap": 0, 83 | "clusterRevision": 3, 84 | "_type": "chip.clusters.Objects.IlluminanceMeasurement" 85 | } 86 | } 87 | }, 88 | "events": [], 89 | "node_id": 4338 90 | } 91 | -------------------------------------------------------------------------------- /tests/fixtures/nodes/fake_pressure_sensor.json: -------------------------------------------------------------------------------- 1 | { 2 | "attributes": { 3 | "0": { 4 | "Descriptor": { 5 | "deviceTypeList": [ 6 | { 7 | "type": 22, 8 | "revision": 1, 9 | "_type": "chip.clusters.Objects.Descriptor.Structs.DeviceTypeStruct" 10 | } 11 | ], 12 | "serverList": [29, 37, 40, 48, 49, 50, 51, 60, 62, 64, 65], 13 | "clientList": [], 14 | "partsList": [9, 10], 15 | "generatedCommandList": [], 16 | "acceptedCommandList": [], 17 | "attributeList": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], 18 | "featureMap": 0, 19 | "clusterRevision": 
1, 20 | "_type": "chip.clusters.Objects.Descriptor" 21 | }, 22 | "Basic": { 23 | "dataModelRevision": 0, 24 | "vendorName": "Mock Vendor", 25 | "vendorID": 1234, 26 | "productName": "Mock Device", 27 | "productID": 2, 28 | "nodeLabel": "My Mock Device", 29 | "location": "nl", 30 | "hardwareVersion": 123, 31 | "hardwareVersionString": "TEST_VERSION", 32 | "softwareVersion": 12345, 33 | "softwareVersionString": "123.4.5", 34 | "manufacturingDate": null, 35 | "partNumber": null, 36 | "productURL": null, 37 | "productLabel": null, 38 | "serialNumber": null, 39 | "localConfigDisabled": null, 40 | "reachable": null, 41 | "uniqueID": "mock-device-id", 42 | "capabilityMinima": null, 43 | "generatedCommandList": [], 44 | "acceptedCommandList": [], 45 | "attributeList": [ 46 | 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 18, 65528, 65529, 65531, 65532, 47 | 65533 48 | ], 49 | "featureMap": 0, 50 | "clusterRevision": 3, 51 | "_type": "chip.clusters.Objects.Basic" 52 | } 53 | }, 54 | "9": { 55 | "Descriptor": { 56 | "deviceTypeList": [ 57 | { 58 | "type": 773, 59 | "revision": 1, 60 | "_type": "chip.clusters.Objects.Descriptor.Structs.DeviceTypeStruct" 61 | } 62 | ], 63 | "serverList": [6, 29, 57, 768, 8, 40], 64 | "clientList": [], 65 | "partsList": [], 66 | "generatedCommandList": [], 67 | "acceptedCommandList": [], 68 | "attributeList": [0, 1, 2, 3, 65528, 65529, 65531, 65533], 69 | "featureMap": null, 70 | "clusterRevision": 1, 71 | "_type": "chip.clusters.Objects.Descriptor" 72 | }, 73 | "PressureMeasurement": { 74 | "measuredValue": 0, 75 | "minMeasuredValue": 0, 76 | "maxMeasuredValue": 0, 77 | "tolerance": null, 78 | "scaledValue": null, 79 | "minScaledValue": null, 80 | "maxScaledValue": null, 81 | "scaledTolerance": null, 82 | "scale": null, 83 | "generatedCommandList": [], 84 | "acceptedCommandList": [], 85 | "attributeList": [0, 1, 2, 65528, 65529, 65531, 65532, 65533], 86 | "featureMap": 0, 87 | "clusterRevision": 3, 88 | "_type": "chip.clusters.Objects.PressureMeasurement" 89 | } 90 | } 91 | }, 92 | "events": [], 93 | "node_id": 4338 94 | } 95 | -------------------------------------------------------------------------------- /tests/fixtures/nodes_in_ha/contact-sensor-example-app.json: -------------------------------------------------------------------------------- 1 | { 2 | "is_bridge": false, 3 | "node_devices": [ 4 | { 5 | "device_type_instances": [ 6 | { 7 | "type": "ContactSensor", 8 | "platforms": ["binary_sensor"], 9 | "entities": [ 10 | { 11 | "entity_id": "binary_sensor.my_sensor_contact", 12 | "state": "off", 13 | "attributes": { 14 | "device_class": "door" 15 | } 16 | } 17 | ] 18 | } 19 | ] 20 | } 21 | ] 22 | } 23 | -------------------------------------------------------------------------------- /tests/fixtures/nodes_in_ha/fake-bridge-two-light.json: -------------------------------------------------------------------------------- 1 | { 2 | "is_bridge": true, 3 | "node_devices": [ 4 | { 5 | "device_type_instances": [ 6 | { 7 | "type": "OnOffLight", 8 | "platforms": ["light"], 9 | "entities": [ 10 | { 11 | "entity_id": "light.kitchen_ceiling", 12 | "state": "on" 13 | } 14 | ] 15 | } 16 | ] 17 | }, 18 | { 19 | "device_type_instances": [ 20 | { 21 | "type": "OnOffLight", 22 | "platforms": ["light"], 23 | "entities": [ 24 | { 25 | "entity_id": "light.living_room_ceiling", 26 | "state": "off" 27 | } 28 | ] 29 | } 30 | ] 31 | } 32 | ] 33 | } 34 | -------------------------------------------------------------------------------- /tests/fixtures/nodes_in_ha/fake-temperature-sensor.json: 
-------------------------------------------------------------------------------- 1 | { 2 | "is_bridge": false, 3 | "node_devices": [ 4 | { 5 | "device_type_instances": [ 6 | { 7 | "type": "TemperatureSensor", 8 | "platforms": ["sensor"], 9 | "entities": [ 10 | { 11 | "entity_id": "sensor.my_mock_device_temperature", 12 | "state": "21.0", 13 | "attributes": { 14 | "device_class": "temperature", 15 | "state_class": "measurement", 16 | "unit_of_measurement": "°C" 17 | } 18 | } 19 | ] 20 | } 21 | ] 22 | } 23 | ] 24 | } 25 | -------------------------------------------------------------------------------- /tests/fixtures/nodes_in_ha/fake_color_temperature_light.json: -------------------------------------------------------------------------------- 1 | { 2 | "is_bridge": false, 3 | "node_devices": [ 4 | { 5 | "device_type_instances": [ 6 | { 7 | "type": "ColorTemperatureLight", 8 | "platforms": ["light"], 9 | "entities": [ 10 | { 11 | "entity_id": "light.my_mock_device", 12 | "state": "on", 13 | "attributes": { 14 | "supported_features": 0, 15 | "supported_color_modes": ["brightness"], 16 | "brightness": 3 17 | } 18 | } 19 | ] 20 | } 21 | ] 22 | } 23 | ] 24 | } 25 | -------------------------------------------------------------------------------- /tests/fixtures/nodes_in_ha/fake_extended_color_light.json: -------------------------------------------------------------------------------- 1 | { 2 | "is_bridge": false, 3 | "node_devices": [ 4 | { 5 | "device_type_instances": [ 6 | { 7 | "type": "ExtendedColorLight", 8 | "platforms": ["light"], 9 | "entities": [ 10 | { 11 | "entity_id": "light.my_mock_device", 12 | "state": "on", 13 | "attributes": { 14 | "supported_features": 0, 15 | "supported_color_modes": ["brightness"], 16 | "brightness": 3 17 | } 18 | } 19 | ] 20 | } 21 | ] 22 | } 23 | ] 24 | } 25 | -------------------------------------------------------------------------------- /tests/fixtures/nodes_in_ha/fake_flow_sensor.json: -------------------------------------------------------------------------------- 1 | { 2 | "is_bridge": false, 3 | "node_devices": [ 4 | { 5 | "device_type_instances": [ 6 | { 7 | "type": "FlowSensor", 8 | "platforms": ["sensor"], 9 | "entities": [ 10 | { 11 | "entity_id": "sensor.my_mock_device_flow", 12 | "state": "0.2", 13 | "attributes": { 14 | "state_class": "measurement", 15 | "unit_of_measurement": "m³/h" 16 | } 17 | } 18 | ] 19 | } 20 | ] 21 | } 22 | ] 23 | } 24 | -------------------------------------------------------------------------------- /tests/fixtures/nodes_in_ha/fake_humidity_sensor.json: -------------------------------------------------------------------------------- 1 | { 2 | "is_bridge": false, 3 | "node_devices": [ 4 | { 5 | "device_type_instances": [ 6 | { 7 | "type": "HumiditySensor", 8 | "platforms": ["sensor"], 9 | "entities": [ 10 | { 11 | "entity_id": "sensor.my_mock_device_humidity", 12 | "state": "30.0", 13 | "attributes": { 14 | "device_class": "humidity", 15 | "state_class": "measurement", 16 | "unit_of_measurement": "%" 17 | } 18 | } 19 | ] 20 | } 21 | ] 22 | } 23 | ] 24 | } 25 | -------------------------------------------------------------------------------- /tests/fixtures/nodes_in_ha/fake_light_sensor.json: -------------------------------------------------------------------------------- 1 | { 2 | "is_bridge": false, 3 | "node_devices": [ 4 | { 5 | "device_type_instances": [ 6 | { 7 | "type": "LightSensor", 8 | "platforms": ["sensor"], 9 | "entities": [ 10 | { 11 | "entity_id": "sensor.my_mock_device_light", 12 | "state": "1.3", 13 | 
"attributes": { 14 | "device_class": "illuminance", 15 | "state_class": "measurement", 16 | "unit_of_measurement": "lx" 17 | } 18 | } 19 | ] 20 | } 21 | ] 22 | } 23 | ] 24 | } 25 | -------------------------------------------------------------------------------- /tests/fixtures/nodes_in_ha/fake_pressure_sensor.json: -------------------------------------------------------------------------------- 1 | { 2 | "is_bridge": false, 3 | "node_devices": [ 4 | { 5 | "device_type_instances": [ 6 | { 7 | "type": "PressureSensor", 8 | "platforms": ["sensor"], 9 | "entities": [ 10 | { 11 | "entity_id": "sensor.my_mock_device_pressure", 12 | "state": "0.0", 13 | "attributes": { 14 | "device_class": "pressure", 15 | "state_class": "measurement", 16 | "unit_of_measurement": "kPa" 17 | } 18 | } 19 | ] 20 | } 21 | ] 22 | } 23 | ] 24 | } 25 | -------------------------------------------------------------------------------- /tests/fixtures/nodes_in_ha/lighting-example-app.json: -------------------------------------------------------------------------------- 1 | { 2 | "is_bridge": false, 3 | "node_devices": [ 4 | { 5 | "device_type_instances": [ 6 | { 7 | "type": "DimmableLight", 8 | "platforms": ["light"], 9 | "entities": [ 10 | { 11 | "entity_id": "light.my_cool_light", 12 | "state": "off", 13 | "attributes": { 14 | "supported_features": 0, 15 | "supported_color_modes": ["brightness"] 16 | } 17 | } 18 | ] 19 | } 20 | ] 21 | } 22 | ] 23 | } 24 | -------------------------------------------------------------------------------- /tests/fixtures/nodes_in_ha/onoff-switch-example-app.json: -------------------------------------------------------------------------------- 1 | { 2 | "is_bridge": false, 3 | "node_devices": [ 4 | { 5 | "device_type_instances": [ 6 | { 7 | "type": "OnOffPlugInUnit", 8 | "platforms": ["switch"], 9 | "entities": [ 10 | { 11 | "entity_id": "switch.my_switch", 12 | "state": "off", 13 | "attributes": { 14 | "device_class": "outlet" 15 | } 16 | } 17 | ] 18 | } 19 | ] 20 | } 21 | ] 22 | } 23 | -------------------------------------------------------------------------------- /tests/ruff.toml: -------------------------------------------------------------------------------- 1 | # This extend our general Ruff rules specifically for tests 2 | extend = "../pyproject.toml" 3 | 4 | lint.extend-select = [ 5 | "PT", # Use @pytest.fixture without parentheses 6 | ] 7 | 8 | lint.extend-ignore = [ 9 | "S101", # Use of assert detected. As these are tests... 10 | "SLF001", # Tests will access private/protected members... 11 | "TC002", # pytest doesn't like this one... 
12 | "PT012", # annoying with tests 13 | ] 14 | -------------------------------------------------------------------------------- /tests/server/__init__.py: -------------------------------------------------------------------------------- 1 | """Test the server.""" 2 | -------------------------------------------------------------------------------- /tests/server/ota/fixtures/4442-67-197888.json: -------------------------------------------------------------------------------- 1 | { 2 | "modelVersion": { 3 | "vid": 4442, 4 | "pid": 67, 5 | "softwareVersion": 197888, 6 | "softwareVersionString": "3.5.0", 7 | "cdVersionNumber": 1, 8 | "firmwareInformation": "", 9 | "softwareVersionValid": true, 10 | "otaUrl": "", 11 | "otaFileSize": "0", 12 | "otaChecksum": "", 13 | "otaChecksumType": 0, 14 | "minApplicableSoftwareVersion": 0, 15 | "maxApplicableSoftwareVersion": 197888, 16 | "releaseNotesUrl": "", 17 | "creator": "cosmos1sggrrmw05e6alve8umwdaszade5qxyt53kthud" 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /tests/server/ota/fixtures/4442-67-197910.json: -------------------------------------------------------------------------------- 1 | { 2 | "modelVersion": { 3 | "vid": 4442, 4 | "pid": 67, 5 | "softwareVersion": 197910, 6 | "softwareVersionString": "3.5.22", 7 | "cdVersionNumber": 1, 8 | "firmwareInformation": "", 9 | "softwareVersionValid": true, 10 | "otaUrl": "", 11 | "otaFileSize": "0", 12 | "otaChecksum": "", 13 | "otaChecksumType": 0, 14 | "minApplicableSoftwareVersion": 0, 15 | "maxApplicableSoftwareVersion": 197910, 16 | "releaseNotesUrl": "", 17 | "creator": "cosmos1sggrrmw05e6alve8umwdaszade5qxyt53kthud" 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /tests/server/ota/fixtures/4442-67-198340.json: -------------------------------------------------------------------------------- 1 | { 2 | "modelVersion": { 3 | "vid": 4442, 4 | "pid": 67, 5 | "softwareVersion": 198340, 6 | "softwareVersionString": "3.6.196", 7 | "cdVersionNumber": 1, 8 | "firmwareInformation": "", 9 | "softwareVersionValid": true, 10 | "otaUrl": "https://nl67-firmware.s3.amazonaws.com/3.6.196_r8.matter", 11 | "otaFileSize": "755976", 12 | "otaChecksum": "46pqTh87M5fNhvR/zzt1M0RNQcVa8bApOCat7aKQ3KA=", 13 | "otaChecksumType": 1, 14 | "minApplicableSoftwareVersion": 198317, 15 | "maxApplicableSoftwareVersion": 198340, 16 | "releaseNotesUrl": "", 17 | "creator": "cosmos1sggrrmw05e6alve8umwdaszade5qxyt53kthud" 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /tests/server/ota/fixtures/4442-67.json: -------------------------------------------------------------------------------- 1 | { 2 | "modelVersions": { 3 | "vid": 4442, 4 | "pid": 67, 5 | "softwareVersions": [ 6 | 197120, 7 | 197888, 8 | 197910, 9 | 198340 10 | ] 11 | } 12 | } 13 | -------------------------------------------------------------------------------- /tests/server/ota/fixtures/4447-8194-1000.json: -------------------------------------------------------------------------------- 1 | { 2 | "modelVersion": { 3 | "vid": 4447, 4 | "pid": 8194, 5 | "softwareVersion": 1000, 6 | "softwareVersionString": "1.0.0.0", 7 | "cdVersionNumber": 1, 8 | "firmwareInformation": "", 9 | "softwareVersionValid": true, 10 | "otaUrl": "", 11 | "otaFileSize": "0", 12 | "otaChecksum": "", 13 | "otaChecksumType": 0, 14 | "minApplicableSoftwareVersion": 0, 15 | "maxApplicableSoftwareVersion": 999, 16 | "releaseNotesUrl": "", 17 | 
"creator": "cosmos1qpz3ghnqj6my7gzegkftzav9hpxymkx6zdk73v" 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /tests/server/ota/fixtures/4447-8194-1011-invalid.json: -------------------------------------------------------------------------------- 1 | { 2 | "modelVersion": { 3 | "vid": 4447, 4 | "pid": 8194, 5 | "softwareVersion": 1011, 6 | "softwareVersionString": "1.0.1.1", 7 | "cdVersionNumber": 1, 8 | "firmwareInformation": "", 9 | "softwareVersionValid": false, 10 | "otaUrl": "https://cdn.aqara.com/cdn/opencloud-product/mainland/product-firmware/prd/aqara.matter.4447_8194/20240306154144_rel_up_to_enc_ota_sbl_app_aqara.matter.4447_8194_1.0.1.1_115F_2002_20240115195007_7a9b91.ota", 11 | "otaFileSize": "615708", 12 | "otaChecksum": "rFZ6WdH0DuuCf7HVoRmNjCF73mYZ98DGYpHoDKmf0Bw=", 13 | "otaChecksumType": 1, 14 | "minApplicableSoftwareVersion": 1000, 15 | "maxApplicableSoftwareVersion": 1010, 16 | "releaseNotesUrl": "", 17 | "creator": "cosmos1qpz3ghnqj6my7gzegkftzav9hpxymkx6zdk73v" 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /tests/server/ota/fixtures/4447-8194-1011-valid.json: -------------------------------------------------------------------------------- 1 | { 2 | "modelVersion": { 3 | "vid": 4447, 4 | "pid": 8194, 5 | "softwareVersion": 1011, 6 | "softwareVersionString": "1.0.1.1", 7 | "cdVersionNumber": 1, 8 | "firmwareInformation": "", 9 | "softwareVersionValid": true, 10 | "otaUrl": "https://cdn.aqara.com/cdn/opencloud-product/mainland/product-firmware/prd/aqara.matter.4447_8194/20240306154144_rel_up_to_enc_ota_sbl_app_aqara.matter.4447_8194_1.0.1.1_115F_2002_20240115195007_7a9b91.ota", 11 | "otaFileSize": "615708", 12 | "otaChecksum": "rFZ6WdH0DuuCf7HVoRmNjCF73mYZ98DGYpHoDKmf0Bw=", 13 | "otaChecksumType": 1, 14 | "minApplicableSoftwareVersion": 1000, 15 | "maxApplicableSoftwareVersion": 1010, 16 | "releaseNotesUrl": "", 17 | "creator": "cosmos1qpz3ghnqj6my7gzegkftzav9hpxymkx6zdk73v" 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /tests/server/ota/fixtures/4447-8194.json: -------------------------------------------------------------------------------- 1 | { 2 | "modelVersions": { 3 | "vid": 4447, 4 | "pid": 8194, 5 | "softwareVersions": [ 6 | 1000, 7 | 1011 8 | ] 9 | } 10 | } 11 | -------------------------------------------------------------------------------- /tests/server/ota/test_dcl.py: -------------------------------------------------------------------------------- 1 | """Test DCL OTA updates.""" 2 | 3 | import json 4 | import pathlib 5 | from typing import Any 6 | from unittest.mock import MagicMock 7 | 8 | from aioresponses import aioresponses 9 | import pytest 10 | 11 | from matter_server.server.helpers import DCL_PRODUCTION_URL 12 | from matter_server.server.ota.dcl import check_for_update 13 | 14 | 15 | def _load_fixture(file_name) -> Any: 16 | path = pathlib.Path(__file__).parent.joinpath("fixtures", file_name) 17 | with open(path, "r") as f: 18 | return json.load(f) 19 | 20 | 21 | @pytest.fixture(name="aioresponse") 22 | def mock_aioresponse(): 23 | """Mock the aiohttp.ClientSession.""" 24 | with aioresponses() as m: 25 | yield m 26 | 27 | 28 | def mock_dcl_version( 29 | aioresponse, vid: int, pid: int, version: int | None = None, suffix: str = "" 30 | ) -> dict: 31 | """Test.""" 32 | if version: 33 | data = _load_fixture(f"{vid}-{pid}-{version}{suffix}.json") 34 | url = DCL_PRODUCTION_URL + 
f"/dcl/model/versions/{vid}/{pid}/{version}" 35 | else: 36 | data = _load_fixture(f"{vid}-{pid}{suffix}.json") 37 | url = DCL_PRODUCTION_URL + f"/dcl/model/versions/{vid}/{pid}" 38 | aioresponse.get(url=url, status=200, payload=data) 39 | return data 40 | 41 | 42 | @pytest.fixture(name="get_software_versions", autouse=True) 43 | def _mock_get_software_versions(aioresponse) -> None: 44 | """Mock the _get_software_versions function.""" 45 | mock_dcl_version(aioresponse, 4447, 8194) 46 | mock_dcl_version(aioresponse, 4447, 8194, 1000) 47 | 48 | 49 | async def test_check_updates(aioresponse): 50 | """Test the case where the latest software version is applicable.""" 51 | # Call the function with a current software version of 1000 52 | data = mock_dcl_version(aioresponse, 4447, 8194, 1011, "-valid") 53 | result = await check_for_update(MagicMock(), 4447, 8194, 1000) 54 | 55 | assert result == data["modelVersion"] 56 | 57 | 58 | async def test_check_updates_not_applicable(aioresponse): 59 | """Test the case where the latest software version is not applicable.""" 60 | # Call the function with a current software version of 2000 61 | mock_dcl_version(aioresponse, 4447, 8194, 1011, "-valid") 62 | result = await check_for_update(MagicMock(), 4447, 8194, 2000) 63 | 64 | assert result is None 65 | 66 | 67 | async def test_check_updates_not_applicable_not_valid(aioresponse): 68 | """Test the case where the latest software version is not valid.""" 69 | mock_dcl_version(aioresponse, 4447, 8194, 1011, "-invalid") 70 | result = await check_for_update(MagicMock(), 4447, 8194, 1000) 71 | 72 | assert result is None 73 | 74 | 75 | async def test_check_updates_specific_version(aioresponse): 76 | """Test the case to get a specific version.""" 77 | # Call the function with a current software version of 1000 and request 1011 as update 78 | data = mock_dcl_version(aioresponse, 4447, 8194, 1011, "-valid") 79 | result = await check_for_update(MagicMock(), 4447, 8194, 1000, 1011) 80 | 81 | assert result == data["modelVersion"] 82 | 83 | 84 | async def test_check_no_update_if_url_empty(aioresponse): 85 | """Test the case checks if latest version gets picked version.""" 86 | # Call the function with a current software version of 1000 and request 1011 as update 87 | mock_dcl_version(aioresponse, 4442, 67) 88 | mock_dcl_version(aioresponse, 4442, 67, 197888) 89 | mock_dcl_version(aioresponse, 4442, 67, 197910) 90 | mock_dcl_version(aioresponse, 4442, 67, 198340) 91 | result = await check_for_update(MagicMock(), 4442, 67, 197120) 92 | 93 | assert result is None 94 | -------------------------------------------------------------------------------- /tests/test_device_controller.py: -------------------------------------------------------------------------------- 1 | """Device controller tests.""" 2 | 3 | import pytest 4 | 5 | from matter_server.server.device_controller import RE_MDNS_SERVICE_NAME 6 | 7 | 8 | @pytest.mark.parametrize( 9 | ("name", "expected"), 10 | [ 11 | ( 12 | "D22DC25523A78A89-0000000000000125._matter._tcp.local.", 13 | ("D22DC25523A78A89", "0000000000000125"), 14 | ), 15 | ( 16 | "d22dc25523a78a89-0000000000000125._matter._tcp.local.", 17 | ("d22dc25523a78a89", "0000000000000125"), 18 | ), 19 | ], 20 | ) 21 | def test_valid_mdns_service_names(name, expected): 22 | """Test valid mDNS service names.""" 23 | match = RE_MDNS_SERVICE_NAME.match(name) 24 | assert match is not None 25 | assert match.groups() == expected 26 | 27 | 28 | @pytest.mark.parametrize( 29 | "name", 30 | [ 31 | 
"D22DC25523A78A89-0000000000000125 (2)._matter._tcp.local.", 32 | "D22DC25523A78A89-0000000000000125.._matter._tcp.local.", 33 | "G22DC25523A78A89-0000000000000125._matter._tcp.local.", # invalid hex 34 | "D22DC25523A78A89-0000000000000125._matterc._udp.local.", 35 | ], 36 | ) 37 | def test_invalid_mdns_service_names(name): 38 | """Test invalid mDNS service names.""" 39 | assert RE_MDNS_SERVICE_NAME.match(name) is None 40 | --------------------------------------------------------------------------------