├── .github
│   ├── FUNDING.yml
│   ├── ISSUE_TEMPLATE
│   │   └── config.yml
│   ├── PULL_REQUEST_TEMPLATE.md
│   ├── actions
│   │   ├── build-image
│   │   │   └── action.yaml
│   │   └── restore-python
│   │       └── action.yml
│   ├── dependabot.yml
│   └── workflows
│       ├── ci-api-proto.yml
│       ├── ci-docker.yml
│       ├── ci.yml
│       ├── lock.yml
│       ├── matchers
│       │   ├── ci-custom.json
│       │   ├── clang-tidy.json
│       │   ├── gcc.json
│       │   ├── lint-python.json
│       │   ├── pytest.json
│       │   └── python.json
│       ├── needs-docs.yml
│       ├── release.yml
│       ├── stale.yml
│       ├── sync-device-classes.yml
│       └── yaml-lint.yml
├── .gitignore
├── README.md
├── requirements.txt
├── requirements_dev.txt
├── requirements_optional.txt
├── requirements_test.txt
└── script
    ├── api_protobuf
    │   └── api_protobuf.py
    ├── build_codeowners.py
    ├── build_language_schema.py
    ├── bump-version.py
    ├── ci-custom.py
    ├── ci-suggest-changes
    ├── clang-format
    ├── clang-tidy
    ├── component_test
    ├── devcontainer-post-create
    ├── fulltest
    ├── helpers.py
    ├── lint-cpp
    ├── lint-python
    ├── list-components.py
    ├── platformio_install_deps.py
    ├── quicklint
    ├── run-in-env.sh
    ├── setup
    ├── sync-device_class.py
    ├── test
    ├── test_build_components
    └── unit_test
/.github/FUNDING.yml:
--------------------------------------------------------------------------------
1 | ---
2 | # These are supported funding model platforms
3 |
4 | custom: https://www.nabucasa.com
5 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/config.yml:
--------------------------------------------------------------------------------
1 | ---
2 | blank_issues_enabled: true
3 | # contact_links:
4 | # - name: Issue Tracker
5 | # url: https://github.com/esphome/issues
6 | # about: Please create bug reports in the dedicated issue tracker.
7 | # - name: Feature Request Tracker
8 | # url: https://github.com/esphome/feature-requests
9 | # about: |
10 | # Please create feature requests in the dedicated feature request tracker.
11 | # - name: Frequently Asked Question
12 | # url: https://esphome.io/guides/faq.html
13 | # about: |
14 | # Please view the FAQ for common questions and what
15 | # to include in a bug report.
16 |
--------------------------------------------------------------------------------
/.github/PULL_REQUEST_TEMPLATE.md:
--------------------------------------------------------------------------------
1 | # What does this implement/fix?
2 |
3 |
4 |
5 | ## Types of changes
6 |
7 | - [ ] Bugfix (non-breaking change which fixes an issue)
8 | - [ ] New feature (non-breaking change which adds functionality)
9 | - [ ] Breaking change (fix or feature that would cause existing functionality to not work as expected)
10 | - [ ] Other
11 |
12 | **Related issue or feature (if applicable):** fixes
13 |
14 | **Pull request in [esphome-docs](https://github.com/esphome/esphome-docs) with documentation (if applicable):** esphome/esphome-docs#
15 |
16 | ## Test Environment
17 |
18 | - [ ] ESP32
19 | - [ ] ESP32 IDF
20 | - [ ] ESP8266
21 | - [ ] RP2040
22 | - [ ] BK72xx
23 | - [ ] RTL87xx
24 |
25 | ## Example entry for `config.yaml`:
26 |
32 |
33 | ```yaml
34 | # Example config.yaml
35 |
36 | ```
37 |
38 | ## Checklist:
39 | - [ ] The code change is tested and works locally.
40 | - [ ] Tests have been added to verify that the new code works (under `tests/` folder).
41 |
42 | If user exposed functionality or configuration variables are added/changed:
43 | - [ ] Documentation added/updated in [esphome-docs](https://github.com/esphome/esphome-docs).
44 |
--------------------------------------------------------------------------------
/.github/actions/build-image/action.yaml:
--------------------------------------------------------------------------------
1 | name: Build Image
2 | inputs:
3 | platform:
4 | description: "Platform to build for"
5 | required: true
6 | example: "linux/amd64"
7 | target:
8 | description: "Target to build"
9 | required: true
10 | example: "docker"
11 | baseimg:
12 | description: "Base image type"
13 | required: true
14 | example: "docker"
15 | suffix:
16 | description: "Suffix to add to tags"
17 | required: true
18 | version:
19 | description: "Version to build"
20 | required: true
21 | example: "2023.12.0"
22 | runs:
23 | using: "composite"
24 | steps:
25 | - name: Generate short tags
26 | id: tags
27 | shell: bash
28 | run: |
29 | output=$(docker/generate_tags.py \
30 | --tag "${{ inputs.version }}" \
31 | --suffix "${{ inputs.suffix }}")
32 | echo $output
33 | for l in $output; do
34 | echo $l >> $GITHUB_OUTPUT
35 | done
36 |
37 | # set cache-to only if dev branch
38 | - id: cache-to
39 | shell: bash
40 | run: |-
41 | if [[ "${{ github.ref }}" == "refs/heads/dev" ]]; then
42 | echo "value=type=gha,mode=max" >> $GITHUB_OUTPUT
43 | else
44 | echo "value=" >> $GITHUB_OUTPUT
45 | fi
46 |
47 | - name: Build and push to ghcr by digest
48 | id: build-ghcr
49 | uses: docker/build-push-action@v6.0.0
50 | with:
51 | context: .
52 | file: ./docker/Dockerfile
53 | platforms: ${{ inputs.platform }}
54 | target: ${{ inputs.target }}
55 | cache-from: type=gha
56 | cache-to: ${{ steps.cache-to.outputs.value }}
57 | build-args: |
58 | BASEIMGTYPE=${{ inputs.baseimg }}
59 | BUILD_VERSION=${{ inputs.version }}
60 | outputs: |
61 | type=image,name=ghcr.io/${{ steps.tags.outputs.image_name }},push-by-digest=true,name-canonical=true,push=true
62 |
63 | - name: Export ghcr digests
64 | shell: bash
65 | run: |
66 | mkdir -p /tmp/digests/${{ inputs.target }}/ghcr
67 | digest="${{ steps.build-ghcr.outputs.digest }}"
68 | touch "/tmp/digests/${{ inputs.target }}/ghcr/${digest#sha256:}"
69 |
70 | - name: Build and push to dockerhub by digest
71 | id: build-dockerhub
72 | uses: docker/build-push-action@v6.0.0
73 | with:
74 | context: .
75 | file: ./docker/Dockerfile
76 | platforms: ${{ inputs.platform }}
77 | target: ${{ inputs.target }}
78 | cache-from: type=gha
79 | cache-to: ${{ steps.cache-to.outputs.value }}
80 | build-args: |
81 | BASEIMGTYPE=${{ inputs.baseimg }}
82 | BUILD_VERSION=${{ inputs.version }}
83 | outputs: |
84 | type=image,name=docker.io/${{ steps.tags.outputs.image_name }},push-by-digest=true,name-canonical=true,push=true
85 |
86 | - name: Export dockerhub digests
87 | shell: bash
88 | run: |
89 | mkdir -p /tmp/digests/${{ inputs.target }}/dockerhub
90 | digest="${{ steps.build-dockerhub.outputs.digest }}"
91 | touch "/tmp/digests/${{ inputs.target }}/dockerhub/${digest#sha256:}"
92 |
--------------------------------------------------------------------------------
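The "Generate short tags" step above relies on a simple contract: `docker/generate_tags.py` prints `key=value` lines, and the loop appends each one to `$GITHUB_OUTPUT`, making them readable as `${{ steps.tags.outputs.<key> }}` in later steps (the build steps consume `image_name` this way). A minimal sketch of a script honoring that contract follows; the tag-derivation logic is illustrative, not the real generate_tags.py:

```python
#!/usr/bin/env python3
# Minimal sketch of the GITHUB_OUTPUT contract used by the "Generate short
# tags" step: every "key=value" line printed here is appended to
# $GITHUB_OUTPUT and becomes a step output. The tag logic below is
# illustrative only, not the real docker/generate_tags.py.
import argparse

parser = argparse.ArgumentParser()
parser.add_argument("--tag", required=True)
parser.add_argument("--suffix", required=True)
args = parser.parse_args()

suffix = f"-{args.suffix}" if args.suffix else ""
print(f"image_name=esphome/esphome{suffix}")  # read as steps.tags.outputs.image_name
print(f"tags={args.tag}{suffix}")
```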
/.github/actions/restore-python/action.yml:
--------------------------------------------------------------------------------
1 | name: Restore Python
2 | inputs:
3 | python-version:
4 | description: Python version to restore
5 | required: true
6 | type: string
7 | cache-key:
8 | description: Cache key to use
9 | required: true
10 | type: string
11 | outputs:
12 | python-version:
13 | description: Python version restored
14 | value: ${{ steps.python.outputs.python-version }}
15 | runs:
16 | using: "composite"
17 | steps:
18 | - name: Set up Python ${{ inputs.python-version }}
19 | id: python
20 | uses: actions/setup-python@v5.1.0
21 | with:
22 | python-version: ${{ inputs.python-version }}
23 | - name: Restore Python virtual environment
24 | id: cache-venv
25 | uses: actions/cache/restore@v4.0.2
26 | with:
27 | path: venv
28 | # yamllint disable-line rule:line-length
29 | key: ${{ runner.os }}-${{ steps.python.outputs.python-version }}-venv-${{ inputs.cache-key }}
30 | - name: Create Python virtual environment
31 | if: steps.cache-venv.outputs.cache-hit != 'true' && runner.os != 'Windows'
32 | shell: bash
33 | run: |
34 | python -m venv venv
35 | source venv/bin/activate
36 | python --version
37 | pip install -r requirements.txt -r requirements_optional.txt -r requirements_test.txt
38 | pip install -e .
39 | - name: Create Python virtual environment
40 | if: steps.cache-venv.outputs.cache-hit != 'true' && runner.os == 'Windows'
41 | shell: bash
42 | run: |
43 | python -m venv venv
44 | ./venv/Scripts/activate
45 | python --version
46 | pip install -r requirements.txt -r requirements_optional.txt -r requirements_test.txt
47 | pip install -e .
48 |
--------------------------------------------------------------------------------
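The `cache-key` input this action receives is derived in ci.yml's `common` job from `hashFiles()` over the three requirements files, so the cached venv is rebuilt whenever a dependency pin changes. A sketch of that idea in Python; it mirrors the intent of `hashFiles()`, not its exact algorithm:

```python
# Sketch of a content-based venv cache key: hash the dependency manifests
# so the cache is invalidated whenever any pin changes. This approximates
# the intent of GitHub's hashFiles(), not its exact algorithm.
import hashlib
import platform

def venv_cache_key(*manifests: str) -> str:
    h = hashlib.sha256()
    for path in manifests:
        with open(path, "rb") as f:
            h.update(f.read())
    return f"{platform.system()}-venv-{h.hexdigest()}"

print(venv_cache_key("requirements.txt", "requirements_optional.txt",
                     "requirements_test.txt"))
```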
/.github/dependabot.yml:
--------------------------------------------------------------------------------
1 | ---
2 | version: 2
3 | updates:
4 | - package-ecosystem: pip
5 | directory: "/"
6 | schedule:
7 | interval: daily
8 | ignore:
9 |       # Hypothesis is only used for testing and is updated quite often
10 | - dependency-name: hypothesis
11 | - package-ecosystem: github-actions
12 | directory: "/"
13 | schedule:
14 | interval: daily
15 | open-pull-requests-limit: 10
16 | - package-ecosystem: github-actions
17 | directory: "/.github/actions/build-image"
18 | schedule:
19 | interval: daily
20 | open-pull-requests-limit: 10
21 | - package-ecosystem: github-actions
22 | directory: "/.github/actions/restore-python"
23 | schedule:
24 | interval: daily
25 | open-pull-requests-limit: 10
26 |
--------------------------------------------------------------------------------
/.github/workflows/ci-api-proto.yml:
--------------------------------------------------------------------------------
1 | name: API Proto CI
2 |
3 | on:
4 | pull_request:
5 | paths:
6 | - "esphome/components/api/api.proto"
7 | - "esphome/components/api/api_pb2.cpp"
8 | - "esphome/components/api/api_pb2.h"
9 | - "esphome/components/api/api_pb2_service.cpp"
10 | - "esphome/components/api/api_pb2_service.h"
11 | - "script/api_protobuf/api_protobuf.py"
12 | - ".github/workflows/ci-api-proto.yml"
13 |
14 | permissions:
15 | contents: read
16 | pull-requests: write
17 |
18 | jobs:
19 | check:
20 | name: Check generated files
21 | runs-on: ubuntu-latest
22 | steps:
23 | - name: Checkout
24 | uses: actions/checkout@v4.1.7
25 | - name: Set up Python
26 | uses: actions/setup-python@v5.1.0
27 | with:
28 | python-version: "3.11"
29 |
30 | - name: Install apt dependencies
31 | run: |
32 | sudo apt update
33 | sudo apt-cache show protobuf-compiler
34 | sudo apt install -y protobuf-compiler
35 | protoc --version
36 | - name: Install python dependencies
37 | run: pip install aioesphomeapi -c requirements.txt -r requirements_dev.txt
38 | - name: Generate files
39 | run: script/api_protobuf/api_protobuf.py
40 | - name: Check for changes
41 | run: |
42 | if ! git diff --quiet; then
43 | echo "## Job Failed" | tee -a $GITHUB_STEP_SUMMARY
44 | echo "You have altered the generated proto files but they do not match what is expected." | tee -a $GITHUB_STEP_SUMMARY
45 | echo "Please run 'script/api_protobuf/api_protobuf.py' and commit the changes." | tee -a $GITHUB_STEP_SUMMARY
46 | exit 1
47 | fi
48 | - if: failure()
49 | name: Review PR
50 | uses: actions/github-script@v7.0.1
51 | with:
52 | script: |
53 | await github.rest.pulls.createReview({
54 | pull_number: context.issue.number,
55 | owner: context.repo.owner,
56 | repo: context.repo.repo,
57 | event: 'REQUEST_CHANGES',
58 | body: 'You have altered the generated proto files but they do not match what is expected.\nPlease run "script/api_protobuf/api_protobuf.py" and commit the changes.'
59 | })
60 | - if: success()
61 | name: Dismiss review
62 | uses: actions/github-script@v7.0.1
63 | with:
64 | script: |
65 | let reviews = await github.rest.pulls.listReviews({
66 | pull_number: context.issue.number,
67 | owner: context.repo.owner,
68 | repo: context.repo.repo
69 | });
70 | for (let review of reviews.data) {
71 | if (review.user.login === 'github-actions[bot]' && review.state === 'CHANGES_REQUESTED') {
72 | await github.rest.pulls.dismissReview({
73 | pull_number: context.issue.number,
74 | owner: context.repo.owner,
75 | repo: context.repo.repo,
76 | review_id: review.id,
77 | message: 'Files now match the expected proto files.'
78 | });
79 | }
80 | }
81 |
--------------------------------------------------------------------------------
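The "Check for changes" step works because `git diff --quiet` exits non-zero when the regenerated files differ from the committed ones. A sketch of the same guard in Python, useful for running the check locally, assuming it runs from the repository root:

```python
# Sketch of the "generated files are up to date" check used above:
# regenerate the proto files, then fail if the working tree differs
# from HEAD. Assumes it is run from the repository root.
import subprocess
import sys

subprocess.run(["script/api_protobuf/api_protobuf.py"], check=True)
if subprocess.run(["git", "diff", "--quiet"]).returncode != 0:
    sys.exit("Generated proto files changed; run the script and commit.")
```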
/.github/workflows/ci-docker.yml:
--------------------------------------------------------------------------------
1 | ---
2 | name: CI for docker images
3 |
4 | # Only run when docker paths change
5 |
6 | on:
7 | push:
8 | branches: [dev, beta, release]
9 | paths:
10 | - "docker/**"
11 | - ".github/workflows/ci-docker.yml"
12 | - "requirements*.txt"
13 | - "platformio.ini"
14 | - "script/platformio_install_deps.py"
15 |
16 | pull_request:
17 | paths:
18 | - "docker/**"
19 | - ".github/workflows/ci-docker.yml"
20 | - "requirements*.txt"
21 | - "platformio.ini"
22 | - "script/platformio_install_deps.py"
23 |
24 | permissions:
25 | contents: read
26 | packages: read
27 |
28 | concurrency:
29 | # yamllint disable-line rule:line-length
30 | group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
31 | cancel-in-progress: true
32 |
33 | jobs:
34 | check-docker:
35 | name: Build docker containers
36 | runs-on: ubuntu-latest
37 | strategy:
38 | fail-fast: false
39 | matrix:
40 | arch: [amd64, armv7, aarch64]
41 | build_type: ["ha-addon", "docker", "lint"]
42 | steps:
43 | - uses: actions/checkout@v4.1.7
44 | - name: Set up Python
45 | uses: actions/setup-python@v5.1.0
46 | with:
47 | python-version: "3.9"
48 | - name: Set up Docker Buildx
49 | uses: docker/setup-buildx-action@v3.3.0
50 | - name: Set up QEMU
51 | uses: docker/setup-qemu-action@v3.0.0
52 |
53 | - name: Set TAG
54 | run: |
55 | echo "TAG=check" >> $GITHUB_ENV
56 |
57 | - name: Run build
58 | run: |
59 | docker/build.py \
60 | --tag "${TAG}" \
61 | --arch "${{ matrix.arch }}" \
62 | --build-type "${{ matrix.build_type }}" \
63 | build
64 |
--------------------------------------------------------------------------------
/.github/workflows/ci.yml:
--------------------------------------------------------------------------------
1 | ---
2 | name: CI
3 |
4 | on:
5 | pull_request:
6 | paths:
7 | - "**"
8 | - "!.github/workflows/*.yml"
9 | - ".github/workflows/ci.yml"
10 | - "!.yamllint"
11 | - "!.github/dependabot.yml"
12 | merge_group:
13 |
14 | permissions:
15 | contents: read
16 |
17 | env:
18 | DEFAULT_PYTHON: "3.9"
19 | PYUPGRADE_TARGET: "--py39-plus"
20 |
21 | concurrency:
22 | # yamllint disable-line rule:line-length
23 | group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
24 | cancel-in-progress: true
25 |
26 | jobs:
27 | common:
28 | name: Create common environment
29 | runs-on: ubuntu-latest
30 | outputs:
31 | cache-key: ${{ steps.cache-key.outputs.key }}
32 | steps:
33 | - name: Check out code from GitHub
34 | uses: actions/checkout@v4.1.7
35 | - name: Generate cache-key
36 | id: cache-key
37 | run: echo key="${{ hashFiles('requirements.txt', 'requirements_optional.txt', 'requirements_test.txt') }}" >> $GITHUB_OUTPUT
38 | - name: Set up Python ${{ env.DEFAULT_PYTHON }}
39 | id: python
40 | uses: actions/setup-python@v5.1.0
41 | with:
42 | python-version: ${{ env.DEFAULT_PYTHON }}
43 | - name: Restore Python virtual environment
44 | id: cache-venv
45 | uses: actions/cache@v4.0.2
46 | with:
47 | path: venv
48 | # yamllint disable-line rule:line-length
49 | key: ${{ runner.os }}-${{ steps.python.outputs.python-version }}-venv-${{ steps.cache-key.outputs.key }}
50 | - name: Create Python virtual environment
51 | if: steps.cache-venv.outputs.cache-hit != 'true'
52 | run: |
53 | python -m venv venv
54 | . venv/bin/activate
55 | python --version
56 | pip install -r requirements.txt -r requirements_optional.txt -r requirements_test.txt
57 | pip install -e .
58 |
59 | black:
60 | name: Check black
61 | runs-on: ubuntu-latest
62 | needs:
63 | - common
64 | steps:
65 | - name: Check out code from GitHub
66 | uses: actions/checkout@v4.1.7
67 | - name: Restore Python
68 | uses: ./.github/actions/restore-python
69 | with:
70 | python-version: ${{ env.DEFAULT_PYTHON }}
71 | cache-key: ${{ needs.common.outputs.cache-key }}
72 | - name: Run black
73 | run: |
74 | . venv/bin/activate
75 | black --verbose esphome tests
76 | - name: Suggested changes
77 | run: script/ci-suggest-changes
78 | if: always()
79 |
80 | flake8:
81 | name: Check flake8
82 | runs-on: ubuntu-latest
83 | needs:
84 | - common
85 | steps:
86 | - name: Check out code from GitHub
87 | uses: actions/checkout@v4.1.7
88 | - name: Restore Python
89 | uses: ./.github/actions/restore-python
90 | with:
91 | python-version: ${{ env.DEFAULT_PYTHON }}
92 | cache-key: ${{ needs.common.outputs.cache-key }}
93 | - name: Run flake8
94 | run: |
95 | . venv/bin/activate
96 | flake8 esphome
97 | - name: Suggested changes
98 | run: script/ci-suggest-changes
99 | if: always()
100 |
101 | pylint:
102 | name: Check pylint
103 | runs-on: ubuntu-latest
104 | needs:
105 | - common
106 | steps:
107 | - name: Check out code from GitHub
108 | uses: actions/checkout@v4.1.7
109 | - name: Restore Python
110 | uses: ./.github/actions/restore-python
111 | with:
112 | python-version: ${{ env.DEFAULT_PYTHON }}
113 | cache-key: ${{ needs.common.outputs.cache-key }}
114 | - name: Run pylint
115 | run: |
116 | . venv/bin/activate
117 | pylint -f parseable --persistent=n esphome
118 | - name: Suggested changes
119 | run: script/ci-suggest-changes
120 | if: always()
121 |
122 | pyupgrade:
123 | name: Check pyupgrade
124 | runs-on: ubuntu-latest
125 | needs:
126 | - common
127 | steps:
128 | - name: Check out code from GitHub
129 | uses: actions/checkout@v4.1.7
130 | - name: Restore Python
131 | uses: ./.github/actions/restore-python
132 | with:
133 | python-version: ${{ env.DEFAULT_PYTHON }}
134 | cache-key: ${{ needs.common.outputs.cache-key }}
135 | - name: Run pyupgrade
136 | run: |
137 | . venv/bin/activate
138 | pyupgrade ${{ env.PYUPGRADE_TARGET }} `find esphome -name "*.py" -type f`
139 | - name: Suggested changes
140 | run: script/ci-suggest-changes
141 | if: always()
142 |
143 | ci-custom:
144 | name: Run script/ci-custom
145 | runs-on: ubuntu-latest
146 | needs:
147 | - common
148 | steps:
149 | - name: Check out code from GitHub
150 | uses: actions/checkout@v4.1.7
151 | - name: Restore Python
152 | uses: ./.github/actions/restore-python
153 | with:
154 | python-version: ${{ env.DEFAULT_PYTHON }}
155 | cache-key: ${{ needs.common.outputs.cache-key }}
156 | - name: Register matcher
157 | run: echo "::add-matcher::.github/workflows/matchers/ci-custom.json"
158 | - name: Run script/ci-custom
159 | run: |
160 | . venv/bin/activate
161 | script/ci-custom.py
162 | script/build_codeowners.py --check
163 |
164 | pytest:
165 | name: Run pytest
166 | strategy:
167 | fail-fast: false
168 | matrix:
169 | python-version:
170 | - "3.9"
171 | - "3.10"
172 | - "3.11"
173 | - "3.12"
174 | os:
175 | - ubuntu-latest
176 | - macOS-latest
177 | - windows-latest
178 | exclude:
179 | # Minimize CI resource usage
180 | # by only running the Python version
181 |           # used for docker images on Windows and macOS
182 | - python-version: "3.12"
183 | os: windows-latest
184 | - python-version: "3.10"
185 | os: windows-latest
186 | - python-version: "3.9"
187 | os: windows-latest
188 | - python-version: "3.12"
189 | os: macOS-latest
190 | - python-version: "3.10"
191 | os: macOS-latest
192 | - python-version: "3.9"
193 | os: macOS-latest
194 | runs-on: ${{ matrix.os }}
195 | needs:
196 | - common
197 | steps:
198 | - name: Check out code from GitHub
199 | uses: actions/checkout@v4.1.7
200 | - name: Restore Python
201 | uses: ./.github/actions/restore-python
202 | with:
203 | python-version: ${{ matrix.python-version }}
204 | cache-key: ${{ needs.common.outputs.cache-key }}
205 | - name: Register matcher
206 | run: echo "::add-matcher::.github/workflows/matchers/pytest.json"
207 | - name: Run pytest
208 | if: matrix.os == 'windows-latest'
209 | run: |
210 | ./venv/Scripts/activate
211 | pytest -vv --cov-report=xml --tb=native tests
212 | - name: Run pytest
213 | if: matrix.os == 'ubuntu-latest' || matrix.os == 'macOS-latest'
214 | run: |
215 | . venv/bin/activate
216 | pytest -vv --cov-report=xml --tb=native tests
217 | - name: Upload coverage to Codecov
218 | uses: codecov/codecov-action@v4
219 | with:
220 | token: ${{ secrets.CODECOV_TOKEN }}
221 |
222 | clang-format:
223 | name: Check clang-format
224 | runs-on: ubuntu-latest
225 | needs:
226 | - common
227 | steps:
228 | - name: Check out code from GitHub
229 | uses: actions/checkout@v4.1.7
230 | - name: Restore Python
231 | uses: ./.github/actions/restore-python
232 | with:
233 | python-version: ${{ env.DEFAULT_PYTHON }}
234 | cache-key: ${{ needs.common.outputs.cache-key }}
235 | - name: Install clang-format
236 | run: |
237 | . venv/bin/activate
238 | pip install clang-format -c requirements_dev.txt
239 | - name: Run clang-format
240 | run: |
241 | . venv/bin/activate
242 | script/clang-format -i
243 | git diff-index --quiet HEAD --
244 | - name: Suggested changes
245 | run: script/ci-suggest-changes
246 | if: always()
247 |
248 | compile-tests-list:
249 | runs-on: ubuntu-latest
250 | outputs:
251 | matrix: ${{ steps.set-matrix.outputs.matrix }}
252 | steps:
253 | - name: Check out code from GitHub
254 | uses: actions/checkout@v4.1.7
255 | - name: Find all YAML test files
256 | id: set-matrix
257 | run: echo "matrix=$(ls tests/test*.yaml | jq -R -s -c 'split("\n")[:-1]')" >> $GITHUB_OUTPUT
258 |
259 | validate-tests:
260 | name: Validate YAML test ${{ matrix.file }}
261 | runs-on: ubuntu-latest
262 | needs:
263 | - common
264 | - compile-tests-list
265 | strategy:
266 | fail-fast: false
267 | matrix:
268 | file: ${{ fromJson(needs.compile-tests-list.outputs.matrix) }}
269 | steps:
270 | - name: Check out code from GitHub
271 | uses: actions/checkout@v4.1.7
272 | - name: Restore Python
273 | uses: ./.github/actions/restore-python
274 | with:
275 | python-version: ${{ env.DEFAULT_PYTHON }}
276 | cache-key: ${{ needs.common.outputs.cache-key }}
277 | - name: Run esphome config ${{ matrix.file }}
278 | run: |
279 | . venv/bin/activate
280 | esphome config ${{ matrix.file }}
281 |
282 | compile-tests:
283 | name: Run YAML test ${{ matrix.file }}
284 | runs-on: ubuntu-latest
285 | needs:
286 | - common
287 | - black
288 | - ci-custom
289 | - clang-format
290 | - flake8
291 | - pylint
292 | - pytest
293 | - pyupgrade
294 | - compile-tests-list
295 | - validate-tests
296 | strategy:
297 | fail-fast: false
298 | max-parallel: 2
299 | matrix:
300 | file: ${{ fromJson(needs.compile-tests-list.outputs.matrix) }}
301 | steps:
302 | - name: Check out code from GitHub
303 | uses: actions/checkout@v4.1.7
304 | - name: Restore Python
305 | uses: ./.github/actions/restore-python
306 | with:
307 | python-version: ${{ env.DEFAULT_PYTHON }}
308 | cache-key: ${{ needs.common.outputs.cache-key }}
309 | - name: Run esphome compile ${{ matrix.file }}
310 | run: |
311 | . venv/bin/activate
312 | esphome compile ${{ matrix.file }}
313 |
314 | clang-tidy:
315 | name: ${{ matrix.name }}
316 | runs-on: ubuntu-latest
317 | needs:
318 | - common
319 | - black
320 | - ci-custom
321 | - clang-format
322 | - flake8
323 | - pylint
324 | - pytest
325 | - pyupgrade
326 | strategy:
327 | fail-fast: false
328 | max-parallel: 2
329 | matrix:
330 | include:
331 | - id: clang-tidy
332 | name: Run script/clang-tidy for ESP8266
333 | options: --environment esp8266-arduino-tidy --grep USE_ESP8266
334 | pio_cache_key: tidyesp8266
335 | - id: clang-tidy
336 | name: Run script/clang-tidy for ESP32 Arduino 1/4
337 | options: --environment esp32-arduino-tidy --split-num 4 --split-at 1
338 | pio_cache_key: tidyesp32
339 | - id: clang-tidy
340 | name: Run script/clang-tidy for ESP32 Arduino 2/4
341 | options: --environment esp32-arduino-tidy --split-num 4 --split-at 2
342 | pio_cache_key: tidyesp32
343 | - id: clang-tidy
344 | name: Run script/clang-tidy for ESP32 Arduino 3/4
345 | options: --environment esp32-arduino-tidy --split-num 4 --split-at 3
346 | pio_cache_key: tidyesp32
347 | - id: clang-tidy
348 | name: Run script/clang-tidy for ESP32 Arduino 4/4
349 | options: --environment esp32-arduino-tidy --split-num 4 --split-at 4
350 | pio_cache_key: tidyesp32
351 | - id: clang-tidy
352 | name: Run script/clang-tidy for ESP32 IDF
353 | options: --environment esp32-idf-tidy --grep USE_ESP_IDF
354 | pio_cache_key: tidyesp32-idf
355 |
356 | steps:
357 | - name: Check out code from GitHub
358 | uses: actions/checkout@v4.1.7
359 | - name: Restore Python
360 | uses: ./.github/actions/restore-python
361 | with:
362 | python-version: ${{ env.DEFAULT_PYTHON }}
363 | cache-key: ${{ needs.common.outputs.cache-key }}
364 |
365 | - name: Cache platformio
366 | if: github.ref == 'refs/heads/dev'
367 | uses: actions/cache@v4.0.2
368 | with:
369 | path: ~/.platformio
370 | key: platformio-${{ matrix.pio_cache_key }}
371 |
372 | - name: Cache platformio
373 | if: github.ref != 'refs/heads/dev'
374 | uses: actions/cache/restore@v4.0.2
375 | with:
376 | path: ~/.platformio
377 | key: platformio-${{ matrix.pio_cache_key }}
378 |
379 | - name: Install clang-tidy
380 | run: sudo apt-get install clang-tidy-14
381 |
382 | - name: Register problem matchers
383 | run: |
384 | echo "::add-matcher::.github/workflows/matchers/gcc.json"
385 | echo "::add-matcher::.github/workflows/matchers/clang-tidy.json"
386 |
387 | - name: Run 'pio run --list-targets -e esp32-idf-tidy'
388 | if: matrix.name == 'Run script/clang-tidy for ESP32 IDF'
389 | run: |
390 | . venv/bin/activate
391 | mkdir -p .temp
392 | pio run --list-targets -e esp32-idf-tidy
393 |
394 | - name: Run clang-tidy
395 | run: |
396 | . venv/bin/activate
397 | script/clang-tidy --all-headers --fix ${{ matrix.options }}
398 | env:
399 | # Also cache libdeps, store them in a ~/.platformio subfolder
400 | PLATFORMIO_LIBDEPS_DIR: ~/.platformio/libdeps
401 |
402 | - name: Suggested changes
403 | run: script/ci-suggest-changes
404 | # yamllint disable-line rule:line-length
405 | if: always()
406 |
407 | list-components:
408 | runs-on: ubuntu-latest
409 | needs:
410 | - common
411 | if: github.event_name == 'pull_request'
412 | outputs:
413 | components: ${{ steps.list-components.outputs.components }}
414 | count: ${{ steps.list-components.outputs.count }}
415 | steps:
416 | - name: Check out code from GitHub
417 | uses: actions/checkout@v4.1.7
418 | with:
419 | # Fetch enough history so `git merge-base refs/remotes/origin/dev HEAD` works.
420 | fetch-depth: 500
421 | - name: Get target branch
422 | id: target-branch
423 | run: |
424 | echo "branch=${{ github.event.pull_request.base.ref }}" >> $GITHUB_OUTPUT
425 | - name: Fetch ${{ steps.target-branch.outputs.branch }} branch
426 | run: |
427 | git -c protocol.version=2 fetch --no-tags --prune --no-recurse-submodules --depth=1 origin +refs/heads/${{ steps.target-branch.outputs.branch }}:refs/remotes/origin/${{ steps.target-branch.outputs.branch }}
428 | git merge-base refs/remotes/origin/${{ steps.target-branch.outputs.branch }} HEAD
429 | - name: Restore Python
430 | uses: ./.github/actions/restore-python
431 | with:
432 | python-version: ${{ env.DEFAULT_PYTHON }}
433 | cache-key: ${{ needs.common.outputs.cache-key }}
434 | - name: Find changed components
435 | id: list-components
436 | run: |
437 | . venv/bin/activate
438 | components=$(script/list-components.py --changed --branch ${{ steps.target-branch.outputs.branch }})
439 | output_components=$(echo "$components" | jq -R -s -c 'split("\n")[:-1] | map(select(length > 0))')
440 | count=$(echo "$output_components" | jq length)
441 |
442 | echo "components=$output_components" >> $GITHUB_OUTPUT
443 | echo "count=$count" >> $GITHUB_OUTPUT
444 |
445 | echo "$count Components:"
446 | echo "$output_components" | jq
447 |
448 | test-build-components:
449 | name: Component test ${{ matrix.file }}
450 | runs-on: ubuntu-latest
451 | needs:
452 | - common
453 | - list-components
454 | if: github.event_name == 'pull_request' && fromJSON(needs.list-components.outputs.count) > 0 && fromJSON(needs.list-components.outputs.count) < 100
455 | strategy:
456 | fail-fast: false
457 | max-parallel: 2
458 | matrix:
459 | file: ${{ fromJson(needs.list-components.outputs.components) }}
460 | steps:
461 | - name: Install dependencies
462 | run: sudo apt-get install libsodium-dev libsdl2-dev
463 |
464 | - name: Check out code from GitHub
465 | uses: actions/checkout@v4.1.7
466 | - name: Restore Python
467 | uses: ./.github/actions/restore-python
468 | with:
469 | python-version: ${{ env.DEFAULT_PYTHON }}
470 | cache-key: ${{ needs.common.outputs.cache-key }}
471 | - name: test_build_components -e config -c ${{ matrix.file }}
472 | run: |
473 | . venv/bin/activate
474 | ./script/test_build_components -e config -c ${{ matrix.file }}
475 | - name: test_build_components -e compile -c ${{ matrix.file }}
476 | run: |
477 | . venv/bin/activate
478 | ./script/test_build_components -e compile -c ${{ matrix.file }}
479 |
480 | test-build-components-splitter:
481 | name: Split components for testing into 20 groups maximum
482 | runs-on: ubuntu-latest
483 | needs:
484 | - common
485 | - list-components
486 | if: github.event_name == 'pull_request' && fromJSON(needs.list-components.outputs.count) >= 100
487 | outputs:
488 | matrix: ${{ steps.split.outputs.components }}
489 | steps:
490 | - name: Check out code from GitHub
491 | uses: actions/checkout@v4.1.7
492 | - name: Split components into 20 groups
493 | id: split
494 | run: |
495 | components=$(echo '${{ needs.list-components.outputs.components }}' | jq -c '.[]' | shuf | jq -s -c '[_nwise(20) | join(" ")]')
496 | echo "components=$components" >> $GITHUB_OUTPUT
497 |
498 | test-build-components-split:
499 | name: Test split components
500 | runs-on: ubuntu-latest
501 | needs:
502 | - common
503 | - list-components
504 | - test-build-components-splitter
505 | if: github.event_name == 'pull_request' && fromJSON(needs.list-components.outputs.count) >= 100
506 | strategy:
507 | fail-fast: false
508 | max-parallel: 4
509 | matrix:
510 | components: ${{ fromJson(needs.test-build-components-splitter.outputs.matrix) }}
511 | steps:
512 | - name: List components
513 | run: echo ${{ matrix.components }}
514 |
515 | - name: Install dependencies
516 | run: sudo apt-get install libsodium-dev libsdl2-dev
517 |
518 | - name: Check out code from GitHub
519 | uses: actions/checkout@v4.1.7
520 | - name: Restore Python
521 | uses: ./.github/actions/restore-python
522 | with:
523 | python-version: ${{ env.DEFAULT_PYTHON }}
524 | cache-key: ${{ needs.common.outputs.cache-key }}
525 | - name: Validate config
526 | run: |
527 | . venv/bin/activate
528 | for component in ${{ matrix.components }}; do
529 | ./script/test_build_components -e config -c $component
530 | done
531 | - name: Compile config
532 | run: |
533 | . venv/bin/activate
534 | for component in ${{ matrix.components }}; do
535 | ./script/test_build_components -e compile -c $component
536 | done
537 |
538 | ci-status:
539 | name: CI Status
540 | runs-on: ubuntu-latest
541 | needs:
542 | - common
543 | - black
544 | - ci-custom
545 | - clang-format
546 | - flake8
547 | - pylint
548 | - pytest
549 | - pyupgrade
550 | - compile-tests
551 | - clang-tidy
552 | - list-components
553 | - test-build-components
554 | - test-build-components-splitter
555 | - test-build-components-split
556 | if: always()
557 | steps:
558 | - name: Success
559 | if: ${{ !(contains(needs.*.result, 'failure')) }}
560 | run: exit 0
561 | - name: Failure
562 | if: ${{ contains(needs.*.result, 'failure') }}
563 | env:
564 | JSON_DOC: ${{ toJSON(needs) }}
565 | run: |
566 | echo $JSON_DOC | jq
567 | exit 1
568 |
--------------------------------------------------------------------------------
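When more than 100 components change, the `test-build-components-splitter` job above shuffles the component list and packs it into space-separated groups of at most 20 using jq's `_nwise(20)`. The same chunking expressed in Python, for reference:

```python
# Python equivalent of the jq splitter (shuf | _nwise(20) | join(" "))
# used by test-build-components-splitter: shuffle the component names,
# then emit space-separated groups of at most 20 as a JSON array.
import json
import random

def split_components(components: list[str], size: int = 20) -> list[str]:
    shuffled = components[:]
    random.shuffle(shuffled)
    return [" ".join(shuffled[i:i + size])
            for i in range(0, len(shuffled), size)]

print(json.dumps(split_components([f"component_{i}" for i in range(45)])))
# -> a JSON array of three strings, each naming up to 20 components
```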
/.github/workflows/lock.yml:
--------------------------------------------------------------------------------
1 | ---
2 | name: Lock
3 |
4 | on:
5 | schedule:
6 | - cron: "30 0 * * *"
7 | workflow_dispatch:
8 |
9 | permissions:
10 | issues: write
11 | pull-requests: write
12 |
13 | concurrency:
14 | group: lock
15 |
16 | jobs:
17 | lock:
18 | runs-on: ubuntu-latest
19 | steps:
20 | - uses: dessant/lock-threads@v5.0.1
21 | with:
22 | pr-inactive-days: "1"
23 | pr-lock-reason: ""
24 | exclude-any-pr-labels: keep-open
25 |
26 | issue-inactive-days: "7"
27 | issue-lock-reason: ""
28 | exclude-any-issue-labels: keep-open
29 |
--------------------------------------------------------------------------------
/.github/workflows/matchers/ci-custom.json:
--------------------------------------------------------------------------------
1 | {
2 | "problemMatcher": [
3 | {
4 | "owner": "ci-custom",
5 | "pattern": [
6 | {
7 | "regexp": "^(.*):(\\d+):(\\d+):\\s+lint:\\s+(.*)$",
8 | "file": 1,
9 | "line": 2,
10 | "column": 3,
11 | "message": 4
12 | }
13 | ]
14 | }
15 | ]
16 | }
17 |
--------------------------------------------------------------------------------
/.github/workflows/matchers/clang-tidy.json:
--------------------------------------------------------------------------------
1 | {
2 | "problemMatcher": [
3 | {
4 | "owner": "clang-tidy",
5 | "pattern": [
6 | {
7 | "regexp": "^(.*):(\\d+):(\\d+):\\s+(error):\\s+(.*) \\[([a-z0-9,\\-]+)\\]\\s*$",
8 | "file": 1,
9 | "line": 2,
10 | "column": 3,
11 | "severity": 4,
12 | "message": 5
13 | }
14 | ]
15 | }
16 | ]
17 | }
18 |
--------------------------------------------------------------------------------
/.github/workflows/matchers/gcc.json:
--------------------------------------------------------------------------------
1 | {
2 | "problemMatcher": [
3 | {
4 | "owner": "gcc",
5 | "severity": "error",
6 | "pattern": [
7 | {
8 | "regexp": "^src/(.*):(\\d+):(\\d+):\\s+(?:fatal\\s+)?(warning|error):\\s+(.*)$",
9 | "file": 1,
10 | "line": 2,
11 | "column": 3,
12 | "severity": 4,
13 | "message": 5
14 | }
15 | ]
16 | }
17 | ]
18 | }
19 |
--------------------------------------------------------------------------------
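The matcher above turns GCC-style diagnostics in the build log into inline annotations by capturing file, line, column, severity, and message. A quick sanity check of that regex against a made-up compiler line:

```python
# Sanity check of the gcc problem-matcher regex above against a made-up
# compiler diagnostic line.
import re

PATTERN = re.compile(
    r"^src/(.*):(\d+):(\d+):\s+(?:fatal\s+)?(warning|error):\s+(.*)$"
)

line = "src/esphome/core/component.cpp:42:7: error: 'foo' was not declared"
match = PATTERN.match(line)
assert match is not None
print(dict(zip(("file", "line", "column", "severity", "message"),
               match.groups())))
```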
/.github/workflows/matchers/lint-python.json:
--------------------------------------------------------------------------------
1 | {
2 | "problemMatcher": [
3 | {
4 | "owner": "black",
5 | "severity": "error",
6 | "pattern": [
7 | {
8 | "regexp": "^(.*): (Please format this file with the black formatter)",
9 | "file": 1,
10 | "message": 2
11 | }
12 | ]
13 | },
14 | {
15 | "owner": "flake8",
16 | "severity": "error",
17 | "pattern": [
18 | {
19 | "regexp": "^(.*):(\\d+): ([EFCDNW]\\d{3}.*)$",
20 | "file": 1,
21 | "line": 2,
22 | "message": 3
23 | }
24 | ]
25 | },
26 | {
27 | "owner": "pylint",
28 | "severity": "error",
29 | "pattern": [
30 | {
31 | "regexp": "^(.*):(\\d+): (\\[[EFCRW]\\d{4}\\(.*\\),.*\\].*)$",
32 | "file": 1,
33 | "line": 2,
34 | "message": 3
35 | }
36 | ]
37 | }
38 | ]
39 | }
40 |
--------------------------------------------------------------------------------
/.github/workflows/matchers/pytest.json:
--------------------------------------------------------------------------------
1 | {
2 | "problemMatcher": [
3 | {
4 | "owner": "pytest",
5 | "fileLocation": "absolute",
6 | "pattern": [
7 | {
8 | "regexp": "^\\s+File \"(.*)\", line (\\d+), in (.*)$",
9 | "file": 1,
10 | "line": 2
11 | },
12 | {
13 | "regexp": "^\\s+(.*)$",
14 | "message": 1
15 | }
16 | ]
17 | }
18 | ]
19 | }
20 |
--------------------------------------------------------------------------------
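Unlike the single-regex matchers, this one uses a two-entry `pattern` array: problem matchers apply multi-entry patterns to consecutive log lines, so the first regex extracts file and line from a traceback's `File "...", line N` line and the second captures the following line as the message. A small demonstration of that pairing on made-up traceback lines:

```python
# Demonstrates how the two-pattern pytest matcher pairs consecutive
# traceback lines: the first regex yields file/line, the next line
# becomes the message. The sample lines are made up.
import re

LOC = re.compile(r'^\s+File "(.*)", line (\d+), in (.*)$')
MSG = re.compile(r"^\s+(.*)$")

lines = [
    '  File "tests/test_core.py", line 17, in test_something',
    "    assert value == 3",
]
loc, msg = LOC.match(lines[0]), MSG.match(lines[1])
print(f"{loc.group(1)}:{loc.group(2)} -> {msg.group(1)}")
```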
/.github/workflows/matchers/python.json:
--------------------------------------------------------------------------------
1 | {
2 | "problemMatcher": [
3 | {
4 | "owner": "python",
5 | "pattern": [
6 | {
7 | "regexp": "^\\s*File\\s\\\"(.*)\\\",\\sline\\s(\\d+),\\sin\\s(.*)$",
8 | "file": 1,
9 | "line": 2
10 | },
11 | {
12 | "regexp": "^\\s*raise\\s(.*)\\(\\'(.*)\\'\\)$",
13 | "message": 2
14 | }
15 | ]
16 | }
17 | ]
18 | }
19 |
--------------------------------------------------------------------------------
/.github/workflows/needs-docs.yml:
--------------------------------------------------------------------------------
1 | name: Needs Docs
2 |
3 | on:
4 | pull_request:
5 | types: [labeled, unlabeled]
6 |
7 | jobs:
8 | check:
9 | name: Check
10 | runs-on: ubuntu-latest
11 | steps:
12 | - name: Check for needs-docs label
13 | uses: actions/github-script@v7.0.1
14 | with:
15 | script: |
16 | const { data: labels } = await github.rest.issues.listLabelsOnIssue({
17 | owner: context.repo.owner,
18 | repo: context.repo.repo,
19 | issue_number: context.issue.number
20 | });
21 | const needsDocs = labels.find(label => label.name === 'needs-docs');
22 | if (needsDocs) {
23 | core.setFailed('Pull request needs docs');
24 | }
25 |
--------------------------------------------------------------------------------
/.github/workflows/release.yml:
--------------------------------------------------------------------------------
1 | ---
2 | name: Publish Release
3 |
4 | on:
5 | workflow_dispatch:
6 | release:
7 | types: [published]
8 | schedule:
9 | - cron: "0 2 * * *"
10 |
11 | permissions:
12 | contents: read
13 |
14 | jobs:
15 | init:
16 | name: Initialize build
17 | runs-on: ubuntu-latest
18 | outputs:
19 | tag: ${{ steps.tag.outputs.tag }}
20 | branch_build: ${{ steps.tag.outputs.branch_build }}
21 | steps:
22 | - uses: actions/checkout@v4.1.7
23 | - name: Get tag
24 | id: tag
25 | # yamllint disable rule:line-length
26 | run: |
27 | if [[ "${{ github.event_name }}" = "release" ]]; then
28 | TAG="${{ github.event.release.tag_name}}"
29 | BRANCH_BUILD="false"
30 | else
31 | TAG=$(cat esphome/const.py | sed -n -E "s/^__version__\s+=\s+\"(.+)\"$/\1/p")
32 | today="$(date --utc '+%Y%m%d')"
33 | TAG="${TAG}${today}"
34 | BRANCH=${GITHUB_REF#refs/heads/}
35 | if [[ "$BRANCH" != "dev" ]]; then
36 | TAG="${TAG}-${BRANCH}"
37 | BRANCH_BUILD="true"
38 | else
39 | BRANCH_BUILD="false"
40 | fi
41 | fi
42 | echo "tag=${TAG}" >> $GITHUB_OUTPUT
43 | echo "branch_build=${BRANCH_BUILD}" >> $GITHUB_OUTPUT
44 | # yamllint enable rule:line-length
45 |
46 | deploy-pypi:
47 | name: Build and publish to PyPi
48 | if: github.repository == 'esphome/esphome' && github.event_name == 'release'
49 | runs-on: ubuntu-latest
50 | permissions:
51 | contents: read
52 | id-token: write
53 | steps:
54 | - uses: actions/checkout@v4.1.7
55 | - name: Set up Python
56 | uses: actions/setup-python@v5.1.0
57 | with:
58 | python-version: "3.x"
59 | - name: Set up python environment
60 | env:
61 | ESPHOME_NO_VENV: 1
62 | run: script/setup
63 | - name: Build
64 | run: |-
65 | pip3 install build
66 | python3 -m build
67 | - name: Publish
68 | uses: pypa/gh-action-pypi-publish@v1.9.0
69 |
70 | deploy-docker:
71 | name: Build ESPHome ${{ matrix.platform }}
72 | if: github.repository == 'esphome/esphome'
73 | permissions:
74 | contents: read
75 | packages: write
76 | runs-on: ubuntu-latest
77 | needs: [init]
78 | strategy:
79 | fail-fast: false
80 | matrix:
81 | platform:
82 | - linux/amd64
83 | - linux/arm/v7
84 | - linux/arm64
85 | steps:
86 | - uses: actions/checkout@v4.1.7
87 | - name: Set up Python
88 | uses: actions/setup-python@v5.1.0
89 | with:
90 | python-version: "3.9"
91 |
92 | - name: Set up Docker Buildx
93 | uses: docker/setup-buildx-action@v3.3.0
94 | - name: Set up QEMU
95 | if: matrix.platform != 'linux/amd64'
96 | uses: docker/setup-qemu-action@v3.0.0
97 |
98 | - name: Log in to docker hub
99 | uses: docker/login-action@v3.2.0
100 | with:
101 | username: ${{ secrets.DOCKER_USER }}
102 | password: ${{ secrets.DOCKER_PASSWORD }}
103 | - name: Log in to the GitHub container registry
104 | uses: docker/login-action@v3.2.0
105 | with:
106 | registry: ghcr.io
107 | username: ${{ github.actor }}
108 | password: ${{ secrets.GITHUB_TOKEN }}
109 |
110 | - name: Build docker
111 | uses: ./.github/actions/build-image
112 | with:
113 | platform: ${{ matrix.platform }}
114 | target: docker
115 | baseimg: docker
116 | suffix: ""
117 | version: ${{ needs.init.outputs.tag }}
118 |
119 | - name: Build ha-addon
120 | uses: ./.github/actions/build-image
121 | with:
122 | platform: ${{ matrix.platform }}
123 | target: hassio
124 | baseimg: hassio
125 | suffix: "hassio"
126 | version: ${{ needs.init.outputs.tag }}
127 |
128 | - name: Build lint
129 | uses: ./.github/actions/build-image
130 | with:
131 | platform: ${{ matrix.platform }}
132 | target: lint
133 | baseimg: docker
134 | suffix: lint
135 | version: ${{ needs.init.outputs.tag }}
136 |
137 | - name: Sanitize platform name
138 | id: sanitize
139 | run: |
140 | echo "${{ matrix.platform }}" | sed 's|/|-|g' > /tmp/platform
141 | echo name=$(cat /tmp/platform) >> $GITHUB_OUTPUT
142 |
143 | - name: Upload digests
144 | uses: actions/upload-artifact@v4.3.3
145 | with:
146 | name: digests-${{ steps.sanitize.outputs.name }}
147 | path: /tmp/digests
148 | retention-days: 1
149 |
150 | deploy-manifest:
151 | name: Publish ESPHome ${{ matrix.image.title }} to ${{ matrix.registry }}
152 | runs-on: ubuntu-latest
153 | needs:
154 | - init
155 | - deploy-docker
156 | if: github.repository == 'esphome/esphome'
157 | permissions:
158 | contents: read
159 | packages: write
160 | strategy:
161 | fail-fast: false
162 | matrix:
163 | image:
164 | - title: "ha-addon"
165 | target: "hassio"
166 | suffix: "hassio"
167 | - title: "docker"
168 | target: "docker"
169 | suffix: ""
170 | - title: "lint"
171 | target: "lint"
172 | suffix: "lint"
173 | registry:
174 | - ghcr
175 | - dockerhub
176 | steps:
177 | - uses: actions/checkout@v4.1.7
178 |
179 | - name: Download digests
180 | uses: actions/download-artifact@v4.1.7
181 | with:
182 | pattern: digests-*
183 | path: /tmp/digests
184 | merge-multiple: true
185 |
186 | - name: Set up Docker Buildx
187 | uses: docker/setup-buildx-action@v3.3.0
188 |
189 | - name: Log in to docker hub
190 | if: matrix.registry == 'dockerhub'
191 | uses: docker/login-action@v3.2.0
192 | with:
193 | username: ${{ secrets.DOCKER_USER }}
194 | password: ${{ secrets.DOCKER_PASSWORD }}
195 | - name: Log in to the GitHub container registry
196 | if: matrix.registry == 'ghcr'
197 | uses: docker/login-action@v3.2.0
198 | with:
199 | registry: ghcr.io
200 | username: ${{ github.actor }}
201 | password: ${{ secrets.GITHUB_TOKEN }}
202 |
203 | - name: Generate short tags
204 | id: tags
205 | run: |
206 | output=$(docker/generate_tags.py \
207 | --tag "${{ needs.init.outputs.tag }}" \
208 | --suffix "${{ matrix.image.suffix }}" \
209 | --registry "${{ matrix.registry }}")
210 | echo $output
211 | for l in $output; do
212 | echo $l >> $GITHUB_OUTPUT
213 | done
214 |
215 | - name: Create manifest list and push
216 | working-directory: /tmp/digests/${{ matrix.image.target }}/${{ matrix.registry }}
217 | run: |
218 | docker buildx imagetools create $(jq -Rcnr 'inputs | . / "," | map("-t " + .) | join(" ")' <<< "${{ steps.tags.outputs.tags}}") \
219 | $(printf '${{ steps.tags.outputs.image }}@sha256:%s ' *)
220 |
221 | deploy-ha-addon-repo:
222 | if: github.repository == 'esphome/esphome' && needs.init.outputs.branch_build == 'false'
223 | runs-on: ubuntu-latest
224 | needs:
225 | - init
226 | - deploy-manifest
227 | steps:
228 | - name: Trigger Workflow
229 | uses: actions/github-script@v7.0.1
230 | with:
231 | github-token: ${{ secrets.DEPLOY_HA_ADDON_REPO_TOKEN }}
232 | script: |
233 | let description = "ESPHome";
234 | if (context.eventName == "release") {
235 | description = ${{ toJSON(github.event.release.body) }};
236 | }
237 | github.rest.actions.createWorkflowDispatch({
238 | owner: "esphome",
239 | repo: "home-assistant-addon",
240 | workflow_id: "bump-version.yml",
241 | ref: "main",
242 | inputs: {
243 | version: "${{ needs.init.outputs.tag }}",
244 | content: description
245 | }
246 | })
247 |
--------------------------------------------------------------------------------
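The `init` job above derives the build tag in bash: release events use the release tag directly, while scheduled and manual runs read `__version__` from esphome/const.py, append the UTC date, and add a branch suffix for builds off any branch other than `dev`. The same derivation sketched in Python (the const.py path comes from the workflow; the rest is illustrative):

```python
# Sketch of the tag derivation done by the init job for non-release
# builds: read __version__ from esphome/const.py, append the UTC date,
# and suffix the branch name for branches other than dev.
import datetime
import re

def nightly_tag(branch: str, const_py: str = "esphome/const.py") -> str:
    with open(const_py, encoding="utf-8") as f:
        version = re.search(r'^__version__\s*=\s*"(.+)"$',
                            f.read(), re.MULTILINE).group(1)
    today = datetime.datetime.now(datetime.timezone.utc).strftime("%Y%m%d")
    tag = f"{version}{today}"
    return tag if branch == "dev" else f"{tag}-{branch}"

print(nightly_tag("dev"))
```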
/.github/workflows/stale.yml:
--------------------------------------------------------------------------------
1 | ---
2 | name: Stale
3 |
4 | on:
5 | schedule:
6 | - cron: "30 0 * * *"
7 | workflow_dispatch:
8 |
9 | permissions:
10 | issues: write
11 | pull-requests: write
12 |
13 | concurrency:
14 | group: lock
15 |
16 | jobs:
17 | stale:
18 | runs-on: ubuntu-latest
19 | steps:
20 | - uses: actions/stale@v9.0.0
21 | with:
22 | days-before-pr-stale: 90
23 | days-before-pr-close: 7
24 | days-before-issue-stale: -1
25 | days-before-issue-close: -1
26 | remove-stale-when-updated: true
27 | stale-pr-label: "stale"
28 | exempt-pr-labels: "not-stale"
29 | stale-pr-message: >
30 | There hasn't been any activity on this pull request recently. This
31 | pull request has been automatically marked as stale because of that
32 | and will be closed if no further activity occurs within 7 days.
33 | Thank you for your contributions.
34 |
35 | # Use stale to automatically close issues with a
36 | # reference to the issue tracker
37 | close-issues:
38 | runs-on: ubuntu-latest
39 | steps:
40 | - uses: actions/stale@v9.0.0
41 | with:
42 | days-before-pr-stale: -1
43 | days-before-pr-close: -1
44 | days-before-issue-stale: 1
45 | days-before-issue-close: 1
46 | remove-stale-when-updated: true
47 | stale-issue-label: "stale"
48 | exempt-issue-labels: "not-stale"
49 | stale-issue-message: >
50 | https://github.com/esphome/esphome/issues/430
51 |
--------------------------------------------------------------------------------
/.github/workflows/sync-device-classes.yml:
--------------------------------------------------------------------------------
1 | ---
2 | name: Synchronise Device Classes from Home Assistant
3 |
4 | on:
5 | workflow_dispatch:
6 | schedule:
7 | - cron: "45 6 * * *"
8 |
9 | jobs:
10 | sync:
11 | name: Sync Device Classes
12 | runs-on: ubuntu-latest
13 | if: github.repository == 'esphome/esphome'
14 | steps:
15 | - name: Checkout
16 | uses: actions/checkout@v4.1.7
17 |
18 | - name: Checkout Home Assistant
19 | uses: actions/checkout@v4.1.7
20 | with:
21 | repository: home-assistant/core
22 | path: lib/home-assistant
23 |
24 | - name: Setup Python
25 | uses: actions/setup-python@v5.1.0
26 | with:
27 | python-version: 3.12
28 |
29 | - name: Install Home Assistant
30 | run: |
31 | python -m pip install --upgrade pip
32 | pip install -e lib/home-assistant
33 |
34 | - name: Sync
35 | run: |
36 | python ./script/sync-device_class.py
37 |
38 | - name: Commit changes
39 | uses: peter-evans/create-pull-request@v6.0.5
40 | with:
41 | commit-message: "Synchronise Device Classes from Home Assistant"
42 | committer: esphomebot
43 | author: esphomebot
44 | branch: sync/device-classes
45 | delete-branch: true
46 | title: "Synchronise Device Classes from Home Assistant"
47 | body-path: .github/PULL_REQUEST_TEMPLATE.md
48 | token: ${{ secrets.DEVICE_CLASS_SYNC_TOKEN }}
49 |
--------------------------------------------------------------------------------
/.github/workflows/yaml-lint.yml:
--------------------------------------------------------------------------------
1 | ---
2 | name: YAML lint
3 |
4 | on:
5 | push:
6 | branches: [dev, beta, release]
7 | paths:
8 | - "**.yaml"
9 | - "**.yml"
10 | pull_request:
11 | paths:
12 | - "**.yaml"
13 | - "**.yml"
14 |
15 | jobs:
16 | yamllint:
17 | name: yamllint
18 | runs-on: ubuntu-latest
19 | steps:
20 | - name: Check out code from GitHub
21 | uses: actions/checkout@v4.1.7
22 | - name: Run yamllint
23 | uses: frenck/action-yamllint@v1.5.0
24 | with:
25 | strict: true
26 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Hide sublime text stuff
10 | *.sublime-project
11 | *.sublime-workspace
12 |
13 | # Intellij Idea
14 | .idea
15 |
16 | # Eclipse
17 | .project
18 | .cproject
19 | .pydevproject
20 | .settings/
21 |
22 | # Vim
23 | *.swp
24 |
25 | # Hide some OS X stuff
26 | .DS_Store
27 | .AppleDouble
28 | .LSOverride
29 | Icon
30 |
31 | # Thumbnails
32 | ._*
33 |
34 | # Distribution / packaging
35 | .Python
36 | build/
37 | develop-eggs/
38 | dist/
39 | downloads/
40 | eggs/
41 | .eggs/
42 | lib/
43 | lib64/
44 | parts/
45 | sdist/
46 | var/
47 | wheels/
48 | *.egg-info/
49 | .installed.cfg
50 | *.egg
51 | MANIFEST
52 |
53 | # Installer logs
54 | pip-log.txt
55 | pip-delete-this-directory.txt
56 |
57 | # Unit test / coverage reports
58 | htmlcov/
59 | .tox/
60 | .coverage
61 | .coverage.*
62 | .cache
63 | .esphome
64 | nosetests.xml
65 | coverage.xml
66 | cov.xml
67 | *.cover
68 | .hypothesis/
69 | .pytest_cache/
70 |
71 | # Translations
72 | *.mo
73 | *.pot
74 |
75 | # pyenv
76 | .python-version
77 |
78 | # Environments
79 | .env
80 | .venv
81 | env/
82 | venv/
83 | ENV/
84 | env.bak/
85 | venv.bak/
86 | venv-*/
87 |
88 | # mypy
89 | .mypy_cache/
90 |
91 | .pioenvs
92 | .piolibdeps
93 | .pio
94 | .vscode/
95 | !.vscode/tasks.json
96 | CMakeListsPrivate.txt
97 | CMakeLists.txt
98 |
99 | # User-specific stuff:
100 | .idea/**/workspace.xml
101 | .idea/**/tasks.xml
102 | .idea/dictionaries
103 |
104 | # Sensitive or high-churn files:
105 | .idea/**/dataSources/
106 | .idea/**/dataSources.ids
107 | .idea/**/dataSources.xml
108 | .idea/**/dataSources.local.xml
109 | .idea/**/dynamic.xml
110 |
111 | # CMake
112 | cmake-build-*/
113 |
114 | CMakeCache.txt
115 | CMakeFiles
116 | CMakeScripts
117 | Testing
118 | Makefile
119 | cmake_install.cmake
120 | install_manifest.txt
121 | compile_commands.json
122 | CTestTestfile.cmake
123 | /*.cbp
124 |
125 | .clang_complete
126 | .gcc-flags.json
127 |
128 | config/
129 | tests/build/
130 | tests/.esphome/
131 | /.temp-clang-tidy.cpp
132 | /.temp/
133 | .pio/
134 |
135 | sdkconfig.*
136 | !sdkconfig.defaults
137 |
138 | .tests/
139 |
140 | /components
141 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # muart-group/esphome
2 |
3 | ESPHome fork for development of the `mitsubishi_uart` component. Check out the documentation for more info: [muart-group.github.io](https://muart-group.github.io/).
4 |
5 | #### The [`dev` branch](https://github.com/muart-group/esphome/tree/dev) is the branch targeted for merging upstream to ESPHome.
6 |
7 | #### The [`muart/edge` branch](https://github.com/muart-group/esphome/tree/muart/edge) contains new experimental features for testing.
8 |
9 | Once [this PR](https://github.com/esphome/esphome/pull/6794) is merged, bugs can be reported in the main ESPHome issue tracking repository; until then, please report all issues here. (Experimental feature requests or exploration may still end up living here instead of in ESPHome, but we'll have to see.)
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | async_timeout==4.0.3; python_version <= "3.10"
2 | cryptography==42.0.2
3 | voluptuous==0.14.2
4 | PyYAML==6.0.1
5 | paho-mqtt==1.6.1
6 | colorama==0.4.6
7 | icmplib==3.0.4
8 | tornado==6.4
9 | tzlocal==5.2 # from time
10 | tzdata>=2021.1 # from time
11 | pyserial==3.5
12 | platformio==6.1.15 # When updating platformio, also update Dockerfile
13 | esptool==4.7.0
14 | click==8.1.7
15 | esphome-dashboard==20240613.0
16 | aioesphomeapi==24.3.0
17 | zeroconf==0.132.2
18 | python-magic==0.4.27
19 | ruamel.yaml==0.18.6 # dashboard_import
20 |
21 | # esp-idf requires this, but doesn't bundle it by default
22 | # https://github.com/espressif/esp-idf/blob/220590d599e134d7a5e7f1e683cc4550349ffbf8/requirements.txt#L24
23 | kconfiglib==13.7.1
24 |
25 | # esp-idf >= 5.0 requires this
26 | pyparsing >= 3.0
27 |
28 | # For autocompletion
29 | argcomplete>=2.0.0
30 |
--------------------------------------------------------------------------------
/requirements_dev.txt:
--------------------------------------------------------------------------------
1 | # Useful stuff when working in a development environment
2 | clang-format==13.0.1 # also change in .pre-commit-config.yaml and Dockerfile when updating
3 | clang-tidy==14.0.6 # When updating clang-tidy, also update Dockerfile
4 | yamllint==1.35.1 # also change in .pre-commit-config.yaml when updating
5 |
--------------------------------------------------------------------------------
/requirements_optional.txt:
--------------------------------------------------------------------------------
1 | pillow==10.2.0
2 | cairosvg==2.7.1
3 |
--------------------------------------------------------------------------------
/requirements_test.txt:
--------------------------------------------------------------------------------
1 | pylint==3.1.0
2 | flake8==7.0.0 # also change in .pre-commit-config.yaml when updating
3 | black==24.4.2 # also change in .pre-commit-config.yaml when updating
4 | pyupgrade==3.15.2 # also change in .pre-commit-config.yaml when updating
5 | pre-commit
6 |
7 | # Unit tests
8 | pytest==8.2.0
9 | pytest-cov==5.0.0
10 | pytest-mock==3.14.0
11 | pytest-asyncio==0.23.6
12 | asyncmock==0.4.2
13 | hypothesis==6.92.1
14 |
--------------------------------------------------------------------------------
/script/api_protobuf/api_protobuf.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | """Python 3 script to automatically generate C++ classes for ESPHome's native API.
3 |
4 | It's pretty crappy spaghetti code, but it works.
5 |
6 | You need to install the protobuf compiler first:
7 | running `protoc --version` should return something like
8 | libprotoc 3.6.1
9 |
10 | Then run this script with python3; the files
11 |
12 | esphome/components/api/api_pb2_service.h
13 | esphome/components/api/api_pb2_service.cpp
14 | esphome/components/api/api_pb2.h
15 | esphome/components/api/api_pb2.cpp
16 |
17 | will be generated. They still need to be formatted.
18 | """
19 |
20 | import os
21 | import re
22 | import sys
23 | from abc import ABC, abstractmethod
24 | from pathlib import Path
25 | from subprocess import call
26 | from textwrap import dedent
27 |
28 | # Generate with
29 | # protoc --python_out=script/api_protobuf -I esphome/components/api/ api_options.proto
30 | import aioesphomeapi.api_options_pb2 as pb
31 | import google.protobuf.descriptor_pb2 as descriptor
32 |
33 | FILE_HEADER = """// This file was automatically generated with a tool.
34 | // See scripts/api_protobuf/api_protobuf.py
35 | """
36 |
37 |
38 | def indent_list(text, padding=" "):
39 | lines = []
40 | for line in text.splitlines():
41 | if line == "":
42 | p = ""
43 | elif line.startswith("#ifdef") or line.startswith("#endif"):
44 | p = ""
45 | else:
46 | p = padding
47 | lines.append(p + line)
48 | return lines
49 |
50 |
51 | def indent(text, padding=" "):
52 | return "\n".join(indent_list(text, padding))
53 |
54 |
55 | def camel_to_snake(name):
56 | # https://stackoverflow.com/a/1176023
57 | s1 = re.sub("(.)([A-Z][a-z]+)", r"\1_\2", name)
58 | return re.sub("([a-z0-9])([A-Z])", r"\1_\2", s1).lower()
59 |
60 |
61 | class TypeInfo(ABC):
62 | def __init__(self, field):
63 | self._field = field
64 |
65 | @property
66 | def default_value(self):
67 | return ""
68 |
69 | @property
70 | def name(self):
71 | return self._field.name
72 |
73 | @property
74 | def arg_name(self):
75 | return self.name
76 |
77 | @property
78 | def field_name(self):
79 | return self.name
80 |
81 | @property
82 | def number(self):
83 | return self._field.number
84 |
85 | @property
86 | def repeated(self):
87 | return self._field.label == 3
88 |
89 | @property
90 | def cpp_type(self):
91 | raise NotImplementedError
92 |
93 | @property
94 | def reference_type(self):
95 | return f"{self.cpp_type} "
96 |
97 | @property
98 | def const_reference_type(self):
99 | return f"{self.cpp_type} "
100 |
101 | @property
102 | def public_content(self) -> list[str]:
103 | return [self.class_member]
104 |
105 | @property
106 | def protected_content(self) -> list[str]:
107 | return []
108 |
109 | @property
110 | def class_member(self) -> str:
111 | return f"{self.cpp_type} {self.field_name}{{{self.default_value}}};"
112 |
113 | @property
114 | def decode_varint_content(self) -> str:
115 | content = self.decode_varint
116 | if content is None:
117 | return None
118 | return dedent(
119 | f"""\
120 | case {self.number}: {{
121 | this->{self.field_name} = {content};
122 | return true;
123 | }}"""
124 | )
125 |
126 | decode_varint = None
127 |
128 | @property
129 | def decode_length_content(self) -> str:
130 | content = self.decode_length
131 | if content is None:
132 | return None
133 | return dedent(
134 | f"""\
135 | case {self.number}: {{
136 | this->{self.field_name} = {content};
137 | return true;
138 | }}"""
139 | )
140 |
141 | decode_length = None
142 |
143 | @property
144 | def decode_32bit_content(self) -> str:
145 | content = self.decode_32bit
146 | if content is None:
147 | return None
148 | return dedent(
149 | f"""\
150 | case {self.number}: {{
151 | this->{self.field_name} = {content};
152 | return true;
153 | }}"""
154 | )
155 |
156 | decode_32bit = None
157 |
158 | @property
159 | def decode_64bit_content(self) -> str:
160 | content = self.decode_64bit
161 | if content is None:
162 | return None
163 | return dedent(
164 | f"""\
165 | case {self.number}: {{
166 | this->{self.field_name} = {content};
167 | return true;
168 | }}"""
169 | )
170 |
171 | decode_64bit = None
172 |
173 | @property
174 | def encode_content(self):
175 | return f"buffer.{self.encode_func}({self.number}, this->{self.field_name});"
176 |
177 | encode_func = None
178 |
179 | @property
180 | def dump_content(self):
181 | o = f'out.append(" {self.name}: ");\n'
182 | o += self.dump(f"this->{self.field_name}") + "\n"
183 | o += 'out.append("\\n");\n'
184 | return o
185 |
186 | @abstractmethod
187 | def dump(self, name: str):
188 | pass
189 |
190 |
191 | TYPE_INFO = {}
192 |
193 |
194 | def register_type(name):
195 | def func(value):
196 | TYPE_INFO[name] = value
197 | return value
198 |
199 | return func
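The numeric keys passed to @register_type below are the protobuf field type codes (FieldDescriptorProto.Type in google.protobuf.descriptor_pb2): 1 is double, 9 is string, 11 is an embedded message, 14 is an enum, and so on.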
200 |
201 |
202 | @register_type(1)
203 | class DoubleType(TypeInfo):
204 | cpp_type = "double"
205 | default_value = "0.0"
206 | decode_64bit = "value.as_double()"
207 | encode_func = "encode_double"
208 |
209 | def dump(self, name):
210 | o = f'sprintf(buffer, "%g", {name});\n'
211 | o += "out.append(buffer);"
212 | return o
213 |
214 |
215 | @register_type(2)
216 | class FloatType(TypeInfo):
217 | cpp_type = "float"
218 | default_value = "0.0f"
219 | decode_32bit = "value.as_float()"
220 | encode_func = "encode_float"
221 |
222 | def dump(self, name):
223 | o = f'sprintf(buffer, "%g", {name});\n'
224 | o += "out.append(buffer);"
225 | return o
226 |
227 |
228 | @register_type(3)
229 | class Int64Type(TypeInfo):
230 | cpp_type = "int64_t"
231 | default_value = "0"
232 | decode_varint = "value.as_int64()"
233 | encode_func = "encode_int64"
234 |
235 | def dump(self, name):
236 | o = f'sprintf(buffer, "%lld", {name});\n'
237 | o += "out.append(buffer);"
238 | return o
239 |
240 |
241 | @register_type(4)
242 | class UInt64Type(TypeInfo):
243 | cpp_type = "uint64_t"
244 | default_value = "0"
245 | decode_varint = "value.as_uint64()"
246 | encode_func = "encode_uint64"
247 |
248 | def dump(self, name):
249 | o = f'sprintf(buffer, "%llu", {name});\n'
250 | o += "out.append(buffer);"
251 | return o
252 |
253 |
254 | @register_type(5)
255 | class Int32Type(TypeInfo):
256 | cpp_type = "int32_t"
257 | default_value = "0"
258 | decode_varint = "value.as_int32()"
259 | encode_func = "encode_int32"
260 |
261 | def dump(self, name):
262 | o = f'sprintf(buffer, "%" PRId32, {name});\n'
263 | o += "out.append(buffer);"
264 | return o
265 |
266 |
267 | @register_type(6)
268 | class Fixed64Type(TypeInfo):
269 | cpp_type = "uint64_t"
270 | default_value = "0"
271 | decode_64bit = "value.as_fixed64()"
272 | encode_func = "encode_fixed64"
273 |
274 | def dump(self, name):
275 | o = f'sprintf(buffer, "%llu", {name});\n'
276 | o += "out.append(buffer);"
277 | return o
278 |
279 |
280 | @register_type(7)
281 | class Fixed32Type(TypeInfo):
282 | cpp_type = "uint32_t"
283 | default_value = "0"
284 | decode_32bit = "value.as_fixed32()"
285 | encode_func = "encode_fixed32"
286 |
287 | def dump(self, name):
288 | o = f'sprintf(buffer, "%" PRIu32, {name});\n'
289 | o += "out.append(buffer);"
290 | return o
291 |
292 |
293 | @register_type(8)
294 | class BoolType(TypeInfo):
295 | cpp_type = "bool"
296 | default_value = "false"
297 | decode_varint = "value.as_bool()"
298 | encode_func = "encode_bool"
299 |
300 | def dump(self, name):
301 | o = f"out.append(YESNO({name}));"
302 | return o
303 |
304 |
305 | @register_type(9)
306 | class StringType(TypeInfo):
307 | cpp_type = "std::string"
308 | default_value = ""
309 | reference_type = "std::string &"
310 | const_reference_type = "const std::string &"
311 | decode_length = "value.as_string()"
312 | encode_func = "encode_string"
313 |
314 | def dump(self, name):
315 | o = f'out.append("\'").append({name}).append("\'");'
316 | return o
317 |
318 |
319 | @register_type(11)
320 | class MessageType(TypeInfo):
321 | @property
322 | def cpp_type(self):
323 | return self._field.type_name[1:]
324 |
325 | default_value = ""
326 |
327 | @property
328 | def reference_type(self):
329 | return f"{self.cpp_type} &"
330 |
331 | @property
332 | def const_reference_type(self):
333 | return f"const {self.cpp_type} &"
334 |
335 | @property
336 | def encode_func(self):
337 | return f"encode_message<{self.cpp_type}>"
338 |
339 | @property
340 | def decode_length(self):
341 | return f"value.as_message<{self.cpp_type}>()"
342 |
343 | def dump(self, name):
344 | o = f"{name}.dump_to(out);"
345 | return o
346 |
347 |
348 | @register_type(12)
349 | class BytesType(TypeInfo):
350 | cpp_type = "std::string"
351 | default_value = ""
352 | reference_type = "std::string &"
353 | const_reference_type = "const std::string &"
354 | decode_length = "value.as_string()"
355 | encode_func = "encode_string"
356 |
357 | def dump(self, name):
358 | o = f'out.append("\'").append({name}).append("\'");'
359 | return o
360 |
361 |
362 | @register_type(13)
363 | class UInt32Type(TypeInfo):
364 | cpp_type = "uint32_t"
365 | default_value = "0"
366 | decode_varint = "value.as_uint32()"
367 | encode_func = "encode_uint32"
368 |
369 | def dump(self, name):
370 | o = f'sprintf(buffer, "%" PRIu32, {name});\n'
371 | o += "out.append(buffer);"
372 | return o
373 |
374 |
375 | @register_type(14)
376 | class EnumType(TypeInfo):
377 | @property
378 | def cpp_type(self):
379 | return f"enums::{self._field.type_name[1:]}"
380 |
381 | @property
382 | def decode_varint(self):
383 | return f"value.as_enum<{self.cpp_type}>()"
384 |
385 | default_value = ""
386 |
387 | @property
388 | def encode_func(self):
389 | return f"encode_enum<{self.cpp_type}>"
390 |
391 | def dump(self, name):
392 | o = f"out.append(proto_enum_to_string<{self.cpp_type}>({name}));"
393 | return o
394 |
395 |
396 | @register_type(15)
397 | class SFixed32Type(TypeInfo):
398 | cpp_type = "int32_t"
399 | default_value = "0"
400 | decode_32bit = "value.as_sfixed32()"
401 | encode_func = "encode_sfixed32"
402 |
403 | def dump(self, name):
404 | o = f'sprintf(buffer, "%" PRId32, {name});\n'
405 | o += "out.append(buffer);"
406 | return o
407 |
408 |
409 | @register_type(16)
410 | class SFixed64Type(TypeInfo):
411 | cpp_type = "int64_t"
412 | default_value = "0"
413 | decode_64bit = "value.as_sfixed64()"
414 | encode_func = "encode_sfixed64"
415 |
416 | def dump(self, name):
417 | o = f'sprintf(buffer, "%lld", {name});\n'
418 | o += "out.append(buffer);"
419 | return o
420 |
421 |
422 | @register_type(17)
423 | class SInt32Type(TypeInfo):
424 | cpp_type = "int32_t"
425 | default_value = "0"
426 | decode_varint = "value.as_sint32()"
427 | encode_func = "encode_sint32"
428 |
429 | def dump(self, name):
430 | o = f'sprintf(buffer, "%" PRId32, {name});\n'
431 | o += "out.append(buffer);"
432 | return o
433 |
434 |
435 | @register_type(18)
436 | class SInt64Type(TypeInfo):
437 | cpp_type = "int64_t"
438 | default_value = "0"
439 | decode_varint = "value.as_sint64()"
440 | encode_func = "encode_sint64"
441 |
442 | def dump(self, name):
443 | o = f'sprintf(buffer, "%lld", {name});\n'
444 | o += "out.append(buffer);"
445 | return o
446 |
447 |
448 | class RepeatedTypeInfo(TypeInfo):
449 | def __init__(self, field):
450 | super().__init__(field)
451 | self._ti = TYPE_INFO[field.type](field)
452 |
453 | @property
454 | def cpp_type(self):
455 | return f"std::vector<{self._ti.cpp_type}>"
456 |
457 | @property
458 | def reference_type(self):
459 | return f"{self.cpp_type} &"
460 |
461 | @property
462 | def const_reference_type(self):
463 | return f"const {self.cpp_type} &"
464 |
465 | @property
466 | def decode_varint_content(self) -> str:
467 | content = self._ti.decode_varint
468 | if content is None:
469 | return None
470 | return dedent(
471 | f"""\
472 | case {self.number}: {{
473 | this->{self.field_name}.push_back({content});
474 | return true;
475 | }}"""
476 | )
477 |
478 | @property
479 | def decode_length_content(self) -> str:
480 | content = self._ti.decode_length
481 | if content is None:
482 | return None
483 | return dedent(
484 | f"""\
485 | case {self.number}: {{
486 | this->{self.field_name}.push_back({content});
487 | return true;
488 | }}"""
489 | )
490 |
491 | @property
492 | def decode_32bit_content(self) -> str:
493 | content = self._ti.decode_32bit
494 | if content is None:
495 | return None
496 | return dedent(
497 | f"""\
498 | case {self.number}: {{
499 | this->{self.field_name}.push_back({content});
500 | return true;
501 | }}"""
502 | )
503 |
504 | @property
505 | def decode_64bit_content(self) -> str:
506 | content = self._ti.decode_64bit
507 | if content is None:
508 | return None
509 | return dedent(
510 | f"""\
511 | case {self.number}: {{
512 | this->{self.field_name}.push_back({content});
513 | return true;
514 | }}"""
515 | )
516 |
517 | @property
518 | def _ti_is_bool(self):
519 |         # std::vector<bool> is specialized, so taking a reference to an element does not work
520 | return isinstance(self._ti, BoolType)
521 |
522 | @property
523 | def encode_content(self):
524 | o = f"for (auto {'' if self._ti_is_bool else '&'}it : this->{self.field_name}) {{\n"
525 | o += f" buffer.{self._ti.encode_func}({self.number}, it, true);\n"
526 | o += "}"
527 | return o
528 |
529 | @property
530 | def dump_content(self):
531 | o = f'for (const auto {"" if self._ti_is_bool else "&"}it : this->{self.field_name}) {{\n'
532 | o += f' out.append(" {self.name}: ");\n'
533 | o += indent(self._ti.dump("it")) + "\n"
534 | o += ' out.append("\\n");\n'
535 | o += "}\n"
536 | return o
537 |
538 | def dump(self, _: str):
539 | pass
540 |
541 |
542 | def build_enum_type(desc):
543 | name = desc.name
544 | out = f"enum {name} : uint32_t {{\n"
545 | for v in desc.value:
546 | out += f" {v.name} = {v.number},\n"
547 | out += "};\n"
548 |
549 | cpp = "#ifdef HAS_PROTO_MESSAGE_DUMP\n"
550 | cpp += f"template<> const char *proto_enum_to_string(enums::{name} value) {{\n"
551 | cpp += " switch (value) {\n"
552 | for v in desc.value:
553 | cpp += f" case enums::{v.name}:\n"
554 | cpp += f' return "{v.name}";\n'
555 | cpp += " default:\n"
556 | cpp += ' return "UNKNOWN";\n'
557 | cpp += " }\n"
558 | cpp += "}\n"
559 | cpp += "#endif\n"
560 |
561 | return out, cpp
562 |
563 |
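build_message_type() mirrors build_enum_type() above: it returns a pair of the header-side class declaration and the .cpp-side method definitions for one message.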
564 | def build_message_type(desc):
565 | public_content = []
566 | protected_content = []
567 | decode_varint = []
568 | decode_length = []
569 | decode_32bit = []
570 | decode_64bit = []
571 | encode = []
572 | dump = []
573 |
574 | for field in desc.field:
575 | if field.label == 3:
576 | ti = RepeatedTypeInfo(field)
577 | else:
578 | ti = TYPE_INFO[field.type](field)
579 | protected_content.extend(ti.protected_content)
580 | public_content.extend(ti.public_content)
581 | encode.append(ti.encode_content)
582 |
583 | if ti.decode_varint_content:
584 | decode_varint.append(ti.decode_varint_content)
585 | if ti.decode_length_content:
586 | decode_length.append(ti.decode_length_content)
587 | if ti.decode_32bit_content:
588 | decode_32bit.append(ti.decode_32bit_content)
589 | if ti.decode_64bit_content:
590 | decode_64bit.append(ti.decode_64bit_content)
591 | if ti.dump_content:
592 | dump.append(ti.dump_content)
593 |
594 | cpp = ""
595 | if decode_varint:
596 | decode_varint.append("default:\n return false;")
597 | o = f"bool {desc.name}::decode_varint(uint32_t field_id, ProtoVarInt value) {{\n"
598 | o += " switch (field_id) {\n"
599 | o += indent("\n".join(decode_varint), " ") + "\n"
600 | o += " }\n"
601 | o += "}\n"
602 | cpp += o
603 | prot = "bool decode_varint(uint32_t field_id, ProtoVarInt value) override;"
604 | protected_content.insert(0, prot)
605 | if decode_length:
606 | decode_length.append("default:\n return false;")
607 | o = f"bool {desc.name}::decode_length(uint32_t field_id, ProtoLengthDelimited value) {{\n"
608 | o += " switch (field_id) {\n"
609 | o += indent("\n".join(decode_length), " ") + "\n"
610 | o += " }\n"
611 | o += "}\n"
612 | cpp += o
613 | prot = "bool decode_length(uint32_t field_id, ProtoLengthDelimited value) override;"
614 | protected_content.insert(0, prot)
615 | if decode_32bit:
616 | decode_32bit.append("default:\n return false;")
617 | o = f"bool {desc.name}::decode_32bit(uint32_t field_id, Proto32Bit value) {{\n"
618 | o += " switch (field_id) {\n"
619 | o += indent("\n".join(decode_32bit), " ") + "\n"
620 | o += " }\n"
621 | o += "}\n"
622 | cpp += o
623 | prot = "bool decode_32bit(uint32_t field_id, Proto32Bit value) override;"
624 | protected_content.insert(0, prot)
625 | if decode_64bit:
626 | decode_64bit.append("default:\n return false;")
627 | o = f"bool {desc.name}::decode_64bit(uint32_t field_id, Proto64Bit value) {{\n"
628 | o += " switch (field_id) {\n"
629 | o += indent("\n".join(decode_64bit), " ") + "\n"
630 | o += " }\n"
631 | o += "}\n"
632 | cpp += o
633 | prot = "bool decode_64bit(uint32_t field_id, Proto64Bit value) override;"
634 | protected_content.insert(0, prot)
635 |
636 | o = f"void {desc.name}::encode(ProtoWriteBuffer buffer) const {{"
637 | if encode:
638 | if len(encode) == 1 and len(encode[0]) + len(o) + 3 < 120:
639 | o += f" {encode[0]} "
640 | else:
641 | o += "\n"
642 | o += indent("\n".join(encode)) + "\n"
643 | o += "}\n"
644 | cpp += o
645 | prot = "void encode(ProtoWriteBuffer buffer) const override;"
646 | public_content.append(prot)
647 |
648 | o = f"void {desc.name}::dump_to(std::string &out) const {{"
649 | if dump:
650 | if len(dump) == 1 and len(dump[0]) + len(o) + 3 < 120:
651 | o += f" {dump[0]} "
652 | else:
653 | o += "\n"
654 | o += " __attribute__((unused)) char buffer[64];\n"
655 | o += f' out.append("{desc.name} {{\\n");\n'
656 | o += indent("\n".join(dump)) + "\n"
657 | o += ' out.append("}");\n'
658 | else:
659 | o2 = f'out.append("{desc.name} {{}}");'
660 | if len(o) + len(o2) + 3 < 120:
661 | o += f" {o2} "
662 | else:
663 | o += "\n"
664 | o += f" {o2}\n"
665 | o += "}\n"
666 | cpp += "#ifdef HAS_PROTO_MESSAGE_DUMP\n"
667 | cpp += o
668 | cpp += "#endif\n"
669 | prot = "#ifdef HAS_PROTO_MESSAGE_DUMP\n"
670 | prot += "void dump_to(std::string &out) const override;\n"
671 | prot += "#endif\n"
672 | public_content.append(prot)
673 |
674 | out = f"class {desc.name} : public ProtoMessage {{\n"
675 | out += " public:\n"
676 | out += indent("\n".join(public_content)) + "\n"
677 | out += "\n"
678 | out += " protected:\n"
679 | out += indent("\n".join(protected_content))
680 | if len(protected_content) > 0:
681 | out += "\n"
682 | out += "};\n"
683 | return out, cpp
684 |
685 |
686 | SOURCE_BOTH = 0
687 | SOURCE_SERVER = 1
688 | SOURCE_CLIENT = 2
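These constants mirror the source option attached to each message in api_options.proto: a server-sourced message gets a send_*() helper generated, a client-sourced one gets an on_*() hook plus a decode case collected in RECEIVE_CASES just below, and SOURCE_BOTH gets both.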
689 |
690 | RECEIVE_CASES = {}
691 |
692 | ifdefs = {}
693 |
694 |
695 | def get_opt(desc, opt, default=None):
696 | if not desc.options.HasExtension(opt):
697 | return default
698 | return desc.options.Extensions[opt]
699 |
700 |
701 | def build_service_message_type(mt):
702 | snake = camel_to_snake(mt.name)
703 | id_ = get_opt(mt, pb.id)
704 | if id_ is None:
705 | return None
706 |
707 | source = get_opt(mt, pb.source, 0)
708 |
709 | ifdef = get_opt(mt, pb.ifdef)
710 | log = get_opt(mt, pb.log, True)
711 | hout = ""
712 | cout = ""
713 |
714 | if ifdef is not None:
715 | ifdefs[str(mt.name)] = ifdef
716 | hout += f"#ifdef {ifdef}\n"
717 | cout += f"#ifdef {ifdef}\n"
718 |
719 | if source in (SOURCE_BOTH, SOURCE_SERVER):
720 | # Generate send
721 | func = f"send_{snake}"
722 | hout += f"bool {func}(const {mt.name} &msg);\n"
723 | cout += f"bool APIServerConnectionBase::{func}(const {mt.name} &msg) {{\n"
724 | if log:
725 | cout += "#ifdef HAS_PROTO_MESSAGE_DUMP\n"
726 | cout += f' ESP_LOGVV(TAG, "{func}: %s", msg.dump().c_str());\n'
727 | cout += "#endif\n"
728 | # cout += f' this->set_nodelay({str(nodelay).lower()});\n'
729 | cout += f" return this->send_message_<{mt.name}>(msg, {id_});\n"
730 | cout += "}\n"
731 | if source in (SOURCE_BOTH, SOURCE_CLIENT):
732 | # Generate receive
733 | func = f"on_{snake}"
734 | hout += f"virtual void {func}(const {mt.name} &value){{}};\n"
735 | case = ""
736 | if ifdef is not None:
737 | case += f"#ifdef {ifdef}\n"
738 | case += f"{mt.name} msg;\n"
739 | case += "msg.decode(msg_data, msg_size);\n"
740 | if log:
741 | case += "#ifdef HAS_PROTO_MESSAGE_DUMP\n"
742 | case += f'ESP_LOGVV(TAG, "{func}: %s", msg.dump().c_str());\n'
743 | case += "#endif\n"
744 | case += f"this->{func}(msg);\n"
745 | if ifdef is not None:
746 | case += "#endif\n"
747 | case += "break;"
748 | RECEIVE_CASES[id_] = case
749 |
750 | if ifdef is not None:
751 | hout += "#endif\n"
752 | cout += "#endif\n"
753 |
754 | return hout, cout
755 |
756 |
757 | def main():
758 | cwd = Path(__file__).resolve().parent
759 | root = cwd.parent.parent / "esphome" / "components" / "api"
760 | prot_file = root / "api.protoc"
761 | call(["protoc", "-o", str(prot_file), "-I", str(root), "api.proto"])
762 | proto_content = prot_file.read_bytes()
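Note that protoc -o emits a serialized FileDescriptorSet rather than generated bindings; the bytes are parsed just below with descriptor_pb2, and the temporary api.protoc file is unlinked at the end of main().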
763 |
764 | # pylint: disable-next=no-member
765 | d = descriptor.FileDescriptorSet.FromString(proto_content)
766 |
767 | file = d.file[0]
768 | content = FILE_HEADER
769 | content += """\
770 | #pragma once
771 |
772 | #include "proto.h"
773 |
774 | namespace esphome {
775 | namespace api {
776 |
777 | """
778 |
779 | cpp = FILE_HEADER
780 | cpp += """\
781 | #include "api_pb2.h"
782 | #include "esphome/core/log.h"
783 |
784 | #include <cinttypes>
785 |
786 | namespace esphome {
787 | namespace api {
788 |
789 | """
790 |
791 | content += "namespace enums {\n\n"
792 |
793 | for enum in file.enum_type:
794 | s, c = build_enum_type(enum)
795 | content += s
796 | cpp += c
797 |
798 | content += "\n} // namespace enums\n\n"
799 |
800 | mt = file.message_type
801 |
802 | for m in mt:
803 | s, c = build_message_type(m)
804 | content += s
805 | cpp += c
806 |
807 | content += """\
808 |
809 | } // namespace api
810 | } // namespace esphome
811 | """
812 | cpp += """\
813 |
814 | } // namespace api
815 | } // namespace esphome
816 | """
817 |
818 | with open(root / "api_pb2.h", "w", encoding="utf-8") as f:
819 | f.write(content)
820 |
821 | with open(root / "api_pb2.cpp", "w", encoding="utf-8") as f:
822 | f.write(cpp)
823 |
824 | hpp = FILE_HEADER
825 | hpp += """\
826 | #pragma once
827 |
828 | #include "api_pb2.h"
829 | #include "esphome/core/defines.h"
830 |
831 | namespace esphome {
832 | namespace api {
833 |
834 | """
835 |
836 | cpp = FILE_HEADER
837 | cpp += """\
838 | #include "api_pb2_service.h"
839 | #include "esphome/core/log.h"
840 |
841 | namespace esphome {
842 | namespace api {
843 |
844 | static const char *const TAG = "api.service";
845 |
846 | """
847 |
848 | class_name = "APIServerConnectionBase"
849 |
850 | hpp += f"class {class_name} : public ProtoService {{\n"
851 | hpp += " public:\n"
852 |
853 | for mt in file.message_type:
854 | obj = build_service_message_type(mt)
855 | if obj is None:
856 | continue
857 | hout, cout = obj
858 | hpp += indent(hout) + "\n"
859 | cpp += cout
860 |
861 | cases = list(RECEIVE_CASES.items())
862 | cases.sort()
863 | hpp += " protected:\n"
864 | hpp += " bool read_message(uint32_t msg_size, uint32_t msg_type, uint8_t *msg_data) override;\n"
865 | out = f"bool {class_name}::read_message(uint32_t msg_size, uint32_t msg_type, uint8_t *msg_data) {{\n"
866 | out += " switch (msg_type) {\n"
867 | for i, case in cases:
868 | c = f"case {i}: {{\n"
869 | c += indent(case) + "\n"
870 | c += "}"
871 | out += indent(c, " ") + "\n"
872 | out += " default:\n"
873 | out += " return false;\n"
874 | out += " }\n"
875 | out += " return true;\n"
876 | out += "}\n"
877 | cpp += out
878 | hpp += "};\n"
879 |
880 | serv = file.service[0]
881 | class_name = "APIServerConnection"
882 | hpp += "\n"
883 | hpp += f"class {class_name} : public {class_name}Base {{\n"
884 | hpp += " public:\n"
885 | hpp_protected = ""
886 | cpp += "\n"
887 |
888 | m = serv.method[0]
889 | for m in serv.method:
890 | func = m.name
891 | inp = m.input_type[1:]
892 | ret = m.output_type[1:]
893 | is_void = ret == "void"
894 | snake = camel_to_snake(inp)
895 | on_func = f"on_{snake}"
896 | needs_conn = get_opt(m, pb.needs_setup_connection, True)
897 | needs_auth = get_opt(m, pb.needs_authentication, True)
898 |
899 | ifdef = ifdefs.get(inp, None)
900 |
901 | if ifdef is not None:
902 | hpp += f"#ifdef {ifdef}\n"
903 | hpp_protected += f"#ifdef {ifdef}\n"
904 | cpp += f"#ifdef {ifdef}\n"
905 |
906 | hpp_protected += f" void {on_func}(const {inp} &msg) override;\n"
907 | hpp += f" virtual {ret} {func}(const {inp} &msg) = 0;\n"
908 | cpp += f"void {class_name}::{on_func}(const {inp} &msg) {{\n"
909 | body = ""
910 | if needs_conn:
911 | body += "if (!this->is_connection_setup()) {\n"
912 | body += " this->on_no_setup_connection();\n"
913 | body += " return;\n"
914 | body += "}\n"
915 | if needs_auth:
916 | body += "if (!this->is_authenticated()) {\n"
917 | body += " this->on_unauthenticated_access();\n"
918 | body += " return;\n"
919 | body += "}\n"
920 |
921 | if is_void:
922 | body += f"this->{func}(msg);\n"
923 | else:
924 | body += f"{ret} ret = this->{func}(msg);\n"
925 | ret_snake = camel_to_snake(ret)
926 | body += f"if (!this->send_{ret_snake}(ret)) {{\n"
927 | body += " this->on_fatal_error();\n"
928 | body += "}\n"
929 | cpp += indent(body) + "\n" + "}\n"
930 |
931 | if ifdef is not None:
932 | hpp += "#endif\n"
933 | hpp_protected += "#endif\n"
934 | cpp += "#endif\n"
935 |
936 | hpp += " protected:\n"
937 | hpp += hpp_protected
938 | hpp += "};\n"
939 |
940 | hpp += """\
941 |
942 | } // namespace api
943 | } // namespace esphome
944 | """
945 | cpp += """\
946 |
947 | } // namespace api
948 | } // namespace esphome
949 | """
950 |
951 | with open(root / "api_pb2_service.h", "w", encoding="utf-8") as f:
952 | f.write(hpp)
953 |
954 | with open(root / "api_pb2_service.cpp", "w", encoding="utf-8") as f:
955 | f.write(cpp)
956 |
957 | prot_file.unlink()
958 |
959 | try:
960 | import clang_format
961 |
962 | def exec_clang_format(path):
963 | clang_format_path = os.path.join(
964 | os.path.dirname(clang_format.__file__), "data", "bin", "clang-format"
965 | )
966 | call([clang_format_path, "-i", path])
967 |
968 | exec_clang_format(root / "api_pb2_service.h")
969 | exec_clang_format(root / "api_pb2_service.cpp")
970 | exec_clang_format(root / "api_pb2.h")
971 | exec_clang_format(root / "api_pb2.cpp")
972 | except ImportError:
973 | pass
974 |
975 |
976 | if __name__ == "__main__":
977 | sys.exit(main())
978 |
--------------------------------------------------------------------------------
/script/build_codeowners.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | from pathlib import Path
3 | import sys
4 | import argparse
5 | from collections import defaultdict
6 |
7 | from esphome.helpers import write_file_if_changed
8 | from esphome.config import get_component, get_platform
9 | from esphome.core import CORE
10 | from esphome.const import KEY_CORE, KEY_TARGET_FRAMEWORK
11 |
12 | parser = argparse.ArgumentParser()
13 | parser.add_argument(
14 | "--check", help="Check if the CODEOWNERS file is up to date.", action="store_true"
15 | )
16 | args = parser.parse_args()
17 |
18 | # The root directory of the repo
19 | root = Path(__file__).parent.parent
20 | components_dir = root / "esphome" / "components"
21 |
22 | BASE = """
23 | # This file is generated by script/build_codeowners.py
24 | # People marked here will be automatically requested for a review
25 | # when the code that they own is touched.
26 | #
27 | # Every time an issue is created with a label corresponding to an integration,
28 | # the integration's code owner is automatically notified.
29 |
30 | # Core Code
31 | pyproject.toml @esphome/core
32 | esphome/*.py @esphome/core
33 | esphome/core/* @esphome/core
34 |
35 | # Integrations
36 | """.strip()
37 |
38 | parts = [BASE]
39 |
40 | # Fake a config path so that get_component works
41 | CORE.config_path = str(root)
42 | CORE.data[KEY_CORE] = {KEY_TARGET_FRAMEWORK: None}
43 |
44 | codeowners = defaultdict(list)
45 |
46 | for path in components_dir.iterdir():
47 | if not path.is_dir():
48 | continue
49 | if not (path / "__init__.py").is_file():
50 | continue
51 |
52 | name = path.name
53 | comp = get_component(name)
54 | if comp is None:
55 | print(
56 | f"Cannot find component {name}. Make sure current path is pip installed ESPHome"
57 | )
58 | sys.exit(1)
59 |
60 | codeowners[f"esphome/components/{name}/*"].extend(comp.codeowners)
61 |
62 | for platform_path in path.iterdir():
63 | platform_name = platform_path.stem
64 | platform = get_platform(platform_name, name)
65 | if platform is None:
66 | continue
67 |
68 | if platform_path.is_dir():
69 |             # Subfoldered platforms get their own line
70 | if not (platform_path / "__init__.py").is_file():
71 | continue
72 | codeowners[f"esphome/components/{name}/{platform_name}/*"].extend(
73 | platform.codeowners
74 | )
75 | continue
76 |
77 | # Non-subfoldered platforms add to codeowners at component level
78 | if not platform_path.is_file() or platform_path.name == "__init__.py":
79 | continue
80 | codeowners[f"esphome/components/{name}/*"].extend(platform.codeowners)
81 |
82 |
83 | for path, owners in sorted(codeowners.items()):
84 | owners = sorted(set(owners))
85 | if not owners:
86 | continue
87 | for owner in owners:
88 | if not owner.startswith("@"):
89 | print(
90 | f"Codeowner {owner} for integration {path} must start with an '@' symbol!"
91 | )
92 | sys.exit(1)
93 | parts.append(f"{path} {' '.join(owners)}")
94 |
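Each appended entry renders as "<path> <owner> [<owner> ...]"; a hypothetical line would look like: esphome/components/foo/* @foo-owner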
95 |
96 | # End newline
97 | parts.append("")
98 | content = "\n".join(parts)
99 | codeowners_file = root / "CODEOWNERS"
100 |
101 | if args.check:
102 | if codeowners_file.read_text() != content:
103 | print("CODEOWNERS file is not up to date.")
104 | print("Please run `script/build_codeowners.py`")
105 | sys.exit(1)
106 | print("CODEOWNERS file is up to date")
107 | else:
108 | write_file_if_changed(codeowners_file, content)
109 | print("Wrote CODEOWNERS")
110 |
--------------------------------------------------------------------------------
/script/build_language_schema.py:
--------------------------------------------------------------------------------
1 | import inspect
2 | import json
3 | import argparse
4 | import os
5 | import glob
6 | import re
7 | import voluptuous as vol
8 |
9 | # NOTE: Cannot import other esphome components globally, as a modification to vol_schema
10 | # is needed before modules are loaded
11 | import esphome.schema_extractors as ejs
12 |
13 | ejs.EnableSchemaExtraction = True
14 |
15 | # schema format:
16 | # Schemas are split into several JSON files: one for core stuff, one for each platform (sensor, binary_sensor, etc.)
17 | # and one for each component (dallas, sim800l, etc.). A component can have a schema for the root component/hub and
18 | # also for a platform component, e.g. dallas has a hub component which has a pin, and then has the sensor platform
19 | # which has the sensor name, index, etc. When the files are loaded they are merged into a single object.
20 | # The root format is
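A sketch of that root format, inferred from the build code below (the names are the S_* constants; the shape is indicative, not exhaustive):

    {"components": {"<domain>": {"schemas": {...}, "components": {...}, "platforms": {...}}}}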
21 |
22 | S_CONFIG_VAR = "config_var"
23 | S_CONFIG_VARS = "config_vars"
24 | S_CONFIG_SCHEMA = "CONFIG_SCHEMA"
25 | S_COMPONENT = "component"
26 | S_COMPONENTS = "components"
27 | S_PLATFORMS = "platforms"
28 | S_SCHEMA = "schema"
29 | S_SCHEMAS = "schemas"
30 | S_EXTENDS = "extends"
31 | S_TYPE = "type"
32 | S_NAME = "name"
33 |
34 | parser = argparse.ArgumentParser()
35 | parser.add_argument(
36 | "--output-path", default=".", help="Output path", type=os.path.abspath
37 | )
38 |
39 | args = parser.parse_args()
40 |
41 | DUMP_RAW = False
42 | DUMP_UNKNOWN = False
43 | DUMP_PATH = False
44 | JSON_DUMP_PRETTY = True
45 |
46 | # dynamically loaded esphome components are stored here
47 | components = {}
48 |
49 | schema_core = {}
50 |
51 | # output is where everything is built
52 | output = {"core": schema_core}
53 | # The full generated output is here
54 | schema_full = {"components": output}
55 |
56 | # A map keyed by str(schema); each value is a list of tuples
57 | # (schema reference, schema path). The actual schema reference is kept for the identity test, because different schemas can share the same repr key
58 | known_schemas = {}
59 |
60 | solve_registry = []
61 |
62 |
63 | def get_component_names():
64 | # pylint: disable-next=redefined-outer-name,reimported
65 | from esphome.loader import CORE_COMPONENTS_PATH
66 |
67 | component_names = ["esphome", "sensor", "esp32", "esp8266"]
68 |
69 | for d in os.listdir(CORE_COMPONENTS_PATH):
70 | if not d.startswith("__") and os.path.isdir(
71 | os.path.join(CORE_COMPONENTS_PATH, d)
72 | ):
73 | if d not in component_names:
74 | component_names.append(d)
75 |
76 | return component_names
77 |
78 |
79 | def load_components():
80 | from esphome.config import get_component
81 |
82 | for domain in get_component_names():
83 | components[domain] = get_component(domain)
84 |
85 |
86 | # pylint: disable=wrong-import-position
87 | from esphome.const import CONF_TYPE, KEY_CORE
88 | from esphome.core import CORE
89 |
90 | # pylint: enable=wrong-import-position
91 |
92 | CORE.data[KEY_CORE] = {}
93 | load_components()
94 |
95 | # Import esphome after loading components (so schema is tracked)
96 | # pylint: disable=wrong-import-position
97 | import esphome.core as esphome_core
98 | import esphome.config_validation as cv
99 | from esphome import automation
100 | from esphome import pins
101 | from esphome.components import remote_base
102 | from esphome.loader import get_platform, CORE_COMPONENTS_PATH
103 | from esphome.helpers import write_file_if_changed
104 | from esphome.util import Registry
105 |
106 | # pylint: enable=wrong-import-position
107 |
108 |
109 | def write_file(name, obj):
110 | full_path = os.path.join(args.output_path, name + ".json")
111 | if JSON_DUMP_PRETTY:
112 | json_str = json.dumps(obj, indent=2)
113 | else:
114 | json_str = json.dumps(obj, separators=(",", ":"))
115 | write_file_if_changed(full_path, json_str)
116 | print(f"Wrote {full_path}")
117 |
118 |
119 | def delete_extra_files(keep_names):
120 | for d in os.listdir(args.output_path):
121 | if d.endswith(".json") and d[:-5] not in keep_names:
122 | os.remove(os.path.join(args.output_path, d))
123 | print(f"Deleted {d}")
124 |
125 |
126 | def register_module_schemas(key, module, manifest=None):
127 | for name, schema in module_schemas(module):
128 | register_known_schema(key, name, schema)
129 |
130 | if manifest:
131 |         # Multi conf should allow a list of components
132 |         # not sure about the 2nd part of the if; might be useless config (e.g. as3935)
133 | if manifest.multi_conf and S_CONFIG_SCHEMA in output[key][S_SCHEMAS]:
134 | output[key][S_SCHEMAS][S_CONFIG_SCHEMA]["is_list"] = True
135 |
136 |
137 | def register_known_schema(module, name, schema):
138 | if module not in output:
139 | output[module] = {S_SCHEMAS: {}}
140 | config = convert_config(schema, f"{module}/{name}")
141 | if S_TYPE not in config:
142 | print(f"Config var without type: {module}.{name}")
143 |
144 | output[module][S_SCHEMAS][name] = config
145 | repr_schema = repr(schema)
146 | if repr_schema in known_schemas:
147 | schema_info = known_schemas[repr_schema]
148 | schema_info.append((schema, f"{module}.{name}"))
149 | else:
150 | known_schemas[repr_schema] = [(schema, f"{module}.{name}")]
151 |
152 |
153 | def module_schemas(module):
154 |     # This should yield elements in order so extended schemas are resolved properly.
155 |     # To do this we check where in the module's source code each symbol first appears. Seems to work.
156 | try:
157 | module_str = inspect.getsource(module)
158 | except TypeError:
159 | # improv
160 | module_str = ""
161 | except OSError:
162 | # some empty __init__ files
163 | module_str = ""
164 | schemas = {}
165 | for m_attr_name in dir(module):
166 | m_attr_obj = getattr(module, m_attr_name)
167 | if is_convertible_schema(m_attr_obj):
168 | schemas[module_str.find(m_attr_name)] = [m_attr_name, m_attr_obj]
169 |
170 | for pos in sorted(schemas.keys()):
171 | yield schemas[pos]
172 |
173 |
174 | found_registries = {}
175 |
176 | # Pin validator keys are the functions in pins which validate the pins
177 | pin_validators = {}
178 |
179 |
180 | def add_pin_validators():
181 | for m_attr_name in dir(pins):
182 | if "gpio" in m_attr_name:
183 | s = pin_validators[repr(getattr(pins, m_attr_name))] = {}
184 | if "schema" in m_attr_name:
185 | s["schema"] = True # else is just number
186 | if "internal" in m_attr_name:
187 | s["internal"] = True
188 | if "input" in m_attr_name:
189 | s["modes"] = ["input"]
190 | elif "output" in m_attr_name:
191 | s["modes"] = ["output"]
192 | else:
193 | s["modes"] = []
194 | if "pullup" in m_attr_name:
195 | s["modes"].append("pullup")
196 | from esphome.components.adc import sensor as adc_sensor
197 |
198 | pin_validators[repr(adc_sensor.validate_adc_pin)] = {
199 | "internal": True,
200 | "modes": ["input"],
201 | }
202 |
203 |
204 | def add_module_registries(domain, module):
205 | for attr_name in dir(module):
206 | attr_obj = getattr(module, attr_name)
207 | if isinstance(attr_obj, Registry):
208 | if attr_obj == automation.ACTION_REGISTRY:
209 | reg_type = "action"
210 | reg_domain = "core"
211 | found_registries[repr(attr_obj)] = reg_type
212 | elif attr_obj == automation.CONDITION_REGISTRY:
213 | reg_type = "condition"
214 | reg_domain = "core"
215 | found_registries[repr(attr_obj)] = reg_type
216 | else: # attr_name == "FILTER_REGISTRY":
217 | reg_domain = domain
218 | reg_type = attr_name.partition("_")[0].lower()
219 | found_registries[repr(attr_obj)] = f"{domain}.{reg_type}"
220 |
221 | for name in attr_obj.keys():
222 | if "." not in name:
223 | reg_entry_name = name
224 | else:
225 | parts = name.split(".")
226 | if len(parts) == 2:
227 | reg_domain = parts[0]
228 | reg_entry_name = parts[1]
229 | else:
230 | reg_domain = ".".join([parts[1], parts[0]])
231 | reg_entry_name = parts[2]
232 |
233 | if reg_domain not in output:
234 | output[reg_domain] = {}
235 | if reg_type not in output[reg_domain]:
236 | output[reg_domain][reg_type] = {}
237 | output[reg_domain][reg_type][reg_entry_name] = convert_config(
238 | attr_obj[name].schema, f"{reg_domain}/{reg_type}/{reg_entry_name}"
239 | )
240 |
241 | # print(f"{domain} - {attr_name} - {name}")
242 |
243 |
244 | def do_pins():
245 | # do pin registries
246 | pins_providers = schema_core["pins"] = []
247 | for pin_registry in pins.PIN_SCHEMA_REGISTRY:
248 | s = convert_config(
249 | pins.PIN_SCHEMA_REGISTRY[pin_registry][1], f"pins/{pin_registry}"
250 | )
251 | if pin_registry not in output:
252 | output[pin_registry] = {} # mcp23xxx does not create a component yet
253 | output[pin_registry]["pin"] = s
254 | pins_providers.append(pin_registry)
255 |
256 |
257 | def setBoards(obj, boards):
258 | obj[S_TYPE] = "enum"
259 | obj["values"] = {}
260 | for k, v in boards.items():
261 | obj["values"][k] = {"docs": v["name"]}
262 |
263 |
264 | def do_esp32():
265 | import esphome.components.esp32.boards as esp32_boards
266 |
267 | setBoards(
268 | output["esp32"]["schemas"]["CONFIG_SCHEMA"]["schema"]["config_vars"]["board"],
269 | esp32_boards.BOARDS,
270 | )
271 |
272 |
273 | def do_esp8266():
274 | import esphome.components.esp8266.boards as esp8266_boards
275 |
276 | setBoards(
277 | output["esp8266"]["schemas"]["CONFIG_SCHEMA"]["schema"]["config_vars"]["board"],
278 | esp8266_boards.BOARDS,
279 | )
280 |
281 |
282 | def fix_remote_receiver():
283 | if "remote_receiver.binary_sensor" not in output:
284 | return
285 | remote_receiver_schema = output["remote_receiver.binary_sensor"]["schemas"]
286 | remote_receiver_schema["CONFIG_SCHEMA"] = {
287 | "type": "schema",
288 | "schema": {
289 | "extends": ["binary_sensor.BINARY_SENSOR_SCHEMA", "core.COMPONENT_SCHEMA"],
290 | "config_vars": output["remote_base"].pop("binary"),
291 | },
292 | }
293 | remote_receiver_schema["CONFIG_SCHEMA"]["schema"]["config_vars"]["receiver_id"] = {
294 | "key": "GeneratedID",
295 | "use_id_type": "remote_base::RemoteReceiverBase",
296 | "type": "use_id",
297 | }
298 |
299 |
300 | def fix_script():
301 | if "script" not in output:
302 | return
303 | output["script"][S_SCHEMAS][S_CONFIG_SCHEMA][S_TYPE] = S_SCHEMA
304 | config_schema = output["script"][S_SCHEMAS][S_CONFIG_SCHEMA]
305 | config_schema[S_SCHEMA][S_CONFIG_VARS]["id"]["id_type"] = {
306 | "class": "script::Script"
307 | }
308 | config_schema["is_list"] = True
309 |
310 |
311 | def fix_font():
312 | if "font" not in output:
313 | return
314 | output["font"][S_SCHEMAS]["FILE_SCHEMA"] = output["font"][S_SCHEMAS].pop(
315 | "TYPED_FILE_SCHEMA"
316 | )
317 |
318 |
319 | def fix_menu():
320 | if "display_menu_base" not in output:
321 | return
322 |     # Menu has a recursive schema which is not kept properly
323 | schemas = output["display_menu_base"][S_SCHEMAS]
324 | # 1. Move items to a new schema
325 | schemas["MENU_TYPES"] = {
326 | S_TYPE: S_SCHEMA,
327 | S_SCHEMA: {
328 | S_CONFIG_VARS: {
329 | "items": schemas["DISPLAY_MENU_BASE_SCHEMA"][S_SCHEMA][S_CONFIG_VARS][
330 | "items"
331 | ]
332 | }
333 | },
334 | }
335 | # 2. Remove items from the base schema
336 | schemas["DISPLAY_MENU_BASE_SCHEMA"][S_SCHEMA][S_CONFIG_VARS].pop("items")
337 | # 3. Add extends to this
338 | schemas["DISPLAY_MENU_BASE_SCHEMA"][S_SCHEMA][S_EXTENDS].append(
339 | "display_menu_base.MENU_TYPES"
340 | )
341 | # 4. Configure menu items inside as recursive
342 | menu = schemas["MENU_TYPES"][S_SCHEMA][S_CONFIG_VARS]["items"]["types"]["menu"]
343 | menu[S_CONFIG_VARS].pop("items")
344 | menu[S_EXTENDS] = ["display_menu_base.MENU_TYPES"]
345 |
346 |
347 | def get_logger_tags():
348 | pattern = re.compile(r'^static const char \*const TAG = "(\w.*)";', re.MULTILINE)
349 | # tags not in components dir
350 | tags = [
351 | "app",
352 | "component",
353 | "entity_base",
354 | "scheduler",
355 | "api.service",
356 | ]
357 | for x in os.walk(CORE_COMPONENTS_PATH):
358 | for y in glob.glob(os.path.join(x[0], "*.cpp")):
359 | with open(y, encoding="utf-8") as file:
360 | data = file.read()
361 | match = pattern.search(data)
362 | if match:
363 | tags.append(match.group(1))
364 | return tags
365 |
366 |
367 | def add_logger_tags():
368 | if "logger" not in output or "schemas" not in output["logger"]:
369 | return
370 | tags = get_logger_tags()
371 | logs = output["logger"]["schemas"]["CONFIG_SCHEMA"]["schema"]["config_vars"][
372 | "logs"
373 | ]["schema"]["config_vars"]
374 | for t in tags:
375 | logs[t] = logs["string"].copy()
376 | logs.pop("string")
377 |
378 |
379 | def add_referenced_recursive(referenced_schemas, config_var, path, eat_schema=False):
380 | assert (
381 | S_CONFIG_VARS not in config_var and S_EXTENDS not in config_var
382 | ) # S_TYPE in cv or "key" in cv or len(cv) == 0
383 | if (
384 | config_var.get(S_TYPE) in ["schema", "trigger", "maybe"]
385 | and S_SCHEMA in config_var
386 | ):
387 | schema = config_var[S_SCHEMA]
388 | for k, v in schema.get(S_CONFIG_VARS, {}).items():
389 | if eat_schema:
390 | new_path = path + [S_CONFIG_VARS, k]
391 | else:
392 | new_path = path + ["schema", S_CONFIG_VARS, k]
393 | add_referenced_recursive(referenced_schemas, v, new_path)
394 | for k in schema.get(S_EXTENDS, []):
395 | if k not in referenced_schemas:
396 | referenced_schemas[k] = [path]
397 | else:
398 | if path not in referenced_schemas[k]:
399 | referenced_schemas[k].append(path)
400 |
401 | s1 = get_str_path_schema(k)
402 | p = k.split(".")
403 | if len(p) == 3 and path[0] == f"{p[0]}.{p[1]}":
404 | # special case for schema inside platforms
405 | add_referenced_recursive(
406 | referenced_schemas, s1, [path[0], "schemas", p[2]]
407 | )
408 | else:
409 | add_referenced_recursive(
410 | referenced_schemas, s1, [p[0], "schemas", p[1]]
411 | )
412 | elif config_var.get(S_TYPE) == "typed":
413 | for tk, tv in config_var.get("types").items():
414 | add_referenced_recursive(
415 | referenced_schemas,
416 | {
417 | S_TYPE: S_SCHEMA,
418 | S_SCHEMA: tv,
419 | },
420 | path + ["types", tk],
421 | eat_schema=True,
422 | )
423 |
424 |
425 | def get_str_path_schema(strPath):
426 | parts = strPath.split(".")
427 | if len(parts) > 2:
428 | parts[0] += "." + parts[1]
429 | parts[1] = parts[2]
430 | s1 = output.get(parts[0], {}).get(S_SCHEMAS, {}).get(parts[1], {})
431 | return s1
432 |
433 |
434 | def pop_str_path_schema(strPath):
435 | parts = strPath.split(".")
436 | if len(parts) > 2:
437 | parts[0] += "." + parts[1]
438 | parts[1] = parts[2]
439 | output.get(parts[0], {}).get(S_SCHEMAS, {}).pop(parts[1])
440 |
441 |
442 | def get_arr_path_schema(path):
443 | s = output
444 | for x in path:
445 | s = s[x]
446 | return s
447 |
448 |
449 | def merge(source, destination):
450 | """
451 | run me with nosetests --with-doctest file.py
452 |
453 | >>> a = { 'first' : { 'all_rows' : { 'pass' : 'dog', 'number' : '1' } } }
454 | >>> b = { 'first' : { 'all_rows' : { 'fail' : 'cat', 'number' : '5' } } }
455 | >>> merge(b, a) == { 'first' : { 'all_rows' : { 'pass' : 'dog', 'fail' : 'cat', 'number' : '5' } } }
456 | True
457 | """
458 | for key, value in source.items():
459 | if isinstance(value, dict):
460 | # get node or create one
461 | node = destination.setdefault(key, {})
462 | merge(value, node)
463 | else:
464 | destination[key] = value
465 |
466 | return destination
467 |
468 |
469 | def is_platform_schema(schema_name):
470 | # added mostly because of schema_name == "microphone.MICROPHONE_SCHEMA"
471 | # and "alarm_control_panel"
472 | # which is shrunk because there is only one component of the schema (i2s_audio)
473 | component = schema_name.split(".")[0]
474 | return component in components and components[component].is_platform_component
475 |
476 |
477 | def shrink():
478 | """Shrink the extending schemas which has just an end type, e.g. at this point
479 | ota / port is type schema with extended pointing to core.port, this should instead be
480 | type number. core.port is number
481 |
482 | This also fixes enums, as they are another schema and they are instead put in the same cv
483 | """
484 |
485 |     # referenced_schemas is a dict: the keys are all schemas that appear in extends: [] arrays, and the values are lists of paths pointing to that extend,
486 |     # e.g. the key core.COMPONENT_SCHEMA has a lot of paths of config vars which extend this schema
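An illustrative before/after for the ota / port example above (shape only, not actual output):

    before: {"type": "schema", "schema": {"extends": ["core.port"]}}
    after:  {"type": "integer", "data_type": "port"}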
487 |
488 | pass_again = True
489 |
490 | while pass_again:
491 | pass_again = False
492 |
493 | referenced_schemas = {}
494 |
495 | for k, v in output.items():
496 | for kv, vv in v.items():
497 | if kv != "pin" and isinstance(vv, dict):
498 | for kvv, vvv in vv.items():
499 | add_referenced_recursive(referenced_schemas, vvv, [k, kv, kvv])
500 |
501 | for x, paths in referenced_schemas.items():
502 | if len(paths) == 1 and not is_platform_schema(x):
503 | key_s = get_str_path_schema(x)
504 | arr_s = get_arr_path_schema(paths[0])
505 | # key_s |= arr_s
506 | # key_s.pop(S_EXTENDS)
507 | pass_again = True
508 | if S_SCHEMA in arr_s:
509 | if S_EXTENDS in arr_s[S_SCHEMA]:
510 | arr_s[S_SCHEMA].pop(S_EXTENDS)
511 | else:
512 | print("expected extends here!" + x)
513 | arr_s = merge(key_s, arr_s)
514 | if arr_s[S_TYPE] in ["enum", "typed"]:
515 | arr_s.pop(S_SCHEMA)
516 | else:
517 | arr_s.pop(S_EXTENDS)
518 | arr_s |= key_s[S_SCHEMA]
519 | print(x)
520 |
521 |         # simple types should be spread on each component;
522 |         # for enums, so far these are logger.is_log_level, cover.validate_cover_state and pulse_counter.sensor.COUNT_MODE_SCHEMA,
523 |         # then for some reason the sensor filter registry falls here,
524 |         # and then all the simple types, integers and strings
525 | for x, paths in referenced_schemas.items():
526 | key_s = get_str_path_schema(x)
527 | if key_s and key_s[S_TYPE] in ["enum", "registry", "integer", "string"]:
528 | if key_s[S_TYPE] == "registry":
529 | print("Spreading registry: " + x)
530 | for target in paths:
531 | target_s = get_arr_path_schema(target)
532 | assert target_s[S_SCHEMA][S_EXTENDS] == [x]
533 | target_s.pop(S_SCHEMA)
534 | target_s |= key_s
535 | if key_s[S_TYPE] in ["integer", "string"]:
536 | target_s["data_type"] = x.split(".")[1]
537 | # remove this dangling again
538 | pop_str_path_schema(x)
539 | elif not key_s:
540 | for target in paths:
541 | target_s = get_arr_path_schema(target)
542 | if S_SCHEMA not in target_s:
543 | # an empty schema like speaker.SPEAKER_SCHEMA
544 | target_s[S_EXTENDS].remove(x)
545 | continue
546 | assert target_s[S_SCHEMA][S_EXTENDS] == [x]
547 | target_s.pop(S_SCHEMA)
548 | target_s.pop(S_TYPE) # undefined
549 | target_s["data_type"] = x.split(".")[1]
550 | # remove this dangling again
551 | pop_str_path_schema(x)
552 |
553 | # remove dangling items (unreachable schemas)
554 | for domain, domain_schemas in output.items():
555 | for schema_name in list(domain_schemas.get(S_SCHEMAS, {}).keys()):
556 | s = f"{domain}.{schema_name}"
557 | if (
558 | not s.endswith("." + S_CONFIG_SCHEMA)
559 | and s not in referenced_schemas
560 | and not is_platform_schema(s)
561 | ):
562 | print(f"Removing {s}")
563 | domain_schemas[S_SCHEMAS].pop(schema_name)
564 |
565 |
566 | def build_schema():
567 | print("Building schema")
568 |
569 | # check esphome was not loaded globally (IDE auto imports)
570 | if len(ejs.extended_schemas) == 0:
571 | raise LookupError(
572 | "no data collected. Did you globally import an ESPHome component?"
573 | )
574 |
575 | # Core schema
576 | schema_core[S_SCHEMAS] = {}
577 | register_module_schemas("core", cv)
578 |
579 | platforms = {}
580 | schema_core[S_PLATFORMS] = platforms
581 | core_components = {}
582 | schema_core[S_COMPONENTS] = core_components
583 |
584 | add_pin_validators()
585 |
586 | # Load a preview of each component
587 | for domain, manifest in components.items():
588 | if manifest.is_platform_component:
589 | # e.g. sensor, binary sensor, add S_COMPONENTS
590 | # note: S_COMPONENTS is not filled until loaded, e.g.
591 | # if lock: is not used, then we don't need to know about their
592 | # platforms yet.
593 | output[domain] = {S_COMPONENTS: {}, S_SCHEMAS: {}}
594 | platforms[domain] = {}
595 | elif manifest.config_schema is not None:
596 | # e.g. dallas
597 | output[domain] = {S_SCHEMAS: {S_CONFIG_SCHEMA: {}}}
598 |
599 |     # Generate platforms (e.g. sensor, binary_sensor, climate)
600 | for domain in platforms:
601 | c = components[domain]
602 | register_module_schemas(domain, c.module)
603 |
604 | # Generate components
605 | for domain, manifest in components.items():
606 | if domain not in platforms:
607 | if manifest.config_schema is not None:
608 | core_components[domain] = {}
609 | if len(manifest.dependencies) > 0:
610 | core_components[domain]["dependencies"] = manifest.dependencies
611 | register_module_schemas(domain, manifest.module, manifest)
612 |
613 | for platform in platforms:
614 | platform_manifest = get_platform(domain=platform, platform=domain)
615 | if platform_manifest is not None:
616 | output[platform][S_COMPONENTS][domain] = {}
617 | if len(platform_manifest.dependencies) > 0:
618 | output[platform][S_COMPONENTS][domain][
619 | "dependencies"
620 | ] = platform_manifest.dependencies
621 | register_module_schemas(
622 | f"{domain}.{platform}", platform_manifest.module, platform_manifest
623 | )
624 |
625 | # Do registries
626 | add_module_registries("core", automation)
627 | for domain, manifest in components.items():
628 | add_module_registries(domain, manifest.module)
629 | add_module_registries("remote_base", remote_base)
630 |
631 | # update props pointing to registries
632 | for reg_config_var in solve_registry:
633 | (registry, config_var) = reg_config_var
634 | config_var[S_TYPE] = "registry"
635 | config_var["registry"] = found_registries[repr(registry)]
636 |
637 | do_pins()
638 | do_esp8266()
639 | do_esp32()
640 | fix_remote_receiver()
641 | fix_script()
642 | fix_font()
643 | add_logger_tags()
644 | shrink()
645 | fix_menu()
646 |
647 |     # aggregate components, so all component info is in the same file; otherwise we have dallas.json, dallas.sensor.json, etc.
648 | data = {}
649 | for component, component_schemas in output.items():
650 | if "." in component:
651 | key = component.partition(".")[0]
652 | if key not in data:
653 | data[key] = {}
654 | data[key][component] = component_schemas
655 | else:
656 | if component not in data:
657 | data[component] = {}
658 | data[component] |= {component: component_schemas}
659 |
660 | # bundle core inside esphome
661 | data["esphome"]["core"] = data.pop("core")["core"]
662 |
663 | for c, s in data.items():
664 | write_file(c, s)
665 | delete_extra_files(data.keys())
666 |
667 |
668 | def is_convertible_schema(schema):
669 | if schema is None:
670 | return False
671 | if isinstance(schema, (cv.Schema, cv.All, cv.Any)):
672 | return True
673 | if repr(schema) in ejs.hidden_schemas:
674 | return True
675 | if repr(schema) in ejs.typed_schemas:
676 | return True
677 | if repr(schema) in ejs.list_schemas:
678 | return True
679 | if repr(schema) in ejs.registry_schemas:
680 | return True
681 | if isinstance(schema, dict):
682 | for k in schema.keys():
683 | if isinstance(k, (cv.Required, cv.Optional)):
684 | return True
685 | return False
686 |
687 |
688 | def convert_config(schema, path):
689 | converted = {}
690 | convert(schema, converted, path)
691 | return converted
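As a rough illustration of the shape (hypothetical input): convert_config(cv.Schema({cv.Optional("enabled"): cv.boolean}), "demo") yields roughly

    {"type": "schema", "schema": {"config_vars": {"enabled": {"key": "Optional", "type": "boolean"}}}}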
692 |
693 |
694 | def convert(schema, config_var, path):
695 | """config_var can be a config_var or a schema: both are dicts
696 | config_var has a S_TYPE property, if this is S_SCHEMA, then it has a S_SCHEMA property
697 | schema does not have a type property, schema can have optionally both S_CONFIG_VARS and S_EXTENDS
698 | """
699 | repr_schema = repr(schema)
700 |
701 | if path.startswith("ads1115.sensor") and path.endswith("gain"):
702 | print(path)
703 |
704 | if repr_schema in known_schemas:
705 | schema_info = known_schemas[(repr_schema)]
706 | for schema_instance, name in schema_info:
707 | if schema_instance is schema:
708 | assert S_CONFIG_VARS not in config_var
709 | assert S_EXTENDS not in config_var
710 | if S_TYPE not in config_var:
711 | config_var[S_TYPE] = S_SCHEMA
712 | # assert config_var[S_TYPE] == S_SCHEMA
713 |
714 | if S_SCHEMA not in config_var:
715 | config_var[S_SCHEMA] = {}
716 | if S_EXTENDS not in config_var[S_SCHEMA]:
717 | config_var[S_SCHEMA][S_EXTENDS] = [name]
718 | elif name not in config_var[S_SCHEMA][S_EXTENDS]:
719 | config_var[S_SCHEMA][S_EXTENDS].append(name)
720 | return
721 |
722 | # Extended schemas are tracked when the .extend() is used in a schema
723 | if repr_schema in ejs.extended_schemas:
724 | extended = ejs.extended_schemas.get(repr_schema)
725 |         # The midea actions extend an empty schema (which resulted in templatize not templatizing anything);
726 |         # this causes a recursion in which this extended schema looks the same as extended[1]
727 | if repr_schema == repr(extended[1]):
728 | assert path.startswith("midea_ac/")
729 | return
730 |
731 | assert len(extended) == 2
732 | convert(extended[0], config_var, path + "/extL")
733 | convert(extended[1], config_var, path + "/extR")
734 | return
735 |
736 | if isinstance(schema, cv.All):
737 | i = 0
738 | for inner in schema.validators:
739 | i = i + 1
740 | convert(inner, config_var, path + f"/val {i}")
741 | return
742 |
743 | if hasattr(schema, "validators"):
744 | i = 0
745 | for inner in schema.validators:
746 | i = i + 1
747 | convert(inner, config_var, path + f"/val {i}")
748 |
749 | if isinstance(schema, cv.Schema):
750 | convert(schema.schema, config_var, path + "/all")
751 | return
752 |
753 | if isinstance(schema, dict):
754 | convert_keys(config_var, schema, path)
755 | return
756 |
757 | if repr_schema in ejs.list_schemas:
758 | config_var["is_list"] = True
759 | items_schema = ejs.list_schemas[repr_schema][0]
760 | convert(items_schema, config_var, path + "/list")
761 | return
762 |
763 | if DUMP_RAW:
764 | config_var["raw"] = repr_schema
765 |
766 | # pylint: disable=comparison-with-callable
767 | if schema == cv.boolean:
768 | config_var[S_TYPE] = "boolean"
769 | elif schema == automation.validate_potentially_and_condition:
770 | config_var[S_TYPE] = "registry"
771 | config_var["registry"] = "condition"
772 | elif schema in (cv.int_, cv.int_range):
773 | config_var[S_TYPE] = "integer"
774 | elif schema in (cv.string, cv.string_strict, cv.valid_name):
775 | config_var[S_TYPE] = "string"
776 |
777 | elif isinstance(schema, vol.Schema):
778 | # test: esphome/project
779 | config_var[S_TYPE] = "schema"
780 | config_var["schema"] = convert_config(schema.schema, path + "/s")["schema"]
781 |
782 | elif repr_schema in pin_validators:
783 | config_var |= pin_validators[repr_schema]
784 | config_var[S_TYPE] = "pin"
785 |
786 | # pylint: disable-next=too-many-nested-blocks
787 | elif repr_schema in ejs.hidden_schemas:
788 | schema_type = ejs.hidden_schemas[repr_schema]
789 |
790 | data = schema(ejs.SCHEMA_EXTRACT)
791 |
792 | # enums, e.g. esp32/variant
793 | if schema_type == "one_of":
794 | config_var[S_TYPE] = "enum"
795 | config_var["values"] = dict.fromkeys(list(data))
796 | elif schema_type == "enum":
797 | config_var[S_TYPE] = "enum"
798 | config_var["values"] = dict.fromkeys(list(data.keys()))
799 | elif schema_type == "maybe":
800 | config_var[S_TYPE] = S_SCHEMA
801 | config_var["maybe"] = data[1]
802 | config_var["schema"] = convert_config(data[0], path + "/maybe")["schema"]
803 | # esphome/on_boot
804 | elif schema_type == "automation":
805 | extra_schema = None
806 | config_var[S_TYPE] = "trigger"
807 | if automation.AUTOMATION_SCHEMA == ejs.extended_schemas[repr(data)][0]:
808 | extra_schema = ejs.extended_schemas[repr(data)][1]
809 | if (
810 | extra_schema is not None and len(extra_schema) > 1
811 | ): # usually only trigger_id here
812 | config = convert_config(extra_schema, path + "/extra")
813 | if "schema" in config:
814 | automation_schema = config["schema"]
815 | if not (
816 | len(automation_schema["config_vars"]) == 1
817 | and "trigger_id" in automation_schema["config_vars"]
818 | ):
819 | automation_schema["config_vars"]["then"] = {S_TYPE: "trigger"}
820 | if "trigger_id" in automation_schema["config_vars"]:
821 | automation_schema["config_vars"].pop("trigger_id")
822 |
823 | config_var[S_TYPE] = "trigger"
824 | config_var["schema"] = automation_schema
825 |         # some triggers can have a list of actions directly, while others need some other configuration,
826 |         # e.g. sensor.on_value_range, where the list of actions is only accepted under the "then" property.
827 | try:
828 | schema({"delay": "1s"})
829 | except cv.Invalid:
830 | config_var["has_required_var"] = True
831 | else:
832 | print("figure out " + path)
833 | elif schema_type == "effects":
834 | config_var[S_TYPE] = "registry"
835 | config_var["registry"] = "light.effects"
836 | config_var["filter"] = data[0]
837 | elif schema_type == "templatable":
838 | config_var["templatable"] = True
839 | convert(data, config_var, path + "/templat")
840 | elif schema_type == "triggers":
841 | # remote base
842 | convert(data, config_var, path + "/trigger")
843 | elif schema_type == "sensor":
844 | schema = data
845 | convert(data, config_var, path + "/trigger")
846 | elif schema_type == "declare_id":
847 | # pylint: disable=protected-access
848 | parents = data._parents
849 |
850 | config_var["id_type"] = {
851 | "class": str(data.base),
852 | "parents": (
853 | [str(x.base) for x in parents]
854 | if isinstance(parents, list)
855 | else None
856 | ),
857 | }
858 | elif schema_type == "use_id":
859 | if inspect.ismodule(data):
860 | m_attr_obj = getattr(data, "CONFIG_SCHEMA")
861 | use_schema = known_schemas.get(repr(m_attr_obj))
862 | if use_schema:
863 | [output_module, output_name] = use_schema[0][1].split(".")
864 | use_id_config = output[output_module][S_SCHEMAS][output_name]
865 | config_var["use_id_type"] = use_id_config["schema"]["config_vars"][
866 | "id"
867 | ]["id_type"]["class"]
868 | config_var[S_TYPE] = "use_id"
869 | else:
870 | print("TODO deferred?")
871 | else:
872 | if isinstance(data, str):
873 | # TODO: Figure out why pipsolar does this
874 | config_var["use_id_type"] = data
875 | else:
876 | config_var["use_id_type"] = str(data.base)
877 | config_var[S_TYPE] = "use_id"
878 | else:
879 | raise TypeError("Unknown extracted schema type")
880 | elif config_var.get("key") == "GeneratedID":
881 | if path.startswith("i2c/CONFIG_SCHEMA/") and path.endswith("/id"):
882 | config_var["id_type"] = {
883 | "class": "i2c::I2CBus",
884 | "parents": ["Component"],
885 | }
886 | elif path == "uart/CONFIG_SCHEMA/val 1/extL/all/id":
887 | config_var["id_type"] = {
888 | "class": "uart::UARTComponent",
889 | "parents": ["Component"],
890 | }
891 | elif path == "pins/esp32/val 1/id":
892 | config_var["id_type"] = "pin"
893 | else:
894 | raise TypeError("Cannot determine id_type for " + path)
895 |
896 | elif repr_schema in ejs.registry_schemas:
897 | solve_registry.append((ejs.registry_schemas[repr_schema], config_var))
898 |
899 | elif repr_schema in ejs.typed_schemas:
900 | config_var[S_TYPE] = "typed"
901 | types = config_var["types"] = {}
902 | typed_schema = ejs.typed_schemas[repr_schema]
903 | if len(typed_schema) > 1:
904 | config_var["typed_key"] = typed_schema[1].get("key", CONF_TYPE)
905 | for schema_key, schema_type in typed_schema[0][0].items():
906 | config = convert_config(schema_type, path + "/type_" + schema_key)
907 | types[schema_key] = config["schema"]
908 |
909 | elif DUMP_UNKNOWN:
910 | if S_TYPE not in config_var:
911 | config_var["unknown"] = repr_schema
912 |
913 | if DUMP_PATH:
914 | config_var["path"] = path
915 | if S_TYPE not in config_var:
916 | pass
917 | # print(path)
918 |
919 |
920 | def get_overridden_config(key, converted):
921 | # check if the key is in any extended schema in this converted schema, i.e.
922 | # if we see a on_value_range in a dallas sensor, then this is overridden because
923 | # it is already defined in sensor
924 | assert S_CONFIG_VARS not in converted and S_EXTENDS not in converted
925 | config = converted.get(S_SCHEMA, {})
926 |
927 | return get_overridden_key_inner(key, config, {})
928 |
929 |
930 | def get_overridden_key_inner(key, config, ret):
931 | if S_EXTENDS not in config:
932 | return ret
933 | for s in config[S_EXTENDS]:
934 | p = s.partition(".")
935 | s1 = output.get(p[0], {}).get(S_SCHEMAS, {}).get(p[2], {}).get(S_SCHEMA)
936 | if s1:
937 | if key in s1.get(S_CONFIG_VARS, {}):
938 | for k, v in s1.get(S_CONFIG_VARS)[key].items():
939 | if k not in ret: # keep most overridden
940 | ret[k] = v
941 | get_overridden_key_inner(key, s1, ret)
942 |
943 | return ret
944 |
945 |
946 | def convert_keys(converted, schema, path):
947 | for k, v in schema.items():
948 | # deprecated stuff
949 |         if repr(v).startswith("<function invalid"):
950 |             continue
951 |
952 |         result = {}
953 |
954 |         if isinstance(k, cv.GenerateID):
955 |             result["key"] = "GeneratedID"
956 |         elif isinstance(k, cv.Required):
957 |             result["key"] = "Required"
958 |         elif isinstance(k, (cv.Optional, cv.Inclusive, cv.Exclusive)):
959 |             result["key"] = "Optional"
960 |         else:
961 |             converted["key"] = "String"
962 |             key_string_match = re.search(
963 |                 r"<class 'esphome.config_validation.(.*)'>", str(k), re.IGNORECASE
964 | )
965 | if key_string_match:
966 | converted["key_type"] = key_string_match.group(1)
967 | else:
968 | converted["key_type"] = str(k)
969 |
970 | esphome_core.CORE.data = {
971 | esphome_core.KEY_CORE: {esphome_core.KEY_TARGET_PLATFORM: "esp8266"}
972 | }
973 | if hasattr(k, "default") and str(k.default) != "...":
974 | default_value = k.default()
975 | if default_value is not None:
976 | result["default"] = str(default_value)
977 |
978 | # Do value
979 | convert(v, result, path + f"/{str(k)}")
980 | if "schema" not in converted:
981 | converted[S_TYPE] = "schema"
982 | converted["schema"] = {S_CONFIG_VARS: {}}
983 | if S_CONFIG_VARS not in converted["schema"]:
984 | converted["schema"][S_CONFIG_VARS] = {}
985 | for base_k, base_v in get_overridden_config(k, converted).items():
986 | if base_k in result and base_v == result[base_k]:
987 | result.pop(base_k)
988 | converted["schema"][S_CONFIG_VARS][str(k)] = result
989 | if "key" in converted and converted["key"] == "String":
990 | config_vars = converted["schema"]["config_vars"]
991 | assert len(config_vars) == 1
992 | key = list(config_vars.keys())[0]
993 | assert key.startswith("<")
994 | config_vars["string"] = config_vars.pop(key)
995 |
996 |
997 | build_schema()
998 |
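The `get_overridden_config` / `get_overridden_key_inner` pair above deduplicates config vars that a schema merely inherits from whatever it extends. A minimal standalone sketch of that walk, assuming a toy `output` dict shaped like the one `build_schema()` fills in (module → schemas → schema → config_vars):

```python
# Toy stand-in for the module-level `output` built by build_schema():
# sensor.SENSOR_SCHEMA defines "on_value"; a dallas-like schema extends it.
output = {
    "sensor": {
        "schemas": {
            "SENSOR_SCHEMA": {
                "schema": {"config_vars": {"on_value": {"type": "trigger"}}}
            }
        }
    }
}


def overridden(key, config, ret):
    # Collect `key`'s definition from every schema in the extends chain,
    # keeping the first (most derived) value seen for each attribute.
    for s in config.get("extends", []):
        module, _, name = s.partition(".")
        base = output.get(module, {}).get("schemas", {}).get(name, {}).get("schema")
        if base:
            for k, v in base.get("config_vars", {}).get(key, {}).items():
                ret.setdefault(k, v)
            overridden(key, base, ret)
    return ret


dallas = {"extends": ["sensor.SENSOR_SCHEMA"], "config_vars": {}}
print(overridden("on_value", dallas, {}))  # -> {'type': 'trigger'}
```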
--------------------------------------------------------------------------------
/script/bump-version.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | import argparse
4 | import re
5 | from dataclasses import dataclass
6 | import sys
7 |
8 |
9 | @dataclass
10 | class Version:
11 | major: int
12 | minor: int
13 | patch: int
14 | beta: int = 0
15 | dev: bool = False
16 |
17 | def __str__(self):
18 | return f"{self.major}.{self.minor}.{self.full_patch}"
19 |
20 | @property
21 | def full_patch(self):
22 | res = f"{self.patch}"
23 | if self.beta > 0:
24 | res += f"b{self.beta}"
25 | if self.dev:
26 | res += "-dev"
27 | return res
28 |
29 | @classmethod
30 | def parse(cls, value):
31 |         match = re.match(r"(\d+)\.(\d+)\.(\d+)(b\d+)?(-dev)?", value)
32 | assert match is not None
33 | major = int(match[1])
34 | minor = int(match[2])
35 | patch = int(match[3])
36 | beta = int(match[4][1:]) if match[4] else 0
37 | dev = bool(match[5])
38 | return Version(major=major, minor=minor, patch=patch, beta=beta, dev=dev)
39 |
40 |
41 | def sub(path, pattern, repl, expected_count=1):
42 | with open(path, encoding="utf-8") as fh:
43 | content = fh.read()
44 | content, count = re.subn(pattern, repl, content, flags=re.MULTILINE)
45 | if expected_count is not None:
46 | assert count == expected_count, f"Pattern {pattern} replacement failed!"
47 | with open(path, "w", encoding="utf-8") as fh:
48 | fh.write(content)
49 |
50 |
51 | def write_version(version: Version):
52 | sub(
53 | "esphome/const.py",
54 | r"^__version__ = .*$",
55 | f'__version__ = "{version}"',
56 | )
57 |
58 |
59 | def main():
60 | parser = argparse.ArgumentParser()
61 | parser.add_argument("new_version", type=str)
62 | args = parser.parse_args()
63 |
64 | version = Version.parse(args.new_version)
65 | print(f"Bumping to {version}")
66 | write_version(version)
67 | return 0
68 |
69 |
70 | if __name__ == "__main__":
71 | sys.exit(main() or 0)
72 |
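A quick round-trip of the version grammar above, using only the `Version` class as defined:

```python
v = Version.parse("2024.5.1b2-dev")
assert (v.major, v.minor, v.patch, v.beta, v.dev) == (2024, 5, 1, 2, True)
# full_patch appends the beta suffix and the -dev marker in that order
assert str(v) == "2024.5.1b2-dev"
```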
--------------------------------------------------------------------------------
/script/ci-custom.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | import argparse
4 | import codecs
5 | import collections
6 | import fnmatch
7 | import functools
8 | import os.path
9 | import re
10 | import sys
11 | import time
12 |
13 | import colorama
14 |
15 | sys.path.append(os.path.dirname(__file__))
16 | from helpers import filter_changed, git_ls_files, print_error_for_file, styled
17 |
18 |
19 | def find_all(a_str, sub):
20 |     if sub not in a_str:
21 |         # Optimization: if the substring is not in the whole text at all,
22 |         # skip the per-line search
23 | return
24 | for i, line in enumerate(a_str.split("\n")):
25 | column = 0
26 | while True:
27 | column = line.find(sub, column)
28 | if column == -1:
29 | break
30 | yield i, column
31 | column += len(sub)
32 |
33 |
34 | file_types = (
35 | ".h",
36 | ".c",
37 | ".cpp",
38 | ".tcc",
39 | ".yaml",
40 | ".yml",
41 | ".ini",
42 | ".txt",
43 | ".ico",
44 | ".svg",
45 | ".png",
46 | ".py",
47 | ".html",
48 | ".js",
49 | ".md",
50 | ".sh",
51 | ".css",
52 | ".proto",
53 | ".conf",
54 | ".cfg",
55 | ".woff",
56 | ".woff2",
57 | "",
58 | )
59 | cpp_include = ("*.h", "*.c", "*.cpp", "*.tcc")
60 | py_include = ("*.py",)
61 | ignore_types = (".ico", ".png", ".woff", ".woff2", "", ".ttf", ".otf")
62 |
63 | LINT_FILE_CHECKS = []
64 | LINT_CONTENT_CHECKS = []
65 | LINT_POST_CHECKS = []
66 | EXECUTABLE_BIT = {}
67 |
68 | errors = collections.defaultdict(list)
69 |
70 |
71 | def add_errors(fname, errs):
72 | if not isinstance(errs, list):
73 | errs = [errs]
74 | for err in errs:
75 | if err is None:
76 | continue
77 | try:
78 | lineno, col, msg = err
79 | except ValueError:
80 | lineno = 1
81 | col = 1
82 | msg = err
83 | if not isinstance(msg, str):
84 |                 raise ValueError("Error message is not a string!")
85 | if not isinstance(lineno, int):
86 | raise ValueError("Line number is not an int!")
87 | if not isinstance(col, int):
88 | raise ValueError("Column number is not an int!")
89 | errors[fname].append((lineno, col, msg))
90 |
91 |
92 | def run_check(lint_obj, fname, *args):
93 | include = lint_obj["include"]
94 | exclude = lint_obj["exclude"]
95 | func = lint_obj["func"]
96 | if include is not None:
97 | for incl in include:
98 | if fnmatch.fnmatch(fname, incl):
99 | break
100 | else:
101 | return None
102 | for excl in exclude:
103 | if fnmatch.fnmatch(fname, excl):
104 | return None
105 | return func(*args)
106 |
107 |
108 | def run_checks(lints, fname, *args):
109 | for lint in lints:
110 | start = time.process_time()
111 | try:
112 | add_errors(fname, run_check(lint, fname, *args))
113 | except Exception:
114 | print(f"Check {lint['func'].__name__} on file {fname} failed:")
115 | raise
116 | duration = time.process_time() - start
117 | lint.setdefault("durations", []).append(duration)
118 |
119 |
120 | def _add_check(checks, func, include=None, exclude=None):
121 | checks.append(
122 | {
123 | "include": include,
124 | "exclude": exclude or [],
125 | "func": func,
126 | }
127 | )
128 |
129 |
130 | def lint_file_check(**kwargs):
131 | def decorator(func):
132 | _add_check(LINT_FILE_CHECKS, func, **kwargs)
133 | return func
134 |
135 | return decorator
136 |
137 |
138 | def lint_content_check(**kwargs):
139 | def decorator(func):
140 | _add_check(LINT_CONTENT_CHECKS, func, **kwargs)
141 | return func
142 |
143 | return decorator
144 |
145 |
146 | def lint_post_check(func):
147 | _add_check(LINT_POST_CHECKS, func)
148 | return func
149 |
150 |
151 | def lint_re_check(regex, **kwargs):
152 | flags = kwargs.pop("flags", re.MULTILINE)
153 | prog = re.compile(regex, flags)
154 | decor = lint_content_check(**kwargs)
155 |
156 | def decorator(func):
157 | @functools.wraps(func)
158 | def new_func(fname, content):
159 | errs = []
160 | for match in prog.finditer(content):
161 | if "NOLINT" in match.group(0):
162 | continue
163 | lineno = content.count("\n", 0, match.start()) + 1
164 | substr = content[: match.start()]
165 | col = len(substr) - substr.rfind("\n")
166 | err = func(fname, match)
167 | if err is None:
168 | continue
169 | errs.append((lineno, col + 1, err))
170 | return errs
171 |
172 | return decor(new_func)
173 |
174 | return decorator
175 |
176 |
177 | def lint_content_find_check(find, only_first=False, **kwargs):
178 | decor = lint_content_check(**kwargs)
179 |
180 | def decorator(func):
181 | @functools.wraps(func)
182 | def new_func(fname, content):
183 | find_ = find
184 | if callable(find):
185 | find_ = find(fname, content)
186 | errs = []
187 | for line, col in find_all(content, find_):
188 | err = func(fname)
189 | errs.append((line + 1, col + 1, err))
190 | if only_first:
191 | break
192 | return errs
193 |
194 | return decor(new_func)
195 |
196 | return decorator
197 |
198 |
199 | @lint_file_check(include=["*.ino"])
200 | def lint_ino(fname):
201 | return "This file extension (.ino) is not allowed. Please use either .cpp or .h"
202 |
203 |
204 | @lint_file_check(
205 | exclude=[f"*{f}" for f in file_types]
206 | + [
207 | ".clang-*",
208 | ".dockerignore",
209 | ".editorconfig",
210 | "*.gitignore",
211 | "LICENSE",
212 | "pylintrc",
213 | "MANIFEST.in",
214 | "docker/Dockerfile*",
215 | "docker/rootfs/*",
216 | "script/*",
217 | ]
218 | )
219 | def lint_ext_check(fname):
220 | return (
221 | "This file extension is not a registered file type. If this is an error, please "
222 | "update the script/ci-custom.py script."
223 | )
224 |
225 |
226 | @lint_file_check(
227 | exclude=[
228 | "**.sh",
229 | "docker/ha-addon-rootfs/**",
230 | "docker/*.py",
231 | "script/*",
232 | ]
233 | )
234 | def lint_executable_bit(fname):
235 | ex = EXECUTABLE_BIT[fname]
236 | if ex != 100644:
237 | return (
238 |             f"File has invalid executable bit {ex}. If running from a Windows machine, please "
239 |             "see how to disable the executable bit in git."
240 | )
241 | return None
242 |
243 |
244 | @lint_content_find_check(
245 | "\t",
246 | only_first=True,
247 | exclude=[
248 | "esphome/dashboard/static/ace.js",
249 | "esphome/dashboard/static/ext-searchbox.js",
250 | ],
251 | )
252 | def lint_tabs(fname):
253 | return "File contains tab character. Please convert tabs to spaces."
254 |
255 |
256 | @lint_content_find_check("\r", only_first=True)
257 | def lint_newline(fname):
258 | return "File contains Windows newline. Please set your editor to Unix newline mode."
259 |
260 |
261 | @lint_content_check(exclude=["*.svg"])
262 | def lint_end_newline(fname, content):
263 | if content and not content.endswith("\n"):
264 | return "File does not end with a newline, please add an empty line at the end of the file."
265 | return None
266 |
267 |
268 | CPP_RE_EOL = r".*?(?://.*?)?$"
269 | PY_RE_EOL = r".*?(?:#.*?)?$"
270 |
271 |
272 | def highlight(s):
273 | return f"\033[36m{s}\033[0m"
274 |
275 |
276 | @lint_re_check(
277 | r"^#define\s+([a-zA-Z0-9_]+)\s+(0b[10]+|0x[0-9a-fA-F]+|\d+)\s*?(?:\/\/.*?)?$",
278 | include=cpp_include,
279 | exclude=[
280 | "esphome/core/log.h",
281 | "esphome/components/socket/headers.h",
282 | "esphome/core/defines.h",
283 | ],
284 | )
285 | def lint_no_defines(fname, match):
286 | s = highlight(f"static const uint8_t {match.group(1)} = {match.group(2)};")
287 | return (
288 | "#define macros for integer constants are not allowed, please use "
289 | f"{s} style instead (replace uint8_t with the appropriate "
290 | "datatype). See also Google style guide."
291 | )
292 |
293 |
294 | @lint_re_check(r"^\s*delay\((\d+)\);" + CPP_RE_EOL, include=cpp_include)
295 | def lint_no_long_delays(fname, match):
296 | duration_ms = int(match.group(1))
297 | if duration_ms < 50:
298 | return None
299 | return (
300 | f"{highlight(match.group(0).strip())} - long calls to delay() are not allowed "
301 | "in ESPHome because everything executes in one thread. Calling delay() will "
302 | "block the main thread and slow down ESPHome.\n"
303 | "If there's no way to work around the delay() and it doesn't execute often, please add "
304 | "a '// NOLINT' comment to the line."
305 | )
306 |
307 |
308 | @lint_content_check(include=["esphome/const.py"])
309 | def lint_const_ordered(fname, content):
310 | """Lint that value in const.py are ordered.
311 |     """Lint that values in const.py are ordered.
312 | Reason: Otherwise people add it to the end, and then that results in merge conflicts.
313 | """
314 | lines = content.splitlines()
315 | errs = []
316 | for start in ["CONF_", "ICON_", "UNIT_"]:
317 | matching = [
318 | (i + 1, line) for i, line in enumerate(lines) if line.startswith(start)
319 | ]
320 | ordered = list(sorted(matching, key=lambda x: x[1].replace("_", " ")))
321 | ordered = [(mi, ol) for (mi, _), (_, ol) in zip(matching, ordered)]
322 | for (mi, mline), (_, ol) in zip(matching, ordered):
323 | if mline == ol:
324 | continue
325 | target = next(i for i, line in ordered if line == mline)
326 | target_text = next(line for i, line in matching if target == i)
327 | errs.append(
328 | (
329 | mi,
330 | 1,
331 | f"Constant {highlight(mline)} is not ordered, please make sure all "
332 | f"constants are ordered. See line {mi} (should go to line {target}, "
333 | f"{target_text})",
334 | )
335 | )
336 | return errs
337 |
338 |
339 | @lint_re_check(r'^\s*CONF_([A-Z_0-9a-z]+)\s+=\s+[\'"](.*?)[\'"]\s*?$', include=["*.py"])
340 | def lint_conf_matches(fname, match):
341 | const = match.group(1)
342 | value = match.group(2)
343 | const_norm = const.lower()
344 | value_norm = value.replace(".", "_")
345 | if const_norm == value_norm:
346 | return None
347 | return (
348 | f"Constant {highlight('CONF_' + const)} does not match value {highlight(value)}! "
349 | "Please make sure the constant's name matches its value!"
350 | )
351 |
352 |
353 | CONF_RE = r'^(CONF_[a-zA-Z0-9_]+)\s*=\s*[\'"].*?[\'"]\s*?$'
354 | with codecs.open("esphome/const.py", "r", encoding="utf-8") as const_f_handle:
355 | constants_content = const_f_handle.read()
356 | CONSTANTS = [m.group(1) for m in re.finditer(CONF_RE, constants_content, re.MULTILINE)]
357 |
358 | CONSTANTS_USES = collections.defaultdict(list)
359 |
360 |
361 | @lint_re_check(CONF_RE, include=["*.py"], exclude=["esphome/const.py"])
362 | def lint_conf_from_const_py(fname, match):
363 | name = match.group(1)
364 | if name not in CONSTANTS:
365 | CONSTANTS_USES[name].append(fname)
366 | return None
367 | return (
368 | f"Constant {highlight(name)} has already been defined in const.py - "
369 | "please import the constant from const.py directly."
370 | )
371 |
372 |
373 | RAW_PIN_ACCESS_RE = (
374 | r"^\s(pinMode|digitalWrite|digitalRead)\((.*)->get_pin\(\),\s*([^)]+).*\)"
375 | )
376 |
377 |
378 | @lint_re_check(RAW_PIN_ACCESS_RE, include=cpp_include)
379 | def lint_no_raw_pin_access(fname, match):
380 | func = match.group(1)
381 | pin = match.group(2)
382 | mode = match.group(3)
383 | new_func = {
384 | "pinMode": "pin_mode",
385 | "digitalWrite": "digital_write",
386 | "digitalRead": "digital_read",
387 | }[func]
388 | new_code = highlight(f"{pin}->{new_func}({mode})")
389 | return f"Don't use raw {func} calls. Instead, use the `->{new_func}` function: {new_code}"
390 |
391 |
392 | # Functions from Arduino framework that are forbidden to use directly
393 | ARDUINO_FORBIDDEN = [
394 | "digitalWrite",
395 | "digitalRead",
396 | "pinMode",
397 | "shiftOut",
398 | "shiftIn",
399 | "radians",
400 | "degrees",
401 | "interrupts",
402 | "noInterrupts",
403 | "lowByte",
404 | "highByte",
405 | "bitRead",
406 | "bitSet",
407 | "bitClear",
408 | "bitWrite",
409 | "bit",
410 | "analogRead",
411 | "analogWrite",
412 | "pulseIn",
413 | "pulseInLong",
414 | "tone",
415 | ]
416 | ARDUINO_FORBIDDEN_RE = r"[^\w\d](" + r"|".join(ARDUINO_FORBIDDEN) + r")\(.*"
417 |
418 |
419 | @lint_re_check(
420 | ARDUINO_FORBIDDEN_RE,
421 | include=cpp_include,
422 | exclude=[
423 | "esphome/components/mqtt/custom_mqtt_device.h",
424 | "esphome/components/sun/sun.cpp",
425 | ],
426 | )
427 | def lint_no_arduino_framework_functions(fname, match):
428 | nolint = highlight("// NOLINT")
429 | return (
430 | f"The function {highlight(match.group(1))} from the Arduino framework is forbidden to be "
431 | f"used directly in the ESPHome codebase. Please use ESPHome's abstractions and equivalent "
432 | f"C++ instead.\n"
433 | f"\n"
434 | f"(If the function is strictly necessary, please add `{nolint}` to the end of the line)"
435 | )
436 |
437 |
438 | IDF_CONVERSION_FORBIDDEN = {
439 | "ARDUINO_ARCH_ESP32": "USE_ESP32",
440 | "ARDUINO_ARCH_ESP8266": "USE_ESP8266",
441 | "pgm_read_byte": "progmem_read_byte",
442 | "ICACHE_RAM_ATTR": "IRAM_ATTR",
443 | "esphome/core/esphal.h": "esphome/core/hal.h",
444 | }
445 | IDF_CONVERSION_FORBIDDEN_RE = r"(" + r"|".join(IDF_CONVERSION_FORBIDDEN) + r").*"
446 |
447 |
448 | @lint_re_check(
449 | IDF_CONVERSION_FORBIDDEN_RE,
450 | include=cpp_include,
451 | )
452 | def lint_no_removed_in_idf_conversions(fname, match):
453 | replacement = IDF_CONVERSION_FORBIDDEN[match.group(1)]
454 | return (
455 | f"The macro {highlight(match.group(1))} can no longer be used in ESPHome directly. "
456 | f"Please use {highlight(replacement)} instead."
457 | )
458 |
459 |
460 | @lint_re_check(
461 | r"[^\w\d]byte +[\w\d]+\s*=",
462 | include=cpp_include,
463 | exclude={
464 | "esphome/components/tuya/tuya.h",
465 | },
466 | )
467 | def lint_no_byte_datatype(fname, match):
468 | return (
469 | f"The datatype {highlight('byte')} is not allowed to be used in ESPHome. "
470 | f"Please use {highlight('uint8_t')} instead."
471 | )
472 |
473 |
474 | @lint_post_check
475 | def lint_constants_usage():
476 | errs = []
477 | for constant, uses in CONSTANTS_USES.items():
478 | if len(uses) < 3:
479 | continue
480 | errs.append(
481 | f"Constant {highlight(constant)} is defined in {len(uses)} files. Please move all definitions of the "
482 | f"constant to const.py (Uses: {', '.join(uses)})"
483 | )
484 | return errs
485 |
486 |
487 | def relative_cpp_search_text(fname, content):
488 | parts = fname.split("/")
489 | integration = parts[2]
490 | return f'#include "esphome/components/{integration}'
491 |
492 |
493 | @lint_content_find_check(relative_cpp_search_text, include=["esphome/components/*.cpp"])
494 | def lint_relative_cpp_import(fname):
495 | return (
496 | "Component contains absolute import - Components must always use "
497 | "relative imports.\n"
498 | "Change:\n"
499 | ' #include "esphome/components/abc/abc.h"\n'
500 | "to:\n"
501 | ' #include "abc.h"\n\n'
502 | )
503 |
504 |
505 | def relative_py_search_text(fname, content):
506 | parts = fname.split("/")
507 | integration = parts[2]
508 | return f"esphome.components.{integration}"
509 |
510 |
511 | @lint_content_find_check(
512 | relative_py_search_text,
513 | include=["esphome/components/*.py"],
514 | exclude=[
515 | "esphome/components/libretiny/generate_components.py",
516 | "esphome/components/web_server/__init__.py",
517 | ],
518 | )
519 | def lint_relative_py_import(fname):
520 | return (
521 | "Component contains absolute import - Components must always use "
522 | "relative imports within the integration.\n"
523 | "Change:\n"
524 |         " from esphome.components.abc import abc_ns\n"
525 | "to:\n"
526 | " from . import abc_ns\n\n"
527 | )
528 |
529 |
530 | @lint_content_check(
531 | include=[
532 | "esphome/components/*.h",
533 | "esphome/components/*.cpp",
534 | "esphome/components/*.tcc",
535 | ],
536 | exclude=[
537 | "esphome/components/socket/headers.h",
538 | "esphome/components/esp32/core.cpp",
539 | "esphome/components/esp8266/core.cpp",
540 | "esphome/components/rp2040/core.cpp",
541 | "esphome/components/libretiny/core.cpp",
542 | "esphome/components/host/core.cpp",
543 | ],
544 | )
545 | def lint_namespace(fname, content):
546 | expected_name = re.match(
547 | r"^esphome/components/([^/]+)/.*", fname.replace(os.path.sep, "/")
548 | ).group(1)
549 | search = f"namespace {expected_name}"
550 | if search in content:
551 | return None
552 | return (
553 | "Invalid namespace found in C++ file. All integration C++ files should put all "
554 | "functions in a separate namespace that matches the integration's name. "
555 | f"Please make sure the file contains {highlight(search)}"
556 | )
557 |
558 |
559 | @lint_content_find_check('"esphome.h"', include=cpp_include, exclude=["tests/custom.h"])
560 | def lint_esphome_h(fname):
561 | return (
562 | "File contains reference to 'esphome.h' - This file is "
563 | "auto-generated and should only be used for *custom* "
564 | "components. Please replace with references to the direct files."
565 | )
566 |
567 |
568 | @lint_content_check(include=["*.h"])
569 | def lint_pragma_once(fname, content):
570 | if "#pragma once" not in content:
571 | return (
572 | "Header file contains no 'pragma once' header guard. Please add a "
573 | "'#pragma once' line at the top of the file."
574 | )
575 | return None
576 |
577 |
578 | def lint_inclusive_language(fname, match):
579 | # From https://git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git/commit/?id=49decddd39e5f6132ccd7d9fdc3d7c470b0061bb
580 | return (
581 | "Avoid the use of whitelist/blacklist/slave.\n"
582 | "Recommended replacements for 'master / slave' are:\n"
583 | " '{primary,main} / {secondary,replica,subordinate}\n"
584 | " '{initiator,requester} / {target,responder}'\n"
585 | " '{controller,host} / {device,worker,proxy}'\n"
586 | " 'leader / follower'\n"
587 | " 'director / performer'\n"
588 | "\n"
589 | "Recommended replacements for 'blacklist/whitelist' are:\n"
590 | " 'denylist / allowlist'\n"
591 | " 'blocklist / passlist'"
592 | )
593 |
594 |
595 | lint_re_check(
596 | r"(whitelist|blacklist|slave)" + PY_RE_EOL,
597 | include=py_include,
598 | exclude=["script/ci-custom.py"],
599 | flags=re.IGNORECASE | re.MULTILINE,
600 | )(lint_inclusive_language)
601 |
602 |
603 | lint_re_check(
604 | r"(whitelist|blacklist|slave)" + CPP_RE_EOL,
605 | include=cpp_include,
606 | flags=re.IGNORECASE | re.MULTILINE,
607 | )(lint_inclusive_language)
608 |
609 |
610 | @lint_re_check(r"[\t\r\f\v ]+$")
611 | def lint_trailing_whitespace(fname, match):
612 | return "Trailing whitespace detected"
613 |
614 |
615 | @lint_content_find_check(
616 | "ESP_LOG",
617 | include=["*.h", "*.tcc"],
618 | exclude=[
619 | "esphome/components/binary_sensor/binary_sensor.h",
620 | "esphome/components/button/button.h",
621 | "esphome/components/climate/climate.h",
622 | "esphome/components/cover/cover.h",
623 | "esphome/components/datetime/date_entity.h",
624 | "esphome/components/datetime/time_entity.h",
625 | "esphome/components/datetime/datetime_entity.h",
626 | "esphome/components/display/display.h",
627 | "esphome/components/event/event.h",
628 | "esphome/components/fan/fan.h",
629 | "esphome/components/i2c/i2c.h",
630 | "esphome/components/lock/lock.h",
631 | "esphome/components/mqtt/mqtt_component.h",
632 | "esphome/components/number/number.h",
633 | "esphome/components/one_wire/one_wire.h",
634 | "esphome/components/output/binary_output.h",
635 | "esphome/components/output/float_output.h",
636 | "esphome/components/nextion/nextion_base.h",
637 | "esphome/components/select/select.h",
638 | "esphome/components/sensor/sensor.h",
639 | "esphome/components/stepper/stepper.h",
640 | "esphome/components/switch/switch.h",
641 | "esphome/components/text/text.h",
642 | "esphome/components/text_sensor/text_sensor.h",
643 | "esphome/components/valve/valve.h",
644 | "esphome/core/component.h",
645 | "esphome/core/gpio.h",
646 | "esphome/core/log.h",
647 | "tests/custom.h",
648 | ],
649 | )
650 | def lint_log_in_header(fname):
651 | return (
652 | "Found reference to ESP_LOG in header file. Using ESP_LOG* in header files "
653 | "is currently not possible - please move the definition to a source file (.cpp)"
654 | )
655 |
656 |
657 | def main():
658 | colorama.init()
659 |
660 | parser = argparse.ArgumentParser()
661 | parser.add_argument(
662 | "files", nargs="*", default=[], help="files to be processed (regex on path)"
663 | )
664 | parser.add_argument(
665 | "-c", "--changed", action="store_true", help="Only run on changed files"
666 | )
667 | parser.add_argument(
668 | "--print-slowest", action="store_true", help="Print the slowest checks"
669 | )
670 | args = parser.parse_args()
671 |
672 | global EXECUTABLE_BIT
673 | EXECUTABLE_BIT = git_ls_files()
674 | files = list(EXECUTABLE_BIT.keys())
675 | # Match against re
676 | file_name_re = re.compile("|".join(args.files))
677 | files = [p for p in files if file_name_re.search(p)]
678 |
679 | if args.changed:
680 | files = filter_changed(files)
681 |
682 | files.sort()
683 |
684 | for fname in files:
685 | _, ext = os.path.splitext(fname)
686 | run_checks(LINT_FILE_CHECKS, fname, fname)
687 | if ext in ignore_types:
688 | continue
689 | try:
690 | with codecs.open(fname, "r", encoding="utf-8") as f_handle:
691 | content = f_handle.read()
692 | except UnicodeDecodeError:
693 | add_errors(
694 | fname,
695 | "File is not readable as UTF-8. Please set your editor to UTF-8 mode.",
696 | )
697 | continue
698 | run_checks(LINT_CONTENT_CHECKS, fname, fname, content)
699 |
700 | run_checks(LINT_POST_CHECKS, "POST")
701 |
702 | for f, errs in sorted(errors.items()):
703 | bold = functools.partial(styled, colorama.Style.BRIGHT)
704 | bold_red = functools.partial(styled, (colorama.Style.BRIGHT, colorama.Fore.RED))
705 | err_str = (
706 | f"{bold(f'{f}:{lineno}:{col}:')} {bold_red('lint:')} {msg}\n"
707 | for lineno, col, msg in errs
708 | )
709 | print_error_for_file(f, "\n".join(err_str))
710 |
711 | if args.print_slowest:
712 | lint_times = []
713 | for lint in LINT_FILE_CHECKS + LINT_CONTENT_CHECKS + LINT_POST_CHECKS:
714 | durations = lint.get("durations", [])
715 | lint_times.append((sum(durations), len(durations), lint["func"].__name__))
716 | lint_times.sort(key=lambda x: -x[0])
717 | for i in range(min(len(lint_times), 10)):
718 | dur, invocations, name = lint_times[i]
719 | print(f" - '{name}' took {dur:.2f}s total (ran on {invocations} files)")
720 | print(f"Total time measured: {sum(x[0] for x in lint_times):.2f}s")
721 |
722 | return len(errors)
723 |
724 |
725 | if __name__ == "__main__":
726 | sys.exit(main())
727 |
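New checks are registered purely through the decorators above: returning `None` means "no error", returning a string reports it at the match position, and a `// NOLINT` on the matched line suppresses `lint_re_check` rules. A hypothetical example (this rule is made up for illustration, not part of the script):

```python
@lint_re_check(r"[^\w\d]printf\(" + CPP_RE_EOL, include=cpp_include)
def lint_no_printf(fname, match):
    return (
        f"Don't call {highlight('printf')} directly in ESPHome code; "
        "use the ESP_LOG* logging macros instead."
    )
```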
--------------------------------------------------------------------------------
/script/ci-suggest-changes:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | set -e
4 |
5 | if git diff-index --quiet HEAD --; then
6 | echo "No changes detected, formatting is correct!"
7 | exit 0
8 | else
9 | echo "========================================================="
10 |     echo "Your formatting is not correct. ESPHome uses clang-format to format"
11 |     echo "all source files in a unified way. Please apply the changes listed below."
12 | echo
13 | echo "The following files need to be changed:"
14 | git diff HEAD --name-only | sed 's/^/ /'
15 | echo
16 | echo
17 | echo "========================================================="
18 | echo
19 | git diff HEAD
20 | exit 1
21 | fi
22 |
--------------------------------------------------------------------------------
/script/clang-format:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | from helpers import (
4 | print_error_for_file,
5 | get_output,
6 | git_ls_files,
7 | filter_changed,
8 | get_binary,
9 | )
10 | import argparse
11 | import click
12 | import colorama
13 | import multiprocessing
14 | import os
15 | import queue
16 | import re
17 | import subprocess
18 | import sys
19 | import threading
20 |
21 |
22 |
23 | def run_format(executable, args, queue, lock, failed_files):
24 | """Takes filenames out of queue and runs clang-format on them."""
25 | while True:
26 | path = queue.get()
27 | invocation = [executable]
28 | if args.inplace:
29 | invocation.append("-i")
30 | else:
31 | invocation.extend(["--dry-run", "-Werror"])
32 | invocation.append(path)
33 |
34 | proc = subprocess.run(invocation, capture_output=True, encoding="utf-8")
35 | if proc.returncode != 0:
36 | with lock:
37 | print_error_for_file(path, proc.stderr)
38 | failed_files.append(path)
39 | queue.task_done()
40 |
41 |
42 | def progress_bar_show(value):
43 | return value if value is not None else ""
44 |
45 |
46 | def main():
47 | colorama.init()
48 |
49 | parser = argparse.ArgumentParser()
50 | parser.add_argument(
51 | "-j",
52 | "--jobs",
53 | type=int,
54 | default=multiprocessing.cpu_count(),
55 | help="number of format instances to be run in parallel.",
56 | )
57 | parser.add_argument(
58 | "files", nargs="*", default=[], help="files to be processed (regex on path)"
59 | )
60 | parser.add_argument(
61 | "-i", "--inplace", action="store_true", help="reformat files in-place"
62 | )
63 | parser.add_argument(
64 | "-c", "--changed", action="store_true", help="only run on changed files"
65 | )
66 | args = parser.parse_args()
67 |
68 | files = []
69 | for path in git_ls_files(["*.cpp", "*.h", "*.tcc"]):
70 | files.append(os.path.relpath(path, os.getcwd()))
71 |
72 | if args.files:
73 | # Match against files specified on command-line
74 | file_name_re = re.compile("|".join(args.files))
75 | files = [p for p in files if file_name_re.search(p)]
76 |
77 | if args.changed:
78 | files = filter_changed(files)
79 |
80 | files.sort()
81 |
82 | failed_files = []
83 | try:
84 | executable = get_binary("clang-format", 13)
85 | task_queue = queue.Queue(args.jobs)
86 | lock = threading.Lock()
87 | for _ in range(args.jobs):
88 | t = threading.Thread(
89 | target=run_format, args=(executable, args, task_queue, lock, failed_files)
90 | )
91 | t.daemon = True
92 | t.start()
93 |
94 | # Fill the queue with files.
95 | with click.progressbar(
96 | files, width=30, file=sys.stderr, item_show_func=progress_bar_show
97 | ) as bar:
98 | for name in bar:
99 | task_queue.put(name)
100 |
101 | # Wait for all threads to be done.
102 | task_queue.join()
103 |
104 |     except FileNotFoundError:
105 | return 1
106 | except KeyboardInterrupt:
107 | print()
108 | print("Ctrl-C detected, goodbye.")
109 | # Kill subprocesses (and ourselves!)
110 | # No simple, clean alternative appears to be available.
111 | os.kill(0, 9)
112 | return 2 # Will not execute.
113 |
114 | return len(failed_files)
115 |
116 |
117 | if __name__ == "__main__":
118 | sys.exit(main())
119 |
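The threading shape above is a plain bounded producer/consumer: daemon workers block on `queue.get()`, and `task_queue.join()` is the completion barrier. A stripped-down, runnable sketch of the same pattern (the `upper()` call stands in for invoking clang-format):

```python
import queue
import threading

results, lock = [], threading.Lock()
q = queue.Queue(4)  # bounded: the producer blocks when workers fall behind


def worker():
    while True:
        name = q.get()  # blocks until a file name is available
        with lock:
            results.append(name.upper())  # stand-in for the subprocess call
        q.task_done()


for _ in range(4):
    t = threading.Thread(target=worker)
    t.daemon = True  # don't keep the process alive once main exits
    t.start()

for name in ["a.cpp", "b.cpp", "c.h"]:
    q.put(name)
q.join()  # returns once every item has been task_done()'d
print(sorted(results))  # -> ['A.CPP', 'B.CPP', 'C.H']
```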
--------------------------------------------------------------------------------
/script/clang-tidy:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | from helpers import (
4 | print_error_for_file,
5 | get_output,
6 | filter_grep,
7 | build_all_include,
8 | temp_header_file,
9 | git_ls_files,
10 | filter_changed,
11 | load_idedata,
12 | root_path,
13 | basepath,
14 | get_binary,
15 | )
16 | import argparse
17 | import click
18 | import colorama
19 | import multiprocessing
20 | import os
21 | import queue
22 | import re
23 | import shutil
24 | import subprocess
25 | import sys
26 | import tempfile
27 | import threading
28 |
29 |
30 |
31 | def clang_options(idedata):
32 | cmd = []
33 |
34 | # extract target architecture from triplet in g++ filename
35 | triplet = os.path.basename(idedata["cxx_path"])[:-4]
36 | if triplet.startswith("xtensa-"):
37 | # clang doesn't support Xtensa (yet?), so compile in 32-bit mode and pretend we're the Xtensa compiler
38 | cmd.append("-m32")
39 | cmd.append("-D__XTENSA__")
40 | else:
41 | cmd.append(f"--target={triplet}")
42 |
43 | # set flags
44 | cmd.extend(
45 | [
46 | # disable built-in include directories from the host
47 | "-nostdinc",
48 | "-nostdinc++",
49 | # replace pgmspace.h, as it uses GNU extensions clang doesn't support
50 | # https://github.com/earlephilhower/newlib-xtensa/pull/18
51 | "-D_PGMSPACE_H_",
52 | "-Dpgm_read_byte(s)=(*(const uint8_t *)(s))",
53 | "-Dpgm_read_byte_near(s)=(*(const uint8_t *)(s))",
54 | "-Dpgm_read_word(s)=(*(const uint16_t *)(s))",
55 | "-Dpgm_read_dword(s)=(*(const uint32_t *)(s))",
56 | "-DPROGMEM=",
57 | "-DPGM_P=const char *",
58 | "-DPSTR(s)=(s)",
59 | # this next one is also needed with upstream pgmspace.h
60 | # suppress warning about identifier naming in expansion of this macro
61 | "-DPSTRN(s, n)=(s)",
62 | # suppress warning about attribute cannot be applied to type
63 | # https://github.com/esp8266/Arduino/pull/8258
64 | "-Ddeprecated(x)=",
65 | # allow to condition code on the presence of clang-tidy
66 | "-DCLANG_TIDY",
67 | # (esp-idf) Disable this header because they use asm with registers clang-tidy doesn't know
68 | "-D__XTENSA_API_H__",
69 | # (esp-idf) Fix __once_callable in some libstdc++ headers
70 | "-D_GLIBCXX_HAVE_TLS",
71 | ]
72 | )
73 |
74 | # copy compiler flags, except those clang doesn't understand.
75 | cmd.extend(
76 | flag
77 | for flag in idedata["cxx_flags"]
78 | if flag
79 | not in (
80 | "-free",
81 | "-fipa-pta",
82 | "-fstrict-volatile-bitfields",
83 | "-mlongcalls",
84 | "-mtext-section-literals",
85 | "-mfix-esp32-psram-cache-issue",
86 | "-mfix-esp32-psram-cache-strategy=memw",
87 | "-fno-tree-switch-conversion",
88 | )
89 | )
90 |
91 | # defines
92 | cmd.extend(f"-D{define}" for define in idedata["defines"])
93 |
94 | # add toolchain include directories using -isystem to suppress their errors
95 | # idedata contains include directories for all toolchains of this platform, only use those from the one in use
96 | toolchain_dir = os.path.normpath(f"{idedata['cxx_path']}/../../")
97 | for directory in idedata["includes"]["toolchain"]:
98 | if directory.startswith(toolchain_dir):
99 | cmd.extend(["-isystem", directory])
100 |
101 | # add library include directories using -isystem to suppress their errors
102 | for directory in sorted(set(idedata["includes"]["build"])):
103 | # skip our own directories, we add those later
104 | if (
105 | not directory.startswith(f"{root_path}/")
106 | or directory.startswith(f"{root_path}/.pio/")
107 | or directory.startswith(f"{root_path}/managed_components/")
108 | ):
109 | cmd.extend(["-isystem", directory])
110 |
111 | # add the esphome include directory using -I
112 | cmd.extend(["-I", root_path])
113 |
114 | return cmd
115 |
116 |
117 |
118 |
119 | def run_tidy(executable, args, options, tmpdir, queue, lock, failed_files):
120 | while True:
121 | path = queue.get()
122 | invocation = [executable]
123 |
124 | if tmpdir is not None:
125 | invocation.append("--export-fixes")
126 | # Get a temporary file. We immediately close the handle so clang-tidy can
127 | # overwrite it.
128 | (handle, name) = tempfile.mkstemp(suffix=".yaml", dir=tmpdir)
129 | os.close(handle)
130 | invocation.append(name)
131 |
132 | if args.quiet:
133 | invocation.append("--quiet")
134 |
135 | if sys.stdout.isatty():
136 | invocation.append("--use-color")
137 |
138 | invocation.append(f"--header-filter={os.path.abspath(basepath)}/.*")
139 | invocation.append(os.path.abspath(path))
140 | invocation.append("--")
141 | invocation.extend(options)
142 |
143 | proc = subprocess.run(invocation, capture_output=True, encoding="utf-8")
144 | if proc.returncode != 0:
145 | with lock:
146 | print_error_for_file(path, proc.stdout)
147 | failed_files.append(path)
148 | queue.task_done()
149 |
150 |
151 | def progress_bar_show(value):
152 |     # show the current file name, or an empty string when finished
153 |     return value if value is not None else ""
154 |
155 |
156 | def split_list(a, n):
157 | k, m = divmod(len(a), n)
158 | return [a[i * k + min(i, m) : (i + 1) * k + min(i + 1, m)] for i in range(n)]
159 |
160 |
161 | def main():
162 | colorama.init()
163 |
164 | parser = argparse.ArgumentParser()
165 | parser.add_argument(
166 | "-j",
167 | "--jobs",
168 | type=int,
169 | default=multiprocessing.cpu_count(),
170 | help="number of tidy instances to be run in parallel.",
171 | )
172 | parser.add_argument(
173 | "-e",
174 | "--environment",
175 | default="esp32-arduino-tidy",
176 | help="the PlatformIO environment to use (as defined in platformio.ini)",
177 | )
178 | parser.add_argument(
179 | "files", nargs="*", default=[], help="files to be processed (regex on path)"
180 | )
181 | parser.add_argument("--fix", action="store_true", help="apply fix-its")
182 | parser.add_argument(
183 | "-q", "--quiet", action="store_false", help="run clang-tidy in quiet mode"
184 | )
185 | parser.add_argument(
186 | "-c", "--changed", action="store_true", help="only run on changed files"
187 | )
188 | parser.add_argument("-g", "--grep", help="only run on files containing value")
189 | parser.add_argument(
190 | "--split-num", type=int, help="split the files into X jobs.", default=None
191 | )
192 | parser.add_argument(
193 | "--split-at", type=int, help="which split is this? starts at 1", default=None
194 | )
195 | parser.add_argument(
196 | "--all-headers",
197 | action="store_true",
198 | help="create a dummy file that checks all headers",
199 | )
200 | args = parser.parse_args()
201 |
202 | idedata = load_idedata(args.environment)
203 | options = clang_options(idedata)
204 |
205 | files = []
206 | for path in git_ls_files(["*.cpp"]):
207 | files.append(os.path.relpath(path, os.getcwd()))
208 |
209 | if args.files:
210 | # Match against files specified on command-line
211 | file_name_re = re.compile("|".join(args.files))
212 | files = [p for p in files if file_name_re.search(p)]
213 |
214 | if args.changed:
215 | files = filter_changed(files)
216 |
217 | if args.grep:
218 | files = filter_grep(files, args.grep)
219 |
220 | files.sort()
221 |
222 | if args.split_num:
223 | files = split_list(files, args.split_num)[args.split_at - 1]
224 |
225 | if args.all_headers and args.split_at in (None, 1):
226 | build_all_include()
227 | files.insert(0, temp_header_file)
228 |
229 | tmpdir = None
230 | if args.fix:
231 | tmpdir = tempfile.mkdtemp()
232 |
233 | failed_files = []
234 | try:
235 | executable = get_binary("clang-tidy", 14)
236 | task_queue = queue.Queue(args.jobs)
237 | lock = threading.Lock()
238 | for _ in range(args.jobs):
239 | t = threading.Thread(
240 | target=run_tidy,
241 | args=(executable, args, options, tmpdir, task_queue, lock, failed_files),
242 | )
243 | t.daemon = True
244 | t.start()
245 |
246 | # Fill the queue with files.
247 | with click.progressbar(
248 | files, width=30, file=sys.stderr, item_show_func=progress_bar_show
249 | ) as bar:
250 | for name in bar:
251 | task_queue.put(name)
252 |
253 | # Wait for all threads to be done.
254 | task_queue.join()
255 |
256 |     except FileNotFoundError:
257 | return 1
258 | except KeyboardInterrupt:
259 | print()
260 | print("Ctrl-C detected, goodbye.")
261 | if tmpdir:
262 | shutil.rmtree(tmpdir)
263 | # Kill subprocesses (and ourselves!)
264 | # No simple, clean alternative appears to be available.
265 | os.kill(0, 9)
266 | return 2 # Will not execute.
267 |
268 | if args.fix and failed_files:
269 | print("Applying fixes ...")
270 | try:
271 | try:
272 | subprocess.call(["clang-apply-replacements-14", tmpdir])
273 | except FileNotFoundError:
274 | subprocess.call(["clang-apply-replacements", tmpdir])
275 | except FileNotFoundError:
276 |         print("Error: please install clang-apply-replacements-14 or clang-apply-replacements.\n", file=sys.stderr)
277 | except:
278 | print("Error applying fixes.\n", file=sys.stderr)
279 | raise
280 |
281 | return len(failed_files)
282 |
283 |
284 | if __name__ == "__main__":
285 | sys.exit(main())
286 |
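`split_list` spreads the remainder over the leading chunks, so `--split-num`/`--split-at` CI shards stay within one file of each other. For example:

```python
# 8 files into 3 shards: sizes 3, 3 and 2
print(split_list(list(range(8)), 3))
# -> [[0, 1, 2], [3, 4, 5], [6, 7]]
```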
--------------------------------------------------------------------------------
/script/component_test:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | set -e
4 |
5 | cd "$(dirname "$0")/.."
6 |
7 | set -x
8 |
9 | pytest tests/component_tests
10 |
--------------------------------------------------------------------------------
/script/devcontainer-post-create:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | set -e
4 | # set -x
5 |
6 | apt update
7 | apt-get install avahi-utils -y
8 |
9 | mkdir -p config
10 | script/setup
11 |
12 | cpp_json=.vscode/c_cpp_properties.json
13 | if [ ! -f $cpp_json ]; then
14 | echo "Initializing PlatformIO..."
15 | pio init --ide vscode --silent
16 | sed -i "/\\/workspaces\/esphome\/include/d" $cpp_json
17 | else
18 | echo "Cpp environment already configured. To reconfigure it you can run one the following commands:"
19 | echo " pio init --ide vscode"
20 | fi
21 |
--------------------------------------------------------------------------------
/script/fulltest:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | set -e
4 |
5 | cd "$(dirname "$0")/.."
6 |
7 | set -x
8 |
9 | script/ci-custom.py
10 | script/lint-python
11 | script/lint-cpp
12 | script/unit_test
13 | script/component_test
14 | script/test
15 | script/test_build_components
16 |
--------------------------------------------------------------------------------
/script/helpers.py:
--------------------------------------------------------------------------------
1 | import json
2 | import os.path
3 | import re
4 | import subprocess
5 | from pathlib import Path
6 |
7 | import colorama
8 |
9 | root_path = os.path.abspath(os.path.normpath(os.path.join(__file__, "..", "..")))
10 | basepath = os.path.join(root_path, "esphome")
11 | temp_folder = os.path.join(root_path, ".temp")
12 | temp_header_file = os.path.join(temp_folder, "all-include.cpp")
13 |
14 |
15 | def styled(color, msg, reset=True):
16 | prefix = "".join(color) if isinstance(color, tuple) else color
17 | suffix = colorama.Style.RESET_ALL if reset else ""
18 | return prefix + msg + suffix
19 |
20 |
21 | def print_error_for_file(file, body):
22 | print(
23 | styled(colorama.Fore.GREEN, "### File ")
24 | + styled((colorama.Fore.GREEN, colorama.Style.BRIGHT), file)
25 | )
26 | print()
27 | if body is not None:
28 | print(body)
29 | print()
30 |
31 |
32 | def build_all_include():
33 | # Build a cpp file that includes all header files in this repo.
34 | # Otherwise header-only integrations would not be tested by clang-tidy
35 | headers = []
36 | for path in walk_files(basepath):
37 | filetypes = (".h",)
38 | ext = os.path.splitext(path)[1]
39 | if ext in filetypes:
40 | path = os.path.relpath(path, root_path)
41 | include_p = path.replace(os.path.sep, "/")
42 | headers.append(f'#include "{include_p}"')
43 | headers.sort()
44 | headers.append("")
45 | content = "\n".join(headers)
46 | p = Path(temp_header_file)
47 | p.parent.mkdir(exist_ok=True)
48 | p.write_text(content, encoding="utf-8")
49 |
50 |
51 | def walk_files(path):
52 | for root, _, files in os.walk(path):
53 | for name in files:
54 | yield os.path.join(root, name)
55 |
56 |
57 | def get_output(*args):
58 | with subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE) as proc:
59 | output, _ = proc.communicate()
60 | return output.decode("utf-8")
61 |
62 |
63 | def get_err(*args):
64 | with subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE) as proc:
65 | _, err = proc.communicate()
66 | return err.decode("utf-8")
67 |
68 |
69 | def splitlines_no_ends(string):
70 | return [s.strip() for s in string.splitlines()]
71 |
72 |
73 | def changed_files(branch="dev"):
74 | check_remotes = ["upstream", "origin"]
75 | check_remotes.extend(splitlines_no_ends(get_output("git", "remote")))
76 | for remote in check_remotes:
77 | command = ["git", "merge-base", f"refs/remotes/{remote}/{branch}", "HEAD"]
78 | try:
79 | merge_base = splitlines_no_ends(get_output(*command))[0]
80 | break
81 | # pylint: disable=bare-except
82 | except: # noqa: E722
83 | pass
84 | else:
85 | raise ValueError("Git not configured")
86 | command = ["git", "diff", merge_base, "--name-only"]
87 | changed = splitlines_no_ends(get_output(*command))
88 | changed = [os.path.relpath(f, os.getcwd()) for f in changed]
89 | changed.sort()
90 | return changed
91 |
92 |
93 | def filter_changed(files):
94 | changed = changed_files()
95 | files = [f for f in files if f in changed]
96 | print("Changed files:")
97 | if not files:
98 | print(" No changed files!")
99 | for c in files:
100 | print(f" {c}")
101 | return files
102 |
103 |
104 | def filter_grep(files, value):
105 | matched = []
106 | for file in files:
107 | with open(file, encoding="utf-8") as handle:
108 | contents = handle.read()
109 | if value in contents:
110 | matched.append(file)
111 | return matched
112 |
113 |
114 | def git_ls_files(patterns=None):
115 | command = ["git", "ls-files", "-s"]
116 | if patterns is not None:
117 | command.extend(patterns)
118 | with subprocess.Popen(command, stdout=subprocess.PIPE) as proc:
119 | output, _ = proc.communicate()
120 | lines = [x.split() for x in output.decode("utf-8").splitlines()]
121 | return {s[3].strip(): int(s[0]) for s in lines}
122 |
123 |
124 | def load_idedata(environment):
125 | platformio_ini = Path(root_path) / "platformio.ini"
126 | temp_idedata = Path(temp_folder) / f"idedata-{environment}.json"
127 | changed = False
128 | if not platformio_ini.is_file() or not temp_idedata.is_file():
129 | changed = True
130 | elif platformio_ini.stat().st_mtime >= temp_idedata.stat().st_mtime:
131 | changed = True
132 |
133 | if "idf" in environment:
134 | # remove full sdkconfig when the defaults have changed so that it is regenerated
135 | default_sdkconfig = Path(root_path) / "sdkconfig.defaults"
136 | temp_sdkconfig = Path(temp_folder) / f"sdkconfig-{environment}"
137 |
138 | if not temp_sdkconfig.is_file():
139 | changed = True
140 | elif default_sdkconfig.stat().st_mtime >= temp_sdkconfig.stat().st_mtime:
141 | temp_sdkconfig.unlink()
142 | changed = True
143 |
144 | if not changed:
145 | return json.loads(temp_idedata.read_text())
146 |
147 | # ensure temp directory exists before running pio, as it writes sdkconfig to it
148 | Path(temp_folder).mkdir(exist_ok=True)
149 |
150 | stdout = subprocess.check_output(["pio", "run", "-t", "idedata", "-e", environment])
151 | match = re.search(r'{\s*".*}', stdout.decode("utf-8"))
152 | data = json.loads(match.group())
153 |
154 | temp_idedata.write_text(json.dumps(data, indent=2) + "\n")
155 | return data
156 |
157 |
158 | def get_binary(name: str, version: int) -> str:
159 |     binary_file = f"{name}-{version}"
160 |     try:
161 |         result = subprocess.run([binary_file, "-version"], capture_output=True)
162 |         if result.returncode == 0:
163 |             return binary_file
164 |     except FileNotFoundError:
165 |         pass
166 | binary_file = name
167 | try:
168 | result = subprocess.run(
169 | [binary_file, "-version"], text=True, capture_output=True
170 | )
171 | if result.returncode == 0 and (f"version {version}") in result.stdout:
172 | return binary_file
173 | raise FileNotFoundError(f"{name} not found")
174 |
175 | except FileNotFoundError as ex:
176 | print(
177 | f"""
178 | Oops. It looks like {name} is not installed. It should be available under venv/bin
179 | and in PATH after running in turn:
180 | script/setup
181 |     source venv/bin/activate
182 |
183 | Please confirm you can run "{name} -version" or "{name}-{version} -version"
184 | in your terminal and install
185 | {name} (v{version}) if necessary.
186 |
187 | Note you can also upload your code as a pull request on GitHub and use the CI
188 | check output to see what {name} would change.
189 | """
190 | )
191 | raise
192 |
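`git_ls_files` above parses `git ls-files -s`, whose records look like `<mode> <object> <stage>\t<path>`, keeping only mode and path. The same parse on canned input (hashes are illustrative):

```python
sample = "100644 a94a8fe5cc 0\tesphome/const.py\n100755 de9f2c7fd2 0\tscript/setup"
lines = [x.split() for x in sample.splitlines()]
print({s[3].strip(): int(s[0]) for s in lines})
# -> {'esphome/const.py': 100644, 'script/setup': 100755}
```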
--------------------------------------------------------------------------------
/script/lint-cpp:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | set -e
4 |
5 | cd "$(dirname "$0")/.."
6 |
7 | set -x
8 |
9 | script/clang-tidy "$@" --fix --all-headers
10 | script/clang-format "$@" -i
11 |
--------------------------------------------------------------------------------
/script/lint-python:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | from helpers import (
4 | styled,
5 | print_error_for_file,
6 | get_output,
7 | get_err,
8 | git_ls_files,
9 | filter_changed,
10 | )
11 | import argparse
12 | import colorama
13 | import os
14 | import re
15 | import sys
16 |
17 | curfile = None
18 |
19 |
20 | def print_error(file, lineno, msg):
21 | global curfile
22 |
23 | if curfile != file:
24 | print_error_for_file(file, None)
25 | curfile = file
26 |
27 | if lineno is not None:
28 | print(f"{styled(colorama.Style.BRIGHT, f'{file}:{lineno}:')} {msg}")
29 | else:
30 | print(f"{styled(colorama.Style.BRIGHT, f'{file}:')} {msg}")
31 |
32 |
33 | def main():
34 | colorama.init()
35 |
36 | parser = argparse.ArgumentParser()
37 | parser.add_argument(
38 | "files", nargs="*", default=[], help="files to be processed (regex on path)"
39 | )
40 | parser.add_argument(
41 | "-c", "--changed", action="store_true", help="Only run on changed files"
42 | )
43 | parser.add_argument(
44 | "-a",
45 | "--apply",
46 | action="store_true",
47 | help="Apply changes to files where possible",
48 | )
49 | args = parser.parse_args()
50 |
51 | files = []
52 | for path in git_ls_files():
53 | filetypes = (".py",)
54 | ext = os.path.splitext(path)[1]
55 | if ext in filetypes and path.startswith("esphome"):
56 | path = os.path.relpath(path, os.getcwd())
57 | files.append(path)
58 |     # Match against regexes given on the command line
59 | file_name_re = re.compile("|".join(args.files))
60 | files = [p for p in files if file_name_re.search(p)]
61 |
62 | if args.changed:
63 | files = filter_changed(files)
64 |
65 | files.sort()
66 | if not files:
67 | sys.exit(0)
68 |
69 | errors = 0
70 |
71 | cmd = ["black", "--verbose"] + ([] if args.apply else ["--check"]) + files
72 | print("Running black...")
73 | print()
74 | log = get_err(*cmd)
75 | for line in log.splitlines():
76 | WOULD_REFORMAT = "would reformat"
77 | if line.startswith(WOULD_REFORMAT):
78 | file_ = line[len(WOULD_REFORMAT) + 1 :]
79 | print_error(file_, None, "Please format this file with the black formatter")
80 | errors += 1
81 |
82 | cmd = ["flake8"] + files
83 | print()
84 | print("Running flake8...")
85 | print()
86 | log = get_output(*cmd)
87 | for line in log.splitlines():
88 | line = line.split(":", 4)
89 | if len(line) < 4:
90 | continue
91 | file_ = line[0]
92 | linno = line[1]
93 | msg = (":".join(line[3:])).strip()
94 | print_error(file_, linno, msg)
95 | errors += 1
96 |
97 | cmd = ["pylint", "-f", "parseable", "--persistent=n"] + files
98 | print()
99 | print("Running pylint...")
100 | print()
101 | log = get_output(*cmd)
102 | for line in log.splitlines():
103 | line = line.split(":", 3)
104 | if len(line) < 3:
105 | continue
106 | file_ = line[0]
107 | linno = line[1]
108 | msg = (":".join(line[2:])).strip()
109 | print_error(file_, linno, msg)
110 | errors += 1
111 |
112 | PYUPGRADE_TARGET = "--py39-plus"
113 | cmd = ["pyupgrade", PYUPGRADE_TARGET] + files
114 | print()
115 | print("Running pyupgrade...")
116 | print()
117 | log = get_err(*cmd)
118 | for line in log.splitlines():
119 | REWRITING = "Rewriting"
120 | if line.startswith(REWRITING):
121 | file_ = line[len(REWRITING) + 1 :]
122 | print_error(
123 | file_, None, f"Please run pyupgrade {PYUPGRADE_TARGET} on this file"
124 | )
125 | errors += 1
126 |
127 | sys.exit(errors)
128 |
129 |
130 | if __name__ == "__main__":
131 | main()
132 |
--------------------------------------------------------------------------------
/script/list-components.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | from pathlib import Path
3 | import sys
4 | import argparse
5 |
6 | from helpers import git_ls_files, changed_files
7 | from esphome.loader import get_component, get_platform
8 | from esphome.core import CORE
9 | from esphome.const import (
10 | KEY_CORE,
11 | KEY_TARGET_FRAMEWORK,
12 | KEY_TARGET_PLATFORM,
13 | PLATFORM_ESP32,
14 | PLATFORM_ESP8266,
15 | )
16 |
17 |
18 | def filter_component_files(path):
19 |     return path.startswith("esphome/components/") or path.startswith("tests/components/")
20 |
21 |
22 | def extract_component_names_array_from_files_array(files):
23 | components = []
24 | for file in files:
25 | file_parts = file.split("/")
26 | if len(file_parts) >= 4:
27 | component_name = file_parts[2]
28 | if component_name not in components:
29 | components.append(component_name)
30 | return components
31 |
32 |
33 | def add_item_to_components_graph(components_graph, parent, child):
34 | if not parent.startswith("__") and parent != child:
35 | if parent not in components_graph:
36 | components_graph[parent] = []
37 | if child not in components_graph[parent]:
38 | components_graph[parent].append(child)
39 |
40 |
41 | def create_components_graph():
42 | # The root directory of the repo
43 | root = Path(__file__).parent.parent
44 | components_dir = root / "esphome" / "components"
45 | # Fake some directory so that get_component works
46 | CORE.config_path = str(root)
47 | # Various configuration to capture different outcomes used by `AUTO_LOAD` function.
48 | TARGET_CONFIGURATIONS = [
49 | {KEY_TARGET_FRAMEWORK: None, KEY_TARGET_PLATFORM: None},
50 | {KEY_TARGET_FRAMEWORK: "arduino", KEY_TARGET_PLATFORM: None},
51 | {KEY_TARGET_FRAMEWORK: "esp-idf", KEY_TARGET_PLATFORM: None},
52 | {KEY_TARGET_FRAMEWORK: None, KEY_TARGET_PLATFORM: PLATFORM_ESP32},
53 | ]
54 | CORE.data[KEY_CORE] = TARGET_CONFIGURATIONS[0]
55 |
56 | components_graph = {}
57 |
58 | for path in components_dir.iterdir():
59 | if not path.is_dir():
60 | continue
61 | if not (path / "__init__.py").is_file():
62 | continue
63 | name = path.name
64 | comp = get_component(name)
65 | if comp is None:
66 | print(
67 | f"Cannot find component {name}. Make sure current path is pip installed ESPHome"
68 | )
69 | sys.exit(1)
70 |
71 | for dependency in comp.dependencies:
72 | add_item_to_components_graph(
73 | components_graph, dependency.split(".")[0], name
74 | )
75 |
76 | for target_config in TARGET_CONFIGURATIONS:
77 | CORE.data[KEY_CORE] = target_config
78 | for auto_load in comp.auto_load:
79 | add_item_to_components_graph(components_graph, auto_load, name)
80 | # restore config
81 | CORE.data[KEY_CORE] = TARGET_CONFIGURATIONS[0]
82 |
83 | for platform_path in path.iterdir():
84 | platform_name = platform_path.stem
85 | platform = get_platform(platform_name, name)
86 | if platform is None:
87 | continue
88 |
89 | add_item_to_components_graph(components_graph, platform_name, name)
90 |
91 | for dependency in platform.dependencies:
92 | add_item_to_components_graph(
93 | components_graph, dependency.split(".")[0], name
94 | )
95 |
96 | for target_config in TARGET_CONFIGURATIONS:
97 | CORE.data[KEY_CORE] = target_config
98 | for auto_load in platform.auto_load:
99 | add_item_to_components_graph(components_graph, auto_load, name)
100 | # restore config
101 | CORE.data[KEY_CORE] = TARGET_CONFIGURATIONS[0]
102 |
103 | return components_graph
104 |
105 |
106 | def find_children_of_component(components_graph, component_name, depth=0):
107 | if component_name not in components_graph:
108 | return []
109 |
110 | children = []
111 |
112 | for child in components_graph[component_name]:
113 | children.append(child)
114 | if depth < 10:
115 | children.extend(
116 | find_children_of_component(components_graph, child, depth + 1)
117 | )
118 | # Remove duplicate values
119 | return list(set(children))
120 |
121 |
122 | def main():
123 | parser = argparse.ArgumentParser()
124 | parser.add_argument(
125 | "-c", "--changed", action="store_true", help="Only run on changed files"
126 | )
127 | parser.add_argument(
128 | "-b", "--branch", help="Branch to compare changed files against"
129 | )
130 | args = parser.parse_args()
131 |
132 | if args.branch and not args.changed:
133 | parser.error("--branch requires --changed")
134 |
135 | files = git_ls_files()
136 | files = filter(filter_component_files, files)
137 |
138 | if args.changed:
139 | if args.branch:
140 | changed = changed_files(args.branch)
141 | else:
142 | changed = changed_files()
143 | files = [f for f in files if f in changed]
144 |
145 | components = extract_component_names_array_from_files_array(files)
146 |
147 | if args.changed:
148 | components_graph = create_components_graph()
149 |
150 | all_changed_components = components.copy()
151 | for c in components:
152 | all_changed_components.extend(
153 | find_children_of_component(components_graph, c)
154 | )
155 | # Remove duplicate values
156 | all_changed_components = list(set(all_changed_components))
157 |
158 | for c in sorted(all_changed_components):
159 | print(c)
160 | else:
161 | for c in sorted(components):
162 | print(c)
163 |
164 |
165 | if __name__ == "__main__":
166 | main()
167 |
--------------------------------------------------------------------------------
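
The script above builds a reverse-dependency graph (each key maps a component to the components that depend on it or auto-load it) and then walks that graph to collect everything transitively affected by a change. A minimal sketch of the same traversal idea, using an invented graph and helper name for illustration:

    # Reverse-dependency graph: "wifi" is pulled in by "ota" and "api", etc.
    graph = {"wifi": ["ota", "api"], "api": ["voice_assistant"]}

    def children(graph, name, depth=0):
        found = []
        for child in graph.get(name, []):
            found.append(child)
            if depth < 10:  # same depth cap as above, guarding against cycles
                found.extend(children(graph, child, depth + 1))
        return list(set(found))  # de-duplicate; order is not significant

    print(sorted(children(graph, "wifi")))  # ['api', 'ota', 'voice_assistant']
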
/script/platformio_install_deps.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # This script preinstalls all the PlatformIO libraries, platforms,
3 | # and tools listed in platformio.ini into PlatformIO's global storage
4 |
5 | import argparse
6 | import configparser
7 | import subprocess
8 |
9 | config = configparser.ConfigParser(inline_comment_prefixes=(";",))
10 |
11 | parser = argparse.ArgumentParser(description="Preinstall PlatformIO packages from platformio.ini into the global storage")
12 | parser.add_argument("file", help="Path to platformio.ini", nargs=1)
13 | parser.add_argument("-l", "--libraries", help="Install libraries", action="store_true")
14 | parser.add_argument("-p", "--platforms", help="Install platforms", action="store_true")
15 | parser.add_argument("-t", "--tools", help="Install tools", action="store_true")
16 |
17 | args = parser.parse_args()
18 |
19 | config.read(args.file)
20 |
21 |
22 | libs = []
23 | tools = []
24 | platforms = []
25 | # Extract from every lib_deps key in all sections
26 | for section in config.sections():
27 | conf = config[section]
28 | if "lib_deps" in conf and args.libraries:
29 | for lib_dep in conf["lib_deps"].splitlines():
30 | if not lib_dep:
31 | # Empty line or comment
32 | continue
33 | if lib_dep.startswith("${"):
34 | # Extending from another section
35 | continue
36 | if "@" not in lib_dep:
37 | # No version pinned, this is an internal lib
38 | continue
39 | libs.append("-l")
40 | libs.append(lib_dep)
41 | if "platform" in conf and args.platforms:
42 | platforms.append("-p")
43 | platforms.append(conf["platform"])
44 | if "platform_packages" in conf and args.tools:
45 | for tool in conf["platform_packages"].splitlines():
46 | if not tool:
47 | # Empty line or comment
48 | continue
49 | if tool.startswith("${"):
50 | # Extending from another section
51 | continue
52 |         if "https://github.com" in tool:  # strip the "name@" prefix, keep only the Git URL
53 |             split = tool.find("@")
54 |             tool = tool[split + 1 :]
55 | tools.append("-t")
56 | tools.append(tool)
57 |
58 | subprocess.check_call(["platformio", "pkg", "install", "-g", *libs, *platforms, *tools])
59 |
--------------------------------------------------------------------------------
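
For a platformio.ini such as the invented fragment below, the script collects every version-pinned lib_deps entry (skipping blank lines, ${...} section extensions, and unpinned internal libraries), plus the platform and platform_packages values, and passes them all to a single global `platformio pkg install -g` call. A rough sketch of just the lib_deps filtering:

    import configparser

    ini = """
    [env:demo]
    lib_deps =
        bblanchon/ArduinoJson@6.18.5
        ${common.lib_deps}
        internal_helper
    """
    config = configparser.ConfigParser(inline_comment_prefixes=(";",))
    config.read_string(ini)

    libs = []
    for section in config.sections():
        for dep in config[section].get("lib_deps", "").splitlines():
            # Keep only version-pinned ("@") external dependencies
            if not dep or dep.startswith("${") or "@" not in dep:
                continue
            libs += ["-l", dep]
    print(libs)  # ['-l', 'bblanchon/ArduinoJson@6.18.5']
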
/script/quicklint:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | set -e
4 |
5 | cd "$(dirname "$0")/.."
6 |
7 | set -x
8 |
9 | script/ci-custom.py -c
10 | script/lint-python -c
11 | script/lint-cpp -c
12 |
--------------------------------------------------------------------------------
/script/run-in-env.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env sh
2 | set -eu
3 |
4 | my_path=$(git rev-parse --show-toplevel)
5 |
6 | for venv in venv .venv .; do
7 | if [ -f "${my_path}/${venv}/bin/activate" ]; then
8 | . "${my_path}/${venv}/bin/activate"
9 | break
10 | fi
11 | done
12 |
13 | exec "$@"
14 |
--------------------------------------------------------------------------------
/script/setup:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | # Set up ESPHome dev environment
3 |
4 | set -e
5 |
6 | cd "$(dirname "$0")/.."
7 | location="venv/bin/activate"
8 | if [ -z "$DEVCONTAINER" ] && [ -z "$VIRTUAL_ENV" ] && [ -z "$ESPHOME_NO_VENV" ]; then
9 | python3 -m venv venv
10 | if [ -f venv/Scripts/activate ]; then
11 | location="venv/Scripts/activate"
12 | fi
13 |     source "$location"
14 | fi
15 |
16 | pip3 install -r requirements.txt -r requirements_optional.txt -r requirements_test.txt -r requirements_dev.txt
17 | pip3 install setuptools wheel
18 | pip3 install -e ".[dev,test,displays]" --config-settings editable_mode=compat
19 |
20 | pre-commit install
21 |
22 | script/platformio_install_deps.py platformio.ini --libraries --tools --platforms
23 |
24 | echo
25 | echo
26 | echo "Virtual environment created. Run 'source $location' to use it."
27 |
--------------------------------------------------------------------------------
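
One non-obvious detail in the setup script above: the venv activate script lives under bin/ in POSIX venvs but under Scripts/ in Windows venvs, so the script defaults to venv/bin/activate and switches to venv/Scripts/activate when that path exists. The same check in a few lines of Python (paths follow the standard venv layout):

    from pathlib import Path

    venv = Path("venv")
    # Windows venvs use Scripts/, POSIX venvs use bin/
    subdir = "Scripts" if (venv / "Scripts" / "activate").is_file() else "bin"
    print(venv / subdir / "activate")
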
/script/sync-device_class.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | import re
4 |
5 | # pylint: disable=import-error
6 | from homeassistant.components.binary_sensor import BinarySensorDeviceClass
7 | from homeassistant.components.button import ButtonDeviceClass
8 | from homeassistant.components.cover import CoverDeviceClass
9 | from homeassistant.components.event import EventDeviceClass
10 | from homeassistant.components.number import NumberDeviceClass
11 | from homeassistant.components.sensor import SensorDeviceClass
12 | from homeassistant.components.switch import SwitchDeviceClass
13 | from homeassistant.components.update import UpdateDeviceClass
14 | from homeassistant.components.valve import ValveDeviceClass
15 |
16 | # pylint: enable=import-error
17 |
18 | BLOCKLIST = (
19 | # requires special support on HA side
20 | "enum",
21 | )
22 |
23 | DOMAINS = {
24 | "binary_sensor": BinarySensorDeviceClass,
25 | "button": ButtonDeviceClass,
26 | "cover": CoverDeviceClass,
27 | "event": EventDeviceClass,
28 | "number": NumberDeviceClass,
29 | "sensor": SensorDeviceClass,
30 | "switch": SwitchDeviceClass,
31 | "update": UpdateDeviceClass,
32 | "valve": ValveDeviceClass,
33 | }
34 |
35 |
36 | def sub(path, pattern, repl):
37 | with open(path, encoding="utf-8") as handle:
38 | content = handle.read()
39 | content = re.sub(pattern, repl, content, flags=re.MULTILINE)
40 | with open(path, "w", encoding="utf-8") as handle:
41 | handle.write(content)
42 |
43 |
44 | def main():
45 | classes = {"EMPTY": ""}
46 | allowed = {}
47 |
48 | for domain, enum in DOMAINS.items():
49 | available = {
50 | cls.value.upper(): cls.value for cls in enum if cls.value not in BLOCKLIST
51 | }
52 |
53 | classes.update(available)
54 | allowed[domain] = list(available.keys()) + ["EMPTY"]
55 |
56 | # replace constant defines in const.py
57 | out = ""
58 | for cls in sorted(classes):
59 | out += f'DEVICE_CLASS_{cls.upper()} = "{classes[cls]}"\n'
60 | sub("esphome/const.py", '(DEVICE_CLASS_\\w+ = "\\w*"\r?\n)+', out)
61 |
62 | for domain in sorted(allowed):
63 | # replace imports
64 | out = ""
65 | for item in sorted(allowed[domain]):
66 | out += f" DEVICE_CLASS_{item.upper()},\n"
67 |
68 | sub(
69 | f"esphome/components/{domain}/__init__.py",
70 | "( DEVICE_CLASS_\\w+,\r?\n)+",
71 | out,
72 | )
73 |
74 |
75 | if __name__ == "__main__":
76 | main()
77 |
--------------------------------------------------------------------------------
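
The sub() helper above relies on both generated regions being contiguous runs of uniformly shaped lines, so a single (...)+ pattern can match the whole run and replace it in one re.sub() call. A toy demonstration on an in-memory string (the constants here are invented):

    import re

    content = (
        'DEVICE_CLASS_DOOR = "door"\n'
        'DEVICE_CLASS_STALE = "stale"\n'
        "OTHER_CONSTANT = 1\n"
    )
    new_block = 'DEVICE_CLASS_DOOR = "door"\nDEVICE_CLASS_MOTION = "motion"\n'
    # The (...)+ group swallows the whole contiguous run of defines at once
    result = re.sub('(DEVICE_CLASS_\\w+ = "\\w*"\r?\n)+', new_block, content, flags=re.MULTILINE)
    print(result)  # the run is replaced wholesale; OTHER_CONSTANT is untouched
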
/script/test:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | set -e
4 |
5 | cd "$(dirname "$0")/.."
6 |
7 | set -x
8 |
9 | for f in ./tests/test*.yaml; do
10 |   esphome compile "$f"
11 | done
12 |
--------------------------------------------------------------------------------
/script/test_build_components:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | set -e
4 |
5 | # Parse parameter:
6 | # - `e` - Parameter for `esphome` command. Default `compile`. Common alternative is `config`.
7 | # - `c` - Component folder name to test. Default `*`.
8 | esphome_command="compile"
9 | target_component="*"
10 | while getopts e:c: flag
11 | do
12 | case $flag in
13 | e) esphome_command=${OPTARG};;
14 | c) target_component=${OPTARG};;
15 |         \?) echo "Usage: $0 [-e <esphome command>] [-c <component name>]" 1>&2; exit 1;;
16 | esac
17 | done
18 |
19 | cd "$(dirname "$0")/.."
20 |
21 | if ! [ -d "./tests/test_build_components/build" ]; then
22 | mkdir ./tests/test_build_components/build
23 | fi
24 |
25 | start_esphome() {
26 | # create dynamic yaml file in `build` folder.
27 | # `./tests/test_build_components/build/[target_component].[test_name].[target_platform_with_version].yaml`
28 | component_test_file="./tests/test_build_components/build/$target_component.$test_name.$target_platform_with_version.yaml"
29 |
30 |     cp "$target_platform_file" "$component_test_file"
31 | if [[ "$OSTYPE" == "darwin"* ]]; then
32 | # macOS sed is...different
33 | sed -i '' "s!\$component_test_file!../../.$f!g" $component_test_file
34 | else
35 | sed -i "s!\$component_test_file!../../.$f!g" $component_test_file
36 | fi
37 |
38 | # Start esphome process
39 | echo "> [$target_component] [$test_name] [$target_platform_with_version]"
40 | set -x
41 | # TODO: Validate escape of Command line substitution value
42 | python -m esphome -s component_name $target_component -s component_dir ../../components/$target_component -s test_name $test_name -s target_platform $target_platform $esphome_command $component_test_file
43 | { set +x; } 2>/dev/null
44 | }
45 |
46 | # Find all test yaml files.
47 | # - `./tests/components/[target_component]/[test_name].[target_platform].yaml`
48 | # - `./tests/components/[target_component]/[test_name].all.yaml`
49 | for f in ./tests/components/$target_component/*.*.yaml; do
50 | [ -f "$f" ] || continue
51 | IFS='/' read -r -a folder_name <<< "$f"
52 | target_component="${folder_name[3]}"
53 |
54 | IFS='.' read -r -a file_name <<< "${folder_name[4]}"
55 | test_name="${file_name[0]}"
56 | target_platform="${file_name[1]}"
57 | file_name_parts=${#file_name[@]}
58 |
59 |     if [ "$target_platform" = "all" ] || [ "$file_name_parts" -eq 2 ]; then
60 | # Test has *not* defined a specific target platform. Need to run tests for all possible target platforms.
61 |
62 | for target_platform_file in ./tests/test_build_components/build_components_base.*.yaml; do
63 | IFS='/' read -r -a folder_name <<< "$target_platform_file"
64 | IFS='.' read -r -a file_name <<< "${folder_name[3]}"
65 | target_platform="${file_name[1]}"
66 |
67 | start_esphome
68 | done
69 |
70 | else
71 | # Test has defined a specific target platform.
72 |
73 | # Validate we have a base test yaml for selected platform.
74 | # The target_platform is sourced from the following location.
75 | # 1. `./tests/test_build_components/build_components_base.[target_platform].yaml`
76 | # 2. `./tests/test_build_components/build_components_base.[target_platform]-ard.yaml`
77 | target_platform_file="./tests/test_build_components/build_components_base.$target_platform.yaml"
78 | if ! [ -f "$target_platform_file" ]; then
79 | echo "No base test file [./tests/test_build_components/build_components_base.$target_platform.yaml] for component test [$f] found."
80 | exit 1
81 | fi
82 |
83 | for target_platform_file in ./tests/test_build_components/build_components_base.$target_platform*.yaml; do
84 | # trim off "./tests/test_build_components/build_components_base." prefix
85 | target_platform_with_version=${target_platform_file:52}
86 | # ...now remove suffix starting with "." leaving just the test target hardware and software platform (possibly with version)
87 | # For example: "esp32-s3-idf-50"
88 | target_platform_with_version=${target_platform_with_version%.*}
89 | start_esphome
90 | done
91 | fi
92 | done
93 |
--------------------------------------------------------------------------------
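
Everything the loop above needs is derived from the test file path itself: the fourth '/'-field is the component, and the file name's first two '.'-fields are the test name and target platform (a missing platform, or the literal "all", means "build on every base platform"). The same parsing in a short Python sketch (the path is a made-up example):

    path = "./tests/components/wifi/test.esp32-idf.yaml"
    parts = path.split("/")           # ['.', 'tests', 'components', 'wifi', 'test.esp32-idf.yaml']
    component = parts[3]              # 'wifi'
    name = parts[4].split(".")        # ['test', 'esp32-idf', 'yaml']
    test_name = name[0]
    platform = name[1] if len(name) > 2 and name[1] != "all" else "all"
    print(component, test_name, platform)  # wifi test esp32-idf
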
/script/unit_test:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | set -e
4 |
5 | cd "$(dirname "$0")/.."
6 |
7 | set -x
8 |
9 | pytest tests/unit_tests
10 |
--------------------------------------------------------------------------------