├── .vscode ├── extensions.json └── settings.json ├── requirements.txt ├── src ├── settings.ts ├── index.ts ├── python │ ├── startKasaApi.py │ └── pythonChecker.ts ├── taskQueue.ts ├── accessoryInformation.ts ├── devices │ ├── create.ts │ ├── kasaDevices.ts │ ├── index.ts │ └── deviceManager.ts ├── config.ts └── utils.ts ├── .github ├── ISSUE_TEMPLATE │ ├── config.yml │ └── issue.yml ├── labels-reference.md ├── actions │ ├── ensure-jq │ │ └── action.yml │ ├── clear-labels │ │ └── action.yml │ ├── detect-fork │ │ └── action.yml │ ├── serialize-labels │ │ └── action.yml │ └── sticky-comment │ │ └── action.yml ├── CODEOWNERS ├── workflows │ ├── shared-codeql.yml │ ├── stale.yml │ ├── beta-to-stable.yml │ ├── shared-discord-notify.yml │ ├── shared-build-lint-test.yml │ ├── release-flow.yml │ ├── release-publish.yml │ ├── shared-dependabot-auto-merge.yml │ ├── label-and-validate-issue.yml │ └── label-and-validate-pr.yml ├── labeler.yml ├── pull_request_template.md ├── dependabot.yml ├── scripts │ ├── release_publish.py │ ├── beta_to_stable.py │ ├── pr_manager.py │ ├── issue_manager.py │ ├── common.py │ └── discord_tools.py └── copilot-instructions.md ├── nodemon.json ├── copyPythonFiles.js ├── tsconfig.json ├── LICENSE ├── config.sample.json ├── CONTRIBUTING.md ├── eslint.config.mjs ├── package.json ├── .gitignore ├── .npmignore ├── config.schema.json └── CHANGELOG.md /.vscode/extensions.json: -------------------------------------------------------------------------------- 1 | { 2 | "recommendations": [ 3 | "dbaeumer.vscode-eslint" 4 | ] 5 | } -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | anyio==4.10.0 2 | python-kasa==0.10.2 3 | quart==0.20.0 4 | tzdata==2025.2 5 | uvicorn==0.36.0 -------------------------------------------------------------------------------- /src/settings.ts: 
-------------------------------------------------------------------------------- 1 | export const PLATFORM_NAME = 'KasaPython'; 2 | export const PLUGIN_NAME = 'homebridge-kasa-python'; -------------------------------------------------------------------------------- /.vscode/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "files.eol": "\n", 3 | "editor.codeActionsOnSave": { 4 | "source.fixAll.eslint": "explicit" 5 | }, 6 | "editor.rulers": [ 140 ], 7 | "eslint.enable": true 8 | } -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/config.yml: -------------------------------------------------------------------------------- 1 | blank_issues_enabled: false 2 | contact_links: 3 | - name: Homebridge Discord Community 4 | url: https://discord.gg/kqNCe2D 5 | about: Ask your questions in the kasa-python channel 6 | -------------------------------------------------------------------------------- /nodemon.json: -------------------------------------------------------------------------------- 1 | { 2 | "watch": [ 3 | "src" 4 | ], 5 | "ext": "ts", 6 | "ignore": [], 7 | "exec": "tsc && homebridge -I -D", 8 | "signal": "SIGTERM", 9 | "env": { 10 | "NODE_OPTIONS": "--trace-warnings" 11 | } 12 | } -------------------------------------------------------------------------------- /src/index.ts: -------------------------------------------------------------------------------- 1 | import type { API } from 'homebridge'; 2 | 3 | import { PLATFORM_NAME } from './settings.js'; 4 | import KasaPythonPlatform from './platform.js'; 5 | 6 | export default (api: API): void => { 7 | api.registerPlatform(PLATFORM_NAME, KasaPythonPlatform); 8 | }; -------------------------------------------------------------------------------- /.github/labels-reference.md: -------------------------------------------------------------------------------- 1 | # Maintainer Label Reference 2 | 3 | Core 
classification: 4 | - bug 5 | - enhancement 6 | - breaking-change 7 | - question 8 | - docs 9 | - dependency 10 | - fix 11 | 12 | Auxiliary: 13 | - needs-info 14 | - stale 15 | - workflow 16 | - branding 17 | 18 | Canonical forms only. Do NOT add variants like "breaking change" (with space) or "documentation". -------------------------------------------------------------------------------- /.github/actions/ensure-jq/action.yml: -------------------------------------------------------------------------------- 1 | name: Ensure jq 2 | description: Ensure jq is Installed 3 | runs: 4 | using: composite 5 | steps: 6 | - name: Ensure jq 7 | id: ensure-jq 8 | shell: bash 9 | run: | 10 | set -euo pipefail 11 | if ! command -v jq >/dev/null 2>&1; then 12 | sudo apt-get update 13 | sudo apt-get install -y jq 14 | fi 15 | -------------------------------------------------------------------------------- /copyPythonFiles.js: -------------------------------------------------------------------------------- 1 | import fs from 'fs'; 2 | import path from 'path'; 3 | import { fileURLToPath } from 'url'; 4 | 5 | const __filename = fileURLToPath(import.meta.url); 6 | const __dirname = path.dirname(__filename); 7 | const srcDir = path.join(__dirname, 'src', 'python'); 8 | const destDir = path.join(__dirname, 'dist', 'python'); 9 | 10 | if (!fs.existsSync(destDir)) { 11 | fs.mkdirSync(destDir, { recursive: true }); 12 | } 13 | 14 | fs.readdirSync(srcDir).forEach(file => { 15 | if (path.extname(file) === '.py') { 16 | fs.copyFileSync(path.join(srcDir, file), path.join(destDir, file)); 17 | } 18 | }); -------------------------------------------------------------------------------- /.github/actions/clear-labels/action.yml: -------------------------------------------------------------------------------- 1 | name: Clear Labels 2 | description: Replace labels on an issue or PR with an empty set 3 | inputs: 4 | number: 5 | description: Issue or PR number 6 | required: true 7 | runs: 8 | using: 
composite 9 | steps: 10 | - name: Clear Labels 11 | id: clear 12 | shell: bash 13 | env: 14 | NUMBER: ${{ inputs.number }} 15 | run: | 16 | set -euo pipefail 17 | printf '{"labels":[]}\n' > payload.json 18 | gh api --method PUT -H "Accept: application/vnd.github+json" repos/"$GITHUB_REPOSITORY"/issues/"$NUMBER"/labels --input payload.json 19 | -------------------------------------------------------------------------------- /.github/actions/detect-fork/action.yml: -------------------------------------------------------------------------------- 1 | name: Detect Fork Context 2 | description: Output Fork Detection 3 | outputs: 4 | is_fork: 5 | description: True if the Repository is a Fork, false if it is the Main Repository 6 | value: ${{ steps.detect-fork-context.outputs.is_fork }} 7 | runs: 8 | using: composite 9 | steps: 10 | - name: Detect Fork Context 11 | id: detect-fork-context 12 | shell: bash 13 | run: | 14 | set -euo pipefail 15 | if [ "${GITHUB_REPOSITORY}" != "ZeliardM/homebridge-kasa-python" ]; then 16 | echo "is_fork=true" >> "$GITHUB_OUTPUT" 17 | else 18 | echo "is_fork=false" >> "$GITHUB_OUTPUT" 19 | fi 20 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "allowSyntheticDefaultImports": true, 4 | "declaration": true, 5 | "esModuleInterop": true, 6 | "forceConsistentCasingInFileNames": true, 7 | "outDir": "dist", 8 | "rootDir": "src", 9 | "lib": [ 10 | "DOM", 11 | "ES2022" 12 | ], 13 | "module": "Node16", 14 | "moduleResolution": "node16", 15 | "resolveJsonModule": true, 16 | "sourceMap": true, 17 | "strict": true, 18 | "target": "ES2022", 19 | "types": ["node"] 20 | }, 21 | "include": [ 22 | "eslint.config.mjs", 23 | "homebridge-ui", 24 | "src/**/*.ts" 25 | ], 26 | "exclude": [ 27 | "dist", 28 | "node_modules" 29 | ] 30 | } 31 | 
-------------------------------------------------------------------------------- /.github/actions/serialize-labels/action.yml: -------------------------------------------------------------------------------- 1 | name: Serialize Labels 2 | description: Writes provided labels JSON to a file and exposes its path as an output 3 | inputs: 4 | labels_json: 5 | description: JSON array of label names 6 | required: true 7 | outputs: 8 | path: 9 | description: Path to the written labels JSON file 10 | value: ${{ steps.serialize.outputs.path }} 11 | runs: 12 | using: composite 13 | steps: 14 | - name: Serialize Labels 15 | id: serialize 16 | shell: bash 17 | run: | 18 | set -euo pipefail 19 | mkdir -p .github 20 | echo '${{ inputs.labels_json }}' > .github/labels.json 21 | cat .github/labels.json | jq . > /dev/null 22 | echo "path=.github/labels.json" >> "$GITHUB_OUTPUT" 23 | -------------------------------------------------------------------------------- /.github/CODEOWNERS: -------------------------------------------------------------------------------- 1 | # Default owner for everything 2 | * @ZeliardM 3 | 4 | # Core source and configuration 5 | /src/ @ZeliardM 6 | /config.schema.json @ZeliardM 7 | /package.json @ZeliardM 8 | /requirements.txt @ZeliardM 9 | /CHANGELOG.md @ZeliardM 10 | /README.md @ZeliardM 11 | /CONTRIBUTING.md @ZeliardM 12 | 13 | # GitHub configuration and automation 14 | /.github/workflows/ @ZeliardM 15 | /.github/scripts/ @ZeliardM 16 | /.github/ISSUE_TEMPLATE/ @ZeliardM 17 | /.github/labeler.yml @ZeliardM 18 | /.github/dependabot.yml @ZeliardM 19 | /.github/copilot-instructions.md @ZeliardM 20 | -------------------------------------------------------------------------------- /.github/workflows/shared-codeql.yml: -------------------------------------------------------------------------------- 1 | --- 2 | name: CodeQL 3 | 4 | on: 5 | workflow_call: {} 6 | 7 | permissions: 8 | contents: read 9 | security-events: write 10 | 11 | jobs: 12 | analyze: 13 | 
name: Analyze 14 | runs-on: ubuntu-latest 15 | strategy: 16 | fail-fast: false 17 | matrix: 18 | language: [javascript, python] 19 | steps: 20 | - name: Checkout Actions 21 | id: checkout 22 | uses: actions/checkout@v6 23 | with: 24 | fetch-depth: 0 25 | 26 | - name: Initialize CodeQL 27 | id: init 28 | uses: github/codeql-action/init@v4 29 | with: 30 | languages: ${{ matrix.language }} 31 | 32 | - name: Autobuild CodeQL 33 | id: autobuild 34 | uses: github/codeql-action/autobuild@v4 35 | 36 | - name: Analyze CodeQL 37 | id: analyze 38 | uses: github/codeql-action/analyze@v4 39 | with: 40 | category: "/language:${{ matrix.language }}" 41 | -------------------------------------------------------------------------------- /.github/labeler.yml: -------------------------------------------------------------------------------- 1 | docs: 2 | - changed-files: 3 | - any-glob-to-any-file: '**/*.md' 4 | 5 | branding: 6 | - changed-files: 7 | - any-glob-to-any-file: branding/** 8 | 9 | enhancement: 10 | - any: 11 | - changed-files: 12 | - any-glob-to-any-file: 'src/**/*' 13 | - any-glob-to-any-file: 'config.schema.json' 14 | - head-branch: 15 | - '^feature/.*' 16 | - '^feat/.*' 17 | 18 | dependency: 19 | - any: 20 | - changed-files: 21 | - any-glob-to-any-file: 'package.json' 22 | - any-glob-to-any-file: 'package-lock.json' 23 | - any-glob-to-any-file: 'requirements.txt' 24 | 25 | workflow: 26 | - changed-files: 27 | - any-glob-to-any-file: '.github/**' 28 | 29 | fix: 30 | - any: 31 | - head-branch: 32 | - '^fix/.*' 33 | - '^hotfix/.*' 34 | - '.*/fix-.*' 35 | 36 | bug: 37 | - any: 38 | - head-branch: 39 | - '^bug/.*' 40 | - '.*/bug-.*' 41 | 42 | breaking-change: 43 | - head-branch: 44 | - '.*[bB]reaking.*' 45 | 46 | beta: 47 | - base-branch: 48 | - 'beta' 49 | 50 | latest: 51 | - base-branch: 52 | - 'latest' 53 | -------------------------------------------------------------------------------- /LICENSE: 
-------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2024 Robert Louis Morren, Jr. 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /src/python/startKasaApi.py: -------------------------------------------------------------------------------- 1 | import os 2 | import sys 3 | import uvicorn 4 | 5 | def log(message: str, level: str = "INFO"): 6 | print(f"[Kasa API] {level}: {message}", file=sys.stdout if level != "ERROR" else sys.stderr) 7 | 8 | def start_api(port: int, hideHomeKitMatter: bool): 9 | try: 10 | log("Starting Kasa API...") 11 | 12 | os.environ["HIDE_HOMEKIT_MATTER"] = "true" if hideHomeKitMatter else "false" 13 | 14 | uvicorn.run( 15 | "kasaApi:app", 16 | host="0.0.0.0", 17 | port=port, 18 | loop="asyncio", 19 | workers=1, 20 | timeout_keep_alive=120, 21 | limit_concurrency=1000, 22 | ) 23 | except Exception as e: 24 | log(f"Failed to start Kasa API: {e}", level="ERROR") 25 | sys.exit(1) 26 | 27 | if __name__ == '__main__': 28 | try: 29 | port = int(sys.argv[1]) 30 | hideHomeKitMatter = sys.argv[2].lower() == "true" 31 | start_api(port, hideHomeKitMatter) 32 | except IndexError: 33 | log("Missing arguments: python startKasaApi.py ", level="ERROR") 34 | sys.exit(1) -------------------------------------------------------------------------------- /.github/actions/sticky-comment/action.yml: -------------------------------------------------------------------------------- 1 | name: Sticky Comment 2 | description: Upsert a sticky comment identified by a marker on issues/PRs 3 | inputs: 4 | number: 5 | description: Issue or PR number 6 | required: true 7 | marker: 8 | description: Marker string present in the sticky comment body 9 | required: true 10 | body_file: 11 | description: Path to a file containing the comment body 12 | required: true 13 | runs: 14 | using: composite 15 | steps: 16 | - name: Sticky Comment 17 | id: sticky 18 | shell: bash 19 | env: 20 | NUMBER: ${{ inputs.number }} 21 | MARK: ${{ inputs.marker }} 22 | run: | 23 | set -euo pipefail 24 | CID="$(gh api 
repos/"$GITHUB_REPOSITORY"/issues/"$NUMBER"/comments --paginate \ 25 | --jq '.[] | select(.body | contains(env.MARK)) | .id' | head -n1 || true)" 26 | BODY="$(cat "${{ inputs.body_file }}")" 27 | if [ -n "$CID" ]; then 28 | gh api repos/"$GITHUB_REPOSITORY"/issues/comments/"$CID" -X PATCH -f body="$BODY" 29 | else 30 | gh issue comment "$NUMBER" --body-file "${{ inputs.body_file }}" 31 | fi 32 | -------------------------------------------------------------------------------- /src/taskQueue.ts: -------------------------------------------------------------------------------- 1 | import type { Logging } from 'homebridge'; 2 | 3 | export class TaskQueue { 4 | private queue: (() => Promise)[] = []; 5 | private running: boolean = false; 6 | private log: Logging; 7 | private resolveEmptyQueue: (() => void) | null = null; 8 | 9 | constructor(log: Logging) { 10 | this.log = log; 11 | } 12 | 13 | public addTask(task: () => Promise): void { 14 | this.queue.push(task); 15 | this.processQueue(); 16 | } 17 | 18 | private async processQueue(): Promise { 19 | if (this.running) { 20 | return; 21 | } 22 | this.running = true; 23 | while (this.queue.length > 0) { 24 | const task = this.queue.shift(); 25 | if (task) { 26 | try { 27 | await task(); 28 | } catch (error) { 29 | this.log.error('Error processing task:', error); 30 | } 31 | } 32 | } 33 | this.running = false; 34 | if (this.resolveEmptyQueue) { 35 | this.resolveEmptyQueue(); 36 | this.resolveEmptyQueue = null; 37 | } 38 | } 39 | 40 | public async waitForEmptyQueue(): Promise { 41 | if (this.queue.length === 0 && !this.running) { 42 | return; 43 | } 44 | return new Promise((resolve) => { 45 | this.resolveEmptyQueue = resolve; 46 | }); 47 | } 48 | } -------------------------------------------------------------------------------- /config.sample.json: -------------------------------------------------------------------------------- 1 | { 2 | "bridge": { 3 | "name": "Homebridge", 4 | "username": "11:22:33:AA:BB:CC", 5 | "port": 
51999, 6 | "pin": "001-02-003" 7 | }, 8 | "description": "This is an example configuration file.", 9 | "platforms": [ 10 | { 11 | "platform": "KasaPython", 12 | "name": "KasaPython", 13 | "enableCredentials": true, 14 | "username": "Username", 15 | "password": "Password", 16 | "hideHomeKitMatter": true, 17 | "pollingInterval": 5, 18 | "discoveryPollingInterval": 300, 19 | "offlineInterval": 7, 20 | "waitTimeUpdate": 100, 21 | "pythonPath": "/usr/bin/python3", 22 | "advancedPythonLogging": false, 23 | "additionalBroadcasts": [ 24 | "192.168.1.255", 25 | "192.168.2.255" 26 | ], 27 | "manualDevices": [ 28 | { 29 | "host": "192.168.1.100", 30 | "alias": "Living Room Plug" 31 | }, 32 | { 33 | "host": "192.168.2.100", 34 | "alias": "Bedroom Power Strip" 35 | } 36 | ], 37 | "excludeMacAddresses": [ 38 | "AA:BB:CC:11:22:33", 39 | "CC:BB:AA:33:22:11" 40 | ], 41 | "includeMacAddresses": [ 42 | "AA:BB:CC:44:55:66", 43 | "CC:BB:AA:66:55:44" 44 | ] 45 | } 46 | ], 47 | "accessories": [] 48 | } -------------------------------------------------------------------------------- /.github/workflows/stale.yml: -------------------------------------------------------------------------------- 1 | --- 2 | name: Stale 3 | 4 | on: 5 | schedule: 6 | - cron: '30 1 * * *' 7 | 8 | permissions: 9 | issues: write 10 | pull-requests: write 11 | 12 | jobs: 13 | stale: 14 | name: Stale Issues and Pull Requests 15 | runs-on: ubuntu-latest 16 | steps: 17 | - name: Stale 18 | id: stale 19 | uses: actions/stale@v10 20 | with: 21 | repo-token: ${{ secrets.GITHUB_TOKEN }} 22 | days-before-issue-stale: 60 23 | days-before-issue-close: 7 24 | days-before-pr-stale: 30 25 | days-before-pr-close: 7 26 | stale-issue-label: stale 27 | stale-pr-label: stale 28 | exempt-issue-labels: 'pinned,security,enhancement,bug,breaking-change,needs-info' 29 | exempt-pr-labels: 'pinned,security,work-in-progress,breaking-change' 30 | exempt-all-pr-assignees: true 31 | exempt-all-issue-assignees: true 32 | operations-per-run: 50 
33 | stale-issue-message: | 34 | This issue is stale after 60 days of inactivity. It will close in 7 days unless updated. 35 | close-issue-message: | 36 | Closing stale issue due to inactivity. Re-open if still valid. 37 | stale-pr-message: | 38 | This pull request is stale after 30 days of inactivity. It will close in 7 days unless updated. 39 | close-pr-message: | 40 | Closing stale pull request due to inactivity. Re-open if still valid. 41 | -------------------------------------------------------------------------------- /.github/pull_request_template.md: -------------------------------------------------------------------------------- 1 | # Pull Request 2 | 3 | 12 | 13 | ## Summary 14 | 15 | 16 | ## Type (choose at least one classification) 17 | 18 | - [ ] fix (bug fix) 19 | - [ ] bug 20 | - [ ] enhancement / feature 21 | - [ ] docs 22 | - [ ] dependency 23 | - [ ] breaking-change 24 | - [ ] internal / workflow 25 | 26 | ## Details 27 | 28 | 29 | ## Testing 30 | 31 | - [ ] Build OK (`npm run build`) 32 | - [ ] Lint clean 33 | - [ ] Node import OK 34 | - [ ] Python import OK 35 | - [ ] Device(s) tested: 36 | - [ ] No unrelated changes 37 | 38 | ## Screenshots / Logs (optional) 39 | 40 | ## Breaking Change Explanation (REQUIRED if breaking-change label) 41 | Markers exactly: 42 | 43 | BREAKING_CHANGE_EXPLANATION_START 44 | 45 | BREAKING_CHANGE_EXPLANATION_END 46 | 47 | ## Checklist 48 | - [ ] Base branch is `beta` 49 | - [ ] Classification labels applied (see “Type” above) 50 | - [ ] Changelog impact understood 51 | - [ ] Docs updated where appropriate 52 | - [ ] Linked issues (if any): #123 -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | - package-ecosystem: "npm" 4 | directory: "/" 5 | schedule: 6 | interval: "weekly" 7 | day: "friday" 8 | time: "15:00" 9 | timezone: "America/New_York" 10 | 
target-branch: "beta" 11 | labels: ["dependency"] 12 | assignees: ["ZeliardM"] 13 | versioning-strategy: "increase" 14 | commit-message: 15 | prefix: "npm" 16 | prefix-development: "npm" 17 | include: "scope" 18 | groups: 19 | typescript-eslint: 20 | patterns: ["@typescript-eslint/*", "typescript", "ts-essentials"] 21 | eslint: 22 | patterns: ["@eslint/js", "eslint"] 23 | stylistic: 24 | patterns: ["@stylistic/eslint-plugin"] 25 | md5: 26 | patterns: ["@types/md5", "md5"] 27 | 28 | - package-ecosystem: "pip" 29 | directory: "/" 30 | schedule: 31 | interval: "weekly" 32 | day: "friday" 33 | time: "15:00" 34 | timezone: "America/New_York" 35 | target-branch: "beta" 36 | labels: ["dependency"] 37 | assignees: ["ZeliardM"] 38 | versioning-strategy: "increase" 39 | commit-message: 40 | prefix: "pip" 41 | prefix-development: "pip" 42 | include: "scope" 43 | groups: 44 | pydantic: 45 | patterns: ["pydantic", "pydantic_core", "pydantic-core"] 46 | crypt: 47 | patterns: ["cryptography", "chacha20poly1305-reuseable"] 48 | aio: 49 | patterns: ["aiosignal", "aiohttp"] 50 | 51 | - package-ecosystem: "github-actions" 52 | directory: "/" 53 | schedule: 54 | interval: "weekly" 55 | day: "friday" 56 | time: "15:00" 57 | timezone: "America/New_York" 58 | target-branch: "beta" 59 | assignees: ["ZeliardM"] 60 | commit-message: 61 | prefix: "ci" 62 | include: "scope" -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing 2 | 3 | Thank you for contributing to homebridge-kasa-python! 4 | 5 | ## Branch Model 6 | - latest: Stable release branch. 7 | - beta: Active development toward next release. 8 | - Feature branches: Create from beta (not latest) and open PRs targeting beta. 9 | 10 | ## Pull Requests 11 | 1. 
Install Python deps first: 12 | ```bash 13 | python -m pip install --upgrade pip 14 | python -m pip install -r requirements.txt 15 | ``` 16 | 2. Install Node deps: `npm ci` 17 | 3. Lint: `npm run lint` (must pass). 18 | 4. Build: `npm run build` 19 | 5. Sanity imports: 20 | ```bash 21 | node -e "import('./dist/index.js').then(()=>console.log('Node OK'))" 22 | python -c "import kasa; print('Python OK')" 23 | ``` 24 | 6. Label appropriately: enhancement, fix, breaking-change, docs, dependency. 25 | 7. One logical change per PR. 26 | 27 | ## Release Flow (Simplified) 28 | - Merge PR into beta -> CHANGELOG updated & draft/update of current beta automatically. 29 | - Unpublished beta.0 aggregates subsequent PRs (single tag). 30 | - After publishing a beta, new changes create beta.(N+1) drafts. 31 | - Breaking change after publish escalates to new major base beta.0 when allowed. 32 | - Manual conversion (workflow_dispatch) consolidates published betas into a stable draft. 33 | - Publishing stable adds finalization entry. 34 | 35 | ## Changelog & Bodies 36 | - vX.Y.Z-beta.N 37 | ``` 38 | 39 | ## Category 40 | - entry 41 | 42 | **Full Changelog**: compare/... 43 | ``` 44 | - Stable body omits “-beta.N”. 45 | 46 | ## Labels 47 | - breaking-change 48 | - enhancement / feature 49 | - fix / bug 50 | - docs / dependency 51 | 52 | ## Security / Quality 53 | - CodeQL runs weekly and on PRs. 54 | - Dependabot weekly updates target beta. 55 | 56 | ## Questions 57 | Open a support request issue or discussion. 
58 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/issue.yml: -------------------------------------------------------------------------------- 1 | name: New Issue 2 | description: File a Bug, Feature Request, Support Question, Breaking Change Proposal, Docs or Dependency Issue 3 | labels: [] 4 | body: 5 | - type: dropdown 6 | id: type 7 | attributes: 8 | label: Type 9 | options: 10 | - Bug 11 | - Feature 12 | - Support 13 | - Breaking Change 14 | - Docs 15 | - Dependency 16 | validations: 17 | required: true 18 | - type: textarea 19 | id: summary 20 | attributes: 21 | label: Summary 22 | placeholder: One-line summary... 23 | validations: 24 | required: true 25 | - type: textarea 26 | id: details 27 | attributes: 28 | label: Details 29 | description: Provide reproduction (bugs), motivation (features), or rationale (breaking). 30 | placeholder: | 31 | For Bugs: 32 | Steps: 33 | Expected: 34 | Actual: 35 | For Features: 36 | Motivation: 37 | Proposed Behavior: 38 | For Breaking Change: 39 | Rationale: 40 | Impact: 41 | validations: 42 | required: true 43 | - type: textarea 44 | id: environment 45 | attributes: 46 | label: Environment (Required for Bugs) 47 | placeholder: | 48 | Plugin Version: 49 | Homebridge Version: 50 | Node.js Version: 51 | NPM Version: 52 | Python Version: 53 | OS: 54 | validations: 55 | required: false 56 | - type: textarea 57 | id: migration 58 | attributes: 59 | label: Migration Strategy (Breaking Change Only) 60 | placeholder: | 61 | 1. Step 62 | 2. 
Step 63 | validations: 64 | required: false 65 | - type: textarea 66 | id: logs 67 | attributes: 68 | label: Logs (If Relevant) 69 | render: text 70 | description: Redact sensitive data 71 | validations: 72 | required: false 73 | - type: textarea 74 | id: extra 75 | attributes: 76 | label: Additional Context 77 | validations: 78 | required: false 79 | -------------------------------------------------------------------------------- /eslint.config.mjs: -------------------------------------------------------------------------------- 1 | import js from '@eslint/js'; 2 | import path from 'node:path'; 3 | import tsParser from '@typescript-eslint/parser'; 4 | import stylistic from '@stylistic/eslint-plugin'; 5 | import { fileURLToPath } from 'node:url'; 6 | import { FlatCompat } from '@eslint/eslintrc'; 7 | 8 | const __filename = fileURLToPath(import.meta.url); 9 | const __dirname = path.dirname(__filename); 10 | const compat = new FlatCompat({ 11 | baseDirectory: __dirname, 12 | recommendedConfig: js.configs.recommended, 13 | allConfig: js.configs.all, 14 | }); 15 | 16 | export default [{ 17 | ignores: ['**/dist'], 18 | }, ...compat.extends( 19 | 'eslint:recommended', 20 | 'plugin:@typescript-eslint/eslint-recommended', 21 | 'plugin:@typescript-eslint/recommended', 22 | ), { 23 | languageOptions: { 24 | parser: tsParser, 25 | ecmaVersion: 2018, 26 | sourceType: 'module', 27 | }, 28 | 29 | plugins: { 30 | '@stylistic': stylistic, 31 | }, 32 | 33 | rules: { 34 | quotes: ['warn', 'single'], 35 | indent: ['warn', 2, { 36 | SwitchCase: 1, 37 | }], 38 | semi: ['off'], 39 | 'comma-dangle': ['warn', 'always-multiline'], 40 | 'dot-notation': 'off', 41 | eqeqeq: 'warn', 42 | curly: ['warn', 'all'], 43 | 'brace-style': ['warn'], 44 | 'prefer-arrow-callback': ['warn'], 45 | 'max-len': ['warn', 140], 46 | 'no-console': ['warn'], 47 | 'no-non-null-assertion': ['off'], 48 | 'comma-spacing': ['error'], 49 | 'no-multi-spaces': ['warn', { 50 | ignoreEOLComments: true, 51 | }], 52 | 
'no-trailing-spaces': ['warn'], 53 | 'no-constant-condition': 'off', 54 | 'lines-between-class-members': ['warn', 'always', { 55 | exceptAfterSingleLine: true, 56 | }], 57 | '@typescript-eslint/explicit-function-return-type': 'off', 58 | '@typescript-eslint/no-non-null-assertion': 'off', 59 | '@typescript-eslint/explicit-module-boundary-types': 'off', 60 | '@stylistic/semi': ['warn'], 61 | '@stylistic/member-delimiter-style': ['warn', { 62 | multiline: { 63 | delimiter: 'semi', 64 | requireLast: true, 65 | }, 66 | singleline: { 67 | delimiter: 'semi', 68 | requireLast: false, 69 | }, 70 | }], 71 | }, 72 | }]; -------------------------------------------------------------------------------- /src/accessoryInformation.ts: -------------------------------------------------------------------------------- 1 | import type { HAP, PlatformAccessory, Service } from 'homebridge'; 2 | 3 | import type HomeKitDevice from './devices/index.js'; 4 | 5 | export default function platformAccessoryInformation( 6 | hap: HAP, 7 | ): (platformAccessory: PlatformAccessory, homekitDevice: HomeKitDevice) => Service | undefined { 8 | const { Characteristic, Service: { AccessoryInformation } } = hap; 9 | 10 | return (platformAccessory: PlatformAccessory, homekitDevice: HomeKitDevice) => { 11 | const existingInfoService = platformAccessory.getService(AccessoryInformation); 12 | if (existingInfoService) { 13 | if (existingInfoService.getCharacteristic(Characteristic.Name).value !== homekitDevice.name) { 14 | existingInfoService.setCharacteristic(Characteristic.Name, homekitDevice.name); 15 | } else if (existingInfoService.getCharacteristic(Characteristic.Manufacturer).value !== homekitDevice.manufacturer) { 16 | existingInfoService.setCharacteristic(Characteristic.Manufacturer, homekitDevice.manufacturer); 17 | } else if (existingInfoService.getCharacteristic(Characteristic.Model).value !== homekitDevice.model) { 18 | existingInfoService.setCharacteristic(Characteristic.Model, 
homekitDevice.model); 19 | } else if (existingInfoService.getCharacteristic(Characteristic.SerialNumber).value !== homekitDevice.serialNumber) { 20 | existingInfoService.setCharacteristic(Characteristic.SerialNumber, homekitDevice.serialNumber); 21 | } else if (existingInfoService.getCharacteristic(Characteristic.FirmwareRevision).value !== homekitDevice.firmwareRevision) { 22 | existingInfoService.setCharacteristic(Characteristic.FirmwareRevision, homekitDevice.firmwareRevision); 23 | } 24 | return existingInfoService; 25 | } else { 26 | const infoService = platformAccessory.addService(AccessoryInformation); 27 | infoService 28 | .setCharacteristic(Characteristic.Name, homekitDevice.name) 29 | .setCharacteristic(Characteristic.Manufacturer, homekitDevice.manufacturer) 30 | .setCharacteristic(Characteristic.Model, homekitDevice.model) 31 | .setCharacteristic(Characteristic.SerialNumber, homekitDevice.serialNumber) 32 | .setCharacteristic(Characteristic.FirmwareRevision, homekitDevice.firmwareRevision); 33 | return infoService; 34 | } 35 | }; 36 | } -------------------------------------------------------------------------------- /.github/workflows/beta-to-stable.yml: -------------------------------------------------------------------------------- 1 | --- 2 | name: Convert Beta to Stable Pull Request 3 | 4 | on: 5 | workflow_dispatch: {} 6 | 7 | permissions: 8 | contents: read 9 | issues: write 10 | pull-requests: write 11 | 12 | env: 13 | GITHUB_REPOSITORY: ${{ github.repository }} 14 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 15 | 16 | jobs: 17 | detect-fork: 18 | name: Detect Fork 19 | runs-on: ubuntu-latest 20 | outputs: 21 | is_fork: ${{ steps.detect-fork-context.outputs.is_fork }} 22 | steps: 23 | - name: Checkout Actions 24 | id: checkout 25 | uses: actions/checkout@v6 26 | with: 27 | fetch-depth: 0 28 | 29 | - name: Detect Fork Context 30 | id: detect-fork-context 31 | uses: ./.github/actions/detect-fork 32 | 33 | prepare: 34 | name: Validate Environment 35 
| runs-on: ubuntu-latest 36 | needs: detect-fork 37 | if: needs.detect-fork.outputs.is_fork == 'false' 38 | steps: 39 | - name: Validate Environment 40 | id: validate 41 | shell: bash 42 | run: | 43 | set -euo pipefail 44 | : "${GITHUB_REPOSITORY:?GITHUB_REPOSITORY is required}" 45 | : "${GITHUB_TOKEN:?GITHUB_TOKEN is required}" 46 | 47 | create-pr: 48 | name: Create Stable Release Pull Request 49 | runs-on: ubuntu-latest 50 | needs: prepare 51 | if: needs.prepare.result == 'success' 52 | steps: 53 | - name: Checkout Actions 54 | id: checkout 55 | uses: actions/checkout@v6 56 | with: 57 | fetch-depth: 0 58 | 59 | - name: Setup Python 3.12 60 | id: setup-python 61 | uses: actions/setup-python@v6 62 | with: 63 | python-version: '3.12' 64 | 65 | - name: Create Stable Release Pull Request 66 | id: create-pr 67 | shell: bash 68 | run: | 69 | set -euo pipefail 70 | python3 .github/scripts/beta_to_stable.py 71 | 72 | fork-notice: 73 | name: Fork Notice 74 | runs-on: ubuntu-latest 75 | needs: detect-fork 76 | if: needs.detect-fork.outputs.is_fork == 'true' 77 | steps: 78 | - name: Fork Notice 79 | id: fork-notice 80 | shell: bash 81 | run: | 82 | set -euo pipefail 83 | echo "::notice:: This workflow is running in a forked repository. All privileged steps have been skipped." 
84 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "displayName": "Homebridge Kasa Python", 3 | "name": "homebridge-kasa-python", 4 | "version": "2.9.1", 5 | "description": "Plugin that uses Python-Kasa API to communicate with Kasa Devices.", 6 | "license": "MIT", 7 | "type": "module", 8 | "repository": { 9 | "type": "git", 10 | "url": "git+https://github.com/ZeliardM/homebridge-kasa-python.git" 11 | }, 12 | "bugs": { 13 | "url": "https://github.com/ZeliardM/homebridge-kasa-python/issues" 14 | }, 15 | "engines": { 16 | "homebridge": "^1.11.0 || ^2.0.0", 17 | "node": "^20 || ^22 || ^24", 18 | "python": "^3.11 || ^3.12 || ^3.13" 19 | }, 20 | "publishConfig": { 21 | "access": "public" 22 | }, 23 | "main": "dist/index.js", 24 | "scripts": { 25 | "build": "npm ci && rimraf -I ./dist && tsc && node copyPythonFiles.js", 26 | "lint": "eslint src/**/*.ts --max-warnings=0", 27 | "prepublishOnly": "npm run lint && npm run build", 28 | "test": "echo \"No test specified\" && exit 0", 29 | "watch": "npm run build && npm link && nodemon" 30 | }, 31 | "keywords": [ 32 | "homebridge", 33 | "homebridge-plugin", 34 | "homebridge-kasa", 35 | "homebridge-kasa-python", 36 | "homekit", 37 | "kasa", 38 | "plugin", 39 | "python", 40 | "python-kasa", 41 | "smarthome", 42 | "tapo", 43 | "tplink", 44 | "tplink-kasa", 45 | "tplink-tapo", 46 | "tplink-smarthome" 47 | ], 48 | "files": [ 49 | "config.schema.json", 50 | "dist", 51 | "LICENSE", 52 | "requirements.txt" 53 | ], 54 | "devDependencies": { 55 | "@eslint/eslintrc": "^3.3.1", 56 | "@eslint/js": "^9.36.0", 57 | "@stylistic/eslint-plugin": "^5.4.0", 58 | "@types/node": "^24.5.2", 59 | "@typescript-eslint/parser": "^8.44.1", 60 | "eslint": "^9.36.0", 61 | "globals": "^16.4.0", 62 | "homebridge": "^2.0.0-beta.30", 63 | "node-persist": "^4.0.4", 64 | "nodemon": "^3.1.10", 65 | "rimraf": "^6.0.1", 66 | 
"ts-node": "^10.9.2", 67 | "typescript-eslint": "^8.44.1" 68 | }, 69 | "homepage": "https://github.com/ZeliardM/homebridge-kasa-python#readme", 70 | "funding": [ 71 | { 72 | "type": "github", 73 | "url": "https://github.com/sponsors/ZeliardM" 74 | }, 75 | { 76 | "type": "paypal", 77 | "url": "https://www.paypal.me/ZeliardM/USD" 78 | } 79 | ], 80 | "dependencies": { 81 | "axios": "^1.12.2", 82 | "eventsource": "^4.0.0", 83 | "ts-essentials": "^10.1.1", 84 | "typescript": "^5.9.2" 85 | }, 86 | "overrides": { 87 | "node-persist": "^4.0.4" 88 | } 89 | } 90 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Custom ignore rules 2 | user_storage/* 3 | !user_storage/config.example.json 4 | .venv 5 | 6 | # Ignore compiled code 7 | dist 8 | 9 | # ------------- Defaults ------------- # 10 | 11 | # Logs 12 | logs 13 | *.log 14 | npm-debug.log* 15 | yarn-debug.log* 16 | yarn-error.log* 17 | lerna-debug.log* 18 | 19 | # Diagnostic reports (https://nodejs.org/api/report.html) 20 | report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json 21 | 22 | # Runtime data 23 | pids 24 | *.pid 25 | *.seed 26 | *.pid.lock 27 | 28 | # Directory for instrumented libs generated by jscoverage/JSCover 29 | lib-cov 30 | 31 | # Coverage directory used by tools like istanbul 32 | coverage 33 | *.lcov 34 | 35 | # nyc test coverage 36 | .nyc_output 37 | 38 | # Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files) 39 | .grunt 40 | 41 | # Bower dependency directory (https://bower.io/) 42 | bower_components 43 | 44 | # node-waf configuration 45 | .lock-wscript 46 | 47 | # Compiled binary addons (https://nodejs.org/api/addons.html) 48 | build/Release 49 | 50 | # Dependency directories 51 | node_modules/ 52 | jspm_packages/ 53 | 54 | # Snowpack dependency directory (https://snowpack.dev/) 55 | web_modules/ 56 | 57 | # TypeScript cache 58 | *.tsbuildinfo 59 | 60 | 
# Optional npm cache directory 61 | .npm 62 | 63 | # Optional eslint cache 64 | .eslintcache 65 | 66 | # Microbundle cache 67 | .rpt2_cache/ 68 | .rts2_cache_cjs/ 69 | .rts2_cache_es/ 70 | .rts2_cache_umd/ 71 | 72 | # Optional REPL history 73 | .node_repl_history 74 | 75 | # Output of 'npm pack' 76 | *.tgz 77 | 78 | # Yarn Integrity file 79 | .yarn-integrity 80 | 81 | # dotenv environment variables file 82 | .env 83 | .env.test 84 | 85 | # parcel-bundler cache (https://parceljs.org/) 86 | .cache 87 | .parcel-cache 88 | 89 | # Next.js build output 90 | .next 91 | 92 | # Nuxt.js build / generate output 93 | .nuxt 94 | dist 95 | 96 | # Gatsby files 97 | .cache/ 98 | # Comment in the public line in if your project uses Gatsby and not Next.js 99 | # https://nextjs.org/blog/next-9-1#public-directory-support 100 | # public 101 | 102 | # vuepress build output 103 | .vuepress/dist 104 | 105 | # Serverless directories 106 | .serverless/ 107 | 108 | # FuseBox cache 109 | .fusebox/ 110 | 111 | # DynamoDB Local files 112 | .dynamodb/ 113 | 114 | # TernJS port file 115 | .tern-port 116 | 117 | # Stores VSCode versions used for testing VSCode extensions 118 | .vscode-test 119 | 120 | # yarn v2 121 | 122 | .yarn/cache 123 | .yarn/unplugged 124 | .yarn/build-state.yml 125 | .pnp.* 126 | 127 | # Webstorm 128 | .idea 129 | __pycache__/ 130 | -------------------------------------------------------------------------------- /.npmignore: -------------------------------------------------------------------------------- 1 | .venv 2 | 3 | # Ignore source code 4 | src 5 | 6 | # ------------- Defaults ------------- # 7 | 8 | # gitHub actions 9 | .github 10 | 11 | # eslint 12 | .eslintrc 13 | 14 | # typescript 15 | tsconfig.json 16 | 17 | # vscode 18 | .vscode 19 | 20 | # nodemon 21 | nodemon.json 22 | 23 | # Logs 24 | logs 25 | *.log 26 | npm-debug.log* 27 | yarn-debug.log* 28 | yarn-error.log* 29 | lerna-debug.log* 30 | 31 | # Diagnostic reports (https://nodejs.org/api/report.html) 32 | 
report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json 33 | 34 | # Runtime data 35 | pids 36 | *.pid 37 | *.seed 38 | *.pid.lock 39 | 40 | # Directory for instrumented libs generated by jscoverage/JSCover 41 | lib-cov 42 | 43 | # Coverage directory used by tools like istanbul 44 | coverage 45 | *.lcov 46 | 47 | # nyc test coverage 48 | .nyc_output 49 | 50 | # Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files) 51 | .grunt 52 | 53 | # Bower dependency directory (https://bower.io/) 54 | bower_components 55 | 56 | # node-waf configuration 57 | .lock-wscript 58 | 59 | # Compiled binary addons (https://nodejs.org/api/addons.html) 60 | build/Release 61 | 62 | # Dependency directories 63 | node_modules/ 64 | jspm_packages/ 65 | 66 | # Snowpack dependency directory (https://snowpack.dev/) 67 | web_modules/ 68 | 69 | # TypeScript cache 70 | *.tsbuildinfo 71 | 72 | # Optional npm cache directory 73 | .npm 74 | 75 | # Optional eslint cache 76 | .eslintcache 77 | 78 | # Microbundle cache 79 | .rpt2_cache/ 80 | .rts2_cache_cjs/ 81 | .rts2_cache_es/ 82 | .rts2_cache_umd/ 83 | 84 | # Optional REPL history 85 | .node_repl_history 86 | 87 | # Output of 'npm pack' 88 | *.tgz 89 | 90 | # Yarn Integrity file 91 | .yarn-integrity 92 | 93 | # dotenv environment variables file 94 | .env 95 | .env.test 96 | 97 | # parcel-bundler cache (https://parceljs.org/) 98 | .cache 99 | .parcel-cache 100 | 101 | # Next.js build output 102 | .next 103 | 104 | # Nuxt.js build / generate output 105 | .nuxt 106 | 107 | # Gatsby files 108 | .cache/ 109 | # Comment in the public line in if your project uses Gatsby and not Next.js 110 | # https://nextjs.org/blog/next-9-1#public-directory-support 111 | # public 112 | 113 | # vuepress build output 114 | .vuepress/dist 115 | 116 | # Serverless directories 117 | .serverless/ 118 | 119 | # FuseBox cache 120 | .fusebox/ 121 | 122 | # DynamoDB Local files 123 | .dynamodb/ 124 | 125 | # TernJS port file 126 | .tern-port 127 | 128 | # Stores VSCode 
versions used for testing VSCode extensions 129 | .vscode-test 130 | 131 | # yarn v2 132 | 133 | .yarn/cache 134 | .yarn/unplugged 135 | .yarn/build-state.yml 136 | .pnp.* 137 | -------------------------------------------------------------------------------- /.github/workflows/shared-discord-notify.yml: -------------------------------------------------------------------------------- 1 | --- 2 | name: Discord Notification 3 | 4 | on: 5 | workflow_call: 6 | inputs: 7 | title: 8 | description: Title for the Discord Notification 9 | required: true 10 | type: string 11 | secrets: 12 | discord_webhook: 13 | description: Discord Webhook URL 14 | required: true 15 | 16 | jobs: 17 | notify: 18 | name: Send Discord Notification 19 | runs-on: ubuntu-latest 20 | steps: 21 | - name: Checkout Actions 22 | id: checkout 23 | uses: actions/checkout@v6 24 | with: 25 | fetch-depth: 0 26 | 27 | - name: Setup Python 3.12 28 | id: setup-python 29 | uses: actions/setup-python@v6 30 | with: 31 | python-version: '3.12' 32 | 33 | - name: Install Python requests 34 | id: install_requests 35 | shell: bash 36 | run: | 37 | set -euo pipefail 38 | python -m pip install --upgrade pip 39 | python -m pip install requests 40 | 41 | - name: Get Discord Payload 42 | id: webhook 43 | uses: sarisia/actions-status-discord@v1 44 | with: 45 | status: Success 46 | title: ${{ inputs.title }} 47 | description: Version `${{ github.event.release.tag_name }}` 48 | url: https://github.com/${{ github.repository }}/releases/tag/${{ github.event.release.tag_name }} 49 | username: Homebridge 50 | avatar_url: https://raw.githubusercontent.com/homebridge/branding/latest/logos/homebridge-color-round-stylized.png 51 | color: 4726621 52 | nodetail: false 53 | nocontext: false 54 | notimestamp: false 55 | nofail: false 56 | ack_no_webhook: true 57 | 58 | - name: Build Trimmed Event Field Value 59 | id: release_notes 60 | shell: bash 61 | env: 62 | MODE: trim 63 | run: | 64 | set -euo pipefail 65 | python3 
.github/scripts/discord_tools.py 66 | 67 | - name: Edit Payload with Trimmed Release Notes 68 | id: edit_payload 69 | shell: bash 70 | env: 71 | MODE: edit-payload 72 | EVENT_VALUE: ${{ steps.release_notes.outputs.body }} 73 | WEBHOOK_PAYLOAD: ${{ steps.webhook.outputs.payload }} 74 | run: | 75 | set -euo pipefail 76 | python3 .github/scripts/discord_tools.py 77 | 78 | - name: Post to Discord 79 | id: post 80 | shell: bash 81 | env: 82 | MODE: post 83 | EDITED_PAYLOAD: ${{ steps.edit_payload.outputs.edited_payload }} 84 | WEBHOOK_URL: ${{ secrets.discord_webhook }} 85 | run: | 86 | set -euo pipefail 87 | python3 .github/scripts/discord_tools.py 88 | -------------------------------------------------------------------------------- /src/devices/create.ts: -------------------------------------------------------------------------------- 1 | import HomeKitDevice from './index.js'; 2 | import HomeKitDeviceLightBulb from './homekitLightBulb.js'; 3 | import HomeKitDevicePlug from './homekitPlug.js'; 4 | import HomeKitDevicePowerStrip from './homekitPowerStrip.js'; 5 | import HomeKitDeviceSwitch from './homekitSwitch.js'; 6 | import HomeKitDeviceSwitchWithChildren from './homekitSwitchWithChildren.js'; 7 | import { LightBulbs, Plugs, PowerStrips, Switches } from './kasaDevices.js'; 8 | import type KasaPythonPlatform from '../platform.js'; 9 | import type { KasaDevice, LightBulb, Plug, PowerStrip, Switch } from './kasaDevices.js'; 10 | 11 | function isLightBulb(device: KasaDevice): device is LightBulb { 12 | return LightBulbs.includes(device.sys_info.model); 13 | } 14 | 15 | function isPlug(device: KasaDevice): device is Plug { 16 | return Plugs.includes(device.sys_info.model); 17 | } 18 | 19 | function isPowerStrip(device: KasaDevice): device is PowerStrip { 20 | return PowerStrips.includes(device.sys_info.model); 21 | } 22 | 23 | function isSwitch(device: KasaDevice): device is Switch { 24 | return Switches.includes(device.sys_info.model); 25 | } 26 | 27 | export default 
/**
 * Creates and initializes the HomeKit wrapper for a discovered Kasa device.
 *
 * Dispatches on the device model (via the type guards above) to the matching
 * HomeKitDevice subclass, then runs its async initialization.
 *
 * @param platform - The owning KasaPython platform (used for logging and context).
 * @param kasaDevice - Raw device record produced by the python-kasa bridge.
 * @returns The initialized HomeKit device, or `undefined` when the model is
 *          unknown or initialization throws (the error is logged, not rethrown).
 */
async function create(
  platform: KasaPythonPlatform,
  kasaDevice: KasaDevice,
): Promise<HomeKitDevice | undefined> {
  let homeKitDevice: HomeKitDevice | undefined;

  if (isLightBulb(kasaDevice)) {
    // The type guard already narrows kasaDevice to LightBulb; no cast needed.
    platform.log.debug('HomeKit device is a LightBulb:', kasaDevice.sys_info.model);
    homeKitDevice = new HomeKitDeviceLightBulb(platform, kasaDevice);
  } else if (isPlug(kasaDevice)) {
    platform.log.debug('HomeKit device is a Plug:', kasaDevice.sys_info.model);
    homeKitDevice = new HomeKitDevicePlug(platform, kasaDevice);
  } else if (isPowerStrip(kasaDevice)) {
    platform.log.debug('HomeKit device is a PowerStrip:', kasaDevice.sys_info.model);
    homeKitDevice = new HomeKitDevicePowerStrip(platform, kasaDevice);
  } else if (isSwitch(kasaDevice)) {
    platform.log.debug('HomeKit device is a Switch:', kasaDevice.sys_info.model);
    // Multi-gang switches (child_num > 0) get the with-children wrapper.
    homeKitDevice = kasaDevice.sys_info.child_num > 0
      ? new HomeKitDeviceSwitchWithChildren(platform, kasaDevice)
      : new HomeKitDeviceSwitch(platform, kasaDevice);
  } else {
    platform.log.error('Unknown device type:', kasaDevice);
    return undefined;
  }

  // Every surviving branch assigned homeKitDevice, so initialize unconditionally.
  try {
    await homeKitDevice.initialize();
  } catch (error) {
    platform.log.error(`Error initializing device [${kasaDevice.sys_info.device_id}]:`, error);
    return undefined;
  }

  return homeKitDevice;
}
--------------------------------------------------------------------------------
/.github/workflows/shared-build-lint-test.yml:
--------------------------------------------------------------------------------
---
name: Build, Lint, and Test

on:
  workflow_call: {}

concurrency:
  group: build-lint-test-${{ github.ref }}
cancel-in-progress: true 10 | 11 | jobs: 12 | build_lint_test: 13 | name: Build, Lint, and Test 14 | runs-on: ubuntu-latest 15 | strategy: 16 | fail-fast: false 17 | matrix: 18 | node-version: ${{ fromJson('["20","22","24"]') }} 19 | python-version: ${{ fromJson('["3.11","3.12","3.13"]') }} 20 | steps: 21 | - name: Checkout Actions 22 | id: checkout 23 | uses: actions/checkout@v6 24 | with: 25 | fetch-depth: 0 26 | 27 | - name: Setup Node ${{ matrix.node-version }} 28 | id: setup-node 29 | uses: actions/setup-node@v6 30 | with: 31 | node-version: ${{ matrix.node-version }} 32 | cache: npm 33 | 34 | - name: Setup Python ${{ matrix.python-version }} 35 | id: setup-python 36 | uses: actions/setup-python@v6 37 | with: 38 | python-version: ${{ matrix.python-version }} 39 | cache: pip 40 | 41 | - name: Update npm 42 | id: update-npm 43 | shell: bash 44 | run: | 45 | set -euo pipefail 46 | npm install -g npm@latest 47 | 48 | - name: Install Python Dependencies 49 | id: install-python-deps 50 | shell: bash 51 | run: | 52 | set -euo pipefail 53 | python -m pip install --upgrade pip 54 | if [ -f requirements.txt ]; then pip install -r requirements.txt; fi 55 | 56 | - name: Install Node Dependencies 57 | id: install-node-deps 58 | shell: bash 59 | run: | 60 | set -euo pipefail 61 | npm ci 62 | 63 | - name: Lint 64 | id: lint 65 | shell: bash 66 | run: | 67 | set -euo pipefail 68 | npm run lint 69 | 70 | - name: Build 71 | id: build 72 | shell: bash 73 | run: | 74 | set -euo pipefail 75 | npm run build 76 | 77 | - name: Node Import Test 78 | id: node-import-test 79 | shell: bash 80 | run: | 81 | set -euo pipefail 82 | node <<'NODE' 83 | (async () => { 84 | try { 85 | await import('./dist/index.js'); 86 | console.log('Node Import OK'); 87 | } catch (e) { 88 | console.error(e); 89 | process.exit(1); 90 | } 91 | })(); 92 | NODE 93 | 94 | - name: Python Import Test 95 | id: python-import-test 96 | shell: bash 97 | run: | 98 | set -euo pipefail 99 | if [ -f requirements.txt ]; then 
100 | set -e 101 | while IFS= read -r pkg || [ -n "$pkg" ]; do 102 | line="$(printf '%s' "$pkg" | sed 's/[[:space:]]//g')" 103 | [ -z "$line" ] && continue 104 | case "$line" in \#*) continue ;; esac 105 | mod="$(printf '%s' "$line" | cut -d'=' -f1 | sed 's/\[.*\]//' | tr '-' '_' )" 106 | case "$mod" in 107 | python_kasa) mod="kasa" ;; 108 | esac 109 | echo "Import check: $mod" 110 | python -c "import ${mod}" 111 | done < requirements.txt 112 | echo "Python Imports OK" 113 | fi 114 | 115 | - name: Outdated / Audit 116 | id: outdated-audit 117 | shell: bash 118 | run: | 119 | set -euo pipefail 120 | npm list --outdated || true 121 | npm audit || true 122 | -------------------------------------------------------------------------------- /.github/scripts/release_publish.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | """Single-script publish handler. 3 | 4 | - Resolving the release tag from the GitHub event 5 | - Checking out the tag 6 | - Ensuring package.json's version matches the tag 7 | - Publishing to npm with the correct dist-tag 8 | - Writing NPM_VERSION to $GITHUB_OUTPUT on success 9 | 10 | On failure, rollback is delegated to `release_manager.py` in MODE=rollback. 
11 | """ 12 | import os 13 | import sys 14 | 15 | SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__)) 16 | if SCRIPT_DIR not in sys.path: 17 | sys.path.insert(0, SCRIPT_DIR) 18 | import common 19 | 20 | from common import Context 21 | 22 | def _resolve_tag_and_expected_version(evt: dict) -> tuple[str, str]: 23 | release_info = evt.get("release") or {} 24 | tag = release_info.get("tag_name") 25 | if not tag: 26 | raise RuntimeError("No tag found from event context") 27 | expected = tag[1:] if tag.startswith("v") else tag 28 | return tag, expected 29 | 30 | def _delegate_rollback(context: Context) -> None: 31 | print("::warning::npm publish failed; delegating rollback to release_manager.py...") 32 | env = os.environ.copy() 33 | env["GITHUB_REPOSITORY"] = context.github_repository 34 | env["GITHUB_TOKEN"] = context.github_token 35 | env["MODE"] = "rollback" 36 | env["TAG"] = context.tag 37 | if context.target_branch: 38 | env["TARGET_BRANCH"] = context.target_branch 39 | try: 40 | common.run( 41 | ["python3", ".github/scripts/release_manager.py"], 42 | check=True, 43 | env=env, 44 | ) 45 | except Exception as e: 46 | print(f"::warning::Rollback via release_manager.py encountered an error: {e}") 47 | 48 | def main() -> None: 49 | github_repository = os.environ.get("GITHUB_REPOSITORY") 50 | github_token = os.environ.get("GITHUB_TOKEN") 51 | context = Context(github_repository=github_repository, github_token=github_token) 52 | evt = common.read_event() 53 | tag, expected_version = _resolve_tag_and_expected_version(evt) 54 | release_info = evt.get("release") 55 | context.tag = tag 56 | context.target_branch = release_info.get("target_commitish") 57 | npm_tag = os.environ.get("NPM_TAG") 58 | try: 59 | print(f"[publish] Resolved tag: {tag}") 60 | print(f"[publish] Expected version from tag: {expected_version}") 61 | if not common.npm_available(): 62 | raise RuntimeError("npm is not available on PATH") 63 | common.git_checkout_tag(tag) 64 | current_ver = 
common.npm_read_version() 65 | print(f"[publish] package.json version: {current_ver}") 66 | print(f"[publish] expected version: {expected_version}") 67 | if current_ver != expected_version: 68 | print("[publish] package.json version mismatch; updating via npm version --no-git-tag-version") 69 | common.npm_set_version_no_git_tag(expected_version) 70 | current_ver = common.npm_read_version() 71 | print(f"[publish] Updated package.json version -> {current_ver}") 72 | if current_ver != expected_version: 73 | raise RuntimeError( 74 | f"package.json version ({current_ver}) does not match expected " 75 | f"version from tag ({expected_version})" 76 | ) 77 | publish_cmd = "npm publish --provenance --access public" 78 | if npm_tag: 79 | publish_cmd = f"npm publish --tag {npm_tag} --provenance --access public" 80 | common.run(publish_cmd) 81 | version_out = common.npm_read_version() 82 | gh_out = os.environ.get("GITHUB_OUTPUT") 83 | if gh_out: 84 | with open(gh_out, "a", encoding="utf-8") as f: 85 | f.write(f"NPM_VERSION={version_out}\n") 86 | print(f"::notice::Publish succeeded: {version_out}") 87 | except Exception as e: 88 | print(f"::error::Publish failed: {e}") 89 | _delegate_rollback(context) 90 | sys.exit(1) 91 | 92 | if __name__ == "__main__": 93 | main() -------------------------------------------------------------------------------- /src/devices/kasaDevices.ts: -------------------------------------------------------------------------------- 1 | export type KasaDevice = LightBulb | Plug | PowerStrip | Switch; 2 | 3 | export interface SysInfo { 4 | alias: string; 5 | brightness?: number; 6 | children?: ChildDevice[]; 7 | child_num: number; 8 | color_temp?: number; 9 | device_id: string; 10 | device_type: string; 11 | energy?: Energy; 12 | fan_speed_level?: number; 13 | host: string; 14 | hw_ver: string; 15 | hsv?: HSV; 16 | mac: string; 17 | model: string; 18 | state?: boolean; 19 | sw_ver: string; 20 | [key: string]: string | number | boolean | ChildDevice[] | Energy 
| HSV | undefined; 21 | } 22 | 23 | export interface FeatureInfo { 24 | brightness?: boolean; 25 | color_temp?: boolean; 26 | energy?: boolean; 27 | fan?: boolean; 28 | hsv?: boolean; 29 | } 30 | 31 | export interface ChildDevice { 32 | alias: string; 33 | brightness?: number; 34 | color_temp?: number; 35 | energy?: Energy; 36 | fan_speed_level?: number; 37 | hsv?: HSV; 38 | id: string; 39 | state: boolean; 40 | [key: string]: string | number | boolean | Energy | HSV | undefined; 41 | } 42 | 43 | export interface Energy { 44 | current: number; 45 | voltage: number; 46 | power: number; 47 | total: number; 48 | today: number; 49 | month: number; 50 | } 51 | 52 | export interface HSV { 53 | hue: number; 54 | saturation: number; 55 | } 56 | 57 | export interface DeviceConfig { 58 | host: string; 59 | timeout: number; 60 | credentials?: { 61 | username: string; 62 | password: string; 63 | }; 64 | connection_type: { 65 | device_family: string; 66 | encryption_type: string; 67 | https: boolean; 68 | }; 69 | uses_http: boolean; 70 | } 71 | 72 | export interface ConfigDevice { 73 | host: string; 74 | alias: string; 75 | } 76 | 77 | export interface LightBulb { 78 | sys_info: SysInfo; 79 | feature_info: FeatureInfo; 80 | last_seen: Date; 81 | offline: boolean; 82 | } 83 | 84 | export interface Plug { 85 | sys_info: SysInfo; 86 | feature_info: FeatureInfo; 87 | last_seen: Date; 88 | offline: boolean; 89 | } 90 | 91 | export interface PowerStrip { 92 | sys_info: SysInfo; 93 | feature_info: FeatureInfo; 94 | last_seen: Date; 95 | offline: boolean; 96 | } 97 | 98 | export interface Switch { 99 | sys_info: SysInfo; 100 | feature_info: FeatureInfo; 101 | last_seen: Date; 102 | offline: boolean; 103 | } 104 | 105 | export const Plugs = [ 106 | 'EP10', 107 | 'EP25', 108 | 'HS100', 109 | 'HS103', 110 | 'HS105', 111 | 'HS110', 112 | 'KP100', 113 | 'KP105', 114 | 'KP115', 115 | 'KP125', 116 | 'KP125M', 117 | 'KP401', 118 | 'P100', 119 | 'P110', 120 | 'P110M', 121 | 'P115', 122 | 
'P125M', 123 | 'P135', 124 | 'TP15', 125 | ]; 126 | 127 | export const PowerStrips = [ 128 | 'EP40', 129 | 'EP40M', 130 | 'HS107', 131 | 'HS300', 132 | 'KP200', 133 | 'KP303', 134 | 'KP400', 135 | 'P210M', 136 | 'P300', 137 | 'P304M', 138 | 'P306', 139 | 'P400M', 140 | 'TP25', 141 | ]; 142 | 143 | export const Switches = [ 144 | 'ES20M', 145 | 'HS200', 146 | 'HS210', 147 | 'HS220', 148 | 'KP405', 149 | 'KS200', 150 | 'KS200M', 151 | 'KS205', 152 | 'KS220', 153 | 'KS220M', 154 | 'KS225', 155 | 'KS230', 156 | 'KS240', 157 | 'S500', 158 | 'S500D', 159 | 'S505', 160 | 'S505D', 161 | 'TS15', 162 | ]; 163 | 164 | export const LightBulbs = [ 165 | 'KL110', 166 | 'KL120', 167 | 'KL125', 168 | 'KL130', 169 | 'KL135', 170 | 'KL50', 171 | 'KL60', 172 | 'LB100', 173 | 'LB110', 174 | 'L510', 175 | 'L510 Series', 176 | 'L530', 177 | 'L535', 178 | 'L630', 179 | 'KL400L5', 180 | 'KL400L10', 181 | 'KL420L5', 182 | 'KL430', 183 | 'L900', 184 | 'L900', 185 | 'L920', 186 | 'L930', 187 | ]; 188 | 189 | export const Unsupported = [ 190 | 'C100', 191 | 'C110', 192 | 'C210', 193 | 'C225', 194 | 'C325WB', 195 | 'C520WS', 196 | 'TC65', 197 | 'TC70', 198 | 'KH100', 199 | 'H100', 200 | 'H200', 201 | 'KE100', 202 | 'S200B', 203 | 'S200D', 204 | 'T100', 205 | 'T110', 206 | 'T300', 207 | 'T310', 208 | 'T315', 209 | ]; -------------------------------------------------------------------------------- /.github/scripts/beta_to_stable.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | """ 3 | Promote the latest published beta prerelease into a stable PR. 
- Auto-detects latest beta tag vX.Y.Z-beta.N
- Creates/updates PR from beta -> latest
- Adds labels: stable-conversion
"""
import os
import re
import sys

# Make the sibling helper module (common.py) importable regardless of the
# working directory the workflow invokes this script from.
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
if SCRIPT_DIR not in sys.path:
    sys.path.insert(0, SCRIPT_DIR)
import common

from common import Context

def _latest_beta(repo: str, token: str):
    # Scan published releases (paged, up to 50 pages of 100) for the most
    # recent prerelease tagged vX.Y.Z-beta.N targeting the beta branch.
    # Returns (beta_tag, stable_tag); exits the process if none is found.
    for page in range(1, 51):
        code, releases = common.github_api(repo, token, f"/releases?per_page=100&page={page}")
        if code != 200 or not releases:
            break
        for r in releases:
            tag = r.get("tag_name", "").strip()
            # Only published prereleases count; drafts and stable releases are skipped.
            if r.get("draft") or not r.get("prerelease"):
                continue
            if "-beta." not in tag or r.get("target_commitish") != "beta":
                continue
            m = re.match(r"^(v\d+\.\d+\.\d+)-beta\.(\d+)$", tag)
            if not m:
                continue
            # m.group(1) is the stable tag (vX.Y.Z) implied by the beta tag.
            return tag, m.group(1)
        if len(releases) < 100:
            # Short page means there are no further releases to scan.
            break
    print("::error::No published beta prerelease found.")
    sys.exit(1)

def _ensure_stable_not_published(repo: str, token: str, stable_tag: str):
    # Warn (but deliberately do not abort) if the stable tag already has a
    # published, non-draft, non-prerelease release.
    code, rel = common.github_api(repo, token, f"/releases/tags/{stable_tag}")
    if code == 200 and not rel.get("draft") and not rel.get("prerelease"):
        print(f"::warning::Stable release {stable_tag} exists. Continuing...")

def main():
    github_repository = os.environ.get("GITHUB_REPOSITORY")
    github_token = os.environ.get("GITHUB_TOKEN")
    context = Context(github_repository=github_repository, github_token=github_token)
    beta_tag, stable_tag = _latest_beta(context.github_repository, context.github_token)
    _ensure_stable_not_published(context.github_repository, context.github_token, stable_tag)
    # Look for an existing open beta -> latest PR so we update it in place
    # instead of opening a duplicate.
    code, pulls = common.github_api(context.github_repository, context.github_token, "/pulls?state=open&base=latest")
    if code != 200 or not isinstance(pulls, list):
        print("::error::Unable to list PRs")
        sys.exit(1)
    existing = next(
        (
            pr
            for pr in pulls
            if pr.get("base", {}).get("ref") == "latest"
            and pr.get("head", {}).get("ref") == "beta"
        ),
        None,
    )
    title = f"Release: {stable_tag}"
    body = (
        "This PR promotes the current beta branch into the latest branch for a stable release.\n\n"
        f"Detected from last published beta prerelease: {beta_tag}\n"
        "\n"
    )
    pr = existing
    if existing:
        number = existing.get("number")
        code, _ = common.github_api(
            context.github_repository,
            context.github_token,
            f"/pulls/{number}",
            method="PATCH",
            data={"title": title, "body": body},
        )
        if code not in (200, 201):
            print("::error::Failed to update PR")
            sys.exit(1)
    else:
        code, pr = common.github_api(
            context.github_repository,
            context.github_token,
            "/pulls",
            method="POST",
            data={"title": title, "head": "beta", "base": "latest", "body": body, "draft": False},
        )
        if code not in (200, 201) or not isinstance(pr, dict):
            print(f"::error::Failed to create PR: {pr}")
            sys.exit(1)
        number = pr.get("number")
    labels = {"labels": ["stable-conversion"]}
    # POST appends the label; on failure, fall back to PUT (replaces all labels).
    code, _ = common.github_api(context.github_repository, context.github_token, f"/issues/{number}/labels", method="POST",
data=labels) 96 | if code not in (200, 201): 97 | common.github_api(context.github_repository, context.github_token, f"/issues/{number}/labels", method="PUT", data=labels) 98 | pr_url = pr.get("html_url") if pr else f"#{number}" 99 | print(f"::notice::PR ready: {pr_url} (stable: {stable_tag}, from beta: {beta_tag})") 100 | 101 | if __name__ == "__main__": 102 | main() -------------------------------------------------------------------------------- /.github/scripts/pr_manager.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | """ 3 | Single-script PR handler. 4 | 5 | Responsible ONLY for semantic validation of the PR. All environment checks, 6 | fork handling, retargeting, and labeling are done in the workflow. 7 | 8 | Rules: 9 | - Skip validation for draft PRs. 10 | - Base branch must be 'beta', except for stable-conversion PRs: 11 | * base == 'latest' 12 | * head == 'beta' 13 | * AND label 'stable-conversion' 14 | - Needs at least one classification label: 15 | bug, fix, enhancement, feature, breaking-change, docs, dependency, internal, workflow 16 | - If breaking-change label: 17 | * Require markers: 18 | BREAKING_CHANGE_EXPLANATION_START 19 | ...explanation... 20 | BREAKING_CHANGE_EXPLANATION_END 21 | * Explanation must be at least 60 characters. 

Returns messages used by the workflow to build a sticky comment
"""
import json
import os
import sys

# Make the sibling helper module (common.py) importable regardless of the
# working directory the workflow invokes this script from.
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
if SCRIPT_DIR not in sys.path:
    sys.path.insert(0, SCRIPT_DIR)
import common

from common import Context

# At least one of these classification labels must be present on every PR.
CLASSIFICATION = {
    "bug",
    "fix",
    "enhancement",
    "feature",
    "breaking-change",
    "docs",
    "dependency",
    "internal",
    "workflow",
}
# Markers delimiting the required breaking-change explanation in the PR body.
START = "BREAKING_CHANGE_EXPLANATION_START"
END = "BREAKING_CHANGE_EXPLANATION_END"

def _handle_validate(context: Context) -> dict:
    # Fetch the PR and apply the semantic validation rules from the module
    # docstring. Returns {"ok": bool, "messages": [str, ...]} for the
    # workflow's sticky comment.
    code, pr = common.github_api(context.github_repository, context.github_token, f"/pulls/{context.pull_request_number}")
    if code != 200 or not isinstance(pr, dict):
        return {
            "ok": False,
            "messages": [f"Unable to fetch PR #{context.pull_request_number} (status {code})."],
        }
    # Drafts are not validated; reported as not-ok with an explanatory message.
    if pr.get("draft") is True:
        return {"ok": False, "messages": [f"PR #{context.pull_request_number} is a draft; skipping validation."]}
    base = (pr.get("base") or {}).get("ref", "") or ""
    head_ref = (pr.get("head") or {}).get("ref", "") or ""
    body = pr.get("body") or ""
    labels: list[str] = []
    # Prefer the live labels endpoint; fall back to labels embedded in the PR
    # payload (which may be stale for just-labeled PRs).
    label_code, data = common.github_api(context.github_repository, context.github_token, f"/issues/{context.pull_request_number}/labels")
    if label_code == 200 and isinstance(data, list):
        labels = [((l.get("name") or "").lower()) for l in data]
    if not labels:
        labels = [((l.get("name") or "").lower()) for l in pr.get("labels", [])]
    ok = True
    messages: list[str] = []
    # Stable-conversion PRs (beta -> latest, labeled) are the only allowed
    # exception to the "base must be beta" rule.
    is_stable_conversion = (
        base == "latest"
        and head_ref == "beta"
        and "stable-conversion" in labels
    )
    if not (base == "beta" or is_stable_conversion):
        ok = False
        messages.append(
            f'Invalid base branch "{base}". '
            'Pull requests must target "beta", except for stable-conversion PRs '
            '(beta -> latest with the "stable-conversion" label).'
        )
    if not any(l in CLASSIFICATION for l in labels):
        needed = ", ".join(sorted(CLASSIFICATION))
        current = ", ".join(sorted(set(labels))) if labels else ""
        ok = False
        messages.append(
            "Missing classification label. "
            f"Required: one of [{needed}]. Current labels: {current}."
        )
    if "breaking-change" in labels:
        # The explanation must sit between the START/END markers and be at
        # least 60 characters after stripping whitespace.
        s = body.find(START)
        e = body.find(END)
        if s == -1 or e == -1 or e <= s:
            ok = False
            messages.append(
                "The `breaking-change` label requires explanation markers:\n"
                f"{START}\n"
                "...detailed explanation and migration steps...\n"
                f"{END}"
            )
        else:
            expl = (body[s + len(START) : e]).strip()
            if len(expl) < 60:
                ok = False
                messages.append(
                    f"Breaking change explanation too short ({len(expl)} characters). "
                    f"Provide rationale and migration steps (minimum 60 characters) "
                    f"between {START} and {END}."
109 | ) 110 | return {"ok": ok, "messages": messages} 111 | 112 | def main(): 113 | github_repository = os.getenv("GITHUB_REPOSITORY") 114 | github_token = os.getenv("GITHUB_TOKEN") 115 | pull_request_number = os.getenv("PULL_REQUEST_NUMBER") 116 | context = Context(github_token=github_token, github_repository=github_repository, pull_request_number=pull_request_number) 117 | result = _handle_validate(context) 118 | print(json.dumps(result)) 119 | sys.exit(0) 120 | 121 | if __name__ == "__main__": 122 | main() -------------------------------------------------------------------------------- /.github/workflows/release-flow.yml: -------------------------------------------------------------------------------- 1 | --- 2 | name: Release Flow 3 | 4 | on: 5 | pull_request: 6 | types: [closed] 7 | branches: [beta, latest] 8 | push: 9 | branches: [beta] 10 | 11 | permissions: 12 | contents: write 13 | pull-requests: read 14 | 15 | env: 16 | GITHUB_REPOSITORY: ${{ github.repository }} 17 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 18 | 19 | concurrency: 20 | group: release-flow-${{ github.ref }} 21 | cancel-in-progress: true 22 | 23 | jobs: 24 | detect-fork: 25 | name: Detect Fork 26 | runs-on: ubuntu-latest 27 | outputs: 28 | is_fork: ${{ steps.detect-fork-context.outputs.is_fork }} 29 | steps: 30 | - name: Checkout Actions 31 | id: checkout 32 | uses: actions/checkout@v6 33 | with: 34 | fetch-depth: 0 35 | 36 | - name: Detect Fork Context 37 | id: detect-fork-context 38 | uses: ./.github/actions/detect-fork 39 | 40 | prepare: 41 | name: Validate Environment 42 | runs-on: ubuntu-latest 43 | needs: detect-fork 44 | if: needs.detect-fork.outputs.is_fork == 'false' 45 | steps: 46 | - name: Validate Environment 47 | id: validate 48 | shell: bash 49 | run: | 50 | set -euo pipefail 51 | : "${GITHUB_REPOSITORY:?GITHUB_REPOSITORY is required}" 52 | : "${GITHUB_TOKEN:?GITHUB_TOKEN is required}" 53 | 54 | pr-merge: 55 | name: Pull Request Merge 56 | runs-on: ubuntu-latest 57 | needs: 
prepare 58 | if: > 59 | needs.prepare.result == 'success' && 60 | github.event_name == 'pull_request' && 61 | github.event.pull_request.merged == true && 62 | ( 63 | github.event.pull_request.base.ref == 'beta' || 64 | ( 65 | github.event.pull_request.base.ref == 'latest' && 66 | contains(github.event.pull_request.labels.*.name, 'stable-conversion') 67 | ) 68 | ) 69 | env: 70 | PULL_REQUEST_AUTHOR: ${{ github.event.pull_request.user.login }} 71 | PULL_REQUEST_BRANCH: ${{ github.event.pull_request.base.ref }} 72 | PULL_REQUEST_NUMBER: ${{ github.event.pull_request.number }} 73 | PULL_REQUEST_TITLE: ${{ github.event.pull_request.title }} 74 | steps: 75 | - name: Checkout Actions 76 | id: checkout 77 | uses: actions/checkout@v6 78 | with: 79 | fetch-depth: 0 80 | 81 | - name: Setup Node 24 82 | id: setup-node 83 | uses: actions/setup-node@v6 84 | with: 85 | node-version: 24 86 | 87 | - name: Update npm 88 | id: update-npm 89 | shell: bash 90 | run: | 91 | set -euo pipefail 92 | npm install -g npm@latest 93 | 94 | - name: Setup Python 3.12 95 | id: setup-python 96 | uses: actions/setup-python@v6 97 | with: 98 | python-version: '3.12' 99 | 100 | - name: Ensure jq 101 | id: ensure-jq 102 | uses: ./.github/actions/ensure-jq 103 | 104 | - name: Serialize Labels 105 | id: labels 106 | uses: ./.github/actions/serialize-labels 107 | with: 108 | labels_json: ${{ toJson(github.event.pull_request.labels.*.name) }} 109 | 110 | - name: Update CHANGELOG.md and Create/Update Draft Release 111 | id: update-release 112 | if: steps.labels.outcome == 'success' 113 | shell: bash 114 | env: 115 | MODE: pr-merge 116 | PULL_REQUEST_LABELS: ${{ steps.labels.outputs.path }} 117 | run: | 118 | set -euo pipefail 119 | python .github/scripts/release_manager.py 120 | 121 | manual-commit-push: 122 | name: Manual Commit Push 123 | runs-on: ubuntu-latest 124 | needs: prepare 125 | if: > 126 | needs.prepare.result == 'success' && 127 | github.event_name == 'push' && 128 | github.ref == 
'refs/heads/beta' && 129 | github.actor != 'github-actions[bot]' 130 | env: 131 | HEAD_AFTER: ${{ github.event.after }} 132 | HEAD_BEFORE: ${{ github.event.before }} 133 | steps: 134 | - name: Checkout Actions 135 | id: checkout 136 | uses: actions/checkout@v6 137 | with: 138 | fetch-depth: 0 139 | 140 | - name: Setup Node 24 141 | id: setup-node 142 | uses: actions/setup-node@v6 143 | with: 144 | node-version: 24 145 | 146 | - name: Update npm 147 | id: update-npm 148 | shell: bash 149 | run: | 150 | set -euo pipefail 151 | npm install -g npm@latest 152 | 153 | - name: Setup Python 3.12 154 | id: setup-python 155 | uses: actions/setup-python@v6 156 | with: 157 | python-version: '3.12' 158 | 159 | - name: Update CHANGELOG.md and Create/Update Draft Release 160 | id: update-release 161 | shell: bash 162 | env: 163 | MODE: commit-push 164 | run: | 165 | set -euo pipefail 166 | python .github/scripts/release_manager.py 167 | 168 | fork-notice: 169 | name: Fork Notice 170 | runs-on: ubuntu-latest 171 | needs: detect-fork 172 | if: needs.detect-fork.outputs.is_fork == 'true' 173 | steps: 174 | - name: Fork Notice 175 | id: fork-notice 176 | shell: bash 177 | run: | 178 | set -euo pipefail 179 | echo "::notice:: This workflow is running in a forked repository. All privileged steps have been skipped." 
180 | -------------------------------------------------------------------------------- /.github/workflows/release-publish.yml: -------------------------------------------------------------------------------- 1 | --- 2 | name: Release Publish 3 | 4 | on: 5 | release: 6 | types: [published] 7 | 8 | permissions: 9 | contents: write 10 | id-token: write 11 | security-events: write 12 | 13 | env: 14 | GITHUB_REPOSITORY: ${{ github.repository }} 15 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 16 | 17 | jobs: 18 | detect-fork: 19 | name: Detect Fork 20 | runs-on: ubuntu-latest 21 | outputs: 22 | is_fork: ${{ steps.detect-fork-context.outputs.is_fork }} 23 | steps: 24 | - name: Checkout Actions 25 | id: checkout 26 | uses: actions/checkout@v6 27 | with: 28 | fetch-depth: 0 29 | 30 | - name: Detect Fork Context 31 | id: detect-fork-context 32 | uses: ./.github/actions/detect-fork 33 | 34 | prepare: 35 | name: Validate Environment 36 | runs-on: ubuntu-latest 37 | needs: detect-fork 38 | if: needs.detect-fork.outputs.is_fork == 'false' 39 | steps: 40 | - name: Validate Environment 41 | id: validate 42 | shell: bash 43 | env: 44 | DISCORD_WEBHOOK_URL: ${{ secrets.DISCORD_WEBHOOK_URL }} 45 | run: | 46 | set -euo pipefail 47 | : "${GITHUB_TOKEN:?GITHUB_TOKEN is required}" 48 | : "${GITHUB_REPOSITORY:?GITHUB_REPOSITORY is required}" 49 | : "${DISCORD_WEBHOOK_URL:?DISCORD_WEBHOOK_URL is required}" 50 | 51 | finalize_release: 52 | name: Finalize CHANGELOG.md and Release 53 | runs-on: ubuntu-latest 54 | needs: prepare 55 | if: needs.prepare.result == 'success' 56 | steps: 57 | - name: Checkout Actions 58 | id: checkout 59 | uses: actions/checkout@v6 60 | with: 61 | fetch-depth: 0 62 | ref: ${{ github.event.release.target_commitish }} 63 | 64 | - name: Setup Node 24 65 | id: setup-node 66 | uses: actions/setup-node@v6 67 | with: 68 | node-version: 24 69 | 70 | - name: Update npm 71 | id: update-npm 72 | shell: bash 73 | run: | 74 | set -euo pipefail 75 | npm install -g npm@latest 76 | 
77 | - name: Setup Python 3.12 78 | id: setup-python 79 | uses: actions/setup-python@v6 80 | with: 81 | python-version: '3.12' 82 | 83 | - name: Finalize CHANGELOG.md and Release 84 | id: finalize-release 85 | shell: bash 86 | env: 87 | IS_BETA: ${{ github.event.release.prerelease == true }} 88 | MODE: finalize 89 | TAG: ${{ github.event.release.tag_name }} 90 | run: | 91 | set -euo pipefail 92 | python .github/scripts/release_manager.py 93 | 94 | build_lint_test: 95 | name: Build, Lint, and Test 96 | needs: finalize_release 97 | if: needs.finalize_release.result == 'success' 98 | uses: ./.github/workflows/shared-build-lint-test.yml 99 | 100 | codeql: 101 | name: CodeQL Analysis 102 | needs: build_lint_test 103 | if: needs.build_lint_test.result == 'success' 104 | uses: ./.github/workflows/shared-codeql.yml 105 | 106 | publish: 107 | name: Publish to NPM 108 | runs-on: ubuntu-latest 109 | needs: codeql 110 | if: needs.codeql.result == 'success' 111 | steps: 112 | - name: Checkout Actions 113 | id: checkout 114 | uses: actions/checkout@v6 115 | with: 116 | fetch-depth: 0 117 | ref: ${{ github.event.release.tag_name || github.ref_name }} 118 | 119 | - name: Setup Node 24 120 | id: setup-node 121 | uses: actions/setup-node@v6 122 | with: 123 | node-version: 24 124 | registry-url: https://registry.npmjs.org 125 | cache: npm 126 | 127 | - name: Update npm 128 | id: update-npm 129 | shell: bash 130 | run: | 131 | set -euo pipefail 132 | npm install -g npm@latest 133 | 134 | - name: Install Node Dependencies 135 | id: install-node-deps 136 | shell: bash 137 | run: | 138 | set -euo pipefail 139 | npm ci 140 | 141 | - name: Publish Package and Handle Rollback 142 | id: publish 143 | shell: bash 144 | env: 145 | NPM_TAG: ${{ github.event.release.prerelease && 'beta' || '' }} 146 | run: | 147 | set -euo pipefail 148 | python3 .github/scripts/release_publish.py 149 | 150 | - name: Notice 151 | id: notice 152 | shell: bash 153 | run: | 154 | set -euo pipefail 155 | echo 
"::notice::Published on npm - ${{ steps.publish.outputs.NPM_VERSION }}" 156 | 157 | notify_discord: 158 | name: Discord Webhooks 159 | needs: publish 160 | if: needs.publish.result == 'success' 161 | uses: ./.github/workflows/shared-discord-notify.yml 162 | with: 163 | title: ${{ github.event.release.prerelease && 'Kasa Python Beta Release' || 'Kasa Python Release' }} 164 | secrets: 165 | discord_webhook: ${{ secrets.DISCORD_WEBHOOK_URL }} 166 | 167 | fork-notice: 168 | name: Fork Notice 169 | runs-on: ubuntu-latest 170 | needs: detect-fork 171 | if: needs.detect-fork.outputs.is_fork == 'true' 172 | steps: 173 | - name: Fork Notice 174 | id: fork-notice 175 | shell: bash 176 | run: | 177 | set -euo pipefail 178 | echo "::notice:: This workflow is running in a forked repository. All privileged steps have been skipped." 179 | -------------------------------------------------------------------------------- /src/devices/index.ts: -------------------------------------------------------------------------------- 1 | import { PlatformAccessoryEvent } from 'homebridge'; 2 | import { 3 | Categories, 4 | Characteristic, 5 | CharacteristicValue, 6 | HapStatusError, 7 | Logger, 8 | Nullable, 9 | PlatformAccessory, 10 | Service, 11 | WithUUID, 12 | } from 'homebridge'; 13 | 14 | import AccessoryInformation from '../accessoryInformation.js'; 15 | import DeviceManager from './deviceManager.js'; 16 | import { prefixLogger } from '../utils.js'; 17 | import type KasaPythonPlatform from '../platform.js'; 18 | import type { KasaDevice, ChildDevice } from './kasaDevices.js'; 19 | import type { KasaPythonAccessoryContext } from '../platform.js'; 20 | 21 | export default abstract class HomeKitDevice { 22 | readonly log: Logger; 23 | protected deviceManager: DeviceManager | undefined; 24 | homebridgeAccessory: PlatformAccessory; 25 | public isUpdating: boolean = false; 26 | 27 | constructor( 28 | readonly platform: KasaPythonPlatform, 29 | public kasaDevice: KasaDevice, 30 | readonly 
category: Categories, 31 | readonly categoryName: string, 32 | ) { 33 | this.deviceManager = platform.deviceManager; 34 | this.log = prefixLogger(platform.log, `[${this.name}]`); 35 | this.homebridgeAccessory = this.initializeAccessory(); 36 | this.homebridgeAccessory.on(PlatformAccessoryEvent.IDENTIFY, () => this.identify()); 37 | } 38 | 39 | private initializeAccessory(): PlatformAccessory { 40 | const uuid = this.platform.api.hap.uuid.generate(this.id); 41 | const homebridgeAccessory = this.platform.configuredAccessories.get(uuid); 42 | let platformAccessory: PlatformAccessory; 43 | if (!homebridgeAccessory) { 44 | this.log.debug(`Creating new Platform Accessory [${this.id}] [${uuid}] category: ${this.categoryName}`); 45 | platformAccessory = new this.platform.api.platformAccessory(this.name, uuid, this.category); 46 | platformAccessory.context.deviceId = this.id; 47 | platformAccessory.context.lastSeen = this.kasaDevice.last_seen; 48 | platformAccessory.context.offline = this.kasaDevice.offline; 49 | this.platform.registerPlatformAccessory(platformAccessory); 50 | } else { 51 | this.log.debug(`Existing Platform Accessory found [${homebridgeAccessory.context.deviceId}] ` + 52 | `[${homebridgeAccessory.UUID}] category: ${this.categoryName}`); 53 | platformAccessory = homebridgeAccessory; 54 | this.updateAccessory(platformAccessory); 55 | } 56 | const accInfo = AccessoryInformation(this.platform.api.hap)(platformAccessory, this); 57 | if (!accInfo) { 58 | this.log.error('Could not retrieve default AccessoryInformation'); 59 | } 60 | return platformAccessory; 61 | } 62 | 63 | private updateAccessory(platformAccessory: PlatformAccessory): void { 64 | this.correctAccessoryProperties(platformAccessory, { 65 | displayName: this.name, 66 | category: this.category, 67 | context: { 68 | deviceId: this.id, 69 | lastSeen: this.kasaDevice.last_seen, 70 | offline: this.kasaDevice.offline, 71 | }, 72 | }); 73 | this.platform.configuredAccessories.set(platformAccessory.UUID, 
platformAccessory); 74 | this.platform.api.updatePlatformAccessories([platformAccessory]); 75 | } 76 | 77 | private correctAccessoryProperties(obj: T, properties: Partial): void { 78 | for (const [key, expectedValue] of Object.entries(properties)) { 79 | if (obj[key as keyof T] !== expectedValue) { 80 | this.log.debug(`Correcting Platform Accessory ${key} from: ${String(obj[key as keyof T])} to: ${String(expectedValue)}`); 81 | obj[key as keyof T] = expectedValue as T[keyof T]; 82 | } 83 | } 84 | } 85 | 86 | get id(): string { 87 | return this.kasaDevice.sys_info.device_id; 88 | } 89 | 90 | get name(): string { 91 | return this.kasaDevice.sys_info.alias; 92 | } 93 | 94 | get manufacturer(): string { 95 | return 'TP-Link'; 96 | } 97 | 98 | get model(): string { 99 | return `${this.kasaDevice.sys_info.model} ${this.kasaDevice.sys_info.hw_ver}`; 100 | } 101 | 102 | get serialNumber(): string { 103 | return this.kasaDevice.sys_info.mac; 104 | } 105 | 106 | get firmwareRevision(): string { 107 | return this.kasaDevice.sys_info.sw_ver; 108 | } 109 | 110 | protected extractChildIndex(child: ChildDevice): number { 111 | const id = String(child.id ?? ''); 112 | const match = id.match(/(\d{1,2})$/); 113 | return match ? parseInt(match[1], 10) : 0; 114 | } 115 | 116 | abstract identify(): void; 117 | 118 | abstract initialize(): Promise; 119 | 120 | abstract updateAfterPeriodicDiscovery(): void; 121 | 122 | abstract startPolling(): Promise; 123 | 124 | abstract stopPolling(): Promise; 125 | 126 | addService(serviceConstructor: WithUUID, name: string, subType?: string): Service { 127 | const serviceName = this.platform.getServiceName(serviceConstructor); 128 | this.log.debug(`Creating new ${serviceName} Service on ${name}${subType ? ` [${subType}]` : ''}`); 129 | return this.homebridgeAccessory.addService(serviceConstructor, name, subType ? 
subType : ''); 130 | } 131 | 132 | updateValue( 133 | service: Service, 134 | characteristic: Characteristic, 135 | deviceAlias: string, 136 | value: Nullable | Error | HapStatusError, 137 | ): void { 138 | this.log.info(`Updating ${this.platform.lsc(service, characteristic)} on ${deviceAlias} to ${value}`); 139 | characteristic.updateValue(value); 140 | } 141 | } -------------------------------------------------------------------------------- /.github/workflows/shared-dependabot-auto-merge.yml: -------------------------------------------------------------------------------- 1 | --- 2 | name: Dependabot Auto-Merge 3 | 4 | on: 5 | workflow_call: {} 6 | 7 | permissions: 8 | contents: write 9 | issues: write 10 | pull-requests: write 11 | 12 | env: 13 | GITHUB_REPOSITORY: ${{ github.repository }} 14 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 15 | PULL_REQUEST_AUTHOR: ${{ github.event.pull_request.user.login }} 16 | PULL_REQUEST_BRANCH: ${{ github.event.pull_request.base.ref }} 17 | PULL_REQUEST_NUMBER: ${{ github.event.pull_request.number }} 18 | PULL_REQUEST_TITLE: ${{ github.event.pull_request.title }} 19 | PULL_REQUEST_URL: ${{ github.event.pull_request.html_url }} 20 | 21 | concurrency: 22 | group: dependabot-auto-merge-${{ github.event.pull_request.number }} 23 | cancel-in-progress: true 24 | 25 | jobs: 26 | auto-merge: 27 | name: Auto-Merge Dependabot Pull Request 28 | runs-on: ubuntu-latest 29 | outputs: 30 | is_merged: ${{ steps.merge_output.outputs.is_merged }} 31 | steps: 32 | - name: Checkout Actions 33 | id: checkout 34 | uses: actions/checkout@v6 35 | with: 36 | fetch-depth: 0 37 | 38 | - name: Ensure jq 39 | id: ensure-jq 40 | uses: ./.github/actions/ensure-jq 41 | 42 | - name: Determine Safety from Labels 43 | id: safety 44 | shell: bash 45 | run: | 46 | set -euo pipefail 47 | LABELS="$(gh api repos/"$GITHUB_REPOSITORY"/issues/"$PULL_REQUEST_NUMBER"/labels --jq '.[].name' || true)" 48 | block=true 49 | if ! 
printf '%s\n' "$LABELS" | grep -qi '^dependency-major$'; then 50 | block=false 51 | fi 52 | echo "block=$block" >> "$GITHUB_OUTPUT" 53 | 54 | - name: Build Sticky Comment 55 | id: build_sticky 56 | shell: bash 57 | run: | 58 | set -euo pipefail 59 | if [ "${{ steps.safety.outputs.block }}" = "false" ]; then 60 | { 61 | echo "✅ Dependabot Update is Safe and will be Auto-Merged." 62 | echo 63 | echo "" 64 | } > body.txt 65 | else 66 | { 67 | echo "⚠️ Dependabot Update Requires Manual Review." 68 | echo 69 | echo "" 70 | } > body.txt 71 | fi 72 | 73 | - name: Upsert Sticky Comment 74 | id: upsert_sticky 75 | uses: ./.github/actions/sticky-comment 76 | with: 77 | number: ${{ env.PULL_REQUEST_NUMBER }} 78 | marker: "" 79 | body_file: body.txt 80 | 81 | - name: Auto-Merge Pull Request 82 | id: auto_merge 83 | if: steps.safety.outcome == 'success' && steps.safety.outputs.block == 'false' 84 | continue-on-error: true 85 | shell: bash 86 | run: | 87 | set -euo pipefail 88 | gh pr merge --auto --squash "$PULL_REQUEST_URL" 89 | 90 | - name: Set Merge Result Output 91 | id: merge_output 92 | if: always() 93 | shell: bash 94 | run: | 95 | set -euo pipefail 96 | if [ "${{ steps.auto_merge.outcome }}" = "success" ]; then 97 | echo "is_merged=true" >> "$GITHUB_OUTPUT" 98 | else 99 | echo "is_merged=false" >> "$GITHUB_OUTPUT" 100 | fi 101 | 102 | post-merge-release: 103 | name: Update CHANGELOG.md and Release from Dependabot Merge 104 | runs-on: ubuntu-latest 105 | needs: auto-merge 106 | if: needs.auto-merge.outputs.is_merged == 'true' 107 | steps: 108 | - name: Checkout Actions 109 | id: checkout 110 | uses: actions/checkout@v6 111 | with: 112 | fetch-depth: 0 113 | 114 | - name: Setup Node 24 115 | id: setup-node 116 | uses: actions/setup-node@v6 117 | with: 118 | node-version: 24 119 | 120 | - name: Update npm 121 | id: update-npm 122 | shell: bash 123 | run: | 124 | set -euo pipefail 125 | npm install -g npm@latest 126 | 127 | - name: Setup Python 3.12 128 | id: setup-python 
129 | uses: actions/setup-python@v6 130 | with: 131 | python-version: '3.12' 132 | 133 | - name: Ensure jq 134 | id: ensure-jq 135 | uses: ./.github/actions/ensure-jq 136 | 137 | - name: Fetch current labels for Pull Request 138 | id: fetch-labels 139 | shell: bash 140 | run: | 141 | set -euo pipefail 142 | labels_json="$(gh api repos/"$GITHUB_REPOSITORY"/issues/"$PULL_REQUEST_NUMBER"/labels --jq '.[].name' | jq -R . | jq -s .)" 143 | { 144 | echo 'labels<> "$GITHUB_OUTPUT" 148 | 149 | - name: Serialize Labels 150 | id: labels 151 | uses: ./.github/actions/serialize-labels 152 | with: 153 | labels_json: ${{ steps.fetch-labels.outputs.labels }} 154 | 155 | - name: Sync Local beta, Update CHANGELOG.md, and Create/Update Release 156 | id: sync-update-release 157 | shell: bash 158 | env: 159 | MODE: pr-merge 160 | PULL_REQUEST_LABELS: ${{ steps.labels.outputs.path }} 161 | run: | 162 | set -euo pipefail 163 | MAX_ATTEMPTS=${MAX_ATTEMPTS:-5} 164 | BASE_SLEEP=${BASE_SLEEP:-5} 165 | JITTER=${JITTER:-10} 166 | attempt=1 167 | echo "::notice::Starting sync+release_manager loop (max ${MAX_ATTEMPTS} attempts)" 168 | while [ "$attempt" -le "$MAX_ATTEMPTS" ]; do 169 | echo "::notice::Attempt ${attempt} of ${MAX_ATTEMPTS}" 170 | echo "::notice::Syncing local beta -> origin/beta" 171 | git remote set-url origin "https://x-access-token:${GITHUB_TOKEN}@github.com/${GITHUB_REPOSITORY}.git" 172 | if ! 
git fetch origin beta --depth=1; then 173 | echo "::warning::git fetch failed on attempt ${attempt}" 174 | FETCH_OK=false 175 | else 176 | FETCH_OK=true 177 | fi 178 | if [ "$FETCH_OK" = "true" ]; then 179 | git checkout -B beta origin/beta || true 180 | git reset --hard origin/beta || true 181 | git clean -fd || true 182 | fi 183 | echo "::notice::Running release_manager.py (MODE=${MODE})" 184 | if python .github/scripts/release_manager.py; then 185 | echo "::notice::release_manager.py succeeded on attempt ${attempt}" 186 | exit 0 187 | else 188 | echo "::warning::release_manager.py failed on attempt ${attempt}" 189 | fi 190 | if [ "$attempt" -lt "$MAX_ATTEMPTS" ]; then 191 | sleep_sec=$(( BASE_SLEEP * attempt + (RANDOM % JITTER) )) 192 | echo "::notice::Retrying in ${sleep_sec}s (including jitter) ..." 193 | sleep "${sleep_sec}" 194 | fi 195 | attempt=$(( attempt + 1 )) 196 | done 197 | echo "::error::All ${MAX_ATTEMPTS} attempts to sync+update CHANGELOG failed. Manual intervention required." 
198 | exit 1 199 | -------------------------------------------------------------------------------- /.github/workflows/label-and-validate-issue.yml: -------------------------------------------------------------------------------- 1 | --- 2 | name: Label & Validate Issues 3 | 4 | on: 5 | issues: 6 | types: [opened, edited, reopened] 7 | 8 | permissions: 9 | contents: read 10 | issues: write 11 | 12 | env: 13 | GITHUB_REPOSITORY: ${{ github.repository }} 14 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 15 | ISSUE_NUMBER: ${{ github.event.issue.number }} 16 | 17 | concurrency: 18 | group: label-and-validate-issue-${{ github.event.issue.number }} 19 | cancel-in-progress: true 20 | 21 | jobs: 22 | detect-fork: 23 | name: Detect Fork 24 | runs-on: ubuntu-latest 25 | outputs: 26 | is_fork: ${{ steps.detect-fork-context.outputs.is_fork }} 27 | steps: 28 | - name: Checkout Actions 29 | id: checkout 30 | uses: actions/checkout@v6 31 | with: 32 | fetch-depth: 0 33 | 34 | - name: Detect Fork Context 35 | id: detect-fork-context 36 | uses: ./.github/actions/detect-fork 37 | 38 | prepare: 39 | name: Validate Environment 40 | runs-on: ubuntu-latest 41 | needs: detect-fork 42 | if: needs.detect-fork.outputs.is_fork == 'false' 43 | steps: 44 | - name: Validate Environment 45 | id: validate 46 | shell: bash 47 | run: | 48 | set -euo pipefail 49 | : "${GITHUB_REPOSITORY:?GITHUB_REPOSITORY is required}" 50 | : "${GITHUB_TOKEN:?GITHUB_TOKEN is required}" 51 | : "${ISSUE_NUMBER:?ISSUE_NUMBER is required}" 52 | 53 | classify: 54 | name: Classify Issue 55 | runs-on: ubuntu-latest 56 | needs: prepare 57 | if: needs.prepare.result == 'success' && github.actor != 'github-actions[bot]' 58 | outputs: 59 | applied_label: ${{ steps.classify.outputs.applied_label }} 60 | needs_info: ${{ steps.classify.outputs.needs_info }} 61 | steps: 62 | - name: Checkout Actions 63 | id: checkout 64 | uses: actions/checkout@v6 65 | with: 66 | fetch-depth: 0 67 | 68 | - name: Ensure jq 69 | id: ensure-jq 70 | 
uses: ./.github/actions/ensure-jq 71 | 72 | - name: Classify 73 | id: classify 74 | shell: bash 75 | env: 76 | MODE: classify 77 | run: | 78 | set -euo pipefail 79 | RESULT="$(python3 .github/scripts/issue_manager.py)" 80 | echo "Raw classification: $RESULT" 81 | APPLIED="$(echo "$RESULT" | jq -r '.applied_label // empty')" 82 | NEEDS="$(echo "$RESULT" | jq -r '.needs_info // false')" 83 | echo "applied_label=$APPLIED" >> "$GITHUB_OUTPUT" 84 | echo "needs_info=$NEEDS" >> "$GITHUB_OUTPUT" 85 | 86 | label: 87 | name: Clear & Apply Labels 88 | runs-on: ubuntu-latest 89 | needs: classify 90 | if: needs.classify.result == 'success' 91 | steps: 92 | - name: Checkout Actions 93 | id: checkout 94 | uses: actions/checkout@v6 95 | with: 96 | fetch-depth: 0 97 | 98 | - name: Clear Labels 99 | id: clear-labels 100 | uses: ./.github/actions/clear-labels 101 | with: 102 | number: ${{ env.ISSUE_NUMBER }} 103 | 104 | - name: Apply Label 105 | id: apply-label 106 | shell: bash 107 | run: | 108 | set -euo pipefail 109 | LABEL="${{ needs.classify.outputs.applied_label }}" 110 | if [ -n "$LABEL" ]; then 111 | gh issue edit ${{ env.ISSUE_NUMBER }} --add-label "$LABEL" || true 112 | fi 113 | 114 | - name: Apply or Remove needs-info Label 115 | id: apply-remove-needs-info 116 | shell: bash 117 | run: | 118 | set -euo pipefail 119 | NEEDS="${{ needs.classify.outputs.needs_info }}" 120 | CURRENT_LABELS="$(gh api repos/"$GITHUB_REPOSITORY"/issues/"$ISSUE_NUMBER"/labels --jq '.[].name' || true)" 121 | has_needs_info=false 122 | if printf '%s\n' "$CURRENT_LABELS" | grep -qi '^needs-info$'; then 123 | has_needs_info=true 124 | fi 125 | if [ "$NEEDS" = "true" ]; then 126 | gh issue edit "$ISSUE_NUMBER" --add-label needs-info || true 127 | elif [ "$NEEDS" = "false" ] && [ "$has_needs_info" = "true" ]; then 128 | gh issue edit "$ISSUE_NUMBER" --remove-label needs-info || true 129 | fi 130 | 131 | validate-issue: 132 | name: Validate Issue 133 | runs-on: ubuntu-latest 134 | needs: label 135 | if: 
needs.label.result == 'success' 136 | steps: 137 | - name: Checkout Actions 138 | id: checkout 139 | uses: actions/checkout@v6 140 | with: 141 | fetch-depth: 0 142 | 143 | - name: Ensure jq 144 | id: ensure-jq 145 | uses: ./.github/actions/ensure-jq 146 | 147 | - name: Validate Issue 148 | id: validate-issue 149 | shell: bash 150 | env: 151 | MODE: validate 152 | run: | 153 | set -euo pipefail 154 | RESULT="$(python3 .github/scripts/issue_manager.py)" 155 | echo "Raw validation: $RESULT" 156 | OK="$(echo "$RESULT" | jq -r '.ok // false')" 157 | MSGS="$(echo "$RESULT" | jq -c '.messages // []')" 158 | echo "ok=$OK" >> "$GITHUB_OUTPUT" 159 | echo "messages=$MSGS" >> "$GITHUB_OUTPUT" 160 | 161 | - name: Build Validation Sticky 162 | id: build-validation-sticky 163 | shell: bash 164 | run: | 165 | set -euo pipefail 166 | MARK="" 167 | OK="${{ steps.validate-issue.outputs.ok }}" 168 | MSGS_JSON='${{ steps.validate-issue.outputs.messages }}' 169 | if [ -n "$MSGS_JSON" ] && [ "$MSGS_JSON" != "null" ]; then 170 | VALMSGS="$(printf '%s' "$MSGS_JSON" | jq -r '.[]?')" 171 | else 172 | VALMSGS="" 173 | fi 174 | if [ "$OK" != "true" ]; then 175 | { 176 | echo "❌ Issue validation failed." 177 | echo 178 | if [ -n "$VALMSGS" ]; then 179 | echo "Details:" 180 | echo '```' 181 | printf '%s\n' "$VALMSGS" 182 | echo '```' 183 | echo 184 | fi 185 | echo "Please update the issue with the requested information." 186 | echo 187 | echo "$MARK" 188 | } > body.txt 189 | else 190 | { 191 | echo "✅ Issue classified, labeled, and validated successfully!" 192 | echo 193 | echo "Thank you. This issue has the required information and labels." 
194 | echo 195 | echo "$MARK" 196 | } > body.txt 197 | fi 198 | 199 | - name: Upsert Validation Sticky 200 | id: upsert-validation-sticky 201 | uses: ./.github/actions/sticky-comment 202 | with: 203 | number: ${{ env.ISSUE_NUMBER }} 204 | marker: "" 205 | body_file: body.txt 206 | 207 | fork-notice: 208 | name: Fork Notice 209 | runs-on: ubuntu-latest 210 | needs: detect-fork 211 | if: needs.detect-fork.outputs.is_fork == 'true' 212 | steps: 213 | - name: Fork Notice 214 | id: fork-notice 215 | shell: bash 216 | run: | 217 | set -euo pipefail 218 | echo "::notice:: This workflow is running in a forked repository. All privileged steps have been skipped." 219 | -------------------------------------------------------------------------------- /src/config.ts: -------------------------------------------------------------------------------- 1 | import { isObjectLike } from './utils.js'; 2 | import type { ConfigDevice } from './devices/kasaDevices.js'; 3 | 4 | export class ConfigParseError extends Error { 5 | constructor( 6 | message: string, 7 | public errors?: string[] | null, 8 | public unknownError?: unknown, 9 | ) { 10 | super(message); 11 | this.name = 'ConfigParseError'; 12 | this.message = this.formatMessage(message, errors, unknownError); 13 | Error.captureStackTrace(this, this.constructor); 14 | } 15 | 16 | private formatMessage( 17 | message: string, 18 | errors?: string[] | null, 19 | unknownError?: unknown, 20 | ): string { 21 | let formattedMessage = message; 22 | if (errors && errors.length > 0) { 23 | const errorsAsString = errors.join('\n'); 24 | formattedMessage += `:\n${errorsAsString}`; 25 | } 26 | if (unknownError instanceof Error) { 27 | formattedMessage += `\nAdditional Error: ${unknownError.message}`; 28 | } else if (unknownError) { 29 | formattedMessage += `\nAdditional Error: [Error details not available: ${unknownError}]`; 30 | } 31 | return formattedMessage; 32 | } 33 | } 34 | 35 | export interface KasaPythonConfigInput { 36 | name?: string; 37 
| enableCredentials?: boolean; 38 | username?: string; 39 | password?: string; 40 | hideHomeKitMatter?: boolean; 41 | pollingInterval?: number; 42 | discoveryPollingInterval?: number; 43 | offlineInterval?: number; 44 | additionalBroadcasts?: string[]; 45 | manualDevices?: (string | ConfigDevice)[]; 46 | excludeMacAddresses?: string[]; 47 | includeMacAddresses?: string[]; 48 | waitTimeUpdate?: number; 49 | pythonPath?: string; 50 | advancedPythonLogging?: boolean; 51 | } 52 | 53 | export type KasaPythonConfig = { 54 | name: string; 55 | enableCredentials: boolean; 56 | username: string; 57 | password: string; 58 | homekitOptions: { 59 | hideHomeKitMatter: boolean; 60 | }; 61 | discoveryOptions: { 62 | pollingInterval: number; 63 | discoveryPollingInterval: number; 64 | offlineInterval: number; 65 | additionalBroadcasts: string[]; 66 | manualDevices: ConfigDevice[]; 67 | excludeMacAddresses: string[]; 68 | includeMacAddresses: string[]; 69 | }; 70 | advancedOptions: { 71 | waitTimeUpdate: number; 72 | pythonPath?: string; 73 | advancedPythonLogging: boolean; 74 | }; 75 | }; 76 | 77 | export const defaultConfig: KasaPythonConfig = { 78 | name: 'kasa-python', 79 | enableCredentials: false, 80 | username: '', 81 | password: '', 82 | homekitOptions: { 83 | hideHomeKitMatter: true, 84 | }, 85 | discoveryOptions: { 86 | pollingInterval: 5, 87 | discoveryPollingInterval: 300, 88 | offlineInterval: 7, 89 | additionalBroadcasts: [], 90 | manualDevices: [], 91 | excludeMacAddresses: [], 92 | includeMacAddresses: [], 93 | }, 94 | advancedOptions: { 95 | waitTimeUpdate: 100, 96 | pythonPath: '', 97 | advancedPythonLogging: false, 98 | }, 99 | }; 100 | 101 | function convertManualDevices(manualDevices: (string | ConfigDevice)[] | undefined | null): ConfigDevice[] { 102 | if (!manualDevices || manualDevices.length === 0) { 103 | return []; 104 | } 105 | 106 | return manualDevices.map(device => { 107 | if (typeof device === 'string') { 108 | return { host: device, alias: 'Will Be 
Filled By Plug-In Automatically' }; 109 | } else if ('breakoutChildDevices' in device) { 110 | delete device.breakoutChildDevices; 111 | } else if ('host' in device && !('alias' in device)) { 112 | (device as ConfigDevice).alias = 'Will Be Filled By Plug-In Automatically'; 113 | } 114 | return device; 115 | }); 116 | } 117 | 118 | function validateConfig(config: Record): string[] { 119 | const errors: string[] = []; 120 | 121 | validateType(config, 'name', 'string', errors); 122 | validateType(config, 'enableCredentials', 'boolean', errors); 123 | validateType(config, 'username', 'string', errors); 124 | validateType(config, 'password', 'string', errors); 125 | validateType(config, 'hideHomeKitMatter', 'boolean', errors); 126 | validateType(config, 'pollingInterval', 'number', errors); 127 | validateType(config, 'discoveryPollingInterval', 'number', errors); 128 | validateType(config, 'offlineInterval', 'number', errors); 129 | 130 | if (config.additionalBroadcasts !== undefined && !Array.isArray(config.additionalBroadcasts)) { 131 | errors.push('`additionalBroadcasts` should be an array of strings.'); 132 | } 133 | 134 | if (config.manualDevices !== undefined && !Array.isArray(config.manualDevices)) { 135 | errors.push('`manualDevices` should be an array.'); 136 | } 137 | 138 | if (config.excludeMacAddresses !== undefined && !Array.isArray(config.excludeMacAddresses)) { 139 | errors.push('`excludeMacAddresses` should be an array of strings.'); 140 | } 141 | 142 | if (config.includeMacAddresses !== undefined && !Array.isArray(config.includeMacAddresses)) { 143 | errors.push('`includeMacAddresses` should be an array of strings.'); 144 | } 145 | 146 | validateType(config, 'waitTimeUpdate', 'number', errors); 147 | validateType(config, 'pythonPath', 'string', errors); 148 | validateType(config, 'advancedPythonLogging', 'boolean', errors); 149 | 150 | return errors; 151 | } 152 | 153 | function validateType( 154 | config: Record, 155 | key: string, 156 | expectedType: 
string, 157 | errors: string[], 158 | ) { 159 | if (config[key] !== undefined && typeof config[key] !== expectedType) { 160 | errors.push(`\`${key}\` should be a ${expectedType}.`); 161 | } 162 | } 163 | 164 | export function parseConfig(config: Record): KasaPythonConfig { 165 | const errors = validateConfig(config); 166 | if (errors.length > 0) { 167 | throw new ConfigParseError('Error parsing config', errors); 168 | } 169 | 170 | if (!isObjectLike(config)) { 171 | throw new ConfigParseError('Error parsing config'); 172 | } 173 | 174 | const c = { ...defaultConfig, ...config } as KasaPythonConfigInput; 175 | 176 | return { 177 | name: c.name ?? defaultConfig.name, 178 | enableCredentials: c.enableCredentials ?? defaultConfig.enableCredentials, 179 | username: c.username ?? defaultConfig.username, 180 | password: c.password ?? defaultConfig.password, 181 | homekitOptions: { 182 | hideHomeKitMatter: c.hideHomeKitMatter ?? defaultConfig.homekitOptions.hideHomeKitMatter, 183 | }, 184 | discoveryOptions: { 185 | pollingInterval: (c.pollingInterval ?? defaultConfig.discoveryOptions.pollingInterval) * 1000, 186 | discoveryPollingInterval: (c.discoveryPollingInterval ?? defaultConfig.discoveryOptions.discoveryPollingInterval) * 1000, 187 | offlineInterval: (c.offlineInterval ?? defaultConfig.discoveryOptions.offlineInterval) * 24 * 60 * 60 * 1000, 188 | additionalBroadcasts: c.additionalBroadcasts ?? defaultConfig.discoveryOptions.additionalBroadcasts, 189 | manualDevices: c.manualDevices ? convertManualDevices(c.manualDevices) : defaultConfig.discoveryOptions.manualDevices, 190 | excludeMacAddresses: c.excludeMacAddresses ?? defaultConfig.discoveryOptions.excludeMacAddresses, 191 | includeMacAddresses: c.includeMacAddresses ?? defaultConfig.discoveryOptions.includeMacAddresses, 192 | }, 193 | advancedOptions: { 194 | waitTimeUpdate: c.waitTimeUpdate ?? defaultConfig.advancedOptions.waitTimeUpdate, 195 | pythonPath: c.pythonPath ?? 
defaultConfig.advancedOptions.pythonPath, 196 | advancedPythonLogging: c.advancedPythonLogging ?? defaultConfig.advancedOptions.advancedPythonLogging, 197 | }, 198 | }; 199 | } -------------------------------------------------------------------------------- /.github/scripts/issue_manager.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | """ 3 | Single-script issue handler with two modes: 4 | 5 | - MODE=classify: 6 | * Determine canonical label (bug/enhancement/question/breaking-change/docs/dependency/internal/workflow) 7 | * Decide needs-info based on template completion and body length. 8 | 9 | - MODE=validate: 10 | * Enforce presence of a classification label 11 | * Fail if needs-info is still present or minimal content is missing 12 | * Returns messages used by the workflow to build a sticky comment 13 | """ 14 | import json 15 | import os 16 | import re 17 | import sys 18 | 19 | SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__)) 20 | if SCRIPT_DIR not in sys.path: 21 | sys.path.insert(0, SCRIPT_DIR) 22 | import common 23 | 24 | from common import Context 25 | 26 | MAP = { 27 | "bug": "bug", 28 | "fix": "bug", 29 | "feature": "enhancement", 30 | "enhancement": "enhancement", 31 | "support": "question", 32 | "question": "question", 33 | "breaking change": "breaking-change", 34 | "breaking-change": "breaking-change", 35 | "breaking": "breaking-change", 36 | "docs": "docs", 37 | "documentation": "docs", 38 | "dependency": "dependency", 39 | "dependencies": "dependency", 40 | "internal": "internal", 41 | "workflow": "workflow", 42 | "ci": "workflow", 43 | "housekeeping": "internal", 44 | "chore": "internal", 45 | } 46 | CLASSIFICATION = { 47 | "bug", 48 | "enhancement", 49 | "question", 50 | "breaking-change", 51 | "docs", 52 | "dependency", 53 | "internal", 54 | "workflow", 55 | } 56 | 57 | def _section(body: str, title: str) -> str: 58 | pat = 
rf"(?is)^###\s*{re.escape(title)}\s*$\n(.*?)(?=^###\s|\Z)" 59 | mm = re.search(pat, body, re.MULTILINE) 60 | return (mm.group(1).strip() if mm else "").strip() 61 | 62 | def _guess_kind(body: str) -> str: 63 | m = re.search(r"(?im)^###\s*Type\s*$", body) 64 | if m: 65 | lines = body.splitlines() 66 | try: 67 | idx = next( 68 | i 69 | for i, l in enumerate(lines) 70 | if re.match(r"(?im)^###\s*Type\s*$", l) 71 | ) 72 | for j in range(idx + 1, len(lines)): 73 | candidate = lines[j].strip() 74 | if candidate: 75 | return candidate.lower() 76 | except StopIteration: 77 | pass 78 | lower = body.lower() 79 | if any(k in lower for k in ("traceback", "error", "stack")): 80 | return "bug" 81 | if "migration" in lower and "break" in lower: 82 | return "breaking change" 83 | if "feature" in lower or "enhancement" in lower: 84 | return "feature" 85 | if any(k in lower for k in ("docs", "documentation", "readme")): 86 | return "docs" 87 | if "dependen" in lower: 88 | return "dependency" 89 | if any(k in lower for k in ("support", "help", "question")): 90 | return "support" 91 | if any(k in lower for k in ("workflow", "github actions", "ci", "pipeline")): 92 | return "workflow" 93 | if any(k in lower for k in ("internal", "housekeeping", "chore")): 94 | return "internal" 95 | return "" 96 | 97 | def _first_existing_classification(labels: list[str]) -> str: 98 | for l in labels: 99 | ll = (l or "").lower() 100 | if ll in CLASSIFICATION: 101 | return ll 102 | return "" 103 | 104 | def _handle_classify(context: Context) -> dict: 105 | code, issue = common.github_api(context.github_repository, context.github_token, f"/issues/{context.issue_number}") 106 | if code != 200 or not isinstance(issue, dict): 107 | return { 108 | "ok": True, 109 | "applied_label": "", 110 | "needs_info": True, 111 | } 112 | body = issue.get("body") or "" 113 | current_labels = [(l.get("name") or "") for l in issue.get("labels", [])] 114 | kind = _guess_kind(body) 115 | canonical = MAP.get(kind, "") 116 | if 
not canonical: 117 | existing = _first_existing_classification(current_labels) 118 | if existing: 119 | canonical = existing 120 | needs_info = False 121 | if canonical == "bug": 122 | env = _section(body, "Environment") 123 | details = _section(body, "Details") 124 | if len(env) < 15: 125 | needs_info = True 126 | if not re.search(r"\b(step|reproduce|expected|actual)\b", details, re.IGNORECASE): 127 | needs_info = True 128 | elif canonical == "breaking-change": 129 | migration = _section(body, "Migration Strategy") 130 | details = _section(body, "Details") 131 | if len(migration) < 30: 132 | needs_info = True 133 | if not re.search(r"(impact|rationale|break)", details, re.IGNORECASE): 134 | needs_info = True 135 | elif canonical in ("enhancement", "question", "docs", "dependency", "internal", "workflow"): 136 | if not body or len(body.strip()) < 20: 137 | needs_info = True 138 | if not canonical: 139 | needs_info = True 140 | return { 141 | "ok": True, 142 | "applied_label": canonical, 143 | "needs_info": needs_info, 144 | } 145 | 146 | def _handle_validate(context: Context) -> dict: 147 | code, issue = common.github_api(context.github_repository, context.github_token, f"/issues/{context.issue_number}") 148 | if code != 200 or not isinstance(issue, dict): 149 | return { 150 | "ok": False, 151 | "messages": [f"Unable to fetch issue #{context.issue_number} (status {code})."], 152 | } 153 | body = issue.get("body") or "" 154 | labels = [(l.get("name") or "").lower() for l in issue.get("labels", [])] 155 | ok = True 156 | messages: list[str] = [] 157 | if not any(l in CLASSIFICATION for l in labels): 158 | ok = False 159 | messages.append( 160 | "Missing classification label. Need one of: " 161 | + ", ".join(sorted(CLASSIFICATION)) 162 | + f". 
Current: {', '.join(labels)}" 163 | ) 164 | if "breaking-change" in labels: 165 | migration = _section(body, "Migration Strategy") 166 | details = _section(body, "Details") 167 | if len(migration) < 30: 168 | ok = False 169 | messages.append("Breaking change: Migration Strategy too short (<30 chars).") 170 | if not re.search(r"(impact|rationale|break)", details, re.IGNORECASE): 171 | ok = False 172 | messages.append( 173 | "Breaking change: Details should mention impact or rationale." 174 | ) 175 | if "needs-info" in labels: 176 | ok = False 177 | messages.append( 178 | "Issue is still marked as needs-info. Please provide the requested information." 179 | ) 180 | if len(body.strip()) < 10: 181 | ok = False 182 | messages.append("Issue body is too short. Please add more details.") 183 | return {"ok": ok, "messages": messages} 184 | 185 | def main(): 186 | github_repository = os.getenv("GITHUB_REPOSITORY") 187 | github_token = os.getenv("GITHUB_TOKEN") 188 | issue_number = os.getenv("ISSUE_NUMBER") 189 | mode = os.getenv("MODE") 190 | context = Context(github_token=github_token, github_repository=github_repository, mode=mode, issue_number=issue_number) 191 | if mode == "classify": 192 | result = _handle_classify(context) 193 | print(json.dumps(result)) 194 | elif mode == "validate": 195 | result = _handle_validate(context) 196 | print(json.dumps(result)) 197 | sys.exit(0) 198 | 199 | if __name__ == "__main__": 200 | main() -------------------------------------------------------------------------------- /config.schema.json: -------------------------------------------------------------------------------- 1 | { 2 | "pluginAlias": "KasaPython", 3 | "pluginType": "platform", 4 | "singular": true, 5 | "strictValidation": true, 6 | "headerDisplay": "Kasa Python Plugin.

Most users do not require configuration and can just click \"Save\" to get started. See [README](https://github.com/ZeliardM/homebridge-kasa-python/blob/master/README.md) for more information.

", 7 | "footerDisplay": "", 8 | "schema": { 9 | "type": "object", 10 | "properties": { 11 | "name": { 12 | "title": "Name", 13 | "type": "string", 14 | "required": true, 15 | "default": "KasaPython", 16 | "minLength": 1 17 | }, 18 | "enableCredentials": { 19 | "title": "Enable Credentials", 20 | "type": "boolean", 21 | "description": "Enable to use username and password for authentication.", 22 | "default": false 23 | }, 24 | "username": { 25 | "title": "Username", 26 | "type": "string", 27 | "description": "Username for the Kasa account. Required for cloud devices.", 28 | "placeholder": "Username", 29 | "minLength": 1, 30 | "condition": { 31 | "functionBody": "return model.enableCredentials;" 32 | } 33 | }, 34 | "password": { 35 | "title": "Password", 36 | "type": "string", 37 | "description": "Password for the Kasa account. Required for cloud devices.", 38 | "placeholder": "Password", 39 | "minLength": 1, 40 | "condition": { 41 | "functionBody": "return model.enableCredentials && model.username && model.username !== '';" 42 | } 43 | }, 44 | "hideHomeKitMatter": { 45 | "title": "Hide HomeKit or Matter Devices", 46 | "type": "boolean", 47 | "description": "Hide HomeKit or Matter Devices from Homebridge.", 48 | "default": true 49 | }, 50 | "pollingInterval": { 51 | "title": "Polling Interval (seconds)", 52 | "type": "integer", 53 | "description": "How often to check device status in the background (seconds)", 54 | "default": 5 55 | }, 56 | "discoveryPollingInterval": { 57 | "title": "Discovery Polling Interval (seconds)", 58 | "type": "integer", 59 | "description": "How often to discover new devices in the background (seconds)", 60 | "default": 300 61 | }, 62 | "offlineInterval": { 63 | "title": "Offline Interval (days)", 64 | "type": "integer", 65 | "description": "How often to remove offline devices in the background (days)", 66 | "default": 7 67 | }, 68 | "additionalBroadcasts": { 69 | "title": "Additional Broadcast Addresses", 70 | "type": "array", 71 | "items": 
{ 72 | "title": "Broadcast Address", 73 | "type": "string" 74 | }, 75 | "description": "List of additional broadcast addresses to discover devices on. Format: 192.168.1.255" 76 | }, 77 | "manualDevices": { 78 | "title": "Manual List Of Devices", 79 | "type": "array", 80 | "items": { 81 | "title": "Manual Device", 82 | "type": "object", 83 | "properties": { 84 | "host": { 85 | "type": "string", 86 | "title": "Host" 87 | }, 88 | "alias": { 89 | "type": "string", 90 | "title": "Alias", 91 | "readonly": true, 92 | "condition": { 93 | "functionBody": "return model.manualDevices && model.manualDevices[arrayIndices] && model.manualDevices[arrayIndices].host && model.manualDevices[arrayIndices].host !== '';" 94 | } 95 | } 96 | } 97 | }, 98 | "description": "List of devices to manually add." 99 | }, 100 | "excludeMacAddresses": { 101 | "title": "Exclude MAC Addresses", 102 | "type": "array", 103 | "items": { 104 | "title": "MAC Address", 105 | "type": "string" 106 | }, 107 | "description": "List of MAC addresses to exclude from device discovery. Format: AA:BB:CC:11:22:33" 108 | }, 109 | "includeMacAddresses": { 110 | "title": "Include MAC Addresses", 111 | "type": "array", 112 | "items": { 113 | "title": "MAC Address", 114 | "type": "string" 115 | }, 116 | "description": "List of MAC addresses to include in device discovery. Format: AA:BB:CC:11:22:33" 117 | }, 118 | "waitTimeUpdate": { 119 | "title": "Wait Time Update (milliseconds)", 120 | "type": "integer", 121 | "description": "The time to wait to combine similar commands for a device before sending a command to a device (milliseconds). Default: 100.", 122 | "default": 100 123 | }, 124 | "pythonPath": { 125 | "title": "Python Path", 126 | "type": "string", 127 | "description": "Full Path to the Python executable. If not set, the plugin will try to find the Python executable or default to the System Path.", 128 | "placeholder": "Enter custom Full Python Path if needed, i.e. 
/usr/bin/python3.11", 129 | "minLength": 1 130 | }, 131 | "advancedPythonLogging": { 132 | "title": "Advanced Python Logging", 133 | "type": "boolean", 134 | "description": "Enable detailed logging for Python scripts. Only shows logs when Debug Mode in Homebridge is enabled.", 135 | "default": false 136 | } 137 | } 138 | }, 139 | "layout": [ 140 | "name", 141 | "enableCredentials", 142 | { 143 | "type": "conditional", 144 | "condition": "model.enableCredentials", 145 | "items": [ 146 | "username", 147 | "password" 148 | ] 149 | }, 150 | { 151 | "type": "help", 152 | "helpvalue": "Username and Password will be required for specific devices only." 153 | }, 154 | { 155 | "type": "fieldset", 156 | "title": "HomeKit (Optional)", 157 | "description": "Customize HomeKit options.", 158 | "expandable": true, 159 | "items": [ 160 | "hideHomeKitMatter" 161 | ] 162 | }, 163 | { 164 | "type": "fieldset", 165 | "title": "Device Discovery (Optional)", 166 | "description": "Customize device discovery", 167 | "expandable": true, 168 | "items": [ 169 | "pollingInterval", 170 | "discoveryPollingInterval", 171 | "offlineInterval", 172 | { 173 | "type": "fieldset", 174 | "title": "Additional Broadcasts", 175 | "description": "Specify additional broadcast addresses for device discovery.", 176 | "expandable": true, 177 | "items": [ 178 | { 179 | "key": "additionalBroadcasts", 180 | "type": "array", 181 | "items": { 182 | "title": "Broadcast Address", 183 | "type": "string" 184 | } 185 | } 186 | ] 187 | }, 188 | { 189 | "type": "fieldset", 190 | "title": "Manual Devices", 191 | "description": "If automatic discovery is not working, try this.", 192 | "expandable": true, 193 | "items": [ 194 | { 195 | "key": "manualDevices", 196 | "type": "array", 197 | "items": [ 198 | "manualDevices[].host", "manualDevices[].alias" 199 | ] 200 | }, 201 | { 202 | "type": "help", 203 | "helpvalue": "Before resorting to manually specifying devices. 
Try setting the broadcast address and check your router/switch/firewall configuration. You must assign static IP addresses to your devices to use this configuration." 204 | } 205 | ] 206 | }, 207 | { 208 | "type": "fieldset", 209 | "title": "Exclude MAC Addresses", 210 | "description": "Specify MAC addresses to exclude from device discovery.", 211 | "expandable": true, 212 | "items": [ 213 | { 214 | "key": "excludeMacAddresses", 215 | "type": "array", 216 | "items": { 217 | "title": "MAC Address", 218 | "type": "string" 219 | } 220 | } 221 | ] 222 | }, 223 | { 224 | "type": "fieldset", 225 | "title": "Include MAC Addresses", 226 | "description": "Specify MAC addresses to include in device discovery.", 227 | "expandable": true, 228 | "items": [ 229 | { 230 | "key": "includeMacAddresses", 231 | "type": "array", 232 | "items": { 233 | "title": "MAC Address", 234 | "type": "string" 235 | } 236 | } 237 | ] 238 | } 239 | ] 240 | }, 241 | { 242 | "type": "fieldset", 243 | "title": "Advanced Settings (Optional)", 244 | "description": "Don't change these, unless you understand what you're doing.", 245 | "expandable": true, 246 | "items": [ 247 | "waitTimeUpdate", 248 | "pythonPath", 249 | "advancedPythonLogging" 250 | ] 251 | } 252 | ] 253 | } -------------------------------------------------------------------------------- /.github/workflows/label-and-validate-pr.yml: -------------------------------------------------------------------------------- 1 | --- 2 | name: Label & Validate Pull Requests 3 | 4 | on: 5 | pull_request_target: 6 | types: [opened, reopened, synchronize, ready_for_review] 7 | 8 | permissions: 9 | contents: write 10 | issues: write 11 | pull-requests: write 12 | 13 | env: 14 | GITHUB_REPOSITORY: ${{ github.repository }} 15 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 16 | PULL_REQUEST_BASE_SHA: ${{ github.event.pull_request.base.sha }} 17 | PULL_REQUEST_NUMBER: ${{ github.event.pull_request.number }} 18 | 19 | concurrency: 20 | group: 
label-and-validate-pr-${{ github.event.pull_request.number }} 21 | cancel-in-progress: true 22 | 23 | jobs: 24 | detect-fork: 25 | name: Detect Fork 26 | runs-on: ubuntu-latest 27 | outputs: 28 | is_fork: ${{ steps.detect-fork-context.outputs.is_fork }} 29 | steps: 30 | - name: Checkout Actions 31 | id: checkout 32 | uses: actions/checkout@v6 33 | with: 34 | fetch-depth: 0 35 | 36 | - name: Detect Fork Context 37 | id: detect-fork-context 38 | uses: ./.github/actions/detect-fork 39 | 40 | prepare: 41 | name: Validate Environment 42 | runs-on: ubuntu-latest 43 | needs: detect-fork 44 | if: needs.detect-fork.outputs.is_fork == 'false' 45 | steps: 46 | - name: Validate Environment 47 | id: validate 48 | shell: bash 49 | run: | 50 | set -euo pipefail 51 | : "${GITHUB_REPOSITORY:?GITHUB_REPOSITORY is required}" 52 | : "${GITHUB_TOKEN:?GITHUB_TOKEN is required}" 53 | : "${PULL_REQUEST_BASE_SHA:?PULL_REQUEST_BASE_SHA is required}" 54 | : "${PULL_REQUEST_NUMBER:?PULL_REQUEST_NUMBER is required}" 55 | 56 | retarget-base: 57 | name: Retarget Pull Request Base to Beta 58 | runs-on: ubuntu-latest 59 | needs: prepare 60 | if: needs.prepare.result == 'success' && github.actor != 'github-actions[bot]' 61 | steps: 62 | - name: Checkout Actions 63 | id: checkout 64 | uses: actions/checkout@v6 65 | with: 66 | ref: ${{ env.PULL_REQUEST_BASE_SHA }} 67 | fetch-depth: 0 68 | 69 | - name: Retarget Pull Request 70 | id: retarget-pr 71 | shell: bash 72 | run: | 73 | set -euo pipefail 74 | BASE="$(gh -R "$GITHUB_REPOSITORY" pr view "$PULL_REQUEST_NUMBER" --json baseRefName --jq .baseRefName || echo '')" 75 | HEAD="$(gh -R "$GITHUB_REPOSITORY" pr view "$PULL_REQUEST_NUMBER" --json headRefName --jq .headRefName || echo '')" 76 | LABELS="$(gh -R "$GITHUB_REPOSITORY" pr view "$PULL_REQUEST_NUMBER" --json labels --jq '.labels[].name' || true)" 77 | is_stable_conversion=false 78 | if [ "$BASE" = "latest" ] && [ "$HEAD" = "beta" ]; then 79 | if printf '%s\n' "$LABELS" | grep -qi 
'^stable-conversion$'; then 80 | is_stable_conversion=true 81 | fi 82 | fi 83 | if [ "$BASE" = "beta" ]; then 84 | echo "Base is already 'beta'; no retargeting needed." 85 | exit 0 86 | fi 87 | if [ "$is_stable_conversion" = "true" ]; then 88 | echo "Stable conversion PR detected; keeping base '$BASE'." 89 | exit 0 90 | fi 91 | if gh -R "$GITHUB_REPOSITORY" pr edit "$PULL_REQUEST_NUMBER" --base beta; then 92 | echo "Retargeted PR #$PULL_REQUEST_NUMBER base to 'beta'." 93 | else 94 | echo "::warning::Failed to retarget base to beta for PR #$PULL_REQUEST_NUMBER (base was '$BASE')." 95 | fi 96 | 97 | label: 98 | name: Clear & Apply Labels 99 | runs-on: ubuntu-latest 100 | needs: retarget-base 101 | if: needs.retarget-base.result == 'success' 102 | steps: 103 | - name: Checkout Actions 104 | id: checkout 105 | uses: actions/checkout@v6 106 | with: 107 | ref: ${{ env.PULL_REQUEST_BASE_SHA }} 108 | fetch-depth: 0 109 | 110 | - name: Clear All Labels 111 | id: clear-labels 112 | if: ${{ !contains(github.event.pull_request.labels.*.name, 'stable-conversion') }} 113 | uses: ./.github/actions/clear-labels 114 | with: 115 | number: ${{ env.PULL_REQUEST_NUMBER }} 116 | 117 | - name: Apply Labels 118 | id: apply-labels 119 | uses: actions/labeler@v6 120 | with: 121 | repo-token: ${{ env.GITHUB_TOKEN }} 122 | configuration-path: .github/labeler.yml 123 | 124 | - name: Fetch Dependabot Metadata 125 | id: metadata 126 | if: github.actor == 'dependabot[bot]' 127 | uses: dependabot/fetch-metadata@v2 128 | with: 129 | github-token: "${{ env.GITHUB_TOKEN }}" 130 | 131 | - name: Apply Dependabot Labels 132 | id: apply-dependabot-labels 133 | if: steps.metadata.outcome == 'success' 134 | shell: bash 135 | env: 136 | UPDATE_TYPE: ${{ steps.metadata.outputs.update-type }} 137 | run: | 138 | set -euo pipefail 139 | echo "Dependabot Pull Request detected; update-type=${UPDATE_TYPE}" 140 | case "$UPDATE_TYPE" in 141 | version-update:semver-major) 142 | labels=("dependency-major") 143 | ;; 
144 | version-update:semver-minor) 145 | labels=("dependency-minor") 146 | ;; 147 | version-update:semver-patch) 148 | labels=("dependency-patch") 149 | ;; 150 | ""|null) 151 | # Lockfile / unknown cases 152 | labels=("dependency-lockfile") 153 | ;; 154 | *) 155 | labels=() 156 | ;; 157 | esac 158 | case "$UPDATE_TYPE" in 159 | version-update:semver-minor|version-update:semver-patch|""|null) 160 | labels+=("auto-merge") 161 | ;; 162 | esac 163 | if [ "${#labels[@]}" -eq 0 ]; then 164 | echo "No extra labels to apply for this Dependabot update-type." 165 | exit 0 166 | fi 167 | printf 'Applying labels: %s\n' "${labels[*]}" 168 | args=(--method POST \ 169 | -H "Accept: application/vnd.github+json" \ 170 | "/repos/$GITHUB_REPOSITORY/issues/$PULL_REQUEST_NUMBER/labels") 171 | for lbl in "${labels[@]}"; do 172 | args+=("-f" "labels[]=$lbl") 173 | done 174 | gh api "${args[@]}" 175 | 176 | validate-pr: 177 | name: Validate Pull Request 178 | runs-on: ubuntu-latest 179 | needs: label 180 | if: needs.label.result == 'success' 181 | outputs: 182 | ok: ${{ steps.validate-pr.outputs.ok }} 183 | steps: 184 | - name: Checkout Actions 185 | id: checkout 186 | uses: actions/checkout@v6 187 | with: 188 | ref: ${{ env.PULL_REQUEST_BASE_SHA }} 189 | fetch-depth: 0 190 | 191 | - name: Ensure jq 192 | id: ensure-jq 193 | uses: ./.github/actions/ensure-jq 194 | 195 | - name: Validate Pull Request 196 | id: validate-pr 197 | shell: bash 198 | run: | 199 | set -euo pipefail 200 | RESULT="$(python3 .github/scripts/pr_manager.py)" 201 | echo "Raw validation: $RESULT" 202 | OK="$(echo "$RESULT" | jq -r '.ok // false')" 203 | MSGS="$(echo "$RESULT" | jq -c '.messages // []')" 204 | echo "ok=$OK" >> "$GITHUB_OUTPUT" 205 | echo "messages=$MSGS" >> "$GITHUB_OUTPUT" 206 | 207 | - name: Build Validation Sticky 208 | id: build-validation-sticky 209 | shell: bash 210 | run: | 211 | set -euo pipefail 212 | MARK="" 213 | OK="${{ steps.validate-pr.outputs.ok }}" 214 | MSGS_JSON='${{ 
steps.validate-pr.outputs.messages }}' 215 | if [ -n "$MSGS_JSON" ] && [ "$MSGS_JSON" != "null" ]; then 216 | VALMSGS="$(printf '%s' "$MSGS_JSON" | jq -r '.[]?')" 217 | else 218 | VALMSGS="" 219 | fi 220 | if [ "$OK" != "true" ]; then 221 | { 222 | echo "❌ Pull Request validation failed." 223 | echo 224 | if [ -n "$VALMSGS" ]; then 225 | echo "Details:" 226 | echo '```' 227 | printf '%s\n' "$VALMSGS" 228 | echo '```' 229 | echo 230 | fi 231 | echo "Please update the pull request with the requested information." 232 | echo 233 | echo "$MARK" 234 | } > body.txt 235 | else 236 | { 237 | echo "✅ Pull Request classified, labeled, and validated successfully!" 238 | echo 239 | echo "Thank you. This Pull Request has the required information and labels." 240 | echo 241 | echo "$MARK" 242 | } > body.txt 243 | fi 244 | 245 | - name: Upsert Validation Sticky 246 | id: upsert-validation-sticky 247 | uses: ./.github/actions/sticky-comment 248 | with: 249 | number: ${{ github.event.pull_request.number }} 250 | marker: "" 251 | body_file: body.txt 252 | 253 | dependabot-auto-merge: 254 | name: Dependabot Auto-Merge 255 | needs: validate-pr 256 | if: needs.validate-pr.result == 'success' && needs.validate-pr.outputs.ok == 'true' && github.actor == 'dependabot[bot]' 257 | uses: ./.github/workflows/shared-dependabot-auto-merge.yml 258 | 259 | fork-notice: 260 | name: Fork Notice 261 | runs-on: ubuntu-latest 262 | needs: detect-fork 263 | if: needs.detect-fork.outputs.is_fork == 'true' 264 | steps: 265 | - name: Fork Notice 266 | id: fork-notice 267 | shell: bash 268 | run: | 269 | set -euo pipefail 270 | echo "::notice:: This workflow is running in a forked repository. All privileged steps have been skipped." 
271 | -------------------------------------------------------------------------------- /src/utils.ts: -------------------------------------------------------------------------------- 1 | import { LogLevel } from 'homebridge'; 2 | import type { 3 | Characteristic, 4 | Logger, 5 | Logging, 6 | } from 'homebridge'; 7 | 8 | import axios from 'axios'; 9 | import { ChildProcessWithoutNullStreams, spawn, SpawnOptionsWithoutStdio } from 'node:child_process'; 10 | import { promises as fs } from 'node:fs'; 11 | import { writeFile } from 'node:fs/promises'; 12 | import { fileURLToPath } from 'node:url'; 13 | import net from 'node:net'; 14 | import path from 'node:path'; 15 | 16 | export function deferAndCombine( 17 | fn: (requestCount: number) => Promise, 18 | timeout: number, 19 | runNowFn?: (arg: U) => void, 20 | ): (arg?: U) => Promise { 21 | let requests: { resolve: (value: T) => void; reject: (reason?: unknown) => void }[] = []; 22 | let timer: NodeJS.Timeout | null = null; 23 | 24 | const processRequests = () => { 25 | const currentRequests = requests; 26 | requests = []; 27 | fn(currentRequests.length) 28 | .then(value => currentRequests.forEach(req => req.resolve(value))) 29 | .catch(error => currentRequests.forEach(req => req.reject(error))) 30 | .finally(() => timer = null); 31 | }; 32 | 33 | return (arg?: U) => { 34 | if (runNowFn && arg !== undefined) { 35 | runNowFn(arg); 36 | } 37 | 38 | return new Promise((resolve, reject) => { 39 | requests.push({ resolve, reject }); 40 | 41 | if (!timer) { 42 | timer = setTimeout(processRequests, timeout); 43 | } 44 | }); 45 | }; 46 | } 47 | 48 | export function delay(ms: number): Promise { 49 | return new Promise((resolve) => { 50 | setTimeout(resolve, ms); 51 | }); 52 | } 53 | 54 | export function isObjectLike(candidate: unknown): candidate is Record { 55 | return typeof candidate === 'object' && candidate !== null || typeof candidate === 'function'; 56 | } 57 | 58 | export function lookup( 59 | object: unknown, 60 | 
compareFn: undefined | ((objectProp: unknown, search: T) => boolean), 61 | value: T, 62 | ): string | undefined { 63 | const compare = compareFn ?? ((objectProp: unknown, search: T): boolean => objectProp === search); 64 | 65 | if (isObjectLike(object)) { 66 | return Object.keys(object).find(key => compare(object[key], value)); 67 | } 68 | return undefined; 69 | } 70 | 71 | export function lookupCharacteristicNameByUUID( 72 | characteristic: typeof Characteristic, 73 | uuid: string, 74 | ): string | undefined { 75 | return Object.keys(characteristic).find(key => ((characteristic as unknown as {[key: string]: {UUID: string}})[key].UUID === uuid)); 76 | } 77 | 78 | export function prefixLogger(logger: Logger, prefix: string | (() => string)): Logging { 79 | const methods: Array<'info' | 'warn' | 'error' | 'debug' | 'log'> = ['info', 'warn', 'error', 'debug', 'log']; 80 | const clonedLogger: Logging = methods.reduce((acc: Logging, method) => { 81 | acc[method] = (...args: unknown[]) => { 82 | const prefixString = typeof prefix === 'function' ? prefix() : prefix; 83 | if (method === 'log') { 84 | const [level, message, ...parameters] = args; 85 | logger[method](level as LogLevel, `${prefixString} ${message}`, ...parameters); 86 | } else { 87 | const [message, ...parameters] = args; 88 | logger[method](`${prefixString} ${message}`, ...parameters); 89 | } 90 | }; 91 | return acc; 92 | }, {} as Logging); 93 | 94 | (clonedLogger as { prefix: string | (() => string) }).prefix = typeof logger.prefix === 'string' ? 
`${prefix} ${logger.prefix}` : prefix; 95 | 96 | return clonedLogger; 97 | } 98 | 99 | export async function runCommand( 100 | logger: Logger, 101 | command: string, 102 | args: readonly string[] = [], 103 | options?: SpawnOptionsWithoutStdio, 104 | hideStdout: boolean = false, 105 | hideStderr: boolean = false, 106 | returnProcess: boolean = false, 107 | suppressErrors: string[] = [], 108 | ): Promise<[string, string, number | null, (ChildProcessWithoutNullStreams | null)?]> { 109 | const MAX_BUFFER_SIZE = 1024 * 1024; 110 | let stdout: string = ''; 111 | let stderr: string = ''; 112 | let outputFile: string | null = null; 113 | 114 | const filteredArgs = args.filter(arg => { 115 | if (arg.startsWith('>')) { 116 | outputFile = arg.substring(1).trim(); 117 | return false; 118 | } 119 | return true; 120 | }); 121 | 122 | logger.debug(`Running command: ${command} ${filteredArgs.join(' ')}`); 123 | 124 | const env = { 125 | ...process.env, 126 | ...(options?.env || {}), 127 | }; 128 | 129 | const p: ChildProcessWithoutNullStreams = spawn(command, filteredArgs, { 130 | ...options, 131 | env, 132 | }); 133 | 134 | logger.debug(`Command PID: ${p.pid}`); 135 | 136 | p.stdout.setEncoding('utf8').on('data', data => { 137 | stdout += data; 138 | if (stdout.length > MAX_BUFFER_SIZE) { 139 | stdout = stdout.slice(-MAX_BUFFER_SIZE); 140 | } 141 | if (!hideStdout) { 142 | logger.debug(`STDOUT: ${data.trim()}`); 143 | } 144 | }); 145 | 146 | p.stderr.setEncoding('utf8').on('data', data => { 147 | stderr += data; 148 | if (stderr.length > MAX_BUFFER_SIZE) { 149 | stderr = stderr.slice(-MAX_BUFFER_SIZE); 150 | } 151 | if (!hideStderr) { 152 | logger.error(`STDERR: ${data.trim()}`); 153 | } 154 | }); 155 | 156 | if (returnProcess) { 157 | logger.debug('Command started and returning process.'); 158 | 159 | const stderrReady = new Promise((resolve) => { 160 | p.stderr.once('data', () => { 161 | logger.debug('Process data received.'); 162 | resolve(); 163 | }); 164 | }); 165 | 166 | 
await stderrReady; 167 | 168 | return [stdout, stderr, null, p]; 169 | } 170 | 171 | const exitCode = await new Promise((resolve, reject) => { 172 | p.on('close', (code) => { 173 | logger.debug(`Command closed with exit code: ${code}`); 174 | resolve(code); 175 | }).on('error', (error: NodeJS.ErrnoException) => { 176 | const errorMessage = error.message.toLowerCase(); 177 | const shouldSuppress = 178 | suppressErrors.some(err => 179 | (error.code && error.code.toString().toLowerCase() === err.toLowerCase()) || 180 | errorMessage.includes(err.toLowerCase()), 181 | ); 182 | if (!shouldSuppress) { 183 | logger.error('Command encountered an error:', error); 184 | } 185 | reject(error); 186 | }); 187 | }); 188 | 189 | p.stdout.destroy(); 190 | p.stderr.destroy(); 191 | p.kill(); 192 | 193 | if (outputFile) { 194 | logger.debug(`Writing command output to file: ${outputFile}`); 195 | await writeFile(outputFile, stdout); 196 | } 197 | 198 | logger.debug('Command finished.'); 199 | return [stdout, stderr, exitCode]; 200 | } 201 | 202 | export async function loadPackageConfig(logger: Logging): Promise<{ name: string; version: string; engines: { node: string } }> { 203 | const __dirname = path.dirname(fileURLToPath(import.meta.url)); 204 | const packageConfigPath = path.join(__dirname, '..', 'package.json'); 205 | const log: Logger = prefixLogger(logger, '[Package Config]'); 206 | log.debug('Loading package configuration from:', packageConfigPath); 207 | 208 | try { 209 | const packageConfigData = await fs.readFile(packageConfigPath, 'utf8'); 210 | return JSON.parse(packageConfigData); 211 | } catch (error) { 212 | log.error(`Error reading package.json: ${error}`); 213 | throw error; 214 | } 215 | } 216 | 217 | export function satisfiesVersion(currentVersion: string, requiredVersion: string): boolean { 218 | const versions = requiredVersion.split('||').map(v => v.trim()); 219 | 220 | return versions.some(version => { 221 | const [requiredMajor, requiredMinor, requiredPatch] = 
version.replace('^', '').split('.').map(Number); 222 | const [currentMajor, currentMinor, currentPatch] = currentVersion.replace('v', '').split('.').map(Number); 223 | 224 | if (currentMajor > requiredMajor) { 225 | return true; 226 | } 227 | if (currentMajor < requiredMajor) { 228 | return false; 229 | } 230 | if (currentMinor > requiredMinor) { 231 | return true; 232 | } 233 | if (currentMinor < requiredMinor) { 234 | return false; 235 | } 236 | return currentPatch >= requiredPatch; 237 | }); 238 | } 239 | 240 | export async function checkForUpgrade( 241 | packageConfig: { name: string; version: string; engines: { node: string } }, 242 | storagePath: string, 243 | logger: Logging, 244 | ): Promise { 245 | const versionDir = path.join(storagePath, 'kasa-python'); 246 | const versionFilePath = path.join(versionDir, 'kasa-python-version.json'); 247 | let storedVersion = ''; 248 | 249 | logger.debug('Checking for upgrade at path:', versionFilePath); 250 | 251 | try { 252 | await fs.access(versionFilePath); 253 | const versionData = await fs.readFile(versionFilePath, 'utf8'); 254 | storedVersion = JSON.parse(versionData).version; 255 | logger.debug('Stored version:', storedVersion); 256 | } catch (error) { 257 | if ((error as NodeJS.ErrnoException).code === 'ENOENT') { 258 | logger.info('Version file does not exist, treating as new install or version change.'); 259 | } else { 260 | logger.error('Error reading version file:', error); 261 | } 262 | } 263 | 264 | if (storedVersion !== packageConfig.version) { 265 | try { 266 | logger.debug('Updating version file to new version:', packageConfig.version); 267 | await fs.mkdir(versionDir, { recursive: true }); 268 | await fs.writeFile(versionFilePath, JSON.stringify({ version: packageConfig.version }), 'utf8'); 269 | logger.info(`Version file updated to version ${packageConfig.version}`); 270 | } catch (error) { 271 | logger.error('Error writing version file:', error); 272 | } 273 | return true; 274 | } 275 | 276 | 
logger.debug('No upgrade needed, version is up to date.'); 277 | return false; 278 | } 279 | 280 | export async function waitForServer(url: string, log: Logging, timeout: number = 30000, interval: number = 1000): Promise { 281 | const startTime = Date.now(); 282 | log.debug(`Waiting for server at ${url} with timeout ${timeout}ms and interval ${interval}ms`); 283 | 284 | while (Date.now() - startTime < timeout) { 285 | try { 286 | const response = await axios.get(url); 287 | if (response.status === 200) { 288 | log.debug('Server responded successfully'); 289 | return; 290 | } 291 | } catch { 292 | log.debug('Server not responding yet, retrying...'); 293 | } 294 | await new Promise(resolve => setTimeout(resolve, interval)); 295 | } 296 | 297 | log.error(`Server did not respond within ${timeout / 1000} seconds`); 298 | throw new Error(`Server did not respond within ${timeout / 1000} seconds`); 299 | } 300 | 301 | export async function getAvailablePort(): Promise { 302 | return new Promise((resolve, reject) => { 303 | const server = net.createServer(); 304 | server.listen(0, () => { 305 | const port = (server.address() as net.AddressInfo).port; 306 | server.close(() => resolve(port)); 307 | }); 308 | server.on('error', reject); 309 | }); 310 | } -------------------------------------------------------------------------------- /src/devices/deviceManager.ts: -------------------------------------------------------------------------------- 1 | import type { CharacteristicValue, Logger, PlatformConfig } from 'homebridge'; 2 | import axios from 'axios'; 3 | import path from 'node:path'; 4 | import { promises as fs } from 'node:fs'; 5 | import KasaPythonPlatform from '../platform.js'; 6 | import { parseConfig } from '../config.js'; 7 | import type { ConfigDevice, FeatureInfo, HSV, KasaDevice, SysInfo } from './kasaDevices.js'; 8 | import { EventEmitter } from 'events'; 9 | import { EventSource } from 'eventsource'; 10 | 11 | export const deviceEventEmitter = new 
EventEmitter(); 12 | 13 | type ControlDeviceValue = CharacteristicValue | HSV; 14 | 15 | export default class DeviceManager { 16 | private log: Logger; 17 | private apiUrl: string; 18 | private username: string; 19 | private password: string; 20 | private additionalBroadcasts: string[]; 21 | private manualDevices: string[]; 22 | private excludeMacAddresses: string[]; 23 | private includeMacAddresses: string[]; 24 | 25 | constructor(private platform: KasaPythonPlatform) { 26 | this.log = platform.log; 27 | this.username = platform.config.username; 28 | this.password = platform.config.password; 29 | this.apiUrl = `http://127.0.0.1:${platform.port}`; 30 | this.additionalBroadcasts = platform.config.discoveryOptions.additionalBroadcasts; 31 | this.manualDevices = platform.config.discoveryOptions.manualDevices.map(device => device.host); 32 | this.excludeMacAddresses = platform.config.discoveryOptions.excludeMacAddresses; 33 | this.includeMacAddresses = platform.config.discoveryOptions.includeMacAddresses; 34 | } 35 | 36 | private convertManualDevices(manualDevices: (string | ConfigDevice)[]): ConfigDevice[] { 37 | return manualDevices.map(device => { 38 | if (typeof device === 'string') { 39 | return { host: device, alias: 'Will Be Filled By Plug-In Automatically' }; 40 | } else if ('breakoutChildDevices' in device) { 41 | delete device.breakoutChildDevices; 42 | } else if ('host' in device && !('alias' in device)) { 43 | (device as ConfigDevice).alias = 'Will Be Filled By Plug-In Automatically'; 44 | } 45 | return device; 46 | }); 47 | } 48 | 49 | private updateDeviceAlias(device: KasaDevice | SysInfo): void { 50 | let sysInfo: SysInfo; 51 | 52 | if (this.isKasaDevice(device)) { 53 | sysInfo = device.sys_info as SysInfo; 54 | } else { 55 | sysInfo = device as SysInfo; 56 | } 57 | 58 | if (sysInfo.alias) { 59 | const aliasMappings: { [key: string]: string } = { 60 | 'TP-LINK_Power Strip_': 'Power Strip', 61 | 'TP-LINK_Smart Plug_': 'Smart Plug', 62 | 'TP-LINK_Smart 
Bulb_': 'Smart Bulb', 63 | }; 64 | 65 | for (const [pattern, replacement] of Object.entries(aliasMappings)) { 66 | if (sysInfo.alias.includes(pattern)) { 67 | sysInfo.alias = `${replacement} ${sysInfo.alias.slice(-4)}`; 68 | break; 69 | } 70 | } 71 | } 72 | } 73 | 74 | private isKasaDevice(device: KasaDevice | SysInfo): device is KasaDevice { 75 | return (device as KasaDevice).sys_info !== undefined; 76 | } 77 | 78 | private async readConfigFile(configPath: string): Promise { 79 | try { 80 | const configData = await fs.readFile(configPath, 'utf8'); 81 | return JSON.parse(configData); 82 | } catch (error) { 83 | this.log.error(`Error reading config file: ${String(error)}`); 84 | throw error; 85 | } 86 | } 87 | 88 | private async writeConfigFile(configPath: string, fileConfig: PlatformConfig): Promise { 89 | try { 90 | await fs.writeFile(configPath, JSON.stringify(fileConfig, null, 2), 'utf8'); 91 | } catch (error) { 92 | this.log.error(`Error writing config file: ${String(error)}`); 93 | } 94 | } 95 | 96 | async discoverDevices(): Promise { 97 | this.log.info('Discovering devices using streaming...'); 98 | 99 | try { 100 | const config = this.username && this.password 101 | ? 
{ auth: { username: this.username, password: this.password } } 102 | : {}; 103 | const response = await axios.post>( 104 | `${this.apiUrl}/discover`, 105 | { 106 | additionalBroadcasts: this.additionalBroadcasts, 107 | manualDevices: this.manualDevices, 108 | excludeMacAddresses: this.excludeMacAddresses, 109 | includeMacAddresses: this.includeMacAddresses, 110 | }, 111 | config, 112 | ); 113 | this.log.info('Discovery initiated:', response.data); 114 | 115 | const configPath = path.join(this.platform.storagePath, 'config.json'); 116 | const fileConfig = await this.readConfigFile(configPath); 117 | const platformConfig = fileConfig.platforms.find((p: PlatformConfig) => p.platform === 'KasaPython'); 118 | if (!platformConfig) { 119 | this.log.error('KasaPython configuration not found in config file.'); 120 | } else { 121 | platformConfig.manualDevices = platformConfig.manualDevices || []; 122 | } 123 | 124 | const eventSource = new EventSource(`${this.apiUrl}/stream`); 125 | eventSource.onmessage = (event: MessageEvent) => { 126 | try { 127 | const data = JSON.parse(event.data); 128 | this.log.debug('Received SSE event data:', data); 129 | if (data.status === 'discovery_complete') { 130 | this.log.info('Device discovery complete.'); 131 | eventSource.close(); 132 | } else { 133 | if (!data.sys_info || !data.sys_info.host) { 134 | this.log.error('Invalid device data received:', data); 135 | return; 136 | } 137 | const device: KasaDevice = { 138 | sys_info: data.sys_info, 139 | feature_info: data.feature_info, 140 | last_seen: new Date(), 141 | offline: false, 142 | }; 143 | this.log.info(`Received device info for ${device.sys_info.host}`); 144 | this.processDevice(device, platformConfig); 145 | deviceEventEmitter.emit('deviceDiscovered', device); 146 | } 147 | } catch (err) { 148 | this.log.error('Error parsing SSE event data:', err); 149 | } 150 | }; 151 | eventSource.onerror = (err: Event) => { 152 | this.log.error('EventSource error:', err); 153 | 
eventSource.close(); 154 | }; 155 | 156 | await new Promise(resolve => setTimeout(resolve, 10000)); 157 | eventSource.close(); 158 | 159 | if (platformConfig) { 160 | platformConfig.manualDevices = platformConfig.manualDevices.filter((device: string | ConfigDevice) => { 161 | if (typeof device === 'string') { 162 | return true; 163 | } else if (!device.host) { 164 | this.log.warn(`Removing manual device without host: ${JSON.stringify(device)}`); 165 | return false; 166 | } 167 | return true; 168 | }); 169 | if (this.shouldConvertManualDevices(platformConfig.manualDevices)) { 170 | platformConfig.manualDevices = this.convertManualDevices(platformConfig.manualDevices); 171 | } 172 | await this.writeConfigFile(configPath, fileConfig); 173 | this.platform.config = parseConfig(platformConfig); 174 | } 175 | } catch (error) { 176 | this.handleAxiosError(error, 'discoverDevices'); 177 | } 178 | } 179 | 180 | private processDevice(device: KasaDevice, platformConfig: PlatformConfig): void { 181 | try { 182 | this.updateDeviceAlias(device); 183 | if (platformConfig.manualDevices) { 184 | const existingDevice = platformConfig.manualDevices.find((d: ConfigDevice) => d.host === device.sys_info.host); 185 | if (existingDevice) { 186 | existingDevice.host = device.sys_info.host; 187 | existingDevice.alias = device.sys_info.alias; 188 | } 189 | } 190 | } catch (error) { 191 | this.log.error(`Error processing device: ${String(error)}`); 192 | } 193 | } 194 | 195 | private shouldConvertManualDevices(manualDevices: (string | ConfigDevice)[]): boolean { 196 | return manualDevices.length > 0 && 197 | (typeof manualDevices[0] === 'string' || 198 | manualDevices.some((device) => typeof device !== 'string')); 199 | } 200 | 201 | async getSysInfo(host: string): Promise { 202 | try { 203 | const response = await axios.post(`${this.apiUrl}/getSysInfo`, { host }); 204 | const sysInfo: SysInfo = response.data.sys_info; 205 | if (!sysInfo) { 206 | this.log.error(`No sys_info returned for host: 
${host}`); 207 | return undefined; 208 | } 209 | this.updateDeviceAlias(sysInfo); 210 | return sysInfo; 211 | } catch (error) { 212 | this.handleAxiosError(error, 'getSysInfo'); 213 | throw error; 214 | } 215 | } 216 | 217 | async controlDevice(host: string, feature: string, value: ControlDeviceValue, child_num?: number): Promise { 218 | let action: string; 219 | switch (feature) { 220 | case 'brightness': 221 | case 'color_temp': 222 | case 'fan_speed_level': 223 | action = `set_${feature}`; 224 | break; 225 | case 'hsv': 226 | action = 'set_hsv'; 227 | break; 228 | case 'state': 229 | action = value ? 'turn_on' : 'turn_off'; 230 | break; 231 | default: 232 | throw new Error(`Unsupported feature: ${feature}`); 233 | } 234 | await this.performDeviceAction(host, feature, action, value, child_num); 235 | } 236 | 237 | private async performDeviceAction( 238 | host: string, feature: string, action: string, value: ControlDeviceValue, childNumber?: number, 239 | ): Promise { 240 | const url = `${this.apiUrl}/controlDevice`; 241 | const data = { 242 | host, 243 | feature, 244 | action, 245 | value, 246 | ...(childNumber !== undefined && { child_num: childNumber }), 247 | }; 248 | try { 249 | const response = await axios.post(url, data); 250 | if (response.data.status !== 'success') { 251 | this.log.error(`Error performing action: ${response.data.message}`); 252 | } 253 | } catch (error) { 254 | this.handleAxiosError(error, 'controlDevice'); 255 | } 256 | } 257 | 258 | private handleAxiosError(error: unknown, context: string): void { 259 | if (axios.isAxiosError(error)) { 260 | if (error.response) { 261 | const statusCode = error.response.status; 262 | const errorMessage = error.response.data?.error || error.response.statusText || 'Unknown error'; 263 | if (statusCode === 500) { 264 | this.log.error(`Error during ${context}: ${errorMessage}`); 265 | } else { 266 | this.log.error(`Error during ${context}: ${statusCode} - ${errorMessage}`); 267 | } 268 | } else if 
(error.code === 'ECONNREFUSED') { 269 | this.log.error(`Connection refused during ${context} - device may be offline`); 270 | } else if (error.code === 'ETIMEDOUT') { 271 | this.log.error(`Connection timed out during ${context} - network may be down`); 272 | } else { 273 | this.log.error(`Axios error during ${context}: ${error.message}`); 274 | } 275 | } else if (error instanceof Error) { 276 | this.log.error(`Error during ${context}: ${error.message}`); 277 | if (error.stack) { 278 | this.log.debug(error.stack); 279 | } 280 | } else { 281 | this.log.error(`Unknown error during ${context}: ${JSON.stringify(error)}`); 282 | } 283 | } 284 | } -------------------------------------------------------------------------------- /.github/scripts/common.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | """ 3 | Common helpers reused across workflow scripts. 4 | """ 5 | import json 6 | import os 7 | import shlex 8 | import subprocess 9 | import urllib.request 10 | import urllib.error 11 | 12 | from dataclasses import dataclass 13 | from typing import Any, Sequence, Union 14 | 15 | def run( 16 | cmd: Union[str, Sequence[str]], 17 | *, 18 | env: dict[str, str] | None = None, 19 | check: bool = True, 20 | cwd: str | None = None, 21 | quiet: bool = False, 22 | capture: bool = False, 23 | stdout=None, 24 | stderr=None, 25 | text: bool = True, 26 | ) -> subprocess.CompletedProcess: 27 | shell = isinstance(cmd, str) 28 | if not quiet: 29 | printable = cmd if shell else " ".join(shlex.quote(str(p)) for p in cmd) 30 | print(f"$ {printable}") 31 | if capture: 32 | stdout = subprocess.PIPE 33 | stderr = subprocess.PIPE 34 | return subprocess.run( 35 | cmd, 36 | check=check, 37 | text=text, 38 | env=env, 39 | cwd=cwd, 40 | shell=shell, 41 | stdout=stdout, 42 | stderr=stderr, 43 | ) 44 | 45 | def read_event(path: str | None = None) -> dict: 46 | event_path = path or os.environ.get("GITHUB_EVENT_PATH") or "" 47 | if not 
event_path or not os.path.exists(event_path): 48 | return {} 49 | try: 50 | with open(event_path, "r", encoding="utf-8") as f: 51 | return json.load(f) 52 | except Exception: 53 | return {} 54 | 55 | def github_api( 56 | repo: str, 57 | token: str, 58 | path: str, 59 | method: str = "GET", 60 | data: dict | None = None, 61 | ) -> tuple[int, dict]: 62 | url = f"https://api.github.com/repos/{repo}{path}" 63 | headers = { 64 | "Accept": "application/vnd.github+json", 65 | "Authorization": f"Bearer {token}", 66 | "User-Agent": f"scripts-common (+https://github.com/{repo})", 67 | "Content-Type": "application/json", 68 | } 69 | body = json.dumps(data).encode() if data is not None else None 70 | req = urllib.request.Request(url, data=body, headers=headers, method=method) 71 | try: 72 | with urllib.request.urlopen(req, timeout=45) as r: 73 | raw = r.read().decode("utf-8") 74 | code = r.getcode() 75 | try: 76 | payload = json.loads(raw) if raw.strip() else {} 77 | except Exception: 78 | payload = {} 79 | return code, payload 80 | except urllib.error.HTTPError as e: 81 | try: 82 | raw = e.read().decode() 83 | payload = json.loads(raw) if raw.strip() else {} 84 | except Exception: 85 | payload = {"message": str(e)} 86 | return e.code, payload 87 | except Exception as e: 88 | return 0, {"message": str(e)} 89 | 90 | def gh_commit_pulls(repo: str, token: str, sha: str) -> list[dict]: 91 | url = f"https://api.github.com/repos/{repo}/commits/{sha}/pulls" 92 | headers = { 93 | "Accept": "application/vnd.github.groot-preview+json", 94 | "Authorization": f"Bearer {token}", 95 | "User-Agent": f"release-manager (+https://github.com/{repo})", 96 | } 97 | req = urllib.request.Request(url, headers=headers, method="GET") 98 | with urllib.request.urlopen(req, timeout=30) as resp: 99 | raw = resp.read().decode() 100 | data = json.loads(raw) if raw.strip() else [] 101 | return data if isinstance(data, list) else [] 102 | 103 | def gh_list_paginated( 104 | repo: str, 105 | token: str, 106 | 
base_path: str, 107 | *, 108 | per_page: int = 100, 109 | max_pages: int = 50, 110 | ) -> list[dict]: 111 | out: list[dict] = [] 112 | page = 1 113 | while page <= max_pages: 114 | sep = "&" if "?" in base_path else "?" 115 | path = f"{base_path}{sep}per_page={per_page}&page={page}" 116 | code, batch = github_api(repo, token, path, method="GET") 117 | if code != 200 or not isinstance(batch, list) or not batch: 118 | break 119 | out.extend(batch) 120 | if len(batch) < per_page: 121 | break 122 | page += 1 123 | return out 124 | 125 | def gh_release( 126 | repo: str, 127 | token: str, 128 | *, 129 | release_id: int | None = None, 130 | tag: str | None = None, 131 | ) -> dict | None: 132 | if release_id is not None and tag is not None: 133 | raise ValueError("Provide only one of release_id or tag") 134 | if release_id is not None: 135 | code, data = github_api(repo, token, f"/releases/{release_id}") 136 | if code == 404: 137 | return None 138 | return data if isinstance(data, dict) else None 139 | if tag is not None: 140 | code, data = github_api(repo, token, f"/releases/tags/{tag}") 141 | if code == 404: 142 | return None 143 | return data if isinstance(data, dict) else None 144 | raise ValueError("release_id or tag is required") 145 | 146 | def gh_release_create( 147 | repo: str, 148 | token: str, 149 | tag: str, 150 | target_commitish: str | None = None, 151 | draft: bool = True, 152 | prerelease: bool = False, 153 | name: str | None = None, 154 | body: str | None = None, 155 | ) -> dict | None: 156 | payload: dict[str, Any] = { 157 | "tag_name": tag, 158 | "draft": bool(draft), 159 | "prerelease": bool(prerelease), 160 | } 161 | if target_commitish: 162 | payload["target_commitish"] = target_commitish 163 | if name: 164 | payload["name"] = name 165 | if body: 166 | payload["body"] = body 167 | code, data = github_api(repo, token, "/releases", method="POST", data=payload) 168 | if 200 <= code < 300 and isinstance(data, dict): 169 | return data 170 | 
print(f"::warning::Failed to create release {tag}: {code} {data}") 171 | return None 172 | 173 | def gh_release_delete(repo: str, token: str, release_id: int) -> bool: 174 | code, _ = github_api(repo, token, f"/releases/{release_id}", method="DELETE") 175 | return 200 <= code < 300 or code == 204 176 | 177 | def gh_release_update( 178 | repo: str, 179 | token: str, 180 | release_id: int, 181 | **fields: Any, 182 | ) -> dict | None: 183 | code, data = github_api( 184 | repo, 185 | token, 186 | f"/releases/{release_id}", 187 | method="PATCH", 188 | data=fields or {}, 189 | ) 190 | if 200 <= code < 300 and isinstance(data, dict): 191 | return data 192 | return None 193 | 194 | def gh_releases(repo: str, token: str, *, max_pages: int = 50) -> list[dict]: 195 | return gh_list_paginated(repo, token, "/releases", max_pages=max_pages) 196 | 197 | def git_checkout_ref(ref: str, *, create_branch_from: str | None = None) -> None: 198 | if create_branch_from: 199 | try: 200 | run(["git", "checkout", "-B", ref, f"origin/{create_branch_from}"], check=True) 201 | return 202 | except Exception: 203 | pass 204 | run(["git", "checkout", ref], check=False) 205 | 206 | def git_checkout_tag(tag: str) -> None: 207 | run(["git", "fetch", "--tags", "--force", "origin"], check=False) 208 | cp = run( 209 | ["git", "-c", "advice.detachedHead=false", "checkout", "-f", f"tags/{tag}"], 210 | check=False, 211 | ) 212 | if cp.returncode != 0: 213 | run(["git", "checkout", "-f", f"refs/tags/{tag}"], check=True) 214 | 215 | def git_commit_files(files: Sequence[str], message: str) -> None: 216 | run(["git", "config", "--local", "user.email", "action@github.com"], check=False) 217 | run(["git", "config", "--local", "user.name", "GitHub Action"], check=False) 218 | staged_any = False 219 | for f in files: 220 | if os.path.exists(f): 221 | run(["git", "add", f], check=False) 222 | staged_any = True 223 | if not staged_any: 224 | return 225 | if run(["git", "diff", "--cached", "--quiet"], 
check=False).returncode == 0: 226 | return 227 | run(["git", "commit", "-m", message], check=False) 228 | run(["git", "push"], check=False) 229 | 230 | def git_delete_tag(tag: str) -> None: 231 | try: 232 | run(["git", "fetch", "--tags", "--force", "origin"], check=False) 233 | run(["git", "push", "origin", f":refs/tags/{tag}"], check=False) 234 | except subprocess.CalledProcessError: 235 | print( 236 | f"::warning::[common] Failed to delete tag {tag} " 237 | f"(it may not exist remotely)" 238 | ) 239 | 240 | def git_fetch(ref: str | None = None, *, depth: int | None = None) -> None: 241 | args = ["git", "fetch", "origin"] 242 | if ref: 243 | args.append(ref) 244 | if depth is not None: 245 | args.append(f"--depth={depth}") 246 | run(args, check=False) 247 | 248 | def git_force_tag(tag: str) -> None: 249 | try: 250 | run(["git", "fetch", "--tags"], check=False) 251 | run(["git", "tag", "-f", tag], check=True) 252 | run(["git", "push", "--force", "origin", tag], check=True) 253 | print(f"[common] Tag {tag} updated to HEAD") 254 | except subprocess.CalledProcessError as e: 255 | print(f"::warning::[common] Failed to update tag {tag}: {e}") 256 | 257 | def git_get_commit_author_name(sha: str) -> str: 258 | proc = run( 259 | ["git", "show", "-s", "--format=%aN", sha], 260 | capture=True, 261 | check=False, 262 | ) 263 | return proc.stdout.strip() if proc and getattr(proc, "stdout", None) else "" 264 | 265 | def git_get_commit_subject(sha: str) -> str: 266 | proc = run( 267 | ["git", "show", "-s", "--format=%s", sha], 268 | capture=True, 269 | check=False, 270 | ) 271 | return proc.stdout.strip() if proc and getattr(proc, "stdout", None) else "" 272 | 273 | def git_rev_list_range(before: str, after: str) -> list[str]: 274 | proc = run( 275 | ["git", "rev-list", "--reverse", f"{before}..{after}"], 276 | capture=True, 277 | check=False, 278 | ) 279 | if not proc or not getattr(proc, "stdout", None): 280 | return [] 281 | return [s for s in proc.stdout.strip().split() if 
s] 282 | 283 | def npm_available() -> bool: 284 | try: 285 | run( 286 | ["npm", "--version"], 287 | check=True, 288 | stdout=subprocess.DEVNULL, 289 | stderr=subprocess.DEVNULL, 290 | quiet=True, 291 | ) 292 | return True 293 | except Exception: 294 | return False 295 | 296 | def npm_pkg_set_version(new_version_no_v: str) -> bool: 297 | if not os.path.exists("package.json"): 298 | print("[common] package.json not found; skipping npm version alignment.") 299 | return False 300 | if not npm_available(): 301 | print("[common] npm not available; cannot align package versions.") 302 | return False 303 | try: 304 | run(["npm", "pkg", "set", f"version={new_version_no_v}"], check=True) 305 | run(["npm", "i", "--package-lock-only"], check=False) 306 | print(f"[common] Updated package.json version -> {new_version_no_v}") 307 | return True 308 | except subprocess.CalledProcessError as e: 309 | print(f"[common] npm version alignment failed: {e}") 310 | return False 311 | 312 | def npm_read_version() -> str: 313 | with open("package.json", "r", encoding="utf-8") as f: 314 | data = json.load(f) 315 | v = data.get("version", "") 316 | if not isinstance(v, str) or not v: 317 | raise RuntimeError("package.json is missing a valid version") 318 | return v 319 | 320 | def npm_set_version_no_git_tag(new_ver: str) -> None: 321 | run(["npm", "version", new_ver, "--no-git-tag-version"]) 322 | 323 | @dataclass 324 | class Context: 325 | github_repository: str 326 | github_token: str 327 | head_after: str = "" 328 | head_before: str = "" 329 | is_beta: bool = False 330 | issue_number: str = "" 331 | mode: str = "" 332 | pull_request_author: str = "" 333 | pull_request_branch: str = "" 334 | pull_request_labels: str = "" 335 | pull_request_number: str = "" 336 | pull_request_title: str = "" 337 | tag: str = "" 338 | target_branch: str = "" -------------------------------------------------------------------------------- /.github/scripts/discord_tools.py: 
-------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | """ 3 | Discord workflow helpers with three env-driven modes, controlled by MODE: 4 | 5 | MODE=trim 6 | - Reads the GitHub Release event payload (GITHUB_EVENT_PATH) 7 | - Builds a compact "Event - " field value from the release body: 8 | * Bold release name or tag on the first line if available 9 | * Category headings with bullets under their correct categories 10 | * One bullet per present category initially 11 | * Appends "**Full Changelog**: ..." at the bottom (if present) 12 | * Round-robins remaining bullets under correct categories until the Discord field limit (1024 chars) is reached 13 | * If not all bullets fit, appends "- …" to the last non-empty category (if space allows) 14 | * Ensures an "Update CHANGELOG.md for (beta) release ..." bullet exists in Other Changes, 15 | inserting it as the first bullet there if missing 16 | - Writes Actions output key "body" (multiline < str: 52 | if max_len <= 0: 53 | return "" 54 | if len(s) <= max_len: 55 | return s 56 | if max_len == 1: 57 | return "…" 58 | return s[: max_len - 1] + "…" 59 | 60 | def _read_release(evt: dict) -> dict: 61 | rel = evt.get("release") 62 | return rel if isinstance(rel, dict) else {} 63 | 64 | def _read_release_body(evt: dict) -> str: 65 | rel = _read_release(evt) 66 | return str(rel.get("body") or "") 67 | 68 | def _read_release_name_or_tag(evt: dict) -> str: 69 | rel = _read_release(evt) 70 | name = str(rel.get("name") or "") 71 | tag = str(rel.get("tag_name") or "") 72 | return name or tag or "" 73 | 74 | def _extract_full_changelog_line(lines: list[str]) -> tuple[str, list[str]]: 75 | fc_line = "" 76 | keep: list[str] = [] 77 | for ln in lines: 78 | if ln.strip().startswith("**Full Changelog**:"): 79 | fc_line = ln.strip() 80 | else: 81 | keep.append(ln) 82 | return fc_line, keep 83 | 84 | def _parse_sections(raw_body: str) -> dict: 85 | sections: dict = {} 86 | current = "" 
87 | for ln in raw_body.splitlines(): 88 | ln = ln.replace("\r", "") 89 | if ln.startswith("### ") or ln.startswith("## "): 90 | current = ln[4:].strip() 91 | if current: 92 | sections.setdefault(current, []) 93 | continue 94 | if current and ln.startswith("- "): 95 | sections[current].append(ln.strip()) 96 | return sections 97 | 98 | def _ordered_categories(sections: dict) -> list[str]: 99 | present = list(sections.keys()) 100 | ordered: list[str] = [c for c in CATEGORY_ORDER if c in sections] 101 | for c in present: 102 | if c not in CATEGORY_ORDER: 103 | ordered.append(c) 104 | return ordered 105 | 106 | def _ensure_changelog_update_bullet(sections: dict, version: str) -> None: 107 | other = sections.get("Other Changes") 108 | if other is None: 109 | sections["Other Changes"] = [] 110 | other = sections["Other Changes"] 111 | already = any( 112 | b.startswith("- Update CHANGELOG.md for beta release") 113 | or b.startswith("- Update CHANGELOG.md for release") 114 | for b in other 115 | ) 116 | if already or not version: 117 | return 118 | if "beta" in version: 119 | bullet = f"- Update CHANGELOG.md for beta release {version} [beta-release] (@github-actions)" 120 | else: 121 | bullet = f"- Update CHANGELOG.md for release {version} [release] (@github-actions)" 122 | other.insert(0, bullet) 123 | 124 | def _build_event_value_from_body(body: str, name_or_tag: str, field_hard_max: int) -> str: 125 | lines_all = body.splitlines() 126 | fc_line, keep_lines = _extract_full_changelog_line(lines_all) 127 | sections = _parse_sections("\n".join(keep_lines)) 128 | _ensure_changelog_update_bullet(sections, name_or_tag) 129 | order = _ordered_categories(sections) 130 | section_lines: dict = {} 131 | included_counts: dict = {cat: 0 for cat in order} 132 | for cat in order: 133 | lines = [f"### {cat}", ""] 134 | bullets = sections.get(cat, []) 135 | if bullets: 136 | lines.append(bullets[0]) 137 | included_counts[cat] = 1 138 | section_lines[cat] = lines 139 | progressed = True 
140 | while progressed: 141 | progressed = False 142 | for cat in order: 143 | bullets = sections.get(cat, []) 144 | idx = included_counts[cat] 145 | if idx < len(bullets): 146 | candidate_sections = {k: v[:] for k, v in section_lines.items()} 147 | candidate_sections[cat].append(bullets[idx]) 148 | candidate_lines: list[str] = [] 149 | if name_or_tag: 150 | candidate_lines.append(f"**{name_or_tag}**") 151 | candidate_lines.append("") 152 | for c in order: 153 | candidate_lines += candidate_sections[c] + [""] 154 | if fc_line: 155 | candidate_lines.append(fc_line) 156 | candidate = "\n".join(candidate_lines).strip() 157 | if len(candidate) <= field_hard_max: 158 | section_lines[cat].append(bullets[idx]) 159 | included_counts[cat] += 1 160 | progressed = True 161 | else: 162 | break 163 | final_sections = {k: v[:] for k, v in section_lines.items()} 164 | for cat in order: 165 | bullets = sections.get(cat, []) 166 | if included_counts[cat] < len(bullets): 167 | candidate_lines: list[str] = [] 168 | if name_or_tag: 169 | candidate_lines.append(f"**{name_or_tag}**") 170 | candidate_lines.append("") 171 | for c in order: 172 | candidate_lines += final_sections[c] + [""] 173 | candidate_lines += ["- …", ""] 174 | if fc_line: 175 | candidate_lines.append(fc_line) 176 | candidate = "\n".join(candidate_lines).strip() 177 | if len(candidate) <= field_hard_max: 178 | final_sections[cat].append("- …") 179 | final_lines: list[str] = [] 180 | if name_or_tag: 181 | final_lines.append(f"**{name_or_tag}**") 182 | final_lines.append("") 183 | for cat in order: 184 | final_lines += final_sections[cat] + [""] 185 | if fc_line: 186 | final_lines.append(fc_line) 187 | event_val = "\n".join(final_lines).strip() 188 | if len(event_val) > field_hard_max: 189 | event_val = _trunc_with_ellipsis(event_val, field_hard_max) 190 | return event_val 191 | 192 | def _write_actions_output(key: str, value: str) -> None: 193 | out_path = os.environ.get("GITHUB_OUTPUT") 194 | if not out_path: 195 | 
return 196 | with open(out_path, "a", encoding="utf-8") as f: 197 | f.write(f"{key}< None: 202 | embeds = payload.get("embeds") 203 | if not isinstance(embeds, list) or not embeds: 204 | payload["embeds"] = [{}] 205 | embeds = payload["embeds"] 206 | embed = embeds[0] if embeds else {} 207 | if not isinstance(embed, dict): 208 | embed = {} 209 | payload["embeds"] = [embed] 210 | fields = embed.get("fields") 211 | if not isinstance(fields, list): 212 | fields = [] 213 | embed["fields"] = fields 214 | idx_event = -1 215 | for i, f in enumerate(fields): 216 | name = str(f.get("name") or "") 217 | if name.lower().startswith("event -"): 218 | idx_event = i 219 | break 220 | if len(value) > 1024: 221 | value = _trunc_with_ellipsis(value, 1024) 222 | if idx_event >= 0: 223 | fields[idx_event]["value"] = value or "No further information" 224 | fields[idx_event]["inline"] = False 225 | if "name" in fields[idx_event]: 226 | nm = str(fields[idx_event]["name"]) 227 | if len(nm) > 256: 228 | fields[idx_event]["name"] = _trunc_with_ellipsis(nm, 256) 229 | return 230 | idx_after_ref = -1 231 | for i, f in enumerate(fields): 232 | name = str(f.get("name") or "") 233 | if name.strip().lower() == "ref": 234 | idx_after_ref = i 235 | break 236 | new_field = { 237 | "name": "Event - release", 238 | "value": value or "No further information", 239 | "inline": False, 240 | } 241 | if idx_after_ref >= 0: 242 | fields.insert(idx_after_ref + 1, new_field) 243 | else: 244 | fields.append(new_field) 245 | 246 | def _handle_trim() -> int: 247 | evt = common.read_event() 248 | body = _read_release_body(evt) 249 | name_or_tag = _read_release_name_or_tag(evt) 250 | event_value = _build_event_value_from_body( 251 | body=body, 252 | name_or_tag=name_or_tag, 253 | field_hard_max=1024, 254 | ) 255 | _write_actions_output("body", event_value) 256 | print(event_value) 257 | return 0 258 | 259 | def _handle_edit_payload() -> int: 260 | event_value = os.environ.get("EVENT_VALUE") 261 | payload_raw = 
os.environ.get("WEBHOOK_PAYLOAD") 262 | try: 263 | payload = json.loads(payload_raw) 264 | except Exception: 265 | try: 266 | fixed = payload_raw.replace("'", '"') 267 | payload = json.loads(fixed) 268 | except Exception: 269 | print("::error::WEBHOOK_PAYLOAD was invalid JSON", file=sys.stderr) 270 | return 1 271 | _ensure_event_field(payload, event_value) 272 | final_json = json.dumps(payload, ensure_ascii=False) 273 | _write_actions_output("edited_payload", final_json) 274 | print(final_json) 275 | return 0 276 | 277 | def _handle_post() -> int: 278 | edited_payload_raw = os.environ.get("EDITED_PAYLOAD") 279 | webhook = os.environ.get("WEBHOOK_URL") 280 | try: 281 | edited_payload = json.loads(edited_payload_raw) 282 | except Exception: 283 | try: 284 | fixed = edited_payload_raw.replace("'", '"') 285 | edited_payload = json.loads(fixed) 286 | except Exception: 287 | print("::error::EDITED_PAYLOAD was invalid JSON", file=sys.stderr) 288 | return 1 289 | try: 290 | response = requests.post(webhook, json=edited_payload, timeout=45) 291 | if response.ok: 292 | print(f"POST -> {response.status_code} {response.reason}") 293 | return 0 294 | print(f"::error::Discord webhook failed {response.status_code}: {response.text}", file=sys.stderr) 295 | return 1 296 | except requests.RequestException as e: 297 | print(f"::error::Request failed: {e}", file=sys.stderr) 298 | return 1 299 | 300 | def main() -> int: 301 | mode = os.environ.get("MODE") 302 | if mode == "trim": 303 | return _handle_trim() 304 | if mode == "edit-payload": 305 | return _handle_edit_payload() 306 | if mode == "post": 307 | return _handle_post() 308 | print("::error::MODE must be one of: trim, edit-payload, post", file=sys.stderr) 309 | return 1 310 | 311 | if __name__ == "__main__": 312 | sys.exit(main()) -------------------------------------------------------------------------------- /src/python/pythonChecker.ts: -------------------------------------------------------------------------------- 1 | 
import type { Logger } from 'homebridge'; 2 | 3 | import axios from 'axios'; 4 | import fs from 'node:fs'; 5 | import path from 'node:path'; 6 | import { fileURLToPath } from 'node:url'; 7 | 8 | import type KasaPythonPlatform from '../platform.js'; 9 | import { delay, prefixLogger, runCommand } from '../utils.js'; 10 | 11 | const __dirname: string = path.dirname(fileURLToPath(import.meta.url)); 12 | const SUPPORTED_PYTHON_VERSIONS: string[] = ['3.11', '3.12', '3.13']; 13 | 14 | class PythonChecker { 15 | private readonly log: Logger; 16 | private readonly platform: KasaPythonPlatform; 17 | private readonly advancedPythonLogging: boolean; 18 | private readonly pythonExecutables: string[]; 19 | private readonly pluginDirPath: string; 20 | private readonly venvPath: string; 21 | private readonly venvConfigPath: string; 22 | private readonly requirementsPath: string = path.join(__dirname, '..', '..', 'requirements.txt'); 23 | private pythonExecutable: string = ''; 24 | private venvPipExecutable: string = ''; 25 | private venvPythonExecutable: string = ''; 26 | 27 | public constructor(platform: KasaPythonPlatform) { 28 | this.platform = platform; 29 | this.log = prefixLogger(this.platform.log, '[Python Check]'); 30 | this.advancedPythonLogging = this.platform.config.advancedOptions?.advancedPythonLogging ?? 
false; 31 | this.pythonExecutables = [ 32 | 'python3.13', 33 | 'python3.12', 34 | 'python3.11', 35 | 'python3', 36 | 'python', 37 | ]; 38 | this.pluginDirPath = path.join(this.platform.storagePath, 'kasa-python'); 39 | this.venvPath = path.join(this.pluginDirPath, '.venv'); 40 | this.venvConfigPath = path.join(this.venvPath, 'pyvenv.cfg'); 41 | } 42 | 43 | public async allInOne(isUpgrade: boolean): Promise { 44 | this.log.debug('Starting python environment check...'); 45 | this.ensurePluginDir(); 46 | await this.ensurePythonVersion(); 47 | await this.ensureVenvCreated(isUpgrade); 48 | await this.ensureVenvUsesCorrectPythonHome(); 49 | await this.ensureVenvPipUpToDate(); 50 | await this.ensureVenvRequirementsSatisfied(); 51 | this.log.debug('Python environment check completed successfully'); 52 | } 53 | 54 | private ensurePluginDir(): void { 55 | this.log.debug('Ensuring plugin directory exists:', this.pluginDirPath); 56 | if (!fs.existsSync(this.pluginDirPath)) { 57 | fs.mkdirSync(this.pluginDirPath); 58 | this.log.debug('Plugin directory created:', this.pluginDirPath); 59 | } else { 60 | this.log.debug('Plugin directory already exists:', this.pluginDirPath); 61 | } 62 | } 63 | 64 | private async ensurePythonVersion(): Promise { 65 | this.log.debug('Checking for supported Python version'); 66 | const userPythonPath = this.platform.config.advancedOptions.pythonPath ?? ''; 67 | if (userPythonPath) { 68 | this.log.debug(`User configured pythonPath: ${userPythonPath}`); 69 | if (!fs.existsSync(userPythonPath)) { 70 | this.log.error(`Configured pythonPath (${userPythonPath}) does not exist.`); 71 | } else if (fs.statSync(userPythonPath).isDirectory()) { 72 | this.log.error( 73 | `Configured pythonPath (${userPythonPath}) is a directory, not an executable. ` + 74 | 'Please provide the full path to the Python executable, i.e. 
/usr/bin/python3.11', 75 | ); 76 | } else { 77 | const version = await this.getPythonVersion(userPythonPath); 78 | if (version && SUPPORTED_PYTHON_VERSIONS.includes(version)) { 79 | this.setPythonExecutables(userPythonPath, version); 80 | return; 81 | } else { 82 | this.log.error(`Configured pythonPath (${userPythonPath}) is not supported`); 83 | } 84 | } 85 | } 86 | for (const executable of this.pythonExecutables) { 87 | const resolved = await this.resolvePythonExecutable(executable); 88 | if (resolved) { 89 | const version = await this.getPythonVersion(resolved); 90 | if (version && SUPPORTED_PYTHON_VERSIONS.includes(version)) { 91 | this.setPythonExecutables(resolved, version); 92 | return; 93 | } 94 | } 95 | } 96 | this.log.error('No supported Python version found. Install Python 3.11+ and restart Homebridge.'); 97 | throw new Error('No supported Python version found. Install Python 3.11+ and restart Homebridge.'); 98 | } 99 | 100 | private async getPythonVersion(executablePath: string): Promise { 101 | try { 102 | const [stdout] = await runCommand( 103 | this.log, 104 | executablePath, 105 | ['--version'], 106 | undefined, 107 | !this.advancedPythonLogging, 108 | !this.advancedPythonLogging, 109 | false, 110 | ['ENOENT'], 111 | ); 112 | const match = stdout.trim().match(/^Python (\d+\.\d+)/); 113 | return match ? match[1] : null; 114 | } catch { 115 | return null; 116 | } 117 | } 118 | 119 | private async resolvePythonExecutable(executable: string): Promise { 120 | const cmd = process.platform === 'win32' ? 
'where' : 'which'; 121 | try { 122 | const [stdout] = await runCommand( 123 | this.log, 124 | cmd, 125 | [executable], 126 | undefined, 127 | !this.advancedPythonLogging, 128 | !this.advancedPythonLogging, 129 | ); 130 | const candidates = stdout.trim().split(/\r?\n/).filter(Boolean); 131 | for (const candidate of candidates) { 132 | if (fs.existsSync(candidate)) { 133 | return candidate; 134 | } 135 | } 136 | return null; 137 | } catch { 138 | return null; 139 | } 140 | } 141 | 142 | private setPythonExecutables(pythonPath: string, version: string): void { 143 | this.pythonExecutable = pythonPath; 144 | const majorMinor = version; 145 | if (process.platform === 'win32') { 146 | this.venvPythonExecutable = path.join(this.venvPath, 'Scripts', 'python.exe'); 147 | this.venvPipExecutable = path.join(this.venvPath, 'Scripts', 'pip.exe'); 148 | } else { 149 | this.venvPythonExecutable = path.join(this.venvPath, 'bin', `python${majorMinor}`); 150 | this.venvPipExecutable = path.join(this.venvPath, 'bin', `pip${majorMinor}`); 151 | } 152 | this.platform.venvPythonExecutable = this.venvPythonExecutable; 153 | this.log.debug(`Selected Python executable: ${this.pythonExecutable}`); 154 | } 155 | 156 | private async ensureVenvCreated(isUpgrade: boolean): Promise { 157 | this.log.debug('Ensuring virtual environment is created'); 158 | if (isUpgrade || !this.isVenvCreated()) { 159 | await this.createVenv(); 160 | } else { 161 | this.log.debug('Virtual environment already exists'); 162 | } 163 | } 164 | 165 | private isVenvCreated(): boolean { 166 | const venvExists = fs.existsSync(this.venvPipExecutable) && 167 | fs.existsSync(this.venvConfigPath) && 168 | fs.existsSync(this.venvPythonExecutable); 169 | this.log.debug('Virtual environment exists:', venvExists); 170 | return venvExists; 171 | } 172 | 173 | private async createVenv(): Promise { 174 | this.log.debug('Creating virtual environment at path:', this.venvPath); 175 | const [stdout] = await runCommand( 176 | this.log, 
177 | this.pythonExecutable, 178 | ['-m', 'venv', this.venvPath, '--clear', '--upgrade-deps'], 179 | undefined, 180 | !this.advancedPythonLogging, 181 | !this.advancedPythonLogging, 182 | ); 183 | if (stdout.includes('not created successfully') || !this.isVenvCreated()) { 184 | this.log.error('Failed to create virtual environment.'); 185 | await delay(300000); 186 | } else { 187 | this.log.debug('Virtual environment created successfully'); 188 | } 189 | } 190 | 191 | private async ensureVenvUsesCorrectPythonHome(): Promise { 192 | this.log.debug('Ensuring virtual environment uses correct Python home'); 193 | const venvPythonHome = await this.getPythonHome(this.venvPythonExecutable); 194 | this.log.debug('Virtual environment Python home:', venvPythonHome); 195 | const pythonHome = await this.getPythonHome(this.pythonExecutable); 196 | this.log.debug('System Python home:', pythonHome); 197 | if (venvPythonHome !== pythonHome) { 198 | this.log.debug('Python homes mismatch, recreating virtual environment'); 199 | await this.createVenv(); 200 | } else { 201 | this.log.debug('Python homes match'); 202 | } 203 | } 204 | 205 | private async getPythonHome(executable: string): Promise { 206 | this.log.debug('Getting Python home for executable:', executable); 207 | const [basePrefix] = await runCommand( 208 | this.log, 209 | executable, 210 | [ 211 | '-c', 212 | 'import sys; print(getattr(sys, "base_prefix", getattr(sys, "prefix", "")))', 213 | ], 214 | undefined, 215 | !this.advancedPythonLogging, 216 | !this.advancedPythonLogging, 217 | ); 218 | let pythonHome = basePrefix.trim(); 219 | if (process.platform !== 'win32') { 220 | pythonHome = path.join(pythonHome, 'bin'); 221 | } 222 | return pythonHome; 223 | } 224 | 225 | private async ensureVenvPipUpToDate(): Promise { 226 | const currentVersion = await this.getVenvPipVersion(); 227 | const latestVersion = await this.getMostRecentPipVersion(); 228 | if (currentVersion !== latestVersion) { 229 | await this.updatePip(); 230 
| } else { 231 | this.log.debug('Virtual environment pip is up to date'); 232 | } 233 | } 234 | 235 | private async getVenvPipVersion(): Promise { 236 | const [stdout] = await runCommand( 237 | this.log, 238 | this.venvPipExecutable, 239 | ['--version'], 240 | undefined, 241 | !this.advancedPythonLogging, 242 | !this.advancedPythonLogging, 243 | ); 244 | return stdout.trim().split(' ')[1]; 245 | } 246 | 247 | private async getMostRecentPipVersion(): Promise { 248 | try { 249 | const response = await axios.get<{ info: { version: string } }>('https://pypi.org/pypi/pip/json'); 250 | return response.data.info.version; 251 | } catch (err) { 252 | this.log.error(`Error fetching latest pip version: ${err}`); 253 | return ''; 254 | } 255 | } 256 | 257 | private async updatePip(): Promise { 258 | this.log.debug('Updating pip in virtual environment'); 259 | await runCommand( 260 | this.log, 261 | this.venvPipExecutable, 262 | ['install', '--upgrade', 'pip'], 263 | undefined, 264 | !this.advancedPythonLogging, 265 | !this.advancedPythonLogging, 266 | ); 267 | this.log.debug('Pip updated successfully'); 268 | } 269 | 270 | private async ensureVenvRequirementsSatisfied(): Promise { 271 | if (!await this.areRequirementsSatisfied()) { 272 | await this.installRequirements(); 273 | } else { 274 | this.log.debug('Virtual environment requirements are satisfied'); 275 | } 276 | } 277 | 278 | private async areRequirementsSatisfied(): Promise { 279 | const [freezeStdout] = await runCommand( 280 | this.log, 281 | this.venvPipExecutable, 282 | ['freeze'], 283 | undefined, 284 | !this.advancedPythonLogging, 285 | !this.advancedPythonLogging, 286 | ); 287 | const installed = this.stringToObject(freezeStdout); 288 | const required = this.stringToObject(fs.readFileSync(this.requirementsPath, 'utf8')); 289 | return Object.keys(required).every(pkg => installed[pkg] === required[pkg]); 290 | } 291 | 292 | private stringToObject(value: string): Record { 293 | return 
value.trim().split('\n').reduce((acc, line) => { 294 | const [pkg, version] = line.split('==').map(x => x.trim()); 295 | if (pkg && version) { 296 | acc[pkg.toLowerCase()] = version; 297 | } 298 | return acc; 299 | }, {} as Record); 300 | } 301 | 302 | private async installRequirements(): Promise { 303 | this.log.debug('Installing requirements from:', this.requirementsPath); 304 | await runCommand( 305 | this.log, 306 | this.venvPipExecutable, 307 | ['install', '-r', this.requirementsPath], 308 | undefined, 309 | !this.advancedPythonLogging, 310 | !this.advancedPythonLogging, 311 | ); 312 | this.log.debug('Requirements installed successfully'); 313 | } 314 | } 315 | 316 | export default PythonChecker; -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Changelog 2 | 3 | ## [v2.9.1](https://github.com/ZeliardM/homebridge-kasa-python/releases/tag/v2.9.1) (2025-09-25) 4 | 5 | ### Other Changes 6 | 7 | - Update CHANGELOG.md for release v2.9.1 @github-actions [release] 8 | - npm(deps-dev): bump globals from 16.3.0 to 16.4.0 @dependabot[bot] [#114] 9 | - npm(deps-dev): bump @types/node from 24.3.0 to 24.5.2 @dependabot[bot] [#113] 10 | - npm(deps-dev): bump @stylistic/eslint-plugin from 5.2.3 to 5.4.0 in the stylistic group @dependabot[bot] [#112] 11 | - npm(deps-dev): bump the eslint group with 2 updates @dependabot[bot] [#111] 12 | - npm(deps-dev): bump @typescript-eslint/parser from 8.41.0 to 8.44.0 in the typescript-eslint group @dependabot[bot] [#110] 13 | - pip(deps): bump uvicorn from 0.35.0 to 0.36.0 @dependabot[bot] [#109] 14 | - ci(deps): bump actions/setup-node from 4 to 5 @dependabot[bot] [#108] 15 | - ci(deps): bump actions/checkout from 4 to 5 @dependabot[bot] [#107] 16 | - ci(deps): bump actions/stale from 9 to 10 @dependabot[bot] [#106] 17 | - ci(deps): bump actions/setup-python from 5 to 6 @dependabot[bot] [#105] 18 | - Bump 
axios from 1.11.0 to 1.12.0 in the npm_and_yarn group across 1 directory @dependabot[bot] [#85] 19 | - Release: v2.9.0 @ZeliardM [#104] 20 | - Issue/cleanup @ZeliardM [#116] 21 | - Convert beta releases (v2.9.1-beta.0, v2.9.1-beta.1) to regular release v2.9.1 @github-actions [beta-to-release] 22 | 23 | **Full Changelog**: https://github.com/ZeliardM/homebridge-kasa-python/compare/v2.9.0...v2.9.1 24 | 25 | ## [v2.9.1-beta.1](https://github.com/ZeliardM/homebridge-kasa-python/releases/tag/v2.9.1-beta.1) (2025-09-25) 26 | 27 | ### Other Changes 28 | 29 | - Update CHANGELOG.md for beta release v2.9.1-beta.1 @github-actions [beta-release] 30 | - Issue/cleanup @ZeliardM [#116] 31 | 32 | **Full Changelog**: https://github.com/ZeliardM/homebridge-kasa-python/compare/v2.9.1-beta.0...v2.9.1-beta.1 33 | 34 | ## [v2.9.1-beta.0](https://github.com/ZeliardM/homebridge-kasa-python/releases/tag/v2.9.1-beta.0) (2025-09-23) 35 | 36 | ### Other Changes 37 | 38 | - Update CHANGELOG.md for beta release v2.9.1-beta.0 @github-actions [beta-release] 39 | - npm(deps-dev): bump globals from 16.3.0 to 16.4.0 @dependabot[bot] [#114] 40 | - npm(deps-dev): bump @types/node from 24.3.0 to 24.5.2 @dependabot[bot] [#113] 41 | - npm(deps-dev): bump @stylistic/eslint-plugin from 5.2.3 to 5.4.0 in the stylistic group @dependabot[bot] [#112] 42 | - npm(deps-dev): bump the eslint group with 2 updates @dependabot[bot] [#111] 43 | - npm(deps-dev): bump @typescript-eslint/parser from 8.41.0 to 8.44.0 in the typescript-eslint group @dependabot[bot] [#110] 44 | - pip(deps): bump uvicorn from 0.35.0 to 0.36.0 @dependabot[bot] [#109] 45 | - ci(deps): bump actions/setup-node from 4 to 5 @dependabot[bot] [#108] 46 | - ci(deps): bump actions/checkout from 4 to 5 @dependabot[bot] [#107] 47 | - ci(deps): bump actions/stale from 9 to 10 @dependabot[bot] [#106] 48 | - ci(deps): bump actions/setup-python from 5 to 6 @dependabot[bot] [#105] 49 | - Bump axios from 1.11.0 to 1.12.0 in the npm_and_yarn group across 1 
directory @dependabot[bot] [#85] 50 | - Release: v2.9.0 @ZeliardM [#104] 51 | 52 | **Full Changelog**: https://github.com/ZeliardM/homebridge-kasa-python/compare/v2.9.0...v2.9.1-beta.0 53 | 54 | ## [v2.9.0](https://github.com/ZeliardM/homebridge-kasa-python/releases/tag/v2.9.0) (2025-09-22) 55 | 56 | ### Featured Changes 57 | 58 | - Fix child device sorting and calling @ZeliardM [#93] 59 | 60 | ### Bug Fixes 61 | 62 | - Update section ordering in release_manager.py @ZeliardM [#90] 63 | - Fix npm-publish permissions @ZeliardM [#89] 64 | - Fix release permissions @ZeliardM [#88] 65 | - Issue/fix dependabot @ZeliardM [#87] 66 | - Issue/fix npm publish failures @ZeliardM [#86] 67 | - Fix publish version matching package version @ZeliardM [#84] 68 | - Complete release workflow consolidation - replace complex inline Python with unified release manager @Copilot [#81] 69 | - Fix workflow issues: stale management, changelog updates, dependabot integration, labeler configuration, draft release section handling, and comprehensive testing validation @Copilot [#80] 70 | - Issue/fix beta draft pre release @ZeliardM [#94] 71 | - Add publish date to finalized CHANGELOG.md @ZeliardM [#92] 72 | 73 | ### Other Changes 74 | 75 | - Update CHANGELOG.md for release v2.9.0 @github-actions [release] 76 | - Fix CHANGELOG.md workflow not syncing properly with GitHub releases, add automated changelog maintenance, consolidate release workflows, implement comprehensive beta management system, optimize workflow code quality, and resolve Unreleased section handling @Copilot [#76] 77 | - Truncate Discord release notes @ZeliardM [#91] 78 | - Refactor of all workflows and scripts @ZeliardM [#95] 79 | - Issue/fix discord notify @ZeliardM [#97] 80 | - Correct handling of discord webhook body @ZeliardM [#98] 81 | - Update cmd_trim function to accept arguments @ZeliardM [#99] 82 | - Issue/fix discord notify @ZeliardM [#100] 83 | - Issue/cleanup discord notify @ZeliardM [#101] 84 | - Fix bullets for Update CHANGELOG.md line @ZeliardM
[#102] 85 | - Convert beta releases (v2.8.2-beta.0, v2.9.0-beta.0, v2.9.0-beta.1, v2.9.0-beta.2, v2.9.0-beta.3, v2.9.0-beta.4, v2.9.0-beta.5, v2.9.0-beta.6, v2.9.0-beta.7) to regular release v2.9.0 @github-actions [beta-to-release] 86 | 87 | **Full Changelog**: https://github.com/ZeliardM/homebridge-kasa-python/compare/v2.8.1...v2.9.0 88 | 89 | ## [v2.9.0-beta.7](https://github.com/ZeliardM/homebridge-kasa-python/releases/tag/v2.9.0-beta.7) (2025-09-22) 90 | 91 | ### Other Changes 92 | 93 | - Update CHANGELOG.md for beta release v2.9.0-beta.7 @github-actions [beta-release] 94 | - Fix bullets for Update CHANGELOG.md line @ZeliardM [#102] 95 | 96 | **Full Changelog**: https://github.com/ZeliardM/homebridge-kasa-python/compare/v2.9.0-beta.6...v2.9.0-beta.7 97 | 98 | ## [v2.9.0-beta.6](https://github.com/ZeliardM/homebridge-kasa-python/releases/tag/v2.9.0-beta.6) (2025-09-22) 99 | 100 | ### Other Changes 101 | 102 | - Update CHANGELOG.md for beta release v2.9.0-beta.6 @github-actions [beta-release] 103 | - Issue/cleanup discord notify @ZeliardM [#101] 104 | 105 | **Full Changelog**: https://github.com/ZeliardM/homebridge-kasa-python/compare/v2.9.0-beta.5...v2.9.0-beta.6 106 | 107 | ## [v2.9.0-beta.5](https://github.com/ZeliardM/homebridge-kasa-python/releases/tag/v2.9.0-beta.5) (2025-09-21) 108 | 109 | ### Other Changes 110 | 111 | - Update CHANGELOG.md for beta release v2.9.0-beta.5 @github-actions [beta-release] 112 | - Issue/fix discord notify @ZeliardM [#100] 113 | 114 | **Full Changelog**: https://github.com/ZeliardM/homebridge-kasa-python/compare/v2.9.0-beta.4...v2.9.0-beta.5 115 | 116 | ## [v2.9.0-beta.4](https://github.com/ZeliardM/homebridge-kasa-python/releases/tag/v2.9.0-beta.4) (2025-09-19) 117 | 118 | ### Other Changes 119 | 120 | - Update CHANGELOG.md for beta release v2.9.0-beta.4 @github-actions [beta-release] 121 | - Update cmd_trim function to accept arguments @ZeliardM [#99] 122 | 123 | **Full Changelog**: 
https://github.com/ZeliardM/homebridge-kasa-python/compare/v2.9.0-beta.3...v2.9.0-beta.4 124 | 125 | ## [v2.9.0-beta.3](https://github.com/ZeliardM/homebridge-kasa-python/releases/tag/v2.9.0-beta.3) (2025-09-19) 126 | 127 | ### Other Changes 128 | 129 | - Update CHANGELOG.md for beta release v2.9.0-beta.3 @github-actions [beta-release] 130 | - Correct handling of discord webhook body @ZeliardM [#98] 131 | 132 | **Full Changelog**: https://github.com/ZeliardM/homebridge-kasa-python/compare/v2.9.0-beta.2...v2.9.0-beta.3 133 | 134 | ## [v2.9.0-beta.2](https://github.com/ZeliardM/homebridge-kasa-python/releases/tag/v2.9.0-beta.2) (2025-09-18) 135 | 136 | ### Other Changes 137 | 138 | - Update CHANGELOG.md for beta release v2.9.0-beta.2 @github-actions [beta-release] 139 | - Issue/fix discord notify @ZeliardM [#97] 140 | 141 | **Full Changelog**: https://github.com/ZeliardM/homebridge-kasa-python/compare/v2.9.0-beta.1...v2.9.0-beta.2 142 | 143 | ## [v2.9.0-beta.1](https://github.com/ZeliardM/homebridge-kasa-python/releases/tag/v2.9.0-beta.1) (2025-09-18) 144 | 145 | ### Other Changes 146 | 147 | - Update CHANGELOG.md for beta release v2.9.0-beta.1 @github-actions [beta-release] 148 | - Refactor of all workflows and scripts @ZeliardM [#95] 149 | 150 | **Full Changelog**: https://github.com/ZeliardM/homebridge-kasa-python/compare/v2.9.0-beta.0...v2.9.0-beta.1 151 | 152 | ## [v2.9.0-beta.0](https://github.com/ZeliardM/homebridge-kasa-python/releases/tag/v2.9.0-beta.0) (2025-09-16) 153 | 154 | ### Featured Changes 155 | 156 | - Fix child device sorting and calling @ZeliardM [#93] 157 | 158 | ### Bug Fixes 159 | 160 | - Issue/fix beta draft pre release @ZeliardM [#94] 161 | - Add publish date to finalized CHANGELOG.md @ZeliardM [#92] 162 | 163 | ### Other Changes 164 | 165 | - Update CHANGELOG.md for beta release v2.9.0-beta.0 @github-actions [beta-release] 166 | - Truncate Discord release notes @ZeliardM [#91] 167 | 168 | **Full Changelog**: 
https://github.com/ZeliardM/homebridge-kasa-python/compare/v2.8.2-beta.0...v2.9.0-beta.0 169 | 170 | ## [v2.8.2-beta.0](https://github.com/ZeliardM/homebridge-kasa-python/releases/tag/v2.8.2-beta.0) (2025-09-15) 171 | 172 | ### Bug Fixes 173 | 174 | - Update section ordering in release_manager.py @ZeliardM [#90] 175 | - Fix npm-publish permissions @ZeliardM [#89] 176 | - Fix release permissions @ZeliardM [#88] 177 | - Issue/fix dependabot @ZeliardM [#87] 178 | - Issue/fix npm publish failures @ZeliardM [#86] 179 | - Fix publish version matching package version @ZeliardM [#84] 180 | - Complete release workflow consolidation - replace complex inline Python with unified release manager @Copilot [#81] 181 | - Fix workflow issues: stale management, changelog updates, dependabot integration, labeler configuration, draft release section handling, and comprehensive testing validation @Copilot [#80] 182 | 183 | ### Other Changes 184 | 185 | - Update CHANGELOG.md for beta release v2.8.2-beta.0 @github-actions [beta-release] 186 | - Fix CHANGELOG.md workflow not syncing properly with GitHub releases, add automated changelog maintenance, consolidate release workflows, implement comprehensive beta management system, optimize workflow code quality, and resolve Unreleased section handling @Copilot [#76] 187 | 188 | **Full Changelog**: https://github.com/ZeliardM/homebridge-kasa-python/compare/v2.8.1...v2.8.2-beta.0 189 | 190 | ## [v2.8.1](https://github.com/ZeliardM/homebridge-kasa-python/releases/tag/v2.8.1) (2025-09-05) 191 | 192 | ### Featured Changes 193 | 194 | - Prepare for scoped plugin @ZeliardM [#73] 195 | 196 | ### Other Changes 197 | 198 | - Fix release-drafter.yml @ZeliardM [#74] 199 | 200 | **Full Changelog**: https://github.com/ZeliardM/homebridge-kasa-python/compare/v2.8.0...v2.8.1 201 | 202 | ## [v2.8.0](https://github.com/ZeliardM/homebridge-kasa-python/releases/tag/v2.8.0) (2025-09-05) 203 | 204 | ### Breaking Changes 205 | 206 | - Complete Rewrite of Device 
Discovery 207 | 208 | ### Featured Changes 209 | 210 | - Add user configurable Python Path to config 211 | - Add support for L535E Bulbs 212 | - Added manual support for TS15 Matter Switches, waiting on fixture file from user 213 | - Add KL400L10 support 214 | - Add include mac address features 215 | - Added manual support for P400M Power Strip, waiting on fixture file from user 216 | - Added support for S500 Wall Switch 217 | - Implement Energy Usages for PowerStrip and Plug 218 | 219 | ### Bug Fixes 220 | 221 | - Fix brew environment check errors 222 | - Fix pythonChecker error logging 223 | - Fix error reporting for Python version check 224 | - Fix isShuttingDown check for periodicDiscovery 225 | - Fix lint and import issue with newer homebridge install 226 | - Fix Homebridge version check logging 227 | - Fix Homebridge version check 228 | 229 | ### Other Changes 230 | 231 | - Rewrite and refactor platform.ts, utils.ts, deviceManager.ts, and kasaApi.py to handle devices as they are discovered 232 | - Updated pythonChecker.ts and runCommand in utils.ts to handle user profiles 233 | - Update task handling for shutdown 234 | - Update logging 235 | - Add dictionary check for hsv values 236 | - EventEmitter maxListener update 237 | - Update axiosError handling 238 | - Update color_temp and hsv handling 239 | - Improved color temp handling 240 | - Improve brightness and fan speed handling 241 | - Add homebrew checking to user path for python 242 | - Add strictValidation and update dependencies and requirements 243 | - Update config.sample.json 244 | - Add anyio to requirements due to now missing on many user reports 245 | - Update node typescript config 246 | - Create OS agnostic pythonChecker.ts and update related files 247 | - Additional logging for getPythonHome 248 | - Update dependencies 249 | - Update requirements and dependencies 250 | - Finish updating dependencies 251 | - Merge branch 'v2.8.0-beta' into latest 252 | 253 | **Full Changelog**: 
https://github.com/ZeliardM/homebridge-kasa-python/compare/v2.7.2...v2.8.0 254 | -------------------------------------------------------------------------------- /.github/copilot-instructions.md: -------------------------------------------------------------------------------- 1 | # Homebridge Kasa Python - Copilot Coding Agent Onboarding 2 | 3 | ## Repository Overview 4 | 5 | This repository is a Homebridge platform plugin that bridges TP-Link Kasa/Tapo smart devices (plugs, switches, bulbs, power strips, light strips) to Apple HomeKit. It uses a hybrid architecture: the main orchestration, accessory management, and Homebridge integration is in TypeScript/Node.js; direct device communication and control is handled by Python scripts using the [python-kasa](https://github.com/python-kasa/python-kasa) library, exposed via a local API. 6 | 7 | - **Type**: Homebridge plugin for smart home integration 8 | - **Languages**: TypeScript/Node.js (ES Modules), Python 3.11/3.12/3.13 9 | - **Supported Devices**: Plugs, power strips, switches, bulbs, light strips (see README for full list) 10 | - **Target Runtime**: Node.js 20/22/24, Python 3.11/3.12/3.13 11 | - **Size**: ~3,000 lines TypeScript, several Python scripts 12 | - **Main Entry**: `src/index.ts`, `src/platform.ts` 13 | - **Python API Entrypoint**: `src/python/kasaApi.py`, startup: `src/python/startKasaApi.py` 14 | - **Configuration Schema**: `config.schema.json` 15 | - **Linting**: ESLint (config: `eslint.config.mjs`) 16 | - **Build Output**: Compiled JS in `dist/`, Python scripts copied to `dist/python/` 17 | 18 | ## Environment Setup and Build Instructions 19 | 20 | **CRITICAL:** This project **requires both Node.js AND Python environments** set up correctly. All device communication will fail if Python dependencies are missing. 21 | 22 | ### Environment Setup 23 | 24 | 1. 
**Python Setup (ALWAYS FIRST):** 25 | ```bash 26 | python3 -m pip install --upgrade pip 27 | python3 -m pip install -r requirements.txt 28 | ``` 29 | - If install times out, verify with: 30 | ```bash 31 | python3 -c "import kasa; print('Python Import OK')" 32 | ``` 33 | 34 | 2. **Node.js Setup (Supported: 20/22/24):** 35 | - Always use **`npm ci`** (never `npm install`) 36 | ```bash 37 | npm ci 38 | ``` 39 | 40 | ### Build, Lint, and Validation (ALWAYS in order): 41 | 42 | ```bash 43 | npm run lint # Lint (must pass, zero warnings) 44 | npm run build # Build (runs: npm ci && rimraf -I ./dist && tsc && node copyPythonFiles.js) 45 | node -e "import('./dist/index.js').then(()=>console.log('Node OK'))" 46 | python3 -c "import kasa; print('Python Import OK')" 47 | ``` 48 | 49 | - **Development/watch**: `npm run watch` (rebuilds and links automatically) 50 | - **Test**: `npm run test` (outputs "No test specified") 51 | - **Lint only**: `npm run lint` 52 | 53 | #### Common Troubleshooting 54 | 55 | - **Python errors at runtime**: Python dependencies *must* be installed first. 56 | - **rimraf not found**: Run `npm ci` to install dev dependencies. 57 | - **Missing dist/python files**: Build process copies Python files from `src/python/` to `dist/python/`. 58 | - **Pip install timeout**: If network issues, verify using import command above. 59 | - **Build will succeed even if Python deps are missing, but runtime will fail.** 60 | 61 | #### Build Timing 62 | 63 | - Clean build: ~10-15 seconds; incremental builds are faster. 64 | - Pip install can timeout; verify via import. 
65 | 66 | ## Project Architecture and Layout 67 | 68 | **Root files:** 69 | - `package.json` - Node dependencies, scripts 70 | - `tsconfig.json` - TypeScript config 71 | - `requirements.txt` - Python dependencies 72 | - `eslint.config.mjs` - ESLint config 73 | - `config.schema.json` - Homebridge config schema 74 | - `copyPythonFiles.js` - Copies Python scripts to dist 75 | - `nodemon.json` - Watch config 76 | 77 | **Source:** 78 | ``` 79 | src/ 80 | ├── index.ts # Plugin registration 81 | ├── platform.ts # Main platform logic 82 | ├── config.ts # Config parsing/validation 83 | ├── settings.ts # Platform/plugin names 84 | ├── utils.ts # Utility functions 85 | ├── taskQueue.ts # Task management 86 | ├── accessoryInformation.ts # HomeKit accessory info 87 | ├── devices/ 88 | │ ├── index.ts # Base accessory class 89 | │ ├── create.ts # Device creation factory 90 | │ ├── deviceManager.ts # Device communication manager 91 | │ ├── kasaDevices.ts # Type definitions 92 | │ ├── homekitPlug.ts # Plug logic 93 | │ ├── homekitPowerStrip.ts # Power strip logic 94 | │ ├── homekitSwitch.ts # Switch logic 95 | │ ├── homekitLightBulb.ts # Bulb logic 96 | │ └── homekitSwitchWithChildren.ts # Multi-child switches/fans 97 | └── python/ 98 | ├── kasaApi.py # Python API bridge 99 | ├── startKasaApi.py # API server startup 100 | └── pythonChecker.ts # Python env validation (Node-side) 101 | ``` 102 | - **Build output:** 103 | - `dist/` - Compiled JS + copied Python files 104 | - `dist/python/` - Python scripts 105 | 106 | ## Device Implementation Patterns 107 | 108 | - **Device factory:** `src/devices/create.ts` creates HomeKit device instances for discovered Kasa devices. 109 | - **Base class:** `src/devices/index.ts` is the HomeKitDevice base class. 110 | - **Accessory logic:** Device types (plug, bulb, power strip, switch, multi-child) each have dedicated classes. 111 | - **Python bridge:** All device control/state queries use the Python API via HTTP. 
112 | - **State management:** Accessories poll and synchronize state with the underlying device. 113 | 114 | ## CI/CD and Validation Workflows 115 | 116 | ### Included Workflows 117 | 118 | - **build-lint-test.yml** 119 | - Runs on push, PR, and workflow_call for `latest` and `beta` branches. 120 | - Matrix builds: Node.js 20/22/24, Python 3.11/3.12/3.13. 121 | - Steps: setup environments, install dependencies (Python always before Node), lint, build, test Node.js import, test Python imports, audit dependencies. 122 | 123 | - **handle-issue.yml** 124 | - Runs on GitHub Issue opened/edited/reopened. 125 | - Classifies the issue and applies canonical labels. 126 | - Clears all labels and enforces a single canonical label. 127 | - Posts sticky comment with classification and validation results. 128 | - Fails validation if required info/labels are missing; marks with `needs-info` if insufficient. 129 | 130 | - **handle-pr.yml** 131 | - Runs on PR events. 132 | - Retargets PR base to `beta` unless stable-conversion (beta->latest) PR. 133 | - Clears labels (unless stable-conversion), applies labels via labeler, validates base/labels/markers. 134 | - Sticky comments for validation results. 135 | - Fails invalid PRs (wrong base, missing classification label, missing breaking change explanation markers). 136 | 137 | - **flow-beta.yml** 138 | - On PR merged to `beta`, updates changelog, beta draft, and aligns package versions. 139 | - On beta release published, finalizes changelog section, adds housekeeping entry, retags to include finalize commit. 140 | 141 | - **flow-stable.yml** 142 | - On stable-conversion PR merged to `latest` (with `stable-conversion` label), aggregates published betas into a stable draft, updates changelog, aligns package versions. 143 | - On stable release published, finalizes changelog section, adds housekeeping entry, retags release tag to include finalize commit. 144 | 145 | - **beta-release.yml** 146 | - Runs on published beta releases. 
147 | - Calls build-lint-test then npm-publish workflow (publishes to npm under beta dist-tag). 148 | - Notifies Discord with trimmed release notes. 149 | 150 | - **release.yml** 151 | - Runs on published stable releases (base: latest). 152 | - Calls build-lint-test then npm-publish workflow (publishes to npm). 153 | - Notifies Discord. 154 | 155 | - **beta-to-stable.yml** 156 | - Manual workflow_dispatch to create or update a beta→latest PR for the next stable version, auto-detected from latest published beta tag. 157 | 158 | - **npm-publish.yml** 159 | - Publishes to npm, handles rollback if publish fails (delete tag/release, undo CHANGELOG finalize metadata). 160 | 161 | - **discord-notify.yml** 162 | - Posts release event to Discord, with trimmed notes and changelog bullets. 163 | 164 | - **dependabot-auto-merge.yml** 165 | - Auto-labels and merges safe Dependabot PRs (minor/patch, no breaking-change). 166 | - Posts sticky comment for blocked/auto-merged PRs. 167 | 168 | - **codeql.yml** 169 | - Runs CodeQL analysis on JS/TS and Python for security and code quality. 170 | 171 | - **stale.yml** 172 | - Marks issues and PRs as stale after inactivity, closes after additional days unless updated. 173 | 174 | ### Key Workflow Enforcement Rules 175 | 176 | - **All PRs must target `beta` branch** (except stable-conversion PRs from beta→latest, which require the `stable-conversion` label). 177 | - **Labels are cleared and reapplied** for every PR/issue to enforce canonical forms (`bug`, `enhancement`, `breaking-change`, etc.). 178 | - **PR validation**: Base branch, classification labels, breaking-change markers, and explanation length enforced (see `.github/pull_request_template.md`). 179 | - **Issue validation**: Must have canonical label; required sections for bug/breaking-change types; `needs-info` applied if insufficient. 
- **Release automation**: On PR merge, changelog and draft release are updated and aligned; on publish, changelog is finalized and NPM publish is triggered.
- **Manual workflow for beta→stable PRs**: Use workflow_dispatch to aggregate published betas into a stable draft.
- **Sticky comments**: Summarize validation/classification status for PRs and issues.

**To replicate CI locally:**
```bash
python3 -m pip install --upgrade pip
python3 -m pip install -r requirements.txt
npm ci
npm run lint
npm run build
node -e "import('./dist/index.js').then(()=>console.log('Node OK'))"
python3 -c "import kasa; print('Python Import OK')"
```

## PR and Issue Handling Workflow

**Branch Model:**
- `latest`: Stable release
- `beta`: Active development (**feature branches must be created from `beta`**)
- **PRs:** Always target `beta`

**PR Checklist:**
- Install Python & Node dependencies, build, lint, import tests **before submitting**
- Label appropriately: `enhancement`, `fix`, `breaking-change`, `docs`, `dependency`
- **One logical change per PR**
- PRs must describe the change and reference relevant issues
- PRs must not include extraneous/unrelated changes

**Issue Handling:**
- Use GitHub Issues for bugs, features, support
- When generating issues, always clarify ambiguity and include relevant context/code references

**Release Flow:**
- Merging a PR into `beta` triggers changelog update and beta draft
- Unpublished beta.0 aggregates PRs (single tag per beta)
- After publishing a beta, new changes create beta.(N+1)
- Breaking changes after publish escalate to new major beta.0
- Manual conversion (workflow_dispatch) consolidates betas into a stable draft
- Publishing stable release finalizes
changelog 221 | 222 | **Changelog:** 223 | - Beta releases mirror only their version’s section 224 | - Stable releases omit “Beta Release -” line 225 | - All changes must be categorized and labeled 226 | 227 | **Labels:** 228 | - breaking-change 229 | - enhancement/feature 230 | - fix/bug 231 | - docs/dependency 232 | 233 | ## Testing and Validation 234 | 235 | **No formal unit tests exist.** Validation relies on: 236 | - Lint checks for code quality 237 | - Build success for compilation validity 238 | - Import tests for runtime compatibility 239 | - Manual Homebridge testing (see README) 240 | - Use `npm run watch` for live development; plugin is linked globally 241 | 242 | ## Common Troubleshooting 243 | 244 | - **Build fails with Python errors:** Ensure Python dependencies are installed first. 245 | - **Import test fails:** Verify both Node.js and Python are properly set up and compatible. 246 | - **Lint failures:** Run `npm run lint` for details. 247 | - **Watch mode issues:** Ensure no conflicting Homebridge instances are running. 248 | - **Platform registration errors:** Confirm plugin is compiled and exports registration function. 249 | 250 | ## Key Dependencies 251 | 252 | - **Node.js:** `axios`, `eventsource`, `ts-essentials`, `typescript` 253 | - **Python:** `python-kasa`, `quart`, `uvicorn`, `anyio` 254 | - **Dev:** `eslint`, `rimraf`, `nodemon`, `homebridge` 255 | 256 | ## Copilot Coding Agent Guidance 257 | 258 | - **Follow these instructions directly for build, validation, PR, and issue workflows.** 259 | - **For PRs:** Always create a feature branch from `beta`, never from `latest` or main. 260 | - **Always run build/lint/import validation sequence before opening a PR.** 261 | - **Label PRs** with the correct category. 262 | - **Issues:** Clarify ambiguity before draft, include context. 263 | - **Code changes:** Follow file structure; device logic in `src/devices/`, Python additions in `src/python/`. 
264 | - **Never skip Python dependency installation.** 265 | - **Trust these instructions—search only if information here is incomplete or in error.** 266 | - **Be concise, clear, and thoughtful in all responses, referencing this onboarding if needed.** 267 | 268 | ## Final Notes 269 | 270 | - **Hybrid architecture:** Node.js orchestrates, Python controls devices. 271 | - **Environment setup order is critical.** 272 | - **Lint and build must always pass before PR—no exceptions.** 273 | - **PRs must target `beta`, use feature branches, and be labeled.** 274 | - **Trust these instructions—search only if necessary.** 275 | --------------------------------------------------------------------------------