├── .devcontainer ├── Dockerfile ├── README.md ├── configuration.yaml ├── custom_component_helper ├── devcontainer.json └── images │ └── reopen.png ├── .github ├── ISSUE_TEMPLATE │ ├── feature_request.md │ └── issue.md └── settings.yml ├── .gitignore ├── .vscode ├── settings.json └── tasks.json ├── CONTRIBUTING.md ├── LICENSE ├── README.md ├── custom_components └── email │ ├── __init__.py │ ├── const.py │ ├── manifest.json │ ├── parsers │ ├── __init__.py │ ├── adafruit.py │ ├── adam_eve.py │ ├── ali_express.py │ ├── amazon.py │ ├── amazon_de.py │ ├── bespoke_post.py │ ├── best_buy.py │ ├── bh_photo.py │ ├── chewy.py │ ├── dhl.py │ ├── dollar_shave_club.py │ ├── dsw.py │ ├── ebay.py │ ├── fedex.py │ ├── gamestop.py │ ├── generic.py │ ├── georgia_power.py │ ├── google_express.py │ ├── groupon.py │ ├── guitar_center.py │ ├── home_depot.py │ ├── hue.py │ ├── litter_robot.py │ ├── lowes.py │ ├── manta_sleep.py │ ├── monoprice.py │ ├── newegg.py │ ├── nintendo.py │ ├── nuleaf.py │ ├── paypal.py │ ├── pledgebox.py │ ├── prusa.py │ ├── reolink.py │ ├── rockauto.py │ ├── sony.py │ ├── swiss_post.py │ ├── sylvane.py │ ├── target.py │ ├── the_smartest_house.py │ ├── thriftbooks.py │ ├── timeless.py │ ├── ubiquiti.py │ ├── ups.py │ ├── usps.py │ ├── western_digital.py │ ├── wyze.py │ └── zazzle.py │ └── sensor.py ├── hacs.json └── requirements.txt /.devcontainer/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM python:3.7 2 | 3 | RUN apt-get update \ 4 | && apt-get install -y --no-install-recommends \ 5 | git \ 6 | && apt-get clean \ 7 | && rm -rf /var/lib/apt/lists/* 8 | 9 | RUN python -m pip install --upgrade colorlog black pylint openrouteservice 10 | RUN python -m pip install --upgrade git+https://github.com/home-assistant/home-assistant@dev 11 | RUN cd && mkdir -p /config/custom_components 12 | 13 | 14 | WORKDIR /workspace 15 | 16 | # Set the default shell to bash instead of sh 17 | ENV SHELL /bin/bash -------------------------------------------------------------------------------- /.devcontainer/README.md: -------------------------------------------------------------------------------- 1 | # Devcontainer 2 | 3 | _The easiest way to contribute to and/or test this repository._ 4 | 5 | ## Requirements 6 | 7 | - [git](https://git-scm.com/book/en/v2/Getting-Started-Installing-Git) 8 | - [docker](https://docs.docker.com/install/) 9 | - [VS Code](https://code.visualstudio.com/) 10 | - [Remote - Containers (VSC Extention)](https://marketplace.visualstudio.com/items?itemName=ms-vscode-remote.remote-containers) 11 | 12 | [More info about requirements and devcontainer in general](https://code.visualstudio.com/docs/remote/containers#_getting-started) 13 | 14 | ## How to use Devcontainer for development/test 15 | 16 | 1. Make sure your computer meets the requirements. 17 | 1. Fork this repository. 18 | 1. Clone the repository to your computer. 19 | 1. Open the repository using VS Code. 20 | 21 | When you open this repository with VSCode and your computer meets the requirements you are asked to "Reopen in Container", do that. 22 | 23 | ![reopen](images/reopen.png) 24 | 25 | If you don't see this notification, open the command pallet (ctrl+shift+p) and select `Remote-Containers: Reopen Folder in Container`. 26 | 27 | _It will now build the devcontainer._ 28 | 29 | The container have some "tasks" to help you testing your changes. 30 | 31 | ## Custom Tasks in this repository 32 | 33 | _Start "tasks" by opening the the command pallet (ctrl+shift+p) and select `Tasks: Run Task`_ 34 | 35 | Running tasks like `Start Home Assistant on port 8124` can be restarted by opening the the command pallet (ctrl+shift+p) and select `Tasks: Restart Running Task`, then select the task you want to restart. 36 | 37 | ### Start Home Assistant on port 8124 38 | 39 | This will copy the configuration and the integration files to the expected location in the container. 40 | 41 | And start up Home Assistant on [port 8124.](http://localhost:8124) 42 | 43 | ### Upgrade Home Assistant to latest dev 44 | 45 | This will upgrade Home Assistant to the latest dev version. 46 | 47 | ### Set Home Assistant Version 48 | 49 | This allows you to specify a version of Home Assistant to install inside the devcontainer. 50 | 51 | ### Home Assistant Config Check 52 | 53 | This runs a config check to make sure your config is valid. 54 | -------------------------------------------------------------------------------- /.devcontainer/configuration.yaml: -------------------------------------------------------------------------------- 1 | default_config: 2 | logger: 3 | default: error 4 | logs: 5 | custom_components.open_route_service: debug 6 | 7 | 8 | 9 | sensor: 10 | - platform: open_route_service 11 | api_key: !secret api_key 12 | origin_latitude: "49.41461" 13 | origin_longitude: "8.681495" 14 | destination_latitude: "49.420318" 15 | destination_longitude: "8.687872" -------------------------------------------------------------------------------- /.devcontainer/custom_component_helper: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | function StartHomeAssistant { 4 | echo "Copy configuration.yaml" 5 | cp -f .devcontainer/configuration.yaml /config || echo ".devcontainer/configuration.yaml are missing!" exit 1 6 | 7 | echo "Copy secrets.yaml" 8 | cp -f .devcontainer/secrets.yaml /config || echo ".devcontainer/secrets.yaml are missing!" 9 | 10 | echo "Copy the custom component" 11 | rm -R /config/custom_components/ || echo "" 12 | cp -r custom_components /config/custom_components/ || echo "Could not copy the custom_component" exit 1 13 | 14 | echo "Start Home Assistant" 15 | hass -c /config 16 | } 17 | 18 | function UpdgradeHomeAssistantDev { 19 | python -m pip install --upgrade git+https://github.com/home-assistant/home-assistant@dev 20 | } 21 | 22 | function SetHomeAssistantVersion { 23 | read -p 'Version: ' version 24 | python -m pip install --upgrade homeassistant==$version 25 | } 26 | 27 | function HomeAssistantConfigCheck { 28 | hass -c /config --script check_config 29 | } -------------------------------------------------------------------------------- /.devcontainer/devcontainer.json: -------------------------------------------------------------------------------- 1 | // See https://aka.ms/vscode-remote/devcontainer.json for format details. 2 | { 3 | "context": "..", 4 | "dockerFile": "Dockerfile", 5 | "appPort": "8124:8123", 6 | "runArgs": [ 7 | "-e", 8 | "GIT_EDTIOR='code --wait'" 9 | ], 10 | "extensions": [ 11 | "ms-python.python", 12 | "tabnine.tabnine-vscode" 13 | ], 14 | "settings": { 15 | "python.pythonPath": "/usr/local/bin/python", 16 | "python.linting.pylintEnabled": true, 17 | "python.linting.enabled": true, 18 | "python.formatting.provider": "black", 19 | "editor.formatOnPaste": false, 20 | "editor.formatOnSave": true, 21 | "editor.formatOnType": true, 22 | "files.trimTrailingWhitespace": true 23 | } 24 | } -------------------------------------------------------------------------------- /.devcontainer/images/reopen.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ljmerza/ha-email-sensor/57e52a6688eb043b43a8b7640598f961016ca7ee/.devcontainer/images/reopen.png -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature_request.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Feature request 3 | about: Suggest an idea for this project 4 | 5 | --- 6 | 7 | **Is your feature request related to a problem? Please describe.** 8 | A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] 9 | 10 | **Describe the solution you'd like** 11 | A clear and concise description of what you want to happen. 12 | 13 | **Describe alternatives you've considered** 14 | A clear and concise description of any alternative solutions or features you've considered. 15 | 16 | **Additional context** 17 | Add any other context or screenshots about the feature request here. -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/issue.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Issue 3 | about: Create a report to help us improve 4 | 5 | --- 6 | 7 | 16 | 17 | ## Version of the custom_component 18 | 21 | 22 | ## Configuration 23 | 24 | ```yaml 25 | 26 | Add your logs here. 27 | 28 | ``` 29 | 30 | ## Describe the bug 31 | A clear and concise description of what the bug is. 32 | 33 | 34 | ## Debug log 35 | 36 | 37 | 38 | ```text 39 | 40 | Add your logs here. 41 | 42 | ``` -------------------------------------------------------------------------------- /.github/settings.yml: -------------------------------------------------------------------------------- 1 | repository: 2 | private: false 3 | has_issues: true 4 | has_projects: false 5 | has_wiki: false 6 | has_downloads: false 7 | default_branch: master 8 | allow_squash_merge: true 9 | allow_merge_commit: false 10 | allow_rebase_merge: false 11 | labels: 12 | - name: "Feature Request" 13 | color: "fbca04" 14 | - name: "Bug" 15 | color: "b60205" 16 | - name: "Wont Fix" 17 | color: "ffffff" 18 | - name: "Enhancement" 19 | color: a2eeef 20 | - name: "Documentation" 21 | color: "008672" 22 | - name: "Stale" 23 | color: "930191" -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | __pycache__/ 2 | .devcontainer/secrets.yaml -------------------------------------------------------------------------------- /.vscode/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "files.associations": { 3 | "*.yaml": "home-assistant" 4 | } 5 | } -------------------------------------------------------------------------------- /.vscode/tasks.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": "2.0.0", 3 | "tasks": [ 4 | { 5 | "label": "Start Home Assistant on port 8124", 6 | "type": "shell", 7 | "command": "source .devcontainer/custom_component_helper && StartHomeAssistant", 8 | "group": { 9 | "kind": "test", 10 | "isDefault": true, 11 | }, 12 | "presentation": { 13 | "reveal": "always", 14 | "panel": "new" 15 | }, 16 | "problemMatcher": [] 17 | }, 18 | { 19 | "label": "Upgrade Home Assistant to latest dev", 20 | "type": "shell", 21 | "command": "source .devcontainer/custom_component_helper && UpdgradeHomeAssistantDev", 22 | "group": { 23 | "kind": "test", 24 | "isDefault": true, 25 | }, 26 | "presentation": { 27 | "reveal": "always", 28 | "panel": "new" 29 | }, 30 | "problemMatcher": [] 31 | }, 32 | { 33 | "label": "Set Home Assistant Version", 34 | "type": "shell", 35 | "command": "source .devcontainer/custom_component_helper && SetHomeAssistantVersion", 36 | "group": { 37 | "kind": "test", 38 | "isDefault": true, 39 | }, 40 | "presentation": { 41 | "reveal": "always", 42 | "panel": "new" 43 | }, 44 | "problemMatcher": [] 45 | }, 46 | { 47 | "label": "Home Assistant Config Check", 48 | "type": "shell", 49 | "command": "source .devcontainer/custom_component_helper && HomeAssistantConfigCheck", 50 | "group": { 51 | "kind": "test", 52 | "isDefault": true, 53 | }, 54 | "presentation": { 55 | "reveal": "always", 56 | "panel": "new" 57 | }, 58 | "problemMatcher": [] 59 | } 60 | ] 61 | } -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contribution guidelines 2 | 3 | Contributing to this project should be as easy and transparent as possible, whether it's: 4 | 5 | - Reporting a bug 6 | - Discussing the current state of the code 7 | - Submitting a fix 8 | - Proposing new features 9 | 10 | ## Github is used for everything 11 | 12 | Github is used to host code, to track issues and feature requests, as well as accept pull requests. 13 | 14 | Pull requests are the best way to propose changes to the codebase. 15 | 16 | 1. Fork the repo and create your branch from `master`. 17 | 2. If you've changed something, update the documentation. 18 | 3. Make sure your code lints (using black). 19 | 4. Issue that pull request! 20 | 21 | ## Any contributions you make will be under the MIT Software License 22 | 23 | In short, when you submit code changes, your submissions are understood to be under the same [MIT License](http://choosealicense.com/licenses/mit/) that covers the project. Feel free to contact the maintainers if that's a concern. 24 | 25 | ## Report bugs using Github's [issues](../../issues) 26 | 27 | GitHub issues are used to track public bugs. 28 | Report a bug by [opening a new issue](../../issues/new/choose); it's that easy! 29 | 30 | ## Write bug reports with detail, background, and sample code 31 | 32 | **Great Bug Reports** tend to have: 33 | 34 | - A quick summary and/or background 35 | - Steps to reproduce 36 | - Be specific! 37 | - Give sample code if you can. 38 | - What you expected would happen 39 | - What actually happens 40 | - Notes (possibly including why you think this might be happening, or stuff you tried that didn't work) 41 | 42 | People *love* thorough bug reports. I'm not even kidding. 43 | 44 | ## Use a Consistent Coding Style 45 | 46 | Use [black](https://github.com/ambv/black) to make sure the code follows the style. 47 | 48 | ## License 49 | 50 | By contributing, you agree that your contributions will be licensed under its MIT License. 51 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2019 Leonardo Merza 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Home Assistant Email Sensor 2 | 3 | Gets emails from IMAP and parses out any tracking numbers. Goes well with the [tracking-number-card](https://github.com/ljmerza/tracking-number-card) for lovelace! 4 | 5 | Supported Emails 6 | 7 | - Adafruit 8 | - Adam & Eve 9 | - Amazon 10 | - Ali Express 11 | - B&H Photo 12 | - Bespoke Post 13 | - Best Buy 14 | - Chewy 15 | - DHL 16 | - Dollar Shave Club 17 | - DSW 18 | - eBay 19 | - FedEx 20 | - Gamestop 21 | - Georgia Power 22 | - Google Express 23 | - Groupon 24 | - Guitar Center 25 | - Litter Robot 26 | - Lowes 27 | - Manta Sleep 28 | - Monoprice 29 | - NewEgg 30 | - Nintendo 31 | - Nuleaf 32 | - Paypal 33 | - Pledge Box 34 | - Philips Hue 35 | - Prusa 36 | - Reolink 37 | - Rockauto 38 | - Sylvane 39 | - Sony 40 | - Swiss Post 41 | - Target 42 | - Thriftbooks 43 | - Timeless 44 | - The Smartest House 45 | - Ubiquiti 46 | - UPS 47 | - USPS 48 | - Wyze 49 | - Zazzle 50 | 51 | If you want support for tracking, forward me the email (ljmerza at gmail) and open an issue. 52 | 53 | --- 54 | 55 | [![GitHub Release][releases-shield]][releases] 56 | [![License][license-shield]](LICENSE.md) 57 | 58 | ![Project Maintenance][maintenance-shield] 59 | [![GitHub Activity][commits-shield]][commits] 60 | 61 | ## Installation 62 | 63 | - Add the `email` folder in your `custom_components` folder 64 | 65 | ```yaml 66 | sensor: 67 | - platform: email 68 | email: !secret my_email 69 | password: !secret my_email_password 70 | ``` 71 | 72 | - If the component doesn't load this might be due to Home Assistant not installing the requirements automatically through the manifests file. You can install them manaually by running `pip install beautifulsoup4==4.7.1 imapclient==2.1.0 mail-parser==3.9.3` 73 | - If you use 2 factor authentication for Google you'll need to create an app password. See more details [here](https://support.google.com/accounts/answer/185833?hl=en) 74 | 75 | ## Options 76 | 77 | | Name | Type | Requirement | `default` Description | 78 | | ----------- | ------- | ------------ | --------------------------------------------------------------------- | 79 | | email | string | **Required** | email address | 80 | | password | string | **Required** | email password | 81 | | imap_server | string | **Optional** | `imap.gmail.com` IMAP server address> | 82 | | imap_port | number | **Optional** | `993` IMAP port | 83 | | folder | string | **Optional** | `INBOX` Which folder to pull emails from | 84 | | ssl | boolean | **Optional** | `true` enable or disable SSL when using IMAP | 85 | | days_old | number | **Optional** | `30` how many days of emails to retrieve | 86 | 87 | --- 88 | 89 | Enjoy my card? Help me out for a couple of :beers: or a :coffee:! 90 | 91 | [![coffee](https://www.buymeacoffee.com/assets/img/custom_images/black_img.png)](https://www.buymeacoffee.com/JMISm06AD) 92 | 93 | [commits-shield]: https://img.shields.io/github/commit-activity/y/ljmerza/ha-email-sensor.svg?style=for-the-badge 94 | [commits]: https://github.com/ljmerza/ha-email-sensor/commits/master 95 | [license-shield]: https://img.shields.io/github/license/ljmerza/ha-email-sensor.svg?style=for-the-badge 96 | [maintenance-shield]: https://img.shields.io/badge/maintainer-Leonardo%20Merza%20%40ljmerza-blue.svg?style=for-the-badge 97 | [releases-shield]: https://img.shields.io/github/release/ljmerza/ha-email-sensor.svg?style=for-the-badge 98 | [releases]: https://github.com/ljmerza/ha-email-sensor/releases 99 | -------------------------------------------------------------------------------- /custom_components/email/__init__.py: -------------------------------------------------------------------------------- 1 | """Component to integrate with email.""" -------------------------------------------------------------------------------- /custom_components/email/const.py: -------------------------------------------------------------------------------- 1 | """Constants for Email Platform.""" 2 | 3 | DOMAIN = 'email' 4 | 5 | CONF_EMAIL = 'email' 6 | CONF_PASSWORD = 'password' 7 | CONF_IMAP_SERVER = 'imap_server' 8 | CONF_IMAP_PORT = 'imap_port' 9 | CONF_EMAIL_FOLDER = 'folder' 10 | CONF_SSL = 'ssl' 11 | CONF_DAYS_OLD = 'days_old' 12 | 13 | ATTR_COUNT = 'count' 14 | ATTR_TRACKING_NUMBERS = 'tracking_numbers' 15 | 16 | EMAIL_ATTR_FROM = 'from' 17 | EMAIL_ATTR_SUBJECT = 'subject' 18 | EMAIL_ATTR_BODY = 'body' 19 | 20 | USPS_TRACKING_NUMBER_REGEX = r"\b(94\d{20}|\d{4}\s\d{4}\s\d{4}\s\d{4}\s\d{4}\s\d{2})\b" 21 | UPS_TRACKING_NUMBER_REGEX = r"\b(1Z[A-HJ-NP-Z0-9]{16})\b" 22 | FEDEX_TRACKING_NUMBER_REGEX = r"\b(\d{12})\b" 23 | 24 | EMAIL_DOMAIN_REGEX = r"@([\w.-]+)" -------------------------------------------------------------------------------- /custom_components/email/manifest.json: -------------------------------------------------------------------------------- 1 | { 2 | "domain": "email", 3 | "name": "Email", 4 | "documentation": "https://github.com/ljmerza/ha-email-sensor", 5 | "dependencies": [], 6 | "codeowners": [ 7 | "@ljmerza" 8 | ], 9 | "requirements": [ 10 | "beautifulsoup4==4.7.1", 11 | "imapclient==2.3.1", 12 | "mail-parser==3.9.3" 13 | ], 14 | "version": "3.12.0" 15 | } -------------------------------------------------------------------------------- /custom_components/email/parsers/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ljmerza/ha-email-sensor/57e52a6688eb043b43a8b7640598f961016ca7ee/custom_components/email/parsers/__init__.py -------------------------------------------------------------------------------- /custom_components/email/parsers/adafruit.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import re 3 | 4 | from bs4 import BeautifulSoup 5 | from ..const import EMAIL_ATTR_BODY 6 | 7 | 8 | _LOGGER = logging.getLogger(__name__) 9 | ATTR_ADAFRUIT = 'adafruit' 10 | EMAIL_DOMAIN_ADAFRUIT = 'adafruit.com' 11 | 12 | 13 | def parse_adafruit(email): 14 | """Parse Adafruit tracking numbers.""" 15 | tracking_numbers = [] 16 | 17 | matches = re.findall(r'Delivery Confirmation ID is (.*?) ', email[EMAIL_ATTR_BODY]) 18 | for tracking_number in matches: 19 | if tracking_number not in tracking_numbers: 20 | tracking_numbers.append(tracking_number) 21 | 22 | return tracking_numbers 23 | -------------------------------------------------------------------------------- /custom_components/email/parsers/adam_eve.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import re 3 | 4 | from bs4 import BeautifulSoup 5 | from ..const import EMAIL_ATTR_BODY 6 | 7 | 8 | _LOGGER = logging.getLogger(__name__) 9 | ATTR_ADAM_AND_EVE = 'adam_and_eve' 10 | EMAIL_DOMAIN_ADAM_AND_EVE = 'adamandeve.com' 11 | 12 | 13 | def parse_adam_and_eve(email): 14 | """Parse Adam & Eve tracking numbers.""" 15 | tracking_numbers = [] 16 | 17 | soup = BeautifulSoup(email[EMAIL_ATTR_BODY], 'html.parser') 18 | linkTexts = [link.text for link in soup.find_all('a')] 19 | for linkText in linkTexts: 20 | if not linkText: 21 | continue 22 | match = re.search('(\d{26})', linkText) 23 | if match and match.group(1).isnumeric() and match.group(1) not in tracking_numbers: 24 | tracking_numbers.append(match.group(1)) 25 | 26 | return tracking_numbers 27 | -------------------------------------------------------------------------------- /custom_components/email/parsers/ali_express.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import re 3 | 4 | from bs4 import BeautifulSoup 5 | from ..const import EMAIL_ATTR_BODY 6 | 7 | 8 | _LOGGER = logging.getLogger(__name__) 9 | ATTR_ALI_EXPRESS = 'ali_express' 10 | EMAIL_DOMAIN_ALI_EXPRESS = 'aliexpress.com' 11 | 12 | 13 | def parse_ali_express(email): 14 | """Parse Ali Express tracking numbers.""" 15 | tracking_numbers = [] 16 | 17 | soup = BeautifulSoup(email[EMAIL_ATTR_BODY], 'html.parser') 18 | 19 | lines = [p_element.text for p_element in soup.find_all('p')] 20 | for line in lines: 21 | if not line: 22 | continue 23 | match = re.search('TRACKING NUMBER :(.*?)\.', line) 24 | if match and match.group(1) not in tracking_numbers: 25 | tracking_numbers.append(match.group(1)) 26 | 27 | link_urls = [link.get('href') for link in soup.find_all('a')] 28 | for link in link_urls: 29 | if not link: 30 | continue 31 | order_number_match = re.search('orderId=(.*?)&', link) 32 | 33 | if order_number_match and order_number_match.group(1): 34 | order_number = order_number_match.group(1) 35 | order_numbers = list(map(lambda x: x['tracking_number'], tracking_numbers)) 36 | if order_number not in order_numbers: 37 | tracking_numbers.append({ 38 | 'link': link, 39 | 'tracking_number': order_number 40 | }) 41 | 42 | return tracking_numbers 43 | -------------------------------------------------------------------------------- /custom_components/email/parsers/amazon.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import re 3 | 4 | from bs4 import BeautifulSoup 5 | from ..const import EMAIL_ATTR_BODY, EMAIL_ATTR_SUBJECT 6 | 7 | 8 | _LOGGER = logging.getLogger(__name__) 9 | ATTR_AMAZON = 'amazon' 10 | EMAIL_DOMAIN_AMAZON = 'amazon.com' 11 | 12 | def parse_amazon(email): 13 | """Parse Amazon tracking numbers.""" 14 | tracking_numbers = [] 15 | 16 | soup = BeautifulSoup(email[EMAIL_ATTR_BODY], 'html.parser') 17 | 18 | # see if it's an shipped order email 19 | order_number_match = re.search('Your AmazonSmile order #(.*?) has shipped', email[EMAIL_ATTR_SUBJECT]) 20 | if not order_number_match: 21 | order_number_match = re.search('Your Amazon.com order #(.*?) has shipped', email[EMAIL_ATTR_SUBJECT]) 22 | if not order_number_match: 23 | return tracking_numbers 24 | 25 | order_number = order_number_match.group(1) 26 | 27 | # find the link that has 'track package' text 28 | linkElements = soup.find_all('a') 29 | for linkElement in linkElements: 30 | if not re.search(r'track package', linkElement.text, re.IGNORECASE): 31 | continue 32 | 33 | # if found we no get url and check for duplicates 34 | link = linkElement.get('href') 35 | 36 | # make sure we dont have dupes 37 | order_numbers = list(map(lambda x: x['tracking_number'], tracking_numbers)) 38 | if order_number not in order_numbers: 39 | tracking_numbers.append({ 40 | 'link': link, 41 | 'tracking_number': order_number 42 | }) 43 | 44 | return tracking_numbers 45 | -------------------------------------------------------------------------------- /custom_components/email/parsers/amazon_de.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import re 3 | 4 | from bs4 import BeautifulSoup 5 | from ..const import EMAIL_ATTR_BODY, EMAIL_ATTR_SUBJECT 6 | 7 | 8 | _LOGGER = logging.getLogger(__name__) 9 | ATTR_AMAZON_DE = 'amazon_de' 10 | EMAIL_DOMAIN_AMAZON_DE = 'amazon.de' 11 | 12 | def parse_amazon_de(email): 13 | """Parse Amazon tracking numbers.""" 14 | tracking_numbers = [] 15 | 16 | soup = BeautifulSoup(email[EMAIL_ATTR_BODY], 'html.parser') 17 | 18 | # see if it's an shipped order email 19 | order_number_match = re.search('Order: #(.*?)\n', email[EMAIL_ATTR_BODY]) 20 | if not order_number_match: 21 | order_number_match = re.search('Your Amazon.de order of (.*?) has been dispatched!', email[EMAIL_ATTR_SUBJECT]) 22 | if not order_number_match: 23 | return tracking_numbers 24 | 25 | order_number = order_number_match.group(1) 26 | 27 | # find the link that has 'track your package' text 28 | linkElements = soup.find_all('a') 29 | for linkElement in linkElements: 30 | if not re.search(r'track your package', linkElement.text, re.IGNORECASE): 31 | continue 32 | 33 | # if found we no get url and check for duplicates 34 | link = linkElement.get('href') 35 | 36 | # make sure we dont have dupes 37 | order_numbers = list(map(lambda x: x['tracking_number'], tracking_numbers)) 38 | if order_number not in order_numbers: 39 | tracking_numbers.append({ 40 | 'link': link, 41 | 'tracking_number': order_number 42 | }) 43 | 44 | return tracking_numbers -------------------------------------------------------------------------------- /custom_components/email/parsers/bespoke_post.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import re 3 | 4 | from bs4 import BeautifulSoup 5 | from ..const import EMAIL_ATTR_BODY 6 | 7 | 8 | _LOGGER = logging.getLogger(__name__) 9 | ATTR_DSW = 'BESPOKE_POST' 10 | EMAIL_DOMAIN_DSW = 'bespokepost.com' 11 | 12 | 13 | def parse_bespoke_post(email): 14 | """Parse bespoke post tracking numbers.""" 15 | tracking_numbers = [] 16 | 17 | matches = re.findall(r'Tracking Number (.*?) ', email[EMAIL_ATTR_BODY]) 18 | for tracking_number in matches: 19 | if tracking_number not in tracking_numbers: 20 | tracking_numbers.append(tracking_number) 21 | 22 | return tracking_numbers 23 | -------------------------------------------------------------------------------- /custom_components/email/parsers/best_buy.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import re 3 | 4 | from bs4 import BeautifulSoup 5 | from ..const import EMAIL_ATTR_BODY 6 | 7 | 8 | _LOGGER = logging.getLogger(__name__) 9 | ATTR_BEST_BUY = 'best_buy' 10 | EMAIL_DOMAIN_BEST_BUY = 'bestbuy.com' 11 | 12 | 13 | def parse_best_buy(email): 14 | """Parse Best Buy tracking numbers.""" 15 | tracking_numbers = [] 16 | 17 | soup = BeautifulSoup(email[EMAIL_ATTR_BODY], 'html.parser') 18 | elements = soup.find_all('a') 19 | for element in elements: 20 | link = element.get('href') 21 | if not link: 22 | continue 23 | if 'shipment/tracking' in link: 24 | tracking_number = element.text 25 | if tracking_number and tracking_number not in tracking_numbers: 26 | tracking_numbers.append(tracking_number.strip()) 27 | 28 | return tracking_numbers 29 | -------------------------------------------------------------------------------- /custom_components/email/parsers/bh_photo.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import re 3 | 4 | from bs4 import BeautifulSoup 5 | from ..const import EMAIL_ATTR_BODY 6 | 7 | 8 | _LOGGER = logging.getLogger(__name__) 9 | ATTR_BH_PHOTO = 'bh_photo' 10 | EMAIL_DOMAIN_BH_PHOTO = 'bhphotovideo.com' 11 | 12 | 13 | def parse_bh_photo(email): 14 | """Parse B&H Photo tracking numbers.""" 15 | tracking_numbers = [] 16 | 17 | soup = BeautifulSoup(email[EMAIL_ATTR_BODY], 'html.parser') 18 | links = [link.get('href') for link in soup.find_all('a')] 19 | for link in links: 20 | if not link: 21 | continue 22 | match = re.search('tracknumbers=(.*?)$', link) 23 | if match and match.group(1) not in tracking_numbers: 24 | tracking_numbers.append(match.group(1)) 25 | 26 | return tracking_numbers 27 | -------------------------------------------------------------------------------- /custom_components/email/parsers/chewy.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import re 3 | 4 | from bs4 import BeautifulSoup 5 | from ..const import EMAIL_ATTR_BODY 6 | 7 | 8 | _LOGGER = logging.getLogger(__name__) 9 | ATTR_CHEWY = 'chewy' 10 | EMAIL_DOMAIN_CHEWY = 'chewy.com' 11 | 12 | 13 | def parse_chewy(email): 14 | """Parse chewy tracking numbers.""" 15 | _LOGGER.debug(email) 16 | tracking_numbers = [] 17 | 18 | matches = re.findall(r'tracknumber_list=([0-9]+)', email[EMAIL_ATTR_BODY]) 19 | if not matches: 20 | matches = re.findall(r'numbers=([0-9]+)', email[EMAIL_ATTR_BODY]) 21 | 22 | for tracking_number in matches: 23 | if tracking_number not in tracking_numbers: 24 | tracking_numbers.append(tracking_number) 25 | 26 | return tracking_numbers 27 | -------------------------------------------------------------------------------- /custom_components/email/parsers/dhl.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import re 3 | 4 | from bs4 import BeautifulSoup 5 | from ..const import EMAIL_ATTR_BODY 6 | 7 | 8 | _LOGGER = logging.getLogger(__name__) 9 | ATTR_DHL = 'dhl' 10 | EMAIL_DOMAIN_DHL = 'dhl' 11 | 12 | 13 | def parse_dhl(email): 14 | """Parse DHL tracking numbers.""" 15 | tracking_numbers = [] 16 | 17 | matches = re.findall(r'idc=(.*?)"', email[EMAIL_ATTR_BODY]) 18 | for tracking_number in matches: 19 | if tracking_number not in tracking_numbers: 20 | tracking_numbers.append(tracking_number) 21 | 22 | return tracking_numbers 23 | -------------------------------------------------------------------------------- /custom_components/email/parsers/dollar_shave_club.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import re 3 | 4 | from bs4 import BeautifulSoup 5 | from ..const import EMAIL_ATTR_BODY 6 | 7 | _LOGGER = logging.getLogger(__name__) 8 | ATTR_DOLLAR_SHAVE_CLUB = 'dollar_shave_club' 9 | EMAIL_DOMAIN_DOLLAR_SHAVE_CLUB = 'dollarshaveclub.com' 10 | 11 | 12 | def parse_dollar_shave_club(email): 13 | """Parse Dollar Shave Club tracking numbers.""" 14 | tracking_numbers = [] 15 | 16 | soup = BeautifulSoup(email[EMAIL_ATTR_BODY], 'html.parser') 17 | elements = soup.find_all('a') 18 | for element in elements: 19 | title = element.get('title') 20 | if not title: 21 | continue 22 | if 'Track Package' == title: 23 | link = element.get('href') 24 | match = re.search(r'x=(.*?)%7c', link) 25 | if match and match.group(1) not in tracking_numbers: 26 | tracking_numbers.append(match.group(1)) 27 | 28 | return tracking_numbers 29 | -------------------------------------------------------------------------------- /custom_components/email/parsers/dsw.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import re 3 | 4 | from bs4 import BeautifulSoup 5 | from ..const import EMAIL_ATTR_BODY 6 | 7 | 8 | _LOGGER = logging.getLogger(__name__) 9 | ATTR_DSW = 'DSW' 10 | EMAIL_DOMAIN_DSW = 'dsw.com' 11 | 12 | 13 | def parse_dsw(email): 14 | """Parse DSW tracking numbers.""" 15 | tracking_numbers = [] 16 | 17 | matches = re.findall(r'tracking_numbers=(.*?)&', email[EMAIL_ATTR_BODY]) 18 | for tracking_number in matches: 19 | if tracking_number not in tracking_numbers: 20 | tracking_numbers.append(tracking_number) 21 | 22 | return tracking_numbers 23 | -------------------------------------------------------------------------------- /custom_components/email/parsers/ebay.py: -------------------------------------------------------------------------------- 1 | import logging 2 | 3 | from bs4 import BeautifulSoup 4 | from ..const import EMAIL_ATTR_BODY 5 | 6 | 7 | _LOGGER = logging.getLogger(__name__) 8 | ATTR_EBAY = 'ebay' 9 | EMAIL_DOMAIN_EBAY = 'ebay.com' 10 | 11 | 12 | def parse_ebay(email): 13 | """Parse eBay tracking numbers.""" 14 | tracking_numbers = [] 15 | 16 | soup = BeautifulSoup(email[EMAIL_ATTR_BODY], 'html.parser') 17 | elements = [element for element in soup.find_all('span')] 18 | for element in elements: 19 | if 'Tracking Number' in element.text: 20 | tracking_link = element.find("a", recursive=False) 21 | tracking_number = tracking_link.text 22 | if tracking_number not in tracking_numbers: 23 | tracking_numbers.append(tracking_number) 24 | 25 | return tracking_numbers 26 | -------------------------------------------------------------------------------- /custom_components/email/parsers/fedex.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import re 3 | 4 | from bs4 import BeautifulSoup 5 | from ..const import EMAIL_ATTR_BODY 6 | from ..const import EMAIL_ATTR_SUBJECT 7 | 8 | _LOGGER = logging.getLogger(__name__) 9 | ATTR_FEDEX = 'fedex' 10 | EMAIL_DOMAIN_FEDEX = 'fedex.com' 11 | 12 | 13 | def parse_fedex(email): 14 | """Parse FedEx tracking numbers.""" 15 | tracking_numbers = [] 16 | 17 | soup = BeautifulSoup(email[EMAIL_ATTR_BODY], 'html.parser') 18 | links = [link.get('href') for link in soup.find_all('a')] 19 | for link in links: 20 | if not link: 21 | continue 22 | match = re.search('tracknumbers=(.*?)&', link) 23 | if match and match.group(1) not in tracking_numbers: 24 | tracking_numbers.append(match.group(1)) 25 | 26 | match = re.search('FedEx Shipment (.*?): Your package is on its way', email[EMAIL_ATTR_SUBJECT]) 27 | if match and match.group(1) not in tracking_numbers: 28 | tracking_numbers.append(match.group(1)) 29 | 30 | return tracking_numbers 31 | -------------------------------------------------------------------------------- /custom_components/email/parsers/gamestop.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import re 3 | 4 | from bs4 import BeautifulSoup 5 | from ..const import EMAIL_ATTR_BODY 6 | 7 | 8 | _LOGGER = logging.getLogger(__name__) 9 | ATTR_GAMESTOP = 'gamestop' 10 | EMAIL_DOMAIN_GAMESTOP = 'gamestop.com' 11 | 12 | 13 | def parse_gamestop(email): 14 | """Parse gamestop tracking numbers.""" 15 | _LOGGER.debug(email) 16 | tracking_numbers = [] 17 | 18 | matches = re.findall(r'tracking_numbers=([0-9]+)', email[EMAIL_ATTR_BODY]) 19 | 20 | for tracking_number in matches: 21 | if tracking_number not in tracking_numbers: 22 | tracking_numbers.append(tracking_number) 23 | 24 | return tracking_numbers 25 | -------------------------------------------------------------------------------- /custom_components/email/parsers/generic.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import re 3 | 4 | from bs4 import BeautifulSoup 5 | from ..const import EMAIL_ATTR_BODY, USPS_TRACKING_NUMBER_REGEX, UPS_TRACKING_NUMBER_REGEX, FEDEX_TRACKING_NUMBER_REGEX 6 | 7 | _LOGGER = logging.getLogger(__name__) 8 | ATTR_GENERIC = 'generic' 9 | EMAIL_DOMAIN_GENERIC = '' 10 | 11 | def parse_generic(email): 12 | """Tries to parse tracking numbers for any type of email.""" 13 | tracking_numbers = [] 14 | 15 | soup = BeautifulSoup(email[EMAIL_ATTR_BODY], 'html.parser') 16 | 17 | matches = re.findall(UPS_TRACKING_NUMBER_REGEX, email[EMAIL_ATTR_BODY]) 18 | for tracking_number in matches: 19 | if tracking_number not in tracking_numbers: 20 | tracking_numbers.append(tracking_number) 21 | 22 | matches = re.findall(USPS_TRACKING_NUMBER_REGEX, email[EMAIL_ATTR_BODY]) 23 | for tracking_number in matches: 24 | if tracking_number not in tracking_numbers: 25 | tracking_numbers.append(tracking_number) 26 | 27 | # matches = re.findall(FEDEX_TRACKING_NUMBER_REGEX, email[EMAIL_ATTR_BODY]) 28 | # for tracking_number in matches: 29 | # if tracking_number not in tracking_numbers: 30 | # tracking_numbers.append(tracking_number) 31 | 32 | return tracking_numbers 33 | -------------------------------------------------------------------------------- /custom_components/email/parsers/georgia_power.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import re 3 | 4 | from bs4 import BeautifulSoup 5 | from ..const import EMAIL_ATTR_BODY 6 | 7 | 8 | _LOGGER = logging.getLogger(__name__) 9 | ATTR_GEORGIA_POWER = 'georgia_power' 10 | EMAIL_DOMAIN_GEORGIA_POWER = 'southernco.com' 11 | 12 | 13 | def parse_georgia_power(email): 14 | """Parse Georgia power usage numbers.""" 15 | 16 | usage_numbers = { 17 | 'yesterday_use': '', 18 | 'yesterday_cost': 0, 19 | 'monthly_use': '', 20 | 'monthly_cost': 0, 21 | } 22 | 23 | soup = BeautifulSoup(email[EMAIL_ATTR_BODY], 'html.parser') 24 | elements = soup.find_all('td') 25 | 26 | for idx, element in enumerate(elements): 27 | text = element.getText() 28 | 29 | if("Yesterday's Energy" in text): 30 | usage_numbers['yesterday_use'] = elements[idx + 31 | 1].getText().strip() 32 | 33 | elif("Yesterday's estimated" in text): 34 | usage_numbers['yesterday_cost'] = elements[idx + 35 | 1].getText().strip() 36 | 37 | elif("Monthly Energy" in text): 38 | usage_numbers['monthly_use'] = elements[idx + 1].getText().strip() 39 | 40 | elif("Monthly estimated" in text): 41 | usage_numbers['monthly_cost'] = elements[idx + 1].getText().strip() 42 | 43 | return [usage_numbers] 44 | -------------------------------------------------------------------------------- /custom_components/email/parsers/google_express.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import re 3 | 4 | from bs4 import BeautifulSoup 5 | from ..const import EMAIL_ATTR_BODY 6 | 7 | 8 | _LOGGER = logging.getLogger(__name__) 9 | ATTR_GOOGLE_EXPRESS = 'google_express' 10 | EMAIL_DOMAIN_GOOGLE_EXPRESS = 'google.com' 11 | 12 | 13 | def parse_google_express(email): 14 | """Parse Google Express tracking numbers.""" 15 | tracking_numbers = [] 16 | 17 | soup = BeautifulSoup(email[EMAIL_ATTR_BODY], 'html.parser') 18 | images = soup.find_all('img', alt=True) 19 | for image in images: 20 | if image['alt'] == 'UPS': 21 | link = image.parent.find('a') 22 | if not link: 23 | continue 24 | tracking_number = link.text 25 | if tracking_number not in tracking_numbers: 26 | tracking_numbers.append(tracking_number) 27 | 28 | return tracking_numbers 29 | -------------------------------------------------------------------------------- /custom_components/email/parsers/groupon.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import re 3 | 4 | from bs4 import BeautifulSoup 5 | from ..const import EMAIL_ATTR_BODY 6 | 7 | 8 | _LOGGER = logging.getLogger(__name__) 9 | ATTR_GROUPON = 'groupon' 10 | EMAIL_DOMAIN_GROUPON = 'groupon.com' 11 | 12 | 13 | def parse_groupon(email): 14 | """Parse groupon tracking numbers.""" 15 | tracking_numbers = [] 16 | 17 | soup = BeautifulSoup(email[EMAIL_ATTR_BODY], 'html.parser') 18 | elements = soup.find_all('a') 19 | for element in elements: 20 | link = element.get('href') 21 | if not link: 22 | continue 23 | if 'track_order' in link: 24 | tracking_number = element.text 25 | if tracking_number != 'here' and tracking_number and tracking_number not in tracking_numbers: 26 | tracking_numbers.append(tracking_number) 27 | 28 | return tracking_numbers 29 | -------------------------------------------------------------------------------- /custom_components/email/parsers/guitar_center.py: -------------------------------------------------------------------------------- 1 | import logging 2 | 3 | from bs4 import BeautifulSoup 4 | from ..const import EMAIL_ATTR_BODY 5 | 6 | 7 | _LOGGER = logging.getLogger(__name__) 8 | ATTR_GUITAR_CENTER = 'guitar_center' 9 | EMAIL_DOMAIN_GUITAR_CENTER = 'guitarcenter.com' 10 | 11 | 12 | def parse_guitar_center(email): 13 | """Parse Guitar Center tracking numbers.""" 14 | tracking_numbers = [] 15 | 16 | soup = BeautifulSoup(email[EMAIL_ATTR_BODY], 'html.parser') 17 | elements = [element for element in soup.find_all('td')] 18 | for element in elements: 19 | if 'Tracking:' in element.text: 20 | tracking_link = element.find("a", recursive=False) 21 | tracking_number = tracking_link.text 22 | if tracking_number not in tracking_numbers: 23 | tracking_numbers.append(tracking_number) 24 | 25 | return tracking_numbers 26 | -------------------------------------------------------------------------------- /custom_components/email/parsers/home_depot.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import re 3 | import math 4 | 5 | from bs4 import BeautifulSoup 6 | from ..const import EMAIL_ATTR_BODY 7 | 8 | 9 | _LOGGER = logging.getLogger(__name__) 10 | ATTR_HOME_DEPOT = 'home_depot' 11 | EMAIL_DOMAIN_HOME_DEPOT = 'homedepot.com' 12 | 13 | 14 | track_copy_pattern = re.compile(r"track my order", re.IGNORECASE) 15 | order_number_pattern = re.compile(r"^[A-Za-z]{2}\d{8}$") 16 | 17 | def parse_home_depot(email): 18 | """Parse home depot tracking numbers.""" 19 | tracking_numbers = [] 20 | 21 | soup = BeautifulSoup(email[EMAIL_ATTR_BODY], 'html.parser') 22 | elements = soup.find_all('a') 23 | 24 | for element in elements: 25 | link = element.get('href') 26 | 27 | if not link: 28 | continue 29 | 30 | if 'link.order.homedepot.com' not in link: 31 | continue 32 | 33 | match = re.search(order_number_pattern, element.text) 34 | 35 | if match: 36 | tracking_numbers.append({ 37 | 'link': link, 38 | 'tracking_number': match.group() 39 | }) 40 | 41 | 42 | return tracking_numbers 43 | -------------------------------------------------------------------------------- /custom_components/email/parsers/hue.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import re 3 | 4 | from bs4 import BeautifulSoup 5 | from ..const import EMAIL_ATTR_BODY 6 | 7 | 8 | _LOGGER = logging.getLogger(__name__) 9 | ATTR_HUE = 'hue' 10 | EMAIL_DOMAIN_HUE = 'luzernsolutions' 11 | 12 | 13 | def parse_hue(email): 14 | """Parse Phillips Hue tracking numbers.""" 15 | tracking_numbers = [] 16 | 17 | body = email[EMAIL_ATTR_BODY] 18 | matches = re.findall(r'tracking number is: (.*?)<', body) 19 | for tracking_number in matches: 20 | if tracking_number not in tracking_numbers: 21 | tracking_numbers.append(tracking_number) 22 | 23 | return tracking_numbers 24 | -------------------------------------------------------------------------------- /custom_components/email/parsers/litter_robot.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import re 3 | 4 | from bs4 import BeautifulSoup 5 | from ..const import EMAIL_ATTR_BODY 6 | 7 | 8 | _LOGGER = logging.getLogger(__name__) 9 | ATTR_LITTER_ROBOT= 'litter_robot' 10 | EMAIL_DOMAIN_LITTER_ROBOT = 'litter-robot.com' 11 | 12 | 13 | def parse_litter_robot(email): 14 | """Parse Litter Robot tracking numbers.""" 15 | tracking_numbers = [] 16 | 17 | soup = BeautifulSoup(email[EMAIL_ATTR_BODY], 'html.parser') 18 | elements = soup.find_all('a') 19 | for element in elements: 20 | link = element.get('href') 21 | if not link: 22 | continue 23 | if 'shipping/tracking' in link: 24 | tracking_number = element.text 25 | if tracking_number and tracking_number not in tracking_numbers: 26 | tracking_numbers.append(tracking_number.strip()) 27 | 28 | return tracking_numbers 29 | -------------------------------------------------------------------------------- /custom_components/email/parsers/lowes.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import re 3 | 4 | from bs4 import BeautifulSoup 5 | from ..const import EMAIL_ATTR_BODY, EMAIL_ATTR_SUBJECT 6 | 7 | 8 | _LOGGER = logging.getLogger(__name__) 9 | ATTR_LOWES = 'lowes' 10 | EMAIL_DOMAIN_LOWES = 'lowes.com' 11 | 12 | def parse_lowes(email): 13 | """Parse Lowes tracking numbers.""" 14 | tracking_numbers = [] 15 | 16 | soup = BeautifulSoup(email[EMAIL_ATTR_BODY], 'html.parser') 17 | elements = soup.find_all('span') 18 | 19 | for element in elements: 20 | if 'Tracking #' in element.text: 21 | anchor = element.findChild("a" , recursive=False) 22 | link = anchor.get('href') 23 | 24 | order_number = re.search(r'#(\d+)', email[EMAIL_ATTR_SUBJECT]) 25 | if order_number: 26 | tracking_numbers.append({ 27 | 'link': link, 28 | 'tracking_number': order_number.group(1) 29 | }) 30 | 31 | return tracking_numbers 32 | -------------------------------------------------------------------------------- /custom_components/email/parsers/manta_sleep.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import re 3 | 4 | from bs4 import BeautifulSoup 5 | from ..const import EMAIL_ATTR_BODY 6 | 7 | 8 | _LOGGER = logging.getLogger(__name__) 9 | ATTR_MANTA_SLEEP = 'manta_sleep' 10 | EMAIL_DOMAIN_MANTA_SLEEP = 'mantasleep.com' 11 | 12 | 13 | def parse_manta_sleep(email): 14 | """Parse Manta Sleep tracking numbers.""" 15 | tracking_numbers = [] 16 | 17 | soup = BeautifulSoup(email[EMAIL_ATTR_BODY], 'html.parser') 18 | 19 | link_urls = [link.get('href') for link in soup.find_all('a')] 20 | for link in link_urls: 21 | if not link: 22 | continue 23 | match = re.search('trackingnumber=(.*?)$', link) 24 | if match and match.group(1) not in tracking_numbers: 25 | tracking_numbers.append(match.group(1)) 26 | 27 | return tracking_numbers 28 | -------------------------------------------------------------------------------- /custom_components/email/parsers/monoprice.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import re 3 | 4 | from bs4 import BeautifulSoup 5 | from ..const import EMAIL_ATTR_BODY 6 | 7 | 8 | _LOGGER = logging.getLogger(__name__) 9 | ATTR_MONOPRICE = 'monoprice' 10 | EMAIL_DOMAIN_MONOPRICE = 'monoprice.com' 11 | 12 | 13 | def parse_monoprice(email): 14 | """Parse Monoprice tracking numbers.""" 15 | tracking_numbers = [] 16 | 17 | soup = BeautifulSoup(email[EMAIL_ATTR_BODY], 'html.parser') 18 | links = [element.get('href') for element in soup.find_all('a')] 19 | for link in links: 20 | if not link: 21 | continue 22 | match = re.search('TRK=(.*?)&', link) 23 | if match and match.group(1) not in tracking_numbers: 24 | tracking_numbers.append(match.group(1)) 25 | 26 | return tracking_numbers 27 | -------------------------------------------------------------------------------- /custom_components/email/parsers/newegg.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import re 3 | 4 | from bs4 import BeautifulSoup 5 | from ..const import EMAIL_ATTR_BODY 6 | 7 | 8 | _LOGGER = logging.getLogger(__name__) 9 | ATTR_NEWEGG = 'newegg' 10 | EMAIL_DOMAIN_NEWEGG = 'newegg.com' 11 | 12 | 13 | def parse_newegg(email): 14 | """Parse Newegg tracking numbers.""" 15 | tracking_numbers = [] 16 | 17 | soup = BeautifulSoup(email[EMAIL_ATTR_BODY], 'html.parser') 18 | 19 | link_urls = [link.get('href') for link in soup.find_all('a')] 20 | for link in link_urls: 21 | if not link: 22 | continue 23 | match = re.search('TrackingNumber=(.*?)&', link) 24 | if match and match.group(1) not in tracking_numbers: 25 | tracking_numbers.append(match.group(1)) 26 | 27 | # sometimes tracking numbers are text in a link 28 | strongs = [link for link in soup.find_all('strong')] 29 | for strong in strongs: 30 | if not strong.get_text(): 31 | continue 32 | match = re.search('Tracking Number', strong.get_text()) 33 | if match: 34 | link_texts = [link.get_text() for link in 35 | strong.findChildren("a", recursive=False)] 36 | for link_text in link_texts: 37 | if not link_text: 38 | continue 39 | if link_text not in tracking_numbers: 40 | tracking_numbers.append(link_text) 41 | 42 | return tracking_numbers 43 | -------------------------------------------------------------------------------- /custom_components/email/parsers/nintendo.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import re 3 | 4 | from bs4 import BeautifulSoup 5 | from ..const import EMAIL_ATTR_BODY 6 | 7 | 8 | _LOGGER = logging.getLogger(__name__) 9 | ATTR_NINTENDO = 'nintendo' 10 | EMAIL_DOMAIN_NINTENDO = 'nintendo.com' 11 | 12 | 13 | def parse_nintendo(email): 14 | """Parse Nintendo tracking numbers.""" 15 | tracking_numbers = [] 16 | 17 | soup = BeautifulSoup(email[EMAIL_ATTR_BODY], 'html.parser') 18 | links = [link.get('href') for link in soup.find_all('a')] 19 | for link in links: 20 | if not link: 21 | continue 22 | match = re.search('trackNums=(.*?)$', link) 23 | if match and match.group(1) not in tracking_numbers: 24 | tracking_numbers.append(match.group(1)) 25 | 26 | return tracking_numbers 27 | -------------------------------------------------------------------------------- /custom_components/email/parsers/nuleaf.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import re 3 | 4 | from bs4 import BeautifulSoup 5 | from ..const import EMAIL_ATTR_BODY 6 | 7 | 8 | _LOGGER = logging.getLogger(__name__) 9 | ATTR_NULEAF = 'nuleaf' 10 | EMAIL_DOMAIN_NULEAF = 'nuleafnaturals.com' 11 | 12 | 13 | def parse_nuleaf(email): 14 | """Parse NuLeaf tracking numbers.""" 15 | tracking_numbers = [] 16 | 17 | soup = BeautifulSoup(email[EMAIL_ATTR_BODY], 'html.parser') 18 | elements = soup.find_all('a') 19 | for element in elements: 20 | link = element.get('href') 21 | if not link: 22 | continue 23 | if 'emailtrk' in link: 24 | tracking_number = element.text 25 | if tracking_number and tracking_number not in tracking_numbers: 26 | tracking_numbers.append(tracking_number) 27 | 28 | return tracking_numbers 29 | -------------------------------------------------------------------------------- /custom_components/email/parsers/paypal.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import re 3 | 4 | from bs4 import BeautifulSoup 5 | from ..const import EMAIL_ATTR_BODY 6 | 7 | 8 | _LOGGER = logging.getLogger(__name__) 9 | ATTR_PAYPAL = 'paypal' 10 | EMAIL_DOMAIN_PAYPAL = 'paypal.com' 11 | 12 | 13 | def parse_paypal(email): 14 | """Parse Paypal tracking numbers.""" 15 | tracking_numbers = [] 16 | 17 | soup = BeautifulSoup(email[EMAIL_ATTR_BODY], 'html.parser') 18 | links = [link.get('href') for link in soup.find_all('a')] 19 | for link in links: 20 | if not link: 21 | continue 22 | match = re.search('origTrackNum=(.*?)$', link) 23 | if match and match.group(1) not in tracking_numbers: 24 | tracking_numbers.append(match.group(1)) 25 | 26 | return tracking_numbers 27 | -------------------------------------------------------------------------------- /custom_components/email/parsers/pledgebox.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import re 3 | 4 | from bs4 import BeautifulSoup 5 | from ..const import EMAIL_ATTR_BODY, USPS_TRACKING_NUMBER_REGEX 6 | 7 | 8 | _LOGGER = logging.getLogger(__name__) 9 | ATTR_PLEDGEBOX = 'pledgebox' 10 | EMAIL_DOMAIN_PLEDGEBOX = 'pledgebox.com' 11 | 12 | 13 | def parse_pledgebox(email): 14 | """Parse Pledge Box tracking numbers.""" 15 | tracking_numbers = [] 16 | 17 | soup = BeautifulSoup(email[EMAIL_ATTR_BODY], 'html.parser') 18 | lines = [element.text for element in soup.find_all('td')] 19 | for line in lines: 20 | if not line: 21 | continue 22 | match = re.search('^(\d{12})$', line) 23 | 24 | if match and match.group(1) not in tracking_numbers: 25 | tracking_numbers.append(match.group(1)) 26 | 27 | matches = re.findall(USPS_TRACKING_NUMBER_REGEX, email[EMAIL_ATTR_BODY]) 28 | for tracking_number in matches: 29 | if tracking_number not in tracking_numbers: 30 | tracking_numbers.append(tracking_number) 31 | 32 | return tracking_numbers 33 | -------------------------------------------------------------------------------- /custom_components/email/parsers/prusa.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import re 3 | 4 | from bs4 import BeautifulSoup 5 | from ..const import EMAIL_ATTR_BODY 6 | 7 | 8 | _LOGGER = logging.getLogger(__name__) 9 | ATTR_PRUSA = 'prusa' 10 | EMAIL_DOMAIN_PRUSA = 'prusa3d.com' 11 | 12 | 13 | def parse_prusa(email): 14 | """Parse Prusa tracking numbers.""" 15 | tracking_numbers = [] 16 | 17 | soup = BeautifulSoup(email[EMAIL_ATTR_BODY], 'html.parser') 18 | links = [link.get('href') for link in soup.find_all('a')] 19 | for link in links: 20 | if not link: 21 | continue 22 | match = re.search('trknbr=(.*?)$', link) 23 | if match and match.group(1) not in tracking_numbers: 24 | tracking_numbers.append(match.group(1)) 25 | 26 | return tracking_numbers 27 | -------------------------------------------------------------------------------- /custom_components/email/parsers/reolink.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import re 3 | 4 | from bs4 import BeautifulSoup 5 | from ..const import EMAIL_ATTR_BODY 6 | 7 | 8 | _LOGGER = logging.getLogger(__name__) 9 | ATTR_REOLINK = 'reolink' 10 | EMAIL_DOMAIN_REOLINK = 'reolink.com' 11 | 12 | 13 | def parse_reolink(email): 14 | """Parse Reolink tracking numbers.""" 15 | tracking_numbers = [] 16 | 17 | soup = BeautifulSoup(email[EMAIL_ATTR_BODY], 'html.parser') 18 | links = [link.get('href') for link in soup.find_all('a')] 19 | 20 | for link in links: 21 | if not link: 22 | continue 23 | 24 | match = re.search('trackNums=(.*?)$', link) 25 | if match and match.group(1) not in tracking_numbers: 26 | tracking_numbers.append(match.group(1)) 27 | 28 | match = re.search('qtc_tLabels1=(.*?)$', link) 29 | if match and match.group(1) not in tracking_numbers: 30 | tracking_numbers.append(match.group(1)) 31 | 32 | return tracking_numbers 33 | -------------------------------------------------------------------------------- /custom_components/email/parsers/rockauto.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import re 3 | 4 | from bs4 import BeautifulSoup 5 | from ..const import EMAIL_ATTR_BODY 6 | 7 | 8 | _LOGGER = logging.getLogger(__name__) 9 | ATTR_ROCKAUTO = 'rockauto' 10 | EMAIL_DOMAIN_ROCKAUTO = 'rockauto.com' 11 | 12 | 13 | def parse_rockauto(email): 14 | """Parse Rockauto tracking numbers.""" 15 | tracking_numbers = [] 16 | 17 | soup = BeautifulSoup(email[EMAIL_ATTR_BODY], 'html.parser') 18 | links = [link.get('href') for link in soup.find_all('a')] 19 | for link in links: 20 | if not link: 21 | continue 22 | 23 | match = re.search('tracknumbers=(.*?)$', link) 24 | if not match: 25 | match = re.search('trknbr=(.*?)$', link) 26 | 27 | if match and match.group(1) not in tracking_numbers: 28 | tracking_numbers.append(match.group(1)) 29 | 30 | return tracking_numbers 31 | -------------------------------------------------------------------------------- /custom_components/email/parsers/sony.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import re, base64 3 | 4 | from bs4 import BeautifulSoup 5 | from ..const import EMAIL_ATTR_BODY 6 | 7 | 8 | _LOGGER = logging.getLogger(__name__) 9 | ATTR_SONY = 'sony' 10 | EMAIL_DOMAIN_SONY = 'sony.com' 11 | 12 | 13 | def parse_sony(email): 14 | """Parse Sony tracking numbers.""" 15 | tracking_numbers = [] 16 | 17 | matches = re.findall(r'tracking_numbers=(.*?)&', email[EMAIL_ATTR_BODY]) 18 | for tracking_number in matches: 19 | if tracking_number not in tracking_numbers: 20 | tracking_numbers.append(tracking_number) 21 | 22 | return tracking_numbers 23 | -------------------------------------------------------------------------------- /custom_components/email/parsers/swiss_post.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import re 3 | 4 | from bs4 import BeautifulSoup 5 | from ..const import EMAIL_ATTR_BODY 6 | 7 | _LOGGER = logging.getLogger(__name__) 8 | ATTR_SWISS_POST = 'swiss_post' 9 | EMAIL_DOMAIN_SWISS_POST = 'post.ch' 10 | 11 | 12 | def parse_swiss_post(email): 13 | """Parse Swiss Post tracking numbers.""" 14 | tracking_numbers = [] 15 | 16 | soup = BeautifulSoup(email[EMAIL_ATTR_BODY], 'html.parser') 17 | links = [link.get('href') for link in soup.find_all('a')] 18 | for link in links: 19 | if not link: 20 | continue 21 | match = re.search('formattedParcelCodes=(\d+)', link) 22 | if match and match.group(1) not in tracking_numbers: 23 | tracking_numbers.append(match.group(1)) 24 | 25 | return tracking_numbers 26 | -------------------------------------------------------------------------------- /custom_components/email/parsers/sylvane.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import re 3 | 4 | from bs4 import BeautifulSoup 5 | from ..const import EMAIL_ATTR_BODY 6 | 7 | 8 | _LOGGER = logging.getLogger(__name__) 9 | ATTR_SYLVANE = 'sylvane' 10 | EMAIL_DOMAIN_SYLVANE = 'sylvane.com' 11 | 12 | 13 | def parse_sylvane(email): 14 | """Parse Sylvane tracking numbers.""" 15 | tracking_numbers = [] 16 | 17 | soup = BeautifulSoup(email[EMAIL_ATTR_BODY], 'html.parser') 18 | links = [link.get('href') for link in soup.find_all('a')] 19 | for link in links: 20 | if not link: 21 | continue 22 | match = re.search('trknbr=(.*?)$', link) 23 | if match and match.group(1) not in tracking_numbers: 24 | tracking_numbers.append(match.group(1)) 25 | 26 | return tracking_numbers 27 | -------------------------------------------------------------------------------- /custom_components/email/parsers/target.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import re 3 | 4 | from bs4 import BeautifulSoup 5 | from ..const import EMAIL_ATTR_BODY 6 | 7 | 8 | _LOGGER = logging.getLogger(__name__) 9 | ATTR_TARGET = 'target' 10 | EMAIL_DOMAIN_TARGET = 'target.com' 11 | 12 | 13 | def parse_target(email): 14 | """Parse Target tracking numbers.""" 15 | tracking_numbers = [] 16 | 17 | soup = BeautifulSoup(email[EMAIL_ATTR_BODY], 'html.parser') 18 | paragraphs = [paragraph.text for paragraph in soup.find_all('p')] 19 | for paragraph in paragraphs: 20 | if not paragraph: 21 | continue 22 | match = re.search('United Parcel Service Tracking # (\S{18})', paragraph) 23 | if match and match.group(1) not in tracking_numbers: 24 | tracking_numbers.append(match.group(1)) 25 | 26 | return tracking_numbers 27 | -------------------------------------------------------------------------------- /custom_components/email/parsers/the_smartest_house.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import re 3 | 4 | from bs4 import BeautifulSoup 5 | from ..const import EMAIL_ATTR_BODY 6 | 7 | 8 | _LOGGER = logging.getLogger(__name__) 9 | ATTR_SMARTEST_HOUSE = 'smartesthouse' 10 | EMAIL_DOMAIN_SMARTEST_HOUSE = 'thesmartesthouse.com' 11 | 12 | 13 | def parse_smartest_house(email): 14 | """Parse the smartest house tracking numbers.""" 15 | _LOGGER.debug(email) 16 | tracking_numbers = [] 17 | 18 | matches = re.findall(r'tracking_number=([0-9]+)', email[EMAIL_ATTR_BODY]) 19 | 20 | for tracking_number in matches: 21 | if tracking_number not in tracking_numbers: 22 | tracking_numbers.append(tracking_number) 23 | 24 | return tracking_numbers 25 | -------------------------------------------------------------------------------- /custom_components/email/parsers/thriftbooks.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import re 3 | import math 4 | 5 | from bs4 import BeautifulSoup 6 | from ..const import EMAIL_ATTR_BODY 7 | 8 | 9 | _LOGGER = logging.getLogger(__name__) 10 | ATTR_THRIFT_BOOKS = 'thrift_books' 11 | EMAIL_DOMAIN_THRIFT_BOOKS = 'thriftbooks' 12 | 13 | track_copy_pattern = re.compile(r"track my package", re.IGNORECASE) 14 | order_number_pattern = re.compile(r"Order #:\s+(\d+)") 15 | 16 | def parse_thrift_books(email): 17 | """Parse thrift books tracking numbers.""" 18 | tracking_numbers = [] 19 | 20 | soup = BeautifulSoup(email[EMAIL_ATTR_BODY], 'html.parser') 21 | elements = soup.find_all('a') 22 | 23 | for element in elements: 24 | link = element.get('href') 25 | 26 | if not link: 27 | continue 28 | 29 | if 'spmailtechno' not in link: 30 | continue 31 | 32 | try: 33 | if re.search(track_copy_pattern, element.text): 34 | match = re.search(order_number_pattern, email[EMAIL_ATTR_BODY]) 35 | if match: 36 | tracking_numbers.append({ 37 | 'link': link, 38 | 'tracking_number': match.group(1) 39 | }) 40 | except: 41 | pass 42 | 43 | return tracking_numbers 44 | -------------------------------------------------------------------------------- /custom_components/email/parsers/timeless.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import re 3 | 4 | from bs4 import BeautifulSoup 5 | from ..const import EMAIL_ATTR_BODY 6 | 7 | _LOGGER = logging.getLogger(__name__) 8 | ATTR_TIMELESS = 'timeless' 9 | EMAIL_DOMAIN_TIMLESS = 'timelessha.com' 10 | 11 | 12 | def parse_timeless(email): 13 | """Parse timeless tracking numbers.""" 14 | tracking_numbers = [] 15 | 16 | soup = BeautifulSoup(email[EMAIL_ATTR_BODY], 'html.parser') 17 | elements = soup.find_all('a') 18 | for element in elements: 19 | link = element.get('href') 20 | if not link: 21 | continue 22 | match = re.search(r'TrackConfirmAction\.action\?tLabels=(.*?)$', link) 23 | if match and match.group(1) not in tracking_numbers: 24 | tracking_numbers.append(match.group(1)) 25 | 26 | return tracking_numbers 27 | -------------------------------------------------------------------------------- /custom_components/email/parsers/ubiquiti.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import re 3 | 4 | from bs4 import BeautifulSoup 5 | from ..const import EMAIL_ATTR_BODY, EMAIL_ATTR_SUBJECT 6 | 7 | 8 | _LOGGER = logging.getLogger(__name__) 9 | ATTR_UBIQUITI = 'ubiquiti' 10 | EMAIL_DOMAIN_UBIQUITI = 'shopifyemail.com' 11 | 12 | 13 | def parse_ubiquiti(email): 14 | """Parse Ubiquiti tracking numbers.""" 15 | tracking_numbers = [] 16 | _LOGGER.error(email) 17 | 18 | # see if it's an shipped order email 19 | order_number_match = re.search('A shipment from order #(.*?) is on the way', email[EMAIL_ATTR_SUBJECT]) 20 | _LOGGER.error(order_number_match) 21 | if not order_number_match: 22 | return tracking_numbers 23 | 24 | order_number = order_number_match.group(1) 25 | 26 | soup = BeautifulSoup(email[EMAIL_ATTR_BODY], 'html.parser') 27 | links = [link.href for link in soup.find_all('a')] 28 | for link in links: 29 | if not link: 30 | continue 31 | match = re.search('/(\d{26})/orders/', link) 32 | if match and link not in tracking_numbers: 33 | tracking_numbers.append({ 34 | "tracking_number": order_number, 35 | "link": link, 36 | }) 37 | 38 | return tracking_numbers 39 | -------------------------------------------------------------------------------- /custom_components/email/parsers/ups.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import re 3 | 4 | from bs4 import BeautifulSoup 5 | from ..const import EMAIL_ATTR_BODY 6 | 7 | 8 | _LOGGER = logging.getLogger(__name__) 9 | ATTR_UPS = 'ups' 10 | EMAIL_DOMAIN_UPS = 'ups.com' 11 | 12 | 13 | def parse_ups(email): 14 | """Parse UPS tracking numbers.""" 15 | tracking_numbers = [] 16 | 17 | soup = BeautifulSoup(email[EMAIL_ATTR_BODY], 'html.parser') 18 | links = [link.get('href') for link in soup.find_all('a')] 19 | for link in links: 20 | if not link: 21 | continue 22 | 23 | match = re.search('tracknum=(.*?)&', link) 24 | if match and match.group(1) not in tracking_numbers: 25 | tracking_numbers.append(match.group(1)) 26 | 27 | return tracking_numbers 28 | -------------------------------------------------------------------------------- /custom_components/email/parsers/usps.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import re 3 | 4 | from bs4 import BeautifulSoup 5 | from ..const import EMAIL_ATTR_BODY 6 | 7 | 8 | _LOGGER = logging.getLogger(__name__) 9 | ATTR_USPS = 'usps' 10 | EMAIL_DOMAIN_USPS = 'usps.com' 11 | 12 | def parse_usps(email): 13 | """Parse USPS tracking numbers.""" 14 | tracking_numbers = [] 15 | 16 | soup = BeautifulSoup(email[EMAIL_ATTR_BODY], 'html.parser') 17 | links = [link.get('href') for link in soup.find_all('a')] 18 | for link in links: 19 | if not link: 20 | continue 21 | match = re.search('selectedTrckNum=(.*?)&', link) 22 | if match and match.group(1) not in tracking_numbers: 23 | tracking_numbers.append(match.group(1)) 24 | match = re.search('tLabels=(.*?)&', link) 25 | if match and match.group(1) not in tracking_numbers: 26 | tracking_numbers.append(match.group(1)) 27 | 28 | return tracking_numbers 29 | -------------------------------------------------------------------------------- /custom_components/email/parsers/western_digital.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import re 3 | 4 | from bs4 import BeautifulSoup 5 | from ..const import EMAIL_ATTR_BODY 6 | 7 | 8 | _LOGGER = logging.getLogger(__name__) 9 | ATTR_WESTERN_DIGITAL = 'western_digital' 10 | EMAIL_DOMAIN_WESTERN_DIGITAL = 'wdc.com' 11 | 12 | 13 | def parse_western_digital(email): 14 | """Parse Western Digital tracking numbers.""" 15 | tracking_numbers = [] 16 | 17 | soup = BeautifulSoup(email[EMAIL_ATTR_BODY], 'html.parser') 18 | links = [link.get('href') for link in soup.find_all('a')] 19 | for link in links: 20 | if not link: 21 | continue 22 | match = re.search('(?<=tracknum=)(.*)', link) 23 | if match and match.group(1) not in tracking_numbers: 24 | tracking_numbers.append(match.group(1)) 25 | 26 | return tracking_numbers 27 | -------------------------------------------------------------------------------- /custom_components/email/parsers/wyze.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import re 3 | 4 | from bs4 import BeautifulSoup 5 | from ..const import EMAIL_ATTR_BODY 6 | 7 | 8 | _LOGGER = logging.getLogger(__name__) 9 | ATTR_WYZE = 'wyze' 10 | EMAIL_DOMAIN_WYZE = 'wyze.com' 11 | 12 | 13 | def parse_wyze(email): 14 | """Parse Wyze tracking numbers.""" 15 | tracking_numbers = [] 16 | 17 | soup = BeautifulSoup(email[EMAIL_ATTR_BODY], 'html.parser') 18 | links = [link.get('href') for link in soup.find_all('a')] 19 | for link in links: 20 | if not link: 21 | continue 22 | match = re.search('tracking_numbers=(.*?)&', link) 23 | if match and match.group(1) not in tracking_numbers: 24 | tracking_numbers.append(match.group(1)) 25 | 26 | matches = re.findall(r'tracking_numbers=(.*?)&', email[EMAIL_ATTR_BODY]) 27 | for tracking_number in matches: 28 | if tracking_number not in tracking_numbers: 29 | tracking_numbers.append(tracking_number) 30 | 31 | 32 | return tracking_numbers 33 | -------------------------------------------------------------------------------- /custom_components/email/parsers/zazzle.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import re 3 | 4 | from bs4 import BeautifulSoup 5 | from ..const import EMAIL_ATTR_BODY 6 | 7 | 8 | _LOGGER = logging.getLogger(__name__) 9 | ATTR_ZAZZLE = 'zazzle' 10 | EMAIL_DOMAIN_ZAZZLE = 'zazzle.com' 11 | 12 | 13 | def parse_zazzle(email): 14 | """Parse Zazzle tracking numbers.""" 15 | tracking_numbers = [] 16 | 17 | soup = BeautifulSoup(email[EMAIL_ATTR_BODY], 'html.parser') 18 | links = [link.get('href') for link in soup.find_all('a')] 19 | for link in links: 20 | if not link: 21 | continue 22 | match = re.search('trackNums=(.*?)$', link) 23 | if match and match.group(1) not in tracking_numbers: 24 | tracking_numbers.append(match.group(1)) 25 | 26 | return tracking_numbers 27 | -------------------------------------------------------------------------------- /custom_components/email/sensor.py: -------------------------------------------------------------------------------- 1 | """Support for Google - Calendar Event Devices.""" 2 | from datetime import timedelta, date 3 | import logging 4 | import re 5 | 6 | from imapclient import IMAPClient 7 | from mailparser import parse_from_bytes 8 | import voluptuous as vol 9 | 10 | from homeassistant.components.sensor import PLATFORM_SCHEMA 11 | import homeassistant.helpers.config_validation as cv 12 | from homeassistant.helpers.entity import Entity 13 | 14 | from .const import ( 15 | CONF_EMAIL, CONF_PASSWORD, CONF_IMAP_SERVER, 16 | CONF_IMAP_PORT, CONF_SSL, CONF_EMAIL_FOLDER, CONF_DAYS_OLD, 17 | ATTR_TRACKING_NUMBERS, EMAIL_ATTR_FROM, EMAIL_ATTR_SUBJECT, 18 | EMAIL_ATTR_BODY, ATTR_COUNT) 19 | 20 | from .parsers.ups import ATTR_UPS, EMAIL_DOMAIN_UPS, parse_ups 21 | from .parsers.amazon import ATTR_AMAZON, EMAIL_DOMAIN_AMAZON, parse_amazon 22 | from .parsers.amazon_de import ATTR_AMAZON_DE, EMAIL_DOMAIN_AMAZON_DE, parse_amazon_de 23 | from .parsers.fedex import ATTR_FEDEX, EMAIL_DOMAIN_FEDEX, parse_fedex 24 | from .parsers.paypal import ATTR_PAYPAL, EMAIL_DOMAIN_PAYPAL, parse_paypal 25 | from .parsers.usps import ATTR_USPS, EMAIL_DOMAIN_USPS, parse_usps 26 | from .parsers.ali_express import ATTR_ALI_EXPRESS, EMAIL_DOMAIN_ALI_EXPRESS, parse_ali_express 27 | from .parsers.newegg import ATTR_NEWEGG, EMAIL_DOMAIN_NEWEGG, parse_newegg 28 | from .parsers.rockauto import ATTR_ROCKAUTO, EMAIL_DOMAIN_ROCKAUTO, parse_rockauto 29 | from .parsers.bh_photo import ATTR_BH_PHOTO, EMAIL_DOMAIN_BH_PHOTO, parse_bh_photo 30 | from .parsers.ebay import ATTR_EBAY, EMAIL_DOMAIN_EBAY, parse_ebay 31 | from .parsers.dhl import ATTR_DHL, EMAIL_DOMAIN_DHL, parse_dhl 32 | from .parsers.hue import ATTR_HUE, EMAIL_DOMAIN_HUE, parse_hue 33 | from .parsers.google_express import ATTR_GOOGLE_EXPRESS, EMAIL_DOMAIN_GOOGLE_EXPRESS, parse_google_express 34 | from .parsers.western_digital import ATTR_WESTERN_DIGITAL, EMAIL_DOMAIN_WESTERN_DIGITAL, parse_western_digital 35 | from .parsers.monoprice import ATTR_MONOPRICE, EMAIL_DOMAIN_MONOPRICE, parse_monoprice 36 | from .parsers.georgia_power import ATTR_GEORGIA_POWER, EMAIL_DOMAIN_GEORGIA_POWER, parse_georgia_power 37 | from .parsers.best_buy import ATTR_BEST_BUY, EMAIL_DOMAIN_BEST_BUY, parse_best_buy 38 | from .parsers.dollar_shave_club import ATTR_DOLLAR_SHAVE_CLUB, EMAIL_DOMAIN_DOLLAR_SHAVE_CLUB, parse_dollar_shave_club 39 | from .parsers.nuleaf import ATTR_NULEAF, EMAIL_DOMAIN_NULEAF, parse_nuleaf 40 | from .parsers.timeless import ATTR_TIMELESS, EMAIL_DOMAIN_TIMLESS, parse_timeless 41 | from .parsers.dsw import ATTR_DSW, EMAIL_DOMAIN_DSW, parse_dsw 42 | from .parsers.wyze import ATTR_WYZE, EMAIL_DOMAIN_WYZE, parse_wyze 43 | from .parsers.reolink import ATTR_REOLINK, EMAIL_DOMAIN_REOLINK, parse_reolink 44 | from .parsers.chewy import ATTR_CHEWY, EMAIL_DOMAIN_CHEWY, parse_chewy 45 | from .parsers.groupon import ATTR_GROUPON, EMAIL_DOMAIN_GROUPON, parse_groupon 46 | from .parsers.zazzle import ATTR_ZAZZLE, EMAIL_DOMAIN_ZAZZLE, parse_zazzle 47 | from .parsers.home_depot import ATTR_HOME_DEPOT, EMAIL_DOMAIN_HOME_DEPOT, parse_home_depot 48 | from .parsers.swiss_post import ATTR_SWISS_POST, EMAIL_DOMAIN_SWISS_POST, parse_swiss_post 49 | from .parsers.bespoke_post import ATTR_DSW, EMAIL_DOMAIN_DSW, parse_bespoke_post 50 | from .parsers.manta_sleep import ATTR_MANTA_SLEEP, EMAIL_DOMAIN_MANTA_SLEEP, parse_manta_sleep 51 | from .parsers.prusa import ATTR_PRUSA, EMAIL_DOMAIN_PRUSA, parse_prusa 52 | from .parsers.adam_eve import ATTR_ADAM_AND_EVE, EMAIL_DOMAIN_ADAM_AND_EVE, parse_adam_and_eve 53 | from .parsers.target import ATTR_TARGET, EMAIL_DOMAIN_TARGET, parse_target 54 | from .parsers.gamestop import ATTR_GAMESTOP, EMAIL_DOMAIN_GAMESTOP, parse_gamestop 55 | from .parsers.litter_robot import ATTR_LITTER_ROBOT, EMAIL_DOMAIN_LITTER_ROBOT, parse_litter_robot 56 | from .parsers.the_smartest_house import ATTR_SMARTEST_HOUSE, EMAIL_DOMAIN_SMARTEST_HOUSE, parse_smartest_house 57 | from .parsers.ubiquiti import ATTR_UBIQUITI, EMAIL_DOMAIN_UBIQUITI, parse_ubiquiti 58 | from .parsers.nintendo import ATTR_NINTENDO, EMAIL_DOMAIN_NINTENDO, parse_nintendo 59 | from .parsers.pledgebox import ATTR_PLEDGEBOX, EMAIL_DOMAIN_PLEDGEBOX, parse_pledgebox 60 | from .parsers.guitar_center import ATTR_GUITAR_CENTER, EMAIL_DOMAIN_GUITAR_CENTER, parse_guitar_center 61 | from .parsers.sony import ATTR_SONY, EMAIL_DOMAIN_SONY, parse_sony 62 | from .parsers.sylvane import ATTR_SYLVANE, EMAIL_DOMAIN_SYLVANE, parse_sylvane 63 | from .parsers.adafruit import ATTR_ADAFRUIT, EMAIL_DOMAIN_ADAFRUIT, parse_adafruit 64 | from .parsers.thriftbooks import ATTR_THRIFT_BOOKS, EMAIL_DOMAIN_THRIFT_BOOKS, parse_thrift_books 65 | from .parsers.lowes import ATTR_LOWES, EMAIL_DOMAIN_LOWES, parse_lowes 66 | 67 | from .parsers.generic import ATTR_GENERIC, EMAIL_DOMAIN_GENERIC, parse_generic 68 | 69 | 70 | parsers = [ 71 | (ATTR_UPS, EMAIL_DOMAIN_UPS, parse_ups), 72 | (ATTR_FEDEX, EMAIL_DOMAIN_FEDEX, parse_fedex), 73 | (ATTR_AMAZON, EMAIL_DOMAIN_AMAZON, parse_amazon), 74 | (ATTR_AMAZON_DE, EMAIL_DOMAIN_AMAZON_DE, parse_amazon_de), 75 | (ATTR_PAYPAL, EMAIL_DOMAIN_PAYPAL, parse_paypal), 76 | (ATTR_USPS, EMAIL_DOMAIN_USPS, parse_usps), 77 | (ATTR_ALI_EXPRESS, EMAIL_DOMAIN_ALI_EXPRESS, parse_ali_express), 78 | (ATTR_NEWEGG, EMAIL_DOMAIN_NEWEGG, parse_newegg), 79 | (ATTR_ROCKAUTO, EMAIL_DOMAIN_ROCKAUTO, parse_rockauto), 80 | (ATTR_BH_PHOTO, EMAIL_DOMAIN_BH_PHOTO, parse_bh_photo), 81 | (ATTR_EBAY, EMAIL_DOMAIN_EBAY, parse_ebay), 82 | (ATTR_DHL, EMAIL_DOMAIN_DHL, parse_dhl), 83 | (ATTR_HUE, EMAIL_DOMAIN_HUE, parse_hue), 84 | (ATTR_GOOGLE_EXPRESS, EMAIL_DOMAIN_GOOGLE_EXPRESS, parse_google_express), 85 | (ATTR_WESTERN_DIGITAL, EMAIL_DOMAIN_WESTERN_DIGITAL, parse_western_digital), 86 | (ATTR_MONOPRICE, EMAIL_DOMAIN_MONOPRICE, parse_monoprice), 87 | (ATTR_GEORGIA_POWER, EMAIL_DOMAIN_GEORGIA_POWER, parse_georgia_power), 88 | (ATTR_BEST_BUY, EMAIL_DOMAIN_BEST_BUY, parse_best_buy), 89 | (ATTR_DOLLAR_SHAVE_CLUB, EMAIL_DOMAIN_DOLLAR_SHAVE_CLUB, parse_dollar_shave_club), 90 | (ATTR_NULEAF, EMAIL_DOMAIN_NULEAF, parse_nuleaf), 91 | (ATTR_TIMELESS, EMAIL_DOMAIN_TIMLESS, parse_timeless), 92 | (ATTR_DSW, EMAIL_DOMAIN_DSW, parse_dsw), 93 | (ATTR_WYZE, EMAIL_DOMAIN_WYZE, parse_wyze), 94 | (ATTR_REOLINK, EMAIL_DOMAIN_REOLINK, parse_reolink), 95 | (ATTR_CHEWY, EMAIL_DOMAIN_CHEWY, parse_chewy), 96 | (ATTR_GROUPON, EMAIL_DOMAIN_GROUPON, parse_groupon), 97 | (ATTR_ZAZZLE, EMAIL_DOMAIN_ZAZZLE, parse_zazzle), 98 | (ATTR_HOME_DEPOT, EMAIL_DOMAIN_HOME_DEPOT, parse_home_depot), 99 | (ATTR_SWISS_POST, EMAIL_DOMAIN_SWISS_POST, parse_swiss_post), 100 | (ATTR_DSW, EMAIL_DOMAIN_DSW, parse_bespoke_post), 101 | (ATTR_MANTA_SLEEP, EMAIL_DOMAIN_MANTA_SLEEP, parse_manta_sleep), 102 | (ATTR_PRUSA, EMAIL_DOMAIN_PRUSA, parse_prusa), 103 | (ATTR_ADAM_AND_EVE, EMAIL_DOMAIN_ADAM_AND_EVE, parse_adam_and_eve), 104 | (ATTR_TARGET, EMAIL_DOMAIN_TARGET, parse_target), 105 | (ATTR_GAMESTOP, EMAIL_DOMAIN_GAMESTOP, parse_gamestop), 106 | (ATTR_LITTER_ROBOT, EMAIL_DOMAIN_LITTER_ROBOT, parse_litter_robot), 107 | (ATTR_SMARTEST_HOUSE, EMAIL_DOMAIN_SMARTEST_HOUSE, parse_smartest_house), 108 | (ATTR_UBIQUITI, EMAIL_DOMAIN_UBIQUITI, parse_ubiquiti), 109 | (ATTR_NINTENDO, EMAIL_DOMAIN_NINTENDO, parse_nintendo), 110 | (ATTR_PLEDGEBOX, EMAIL_DOMAIN_PLEDGEBOX, parse_pledgebox), 111 | (ATTR_GUITAR_CENTER, EMAIL_DOMAIN_GUITAR_CENTER, parse_guitar_center), 112 | (ATTR_SONY, EMAIL_DOMAIN_SONY, parse_sony), 113 | (ATTR_SYLVANE, EMAIL_DOMAIN_SYLVANE, parse_sylvane), 114 | (ATTR_ADAFRUIT, EMAIL_DOMAIN_ADAFRUIT, parse_adafruit), 115 | (ATTR_THRIFT_BOOKS, EMAIL_DOMAIN_THRIFT_BOOKS, parse_thrift_books), 116 | (ATTR_LOWES, EMAIL_DOMAIN_LOWES, parse_lowes), 117 | 118 | (ATTR_GENERIC, EMAIL_DOMAIN_GENERIC, parse_generic), 119 | ] 120 | 121 | _LOGGER = logging.getLogger(__name__) 122 | 123 | DOMAIN = 'email' 124 | SCAN_INTERVAL = timedelta(seconds=30*60) 125 | 126 | PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ 127 | vol.Required(CONF_EMAIL): cv.string, 128 | vol.Required(CONF_PASSWORD): cv.string, 129 | vol.Required(CONF_DAYS_OLD, default='30'): cv.positive_int, 130 | vol.Required(CONF_IMAP_SERVER, default='imap.gmail.com'): cv.string, 131 | vol.Required(CONF_IMAP_PORT, default=993): cv.positive_int, 132 | vol.Required(CONF_SSL, default=True): cv.boolean, 133 | vol.Required(CONF_EMAIL_FOLDER, default='INBOX'): cv.string, 134 | }) 135 | 136 | TRACKING_NUMBER_URLS = { 137 | 'ups': "https://www.ups.com/track?loc=en_US&tracknum=", 138 | 'usps': "https://tools.usps.com/go/TrackConfirmAction?tLabels=", 139 | 'fedex': "https://www.fedex.com/apps/fedextrack/?tracknumbers=", 140 | 'dhl': 'https://www.logistics.dhl/us-en/home/tracking/tracking-parcel.html?submit=1&tracking-id=', 141 | 'swiss_post': 'https://www.swisspost.ch/track?formattedParcelCodes=', 142 | 'unknown': 'https://www.google.com/search?q=', 143 | } 144 | 145 | 146 | usps_pattern = [ 147 | '^(94|93|92|94|95)[0-9]{20}$', 148 | '^(94|93|92|94|95)[0-9]{22}$', 149 | '^(70|14|23|03)[0-9]{14}$', 150 | '^(M0|82)[0-9]{8}$', 151 | '^([A-Z]{2})[0-9]{9}([A-Z]{2})$' 152 | ] 153 | 154 | ups_pattern = [ 155 | '^(1Z)[0-9A-Z]{16}$', 156 | '^(T)+[0-9A-Z]{10}$', 157 | '^[0-9]{9}$', 158 | '^[0-9]{26}$' 159 | ] 160 | 161 | fedex_pattern = [ 162 | '^[0-9]{20}$', 163 | '^[0-9]{15}$', 164 | '^[0-9]{12}$', 165 | '^[0-9]{22}$' 166 | ] 167 | 168 | usps_regex = "(" + ")|(".join(usps_pattern) + ")" 169 | fedex_regex = "(" + ")|(".join(fedex_pattern) + ")" 170 | ups_regex = "(" + ")|(".join(ups_pattern) + ")" 171 | 172 | def find_carrier(tracking_number, email_domain): 173 | _LOGGER.debug(f'find_carrier email_domain: {email_domain} {tracking_number}') 174 | 175 | # we may have the carrier/link already parsed from parser 176 | if type(tracking_number) is dict: 177 | return { 178 | 'tracking_number': tracking_number.get('tracking_number', ''), 179 | 'carrier': email_domain, 180 | 'origin': email_domain, 181 | 'link': tracking_number.get('link', ''), 182 | } 183 | 184 | link = "" 185 | carrier = "" 186 | 187 | # if tracking number is a url then use that 188 | if tracking_number.startswith('http'): 189 | link = tracking_number 190 | carrier = email_domain 191 | 192 | # if from carrier themself then use that 193 | elif email_domain == EMAIL_DOMAIN_UPS: 194 | link = TRACKING_NUMBER_URLS["ups"] 195 | carrier = "UPS" 196 | elif email_domain == EMAIL_DOMAIN_FEDEX: 197 | link = TRACKING_NUMBER_URLS["fedex"] 198 | carrier = "FedEx" 199 | elif email_domain == EMAIL_DOMAIN_USPS: 200 | link = TRACKING_NUMBER_URLS["usps"] 201 | carrier = "USPS" 202 | elif email_domain == EMAIL_DOMAIN_DHL: 203 | link = TRACKING_NUMBER_URLS["dhl"] 204 | carrier = "DHL" 205 | elif email_domain == EMAIL_DOMAIN_SWISS_POST: 206 | link = TRACKING_NUMBER_URLS["swiss_post"] 207 | carrier = "Swiss Post" 208 | 209 | # regex tracking number 210 | elif re.search(usps_regex, tracking_number) != None: 211 | link = TRACKING_NUMBER_URLS["usps"] 212 | carrier = 'USPS' 213 | elif re.search(ups_regex, tracking_number) != None: 214 | link = TRACKING_NUMBER_URLS["ups"] 215 | carrier = 'UPS' 216 | elif re.search(fedex_regex, tracking_number) != None: 217 | link = TRACKING_NUMBER_URLS["fedex"] 218 | carrier = 'FedEx' 219 | 220 | # try one more time 221 | else: 222 | isNumber = tracking_number.isnumeric() 223 | length = len(tracking_number) 224 | 225 | if (isNumber and (length == 12 or length == 15 or length == 20)): 226 | link = TRACKING_NUMBER_URLS["fedex"] 227 | carrier = "FedEx" 228 | elif (isNumber and length == 22): 229 | link = TRACKING_NUMBER_URLS["usps"] 230 | carrier = "USPS" 231 | elif (length > 25): 232 | link = TRACKING_NUMBER_URLS["dhl"] 233 | carrier = "DHL" 234 | else: 235 | link = TRACKING_NUMBER_URLS["unknown"] 236 | carrier = email_domain 237 | 238 | return { 239 | 'tracking_number': tracking_number, 240 | 'carrier': carrier, 241 | 'origin': email_domain or carrier, 242 | 'link': f'{link}{tracking_number}', 243 | } 244 | 245 | def setup_platform(hass, config, add_entities, discovery_info=None): 246 | """Set up the Email platform.""" 247 | add_entities([EmailEntity(config)], True) 248 | 249 | 250 | class EmailEntity(Entity): 251 | """Email Entity.""" 252 | 253 | def __init__(self, config): 254 | """Init the Email Entity.""" 255 | self._attr = { 256 | ATTR_TRACKING_NUMBERS: {}, 257 | ATTR_COUNT: 0 258 | } 259 | 260 | self.imap_server = config[CONF_IMAP_SERVER] 261 | self.imap_port = config[CONF_IMAP_PORT] 262 | self.email_address = config[CONF_EMAIL] 263 | self.password = config[CONF_PASSWORD] 264 | self.email_folder = config[CONF_EMAIL_FOLDER] 265 | self.ssl = config[CONF_SSL] 266 | self.days_old = int(config[CONF_DAYS_OLD]) 267 | 268 | self.flag = [u'SINCE', date.today() - timedelta(days=self.days_old)] 269 | 270 | def update(self): 271 | """Update data from Email API.""" 272 | self._attr = { 273 | ATTR_TRACKING_NUMBERS: {}, 274 | ATTR_COUNT: 0 275 | } 276 | 277 | # update to current day 278 | self.flag = [u'SINCE', date.today() - timedelta(days=self.days_old)] 279 | _LOGGER.debug(f'flag: {self.flag}') 280 | 281 | emails = [] 282 | server = IMAPClient(self.imap_server, port=self.imap_port, use_uid=True, ssl=self.ssl) 283 | 284 | try: 285 | server.login(self.email_address, self.password) 286 | server.select_folder(self.email_folder, readonly=True) 287 | except Exception as err: 288 | _LOGGER.error('IMAPClient login error {}'.format(err)) 289 | return False 290 | 291 | try: 292 | messages = server.search(self.flag) 293 | for uid, message_data in server.fetch(messages, 'RFC822').items(): 294 | try: 295 | mail = parse_from_bytes(message_data[b'RFC822']) 296 | 297 | emails.append({ 298 | EMAIL_ATTR_FROM: mail.from_, 299 | EMAIL_ATTR_SUBJECT: mail.subject, 300 | EMAIL_ATTR_BODY: mail.body 301 | }) 302 | except Exception as err: 303 | _LOGGER.warning( 304 | 'mailparser parse_from_bytes error: {}'.format(err)) 305 | 306 | except Exception as err: 307 | _LOGGER.error('IMAPClient update error: {}'.format(err)) 308 | 309 | # empty out all parser arrays 310 | for ATTR, EMAIL_DOMAIN, parser in parsers: 311 | self._attr[ATTR_TRACKING_NUMBERS][ATTR] = [] 312 | 313 | # for each email run each parser and save in the corresponding ATTR 314 | for email in emails: 315 | email_from = email[EMAIL_ATTR_FROM] 316 | _LOGGER.debug(f'parsing email from {email_from}') 317 | if isinstance(email_from, (list, tuple)): 318 | email_from = list(email_from) 319 | email_from = ''.join(list(email_from[0])) 320 | 321 | # run through all parsers for each email if email domain matches 322 | for ATTR, EMAIL_DOMAIN, parser in parsers: 323 | _LOGGER.debug(f'parsing email for parser {EMAIL_DOMAIN}') 324 | try: 325 | if EMAIL_DOMAIN in email_from: 326 | self._attr[ATTR_TRACKING_NUMBERS][ATTR] = self._attr[ATTR_TRACKING_NUMBERS][ATTR] + parser(email=email) 327 | except Exception as err: 328 | _LOGGER.error('{} error: {}'.format(ATTR, err)) 329 | 330 | counter = 0 331 | # remove duplicates 332 | for ATTR, EMAIL_DOMAIN, parser in parsers: 333 | tracking_numbers = self._attr[ATTR_TRACKING_NUMBERS][ATTR] 334 | if len(tracking_numbers) > 0 and isinstance(tracking_numbers[0], str): 335 | self._attr[ATTR_TRACKING_NUMBERS][ATTR] = list( 336 | dict.fromkeys(tracking_numbers)) 337 | 338 | # format tracking numbers to add carrier type 339 | for ATTR, EMAIL_DOMAIN, parser in parsers: 340 | _LOGGER.debug(f'parsing tracking numbers for {EMAIL_DOMAIN}') 341 | tracking_numbers = self._attr[ATTR_TRACKING_NUMBERS][ATTR] 342 | self._attr[ATTR_TRACKING_NUMBERS][ATTR] = list(map(lambda x: find_carrier(x, EMAIL_DOMAIN), tracking_numbers)) 343 | _LOGGER.debug(self._attr[ATTR_TRACKING_NUMBERS][ATTR]) 344 | counter = counter + len(tracking_numbers) 345 | 346 | self._attr[ATTR_COUNT] = counter 347 | server.logout() 348 | 349 | @property 350 | def name(self): 351 | """Return the name of the sensor.""" 352 | return 'email_{}'.format(self.email_address) 353 | 354 | @property 355 | def state(self): 356 | """Return the state of the sensor.""" 357 | return self._attr[ATTR_COUNT] 358 | 359 | @property 360 | def extra_state_attributes(self): 361 | """Return the state attributes.""" 362 | return self._attr 363 | 364 | @property 365 | def icon(self): 366 | """Return the icon to use in the frontend.""" 367 | return 'mdi:email' 368 | -------------------------------------------------------------------------------- /hacs.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "Email Sensor", 3 | "render_readme": true 4 | } -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | beautifulsoup4==4.7.1 2 | imapclient==2.1.0 3 | mail-parser==3.9.3 --------------------------------------------------------------------------------