├── .editorconfig
├── .envrc
├── .github
│   ├── dependabot.yml
│   └── workflows
│       └── python.yaml
├── .gitignore
├── .pre-commit-config.yaml
├── .vim
│   └── coc-settings.json
├── CHANGELOG.md
├── LICENSE
├── README.md
├── cicd
│   └── custom_style_check.py
├── dev-requirements.txt
├── examples
│   ├── get-conversation.py
│   ├── listen-for-events.py
│   ├── manual-login.py
│   ├── mark-conversation-as-read.py
│   ├── pickle-example.py
│   ├── redact-message.py
│   ├── send-message.py
│   ├── send-reaction.py
│   └── simple-2fa.py
├── linkedin_messaging
│   ├── __init__.py
│   ├── api_objects.py
│   ├── exceptions.py
│   ├── linkedin.py
│   └── py.typed
├── pyproject.toml
├── requirements.txt
├── setup.cfg
├── shell.nix
└── tests
    └── test_urn.py

/.editorconfig:
--------------------------------------------------------------------------------
1 | # EditorConfig is awesome: https://EditorConfig.org
2 |
3 | # top-most EditorConfig file
4 | root = true
5 |
6 | # Unix-style newlines with a newline ending every file
7 | [*]
8 | end_of_line = lf
9 | insert_final_newline = true
10 | charset = utf-8
11 | indent_style = space
12 | indent_size = 4
13 |
14 | [*.py]
15 | max_line_length = 99
16 |
17 | # Indentation override for all JSON/YAML files
18 | [*.{json,yaml,yml}]
19 | indent_style = space
20 | indent_size = 2
21 |
--------------------------------------------------------------------------------
/.envrc:
--------------------------------------------------------------------------------
1 | use nix
2 |
3 | # If the .venv folder doesn't exist, create a virtualenv.
4 | if [ ! -d .venv ]; then
5 | python3 -m venv .venv
6 | rm -rf .direnv/requirements.txt
7 | rm -rf .direnv/dev-requirements.txt
8 | fi
9 |
10 | # Activate the virtualenv
11 | source .venv/bin/activate
12 |
13 | # Install the project dependencies
14 | cached_req=$(expand_path .direnv/requirements.txt)
15 |
16 | if ! cmp -s $cached_req requirements.txt; then
17 | mkdir -p .direnv
18 | pip install -r requirements.txt
19 | cp requirements.txt $cached_req
20 | fi
21 |
22 | cached_devreq=$(expand_path .direnv/dev-requirements.txt)
23 | if !
cmp -s $cached_devreq dev-requirements.txt; then 24 | mkdir -p .direnv 25 | pip install -r dev-requirements.txt 26 | cp dev-requirements.txt $cached_devreq 27 | fi 28 | 29 | watch_file requirements.txt 30 | watch_file dev-requirements.txt 31 | watch_file shell.nix 32 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | - package-ecosystem: "github-actions" 4 | directory: "/" 5 | schedule: 6 | interval: "weekly" 7 | - package-ecosystem: "pip" 8 | directory: "/" 9 | schedule: 10 | interval: "weekly" 11 | -------------------------------------------------------------------------------- /.github/workflows/python.yaml: -------------------------------------------------------------------------------- 1 | name: Python 2 | 3 | on: 4 | push: 5 | pull_request: 6 | branches: [master] 7 | release: 8 | types: 9 | - created 10 | 11 | env: 12 | PYTHON_VERSION: 3.9 13 | 14 | jobs: 15 | lint: 16 | name: Run linters 17 | runs-on: ubuntu-latest 18 | steps: 19 | - uses: actions/checkout@v4 20 | - name: Set up Python ${{ env.PYTHON_VERSION }} 21 | uses: actions/setup-python@v5 22 | with: 23 | python-version: ${{ env.PYTHON_VERSION }} 24 | 25 | - name: Install dependencies 26 | run: | 27 | pip install -r dev-requirements.txt 28 | 29 | - name: Run pre-commit 30 | uses: pre-commit/action@v3.0.0 31 | with: 32 | extra_args: --verbose --all-files 33 | 34 | test: 35 | name: Run the tests 36 | runs-on: ubuntu-latest 37 | steps: 38 | - uses: actions/checkout@v4 39 | - name: Set up Python ${{ env.PYTHON_VERSION }} 40 | uses: actions/setup-python@v5 41 | with: 42 | python-version: ${{ env.PYTHON_VERSION }} 43 | 44 | - name: Install dependencies 45 | run: | 46 | pip install -r dev-requirements.txt 47 | 48 | - name: Run pytest 49 | run: | 50 | pytest -vv 51 | pytest -vv > pytest-coverage.txt 52 | 53 | - name: Comment coverage 54 | uses: coroo/pytest-coverage-commentator@v1.0.2 55 | if: ${{ github.event_name == 'pull_request' && github.event.action == 'created' }} 56 | 57 | build: 58 | name: Build linkedin_messaging 59 | runs-on: ubuntu-latest 60 | steps: 61 | - uses: actions/checkout@v4 62 | - name: Set up Python ${{ env.PYTHON_VERSION }} 63 | uses: actions/setup-python@v5 64 | with: 65 | python-version: ${{ env.PYTHON_VERSION }} 66 | 67 | - name: Install dependencies 68 | run: | 69 | pip install -r dev-requirements.txt 70 | 71 | - run: flit build 72 | 73 | deploy: 74 | name: Deploy to PyPi 75 | runs-on: ubuntu-latest 76 | needs: [lint, test, build] 77 | if: ${{ github.event_name == 'release' && github.event.action == 'created' }} 78 | 79 | env: 80 | FLIT_USERNAME: "__token__" 81 | FLIT_PASSWORD: ${{ secrets.PYPI_DEPLOY_TOKEN }} 82 | 83 | steps: 84 | - uses: actions/checkout@v4 85 | - name: Set up Python ${{ env.PYTHON_VERSION }} 86 | uses: actions/setup-python@v5 87 | with: 88 | python-version: ${{ env.PYTHON_VERSION }} 89 | 90 | - name: Install dependencies 91 | run: | 92 | pip install -r dev-requirements.txt 93 | 94 | - run: flit build 95 | 96 | - run: flit publish 97 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | cookies.pickle 2 | test.py 3 | .direnv 4 | 5 | # Created by https://www.toptal.com/developers/gitignore/api/python 6 | # Edit at https://www.toptal.com/developers/gitignore?templates=python 7 | 8 | ### Python ### 9 
| # Byte-compiled / optimized / DLL files 10 | __pycache__/ 11 | *.py[cod] 12 | *$py.class 13 | 14 | # C extensions 15 | *.so 16 | 17 | # Distribution / packaging 18 | .Python 19 | build/ 20 | develop-eggs/ 21 | dist/ 22 | downloads/ 23 | eggs/ 24 | .eggs/ 25 | lib/ 26 | lib64/ 27 | parts/ 28 | sdist/ 29 | var/ 30 | wheels/ 31 | share/python-wheels/ 32 | *.egg-info/ 33 | .installed.cfg 34 | *.egg 35 | MANIFEST 36 | 37 | # PyInstaller 38 | # Usually these files are written by a python script from a template 39 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 40 | *.manifest 41 | *.spec 42 | 43 | # Installer logs 44 | pip-log.txt 45 | pip-delete-this-directory.txt 46 | 47 | # Unit test / coverage reports 48 | htmlcov/ 49 | .tox/ 50 | .nox/ 51 | .coverage 52 | .coverage.* 53 | .cache 54 | nosetests.xml 55 | coverage.xml 56 | *.cover 57 | *.py,cover 58 | .hypothesis/ 59 | .pytest_cache/ 60 | cover/ 61 | 62 | # Translations 63 | *.mo 64 | *.pot 65 | 66 | # Django stuff: 67 | *.log 68 | local_settings.py 69 | db.sqlite3 70 | db.sqlite3-journal 71 | 72 | # Flask stuff: 73 | instance/ 74 | .webassets-cache 75 | 76 | # Scrapy stuff: 77 | .scrapy 78 | 79 | # Sphinx documentation 80 | docs/_build/ 81 | 82 | # PyBuilder 83 | .pybuilder/ 84 | target/ 85 | 86 | # Jupyter Notebook 87 | .ipynb_checkpoints 88 | 89 | # IPython 90 | profile_default/ 91 | ipython_config.py 92 | 93 | # pyenv 94 | # For a library or package, you might want to ignore these files since the code is 95 | # intended to run in multiple environments; otherwise, check them in: 96 | # .python-version 97 | 98 | # pipenv 99 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 100 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 101 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 102 | # install all needed dependencies. 103 | #Pipfile.lock 104 | 105 | # PEP 582; used by e.g. 
github.com/David-OConnor/pyflow 106 | __pypackages__/ 107 | 108 | # Celery stuff 109 | celerybeat-schedule 110 | celerybeat.pid 111 | 112 | # SageMath parsed files 113 | *.sage.py 114 | 115 | # Environments 116 | .env 117 | .venv 118 | env/ 119 | venv/ 120 | ENV/ 121 | env.bak/ 122 | venv.bak/ 123 | 124 | # Spyder project settings 125 | .spyderproject 126 | .spyproject 127 | 128 | # Rope project settings 129 | .ropeproject 130 | 131 | # mkdocs documentation 132 | /site 133 | 134 | # mypy 135 | .mypy_cache/ 136 | .dmypy.json 137 | dmypy.json 138 | 139 | # Pyre type checker 140 | .pyre/ 141 | 142 | # pytype static type analyzer 143 | .pytype/ 144 | 145 | # Cython debug symbols 146 | cython_debug/ 147 | 148 | # End of https://www.toptal.com/developers/gitignore/api/python 149 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | repos: 2 | - repo: https://github.com/pre-commit/pre-commit-hooks 3 | rev: v4.4.0 4 | hooks: 5 | - id: trailing-whitespace 6 | exclude_types: [markdown] 7 | - id: end-of-file-fixer 8 | - id: check-yaml 9 | - id: check-added-large-files 10 | 11 | - repo: https://github.com/tox-dev/pyproject-fmt 12 | rev: 0.11.2 13 | hooks: 14 | - id: pyproject-fmt 15 | 16 | # black 17 | - repo: https://github.com/psf/black 18 | rev: 23.3.0 19 | hooks: 20 | - id: black 21 | files: ^(linkedin_messaging|tests)/.*$ 22 | 23 | # isort 24 | - repo: https://github.com/PyCQA/isort 25 | rev: 5.12.0 26 | hooks: 27 | - id: isort 28 | files: ^(linkedin_messaging|tests)/.*$ 29 | 30 | # flake8 31 | - repo: https://github.com/pycqa/flake8 32 | rev: 6.0.0 33 | hooks: 34 | - id: flake8 35 | files: ^(linkedin_messaging|tests)/.*$ 36 | 37 | # mypy 38 | - repo: https://github.com/pre-commit/mirrors-mypy 39 | rev: v1.3.0 40 | hooks: 41 | - id: mypy 42 | additional_dependencies: [types-termcolor==1.1.3] 43 | 44 | # custom style checks 45 | - repo: local 46 | hooks: 47 | - id: custom-style-check 48 | name: custom style check 49 | entry: ./cicd/custom_style_check.py 50 | language: system 51 | -------------------------------------------------------------------------------- /.vim/coc-settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "python.linting.pylintEnabled": false, 3 | "python.linting.flake8Enabled": true, 4 | "python.linting.enabled": true, 5 | "python.linting.mypyEnabled": true, 6 | "python.formatting.provider": "black", 7 | "python.pythonPath": ".venv/bin/python" 8 | } 9 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # v0.6.0 2 | 3 | * Removed ability to pickle the `LinkedInMessaging` cookies. Use `from_cookies` 4 | and pass `JSESSIONID` and `li_at` individually. 5 | 6 | # v0.5.7 7 | 8 | * Added objects to support message edits. 9 | 10 | # v0.5.6 11 | 12 | * Added objects to support voice messages. 13 | * Updated versions of many dependencies and all of the pre-commit hooks. 14 | 15 | # v0.5.5 16 | 17 | * Add back support for the `TIMEOUT` special event. All other errors still need 18 | to be handled by API consumers. 19 | 20 | # v0.5.4 21 | 22 | * Fix the `__license__` property in the package. 23 | * Added some objects to support posts media. Contributed by @mehdiirh in #5 24 | * Dropped support for the `STREAM_ERROR` and `TIMEOUT` special events. 
Consumers 25 | of that API should just handle errors thrown by the `start_listener` call 26 | and perform the appropriate error mitigations. 27 | 28 | * Examples 29 | 30 | * Add `ALL_EVENTS` listener example. 31 | 32 | * Internal: 33 | 34 | * Convert to flit for managing the package and dependencies. 35 | 36 | * Developer experience: 37 | 38 | * Add dependabot for GitHub Actions and Python requirements. 39 | * Use [pre-commit/action](https://github.com/pre-commit/action) for linting in 40 | CI. 41 | * Update all of the pre-commit hooks. 42 | 43 | # v0.5.3 44 | 45 | * Add manual login option where you can specify the `li_at` and `JSESSIONID` 46 | cookies manually. For example, you could open a private browser window, log 47 | in, and extract the cookies from the developer tools. 48 | 49 | * Internal: 50 | 51 | * Update GitHub Actions workflow to not use a matrix for Python versions. 52 | 53 | # v0.5.2 54 | 55 | * Parse timestamp from LinkedIn as UTC timestamps in case the server's timezone 56 | is different than UTC. 57 | * Internal: 58 | 59 | * Add isort and pre-commit. 60 | * Changed maximum line length from 88 to 99. 61 | * Reordered imports to be more in line with other mautrix bridges. 62 | 63 | # v0.5.1 64 | 65 | * Add objects for typing notifications and read receipts. 66 | * Add `set_typing` function for sending typing notifications. 67 | 68 | # v0.5.0 69 | 70 | * Add objects for shared feed updates. 71 | 72 | # v0.4.3 73 | 74 | * Add conversation name field to `Conversation` and name update custom content 75 | to `MessageCustomContent`. 76 | 77 | # v0.4.2 78 | 79 | * Explicitly specify the timeout of each event stream connection to help avoid 80 | weird states where the event stream is open but not receiving events. 81 | * Add more logging and error handling when event listener handlers fail. 82 | 83 | # v0.4.1 84 | 85 | * Added special events for monitoring listener connection status. 86 | * Added the ability to mark a conversation as read. 87 | 88 | # v0.4.0 89 | 90 | * Dropped support for everything except for Python 3.9+. 91 | * Made all API fields optional. 92 | 93 | # v0.3.0 94 | 95 | * Add support for deleting messages. 96 | * Added support for the `recalled_at` property on `MessageEvent`s. 97 | * Add support for adding and removing reactions. 98 | * Add support for getting the reactors of a message and emoji. 99 | * Improved error handling when there is a JSON decode error. 100 | 101 | # v0.2.1 102 | 103 | * Removed `liap` cookie as required for being authenticated. 104 | 105 | # v0.2.0 106 | 107 | * Implemented logout endpoint. 108 | 109 | # v0.1.7 110 | 111 | * Fix typo in InMail parsing. 112 | 113 | # v0.1.6 114 | 115 | * Add support for parsing alternate names and images for InMail profiles 116 | 117 | # v0.1.5 118 | 119 | * Add support for parsing InMail messages 120 | 121 | # v0.1.4 122 | 123 | * Add `py.typed` file to indicate that the library has type hints. 124 | 125 | # v0.1.3 126 | 127 | * Made the `URN` object hashable. This is useful for using `URN`s as keys for 128 | dictionaries 129 | * Added more function type annotations and enforced using annotations via 130 | flake8-annotations. 131 | 132 | # v0.1.2 133 | 134 | * Add a few convenience methods for URNs. 135 | * Add `get_all_conversations` async generator for iterating through all of the 136 | user's conversations. 137 | * Add better error handling to download functions. 138 | * Add examples 139 | 140 | # v0.1.1 141 | 142 | This is the initial release. 
Features include: 143 | 144 | * Login helper and session manager. Works with 2FA. 145 | * Get a list of the user's conversations. 146 | * Retrieve the messages and media in a particular conversation. 147 | * Send messages to a particular conversation or set of recipients. Multimedia is 148 | supported. 149 | * An event listener structure for listening for new events. 150 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | 2 | Apache License 3 | Version 2.0, January 2004 4 | http://www.apache.org/licenses/ 5 | 6 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 7 | 8 | 1. Definitions. 9 | 10 | "License" shall mean the terms and conditions for use, reproduction, 11 | and distribution as defined by Sections 1 through 9 of this document. 12 | 13 | "Licensor" shall mean the copyright owner or entity authorized by 14 | the copyright owner that is granting the License. 15 | 16 | "Legal Entity" shall mean the union of the acting entity and all 17 | other entities that control, are controlled by, or are under common 18 | control with that entity. For the purposes of this definition, 19 | "control" means (i) the power, direct or indirect, to cause the 20 | direction or management of such entity, whether by contract or 21 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 22 | outstanding shares, or (iii) beneficial ownership of such entity. 23 | 24 | "You" (or "Your") shall mean an individual or Legal Entity 25 | exercising permissions granted by this License. 26 | 27 | "Source" form shall mean the preferred form for making modifications, 28 | including but not limited to software source code, documentation 29 | source, and configuration files. 30 | 31 | "Object" form shall mean any form resulting from mechanical 32 | transformation or translation of a Source form, including but 33 | not limited to compiled object code, generated documentation, 34 | and conversions to other media types. 35 | 36 | "Work" shall mean the work of authorship, whether in Source or 37 | Object form, made available under the License, as indicated by a 38 | copyright notice that is included in or attached to the work 39 | (an example is provided in the Appendix below). 40 | 41 | "Derivative Works" shall mean any work, whether in Source or Object 42 | form, that is based on (or derived from) the Work and for which the 43 | editorial revisions, annotations, elaborations, or other modifications 44 | represent, as a whole, an original work of authorship. For the purposes 45 | of this License, Derivative Works shall not include works that remain 46 | separable from, or merely link (or bind by name) to the interfaces of, 47 | the Work and Derivative Works thereof. 48 | 49 | "Contribution" shall mean any work of authorship, including 50 | the original version of the Work and any modifications or additions 51 | to that Work or Derivative Works thereof, that is intentionally 52 | submitted to Licensor for inclusion in the Work by the copyright owner 53 | or by an individual or Legal Entity authorized to submit on behalf of 54 | the copyright owner. 
For the purposes of this definition, "submitted" 55 | means any form of electronic, verbal, or written communication sent 56 | to the Licensor or its representatives, including but not limited to 57 | communication on electronic mailing lists, source code control systems, 58 | and issue tracking systems that are managed by, or on behalf of, the 59 | Licensor for the purpose of discussing and improving the Work, but 60 | excluding communication that is conspicuously marked or otherwise 61 | designated in writing by the copyright owner as "Not a Contribution." 62 | 63 | "Contributor" shall mean Licensor and any individual or Legal Entity 64 | on behalf of whom a Contribution has been received by Licensor and 65 | subsequently incorporated within the Work. 66 | 67 | 2. Grant of Copyright License. Subject to the terms and conditions of 68 | this License, each Contributor hereby grants to You a perpetual, 69 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 70 | copyright license to reproduce, prepare Derivative Works of, 71 | publicly display, publicly perform, sublicense, and distribute the 72 | Work and such Derivative Works in Source or Object form. 73 | 74 | 3. Grant of Patent License. Subject to the terms and conditions of 75 | this License, each Contributor hereby grants to You a perpetual, 76 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 77 | (except as stated in this section) patent license to make, have made, 78 | use, offer to sell, sell, import, and otherwise transfer the Work, 79 | where such license applies only to those patent claims licensable 80 | by such Contributor that are necessarily infringed by their 81 | Contribution(s) alone or by combination of their Contribution(s) 82 | with the Work to which such Contribution(s) was submitted. If You 83 | institute patent litigation against any entity (including a 84 | cross-claim or counterclaim in a lawsuit) alleging that the Work 85 | or a Contribution incorporated within the Work constitutes direct 86 | or contributory patent infringement, then any patent licenses 87 | granted to You under this License for that Work shall terminate 88 | as of the date such litigation is filed. 89 | 90 | 4. Redistribution. 
You may reproduce and distribute copies of the 91 | Work or Derivative Works thereof in any medium, with or without 92 | modifications, and in Source or Object form, provided that You 93 | meet the following conditions: 94 | 95 | (a) You must give any other recipients of the Work or 96 | Derivative Works a copy of this License; and 97 | 98 | (b) You must cause any modified files to carry prominent notices 99 | stating that You changed the files; and 100 | 101 | (c) You must retain, in the Source form of any Derivative Works 102 | that You distribute, all copyright, patent, trademark, and 103 | attribution notices from the Source form of the Work, 104 | excluding those notices that do not pertain to any part of 105 | the Derivative Works; and 106 | 107 | (d) If the Work includes a "NOTICE" text file as part of its 108 | distribution, then any Derivative Works that You distribute must 109 | include a readable copy of the attribution notices contained 110 | within such NOTICE file, excluding those notices that do not 111 | pertain to any part of the Derivative Works, in at least one 112 | of the following places: within a NOTICE text file distributed 113 | as part of the Derivative Works; within the Source form or 114 | documentation, if provided along with the Derivative Works; or, 115 | within a display generated by the Derivative Works, if and 116 | wherever such third-party notices normally appear. The contents 117 | of the NOTICE file are for informational purposes only and 118 | do not modify the License. You may add Your own attribution 119 | notices within Derivative Works that You distribute, alongside 120 | or as an addendum to the NOTICE text from the Work, provided 121 | that such additional attribution notices cannot be construed 122 | as modifying the License. 123 | 124 | You may add Your own copyright statement to Your modifications and 125 | may provide additional or different license terms and conditions 126 | for use, reproduction, or distribution of Your modifications, or 127 | for any such Derivative Works as a whole, provided Your use, 128 | reproduction, and distribution of the Work otherwise complies with 129 | the conditions stated in this License. 130 | 131 | 5. Submission of Contributions. Unless You explicitly state otherwise, 132 | any Contribution intentionally submitted for inclusion in the Work 133 | by You to the Licensor shall be under the terms and conditions of 134 | this License, without any additional terms or conditions. 135 | Notwithstanding the above, nothing herein shall supersede or modify 136 | the terms of any separate license agreement you may have executed 137 | with Licensor regarding such Contributions. 138 | 139 | 6. Trademarks. This License does not grant permission to use the trade 140 | names, trademarks, service marks, or product names of the Licensor, 141 | except as required for reasonable and customary use in describing the 142 | origin of the Work and reproducing the content of the NOTICE file. 143 | 144 | 7. Disclaimer of Warranty. Unless required by applicable law or 145 | agreed to in writing, Licensor provides the Work (and each 146 | Contributor provides its Contributions) on an "AS IS" BASIS, 147 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 148 | implied, including, without limitation, any warranties or conditions 149 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 150 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 151 | appropriateness of using or redistributing the Work and assume any 152 | risks associated with Your exercise of permissions under this License. 153 | 154 | 8. Limitation of Liability. In no event and under no legal theory, 155 | whether in tort (including negligence), contract, or otherwise, 156 | unless required by applicable law (such as deliberate and grossly 157 | negligent acts) or agreed to in writing, shall any Contributor be 158 | liable to You for damages, including any direct, indirect, special, 159 | incidental, or consequential damages of any character arising as a 160 | result of this License or out of the use or inability to use the 161 | Work (including but not limited to damages for loss of goodwill, 162 | work stoppage, computer failure or malfunction, or any and all 163 | other commercial damages or losses), even if such Contributor 164 | has been advised of the possibility of such damages. 165 | 166 | 9. Accepting Warranty or Additional Liability. While redistributing 167 | the Work or Derivative Works thereof, You may choose to offer, 168 | and charge a fee for, acceptance of support, warranty, indemnity, 169 | or other liability obligations and/or rights consistent with this 170 | License. However, in accepting such obligations, You may act only 171 | on Your own behalf and on Your sole responsibility, not on behalf 172 | of any other Contributor, and only if You agree to indemnify, 173 | defend, and hold each Contributor harmless for any liability 174 | incurred by, or claims asserted against, such Contributor by reason 175 | of your accepting any such warranty or additional liability. 176 | 177 | END OF TERMS AND CONDITIONS 178 | 179 | APPENDIX: How to apply the Apache License to your work. 180 | 181 | To apply the Apache License to your work, attach the following 182 | boilerplate notice, with the fields enclosed by brackets "[]" 183 | replaced with your own identifying information. (Don't include 184 | the brackets!) The text should be enclosed in the appropriate 185 | comment syntax for the file format. We also recommend that a 186 | file or class name and description of purpose be included on the 187 | same "printed page" as the copyright notice for easier 188 | identification within third-party archives. 189 | 190 | Copyright [yyyy] [name of copyright owner] 191 | 192 | Licensed under the Apache License, Version 2.0 (the "License"); 193 | you may not use this file except in compliance with the License. 194 | You may obtain a copy of the License at 195 | 196 | http://www.apache.org/licenses/LICENSE-2.0 197 | 198 | Unless required by applicable law or agreed to in writing, software 199 | distributed under the License is distributed on an "AS IS" BASIS, 200 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 201 | See the License for the specific language governing permissions and 202 | limitations under the License. 
203 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # DEPRECATED: moved into https://github.com/beeper/linkedin 2 | 3 | # LinkedIn Messaging API 4 | 5 | [![Python](https://github.com/sumnerevans/linkedin-messaging-api/actions/workflows/python.yaml/badge.svg)](https://github.com/sumnerevans/linkedin-messaging-api/actions/workflows/python.yaml) 6 | [![Matrix Chat](https://img.shields.io/matrix/linkedin-matrix:nevarro.space?server_fqdn=matrix.nevarro.space)](https://matrix.to/#/#linkedin-matrix:nevarro.space?via=nevarro.space&via=sumnerevans.com) 7 | 8 | An unofficial API for interacting with LinkedIn Messaging. 9 | 10 | Built using [aiohttp](https://docs.aiohttp.org/en/stable/). 11 | 12 | ## Documentation 13 | 14 | See [`examples` directory](./examples). 15 | 16 | ## Credits 17 | 18 | Inspired by [linkedin-api](https://github.com/tomquirk/linkedin-api). 19 | 20 | Authentication technique from [@everping](https://github.com/everping) in the 21 | [Linkedin-Authentication-Challenge](https://github.com/everping/Linkedin-Authentication-Challenge) 22 | repo. Used with permission. 23 | -------------------------------------------------------------------------------- /cicd/custom_style_check.py: -------------------------------------------------------------------------------- 1 | #! /usr/bin/env python 2 | 3 | """ 4 | Checks for TODO comments and makes sure they have an associated issue. Formats that are 5 | accepted are: 6 | 7 | TODO (#1) 8 | TODO (#1) 9 | TODO (project#1) 10 | TODO (namespace/project#1) 11 | TODO (namespace/namespace/project#1) 12 | 13 | Additionally, the TODO can be postfixed with ``:``. 14 | """ 15 | 16 | import re 17 | import sys 18 | from pathlib import Path 19 | from typing import Pattern 20 | 21 | from termcolor import cprint 22 | 23 | todo_re = re.compile(r"\s*#\s*TODO:?\s*") 24 | accounted_for_todo = re.compile(r"\s*#\s*TODO:?\s*\(([\w-]+(/[\w-]+)*)?#\d+\)") 25 | 26 | 27 | def noqa_re(error_id: str = "") -> Pattern: 28 | return re.compile(rf"#\s*noqa(:\s*{error_id})?\s*\n$") 29 | 30 | 31 | def eprint(*strings: str): 32 | cprint(" ".join(strings), "red", end="", attrs=["bold"]) 33 | 34 | 35 | def check_file(path: Path) -> bool: 36 | print(f"Checking {path.absolute()}...") # noqa: T001 37 | file = path.open() 38 | valid = True 39 | 40 | for i, line in enumerate(file, start=1): 41 | if todo_re.match(line) and not accounted_for_todo.match(line): 42 | eprint(f"{i}: {line}") 43 | valid = False 44 | 45 | file.close() 46 | return valid 47 | 48 | 49 | valid = True 50 | for path in sys.argv[1:]: 51 | if path.endswith(".py"): 52 | valid &= check_file(Path(path).absolute()) 53 | 54 | if "CHANGELOG.md" in sys.argv: 55 | """ 56 | Checks that the version in the CHANGELOG is the same as the version in ``__init__.py``. 
57 | """ 58 | print("Checking version in CHANGELOG is the same as version in __init__") # noqa: T001 59 | with open(Path("linkedin_messaging/__init__.py")) as f: 60 | for line in f: 61 | if line.startswith("__version__"): 62 | version = eval(line.split()[-1]) 63 | break 64 | else: # nobreak 65 | raise AssertionError("No version in linkedin_messaging/__init__.py") 66 | 67 | with open(Path("CHANGELOG.md")) as f: 68 | assert f.readline().strip() == f"# v{version}", "Version mismatch: CHANGELOG" 69 | 70 | sys.exit(0 if valid else 1) 71 | -------------------------------------------------------------------------------- /dev-requirements.txt: -------------------------------------------------------------------------------- 1 | # 2 | # This file is autogenerated by pip-compile with Python 3.9 3 | # by the following command: 4 | # 5 | # pip-compile --extra=dev --output-file=dev-requirements.txt pyproject.toml 6 | # 7 | aiohttp==3.9.1 8 | # via 9 | # black 10 | # linkedin-messaging (pyproject.toml) 11 | aiosignal==1.3.1 12 | # via aiohttp 13 | async-timeout==4.0.3 14 | # via aiohttp 15 | attrs==23.1.0 16 | # via 17 | # aiohttp 18 | # flake8-annotations 19 | # flake8-bugbear 20 | beautifulsoup4==4.12.2 21 | # via linkedin-messaging (pyproject.toml) 22 | black==23.12.0 23 | # via linkedin-messaging (pyproject.toml) 24 | build==1.0.3 25 | # via pip-tools 26 | certifi==2023.11.17 27 | # via requests 28 | cfgv==3.4.0 29 | # via pre-commit 30 | charset-normalizer==3.3.2 31 | # via requests 32 | click==8.1.7 33 | # via 34 | # black 35 | # pip-tools 36 | coverage[toml]==7.3.2 37 | # via 38 | # coverage 39 | # pytest-cov 40 | dataclasses-json==0.6.3 41 | # via linkedin-messaging (pyproject.toml) 42 | distlib==0.3.8 43 | # via virtualenv 44 | docutils==0.20.1 45 | # via flit 46 | exceptiongroup==1.2.0 47 | # via pytest 48 | filelock==3.13.1 49 | # via virtualenv 50 | flake8==6.1.0 51 | # via 52 | # flake8-annotations 53 | # flake8-bugbear 54 | # flake8-comprehensions 55 | # flake8-isort 56 | # flake8-pep3101 57 | # flake8-print 58 | # linkedin-messaging (pyproject.toml) 59 | flake8-annotations==3.0.1 60 | # via linkedin-messaging (pyproject.toml) 61 | flake8-bugbear==23.12.2 62 | # via linkedin-messaging (pyproject.toml) 63 | flake8-comprehensions==3.14.0 64 | # via linkedin-messaging (pyproject.toml) 65 | flake8-isort==6.1.1 66 | # via linkedin-messaging (pyproject.toml) 67 | flake8-pep3101==2.1.0 68 | # via linkedin-messaging (pyproject.toml) 69 | flake8-print==5.0.0 70 | # via linkedin-messaging (pyproject.toml) 71 | flit==3.9.0 72 | # via linkedin-messaging (pyproject.toml) 73 | flit-core==3.9.0 74 | # via flit 75 | frozenlist==1.4.0 76 | # via 77 | # aiohttp 78 | # aiosignal 79 | identify==2.5.33 80 | # via pre-commit 81 | idna==3.6 82 | # via 83 | # requests 84 | # yarl 85 | importlib-metadata==7.0.0 86 | # via build 87 | iniconfig==2.0.0 88 | # via pytest 89 | isort==5.13.1 90 | # via 91 | # flake8-isort 92 | # linkedin-messaging (pyproject.toml) 93 | marshmallow==3.20.1 94 | # via dataclasses-json 95 | mccabe==0.7.0 96 | # via flake8 97 | multidict==6.0.4 98 | # via 99 | # aiohttp 100 | # yarl 101 | mypy==1.7.1 102 | # via linkedin-messaging (pyproject.toml) 103 | mypy-extensions==1.0.0 104 | # via 105 | # black 106 | # mypy 107 | # typing-inspect 108 | nodeenv==1.8.0 109 | # via pre-commit 110 | packaging==23.2 111 | # via 112 | # black 113 | # build 114 | # marshmallow 115 | # pytest 116 | pathspec==0.12.1 117 | # via black 118 | pip-tools==7.3.0 119 | # via linkedin-messaging (pyproject.toml) 120 | 
platformdirs==4.1.0 121 | # via 122 | # black 123 | # virtualenv 124 | pluggy==1.3.0 125 | # via pytest 126 | pre-commit==3.6.0 127 | # via linkedin-messaging (pyproject.toml) 128 | pycodestyle==2.11.1 129 | # via 130 | # flake8 131 | # flake8-print 132 | pyflakes==3.1.0 133 | # via flake8 134 | pyproject-hooks==1.0.0 135 | # via build 136 | pytest==7.4.3 137 | # via 138 | # linkedin-messaging (pyproject.toml) 139 | # pytest-cov 140 | pytest-cov==4.1.0 141 | # via linkedin-messaging (pyproject.toml) 142 | pyyaml==6.0.1 143 | # via pre-commit 144 | requests==2.31.0 145 | # via flit 146 | soupsieve==2.5 147 | # via beautifulsoup4 148 | termcolor==2.4.0 149 | # via linkedin-messaging (pyproject.toml) 150 | tomli==2.0.1 151 | # via 152 | # black 153 | # build 154 | # coverage 155 | # mypy 156 | # pip-tools 157 | # pyproject-hooks 158 | # pytest 159 | tomli-w==1.0.0 160 | # via flit 161 | types-termcolor==1.1.6.2 162 | # via linkedin-messaging (pyproject.toml) 163 | typing-extensions==4.9.0 164 | # via 165 | # black 166 | # mypy 167 | # typing-inspect 168 | typing-inspect==0.9.0 169 | # via dataclasses-json 170 | urllib3==2.1.0 171 | # via requests 172 | virtualenv==20.25.0 173 | # via pre-commit 174 | wheel==0.42.0 175 | # via pip-tools 176 | yarl==1.9.4 177 | # via aiohttp 178 | zipp==3.17.0 179 | # via importlib-metadata 180 | 181 | # The following packages are considered to be unsafe in a requirements file: 182 | # pip 183 | # setuptools 184 | -------------------------------------------------------------------------------- /examples/get-conversation.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | from pathlib import Path 3 | 4 | from linkedin_messaging import ChallengeException, LinkedInMessaging 5 | from linkedin_messaging.api_objects import URN 6 | 7 | cookie_path = Path(__file__).parent.joinpath("cookies.pickle") 8 | urn = URN("urn:li:fs_conversation:2-YWM2YTJmYjUtNTdjMS00ZjlmLTgwMDUtOWYxMmMxNjY4M2FlXzAxMg==") 9 | 10 | 11 | async def main(): 12 | linkedin = LinkedInMessaging() 13 | if cookie_path.exists(): 14 | with open(cookie_path, "rb") as cf: 15 | linkedin = LinkedInMessaging.from_pickle(cf.read()) 16 | 17 | if not await linkedin.logged_in(): 18 | try: 19 | await linkedin.login("EMAIL", "PASSWORD") 20 | except ChallengeException: 21 | await linkedin.enter_2fa(input("2fa code: ")) 22 | 23 | with open(cookie_path, "wb+") as cf: 24 | cf.write(linkedin.to_pickle()) 25 | 26 | try: 27 | # Get a list of all of the conversations for the given user. 28 | conversations = await linkedin.get_conversations() 29 | for c in conversations.elements: 30 | print(c) 31 | 32 | # Get a specific conversation by URN. 
33 | convo_resp = await linkedin.get_conversation(urn) 34 | for element in convo_resp.elements: 35 | print(element) 36 | 37 | finally: 38 | await linkedin.close() 39 | 40 | 41 | loop = asyncio.get_event_loop() 42 | loop.run_until_complete(main()) 43 | -------------------------------------------------------------------------------- /examples/listen-for-events.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import logging 3 | from pathlib import Path 4 | 5 | from linkedin_messaging import ChallengeException, LinkedInMessaging 6 | from linkedin_messaging.api_objects import RealTimeEventStreamEvent 7 | 8 | cookie_path = Path(__file__).parent.joinpath("cookies.pickle") 9 | 10 | 11 | async def main(): 12 | logging.basicConfig() 13 | logging.getLogger().setLevel(logging.DEBUG) 14 | 15 | linkedin = LinkedInMessaging() 16 | if cookie_path.exists(): 17 | with open(cookie_path, "rb") as cf: 18 | linkedin = LinkedInMessaging.from_pickle(cf.read()) 19 | 20 | if not await linkedin.logged_in(): 21 | try: 22 | await linkedin.login("EMAIL", "PASSWORD") 23 | except ChallengeException: 24 | await linkedin.enter_2fa(input("2fa code: ")) 25 | 26 | with open(cookie_path, "wb+") as cf: 27 | cf.write(linkedin.to_pickle()) 28 | 29 | async def on_event(event: RealTimeEventStreamEvent): 30 | print("MESSAGE") 31 | if (e := event.event) and (ec := e.event_content) and (me := ec.message_event): 32 | print("REDACTION?", me.recalled_at) 33 | print(event) 34 | 35 | async def on_reaction(event: RealTimeEventStreamEvent): 36 | print("REACTION") 37 | print(event) 38 | assert event.event_urn 39 | assert event.reaction_summary 40 | print(await linkedin.get_reactors(event.event_urn, event.reaction_summary.emoji)) 41 | 42 | async def all_events(event): 43 | print("ALL EVENTS") 44 | print(event) 45 | 46 | linkedin.add_event_listener("event", on_event) 47 | linkedin.add_event_listener("reactionSummary", on_reaction) 48 | linkedin.add_event_listener("ALL_EVENTS", all_events) 49 | 50 | task = asyncio.create_task(linkedin.start_listener()) 51 | 52 | # wait basically forever 53 | await asyncio.sleep(2**128) 54 | 55 | asyncio.gather(task) 56 | 57 | await linkedin.close() 58 | 59 | 60 | loop = asyncio.get_event_loop() 61 | loop.run_until_complete(main()) 62 | -------------------------------------------------------------------------------- /examples/manual-login.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | 3 | from linkedin_messaging import LinkedInMessaging 4 | from linkedin_messaging.api_objects import URN 5 | 6 | urn = URN("urn:li:fs_conversation:2-YWM2YTJmYjUtNTdjMS00ZjlmLTgwMDUtOWYxMmMxNjY4M2FlXzAxMg==") 7 | 8 | 9 | async def main(): 10 | linkedin = LinkedInMessaging() 11 | await linkedin.login_manual( 12 | "FOO", 13 | "ajax:1234567890", 14 | ) 15 | 16 | assert await linkedin.logged_in() 17 | 18 | try: 19 | # Get a list of all of the conversations for the given user. 20 | conversations = await linkedin.get_conversations() 21 | for c in conversations.elements: 22 | print(c) 23 | 24 | # Get a specific conversation by URN. 
25 | convo_resp = await linkedin.get_conversation(urn) 26 | for element in convo_resp.elements: 27 | print(element) 28 | 29 | finally: 30 | await linkedin.close() 31 | 32 | 33 | loop = asyncio.get_event_loop() 34 | loop.run_until_complete(main()) 35 | -------------------------------------------------------------------------------- /examples/mark-conversation-as-read.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | from pathlib import Path 3 | 4 | from linkedin_messaging import ChallengeException, LinkedInMessaging 5 | from linkedin_messaging.api_objects import URN 6 | 7 | cookie_path = Path(__file__).parent.joinpath("cookies.pickle") 8 | urn = URN("2-YWM2YTJmYjUtNTdjMS00ZjlmLTgwMDUtOWYxMmMxNjY4M2FlXzAxMg==") 9 | 10 | 11 | async def main(): 12 | linkedin = LinkedInMessaging() 13 | if cookie_path.exists(): 14 | with open(cookie_path, "rb") as cf: 15 | linkedin = LinkedInMessaging.from_pickle(cf.read()) 16 | 17 | if not await linkedin.logged_in(): 18 | try: 19 | await linkedin.login("EMAIL", "PASSWORD") 20 | except ChallengeException: 21 | await linkedin.enter_2fa(input("2fa code: ")) 22 | 23 | with open(cookie_path, "wb+") as cf: 24 | cf.write(linkedin.to_pickle()) 25 | 26 | try: 27 | print(await linkedin.mark_conversation_as_read(urn)) 28 | finally: 29 | await linkedin.close() 30 | 31 | 32 | loop = asyncio.get_event_loop() 33 | loop.run_until_complete(main()) 34 | -------------------------------------------------------------------------------- /examples/pickle-example.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | from pathlib import Path 3 | 4 | from linkedin_messaging import ChallengeException, LinkedInMessaging 5 | 6 | cookie_path = Path(__file__).parent.joinpath("cookies.pickle") 7 | 8 | 9 | async def main(): 10 | linkedin = LinkedInMessaging() 11 | if cookie_path.exists(): 12 | with open(cookie_path, "rb") as cf: 13 | linkedin = LinkedInMessaging.from_pickle(cf.read()) 14 | 15 | if not await linkedin.logged_in(): 16 | try: 17 | await linkedin.login("EMAIL", "PASSWORD") 18 | except ChallengeException: 19 | await linkedin.enter_2fa(input("2fa code: ")) 20 | 21 | with open(cookie_path, "wb+") as cf: 22 | cf.write(linkedin.to_pickle()) 23 | 24 | print(await linkedin.get_user_profile()) 25 | 26 | await linkedin.logout() 27 | await linkedin.close() 28 | 29 | 30 | loop = asyncio.get_event_loop() 31 | loop.run_until_complete(main()) 32 | -------------------------------------------------------------------------------- /examples/redact-message.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import logging 3 | from pathlib import Path 4 | 5 | from linkedin_messaging import ChallengeException, LinkedInMessaging 6 | from linkedin_messaging.api_objects import URN, AttributedBody, MessageCreate 7 | 8 | cookie_path = Path(__file__).parent.joinpath("cookies.pickle") 9 | urn = URN("2-YWM2YTJmYjUtNTdjMS00ZjlmLTgwMDUtOWYxMmMxNjY4M2FlXzAxMg==") 10 | 11 | 12 | async def main(): 13 | logging.basicConfig() 14 | logging.getLogger().setLevel(logging.DEBUG) 15 | 16 | linkedin = LinkedInMessaging() 17 | if cookie_path.exists(): 18 | with open(cookie_path, "rb") as cf: 19 | linkedin = LinkedInMessaging.from_pickle(cf.read()) 20 | 21 | if not await linkedin.logged_in(): 22 | try: 23 | await linkedin.login("EMAIL", "PASSWORD") 24 | except ChallengeException: 25 | await linkedin.enter_2fa(input("2fa code: ")) 26 | 27 | with 
open(cookie_path, "wb+") as cf: 28 | cf.write(linkedin.to_pickle()) 29 | 30 | # Send a simple message that has some text. 31 | mc = MessageCreate(AttributedBody("test")) 32 | message = await linkedin.send_message(urn, mc) 33 | 34 | await asyncio.sleep(5) 35 | 36 | # Now, delete it 37 | print( 38 | await linkedin.delete_message( 39 | message.value.conversation_urn, 40 | message.value.event_urn, 41 | ) 42 | ) 43 | await linkedin.close() 44 | 45 | 46 | loop = asyncio.get_event_loop() 47 | loop.run_until_complete(main()) 48 | -------------------------------------------------------------------------------- /examples/send-message.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import logging 3 | from pathlib import Path 4 | 5 | from linkedin_messaging import ChallengeException, LinkedInMessaging 6 | from linkedin_messaging.api_objects import URN, AttributedBody, MessageCreate 7 | 8 | cookie_path = Path(__file__).parent.joinpath("cookies.pickle") 9 | urn = URN("urn:li:fs_conversation:2-OTNkODIyYTEtODFjZS00NTdlLThlYTItYWQyMDg2NTc4YWMyXzAxMA==") 10 | 11 | 12 | async def main(): 13 | logging.basicConfig() 14 | logging.getLogger().setLevel(logging.DEBUG) 15 | 16 | linkedin = LinkedInMessaging() 17 | if cookie_path.exists(): 18 | with open(cookie_path, "rb") as cf: 19 | linkedin = LinkedInMessaging.from_pickle(cf.read()) 20 | 21 | if not await linkedin.logged_in(): 22 | try: 23 | await linkedin.login("EMAIL", "PASSWORD") 24 | except ChallengeException: 25 | await linkedin.enter_2fa(input("2fa code: ")) 26 | 27 | with open(cookie_path, "wb+") as cf: 28 | cf.write(linkedin.to_pickle()) 29 | 30 | # Send a simple message that has some text. 31 | mc = MessageCreate(AttributedBody("test")) 32 | print(await linkedin.send_message(urn, mc)) 33 | 34 | # Send a multimedia message. 35 | with open("/path/to/the/cool-pic.jpg", "rb") as f: 36 | attachment = await linkedin.upload_media(f.read(), "cool-pic.jpg", "image/jpeg") 37 | 38 | mc = MessageCreate(AttributedBody(), attachments=[attachment]) 39 | print(await linkedin.send_message(urn, mc)) 40 | 41 | await linkedin.logout() 42 | await linkedin.close() 43 | 44 | 45 | loop = asyncio.get_event_loop() 46 | loop.run_until_complete(main()) 47 | -------------------------------------------------------------------------------- /examples/send-reaction.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import logging 3 | from pathlib import Path 4 | 5 | from linkedin_messaging import ChallengeException, LinkedInMessaging 6 | from linkedin_messaging.api_objects import URN, AttributedBody, MessageCreate 7 | 8 | cookie_path = Path(__file__).parent.joinpath("cookies.pickle") 9 | urn = URN("2-YWM2YTJmYjUtNTdjMS00ZjlmLTgwMDUtOWYxMmMxNjY4M2FlXzAxMg==") 10 | 11 | 12 | async def main(): 13 | logging.basicConfig() 14 | logging.getLogger().setLevel(logging.DEBUG) 15 | 16 | linkedin = LinkedInMessaging() 17 | if cookie_path.exists(): 18 | with open(cookie_path, "rb") as cf: 19 | linkedin = LinkedInMessaging.from_pickle(cf.read()) 20 | 21 | if not await linkedin.logged_in(): 22 | try: 23 | await linkedin.login("EMAIL", "PASSWORD") 24 | except ChallengeException: 25 | await linkedin.enter_2fa(input("2fa code: ")) 26 | 27 | with open(cookie_path, "wb+") as cf: 28 | cf.write(linkedin.to_pickle()) 29 | 30 | # Send a simple message that has some text. 
31 | mc = MessageCreate(AttributedBody("test")) 32 | message = await linkedin.send_message(urn, mc) 33 | 34 | # Adding reactions 35 | print( 36 | await linkedin.add_emoji_reaction( 37 | message.value.conversation_urn, 38 | message.value.event_urn, 39 | "😃", 40 | ) 41 | ) 42 | print( 43 | await linkedin.add_emoji_reaction( 44 | message.value.conversation_urn, 45 | message.value.event_urn, 46 | "🤑", 47 | ) 48 | ) 49 | 50 | await asyncio.sleep(5) 51 | 52 | # Remove one of them 53 | print( 54 | await linkedin.remove_emoji_reaction( 55 | message.value.conversation_urn, 56 | message.value.event_urn, 57 | "😃", 58 | ) 59 | ) 60 | 61 | await linkedin.close() 62 | 63 | 64 | loop = asyncio.get_event_loop() 65 | loop.run_until_complete(main()) 66 | -------------------------------------------------------------------------------- /examples/simple-2fa.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | from pathlib import Path 3 | 4 | from linkedin_messaging import ChallengeException, LinkedInMessaging 5 | 6 | cookie_path = Path(__file__).parent.joinpath("cookies.pickle") 7 | 8 | 9 | async def main(): 10 | linkedin = LinkedInMessaging() 11 | 12 | try: 13 | await linkedin.login("EMAIL", "PASSWORD") 14 | except ChallengeException: 15 | await linkedin.enter_2fa(input("2fa code: ")) 16 | 17 | print(await linkedin.get_user_profile()) 18 | 19 | await linkedin.logout() 20 | await linkedin.close() 21 | 22 | 23 | loop = asyncio.get_event_loop() 24 | loop.run_until_complete(main()) 25 | -------------------------------------------------------------------------------- /linkedin_messaging/__init__.py: -------------------------------------------------------------------------------- 1 | """An unofficial API for interacting with LinkedIn Messaging""" 2 | 3 | from .api_objects import URN 4 | from .linkedin import ChallengeException, LinkedInMessaging 5 | 6 | __title__ = "linkedin_messaging" 7 | __version__ = "0.6.0" 8 | __description__ = "An unofficial API for interacting with LinkedIn Messaging" 9 | 10 | __license__ = "Apache License 2.0" 11 | 12 | __author__ = "Sumner Evans" 13 | __email__ = "sumner@beeper.com" 14 | 15 | __all__ = ("ChallengeException", "LinkedInMessaging", "URN") 16 | -------------------------------------------------------------------------------- /linkedin_messaging/api_objects.py: -------------------------------------------------------------------------------- 1 | from dataclasses import dataclass, field 2 | from datetime import datetime 3 | from typing import Any, Callable, Optional, Union 4 | 5 | import dataclasses_json 6 | from dataclasses_json import DataClassJsonMixin, LetterCase, Undefined, config, dataclass_json 7 | 8 | 9 | class URN: 10 | def __init__(self, urn_str: str): 11 | urn_parts = urn_str.split(":") 12 | self.prefix = ":".join(urn_parts[:-1]) 13 | self.id_parts = urn_parts[-1].strip("()").split(",") 14 | 15 | def get_id(self) -> str: 16 | assert len(self.id_parts) == 1 17 | return self.id_parts[0] 18 | 19 | def id_str(self) -> str: 20 | return ",".join(self.id_parts) 21 | 22 | def __str__(self) -> str: 23 | return "{}:{}".format( 24 | self.prefix, 25 | (self.id_parts[0] if len(self.id_parts) == 1 else f"({self.id_str()})"), 26 | ) 27 | 28 | def __hash__(self) -> int: 29 | return hash(self.id_str()) 30 | 31 | def __eq__(self, other: Any) -> bool: 32 | if not isinstance(other, URN): 33 | return False 34 | return self.id_parts == other.id_parts 35 | 36 | def __repr__(self) -> str: 37 | return f"URN('{str(self)}')" 38 | 39 | 40 | # 
Use milliseconds instead of seconds from the UNIX epoch. 41 | decoder_functions = { 42 | datetime: (lambda s: datetime.utcfromtimestamp(int(s) / 1000) if s else None), 43 | URN: (lambda s: URN(s) if s else None), 44 | } 45 | encoder_functions: dict[Any, Callable[[Any], Any]] = { 46 | datetime: (lambda d: int(d.timestamp() * 1000) if d else None), 47 | URN: (lambda u: str(u) if u else None), 48 | } 49 | 50 | for type_, translation_function in decoder_functions.items(): 51 | dataclasses_json.cfg.global_config.decoders[type_] = translation_function 52 | dataclasses_json.cfg.global_config.decoders[ 53 | Optional[type_] # type: ignore 54 | ] = translation_function 55 | 56 | for type_, translation_function in encoder_functions.items(): 57 | dataclasses_json.cfg.global_config.encoders[type_] = translation_function 58 | dataclasses_json.cfg.global_config.encoders[ 59 | Optional[type_] # type: ignore 60 | ] = translation_function 61 | 62 | 63 | @dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) 64 | @dataclass 65 | class Artifact: 66 | height: int = -1 67 | width: int = -1 68 | file_identifying_url_path_segment: str = "" 69 | expires_at: Optional[datetime] = None 70 | 71 | 72 | @dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) 73 | @dataclass 74 | class VectorImage: 75 | artifacts: list[Artifact] = field(default_factory=list) 76 | root_url: str = "" 77 | 78 | 79 | @dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) 80 | @dataclass 81 | class Picture: 82 | vector_image: Optional[VectorImage] = field( 83 | metadata=config(field_name="com.linkedin.common.VectorImage"), 84 | default=None, 85 | ) 86 | 87 | 88 | @dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) 89 | @dataclass 90 | class MiniProfile: 91 | entity_urn: Optional[URN] = None 92 | public_identifier: Optional[str] = None 93 | first_name: Optional[str] = None 94 | last_name: Optional[str] = None 95 | occupation: Optional[str] = None 96 | memorialized: bool = False 97 | object_urn: Optional[URN] = None 98 | picture: Optional[Picture] = None 99 | 100 | 101 | @dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) 102 | @dataclass 103 | class MessagingMember: 104 | entity_urn: Optional[URN] = None 105 | mini_profile: Optional[MiniProfile] = None 106 | alternate_name: Optional[str] = None 107 | alternate_image: Optional[Picture] = None 108 | 109 | 110 | @dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) 111 | @dataclass 112 | class Paging: 113 | count: int = 0 114 | start: int = 0 115 | links: list[Any] = field(default_factory=list) 116 | 117 | 118 | @dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) 119 | @dataclass 120 | class TextEntity: 121 | urn: Optional[URN] = None 122 | 123 | 124 | @dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) 125 | @dataclass 126 | class AttributeType: 127 | text_entity: Optional[TextEntity] = field( 128 | metadata=config(field_name="com.linkedin.pemberly.text.Entity"), default=None 129 | ) 130 | 131 | 132 | @dataclass_json 133 | @dataclass 134 | class Attribute: 135 | start: int = 0 136 | length: int = 0 137 | type_: Optional[AttributeType] = field(metadata=config(field_name="type"), default=None) 138 | 139 | 140 | @dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) 141 | @dataclass 142 | class AttributedBody: 143 | text: str = "" 144 | attributes: list[Attribute] = field(default_factory=list) 145 | 146 | 147 
| @dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) 148 | @dataclass 149 | class MessageAttachmentCreate: 150 | byte_size: int = 0 151 | id_: Optional[URN] = field(metadata=config(field_name="id"), default=None) 152 | media_type: str = "" 153 | name: str = "" 154 | 155 | 156 | @dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) 157 | @dataclass 158 | class MessageAttachmentReference: 159 | string: str = "" 160 | 161 | 162 | @dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) 163 | @dataclass 164 | class MessageAttachment: 165 | id_: Optional[URN] = field(metadata=config(field_name="id"), default=None) 166 | byte_size: int = 0 167 | media_type: str = "" 168 | name: str = "" 169 | reference: Optional[MessageAttachmentReference] = None 170 | 171 | 172 | @dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) 173 | @dataclass 174 | class AudioMetadata: 175 | urn: Optional[URN] 176 | duration: int = 0 177 | url: str = "" 178 | 179 | 180 | @dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) 181 | @dataclass 182 | class MediaAttachment: 183 | media_type: str = "" 184 | audio_metadata: Optional[AudioMetadata] = None 185 | 186 | 187 | @dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) 188 | @dataclass 189 | class GifInfo: 190 | original_height: int = 0 191 | original_width: int = 0 192 | url: str = "" 193 | 194 | 195 | @dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) 196 | @dataclass 197 | class ThirdPartyMediaInfo: 198 | previewgif: Optional[GifInfo] = None 199 | nanogif: Optional[GifInfo] = None 200 | gif: Optional[GifInfo] = None 201 | 202 | 203 | @dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) 204 | @dataclass 205 | class ThirdPartyMedia: 206 | media_type: str = "" 207 | id_: str = field(metadata=config(field_name="id"), default="") 208 | media: Optional[ThirdPartyMediaInfo] = None 209 | title: str = "" 210 | 211 | 212 | @dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) 213 | @dataclass 214 | class LegalText: 215 | static_legal_text: str = "" 216 | custom_legal_text: str = "" 217 | 218 | 219 | @dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) 220 | @dataclass 221 | class SpInmailStandardSubContent: 222 | action: str = "" 223 | action_text: str = "" 224 | 225 | 226 | @dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) 227 | @dataclass 228 | class SpInmailSubContent: 229 | standard: Optional[SpInmailStandardSubContent] = field( 230 | metadata=config( 231 | field_name="com.linkedin.voyager.messaging.event.message.spinmail.SpInmailStandardSubContent" # noqa: E501 232 | ), 233 | default=None, 234 | ) 235 | 236 | 237 | @dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) 238 | @dataclass 239 | class SpInmailContent: 240 | status: str = "" 241 | sp_inmail_type: str = "" 242 | advertiser_label: str = "" 243 | body: str = "" 244 | legal_text: Optional[LegalText] = None 245 | sub_content: Optional[SpInmailSubContent] = None 246 | 247 | 248 | @dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) 249 | @dataclass 250 | class ConversationNameUpdateContent: 251 | new_name: str = "" 252 | 253 | 254 | @dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) 255 | @dataclass 256 | class MessageCustomContent: 257 | conversation_name_update_content: Optional[ConversationNameUpdateContent] = 
field( 258 | metadata=config( 259 | field_name="com.linkedin.voyager.messaging.event.message.ConversationNameUpdateContent" # noqa: E501 260 | ), 261 | default=None, 262 | ) 263 | sp_inmail_content: Optional[SpInmailContent] = field( 264 | metadata=config( 265 | field_name="com.linkedin.voyager.messaging.event.message.spinmail.SpInmailContent" # noqa: E501 266 | ), 267 | default=None, 268 | ) 269 | third_party_media: Optional[ThirdPartyMedia] = field( 270 | metadata=config(field_name="com.linkedin.voyager.messaging.shared.ThirdPartyMedia"), 271 | default=None, 272 | ) 273 | 274 | 275 | @dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) 276 | @dataclass 277 | class CommentaryText: 278 | text: str = "" 279 | 280 | 281 | @dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) 282 | @dataclass 283 | class Commentary: 284 | text: Optional[CommentaryText] 285 | 286 | 287 | @dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) 288 | @dataclass 289 | class NavigationContext: 290 | tracking_action_type: str = "" 291 | action_target: str = "" 292 | 293 | 294 | @dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) 295 | @dataclass 296 | class ArticleComponent: 297 | navigation_context: Optional[NavigationContext] = None 298 | 299 | 300 | @dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) 301 | @dataclass 302 | class ImageAttributes: 303 | vector_image: Optional[VectorImage] = None 304 | 305 | 306 | @dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) 307 | @dataclass 308 | class Image: 309 | attributes: list[ImageAttributes] = field(default_factory=list) 310 | 311 | 312 | @dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) 313 | @dataclass 314 | class ImageComponent: 315 | images: list[Image] = field(default_factory=list) 316 | 317 | 318 | @dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) 319 | @dataclass 320 | class Document: 321 | transcribed_document_url: str = "" 322 | 323 | 324 | @dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) 325 | @dataclass 326 | class DocumentComponent: 327 | document: Optional[Document] = None 328 | 329 | 330 | @dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) 331 | @dataclass 332 | class StreamLocations: 333 | url: str = "" 334 | expires_at: int = -1 335 | 336 | 337 | @dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) 338 | @dataclass 339 | class ProgressiveStreams: 340 | width: int = -1 341 | height: int = -1 342 | size: int = -1 343 | media_type: str = "" 344 | streaming_locations: list[StreamLocations] = field(default_factory=list) 345 | 346 | 347 | @dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) 348 | @dataclass 349 | class VideoPlayMetadata: 350 | progressive_streams: list[ProgressiveStreams] = field(default_factory=list) 351 | 352 | 353 | @dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) 354 | @dataclass 355 | class VideoComponent: 356 | video_play_metadata: Optional[VideoPlayMetadata] = None 357 | 358 | 359 | @dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) 360 | @dataclass 361 | class ArticleContent: 362 | image_component: Optional[ImageComponent] = field( 363 | metadata=config(field_name="com.linkedin.voyager.feed.render.ImageComponent"), 364 | default=None, 365 | ) 366 | video_component: Optional[VideoComponent] = field( 367 | 
metadata=config(field_name="com.linkedin.voyager.feed.render.LinkedInVideoComponent"), 368 | default=None, 369 | ) 370 | document_component: Optional[DocumentComponent] = field( 371 | metadata=config(field_name="com.linkedin.voyager.feed.render.DocumentComponent"), 372 | default=None, 373 | ) 374 | article_component: Optional[ArticleComponent] = field( 375 | metadata=config(field_name="com.linkedin.voyager.feed.render.ArticleComponent"), 376 | default=None, 377 | ) 378 | 379 | 380 | @dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) 381 | @dataclass 382 | class ActorName: 383 | text: str = "" 384 | 385 | 386 | @dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) 387 | @dataclass 388 | class Actor: 389 | name: Optional[ActorName] = None 390 | 391 | 392 | @dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) 393 | @dataclass 394 | class FeedUpdate: 395 | actor: Optional[Actor] = None 396 | commentary: Optional[Commentary] = None 397 | content: Optional[ArticleContent] = None 398 | 399 | 400 | @dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) 401 | @dataclass 402 | class MessageEvent: 403 | body: str = "" 404 | feed_update: Optional[FeedUpdate] = None 405 | message_body_render_format: str = "" 406 | subject: Optional[str] = None 407 | recalled_at: Optional[datetime] = None 408 | last_edited_at: Optional[datetime] = None 409 | attributed_body: Optional[AttributedBody] = None 410 | attachments: list[MessageAttachment] = field(default_factory=list) 411 | media_attachments: list[MediaAttachment] = field(default_factory=list) 412 | custom_content: Optional[MessageCustomContent] = None 413 | 414 | 415 | @dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) 416 | @dataclass 417 | class EventContent: 418 | message_event: Optional[MessageEvent] = field( 419 | metadata=config(field_name="com.linkedin.voyager.messaging.event.MessageEvent"), 420 | default=None, 421 | ) 422 | 423 | 424 | @dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) 425 | @dataclass 426 | class From: 427 | messaging_member: Optional[MessagingMember] = field( 428 | metadata=config(field_name="com.linkedin.voyager.messaging.MessagingMember"), 429 | default=None, 430 | ) 431 | 432 | 433 | @dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) 434 | @dataclass 435 | class ReactionSummary: 436 | count: int = 0 437 | first_reacted_at: Optional[datetime] = None 438 | emoji: str = "" 439 | viewer_reacted: bool = False 440 | 441 | 442 | @dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) 443 | @dataclass 444 | class ConversationEvent: 445 | created_at: Optional[datetime] = None 446 | entity_urn: Optional[URN] = None 447 | event_content: Optional[EventContent] = None 448 | subtype: str = "" 449 | from_: Optional[From] = field(metadata=config(field_name="from"), default=None) 450 | previous_event_in_conversation: Optional[URN] = None 451 | reaction_summaries: list[ReactionSummary] = field(default_factory=list) 452 | 453 | 454 | @dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) 455 | @dataclass 456 | class Participant: 457 | messaging_member: Optional[MessagingMember] = field( 458 | metadata=config(field_name="com.linkedin.voyager.messaging.MessagingMember"), 459 | default=None, 460 | ) 461 | 462 | 463 | @dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) 464 | @dataclass 465 | class Conversation: 466 | group_chat: bool = 
False 467 | total_event_count: int = 0 468 | unread_count: int = 0 469 | read: Optional[bool] = None 470 | last_activity_at: Optional[datetime] = None 471 | entity_urn: Optional[URN] = None 472 | name: str = "" 473 | muted: bool = False 474 | events: list[ConversationEvent] = field(default_factory=list) 475 | participants: list[Participant] = field(default_factory=list) 476 | 477 | 478 | @dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) 479 | @dataclass 480 | class ConversationsResponse(DataClassJsonMixin): 481 | elements: list[Conversation] = field(default_factory=list) 482 | paging: Optional[Paging] = None 483 | 484 | 485 | @dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) 486 | @dataclass 487 | class ConversationResponse(DataClassJsonMixin): 488 | elements: list[ConversationEvent] = field(default_factory=list) 489 | paging: Optional[Paging] = None 490 | 491 | 492 | @dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) 493 | @dataclass 494 | class MessageCreate(DataClassJsonMixin): 495 | attributed_body: Optional[AttributedBody] = None 496 | body: str = "" 497 | attachments: list[MessageAttachmentCreate] = field(default_factory=list) 498 | 499 | 500 | @dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) 501 | @dataclass 502 | class MessageCreatedInfo: 503 | created_at: Optional[datetime] = None 504 | event_urn: Optional[URN] = None 505 | backend_event_urn: Optional[URN] = None 506 | conversation_urn: Optional[URN] = None 507 | backend_conversation_urn: Optional[URN] = None 508 | 509 | 510 | @dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) 511 | @dataclass 512 | class SendMessageResponse(DataClassJsonMixin): 513 | value: Optional[MessageCreatedInfo] = None 514 | 515 | 516 | @dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) 517 | @dataclass 518 | class UserProfileResponse(DataClassJsonMixin): 519 | plain_id: str = "" 520 | mini_profile: Optional[MiniProfile] = None 521 | 522 | 523 | @dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) 524 | @dataclass 525 | class SeenReceipt: 526 | event_urn: URN 527 | seen_at: Optional[datetime] = None 528 | 529 | 530 | @dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) 531 | @dataclass 532 | class RealTimeEventStreamEvent(DataClassJsonMixin): 533 | # Action real-time events (marking as read for example) 534 | action: Optional[str] = None 535 | conversation: Optional[Union[Conversation, URN]] = None 536 | 537 | # Message real-time events 538 | previous_event_in_conversation: Optional[URN] = None 539 | event: Optional[ConversationEvent] = None 540 | 541 | # Reaction real-time events 542 | reaction_added: Optional[bool] = None 543 | actor_mini_profile_urn: Optional[URN] = None 544 | event_urn: Optional[URN] = None 545 | reaction_summary: Optional[ReactionSummary] = None 546 | 547 | # Seen Receipt real-time events 548 | from_entity: Optional[URN] = None 549 | seen_receipt: Optional[SeenReceipt] = None 550 | 551 | 552 | @dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) 553 | @dataclass 554 | class ReactorProfile: 555 | first_name: str = "" 556 | last_name: str = "" 557 | entity_urn: Optional[URN] = None 558 | 559 | 560 | @dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) 561 | @dataclass 562 | class Reactor: 563 | reactor_urn: Optional[URN] = None 564 | reactor: Optional[ReactorProfile] = None 565 | 566 | 567 | 
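# Decoding sketch (hypothetical values, not part of the API surface): the
# dataclass_json decorators above map LinkedIn's camelCase payload keys onto
# these snake_case fields, and Undefined.EXCLUDE drops any keys that are not
# modelled. For example:
#
#     profile = ReactorProfile.from_dict(
#         {"firstName": "Ada", "lastName": "Lovelace", "trackingId": "ignored"}
#     )
#     assert profile.first_name == "Ada"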
@dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) 568 | @dataclass 569 | class ReactorsResponse(DataClassJsonMixin): 570 | elements: list[Reactor] = field(default_factory=list) 571 | paging: Optional[Paging] = None 572 | 573 | 574 | @dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE) 575 | @dataclass 576 | class Error(DataClassJsonMixin, Exception): 577 | status: int = -1 578 | -------------------------------------------------------------------------------- /linkedin_messaging/exceptions.py: -------------------------------------------------------------------------------- 1 | class TooManyRequestsError(Exception): 2 | pass 3 | -------------------------------------------------------------------------------- /linkedin_messaging/linkedin.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import json 3 | import logging 4 | from collections import defaultdict 5 | from datetime import datetime 6 | from typing import Any, AsyncGenerator, Awaitable, Callable, Optional, TypeVar, Union, cast 7 | 8 | import aiohttp 9 | import aiohttp.client_exceptions 10 | from bs4 import BeautifulSoup 11 | from dataclasses_json.api import DataClassJsonMixin 12 | 13 | from .api_objects import ( 14 | URN, 15 | Conversation, 16 | ConversationResponse, 17 | ConversationsResponse, 18 | Error, 19 | MessageAttachmentCreate, 20 | MessageCreate, 21 | Picture, 22 | ReactorsResponse, 23 | RealTimeEventStreamEvent, 24 | SendMessageResponse, 25 | UserProfileResponse, 26 | ) 27 | from .exceptions import TooManyRequestsError 28 | 29 | REQUEST_HEADERS = { 30 | "user-agent": " ".join( 31 | [ 32 | "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_5)", 33 | "AppleWebKit/537.36 (KHTML, like Gecko)", 34 | "Chrome/83.0.4103.116 Safari/537.36", 35 | ] 36 | ), 37 | "accept-language": "en-AU,en-GB;q=0.9,en-US;q=0.8,en;q=0.7", 38 | "x-li-lang": "en_US", 39 | "x-restli-protocol-version": "2.0.0", 40 | "x-li-track": json.dumps( 41 | { 42 | "clientVersion": "1.13.8031", 43 | "mpVersion": "1.13.8031", 44 | "osName": "web", 45 | "timezoneOffset": 0, 46 | "timezone": "Etc/UTC", 47 | "deviceFormFactor": "DESKTOP", 48 | "mpName": "voyager-web", 49 | } 50 | ), 51 | } 52 | 53 | LINKEDIN_BASE_URL = "https://www.linkedin.com" 54 | LOGIN_URL = f"{LINKEDIN_BASE_URL}/checkpoint/lg/login-submit" 55 | LOGOUT_URL = f"{LINKEDIN_BASE_URL}/uas/logout" 56 | REALTIME_CONNECT_URL = f"{LINKEDIN_BASE_URL}/realtime/connect" 57 | VERIFY_URL = f"{LINKEDIN_BASE_URL}/checkpoint/challenge/verify" 58 | API_BASE_URL = f"{LINKEDIN_BASE_URL}/voyager/api" 59 | 60 | SEED_URL = f"{LINKEDIN_BASE_URL}/login" 61 | """ 62 | URL to seed all of the auth requests 63 | """ 64 | 65 | 66 | T = TypeVar("T", bound=DataClassJsonMixin) 67 | 68 | 69 | async def try_from_json(deserialise_to: T, response: aiohttp.ClientResponse) -> T: 70 | if response.status < 200 or 300 <= response.status: 71 | try: 72 | error = Error.from_json(await response.text()) 73 | except: 74 | raise Exception( 75 | f"Deserialising to {deserialise_to} failed because response " 76 | f"was {response.status}. Details: {await response.text()}" 77 | ) 78 | raise error 79 | 80 | text = await response.text() 81 | try: 82 | return deserialise_to.from_json(text) 83 | except (json.JSONDecodeError, ValueError) as e: 84 | try: 85 | error = Error.from_json(text) 86 | except: 87 | raise Exception( 88 | f"Deserialising to {deserialise_to} failed. Error: {e}. " f"Response: {text}." 
89 | ) 90 | raise error 91 | 92 | 93 | class ChallengeException(Exception): 94 | pass 95 | 96 | 97 | class LinkedInMessaging: 98 | session: aiohttp.ClientSession 99 | two_factor_payload: dict[str, Any] 100 | event_listeners: defaultdict[ 101 | str, 102 | list[ 103 | Union[ 104 | Callable[[RealTimeEventStreamEvent], Awaitable[None]], 105 | Callable[[asyncio.exceptions.TimeoutError], Awaitable[None]], 106 | Callable[[Exception], Awaitable[None]], 107 | ] 108 | ], 109 | ] 110 | 111 | def __init__(self): 112 | self.session = aiohttp.ClientSession() 113 | self.event_listeners = defaultdict(list) 114 | 115 | @staticmethod 116 | def from_cookies(li_at: str, jsessionid: str) -> "LinkedInMessaging": 117 | linkedin = LinkedInMessaging() 118 | linkedin.session.cookie_jar.update_cookies({"li_at": li_at, "JSESSIONID": jsessionid}) 119 | linkedin.session.headers["csrf-token"] = jsessionid 120 | return linkedin 121 | 122 | async def close(self): 123 | await self.session.close() 124 | 125 | async def _get(self, relative_url: str, **kwargs: Any) -> aiohttp.ClientResponse: 126 | return await self.session.get(API_BASE_URL + relative_url, **kwargs) 127 | 128 | async def _post(self, relative_url: str, **kwargs: Any) -> aiohttp.ClientResponse: 129 | return await self.session.post(API_BASE_URL + relative_url, **kwargs) 130 | 131 | # region Authentication 132 | 133 | @property 134 | def has_auth_cookies(self) -> bool: 135 | cookie_names = {c.key for c in self.session.cookie_jar} 136 | return "li_at" in cookie_names and "JSESSIONID" in cookie_names 137 | 138 | async def logged_in(self) -> bool: 139 | if not self.has_auth_cookies: 140 | return False 141 | try: 142 | return bool(await self.get_user_profile()) 143 | except Exception as e: 144 | logging.exception(f"Failed getting the user profile: {e}") 145 | return False 146 | 147 | async def login_manual(self, li_at: str, jsessionid: str, new_session: bool = True): 148 | if new_session: 149 | if self.session: 150 | await self.session.close() 151 | self.session = aiohttp.ClientSession() 152 | self.session.cookie_jar.update_cookies({"li_at": li_at, "JSESSIONID": jsessionid}) 153 | self.session.headers["csrf-token"] = jsessionid.strip('"') 154 | 155 | async def login(self, email: str, password: str, new_session: bool = True): 156 | if new_session: 157 | if self.session: 158 | await self.session.close() 159 | self.session = aiohttp.ClientSession() 160 | 161 | # Get the CSRF token. 162 | async with self.session.get(SEED_URL) as seed_response: 163 | if seed_response.status != 200: 164 | raise Exception("Couldn't open the CSRF seed page") 165 | 166 | soup = BeautifulSoup(await seed_response.text(), "html.parser") 167 | login_csrf_param = soup.find("input", {"name": "loginCsrfParam"})["value"] 168 | 169 | # Login with username and password 170 | async with self.session.post( 171 | LOGIN_URL, 172 | data={ 173 | "loginCsrfParam": login_csrf_param, 174 | "session_key": email, 175 | "session_password": password, 176 | }, 177 | ) as login_response: 178 | # Check to see if the user was successfully logged in with just email and 179 | # password. 180 | if self.has_auth_cookies: 181 | for c in self.session.cookie_jar: 182 | if c.key == "JSESSIONID": 183 | self.session.headers["csrf-token"] = c.value.strip('"') 184 | return 185 | 186 | # 2FA is required. Throw an exception. 
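            # The challenge form's input fields are scraped into
            # two_factor_payload below so that enter_2fa() can replay them,
            # together with the user's PIN, against VERIFY_URL.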
187 | soup = BeautifulSoup(await login_response.text(), "html.parser") 188 | 189 | # TODO (#1) better detection of 2FA vs bad password 190 | if soup.find("input", {"name": "challengeId"}): 191 | self.two_factor_payload = { 192 | k: soup.find("input", {"name": k})["value"] 193 | for k in ( 194 | "csrfToken", 195 | "pageInstance", 196 | "resendUrl", 197 | "challengeId", 198 | "displayTime", 199 | "challengeSource", 200 | "requestSubmissionId", 201 | "challengeType", 202 | "challengeData", 203 | "challengeDetails", 204 | "failureRedirectUri", 205 | "flowTreeId", 206 | ) 207 | } 208 | self.two_factor_payload["language"] = "en-US" 209 | self.two_factor_payload["recognizedDevice"] = "on" 210 | raise ChallengeException() 211 | 212 | # TODO (#1) can we scrape anything from the page? 213 | raise Exception("Failed to log in.") 214 | 215 | async def enter_2fa(self, two_factor_code: str): 216 | async with self.session.post( 217 | VERIFY_URL, data={**self.two_factor_payload, "pin": two_factor_code} 218 | ): 219 | if self.has_auth_cookies: 220 | for c in self.session.cookie_jar: 221 | if c.key == "JSESSIONID": 222 | self.session.headers["csrf-token"] = c.value.strip('"') 223 | return 224 | # TODO (#1) can we scrape anything from the page? 225 | raise Exception("Failed to log in.") 226 | 227 | async def logout(self) -> bool: 228 | csrf_token = self.session.headers.get("csrf-token") 229 | if not csrf_token: 230 | return True 231 | response = await self.session.get( 232 | LOGOUT_URL, 233 | params={"csrfToken": csrf_token}, 234 | allow_redirects=False, 235 | ) 236 | return response.status == 303 237 | 238 | # endregion 239 | 240 | # region Conversations 241 | 242 | async def get_conversations( 243 | self, 244 | last_activity_before: Optional[datetime] = None, 245 | ) -> ConversationsResponse: 246 | """ 247 | Fetch list of conversations the user is in. 248 | 249 | :param last_activity_before: :class:`datetime` of the last chat activity to 250 | consider 251 | """ 252 | if last_activity_before is None: 253 | last_activity_before = datetime.now() 254 | 255 | params = { 256 | "keyVersion": "LEGACY_INBOX", 257 | # For some reason, createdBefore is the key, even though that makes 258 | # absolutely no sense whatsoever. 259 | "createdBefore": int(last_activity_before.timestamp() * 1000), 260 | } 261 | 262 | res = await self._get("/messaging/conversations", params=params) 263 | return cast(ConversationsResponse, await try_from_json(ConversationsResponse, res)) 264 | 265 | async def get_all_conversations(self) -> AsyncGenerator[Conversation, None]: 266 | """ 267 | A generator of all of the user's conversations using paging. 268 | """ 269 | last_activity_before = datetime.now() 270 | while True: 271 | conversations_response = await self.get_conversations( 272 | last_activity_before=last_activity_before 273 | ) 274 | for c in conversations_response.elements: 275 | yield c 276 | 277 | # The page size is 20, by default, so if we get less than 20, we are at the 278 | # end of the list so we should stop. 279 | if len(conversations_response.elements) < 20: 280 | break 281 | 282 | if last_activity_at := conversations_response.elements[-1].last_activity_at: 283 | last_activity_before = last_activity_at 284 | else: 285 | break 286 | 287 | async def get_conversation( 288 | self, 289 | conversation_urn: URN, 290 | created_before: Optional[datetime] = None, 291 | ) -> ConversationResponse: 292 | """ 293 | Fetch the given conversation. 
294 | 
295 |         :param conversation_urn: LinkedIn URN for a conversation
296 |         :param created_before: datetime of the last chat activity to consider
297 |         """
298 |         if len(conversation_urn.id_parts) != 1:
299 |             raise TypeError(f"Invalid conversation URN {conversation_urn}.")
300 | 
301 |         if created_before is None:
302 |             created_before = datetime.now()
303 | 
304 |         params = {
305 |             "createdBefore": int(created_before.timestamp() * 1000),
306 |         }
307 | 
308 |         res = await self._get(
309 |             f"/messaging/conversations/{conversation_urn.id_parts[0]}/events",
310 |             params=params,
311 |         )
312 |         return cast(ConversationResponse, await try_from_json(ConversationResponse, res))
313 | 
314 |     async def mark_conversation_as_read(self, conversation_urn: URN) -> bool:
315 |         res = await self._post(
316 |             f"/messaging/conversations/{conversation_urn.id_parts[-1]}",
317 |             json={"patch": {"$set": {"read": True}}},
318 |         )
319 |         return res.status == 200
320 | 
321 |     # endregion
322 | 
323 |     # region Messages
324 | 
325 |     async def upload_media(
326 |         self,
327 |         data: bytes,
328 |         filename: str,
329 |         media_type: str,
330 |     ) -> MessageAttachmentCreate:
331 |         upload_metadata_response = await self._post(
332 |             "/voyagerMediaUploadMetadata",
333 |             params={"action": "upload"},
334 |             json={
335 |                 "mediaUploadType": "MESSAGING_PHOTO_ATTACHMENT",
336 |                 "fileSize": len(data),
337 |                 "filename": filename,
338 |             },
339 |         )
340 |         if upload_metadata_response.status != 200:
341 |             raise Exception("Failed to send upload metadata.")
342 | 
343 |         upload_metadata_response_json = (await upload_metadata_response.json()).get("value", {})
344 |         upload_url = upload_metadata_response_json.get("singleUploadUrl")
345 |         if not upload_url:
346 |             raise Exception("No upload URL provided")
347 | 
348 |         upload_response = await self.session.put(upload_url, data=data)
349 |         if upload_response.status != 201:
350 |             # TODO (#2) is there any other data that we get?
351 | raise Exception("Failed to upload file.") 352 | 353 | return MessageAttachmentCreate( 354 | len(data), 355 | URN(upload_metadata_response_json.get("urn")), 356 | media_type, 357 | filename, 358 | ) 359 | 360 | async def send_message( 361 | self, 362 | conversation_urn_or_recipients: Union[URN, list[URN]], 363 | message_create: MessageCreate, 364 | ) -> SendMessageResponse: 365 | params = {"action": "create"} 366 | message_create_key = "com.linkedin.voyager.messaging.create.MessageCreate" 367 | 368 | message_event: dict[str, Any] = { 369 | "eventCreate": {"value": {message_create_key: message_create.to_dict()}} 370 | } 371 | 372 | if isinstance(conversation_urn_or_recipients, list): 373 | message_event["recipients"] = [r.get_id() for r in conversation_urn_or_recipients] 374 | message_event["subtype"] = "MEMBER_TO_MEMBER" 375 | payload = { 376 | "keyVersion": "LEGACY_INBOX", 377 | "conversationCreate": message_event, 378 | } 379 | res = await self._post( 380 | "/messaging/conversations", 381 | params=params, 382 | json=payload, 383 | ) 384 | else: 385 | conversation_id = conversation_urn_or_recipients.get_id() 386 | res = await self._post( 387 | f"/messaging/conversations/{conversation_id}/events", 388 | params=params, 389 | json=message_event, 390 | headers=REQUEST_HEADERS, 391 | ) 392 | 393 | return cast(SendMessageResponse, await try_from_json(SendMessageResponse, res)) 394 | 395 | async def delete_message(self, conversation_urn: URN, message_urn: URN) -> bool: 396 | res = await self._post( 397 | "/messaging/conversations/{}/events/{}".format( 398 | conversation_urn, message_urn.id_parts[-1] 399 | ), 400 | params={"action": "recall"}, 401 | ) 402 | return res.status == 204 403 | 404 | async def download_linkedin_media(self, url: str) -> bytes: 405 | async with self.session.get(url) as media_resp: 406 | if not media_resp.ok: 407 | raise Exception(f"Failed downloading media. 
Response code {media_resp.status}") 408 | return await media_resp.content.read() 409 | 410 | # endregion 411 | 412 | # region Reactions 413 | 414 | async def add_emoji_reaction( 415 | self, 416 | conversation_urn: URN, 417 | message_urn: URN, 418 | emoji: str, 419 | ) -> bool: 420 | res = await self._post( 421 | "/messaging/conversations/{}/events/{}".format( 422 | conversation_urn, message_urn.id_parts[-1] 423 | ), 424 | params={"action": "reactWithEmoji"}, 425 | json={"emoji": emoji}, 426 | ) 427 | return res.status == 204 428 | 429 | async def remove_emoji_reaction( 430 | self, 431 | conversation_urn: URN, 432 | message_urn: URN, 433 | emoji: str, 434 | ) -> bool: 435 | res = await self._post( 436 | "/messaging/conversations/{}/events/{}".format( 437 | conversation_urn, message_urn.id_parts[-1] 438 | ), 439 | params={"action": "unreactWithEmoji"}, 440 | json={"emoji": emoji}, 441 | ) 442 | return res.status == 204 443 | 444 | async def get_reactors(self, message_urn: URN, emoji: str) -> ReactorsResponse: 445 | params = { 446 | "decorationId": "com.linkedin.voyager.dash.deco.messaging.FullReactor-8", 447 | "emoji": emoji, 448 | "messageUrn": f"urn:li:fsd_message:{message_urn.id_parts[-1]}", 449 | "q": "messageAndEmoji", 450 | } 451 | res = await self._get("/voyagerMessagingDashReactors", params=params) 452 | return cast(ReactorsResponse, await try_from_json(ReactorsResponse, res)) 453 | 454 | # endregion 455 | 456 | # region Typing Notifications 457 | 458 | async def set_typing(self, conversation_urn: URN): 459 | await self._post( 460 | "/messaging/conversations", 461 | params={"action": "typing"}, 462 | json={"conversationId": conversation_urn.get_id()}, 463 | ) 464 | 465 | # endregion 466 | 467 | # region Profiles 468 | 469 | async def get_user_profile(self) -> UserProfileResponse: 470 | res = await self._get("/me") 471 | return cast(UserProfileResponse, await try_from_json(UserProfileResponse, res)) 472 | 473 | async def download_profile_picture(self, picture: Picture) -> bytes: 474 | if not picture.vector_image: 475 | raise Exception( 476 | "Failed downloading media. Invalid Picture object with no vector_image." 477 | ) 478 | url = ( 479 | picture.vector_image.root_url 480 | + picture.vector_image.artifacts[-1].file_identifying_url_path_segment 481 | ) 482 | async with await self.session.get(url) as profile_resp: 483 | if not profile_resp.ok: 484 | raise Exception(f"Failed downloading media. 
Response code {profile_resp.status}")
485 |             return await profile_resp.content.read()
486 | 
487 |     # endregion
488 | 
489 |     # region Event Listener
490 | 
491 |     def add_event_listener(
492 |         self,
493 |         payload_key: str,
494 |         fn: Union[
495 |             Callable[[RealTimeEventStreamEvent], Awaitable[None]],
496 |             Callable[[asyncio.exceptions.TimeoutError], Awaitable[None]],
497 |             Callable[[Exception], Awaitable[None]],
498 |         ],
499 |     ):
500 |         """
501 |         There are two special event types:
502 | 
503 |         * ``ALL_EVENTS`` - an event fired on every event, and which contains the entirety of the
504 |           raw event payload
505 |         * ``TIMEOUT`` - an event fired if the event listener connection times out
506 |         """
507 |         self.event_listeners[payload_key].append(fn)
508 | 
509 |     async def _fire(self, payload_key: str, event: Any):
510 |         for listener in self.event_listeners[payload_key]:
511 |             try:
512 |                 await listener(event)
513 |             except Exception:
514 |                 logging.exception(f"Listener {listener} failed to handle {event}")
515 | 
516 |     async def _listen_to_event_stream(self):
517 |         logging.info("Starting event stream listener")
518 | 
519 |         async with self.session.get(
520 |             REALTIME_CONNECT_URL,
521 |             headers={
522 |                 "accept": "text/event-stream",
523 |                 "connection": "keep-alive",
524 |                 "x-li-accept": "application/vnd.linkedin.normalized+json+2.1",
525 |                 **REQUEST_HEADERS,
526 |             },
527 |             # The event stream normally stays open for about 3 minutes, but this will
528 |             # automatically close it more aggressively so that we don't get into a weird
529 |             # state where it's not receiving any data, but simultaneously isn't closed.
530 |             timeout=120,
531 |         ) as resp:
532 |             if resp.status != 200:
533 |                 raise TooManyRequestsError(f"Failed to connect. Status {resp.status}.")
534 | 
535 |             while True:
536 |                 line = await resp.content.readline()
537 |                 if resp.content.at_eof():
538 |                     break
539 | 
540 |                 if not line.startswith(b"data:"):
541 |                     continue
542 |                 data = json.loads(line.decode("utf-8")[6:])
543 | 
544 |                 # Special handling for ALL_EVENTS handler.
545 |                 if all_events_handlers := self.event_listeners.get("ALL_EVENTS"):
546 |                     for handler in all_events_handlers:
547 |                         try:
548 |                             await handler(data)
549 |                         except Exception:
550 |                             logging.exception(f"Handler {handler} failed to handle {data}")
551 | 
552 |                 event_payload = data.get("com.linkedin.realtimefrontend.DecoratedEvent", {}).get(
553 |                     "payload", {}
554 |                 )
555 | 
556 |                 for key in self.event_listeners.keys():
557 |                     if event_payload.get(key) is not None:
558 |                         await self._fire(key, RealTimeEventStreamEvent.from_dict(event_payload))
559 | 
560 |         logging.info("Event stream closed")
561 | 
562 |     async def start_listener(self):
563 |         while True:
564 |             try:
565 |                 await self._listen_to_event_stream()
566 |             except asyncio.exceptions.TimeoutError as te:
567 |                 # Special handling for TIMEOUT handler.
568 | if timeout_handlers := self.event_listeners.get("TIMEOUT"): 569 | for handler in timeout_handlers: 570 | try: 571 | await handler(te) 572 | except Exception: 573 | logging.exception(f"Handler {handler} failed to handle {te}") 574 | except Exception as e: 575 | logging.exception(f"Got exception in listener: {e}") 576 | raise 577 | 578 | # endregion 579 | -------------------------------------------------------------------------------- /linkedin_messaging/py.typed: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/beeper/linkedin-messaging-api/6c4384263bbd57578e985c122141f79e04173e07/linkedin_messaging/py.typed -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | build-backend = "flit_core.buildapi" 3 | requires = [ 4 | "flit_core<4,>=3.2", 5 | ] 6 | 7 | [project] 8 | name = "linkedin-messaging" 9 | readme = "README.md" 10 | keywords = [ 11 | "LinkedIn", 12 | "messaging", 13 | ] 14 | license = { file = "LICENSE" } 15 | authors = [ 16 | { name = "Sumner Evans", email = "sumner@beeper.com" }, 17 | ] 18 | requires-python = ">=3.9" 19 | classifiers = [ 20 | "Development Status :: 3 - Alpha", 21 | "Framework :: AsyncIO", 22 | "Intended Audience :: Developers", 23 | "License :: OSI Approved :: Apache Software License", 24 | "Programming Language :: Python :: 3 :: Only", 25 | "Programming Language :: Python :: 3.9", 26 | "Programming Language :: Python :: 3.10", 27 | "Programming Language :: Python :: 3.11", 28 | "Topic :: Communications :: Chat", 29 | "Topic :: Software Development :: Libraries", 30 | ] 31 | dynamic = [ 32 | "description", 33 | "version", 34 | ] 35 | dependencies = [ 36 | "aiohttp", 37 | "beautifulsoup4", 38 | "dataclasses-json", 39 | ] 40 | [project.optional-dependencies] 41 | dev = [ 42 | "black", 43 | "flake8", 44 | "flake8-annotations", 45 | "flake8-bugbear", 46 | "flake8-comprehensions", 47 | "flake8-isort", 48 | "flake8-pep3101", 49 | "flake8-print", 50 | "flit", 51 | "isort", 52 | "mypy", 53 | "pip-tools", 54 | "pre-commit", 55 | "pytest", 56 | "pytest-cov", 57 | "termcolor", 58 | "types-termcolor", 59 | ] 60 | [project.urls] 61 | "Bug Tracker" = "https://github.com/beeper/linkedin-messaging-api/issues" 62 | Homepage = "https://github.com/beeper/linkedin-messaging-api" 63 | 64 | [tool.black] 65 | line-length = 99 66 | target-version = ["py38"] 67 | 68 | [tool.isort] 69 | profile = "black" 70 | combine_as_imports = true 71 | known_first_party = "linkedin_messaging" 72 | line_length = 99 73 | 74 | [tool.pytest.ini_options] 75 | addopts = """ 76 | -vvv 77 | --doctest-modules 78 | --ignore-glob='examples/*' 79 | --ignore-glob='cicd/*' 80 | --cov=linkedin_messaging 81 | --cov-report html 82 | --cov-report term 83 | """ 84 | 85 | [[tool.mypy.overrides]] 86 | module = ["bs4"] 87 | ignore_missing_imports = true 88 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | # 2 | # This file is autogenerated by pip-compile with Python 3.9 3 | # by the following command: 4 | # 5 | # pip-compile --output-file=requirements.txt pyproject.toml 6 | # 7 | aiohttp==3.9.1 8 | # via linkedin-messaging (pyproject.toml) 9 | aiosignal==1.3.1 10 | # via aiohttp 11 | async-timeout==4.0.3 12 | # via aiohttp 13 | attrs==23.1.0 14 | # via aiohttp 15 | 
beautifulsoup4==4.12.2
16 |     # via linkedin-messaging (pyproject.toml)
17 | dataclasses-json==0.6.3
18 |     # via linkedin-messaging (pyproject.toml)
19 | frozenlist==1.4.0
20 |     # via
21 |     #   aiohttp
22 |     #   aiosignal
23 | idna==3.6
24 |     # via yarl
25 | marshmallow==3.20.1
26 |     # via dataclasses-json
27 | multidict==6.0.4
28 |     # via
29 |     #   aiohttp
30 |     #   yarl
31 | mypy-extensions==1.0.0
32 |     # via typing-inspect
33 | packaging==23.2
34 |     # via marshmallow
35 | soupsieve==2.5
36 |     # via beautifulsoup4
37 | typing-extensions==4.9.0
38 |     # via typing-inspect
39 | typing-inspect==0.9.0
40 |     # via dataclasses-json
41 | yarl==1.9.4
42 |     # via aiohttp
43 | 
--------------------------------------------------------------------------------
/setup.cfg:
--------------------------------------------------------------------------------
1 | [flake8]
2 | extend-ignore = E203, E402, E722, W503, ANN101, ANN102
3 | exclude = .git,__pycache__,build,dist,.venv
4 | max-line-length = 99
5 | suppress-none-returning = True
6 | suppress-dummy-args = True
7 | extension =
8 |     MC1 = flake8-pep3101
9 | 
--------------------------------------------------------------------------------
/shell.nix:
--------------------------------------------------------------------------------
1 | { pkgs ? import <nixpkgs> {} }: with pkgs;
2 | pkgs.mkShell {
3 |   buildInputs = [
4 |     rnix-lsp
5 |   ];
6 | 
7 |   propagatedBuildInputs = with python3Packages; [
8 |     python39
9 |   ];
10 | 
11 |   shellHook = ''
12 |     export SOURCE_DATE_EPOCH=315532800
13 |   '';
14 | }
15 | 
--------------------------------------------------------------------------------
/tests/test_urn.py:
--------------------------------------------------------------------------------
1 | from linkedin_messaging import URN
2 | 
3 | 
4 | def test_urn_equivalence():
5 |     assert URN("urn:123") == URN("123")
6 |     assert URN("urn:(123,456)") == URN("urn:test:(123,456)")
7 | 
8 | 
9 | def test_urn_equivalence_in_tuple():
10 |     assert (URN("urn:123"), URN("urn:(123,456)")) == (
11 |         URN("123"),
12 |         URN("urn:test:(123,456)"),
13 |     )
14 | 
--------------------------------------------------------------------------------
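
As a quick orientation to how the modules above fit together, here is a minimal usage sketch. It is illustrative only: the cookie values, conversation URN, and message text are placeholders, error handling is omitted, and the maintained end-to-end scripts live in the examples/ directory of this repository.

import asyncio

from linkedin_messaging import URN
from linkedin_messaging.api_objects import MessageCreate
from linkedin_messaging.linkedin import LinkedInMessaging


async def main():
    # Reuse cookies copied from an existing browser session (placeholders shown);
    # login() and enter_2fa() in linkedin.py implement the password/2FA flow.
    client = LinkedInMessaging.from_cookies("<li_at cookie>", "<JSESSIONID value>")
    try:
        if not await client.logged_in():
            raise RuntimeError("Cookies are missing or expired")

        # Walk the paged conversation list via the async generator.
        async for conversation in client.get_all_conversations():
            print(conversation.entity_urn, conversation.name)

        # Send a plain-text message to an existing conversation (placeholder URN).
        await client.send_message(
            URN("<conversation URN>"),
            MessageCreate(body="Hello from linkedin-messaging!"),
        )
    finally:
        await client.close()


asyncio.run(main())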