├── .github └── workflows │ └── python-package.yml ├── .gitignore ├── .pylintrc ├── Dockerfile ├── LICENSE ├── README.md ├── azure-pipelines.yml ├── docker_run.sh ├── msal_extensions ├── __init__.py ├── cache_lock.py ├── filelock.py ├── libsecret.py ├── osx.py ├── persistence.py ├── token_cache.py └── windows.py ├── requirements.txt ├── sample ├── persistence_sample.py └── token_cache_sample.py ├── setup.cfg ├── setup.py ├── tests ├── __init__.py ├── cache_file_generator.py ├── http_client.py ├── lock_acquire.py ├── test_agnostic_backend.py ├── test_cache_lock_file_perf.py ├── test_crossplatlock.py ├── test_macos_backend.py ├── test_persistence.py └── test_windows_backend.py └── tox.ini /.github/workflows/python-package.yml: -------------------------------------------------------------------------------- 1 | # This workflow will install Python dependencies, run tests and lint with a variety of Python versions 2 | # For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions 3 | 4 | name: CI 5 | 6 | on: 7 | push: 8 | pull_request: 9 | branches: [ dev ] 10 | # This guards against unknown PR until a community member vet it and label it. 11 | types: [ labeled ] 12 | 13 | jobs: 14 | ci: 15 | 16 | runs-on: ${{ matrix.os }} 17 | strategy: 18 | fail-fast: false 19 | matrix: 20 | python-version: [3.9, "3.10", 3.11, 3.12, "3.13"] 21 | os: [ubuntu-latest, windows-latest, macos-latest] 22 | include: # https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions#using-environment-variables-in-a-matrix 23 | - python-version: 3.9 24 | toxenv: "py39" 25 | - python-version: "3.10" 26 | toxenv: "py310" 27 | - python-version: 3.11 28 | toxenv: "py311" 29 | - python-version: 3.12 30 | toxenv: "py312" 31 | - python-version: "3.13" 32 | toxenv: "py313" 33 | - python-version: 3.9 34 | os: ubuntu-latest 35 | lint: "true" 36 | steps: 37 | - uses: actions/checkout@v4 38 | - name: Set up Python ${{ matrix.python-version }} 39 | uses: actions/setup-python@v5 40 | with: 41 | python-version: ${{ matrix.python-version }} 42 | cache: 'pip' 43 | - name: Install Linux dependencies for Python 3 44 | if: ${{ matrix.os == 'ubuntu-latest' }} 45 | run: | 46 | sudo apt update 47 | # girepository-2.0 becomes necessary since PyGobject 3.51 https://pygobject.gnome.org/changelog.html#pre-release 48 | sudo apt install -y python3-dev libgirepository-2.0-dev libcairo2-dev gir1.2-secret-1 gnome-keyring 49 | # The line above is different than the PyGObject install instructions: 50 | # https://pygobject.gnome.org/getting_started.html#ubuntu-logo-ubuntu-debian-logo-debian 51 | # because we chose to specify the exact dependencies (secret and keyring) 52 | # rather than using the much larger gtk. 53 | - name: Install Python dependencies 54 | run: | 55 | python -m pip install --upgrade pip 56 | pip install -r requirements.txt 57 | - name: Lint 58 | if: ${{ matrix.lint == 'true' }} 59 | run: | 60 | pylint msal_extensions 61 | # stop the build if there are Python syntax errors or undefined names 62 | #flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics 63 | # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide 64 | #flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics 65 | - name: Test on Linux with encryption 66 | if: ${{ matrix.os == 'ubuntu-latest' }} 67 | run: | 68 | # Don't know why, but the pytest and "." 
have to be re-installed again for them to be used 69 | echo "echo secret_placeholder | gnome-keyring-daemon --unlock; pip install pytest .; pytest" > linux_test.sh 70 | chmod +x linux_test.sh 71 | sudo dbus-run-session -- ./linux_test.sh 72 | - name: Test on other platforms without encryption 73 | if: ${{ matrix.os != 'ubuntu-latest' }} 74 | env: 75 | TOXENV: ${{ matrix.toxenv }} 76 | run: | 77 | tox 78 | 79 | cd: 80 | needs: ci 81 | # Note: github.event.pull_request.draft == false WON'T WORK in "if" statement, 82 | # because the triggered event is a push, not a pull_request. 83 | # This means each commit will trigger a release on TestPyPI. 84 | # Those releases will only succeed when each push has a new version number: a1, a2, a3, etc. 85 | if: | 86 | github.event_name == 'push' && 87 | ( 88 | startsWith(github.ref, 'refs/tags') || 89 | startsWith(github.ref, 'refs/heads/release-') 90 | ) 91 | runs-on: ubuntu-latest 92 | steps: 93 | - uses: actions/checkout@v4 94 | - name: Set up Python 3.9 95 | uses: actions/setup-python@v5 96 | with: 97 | python-version: 3.9 98 | cache: 'pip' 99 | - name: Build a package for release 100 | run: | 101 | python -m pip install build --user 102 | python -m build --sdist --wheel --outdir dist/ . 103 | - name: | 104 | Publish to TestPyPI when pushing to release-* branch. 105 | You better test with a1, a2, b1, b2 releases first. 106 | uses: pypa/gh-action-pypi-publish@v1.4.2 107 | if: startsWith(github.ref, 'refs/heads/release-') 108 | with: 109 | user: __token__ 110 | password: ${{ secrets.TEST_PYPI_API_TOKEN }} 111 | repository_url: https://test.pypi.org/legacy/ 112 | - name: Publish to PyPI when tagged 113 | if: startsWith(github.ref, 'refs/tags') 114 | uses: pypa/gh-action-pypi-publish@v1.4.2 115 | with: 116 | user: __token__ 117 | password: ${{ secrets.PYPI_API_TOKEN }} 118 | 119 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | share/python-wheels/ 24 | *.egg-info/ 25 | .installed.cfg 26 | *.egg 27 | MANIFEST 28 | 29 | # PyInstaller 30 | # Usually these files are written by a python script from a template 31 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 32 | *.manifest 33 | *.spec 34 | 35 | # Installer logs 36 | pip-log.txt 37 | pip-delete-this-directory.txt 38 | 39 | # Unit test / coverage reports 40 | htmlcov/ 41 | .tox/ 42 | .nox/ 43 | .coverage 44 | .coverage.* 45 | .cache 46 | nosetests.xml 47 | coverage.xml 48 | *.cover 49 | *.py,cover 50 | .hypothesis/ 51 | .pytest_cache/ 52 | cover/ 53 | 54 | # Translations 55 | *.mo 56 | *.pot 57 | 58 | # Django stuff: 59 | *.log 60 | local_settings.py 61 | db.sqlite3 62 | db.sqlite3-journal 63 | 64 | # Flask stuff: 65 | instance/ 66 | .webassets-cache 67 | 68 | # Scrapy stuff: 69 | .scrapy 70 | 71 | # Sphinx documentation 72 | docs/_build/ 73 | 74 | # PyBuilder 75 | .pybuilder/ 76 | target/ 77 | 78 | # Jupyter Notebook 79 | .ipynb_checkpoints 80 | 81 | # IPython 82 | profile_default/ 83 | ipython_config.py 84 | 85 | # pdm 86 | # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. 
87 | #pdm.lock 88 | # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it 89 | # in version control. 90 | # https://pdm.fming.dev/latest/usage/project/#working-with-version-control 91 | .pdm.toml 92 | .pdm-python 93 | .pdm-build/ 94 | 95 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm 96 | __pypackages__/ 97 | 98 | # Celery stuff 99 | celerybeat-schedule 100 | celerybeat.pid 101 | 102 | # SageMath parsed files 103 | *.sage.py 104 | 105 | # Environments 106 | .env 107 | .venv 108 | env/ 109 | venv/ 110 | ENV/ 111 | env.bak/ 112 | venv.bak/ 113 | 114 | # Spyder project settings 115 | .spyderproject 116 | .spyproject 117 | 118 | # Rope project settings 119 | .ropeproject 120 | 121 | # mkdocs documentation 122 | /site 123 | 124 | # mypy 125 | .mypy_cache/ 126 | .dmypy.json 127 | dmypy.json 128 | 129 | # Pyre type checker 130 | .pyre/ 131 | 132 | # pytype static type analyzer 133 | .pytype/ 134 | 135 | # Cython debug symbols 136 | cython_debug/ 137 | 138 | # Ruff stuff: 139 | .ruff_cache/ 140 | 141 | # PyPI configuration file 142 | .pypirc -------------------------------------------------------------------------------- /.pylintrc: -------------------------------------------------------------------------------- 1 | [MESSAGES CONTROL] 2 | good-names= 3 | logger 4 | disable= 5 | consider-using-f-string, # For Python < 3.6 6 | super-with-arguments, # For Python 2.x 7 | raise-missing-from, # For Python 2.x 8 | trailing-newlines, 9 | useless-object-inheritance 10 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | # TODO: Can this Dockerfile use multi-stage build? 2 | # https://testdriven.io/tips/6da2d9c9-8849-4386-b7f9-13b28514ded8/ 3 | # Final size 690MB. (It would be 1.16 GB if started with python:3 as base) 4 | FROM python:3.13-slim 5 | 6 | # Install Generic PyGObject (sans GTK) 7 | # The following somehow won't work: 8 | #RUN apt-get update && apt-get install -y python3-gi python3-gi-cairo 9 | RUN apt-get update && apt-get install -y \ 10 | libcairo2-dev \ 11 | libgirepository1.0-dev \ 12 | python3-dev 13 | 14 | # Install MSAL Extensions dependencies 15 | # Don't know how to get the container to talk to dbus on the host, 16 | # so we choose to create a self-contained image by installing gnome-keyring 17 | RUN apt-get install -y \ 18 | gir1.2-secret-1 \ 19 | gnome-keyring 20 | 21 | # Not strictly necessary, but we include pytest (which is only 3MB) to facilitate testing. 22 | RUN pip install "pygobject>=3,<4" "pytest>=6,<7" 23 | 24 | # Install MSAL Extensions. Upgrade the pinned version number to trigger a new image build. 25 | RUN pip install "msal-extensions==1.2" 26 | 27 | # This setup is inspired by https://github.com/jaraco/keyring#using-keyring-on-headless-linux-systems-in-a-docker-container 28 | ENTRYPOINT ["dbus-run-session", "--"] 29 | # Note: gnome-keyring-daemon needs privileged mode, therefore it cannot be run by a RUN command. 30 | CMD ["sh", "-c", "echo default_secret | gnome-keyring-daemon --unlock; bash"] 31 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) Microsoft Corporation. All rights reserved. 
4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | 2 | # Microsoft Authentication Extensions for Python 3 | 4 | The Microsoft Authentication Extensions for Python offers secure mechanisms for client applications to perform cross-platform token cache serialization and persistence. It provides additional support for the [Microsoft Authentication Library for Python (MSAL)](https://github.com/AzureAD/microsoft-authentication-library-for-python). 5 | 6 | MSAL Python supports an in-memory cache by default and provides the [SerializableTokenCache](https://msal-python.readthedocs.io/en/latest/#msal.SerializableTokenCache) to perform cache serialization. You can read more about this in the MSAL Python [documentation](https://docs.microsoft.com/en-us/azure/active-directory/develop/msal-python-token-cache-serialization). Developers are required to implement their own cache persistence across multiple platforms, and Microsoft Authentication Extensions makes this simpler. 7 | 8 | The supported platforms are Windows, macOS and Linux. 9 | - Windows - [DPAPI](https://docs.microsoft.com/en-us/dotnet/standard/security/how-to-use-data-protection) is used for encryption. 10 | - macOS - The macOS Keychain is used. 11 | - Linux - [LibSecret](https://wiki.gnome.org/Projects/Libsecret) is used for encryption. 12 | 13 | > Note: It is recommended to use this library for cache persistence support only for public client applications, such as desktop apps. In web applications, this may lead to scale and performance issues. Web applications are recommended to persist the cache in the session instead. Take a look at this [webapp sample](https://github.com/Azure-Samples/ms-identity-python-webapp). 14 | 15 | ## Installation 16 | 17 | You can find Microsoft Authentication Extensions for Python on [PyPI](https://pypi.org/project/msal-extensions/). 18 | 1. If you haven't already, [install and/or upgrade pip](https://pip.pypa.io/en/stable/installing/) 19 | in your Python environment to a recent version. We tested with pip 18.1. 20 | 2. Run `pip install msal-extensions`. 21 | 22 | ## Versions 23 | 24 | This library follows [Semantic Versioning](http://semver.org/). 25 | 26 | You can find the changes for each version under 27 | [Releases](https://github.com/AzureAD/microsoft-authentication-extensions-for-python/releases). 
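To confirm which version you have installed, you can check the package's `__version__` attribute (it is defined in `msal_extensions/__init__.py`). A minimal check:

```python
import msal_extensions
print(msal_extensions.__version__)  # e.g. "1.3.1"
```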
28 | 29 | ## Usage 30 | 31 | ### Creating an encrypted token cache file to be used by MSAL 32 | 33 | The Microsoft Authentication Extensions library provides the `PersistedTokenCache`, which accepts a platform-dependent persistence instance. This token cache can then be used to instantiate the `PublicClientApplication` in MSAL Python. 34 | 35 | The token cache includes a file lock and auto-reload behavior under the hood. 36 | 37 | 38 | 39 | Here is an example of this pattern for multiple platforms (taken from the complete [sample here](https://github.com/AzureAD/microsoft-authentication-extensions-for-python/blob/dev/sample/token_cache_sample.py)): 40 | 41 | ```python 42 | def build_persistence(location, fallback_to_plaintext=False): 43 | """Build a suitable persistence instance based on your current OS""" 44 | try: 45 | return build_encrypted_persistence(location) 46 | except: 47 | if not fallback_to_plaintext: 48 | raise 49 | logging.warning("Encryption unavailable. Opting in to plain text.") 50 | return FilePersistence(location) 51 | 52 | persistence = build_persistence("token_cache.bin") 53 | print("Type of persistence: {}".format(persistence.__class__.__name__)) 54 | print("Is this persistence encrypted?", persistence.is_encrypted) 55 | 56 | cache = PersistedTokenCache(persistence) 57 | ``` 58 | Now you can use it in an MSAL application like this: 59 | ```python 60 | app = msal.PublicClientApplication("my_client_id", token_cache=cache) 61 | ``` 62 | 63 | ### Creating an encrypted persistence file to store your own data 64 | 65 | Here is an example of this pattern for multiple platforms (taken from the complete [sample here](https://github.com/AzureAD/microsoft-authentication-extensions-for-python/blob/dev/sample/persistence_sample.py)): 66 | 67 | ```python 68 | def build_persistence(location, fallback_to_plaintext=False): 69 | """Build a suitable persistence instance based on your current OS""" 70 | try: 71 | return build_encrypted_persistence(location) 72 | except: # pylint: disable=bare-except 73 | if not fallback_to_plaintext: 74 | raise 75 | logging.warning("Encryption unavailable. Opting in to plain text.") 76 | return FilePersistence(location) 77 | 78 | persistence = build_persistence("storage.bin", fallback_to_plaintext=False) 79 | print("Type of persistence: {}".format(persistence.__class__.__name__)) 80 | print("Is this persistence encrypted?", persistence.is_encrypted) 81 | 82 | data = { # It can be anything; here we demonstrate an arbitrary JSON object 83 | "foo": "hello world", 84 | "bar": "", 85 | "service_principle_1": "blah blah...", 86 | } 87 | 88 | persistence.save(json.dumps(data)) 89 | assert json.loads(persistence.load()) == data 90 | ``` 91 | 92 | ## Python version support policy 93 | 94 | Python versions that are more than 6 months past their 95 | [end-of-life date defined by the Python Software Foundation (PSF)](https://devguide.python.org/versions/#versions) 96 | will not receive new feature updates from this library. 97 | 98 | 99 | ## Community Help and Support 100 | 101 | We leverage Stack Overflow to work with the community on supporting Azure Active Directory and its SDKs, including this one! 102 | We highly recommend you ask your questions on Stack Overflow (we're all on there!). 103 | Also browse existing issues to see if someone has had your question before. 104 | 105 | We recommend you use the "msal" tag so we can see it! 
106 | Here is the latest Q&A on Stack Overflow for MSAL: 107 | [http://stackoverflow.com/questions/tagged/msal](http://stackoverflow.com/questions/tagged/msal) 108 | 109 | 110 | ## Contributing 111 | 112 | All code is licensed under the MIT license and we triage actively on GitHub. 113 | 114 | This project welcomes contributions and suggestions. Most contributions require you to agree to a 115 | Contributor License Agreement (CLA) declaring that you have the right to, and actually do, grant us 116 | the rights to use your contribution. For details, visit https://cla.microsoft.com. 117 | 118 | When you submit a pull request, a CLA-bot will automatically determine whether you need to provide 119 | a CLA and decorate the PR appropriately (e.g., label, comment). Simply follow the instructions 120 | provided by the bot. You will only need to do this once across all repos using our CLA. 121 | 122 | 123 | ## We value and adhere to the Microsoft Open Source Code of Conduct 124 | 125 | This project has adopted the [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/). For more information see the [Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/) or contact [opencode@microsoft.com](mailto:opencode@microsoft.com) with any additional questions or comments. 126 | -------------------------------------------------------------------------------- /azure-pipelines.yml: -------------------------------------------------------------------------------- 1 | resources: 2 | - repo: self 3 | 4 | trigger: 5 | batch: true 6 | branches: 7 | include: 8 | - '*' 9 | -------------------------------------------------------------------------------- /docker_run.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/bash 2 | IMAGE_NAME=msal-extensions:latest 3 | 4 | docker build -t $IMAGE_NAME - < Dockerfile 5 | 6 | echo "==== Integration Test for Persistence on Linux (libsecret) ====" 7 | echo "After seeing the bash prompt, run the following to test encryption on Linux:" 8 | echo " pip install -e ." 9 | echo " pytest --capture=no -s tests/chosen_test_file.py" 10 | echo "Note: It will test portalocker-based lock when portalocker is installed, or test file-based lock otherwise." 
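# Explanation of the flags used below: --privileged is required because gnome-keyring-daemon
# needs privileged mode (see the note in the Dockerfile), and mounting $PWD at /home (with -w /home)
# lets the "pip install -e ." and pytest commands suggested above run against your working copy.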
11 | docker run --rm -it \ 12 | --privileged \ 13 | -w /home -v $PWD:/home \ 14 | $IMAGE_NAME \ 15 | $1 16 | 17 | -------------------------------------------------------------------------------- /msal_extensions/__init__.py: -------------------------------------------------------------------------------- 1 | """Provides auxiliary functionality to the `msal` package.""" 2 | __version__ = "1.3.1" # Note: During/after release, copy this number to Dockerfile 3 | 4 | from .persistence import ( 5 | FilePersistence, 6 | build_encrypted_persistence, 7 | FilePersistenceWithDataProtection, 8 | KeychainPersistence, 9 | LibsecretPersistence, 10 | ) 11 | from .token_cache import PersistedTokenCache, CrossPlatLock, LockError 12 | 13 | -------------------------------------------------------------------------------- /msal_extensions/cache_lock.py: -------------------------------------------------------------------------------- 1 | """Provides a mechanism for not competing with other processes interacting with an MSAL cache.""" 2 | import os 3 | import sys 4 | import errno 5 | import time 6 | import logging 7 | 8 | import portalocker # pylint: disable=import-error 9 | 10 | 11 | logger = logging.getLogger(__name__) 12 | 13 | 14 | LockError = portalocker.exceptions.LockException 15 | 16 | 17 | class CrossPlatLock(object): 18 | """Offers a mechanism for waiting until another process is finished interacting with a shared 19 | resource. This is specifically written to interact with a class of the same name in the .NET 20 | extensions library. 21 | """ 22 | def __init__(self, lockfile_path): 23 | self._lockpath = lockfile_path 24 | self._lock = portalocker.Lock( 25 | lockfile_path, 26 | mode='wb+', 27 | # In posix systems, we HAVE to use LOCK_EX(exclusive lock) bitwise ORed 28 | # with LOCK_NB(non-blocking) to avoid blocking on lock acquisition. 29 | # More information here: 30 | # https://docs.python.org/3/library/fcntl.html#fcntl.lockf 31 | flags=portalocker.LOCK_EX | portalocker.LOCK_NB, 32 | # Support for passing through arguments to the open syscall 33 | # was added in Portalocker v1.4.0 (2019-02-11). 34 | buffering=0, 35 | ) 36 | 37 | def _try_to_create_lock_file(self): 38 | timeout = 5 39 | check_interval = 0.25 40 | current_time = getattr(time, "monotonic", time.time) 41 | timeout_end = current_time() + timeout 42 | pid = os.getpid() 43 | while timeout_end > current_time(): 44 | try: 45 | with open(self._lockpath, 'x'): # pylint: disable=unspecified-encoding 46 | return True 47 | except ValueError: # This needs to be the first clause, for Python 2 to hit it 48 | logger.warning("Python 2 does not support atomic creation of file") 49 | return False 50 | except FileExistsError: # Only Python 3 will reach this clause 51 | logger.debug( 52 | "Process %d found existing lock file, will retry after %f second", 53 | pid, check_interval) 54 | time.sleep(check_interval) 55 | return False 56 | 57 | def __enter__(self): 58 | pid = os.getpid() 59 | if not self._try_to_create_lock_file(): 60 | logger.warning("Process %d failed to create lock file", pid) 61 | file_handle = self._lock.__enter__() 62 | file_handle.write('{} {}'.format(pid, sys.argv[0]).encode('utf-8')) # pylint: disable=consider-using-f-string 63 | return file_handle 64 | 65 | def __exit__(self, *args): 66 | self._lock.__exit__(*args) 67 | try: 68 | # Attempt to delete the lockfile. In either of the failure cases enumerated below, it is 69 | # likely that another process has raced this one and ended up clearing or locking the 70 | # file for itself. 
71 | os.remove(self._lockpath) 72 | except OSError as ex: # pylint: disable=invalid-name 73 | if ex.errno not in (errno.ENOENT, errno.EACCES): 74 | raise 75 | -------------------------------------------------------------------------------- /msal_extensions/filelock.py: -------------------------------------------------------------------------------- 1 | """A cross-process lock based on exclusive creation of a given file name""" 2 | import os 3 | import sys 4 | import errno 5 | import time 6 | import logging 7 | 8 | 9 | logger = logging.getLogger(__name__) 10 | 11 | 12 | class LockError(RuntimeError): 13 | """It will be raised when unable to obtain a lock""" 14 | 15 | 16 | class CrossPlatLock(object): 17 | """This implementation relies only on ``open(..., 'x')``""" 18 | def __init__(self, lockfile_path): 19 | self._lockpath = lockfile_path 20 | 21 | def __enter__(self): 22 | self._create_lock_file('{} {}'.format( 23 | os.getpid(), 24 | sys.argv[0], 25 | ).encode('utf-8')) # pylint: disable=consider-using-f-string 26 | return self 27 | 28 | def _create_lock_file(self, content): 29 | timeout = 5 30 | check_interval = 0.25 31 | current_time = getattr(time, "monotonic", time.time) 32 | timeout_end = current_time() + timeout 33 | while timeout_end > current_time(): 34 | try: 35 | with open(self._lockpath, 'xb') as lock_file: # pylint: disable=unspecified-encoding 36 | lock_file.write(content) 37 | return None # Happy path 38 | except ValueError: # This needs to be the first clause, for Python 2 to hit it 39 | raise LockError("Python 2 does not support atomic creation of file") 40 | except FileExistsError: # Only Python 3 will reach this clause 41 | logger.debug( 42 | "Process %d found existing lock file, will retry after %f second", 43 | os.getpid(), check_interval) 44 | time.sleep(check_interval) 45 | raise LockError( 46 | "Unable to obtain lock, despite trying for {} second(s). " 47 | "You may want to manually remove the stale lock file {}".format( 48 | timeout, 49 | self._lockpath, 50 | )) 51 | 52 | def __exit__(self, *args): 53 | try: 54 | os.remove(self._lockpath) 55 | except OSError as ex: # pylint: disable=invalid-name 56 | if ex.errno in (errno.ENOENT, errno.EACCES): 57 | # Probably another process has raced this one 58 | # and ended up clearing or locking the file for itself. 59 | logger.debug("Unable to remove lock file") 60 | else: 61 | raise 62 | 63 | -------------------------------------------------------------------------------- /msal_extensions/libsecret.py: -------------------------------------------------------------------------------- 1 | """Implements a Linux specific TokenCache, and provides auxiliary helper types. 2 | 3 | This module depends on PyGObject. But `pip install pygobject` would typically fail, 4 | until you install its dependencies first. 
For example, on a Debian Linux, you need:: 5 | 6 | sudo apt install libgirepository1.0-dev libcairo2-dev python3-dev gir1.2-secret-1 7 | pip install pygobject 8 | 9 | Alternatively, you could skip Cairo & PyCairo, but you still need to do all these 10 | (derived from https://gitlab.gnome.org/GNOME/pygobject/-/issues/395):: 11 | 12 | sudo apt install libgirepository1.0-dev python3-dev gir1.2-secret-1 13 | pip install wheel 14 | PYGOBJECT_WITHOUT_PYCAIRO=1 pip install --no-build-isolation pygobject 15 | """ 16 | 17 | try: 18 | import gi # https://github.com/AzureAD/microsoft-authentication-extensions-for-python/wiki/Encryption-on-Linux # pylint: disable=line-too-long 19 | except ImportError: 20 | raise ImportError("""Unable to import module 'gi' 21 | Runtime dependency of PyGObject is missing. 22 | Depends on your Linux distro, you could install it system-wide by something like: 23 | sudo apt install python3-gi python3-gi-cairo gir1.2-secret-1 24 | If necessary, please refer to PyGObject's doc: 25 | https://pygobject.readthedocs.io/en/latest/getting_started.html 26 | """) # Message via exception rather than log 27 | 28 | try: 29 | # pylint: disable=no-name-in-module 30 | gi.require_version("Secret", "1") # Would require a package gir1.2-secret-1 31 | # pylint: disable=wrong-import-position 32 | from gi.repository import Secret # Would require a package gir1.2-secret-1 33 | except (ValueError, ImportError) as ex: 34 | raise type(ex)( 35 | """Require a package "gir1.2-secret-1" which could be installed by: 36 | sudo apt install gir1.2-secret-1 37 | """) # Message via exception rather than log 38 | 39 | 40 | class LibSecretAgent(object): 41 | """A loader/saver built on top of low-level libsecret""" 42 | # Inspired by https://developer.gnome.org/libsecret/unstable/py-examples.html 43 | def __init__( # pylint: disable=too-many-arguments,too-many-positional-arguments 44 | self, 45 | schema_name, 46 | attributes, # {"name": "value", ...} 47 | label="", # Helpful when visualizing secrets by other viewers 48 | attribute_types=None, # {name: SchemaAttributeType, ...} 49 | collection=None, # None means default collection 50 | ): 51 | """This agent is built on top of lower level libsecret API. 52 | 53 | Content stored via libsecret is associated with a bunch of attributes. 54 | 55 | :param string schema_name: 56 | Attributes would conceptually follow an existing schema. 57 | But this class will do it in the other way around, 58 | by automatically deriving a schema based on your attributes. 59 | However, you will still need to provide a schema_name. 60 | load() and save() will only operate on data with matching schema_name. 61 | 62 | :param dict attributes: 63 | Attributes are key-value pairs, represented as a Python dict here. 64 | They will be used to filter content during load() and save(). 65 | Their arbitrary keys are strings. 66 | Their arbitrary values can MEAN strings, integers and booleans, 67 | but are always represented as strings, according to upstream sample: 68 | https://developer.gnome.org/libsecret/0.18/py-store-example.html 69 | 70 | :param string label: 71 | It will not be used during data lookup and filtering. 72 | It is only helpful when/if you visualize secrets by other viewers. 73 | 74 | :param dict attribute_types: 75 | Each key is the name of your each attribute. 
76 | The corresponding value will be one of the following three: 77 | 78 | * Secret.SchemaAttributeType.STRING 79 | * Secret.SchemaAttributeType.INTEGER 80 | * Secret.SchemaAttributeType.BOOLEAN 81 | 82 | But if all your attributes are Secret.SchemaAttributeType.STRING, 83 | you do not need to provide this types definition at all. 84 | 85 | :param collection: 86 | The default value `None` means default collection. 87 | """ 88 | self._collection = collection 89 | self._attributes = attributes or {} 90 | self._label = label 91 | self._schema = Secret.Schema.new(schema_name, Secret.SchemaFlags.NONE, { 92 | k: (attribute_types or {}).get(k, Secret.SchemaAttributeType.STRING) 93 | for k in self._attributes}) 94 | 95 | def save(self, data): 96 | """Store data. Returns a boolean of whether operation was successful.""" 97 | return Secret.password_store_sync( 98 | self._schema, self._attributes, self._collection, self._label, 99 | data, None) 100 | 101 | def load(self): 102 | """Load a password in the secret service, return None when found nothing""" 103 | return Secret.password_lookup_sync(self._schema, self._attributes, None) 104 | 105 | def clear(self): 106 | """Returns a boolean of whether any passwords were removed""" 107 | return Secret.password_clear_sync(self._schema, self._attributes, None) 108 | 109 | 110 | def trial_run(): 111 | """This trial run will raise an exception if libsecret is not functioning. 112 | 113 | Even after you installed all the dependencies so that your script can start, 114 | or even if your previous run was successful, your script could fail next time, 115 | for example when it will be running inside a headless SSH session. 116 | 117 | You do not have to do trial_run. The exception would also be raised by save(). 118 | """ 119 | try: 120 | agent = LibSecretAgent("Test Schema", {"attr1": "foo", "attr2": "bar"}) 121 | payload = "Test Data" 122 | agent.save(payload) # It would fail when running inside an SSH session 123 | assert agent.load() == payload # This line is probably not reachable 124 | agent.clear() 125 | except (gi.repository.GLib.Error, AssertionError): # pylint: disable=no-member 126 | # https://pygobject.readthedocs.io/en/latest/guide/api/error_handling.html#examples 127 | message = """libsecret did not perform properly. 128 | * If you encountered error "Remote error from secret service: 129 | org.freedesktop.DBus.Error.ServiceUnknown", 130 | you may need to install gnome-keyring package. 131 | * Headless mode (such as in an ssh session) is not supported. 132 | """ 133 | raise RuntimeError(message) # Message via exception rather than log 134 | 135 | -------------------------------------------------------------------------------- /msal_extensions/osx.py: -------------------------------------------------------------------------------- 1 | # pylint: disable=duplicate-code 2 | 3 | """Implements a macOS specific TokenCache, and provides auxiliary helper types.""" 4 | 5 | import os 6 | import ctypes as _ctypes 7 | 8 | OS_RESULT = _ctypes.c_int32 # pylint: disable=invalid-name 9 | 10 | 11 | class KeychainError(OSError): 12 | """The RuntimeError that will be run when a function interacting with Keychain fails.""" 13 | 14 | ACCESS_DENIED = -128 15 | NO_SUCH_KEYCHAIN = -25294 16 | NO_DEFAULT = -25307 17 | ITEM_NOT_FOUND = -25300 18 | 19 | def __init__(self, exit_status): 20 | super(KeychainError, self).__init__() 21 | self.exit_status = exit_status 22 | # TODO: pylint: disable=fixme 23 | # use SecCopyErrorMessageString to fetch the appropriate message here. 
24 | self.message = ( 25 | '{} see https://opensource.apple.com/source/CarbonHeaders/CarbonHeaders-18.1/MacErrors.h' # pylint: disable=consider-using-f-string,line-too-long 26 | .format(self.exit_status)) 27 | 28 | def _get_native_location(name): 29 | # type: (str) -> str 30 | """ 31 | Fetches the location of a native MacOS library. 32 | :param name: The name of the library to be loaded. 33 | :return: The location of the library on a MacOS filesystem. 34 | """ 35 | return '/System/Library/Frameworks/{0}.framework/{0}'.format(name) # pylint: disable=consider-using-f-string 36 | 37 | 38 | # Load native MacOS libraries 39 | _SECURITY = _ctypes.CDLL(_get_native_location('Security')) 40 | _CORE = _ctypes.CDLL(_get_native_location('CoreFoundation')) 41 | 42 | 43 | # Bind CFRelease from native MacOS libraries. 44 | _CORE_RELEASE = _CORE.CFRelease 45 | _CORE_RELEASE.argtypes = ( 46 | _ctypes.c_void_p, 47 | ) 48 | 49 | # Bind SecCopyErrorMessageString from native MacOS libraries. 50 | # https://developer.apple.com/documentation/security/1394686-seccopyerrormessagestring?language=objc 51 | _SECURITY_COPY_ERROR_MESSAGE_STRING = _SECURITY.SecCopyErrorMessageString 52 | _SECURITY_COPY_ERROR_MESSAGE_STRING.argtypes = ( 53 | OS_RESULT, 54 | _ctypes.c_void_p 55 | ) 56 | _SECURITY_COPY_ERROR_MESSAGE_STRING.restype = _ctypes.c_char_p 57 | 58 | # Bind SecKeychainOpen from native MacOS libraries. 59 | # https://developer.apple.com/documentation/security/1396431-seckeychainopen 60 | _SECURITY_KEYCHAIN_OPEN = _SECURITY.SecKeychainOpen 61 | _SECURITY_KEYCHAIN_OPEN.argtypes = ( 62 | _ctypes.c_char_p, 63 | _ctypes.POINTER(_ctypes.c_void_p) 64 | ) 65 | _SECURITY_KEYCHAIN_OPEN.restype = OS_RESULT 66 | 67 | # Bind SecKeychainCopyDefault from native MacOS libraries. 68 | # https://developer.apple.com/documentation/security/1400743-seckeychaincopydefault?language=objc 69 | _SECURITY_KEYCHAIN_COPY_DEFAULT = _SECURITY.SecKeychainCopyDefault 70 | _SECURITY_KEYCHAIN_COPY_DEFAULT.argtypes = ( 71 | _ctypes.POINTER(_ctypes.c_void_p), 72 | ) 73 | _SECURITY_KEYCHAIN_COPY_DEFAULT.restype = OS_RESULT 74 | 75 | 76 | # Bind SecKeychainItemFreeContent from native MacOS libraries. 77 | _SECURITY_KEYCHAIN_ITEM_FREE_CONTENT = _SECURITY.SecKeychainItemFreeContent 78 | _SECURITY_KEYCHAIN_ITEM_FREE_CONTENT.argtypes = ( 79 | _ctypes.c_void_p, 80 | _ctypes.c_void_p, 81 | ) 82 | _SECURITY_KEYCHAIN_ITEM_FREE_CONTENT.restype = OS_RESULT 83 | 84 | # Bind SecKeychainItemModifyAttributesAndData from native MacOS libraries. 85 | _SECURITY_KEYCHAIN_ITEM_MODIFY_ATTRIBUTES_AND_DATA = \ 86 | _SECURITY.SecKeychainItemModifyAttributesAndData 87 | _SECURITY_KEYCHAIN_ITEM_MODIFY_ATTRIBUTES_AND_DATA.argtypes = ( 88 | _ctypes.c_void_p, 89 | _ctypes.c_void_p, 90 | _ctypes.c_uint32, 91 | _ctypes.c_void_p, 92 | ) 93 | _SECURITY_KEYCHAIN_ITEM_MODIFY_ATTRIBUTES_AND_DATA.restype = OS_RESULT 94 | 95 | # Bind SecKeychainFindGenericPassword from native MacOS libraries. 
96 | # https://developer.apple.com/documentation/security/1397301-seckeychainfindgenericpassword?language=objc 97 | _SECURITY_KEYCHAIN_FIND_GENERIC_PASSWORD = _SECURITY.SecKeychainFindGenericPassword 98 | _SECURITY_KEYCHAIN_FIND_GENERIC_PASSWORD.argtypes = ( 99 | _ctypes.c_void_p, 100 | _ctypes.c_uint32, 101 | _ctypes.c_char_p, 102 | _ctypes.c_uint32, 103 | _ctypes.c_char_p, 104 | _ctypes.POINTER(_ctypes.c_uint32), 105 | _ctypes.POINTER(_ctypes.c_void_p), 106 | _ctypes.POINTER(_ctypes.c_void_p), 107 | ) 108 | _SECURITY_KEYCHAIN_FIND_GENERIC_PASSWORD.restype = OS_RESULT 109 | # Bind SecKeychainAddGenericPassword from native MacOS 110 | # https://developer.apple.com/documentation/security/1398366-seckeychainaddgenericpassword?language=objc 111 | _SECURITY_KEYCHAIN_ADD_GENERIC_PASSWORD = _SECURITY.SecKeychainAddGenericPassword 112 | _SECURITY_KEYCHAIN_ADD_GENERIC_PASSWORD.argtypes = ( 113 | _ctypes.c_void_p, 114 | _ctypes.c_uint32, 115 | _ctypes.c_char_p, 116 | _ctypes.c_uint32, 117 | _ctypes.c_char_p, 118 | _ctypes.c_uint32, 119 | _ctypes.c_char_p, 120 | _ctypes.POINTER(_ctypes.c_void_p), 121 | ) 122 | _SECURITY_KEYCHAIN_ADD_GENERIC_PASSWORD.restype = OS_RESULT 123 | 124 | 125 | class Keychain(object): 126 | """Encapsulates the interactions with a particular MacOS Keychain.""" 127 | def __init__(self, filename=None): 128 | # type: (str) -> None 129 | self._ref = _ctypes.c_void_p() 130 | 131 | if filename: 132 | filename = os.path.expanduser(filename) 133 | self._filename = filename.encode('utf-8') 134 | else: 135 | self._filename = None 136 | 137 | def __enter__(self): 138 | if self._filename: 139 | status = _SECURITY_KEYCHAIN_OPEN(self._filename, self._ref) 140 | else: 141 | status = _SECURITY_KEYCHAIN_COPY_DEFAULT(self._ref) 142 | 143 | if status: 144 | raise OSError(status) 145 | return self 146 | 147 | def __exit__(self, *args): 148 | if self._ref: 149 | _CORE_RELEASE(self._ref) 150 | 151 | def get_generic_password(self, service, account_name): 152 | # type: (str, str) -> str 153 | """Fetch the password associated with a particular service and account. 154 | 155 | :param service: The service that this password is associated with. 156 | :param account_name: The account that this password is associated with. 157 | :return: The value of the password associated with the specified service and account. 158 | """ 159 | service = service.encode('utf-8') 160 | account_name = account_name.encode('utf-8') 161 | 162 | length = _ctypes.c_uint32() 163 | contents = _ctypes.c_void_p() 164 | exit_status = _SECURITY_KEYCHAIN_FIND_GENERIC_PASSWORD( 165 | self._ref, 166 | len(service), 167 | service, 168 | len(account_name), 169 | account_name, 170 | length, 171 | contents, 172 | None, 173 | ) 174 | 175 | if exit_status: 176 | raise KeychainError(exit_status=exit_status) 177 | 178 | value = _ctypes.create_string_buffer(length.value) 179 | _ctypes.memmove(value, contents.value, length.value) 180 | _SECURITY_KEYCHAIN_ITEM_FREE_CONTENT(None, contents) 181 | return value.raw.decode('utf-8') 182 | 183 | def set_generic_password(self, service, account_name, value): 184 | # type: (str, str, str) -> None 185 | """Associate a password with a given service and account. 186 | 187 | :param service: The service to associate this password with. 188 | :param account_name: The account to associate this password with. 189 | :param value: The string that should be used as the password. 
190 | """ 191 | service = service.encode('utf-8') 192 | account_name = account_name.encode('utf-8') 193 | value = value.encode('utf-8') 194 | 195 | entry = _ctypes.c_void_p() 196 | find_exit_status = _SECURITY_KEYCHAIN_FIND_GENERIC_PASSWORD( 197 | self._ref, 198 | len(service), 199 | service, 200 | len(account_name), 201 | account_name, 202 | None, 203 | None, 204 | entry, 205 | ) 206 | 207 | if not find_exit_status: 208 | modify_exit_status = _SECURITY_KEYCHAIN_ITEM_MODIFY_ATTRIBUTES_AND_DATA( 209 | entry, 210 | None, 211 | len(value), 212 | value, 213 | ) 214 | if modify_exit_status: 215 | raise KeychainError(exit_status=modify_exit_status) 216 | 217 | elif find_exit_status == KeychainError.ITEM_NOT_FOUND: 218 | add_exit_status = _SECURITY_KEYCHAIN_ADD_GENERIC_PASSWORD( 219 | self._ref, 220 | len(service), 221 | service, 222 | len(account_name), 223 | account_name, 224 | len(value), 225 | value, 226 | None 227 | ) 228 | 229 | if add_exit_status: 230 | raise KeychainError(exit_status=add_exit_status) 231 | else: 232 | raise KeychainError(exit_status=find_exit_status) 233 | 234 | def get_internet_password(self, service, username): 235 | # type: (str, str) -> str 236 | """ Fetches a password associated with a domain and username. 237 | NOTE: THIS IS NOT YET IMPLEMENTED 238 | :param service: The website/service that this password is associated with. 239 | :param username: The account that this password is associated with. 240 | :return: The password that was associated with the given service and username. 241 | """ 242 | raise NotImplementedError() 243 | 244 | def set_internet_password(self, service, username, value): 245 | # type: (str, str, str) -> None 246 | """Sets a password associated with a domain and a username. 247 | NOTE: THIS IS NOT YET IMPLEMENTED 248 | :param service: The website/service that this password is associated with. 249 | :param username: The account that this password is associated with. 250 | :param value: The password that should be associated with the given service and username. 251 | """ 252 | raise NotImplementedError() 253 | -------------------------------------------------------------------------------- /msal_extensions/persistence.py: -------------------------------------------------------------------------------- 1 | """A generic persistence layer, optionally encrypted on Windows, OSX, and Linux. 2 | 3 | Should a certain encryption is unavailable, exception will be raised at run-time, 4 | rather than at import time. 5 | 6 | By successfully creating and using a certain persistence object, 7 | app developer would naturally know whether the data are protected by encryption. 8 | """ 9 | import abc 10 | import os 11 | import errno 12 | import hashlib 13 | import logging 14 | import sys 15 | try: 16 | from pathlib import Path # Built-in in Python 3 17 | except ImportError: 18 | from pathlib2 import Path # An extra lib for Python 2 19 | 20 | 21 | try: 22 | ABC = abc.ABC 23 | except AttributeError: # Python 2.7, abc exists, but not ABC 24 | ABC = abc.ABCMeta("ABC", (object,), {"__slots__": ()}) # type: ignore 25 | 26 | 27 | logger = logging.getLogger(__name__) 28 | 29 | 30 | def _mkdir_p(path): 31 | """Creates a directory, and any necessary parents. 32 | 33 | If the path provided is an existing file, this function raises an exception. 34 | :param path: The directory name that should be created. 
35 | """ 36 | if not path: 37 | return # NO-OP 38 | 39 | if sys.version_info >= (3, 2): 40 | os.makedirs(path, exist_ok=True) 41 | return 42 | 43 | # This fallback implementation is based on a Stack Overflow question: 44 | # https://stackoverflow.com/questions/600268/mkdir-p-functionality-in-python 45 | # Known issue: it won't work when the path is a root folder like "C:\\" 46 | try: 47 | os.makedirs(path) 48 | except OSError as exp: 49 | if exp.errno == errno.EEXIST and os.path.isdir(path): 50 | pass 51 | else: 52 | raise 53 | 54 | def _auto_hash(input_string): 55 | return hashlib.sha256(input_string.encode('utf-8')).hexdigest() 56 | 57 | 58 | # We do not aim to wrap every os-specific exception. 59 | # Here we standardize only the most common ones, 60 | # otherwise caller would need to catch os-specific underlying exceptions. 61 | class PersistenceError(IOError): # Use IOError rather than OSError as base, 62 | """The base exception for persistence.""" 63 | # because historically an IOError was bubbled up and expected. 64 | # https://github.com/AzureAD/microsoft-authentication-extensions-for-python/blob/0.2.2/msal_extensions/token_cache.py#L38 65 | # Now we want to maintain backward compatibility even when using Python 2.x 66 | # It makes no difference in Python 3.3+ where IOError is an alias of OSError. 67 | def __init__(self, err_no=None, message=None, location=None): # pylint: disable=useless-super-delegation 68 | super(PersistenceError, self).__init__(err_no, message, location) 69 | 70 | 71 | class PersistenceNotFound(PersistenceError): 72 | """This happens when attempting BasePersistence.load() on a non-existent persistence instance""" 73 | def __init__(self, err_no=None, message=None, location=None): 74 | super(PersistenceNotFound, self).__init__( 75 | err_no=errno.ENOENT, 76 | message=message or "Persistence not found", 77 | location=location) 78 | 79 | class PersistenceEncryptionError(PersistenceError): 80 | """This could be raised by persistence.save()""" 81 | 82 | class PersistenceDecryptionError(PersistenceError): 83 | """This could be raised by persistence.load()""" 84 | 85 | 86 | def build_encrypted_persistence(location): 87 | """Build a suitable encrypted persistence instance based your current OS. 88 | 89 | If you do not need encryption, then simply use ``FilePersistence`` constructor. 90 | """ 91 | # Does not (yet?) support fallback_to_plaintext flag, 92 | # because the persistence on Windows and macOS do not support built-in trial_run(). 93 | if sys.platform.startswith('win'): 94 | return FilePersistenceWithDataProtection(location) 95 | if sys.platform.startswith('darwin'): 96 | return KeychainPersistence(location) 97 | if sys.platform.startswith('linux'): 98 | return LibsecretPersistence(location) 99 | raise RuntimeError("Unsupported platform: {}".format(sys.platform)) # pylint: disable=consider-using-f-string 100 | 101 | 102 | class BasePersistence(ABC): 103 | """An abstract persistence defining the common interface of this family""" 104 | 105 | is_encrypted = False # Default to False. To be overridden by sub-classes. 106 | 107 | @abc.abstractmethod 108 | def save(self, content): 109 | # type: (str) -> None 110 | """Save the content into this persistence""" 111 | raise NotImplementedError 112 | 113 | @abc.abstractmethod 114 | def load(self): 115 | # type: () -> str 116 | """Load content from this persistence. 117 | 118 | Could raise PersistenceNotFound if no save() was called before. 
119 | """ 120 | raise NotImplementedError 121 | 122 | @abc.abstractmethod 123 | def time_last_modified(self): 124 | """Get the last time when this persistence has been modified. 125 | 126 | Could raise PersistenceNotFound if no save() was called before. 127 | """ 128 | raise NotImplementedError 129 | 130 | @abc.abstractmethod 131 | def get_location(self): 132 | """Return the file path which this persistence stores (meta)data into""" 133 | raise NotImplementedError 134 | 135 | 136 | def _open(location): 137 | return os.open(location, os.O_RDWR | os.O_CREAT | os.O_TRUNC, 0o600) 138 | # The 600 seems no-op on NTFS/Windows, and that is fine 139 | 140 | 141 | class FilePersistence(BasePersistence): 142 | """A generic persistence, storing data in a plain-text file""" 143 | 144 | def __init__(self, location): 145 | if not location: 146 | raise ValueError("Requires a file path") 147 | self._location = os.path.expanduser(location) 148 | _mkdir_p(os.path.dirname(self._location)) 149 | 150 | def save(self, content): 151 | # type: (str) -> None 152 | """Save the content into this persistence""" 153 | with os.fdopen(_open(self._location), 'w+') as handle: 154 | handle.write(content) 155 | 156 | def load(self): 157 | # type: () -> str 158 | """Load content from this persistence""" 159 | try: 160 | with open(self._location, 'r') as handle: # pylint: disable=unspecified-encoding 161 | return handle.read() 162 | except EnvironmentError as exp: # EnvironmentError in Py 2.7 works across platform 163 | if exp.errno == errno.ENOENT: 164 | raise PersistenceNotFound( 165 | message=( 166 | "Persistence not initialized. " 167 | "You can recover by calling a save() first."), 168 | location=self._location, 169 | ) 170 | raise 171 | 172 | 173 | def time_last_modified(self): 174 | try: 175 | return os.path.getmtime(self._location) 176 | except EnvironmentError as exp: # EnvironmentError in Py 2.7 works across platform 177 | if exp.errno == errno.ENOENT: 178 | raise PersistenceNotFound( 179 | message=( 180 | "Persistence not initialized. 
" 181 | "You can recover by calling a save() first."), 182 | location=self._location, 183 | ) 184 | raise 185 | 186 | def touch(self): 187 | """To touch this file-based persistence without writing content into it""" 188 | Path(self._location).touch() # For os.path.getmtime() to work 189 | 190 | def get_location(self): 191 | return self._location 192 | 193 | 194 | class FilePersistenceWithDataProtection(FilePersistence): 195 | """A generic persistence with data stored in a file, 196 | protected by Win32 encryption APIs on Windows""" 197 | is_encrypted = True 198 | 199 | def __init__(self, location, entropy=''): 200 | """Initialization could fail due to unsatisfied dependency""" 201 | # pylint: disable=import-outside-toplevel 202 | from .windows import WindowsDataProtectionAgent 203 | self._dp_agent = WindowsDataProtectionAgent(entropy=entropy) 204 | super(FilePersistenceWithDataProtection, self).__init__(location) 205 | 206 | def save(self, content): 207 | # type: (str) -> None 208 | try: 209 | data = self._dp_agent.protect(content) 210 | except OSError as exception: 211 | raise PersistenceEncryptionError( 212 | err_no=getattr(exception, "winerror", None), # Exists in Python 3 on Windows 213 | message="Encryption failed: {} Consider disable encryption.".format(exception), 214 | ) 215 | with os.fdopen(_open(self._location), 'wb+') as handle: 216 | handle.write(data) 217 | 218 | def load(self): 219 | # type: () -> str 220 | try: 221 | with open(self._location, 'rb') as handle: 222 | data = handle.read() 223 | except EnvironmentError as exp: # EnvironmentError in Py 2.7 works across platform 224 | if exp.errno == errno.ENOENT: 225 | raise PersistenceNotFound( 226 | message=( 227 | "Persistence not initialized. " 228 | "You can recover by calling a save() first."), 229 | location=self._location, 230 | ) 231 | logger.exception( 232 | "DPAPI error likely caused by file content not previously encrypted. " 233 | "App developer should migrate by calling save(plaintext) first.") 234 | raise 235 | try: 236 | return self._dp_agent.unprotect(data) 237 | except OSError as exception: 238 | raise PersistenceDecryptionError( 239 | err_no=getattr(exception, "winerror", None), # Exists in Python 3 on Windows 240 | message="Decryption failed: {} " 241 | "App developer may consider this guidance: " 242 | "https://github.com/AzureAD/microsoft-authentication-extensions-for-python/wiki/PersistenceDecryptionError" # pylint: disable=line-too-long 243 | .format(exception), 244 | location=self._location, 245 | ) 246 | 247 | 248 | class KeychainPersistence(BasePersistence): 249 | """A generic persistence with data stored in, 250 | and protected by native Keychain libraries on OSX""" 251 | is_encrypted = True 252 | 253 | def __init__(self, signal_location, service_name=None, account_name=None): 254 | """Initialization could fail due to unsatisfied dependency. 
255 | 256 | :param signal_location: See :func:`persistence.LibsecretPersistence.__init__` 257 | """ 258 | from .osx import Keychain, KeychainError # pylint: disable=import-outside-toplevel 259 | self._file_persistence = FilePersistence(signal_location) # Favor composition 260 | self._Keychain = Keychain # pylint: disable=invalid-name 261 | self._KeychainError = KeychainError # pylint: disable=invalid-name 262 | default_service_name = "msal-extensions" # This is also our package name 263 | self._service_name = service_name or default_service_name 264 | self._account_name = account_name or _auto_hash(signal_location) 265 | 266 | def save(self, content): 267 | with self._Keychain() as locker: 268 | locker.set_generic_password( 269 | self._service_name, self._account_name, content) 270 | self._file_persistence.touch() # For time_last_modified() 271 | 272 | def load(self): 273 | with self._Keychain() as locker: 274 | try: 275 | return locker.get_generic_password( 276 | self._service_name, self._account_name) 277 | except self._KeychainError as ex: # pylint: disable=invalid-name 278 | if ex.exit_status == self._KeychainError.ITEM_NOT_FOUND: 279 | # This happens when a load() is called before a save(). 280 | # We map it into cross-platform error for unified catching. 281 | raise PersistenceNotFound( 282 | location="Service:{} Account:{}".format( # pylint: disable=consider-using-f-string 283 | self._service_name, self._account_name), 284 | message=( 285 | "Keychain persistence not initialized. " 286 | "You can recover by call a save() first."), 287 | ) 288 | raise # We do not intend to hide any other underlying exceptions 289 | 290 | def time_last_modified(self): 291 | return self._file_persistence.time_last_modified() 292 | 293 | def get_location(self): 294 | return self._file_persistence.get_location() 295 | 296 | 297 | class LibsecretPersistence(BasePersistence): 298 | """A generic persistence with data stored in, 299 | and protected by native libsecret libraries on Linux""" 300 | is_encrypted = True 301 | 302 | def __init__(self, signal_location, schema_name=None, attributes=None, **kwargs): 303 | """Initialization could fail due to unsatisfied dependency. 304 | 305 | :param string signal_location: 306 | Besides saving the real payload into encrypted storage, 307 | this class will also touch this signal file. 308 | Applications may listen a FileSystemWatcher.Changed event for reload. 309 | https://docs.microsoft.com/en-us/dotnet/api/system.io.filesystemwatcher.changed?view=netframework-4.8#remarks 310 | :param string schema_name: See :func:`libsecret.LibSecretAgent.__init__` 311 | :param dict attributes: See :func:`libsecret.LibSecretAgent.__init__` 312 | """ 313 | # pylint: disable=import-outside-toplevel 314 | from .libsecret import ( # This uncertain import is deferred till runtime 315 | LibSecretAgent, trial_run) 316 | trial_run() 317 | self._agent = LibSecretAgent( 318 | schema_name or _auto_hash(signal_location), attributes or {}, **kwargs) 319 | self._file_persistence = FilePersistence(signal_location) # Favor composition 320 | 321 | def save(self, content): 322 | if self._agent.save(content): 323 | self._file_persistence.touch() # For time_last_modified() 324 | 325 | def load(self): 326 | data = self._agent.load() 327 | if data is None: 328 | # Lower level libsecret would return None when found nothing. Here 329 | # in persistence layer, we convert it to a unified error for consistence. 330 | raise PersistenceNotFound(message=( 331 | "Keyring persistence not initialized. 
" 332 | "You can recover by call a save() first.")) 333 | return data 334 | 335 | def time_last_modified(self): 336 | return self._file_persistence.time_last_modified() 337 | 338 | def get_location(self): 339 | return self._file_persistence.get_location() 340 | 341 | # We could also have a KeyringPersistence() which can then be used together 342 | # with a FilePersistence to achieve 343 | # https://github.com/AzureAD/microsoft-authentication-extensions-for-python/issues/12 344 | # But this idea is not pursued at this time. 345 | -------------------------------------------------------------------------------- /msal_extensions/token_cache.py: -------------------------------------------------------------------------------- 1 | """Generic functions and types for working with a TokenCache that is not platform specific.""" 2 | import os 3 | import time 4 | import logging 5 | 6 | import msal 7 | 8 | try: # It needs portalocker 9 | from .cache_lock import ( # pylint: disable=unused-import 10 | CrossPlatLock, 11 | LockError, # We don't use LockError in this file, but __init__.py uses it. 12 | ) 13 | except ImportError: # Falls back to file-based lock 14 | from .filelock import CrossPlatLock, LockError # pylint: disable=unused-import 15 | from .persistence import _mkdir_p, PersistenceNotFound 16 | 17 | 18 | logger = logging.getLogger(__name__) 19 | 20 | class PersistedTokenCache(msal.SerializableTokenCache): 21 | """A token cache backed by a persistence layer, coordinated by a file lock, 22 | to sustain a certain level of multi-process concurrency for a desktop app. 23 | 24 | The scenario is that multiple instances of same desktop app 25 | (or even multiple different apps) 26 | create their own ``PersistedTokenCache`` instances, 27 | which are all backed by the same token cache file on disk 28 | (known as a persistence). The goal is to have Single Sign On (SSO). 29 | 30 | Each instance of ``PersistedTokenCache`` holds a snapshot of the token cache 31 | in memory. 32 | Each :func:`~find` call will 33 | automatically reload token cache from the persistence when necessary, 34 | so that it will have fresh data. 35 | Each :func:`~modify` call will 36 | automatically reload token cache from the persistence when necessary, 37 | so that new writes will be appended on top of latest token cache data, 38 | and then the new data will be immediately flushed back to the persistence. 39 | 40 | Note: :func:`~deserialize` and :func:`~serialize` remain the same 41 | as their counterparts in the parent class ``msal.SerializableTokenCache``. 42 | In other words, they do not have the "reload from persistence if necessary" 43 | nor the "flush back to persistence" behavior. 44 | """ 45 | 46 | def __init__(self, persistence, lock_location=None): 47 | super(PersistedTokenCache, self).__init__() 48 | self._lock_location = ( 49 | os.path.expanduser(lock_location) if lock_location 50 | else persistence.get_location() + ".lockfile") 51 | _mkdir_p(os.path.dirname(self._lock_location)) 52 | self._persistence = persistence 53 | self._last_sync = 0 # _last_sync is a Unixtime 54 | self.is_encrypted = persistence.is_encrypted 55 | 56 | def _reload_if_necessary(self): 57 | # type: () -> None 58 | """Reload cache from persistence layer, if necessary""" 59 | try: 60 | if self._last_sync < self._persistence.time_last_modified(): 61 | self.deserialize(self._persistence.load()) 62 | self._last_sync = time.time() 63 | except PersistenceNotFound: 64 | # From cache's perspective, a nonexistent persistence is a NO-OP. 
65 | pass 66 | # However, existing data unable to be decrypted will still be bubbled up. 67 | 68 | def modify(self, credential_type, old_entry, new_key_value_pairs=None): 69 | with CrossPlatLock(self._lock_location): 70 | self._reload_if_necessary() 71 | super(PersistedTokenCache, self).modify( 72 | credential_type, 73 | old_entry, 74 | new_key_value_pairs=new_key_value_pairs) 75 | self._persistence.save(self.serialize()) 76 | self._last_sync = time.time() 77 | 78 | def search(self, credential_type, **kwargs): # pylint: disable=arguments-differ 79 | # Use optimistic locking rather than CrossPlatLock(self._lock_location) 80 | retry = 3 81 | for attempt in range(1, retry + 1): 82 | try: 83 | self._reload_if_necessary() 84 | except Exception: # pylint: disable=broad-except 85 | # Presumably other processes are writing the file, causing dirty read 86 | if attempt < retry: 87 | logger.debug("Unable to load token cache file in No. %d attempt", attempt) 88 | time.sleep(0.5) 89 | else: 90 | raise # End of retry. Re-raise the exception as-is. 91 | else: # If reload encountered no error, the data is considered intact 92 | return super(PersistedTokenCache, self).search(credential_type, **kwargs) 93 | return [] # Not really reachable here. Just to keep pylint happy. 94 | 95 | -------------------------------------------------------------------------------- /msal_extensions/windows.py: -------------------------------------------------------------------------------- 1 | """Implements a Windows Specific TokenCache, and provides auxiliary helper types.""" 2 | import ctypes 3 | from ctypes import wintypes 4 | 5 | _LOCAL_FREE = ctypes.windll.kernel32.LocalFree 6 | _GET_LAST_ERROR = ctypes.windll.kernel32.GetLastError 7 | _MEMCPY = ctypes.cdll.msvcrt.memcpy 8 | _MEMCPY.argtypes = [ctypes.c_void_p, ctypes.c_void_p, ctypes.c_size_t] # Note: 9 | # Suggested by https://github.com/AzureAD/microsoft-authentication-extensions-for-python/issues/85 # pylint: disable=line-too-long 10 | # Matching https://docs.microsoft.com/en-us/cpp/c-runtime-library/reference/memcpy-wmemcpy?view=msvc-160 # pylint: disable=line-too-long 11 | _CRYPT_PROTECT_DATA = ctypes.windll.crypt32.CryptProtectData 12 | _CRYPT_UNPROTECT_DATA = ctypes.windll.crypt32.CryptUnprotectData 13 | _CRYPTPROTECT_UI_FORBIDDEN = 0x01 14 | 15 | 16 | class DataBlob(ctypes.Structure): # pylint: disable=too-few-public-methods 17 | """A wrapper for interacting with the _CRYPTOAPI_BLOB type and its many aliases. This type is 18 | exposed from Wincrypt.h in XP and above. 19 | 20 | The memory associated with a DataBlob itself does not need to be freed, as the Python runtime 21 | will correctly clean it up. However, depending on the data it points at, it may still need to be 22 | freed. For instance, memory created by ctypes.create_string_buffer is already managed, and needs 23 | to not be freed. However, memory allocated by CryptProtectData and CryptUnprotectData must have 24 | LocalFree called on pbData. 25 | 26 | See documentation for this type at: 27 | https://msdn.microsoft.com/en-us/7a06eae5-96d8-4ece-98cb-cf0710d2ddbd 28 | """ 29 | _fields_ = [("cbData", wintypes.DWORD), ("pbData", ctypes.POINTER(ctypes.c_char))] 30 | 31 | def raw(self): 32 | # type: () -> bytes 33 | """Copies the message from the DataBlob in natively allocated memory into Python controlled 34 | memory. 
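        (The copy matters because, in this module, the buffer being read was allocated natively by
        CryptProtectData or CryptUnprotectData, and it is released with LocalFree as soon as this
        method returns.)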
35 | :return A byte array that matches what is stored in native-memory.""" 36 | cb_data = int(self.cbData) 37 | pb_data = self.pbData 38 | blob_buffer = ctypes.create_string_buffer(cb_data) 39 | _MEMCPY(blob_buffer, pb_data, cb_data) 40 | return blob_buffer.raw 41 | 42 | _err_description = { 43 | # Keys came from real world observation, values came from winerror.h (http://errors (Microsoft internal)) 44 | -2146893813: "Key not valid for use in specified state.", 45 | -2146892987: "The requested operation cannot be completed. " 46 | "The computer must be trusted for delegation and " 47 | "the current user account must be configured to allow delegation. " 48 | "See also https://docs.microsoft.com/en-us/windows/security/threat-protection/security-policy-settings/enable-computer-and-user-accounts-to-be-trusted-for-delegation", 49 | 13: "The data is invalid.", 50 | } 51 | 52 | # This code is modeled from a StackOverflow question, which can be found here: 53 | # https://stackoverflow.com/questions/463832/using-dpapi-with-python 54 | class WindowsDataProtectionAgent(object): 55 | """A mechanism for interacting with the Windows DP API Native library, e.g. Crypt32.dll.""" 56 | 57 | def __init__(self, entropy=None): 58 | # type: (str) -> None 59 | self._entropy_blob = None 60 | if entropy: 61 | entropy_utf8 = entropy.encode('utf-8') 62 | blob_buffer = ctypes.create_string_buffer(entropy_utf8, len(entropy_utf8)) 63 | self._entropy_blob = DataBlob(len(entropy_utf8), blob_buffer) 64 | 65 | def protect(self, message): 66 | # type: (str) -> bytes 67 | """Encrypts a message. 68 | :return cipher text holding the original message.""" 69 | 70 | message = message.encode('utf-8') 71 | message_buffer = ctypes.create_string_buffer(message, len(message)) 72 | message_blob = DataBlob(len(message), message_buffer) 73 | result = DataBlob() 74 | 75 | if self._entropy_blob: 76 | entropy = ctypes.byref(self._entropy_blob) 77 | else: 78 | entropy = None 79 | 80 | if _CRYPT_PROTECT_DATA( 81 | ctypes.byref(message_blob), 82 | u"python_data", # pylint: disable=redundant-u-string-prefix 83 | entropy, 84 | None, 85 | None, 86 | _CRYPTPROTECT_UI_FORBIDDEN, 87 | ctypes.byref(result)): 88 | try: 89 | return result.raw() 90 | finally: 91 | _LOCAL_FREE(result.pbData) 92 | 93 | err_code = _GET_LAST_ERROR() 94 | raise OSError(None, _err_description.get(err_code, ''), None, err_code) 95 | 96 | def unprotect(self, cipher_text): 97 | # type: (bytes) -> str 98 | """Decrypts cipher text that is provided. 
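        (As is usual for DPAPI, decryption is expected to succeed only for the same Windows user
        account that protected the data, and only when the same optional entropy value is supplied.)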
99 | :return The original message hidden in the cipher text.""" 100 | ct_buffer = ctypes.create_string_buffer(cipher_text, len(cipher_text)) 101 | ct_blob = DataBlob(len(cipher_text), ct_buffer) 102 | result = DataBlob() 103 | 104 | if self._entropy_blob: 105 | entropy = ctypes.byref(self._entropy_blob) 106 | else: 107 | entropy = None 108 | 109 | if _CRYPT_UNPROTECT_DATA( 110 | ctypes.byref(ct_blob), 111 | None, 112 | entropy, 113 | None, 114 | None, 115 | _CRYPTPROTECT_UI_FORBIDDEN, 116 | ctypes.byref(result) 117 | ): 118 | try: 119 | return result.raw().decode('utf-8') 120 | finally: 121 | _LOCAL_FREE(result.pbData) 122 | err_code = _GET_LAST_ERROR() 123 | raise OSError(None, _err_description.get(err_code, ''), None, err_code) 124 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | ## Github actions/setup-python might need a requirements.txt to cache dependencies 2 | # https://github.com/actions/setup-python?tab=readme-ov-file#caching-packages-dependencies 3 | pygobject; sys_platform == 'linux' 4 | pylint 5 | tox 6 | pytest 7 | -e . 8 | -------------------------------------------------------------------------------- /sample/persistence_sample.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import json 3 | 4 | from msal_extensions import build_encrypted_persistence, FilePersistence, CrossPlatLock 5 | 6 | 7 | def build_persistence(location, fallback_to_plaintext=False): 8 | """Build a suitable persistence instance based your current OS""" 9 | # Note: This sample stores both encrypted persistence and plaintext persistence 10 | # into same location, therefore their data would likely override with each other. 11 | try: 12 | return build_encrypted_persistence(location) 13 | except: # pylint: disable=bare-except 14 | # On Linux, encryption exception will be raised during initialization. 15 | # On Windows and macOS, they won't be detected here, 16 | # but will be raised during their load() or save(). 17 | if not fallback_to_plaintext: 18 | raise 19 | logging.warning("Encryption unavailable. Opting in to plain text.") 20 | return FilePersistence(location) 21 | 22 | persistence = build_persistence("storage.bin", fallback_to_plaintext=False) 23 | print("Type of persistence: {}".format(persistence.__class__.__name__)) 24 | print("Is this persistence encrypted?", persistence.is_encrypted) 25 | 26 | data = { # It can be anything, here we demonstrate an arbitrary json object 27 | "foo": "hello world", 28 | "bar": "", 29 | "service_principle_1": "blah blah...", 30 | } 31 | 32 | with CrossPlatLock("my_another_lock.txt"): 33 | persistence.save(json.dumps(data)) 34 | assert json.loads(persistence.load()) == data 35 | 36 | -------------------------------------------------------------------------------- /sample/token_cache_sample.py: -------------------------------------------------------------------------------- 1 | import sys 2 | import logging 3 | import json 4 | 5 | from msal_extensions import build_encrypted_persistence, FilePersistence, PersistedTokenCache 6 | 7 | 8 | def build_persistence(location, fallback_to_plaintext=False): 9 | """Build a suitable persistence instance based your current OS""" 10 | # Note: This sample stores both encrypted persistence and plaintext persistence 11 | # into same location, therefore their data would likely override with each other. 
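    # One illustrative (not required) way to avoid that clash would be to give the plaintext
    # fallback its own file name below, e.g. `return FilePersistence(location + ".plaintext")`.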
12 | try: 13 | return build_encrypted_persistence(location) 14 | except: # pylint: disable=bare-except 15 | # On Linux, encryption exception will be raised during initialization. 16 | # On Windows and macOS, they won't be detected here, 17 | # but will be raised during their load() or save(). 18 | if not fallback_to_plaintext: 19 | raise 20 | logging.warning("Encryption unavailable. Opting in to plain text.") 21 | return FilePersistence(location) 22 | 23 | persistence = build_persistence("token_cache.bin") 24 | print("Type of persistence: {}".format(persistence.__class__.__name__)) 25 | print("Is this persistence encrypted?", persistence.is_encrypted) 26 | 27 | cache = PersistedTokenCache(persistence) 28 | # Now you can use it in an msal application like this: 29 | # app = msal.PublicClientApplication("my_client_id", token_cache=cache) 30 | 31 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | # https://setuptools.readthedocs.io/en/latest/setuptools.html#configuring-setup-using-setup-cfg-files 2 | 3 | [bdist_wheel] 4 | universal=0 5 | 6 | [metadata] 7 | license = MIT License 8 | project_urls = Changelog = https://github.com/AzureAD/microsoft-authentication-extensions-for-python/releases 9 | classifiers = 10 | License :: OSI Approved :: MIT License 11 | Development Status :: 5 - Production/Stable 12 | Programming Language :: Python :: 3 :: Only 13 | Programming Language :: Python :: 3 14 | Programming Language :: Python :: 3.7 15 | Programming Language :: Python :: 3.8 16 | Programming Language :: Python :: 3.9 17 | Programming Language :: Python :: 3.10 18 | Programming Language :: Python :: 3.11 19 | Programming Language :: Python :: 3.12 20 | 21 | description = Microsoft Authentication Library extensions (MSAL EX) provides a persistence API that can save your data on disk, encrypted on Windows, macOS and Linux. Concurrent data access will be coordinated by a file lock mechanism. 22 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | from setuptools import setup, find_packages 4 | import re, io 5 | 6 | __version__ = re.search( 7 | r'__version__\s*=\s*[rRfFuU]{0,2}[\'"]([^\'"]*)[\'"]', 8 | io.open('msal_extensions/__init__.py', encoding='utf_8_sig').read() 9 | ).group(1) 10 | 11 | long_description = open('README.md').read() 12 | 13 | setup( 14 | name='msal-extensions', 15 | version=__version__, 16 | packages=find_packages(exclude=["tests"]), 17 | long_description=long_description, 18 | long_description_content_type="text/markdown", 19 | package_data={'': ['LICENSE']}, 20 | python_requires=">=3.9", 21 | install_requires=[ 22 | 'msal>=1.29,<2', # Use TokenCache.search() from MSAL Python 1.29+ 23 | 24 | ## We choose to NOT define a hard dependency on this. 
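        ## (Illustrative) Linux apps that want encrypted persistence are expected to bring the
        ## dependency themselves, e.g. `pip install PyGObject`, which in turn relies on the
        ## system GObject-introspection and libsecret packages being present.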
25 | # "pygobject>=3,<4;platform_system=='Linux'", 26 | ], 27 | extras_require={ 28 | "portalocker": [ 29 | 'portalocker<4,>=1.4', 30 | ], 31 | }, 32 | tests_require=['pytest'], 33 | ) 34 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AzureAD/microsoft-authentication-extensions-for-python/5a27660e2d805c49d8f26a9ba7d4901710fc997d/tests/__init__.py -------------------------------------------------------------------------------- /tests/cache_file_generator.py: -------------------------------------------------------------------------------- 1 | """ 2 | Usage: cache_file_generator.py cache_file_path sleep_interval 3 | 4 | This is a console application which is to be used for cross-platform lock performance testing. 5 | The app will acquire lock for the cache file, log the process id and then release the lock. 6 | 7 | It takes in two arguments - cache file path and the sleep interval. 8 | The cache file path is the path of cache file. 9 | The sleep interval is the time in seconds for which the lock is held by a process. 10 | """ 11 | 12 | import logging 13 | import os 14 | import sys 15 | import time 16 | 17 | from msal_extensions import FilePersistence, CrossPlatLock, LockError 18 | 19 | 20 | print("Testing with {}".format(CrossPlatLock)) 21 | 22 | 23 | def _acquire_lock_and_write_to_cache(cache_location, sleep_interval): 24 | cache_accessor = FilePersistence(cache_location) 25 | lock_file_path = cache_accessor.get_location() + ".lockfile" 26 | try: 27 | with CrossPlatLock(lock_file_path): 28 | data = cache_accessor.load() 29 | if data is None: 30 | data = "" 31 | data += "< " + str(os.getpid()) + "\n" 32 | time.sleep(sleep_interval) 33 | data += "> " + str(os.getpid()) + "\n" 34 | cache_accessor.save(data) 35 | except LockError as e: 36 | logging.warning("Unable to acquire lock %s", e) 37 | 38 | 39 | if __name__ == "__main__": 40 | if len(sys.argv) < 3: 41 | print(__doc__) 42 | sys.exit(0) 43 | _acquire_lock_and_write_to_cache(sys.argv[1], float(sys.argv[2])) 44 | 45 | -------------------------------------------------------------------------------- /tests/http_client.py: -------------------------------------------------------------------------------- 1 | class MinimalResponse(object): # Not for production use 2 | def __init__(self, requests_resp=None, status_code=None, text=None, headers=None): 3 | self.status_code = status_code or requests_resp.status_code 4 | self.text = text if text is not None else requests_resp.text 5 | self.headers = {} if headers is None else headers 6 | self._raw_resp = requests_resp 7 | 8 | def raise_for_status(self): 9 | if self._raw_resp is not None: # Turns out `if requests.response` won't work 10 | # cause it would be True when 200<=status<400 11 | self._raw_resp.raise_for_status() 12 | 13 | -------------------------------------------------------------------------------- /tests/lock_acquire.py: -------------------------------------------------------------------------------- 1 | import sys 2 | import os 3 | import time 4 | import datetime 5 | from msal_extensions import CrossPlatLock 6 | 7 | 8 | def main(hold_time): 9 | # type: (datetime.timedelta) -> None 10 | """ 11 | Grabs a lock from a well-known file in order to test the CrossPlatLock class across processes. 12 | :param hold_time: The approximate duration that this process should hold onto the lock. 
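        (Tip: run several copies of this script at the same time to watch the processes take
        turns acquiring the lock.)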
13 |     :return: None
14 |     """
15 |     pid = os.getpid()
16 |     print('{} starting'.format(pid))
17 |     with CrossPlatLock('./delete_me.lockfile'):
18 |         print('{} has acquired the lock'.format(pid))
19 |         time.sleep(hold_time.total_seconds())
20 |         print('{} is releasing the lock'.format(pid))
21 |     print('{} done.'.format(pid))
22 | 
23 | 
24 | if __name__ == '__main__':
25 |     lock_hold_time = datetime.timedelta(seconds=5)
26 |     if len(sys.argv) > 1:  # Optionally override the default hold time from the command line
27 |         lock_hold_time = datetime.timedelta(seconds=int(sys.argv[1]))
28 |     main(lock_hold_time)
29 | 
--------------------------------------------------------------------------------
/tests/test_agnostic_backend.py:
--------------------------------------------------------------------------------
1 | import json
2 | import os
3 | import shutil
4 | import tempfile
5 | from unittest.mock import patch
6 | import sys
7 | 
8 | import msal
9 | import pytest
10 | 
11 | from msal_extensions import *
12 | from .http_client import MinimalResponse
13 | 
14 | 
15 | @pytest.fixture
16 | def temp_location():
17 |     test_folder = tempfile.mkdtemp(prefix="test_token_cache_roundtrip")
18 |     yield os.path.join(test_folder, 'token_cache.bin')
19 |     shutil.rmtree(test_folder, ignore_errors=True)
20 | 
21 | def _test_token_cache_roundtrip(persistence):
22 |     desired_scopes = ['https://graph.microsoft.com/.default']
23 |     apps = [  # Multiple apps sharing the same persistence
24 |         msal.ConfidentialClientApplication(
25 |             "fake_client_id", client_credential="fake_client_secret",
26 |             token_cache=PersistedTokenCache(persistence)) for i in range(2)]
27 |     with patch.object(apps[0].http_client, "post", return_value=MinimalResponse(
28 |             status_code=200, text=json.dumps({
29 |                 "token_type": "Bearer",
30 |                 "access_token": "app token",
31 |                 "expires_in": 3600,
32 |             }))) as mocked_post:
33 |         token1 = apps[0].acquire_token_for_client(scopes=desired_scopes)
34 |         assert token1["token_source"] == "identity_provider", "Initial token should come from IdP"
35 |         token2 = apps[1].acquire_token_for_client(scopes=desired_scopes)  # Hit token cache in MSAL 1.23+
36 |         assert token2["token_source"] == "cache", "App2 should hit cache written by app1"
37 |         assert token1['access_token'] == token2['access_token'], "Cache should hit"
38 | 
39 | def test_token_cache_roundtrip_with_persistence_builder(temp_location):
40 |     _test_token_cache_roundtrip(build_encrypted_persistence(temp_location))
41 | 
42 | def test_token_cache_roundtrip_with_file_persistence(temp_location):
43 |     _test_token_cache_roundtrip(FilePersistence(temp_location))
44 | 
45 | def test_file_not_found_error_is_not_raised():
46 |     persistence = FilePersistence('non_existing_file')
47 |     cache = PersistedTokenCache(persistence)
48 |     # An exception raised here will fail the test case as it is supposed to be a NO-OP
49 |     cache.find('')
50 | 
--------------------------------------------------------------------------------
/tests/test_cache_lock_file_perf.py:
--------------------------------------------------------------------------------
1 | import multiprocessing
2 | import os
3 | import shutil
4 | import tempfile
5 | 
6 | import pytest
7 | 
8 | from .cache_file_generator import _acquire_lock_and_write_to_cache
9 | 
10 | 
11 | @pytest.fixture
12 | def temp_location():
13 |     test_folder = tempfile.mkdtemp(prefix="test_persistence_roundtrip")
14 |     yield os.path.join(test_folder, 'persistence.bin')
15 |     shutil.rmtree(test_folder, ignore_errors=True)
16 | 
17 | 
18 | def _validate_result_in_cache(cache_location):
19 |     with open(cache_location) as handle:
20 |         data = handle.read()
21 | 
prev_process_id = None 22 | count = 0 23 | for line in data.split("\n"): 24 | if line: 25 | count += 1 26 | tag, process_id = line.split(" ") 27 | if prev_process_id is not None: 28 | assert process_id == prev_process_id, "Process overlap found" 29 | assert tag == '>', "Process overlap_found" 30 | prev_process_id = None 31 | else: 32 | assert tag == '<', "Opening bracket not found" 33 | prev_process_id = process_id 34 | return count 35 | 36 | 37 | def _run_multiple_processes(no_of_processes, cache_location, sleep_interval): 38 | open(cache_location, "w+") 39 | processes = [] 40 | for i in range(no_of_processes): 41 | process = multiprocessing.Process( 42 | target=_acquire_lock_and_write_to_cache, 43 | args=(cache_location, sleep_interval)) 44 | processes.append(process) 45 | 46 | for process in processes: 47 | process.start() 48 | 49 | for process in processes: 50 | process.join() 51 | 52 | 53 | def test_lock_for_normal_workload(temp_location): 54 | num_of_processes = 4 55 | sleep_interval = 0.1 56 | _run_multiple_processes(num_of_processes, temp_location, sleep_interval) 57 | count = _validate_result_in_cache(temp_location) 58 | assert count == num_of_processes * 2, "Should not observe starvation" 59 | 60 | 61 | def test_lock_for_high_workload(temp_location): 62 | num_of_processes = 80 63 | sleep_interval = 0 64 | _run_multiple_processes(num_of_processes, temp_location, sleep_interval) 65 | count = _validate_result_in_cache(temp_location) 66 | assert count <= num_of_processes * 2, "Starvation or not, we should not observe garbled payload" 67 | 68 | 69 | def test_lock_for_timeout(temp_location): 70 | num_of_processes = 30 71 | sleep_interval = 1 72 | _run_multiple_processes(num_of_processes, temp_location, sleep_interval) 73 | count = _validate_result_in_cache(temp_location) 74 | assert count < num_of_processes * 2, "Should observe starvation" 75 | 76 | -------------------------------------------------------------------------------- /tests/test_crossplatlock.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from msal_extensions import CrossPlatLock 3 | 4 | 5 | def test_ensure_file_deleted(): 6 | lockfile = './test_lock_1.txt' 7 | 8 | try: 9 | FileNotFoundError 10 | except NameError: 11 | FileNotFoundError = IOError 12 | 13 | print("Testing with {}".format(CrossPlatLock)) 14 | with CrossPlatLock(lockfile): 15 | pass 16 | 17 | with pytest.raises(FileNotFoundError): 18 | with open(lockfile): 19 | pass 20 | -------------------------------------------------------------------------------- /tests/test_macos_backend.py: -------------------------------------------------------------------------------- 1 | import sys 2 | import os 3 | import shutil 4 | import tempfile 5 | import pytest 6 | import uuid 7 | import msal 8 | 9 | if not sys.platform.startswith('darwin'): 10 | pytest.skip('skipping MacOS-only tests', allow_module_level=True) 11 | else: 12 | from msal_extensions.osx import Keychain 13 | from msal_extensions.token_cache import PersistedTokenCache 14 | from msal_extensions.persistence import KeychainPersistence 15 | 16 | 17 | def test_keychain_roundtrip(): 18 | with Keychain() as subject: 19 | location, account = "msal_extension_test1", "test_account1" 20 | want = uuid.uuid4().hex 21 | subject.set_generic_password(location, account, want) 22 | got = subject.get_generic_password(location, account) 23 | assert got == want 24 | 25 | 26 | def test_osx_token_cache_roundtrip(): 27 | client_id = os.getenv('AZURE_CLIENT_ID') 28 | client_secret 
= os.getenv('AZURE_CLIENT_SECRET') 29 | if not (client_id and client_secret): 30 | pytest.skip('no credentials present to test PersistedTokenCache round-trip with.') 31 | 32 | test_folder = tempfile.mkdtemp(prefix="msal_extension_test_osx_token_cache_roundtrip") 33 | cache_file = os.path.join(test_folder, 'msal.cache') 34 | try: 35 | subject = PersistedTokenCache(KeychainPersistence(cache_file)) 36 | app = msal.ConfidentialClientApplication( 37 | client_id=client_id, 38 | client_credential=client_secret, 39 | token_cache=subject) 40 | desired_scopes = ['https://graph.microsoft.com/.default'] 41 | token1 = app.acquire_token_for_client(scopes=desired_scopes) 42 | # TODO: Modify this to same approach in test_agnostic_backend.py 43 | os.utime(cache_file, None) # Mock having another process update the cache. 44 | token2 = app.acquire_token_silent(scopes=desired_scopes, account=None) 45 | assert token1['access_token'] == token2['access_token'] 46 | finally: 47 | shutil.rmtree(test_folder, ignore_errors=True) 48 | -------------------------------------------------------------------------------- /tests/test_persistence.py: -------------------------------------------------------------------------------- 1 | import os 2 | import sys 3 | import shutil 4 | import tempfile 5 | import logging 6 | 7 | import pytest 8 | 9 | from msal_extensions.persistence import * 10 | 11 | 12 | def _is_env_var_defined(env_var): 13 | return bool( # (WTF) What-The-Finding: 14 | # The bool(...) is necessary, otherwise skipif(...) would treat "true" as 15 | # string conditions and then raise an undefined "true" exception. 16 | # https://docs.pytest.org/en/latest/historical-notes.html#string-conditions 17 | os.getenv(env_var)) 18 | 19 | 20 | # Note: If you use tox, remember to pass them through via tox.ini 21 | # https://tox.wiki/en/latest/example/basic.html#passing-down-environment-variables 22 | is_running_on_github_ci = _is_env_var_defined("GITHUB_ACTIONS") 23 | 24 | @pytest.fixture 25 | def temp_location(): 26 | test_folder = tempfile.mkdtemp(prefix="test_persistence_roundtrip") 27 | yield os.path.join(test_folder, 'persistence.bin') 28 | shutil.rmtree(test_folder, ignore_errors=True) 29 | 30 | def _test_persistence_roundtrip(persistence): 31 | payload = 'arbitrary content' 32 | persistence.save(payload) 33 | assert persistence.load() == payload 34 | 35 | def _test_nonexistent_persistence(persistence): 36 | with pytest.raises(PersistenceNotFound): 37 | persistence.load() 38 | with pytest.raises(PersistenceNotFound): 39 | persistence.time_last_modified() 40 | 41 | def test_file_persistence(temp_location): 42 | _test_persistence_roundtrip(FilePersistence(temp_location)) 43 | 44 | def test_nonexistent_file_persistence(temp_location): 45 | _test_nonexistent_persistence(FilePersistence(temp_location)) 46 | 47 | @pytest.mark.skipif( 48 | not sys.platform.startswith('win'), 49 | reason="Requires Windows Desktop") 50 | def test_file_persistence_with_data_protection(temp_location): 51 | try: 52 | _test_persistence_roundtrip(FilePersistenceWithDataProtection(temp_location)) 53 | except PersistenceDecryptionError: 54 | if is_running_on_github_ci: 55 | logging.warning("DPAPI tends to fail on Windows VM. 
Run this on your desktop to double check.") 56 | else: 57 | raise 58 | 59 | @pytest.mark.skipif( 60 | not sys.platform.startswith('win'), 61 | reason="Requires Windows Desktop") 62 | def test_nonexistent_file_persistence_with_data_protection(temp_location): 63 | _test_nonexistent_persistence(FilePersistenceWithDataProtection(temp_location)) 64 | 65 | @pytest.mark.skipif( 66 | not sys.platform.startswith('darwin'), 67 | reason="Requires OSX.") 68 | def test_keychain_persistence(temp_location): 69 | _test_persistence_roundtrip(KeychainPersistence(temp_location)) 70 | 71 | @pytest.mark.skipif( 72 | not sys.platform.startswith('darwin'), 73 | reason="Requires OSX.") 74 | def test_nonexistent_keychain_persistence(temp_location): 75 | random_service_name = random_account_name = str(id(temp_location)) 76 | _test_nonexistent_persistence( 77 | KeychainPersistence(temp_location, random_service_name, random_account_name)) 78 | 79 | @pytest.mark.skipif( 80 | not sys.platform.startswith('linux'), 81 | reason="Requires Linux Desktop. Headless or SSH session won't work.") 82 | def test_libsecret_persistence(temp_location): 83 | _test_persistence_roundtrip(LibsecretPersistence(temp_location)) 84 | 85 | @pytest.mark.skipif( 86 | not sys.platform.startswith('linux'), 87 | reason="Requires Linux Desktop. Headless or SSH session won't work.") 88 | def test_nonexistent_libsecret_persistence(temp_location): 89 | random_schema_name = random_value = str(id(temp_location)) 90 | _test_nonexistent_persistence(LibsecretPersistence( 91 | temp_location, 92 | random_schema_name, 93 | {"my_attr_1": random_value, "my_attr_2": random_value}, 94 | )) 95 | 96 | -------------------------------------------------------------------------------- /tests/test_windows_backend.py: -------------------------------------------------------------------------------- 1 | import sys 2 | import os 3 | import errno 4 | import shutil 5 | import tempfile 6 | import pytest 7 | import uuid 8 | import msal 9 | 10 | if not sys.platform.startswith('win'): 11 | pytest.skip('skipping windows-only tests', allow_module_level=True) 12 | else: 13 | from msal_extensions.windows import WindowsDataProtectionAgent 14 | from msal_extensions.token_cache import PersistedTokenCache 15 | from msal_extensions.persistence import FilePersistenceWithDataProtection 16 | 17 | 18 | def test_dpapi_roundtrip_with_entropy(): 19 | subject_without_entropy = WindowsDataProtectionAgent() 20 | subject_with_entropy = WindowsDataProtectionAgent(entropy=uuid.uuid4().hex) 21 | 22 | test_cases = [ 23 | '', 24 | 'lorem ipsum', 25 | 'lorem-ipsum', 26 | '', 27 | uuid.uuid4().hex, 28 | ] 29 | 30 | for tc in test_cases: 31 | ciphered = subject_with_entropy.protect(tc) 32 | assert ciphered != tc 33 | 34 | got = subject_with_entropy.unprotect(ciphered) 35 | assert got == tc 36 | 37 | ciphered = subject_without_entropy.protect(tc) 38 | assert ciphered != tc 39 | 40 | got = subject_without_entropy.unprotect(ciphered) 41 | assert got == tc 42 | 43 | 44 | def test_read_msal_cache_direct(): 45 | """ 46 | This loads and unprotects an MSAL cache directly, only using the DataProtectionAgent. 47 | """ 48 | localappdata_location = os.getenv('LOCALAPPDATA', os.path.expanduser('~')) 49 | cache_locations = [ 50 | os.path.join(localappdata_location, '.IdentityService', 'msal.cache'), # this is where it's supposed to be 51 | os.path.join(localappdata_location, '.IdentityServices', 'msal.cache'), # There was a miscommunications about whether this was plural or not. 
52 | os.path.join(localappdata_location, 'msal.cache'), # The earliest most naive builds used this locations. 53 | ] 54 | 55 | found = False 56 | for loc in cache_locations: 57 | try: 58 | with open(loc, mode='rb') as fh: 59 | contents = fh.read() 60 | found = True 61 | 62 | break 63 | except IOError as exp: 64 | if exp.errno != errno.ENOENT: 65 | raise exp 66 | 67 | if not found: 68 | pytest.skip('could not find the msal.cache file (try logging in using MSAL)') 69 | 70 | subject = WindowsDataProtectionAgent() 71 | raw = subject.unprotect(contents) 72 | assert raw != "" 73 | 74 | cache = msal.SerializableTokenCache() 75 | cache.deserialize(raw) 76 | access_tokens = cache.find(msal.TokenCache.CredentialType.ACCESS_TOKEN) 77 | assert len(access_tokens) > 0 78 | 79 | 80 | def test_windows_token_cache_roundtrip(): 81 | client_id = os.getenv('AZURE_CLIENT_ID') 82 | client_secret = os.getenv('AZURE_CLIENT_SECRET') 83 | if not (client_id and client_secret): 84 | pytest.skip('no credentials present to test PersistedTokenCache round-trip with.') 85 | 86 | test_folder = tempfile.mkdtemp(prefix="msal_extension_test_windows_token_cache_roundtrip") 87 | cache_file = os.path.join(test_folder, 'msal.cache') 88 | try: 89 | subject = PersistedTokenCache(FilePersistenceWithDataProtection(cache_file)) 90 | app = msal.ConfidentialClientApplication( 91 | client_id=client_id, 92 | client_credential=client_secret, 93 | token_cache=subject) 94 | desired_scopes = ['https://graph.microsoft.com/.default'] 95 | token1 = app.acquire_token_for_client(scopes=desired_scopes) 96 | # TODO: Modify this to same approach in test_agnostic_backend.py 97 | os.utime(cache_file, None) # Mock having another process update the cache. 98 | token2 = app.acquire_token_silent(scopes=desired_scopes, account=None) 99 | assert token1['access_token'] == token2['access_token'] 100 | finally: 101 | shutil.rmtree(test_folder, ignore_errors=True) 102 | -------------------------------------------------------------------------------- /tox.ini: -------------------------------------------------------------------------------- 1 | [tox] 2 | envlist = py27,py35,py36,py37,py38,py39,py310,py311,py312 3 | 4 | [testenv] 5 | deps = pytest 6 | passenv = 7 | GITHUB_ACTIONS 8 | 9 | commands = 10 | {posargs:pytest --color=yes} 11 | 12 | [testenv:lint] 13 | deps = 14 | pylint 15 | commands = 16 | pylint msal_extensions 17 | 18 | --------------------------------------------------------------------------------