├── .gitignore ├── .travis.yml ├── CONTRIBUTING.rst ├── LICENSE ├── README.rst ├── endpoints_management ├── __init__.py ├── auth │ ├── __init__.py │ ├── caches.py │ ├── suppliers.py │ └── tokens.py ├── config │ ├── __init__.py │ └── service_config.py ├── control │ ├── __init__.py │ ├── caches.py │ ├── check_request.py │ ├── client.py │ ├── distribution.py │ ├── label_descriptor.py │ ├── metric_descriptor.py │ ├── metric_value.py │ ├── money.py │ ├── operation.py │ ├── path_regex.py │ ├── quota_request.py │ ├── report_request.py │ ├── service.py │ ├── signing.py │ ├── timestamp.py │ ├── vendor │ │ ├── __init__.py │ │ └── py3 │ │ │ ├── __init__.py │ │ │ └── sched.py │ └── wsgi.py └── gen │ ├── README.rst │ ├── __init__.py │ ├── servicecontrol_v1_client.py │ ├── servicecontrol_v1_messages.py │ └── servicemanagement_v1_messages.py ├── requirements.txt ├── setup.cfg ├── setup.py ├── test-requirements.txt ├── test ├── __init__.py ├── integration │ ├── __init__.py │ ├── ssl.cert │ ├── ssl.key │ └── test_auth.py ├── test_caches.py ├── test_check_request.py ├── test_client.py ├── test_distribution.py ├── test_label_descriptor.py ├── test_metric_descriptor.py ├── test_metric_value.py ├── test_money.py ├── test_operation.py ├── test_quota_request.py ├── test_report_request.py ├── test_service.py ├── test_service_config.py ├── test_signing.py ├── test_suppliers.py ├── test_timestamp.py ├── test_tokens.py ├── test_wsgi.py ├── test_wsgi_errors.py └── token_utils.py └── tox.ini /.gitignore: -------------------------------------------------------------------------------- 1 | *~ 2 | .DS_Store 3 | 4 | # https://github.com/github/gitignore/blob/master/Python.gitignore 5 | 6 | # Byte-compiled / optimized / DLL files 7 | __pycache__/ 8 | *.py[cod] 9 | *$py.class 10 | 11 | # C extensions 12 | *.so 13 | 14 | # Distribution / packaging 15 | .Python 16 | env/ 17 | build/ 18 | develop-eggs/ 19 | docs/generated 20 | dist/ 21 | downloads/ 22 | eggs/ 23 | .eggs/ 24 | lib/ 25 | lib64/ 26 | parts/ 27 | sdist/ 28 | var/ 29 | *.egg-info/ 30 | .installed.cfg 31 | *.egg 32 | 33 | # PyInstaller 34 | # Usually these files are written by a python script from a template 35 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 36 | *.manifest 37 | *.spec 38 | 39 | # Installer logs 40 | pip-log.txt 41 | pip-delete-this-directory.txt 42 | 43 | # Unit test / coverage reports 44 | htmlcov/ 45 | .tox/ 46 | .coverage 47 | .coverage.* 48 | .cache 49 | .pytest_cache 50 | nosetests.xml 51 | coverage.xml 52 | *,cover 53 | 54 | # Translations 55 | *.mo 56 | *.pot 57 | 58 | # Django stuff: 59 | *.log 60 | 61 | # Sphinx documentation 62 | docs/_build/ 63 | 64 | # PyBuilder 65 | target/ 66 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: python 2 | 3 | python: 4 | - "2.7" 5 | 6 | install: pip install tox-travis 7 | 8 | script: tox 9 | -------------------------------------------------------------------------------- /CONTRIBUTING.rst: -------------------------------------------------------------------------------- 1 | Contributing 2 | ============ 3 | 4 | Here are some guidelines for contributing to `endpoints-management-python`_. 5 | 6 | - Please **sign** one of the `Contributor License Agreements`_ below. 7 | - `File an issue`_ to notify the maintainers about what you're working on. 8 | - `Fork the repo`_; develop and `test your code changes`_; add docs. 
9 | - Make sure that your `commit messages`_ clearly describe the changes. 
10 | - `Make the pull request`_. 
11 | 
12 | .. _`Fork the repo`: https://help.github.com/articles/fork-a-repo 
13 | .. _`forking`: https://help.github.com/articles/fork-a-repo 
14 | .. _`commit messages`: http://chris.beams.io/posts/git-commit/ 
15 | 
16 | .. _`File an issue`: 
17 | 
18 | Before writing code, file an issue 
19 | ---------------------------------- 
20 | 
21 | Use the issue tracker to start the discussion. It is possible that someone else 
22 | is already working on your idea, your approach is not quite right, or that the 
23 | functionality exists already. The ticket you file in the issue tracker will be 
24 | used to hash that all out. 
25 | 
26 | Fork `endpoints-management-python` 
27 | ----------------------------------- 
28 | 
29 | We will use GitHub's mechanism for `forking`_ repositories and making pull 
30 | requests. Fork the repository, and make your changes in the forked repository. 
31 | 
32 | .. _`test your code changes`: 
33 | 
34 | Include tests 
35 | ------------- 
36 | 
37 | Be sure to add relevant tests and run them using :code:`tox` before making the pull request. 
38 | Docs will be updated automatically when we merge to `master`, but 
39 | you should also build the docs yourself via :code:`tox -e docs`, making sure that the docs build OK 
40 | and that they are readable. 
41 | 
42 | .. _`tox`: https://tox.readthedocs.org/en/latest/ 
43 | 
44 | Make the pull request 
45 | --------------------- 
46 | 
47 | Once you have made all your changes, tested, and updated the documentation, 
48 | make a pull request to move everything back into the main `endpoints-management-python`_ 
49 | repository. Be sure to reference the original issue in the pull request. 
50 | Expect some back-and-forth with regard to style and compliance with these 
51 | rules. 
52 | 
53 | Using a Development Checkout 
54 | ---------------------------- 
55 | 
56 | You'll have to create a development environment to hack on 
57 | `endpoints-management-python`_, using a Git checkout: 
58 | 
59 | - While logged into your GitHub account, navigate to the `endpoints-management-python repo`_ on GitHub. 
60 | - Fork and clone the `endpoints-management-python` repository to your GitHub account 
61 | by clicking the "Fork" button. 
62 | - Clone your fork of `endpoints-management-python` from your GitHub account to your 
63 | local computer, substituting your account username and specifying 
64 | the destination as `hack-on-endpoints-management-python`. For example: 
65 | 
66 | .. code:: bash 
67 | 
68 | cd ${HOME} 
69 | git clone git@github.com:USERNAME/endpoints-management-python.git hack-on-endpoints-management-python 
70 | cd hack-on-endpoints-management-python 
71 | 
72 | # Configure remotes such that you can pull changes from the endpoints-management-python 
73 | # repository into your local repository. 
74 | git remote add upstream https://github.com/google/endpoints-management-python 
75 | 
76 | # fetch and merge changes from upstream into master 
77 | git fetch upstream 
78 | git merge upstream/master 
79 | 
80 | 
81 | Now your local repo is set up such that you will push changes to your 
82 | GitHub repo, from which you can submit a pull request. 
83 | 
84 | - Use tox to create a development virtualenv in which `endpoints-management-python`_ is installed: 
85 | 
86 | .. code:: bash
87 | 
88 | sudo pip install tox 
89 | cd ~/hack-on-endpoints-management-python 
90 | tox -e devenv 
91 | 
92 | - This creates a tox virtualenv named `devenv` that has endpoints-management-python installed. 
93 | Activate it to use endpoints-management-python locally, e.g., from the python prompt. 
94 | 
95 | .. code:: bash 
96 | 
97 | cd ~/hack-on-endpoints-management-python 
98 | . ./.tox/devenv/bin/activate 
99 | 
100 | .. _`endpoints-management-python`: https://github.com/googleapis/endpoints-management-python 
101 | .. _`endpoints-management-python repo`: https://github.com/googleapis/endpoints-management-python 
102 | 
103 | 
104 | Running Tests 
105 | ------------- 
106 | 
107 | - To run the full set of `endpoints-management-python` tests on all platforms, install 
108 | `tox`_ into a system Python. The :code:`tox` console script will be 
109 | installed into the scripts location for that Python. While in the 
110 | `endpoints-management-python` checkout root directory (it contains :code:`tox.ini`), 
111 | invoke the `tox` console script. This will read the :code:`tox.ini` file and 
112 | execute the tests on multiple Python versions and platforms; while it runs, 
113 | it creates a virtualenv for each version/platform combination. 
114 | 
115 | .. code:: bash 
116 | 
117 | sudo pip install tox 
118 | cd ~/hack-on-endpoints-management-python 
119 | tox 
120 | 
121 | Contributor License Agreements 
122 | ------------------------------ 
123 | 
124 | Before we can accept your pull requests you'll need to sign a Contributor 
125 | License Agreement (CLA): 
126 | 
127 | - **If you are an individual writing original source code** and **you own 
128 | the intellectual property**, then you'll need to sign an 
129 | `individual CLA`_. 
130 | - **If you work for a company that wants to allow you to contribute your 
131 | work**, then you'll need to sign a `corporate CLA`_. 
132 | 
133 | You can sign these electronically (just scroll to the bottom). After that, 
134 | we'll be able to accept your pull requests. 
135 | 
136 | .. _`individual CLA`: https://developers.google.com/open-source/cla/individual 
137 | .. _`corporate CLA`: https://developers.google.com/open-source/cla/corporate 
138 | 
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Apache License 
2 | Version 2.0, January 2004 
3 | http://www.apache.org/licenses/ 
4 | 
5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 
6 | 
7 | 1. Definitions. 
8 | 
9 | "License" shall mean the terms and conditions for use, reproduction, 
10 | and distribution as defined by Sections 1 through 9 of this document. 
11 | 
12 | "Licensor" shall mean the copyright owner or entity authorized by 
13 | the copyright owner that is granting the License. 
14 | 
15 | "Legal Entity" shall mean the union of the acting entity and all 
16 | other entities that control, are controlled by, or are under common 
17 | control with that entity. For the purposes of this definition, 
18 | "control" means (i) the power, direct or indirect, to cause the 
19 | direction or management of such entity, whether by contract or 
20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 
21 | outstanding shares, or (iii) beneficial ownership of such entity. 
22 | 
23 | "You" (or "Your") shall mean an individual or Legal Entity 
24 | exercising permissions granted by this License.
25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. 
If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. 
Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 
202 | 
--------------------------------------------------------------------------------
/README.rst:
--------------------------------------------------------------------------------
1 | Google Endpoints API Management 
2 | =============================== 
3 | 
4 | 
5 | .. image:: https://travis-ci.org/cloudendpoints/endpoints-management-python.svg?branch=master 
6 | :target: https://travis-ci.org/cloudendpoints/endpoints-management-python 
7 | .. image:: https://codecov.io/gh/cloudendpoints/endpoints-management-python/branch/master/graph/badge.svg 
8 | :target: https://codecov.io/gh/cloudendpoints/endpoints-management-python 
9 | 
10 | 
11 | Google Endpoints API Management manages the 'control plane' of an API by 
12 | providing support for authentication, billing, monitoring and quota control. 
13 | 
14 | It achieves this by 
15 | 
16 | - allowing HTTP servers to control access to their APIs using the Google Service Management and Google Service Control APIs 
17 | - providing built-in, standards-compliant support for third-party authentication 
18 | - doing this with minimal performance impact via the use of advanced caching and aggregation algorithms 
19 | - making this easy to integrate via a set of `WSGI`_ middleware 
20 | 
21 | .. _`WSGI`: https://wsgi.readthedocs.io/en/latest/ 
22 | 
23 | 
24 | Example: 
25 | 
26 | .. code:: python 
27 | 
28 | >>> application = MyWsgiApp() # an existing WSGI application 
29 | >>> 
30 | >>> # the name of the controlled service 
31 | >>> service_name = 'my-service-name' 
32 | >>> 
33 | >>> # The Id of a Google Cloud project with the Service Control and Service Management 
34 | >>> # APIs enabled 
35 | >>> project_id = 'my-project-id' 
36 | >>> 
37 | >>> # wrap the app for service control 
38 | >>> from endpoints_management.control import client, wsgi 
39 | >>> control_client = client.Loaders.DEFAULT.load(service_name) 
40 | >>> control_client.start() 
41 | >>> controlled_app = wsgi.add_all(application, project_id, control_client) 
42 | >>> 
43 | >>> # now use controlled_app in place of application 
44 | >>> my_server.serve(controlled_app) 
45 | 
46 | 
47 | Installation 
48 | ------------- 
49 | 
50 | Install using `pip`_ 
51 | 
52 | .. code:: bash 
53 | 
54 | [sudo] pip install google-endpoints-api-management 
55 | 
56 | .. _`pip`: https://pip.pypa.io 
57 | 
58 | 
59 | Python Versions 
60 | --------------- 
61 | 
62 | endpoints-management-python is currently tested with Python 2.7. 
63 | 
64 | 
65 | Contributing 
66 | ------------ 
67 | 
68 | Contributions to this library are always welcome and highly encouraged. 
69 | 
70 | See the `CONTRIBUTING documentation`_ for more information on how to get started. 
71 | 
72 | .. _`CONTRIBUTING documentation`: https://github.com/cloudendpoints/endpoints-management-python/blob/master/CONTRIBUTING.rst 
73 | 
74 | 
75 | Versioning 
76 | ---------- 
77 | 
78 | This library follows `Semantic Versioning`_. 
79 | 
80 | .. _`Semantic Versioning`: http://semver.org/ 
81 | 
82 | 
83 | Details 
84 | ------- 
85 | 
86 | For detailed documentation of the modules in endpoints-management-python, please see the `DOCUMENTATION`_. 
87 | 
88 | .. _`DOCUMENTATION`: https://endpoints-management-python.readthedocs.org/ 
89 | 
90 | 
91 | License 
92 | ------- 
93 | 
94 | Apache - See `the full LICENSE`_ for more information. 
95 | 
96 | ..
_`the full LICENSE`: https://github.com/cloudendpoints/endpoints-management-python/blob/master/LICENSE 97 | -------------------------------------------------------------------------------- /endpoints_management/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright 2017 Google Inc. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | from __future__ import absolute_import 16 | 17 | import logging 18 | 19 | from . import auth, config, control, gen 20 | 21 | __version__ = '1.11.1' 22 | 23 | _logger = logging.getLogger(__name__) 24 | _logger.setLevel(logging.INFO) 25 | 26 | USER_AGENT = u'ESP' 27 | SERVICE_AGENT = u'EF_PYTHON/' + __version__ 28 | 29 | __all__ = ['auth', 'config', 'control', 'gen'] 30 | -------------------------------------------------------------------------------- /endpoints_management/auth/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright 2017 Google Inc. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | from __future__ import absolute_import 16 | 17 | from dogpile import cache 18 | 19 | from . import suppliers, tokens 20 | 21 | 22 | cache.register_backend("lru_cache", "endpoints_management.auth.caches", "LruBackend") 23 | 24 | 25 | def create_authenticator(issuers_to_provider_ids, issuer_uri_configs): 26 | key_uri_supplier = suppliers.KeyUriSupplier(issuer_uri_configs) 27 | jwks_supplier = suppliers.JwksSupplier(key_uri_supplier) 28 | return tokens.Authenticator(issuers_to_provider_ids, jwks_supplier) 29 | -------------------------------------------------------------------------------- /endpoints_management/auth/caches.py: -------------------------------------------------------------------------------- 1 | # Copyright 2017 Google Inc. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | """Defines a dogpile in-memory cache backend that supports size management.""" 16 | 17 | from __future__ import absolute_import 18 | 19 | from dogpile.cache import api 20 | import pylru 21 | 22 | 23 | class LruBackend(api.CacheBackend): 24 | """A dogpile.cache backend that uses LRU as the size management.""" 25 | 26 | def __init__(self, options): 27 | """Initializes an LruBackend. 28 | 29 | Args: 30 | options: a dictionary that contains configuration options. 31 | """ 32 | capacity = options[u"capacity"] if u"capacity" in options else 200 33 | self._cache = pylru.lrucache(capacity) 34 | 35 | def get(self, key): 36 | return self._cache[key] if key in self._cache else api.NO_VALUE 37 | 38 | def set(self, key, value): 39 | self._cache[key] = value 40 | 41 | def delete(self, key): 42 | del self._cache[key] 43 | -------------------------------------------------------------------------------- /endpoints_management/auth/suppliers.py: -------------------------------------------------------------------------------- 1 | # Copyright 2017 Google Inc. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | """Defines several suppliers that are used by the authenticator.""" 16 | 17 | from __future__ import absolute_import 18 | 19 | import datetime 20 | from dogpile import cache 21 | from jwkest import jwk 22 | import requests 23 | import ssl 24 | 25 | 26 | _HTTP_PROTOCOL_PREFIX = u"http://" 27 | _HTTPS_PROTOCOL_PREFIX = u"https://" 28 | 29 | _OPEN_ID_CONFIG_PATH = u".well-known/openid-configuration" 30 | 31 | 32 | class KeyUriSupplier(object): # pylint: disable=too-few-public-methods 33 | """A supplier that provides the `jwks_uri` for an issuer.""" 34 | 35 | def __init__(self, issuer_uri_configs): 36 | """Construct an instance of KeyUriSupplier. 37 | 38 | Args: 39 | issuer_uri_configs: a dictionary mapping from an issuer to its jwks_uri 40 | configuration. 41 | """ 42 | self._issuer_uri_configs = issuer_uri_configs 43 | 44 | def supply(self, issuer): 45 | """Supplies the `jwks_uri` for the given issuer. 46 | 47 | Args: 48 | issuer: the issuer. 49 | 50 | Returns: 51 | The `jwks_uri` that is either statically configured or retrieved via 52 | OpenId discovery. None is returned when the issuer is unknown or the 53 | OpenId discovery fails. 54 | """ 55 | issuer_uri_config = self._issuer_uri_configs.get(issuer) 56 | 57 | if not issuer_uri_config: 58 | # The issuer is unknown. 59 | return 60 | 61 | jwks_uri = issuer_uri_config.jwks_uri 62 | if jwks_uri: 63 | # When jwks_uri is set, return it directly. 64 | return jwks_uri 65 | 66 | # When jwksUri is empty, we try to retrieve it through the OpenID 67 | # discovery. 
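# Example (illustrative issuer only): for an issuer such as
# "https://example-issuer.com", _discover_jwks_uri() below fetches
# "https://example-issuer.com/.well-known/openid-configuration" (built by
# _construct_open_id_url) and uses the "jwks_uri" field of the returned
# discovery document.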
68 | open_id_valid = issuer_uri_config.open_id_valid 69 | if open_id_valid: 70 | discovered_jwks_uri = _discover_jwks_uri(issuer) 71 | self._issuer_uri_configs[issuer] = IssuerUriConfig(False, 72 | discovered_jwks_uri) 73 | return discovered_jwks_uri 74 | 75 | 76 | class JwksSupplier(object): # pylint: disable=too-few-public-methods 77 | """A supplier that returns the Json Web Token Set of an issuer.""" 78 | 79 | def __init__(self, key_uri_supplier): 80 | """Constructs an instance of JwksSupplier. 81 | 82 | Args: 83 | key_uri_supplier: a KeyUriSupplier instance that returns the `jwks_uri` 84 | based on the given issuer. 85 | """ 86 | self._key_uri_supplier = key_uri_supplier 87 | self._jwks_cache = cache.make_region().configure( 88 | u"dogpile.cache.memory", expiration_time=datetime.timedelta(minutes=5)) 89 | 90 | def supply(self, issuer): 91 | """Supplies the `Json Web Key Set` for the given issuer. 92 | 93 | Args: 94 | issuer: the issuer. 95 | 96 | Returns: 97 | The successfully retrieved Json Web Key Set. None is returned if the 98 | issuer is unknown or the retrieval process fails. 99 | 100 | Raises: 101 | UnauthenticatedException: When this method cannot supply JWKS for the 102 | given issuer (e.g. unknown issuer, HTTP request error). 103 | """ 104 | def _retrieve_jwks(): 105 | """Retrieve the JWKS from the given jwks_uri when cache misses.""" 106 | jwks_uri = self._key_uri_supplier.supply(issuer) 107 | 108 | if not jwks_uri: 109 | raise UnauthenticatedException(u"Cannot find the `jwks_uri` for issuer " 110 | u"%s: either the issuer is unknown or " 111 | u"the OpenID discovery failed" % issuer) 112 | 113 | try: 114 | response = requests.get(jwks_uri) 115 | json_response = response.json() 116 | except Exception as exception: 117 | message = u"Cannot retrieve valid verification keys from the `jwks_uri`" 118 | raise UnauthenticatedException(message, exception) 119 | 120 | if u"keys" in json_response: 121 | # De-serialize the JSON as a JWKS object. 122 | jwks_keys = jwk.KEYS() 123 | jwks_keys.load_jwks(response.text) 124 | return jwks_keys._keys 125 | else: 126 | # The JSON is a dictionary mapping from key id to X.509 certificates. 127 | # Thus we extract the public key from the X.509 certificates and 128 | # construct a JWKS object. 
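# For example (illustrative), a response of the form
# {"key-id-1": "-----BEGIN CERTIFICATE-----\n...", ...} is converted by
# _extract_x509_certificates() below into a list of jwk.RSAKey objects,
# each with its `kid` set to the corresponding dictionary key.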
129 | return _extract_x509_certificates(json_response) 130 | 131 | return self._jwks_cache.get_or_create(issuer, _retrieve_jwks) 132 | 133 | 134 | def _extract_x509_certificates(x509_certificates): 135 | keys = [] 136 | for kid, certificate in x509_certificates.iteritems(): 137 | try: 138 | if certificate.startswith(jwk.PREFIX): 139 | # The certificate is PEM-encoded 140 | der = ssl.PEM_cert_to_DER_cert(certificate) 141 | key = jwk.der2rsa(der) 142 | else: 143 | key = jwk.import_rsa_key(certificate) 144 | except Exception as exception: 145 | raise UnauthenticatedException(u"Cannot load X.509 certificate", 146 | exception) 147 | rsa_key = jwk.RSAKey().load_key(key) 148 | rsa_key.kid = kid 149 | keys.append(rsa_key) 150 | return keys 151 | 152 | 153 | def _discover_jwks_uri(issuer): 154 | open_id_url = _construct_open_id_url(issuer) 155 | try: 156 | response = requests.get(open_id_url) 157 | return response.json().get(u"jwks_uri") 158 | except Exception as error: 159 | raise UnauthenticatedException(u"Cannot discover the jwks uri", error) 160 | 161 | 162 | def _construct_open_id_url(issuer): 163 | url = issuer 164 | if (not url.startswith(_HTTP_PROTOCOL_PREFIX) and 165 | not url.startswith(_HTTPS_PROTOCOL_PREFIX)): 166 | url = _HTTPS_PROTOCOL_PREFIX + url 167 | if not url.endswith(u"/"): 168 | url += u"/" 169 | url += _OPEN_ID_CONFIG_PATH 170 | return url 171 | 172 | 173 | class IssuerUriConfig(object): 174 | """The jwks_uri configuration for an issuer. 175 | 176 | TODO (yangguan): this class should be removed after we figure out how to 177 | fetch the external configs. 178 | """ 179 | 180 | def __init__(self, open_id_valid, jwks_uri): 181 | """Create an instance of IsserUriConfig. 182 | 183 | Args: 184 | open_id_valid: indicates whether the corresponding issuer is valid for 185 | OpenId discovery. 186 | jwks_uri: is the saved jwks_uri. Its value can be None if the OpenId 187 | discovery process has not begun or has already failed. 188 | """ 189 | self._open_id_valid = open_id_valid 190 | self._jwks_uri = jwks_uri 191 | 192 | @property 193 | def open_id_valid(self): 194 | return self._open_id_valid 195 | 196 | @property 197 | def jwks_uri(self): 198 | return self._jwks_uri 199 | 200 | 201 | class UnauthenticatedException(Exception): 202 | pass 203 | -------------------------------------------------------------------------------- /endpoints_management/auth/tokens.py: -------------------------------------------------------------------------------- 1 | # Copyright 2017 Google Inc. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | """Decodes and verifies the signature of auth tokens.""" 16 | 17 | from __future__ import absolute_import 18 | 19 | import datetime 20 | import jwkest 21 | import time 22 | 23 | from dogpile import cache 24 | from jwkest import jws 25 | from jwkest import jwt 26 | 27 | from . 
import suppliers 28 | 29 | INT_TYPES = (int, long) 30 | 31 | 32 | class Authenticator(object): # pylint: disable=too-few-public-methods 33 | """Decodes and verifies the signature of auth tokens.""" 34 | 35 | def __init__(self, issuers_to_provider_ids, jwks_supplier, cache_capacity=200): 36 | """Construct an instance of AuthTokenDecoder. 37 | 38 | Args: 39 | issuers_to_provider_ids: a dictionary mapping from issuers to provider 40 | IDs defined in the service configuration. 41 | jwks_supplier: an instance of JwksSupplier that supplies JWKS based on 42 | issuer. 43 | cache_capacity: the cache_capacity with default value of 200. 44 | """ 45 | self._issuers_to_provider_ids = issuers_to_provider_ids 46 | self._jwks_supplier = jwks_supplier 47 | 48 | arguments = {u"capacity": cache_capacity} 49 | expiration_time = datetime.timedelta(minutes=5) 50 | self._cache = cache.make_region().configure(u"lru_cache", 51 | arguments=arguments, 52 | expiration_time=expiration_time) 53 | 54 | def authenticate(self, auth_token, auth_info, service_name): 55 | """Authenticates the current auth token. 56 | 57 | Args: 58 | auth_token: the auth token. 59 | auth_info: the auth configurations of the API method being called. 60 | service_name: the name of this service. 61 | 62 | Returns: 63 | A constructed UserInfo object representing the identity of the caller. 64 | 65 | Raises: 66 | UnauthenticatedException: When 67 | * the issuer is not allowed; 68 | * the audiences are not allowed; 69 | * the auth token has already expired. 70 | """ 71 | try: 72 | jwt_claims = self.get_jwt_claims(auth_token) 73 | except Exception as error: 74 | raise suppliers.UnauthenticatedException(u"Cannot decode the auth token", 75 | error) 76 | _check_jwt_claims(jwt_claims) 77 | 78 | user_info = UserInfo(jwt_claims) 79 | 80 | issuer = user_info.issuer 81 | if issuer not in self._issuers_to_provider_ids: 82 | raise suppliers.UnauthenticatedException(u"Unknown issuer: " + issuer) 83 | provider_id = self._issuers_to_provider_ids[issuer] 84 | 85 | if not auth_info.is_provider_allowed(provider_id): 86 | raise suppliers.UnauthenticatedException(u"The requested method does not " 87 | u"allow provider id: " + provider_id) 88 | 89 | # Check the audiences decoded from the auth token. The auth token is 90 | # allowed when 1) an audience is equal to the service name, or 2) at least 91 | # one audience is allowed in the method configuration. 92 | audiences = user_info.audiences 93 | has_service_name = service_name in audiences 94 | 95 | allowed_audiences = auth_info.get_allowed_audiences(provider_id) 96 | intersected_audiences = set(allowed_audiences).intersection(audiences) 97 | if not has_service_name and not intersected_audiences: 98 | raise suppliers.UnauthenticatedException(u"Audiences not allowed") 99 | 100 | return user_info 101 | 102 | def get_jwt_claims(self, auth_token): 103 | """Decodes the auth_token into JWT claims represented as a JSON object. 104 | 105 | This method first tries to look up the cache and returns the result 106 | immediately in case of a cache hit. When cache misses, the method tries to 107 | decode the given auth token, verify its signature, and check the existence 108 | of required JWT claims. When successful, the decoded JWT claims are loaded 109 | into the cache and then returned. 110 | 111 | Args: 112 | auth_token: the auth token to be decoded. 113 | 114 | Returns: 115 | The decoded JWT claims. 
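        Example (illustrative claim values only):
            claims = authenticator.get_jwt_claims(auth_token)
            # e.g. {u"iss": u"https://example-issuer.com",
            #       u"aud": u"my-service-name",
            #       u"sub": u"user-123",
            #       u"exp": 1500000000}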
116 | 117 | Raises: 118 | UnauthenticatedException: When the signature verification fails, or when 119 | required claims are missing. 120 | """ 121 | 122 | def _decode_and_verify(): 123 | jwt_claims = jwt.JWT().unpack(auth_token).payload() 124 | _verify_required_claims_exist(jwt_claims) 125 | 126 | issuer = jwt_claims[u"iss"] 127 | keys = self._jwks_supplier.supply(issuer) 128 | try: 129 | return jws.JWS().verify_compact(auth_token, keys) 130 | except (jwkest.BadSignature, jws.NoSuitableSigningKeys, 131 | jws.SignerAlgError) as exception: 132 | raise suppliers.UnauthenticatedException(u"Signature verification failed", 133 | exception) 134 | 135 | return self._cache.get_or_create(auth_token, _decode_and_verify) 136 | 137 | 138 | class UserInfo(object): 139 | """An object that holds the authentication results.""" 140 | 141 | def __init__(self, jwt_claims): 142 | audiences = jwt_claims[u"aud"] 143 | if isinstance(audiences, basestring): 144 | audiences = [audiences] 145 | self._audiences = audiences 146 | 147 | # email is not required 148 | self._email = jwt_claims[u"email"] if u"email" in jwt_claims else None 149 | self._subject_id = jwt_claims[u"sub"] 150 | self._issuer = jwt_claims[u"iss"] 151 | 152 | @property 153 | def audiences(self): 154 | return self._audiences 155 | 156 | @property 157 | def email(self): 158 | return self._email 159 | 160 | @property 161 | def subject_id(self): 162 | return self._subject_id 163 | 164 | @property 165 | def issuer(self): 166 | return self._issuer 167 | 168 | 169 | def _check_jwt_claims(jwt_claims): 170 | """Checks whether the JWT claims should be accepted. 171 | 172 | Specifically, this method checks the "exp" claim and the "nbf" claim (if 173 | present), and raises UnauthenticatedException if 1) the current time is 174 | before the time identified by the "nbf" claim, or 2) the current time is 175 | equal to or after the time identified by the "exp" claim. 176 | 177 | Args: 178 | jwt_claims: the JWT claims whose expiratio to be checked. 179 | 180 | Raises: 181 | UnauthenticatedException: When the "exp" claim is malformed or the JWT has 182 | already expired. 183 | """ 184 | current_time = time.time() 185 | 186 | expiration = jwt_claims[u"exp"] 187 | if not isinstance(expiration, INT_TYPES): 188 | raise suppliers.UnauthenticatedException(u'Malformed claim: "exp" must be an integer') 189 | if current_time >= expiration: 190 | raise suppliers.UnauthenticatedException(u"The auth token has already expired") 191 | 192 | if u"nbf" not in jwt_claims: 193 | return 194 | 195 | not_before_time = jwt_claims[u"nbf"] 196 | if not isinstance(not_before_time, INT_TYPES): 197 | raise suppliers.UnauthenticatedException(u'Malformed claim: "nbf" must be an integer') 198 | if current_time < not_before_time: 199 | raise suppliers.UnauthenticatedException(u'Current time is less than the "nbf" time') 200 | 201 | 202 | def _verify_required_claims_exist(jwt_claims): 203 | """Verifies that the required claims exist. 204 | 205 | Args: 206 | jwt_claims: the JWT claims to be verified. 207 | 208 | Raises: 209 | UnauthenticatedException: if some claim doesn't exist. 
210 | """ 211 | for claim_name in [u"aud", u"exp", u"iss", u"sub"]: 212 | if claim_name not in jwt_claims: 213 | raise suppliers.UnauthenticatedException(u'Missing "%s" claim' % claim_name) 214 | -------------------------------------------------------------------------------- /endpoints_management/config/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright 2017 Google Inc. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | from __future__ import absolute_import 16 | 17 | from .service_config import ServiceConfigException 18 | -------------------------------------------------------------------------------- /endpoints_management/config/service_config.py: -------------------------------------------------------------------------------- 1 | # Copyright 2017 Google Inc. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | """Provides a method for fetching Service Configuration from Google Service 16 | Management API.""" 17 | 18 | from __future__ import absolute_import 19 | 20 | import logging 21 | import json 22 | import os 23 | import urllib3 24 | 25 | from apitools.base.py import encoding 26 | from ..gen import servicemanagement_v1_messages as messages 27 | from oauth2client import client 28 | from urllib3.contrib import appengine 29 | 30 | 31 | _logger = logging.getLogger(__name__) 32 | 33 | _GOOGLE_API_SCOPE = u"https://www.googleapis.com/auth/cloud-platform" 34 | 35 | _SERVICE_MGMT_URL_TEMPLATE = (u"https://servicemanagement.googleapis.com" 36 | u"/v1/services/{}/configs/{}") 37 | 38 | _SERVICE_NAME_ENV_KEY = u"ENDPOINTS_SERVICE_NAME" 39 | _SERVICE_VERSION_ENV_KEY = u"ENDPOINTS_SERVICE_VERSION" 40 | 41 | 42 | class ServiceConfigException(Exception): 43 | pass 44 | 45 | 46 | def fetch_service_config(service_name=None, service_version=None): 47 | """Fetches the service config from Google Service Management API. 48 | 49 | Args: 50 | service_name: the service name. When this argument is unspecified, this 51 | method uses the value of the "SERVICE_NAME" environment variable as the 52 | service name, and raises ValueError if the environment variable is unset. 53 | service_version: the service version. When this argument is unspecified, 54 | this method uses the value of the "SERVICE_VERSION" environment variable 55 | as the service version, and raises ValueError if the environment variable 56 | is unset. 
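    Example (illustrative values, using the module-level environment
    variable keys _SERVICE_NAME_ENV_KEY and _SERVICE_VERSION_ENV_KEY
    defined above):
        os.environ[_SERVICE_NAME_ENV_KEY] = "my-service.endpoints.my-project.cloud.goog"
        os.environ[_SERVICE_VERSION_ENV_KEY] = "2017-06-01r0"
        service = fetch_service_config()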
57 | 58 | Returns: the fetched service config JSON object. 59 | 60 | Raises: 61 | ValueError: when the service name/version is neither provided as an 62 | argument or set as an environment variable; or when the fetched service 63 | config fails validation. 64 | Exception: when the Google Service Management API returns non-200 response. 65 | """ 66 | if not service_name: 67 | service_name = _get_env_var_or_raise(_SERVICE_NAME_ENV_KEY) 68 | if not service_version: 69 | service_version = _get_service_version(_SERVICE_VERSION_ENV_KEY, 70 | service_name) 71 | 72 | _logger.debug(u'Contacting Service Management API for service %s version %s', 73 | service_name, service_version) 74 | response = _make_service_config_request(service_name, service_version) 75 | _logger.debug(u'obtained service json from the management api:\n%s', response.data) 76 | service = encoding.JsonToMessage(messages.Service, response.data) 77 | _validate_service_config(service, service_name, service_version) 78 | return service 79 | 80 | 81 | def _get_access_token(): 82 | credentials = client.GoogleCredentials.get_application_default() 83 | if credentials.create_scoped_required(): 84 | credentials = credentials.create_scoped(_GOOGLE_API_SCOPE) 85 | return credentials.get_access_token().access_token 86 | 87 | 88 | def _get_http_client(): 89 | # don't use the AppEngineManager when sockets access is enabled 90 | # see https://urllib3.readthedocs.io/en/latest/reference/urllib3.contrib.html#module-urllib3.contrib.appengine 91 | if appengine.is_appengine_sandbox() and 'GAE_USE_SOCKETS_HTTPLIB' not in os.environ: 92 | return appengine.AppEngineManager() 93 | return urllib3.PoolManager() 94 | 95 | 96 | def _get_env_var_or_raise(env_variable_name): 97 | if env_variable_name not in os.environ: 98 | message_template = u'The "{}" environment variable is not set' 99 | _log_and_raise(ValueError, message_template.format(env_variable_name)) 100 | return os.environ[env_variable_name] 101 | 102 | 103 | def _make_service_config_request(service_name, service_version=''): 104 | url = _SERVICE_MGMT_URL_TEMPLATE.format(service_name, 105 | service_version).rstrip('/') 106 | 107 | http_client = _get_http_client() 108 | headers = {u"Authorization": u"Bearer {}".format(_get_access_token())} 109 | response = http_client.request(u"GET", url, headers=headers) 110 | 111 | status_code = response.status 112 | if status_code == 403: 113 | message = (u"No service '{0}' found or permission denied. 
If this is a new " 114 | u"Endpoints service, make sure you've deployed the " 115 | u"service config using gcloud.").format(service_name) 116 | _log_and_raise(ServiceConfigException, message) 117 | elif status_code == 404: 118 | message = (u"The service '{0}' was found, but no service config was " 119 | u"found for version '{1}'.").format(service_name, service_version) 120 | _log_and_raise(ServiceConfigException, message) 121 | elif status_code != 200: 122 | message_template = u"Fetching service config failed (status code {})" 123 | _log_and_raise(ServiceConfigException, message_template.format(status_code)) 124 | 125 | return response 126 | 127 | 128 | def _get_service_version(env_variable_name, service_name): 129 | service_version = os.environ.get(env_variable_name) 130 | 131 | if service_version: 132 | return service_version 133 | 134 | _logger.debug(u'Contacting Service Management API for service %s', service_name) 135 | response = _make_service_config_request(service_name) 136 | _logger.debug(u'obtained service config list from api: \n%s', response.data) 137 | 138 | services = encoding.JsonToMessage(messages.ListServiceConfigsResponse, 139 | response.data) 140 | 141 | try: 142 | _logger.debug(u"found latest service version of %s", 143 | services.serviceConfigs[0].id) 144 | return services.serviceConfigs[0].id 145 | except: 146 | # catches IndexError if no versions or anything else that would 147 | # indicate a failed reading of the response. 148 | message_template = u"Couldn't retrieve service version from environment or server" 149 | _log_and_raise(ServiceConfigException, message_template) 150 | 151 | 152 | def _validate_service_config(service, expected_service_name, 153 | expected_service_version): 154 | service_name = service.name 155 | if not service_name: 156 | _log_and_raise(ValueError, u"No service name in the service config") 157 | if service_name != expected_service_name: 158 | message_template = u"Unexpected service name in service config: {}" 159 | _log_and_raise(ValueError, message_template.format(service_name)) 160 | 161 | service_version = service.id 162 | if not service_version: 163 | _log_and_raise(ValueError, u"No service version in the service config") 164 | if service_version != expected_service_version: 165 | message_template = u"Unexpected service version in service config: {}" 166 | _log_and_raise(ValueError, message_template.format(service_version)) 167 | 168 | 169 | def _log_and_raise(exception_class, message): 170 | _logger.error(message) 171 | raise exception_class(message) 172 | -------------------------------------------------------------------------------- /endpoints_management/control/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright 2017 Google Inc. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 
14 | 15 | """Google Service Control Client""" 16 | 17 | from __future__ import absolute_import 18 | 19 | from ..gen import servicecontrol_v1_messages as sc_messages 20 | from ..gen import servicemanagement_v1_messages as sm_messages 21 | from ..gen import servicecontrol_v1_client as api_client 22 | 23 | # Alias the generated MetricKind and ValueType enums to simplify their usage 24 | # elsewhere 25 | MetricKind = sm_messages.MetricDescriptor.MetricKindValueValuesEnum 26 | ValueType = sm_messages.MetricDescriptor.ValueTypeValueValuesEnum 27 | -------------------------------------------------------------------------------- /endpoints_management/control/caches.py: -------------------------------------------------------------------------------- 1 | # Copyright 2017 Google Inc. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | """caches provide functions and classes used to support caching. 16 | 17 | caching is provide by extensions of the cache classes provided by the 18 | cachetools open-source library. 19 | 20 | :func:`create` creates a cache instance specifed by either 21 | :class:`endpoints_management.control.CheckAggregationOptions` or a 22 | :class:`endpoints_management.control.ReportAggregationOptions` 23 | 24 | """ 25 | 26 | from __future__ import absolute_import 27 | 28 | # pylint: disable=too-many-ancestors 29 | # 30 | # It affects the DequeOutTTLCache and DequeOutLRUCache which extend 31 | # cachetools.TTLCache and cachetools.LRUCache respectively. Within cachetools, 32 | # those classes each extend Cache, which itself extends DefaultMapping. It does 33 | # makes sense to have this chain of ancestors, so it's right the disable the 34 | # warning here. 35 | 36 | import collections 37 | import logging 38 | import threading 39 | from datetime import datetime, timedelta 40 | 41 | import cachetools 42 | 43 | _logger = logging.getLogger(__name__) 44 | 45 | 46 | class CheckOptions( 47 | collections.namedtuple( 48 | u'CheckOptions', 49 | [u'num_entries', 50 | u'flush_interval', 51 | u'expiration'])): 52 | """Holds values used to control report check behavior. 53 | 54 | Attributes: 55 | 56 | num_entries: the maximum number of cache entries that can be kept in 57 | the aggregation cache 58 | flush_interval (:class:`datetime.timedelta`): the maximum delta before 59 | aggregated report requests are flushed to the server. The cache 60 | entry is deleted after the flush. 61 | expiration (:class:`datetime.timedelta`): elapsed time before a cached 62 | check response should be deleted. This value should be larger than 63 | ``flush_interval``, otherwise it will be ignored, and instead a value 64 | equivalent to flush_interval + 1ms will be used. 
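    Example (illustrative values):
        options = CheckOptions(num_entries=500,
                               flush_interval=timedelta(seconds=2),
                               expiration=timedelta(seconds=4))
        check_cache = create(options)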
65 | """ 66 | # pylint: disable=too-few-public-methods 67 | DEFAULT_NUM_ENTRIES = 200 68 | DEFAULT_FLUSH_INTERVAL = timedelta(milliseconds=500) 69 | DEFAULT_EXPIRATION = timedelta(seconds=1) 70 | 71 | def __new__(cls, 72 | num_entries=DEFAULT_NUM_ENTRIES, 73 | flush_interval=DEFAULT_FLUSH_INTERVAL, 74 | expiration=DEFAULT_EXPIRATION): 75 | """Invokes the base constructor with default values.""" 76 | assert isinstance(num_entries, int), u'should be an int' 77 | assert isinstance(flush_interval, timedelta), u'should be a timedelta' 78 | assert isinstance(expiration, timedelta), u'should be a timedelta' 79 | if expiration <= flush_interval: 80 | expiration = flush_interval + timedelta(milliseconds=1) 81 | return super(cls, CheckOptions).__new__( 82 | cls, 83 | num_entries, 84 | flush_interval, 85 | expiration) 86 | 87 | 88 | class QuotaOptions( 89 | collections.namedtuple( 90 | u'QuotaOptions', 91 | [u'num_entries', 92 | u'flush_interval', 93 | u'expiration'])): 94 | """Holds values used to control report quota behavior. 95 | 96 | Attributes: 97 | 98 | num_entries: the maximum number of cache entries that can be kept in 99 | the aggregation cache 100 | flush_interval (:class:`datetime.timedelta`): the maximum delta before 101 | aggregated report requests are flushed to the server. The cache 102 | entry is deleted after the flush. 103 | expiration (:class:`datetime.timedelta`): elapsed time before a cached 104 | quota response should be deleted. This value should be larger than 105 | ``flush_interval``, otherwise it will be ignored, and instead a value 106 | equivalent to flush_interval + 1ms will be used. 107 | """ 108 | # pylint: disable=too-few-public-methods 109 | DEFAULT_NUM_ENTRIES = 1000 110 | DEFAULT_FLUSH_INTERVAL = timedelta(seconds=1) 111 | DEFAULT_EXPIRATION = timedelta(minutes=1) 112 | 113 | def __new__(cls, 114 | num_entries=DEFAULT_NUM_ENTRIES, 115 | flush_interval=DEFAULT_FLUSH_INTERVAL, 116 | expiration=DEFAULT_EXPIRATION): 117 | """Invokes the base constructor with default values.""" 118 | assert isinstance(num_entries, int), u'should be an int' 119 | assert isinstance(flush_interval, timedelta), u'should be a timedelta' 120 | assert isinstance(expiration, timedelta), u'should be a timedelta' 121 | if expiration <= flush_interval: 122 | expiration = flush_interval + timedelta(milliseconds=1) 123 | return super(cls, QuotaOptions).__new__( 124 | cls, 125 | num_entries, 126 | flush_interval, 127 | expiration) 128 | 129 | 130 | class ReportOptions( 131 | collections.namedtuple( 132 | u'ReportOptions', 133 | [u'num_entries', 134 | u'flush_interval'])): 135 | """Holds values used to control report aggregation behavior. 136 | 137 | Attributes: 138 | 139 | num_entries: the maximum number of cache entries that can be kept in 140 | the aggregation cache 141 | 142 | flush_interval (:class:`datetime.timedelta`): the maximum delta before 143 | aggregated report requests are flushed to the server. 
The cache 144 | entry is deleted after the flush 145 | """ 146 | # pylint: disable=too-few-public-methods 147 | DEFAULT_NUM_ENTRIES = 200 148 | DEFAULT_FLUSH_INTERVAL = timedelta(seconds=1) 149 | 150 | def __new__(cls, 151 | num_entries=DEFAULT_NUM_ENTRIES, 152 | flush_interval=DEFAULT_FLUSH_INTERVAL): 153 | """Invokes the base constructor with default values.""" 154 | assert isinstance(num_entries, int), u'should be an int' 155 | assert isinstance(flush_interval, timedelta), u'should be a timedelta' 156 | 157 | return super(cls, ReportOptions).__new__( 158 | cls, 159 | num_entries, 160 | flush_interval) 161 | 162 | 163 | ZERO_INTERVAL = timedelta() 164 | 165 | 166 | def create(options, timer=None, use_deque=True): 167 | """Create a cache specified by ``options`` 168 | 169 | ``options`` is an instance of either 170 | :class:`endpoints_management.control.caches.CheckOptions` or 171 | :class:`endpoints_management.control.caches.ReportOptions` 172 | 173 | The returned cache is wrapped in a :class:`LockedObject`, requiring it to 174 | be accessed in a with statement that gives synchronized access 175 | 176 | Example: 177 | >>> options = CheckOptions() 178 | >>> synced_cache = make_cache(options) 179 | >>> with synced_cache as cache: # acquire the lock 180 | ... cache['a_key'] = 'a_value' 181 | 182 | Args: 183 | options (object): an instance of either of the options classes 184 | 185 | Returns: 186 | :class:`cachetools.Cache`: the cache implementation specified by options 187 | or None: if options is ``None`` or if options.num_entries < 0 188 | 189 | Raises: 190 | ValueError: if options is not a support type 191 | 192 | """ 193 | if options is None: # no options, don't create cache 194 | return None 195 | 196 | if not isinstance(options, (CheckOptions, QuotaOptions, ReportOptions)): 197 | _logger.error(u'make_cache(): bad options %s', options) 198 | raise ValueError(u'Invalid options') 199 | 200 | if (options.num_entries <= 0): 201 | _logger.debug(u"did not create cache, options was %s", options) 202 | return None 203 | 204 | _logger.debug(u"creating a cache from %s", options) 205 | if (options.flush_interval > ZERO_INTERVAL): 206 | # options always has a flush_interval, but may have an expiration 207 | # field. If the expiration is present, use that instead of the 208 | # flush_interval for the ttl 209 | ttl = getattr(options, u'expiration', options.flush_interval) 210 | cache_cls = DequeOutTTLCache if use_deque else cachetools.TTLCache 211 | return LockedObject( 212 | cache_cls( 213 | options.num_entries, 214 | ttl=ttl.total_seconds(), 215 | timer=to_cache_timer(timer) 216 | )) 217 | 218 | cache_cls = DequeOutLRUCache if use_deque else cachetools.LRUCache 219 | return LockedObject(cache_cls(options.num_entries)) 220 | 221 | 222 | class DequeOutTTLCache(cachetools.TTLCache): 223 | """Extends ``TTLCache`` so that expired items are placed in a ``deque``.""" 224 | 225 | def __init__(self, maxsize, ttl, out_deque=None, **kw): 226 | """Constructor. 
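
        Illustrative usage (a sketch; the key, value and ttl are arbitrary):

            >>> c = DequeOutTTLCache(3, ttl=60)
            >>> c[u'a_key'] = u'a_value'
            >>> list(c.out_deque)  # nothing has expired yet
            []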
227 | 228 | Args: 229 | maxsize (int): the maximum number of entries in the queue 230 | ttl (int): the ttl for entries added to the cache 231 | out_deque :class:`collections.deque`: a `deque` in which to add items 232 | that expire from the cache 233 | **kw: the other keyword args supported by the constructor to 234 | :class:`cachetools.TTLCache` 235 | 236 | Raises: 237 | ValueError: if out_deque is not a collections.deque 238 | 239 | """ 240 | super(DequeOutTTLCache, self).__init__(maxsize, ttl, **kw) 241 | if out_deque is None: 242 | out_deque = collections.deque() 243 | elif not isinstance(out_deque, collections.deque): 244 | raise ValueError(u'out_deque should be a collections.deque') 245 | self._out_deque = out_deque 246 | self._tracking = {} 247 | 248 | def __setitem__(self, key, value, **kw): 249 | super(DequeOutTTLCache, self).__setitem__(key, value, **kw) 250 | self._tracking[key] = value 251 | 252 | @property 253 | def out_deque(self): 254 | """The :class:`collections.deque` to which expired items are added.""" 255 | self.expire() 256 | expired = {k: v for (k, v) in self._tracking.items() if self.get(k) is None} 257 | for k, v in expired.items(): 258 | del self._tracking[k] 259 | self._out_deque.append(v) 260 | return self._out_deque 261 | 262 | 263 | class DequeOutLRUCache(cachetools.LRUCache): 264 | """Extends ``LRUCache`` so that expired items are placed in a ``deque``.""" 265 | 266 | def __init__(self, maxsize, out_deque=None, **kw): 267 | """Constructor. 268 | 269 | Args: 270 | maxsize (int): the maximum number of entries in the queue 271 | out_deque :class:`collections.deque`: a `deque` in which to add items 272 | that expire from the cache 273 | **kw: the other keyword args supported by constructor to 274 | :class:`cachetools.LRUCache` 275 | 276 | Raises: 277 | ValueError: if out_deque is not a collections.deque 278 | 279 | """ 280 | super(DequeOutLRUCache, self).__init__(maxsize, **kw) 281 | if out_deque is None: 282 | out_deque = collections.deque() 283 | elif not isinstance(out_deque, collections.deque): 284 | raise ValueError(u'out_deque should be collections.deque') 285 | self._out_deque = out_deque 286 | self._tracking = {} 287 | 288 | def __setitem__(self, key, value, **kw): 289 | super(DequeOutLRUCache, self).__setitem__(key, value, **kw) 290 | self._tracking[key] = value 291 | 292 | @property 293 | def out_deque(self): 294 | """The :class:`collections.deque` to which expired items are added.""" 295 | expired = {k: v for (k, v) in self._tracking.items() if self.get(k) is None} 296 | for k, v in expired.items(): 297 | del self._tracking[k] 298 | self._out_deque.append(v) 299 | return self._out_deque 300 | 301 | 302 | class LockedObject(object): 303 | """LockedObject protects an object with a re-entrant lock. 304 | 305 | The lock is required by the context manager protocol. 306 | """ 307 | # pylint: disable=too-few-public-methods 308 | 309 | def __init__(self, obj): 310 | self._lock = threading.RLock() 311 | self._obj = obj 312 | 313 | def __enter__(self): 314 | self._lock.acquire() 315 | return self._obj 316 | 317 | def __exit__(self, _exc_type, _exc_val, _exc_tb): 318 | self._lock.release() 319 | 320 | 321 | def to_cache_timer(datetime_func): 322 | """Converts a datetime_func to a timestamp_func. 
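
    Example (illustrative):

        >>> timer = to_cache_timer(None)  # falls back to datetime.utcnow
        >>> isinstance(timer(), float)
        True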
323 | 324 | Args: 325 | datetime_func (callable[[datatime]]): a func that returns the current 326 | time 327 | 328 | Returns: 329 | time_func (callable[[timestamp]): a func that returns the timestamp 330 | from the epoch 331 | """ 332 | if datetime_func is None: 333 | datetime_func = datetime.utcnow 334 | 335 | def _timer(): 336 | """Return the timestamp since the epoch.""" 337 | return (datetime_func() - datetime(1970, 1, 1)).total_seconds() 338 | 339 | return _timer 340 | -------------------------------------------------------------------------------- /endpoints_management/control/label_descriptor.py: -------------------------------------------------------------------------------- 1 | # Copyright 2017 Google Inc. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | """label_descriptor provides funcs for working with `LabelDescriptor` instances. 16 | 17 | :class:`KnownLabels` is an :class:`enum.Enum` that defines the list of known 18 | `LabelDescriptor` instances. Each enum instance has several fields 19 | 20 | - label_name: the name used in the label 21 | - kind: indicates whether the label is system or user label 22 | - value_type: the value type used in the label 23 | - update_label_func: a function to update the labels 24 | 25 | """ 26 | 27 | from __future__ import absolute_import 28 | 29 | import base64 30 | from enum import Enum 31 | from . import sm_messages 32 | from .. 
import USER_AGENT, SERVICE_AGENT 33 | 34 | ValueType = sm_messages.LabelDescriptor.ValueTypeValueValuesEnum 35 | 36 | 37 | class Kind(Enum): 38 | """Enumerates the known labels.""" 39 | # pylint: disable=too-few-public-methods 40 | USER = 0 41 | SYSTEM = 1 42 | 43 | 44 | _CANONICAL_CODES = { 45 | 200: 0, # OK 46 | 400: 3, # INVALID_ARGUMENT 47 | 401: 16, # UNAUTHENTICATED 48 | 403: 7, # PERMISSION_DENIED 49 | 404: 5, # NOT_FOUND 50 | 409: 10, # ABORTED 51 | 412: 9, # FAILED_PRECONDITION 52 | 416: 11, # OUT_OF_RANGE 53 | 429: 8, # RESOURCE_EXHAUSTED 54 | 499: 1, # CANCELLED 55 | 500: 13, # INTERNAL, UNKNOWN 56 | 504: 4, # DEADLINE_EXCEEDED 57 | 501: 12, # UNIMPLEMENTED 58 | 503: 14, # UNAVAILABLE 59 | } 60 | 61 | 62 | def _canonical_code(http_code): 63 | mapped_code = _CANONICAL_CODES.get(http_code, 0) 64 | if mapped_code != 0: 65 | return mapped_code 66 | elif 200 <= http_code < 300: 67 | return 0 # OK 68 | elif 400 <= http_code < 500: 69 | return 9 # failed precondition 70 | elif 500 <= http_code < 600: 71 | return 13 # internal 72 | else: 73 | return 2 # unknown 74 | 75 | 76 | def set_credential_id(name, info, labels): 77 | # The rule to set /credential_id is: 78 | # 1) If api_key is available, set it as apiKey:API-KEY 79 | # 2) If auth issuer and audience both are available, set it as: 80 | # jwtAuth:issuer=base64(issuer)&audience=base64(audience) 81 | if info.api_key: 82 | labels[name] = b'apiKey:' + info.api_key.encode('utf-8') 83 | elif info.auth_issuer: 84 | value = b'jwtAuth:issuer=' + base64.urlsafe_b64encode(info.auth_issuer.encode('utf-8')) 85 | if info.auth_audience: 86 | value += b'&audience=' + base64.urlsafe_b64encode(info.auth_audience.encode('utf-8')) 87 | labels[name] = value 88 | 89 | 90 | _ERROR_TYPES = tuple(u'%dxx' % (x,) for x in range(10)) 91 | 92 | 93 | def set_error_type(name, info, labels): 94 | if info.response_code > 0: 95 | code = (info.response_code // 100) % 10 96 | if code < len(_ERROR_TYPES): 97 | labels[name] = _ERROR_TYPES[code] 98 | 99 | 100 | def set_protocol(name, info, labels): 101 | labels[name] = info.protocol.name 102 | 103 | 104 | def set_referer(name, info, labels): 105 | if info.referer: 106 | labels[name] = info.referer 107 | 108 | 109 | def set_response_code(name, info, labels): 110 | labels[name] = u'%d' % (info.response_code,) 111 | 112 | 113 | def set_response_code_class(name, info, labels): 114 | if info.response_code > 0: 115 | code = (info.response_code // 100) % 10 116 | if code < len(_ERROR_TYPES): 117 | labels[name] = _ERROR_TYPES[code] 118 | 119 | 120 | def set_status_code(name, info, labels): 121 | if info.response_code > 0: 122 | labels[name] = u'%d' % (_canonical_code(info.response_code),) 123 | 124 | 125 | def set_location(name, info, labels): 126 | if info.location: 127 | labels[name] = info.location 128 | 129 | 130 | def set_api_method(name, info, labels): 131 | if info.api_method: 132 | labels[name] = info.api_method 133 | 134 | 135 | def set_api_version(name, info, labels): 136 | if info.api_version: 137 | labels[name] = info.api_version 138 | 139 | 140 | def set_platform(name, info, labels): 141 | labels[name] = info.platform.name 142 | 143 | 144 | def set_service_agent(name, dummy_info, labels): 145 | labels[name] = SERVICE_AGENT 146 | 147 | 148 | def set_user_agent(name, dummy_info, labels): 149 | labels[name] = USER_AGENT 150 | 151 | 152 | def set_consumer_project(name, info, labels): 153 | if info.consumer_project_number > 0: 154 | labels[name] = unicode(info.consumer_project_number) 155 | 156 | 157 | class 
KnownLabels(Enum): 158 | """Enumerates the known labels.""" 159 | 160 | CREDENTIAL_ID = ( 161 | u'/credential_id', ValueType.STRING, Kind.USER, set_credential_id) 162 | END_USER = (u'/end_user', ValueType.STRING, Kind.USER, None) 163 | END_USER_COUNTRY = (u'/end_user_country', ValueType.STRING, Kind.USER, None) 164 | ERROR_TYPE = (u'/error_type', ValueType.STRING, Kind.USER, 165 | set_error_type) 166 | PROTOCOL = (u'/protocol', ValueType.STRING, Kind.USER, 167 | set_protocol) 168 | REFERER = (u'/referer', ValueType.STRING, Kind.USER, 169 | set_referer) 170 | RESPONSE_CODE = (u'/response_code', ValueType.STRING, Kind.USER, 171 | set_response_code) 172 | RESPONSE_CODE_CLASS = (u'/response_code_class', ValueType.STRING, Kind.USER, 173 | set_response_code_class) 174 | STATUS_CODE = (u'/status_code', ValueType.STRING, Kind.USER, 175 | set_status_code) 176 | GAE_CLONE_ID = ( 177 | u'appengine.googleapis.com/clone_id', ValueType.STRING, Kind.USER, None) 178 | GAE_MODULE_ID = ( 179 | u'appengine.googleapis.com/module_id', ValueType.STRING, Kind.USER, None) 180 | GAE_REPLICA_INDEX = ( 181 | u'appengine.googleapis.com/replica_index', ValueType.STRING, Kind.USER, 182 | None) 183 | GAE_VERSION_ID = ( 184 | u'appengine.googleapis.com/version_id', ValueType.STRING, Kind.USER, None) 185 | GCP_LOCATION = ( 186 | u'cloud.googleapis.com/location', ValueType.STRING, Kind.SYSTEM, 187 | set_location) 188 | GCP_PROJECT = ( 189 | u'cloud.googleapis.com/project', ValueType.STRING, Kind.SYSTEM, None) 190 | GCP_REGION = ( 191 | u'cloud.googleapis.com/region', ValueType.STRING, Kind.SYSTEM, None) 192 | GCP_RESOURCE_ID = ( 193 | u'cloud.googleapis.com/resource_id', ValueType.STRING, Kind.USER, None) 194 | GCP_RESOURCE_TYPE = ( 195 | u'cloud.googleapis.com/resource_type', ValueType.STRING, Kind.USER, None) 196 | GCP_SERVICE = ( 197 | u'cloud.googleapis.com/service', ValueType.STRING, Kind.SYSTEM, None) 198 | GCP_ZONE = ( 199 | u'cloud.googleapis.com/zone', ValueType.STRING, Kind.SYSTEM, None) 200 | GCP_UID = ( 201 | u'cloud.googleapis.com/uid', ValueType.STRING, Kind.SYSTEM, None) 202 | GCP_API_METHOD = ( 203 | u'serviceruntime.googleapis.com/api_method', ValueType.STRING, Kind.USER, 204 | set_api_method) 205 | GCP_API_VERSION = ( 206 | u'serviceruntime.googleapis.com/api_version', ValueType.STRING, Kind.USER, 207 | set_api_version) 208 | SCC_ANDROID_CERT_FINGERPRINT = ( 209 | 'servicecontrol.googleapis.com/android_cert_fingerprint', ValueType.STRING, Kind.SYSTEM, None) 210 | SCC_ANDROID_PACKAGE_NAME = ( 211 | 'servicecontrol.googleapis.com/android_package_name', ValueType.STRING, Kind.SYSTEM, None) 212 | SCC_CALLER_IP = ( 213 | u'servicecontrol.googleapis.com/caller_ip', ValueType.STRING, Kind.SYSTEM, None) 214 | SCC_IOS_BUNDLE_ID = ( 215 | u'servicecontrol.googleapis.com/ios_bundle_id', ValueType.STRING, Kind.SYSTEM, None) 216 | SCC_PLATFORM = ( 217 | u'servicecontrol.googleapis.com/platform', ValueType.STRING, Kind.SYSTEM, 218 | set_platform) 219 | SCC_REFERER = ( 220 | u'servicecontrol.googleapis.com/referer', ValueType.STRING, Kind.SYSTEM, None) 221 | SCC_SERVICE_AGENT = ( 222 | u'servicecontrol.googleapis.com/service_agent', ValueType.STRING, Kind.SYSTEM, 223 | set_service_agent) 224 | SCC_USER_AGENT = ( 225 | u'servicecontrol.googleapis.com/user_agent', ValueType.STRING, Kind.SYSTEM, 226 | set_user_agent) 227 | SCC_CONSUMER_PROJECT = ( 228 | u'serviceruntime.googleapis.com/consumer_project', ValueType.STRING, Kind.SYSTEM, 229 | set_consumer_project) 230 | 231 | def __init__(self, label_name, value_type, kind, 
update_label_func): 232 | """Constructor. 233 | 234 | update_label_func is used when updating a label in an `Operation` from a 235 | `ReportRequestInfo`. 236 | 237 | Args: 238 | label_name (str): the name of the label descriptor 239 | value_type (:class:`ValueType`): the `value type` of the described metric 240 | kind (:class:`Kind`): the ``kind`` of the described metric 241 | update_op_func (function): the func to update an operation 242 | 243 | """ 244 | self.label_name = label_name 245 | self.kind = kind 246 | self.update_label_func = update_label_func 247 | self.value_type = value_type 248 | 249 | def matches(self, desc): 250 | """Determines if a given label descriptor matches this enum instance 251 | 252 | Args: 253 | desc (:class:`endpoints_management.gen.servicemanagement_v1_messages.LabelDescriptor`): 254 | the instance to test 255 | 256 | Return: 257 | `True` if desc is supported, otherwise `False` 258 | 259 | """ 260 | desc_value_type = desc.valueType or ValueType.STRING # default not parsed 261 | return (self.label_name == desc.key and 262 | self.value_type == desc_value_type) 263 | 264 | def do_labels_update(self, info, labels): 265 | """Updates a dictionary of labels using the assigned update_op_func 266 | 267 | Args: 268 | info (:class:`endpoints_management.control.report_request.Info`): the 269 | info instance to update 270 | labels (dict[string[string]]): the labels dictionary 271 | 272 | Return: 273 | `True` if desc is supported, otherwise `False` 274 | 275 | """ 276 | if self.update_label_func: 277 | self.update_label_func(self.label_name, info, labels) 278 | 279 | @classmethod 280 | def is_supported(cls, desc): 281 | """Determines if the given label descriptor is supported. 282 | 283 | Args: 284 | desc (:class:`endpoints_management.gen.servicemanagement_v1_messages.LabelDescriptor`): 285 | the label descriptor to test 286 | 287 | Return: 288 | `True` if desc is supported, otherwise `False` 289 | 290 | """ 291 | for l in cls: 292 | if l.matches(desc): 293 | return True 294 | return False 295 | -------------------------------------------------------------------------------- /endpoints_management/control/metric_descriptor.py: -------------------------------------------------------------------------------- 1 | # Copyright 2017 Google Inc. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | """metric_descriptor provides funcs for working with `MetricDescriptor` instances. 16 | 17 | :class:`KnownMetrics` is an :class:`enum.Enum` that defines the list of known 18 | `MetricDescriptor` instances. It is a complex enumeration that includes various 19 | attributes including 20 | 21 | - the full metric name 22 | - the kind of the metric 23 | - the value type of the metric 24 | - a func for updating :class:`Operation`s from a `ReportRequestInfo` 25 | 26 | """ 27 | 28 | from __future__ import absolute_import 29 | 30 | 31 | from enum import Enum 32 | from . 
import distribution, metric_value, sc_messages, MetricKind, ValueType 33 | 34 | 35 | def _add_metric_value(name, value, an_op): 36 | an_op.metricValueSets.append( 37 | sc_messages.MetricValueSet(metricName=name, metricValues=[value])) 38 | 39 | 40 | def _add_int64_metric_value(name, value, an_op): 41 | _add_metric_value( 42 | name, metric_value.create(int64Value=value), an_op) 43 | 44 | 45 | def _set_int64_metric_to_constant_1(name, dummy_info, op): 46 | _add_int64_metric_value(name, 1, op) 47 | 48 | 49 | def _set_int64_metric_to_constant_1_if_http_error(name, info, op): 50 | if info.response_code >= 400: 51 | _add_int64_metric_value(name, 1, op) 52 | 53 | 54 | def _add_distribution_metric_value(name, value, an_op, distribution_args): 55 | d = distribution.create_exponential(*distribution_args) 56 | distribution.add_sample(value, d) 57 | _add_metric_value( 58 | name, metric_value.create(distributionValue=d), an_op) 59 | 60 | 61 | _SIZE_DISTRIBUTION_ARGS = (8, 10.0, 1.0) 62 | 63 | 64 | def _set_distribution_metric_to_request_size(name, info, an_op): 65 | if info.request_size >= 0: 66 | _add_distribution_metric_value(name, info.request_size, an_op, 67 | _SIZE_DISTRIBUTION_ARGS) 68 | 69 | 70 | def _set_distribution_metric_to_response_size(name, info, an_op): 71 | if info.response_size >= 0: 72 | _add_distribution_metric_value(name, info.response_size, an_op, 73 | _SIZE_DISTRIBUTION_ARGS) 74 | 75 | 76 | _TIME_DISTRIBUTION_ARGS = (8, 10.0, 1e-6) 77 | 78 | 79 | def _set_distribution_metric_to_request_time(name, info, an_op): 80 | if info.request_time: 81 | _add_distribution_metric_value(name, info.request_time.total_seconds(), 82 | an_op, _TIME_DISTRIBUTION_ARGS) 83 | 84 | 85 | def _set_distribution_metric_to_backend_time(name, info, an_op): 86 | if info.backend_time: 87 | _add_distribution_metric_value(name, info.backend_time.total_seconds(), 88 | an_op, _TIME_DISTRIBUTION_ARGS) 89 | 90 | 91 | def _set_distribution_metric_to_overhead_time(name, info, an_op): 92 | if info.overhead_time: 93 | _add_distribution_metric_value(name, info.overhead_time.total_seconds(), 94 | an_op, _TIME_DISTRIBUTION_ARGS) 95 | 96 | 97 | class Mark(Enum): 98 | """Enumerates the types of metric.""" 99 | PRODUCER = 1 100 | CONSUMER = 2 101 | PRODUCER_BY_CONSUMER = 3 102 | 103 | 104 | class KnownMetrics(Enum): 105 | """Enumerates the known metrics.""" 106 | 107 | CONSUMER_REQUEST_COUNT = ( 108 | u'serviceruntime.googleapis.com/api/consumer/request_count', 109 | MetricKind.DELTA, 110 | ValueType.INT64, 111 | _set_int64_metric_to_constant_1, 112 | Mark.CONSUMER, 113 | ) 114 | PRODUCER_REQUEST_COUNT = ( 115 | u'serviceruntime.googleapis.com/api/producer/request_count', 116 | MetricKind.DELTA, 117 | ValueType.INT64, 118 | _set_int64_metric_to_constant_1, 119 | ) 120 | PRODUCER_BY_CONSUMER_REQUEST_COUNT = ( 121 | u'serviceruntime.googleapis.com/api/producer/by_consumer/request_count', 122 | MetricKind.DELTA, 123 | ValueType.INT64, 124 | _set_int64_metric_to_constant_1, 125 | Mark.PRODUCER_BY_CONSUMER, 126 | ) 127 | CONSUMER_REQUEST_SIZES = ( 128 | u'serviceruntime.googleapis.com/api/consumer/request_sizes', 129 | MetricKind.DELTA, 130 | ValueType.DISTRIBUTION, 131 | _set_distribution_metric_to_request_size, 132 | Mark.CONSUMER, 133 | ) 134 | PRODUCER_REQUEST_SIZES = ( 135 | u'serviceruntime.googleapis.com/api/producer/request_sizes', 136 | MetricKind.DELTA, 137 | ValueType.DISTRIBUTION, 138 | _set_distribution_metric_to_request_size, 139 | ) 140 | PRODUCER_BY_CONSUMER_REQUEST_SIZES = ( 141 | 
'serviceruntime.googleapis.com/api/producer/by_consumer/request_sizes', 142 | MetricKind.DELTA, 143 | ValueType.DISTRIBUTION, 144 | _set_distribution_metric_to_request_size, 145 | Mark.PRODUCER_BY_CONSUMER, 146 | ) 147 | CONSUMER_RESPONSE_SIZES = ( 148 | u'serviceruntime.googleapis.com/api/consumer/response_sizes', 149 | MetricKind.DELTA, 150 | ValueType.DISTRIBUTION, 151 | _set_distribution_metric_to_response_size, 152 | Mark.CONSUMER, 153 | ) 154 | PRODUCER_RESPONSE_SIZES = ( 155 | u'serviceruntime.googleapis.com/api/producer/response_sizes', 156 | MetricKind.DELTA, 157 | ValueType.DISTRIBUTION, 158 | _set_distribution_metric_to_response_size, 159 | ) 160 | PRODUCER_BY_CONSUMER_RESPONSE_SIZES = ( 161 | 'serviceruntime.googleapis.com/api/producer/by_consumer/response_sizes', 162 | MetricKind.DELTA, 163 | ValueType.DISTRIBUTION, 164 | _set_distribution_metric_to_response_size, 165 | Mark.PRODUCER_BY_CONSUMER, 166 | ) 167 | CONSUMER_ERROR_COUNT = ( 168 | u'serviceruntime.googleapis.com/api/consumer/error_count', 169 | MetricKind.DELTA, 170 | ValueType.INT64, 171 | _set_int64_metric_to_constant_1_if_http_error, 172 | Mark.CONSUMER, 173 | ) 174 | PRODUCER_ERROR_COUNT = ( 175 | u'serviceruntime.googleapis.com/api/producer/error_count', 176 | MetricKind.DELTA, 177 | ValueType.INT64, 178 | _set_int64_metric_to_constant_1_if_http_error, 179 | ) 180 | PRODUCER_BY_CONSUMER_ERROR_COUNT = ( 181 | 'serviceruntime.googleapis.com/api/producer/by_consumer/error_count', 182 | MetricKind.DELTA, 183 | ValueType.INT64, 184 | _set_int64_metric_to_constant_1_if_http_error, 185 | Mark.PRODUCER_BY_CONSUMER, 186 | ) 187 | CONSUMER_TOTAL_LATENCIES = ( 188 | u'serviceruntime.googleapis.com/api/consumer/total_latencies', 189 | MetricKind.DELTA, 190 | ValueType.DISTRIBUTION, 191 | _set_distribution_metric_to_request_time, 192 | Mark.CONSUMER, 193 | ) 194 | PRODUCER_TOTAL_LATENCIES = ( 195 | u'serviceruntime.googleapis.com/api/producer/total_latencies', 196 | MetricKind.DELTA, 197 | ValueType.DISTRIBUTION, 198 | _set_distribution_metric_to_request_time, 199 | ) 200 | PRODUCER_BY_CONSUMER_TOTAL_LATENCIES = ( 201 | 'serviceruntime.googleapis.com/api/producer/by_consumer/' 202 | 'total_latencies', 203 | MetricKind.DELTA, 204 | ValueType.DISTRIBUTION, 205 | _set_distribution_metric_to_request_time, 206 | Mark.PRODUCER_BY_CONSUMER, 207 | ) 208 | CONSUMER_BACKEND_LATENCIES = ( 209 | u'serviceruntime.googleapis.com/api/consumer/backend_latencies', 210 | MetricKind.DELTA, 211 | ValueType.DISTRIBUTION, 212 | _set_distribution_metric_to_backend_time, 213 | Mark.CONSUMER, 214 | ) 215 | PRODUCER_BACKEND_LATENCIES = ( 216 | u'serviceruntime.googleapis.com/api/producer/backend_latencies', 217 | MetricKind.DELTA, 218 | ValueType.DISTRIBUTION, 219 | _set_distribution_metric_to_backend_time, 220 | ) 221 | PRODUCER_BY_CONSUMER_BACKEND_LATENCIES = ( 222 | 'serviceruntime.googleapis.com/api/producer/by_consumer/' 223 | 'backend_latencies', 224 | MetricKind.DELTA, 225 | ValueType.DISTRIBUTION, 226 | _set_distribution_metric_to_backend_time, 227 | Mark.PRODUCER_BY_CONSUMER, 228 | ) 229 | CONSUMER_REQUEST_OVERHEAD_LATENCIES = ( 230 | u'serviceruntime.googleapis.com/api/consumer/request_overhead_latencies', 231 | MetricKind.DELTA, 232 | ValueType.DISTRIBUTION, 233 | _set_distribution_metric_to_overhead_time, 234 | Mark.CONSUMER, 235 | ) 236 | PRODUCER_REQUEST_OVERHEAD_LATENCIES = ( 237 | u'serviceruntime.googleapis.com/api/producer/request_overhead_latencies', 238 | MetricKind.DELTA, 239 | ValueType.DISTRIBUTION, 240 | 
_set_distribution_metric_to_overhead_time, 241 | ) 242 | PRODUCER_BY_CONSUMER_REQUEST_OVERHEAD_LATENCIES = ( 243 | 'serviceruntime.googleapis.com/api/producer/by_consumer/' 244 | 'request_overhead_latencies', 245 | MetricKind.DELTA, 246 | ValueType.DISTRIBUTION, 247 | _set_distribution_metric_to_overhead_time, 248 | Mark.PRODUCER_BY_CONSUMER, 249 | ) 250 | 251 | def __init__(self, metric_name, kind, value_type, update_op_func, 252 | mark=Mark.PRODUCER): 253 | """Constructor. 254 | 255 | update_op_func is used to when updating an `Operation` from a 256 | `ReportRequestInfo`. 257 | 258 | Args: 259 | metric_name (str): the name of the metric descriptor 260 | kind (:class:`MetricKind`): the ``kind`` of the described metric 261 | value_type (:class:`ValueType`): the `value type` of the described metric 262 | update_op_func (function): the func to update an operation 263 | 264 | """ 265 | self.kind = kind 266 | self.metric_name = metric_name 267 | if mark is Mark.CONSUMER: 268 | self.update_op_func = self._consumer_metric(update_op_func) 269 | elif mark is Mark.PRODUCER_BY_CONSUMER: 270 | self.update_op_func = self._by_consumer_metric(update_op_func) 271 | else: 272 | self.update_op_func = update_op_func 273 | self.value_type = value_type 274 | self.mark = mark 275 | 276 | def matches(self, desc): 277 | """Determines if a given metric descriptor matches this enum instance 278 | 279 | Args: 280 | desc (:class:`endpoints_management.gen.servicecontrol_v1_messages.MetricDescriptor`): the 281 | instance to test 282 | 283 | Return: 284 | `True` if desc is supported, otherwise `False` 285 | 286 | """ 287 | return (self.metric_name == desc.name and 288 | self.kind == desc.metricKind and 289 | self.value_type == desc.valueType) 290 | 291 | def do_operation_update(self, info, an_op): 292 | """Updates an operation using the assigned update_op_func 293 | 294 | Args: 295 | info: (:class:`endpoints_management.control.report_request.Info`): the 296 | info instance to update 297 | an_op: (:class:`endpoints_management.control.report_request.Info`): 298 | the info instance to update 299 | 300 | Return: 301 | `True` if desc is supported, otherwise `False` 302 | 303 | """ 304 | self.update_op_func(self.metric_name, info, an_op) 305 | 306 | def _consumer_metric(self, update_op_func): 307 | def resulting_updater(metric_name, info, an_op): 308 | if info.api_key_valid: 309 | update_op_func(metric_name, info, an_op) 310 | 311 | return resulting_updater 312 | 313 | def _by_consumer_metric(self, update_op_func): 314 | def resulting_updater(metric_name, info, an_op): 315 | if info.consumer_project_number > 0: 316 | update_op_func(metric_name, info, an_op) 317 | 318 | return resulting_updater 319 | 320 | @classmethod 321 | def is_supported(cls, desc): 322 | """Determines if the given metric descriptor is supported. 323 | 324 | Args: 325 | desc (:class:`endpoints_management.gen.servicecontrol_v1_messages.MetricDescriptor`): the 326 | metric descriptor to test 327 | 328 | Return: 329 | `True` if desc is supported, otherwise `False` 330 | 331 | """ 332 | for m in cls: 333 | if m.matches(desc): 334 | return True 335 | return False 336 | -------------------------------------------------------------------------------- /endpoints_management/control/metric_value.py: -------------------------------------------------------------------------------- 1 | # Copyright 2017 Google Inc. All Rights Reserved. 
2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | """metric values provides funcs using to aggregate `MetricValue`. 16 | 17 | :func:`merge` merges two `MetricValue` instances. 18 | :func:`update_hash` adds a `MetricValue` to a secure hash 19 | :func:`sign` generates a signature for a `MetricValue` using a secure hash 20 | 21 | """ 22 | 23 | from __future__ import absolute_import 24 | 25 | import hashlib 26 | import logging 27 | 28 | from apitools.base.py import encoding 29 | 30 | from . import distribution, money, signing, timestamp, MetricKind 31 | from ..gen.servicecontrol_v1_messages import MetricValue 32 | 33 | 34 | _logger = logging.getLogger(__name__) 35 | 36 | 37 | def create(labels=None, **kw): 38 | """Constructs a new metric value. 39 | 40 | This acts as an alternate to MetricValue constructor which 41 | simplifies specification of labels. Rather than having to create 42 | a MetricValue.Labels instance, all that's necessary to specify the 43 | required string. 44 | 45 | Args: 46 | labels (dict([string, [string]]): 47 | **kw: any other valid keyword args valid in the MetricValue constructor 48 | 49 | Returns 50 | :class:`MetricValue`: the created instance 51 | 52 | """ 53 | if labels is not None: 54 | kw[u'labels'] = encoding.PyValueToMessage(MetricValue.LabelsValue, 55 | labels) 56 | return MetricValue(**kw) 57 | 58 | 59 | def merge(metric_kind, prior, latest): 60 | """Merges `prior` and `latest` 61 | 62 | Args: 63 | metric_kind (:class:`MetricKind`): indicates the kind of metrics 64 | being merged 65 | prior (:class:`MetricValue`): an prior instance of the metric 66 | latest (:class:`MetricValue`: the latest instance of the metric 67 | """ 68 | prior_type, _ = _detect_value(prior) 69 | latest_type, _ = _detect_value(latest) 70 | if prior_type != latest_type: 71 | _logger.warn(u'Metric values are not compatible: %s, %s', 72 | prior, latest) 73 | raise ValueError(u'Incompatible delta metric values') 74 | if prior_type is None: 75 | _logger.warn(u'Bad metric values, types not known for : %s, %s', 76 | prior, latest) 77 | raise ValueError(u'Unsupported delta metric types') 78 | 79 | if metric_kind == MetricKind.DELTA: 80 | return _merge_delta_metric(prior, latest) 81 | else: 82 | return _merge_cumulative_or_gauge_metrics(prior, latest) 83 | 84 | 85 | def update_hash(a_hash, mv): 86 | """Adds ``mv`` to ``a_hash`` 87 | 88 | Args: 89 | a_hash (`Hash`): the secure hash, e.g created by hashlib.md5 90 | mv (:class:`MetricValue`): the instance to add to the hash 91 | 92 | """ 93 | if mv.labels: 94 | signing.add_dict_to_hash(a_hash, encoding.MessageToPyValue(mv.labels)) 95 | money_value = mv.get_assigned_value(u'moneyValue') 96 | if money_value is not None: 97 | a_hash.update(b'\x00') 98 | a_hash.update(money_value.currencyCode.encode('utf-8')) 99 | 100 | 101 | def sign(mv): 102 | """Obtains a signature for a `MetricValue` 103 | 104 | Args: 105 | mv (:class:`endpoints_management.gen.servicecontrol_v1_messages.MetricValue`): a 106 | MetricValue 
that's part of an operation 107 | 108 | Returns: 109 | string: a unique signature for that operation 110 | """ 111 | md5 = hashlib.md5() 112 | update_hash(md5, mv) 113 | return md5.digest() 114 | 115 | 116 | def _merge_cumulative_or_gauge_metrics(prior, latest): 117 | if timestamp.compare(prior.endTime, latest.endTime) == -1: 118 | return latest 119 | else: 120 | return prior 121 | 122 | 123 | def _merge_delta_metric(prior, latest): 124 | prior_type, prior_value = _detect_value(prior) 125 | latest_type, latest_value = _detect_value(latest) 126 | _merge_delta_timestamps(prior, latest) 127 | updated_value = _combine_delta_values(prior_type, prior_value, latest_value) 128 | setattr(latest, latest_type, updated_value) 129 | return latest 130 | 131 | 132 | # This is derived from the oneof choices for the MetricValue message's value 133 | # field in google/api/servicecontrol/v1/metric_value.proto, and should be kept 134 | # in sync with that 135 | _METRIC_VALUE_ONEOF_FIELDS = ( 136 | u'boolValue', u'distributionValue', u'doubleValue', u'int64Value', 137 | u'moneyValue', u'stringValue') 138 | 139 | 140 | def _detect_value(metric_value): 141 | for f in _METRIC_VALUE_ONEOF_FIELDS: 142 | value = metric_value.get_assigned_value(f) 143 | if value is not None: 144 | return f, value 145 | return None, None 146 | 147 | 148 | def _merge_delta_timestamps(prior, latest): 149 | # Update the start time and end time in the latest metric value 150 | if (prior.startTime and 151 | (latest.startTime is None or 152 | timestamp.compare(prior.startTime, latest.startTime) == -1)): 153 | latest.startTime = prior.startTime 154 | 155 | if (prior.endTime and 156 | (latest.endTime is None or timestamp.compare( 157 | latest.endTime, prior.endTime) == -1)): 158 | latest.endTime = prior.endTime 159 | 160 | return latest 161 | 162 | 163 | def _combine_delta_values(value_type, prior, latest): 164 | if value_type in (u'int64Value', u'doubleValue'): 165 | return prior + latest 166 | elif value_type == u'moneyValue': 167 | return money.add(prior, latest, allow_overflow=True) 168 | elif value_type == u'distributionValue': 169 | distribution.merge(prior, latest) 170 | return latest 171 | else: 172 | _logger.error(u'Unmergeable metric type %s', value_type) 173 | raise ValueError(u'Could not merge unmergeable metric type') 174 | -------------------------------------------------------------------------------- /endpoints_management/control/money.py: -------------------------------------------------------------------------------- 1 | # Copyright 2017 Google Inc. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | """money provides funcs for working with `Money` instances. 16 | 17 | :func:`check_valid` determines if a `Money` instance is valid 18 | :func:`add` adds two `Money` instances together 19 | 20 | """ 21 | 22 | from __future__ import absolute_import 23 | 24 | import logging 25 | import sys 26 | 27 | from . 
import sc_messages 28 | 29 | _logger = logging.getLogger(__name__) 30 | 31 | _INT64_MAX = sys.maxint 32 | _INT64_MIN = -sys.maxint - 1 33 | _BILLION = 1000000000 34 | MAX_NANOS = _BILLION - 1 35 | _MSG_3_LETTERS_LONG = u'The currency code is not 3 letters long' 36 | _MSG_UNITS_NANOS_MISMATCH = u'The signs of the units and nanos do not match' 37 | _MSG_NANOS_OOB = u'The nanos field must be between -999999999 and 999999999' 38 | 39 | 40 | def check_valid(money): 41 | """Determine if an instance of `Money` is valid. 42 | 43 | Args: 44 | money (:class:`endpoints_management.gen.servicecontrol_v1_messages.Money`): the 45 | instance to test 46 | 47 | Raises: 48 | ValueError: if the money instance is invalid 49 | """ 50 | if not isinstance(money, sc_messages.Money): 51 | raise ValueError(u'Inputs should be of type %s' % (sc_messages.Money,)) 52 | currency = money.currencyCode 53 | if not currency or len(currency) != 3: 54 | raise ValueError(_MSG_3_LETTERS_LONG) 55 | units = money.units 56 | nanos = money.nanos 57 | if ((units > 0) and (nanos < 0)) or ((units < 0) and (nanos > 0)): 58 | raise ValueError(_MSG_UNITS_NANOS_MISMATCH) 59 | if abs(nanos) > MAX_NANOS: 60 | raise ValueError(_MSG_NANOS_OOB) 61 | 62 | 63 | def add(a, b, allow_overflow=False): 64 | """Adds two instances of `Money`. 65 | 66 | Args: 67 | a (:class:`endpoints_management.gen.servicecontrol_v1_messages.Money`): one money 68 | value 69 | b (:class:`endpoints_management.gen.servicecontrol_v1_messages.Money`): another 70 | money value 71 | allow_overflow: determines if the addition is allowed to overflow 72 | 73 | Return: 74 | `Money`: an instance of Money 75 | 76 | Raises: 77 | ValueError: if the inputs do not have the same currency code 78 | OverflowError: if the sum overflows and allow_overflow is not `True` 79 | """ 80 | for m in (a, b): 81 | if not isinstance(m, sc_messages.Money): 82 | raise ValueError(u'Inputs should be of type %s' % (sc_messages.Money,)) 83 | if a.currencyCode != b.currencyCode: 84 | raise ValueError(u'Money values need the same currency to be summed') 85 | nano_carry, nanos_sum = _sum_nanos(a, b) 86 | units_sum_no_carry = a.units + b.units 87 | units_sum = units_sum_no_carry + nano_carry 88 | 89 | # Adjust when units_sum and nanos_sum have different signs 90 | if units_sum > 0 and nanos_sum < 0: 91 | units_sum -= 1 92 | nanos_sum += _BILLION 93 | elif units_sum < 0 and nanos_sum > 0: 94 | units_sum += 1 95 | nanos_sum -= _BILLION 96 | 97 | # Return the result, detecting overflow if it occurs 98 | sign_a = _sign_of(a) 99 | sign_b = _sign_of(b) 100 | if sign_a > 0 and sign_b > 0 and units_sum >= _INT64_MAX: 101 | if not allow_overflow: 102 | raise OverflowError(u'Money addition positive overflow') 103 | else: 104 | return sc_messages.Money(units=_INT64_MAX, 105 | nanos=MAX_NANOS, 106 | currencyCode=a.currencyCode) 107 | elif (sign_a < 0 and sign_b < 0 and 108 | (units_sum_no_carry <= -_INT64_MAX or units_sum <= -_INT64_MAX)): 109 | if not allow_overflow: 110 | raise OverflowError(u'Money addition negative overflow') 111 | else: 112 | return sc_messages.Money(units=_INT64_MIN, 113 | nanos=-MAX_NANOS, 114 | currencyCode=a.currencyCode) 115 | else: 116 | return sc_messages.Money(units=units_sum, 117 | nanos=nanos_sum, 118 | currencyCode=a.currencyCode) 119 | 120 | 121 | def _sum_nanos(a, b): 122 | the_sum = a.nanos + b.nanos 123 | carry = 0 124 | if the_sum > _BILLION: 125 | carry = 1 126 | the_sum -= _BILLION 127 | elif the_sum <= -_BILLION: 128 | carry = -1 129 | the_sum += _BILLION 130 | return carry, the_sum 
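

# A minimal usage sketch (hypothetical amounts) showing how ``add`` carries an
# overflowing nanos sum into the units field; ``sc_messages.Money`` is the
# message type used throughout this module.
def _example_add_usage():
    a = sc_messages.Money(units=1, nanos=750000000, currencyCode=u'USD')
    b = sc_messages.Money(units=2, nanos=500000000, currencyCode=u'USD')
    # 1.75 USD + 2.50 USD == 4.25 USD, i.e. units=4, nanos=250000000
    return add(a, b)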
131 | 132 | 133 | def _sign_of(money): 134 | """Determines the amount sign of a money instance 135 | 136 | Args: 137 | money (:class:`endpoints_management.gen.servicecontrol_v1_messages.Money`): the 138 | instance to test 139 | 140 | Return: 141 | int: 1, 0 or -1 142 | 143 | """ 144 | units = money.units 145 | nanos = money.nanos 146 | if units: 147 | if units > 0: 148 | return 1 149 | elif units < 0: 150 | return -1 151 | if nanos: 152 | if nanos > 0: 153 | return 1 154 | elif nanos < 0: 155 | return -1 156 | return 0 157 | -------------------------------------------------------------------------------- /endpoints_management/control/operation.py: -------------------------------------------------------------------------------- 1 | # Copyright 2017 Google Inc. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | """operation provides support for working with `Operation` instances. 16 | 17 | :class:`~endpoints_management.gen.servicecontrol_v1_message.Operation` represents 18 | information regarding an operation, and is a key constituent of 19 | :class:`~endpoints_management.gen.servicecontrol_v1_message.CheckRequest` and 20 | :class:`~endpoints_management.gen.servicecontrol_v1_message.ReportRequests. 21 | 22 | The :class:`.Aggregator` support this. 23 | 24 | """ 25 | 26 | from __future__ import absolute_import 27 | 28 | import collections 29 | import logging 30 | from datetime import datetime 31 | 32 | from apitools.base.py import encoding 33 | 34 | from . import metric_value, sc_messages, timestamp, MetricKind 35 | 36 | _logger = logging.getLogger(__name__) 37 | 38 | 39 | class Info( 40 | collections.namedtuple( 41 | u'Info', [ 42 | u'android_cert_fingerprint', 43 | u'android_package_name', 44 | u'api_key', 45 | u'api_key_valid', 46 | u'consumer_project_id', 47 | u'ios_bundle_id', 48 | u'operation_id', 49 | u'operation_name', 50 | u'referer', 51 | u'service_name', 52 | ])): 53 | """Holds basic information about an api call. 54 | 55 | This class is one of several used to mediate between the raw service 56 | control api surface and python frameworks. Client code can construct 57 | operations using this surface 58 | 59 | Attributes: 60 | android_cert_fingerprint (string): the SHA-1 signing-certificate 61 | fingerprint of the calling app, used when the provided api_key is 62 | restricted to certain Android apps 63 | android_package_name (string): the package name of the calling app, 64 | used when the provided api_key is restricted to certain Android apps 65 | api_key (string): the api key 66 | api_key_valid (bool): it the request has a valid api key. 
By default 67 | it is true, it will only be set to false if the api key cannot 68 | be validated by the service controller 69 | consumer_project_id (string): the project id of the api consumer 70 | ios_bundle_id (string): the bundle identifier of the calling app, 71 | used when the provided api_key is restricted to certain iOS apps 72 | operation_id (string): identity of the operation, which must be unique 73 | within the scope of the service. Calls to report and check on the 74 | same operation should carry the same operation id 75 | operation_name (string): the fully-qualified name of the operation 76 | referer (string): the referer header, or if not present the origin 77 | service_name(string): the name of service 78 | 79 | """ 80 | # pylint: disable=too-many-arguments 81 | 82 | def __new__(cls, 83 | android_cert_fingerprint=u'', 84 | android_package_name=u'', 85 | api_key=u'', 86 | api_key_valid=False, 87 | consumer_project_id=u'', 88 | ios_bundle_id=u'', 89 | operation_id=u'', 90 | operation_name=u'', 91 | referer=u'', 92 | service_name=u''): 93 | """Invokes the base constructor with default values.""" 94 | return super(cls, Info).__new__( 95 | cls, 96 | android_cert_fingerprint, 97 | android_package_name, 98 | api_key, 99 | api_key_valid, 100 | consumer_project_id, 101 | ios_bundle_id, 102 | operation_id, 103 | operation_name, 104 | referer, 105 | service_name) 106 | 107 | def as_operation(self, timer=datetime.utcnow): 108 | """Makes an ``Operation`` from this instance. 109 | 110 | Returns: 111 | an ``Operation`` 112 | 113 | """ 114 | now = timer() 115 | op = sc_messages.Operation( 116 | endTime=timestamp.to_rfc3339(now), 117 | startTime=timestamp.to_rfc3339(now), 118 | importance=sc_messages.Operation.ImportanceValueValuesEnum.LOW) 119 | if self.operation_id: 120 | op.operationId = self.operation_id 121 | if self.operation_name: 122 | op.operationName = self.operation_name 123 | if self.api_key and self.api_key_valid: 124 | op.consumerId = u'api_key:' + self.api_key 125 | elif self.consumer_project_id: 126 | op.consumerId = u'project:' + self.consumer_project_id 127 | return op 128 | 129 | 130 | class Aggregator(object): 131 | """Container that implements operation aggregation. 132 | 133 | Thread compatible. 134 | """ 135 | DEFAULT_KIND = MetricKind.DELTA 136 | """Used when kinds are not specified, or are missing a metric name""" 137 | 138 | def __init__(self, initial_op, kinds=None): 139 | """Constructor. 140 | 141 | If kinds is not specifed, all operations will be merged assuming 142 | they are of Kind ``DEFAULT_KIND`` 143 | 144 | Args: 145 | initial_op ( 146 | :class:`endpoints_management.gen.servicecontrol_v1_messages.Operation`): the 147 | initial version of the operation 148 | kinds (dict[string,[string]]): specifies the metric kind for 149 | each metric name 150 | 151 | """ 152 | assert isinstance(initial_op, sc_messages.Operation) 153 | if kinds is None: 154 | kinds = {} 155 | self._kinds = kinds 156 | self._metric_values_by_name_then_sign = collections.defaultdict(dict) 157 | our_op = encoding.CopyProtoMessage(initial_op) 158 | self._merge_metric_values(our_op) 159 | our_op.metricValueSets = [] 160 | self._op = our_op 161 | 162 | def as_operation(self): 163 | """Obtains a single `Operation` representing this instances contents. 
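
        Example (a sketch; the operation id below is hypothetical):

            >>> op = sc_messages.Operation(operationId=u'an_op_id')
            >>> aggregated = Aggregator(op)
            >>> aggregated.add(sc_messages.Operation(operationId=u'an_op_id'))
            >>> merged = aggregated.as_operation()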
164 | 165 | Returns: 166 | :class:`endpoints_management.gen.servicecontrol_v1_messages.Operation` 167 | """ 168 | result = encoding.CopyProtoMessage(self._op) 169 | names = sorted(self._metric_values_by_name_then_sign.keys()) 170 | for name in names: 171 | mvs = self._metric_values_by_name_then_sign[name] 172 | result.metricValueSets.append( 173 | sc_messages.MetricValueSet( 174 | metricName=name, metricValues=mvs.values())) 175 | return result 176 | 177 | def add(self, other_op): 178 | """Combines `other_op` with the operation held by this aggregator. 179 | 180 | N.B. It merges the operations log entries and metric values, but makes 181 | the assumption the operation is consistent. It's the callers 182 | responsibility to ensure consistency 183 | 184 | Args: 185 | other_op ( 186 | class:`endpoints_management.gen.servicecontrol_v1_messages.Operation`): 187 | an operation merge into this one 188 | 189 | """ 190 | self._op.logEntries.extend(other_op.logEntries) 191 | self._merge_timestamps(other_op) 192 | self._merge_metric_values(other_op) 193 | 194 | def _merge_metric_values(self, other_op): 195 | for value_set in other_op.metricValueSets: 196 | name = value_set.metricName 197 | kind = self._kinds.get(name, self.DEFAULT_KIND) 198 | by_signature = self._metric_values_by_name_then_sign[name] 199 | for mv in value_set.metricValues: 200 | signature = metric_value.sign(mv) 201 | prior = by_signature.get(signature) 202 | if prior is not None: 203 | metric_value.merge(kind, prior, mv) 204 | by_signature[signature] = mv 205 | 206 | def _merge_timestamps(self, other_op): 207 | # Update the start time and end time in self._op as needed 208 | if (other_op.startTime and 209 | (self._op.startTime is None or 210 | timestamp.compare(other_op.startTime, self._op.startTime) == -1)): 211 | self._op.startTime = other_op.startTime 212 | 213 | if (other_op.endTime and 214 | (self._op.endTime is None or timestamp.compare( 215 | self._op.endTime, other_op.endTime) == -1)): 216 | self._op.endTime = other_op.endTime 217 | -------------------------------------------------------------------------------- /endpoints_management/control/path_regex.py: -------------------------------------------------------------------------------- 1 | # Copyright 2017 Google Inc. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | """Implements a utility for parsing path templates.""" 16 | 17 | # This is ported over from endpoints.api_config_manager. 18 | 19 | from __future__ import absolute_import 20 | 21 | import base64 22 | import re 23 | 24 | # Internal constants 25 | _PATH_VARIABLE_PATTERN = r'[a-zA-Z_][a-zA-Z_.\d]*' 26 | _PATH_VALUE_PATTERN = r'[^/?#\[\]{}]*' 27 | 28 | RegexError = re.error # convenient alias 29 | 30 | 31 | def _to_safe_path_param_name(matched_parameter): 32 | """Creates a safe string to be used as a regex group name. 
33 | 34 | Only alphanumeric characters and underscore are allowed in variable name 35 | tokens, and numeric are not allowed as the first character. 36 | 37 | We cast the matched_parameter to base32 (since the alphabet is safe), 38 | strip the padding (= not safe) and prepend with _, since we know a token 39 | can begin with underscore. 40 | 41 | Args: 42 | matched_parameter: A string containing the parameter matched from the URL 43 | template. 44 | 45 | Returns: 46 | A string that's safe to be used as a regex group name. 47 | """ 48 | return '_' + base64.b32encode(matched_parameter).rstrip('=') 49 | 50 | def compile_path_pattern(pattern): 51 | r"""Generates a compiled regex pattern for a path pattern. 52 | 53 | e.g. '/MyApi/v1/notes/{id}' 54 | returns re.compile(r'/MyApi/v1/notes/(?P[^/?#\[\]{}]*)') 55 | 56 | Args: 57 | pattern: A string, the parameterized path pattern to be checked. 58 | 59 | Returns: 60 | A compiled regex object to match this path pattern. 61 | """ 62 | 63 | def replace_variable(match): 64 | """Replaces a {variable} with a regex to match it by name. 65 | 66 | Changes the string corresponding to the variable name to the base32 67 | representation of the string, prepended by an underscore. This is 68 | necessary because we can have message variable names in URL patterns 69 | (e.g. via {x.y}) but the character '.' can't be in a regex group name. 70 | 71 | Args: 72 | match: A regex match object, the matching regex group as sent by 73 | re.sub(). 74 | 75 | Returns: 76 | A string regex to match the variable by name, if the full pattern was 77 | matched. 78 | """ 79 | if match.lastindex > 1: 80 | var_name = _to_safe_path_param_name(match.group(2)) 81 | return '%s(?P<%s>%s)' % (match.group(1), var_name, 82 | _PATH_VALUE_PATTERN) 83 | return match.group(0) 84 | 85 | pattern = re.sub('(/|^){(%s)}(?=/|$|:)' % _PATH_VARIABLE_PATTERN, 86 | replace_variable, pattern) 87 | return re.compile(pattern + '/?$') 88 | -------------------------------------------------------------------------------- /endpoints_management/control/signing.py: -------------------------------------------------------------------------------- 1 | # Copyright 2017 Google Inc. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 
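
# A minimal usage sketch for the ``add_dict_to_hash`` helper defined below;
# ``hashlib.md5`` and the sample dict are illustrative choices:
#
#     import hashlib
#     a_hash = hashlib.md5()
#     add_dict_to_hash(a_hash, {u'a_key': u'a_value'})
#     signature = a_hash.digest()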
14 | 15 | """Provides support for creating signatures using secure hashes.""" 16 | 17 | from __future__ import absolute_import 18 | 19 | 20 | def add_dict_to_hash(a_hash, a_dict): 21 | """Adds `a_dict` to `a_hash` 22 | 23 | Args: 24 | a_hash (`Hash`): the secure hash, e.g created by hashlib.md5 25 | a_dict (dict[string, [string]]): the dictionary to add to the hash 26 | 27 | """ 28 | if a_dict is None: 29 | return 30 | for k, v in a_dict.items(): 31 | a_hash.update(b'\x00' + k.encode('utf-8') + b'\x00' + v.encode('utf-8')) 32 | -------------------------------------------------------------------------------- /endpoints_management/control/timestamp.py: -------------------------------------------------------------------------------- 1 | # Copyright 2017 Google Inc. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | """timestamp provides functions that support working with timestamps. 16 | 17 | :func:`to_rfc3339` and :func:`from_rfc3339` convert between standard python 18 | datetime types and the rfc3339 representation used in json messsages. 19 | 20 | :func:`compare` allows comparison of any timestamp representation, either the 21 | standard python datetime types, or an rfc3339 string representation 22 | 23 | """ 24 | 25 | from __future__ import absolute_import 26 | 27 | import datetime 28 | import logging 29 | 30 | import strict_rfc3339 31 | 32 | _logger = logging.getLogger(__name__) 33 | 34 | 35 | _EPOCH_START = datetime.datetime(1970, 1, 1) 36 | 37 | 38 | def compare(a, b): 39 | """Compares two timestamps. 40 | 41 | ``a`` and ``b`` must be the same type, in addition to normal 42 | representations of timestamps that order naturally, they can be rfc3339 43 | formatted strings. 44 | 45 | Args: 46 | a (string|object): a timestamp 47 | b (string|object): another timestamp 48 | 49 | Returns: 50 | int: -1 if a < b, 0 if a == b or 1 if a > b 51 | 52 | Raises: 53 | ValueError: if a or b are not the same type 54 | ValueError: if a or b strings but not in valid rfc3339 format 55 | 56 | """ 57 | a_is_text = isinstance(a, basestring) 58 | b_is_text = isinstance(b, basestring) 59 | if type(a) != type(b) and not (a_is_text and b_is_text): 60 | _logger.error(u'Cannot compare %s to %s, types differ %s!=%s', 61 | a, b, type(a), type(b)) 62 | raise ValueError(u'cannot compare inputs of differing types') 63 | 64 | if a_is_text: 65 | a = from_rfc3339(a, with_nanos=True) 66 | b = from_rfc3339(b, with_nanos=True) 67 | 68 | if a < b: 69 | return -1 70 | elif a > b: 71 | return 1 72 | else: 73 | return 0 74 | 75 | 76 | def to_rfc3339(timestamp): 77 | """Converts ``timestamp`` to an RFC 3339 date string format. 78 | 79 | ``timestamp`` can be either a ``datetime.datetime`` or a 80 | ``datetime.timedelta``. Instances of the later are assumed to be a delta 81 | with the beginining of the unix epoch, 1st of January, 1970 82 | 83 | The returned string is always Z-normalized. 
Examples of the return format: 84 | '1972-01-01T10:00:20.021Z' 85 | 86 | Args: 87 | timestamp (datetime|timedelta): represents the timestamp to convert 88 | 89 | Returns: 90 | string: timestamp converted to a rfc3339 compliant string as above 91 | 92 | Raises: 93 | ValueError: if timestamp is not a datetime.datetime or datetime.timedelta 94 | 95 | """ 96 | if isinstance(timestamp, datetime.datetime): 97 | timestamp = timestamp - _EPOCH_START 98 | if not isinstance(timestamp, datetime.timedelta): 99 | _logger.error(u'Could not convert %s to a rfc3339 time,', timestamp) 100 | raise ValueError(u'Invalid timestamp type') 101 | return strict_rfc3339.timestamp_to_rfc3339_utcoffset( 102 | timestamp.total_seconds()) 103 | 104 | 105 | def from_rfc3339(rfc3339_text, with_nanos=False): 106 | """Parse a RFC 3339 date string format to datetime.date. 107 | 108 | Example of accepted format: '1972-01-01T10:00:20.021-05:00' 109 | 110 | - By default, the result is a datetime.datetime 111 | - If with_nanos is true, the result is a 2-tuple, (datetime.datetime, 112 | nanos), where the second field represents the possible nanosecond 113 | resolution component of the second field. 114 | 115 | Args: 116 | rfc3339_text (string): An rfc3339 formatted date string 117 | with_nanos (bool): Determines if nanoseconds should be parsed from the 118 | string 119 | 120 | Raises: 121 | ValueError: if ``rfc3339_text`` is invalid 122 | 123 | Returns: 124 | :class:`datetime.datetime`: when with_nanos is False 125 | tuple(:class:`datetime.datetime`, int): when with_nanos is True 126 | 127 | """ 128 | timestamp = strict_rfc3339.rfc3339_to_timestamp(rfc3339_text) 129 | result = datetime.datetime.utcfromtimestamp(timestamp) 130 | if with_nanos: 131 | return (result, int((timestamp - int(timestamp)) * 1e9)) 132 | else: 133 | return result 134 | -------------------------------------------------------------------------------- /endpoints_management/control/vendor/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright 2017 Google Inc. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | """Vendored modules""" 16 | -------------------------------------------------------------------------------- /endpoints_management/control/vendor/py3/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright 2017 Google Inc. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | """Vendored python3 modules.""" 16 | -------------------------------------------------------------------------------- /endpoints_management/control/vendor/py3/sched.py: -------------------------------------------------------------------------------- 1 | """A generally useful event scheduler class. 2 | 3 | Each instance of this class manages its own queue. 4 | No multi-threading is implied; you are supposed to hack that 5 | yourself, or use a single instance per application. 6 | 7 | Each instance is parametrized with two functions, one that is 8 | supposed to return the current time, one that is supposed to 9 | implement a delay. You can implement real-time scheduling by 10 | substituting time and sleep from built-in module time, or you can 11 | implement simulated time by writing your own functions. This can 12 | also be used to integrate scheduling with STDWIN events; the delay 13 | function is allowed to modify the queue. Time can be expressed as 14 | integers or floating point numbers, as long as it is consistent. 15 | 16 | Events are specified by tuples (time, priority, action, argument, kwargs). 17 | As in UNIX, lower priority numbers mean higher priority; in this 18 | way the queue can be maintained as a priority queue. Execution of the 19 | event means calling the action function, passing it the argument 20 | sequence in "argument" (remember that in Python, multiple function 21 | arguments are be packed in a sequence) and keyword parameters in "kwargs". 22 | The action function may be an instance method so it 23 | has another way to reference private data (besides global variables). 24 | """ 25 | from __future__ import unicode_literals 26 | from __future__ import print_function 27 | from __future__ import division 28 | from __future__ import absolute_import 29 | 30 | # This file is vendored in from the Python 3.6.1 stdlib module `sched`, 31 | # and processed with the `pasteurize` script from http://python-future.org 32 | # The original file is 33 | # Copyright (c) 2001-2016 Python Software Foundation. All rights reserved. 34 | 35 | # flake8: noqa 36 | # pylint: skip-file 37 | 38 | import time 39 | import heapq 40 | from collections import namedtuple 41 | try: 42 | import threading 43 | except ImportError: 44 | import dummy_threading as threading 45 | from time import time as _time 46 | 47 | __all__ = ["scheduler"] 48 | 49 | class Event(namedtuple('Event', 'time, priority, action, argument, kwargs')): 50 | # pylint: disable=no-self-argument 51 | __slots__ = [] 52 | def __eq__(s, o): return (s.time, s.priority) == (o.time, o.priority) 53 | def __lt__(s, o): return (s.time, s.priority) < (o.time, o.priority) 54 | def __le__(s, o): return (s.time, s.priority) <= (o.time, o.priority) 55 | def __gt__(s, o): return (s.time, s.priority) > (o.time, o.priority) 56 | def __ge__(s, o): return (s.time, s.priority) >= (o.time, o.priority) 57 | 58 | 59 | _sentinel = object() 60 | 61 | class scheduler(object): 62 | 63 | def __init__(self, timefunc=_time, delayfunc=time.sleep): 64 | """Initialize a new instance, passing the time and delay 65 | functions""" 66 | self._queue = [] 67 | self._lock = threading.RLock() 68 | self.timefunc = timefunc 69 | self.delayfunc = delayfunc 70 | 71 | def enterabs(self, time, priority, action, argument=(), kwargs=_sentinel): 72 | """Enter a new event in the queue at an absolute time. 
73 | 74 | Returns an ID for the event which can be used to remove it, 75 | if necessary. 76 | 77 | """ 78 | if kwargs is _sentinel: 79 | kwargs = {} 80 | event = Event(time, priority, action, argument, kwargs) 81 | with self._lock: 82 | heapq.heappush(self._queue, event) 83 | return event # The ID 84 | 85 | def enter(self, delay, priority, action, argument=(), kwargs=_sentinel): 86 | """A variant that specifies the time as a relative time. 87 | 88 | This is actually the more commonly used interface. 89 | 90 | """ 91 | time = self.timefunc() + delay 92 | return self.enterabs(time, priority, action, argument, kwargs) 93 | 94 | def cancel(self, event): 95 | """Remove an event from the queue. 96 | 97 | This must be presented the ID as returned by enter(). 98 | If the event is not in the queue, this raises ValueError. 99 | 100 | """ 101 | with self._lock: 102 | self._queue.remove(event) 103 | heapq.heapify(self._queue) 104 | 105 | def empty(self): 106 | """Check whether the queue is empty.""" 107 | with self._lock: 108 | return not self._queue 109 | 110 | def run(self, blocking=True): 111 | """Execute events until the queue is empty. 112 | If blocking is False executes the scheduled events due to 113 | expire soonest (if any) and then return the deadline of the 114 | next scheduled call in the scheduler. 115 | 116 | When there is a positive delay until the first event, the 117 | delay function is called and the event is left in the queue; 118 | otherwise, the event is removed from the queue and executed 119 | (its action function is called, passing it the argument). If 120 | the delay function returns prematurely, it is simply 121 | restarted. 122 | 123 | It is legal for both the delay function and the action 124 | function to modify the queue or to raise an exception; 125 | exceptions are not caught but the scheduler's state remains 126 | well-defined so run() may be called again. 127 | 128 | A questionable hack is added to allow other threads to run: 129 | just after an event is executed, a delay of 0 is executed, to 130 | avoid monopolizing the CPU when other threads are also 131 | runnable. 132 | 133 | """ 134 | # localize variable access to minimize overhead 135 | # and to improve thread safety 136 | lock = self._lock 137 | q = self._queue 138 | delayfunc = self.delayfunc 139 | timefunc = self.timefunc 140 | pop = heapq.heappop 141 | while True: 142 | with lock: 143 | if not q: 144 | break 145 | time, priority, action, argument, kwargs = q[0] 146 | now = timefunc() 147 | if time > now: 148 | delay = True 149 | else: 150 | delay = False 151 | pop(q) 152 | if delay: 153 | if not blocking: 154 | return time - now 155 | delayfunc(time - now) 156 | else: 157 | action(*argument, **kwargs) 158 | delayfunc(0) # Let other threads run 159 | 160 | @property 161 | def queue(self): 162 | """An ordered list of upcoming events. 163 | 164 | Events are named tuples with fields for: 165 | time, priority, action, arguments, kwargs 166 | 167 | """ 168 | # Use heapq to sort the queue rather than using 'sorted(self._queue)'. 169 | # With heapq, two events scheduled at the same time will show in 170 | # the actual order they would be retrieved. 
171 | with self._lock: 172 | events = self._queue[:] 173 | return list(map(heapq.heappop, [events]*len(events))) 174 | -------------------------------------------------------------------------------- /endpoints_management/gen/README.rst: -------------------------------------------------------------------------------- 1 | Google Endpoints Service API Clients 2 | ==================================== 3 | 4 | This package is generated client code. You can regenerate the clients by 5 | installing the `apitools` CLI: 6 | 7 | .. code:: bash 8 | 9 | [sudo] pip install google-apitools[cli] 10 | gen_client --discovery_url=https://servicemanagement.googleapis.com/\$discovery/rest client 11 | gen_client --discovery_url=https://servicecontrol.googleapis.com/\$discovery/rest client 12 | -------------------------------------------------------------------------------- /endpoints_management/gen/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright 2017 Google Inc. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | """Generated model class and http client.""" 16 | -------------------------------------------------------------------------------- /endpoints_management/gen/servicecontrol_v1_client.py: -------------------------------------------------------------------------------- 1 | # Copyright 2017 Google Inc. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | """Generated client library for servicecontrol version v1.""" 16 | # NOTE: This file is autogenerated and should not be edited by hand. 
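# A minimal, hypothetical sketch of how this generated client is typically
# driven (illustration only; the wrapper message names follow the method
# configs later in this module, the service name is a placeholder, and
# application default credentials are assumed to be available):
#
#   from endpoints_management.gen import servicecontrol_v1_client
#   from endpoints_management.gen import servicecontrol_v1_messages as messages
#
#   client = servicecontrol_v1_client.ServicecontrolV1()
#   check_req = messages.ServicecontrolServicesCheckRequest(
#       serviceName=u'example-service.example.com',
#       checkRequest=messages.CheckRequest())
#   check_resp = client.services.Check(check_req)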
17 | from apitools.base.py import base_api 18 | import servicecontrol_v1_messages as messages 19 | 20 | 21 | class ServicecontrolV1(base_api.BaseApiClient): 22 | """Generated client library for service servicecontrol version v1.""" 23 | 24 | MESSAGES_MODULE = messages 25 | BASE_URL = u'https://servicecontrol.googleapis.com/' 26 | 27 | _PACKAGE = u'servicecontrol' 28 | _SCOPES = [u'https://www.googleapis.com/auth/cloud-platform', u'https://www.googleapis.com/auth/servicecontrol'] 29 | _VERSION = u'v1' 30 | _CLIENT_ID = '1042881264118.apps.googleusercontent.com' 31 | _CLIENT_SECRET = 'x_Tw5K8nnjoRAqULM9PFAC2b' 32 | _USER_AGENT = 'x_Tw5K8nnjoRAqULM9PFAC2b' 33 | _CLIENT_CLASS_NAME = u'ServicecontrolV1' 34 | _URL_VERSION = u'v1' 35 | _API_KEY = None 36 | 37 | def __init__(self, url='', credentials=None, 38 | get_credentials=True, http=None, model=None, 39 | log_request=False, log_response=False, 40 | credentials_args=None, default_global_params=None, 41 | additional_http_headers=None): 42 | """Create a new servicecontrol handle.""" 43 | url = url or self.BASE_URL 44 | super(ServicecontrolV1, self).__init__( 45 | url, credentials=credentials, 46 | get_credentials=get_credentials, http=http, model=model, 47 | log_request=log_request, log_response=log_response, 48 | credentials_args=credentials_args, 49 | default_global_params=default_global_params, 50 | additional_http_headers=additional_http_headers) 51 | self.services = self.ServicesService(self) 52 | 53 | class ServicesService(base_api.BaseApiService): 54 | """Service class for the services resource.""" 55 | 56 | _NAME = u'services' 57 | 58 | def __init__(self, client): 59 | super(ServicecontrolV1.ServicesService, self).__init__(client) 60 | self._upload_configs = { 61 | } 62 | 63 | def AllocateQuota(self, request, global_params=None): 64 | """Attempts to allocate quota for the specified consumer. It should be called. 65 | before the operation is executed. 66 | 67 | This method requires the `servicemanagement.services.quota` 68 | permission on the specified service. For more information, see 69 | [Google Cloud IAM](https://cloud.google.com/iam). 70 | 71 | **NOTE:** the client code **must** fail-open if the server returns one 72 | of the following quota errors: 73 | - `PROJECT_STATUS_UNAVAILABLE` 74 | - `SERVICE_STATUS_UNAVAILABLE` 75 | - `BILLING_STATUS_UNAVAILABLE` 76 | - `QUOTA_SYSTEM_UNAVAILABLE` 77 | 78 | The server may inject above errors to prohibit any hard dependency 79 | on the quota system. 80 | 81 | Args: 82 | request: (ServicecontrolServicesAllocateQuotaRequest) input message 83 | global_params: (StandardQueryParameters, default: None) global arguments 84 | Returns: 85 | (AllocateQuotaResponse) The response message. 86 | """ 87 | config = self.GetMethodConfig('AllocateQuota') 88 | return self._RunMethod( 89 | config, request, global_params=global_params) 90 | 91 | AllocateQuota.method_config = lambda: base_api.ApiMethodInfo( 92 | http_method=u'POST', 93 | method_id=u'servicecontrol.services.allocateQuota', 94 | ordered_params=[u'serviceName'], 95 | path_params=[u'serviceName'], 96 | query_params=[], 97 | relative_path=u'v1/services/{serviceName}:allocateQuota', 98 | request_field=u'allocateQuotaRequest', 99 | request_type_name=u'ServicecontrolServicesAllocateQuotaRequest', 100 | response_type_name=u'AllocateQuotaResponse', 101 | supports_download=False, 102 | ) 103 | 104 | def Check(self, request, global_params=None): 105 | """Checks an operation with Google Service Control to decide whether. 
106 | the given operation should proceed. It should be called before the 107 | operation is executed. 108 | 109 | If feasible, the client should cache the check results and reuse them for 110 | 60 seconds. In case of server errors, the client can rely on the cached 111 | results for longer time. 112 | 113 | NOTE: the CheckRequest has the size limit of 64KB. 114 | 115 | This method requires the `servicemanagement.services.check` permission 116 | on the specified service. For more information, see 117 | [Google Cloud IAM](https://cloud.google.com/iam). 118 | 119 | Args: 120 | request: (ServicecontrolServicesCheckRequest) input message 121 | global_params: (StandardQueryParameters, default: None) global arguments 122 | Returns: 123 | (CheckResponse) The response message. 124 | """ 125 | config = self.GetMethodConfig('Check') 126 | return self._RunMethod( 127 | config, request, global_params=global_params) 128 | 129 | Check.method_config = lambda: base_api.ApiMethodInfo( 130 | http_method=u'POST', 131 | method_id=u'servicecontrol.services.check', 132 | ordered_params=[u'serviceName'], 133 | path_params=[u'serviceName'], 134 | query_params=[], 135 | relative_path=u'v1/services/{serviceName}:check', 136 | request_field=u'checkRequest', 137 | request_type_name=u'ServicecontrolServicesCheckRequest', 138 | response_type_name=u'CheckResponse', 139 | supports_download=False, 140 | ) 141 | 142 | def EndReconciliation(self, request, global_params=None): 143 | """Signals the quota controller that service ends the ongoing usage. 144 | reconciliation. 145 | 146 | This method requires the `servicemanagement.services.quota` 147 | permission on the specified service. For more information, see 148 | [Google Cloud IAM](https://cloud.google.com/iam). 149 | 150 | Args: 151 | request: (ServicecontrolServicesEndReconciliationRequest) input message 152 | global_params: (StandardQueryParameters, default: None) global arguments 153 | Returns: 154 | (EndReconciliationResponse) The response message. 155 | """ 156 | config = self.GetMethodConfig('EndReconciliation') 157 | return self._RunMethod( 158 | config, request, global_params=global_params) 159 | 160 | EndReconciliation.method_config = lambda: base_api.ApiMethodInfo( 161 | http_method=u'POST', 162 | method_id=u'servicecontrol.services.endReconciliation', 163 | ordered_params=[u'serviceName'], 164 | path_params=[u'serviceName'], 165 | query_params=[], 166 | relative_path=u'v1/services/{serviceName}:endReconciliation', 167 | request_field=u'endReconciliationRequest', 168 | request_type_name=u'ServicecontrolServicesEndReconciliationRequest', 169 | response_type_name=u'EndReconciliationResponse', 170 | supports_download=False, 171 | ) 172 | 173 | def ReleaseQuota(self, request, global_params=None): 174 | """Releases previously allocated quota done through AllocateQuota method. 175 | 176 | This method requires the `servicemanagement.services.quota` 177 | permission on the specified service. For more information, see 178 | [Google Cloud IAM](https://cloud.google.com/iam). 179 | 180 | **NOTE:** the client code **must** fail-open if the server returns one 181 | of the following quota errors: 182 | - `PROJECT_STATUS_UNAVAILABLE` 183 | - `SERVICE_STATUS_UNAVAILABLE` 184 | - `BILLING_STATUS_UNAVAILABLE` 185 | - `QUOTA_SYSTEM_UNAVAILABLE` 186 | 187 | The server may inject above errors to prohibit any hard dependency 188 | on the quota system. 
189 | 190 | Args: 191 | request: (ServicecontrolServicesReleaseQuotaRequest) input message 192 | global_params: (StandardQueryParameters, default: None) global arguments 193 | Returns: 194 | (ReleaseQuotaResponse) The response message. 195 | """ 196 | config = self.GetMethodConfig('ReleaseQuota') 197 | return self._RunMethod( 198 | config, request, global_params=global_params) 199 | 200 | ReleaseQuota.method_config = lambda: base_api.ApiMethodInfo( 201 | http_method=u'POST', 202 | method_id=u'servicecontrol.services.releaseQuota', 203 | ordered_params=[u'serviceName'], 204 | path_params=[u'serviceName'], 205 | query_params=[], 206 | relative_path=u'v1/services/{serviceName}:releaseQuota', 207 | request_field=u'releaseQuotaRequest', 208 | request_type_name=u'ServicecontrolServicesReleaseQuotaRequest', 209 | response_type_name=u'ReleaseQuotaResponse', 210 | supports_download=False, 211 | ) 212 | 213 | def Report(self, request, global_params=None): 214 | """Reports operation results to Google Service Control, such as logs and. 215 | metrics. It should be called after an operation is completed. 216 | 217 | If feasible, the client should aggregate reporting data for up to 5 218 | seconds to reduce API traffic. Limiting aggregation to 5 seconds is to 219 | reduce data loss during client crashes. Clients should carefully choose 220 | the aggregation time window to avoid data loss risk more than 0.01% 221 | for business and compliance reasons. 222 | 223 | NOTE: the ReportRequest has the size limit of 1MB. 224 | 225 | This method requires the `servicemanagement.services.report` permission 226 | on the specified service. For more information, see 227 | [Google Cloud IAM](https://cloud.google.com/iam). 228 | 229 | Args: 230 | request: (ServicecontrolServicesReportRequest) input message 231 | global_params: (StandardQueryParameters, default: None) global arguments 232 | Returns: 233 | (ReportResponse) The response message. 234 | """ 235 | config = self.GetMethodConfig('Report') 236 | return self._RunMethod( 237 | config, request, global_params=global_params) 238 | 239 | Report.method_config = lambda: base_api.ApiMethodInfo( 240 | http_method=u'POST', 241 | method_id=u'servicecontrol.services.report', 242 | ordered_params=[u'serviceName'], 243 | path_params=[u'serviceName'], 244 | query_params=[], 245 | relative_path=u'v1/services/{serviceName}:report', 246 | request_field=u'reportRequest', 247 | request_type_name=u'ServicecontrolServicesReportRequest', 248 | response_type_name=u'ReportResponse', 249 | supports_download=False, 250 | ) 251 | 252 | def StartReconciliation(self, request, global_params=None): 253 | """Unlike rate quota, allocation quota does not get refilled periodically. 254 | So, it is possible that the quota usage as seen by the service differs from 255 | what the One Platform considers the usage is. This is expected to happen 256 | only rarely, but over time this can accumulate. Services can invoke 257 | StartReconciliation and EndReconciliation to correct this usage drift, as 258 | described below: 259 | 1. Service sends StartReconciliation with a timestamp in future for each 260 | metric that needs to be reconciled. The timestamp being in future allows 261 | to account for in-flight AllocateQuota and ReleaseQuota requests for the 262 | same metric. 263 | 2. One Platform records this timestamp and starts tracking subsequent 264 | AllocateQuota and ReleaseQuota requests until EndReconciliation is 265 | called. 266 | 3. 
At or after the time specified in the StartReconciliation, service 267 | sends EndReconciliation with the usage that needs to be reconciled to. 268 | 4. One Platform adjusts its own record of usage for that metric to the 269 | value specified in EndReconciliation by taking in to account any 270 | allocation or release between StartReconciliation and EndReconciliation. 271 | 272 | Signals the quota controller that the service wants to perform a usage 273 | reconciliation as specified in the request. 274 | 275 | This method requires the `servicemanagement.services.quota` 276 | permission on the specified service. For more information, see 277 | [Google Cloud IAM](https://cloud.google.com/iam). 278 | 279 | Args: 280 | request: (ServicecontrolServicesStartReconciliationRequest) input message 281 | global_params: (StandardQueryParameters, default: None) global arguments 282 | Returns: 283 | (StartReconciliationResponse) The response message. 284 | """ 285 | config = self.GetMethodConfig('StartReconciliation') 286 | return self._RunMethod( 287 | config, request, global_params=global_params) 288 | 289 | StartReconciliation.method_config = lambda: base_api.ApiMethodInfo( 290 | http_method=u'POST', 291 | method_id=u'servicecontrol.services.startReconciliation', 292 | ordered_params=[u'serviceName'], 293 | path_params=[u'serviceName'], 294 | query_params=[], 295 | relative_path=u'v1/services/{serviceName}:startReconciliation', 296 | request_field=u'startReconciliationRequest', 297 | request_type_name=u'ServicecontrolServicesStartReconciliationRequest', 298 | response_type_name=u'StartReconciliationResponse', 299 | supports_download=False, 300 | ) 301 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | backoff>=1.6.0 2 | cachetools>=1.0.0,<3 3 | dogpile.cache>=0.6.1,<0.7 4 | enum34>=1.1.6,<2 5 | google-apitools>=0.5.21,<0.6 6 | oauth2client==3.0.0 7 | pylru>=1.0.9,<2.0 8 | pyjwkest>=1.0.0,<=1.0.9 9 | requests>=2.10.0,<3.0 10 | strict-rfc3339>=0.7,<0.8 11 | urllib3>=1.16,<2.0 12 | webob>=1.8.1 13 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [aliases] 2 | test = ptr 3 | pytest = ptr 4 | 5 | [tool:pytest] 6 | addopts = --cov-report term-missing --cov endpoints_management 7 | norecursedirs = env 8 | testpaths = test 9 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | # Copyright 2016 Google Inc. All Rights Reserved. 5 | # 6 | # Licensed under the Apache License, Version 2.0 (the "License"); 7 | # you may not use this file except in compliance with the License. 8 | # You may obtain a copy of the License at 9 | # 10 | # http://www.apache.org/licenses/LICENSE-2.0 11 | # 12 | # Unless required by applicable law or agreed to in writing, software 13 | # distributed under the License is distributed on an "AS IS" BASIS, 14 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | # See the License for the specific language governing permissions and 16 | # limitations under the License. 
17 | 18 | import re 19 | 20 | from setuptools import setup, find_packages 21 | 22 | # Get the version 23 | version_regex = r'__version__ = ["\']([^"\']*)["\']' 24 | with open('endpoints_management/__init__.py', 'r') as f: 25 | text = f.read() 26 | match = re.search(version_regex, text) 27 | if match: 28 | version = match.group(1) 29 | else: 30 | raise RuntimeError("No version number found!") 31 | 32 | install_requires = [ 33 | 'backoff>=1.6.0', 34 | 'cachetools>=1.0.0,<3', 35 | "dogpile.cache>=0.6.1,<0.7", 36 | 'enum34>=1.1.6,<2', 37 | 'google-apitools>=0.5.21,<0.6', 38 | 'oauth2client==3.0.0', 39 | "pylru>=1.0.9,<2.0", 40 | "pyjwkest>=1.0.0,<=1.0.9", 41 | "requests>=2.10.0,<3.0", 42 | 'strict-rfc3339>=0.7,<0.8', 43 | 'urllib3>=1.16,<2.0', 44 | 'webob>=1.7.4', 45 | ] 46 | 47 | tests_require = [ 48 | "flask>=0.11.1", 49 | "httmock>=1.2", 50 | "mock>=2.0", 51 | "pytest", 52 | "pytest-cov" 53 | ] 54 | 55 | setup( 56 | name='google-endpoints-api-management', 57 | version=version, 58 | description='Google Endpoints API management', 59 | long_description=open('README.rst').read(), 60 | author='Google Inc', 61 | author_email='googleapis-packages@google.com', 62 | url='https://github.com/cloudendpoints/endpoints-management-python', 63 | packages=find_packages(exclude=['test', 'test.*']), 64 | namespace_packages=[], 65 | package_dir={'google-endpoints-api-management': 'endpoints_management'}, 66 | license='Apache License', 67 | classifiers=[ 68 | 'Development Status :: 4 - Beta', 69 | 'Intended Audience :: Developers', 70 | 'License :: OSI Approved :: Apache Software License', 71 | 'Programming Language :: Python', 72 | 'Programming Language :: Python :: 2', 73 | 'Programming Language :: Python :: 2.7', 74 | 'Programming Language :: Python :: Implementation :: CPython', 75 | ], 76 | install_requires=install_requires, 77 | setup_requires=["pytest_runner"], 78 | tests_require=tests_require, 79 | test_suite="tests" 80 | ) 81 | -------------------------------------------------------------------------------- /test-requirements.txt: -------------------------------------------------------------------------------- 1 | expects>=0.7.2 2 | flask>=0.11.1 3 | httmock>=1.2 4 | mock>=2. 5 | pytest>=2.8.3 6 | pytest-cov>=1.8.1 7 | pytest-timeout>=1.0.0 8 | unittest2>=1.1.0 9 | webtest>=2.0.29,<3.0 10 | -------------------------------------------------------------------------------- /test/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright 2016 Google Inc. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | -------------------------------------------------------------------------------- /test/integration/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright 2016 Google Inc. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 
5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | -------------------------------------------------------------------------------- /test/integration/ssl.cert: -------------------------------------------------------------------------------- 1 | -----BEGIN CERTIFICATE----- 2 | MIIDozCCAougAwIBAgIJAPej0j3fUyWDMA0GCSqGSIb3DQEBCwUAMIGAMQ8wDQYD 3 | VQQKDAZHb29nbGUxCzAJBgNVBAYTAlVTMQswCQYDVQQIDAJXQTEQMA4GA1UEBwwH 4 | U2VhdHRsZTEOMAwGA1UECwwFQ2xvdWQxEjAQBgNVBAMMCWxvY2FsaG9zdDEdMBsG 5 | CSqGSIb3DQEJARYOcmhkQGdvb2dsZS5jb20wHhcNMTcwODI1MTg1NTE4WhcNMjAw 6 | ODI0MTg1NTE4WjCBgDEPMA0GA1UECgwGR29vZ2xlMQswCQYDVQQGEwJVUzELMAkG 7 | A1UECAwCV0ExEDAOBgNVBAcMB1NlYXR0bGUxDjAMBgNVBAsMBUNsb3VkMRIwEAYD 8 | VQQDDAlsb2NhbGhvc3QxHTAbBgkqhkiG9w0BCQEWDnJoZEBnb29nbGUuY29tMIIB 9 | IjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA0BCvabW5pU7xSig46NDS4c7L 10 | 4Ih5Kbl+TDXtdUeQahoVybKQF7qUWO0Jy53Owz+FCvMKc3cCZ9RE9FcbdQLZnePp 11 | wmVVye+/ELCjbdqGNABU4r9fnXBgoviURxOVjdjJhtt8geXQCJuEYUPRtXTxYpdA 12 | nWOqpqNC4nahnp/2n2Fwju4ozeA001CpQWgqHuGFf+pHgKfqgmkd5QbLero3ToqT 13 | ipnIjIMgvDwSafqGkD/6TP0HhX4BQ8rtbDRcqpRr816ht8hyRWleR57UBpq2T+2X 14 | XhTXYScoc8UOTeiu53dke6royxRrJ+xXpTvy/7rwAnWwq5k6NKq8pNjIHw1fCwID 15 | AQABox4wHDAaBgNVHREEEzARgglsb2NhbGhvc3SHBH8AAAEwDQYJKoZIhvcNAQEL 16 | BQADggEBAJ2VMaXZnHrE56qHOD45vMvhiE7l4Ujwqs7a/wtw/nmp3+qkvbIR1l9P 17 | UUiDmR+cxTufGY87DErknFj19087xH76iNvytATDMSJpl7NsDwp1qbXkopl9Oe8R 18 | AJuvqSVIGhjePDyokOsajmt5/obGC6kUcO/UwnqN697UWK8TwlUsyIk0LP36FyXl 19 | xJH/SlWMaHoHttXmPU0bTqbsXOD1DWxYdGvS3i3qwH1jHfhFZH1QcvBNjz1S7NV6 20 | lfeo3y55rFkb3FZhb4066pf/Eb9/qL676KboAzI2F/0c1Ks4V9G6lpOYccVpuH7H 21 | l1R581lnlYNPKugbxDdm/7tK0F7ZnAI= 22 | -----END CERTIFICATE----- 23 | -------------------------------------------------------------------------------- /test/integration/ssl.key: -------------------------------------------------------------------------------- 1 | -----BEGIN PRIVATE KEY----- 2 | MIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQDQEK9ptbmlTvFK 3 | KDjo0NLhzsvgiHkpuX5MNe11R5BqGhXJspAXupRY7QnLnc7DP4UK8wpzdwJn1ET0 4 | Vxt1Atmd4+nCZVXJ778QsKNt2oY0AFTiv1+dcGCi+JRHE5WN2MmG23yB5dAIm4Rh 5 | Q9G1dPFil0CdY6qmo0LidqGen/afYXCO7ijN4DTTUKlBaCoe4YV/6keAp+qCaR3l 6 | Bst6ujdOipOKmciMgyC8PBJp+oaQP/pM/QeFfgFDyu1sNFyqlGvzXqG3yHJFaV5H 7 | ntQGmrZP7ZdeFNdhJyhzxQ5N6K7nd2R7qujLFGsn7FelO/L/uvACdbCrmTo0qryk 8 | 2MgfDV8LAgMBAAECggEAOqqpu0XJTco/kOhce5D+FTuJEwuIFnK+IOEHzozaNICc 9 | ah3WMUqjr5tOqsIZXcZYTikPZlVFMV+R/c9d55VhJPrxm6WVFfZqylr0CfKW6qy5 10 | QqIxZKQeK3Wp4YytG6znzgWvSrUw/mxTXFWSGPcbZPA/AOHsaUYZgt9uXNq/4FpI 11 | CHZR2BEwdP1FArQYcAYOl+Oxv10ydJmLlG9KJrNJoQjRrHu28Z7Cm8//d11Oj4i0 12 | Hg5tuw7Ym1dkUF8AhjLBR3zz6xz1BGuNflLUX5cmqogiPimjnWi2XINsCErAF1rF 13 | ReLiQvTx/e6QtYnevFqjLlAAi3nqrR0zjgYUO6lhKQKBgQDplZt3W5euCs+KAO+J 14 | 11df8cd/68biN6Fp/ip3tPHAEg6jZJFGhsnSCTIgZhJvbXiZfcUobZ5GX3BYEvQQ 15 | ZxbKkPvjb1+kkOzYfyPXvLDO6k7BSj1iQLjiKJqlStiNNE+hqnna7cU/Rgxfa4jL 16 | 5R/xbmverKZ5HEw0+H/0ir2tlQKBgQDkCCfBuBAwzjdBP7UxIXMHyN74sQ7Vs6NA 17 | ZBzOtZF9gel36Mf/8lU9bg21rY976KxeWMpZH1/k3Z77s7ly4nTeS4m4ugYxKxtJ 18 | Hr34wBA/O6IRNtnppfl+Am/9NR5zbzSbwT52XTdis9wGvhOR2zvsg5BeOphmM5fc 19 | UL0KI2VyHwKBgE898szuJKnlfvzJTnoLtNeWtWBfQ0xRJRBJKm0L1IudVJv3rRUo 20 | 
+uSnO/sjxZNJpwUjVrRdY7lp/TnTzDqYTnIP93Pzv5WbmM3la+pvV+gKMwd9wQ9I 21 | 96+5qKT0nDxjB7THmak8ypKpl97zIyhQpaKJHE0hcyRZBj1eJEJ4otp1AoGAe0T9 22 | WPeLqRaRQDCUXq7aZEbuYjo07slhYxm7iPMWZ2anMy8fjvkNo1uvBZuBiVZKd+Y/ 23 | 3NXdHPfBTgfmJHjO/KyREs9dMY8//NMllyccoVI3Vl70hh43NStUJulkQLrJEbnx 24 | bkMOAcmBFBQEuEBvsHeev5GlePpDCdKLnsNrv7kCgYEAucA5tA/GLALkXmlWERiD 25 | Yp4YWEemB+ru6QfWGPHOsdtbpwEz+lM0EvkfnIyMf0tKBtZaSqQJrgR1z94mPsK4 26 | MDzZ+3oheBsiPyNIJiYeds+i0L1qejCz05jbxXqIA3+xZ/AbuX0ljPuDiRC6FIe1 27 | l+TZwDuo80BY3LwA/wJkvkA= 28 | -----END PRIVATE KEY----- 29 | -------------------------------------------------------------------------------- /test/test_caches.py: -------------------------------------------------------------------------------- 1 | # Copyright 2016 Google Inc. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | from __future__ import absolute_import 16 | 17 | import collections 18 | import datetime 19 | import unittest2 20 | 21 | from expects import be, be_a, be_none, equal, expect, raise_error 22 | 23 | from endpoints_management.control import caches, report_request 24 | 25 | 26 | _TEST_NUM_ENTRIES = 3 # arbitrary 27 | 28 | 29 | class TestDequeOutLRUCache(unittest2.TestCase): 30 | 31 | def test_constructor_should_set_up_a_default_deque(self): 32 | c = caches.DequeOutLRUCache(_TEST_NUM_ENTRIES) 33 | expect(c.out_deque).to(be_a(collections.deque)) 34 | 35 | def test_constructor_should_fail_on_bad_deques(self): 36 | testf = lambda: caches.DequeOutLRUCache(_TEST_NUM_ENTRIES, 37 | out_deque=object()) 38 | expect(testf).to(raise_error(ValueError)) 39 | 40 | def test_constructor_should_accept_deques(self): 41 | a_deque = collections.deque() 42 | c = caches.DequeOutLRUCache(_TEST_NUM_ENTRIES, out_deque=a_deque) 43 | expect(c.out_deque).to(be(a_deque)) 44 | 45 | def test_lru(self): 46 | lru_limit = 2 47 | cache = caches.DequeOutLRUCache(lru_limit) 48 | cache[1] = 1 49 | cache[2] = 2 50 | cache[3] = 3 51 | expect(len(cache)).to(equal(2)) 52 | expect(cache[2]).to(equal(2)) 53 | expect(cache[3]).to(equal(3)) 54 | expect(cache.get(1)).to(be_none) 55 | expect(len(cache.out_deque)).to(be(1)) 56 | cache[4] = 4 57 | expect(cache.get(2)).to(be_none) 58 | expect(len(cache.out_deque)).to(be(2)) 59 | 60 | 61 | class _Timer(object): 62 | def __init__(self, auto=False): 63 | self.auto = auto 64 | self.time = 0 65 | 66 | def __call__(self): 67 | if self.auto: 68 | self.tick() 69 | return self.time 70 | 71 | def tick(self): 72 | self.time += 1 73 | 74 | 75 | _TEST_TTL = 3 # arbitrary 76 | 77 | 78 | class TestDequeOutTTLCache(unittest2.TestCase): 79 | # pylint: disable=fixme 80 | # 81 | # TODO: add a ttl test based on the one in cachetools testsuite 82 | 83 | def test_constructor_should_set_up_a_default_deque(self): 84 | c = caches.DequeOutTTLCache(_TEST_NUM_ENTRIES, _TEST_TTL) 85 | expect(c.out_deque).to(be_a(collections.deque)) 86 | 87 | def test_constructor_should_fail_on_bad_deques(self): 88 | testf = lambda: caches.DequeOutTTLCache(_TEST_NUM_ENTRIES, 
_TEST_TTL, 89 | out_deque=object()) 90 | expect(testf).to(raise_error(ValueError)) 91 | 92 | def test_constructor_should_accept_deques(self): 93 | a_deque = collections.deque() 94 | c = caches.DequeOutTTLCache(3, 3, out_deque=a_deque) 95 | expect(c.out_deque).to(be(a_deque)) 96 | 97 | def test_lru(self): 98 | lru_limit = 2 99 | expiry = 100 100 | cache = caches.DequeOutTTLCache(lru_limit, expiry) 101 | cache[1] = 1 102 | cache[2] = 2 103 | cache[3] = 3 104 | expect(len(cache)).to(equal(2)) 105 | expect(cache[2]).to(equal(2)) 106 | expect(cache[3]).to(equal(3)) 107 | expect(cache.get(1)).to(be_none) 108 | expect(len(cache.out_deque)).to(be(1)) 109 | cache[4] = 4 110 | expect(cache.get(2)).to(be_none) 111 | expect(len(cache.out_deque)).to(be(2)) 112 | 113 | def test_ttl(self): 114 | cache = caches.DequeOutTTLCache(2, ttl=1, timer=_Timer()) 115 | expect(cache.timer()).to(equal(0)) 116 | expect(cache.ttl).to(equal(1)) 117 | 118 | cache[1] = 1 119 | expect(set(cache)).to(equal({1})) 120 | expect(len(cache)).to(equal(1)) 121 | expect(cache[1]).to(equal(1)) 122 | 123 | cache.timer.tick() 124 | expect(set(cache)).to(equal({1})) 125 | expect(len(cache)).to(equal(1)) 126 | expect(cache[1]).to(equal(1)) 127 | 128 | cache[2] = 2 129 | expect(set(cache)).to(equal({1, 2})) 130 | expect(len(cache)).to(equal(2)) 131 | expect(cache[1]).to(equal(1)) 132 | expect(cache[2]).to(equal(2)) 133 | 134 | cache.timer.tick() 135 | expect(set(cache)).to(equal({2})) 136 | expect(len(cache)).to(equal(1)) 137 | expect(cache[2]).to(equal(2)) 138 | expect(cache.get(1)).to(be_none) 139 | 140 | 141 | class _DateTimeTimer(object): 142 | def __init__(self, auto=False): 143 | self.auto = auto 144 | self.time = datetime.datetime(1970, 1, 1) 145 | 146 | def __call__(self): 147 | if self.auto: 148 | self.tick() 149 | return self.time 150 | 151 | def tick(self): 152 | self.time += datetime.timedelta(seconds=1) 153 | 154 | 155 | class TestCreate(unittest2.TestCase): 156 | 157 | def test_should_fail_if_bad_options_are_used(self): 158 | should_fail = [ 159 | lambda: caches.create(object()), 160 | ] 161 | for testf in should_fail: 162 | expect(testf).to(raise_error(ValueError)) 163 | 164 | def test_should_return_none_if_options_is_none(self): 165 | expect(caches.create(None)).to(be_none) 166 | 167 | def test_should_return_none_if_cache_size_not_positive(self): 168 | should_be_none = [ 169 | lambda: caches.create(caches.CheckOptions(num_entries=0)), 170 | lambda: caches.create(caches.CheckOptions(num_entries=-1)), 171 | lambda: caches.create(caches.ReportOptions(num_entries=0)), 172 | lambda: caches.create(caches.ReportOptions(num_entries=-1)), 173 | ] 174 | for testf in should_be_none: 175 | expect(testf()).to(be_none) 176 | 177 | def test_should_return_ttl_cache_if_flush_interval_is_positive(self): 178 | delta = datetime.timedelta(seconds=1) 179 | should_be_ttl = [ 180 | lambda timer: caches.create( 181 | caches.CheckOptions(num_entries=1, flush_interval=delta), 182 | timer=timer 183 | ), 184 | lambda timer: caches.create( 185 | caches.ReportOptions(num_entries=1, flush_interval=delta), 186 | timer=timer 187 | ), 188 | ] 189 | for testf in should_be_ttl: 190 | timer = _DateTimeTimer() 191 | sync_cache = testf(timer) 192 | expect(sync_cache).to(be_a(caches.LockedObject)) 193 | with sync_cache as cache: 194 | expect(cache).to(be_a(caches.DequeOutTTLCache)) 195 | expect(cache.timer()).to(equal(0)) 196 | cache[1] = 1 197 | expect(set(cache)).to(equal({1})) 198 | expect(cache.get(1)).to(equal(1)) 199 | timer.tick() 200 | 
expect(cache.get(1)).to(equal(1)) 201 | timer.tick() 202 | expect(cache.get(1)).to(be_none) 203 | 204 | # Is still TTL without the custom timer 205 | sync_cache = testf(None) 206 | expect(sync_cache).to(be_a(caches.LockedObject)) 207 | with sync_cache as cache: 208 | expect(cache).to(be_a(caches.DequeOutTTLCache)) 209 | 210 | def test_should_return_a_lru_cache_if_flush_interval_is_negative(self): 211 | delta = datetime.timedelta(seconds=-1) 212 | should_be_ttl = [ 213 | lambda: caches.create( 214 | caches.CheckOptions(num_entries=1, flush_interval=delta), 215 | ), 216 | lambda: caches.create( 217 | caches.ReportOptions(num_entries=1, flush_interval=delta)), 218 | ] 219 | for testf in should_be_ttl: 220 | sync_cache = testf() 221 | expect(sync_cache).to(be_a(caches.LockedObject)) 222 | with sync_cache as cache: 223 | expect(cache).to(be_a(caches.DequeOutLRUCache)) 224 | 225 | 226 | class TestReportOptions(unittest2.TestCase): 227 | 228 | def test_should_create_with_defaults(self): 229 | options = caches.ReportOptions() 230 | expect(options.num_entries).to(equal( 231 | caches.ReportOptions.DEFAULT_NUM_ENTRIES)) 232 | expect(options.flush_interval).to(equal( 233 | caches.ReportOptions.DEFAULT_FLUSH_INTERVAL)) 234 | 235 | 236 | class TestCheckOptions(unittest2.TestCase): 237 | AN_INTERVAL = datetime.timedelta(milliseconds=2) 238 | A_LOWER_INTERVAL = datetime.timedelta(milliseconds=1) 239 | 240 | def test_should_create_with_defaults(self): 241 | options = caches.CheckOptions() 242 | expect(options.num_entries).to(equal( 243 | caches.CheckOptions.DEFAULT_NUM_ENTRIES)) 244 | expect(options.flush_interval).to(equal( 245 | caches.CheckOptions.DEFAULT_FLUSH_INTERVAL)) 246 | expect(options.expiration).to(equal( 247 | caches.CheckOptions.DEFAULT_EXPIRATION)) 248 | 249 | def test_should_ignores_lower_expiration(self): 250 | wanted_expiration = ( 251 | self.AN_INTERVAL + datetime.timedelta(milliseconds=1)) 252 | options = caches.CheckOptions(flush_interval=self.AN_INTERVAL, 253 | expiration=self.A_LOWER_INTERVAL) 254 | expect(options.flush_interval).to(equal(self.AN_INTERVAL)) 255 | expect(options.expiration).to(equal(wanted_expiration)) 256 | expect(options.expiration).not_to(equal(self.A_LOWER_INTERVAL)) 257 | -------------------------------------------------------------------------------- /test/test_distribution.py: -------------------------------------------------------------------------------- 1 | # Copyright 2016 Google Inc. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 
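# A rough sketch of the distribution helpers exercised by these tests
# (the calls below mirror the assertions in this module; anything beyond
# them should be treated as an assumption):
#
#   from endpoints_management.control import distribution
#
#   # 3 finite buckets (growth factor 2, scale 0.1), plus an underflow and
#   # an overflow bucket, giving 5 bucket counts in total.
#   d1 = distribution.create_exponential(3, 2, 0.1)
#   d2 = distribution.create_exponential(3, 2, 0.1)
#   distribution.add_sample(0.11, d1)  # falls in the first finite bucket
#   distribution.add_sample(0.5, d2)   # falls in the third finite bucket
#   distribution.merge(d1, d2)         # folds d1 into d2; d2.count becomes 2
#
# merge() requires both operands to share identical bucket options; the
# mismatch cases that raise ValueError are covered below.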
14 | 15 | from __future__ import absolute_import 16 | 17 | import sys 18 | import unittest2 19 | from expects import expect, equal, raise_error 20 | 21 | from endpoints_management.control import distribution, sc_messages 22 | 23 | 24 | class TestCreateExponential(unittest2.TestCase): 25 | 26 | def test_should_fail_if_num_finite_buckets_is_bad(self): 27 | testf = lambda: distribution.create_exponential(0, 1.1, 0.1) 28 | expect(testf).to(raise_error(ValueError)) 29 | 30 | def test_should_fail_if_growth_factor_is_bad(self): 31 | testf = lambda: distribution.create_exponential(1, 0.9, 0.1) 32 | expect(testf).to(raise_error(ValueError)) 33 | 34 | def test_should_fail_if_scale_is_bad(self): 35 | testf = lambda: distribution.create_exponential(1, 1.1, -0.1) 36 | expect(testf).to(raise_error(ValueError)) 37 | 38 | def test_should_succeed_if_inputs_are_ok(self): 39 | num_finite_buckets = 1 40 | got = distribution.create_exponential(num_finite_buckets, 1.1, 0.1) 41 | expect(len(got.bucketCounts)).to(equal(num_finite_buckets + 2)) 42 | 43 | 44 | class TestCreateLinear(unittest2.TestCase): 45 | 46 | def test_should_fail_if_num_finite_buckets_is_bad(self): 47 | testf = lambda: distribution.create_linear(0, 1.1, 0.1) 48 | expect(testf).to(raise_error(ValueError)) 49 | 50 | def test_should_fail_if_growth_factor_is_bad(self): 51 | testf = lambda: distribution.create_linear(1, -0.1, 0.1) 52 | expect(testf).to(raise_error(ValueError)) 53 | 54 | def test_should_succeed_if_inputs_are_ok(self): 55 | num_finite_buckets = 1 56 | got = distribution.create_linear(num_finite_buckets, 0.1, 0.1) 57 | expect(len(got.bucketCounts)).to(equal(num_finite_buckets + 2)) 58 | 59 | 60 | class TestCreateExplicit(unittest2.TestCase): 61 | 62 | def test_should_fail_if_there_are_matching_bounds(self): 63 | testf = lambda: distribution.create_explicit([0.0, 0.1, 0.1]) 64 | expect(testf).to(raise_error(ValueError)) 65 | 66 | def test_should_succeed_if_inputs_are_ok(self): 67 | want = [0.1, 0.2, 0.3] 68 | got = distribution.create_explicit([0.1, 0.2, 0.3]) 69 | expect(got.explicitBuckets.bounds).to(equal(want)) 70 | expect(len(got.bucketCounts)).to(equal(len(want) + 1)) 71 | 72 | def test_should_succeed_if_input_bounds_are_unsorted(self): 73 | want = [0.1, 0.2, 0.3] 74 | got = distribution.create_explicit([0.3, 0.1, 0.2]) 75 | expect(got.explicitBuckets.bounds).to(equal(want)) 76 | 77 | 78 | def _make_explicit_dist(): 79 | return distribution.create_explicit([0.1, 0.3, 0.5, 0.7]) 80 | 81 | 82 | def _make_linear_dist(): 83 | return distribution.create_linear(3, 0.2, 0.1) 84 | 85 | 86 | def _make_exponential_dist(): 87 | return distribution.create_exponential(3, 2, 0.1) 88 | 89 | _UNDERFLOW_SAMPLE = 1e-5 90 | _LOW_SAMPLE = 0.11 91 | _HIGH_SAMPLE = 0.5 92 | _OVERFLOW_SAMPLE = 1e5 93 | 94 | _TEST_SAMPLES_AND_BUCKETS = [ 95 | { 96 | u'samples': [_UNDERFLOW_SAMPLE], 97 | u'want': [1, 0, 0, 0, 0] 98 | }, 99 | { 100 | u'samples': [_LOW_SAMPLE] * 2, 101 | u'want': [0, 2, 0, 0, 0] 102 | }, 103 | { 104 | u'samples': [_LOW_SAMPLE, _HIGH_SAMPLE, _HIGH_SAMPLE], 105 | u'want': [0, 1, 0, 2, 0] 106 | }, 107 | { 108 | u'samples': [_OVERFLOW_SAMPLE], 109 | u'want': [0, 0, 0, 0, 1] 110 | }, 111 | ] 112 | 113 | 114 | def _expect_stats_eq_direct_calc_from_samples(d, samples): 115 | # pylint: disable=fixme 116 | # TODO: update this the sum of rho-squared 117 | want_mean = sum(samples) / len(samples) 118 | expect(d.mean).to(equal(want_mean)) 119 | expect(d.maximum).to(equal(max(samples))) 120 | expect(d.minimum).to(equal(min(samples))) 121 | 122 | 123 | 
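# In the expected bucket counts above, index 0 is the underflow bucket and
# the final index is the overflow bucket; the distribution's finite buckets
# sit in between (hence num_finite_buckets + 2 counts for the linear and
# exponential cases, and len(bounds) + 1 counts for the explicit case).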
class TestAddSample(unittest2.TestCase): 124 | NOTHING_SET = sc_messages.Distribution() 125 | 126 | def test_should_fail_if_no_buckets_are_set(self): 127 | testf = lambda: distribution.add_sample(_UNDERFLOW_SAMPLE, 128 | self.NOTHING_SET) 129 | expect(testf).to(raise_error(ValueError)) 130 | 131 | def expect_adds_test_samples_ok(self, make_dist_func): 132 | for t in _TEST_SAMPLES_AND_BUCKETS: 133 | d = make_dist_func() 134 | samples = t[u'samples'] 135 | for s in samples: 136 | distribution.add_sample(s, d) 137 | expect(d.bucketCounts).to(equal(t[u'want'])) 138 | _expect_stats_eq_direct_calc_from_samples(d, samples) 139 | 140 | def test_update_explict_buckets_ok(self): 141 | self.expect_adds_test_samples_ok(_make_explicit_dist) 142 | 143 | def test_update_exponential_buckets_ok(self): 144 | self.expect_adds_test_samples_ok(_make_exponential_dist) 145 | 146 | def test_update_linear_buckets_ok(self): 147 | self.expect_adds_test_samples_ok(_make_linear_dist) 148 | 149 | 150 | class TestMerge(unittest2.TestCase): 151 | 152 | def setUp(self): 153 | self.merge_triples = ( 154 | ( 155 | distribution.create_exponential(3, 2, 0.1), 156 | distribution.create_exponential(3, 2, 0.1), 157 | distribution.create_exponential(4, 2, 0.1), 158 | ),( 159 | distribution.create_linear(3, 0.2, 0.1), 160 | distribution.create_linear(3, 0.2, 0.1), 161 | distribution.create_linear(4, 0.2, 0.1) 162 | ),( 163 | distribution.create_explicit([0.1, 0.3]), 164 | distribution.create_explicit([0.1, 0.3]), 165 | distribution.create_explicit([0.1, 0.3, 0.5]), 166 | ) 167 | ) 168 | for d1, d2, _ in self.merge_triples: 169 | distribution.add_sample(_LOW_SAMPLE, d1) 170 | distribution.add_sample(_HIGH_SAMPLE, d2) 171 | 172 | def test_should_fail_on_dissimilar_bucket_options(self): 173 | explicit = _make_explicit_dist() 174 | linear = _make_linear_dist() 175 | exponential = _make_exponential_dist() 176 | pairs = ( 177 | (explicit, linear), 178 | (explicit, exponential), 179 | (linear, exponential) 180 | ) 181 | for p in pairs: 182 | testf = lambda: distribution.merge(*p) 183 | expect(testf).to(raise_error(ValueError)) 184 | 185 | def test_should_fail_on_dissimilar_bucket_counts(self): 186 | for _, d2, d3 in self.merge_triples: 187 | testf = lambda: distribution.merge(d2, d3) 188 | expect(testf).to(raise_error(ValueError)) 189 | 190 | def test_should_merge_stats_correctly(self): 191 | # TODO(add a check of the variance) 192 | for d1, d2, _ in self.merge_triples: 193 | distribution.merge(d1, d2) 194 | expect(d2.count).to(equal(2)) 195 | expect(d2.mean).to(equal((_HIGH_SAMPLE + _LOW_SAMPLE) / 2)) 196 | expect(d2.maximum).to(equal(_HIGH_SAMPLE)) 197 | expect(d2.minimum).to(equal(_LOW_SAMPLE)) 198 | 199 | def test_should_merge_bucket_counts_correctly(self): 200 | for d1, d2, _ in self.merge_triples: 201 | d1_start = list(d1.bucketCounts) 202 | d2_start = list(d2.bucketCounts) 203 | want = [x + y for (x,y) in zip(d1_start, d2_start)] 204 | distribution.merge(d1, d2) 205 | expect(d2.bucketCounts).to(equal(want)) 206 | -------------------------------------------------------------------------------- /test/test_label_descriptor.py: -------------------------------------------------------------------------------- 1 | # Copyright 2016 Google Inc. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 
5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | from __future__ import absolute_import 16 | 17 | import base64 18 | import datetime 19 | import unittest2 20 | from expects import be_none, be_true, expect, equal, raise_error 21 | 22 | from endpoints_management.control import (label_descriptor, sm_messages, 23 | report_request) 24 | 25 | _KNOWN = label_descriptor.KnownLabels 26 | ValueType = label_descriptor.ValueType 27 | 28 | class KnownLabelsBase(object): 29 | SUBJECT = None 30 | GIVEN_INFO = report_request.Info( 31 | api_method = u'dummy_method', 32 | api_version = u'dummy_version', 33 | location = u'dummy_location', 34 | referer = u'dummy_referer', 35 | consumer_project_number=1234) 36 | WANTED_LABEL_DICT = {} 37 | 38 | def _matching_descriptor(self, hide_default=False): 39 | res = sm_messages.LabelDescriptor( 40 | key=self.SUBJECT.label_name, 41 | valueType=self.SUBJECT.value_type) 42 | if res.valueType == ValueType.STRING and hide_default: 43 | res.valueType = None 44 | return res 45 | 46 | def _not_matched(self): 47 | d = self._matching_descriptor() 48 | d.valueType = ValueType.INT64 # no known labels have this type 49 | return d 50 | 51 | def test_should_be_supported(self): 52 | expect(_KNOWN.is_supported(self._matching_descriptor())).to(be_true) 53 | expect(_KNOWN.is_supported( 54 | self._matching_descriptor(hide_default=True))).to(be_true) 55 | expect(_KNOWN.is_supported(self._not_matched())).not_to(be_true) 56 | 57 | def test_should_be_matched_correctly(self): 58 | expect(self.SUBJECT.matches(self._matching_descriptor())).to(be_true) 59 | expect(self.SUBJECT.matches( 60 | self._matching_descriptor(hide_default=True))).to(be_true) 61 | expect(self.SUBJECT.matches(self._not_matched())).not_to(be_true) 62 | 63 | def test_should_update_request_info(self): 64 | given_dict = {} 65 | self.SUBJECT.do_labels_update(self.GIVEN_INFO, given_dict) 66 | expect(given_dict).to(equal(self.WANTED_LABEL_DICT)) 67 | 68 | 69 | class TestCredentialIdWithNoCreds(KnownLabelsBase, unittest2.TestCase): 70 | SUBJECT = _KNOWN.CREDENTIAL_ID 71 | 72 | 73 | class TestCredentialIdWithApiKey(KnownLabelsBase, unittest2.TestCase): 74 | SUBJECT = _KNOWN.CREDENTIAL_ID 75 | GIVEN_INFO = report_request.Info( 76 | api_key = u'dummy_api_key', 77 | ) 78 | WANTED_LABEL_DICT = {SUBJECT.label_name: b'apiKey:dummy_api_key'} 79 | 80 | 81 | class TestCredentialIdWithAuthIssuer(KnownLabelsBase, unittest2.TestCase): 82 | SUBJECT = _KNOWN.CREDENTIAL_ID 83 | GIVEN_INFO = report_request.Info( 84 | auth_issuer = u'dummy_issuer', 85 | auth_audience = u'dummy_audience') 86 | WANTED_VALUE = b'jwtAuth:issuer=' + base64.urlsafe_b64encode(b'dummy_issuer') 87 | WANTED_VALUE += b'&audience=' + base64.urlsafe_b64encode(b'dummy_audience') 88 | WANTED_LABEL_DICT = {SUBJECT.label_name: WANTED_VALUE} 89 | 90 | 91 | class EndUser(KnownLabelsBase, unittest2.TestCase): 92 | SUBJECT = _KNOWN.END_USER 93 | 94 | 95 | class EndUserCountry(KnownLabelsBase, unittest2.TestCase): 96 | SUBJECT = _KNOWN.END_USER_COUNTRY 97 | 98 | 99 | class ErrorType(KnownLabelsBase, unittest2.TestCase): 100 | SUBJECT = _KNOWN.ERROR_TYPE 101 | WANTED_LABEL_DICT = 
{SUBJECT.label_name: u'2xx'} 102 | 103 | 104 | class Protocol(KnownLabelsBase, unittest2.TestCase): 105 | SUBJECT = _KNOWN.PROTOCOL 106 | WANTED_LABEL_DICT = { 107 | SUBJECT.label_name: report_request.ReportedProtocols.UNKNOWN.name 108 | } 109 | 110 | 111 | class Referer(KnownLabelsBase, unittest2.TestCase): 112 | SUBJECT = _KNOWN.REFERER 113 | WANTED_LABEL_DICT = {SUBJECT.label_name: u'dummy_referer'} 114 | 115 | 116 | class ResponseCode(KnownLabelsBase, unittest2.TestCase): 117 | SUBJECT = _KNOWN.RESPONSE_CODE 118 | WANTED_LABEL_DICT = {SUBJECT.label_name: u'200'} 119 | 120 | 121 | class ResponseCodeClass(KnownLabelsBase, unittest2.TestCase): 122 | SUBJECT = _KNOWN.RESPONSE_CODE_CLASS 123 | WANTED_LABEL_DICT = {SUBJECT.label_name: u'2xx'} 124 | 125 | 126 | class StatusCodeWithOkStatus(KnownLabelsBase, unittest2.TestCase): 127 | SUBJECT = _KNOWN.STATUS_CODE 128 | WANTED_LABEL_DICT = {SUBJECT.label_name: u'0'} 129 | 130 | 131 | class StatusCodeWithKnown4XXStatus(KnownLabelsBase, unittest2.TestCase): 132 | SUBJECT = _KNOWN.STATUS_CODE 133 | GIVEN_INFO = report_request.Info( 134 | response_code = 401, 135 | ) 136 | WANTED_LABEL_DICT = {SUBJECT.label_name: u'16'} 137 | 138 | 139 | class StatusCodeWithUnknown4XXStatus(KnownLabelsBase, unittest2.TestCase): 140 | SUBJECT = _KNOWN.STATUS_CODE 141 | GIVEN_INFO = report_request.Info( 142 | response_code = 477, 143 | ) 144 | WANTED_LABEL_DICT = {SUBJECT.label_name: u'9'} 145 | 146 | 147 | class StatusCodeWithKnown5XXStatus(KnownLabelsBase, unittest2.TestCase): 148 | SUBJECT = _KNOWN.STATUS_CODE 149 | GIVEN_INFO = report_request.Info( 150 | response_code = 501, 151 | ) 152 | WANTED_LABEL_DICT = {SUBJECT.label_name: u'12'} 153 | 154 | 155 | class StatusCodeWithUnknown5XXStatus(KnownLabelsBase, unittest2.TestCase): 156 | SUBJECT = _KNOWN.STATUS_CODE 157 | GIVEN_INFO = report_request.Info( 158 | response_code = 577, 159 | ) 160 | WANTED_LABEL_DICT = {SUBJECT.label_name: u'13'} 161 | 162 | 163 | class StatusCodeWithUnknownStatus(KnownLabelsBase, unittest2.TestCase): 164 | SUBJECT = _KNOWN.STATUS_CODE 165 | GIVEN_INFO = report_request.Info( 166 | response_code = 777, 167 | ) 168 | WANTED_LABEL_DICT = {SUBJECT.label_name: u'2'} 169 | 170 | 171 | class GaeCloneId(KnownLabelsBase, unittest2.TestCase): 172 | SUBJECT = _KNOWN.GAE_CLONE_ID 173 | 174 | 175 | class GaeModuleId(KnownLabelsBase, unittest2.TestCase): 176 | SUBJECT = _KNOWN.GAE_MODULE_ID 177 | 178 | 179 | class GaeReplicaIndex(KnownLabelsBase, unittest2.TestCase): 180 | SUBJECT = _KNOWN.GAE_REPLICA_INDEX 181 | 182 | 183 | class GaeVersionId(KnownLabelsBase, unittest2.TestCase): 184 | SUBJECT = _KNOWN.GAE_VERSION_ID 185 | 186 | 187 | class GcpLocation(KnownLabelsBase, unittest2.TestCase): 188 | SUBJECT = _KNOWN.GCP_LOCATION 189 | WANTED_LABEL_DICT = {SUBJECT.label_name: u'dummy_location'} 190 | 191 | 192 | class GcpProject(KnownLabelsBase, unittest2.TestCase): 193 | SUBJECT = _KNOWN.GCP_PROJECT 194 | 195 | 196 | class GcpRegion(KnownLabelsBase, unittest2.TestCase): 197 | SUBJECT = _KNOWN.GCP_REGION 198 | 199 | 200 | class GcpResourceId(KnownLabelsBase, unittest2.TestCase): 201 | SUBJECT = _KNOWN.GCP_RESOURCE_ID 202 | 203 | 204 | class GcpResourceType(KnownLabelsBase, unittest2.TestCase): 205 | SUBJECT = _KNOWN.GCP_RESOURCE_TYPE 206 | 207 | 208 | class GcpService(KnownLabelsBase, unittest2.TestCase): 209 | SUBJECT = _KNOWN.GCP_SERVICE 210 | 211 | 212 | class GcpZone(KnownLabelsBase, unittest2.TestCase): 213 | SUBJECT = _KNOWN.GCP_ZONE 214 | 215 | 216 | class GcpUid(KnownLabelsBase, unittest2.TestCase): 
217 | SUBJECT = _KNOWN.GCP_UID 218 | 219 | 220 | class GcpApiMethod(KnownLabelsBase, unittest2.TestCase): 221 | SUBJECT = _KNOWN.GCP_API_METHOD 222 | WANTED_LABEL_DICT = {SUBJECT.label_name: u'dummy_method'} 223 | 224 | 225 | class GcpApiVersion(KnownLabelsBase, unittest2.TestCase): 226 | SUBJECT = _KNOWN.GCP_API_VERSION 227 | WANTED_LABEL_DICT = {SUBJECT.label_name: u'dummy_version'} 228 | 229 | 230 | class SccAndroidCertFingerprint(KnownLabelsBase, unittest2.TestCase): 231 | SUBJECT = _KNOWN.SCC_ANDROID_CERT_FINGERPRINT 232 | 233 | 234 | class SccAndroidPackageName(KnownLabelsBase, unittest2.TestCase): 235 | SUBJECT = _KNOWN.SCC_ANDROID_PACKAGE_NAME 236 | 237 | 238 | class SccCallerIp(KnownLabelsBase, unittest2.TestCase): 239 | SUBJECT = _KNOWN.SCC_CALLER_IP 240 | 241 | 242 | class SccIosBundleId(KnownLabelsBase, unittest2.TestCase): 243 | SUBJECT = _KNOWN.SCC_IOS_BUNDLE_ID 244 | 245 | 246 | class SccPlatform(KnownLabelsBase, unittest2.TestCase): 247 | SUBJECT = _KNOWN.SCC_PLATFORM 248 | WANTED_LABEL_DICT = { 249 | SUBJECT.label_name: report_request.ReportedPlatforms.UNKNOWN.name 250 | } 251 | 252 | 253 | class SccReferer(KnownLabelsBase, unittest2.TestCase): 254 | SUBJECT = _KNOWN.SCC_REFERER 255 | 256 | 257 | class SccServiceAgent(KnownLabelsBase, unittest2.TestCase): 258 | SUBJECT = _KNOWN.SCC_SERVICE_AGENT 259 | WANTED_LABEL_DICT = {SUBJECT.label_name: label_descriptor.SERVICE_AGENT} 260 | 261 | 262 | class SccUserAgent(KnownLabelsBase, unittest2.TestCase): 263 | SUBJECT = _KNOWN.SCC_USER_AGENT 264 | WANTED_LABEL_DICT = {SUBJECT.label_name: label_descriptor.USER_AGENT} 265 | 266 | 267 | class SccConsumerProject(KnownLabelsBase, unittest2.TestCase): 268 | SUBJECT = _KNOWN.SCC_CONSUMER_PROJECT 269 | WANTED_LABEL_DICT = {SUBJECT.label_name: "1234"} 270 | -------------------------------------------------------------------------------- /test/test_metric_value.py: -------------------------------------------------------------------------------- 1 | # Copyright 2016 Google Inc. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 
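# A rough sketch of the metric_value helpers exercised by these tests
# (names and keyword arguments mirror the assertions in this module;
# anything beyond them should be treated as an assumption):
#
#   from endpoints_management.control import metric_value, MetricKind
#
#   mv1 = metric_value.create(labels={u'key1': u'value1'}, doubleValue=1.0)
#   mv2 = metric_value.create(labels={u'key1': u'value1'}, doubleValue=2.0)
#   # sign()/update_hash() appear to cover only the labels and any money
#   # currency code, so values with matching labels share a signature.
#   signature = metric_value.sign(mv1)
#   metric_value.merge(MetricKind.DELTA, mv1, mv2)  # combine same-kind values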
14 | 15 | from __future__ import absolute_import 16 | 17 | import datetime 18 | import hashlib 19 | 20 | import unittest2 21 | from expects import equal, expect, raise_error 22 | 23 | from apitools.base.py import encoding 24 | from endpoints_management.control import (distribution, timestamp, 25 | metric_value, sc_messages) 26 | from endpoints_management.control import MetricKind 27 | 28 | 29 | class TestUpdateHash(unittest2.TestCase): 30 | NOTHING_ADDED = hashlib.md5().digest() 31 | 32 | def make_hash(self, mv): 33 | md5 = hashlib.md5() 34 | metric_value.update_hash(md5, mv) 35 | return md5.digest() 36 | 37 | def test_should_add_nothing_without_labels_or_currency(self): 38 | expect(self.make_hash(sc_messages.MetricValue())).to( 39 | equal(self.NOTHING_ADDED)) 40 | 41 | def test_should_add_matching_hashes_for_matching_labels(self): 42 | a_dict = {u'test': u'dict'} 43 | mv1 = metric_value.create(labels=a_dict) 44 | mv2 = metric_value.create(labels=a_dict) 45 | want = self.make_hash(mv1) 46 | got = self.make_hash(mv2) 47 | expect(got).to(equal(want)) 48 | 49 | def test_should_update_hash_for_when_currency_is_added(self): 50 | a_dict = {u'test': u'dict'} 51 | mv1 = metric_value.create(labels=a_dict) 52 | mv2 = metric_value.create(labels=a_dict) 53 | mv2.moneyValue = sc_messages.Money(currencyCode=u'JPY') 54 | want = self.make_hash(mv1) 55 | got = self.make_hash(mv2) 56 | expect(got).to_not(equal(want)) 57 | 58 | 59 | class TestSign(TestUpdateHash): 60 | 61 | def make_hash(self, mv): 62 | return metric_value.sign(mv) 63 | 64 | 65 | class TestMerge(unittest2.TestCase): 66 | A_FLOAT_VALUE = 1.0 67 | EARLY = timestamp.to_rfc3339(datetime.datetime(1970, 1, 1, 10, 0, 0)) 68 | LATER = timestamp.to_rfc3339(datetime.datetime(1990, 1, 1, 10, 0, 0)) 69 | TEST_LABELS = { 70 | u'key1': u'value1', 71 | u'key2': u'value2', 72 | } 73 | 74 | def setUp(self): 75 | self.test_value = metric_value.create( 76 | labels=self.TEST_LABELS, 77 | doubleValue=self.A_FLOAT_VALUE) 78 | self.early_ending = metric_value.create( 79 | labels=self.TEST_LABELS, 80 | doubleValue=self.A_FLOAT_VALUE, 81 | endTime=self.EARLY) 82 | self.late_ending = metric_value.create( 83 | labels=self.TEST_LABELS, 84 | doubleValue=self.A_FLOAT_VALUE, 85 | endTime=self.LATER) 86 | self.test_value_with_money = metric_value.create( 87 | labels=self.TEST_LABELS, 88 | moneyValue=sc_messages.Money( 89 | currencyCode=u'JPY', units=100, nanos=0)) 90 | 91 | def test_should_fail_for_metric_values_with_different_types(self): 92 | changed = metric_value.create(labels=self.TEST_LABELS, int64Value=1) 93 | for kind in (MetricKind.GAUGE, MetricKind.CUMULATIVE, MetricKind.DELTA): 94 | testf = lambda: metric_value.merge(kind, self.test_value, changed) 95 | expect(testf).to(raise_error(ValueError)) 96 | 97 | def test_should_fail_for_uninitialized_metric_values(self): 98 | no_init = metric_value.create() 99 | for kind in (MetricKind.GAUGE, MetricKind.CUMULATIVE, MetricKind.DELTA): 100 | testf = lambda: metric_value.merge(kind, no_init, no_init) 101 | expect(testf).to(raise_error(ValueError)) 102 | 103 | def test_should_fail_for_delta_metrics_with_unmergable_types(self): 104 | no_init = metric_value.create() 105 | unmergeables = [ 106 | metric_value.create(stringValue=u'a test string'), 107 | metric_value.create(boolValue=False), 108 | ] 109 | for mv in unmergeables: 110 | testf = lambda: metric_value.merge(MetricKind.DELTA, mv, mv) 111 | expect(testf).to(raise_error(ValueError)) 112 | 113 | def test_should_succeed_for_delta_metrics_with_the_money_type(self): 114 | v 
= self.test_value_with_money 115 | want = 2 * v.moneyValue.units 116 | got = metric_value.merge(MetricKind.DELTA, v, v) 117 | expect(got.moneyValue.units).to(equal(want)) 118 | 119 | def test_should_succeed_for_delta_metrics_with_the_double_type(self): 120 | v = self.test_value 121 | want = 2 * v.doubleValue 122 | got = metric_value.merge(MetricKind.DELTA, v, v) 123 | expect(got.doubleValue).to(equal(want)) 124 | 125 | def test_should_succeed_for_delta_metrics_with_the_int64_type(self): 126 | test_int = 4 127 | v = metric_value.create(labels=self.TEST_LABELS, int64Value=test_int) 128 | want = 2 * test_int 129 | got = metric_value.merge(MetricKind.DELTA, v, v) 130 | expect(got.int64Value).to(equal(want)) 131 | 132 | def test_should_succeed_for_delta_metrics_with_the_distribution_type(self): 133 | test_distribution = distribution.create_explicit([0.1, 0.3, 0.5]) 134 | distribution.add_sample(0.4, test_distribution) 135 | v = metric_value.create(labels=self.TEST_LABELS, 136 | distributionValue=test_distribution) 137 | want = 2 * test_distribution.count 138 | got = metric_value.merge(MetricKind.DELTA, v, v) 139 | expect(got.distributionValue.count).to(equal(want)) 140 | 141 | def test_should_return_metric_value_with_latest_end_time_for_non_deltas(self): 142 | for kind in (MetricKind.GAUGE, MetricKind.CUMULATIVE): 143 | got = metric_value.merge(kind, self.early_ending, self.late_ending) 144 | expect(got).to(equal(self.late_ending)) 145 | got = metric_value.merge(kind, self.late_ending, self.early_ending) 146 | expect(got).to(equal(self.late_ending)) 147 | 148 | def test_should_use_the_latest_end_time_delta_merges(self): 149 | got = metric_value.merge(MetricKind.DELTA, 150 | self.early_ending, 151 | self.late_ending) 152 | expect(got.endTime).to(equal(self.late_ending.endTime)) 153 | got = metric_value.merge(MetricKind.DELTA, 154 | self.late_ending, 155 | self.early_ending) 156 | expect(got.endTime).to(equal(self.late_ending.endTime)) 157 | 158 | def test_should_use_the_earliest_start_time_in_delta_merges(self): 159 | early_starting = metric_value.create( 160 | labels=self.TEST_LABELS, 161 | doubleValue=self.A_FLOAT_VALUE, 162 | startTime=self.EARLY) 163 | late_starting = metric_value.create( 164 | labels=self.TEST_LABELS, 165 | doubleValue=self.A_FLOAT_VALUE, 166 | startTime=self.LATER) 167 | got = metric_value.merge(MetricKind.DELTA, early_starting, 168 | late_starting) 169 | expect(got.startTime).to(equal(early_starting.startTime)) 170 | got = metric_value.merge(MetricKind.DELTA, late_starting, 171 | early_starting) 172 | expect(got.startTime).to(equal(early_starting.startTime)) 173 | -------------------------------------------------------------------------------- /test/test_money.py: -------------------------------------------------------------------------------- 1 | # Copyright 2016 Google Inc. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 
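# Tests for endpoints_management.control.money: validation (check_valid) and
# overflow-aware addition (add) of Money messages.  A hedged sketch using the
# same shapes as the fixtures below:
#
#   yen = sc_messages.Money(currencyCode=u'JPY', units=3, nanos=0)
#   money.check_valid(yen)     # no-op when well-formed; raises ValueError otherwise
#   money.add(yen, yen).units  # == 6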
14 | 15 | from __future__ import absolute_import 16 | 17 | import sys 18 | import unittest2 19 | from expects import expect, equal, raise_error 20 | 21 | from endpoints_management.control import money, sc_messages 22 | 23 | 24 | class TestCheckValid(unittest2.TestCase): 25 | _BAD_CURRENCY = sc_messages.Money(currencyCode=u'this-is-bad') 26 | _MISMATCHED_UNITS = ( 27 | sc_messages.Money(currencyCode=u'JPY', units=-1, nanos=1), 28 | sc_messages.Money(currencyCode=u'JPY', units=1, nanos=-1), 29 | ) 30 | _NANOS_OOB = sc_messages.Money( 31 | currencyCode=u'EUR', units=0, nanos=9999999999) 32 | _OK = ( 33 | sc_messages.Money(currencyCode=u'JPY', units=1, nanos=1), 34 | sc_messages.Money(currencyCode=u'JPY', units=-1, nanos=-1), 35 | sc_messages.Money(currencyCode=u'EUR', units=0, nanos=money.MAX_NANOS), 36 | ) 37 | 38 | def test_should_fail_if_not_really_money(self): 39 | expect(lambda: money.check_valid(object())).to(raise_error(ValueError)) 40 | expect(lambda: money.check_valid(None)).to(raise_error(ValueError)) 41 | 42 | def test_should_fail_when_no_currency_is_set(self): 43 | expect(lambda: money.check_valid(sc_messages.Money())).to( 44 | raise_error(ValueError)) 45 | 46 | def test_should_fail_when_the_currency_is_bad(self): 47 | expect(lambda: money.check_valid(self._BAD_CURRENCY)).to( 48 | raise_error(ValueError)) 49 | 50 | def test_should_fail_when_the_units_and_nanos_are_mismatched(self): 51 | for m in self._MISMATCHED_UNITS: 52 | expect(lambda: money.check_valid(m)).to(raise_error(ValueError)) 53 | 54 | def test_should_fail_when_nanos_are_oob(self): 55 | expect(lambda: money.check_valid(self._NANOS_OOB)).to( 56 | raise_error(ValueError)) 57 | 58 | def test_should_succeed_for_ok_instances(self): 59 | for m in self._OK: 60 | money.check_valid(m) 61 | 62 | 63 | class TestAdd(unittest2.TestCase): 64 | _SOME_YEN = sc_messages.Money(currencyCode=u'JPY', units=3, nanos=0) 65 | _SOME_YEN_DEBT = sc_messages.Money(currencyCode=u'JPY', units=-2, nanos=-1) 66 | _SOME_MORE_YEN = sc_messages.Money(currencyCode=u'JPY', units=1, nanos=3) 67 | _SOME_USD = sc_messages.Money(currencyCode=u'USD', units=1, nanos=0) 68 | _INT64_MAX = sys.maxint 69 | _INT64_MIN = -sys.maxint - 1 70 | _LARGE_YEN = sc_messages.Money( 71 | currencyCode=u'JPY', units=_INT64_MAX -1, nanos=0) 72 | _LARGE_YEN_DEBT = sc_messages.Money( 73 | currencyCode=u'JPY', units=-_INT64_MAX + 1, nanos=0) 74 | 75 | def test_should_fail_if_non_money_is_used(self): 76 | testfs = [ 77 | lambda: money.add(self._SOME_YEN, object()), 78 | lambda: money.add(object(), self._SOME_USD), 79 | lambda: money.add(None, self._SOME_USD), 80 | lambda: money.add(self._SOME_YEN, None), 81 | ] 82 | for testf in testfs: 83 | expect(testf).to(raise_error(ValueError)) 84 | 85 | def test_should_fail_on_currency_mismatch(self): 86 | testf = lambda: money.add(self._SOME_YEN, self._SOME_USD) 87 | expect(testf).to(raise_error(ValueError)) 88 | 89 | def test_should_fail_on_unallowed_positive_overflows(self): 90 | testf = lambda: money.add(self._SOME_YEN, self._LARGE_YEN) 91 | expect(testf).to(raise_error(OverflowError)) 92 | 93 | def test_should_allow_positive_overflows(self): 94 | overflowing = money.add(self._SOME_YEN, self._LARGE_YEN, 95 | allow_overflow=True) 96 | expect(overflowing.units).to(equal(self._INT64_MAX)) 97 | expect(overflowing.nanos).to(equal(money.MAX_NANOS)) 98 | 99 | def test_should_fail_on_unallowed_negative_overflows(self): 100 | testf = lambda: money.add(self._SOME_YEN_DEBT, self._LARGE_YEN_DEBT) 101 | expect(testf).to(raise_error(OverflowError)) 102 | 
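# A hedged sketch of the saturation behaviour the surrounding overflow tests
# assert (money.add clamps to the int64 range only when allow_overflow=True):
#
#   large_yen = sc_messages.Money(currencyCode=u'JPY', units=sys.maxint - 1, nanos=0)
#   some_yen = sc_messages.Money(currencyCode=u'JPY', units=3, nanos=0)
#   money.add(some_yen, large_yen)                       # raises OverflowError
#   money.add(some_yen, large_yen, allow_overflow=True)  # units == sys.maxint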
103 | def test_should_allow_negative_overflows(self): 104 | overflowing = money.add(self._SOME_YEN_DEBT, self._LARGE_YEN_DEBT, 105 | allow_overflow=True) 106 | expect(overflowing.units).to(equal(self._INT64_MIN)) 107 | expect(overflowing.nanos).to(equal(-money.MAX_NANOS)) 108 | 109 | def test_should_add_ok_when_nanos_have_same_sign(self): 110 | the_sum = money.add(self._SOME_YEN, self._SOME_YEN) 111 | expect(the_sum.units).to(equal(2 * self._SOME_YEN.units)) 112 | 113 | def test_should_add_ok_when_nanos_have_different_signs(self): 114 | the_sum = money.add(self._SOME_YEN, self._SOME_YEN_DEBT) 115 | want_units = self._SOME_YEN_DEBT.units + self._SOME_YEN.units - 1 116 | expect(the_sum.units).to(equal(want_units)) 117 | expect(the_sum.nanos).to(equal(money.MAX_NANOS)) 118 | the_sum = money.add(self._SOME_MORE_YEN, self._SOME_YEN_DEBT) 119 | want_units = self._SOME_YEN_DEBT.units + self._SOME_YEN.units - 1 120 | expect(the_sum.units).to(equal(want_units)) 121 | expect(the_sum.nanos).to(equal(1 - money.MAX_NANOS)) 122 | -------------------------------------------------------------------------------- /test/test_service_config.py: -------------------------------------------------------------------------------- 1 | # Copyright 2016 Google Inc. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 
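# Tests for endpoints_management.config.service_config.fetch_service_config().
# The fetcher reads ENDPOINTS_SERVICE_NAME / ENDPOINTS_SERVICE_VERSION from the
# environment and queries the Service Management API, so the cases below stub
# out both the Google credentials and the HTTP client.  A hedged usage sketch
# (hypothetical service name and config id):
#
#   os.environ[u"ENDPOINTS_SERVICE_NAME"] = u"my-service.example.com"
#   os.environ[u"ENDPOINTS_SERVICE_VERSION"] = u"2017-01-01r0"  # optional; discovered if unset
#   service = service_config.fetch_service_config()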
14 | 15 | import copy 16 | import httmock 17 | import json 18 | import mock 19 | import os 20 | import sys 21 | import unittest 22 | 23 | from apitools.base.py import encoding 24 | from endpoints_management.config import service_config 25 | from endpoints_management.control import sm_messages 26 | from oauth2client import client 27 | 28 | class ServiceConfigFetchTest(unittest.TestCase): 29 | 30 | _ACCESS_TOKEN = u"test_access_token" 31 | 32 | _SERVICE_NAME = u"test_service_name" 33 | _SERVICE_VERSION = u"test_service_version" 34 | _SERVICE_CONFIG_JSON = { 35 | u"name": _SERVICE_NAME, 36 | u"id": _SERVICE_VERSION 37 | } 38 | _SERVICE_CONFIG_LIST_JSON = { 39 | u"serviceConfigs": [{ 40 | u"name": _SERVICE_NAME, 41 | u"title": _SERVICE_NAME, 42 | u"documentation": {}, 43 | u"usage": {}, 44 | u"id": _SERVICE_VERSION 45 | }] 46 | } 47 | 48 | _credentials = mock.MagicMock() 49 | _get_http_client = mock.MagicMock() 50 | 51 | def setUp(self): 52 | os.environ[u"ENDPOINTS_SERVICE_NAME"] = ServiceConfigFetchTest._SERVICE_NAME 53 | os.environ[u"ENDPOINTS_SERVICE_VERSION"] = ServiceConfigFetchTest._SERVICE_VERSION 54 | 55 | self._set_up_default_credential() 56 | 57 | def test_no_service_name(self): 58 | del os.environ[u"ENDPOINTS_SERVICE_NAME"] 59 | message = u'The "ENDPOINTS_SERVICE_NAME" environment variable is not set' 60 | with self.assertRaisesRegexp(ValueError, message): 61 | service_config.fetch_service_config() 62 | 63 | @mock.patch(u"endpoints_management.config.service_config.client.GoogleCredentials", 64 | _credentials) 65 | @mock.patch(u"endpoints_management.config.service_config._get_http_client", _get_http_client) 66 | def test_no_service_version(self): 67 | del os.environ[u"ENDPOINTS_SERVICE_VERSION"] 68 | 69 | list_mock_response = mock.MagicMock() 70 | list_mock_response.status = 200 71 | list_mock_response.data = json.dumps(ServiceConfigFetchTest._SERVICE_CONFIG_LIST_JSON) 72 | 73 | config_mock_response = mock.MagicMock() 74 | config_mock_response.status = 200 75 | config_mock_response.data = json.dumps(ServiceConfigFetchTest._SERVICE_CONFIG_JSON) 76 | 77 | mock_http_client = mock.MagicMock() 78 | mock_http_client.request.side_effect = [list_mock_response, config_mock_response] 79 | ServiceConfigFetchTest._get_http_client.return_value = mock_http_client 80 | 81 | service = encoding.JsonToMessage(sm_messages.Service, 82 | json.dumps(self._SERVICE_CONFIG_JSON)) 83 | self.assertEqual(service, service_config.fetch_service_config()) 84 | 85 | self.assertEqual(2, mock_http_client.request.call_count) 86 | 87 | headers={u"Authorization": u"Bearer " + ServiceConfigFetchTest._ACCESS_TOKEN} 88 | 89 | template = service_config._SERVICE_MGMT_URL_TEMPLATE 90 | url1 = template.format(ServiceConfigFetchTest._SERVICE_NAME, '').rstrip('/') 91 | url2 = template.format(ServiceConfigFetchTest._SERVICE_NAME, 92 | ServiceConfigFetchTest._SERVICE_VERSION) 93 | 94 | call1 = mock.call(u"GET", url1, headers=headers) 95 | call2 = mock.call(u"GET", url2, headers=headers) 96 | 97 | mock_http_client.request.assert_has_calls([call1, call2]) 98 | 99 | @mock.patch(u"endpoints_management.config.service_config.client.GoogleCredentials", 100 | _credentials) 101 | @mock.patch(u"endpoints_management.config.service_config._get_http_client", _get_http_client) 102 | def test_fetch_service_config(self): 103 | mock_response = mock.MagicMock() 104 | mock_response.status = 200 105 | mock_response.data = json.dumps(ServiceConfigFetchTest._SERVICE_CONFIG_JSON) 106 | mock_http_client = mock.MagicMock() 107 | 
mock_http_client.request.return_value = mock_response 108 | ServiceConfigFetchTest._get_http_client.return_value = mock_http_client 109 | 110 | service = encoding.JsonToMessage(sm_messages.Service, 111 | json.dumps(self._SERVICE_CONFIG_JSON)) 112 | self.assertEqual(service, service_config.fetch_service_config()) 113 | 114 | template = service_config._SERVICE_MGMT_URL_TEMPLATE 115 | url = template.format(ServiceConfigFetchTest._SERVICE_NAME, 116 | ServiceConfigFetchTest._SERVICE_VERSION) 117 | headers={u"Authorization": u"Bearer " + ServiceConfigFetchTest._ACCESS_TOKEN} 118 | mock_http_client.request.assert_called_once_with(u"GET", url, 119 | headers=headers) 120 | 121 | @mock.patch(u"endpoints_management.config.service_config.client.GoogleCredentials", 122 | _credentials) 123 | @mock.patch(u"endpoints_management.config.service_config._get_http_client", _get_http_client) 124 | def test_fetch_service_config_failed_forbidden(self): 125 | mock_response = mock.MagicMock() 126 | mock_response.status = 403 127 | mock_http_client = mock.MagicMock() 128 | mock_http_client.request.return_value = mock_response 129 | ServiceConfigFetchTest._get_http_client.return_value = mock_http_client 130 | expected_message = (u"No service 'test_service_name' found or permission denied. " 131 | u"If this is a new Endpoints service, make sure you've deployed " 132 | u"the service config using gcloud.") 133 | with self.assertRaisesRegexp(service_config.ServiceConfigException, expected_message): 134 | service_config.fetch_service_config() 135 | 136 | @mock.patch(u"endpoints_management.config.service_config.client.GoogleCredentials", 137 | _credentials) 138 | @mock.patch(u"endpoints_management.config.service_config._get_http_client", _get_http_client) 139 | def test_fetch_service_config_failed_not_found(self): 140 | mock_response = mock.MagicMock() 141 | mock_response.status = 404 142 | mock_http_client = mock.MagicMock() 143 | mock_http_client.request.return_value = mock_response 144 | ServiceConfigFetchTest._get_http_client.return_value = mock_http_client 145 | expected_message = (u"The service 'test_service_name' was found, but no service config " 146 | u"was found for version 'test_service_version'.") 147 | with self.assertRaisesRegexp(service_config.ServiceConfigException, expected_message): 148 | service_config.fetch_service_config() 149 | 150 | @mock.patch(u"endpoints_management.config.service_config.client.GoogleCredentials", 151 | _credentials) 152 | @mock.patch(u"endpoints_management.config.service_config._get_http_client", _get_http_client) 153 | def test_fetch_service_config_with_wrong_service_name(self): 154 | mock_response = mock.MagicMock() 155 | mock_response.status = 200 156 | config = copy.deepcopy(ServiceConfigFetchTest._SERVICE_CONFIG_JSON) 157 | config[u"name"] = u"incorrect_service_name" 158 | mock_response.data = json.dumps(config) 159 | mock_http_client = mock.MagicMock() 160 | mock_http_client.request.return_value = mock_response 161 | ServiceConfigFetchTest._get_http_client.return_value = mock_http_client 162 | 163 | message = (u"Unexpected service name in service config: " + 164 | config[u"name"]) 165 | with self.assertRaisesRegexp(ValueError, message): 166 | service_config.fetch_service_config() 167 | 168 | @mock.patch(u"endpoints_management.config.service_config.client.GoogleCredentials", 169 | _credentials) 170 | @mock.patch(u"endpoints_management.config.service_config._get_http_client", _get_http_client) 171 | def test_fetch_service_config_with_wrong_service_version(self): 172 | 
mock_response = mock.MagicMock() 173 | mock_response.status = 200 174 | config = copy.deepcopy(ServiceConfigFetchTest._SERVICE_CONFIG_JSON) 175 | config[u"id"] = u"incorrect_service_version" 176 | mock_response.data = json.dumps(config) 177 | mock_http_client = mock.MagicMock() 178 | mock_http_client.request.return_value = mock_response 179 | ServiceConfigFetchTest._get_http_client.return_value = mock_http_client 180 | 181 | message = (u"Unexpected service version in service config: " + 182 | config[u"id"]) 183 | with self.assertRaisesRegexp(ValueError, message): 184 | service_config.fetch_service_config() 185 | 186 | def _set_up_default_credential(self): 187 | default_credential = mock.MagicMock() 188 | ServiceConfigFetchTest._credentials.get_application_default.return_value \ 189 | = default_credential 190 | default_credential.create_scoped.return_value = default_credential 191 | token = ServiceConfigFetchTest._ACCESS_TOKEN 192 | access_token = client.AccessTokenInfo(access_token=token, expires_in=None) 193 | default_credential.get_access_token.return_value = access_token 194 | -------------------------------------------------------------------------------- /test/test_signing.py: -------------------------------------------------------------------------------- 1 | # Copyright 2016 Google Inc. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | from __future__ import absolute_import 16 | 17 | import hashlib 18 | 19 | import unittest2 20 | from expects import equal, expect 21 | 22 | from endpoints_management.control import signing 23 | 24 | 25 | class TestAddDictToHash(unittest2.TestCase): 26 | NOTHING_ADDED = hashlib.md5().digest() 27 | 28 | def test_should_add_nothing_when_dict_is_none(self): 29 | md5 = hashlib.md5() 30 | signing.add_dict_to_hash(md5, None) 31 | expect(md5.digest()).to(equal(self.NOTHING_ADDED)) 32 | 33 | def test_should_add_matching_hashes_for_matching_dicts(self): 34 | a_dict = {u'test': u'dict'} 35 | same_dict = dict(a_dict) 36 | want_hash = hashlib.md5() 37 | signing.add_dict_to_hash(want_hash, a_dict) 38 | want = want_hash.digest() 39 | got_hash = hashlib.md5() 40 | signing.add_dict_to_hash(got_hash, same_dict) 41 | got = got_hash.digest() 42 | expect(got).to(equal(want)) 43 | -------------------------------------------------------------------------------- /test/test_suppliers.py: -------------------------------------------------------------------------------- 1 | # Copyright 2016 Google Inc. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | import json 16 | import unittest 17 | import httmock 18 | import mock 19 | 20 | from Crypto import PublicKey 21 | from jwkest import jwk 22 | 23 | from endpoints_management.auth import suppliers 24 | 25 | 26 | class KeyUriSupplierTest(unittest.TestCase): 27 | 28 | def test_supply_issuer(self): 29 | issuer = u"https://issuer.com" 30 | jwks_uri = u"https://issuer.com/jwks/uri" 31 | configs = {issuer: suppliers.IssuerUriConfig(False, jwks_uri)} 32 | supplier = suppliers.KeyUriSupplier(configs) 33 | self.assertEquals(jwks_uri, supplier.supply(issuer)) 34 | self.assertIsNone(supplier.supply(u"random-issuer")) 35 | 36 | def test_openid_discovery(self): 37 | jwks_uri = u"https://issuer.com/jwks/uri" 38 | @httmock.urlmatch(scheme=u"https", netloc=u"issuer.com", 39 | path=u"/" + suppliers._OPEN_ID_CONFIG_PATH) 40 | def _mock_response(url, request): # pylint: disable=unused-argument 41 | response = {u"jwks_uri": jwks_uri} 42 | return json.dumps(response) 43 | 44 | issuer = u"https://issuer.com" 45 | configs = {issuer: suppliers.IssuerUriConfig(True, None)} 46 | supplier = suppliers.KeyUriSupplier(configs) 47 | with httmock.HTTMock(_mock_response): 48 | self.assertEquals(jwks_uri, supplier.supply(issuer)) 49 | 50 | def test_issuer_without_protocol(self): 51 | jwks_uri = u"https://issuer.com/jwks/uri" 52 | @httmock.urlmatch(scheme=u"https", netloc=u"issuer.com", 53 | path=u"/" + suppliers._OPEN_ID_CONFIG_PATH) 54 | def _mock_response(url, request): # pylint: disable=unused-argument 55 | response = {u"jwks_uri": jwks_uri} 56 | return json.dumps(response) 57 | 58 | # Specify an issuer without protocol to make sure the "https://" prefix is 59 | # added automatically. 
60 | issuer = u"issuer.com" 61 | configs = {issuer: suppliers.IssuerUriConfig(True, None)} 62 | supplier = suppliers.KeyUriSupplier(configs) 63 | with httmock.HTTMock(_mock_response): 64 | self.assertEquals(jwks_uri, supplier.supply(issuer)) 65 | 66 | def test_openid_discovery_with_bad_json(self): 67 | @httmock.urlmatch(scheme=u"https", netloc=u"issuer.com") 68 | def _mock_response_with_bad_json(url, request): # pylint: disable=unused-argument 69 | return u"bad-json" 70 | 71 | issuer = u"https://issuer.com" 72 | configs = {issuer: suppliers.IssuerUriConfig(True, None)} 73 | supplier = suppliers.KeyUriSupplier(configs) 74 | with httmock.HTTMock(_mock_response_with_bad_json): 75 | with self.assertRaises(suppliers.UnauthenticatedException): 76 | supplier.supply(issuer) 77 | 78 | 79 | class JwksSupplierTest(unittest.TestCase): 80 | _mock_timer = mock.MagicMock() 81 | 82 | def setUp(self): 83 | self._key_uri_supplier = mock.MagicMock() 84 | self._jwks_uri_supplier = suppliers.JwksSupplier(self._key_uri_supplier) 85 | 86 | def test_supply_with_unknown_issuer(self): 87 | self._key_uri_supplier.supply.return_value = None 88 | issuer = u"unknown-issuer" 89 | expected_message = u"Cannot find the `jwks_uri` for issuer " + issuer 90 | with self.assertRaisesRegexp(suppliers.UnauthenticatedException, 91 | expected_message): 92 | self._jwks_uri_supplier.supply(issuer) 93 | 94 | def test_supply_with_invalid_json_response(self): 95 | scheme = u"https" 96 | issuer = u"issuer.com" 97 | self._key_uri_supplier.supply.return_value = scheme + u"://" + issuer 98 | 99 | @httmock.urlmatch(scheme=scheme, netloc=issuer) 100 | def _mock_response_with_invalid_json(url, response): # pylint: disable=unused-argument 101 | return u"invalid-json" 102 | 103 | with httmock.HTTMock(_mock_response_with_invalid_json): 104 | with self.assertRaises(suppliers.UnauthenticatedException): 105 | self._jwks_uri_supplier.supply(issuer) 106 | 107 | def test_supply_jwks(self): 108 | rsa_key = PublicKey.RSA.generate(2048) 109 | jwks = jwk.KEYS() 110 | jwks.wrap_add(rsa_key) 111 | 112 | scheme = u"https" 113 | issuer = u"issuer.com" 114 | self._key_uri_supplier.supply.return_value = scheme + u"://" + issuer 115 | 116 | @httmock.urlmatch(scheme=scheme, netloc=issuer) 117 | def _mock_response_with_jwks(url, response): # pylint: disable=unused-argument 118 | return jwks.dump_jwks() 119 | 120 | with httmock.HTTMock(_mock_response_with_jwks): 121 | actual_jwks = self._jwks_uri_supplier.supply(issuer) 122 | self.assertEquals(1, len(actual_jwks)) 123 | actual_key = actual_jwks[0].key 124 | self.assertEquals(rsa_key.n, actual_key.n) 125 | self.assertEquals(rsa_key.e, actual_key.e) 126 | 127 | def test_supply_jwks_with_x509_certificate(self): 128 | rsa_key = PublicKey.RSA.generate(2048) 129 | cert = rsa_key.publickey().exportKey(u"PEM") 130 | kid = u"rsa-cert" 131 | 132 | scheme = u"https" 133 | issuer = u"issuer.com" 134 | self._key_uri_supplier.supply.return_value = scheme + u"://" + issuer 135 | 136 | @httmock.urlmatch(scheme=scheme, netloc=issuer) 137 | def _mock_response_with_x509_certificates(url, response): # pylint: disable=unused-argument 138 | return json.dumps({kid: cert.decode('ascii')}) 139 | 140 | with httmock.HTTMock(_mock_response_with_x509_certificates): 141 | actual_jwks = self._jwks_uri_supplier.supply(issuer) 142 | self.assertEquals(1, len(actual_jwks)) 143 | actual_key = actual_jwks[0].key 144 | 145 | self.assertEquals(kid, actual_jwks[0].kid) 146 | self.assertEquals(rsa_key.n, actual_key.n) 147 | self.assertEquals(rsa_key.e, 
actual_key.e) 148 | 149 | def test_supply_empty_x509_certificate(self): 150 | scheme = u"https" 151 | issuer = u"issuer.com" 152 | self._key_uri_supplier.supply.return_value = scheme + u"://" + issuer 153 | 154 | @httmock.urlmatch(scheme=scheme, netloc=issuer) 155 | def _mock_invalid_response(url, response): # pylint: disable=unused-argument 156 | return json.dumps({u"kid": u"invlid-certificate"}) 157 | 158 | with httmock.HTTMock(_mock_invalid_response): 159 | with self.assertRaises(suppliers.UnauthenticatedException): 160 | self._jwks_uri_supplier.supply(issuer) 161 | 162 | @mock.patch(u"time.time", _mock_timer) 163 | def test_supply_cached_jwks(self): 164 | JwksSupplierTest._mock_timer.return_value = 10 165 | rsa_key = PublicKey.RSA.generate(2048) 166 | jwks = jwk.KEYS() 167 | jwks.wrap_add(rsa_key) 168 | 169 | scheme = u"https" 170 | issuer = u"issuer.com" 171 | self._key_uri_supplier.supply.return_value = scheme + u"://" + issuer 172 | 173 | @httmock.urlmatch(scheme=scheme, netloc=issuer) 174 | def _mock_response_with_jwks(url, response): # pylint: disable=unused-argument 175 | return jwks.dump_jwks() 176 | 177 | with httmock.HTTMock(_mock_response_with_jwks): 178 | self.assertEqual(1, len(self._jwks_uri_supplier.supply(issuer))) 179 | 180 | # Add an additional key to the JWKS to be returned by the HTTP request. 181 | jwks.wrap_add(PublicKey.RSA.generate(2048)) 182 | 183 | # Forward the clock by 1 second. The JWKS should remain cached. 184 | JwksSupplierTest._mock_timer.return_value += 1 185 | self._jwks_uri_supplier.supply(issuer) 186 | self.assertEqual(1, len(self._jwks_uri_supplier.supply(issuer))) 187 | 188 | # Forward the clock by 5 minutes. The cache entry should have expired so 189 | # the returned JWKS should be the updated one with two keys. 190 | JwksSupplierTest._mock_timer.return_value += 5 * 60 191 | self._jwks_uri_supplier.supply(issuer) 192 | self.assertEqual(2, len(self._jwks_uri_supplier.supply(issuer))) 193 | -------------------------------------------------------------------------------- /test/test_timestamp.py: -------------------------------------------------------------------------------- 1 | # Copyright 2016 Google Inc. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 
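# Tests for endpoints_management.control.timestamp: RFC 3339 conversion and
# comparison helpers.  A hedged sketch of the round trip the cases below assert:
#
#   timestamp.to_rfc3339(datetime.datetime(1971, 12, 31, 21, 0, 20, 21000))
#   # -> u'1971-12-31T21:00:20.021Z'
#   timestamp.from_rfc3339(u'1971-12-31T21:00:20.021Z')
#   # -> datetime.datetime(1971, 12, 31, 21, 0, 20, 21000)
#   timestamp.compare(u'1971-10-31T21:00:20.021Z', u'1971-11-30T21:00:20.021Z')  # -> -1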
14 | 15 | from __future__ import absolute_import 16 | 17 | import datetime 18 | 19 | import unittest2 20 | from expects import be_below_or_equal, expect, equal, raise_error 21 | 22 | from endpoints_management.control import timestamp 23 | 24 | 25 | class TestToRfc3339(unittest2.TestCase): 26 | A_LONG_TIME_AGO = datetime.datetime(1971, 12, 31, 21, 0, 20, 21000) 27 | TESTS = [ 28 | (A_LONG_TIME_AGO, u'1971-12-31T21:00:20.021Z'), 29 | (A_LONG_TIME_AGO - datetime.datetime(1970, 1, 1), 30 | u'1971-12-31T21:00:20.021Z') 31 | ] 32 | 33 | def test_should_converts_correctly(self): 34 | for t in self.TESTS: 35 | expect(timestamp.to_rfc3339(t[0])).to(equal(t[1])) 36 | 37 | def test_should_fail_on_invalid_input(self): 38 | testf = lambda: timestamp.to_rfc3339(u'this will not work') 39 | expect(testf).to(raise_error(ValueError)) 40 | 41 | 42 | class TestFromRfc3339(unittest2.TestCase): 43 | TOLERANCE = 10000 # 1e-5 * 1e9 44 | TESTS = [ 45 | # Simple 46 | (u'1971-12-31T21:00:20.021Z', 47 | datetime.datetime(1971, 12, 31, 21, 0, 20, 21000)), 48 | # different timezone 49 | (u'1996-12-19T16:39:57-08:00', 50 | datetime.datetime(1996, 12, 20, 0, 39, 57, 0)), 51 | # microseconds 52 | (u'1996-12-19T16:39:57.123456-08:00', 53 | datetime.datetime(1996, 12, 20, 0, 39, 57, 123456)), 54 | # Beyond 2038 55 | (u'2100-01-01T00:00:00Z', 56 | datetime.datetime(2100, 1, 1, 0, 0, 0, 0)) 57 | ] 58 | 59 | NANO_TESTS = [ 60 | # Simple 61 | (u'1971-12-31T21:00:20.021Z', 62 | (datetime.datetime(1971, 12, 31, 21, 0, 20, 21000), 21000000)), 63 | # different timezone 64 | (u'1996-12-19T16:39:57-08:00', 65 | (datetime.datetime(1996, 12, 20, 0, 39, 57, 0), 0)), 66 | # microseconds 67 | (u'1996-12-19T16:39:57.123456789-08:00', 68 | (datetime.datetime(1996, 12, 20, 0, 39, 57, 123457), 123456789)), 69 | ] 70 | 71 | def test_should_convert_correctly_without_nanos(self): 72 | for t in self.TESTS: 73 | expect(timestamp.from_rfc3339(t[0])).to(equal(t[1])) 74 | 75 | def test_should_convert_correctly_with_nanos(self): 76 | for t in self.NANO_TESTS: 77 | dt, nanos = timestamp.from_rfc3339(t[0], with_nanos=True) 78 | expect(dt).to(equal(t[1][0])) 79 | epsilon = abs(nanos - t[1][1]) 80 | # expect(epsilon).to(equal(0)) 81 | expect(epsilon).to(be_below_or_equal(self.TOLERANCE)) 82 | 83 | 84 | class TestCompare(unittest2.TestCase): 85 | TESTS = [ 86 | # Strings 87 | (u'1971-10-31T21:00:20.021Z', u'1971-11-30T21:00:20.021Z', -1), 88 | (u'1971-11-30T21:00:20.021Z', u'1971-10-30T21:00:20.021Z', 1), 89 | (u'1971-11-30T21:00:20Z', u'1971-11-30T21:00:20Z', 0), 90 | (u'1971-11-30T21:00:20.021Z', u'1971-11-30T21:00:20.041Z', -1), 91 | (u'1971-11-30T21:00:20.021Z', u'1971-11-30T21:00:20.001Z', 1), 92 | # Datetimes 93 | (datetime.datetime(1996, 10, 20, 0, 39, 57, 0), 94 | datetime.datetime(1996, 11, 20, 0, 39, 57, 0), 95 | -1), 96 | (datetime.datetime(1996, 10, 20, 0, 39, 57, 0), 97 | datetime.datetime(1996, 10, 20, 0, 39, 57, 0), 98 | 0), 99 | (datetime.datetime(1996, 11, 20, 0, 39, 57, 0), 100 | datetime.datetime(1996, 10, 20, 0, 39, 57, 0), 101 | 1) 102 | ] 103 | 104 | def test_should_compare_correctly(self): 105 | for t in self.TESTS: 106 | a, b, want = t 107 | expect(timestamp.compare(a, b)).to(equal(want)) 108 | 109 | def test_should_fail_if_inputs_do_not_have_the_same_type(self): 110 | testf = lambda: timestamp.compare(self.TESTS[0][0], 111 | datetime.datetime.utcnow()) 112 | expect(testf).to(raise_error(ValueError)) 113 | testf = lambda: timestamp.compare(self.TESTS[0], 114 | datetime.datetime.utcnow()) 115 | 
expect(testf).to(raise_error(ValueError)) 116 | -------------------------------------------------------------------------------- /test/test_wsgi_errors.py: -------------------------------------------------------------------------------- 1 | # Copyright 2018 Google Inc. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | """Test WSGI responses when check/quota errors occur.""" 16 | 17 | import mock 18 | import pytest 19 | import webtest 20 | from webtest.debugapp import debug_app as DEBUG_APP 21 | 22 | from apitools.base.py import encoding 23 | from endpoints_management.control import ( 24 | client, quota_request, sc_messages, sm_messages, wsgi, 25 | ) 26 | from .test_wsgi import _SYSTEM_PARAMETER_CONFIG_TEST 27 | 28 | 29 | @pytest.fixture(scope='module') 30 | def project_id(): 31 | return 'middleware-errors' 32 | 33 | 34 | @pytest.fixture() 35 | def control_client(): 36 | return mock.MagicMock(spec=client.Client) 37 | 38 | 39 | @pytest.fixture() 40 | def service_config_loader(): 41 | service = encoding.JsonToMessage(sm_messages.Service, _SYSTEM_PARAMETER_CONFIG_TEST) 42 | loader = mock.Mock() 43 | loader.load.return_value = service 44 | return loader 45 | 46 | 47 | @pytest.fixture() 48 | def wrapped_app(project_id, control_client, service_config_loader): 49 | return wsgi.add_all(DEBUG_APP, project_id, control_client, loader=service_config_loader) 50 | 51 | 52 | @pytest.fixture() 53 | def test_app(wrapped_app): 54 | return webtest.TestApp(wrapped_app, lint=False) 55 | 56 | 57 | def test_handle_missing_api_key(control_client, test_app): 58 | url = '/uvw/method_needs_api_key/more_stuff' 59 | check_resp = sc_messages.CheckResponse( 60 | operationId=u'fake_operation_id') 61 | control_client.check.return_value = check_resp 62 | resp = test_app.get(url, expect_errors=True) 63 | assert resp.status_code == 401 64 | assert resp.content_type == 'application/json' 65 | assert wsgi.Middleware._NO_API_KEY_MSG in resp.json['message'] 66 | 67 | 68 | def test_handle_out_of_quota(control_client, test_app): 69 | quota_resp = sc_messages.AllocateQuotaResponse( 70 | allocateErrors = [ 71 | sc_messages.QuotaError( 72 | code=quota_request._QuotaErrors.RESOURCE_EXHAUSTED, 73 | description=u'details') 74 | ] 75 | ) 76 | check_resp = sc_messages.CheckResponse( 77 | operationId=u'fake_operation_id') 78 | control_client.check.return_value = check_resp 79 | control_client.allocate_quota.return_value = quota_resp 80 | url = '/uvw/method2/with_no_param' 81 | resp = test_app.get(url, expect_errors=True) 82 | expected_status, expected_detail = quota_request._QUOTA_ERROR_CONVERSION[ 83 | quota_request._QuotaErrors.RESOURCE_EXHAUSTED] 84 | assert resp.status_code == expected_status 85 | assert resp.content_type == 'application/json' 86 | assert expected_detail in resp.json['message'] 87 | -------------------------------------------------------------------------------- /test/token_utils.py: 
-------------------------------------------------------------------------------- 1 | # Copyright 2016 Google Inc. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | """Provides a utility method that generates auth token.""" 16 | 17 | import json 18 | 19 | from jwkest import jws 20 | 21 | 22 | def generate_auth_token(payload, keys, alg=u"ES256", kid=None): 23 | json_web_signature = jws.JWS(json.dumps(payload), alg=alg, kid=kid) 24 | return json_web_signature.sign_compact(keys=keys) 25 | -------------------------------------------------------------------------------- /tox.ini: -------------------------------------------------------------------------------- 1 | # Copyright 2016 Google Inc. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | [tox] 16 | envlist = py27,pep8,pylint-errors,pylint-full 17 | skip_missing_interpreters=True 18 | 19 | [tox:travis] 20 | 2.7 = py27,pylint-errors 21 | 22 | [testenv] 23 | setenv = 24 | PYTHONPATH = {toxinidir} 25 | 26 | deps= -r{toxinidir}/test-requirements.txt 27 | -r{toxinidir}/requirements.txt 28 | commands= py.test --timeout=30 --cov-report html --cov-report=term --cov {toxinidir}/endpoints_management/ 29 | 30 | [testenv:pep8] 31 | deps= -r{toxinidir}/test-requirements.txt 32 | -r{toxinidir}/requirements.txt 33 | flake8 34 | commands = flake8 --max-complexity=12 endpoints_management --ignore=E501,F401 \ 35 | --exclude=endpoints_management/gen 36 | 37 | [testenv:pylint-errors] 38 | deps= -r{toxinidir}/test-requirements.txt 39 | -r{toxinidir}/requirements.txt 40 | pylint 41 | commands = pylint -f colorized -E --ignore=gen endpoints_management 42 | 43 | [testenv:pylint-warnings] 44 | deps= -r{toxinidir}/test-requirements.txt 45 | -r{toxinidir}/requirements.txt 46 | pylint 47 | commands = pylint -f colorized -d all -e W -r n --ignore=gen endpoints_management 48 | 49 | [testenv:pylint-full] 50 | deps= -r{toxinidir}/test-requirements.txt 51 | -r{toxinidir}/requirements.txt 52 | pylint 53 | commands = pylint -f colorized --disable=all -e E,W,R --ignore=py3 --ignore=gen \ 54 | endpoints_management 55 | 56 | [testenv:devenv] 57 | commands = 58 | envdir = {toxworkdir}/develop 59 | basepython = python2.7 60 | usedevelop = True 61 | deps= -r{toxinidir}/test-requirements.txt 62 | -r{toxinidir}/requirements.txt 63 | --------------------------------------------------------------------------------