├── .gitignore
├── .gitreview
├── .stestr.conf
├── .zuul.d
│   ├── cross-jobs.yaml
│   ├── jobs.yaml
│   ├── project-template.yaml
│   └── project.yaml
├── MANIFEST.in
├── README.rst
├── babel-test
│   ├── babel-input.py
│   ├── expected-log-error.pot
│   └── expected.pot
├── backports.txt
├── bindep.txt
├── denylist.txt
├── detail.py
├── doc
│   ├── requirements.txt
│   └── source
│       ├── conf.py
│       ├── contributor
│       │   └── contributing.rst
│       └── index.rst
├── global-requirements.txt
├── openstack_requirements
│   ├── __init__.py
│   ├── check.py
│   ├── cmds
│   │   ├── __init__.py
│   │   ├── build_lower_constraints.py
│   │   ├── check_conflicts.py
│   │   ├── check_exists.py
│   │   ├── check_py2.py
│   │   ├── edit_constraint.py
│   │   ├── generate.py
│   │   ├── normalize_requirements.py
│   │   ├── validate.py
│   │   └── validate_projects.py
│   ├── constraints.py
│   ├── project.py
│   ├── project_config.py
│   ├── requirement.py
│   ├── tests
│   │   ├── __init__.py
│   │   ├── common.py
│   │   ├── files
│   │   │   ├── denylist.txt
│   │   │   ├── gr-base.txt
│   │   │   ├── old-setup.py
│   │   │   ├── pbr_setup.cfg
│   │   │   ├── project-with-bad-requirement.txt
│   │   │   ├── project-with-oslo-tar.txt
│   │   │   ├── project.txt
│   │   │   ├── setup.cfg
│   │   │   ├── setup.py
│   │   │   ├── test-project.txt
│   │   │   └── upper-constraints.txt
│   │   ├── test_build_lower_constraints.py
│   │   ├── test_check.py
│   │   ├── test_check_constraints.py
│   │   ├── test_constraints.py
│   │   ├── test_edit_constraint.py
│   │   ├── test_generate.py
│   │   ├── test_project.py
│   │   └── test_requirement.py
│   └── utils.py
├── playbooks
│   ├── drop-wheel-mirror.yaml
│   ├── files
│   │   └── project-requirements-change.py
│   ├── nodejs-pre.yaml
│   └── requirements-check.yaml
├── projects.txt
├── requirements.txt
├── roles
│   └── check-requirements
│       ├── README.rst
│       ├── defaults
│       │   └── main.yaml
│       └── tasks
│           └── main.yaml
├── setup.cfg
├── setup.py
├── test-requirements.txt
├── tools
│   ├── README.txt
│   ├── babel-test.sh
│   ├── build_wheels.sh
│   ├── cap.py
│   ├── check-install.py
│   ├── code-search.sh
│   ├── cruft.sh
│   ├── fix-lower-constraints.py
│   ├── functions
│   ├── get-health-report.sh
│   ├── grep-all.sh
│   ├── lint.py
│   ├── list-unused-packages.sh
│   ├── noop-change.sh
│   ├── publish_constraints.sh
│   └── what-broke.py
├── tox.ini
├── upper-constraints-xfails.txt
└── upper-constraints.txt
/.gitignore: -------------------------------------------------------------------------------- 1 | *.egg-info 2 | *.pyc 3 | .*.swp 4 | *~ 5 | .stestr 6 | .tox 7 | .testrepository 8 | .venv 9 | AUTHORS 10 | ChangeLog 11 | dist 12 | .eggs 13 | *.egg 14 | *.pot 15 | doc/build 16 | build 17 | -------------------------------------------------------------------------------- /.gitreview: -------------------------------------------------------------------------------- 1 | [gerrit] 2 | host=review.opendev.org 3 | port=29418 4 | project=openstack/requirements.git 5 | -------------------------------------------------------------------------------- /.stestr.conf: -------------------------------------------------------------------------------- 1 | [DEFAULT] 2 | test_path=./openstack_requirements/tests 3 | top_dir=./ 4 | -------------------------------------------------------------------------------- /.zuul.d/cross-jobs.yaml: -------------------------------------------------------------------------------- 1 | - job: 2 | name: requirements-cross-test 3 | parent: openstack-tox 4 | timeout: 2400 5 | description: | 6 | A parent job to perform cross-repository tests. 7 | 8 | Inherit from this job, and add the intended project to 9 | ``required-projects``. 10 | 11 | Also, set the following variable: 12 | 13 | .. zuul:jobvar:: tox_envlist 14 | 15 | Use the specified tox environments (``ALL`` selects all). 
16 | nodeset: ubuntu-noble 17 | vars: 18 | zuul_work_dir: "{{ (zuul.projects.values() | selectattr('required') | selectattr('name', 'match', '^(?!openstack/requirements)') | list)[0].src_dir }}" 19 | tox_constraints_file: "{{ ansible_user_dir }}/{{ zuul.projects['opendev.org/openstack/requirements'].src_dir }}/upper-constraints.txt" 20 | files: 21 | - upper-constraints.txt 22 | - .zuul.d/cross-jobs.yaml 23 | 24 | - job: 25 | name: cross-aodh-py312 26 | parent: requirements-cross-test 27 | description: Run cross-project tests on aodh with py312. 28 | required-projects: openstack/aodh 29 | vars: 30 | tox_envlist: py312 31 | 32 | - job: 33 | name: cross-barbican-py312 34 | parent: requirements-cross-test 35 | description: Run cross-project tests on barbican with py312. 36 | required-projects: openstack/barbican 37 | vars: 38 | tox_envlist: py312 39 | 40 | - job: 41 | name: cross-ceilometer-py312 42 | parent: requirements-cross-test 43 | description: Run cross-project tests on ceilometer with py312. 44 | required-projects: openstack/ceilometer 45 | vars: 46 | tox_envlist: py312 47 | 48 | - job: 49 | name: cross-cinder-py312 50 | parent: requirements-cross-test 51 | description: Run cross-project tests on cinder with py312. 52 | required-projects: openstack/cinder 53 | vars: 54 | tox_envlist: py312 55 | 56 | - job: 57 | name: cross-designate-py312 58 | parent: requirements-cross-test 59 | description: Run cross-project tests on designate with py312. 60 | required-projects: openstack/designate 61 | vars: 62 | tox_envlist: py312 63 | 64 | - job: 65 | name: cross-glance-py312 66 | parent: requirements-cross-test 67 | description: Run cross-project tests on glance with py312. 68 | required-projects: openstack/glance 69 | vars: 70 | tox_envlist: py312 71 | 72 | - job: 73 | name: cross-heat-py312 74 | parent: requirements-cross-test 75 | description: Run cross-project tests on heat with py312. 76 | required-projects: openstack/heat 77 | vars: 78 | tox_envlist: py312 79 | 80 | - job: 81 | name: cross-horizon-py312 82 | parent: requirements-cross-test 83 | description: Run cross-project tests on horizon with py312. 84 | required-projects: openstack/horizon 85 | vars: 86 | tox_envlist: py312 87 | 88 | - job: 89 | name: cross-horizon-npm 90 | parent: horizon-nodejs20-run-test 91 | timeout: 2400 92 | description: Run cross-project tests on horizon with npm. 93 | required-projects: openstack/horizon 94 | vars: 95 | tox_constraints_file: "{{ ansible_user_dir }}/{{ zuul.projects['opendev.org/openstack/requirements'].src_dir }}/upper-constraints.txt" 96 | tox_envlist: npm 97 | zuul_work_dir: "{{ (zuul.projects.values() | selectattr('required') | selectattr('name', 'match', '^(?!openstack/requirements)') | list)[0].src_dir }}" 98 | pre-run: playbooks/nodejs-pre.yaml 99 | files: 100 | - upper-constraints.txt 101 | - .zuul.d/cross-jobs.yaml 102 | 103 | - job: 104 | name: cross-keystone-py312 105 | parent: requirements-cross-test 106 | description: Run cross-project tests on keystone with py312. 107 | required-projects: openstack/keystone 108 | timeout: 3600 109 | vars: 110 | tox_envlist: py312 111 | 112 | - job: 113 | name: cross-kuryr-py312 114 | parent: requirements-cross-test 115 | description: Run cross-project tests on kuryr-kubernetes with py312. 116 | required-projects: openstack/kuryr-kubernetes 117 | vars: 118 | tox_envlist: py312 119 | 120 | - job: 121 | name: cross-ironic-py312 122 | parent: requirements-cross-test 123 | description: Run cross-project tests on ironic with py312. 
124 | required-projects: openstack/ironic 125 | vars: 126 | tox_envlist: py312 127 | 128 | - job: 129 | name: cross-magnum-py312 130 | parent: requirements-cross-test 131 | description: Run cross-project tests on magnum with py312. 132 | required-projects: openstack/magnum 133 | vars: 134 | tox_envlist: py312 135 | 136 | - job: 137 | name: cross-manila-py312 138 | parent: requirements-cross-test 139 | description: Run cross-project tests on manila with py312. 140 | required-projects: openstack/manila 141 | vars: 142 | tox_envlist: py312 143 | 144 | - job: 145 | name: cross-masakari-py312 146 | parent: requirements-cross-test 147 | description: Run cross-project tests on masakari with py312. 148 | required-projects: openstack/masakari 149 | vars: 150 | tox_envlist: py312 151 | 152 | - job: 153 | name: cross-mistral-py312 154 | parent: requirements-cross-test 155 | description: Run cross-project tests on mistral with py312. 156 | required-projects: openstack/mistral 157 | vars: 158 | tox_envlist: py312 159 | 160 | - job: 161 | name: cross-neutron-py312 162 | parent: requirements-cross-test 163 | description: Run cross-project tests on neutron with py312. 164 | required-projects: openstack/neutron 165 | timeout: 3600 166 | vars: 167 | tox_envlist: py312 168 | 169 | - job: 170 | name: cross-nova-functional 171 | parent: requirements-cross-test 172 | description: Run cross-project functional tests on nova. 173 | required-projects: openstack/nova 174 | vars: 175 | tox_envlist: functional 176 | 177 | - job: 178 | name: cross-placement-functional-py312 179 | parent: requirements-cross-test 180 | description: Run cross-project functional tests on placement with py312. 181 | required-projects: openstack/placement 182 | vars: 183 | tox_envlist: functional-py312 184 | 185 | - job: 186 | name: cross-nova-pep8 187 | parent: requirements-cross-test 188 | description: Run cross-project tests on nova with pep8. 189 | required-projects: openstack/nova 190 | vars: 191 | tox_envlist: pep8 192 | 193 | - job: 194 | name: cross-nova-py312 195 | parent: requirements-cross-test 196 | description: Run cross-project tests on nova with py312. 197 | required-projects: openstack/nova 198 | vars: 199 | tox_envlist: py312 200 | 201 | - job: 202 | name: cross-placement-py312 203 | parent: requirements-cross-test 204 | description: Run cross-project tests on placement with py312. 205 | required-projects: openstack/placement 206 | vars: 207 | tox_envlist: py312 208 | 209 | - job: 210 | name: cross-osvif-py3 211 | parent: requirements-cross-test 212 | description: Run cross-project tests on os-vif with py3. 213 | required-projects: openstack/os-vif 214 | vars: 215 | tox_envlist: py3 216 | 217 | - job: 218 | name: cross-octavia-py312 219 | parent: requirements-cross-test 220 | description: Run cross-project tests on octavia with py312. 221 | required-projects: openstack/octavia 222 | vars: 223 | tox_envlist: py312 224 | 225 | - job: 226 | name: cross-swift-py312 227 | parent: requirements-cross-test 228 | description: Run cross-project tests on swift with py312. 229 | required-projects: openstack/swift 230 | vars: 231 | tox_envlist: py312 232 | 233 | - job: 234 | name: cross-osc-tox-docs 235 | parent: openstack-tox-docs 236 | description: | 237 | Run cross-project tests on python-openstackclient with 238 | openstack-tox-docs. 
239 | vars: 240 | zuul_work_dir: src/opendev.org/openstack/python-openstackclient 241 | required-projects: 242 | - openstack/python-openstackclient 243 | files: 244 | - upper-constraints.txt 245 | - .zuul.d/cross-jobs.yaml 246 | 247 | - job: 248 | name: cross-watcher-py3 249 | parent: requirements-cross-test 250 | description: Run cross-project tests on watcher with py3 251 | required-projects: openstack/watcher 252 | vars: 253 | tox_envlist: py3 254 | -------------------------------------------------------------------------------- /.zuul.d/jobs.yaml: -------------------------------------------------------------------------------- 1 | - job: 2 | name: requirements-tox-babel 3 | parent: openstack-tox 4 | description: | 5 | Run test for requirements project. 6 | 7 | Uses tox with the ``babel`` environment. 8 | files: 9 | - ^babel-test/.* 10 | - ^tox.ini 11 | - ^upper-constraints.txt 12 | vars: 13 | tox_envlist: babel 14 | 15 | - job: 16 | name: requirements-tox-py39-check-uc 17 | parent: openstack-tox-py39 18 | description: | 19 | Run test for requirements project. 20 | 21 | Uses tox with the ``py39-check-uc`` environment. 22 | files: ^upper-constraints.*txt$ 23 | vars: 24 | tox_envlist: py39-check-uc 25 | 26 | - job: 27 | name: requirements-tox-py310-check-uc 28 | parent: openstack-tox-py310 29 | description: | 30 | Run test for requirements project. 31 | 32 | Uses tox with the ``py310-check-uc`` environment. 33 | files: ^upper-constraints.*txt$ 34 | vars: 35 | tox_envlist: py310-check-uc 36 | - job: 37 | name: requirements-tox-py311-check-uc 38 | parent: openstack-tox-py311 39 | description: | 40 | Run test for requirements project. 41 | 42 | Uses tox with the ``py311-check-uc`` environment. 43 | files: ^upper-constraints.*txt$ 44 | vars: 45 | tox_envlist: py311-check-uc 46 | - job: 47 | name: requirements-tox-py312-check-uc 48 | parent: openstack-tox-py312 49 | description: | 50 | Run test for requirements project. 51 | 52 | Uses tox with the ``py312-check-uc`` environment. 53 | files: ^upper-constraints.*txt$ 54 | vars: 55 | tox_envlist: py312-check-uc 56 | - job: 57 | name: requirements-tox-py313-check-uc 58 | parent: openstack-tox-py313 59 | description: | 60 | Run test for requirements project. 61 | 62 | Uses tox with the ``py313-check-uc`` environment. 63 | files: ^upper-constraints.*txt$ 64 | vars: 65 | tox_envlist: py313-check-uc 66 | 67 | - job: 68 | name: requirements-tox-py39-check-uc-no-wheels 69 | parent: requirements-tox-py39-check-uc 70 | description: | 71 | Run test for requirements project. 72 | 73 | Uses tox with the ``py39-check-uc`` environment. 74 | Does not use wheel mirror. 75 | pre-run: playbooks/drop-wheel-mirror.yaml 76 | 77 | - job: 78 | name: requirements-tox-py310-check-uc-no-wheels 79 | parent: requirements-tox-py310-check-uc 80 | description: | 81 | Run test for requirements project. 82 | 83 | Uses tox with the ``py310-check-uc`` environment. 84 | Does not use wheel mirror. 85 | pre-run: playbooks/drop-wheel-mirror.yaml 86 | 87 | - job: 88 | name: requirements-tox-py311-check-uc-no-wheels 89 | parent: requirements-tox-py311-check-uc 90 | description: | 91 | Run test for requirements project. 92 | 93 | Uses tox with the ``py311-check-uc`` environment. 94 | Does not use wheel mirror. 95 | pre-run: playbooks/drop-wheel-mirror.yaml 96 | 97 | - job: 98 | name: requirements-tox-py312-check-uc-no-wheels 99 | parent: requirements-tox-py312-check-uc 100 | description: | 101 | Run test for requirements project. 
102 | 103 | Uses tox with the ``py312-check-uc`` environment. 104 | Does not use wheel mirror. 105 | pre-run: playbooks/drop-wheel-mirror.yaml 106 | 107 | - job: 108 | name: requirements-tox-validate-projects 109 | parent: openstack-tox 110 | files: 111 | - ^projects.txt 112 | description: | 113 | Run test for requirements project. 114 | 115 | Uses tox with the ``validate-projects`` environment. 116 | vars: 117 | tox_envlist: validate-projects 118 | 119 | - job: 120 | name: requirements-tox-bindep 121 | parent: openstack-tox 122 | description: | 123 | Run test for requirements project. 124 | 125 | Uses tox with the ``bindep`` environment. 126 | files: ^bindep.txt$ 127 | vars: 128 | tox_envlist: bindep 129 | -------------------------------------------------------------------------------- /.zuul.d/project-template.yaml: -------------------------------------------------------------------------------- 1 | - project-template: 2 | name: check-requirements 3 | check: 4 | jobs: 5 | - requirements-check 6 | gate: 7 | jobs: 8 | - requirements-check 9 | 10 | - job: 11 | name: requirements-check 12 | parent: unittests 13 | description: | 14 | Check that requirements in the target repo match OpenStack 15 | global requirements. 16 | 17 | **Job Variables** 18 | 19 | .. zuul:jobvar:: zuul_work_dir 20 | :default: {{ zuul.project.src_dir }} 21 | 22 | Directory holding the project to check. 23 | 24 | .. zuul:jobvar:: zuul_branch 25 | :default: {{ zuul.branch }} 26 | 27 | Branch to check. 28 | # NOTE(gmann): Update the nodeset if we move testing to the new version 29 | # of ubuntu. We are explicitly setting the nodeset here because the base 30 | # job (unittests) moving to the ubuntu latest version can break this 31 | # job on stable branch (as this job on stable branch will start running on 32 | # ubuntu latest version). This nodeset setting will avoid such breaking 33 | # on stable branch and make sure it continue to run on the supported ubuntu 34 | # version on stable branches. 35 | nodeset: ubuntu-noble 36 | run: playbooks/requirements-check.yaml 37 | required-projects: 38 | - openstack/requirements 39 | files: 40 | - ^tools/.*-requires$ 41 | - ^.*requirements.txt$ 42 | - ^.*requirements-py[2,3].txt$ 43 | - ^doc/requirements.txt$ 44 | - ^lower-constraints.txt$ 45 | 46 | - job: 47 | name: requirements-check-self 48 | description: | 49 | Run the requirements-check job on another repo in order to 50 | self-test changes to its job configuration. 51 | parent: requirements-check 52 | # This could be any project, nova is chosen arbitrarily. 
53 | required-projects: openstack/nova 54 | vars: 55 | zuul_work_dir: "{{ zuul.projects['opendev.org/openstack/nova'].src_dir }}" 56 | files: 57 | - ^playbooks/requirements-check.yaml$ 58 | - ^roles/check-requirements/ 59 | -------------------------------------------------------------------------------- /.zuul.d/project.yaml: -------------------------------------------------------------------------------- 1 | - project: 2 | templates: 3 | - openstack-python3-jobs 4 | check: 5 | jobs: 6 | - build-wheel-cache-ubuntu-bionic: 7 | files: 8 | - bindep.txt 9 | - build-wheel-cache-ubuntu-focal: 10 | files: 11 | - bindep.txt 12 | - build-wheel-cache-ubuntu-jammy: 13 | files: 14 | - bindep.txt 15 | - openstack-tox-validate 16 | - requirements-check-self 17 | - requirements-tox-babel 18 | - requirements-tox-bindep: 19 | voting: false 20 | - requirements-tox-py39-check-uc 21 | - requirements-tox-py310-check-uc 22 | - requirements-tox-py311-check-uc 23 | - requirements-tox-py312-check-uc 24 | - requirements-tox-py313-check-uc: 25 | voting: false 26 | - requirements-tox-validate-projects: 27 | voting: false 28 | - cross-aodh-py312 29 | - cross-barbican-py312 30 | - cross-ceilometer-py312 31 | - cross-cinder-py312 32 | - cross-designate-py312 33 | - cross-glance-py312 34 | - cross-heat-py312 35 | - cross-horizon-py312 36 | - cross-horizon-npm 37 | - cross-ironic-py312 38 | - cross-keystone-py312 39 | - cross-kuryr-py312 40 | - cross-manila-py312 41 | - cross-magnum-py312 42 | - cross-masakari-py312: 43 | voting: false 44 | - cross-mistral-py312: 45 | voting: false 46 | - cross-neutron-py312 47 | - cross-nova-pep8 48 | - cross-nova-py312 49 | - cross-placement-py312 50 | - cross-osvif-py3 51 | - cross-nova-functional 52 | - cross-placement-functional-py312 53 | - cross-octavia-py312 54 | - cross-osc-tox-docs 55 | - cross-swift-py312 56 | - cross-watcher-py3 57 | - openstacksdk-functional-devstack 58 | - tempest-full-py3: 59 | irrelevant-files: 60 | - ^bindep.txt$ 61 | - ^(test-|)requirements.txt$ 62 | - ^setup.cfg$ 63 | check-arm64: 64 | jobs: 65 | - build-wheel-cache-ubuntu-bionic-arm64: 66 | files: 67 | - bindep.txt 68 | - build-wheel-cache-ubuntu-focal-arm64: 69 | files: 70 | - bindep.txt 71 | - build-wheel-cache-ubuntu-jammy-arm64: 72 | files: 73 | - bindep.txt 74 | gate: 75 | jobs: 76 | - build-wheel-cache-ubuntu-bionic: 77 | files: 78 | - bindep.txt 79 | - build-wheel-cache-ubuntu-focal: 80 | files: 81 | - bindep.txt 82 | - build-wheel-cache-ubuntu-jammy: 83 | files: 84 | - bindep.txt 85 | - openstack-tox-validate 86 | - requirements-check-self 87 | - requirements-tox-babel 88 | - requirements-tox-py39-check-uc 89 | - requirements-tox-py310-check-uc 90 | - requirements-tox-py311-check-uc 91 | - requirements-tox-py312-check-uc 92 | - cross-cinder-py312 93 | - cross-designate-py312 94 | - cross-glance-py312 95 | - cross-heat-py312 96 | - cross-horizon-py312 97 | - cross-horizon-npm 98 | - cross-ironic-py312 99 | - cross-keystone-py312 100 | - cross-kuryr-py312 101 | - cross-manila-py312 102 | - cross-neutron-py312 103 | - cross-nova-pep8 104 | - cross-nova-py312: 105 | files: ^upper-constraints.txt$ 106 | - cross-placement-py312: 107 | files: ^upper-constraints.txt$ 108 | - cross-osvif-py3 109 | - cross-nova-functional 110 | - cross-placement-functional-py312 111 | - cross-octavia-py312 112 | - cross-osc-tox-docs 113 | - cross-swift-py312 114 | - cross-watcher-py3 115 | - openstacksdk-functional-devstack 116 | - tempest-full-py3: 117 | irrelevant-files: 118 | - ^bindep.txt$ 119 | - 
^(test-|)requirements.txt$ 120 | - ^setup.cfg$ 121 | experimental: 122 | jobs: 123 | - nova-ceph-multistore 124 | - nova-next 125 | - requirements-tox-py39-check-uc-no-wheels 126 | - requirements-tox-py310-check-uc-no-wheels 127 | - requirements-tox-py311-check-uc-no-wheels 128 | - requirements-tox-py312-check-uc-no-wheels 129 | - tempest-integrated-storage 130 | - tempest-pg-full: 131 | irrelevant-files: 132 | - ^(test-|)requirements.txt$ 133 | - ^setup.cfg$ 134 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include AUTHORS 2 | include ChangeLog 3 | exclude .gitignore 4 | exclude .gitreview 5 | 6 | global-exclude *.pyc 7 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | =========================================================== 2 | Global Requirements and Constraints for OpenStack Projects 3 | =========================================================== 4 | 5 | .. image:: https://governance.openstack.org/tc/badges/requirements.svg 6 | :target: https://governance.openstack.org/tc/reference/tags/index.html 7 | 8 | Resources and Documentation 9 | =========================== 10 | 11 | Please refer to the dependency management documentation linked below for up to 12 | date documentation on how to use and interact with the requirements project. 13 | 14 | - Documentation: https://docs.openstack.org/project-team-guide/dependency-management.html 15 | - Wiki: https://wiki.openstack.org/wiki/Requirements 16 | - Bugs: https://launchpad.net/openstack-requirements 17 | - Source: https://opendev.org/openstack/requirements 18 | -------------------------------------------------------------------------------- /babel-test/babel-input.py: -------------------------------------------------------------------------------- 1 | # Licensed under the Apache License, Version 2.0 (the "License"); you may 2 | # not use this file except in compliance with the License. You may obtain 3 | # a copy of the License at 4 | # 5 | # http://www.apache.org/licenses/LICENSE-2.0 6 | # 7 | # Unless required by applicable law or agreed to in writing, software 8 | # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 9 | # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the 10 | # License for the specific language governing permissions and limitations 11 | # under the License. 12 | 13 | 14 | """Test input for Babel""" 15 | 16 | 17 | from oslo.i18n import _ 18 | from oslo.i18n import _LE 19 | from oslo_log import log as logging 20 | 21 | LOG = logging.getLogger(__name__) 22 | 23 | 24 | def just_testing(): 25 | """Just some random commands for Babel to extract strings from""" 26 | 27 | LOG.exception(_LE("LE translated string1")) 28 | LOG.exception(_LE("LE translated string2")) 29 | print(_("Normal translated string1")) 30 | # Translators: Comment for string2 31 | print(_("Normal translated string2")) 32 | -------------------------------------------------------------------------------- /babel-test/expected-log-error.pot: -------------------------------------------------------------------------------- 1 | # Translations template for requirements. 2 | # Copyright (C) 2016 ORGANIZATION 3 | # This file is distributed under the same license as the requirements 4 | # project. 5 | # FIRST AUTHOR , 2016. 
6 | # 7 | #, fuzzy 8 | msgid "" 9 | msgstr "" 10 | "Project-Id-Version: requirements 1\n" 11 | "Report-Msgid-Bugs-To: https://bugs.launchpad.net/openstack-i18n/\n" 12 | "POT-Creation-Date: 2016-04-24 09:28+0200\n" 13 | "PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" 14 | "Last-Translator: FULL NAME \n" 15 | "Language-Team: LANGUAGE \n" 16 | "MIME-Version: 1.0\n" 17 | "Content-Type: text/plain; charset=utf-8\n" 18 | "Content-Transfer-Encoding: 8bit\n" 19 | "Generated-By: Babel 1.3\n" 20 | 21 | #: babel-test/babel-input.py:24 22 | msgid "LE translated string1" 23 | msgstr "" 24 | 25 | #: babel-test/babel-input.py:25 26 | msgid "LE translated string2" 27 | msgstr "" 28 | 29 | -------------------------------------------------------------------------------- /babel-test/expected.pot: -------------------------------------------------------------------------------- 1 | # Translations template for requirements. 2 | # Copyright (C) 2016 ORGANIZATION 3 | # This file is distributed under the same license as the requirements 4 | # project. 5 | # FIRST AUTHOR , 2016. 6 | # 7 | #, fuzzy 8 | msgid "" 9 | msgstr "" 10 | "Project-Id-Version: requirements 1\n" 11 | "Report-Msgid-Bugs-To: https://bugs.launchpad.net/openstack-i18n/\n" 12 | "POT-Creation-Date: 2016-04-24 09:28+0200\n" 13 | "PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" 14 | "Last-Translator: FULL NAME \n" 15 | "Language-Team: LANGUAGE \n" 16 | "MIME-Version: 1.0\n" 17 | "Content-Type: text/plain; charset=utf-8\n" 18 | "Content-Transfer-Encoding: 8bit\n" 19 | "Generated-By: Babel 1.3\n" 20 | 21 | #: babel-test/babel-input.py:26 22 | msgid "Normal translated string1" 23 | msgstr "" 24 | 25 | #. Translators: Comment for string2 26 | #: babel-test/babel-input.py:28 27 | msgid "Normal translated string2" 28 | msgstr "" 29 | 30 | -------------------------------------------------------------------------------- /backports.txt: -------------------------------------------------------------------------------- 1 | importlib-metadata 2 | -------------------------------------------------------------------------------- /bindep.txt: -------------------------------------------------------------------------------- 1 | # These are needed to build all the things in global-requirements, which we do 2 | # for integration testing, generate-constraints and wheel building. 
3 | 4 | app-crypt/mit-krb5 [platform:gentoo] 5 | krb5-devel [platform:redhat] 6 | libffi-dev [platform:dpkg] 7 | libffi-devel [platform:redhat] 8 | libjpeg-dev [platform:dpkg] 9 | libjpeg-devel [platform:rpm] 10 | libkrb5-dev [platform:dpkg] 11 | libldap2-dev [platform:dpkg] 12 | libmysqlclient-dev [platform:ubuntu-xenial] 13 | libmariadb-dev [platform:dpkg !platform:ubuntu-xenial] 14 | libnss3-dev [platform:dpkg] 15 | libpq-dev [platform:dpkg] 16 | librdkafka-dev [platform:dpkg] # we need 1.4.0+ which is only in Debian:bullseye so far 17 | libsasl2-dev [platform:dpkg] 18 | libsqlite3-dev [platform:dpkg] 19 | libuuid-devel [platform:rpm] 20 | libvirt-dev [platform:dpkg] 21 | libvirt-devel [platform:redhat] 22 | libxml2-dev [platform:dpkg] 23 | libxml2-devel [platform:rpm] 24 | libxslt-devel [platform:rpm] 25 | libxslt1-dev [platform:dpkg] 26 | libzmq3-dev [platform:dpkg] 27 | mariadb-devel [platform:redhat] 28 | mozilla-nss-devel [platform:suse] 29 | nss-devel [platform:redhat] 30 | libssl-dev [platform:dpkg] 31 | openssl-devel [platform:redhat] 32 | openldap-devel [platform:redhat] 33 | pkg-config [platform:dpkg] 34 | pkgconfig [platform:redhat] 35 | postgresql-devel [platform:redhat] 36 | pypy [platform:dpkg !platform:debian-bookworm !platform:ubuntu-noble] 37 | pypy-dev [platform:dpkg !platform:debian-bookworm !platform:ubuntu-noble] 38 | pypy3 [platform:debian-bookworm platform:ubuntu-noble] 39 | pypy3-dev [platform:debian-bookworm platform:ubuntu-noble] 40 | # Note that python3-all-dev includes python3-all, added 41 | # both here for documentary purpose. 42 | python3-all-dev [platform:dpkg] 43 | python3-all [platform:dpkg] 44 | python-all-dev [platform:dpkg !platform:debian-bookworm !platform:ubuntu-noble] 45 | python3-devel [platform:rpm] 46 | # Ubuntu packages venv separately, otherwise standard 47 | python3-venv [platform:dpkg] 48 | swig 49 | systemd-devel [platform:redhat] 50 | uuid-dev [platform:dpkg] 51 | 52 | # Python things that change rarely and we're willing to risk breakage vs latest 53 | python-numpy [python] 54 | python-yaml [python] 55 | 56 | # NOTE(dirk) needed for PyECLib 57 | liberasurecode-dev [platform:dpkg] 58 | liberasurecode-devel [platform:rpm] # RDO repo 59 | dev-libs/liberasurecode [platform:gentoo] 60 | libsystemd-dev [platform:dpkg !platform:ubuntu-trusty] 61 | libsystemd-journal-dev [platform:ubuntu-trusty] 62 | 63 | libpcre3-dev [platform:dpkg] 64 | pcre-devel [platform:rpm] 65 | 66 | # NOTE(pabelanger): Needed for build-wheel-mirror jobs 67 | gawk [test] 68 | unzip [test] 69 | # NOTE(pabelanger): You must first enable epel to install parallel RPM on 70 | # centos-7. 71 | parallel [test] 72 | 73 | # NOTE(hrw): Needed for numpy/scipy 74 | gfortran [platform:dpkg] 75 | liblapack-dev [platform:dpkg] 76 | libopenblas-dev [platform:dpkg] 77 | gcc-c++ [platform:rpm] 78 | gcc-gfortran [platform:rpm] 79 | lapack-devel [platform:rpm] 80 | openblas-devel [platform:rpm !platform:centos-7] 81 | blas-devel [platform:centos-7] 82 | -------------------------------------------------------------------------------- /denylist.txt: -------------------------------------------------------------------------------- 1 | # linters - each project may have a different version with loose convergence 2 | # over time. 
3 | astroid 4 | ansible-lint 5 | bandit 6 | bashate 7 | flake8 8 | flake8-docstrings 9 | flake8-import-order 10 | flake8-logging-format 11 | hacking 12 | isort 13 | mccabe 14 | mypy 15 | pep257 16 | pep8 17 | pre-commit 18 | pyflakes 19 | pycodestyle 20 | pylint 21 | yamllint 22 | 23 | # The following Neutron libraries need to be denylisted 24 | # as they do not use cycle-with-intermediary. The only time they 25 | # would be updated is at release time. 26 | networking-bagpipe 27 | networking-bgpvpn 28 | networking-l2gw 29 | networking-odl 30 | networking-sfc 31 | neutron 32 | neutron-dynamic-routing 33 | neutron-fwaas 34 | neutron-lbaas 35 | neutron-vpnaas 36 | tap-as-a-service 37 | 38 | # Projects are free to specify their own version of ansible and molecule 39 | ansible 40 | ansible-compat 41 | ansible-core 42 | molecule 43 | molecule-plugins 44 | 45 | # Tempest and its plugins are branchless which means master version of these 46 | # tools are used in testing the master + stable branches (except EM stable branch 47 | # where we pin the Tempest and plugins for testing). Keeping Tempest in the 48 | # u-c file will block testing the master + stable branches with Tempest master. 49 | # Bug#1916771 for more details. 50 | # Once any stable branch move to 'Extended Maintenance' and we pin the 51 | # older Tempest to test them then we can move it from here to u-c file. 52 | tempest 53 | 54 | # annoying from setuptools 55 | pkg_resources 56 | 57 | # We want to always have latest list of trusted Certificate Authorities 58 | certifi 59 | -------------------------------------------------------------------------------- /detail.py: -------------------------------------------------------------------------------- 1 | # Copyright (C) 2014 Yahoo! Inc. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); you may 4 | # not use this file except in compliance with the License. You may obtain 5 | # a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 11 | # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the 12 | # License for the specific language governing permissions and limitations 13 | # under the License. 14 | 15 | import contextlib 16 | import json 17 | import os 18 | import sys 19 | import traceback 20 | import urllib.parse as urlparse 21 | import urllib.request as urlreq 22 | 23 | import pkg_resources 24 | 25 | try: 26 | PYPI_LOCATION = os.environ['PYPI_LOCATION'] 27 | except KeyError: 28 | PYPI_LOCATION = 'http://pypi.org/project' 29 | 30 | 31 | KEEP_KEYS = frozenset([ 32 | 'author', 33 | 'author_email', 34 | 'maintainer', 35 | 'maintainer_email', 36 | 'license', 37 | 'summary', 38 | 'home_page', 39 | ]) 40 | 41 | 42 | def iter_names(req): 43 | for k in (req.key, req.project_name): 44 | yield k 45 | yield k.title() 46 | yield k.replace("-", "_") 47 | yield k.replace("-", "_").title() 48 | 49 | 50 | def release_data(req): 51 | # Try to find it with various names... 
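    # For example, a requirement on python-dateutil makes iter_names() yield
    # python-dateutil, Python-Dateutil, python_dateutil and Python_Dateutil,
    # and each distinct candidate URL is tried against PyPI in turn.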
52 | attempted = [] 53 | for name in iter_names(req): 54 | url = PYPI_LOCATION + "/%s/json" % (urlparse.quote(name)) 55 | if url in attempted: 56 | continue 57 | with contextlib.closing(urlreq.urlopen(url)) as uh: 58 | if uh.getcode() != 200: 59 | attempted.append(url) 60 | continue 61 | return json.loads(uh.read()) 62 | attempted = [" * %s" % u for u in attempted] 63 | raise IOError("Could not find '%s' on pypi\nAttempted urls:\n%s" 64 | % (req.key, "\n".join(attempted))) 65 | 66 | 67 | def main(): 68 | if len(sys.argv) == 1: 69 | print("%s requirement-file ..." % (sys.argv[0]), file=sys.stderr) 70 | sys.exit(1) 71 | for filename in sys.argv[1:]: 72 | print("Analyzing file: %s" % (filename)) 73 | details = {} 74 | with open(filename, "rb") as fh: 75 | for line in fh.read().splitlines(): 76 | line = line.strip() 77 | if line.startswith("#") or not line: 78 | continue 79 | req = pkg_resources.Requirement.parse(line) 80 | print(" - processing: %s" % (req)) 81 | try: 82 | raw_req_data = release_data(req) 83 | except IOError: 84 | traceback.print_exc() 85 | details[req.key] = None 86 | else: 87 | req_info = {} 88 | for (k, v) in raw_req_data.get('info', {}).items(): 89 | if k not in KEEP_KEYS: 90 | continue 91 | req_info[k] = v 92 | details[req.key] = { 93 | 'requirement': str(req), 94 | 'info': req_info, 95 | } 96 | filename, _ext = os.path.splitext(filename) 97 | with open("%s.json" % (filename), "wb") as fh: 98 | fh.write(json.dumps(details, sort_keys=True, indent=4, 99 | separators=(",", ": "))) 100 | 101 | 102 | if __name__ == '__main__': 103 | main() 104 | -------------------------------------------------------------------------------- /doc/requirements.txt: -------------------------------------------------------------------------------- 1 | # this is required for the docs build jobs 2 | sphinx>=2.0.0,!=2.1.0 # BSD 3 | openstackdocstheme>=2.2.1 # Apache-2.0 4 | -------------------------------------------------------------------------------- /doc/source/conf.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | # This file is execfile()d with the current directory set to its containing dir. 4 | # 5 | # Note that not all possible configuration values are present in this 6 | # autogenerated file. 7 | # 8 | # All configuration values have a default; values that are commented out 9 | # serve to show the default. 10 | 11 | import sys 12 | import os 13 | 14 | # If extensions (or modules to document with autodoc) are in another directory, 15 | # add these directories to sys.path here. If the directory is relative to the 16 | # documentation root, use os.path.abspath to make it absolute, like shown here. 17 | #sys.path.insert(0, os.path.abspath('.')) 18 | 19 | # -- General configuration ----------------------------------------------------- 20 | 21 | # If your documentation needs a minimal Sphinx version, state it here. 22 | #needs_sphinx = '1.0' 23 | 24 | # Add any Sphinx extension module names here, as strings. They can be extensions 25 | # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. 26 | extensions = ['openstackdocstheme'] 27 | 28 | todo_include_todos = True 29 | 30 | # The suffix of source filenames. 31 | source_suffix = '.rst' 32 | 33 | # The encoding of source files. 34 | #source_encoding = 'utf-8-sig' 35 | 36 | # The master toctree document. 37 | master_doc = 'index' 38 | 39 | # General information about the project. 
40 | project = u'requirements' 41 | copyright = u'2017, OpenStack Foundation' 42 | 43 | # The language for content autogenerated by Sphinx. Refer to documentation 44 | # for a list of supported languages. 45 | #language = None 46 | 47 | # There are two options for replacing |today|: either, you set today to some 48 | # non-false value, then it is used: 49 | #today = '' 50 | # Else, today_fmt is used as the format for a strftime call. 51 | #today_fmt = '%B %d, %Y' 52 | 53 | # List of patterns, relative to source directory, that match files and 54 | # directories to ignore when looking for source files. 55 | exclude_patterns = ['_build'] 56 | 57 | # The reST default role (used for this markup: `text`) to use for all documents. 58 | #default_role = None 59 | 60 | # If true, '()' will be appended to :func: etc. cross-reference text. 61 | #add_function_parentheses = True 62 | 63 | # If true, the current module name will be prepended to all description 64 | # unit titles (such as .. function::). 65 | add_module_names = False 66 | 67 | # If true, sectionauthor and moduleauthor directives will be shown in the 68 | # output. They are ignored by default. 69 | show_authors = False 70 | 71 | # The name of the Pygments (syntax highlighting) style to use. 72 | pygments_style = 'native' 73 | 74 | # A list of ignored prefixes for module index sorting. 75 | modindex_common_prefix = ['requirements-doc.'] 76 | 77 | # openstackdocstheme options 78 | openstackdocs_repo_name = 'openstack/requirements' 79 | openstackdocs_auto_name = False 80 | openstackdocs_bug_project = 'openstack-requirements' 81 | openstackdocs_bug_tag = '' 82 | 83 | # -- Options for man page output ---------------------------------------------- 84 | man_pages = [] 85 | 86 | # -- Options for HTML output --------------------------------------------------- 87 | 88 | # The theme to use for HTML and HTML Help pages. See the documentation for 89 | # a list of builtin themes. 90 | html_theme = 'openstackdocs' 91 | 92 | # Theme options are theme-specific and customize the look and feel of a theme 93 | # further. For a list of options available for each theme, see the 94 | # documentation. 95 | #html_theme_options = {} 96 | 97 | # Add any paths that contain custom themes here, relative to this directory. 98 | #html_theme_path = [] 99 | 100 | # The name for this set of Sphinx documents. If None, it defaults to 101 | # " v documentation". 102 | #html_title = None 103 | 104 | # A shorter title for the navigation bar. Default is the same as html_title. 105 | #html_short_title = None 106 | 107 | # The name of an image file (relative to this directory) to place at the top 108 | # of the sidebar. 109 | #html_logo = None 110 | 111 | # The name of an image file (within the static path) to use as favicon of the 112 | # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 113 | # pixels large. 114 | #html_favicon = None 115 | 116 | # If true, SmartyPants will be used to convert quotes and dashes to 117 | # typographically correct entities. 118 | #html_use_smartypants = True 119 | 120 | # Custom sidebar templates, maps document names to template names. 121 | #html_sidebars = {} 122 | 123 | # Additional templates that should be rendered to pages, maps page names to 124 | # template names. 125 | #html_additional_pages = {} 126 | 127 | # If false, no module index is generated. 128 | html_domain_indices = False 129 | 130 | # If false, no index is generated. 
131 | html_use_index = False 132 | 133 | # If true, the index is split into individual pages for each letter. 134 | #html_split_index = False 135 | 136 | # If true, links to the reST sources are added to the pages. 137 | #html_show_sourcelink = True 138 | 139 | # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. 140 | #html_show_sphinx = True 141 | 142 | # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. 143 | #html_show_copyright = True 144 | 145 | # If true, an OpenSearch description file will be output, and all pages will 146 | # contain a tag referring to it. The value of this option must be the 147 | # base URL from which the finished HTML is served. 148 | #html_use_opensearch = '' 149 | 150 | # This is the file name suffix for HTML files (e.g. ".xhtml"). 151 | #html_file_suffix = None 152 | 153 | # Output file base name for HTML help builder. 154 | htmlhelp_basename = 'requirements-doc' 155 | 156 | 157 | # -- Options for LaTeX output -------------------------------------------------- 158 | 159 | latex_elements = { 160 | # The paper size ('letterpaper' or 'a4paper'). 161 | #'papersize': 'letterpaper', 162 | 163 | # The font size ('10pt', '11pt' or '12pt'). 164 | #'pointsize': '10pt', 165 | 166 | # Additional stuff for the LaTeX preamble. 167 | #'preamble': '', 168 | } 169 | 170 | # Grouping the document tree into LaTeX files. List of tuples 171 | # (source start file, target name, title, author, documentclass [howto/manual]). 172 | latex_documents = [ 173 | ('index', 'RequirementsGuide.tex', 'Global Requirements Guide', 174 | u'OpenStack contributors', 'manual'), 175 | ] 176 | 177 | # The name of an image file (relative to this directory) to place at the top of 178 | # the title page. 179 | #latex_logo = None 180 | 181 | # For "manual" documents, if this is true, then toplevel headings are parts, 182 | # not chapters. 183 | #latex_use_parts = False 184 | 185 | # If true, show page references after internal links. 186 | #latex_show_pagerefs = False 187 | 188 | # If true, show URL addresses after external links. 189 | #latex_show_urls = False 190 | 191 | # Documents to append as an appendix to all manuals. 192 | #latex_appendices = [] 193 | 194 | # If false, no module index is generated. 195 | #latex_domain_indices = True 196 | 197 | # -- Options for Texinfo output ------------------------------------------------ 198 | 199 | # Grouping the document tree into Texinfo files. List of tuples 200 | # (source start file, target name, title, author, 201 | # dir menu entry, description, category) 202 | texinfo_documents = [ 203 | ('index'), 204 | ] 205 | 206 | # Documents to append as an appendix to all manuals. 207 | #texinfo_appendices = [] 208 | 209 | # If false, no module index is generated. 210 | #texinfo_domain_indices = True 211 | 212 | # How to display URL addresses: 'footnote', 'no', or 'inline'. 213 | #texinfo_show_urls = 'footnote' 214 | 215 | 216 | # -- Options for Epub output --------------------------------------------------- 217 | 218 | # Bibliographic Dublin Core info. 219 | epub_title = u'requirements Documentation' 220 | epub_author = u'OpenStack' 221 | epub_publisher = u'OpenStack' 222 | epub_copyright = u'2017, OpenStack' 223 | 224 | # The language of the text. It defaults to the language option 225 | # or en if the language is not set. 226 | #epub_language = '' 227 | 228 | # The scheme of the identifier. Typical schemes are ISBN or URL. 229 | #epub_scheme = '' 230 | 231 | # The unique identifier of the text. 
This can be an ISBN number 232 | # or the project homepage. 233 | #epub_identifier = '' 234 | 235 | # A unique identification for the text. 236 | #epub_uid = '' 237 | 238 | # A tuple containing the cover image and cover page html template filenames. 239 | #epub_cover = () 240 | 241 | # HTML files that should be inserted before the pages created by sphinx. 242 | # The format is a list of tuples containing the path and title. 243 | #epub_pre_files = [] 244 | 245 | # HTML files that should be inserted after the pages created by sphinx. 246 | # The format is a list of tuples containing the path and title. 247 | #epub_post_files = [] 248 | 249 | # A list of files that should not be packed into the epub file. 250 | #epub_exclude_files = [] 251 | 252 | # The depth of the table of contents in toc.ncx. 253 | #epub_tocdepth = 3 254 | 255 | # Allow duplicate toc entries. 256 | #epub_tocdup = True 257 | -------------------------------------------------------------------------------- /doc/source/contributor/contributing.rst: -------------------------------------------------------------------------------- 1 | ============================ 2 | So You Want to Contribute... 3 | ============================ 4 | 5 | For general information on contributing to OpenStack, please check out the 6 | `contributor guide `_ to get started. 7 | It covers all the basics that are common to all OpenStack projects: the accounts 8 | you need, the basics of interacting with our Gerrit review system, how we 9 | communicate as a community, etc. 10 | 11 | The sections below cover the project-specific information you need to get started 12 | with openstack/requirements. 13 | 14 | Communication 15 | ============= 16 | We are on the #openstack-requirements channel on the OFTC IRC network. 17 | 18 | Our meetings are currently Wednesdays at 2030 UTC. See the 19 | `official meeting `_ for 20 | up-to-date info. 21 | 22 | Contacting the Core Team 23 | ++++++++++++++++++++++++ 24 | On IRC, the nicks of our core team are as follows: 25 | 26 | * dirk 27 | * smcginnis 28 | * prometheanfire 29 | 30 | New Feature Planning 31 | ==================== 32 | New features should have a bug associated with them and be discussed during the 33 | weekly meeting (see below for how to report a bug and above for meeting info). 34 | 35 | Task Tracking 36 | ============= 37 | We track our tasks in 38 | `Storyboard 39 | `_. 40 | 41 | If you're looking for a smaller, easier work item to pick up and get started 42 | on, comment in IRC and we'll find something. 43 | 44 | Reporting a Bug 45 | =============== 46 | If you have found an issue and want to make sure we are aware of it, please 47 | report the issue on 48 | `Storyboard 49 | `_. 50 | 51 | Getting Your Patch Merged 52 | ========================= 53 | Updates proposed by the infra-bot to master only need one core reviewer to 54 | approve and merge. 55 | 56 | All other updates require two reviewers to merge. 57 | 58 | Project Team Lead Duties 59 | ======================== 60 | 61 | OpenStack Freeze Process 62 | ++++++++++++++++++++++++ 63 | 64 | Notice 65 | ------ 66 | 67 | - Email the developer mailing list approximately two weeks before the freeze. 68 | This email should contain a notice that requirements will branch and 69 | cycle-trailing projects should be careful if they have not branched. The 70 | cycle-trailing projects can retarget their constraints usage to the stable 71 | branch. 
72 | 73 | Branch 74 | ------ 75 | 76 | - File a review in ``openstack/releases`` with -W and only remove the -W when 77 | ready to branch. 78 | 79 | - Once branched, change the publish location to the new release branch. 80 | 81 | - Once branched, update devstack grenade for the new release. For example, 82 | use https://review.openstack.org/#/c/493057/13/devstack-vm-gate-wrap.sh 83 | 84 | Potential issues 85 | ---------------- 86 | 87 | - Use something like https://review.openstack.org/#/c/492382 to find problem 88 | projects. 89 | 90 | All common PTL duties are enumerated in the `PTL guide 91 | `_. 92 | 93 | -------------------------------------------------------------------------------- /doc/source/index.rst: -------------------------------------------------------------------------------- 1 | .. include:: ../../README.rst 2 | .. include:: contributor/contributing.rst 3 | -------------------------------------------------------------------------------- /openstack_requirements/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/openstack/requirements/c36e570d3d645e7a51354c977783b3cd8357f91e/openstack_requirements/__init__.py -------------------------------------------------------------------------------- /openstack_requirements/cmds/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/openstack/requirements/c36e570d3d645e7a51354c977783b3cd8357f91e/openstack_requirements/cmds/__init__.py -------------------------------------------------------------------------------- /openstack_requirements/cmds/build_lower_constraints.py: -------------------------------------------------------------------------------- 1 | # Licensed under the Apache License, Version 2.0 (the "License"); 2 | # you may not use this file except in compliance with the License. 3 | # You may obtain a copy of the License at 4 | # 5 | # http://www.apache.org/licenses/LICENSE-2.0 6 | # 7 | # Unless required by applicable law or agreed to in writing, software 8 | # distributed under the License is distributed on an "AS IS" BASIS, 9 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 10 | # implied. 11 | # See the License for the specific language governing permissions and 12 | # limitations under the License. 13 | 14 | """Merge multiple lower-constraints.txt files to find the highest values. 15 | 16 | """ 17 | 18 | import argparse 19 | import collections 20 | 21 | from openstack_requirements.utils import read_requirements_file 22 | 23 | 24 | import packaging.specifiers 25 | import packaging.version 26 | 27 | 28 | def get_requirements_version(req): 29 | """Find the version for a requirement. 30 | 31 | Use the version attached to >=, ==, or ===, depending on the type 32 | of input requirement. 33 | 34 | """ 35 | for specifier in packaging.specifiers.SpecifierSet(req.specifiers): 36 | if '>=' in specifier.operator or '==' in specifier.operator: 37 | return packaging.version.parse(specifier.version) 38 | raise ValueError('could not find version for {}'.format(req)) 39 | 40 | 41 | def merge_constraints_sets(constraints_sets): 42 | "Generator of Requirements with the maximum version for each constraint." 
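    # For example, if one project's lower-constraints.txt lists
    # oslo.config>=5.2.0 and another lists oslo.config>=6.1.0, the merged
    # output keeps oslo.config>=6.1.0: get_requirements_version() is used
    # below to select the entry with the highest lower bound.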
43 | all_constraints = collections.defaultdict(list) 44 | for constraints_set in constraints_sets: 45 | for constraint_name, constraint in constraints_set.items(): 46 | if constraint_name: 47 | all_constraints[constraint_name].extend(constraint) 48 | for constraint_name, constraints in sorted(all_constraints.items()): 49 | val = max((c[0] for c in constraints), key=get_requirements_version) 50 | yield val.to_line() 51 | 52 | 53 | def main(): 54 | parser = argparse.ArgumentParser() 55 | parser.add_argument( 56 | 'lower_constraints', 57 | nargs='+', 58 | help='lower-constraints.txt files', 59 | ) 60 | args = parser.parse_args() 61 | 62 | constraints_sets = [ 63 | read_requirements_file(filename) 64 | for filename in args.lower_constraints 65 | ] 66 | 67 | merged = list(merge_constraints_sets(constraints_sets)) 68 | print(''.join(merged)) 69 | -------------------------------------------------------------------------------- /openstack_requirements/cmds/check_conflicts.py: -------------------------------------------------------------------------------- 1 | # Licensed under the Apache License, Version 2.0 (the "License"); you may 2 | # not use this file except in compliance with the License. You may obtain 3 | # a copy of the License at 4 | # 5 | # http://www.apache.org/licenses/LICENSE-2.0 6 | # 7 | # Unless required by applicable law or agreed to in writing, software 8 | # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 9 | # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the 10 | # License for the specific language governing permissions and limitations 11 | # under the License. 12 | 13 | """Apply validation rules to the various requirements lists. 14 | 15 | """ 16 | 17 | import argparse 18 | import sys 19 | import traceback 20 | 21 | import pkg_resources 22 | 23 | from openstack_requirements.utils import read_requirements_file 24 | 25 | 26 | def main(): 27 | parser = argparse.ArgumentParser() 28 | parser.add_argument( 29 | 'upper_constraints', 30 | default='upper-constraints.txt', 31 | help='path to the upper-constraints.txt file') 32 | parser.add_argument( 33 | 'uc_xfails', 34 | default='upper-constraints-xfails.txt', 35 | help='Path to the upper-constraints-xfails.txt file', 36 | ) 37 | args = parser.parse_args() 38 | 39 | error_count = 0 40 | 41 | print('\nChecking %s' % args.upper_constraints) 42 | upper_constraints = read_requirements_file(args.upper_constraints) 43 | xfails = read_requirements_file(args.uc_xfails) 44 | for name, spec_list in upper_constraints.items(): 45 | try: 46 | if name: 47 | pyver = "python_version=='%s.%s'" % (sys.version_info[0], 48 | sys.version_info[1]) 49 | for req, original_line in spec_list: 50 | if req.markers in ["", pyver]: 51 | pkg_resources.require(name) 52 | except pkg_resources.ContextualVersionConflict as e: 53 | if e.dist.key in xfails: 54 | xfail_requirement = xfails[e.dist.key][0][0] 55 | xfail_denylists = set(xfail_requirement.markers.split(',')) 56 | conflict = e.dist.as_requirement() 57 | conflict_specifiers = ''.join(conflict.specs[0]) 58 | conflict_name = conflict.name.lower() 59 | 60 | if (e.required_by.issubset(xfail_denylists) and 61 | xfail_requirement.package == conflict_name and 62 | conflict_specifiers == xfail_requirement.specifiers): 63 | 64 | print('XFAIL while checking conflicts ' 65 | 'for %s: %s conflicts with %s' % 66 | (name, e.dist, str(e.req))) 67 | continue 68 | 69 | print('Checking conflicts for %s:\n' 70 | 'ContextualVersionConflict: %s' % (name, str(e))) 71 | 72 | 
traceback.print_exc(file=sys.stdout) 73 | error_count += 1 74 | 75 | return 1 if error_count else 0 76 | -------------------------------------------------------------------------------- /openstack_requirements/cmds/check_exists.py: -------------------------------------------------------------------------------- 1 | # Licensed under the Apache License, Version 2.0 (the "License"); you may 2 | # not use this file except in compliance with the License. You may obtain 3 | # a copy of the License at 4 | # 5 | # http://www.apache.org/licenses/LICENSE-2.0 6 | # 7 | # Unless required by applicable law or agreed to in writing, software 8 | # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 9 | # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the 10 | # License for the specific language governing permissions and limitations 11 | # under the License. 12 | 13 | """Check to see if a package from a project's requrements file exist in g-r or 14 | u-c. 15 | 16 | """ 17 | 18 | import argparse 19 | 20 | from packaging.specifiers import SpecifierSet 21 | from packaging.version import Version 22 | 23 | from openstack_requirements import project 24 | from openstack_requirements import requirement 25 | from openstack_requirements.utils import read_requirements_file 26 | 27 | 28 | def main(args=None): 29 | parser = argparse.ArgumentParser() 30 | parser.add_argument( 31 | 'project', 32 | default='', 33 | help='path to the project source root folder.') 34 | parser.add_argument( 35 | '-u', '--upper-constraints', 36 | default='upper-constraints.txt', 37 | help='path to the upper-constraints.txt file') 38 | parser.add_argument( 39 | '-g', '--global-requirements', 40 | default='global-requirements.txt', 41 | help='Path to the global-requirements.txt file') 42 | parser.add_argument( 43 | '-b', '-d', '--denylist', 44 | default='denylist.txt', 45 | help='Path to the denylist.txt file') 46 | parser.add_argument( 47 | '-G', '--gr-check', action='store_true', 48 | help='Do a specifier check of global-requirements') 49 | args = parser.parse_args(args) 50 | 51 | upper_constraints = read_requirements_file(args.upper_constraints) 52 | global_requirements = read_requirements_file(args.global_requirements) 53 | denylist = read_requirements_file(args.denylist) 54 | project_data = project.read(args.project) 55 | error_count = 0 56 | 57 | for require_file, data in project_data.get('requirements', {}).items(): 58 | print(u'\nComparing %s with global-requirements and upper-constraints' 59 | % require_file) 60 | requirements = requirement.parse(data) 61 | for name, spec_list in requirements.items(): 62 | if not name or name in denylist: 63 | continue 64 | if name not in global_requirements: 65 | print(u'%s from %s not found in global-requirements' % ( 66 | name, require_file)) 67 | error_count += 1 68 | continue 69 | if name not in upper_constraints: 70 | print(u'%s from %s not found in upper-constraints' % ( 71 | name, require_file)) 72 | error_count += 1 73 | continue 74 | elif spec_list: 75 | uc = upper_constraints[name][0][0] 76 | gr = global_requirements[name][0][0] 77 | spec_gr = SpecifierSet(gr.specifiers) 78 | for req, _ in spec_list: 79 | specs = SpecifierSet(req.specifiers) 80 | # This assumes uc will only have == specifiers 81 | for uc_spec in SpecifierSet(uc.specifiers): 82 | # if the uc version isn't in the lower specifier 83 | # then something is wrong. 
84 | if Version(uc_spec.version) not in specs: 85 | print( 86 | u'%s must be <= %s from upper-constraints and ' 87 | 'include the upper-constraints version' % 88 | (name, uc_spec.version)) 89 | error_count += 1 90 | continue 91 | if args.gr_check: 92 | for spec in specs: 93 | # g-r will mostly define blocked versions. And a 94 | # local project may define there own, so there is 95 | # no point checking a != specifier 96 | if spec.operator == '!=': 97 | continue 98 | if spec.version not in spec_gr: 99 | print( 100 | u'Specifier %s from %s is failing check ' 101 | 'from global-requirements specifiers %s' % 102 | (spec.version, name, str(spec_gr))) 103 | error_count += 1 104 | continue 105 | 106 | return 1 if error_count else 0 107 | -------------------------------------------------------------------------------- /openstack_requirements/cmds/check_py2.py: -------------------------------------------------------------------------------- 1 | #! /usr/bin/env python 2 | 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 11 | # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the 12 | # License for the specific language governing permissions and limitations 13 | # under the License. 14 | 15 | import argparse 16 | 17 | import pkg_resources 18 | import requests 19 | 20 | 21 | _url_template = 'https://pypi.org/project/{dist}/{version}/json' 22 | 23 | 24 | def _get_metadata(dist, version): 25 | try: 26 | url = _url_template.format(dist=dist, version=version) 27 | response = requests.get(url) 28 | return response.json() 29 | except ValueError: 30 | return {} 31 | 32 | 33 | def main(): 34 | parser = argparse.ArgumentParser() 35 | parser.add_argument( 36 | '--verbose', '-v', 37 | default=False, 38 | action='store_true', 39 | help='turn on noisy output', 40 | ) 41 | parser.add_argument( 42 | '--requirements', 43 | default='upper-constraints.txt', 44 | help='the list of constrained requirements to check', 45 | ) 46 | args = parser.parse_args() 47 | 48 | for line in open(args.requirements, 'r'): 49 | try: 50 | req = pkg_resources.Requirement.parse(line) 51 | except ValueError: 52 | # Assume this is a comment and skip it. 53 | continue 54 | # req.specifier is a set so we can't get an item out of it 55 | # directly. Turn it into a list and take the first (and only) 56 | # value. That gives us an _IndividualSpecifier which has a 57 | # version attribute that is not smart enough to filter out the 58 | # selector value for things like python version, so drop 59 | # anything after the first semicolon. 
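        # For example, if the specifier's version attribute comes back as
        # "1.2.3;python_version=='3.9'", the split below keeps only "1.2.3";
        # a plain "1.2.3" passes through unchanged.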
60 | version = list(req.specifier)[0].version.split(';')[0] 61 | data = _get_metadata(req.project_name, version) 62 | classifiers = data.get('info', {}).get('classifiers', []) 63 | for classifier in classifiers: 64 | if classifier.startswith('Programming Language :: Python :: 2'): 65 | if args.verbose: 66 | print('{}==={} {!r}'.format( 67 | req.project_name, version, classifier)) 68 | break 69 | else: 70 | print('\nNo "Python :: 2" classifier found for {}==={}'.format( 71 | req.project_name, version)) 72 | for classifier in classifiers: 73 | print(' {}'.format(classifier)) 74 | 75 | 76 | if __name__ == '__main__': 77 | main() 78 | -------------------------------------------------------------------------------- /openstack_requirements/cmds/edit_constraint.py: -------------------------------------------------------------------------------- 1 | # Licensed under the Apache License, Version 2.0 (the "License"); 2 | # you may not use this file except in compliance with the License. 3 | # You may obtain a copy of the License at 4 | # 5 | # http://www.apache.org/licenses/LICENSE-2.0 6 | # 7 | # Unless required by applicable law or agreed to in writing, software 8 | # distributed under the License is distributed on an "AS IS" BASIS, 9 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 10 | # implied. 11 | # See the License for the specific language governing permissions and 12 | # limitations under the License. 13 | 14 | import optparse 15 | import os.path 16 | import sys 17 | import textwrap 18 | 19 | 20 | from openstack_requirements import requirement 21 | 22 | 23 | def edit(reqs, name, replacement): 24 | key = requirement.canonical_name(name) 25 | if not replacement: 26 | reqs.pop(key, None) 27 | else: 28 | reqs[key] = [ 29 | (requirement.Requirement('', '', '', '', replacement), '')] 30 | result = [] 31 | for entries in reqs.values(): 32 | for entry, _ in entries: 33 | result.append(entry) 34 | return requirement.Requirements(sorted(result)) 35 | 36 | 37 | # -- untested UI glue from here down. 38 | 39 | 40 | def _validate_options(options, args): 41 | """Check that options and arguments are valid. 42 | 43 | :param options: The optparse options for this program. 44 | :param args: The args for this program. 45 | """ 46 | if len(args) < 2: 47 | raise Exception("Not enough arguments given") 48 | if not os.path.exists(args[0]): 49 | raise Exception( 50 | "Constraints file %(con)s not found." 51 | % dict(con=args[0])) 52 | 53 | 54 | def main(argv=None, stdout=None): 55 | parser = optparse.OptionParser( 56 | usage="%prog [options] constraintpath name replacement", 57 | epilog=textwrap.dedent("""\ 58 | Replaces any entries of "name" in the constraints file with 59 | "replacement". If "name" is not present, it is added to the end of 60 | the file. If "replacement" is missing or empty, remove "name" from 61 | the file. 
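For example, name "foo" with replacement "foo===1.2.0" makes the foo
entry exactly that line, while name "foo" with no replacement removes
the foo entry.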
62 | """)) 63 | options, args = parser.parse_args(argv) 64 | if stdout is None: 65 | stdout = sys.stdout 66 | _validate_options(options, args) 67 | args = args + [""] 68 | content = open(args[0], 'rt').read() 69 | reqs = requirement.parse(content, permit_urls=True) 70 | out_reqs = edit(reqs, args[1], args[2]) 71 | out = requirement.to_content(out_reqs, prefix=False) 72 | with open(args[0] + '.tmp', 'wt') as f: 73 | f.write(out) 74 | if os.path.exists(args[0]): 75 | os.remove(args[0]) 76 | os.rename(args[0] + '.tmp', args[0]) 77 | return 0 78 | -------------------------------------------------------------------------------- /openstack_requirements/cmds/normalize_requirements.py: -------------------------------------------------------------------------------- 1 | #! /usr/bin/env python 2 | 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 11 | # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the 12 | # License for the specific language governing permissions and limitations 13 | # under the License. 14 | 15 | import argparse 16 | import os.path 17 | 18 | from openstack_requirements import requirement 19 | 20 | 21 | def write_requirements_file(filename, reqs): 22 | with open(filename + 'tmp', 'wt') as f: 23 | f.write(reqs) 24 | if os.path.exists(filename): 25 | os.remove(filename) 26 | os.rename(filename + 'tmp', filename) 27 | 28 | 29 | def main(): 30 | parser = argparse.ArgumentParser( 31 | description="Normalize requirements files") 32 | parser.add_argument('requirements', help='requirements file input') 33 | parser.add_argument('-s', '--save', action='store_true', default=False, 34 | help=('save normalized requirements ' 35 | 'file instead of displaying it')) 36 | args = parser.parse_args() 37 | with open(args.requirements) as f: 38 | requirements = [line.strip() for line in f.readlines()] 39 | 40 | normed_reqs = "" 41 | for line in requirements: 42 | req = requirement.parse_line(line) 43 | normed_req = req.to_line(comment_prefix=' ', sort_specifiers=True) 44 | normed_reqs += normed_req 45 | 46 | if args.save: 47 | write_requirements_file(args.requirements, normed_reqs) 48 | else: 49 | print(normed_reqs, end='') 50 | 51 | 52 | if __name__ == '__main__': 53 | main() 54 | -------------------------------------------------------------------------------- /openstack_requirements/cmds/validate.py: -------------------------------------------------------------------------------- 1 | # Licensed under the Apache License, Version 2.0 (the "License"); you may 2 | # not use this file except in compliance with the License. You may obtain 3 | # a copy of the License at 4 | # 5 | # http://www.apache.org/licenses/LICENSE-2.0 6 | # 7 | # Unless required by applicable law or agreed to in writing, software 8 | # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 9 | # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the 10 | # License for the specific language governing permissions and limitations 11 | # under the License. 12 | 13 | """Apply validation rules to the various requirements lists. 
14 | 15 | """ 16 | 17 | import argparse 18 | import os 19 | 20 | from openstack_requirements import constraints 21 | from openstack_requirements import requirement 22 | from openstack_requirements.utils import read_requirements_file 23 | 24 | 25 | def main(): 26 | parser = argparse.ArgumentParser() 27 | parser.add_argument( 28 | 'global_requirements', 29 | default='global-requirements.txt', 30 | help='path to the global-requirements.txt file', 31 | ) 32 | parser.add_argument( 33 | 'upper_constraints', 34 | default='upper-constraints.txt', 35 | help='path to the upper-constraints.txt file', 36 | ) 37 | parser.add_argument( 38 | 'denylist', 39 | default='denylist.txt', 40 | help='path to the denylist.txt file', 41 | ) 42 | args = parser.parse_args() 43 | 44 | error_count = 0 45 | 46 | # Check the format of the constraints file. 47 | print('\nChecking %s' % args.upper_constraints) 48 | constraints_txt = read_requirements_file(args.upper_constraints) 49 | for msg in constraints.check_format(constraints_txt): 50 | print(msg) 51 | error_count += 1 52 | 53 | # Check that the constraints and requirements are compatible. 54 | print('\nChecking %s' % args.global_requirements) 55 | global_reqs = read_requirements_file(args.global_requirements) 56 | for msg in constraints.check_compatible(global_reqs, constraints_txt): 57 | print(msg) 58 | error_count += 1 59 | 60 | # Check requirements to satisfy policy. 61 | print('\nChecking requirements on %s' % args.global_requirements) 62 | for msg in requirement.check_reqs_bounds_policy(global_reqs): 63 | print(msg) 64 | error_count += 1 65 | 66 | # Check that global requirements are uniformly formatted 67 | print('\nValidating uniform formatting on %s' % args.global_requirements) 68 | with open(args.global_requirements, 'rt') as f: 69 | for line in f: 70 | if line == '\n': 71 | continue 72 | req = requirement.parse_line(line) 73 | normed_req = req.to_line(comment_prefix=' ', sort_specifiers=True) 74 | if line.rstrip() != normed_req.rstrip(): 75 | print("-%s\n+%s" % (line.rstrip(), normed_req.rstrip())) 76 | error_count += 1 77 | 78 | # Check that all of the items in the global-requirements list 79 | # appear in exactly one of the constraints file or the denylist. 80 | print('\nChecking %s' % args.denylist) 81 | denylist = read_requirements_file(args.denylist) 82 | for msg in constraints.check_denylist_coverage( 83 | global_reqs, constraints_txt, denylist, 84 | os.path.basename(args.upper_constraints)): 85 | print(msg) 86 | error_count += 1 87 | 88 | return 1 if error_count else 0 89 | -------------------------------------------------------------------------------- /openstack_requirements/cmds/validate_projects.py: -------------------------------------------------------------------------------- 1 | # Licensed under the Apache License, Version 2.0 (the "License"); you may 2 | # not use this file except in compliance with the License. You may obtain 3 | # a copy of the License at 4 | # 5 | # http://www.apache.org/licenses/LICENSE-2.0 6 | # 7 | # Unless required by applicable law or agreed to in writing, software 8 | # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 9 | # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the 10 | # License for the specific language governing permissions and limitations 11 | # under the License. 
12 | 13 | """Apply validation rules to the projects.txt file 14 | 15 | """ 16 | 17 | import argparse 18 | 19 | from openstack_requirements import project_config 20 | 21 | 22 | _BLACKLIST = set([ 23 | # NOTE(dhellmann): It's not clear why these don't get updates, 24 | # except that trying to do so may break the test jobs using them 25 | # because of the nature of the projects. 26 | 'openstack/hacking', 27 | 'openstack/pbr', 28 | # We can't enforce the check rules against this repo. 29 | 'openstack/requirements', 30 | ]) 31 | 32 | 33 | def main(): 34 | parser = argparse.ArgumentParser() 35 | parser.add_argument( 36 | 'projects_list', 37 | default='projects.txt', 38 | help='path to the projects.txt file', 39 | ) 40 | args = parser.parse_args() 41 | 42 | zuul_projects = project_config.get_zuul_projects_data() 43 | 44 | error_count = 0 45 | 46 | print('\nChecking %s' % args.projects_list) 47 | with open(args.projects_list, 'r') as f: 48 | for repo in f: 49 | repo = repo.strip() 50 | if repo.startswith('#'): 51 | continue 52 | if repo in _BLACKLIST: 53 | continue 54 | pe = project_config.require_check_requirements_for_repo( 55 | zuul_projects, repo) 56 | for e in pe: 57 | print(e) 58 | error_count += 1 59 | 60 | return 1 if error_count else 0 61 | -------------------------------------------------------------------------------- /openstack_requirements/constraints.py: -------------------------------------------------------------------------------- 1 | # Licensed under the Apache License, Version 2.0 (the "License"); you may 2 | # not use this file except in compliance with the License. You may obtain 3 | # a copy of the License at 4 | # 5 | # http://www.apache.org/licenses/LICENSE-2.0 6 | # 7 | # Unless required by applicable law or agreed to in writing, software 8 | # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 9 | # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the 10 | # License for the specific language governing permissions and limitations 11 | # under the License. 12 | 13 | from packaging import specifiers 14 | 15 | 16 | # FIXME(dhellmann): These items were not in the constraints list but 17 | # should not be denylisted. We don't know yet what versions they 18 | # should have, so just ignore them for a little while until we have 19 | # time to figure that out. 20 | UNCONSTRAINABLE = set([ 21 | 'argparse', 22 | 'pip', 23 | 'setuptools', 24 | 'wmi', 25 | 'pywin32', 26 | 'pymi', 27 | 'wheel', 28 | '', # blank lines 29 | ]) 30 | 31 | 32 | def check_denylist_coverage(global_reqs, constraints, denylist, 33 | constraints_list_name): 34 | """Report any items that are not properly constrained. 35 | 36 | Check that all of the items in the global-requirements list 37 | appear either in the constraints file or the denylist. 38 | """ 39 | to_be_constrained = ( 40 | set(global_reqs.keys()) - set(denylist.keys()) 41 | - UNCONSTRAINABLE 42 | ) 43 | constrained = set(constraints.keys()) - set(['']) 44 | unconstrained = to_be_constrained - constrained 45 | for u in sorted(unconstrained): 46 | yield ('%r appears in global-requirements.txt ' 47 | 'but not %s or denylist.txt' % (u, constraints_list_name)) 48 | 49 | # Verify that the denylist packages are not also listed in 50 | # the constraints file. 
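# For example, if 'hacking' were listed in denylist.txt and also pinned
# in the constraints file, it would be reported as a duplicate below.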
51 | dupes = constrained.intersection(set(denylist.keys())) 52 | for d in dupes: 53 | yield ('%r appears in both denylist.txt and %s' 54 | % (d, constraints_list_name)) 55 | 56 | 57 | def check_format(parsed_constraints): 58 | "Apply the formatting rules to the pre-parsed constraints." 59 | for name, spec_list in parsed_constraints.items(): 60 | for req, original_line in spec_list: 61 | if not req.specifiers.startswith('==='): 62 | yield ('Invalid constraint for %s does not have 3 "=": %s' % 63 | (name, original_line)) 64 | 65 | 66 | def check_compatible(global_reqs, constraints): 67 | """Check compatibility between requirements and constraints. 68 | 69 | A change to global-requirements that wants to make changes 70 | incompatible with the current frozen constraints needs to also raise 71 | those constraints. 72 | 73 | * Load global-requirements 74 | * Load given constraints.txt 75 | * Check that every version within given constraints.txt is either 76 | 77 | A) Missing from global-requirements - its a transitive dep or 78 | a removed dep. 79 | B) Compatible with any of the versions in global-requirements. 80 | This is not-quite right, because we should in principle match 81 | markers, but that requires evaluating the markers which we 82 | haven't yet implemented. Being compatible with one of the 83 | requirements is good enough proxy to catch most cases. 84 | 85 | :param global_reqs: A set of global requirements after parsing. 86 | :param constraints: The same from given constraints.txt. 87 | :return: A list of the error messages for constraints that failed. 88 | """ 89 | def satisfied(reqs, name, version, failures): 90 | if name not in reqs: 91 | return True 92 | tested = [] 93 | for constraint, _ in reqs[name]: 94 | spec = specifiers.SpecifierSet(constraint.specifiers) 95 | # pre-releases are allowed by policy but discouraged 96 | if spec.contains(version, prereleases=True): 97 | return True 98 | tested.append(constraint.specifiers) 99 | failures.append('Constraint %s for %s does not match requirement %s' % 100 | (version, name, tested)) 101 | return False 102 | failures = [] 103 | for pkg_constraints in constraints.values(): 104 | for constraint, _ in pkg_constraints: 105 | name = constraint.package 106 | version = constraint.specifiers[3:] 107 | satisfied(global_reqs, name, version, failures) 108 | return failures 109 | -------------------------------------------------------------------------------- /openstack_requirements/project.py: -------------------------------------------------------------------------------- 1 | # Copyright 2012 OpenStack Foundation 2 | # Copyright 2013 Hewlett-Packard Development Company, L.P. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); you may 5 | # not use this file except in compliance with the License. You may obtain 6 | # a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 12 | # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the 13 | # License for the specific language governing permissions and limitations 14 | # under the License. 
15 | 16 | """The project abstraction.""" 17 | 18 | import collections 19 | import configparser 20 | import errno 21 | import io 22 | import os 23 | 24 | from parsley import makeGrammar 25 | 26 | from openstack_requirements import requirement 27 | 28 | # PURE logic from here until the IO marker below. 29 | 30 | 31 | _Comment = collections.namedtuple('Comment', ['line']) 32 | _Extra = collections.namedtuple('Extra', ['name', 'content']) 33 | 34 | 35 | _extras_grammar = """ 36 | ini = (line*:p extras?:e line*:l final:s) -> (''.join(p), e, ''.join(l+[s])) 37 | line = ~extras <(~'\\n' anything)* '\\n'> 38 | final = <(~'\\n' anything)* > 39 | extras = '[' 'e' 'x' 't' 'r' 'a' 's' ']' '\\n'+ body*:b -> b 40 | body = comment | extra 41 | comment = <'#' (~'\\n' anything)* '\\n'>:c '\\n'* -> comment(c) 42 | extra = name:n ' '* '=' line:l cont*:c '\\n'* -> extra(n, ''.join([l] + c)) 43 | name = <(anything:x ?(x not in '\\n \\t='))+> 44 | cont = ' '+ <(~'\\n' anything)* '\\n'> 45 | """ 46 | _extras_compiled = makeGrammar( 47 | _extras_grammar, {"comment": _Comment, "extra": _Extra}) 48 | 49 | 50 | Error = collections.namedtuple('Error', ['message']) 51 | File = collections.namedtuple('File', ['filename', 'content']) 52 | StdOut = collections.namedtuple('StdOut', ['message']) 53 | Verbose = collections.namedtuple('Verbose', ['message']) 54 | 55 | 56 | def extras(project): 57 | """Return a dict of extra-name:content for the extras in setup.cfg.""" 58 | if 'setup.cfg' not in project: 59 | return {} 60 | c = configparser.ConfigParser() 61 | c.read_file(io.StringIO(project['setup.cfg'])) 62 | if not c.has_section('extras'): 63 | return {} 64 | return dict(c.items('extras')) 65 | 66 | 67 | def merge_setup_cfg(old_content, new_extras): 68 | # This is ugly. All the existing libraries handle setup.cfg's poorly. 69 | prefix, extras, suffix = _extras_compiled(old_content).ini() 70 | out_extras = [] 71 | if extras is not None: 72 | for extra in extras: 73 | if type(extra) is _Comment: 74 | out_extras.append(extra) 75 | elif type(extra) is _Extra: 76 | if extra.name not in new_extras: 77 | out_extras.append(extra) 78 | continue 79 | e = _Extra( 80 | extra.name, 81 | requirement.to_content( 82 | new_extras[extra.name], ':', ' ', False)) 83 | out_extras.append(e) 84 | else: 85 | raise TypeError('unknown type %r' % extra) 86 | if out_extras: 87 | extras_str = ['[extras]\n'] 88 | for extra in out_extras: 89 | if type(extra) is _Comment: 90 | extras_str.append(extra.line) 91 | else: 92 | extras_str.append(extra.name + ' =') 93 | extras_str.append(extra.content) 94 | if suffix: 95 | extras_str.append('\n') 96 | extras_str = ''.join(extras_str) 97 | else: 98 | extras_str = '' 99 | return prefix + extras_str + suffix 100 | 101 | 102 | # IO from here to the end of the file. 103 | 104 | def _safe_read(project, filename, output=None): 105 | if output is None: 106 | output = project 107 | try: 108 | path = os.path.join(project['root'], filename) 109 | with io.open(path, 'rt', encoding="utf-8") as f: 110 | output[filename] = f.read() 111 | except IOError as e: 112 | if e.errno != errno.ENOENT: 113 | raise 114 | 115 | 116 | def read(root): 117 | """Read into memory the packaging data for the project at root. 118 | 119 | :param root: A directory path. 120 | :return: A dict representing the project with the following keys: 121 | - root: The root dir. 122 | - setup.py: Contents of setup.py. 123 | - setup.cfg: Contents of setup.cfg. 124 | - requirements: Dict of requirement file name: contents. 
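- lower-constraints.txt: Contents of lower-constraints.txt, or None when the file does not exist.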
125 | """ 126 | result = {'root': root} 127 | _safe_read(result, 'setup.py') 128 | _safe_read(result, 'setup.cfg') 129 | requirements = {} 130 | result['requirements'] = requirements 131 | target_files = [ 132 | 'requirements.txt', 'tools/pip-requires', 133 | 'test-requirements.txt', 'tools/test-requires', 134 | 'doc/requirements.txt', 135 | ] 136 | for py_version in (2, 3): 137 | target_files.append('requirements-py%s.txt' % py_version) 138 | target_files.append('test-requirements-py%s.txt' % py_version) 139 | for target_file in target_files: 140 | _safe_read(result, target_file, output=requirements) 141 | # Read lower-constraints.txt and ensure the key is always present 142 | # in case the file is missing. 143 | result['lower-constraints.txt'] = None 144 | _safe_read(result, 'lower-constraints.txt') 145 | return result 146 | 147 | 148 | def write(project, actions, stdout, verbose, noop=False): 149 | """Write actions into project. 150 | 151 | :param project: A project metadata dict. 152 | :param actions: A list of action tuples - File or Verbose - that describe 153 | what actions are to be taken. 154 | Error objects write a message to stdout and trigger an exception at 155 | the end of _write_project. 156 | File objects describe a file to have content placed in it. 157 | StdOut objects describe a message to write to stdout. 158 | Verbose objects will write a message to stdout when verbose is True. 159 | :param stdout: Where to write content for stdout. 160 | :param verbose: If True Verbose actions will be written to stdout. 161 | :param noop: If True nothing will be written to disk. 162 | :return None: 163 | :raises IOError: If the IO operations fail, IOError is raised. If this 164 | happens some actions may have been applied and others not. 165 | """ 166 | error = False 167 | for action in actions: 168 | if type(action) is Error: 169 | error = True 170 | stdout.write(action.message + '\n') 171 | elif type(action) is File: 172 | if noop: 173 | continue 174 | fullname = os.path.join(project['root'], action.filename) 175 | tmpname = fullname + '.tmp' 176 | with open(tmpname, 'wt') as f: 177 | f.write(action.content) 178 | if os.path.exists(fullname): 179 | os.remove(fullname) 180 | os.rename(tmpname, fullname) 181 | elif type(action) is StdOut: 182 | stdout.write(action.message) 183 | elif type(action) is Verbose: 184 | if verbose: 185 | stdout.write(u"%s\n" % (action.message,)) 186 | else: 187 | raise Exception("Invalid action %r" % (action,)) 188 | if error: 189 | raise Exception("Error occurred processing %s" % (project['root'])) 190 | -------------------------------------------------------------------------------- /openstack_requirements/project_config.py: -------------------------------------------------------------------------------- 1 | # Licensed under the Apache License, Version 2.0 (the "License"); you may 2 | # not use this file except in compliance with the License. You may obtain 3 | # a copy of the License at 4 | # 5 | # http://www.apache.org/licenses/LICENSE-2.0 6 | # 7 | # Unless required by applicable law or agreed to in writing, software 8 | # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 9 | # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the 10 | # License for the specific language governing permissions and limitations 11 | # under the License. 12 | 13 | """Work with the project-config repository. 
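The helpers below download zuul.d/projects.yaml and check whether a
given repository declares the check-requirements template.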
14 | """ 15 | 16 | import requests 17 | import yaml 18 | 19 | 20 | ZUUL_PROJECTS_URL = 'https://git.openstack.org/cgit/openstack-infra/project-config/plain/zuul.d/projects.yaml' # noqa 21 | ZUUL_PROJECTS_FILENAME = 'openstack-infra/project-config/zuul.d/projects.yaml' 22 | 23 | 24 | def get_zuul_projects_data(url=ZUUL_PROJECTS_URL): 25 | """Return the parsed data structure for the zuul.d/projects.yaml file. 26 | 27 | :param url: Optional URL to the location of the file. Defaults to 28 | the most current version in the public git repository. 29 | 30 | """ 31 | r = requests.get(url) 32 | raw = yaml.safe_load(r.text) 33 | # Add a mapping from repo name to repo settings, since that is how 34 | # we access this most often. 35 | projects = { 36 | p['project']['name']: p['project'] 37 | for p in raw 38 | } 39 | return projects 40 | 41 | 42 | def require_check_requirements_for_repo(zuul_projects, repo): 43 | """Check the repository for the jobs related to requirements. 44 | 45 | Returns a list of error messages. 46 | 47 | """ 48 | errors = [] 49 | 50 | if repo not in zuul_projects: 51 | errors.append( 52 | ('did not find %s in %s' % (repo, ZUUL_PROJECTS_FILENAME), 53 | True) 54 | ) 55 | else: 56 | p = zuul_projects[repo] 57 | templates = p.get('templates', []) 58 | # NOTE(dhellmann): We don't mess around looking for individual 59 | # jobs, because we want projects to use the templates. 60 | if 'check-requirements' not in templates: 61 | errors.append( 62 | '%s no check-requirements job specified for %s' 63 | % (ZUUL_PROJECTS_FILENAME, repo) 64 | ) 65 | return errors 66 | -------------------------------------------------------------------------------- /openstack_requirements/requirement.py: -------------------------------------------------------------------------------- 1 | # Licensed under the Apache License, Version 2.0 (the "License"); you may 2 | # not use this file except in compliance with the License. You may obtain 3 | # a copy of the License at 4 | # 5 | # http://www.apache.org/licenses/LICENSE-2.0 6 | # 7 | # Unless required by applicable law or agreed to in writing, software 8 | # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 9 | # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the 10 | # License for the specific language governing permissions and limitations 11 | # under the License. 12 | 13 | """Requirements handling.""" 14 | 15 | # This module has no IO at all, and none should be added. 16 | 17 | import collections 18 | import distutils.version 19 | import packaging.specifiers 20 | import pkg_resources 21 | import re 22 | 23 | 24 | # A header for the requirements file(s). 25 | # TODO(lifeless): Remove this once constraints are in use. 26 | _REQS_HEADER = [ 27 | '# The order of packages is significant, because pip processes ' 28 | 'them in the order\n', 29 | '# of appearance. 
Changing the order has an impact on the overall ' 30 | 'integration\n', 31 | '# process, which may cause wedges in the gate later.\n', 32 | ] 33 | 34 | 35 | def key_specifier(a): 36 | weight = {'>=': 0, '>': 0, 37 | '===': 1, '==': 1, '~=': 1, '!=': 1, 38 | '<': 2, '<=': 2} 39 | a = a._spec 40 | return (weight[a[0]], distutils.version.LooseVersion(a[1])) 41 | 42 | 43 | class Requirement(collections.namedtuple('Requirement', 44 | ['package', 'location', 'specifiers', 45 | 'markers', 'comment', 'extras'])): 46 | def __new__(cls, package, location, specifiers, markers, comment, 47 | extras=None): 48 | return super(Requirement, cls).__new__( 49 | cls, package, location, specifiers, markers, comment, 50 | frozenset(extras or ())) 51 | 52 | def to_line(self, marker_sep=';', line_prefix='', comment_prefix=' ', 53 | sort_specifiers=False): 54 | comment_p = comment_prefix if self.package else '' 55 | comment = (comment_p + self.comment if self.comment else '') 56 | marker = marker_sep + self.markers if self.markers else '' 57 | package = line_prefix + self.package if self.package else '' 58 | location = self.location + '#egg=' if self.location else '' 59 | extras = '[%s]' % ",".join(sorted(self.extras)) if self.extras else '' 60 | specifiers = self.specifiers 61 | if sort_specifiers: 62 | _specifiers = packaging.specifiers.SpecifierSet(specifiers) 63 | _specifiers = ['%s' % s for s in sorted(_specifiers, 64 | key=key_specifier)] 65 | specifiers = ','.join(_specifiers) 66 | return '%s%s%s%s%s%s\n' % (location, 67 | package, 68 | extras, 69 | specifiers, 70 | marker, 71 | comment) 72 | 73 | 74 | Requirements = collections.namedtuple('Requirements', ['reqs']) 75 | 76 | 77 | url_re = re.compile( 78 | r'^(?P<url>\s*(?:-e\s)?\s*(?:(?:[a-z]+\+)?(?:[a-z]+))://[^#]*)' 79 | r'#egg=(?P<name>[-\.\w]+)') 80 | 81 | 82 | def canonical_name(req_name): 83 | """Return the canonical form of req_name.""" 84 | return pkg_resources.safe_name(req_name).lower() 85 | 86 | 87 | def parse(content, permit_urls=False): 88 | return to_dict(to_reqs(content, permit_urls=permit_urls)) 89 | 90 | 91 | def parse_line(req_line, permit_urls=False): 92 | """Parse a single line of a requirements file. 93 | 94 | requirements files here are a subset of pip requirements files: we don't 95 | try to parse URL entries, or pip options like -f and -e. Those are not 96 | permitted in global-requirements.txt. If encountered in a synchronised 97 | file such as requirements.txt or test-requirements.txt, they are illegal 98 | but currently preserved as-is. 99 | 100 | They may of course be used by local test configurations, just not 101 | committed into the OpenStack reference branches. 102 | 103 | :param permit_urls: If True, urls are parsed into Requirement tuples. 104 | By default they are not, because they cannot be reflected into 105 | setuptools kwargs, and thus the default is conservative. When 106 | urls are permitted, -e *may* be supplied at the start of the line. 107 | """ 108 | end = len(req_line) 109 | hash_pos = req_line.find('#') 110 | if hash_pos < 0: 111 | hash_pos = end 112 | # Don't find urls that are in comments. 113 | if '://' in req_line[:hash_pos]: 114 | if permit_urls: 115 | # We accept only a subset of urls here - they have to have an egg 116 | # name so that we can tell what project it's for without doing 117 | # network access. Egg markers use a fragment, so we need to pull 118 | # out the url from the entire line.
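# Illustrative (hypothetical) example: a line such as
# '-e git+https://opendev.org/x/y#egg=y' matches url_re with
# url='-e git+https://opendev.org/x/y' and name='y'.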
119 | m = url_re.match(req_line) 120 | name = m.group('name') 121 | location = m.group('url') 122 | parse_start = m.end('name') 123 | hash_pos = req_line[parse_start:].find('#') 124 | if hash_pos < 0: 125 | hash_pos = end 126 | else: 127 | hash_pos = hash_pos + parse_start 128 | else: 129 | # Trigger an early failure before we look for ':' 130 | pkg_resources.Requirement.parse(req_line) 131 | else: 132 | parse_start = 0 133 | location = '' 134 | semi_pos = req_line.find(';', parse_start, hash_pos) 135 | colon_pos = req_line.find(':', parse_start, hash_pos) 136 | marker_pos = max(semi_pos, colon_pos) 137 | if marker_pos < 0: 138 | marker_pos = hash_pos 139 | markers = req_line[marker_pos + 1:hash_pos].strip() 140 | if hash_pos != end: 141 | comment = req_line[hash_pos:] 142 | else: 143 | comment = '' 144 | req_line = req_line[parse_start:marker_pos] 145 | 146 | extras = () 147 | if parse_start: 148 | # We parsed a url before 149 | specifier = '' 150 | elif req_line: 151 | # Pulled out a requirement 152 | parsed = pkg_resources.Requirement.parse(req_line) 153 | name = parsed.project_name 154 | extras = parsed.extras 155 | specifier = str(parsed.specifier) 156 | else: 157 | # Comments / blank lines etc. 158 | name = '' 159 | specifier = '' 160 | return Requirement(name, location, specifier, markers, comment, extras) 161 | 162 | 163 | def to_content(reqs, marker_sep=';', line_prefix='', prefix=True): 164 | lines = [] 165 | if prefix: 166 | lines += _REQS_HEADER 167 | for req in reqs.reqs: 168 | lines.append(req.to_line(marker_sep, line_prefix)) 169 | return u''.join(lines) 170 | 171 | 172 | def to_dict(req_sequence): 173 | reqs = dict() 174 | for req, req_line in req_sequence: 175 | if req is not None: 176 | key = canonical_name(req.package) 177 | reqs.setdefault(key, []).append((req, req_line)) 178 | return reqs 179 | 180 | 181 | def _pass_through(req_line, permit_urls=False): 182 | """Identify unparsable lines.""" 183 | if permit_urls: 184 | return (req_line.startswith('http://tarballs.openstack.org/') or 185 | req_line.startswith('-f')) 186 | else: 187 | return (req_line.startswith('http://tarballs.openstack.org/') or 188 | req_line.startswith('-e') or 189 | req_line.startswith('-f')) 190 | 191 | 192 | def to_reqs(content, permit_urls=False): 193 | for content_line in content.splitlines(True): 194 | req_line = content_line.strip() 195 | if _pass_through(req_line, permit_urls=permit_urls): 196 | yield None, content_line 197 | else: 198 | yield parse_line(req_line, permit_urls=permit_urls), content_line 199 | 200 | 201 | def check_reqs_bounds_policy(global_reqs): 202 | """Check that the global requirement version specifiers match the policy. 
203 | 204 | The policy is defined as 205 | * There needs to be exactly one lower bound (>=1.2 defined) 206 | * There can be one or more excludes (!=1.2.1, !=1.2.2) 207 | * TODO: Clarify (non-) existance of upper caps 208 | """ 209 | 210 | for pkg_requirement in global_reqs.values(): 211 | req = pkg_requirement[0][0] 212 | if req.package: 213 | _specifiers = packaging.specifiers.SpecifierSet(req.specifiers) 214 | lower_bound = set() 215 | for spec in _specifiers: 216 | if spec.operator == '>=': 217 | lower_bound.add(spec) 218 | if len(lower_bound): 219 | yield ('Requirement %s should not include a >= specifier' % 220 | req.package) 221 | -------------------------------------------------------------------------------- /openstack_requirements/tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/openstack/requirements/c36e570d3d645e7a51354c977783b3cd8357f91e/openstack_requirements/tests/__init__.py -------------------------------------------------------------------------------- /openstack_requirements/tests/common.py: -------------------------------------------------------------------------------- 1 | # Licensed under the Apache License, Version 2.0 (the "License"); you may 2 | # not use this file except in compliance with the License. You may obtain 3 | # a copy of the License at 4 | # 5 | # http://www.apache.org/licenses/LICENSE-2.0 6 | # 7 | # Unless required by applicable law or agreed to in writing, software 8 | # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 9 | # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the 10 | # License for the specific language governing permissions and limitations 11 | # under the License. 12 | 13 | import os.path 14 | import shutil 15 | 16 | import fixtures 17 | 18 | from openstack_requirements import project 19 | from openstack_requirements import requirement 20 | 21 | 22 | def _file_to_list(fname): 23 | with open(fname) as f: 24 | content = list(map(lambda x: x.rstrip(), f.readlines())) 25 | return content 26 | 27 | 28 | class Project(fixtures.Fixture): 29 | """A single project we can update.""" 30 | 31 | def __init__( 32 | self, req_path, setup_path, setup_cfg_path, test_req_path=None): 33 | super(Project, self).__init__() 34 | self._req_path = req_path 35 | self._setup_path = setup_path 36 | self._setup_cfg_path = setup_cfg_path 37 | self._test_req_path = test_req_path 38 | 39 | def setUp(self): 40 | super(Project, self).setUp() 41 | self.root = self.useFixture(fixtures.TempDir()).path 42 | self.req_file = os.path.join(self.root, 'requirements.txt') 43 | self.setup_file = os.path.join(self.root, 'setup.py') 44 | self.setup_cfg_file = os.path.join(self.root, 'setup.cfg') 45 | self.test_req_file = os.path.join(self.root, 'test-requirements.txt') 46 | shutil.copy(self._req_path, self.req_file) 47 | shutil.copy(self._setup_path, self.setup_file) 48 | shutil.copy(self._setup_cfg_path, self.setup_cfg_file) 49 | if self._test_req_path: 50 | shutil.copy(self._test_req_path, self.test_req_file) 51 | 52 | 53 | project_fixture = Project( 54 | "openstack_requirements/tests/files/project.txt", 55 | "openstack_requirements/tests/files/setup.py", 56 | "openstack_requirements/tests/files/setup.cfg", 57 | "openstack_requirements/tests/files/test-project.txt") 58 | bad_project_fixture = Project( 59 | "openstack_requirements/tests/files/project-with-bad-requirement.txt", 60 | "openstack_requirements/tests/files/setup.py", 61 | 
"openstack_requirements/tests/files/setup.cfg") 62 | oslo_fixture = Project( 63 | "openstack_requirements/tests/files/project-with-oslo-tar.txt", 64 | "openstack_requirements/tests/files/old-setup.py", 65 | "openstack_requirements/tests/files/setup.cfg") 66 | pbr_fixture = Project( 67 | "openstack_requirements/tests/files/project.txt", 68 | "openstack_requirements/tests/files/setup.py", 69 | "openstack_requirements/tests/files/pbr_setup.cfg", 70 | "openstack_requirements/tests/files/test-project.txt") 71 | 72 | 73 | class GlobalRequirements(fixtures.Fixture): 74 | 75 | def setUp(self): 76 | super(GlobalRequirements, self).setUp() 77 | self.root = self.useFixture(fixtures.TempDir()).path 78 | self.req_file = os.path.join(self.root, "global-requirements.txt") 79 | shutil.copy( 80 | "openstack_requirements/tests/files/gr-base.txt", self.req_file) 81 | self.denylist_file = os.path.join(self.root, "denylist.txt") 82 | shutil.copy( 83 | "openstack_requirements/tests/files/denylist.txt", 84 | self.denylist_file) 85 | 86 | 87 | # Static data for unit testing. 88 | def make_project(fixture): 89 | with fixture: 90 | return project.read(fixture.root) 91 | 92 | 93 | global_reqs = requirement.parse( 94 | open("openstack_requirements/tests/files/gr-base.txt", "rt").read()) 95 | upper_constraints = requirement.parse( 96 | open("openstack_requirements/tests/files/upper-constraints.txt", 97 | "rt").read()) 98 | denylist = requirement.parse( 99 | open("openstack_requirements/tests/files/denylist.txt", "rt").read()) 100 | pbr_project = make_project(pbr_fixture) 101 | project_project = make_project(project_fixture) 102 | bad_project = make_project(bad_project_fixture) 103 | oslo_project = make_project(oslo_fixture) 104 | -------------------------------------------------------------------------------- /openstack_requirements/tests/files/denylist.txt: -------------------------------------------------------------------------------- 1 | # linters - each project may have a different version with loose convergence 2 | # over time. 3 | flake8 4 | flake8_docstrings 5 | flake8-import-order 6 | hacking 7 | pep257 8 | pylint 9 | -------------------------------------------------------------------------------- /openstack_requirements/tests/files/gr-base.txt: -------------------------------------------------------------------------------- 1 | alembic>=0.4.1 2 | amqplib>=0.6.1 3 | anyjson>=0.3.3 4 | argparse 5 | Babel>=0.9.6 6 | boto>=2.4.0 7 | cffi 8 | Cheetah>=2.4.4 9 | cliff>=1.4 10 | d2to1>=0.2.10,<0.3 11 | Django>=1.4,<1.6 12 | django_compressor>=1.4 13 | dnspython>=1.9.4 14 | eventlet>=0.12.0 15 | extras 16 | Flask==0.9 17 | greenlet>=0.3.2 18 | happybase>=0.4 19 | httplib2 20 | iso8601>=0.1.4 21 | Jinja2 22 | jsonrpclib 23 | jsonschema!=1.4.0,<2,>=1.0.0 24 | kazoo>=0.9,<=1.1 25 | lesscpy>=0.10 26 | kombu>=2.4.8 27 | lockfile>=0.8 28 | lxml>=2.3 29 | msgpack-python 30 | netaddr 31 | netifaces>=0.10.4 32 | oauth2 33 | oslo.config>=1.1.0 34 | pam>=0.1.4 35 | paramiko>=1.13.0 36 | passlib 37 | Paste 38 | PasteDeploy>=1.5.0 39 | pbr>=0.5.16,<0.6 40 | pecan>=0.2.0 41 | pip>=1.0 42 | PrettyTable>=0.6,<0.8 43 | psutil<1.0 44 | pyasn1 45 | pymongo>=2.4 46 | pyOpenSSL 47 | pyparsing>=1.5.7,<2.0 48 | # OpenStack clients. None of these should have an upper bound 49 | # as that has implications for testing in the gate. An exception 50 | # is currently being made for neutron client because of the need 51 | # for an incompatible change in their next release. 
52 | python-cinderclient>=1.0.4 53 | python-ceilometerclient>=1.0.3 54 | python-heatclient>=0.2.9 55 | python-glanceclient>=0.9.0 56 | python-keystoneclient>=0.4.1 57 | python-memcached 58 | python-neutronclient>=2.2.3,<3 59 | python-novaclient>=2.12.0 60 | python-swiftclient>=1.2 61 | python-troveclient 62 | pytz>=2011b 63 | pyudev 64 | PyYAML>=3.1.0 65 | qpid-python 66 | requests>=1.1,<1.2.3 67 | Routes>=1.12.3 68 | setuptools_git>=0.4 69 | simplejson>=2.2.0 70 | six 71 | sockjs-tornado>=1.0.0,<2.0.0 72 | SQLAlchemy<=0.7.99,>=0.7 73 | sqlalchemy-migrate>=0.7 74 | stevedore>=0.10 75 | suds>=0.4 76 | warlock>=0.7.0,<2 77 | WebOb<1.3,>=1.2.3 78 | websockify>=0.5.1,<0.6 79 | wsgiref>=0.1.2 80 | WSME>=0.5b2 81 | xattr>=0.4 82 | # Testing tools below, which are typically in test-requires.txt 83 | 84 | configobj 85 | coverage>=3.6 86 | discover 87 | django-nose 88 | docutils==0.9.1 89 | feedparser 90 | fixtures>=0.3.12 91 | flake8==2.0 92 | hacking>=0.5.6,<0.7 93 | hp3parclient>=3.0,<4.0 94 | httpretty>=0.8.0 95 | keyring 96 | mock>=0.8.0 97 | mox>=0.5.3 98 | mox3>=0.7.0 99 | nose 100 | nose-exclude 101 | nosehtmloutput>=0.0.3 102 | nosexcover 103 | openstack.nose_plugin>=0.7 104 | psycopg2 105 | pylint==0.25.2 106 | pysendfile==2.0.0 107 | pysqlite 108 | python-ldap==2.3.13 109 | python-subunit 110 | pytest-runner # dependency of keyring that is only in setup_requires 111 | pyzmq 112 | redis 113 | selenium 114 | sphinx>=1.1.2 115 | sphinxcontrib-httpdomain 116 | sphinxcontrib-pecanwsme>=0.2 117 | oslo.sphinx 118 | swift 119 | testrepository>=0.0.17 120 | testresources<0.3 121 | testscenarios>=0.4,<0.5 122 | testtools>=0.9.32 123 | unittest2 124 | WebTest==1.3.3 125 | -------------------------------------------------------------------------------- /openstack_requirements/tests/files/old-setup.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | # Copyright (c) 2010-2012 OpenStack, LLC. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 13 | # implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License. 
16 | 17 | import setuptools 18 | from swift import __canonical_version__ as version 19 | 20 | 21 | name = 'swift' 22 | 23 | 24 | with open('requirements.txt', 'r') as f: 25 | requires = [x.strip() for x in f if x.strip()] 26 | 27 | 28 | setuptools.setup( 29 | name=name, 30 | version=version, 31 | description='Swift', 32 | license='Apache License (2.0)', 33 | author='OpenStack, LLC.', 34 | author_email='openstack-admins@lists.launchpad.net', 35 | url='https://launchpad.net/swift', 36 | packages=setuptools.find_packages(exclude=['test', 'bin']), 37 | test_suite='nose.collector', 38 | classifiers=[ 39 | 'Development Status :: 5 - Production/Stable', 40 | 'License :: OSI Approved :: Apache Software License', 41 | 'Operating System :: POSIX :: Linux', 42 | 'Programming Language :: Python', 43 | 'Programming Language :: Python :: 2.6', 44 | 'Programming Language :: Python :: 2.7', 45 | 'Environment :: No Input/Output (Daemon)', 46 | 'Environment :: OpenStack', 47 | ], 48 | install_requires=requires, 49 | scripts=[ 50 | 'bin/swift-account-audit', 51 | 'bin/swift-account-auditor', 52 | 'bin/swift-account-reaper', 53 | 'bin/swift-account-replicator', 54 | 'bin/swift-account-server', 55 | 'bin/swift-bench', 56 | 'bin/swift-bench-client', 57 | 'bin/swift-config', 58 | 'bin/swift-container-auditor', 59 | 'bin/swift-container-replicator', 60 | 'bin/swift-container-server', 61 | 'bin/swift-container-sync', 62 | 'bin/swift-container-updater', 63 | 'bin/swift-dispersion-populate', 64 | 'bin/swift-dispersion-report', 65 | 'bin/swift-drive-audit', 66 | 'bin/swift-form-signature', 67 | 'bin/swift-get-nodes', 68 | 'bin/swift-init', 69 | 'bin/swift-object-auditor', 70 | 'bin/swift-object-expirer', 71 | 'bin/swift-object-info', 72 | 'bin/swift-object-replicator', 73 | 'bin/swift-object-server', 74 | 'bin/swift-object-updater', 75 | 'bin/swift-oldies', 76 | 'bin/swift-orphans', 77 | 'bin/swift-proxy-server', 78 | 'bin/swift-recon', 79 | 'bin/swift-recon-cron', 80 | 'bin/swift-ring-builder', 81 | 'bin/swift-temp-url', 82 | ], 83 | entry_points={ 84 | 'paste.app_factory': [ 85 | 'proxy=swift.proxy.server:app_factory', 86 | 'object=swift.obj.server:app_factory', 87 | 'container=swift.container.server:app_factory', 88 | 'account=swift.account.server:app_factory', 89 | ], 90 | 'paste.filter_factory': [ 91 | 'healthcheck=swift.common.middleware.healthcheck:filter_factory', 92 | 'crossdomain=swift.common.middleware.crossdomain:filter_factory', 93 | 'memcache=swift.common.middleware.memcache:filter_factory', 94 | 'ratelimit=swift.common.middleware.ratelimit:filter_factory', 95 | 'cname_lookup=swift.common.middleware.cname_lookup:filter_factory', 96 | 'catch_errors=swift.common.middleware.catch_errors:filter_factory', 97 | 'domain_remap=swift.common.middleware.domain_remap:filter_factory', 98 | 'staticweb=swift.common.middleware.staticweb:filter_factory', 99 | 'tempauth=swift.common.middleware.tempauth:filter_factory', 100 | 'keystoneauth=swift.common.middleware.keystoneauth:filter_factory', 101 | 'recon=swift.common.middleware.recon:filter_factory', 102 | 'tempurl=swift.common.middleware.tempurl:filter_factory', 103 | 'formpost=swift.common.middleware.formpost:filter_factory', 104 | 'name_check=swift.common.middleware.name_check:filter_factory', 105 | 'bulk=swift.common.middleware.bulk:filter_factory', 106 | 'container_quotas=swift.common.middleware.container_quotas:' 107 | 'filter_factory', 108 | 'account_quotas=swift.common.middleware.account_quotas:' 109 | 'filter_factory', 110 | 
'proxy_logging=swift.common.middleware.proxy_logging:' 111 | 'filter_factory', 112 | 'slo=swift.common.middleware.slo:filter_factory', 113 | 'list_endpoints=swift.common.middleware.list_endpoints:' 114 | 'filter_factory', 115 | ], 116 | }, 117 | ) 118 | -------------------------------------------------------------------------------- /openstack_requirements/tests/files/pbr_setup.cfg: -------------------------------------------------------------------------------- 1 | [metadata] 2 | name = pbr 3 | author = OpenStack 4 | author-email = openstack-discuss@lists.openstack.org 5 | summary = Python Build Reasonableness 6 | description-file = 7 | README.rst 8 | home-page = https://pypi.org/project/pbr 9 | python-requires = >=2.6 10 | classifier = 11 | Development Status :: 5 - Production/Stable 12 | Environment :: Console 13 | Environment :: OpenStack 14 | Intended Audience :: Developers 15 | Intended Audience :: Information Technology 16 | License :: OSI Approved :: Apache Software License 17 | Operating System :: OS Independent 18 | Programming Language :: Python 19 | -------------------------------------------------------------------------------- /openstack_requirements/tests/files/project-with-bad-requirement.txt: -------------------------------------------------------------------------------- 1 | # The greenlet package must be compiled with gcc and needs 2 | # the Python.h headers. Make sure you install the python-dev 3 | # package to get the right headers... 4 | greenlet>=0.3.1 5 | 6 | # < 0.8.0/0.8 does not work, see https://bugs.launchpad.net/bugs/1153983 7 | SQLAlchemy>=0.7.8,<=0.7.99 8 | anyjson>=0.3.3 9 | eventlet>=0.9.12 10 | PasteDeploy 11 | routes 12 | WebOb>=1.2 13 | wsgiref 14 | argparse 15 | boto 16 | sqlalchemy-migrate>=0.7 17 | httplib2 18 | kombu>2.4.7 19 | iso8601>=0.1.4 20 | oslo.config>=1.1.0 21 | 22 | 23 | thisisnotarealdependency 24 | 25 | # For Swift storage backend. 26 | python-swiftclient>=1.2,<2 27 | 28 | # Note you will need gcc buildtools installed and must 29 | # have installed libxml headers for lxml to be successfully 30 | # installed using pip, therefore you will need to install the 31 | # libxml2-dev and libxslt-dev Ubuntu packages. 
32 | lxml 33 | 34 | # For paste.util.template used in keystone.common.template 35 | Paste 36 | 37 | passlib 38 | jsonschema 39 | python-cinderclient>=1.0.4 40 | python-keystoneclient>=0.2.0 41 | pyOpenSSL 42 | 43 | # Required by openstack.common libraries 44 | six 45 | -------------------------------------------------------------------------------- /openstack_requirements/tests/files/project-with-oslo-tar.txt: -------------------------------------------------------------------------------- 1 | d2to1>=0.2.10,<0.3 2 | pbr>=0.5.16,<0.6 3 | SQLAlchemy>=0.7.8,<0.7.99 4 | Cheetah>=2.4.4 5 | amqplib>=0.6.1 6 | anyjson>=0.2.4 7 | argparse 8 | boto 9 | eventlet>=0.9.17 10 | kombu>=1.0.4 11 | lxml>=2.3 12 | routes>=1.12.3 13 | WebOb==1.2.3 14 | greenlet>=0.3.1 15 | PasteDeploy>=1.5.0 16 | paste 17 | sqlalchemy-migrate>=0.7.2 18 | netaddr>=0.7.6 19 | suds>=0.4 20 | paramiko 21 | pyasn1 22 | Babel>=0.9.6 23 | iso8601>=0.1.4 24 | requests>=1.1,<1.2.1 # order-dependent python-cinderclient req cap, bug 1182271 25 | python-cinderclient>=1.0.1 26 | python-neutronclient>=2.2.3,<3.0.0 27 | python-glanceclient>=0.9.0 28 | python-keystoneclient>=0.2.0 29 | six 30 | stevedore>=0.10 31 | websockify<0.4 32 | pyparsing>=1.5.7,<2.0 # order-dependent python-quantumclient req, bug 1191866 33 | 34 | -f http://tarballs.openstack.org/oslo.config/oslo.config-1.2.0a3.tar.gz#egg=oslo.config-1.2.0a3 35 | oslo.config>=1.2.0a3 36 | -------------------------------------------------------------------------------- /openstack_requirements/tests/files/project.txt: -------------------------------------------------------------------------------- 1 | # The greenlet package must be compiled with gcc and needs 2 | # the Python.h headers. Make sure you install the python-dev 3 | # package to get the right headers... 4 | greenlet>=0.3.1 5 | 6 | # < 0.8.0/0.8 does not work, see https://bugs.launchpad.net/bugs/1153983 7 | SQLAlchemy>=0.7.8,<=1.0.17 8 | anyjson>=0.3.3 9 | eventlet>=0.9.12 10 | PasteDeploy 11 | routes 12 | WebOb>=1.2 13 | wsgiref 14 | argparse 15 | boto 16 | sqlalchemy-migrate>=0.7 17 | httplib2 18 | kombu>2.4.7 19 | iso8601>=0.1.4 20 | oslo.config>=1.1.0 21 | 22 | 23 | # For Swift storage backend. 24 | python-swiftclient>=1.2,<4 25 | 26 | # Note you will need gcc buildtools installed and must 27 | # have installed libxml headers for lxml to be successfully 28 | # installed using pip, therefore you will need to install the 29 | # libxml2-dev and libxslt-dev Ubuntu packages. 
30 | lxml 31 | 32 | # For paste.util.template used in keystone.common.template 33 | Paste 34 | 35 | passlib 36 | jsonschema 37 | python-cinderclient>=1.0.4 38 | python-keystoneclient>=0.2.0 39 | pyOpenSSL 40 | 41 | # Required by openstack.common libraries 42 | six 43 | -------------------------------------------------------------------------------- /openstack_requirements/tests/files/setup.cfg: -------------------------------------------------------------------------------- 1 | [metadata] 2 | name = testproject 3 | summary = OpenStack Test Project 4 | description-file = 5 | README.rst 6 | author = OpenStack 7 | author-email = openstack-discuss@lists.openstack.org 8 | home-page = https://docs.openstack.org/requirements/latest/ 9 | classifier = 10 | Environment :: OpenStack 11 | Intended Audience :: Information Technology 12 | Intended Audience :: System Administrators 13 | License :: OSI Approved :: Apache Software License 14 | Operating System :: POSIX :: Linux 15 | Programming Language :: Python 16 | Programming Language :: Python :: 2 17 | Programming Language :: Python :: 2.7 18 | Programming Language :: Python :: 2.6 19 | -------------------------------------------------------------------------------- /openstack_requirements/tests/files/setup.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # Copyright (c) 2013 Hewlett-Packard Development Company, L.P. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 13 | # implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License. 16 | 17 | import setuptools 18 | 19 | setuptools.setup( 20 | setup_requires=['d2to1', 'pbr>=0.5,<0.6'], 21 | d2to1=True) 22 | -------------------------------------------------------------------------------- /openstack_requirements/tests/files/test-project.txt: -------------------------------------------------------------------------------- 1 | hacking>=0.5.6,<0.7 2 | coverage>=3.6 3 | discover 4 | feedparser 5 | fixtures>=0.3.12 6 | mox==0.5.3 7 | mox3==0.21.0 8 | psycopg2 9 | pylint==0.25.2 10 | # Imported by ldapdns so required to generate 11 | # the sample configuration file 12 | python-ldap==2.3.13 13 | python-subunit 14 | setuptools_git>=0.4 15 | sphinx>=1.1.2 16 | oslo.sphinx 17 | testrepository>=0.0.13 18 | testtools>=0.9.27 19 | -------------------------------------------------------------------------------- /openstack_requirements/tests/test_build_lower_constraints.py: -------------------------------------------------------------------------------- 1 | # Licensed under the Apache License, Version 2.0 (the "License"); you may 2 | # not use this file except in compliance with the License. You may obtain 3 | # a copy of the License at 4 | # 5 | # http://www.apache.org/licenses/LICENSE-2.0 6 | # 7 | # Unless required by applicable law or agreed to in writing, software 8 | # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 9 | # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the 10 | # License for the specific language governing permissions and limitations 11 | # under the License. 12 | 13 | import testtools 14 | 15 | from openstack_requirements.cmds import build_lower_constraints 16 | from openstack_requirements import requirement 17 | 18 | 19 | class BuildLowerConstraintsTest(testtools.TestCase): 20 | 21 | def test_one_input_file(self): 22 | inputs = [ 23 | requirement.parse('package==1.2.3'), 24 | ] 25 | expected = [ 26 | 'package==1.2.3\n', 27 | ] 28 | self.assertEqual( 29 | expected, 30 | list(build_lower_constraints.merge_constraints_sets(inputs)) 31 | ) 32 | 33 | def test_two_input_file_same(self): 34 | inputs = [ 35 | requirement.parse('package==1.2.3'), 36 | requirement.parse('package==1.2.3'), 37 | ] 38 | expected = [ 39 | 'package==1.2.3\n', 40 | ] 41 | self.assertEqual( 42 | expected, 43 | list(build_lower_constraints.merge_constraints_sets(inputs)) 44 | ) 45 | 46 | def test_two_input_file_differ(self): 47 | inputs = [ 48 | requirement.parse('package==1.2.3'), 49 | requirement.parse('package==4.5.6'), 50 | ] 51 | expected = [ 52 | 'package==4.5.6\n', 53 | ] 54 | self.assertEqual( 55 | expected, 56 | list(build_lower_constraints.merge_constraints_sets(inputs)) 57 | ) 58 | 59 | def test_one_input_file_with_comments(self): 60 | inputs = [ 61 | requirement.parse('package==1.2.3\n # package2==0.9.8'), 62 | ] 63 | expected = [ 64 | 'package==1.2.3\n', 65 | ] 66 | self.assertEqual( 67 | expected, 68 | list(build_lower_constraints.merge_constraints_sets(inputs)) 69 | ) 70 | -------------------------------------------------------------------------------- /openstack_requirements/tests/test_check_constraints.py: -------------------------------------------------------------------------------- 1 | # Licensed under the Apache License, Version 2.0 (the "License"); you may 2 | # not use this file except in compliance with the License. You may obtain 3 | # a copy of the License at 4 | # 5 | # http://www.apache.org/licenses/LICENSE-2.0 6 | # 7 | # Unless required by applicable law or agreed to in writing, software 8 | # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 9 | # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the 10 | # License for the specific language governing permissions and limitations 11 | # under the License. 
12 | 13 | import io 14 | import os 15 | from unittest import mock 16 | 17 | import testscenarios 18 | import testtools 19 | 20 | from openstack_requirements.cmds import check_exists 21 | from openstack_requirements import project 22 | from openstack_requirements.tests import common 23 | 24 | load_tests = testscenarios.load_tests_apply_scenarios 25 | 26 | 27 | def mock_read_requirements_file(filename): 28 | if os.path.basename(filename) == 'upper-constraints.txt': 29 | return common.upper_constraints 30 | elif os.path.basename(filename) == 'global-requirements.txt': 31 | return common.global_reqs 32 | elif os.path.basename(filename) == 'denylist.txt': 33 | return common.denylist 34 | else: 35 | raise IOError('No such file or directory: %s' % filename) 36 | 37 | 38 | class CheckExistsTest(testtools.TestCase): 39 | 40 | def setUp(self): 41 | super(CheckExistsTest, self).setUp() 42 | 43 | @mock.patch( 44 | 'openstack_requirements.cmds.check_exists.read_requirements_file', 45 | mock_read_requirements_file) 46 | @mock.patch('openstack_requirements.project.read', 47 | return_value=common.project_project) 48 | def test_good_project(self, mock_project_read): 49 | ret = check_exists.main([common.project_fixture.root]) 50 | self.assertEqual(ret, 0) 51 | 52 | @mock.patch( 53 | 'openstack_requirements.cmds.check_exists.read_requirements_file', 54 | mock_read_requirements_file) 55 | def test_project_missing_from_uc(self): 56 | self.useFixture(common.project_fixture) 57 | orig_mocked_read_req = check_exists.read_requirements_file 58 | read_req_path = ('openstack_requirements.cmds.check_exists.' 59 | 'read_requirements_file') 60 | 61 | def remove_req_read_reqs_file(filename): 62 | if filename == 'upper-constraints.txt': 63 | upper_cons = common.upper_constraints.copy() 64 | upper_cons.pop('six') 65 | return upper_cons 66 | 67 | return orig_mocked_read_req(filename) 68 | 69 | expected_out = ('six from requirements.txt not found in' 70 | ' upper-constraints') 71 | 72 | # Start capturing some output 73 | mock_stdout = io.StringIO() 74 | with mock.patch('openstack_requirements.project.read', 75 | return_value=common.project_project), \ 76 | mock.patch('sys.stdout', mock_stdout), \ 77 | mock.patch(read_req_path, remove_req_read_reqs_file): 78 | ret = check_exists.main([common.project_fixture.root]) 79 | self.assertEqual(ret, 1) 80 | self.assertIn(expected_out, mock_stdout.getvalue()) 81 | 82 | @mock.patch( 83 | 'openstack_requirements.cmds.check_exists.read_requirements_file', 84 | mock_read_requirements_file) 85 | def test_project_missing_from_gr(self): 86 | self.useFixture(common.project_fixture) 87 | 88 | # Add some random package that won't exist in G-R 89 | with open(common.project_fixture.req_file, 'a') as req_file: 90 | req_file.write(u'SomeRandomModule #Some random module\n') 91 | req_file.flush() 92 | 93 | expected_out = ('somerandommodule from requirements.txt not found in' 94 | ' global-requirements') 95 | 96 | # Start capturing some output 97 | mock_stdout = io.StringIO() 98 | proj_read = project.read(common.project_fixture.root) 99 | with mock.patch('openstack_requirements.project.read', 100 | return_value=proj_read), \ 101 | mock.patch('sys.stdout', mock_stdout): 102 | ret = check_exists.main([common.project_fixture.root]) 103 | self.assertEqual(ret, 1) 104 | self.assertIn(expected_out, mock_stdout.getvalue()) 105 | 106 | @mock.patch( 107 | 'openstack_requirements.cmds.check_exists.read_requirements_file', 108 | mock_read_requirements_file) 109 | def
test_project_multiple_missing_from_uc_and_gr(self): 110 | self.useFixture(common.project_fixture) 111 | orig_mocked_read_req = check_exists.read_requirements_file 112 | read_req_path = ('openstack_requirements.cmds.check_exists.' 113 | 'read_requirements_file') 114 | 115 | def remove_req_read_reqs_file(filename): 116 | if filename == 'upper-constraints.txt': 117 | upper_cons = common.upper_constraints.copy() 118 | upper_cons.pop('lxml') 119 | return upper_cons 120 | 121 | return orig_mocked_read_req(filename) 122 | 123 | new_reqs = '>1.10.0\nsomerandommodule\n' 124 | 125 | # let's change the six requirement to not include the u-c version 126 | proj_read = project.read(common.project_fixture.root) 127 | proj_read['requirements']['requirements.txt'] = \ 128 | proj_read['requirements']['requirements.txt'][:-1] + new_reqs 129 | proj_read['requirements']['test-requirements.txt'] = \ 130 | proj_read['requirements']['test-requirements.txt'] + \ 131 | 'anotherrandommodule\n' 132 | 133 | expected_outs = [ 134 | 'lxml from requirements.txt not found in upper-constraints', 135 | 'somerandommodule from requirements.txt not found in ' 136 | 'global-requirements', 137 | 'anotherrandommodule from test-requirements.txt not found in ' 138 | 'global-requirements', 139 | 'six must be <= 1.10.0 from upper-constraints and include the ' 140 | 'upper-constraints version'] 141 | 142 | # Start capturing some output 143 | mock_stdout = io.StringIO() 144 | with mock.patch('openstack_requirements.project.read', 145 | return_value=proj_read), \ 146 | mock.patch('sys.stdout', mock_stdout), \ 147 | mock.patch(read_req_path, remove_req_read_reqs_file): 148 | ret = check_exists.main([common.project_fixture.root]) 149 | self.assertEqual(ret, 1) 150 | for expected in expected_outs: 151 | self.assertIn(expected, mock_stdout.getvalue()) 152 | 153 | @mock.patch( 154 | 'openstack_requirements.cmds.check_exists.read_requirements_file', 155 | mock_read_requirements_file) 156 | def test_project_req_bigger_then_uc(self): 157 | self.useFixture(common.project_fixture) 158 | 159 | # let's change the six requirement to not include the u-c version 160 | proj_read = project.read(common.project_fixture.root) 161 | proj_read['requirements']['requirements.txt'] = \ 162 | proj_read['requirements']['requirements.txt'][:-1] + '>1.10.0\n' 163 | expected_out = ('six must be <= 1.10.0 from upper-constraints and ' 164 | 'include the upper-constraints version') 165 | 166 | # Start capturing some output 167 | mock_stdout = io.StringIO() 168 | with mock.patch('openstack_requirements.project.read', 169 | return_value=proj_read), \ 170 | mock.patch('sys.stdout', mock_stdout): 171 | ret = check_exists.main([common.project_fixture.root]) 172 | self.assertEqual(ret, 1) 173 | self.assertIn(expected_out, mock_stdout.getvalue()) 174 | 175 | @mock.patch( 176 | 'openstack_requirements.cmds.check_exists.read_requirements_file', 177 | mock_read_requirements_file) 178 | def test_project_req_not_include_uc_version(self): 179 | self.useFixture(common.project_fixture) 180 | 181 | # let's change the six requirement to not include the u-c version 182 | proj_read = project.read(common.project_fixture.root) 183 | proj_read['requirements']['requirements.txt'] = \ 184 | proj_read['requirements']['requirements.txt'][:-1] + \ 185 | '<1.10.0,>1.10.0\n' 186 | expected_out = ('six must be <= 1.10.0 from upper-constraints and ' 187 | 'include the upper-constraints version') 188 | 189 | # Start capturing some output 190 | mock_stdout = io.StringIO() 191 | with
mock.patch('openstack_requirements.project.read', 192 | return_value=proj_read), \ 193 | mock.patch('sys.stdout', mock_stdout): 194 | ret = check_exists.main([common.project_fixture.root]) 195 | self.assertEqual(ret, 1) 196 | self.assertIn(expected_out, mock_stdout.getvalue()) 197 | -------------------------------------------------------------------------------- /openstack_requirements/tests/test_constraints.py: -------------------------------------------------------------------------------- 1 | # Licensed under the Apache License, Version 2.0 (the "License"); you may 2 | # not use this file except in compliance with the License. You may obtain 3 | # a copy of the License at 4 | # 5 | # http://www.apache.org/licenses/LICENSE-2.0 6 | # 7 | # Unless required by applicable law or agreed to in writing, software 8 | # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 9 | # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the 10 | # License for the specific language governing permissions and limitations 11 | # under the License. 12 | 13 | import testtools 14 | 15 | from openstack_requirements import constraints 16 | from openstack_requirements import requirement 17 | 18 | 19 | class TestCheckCompatible(testtools.TestCase): 20 | 21 | def test_non_requirement(self): 22 | global_reqs = {} 23 | good_constraints = requirement.parse("foo===1.2.5\n") 24 | self.assertEqual( 25 | [], 26 | constraints.check_compatible(global_reqs, good_constraints) 27 | ) 28 | 29 | def test_compatible(self): 30 | global_reqs = requirement.parse("foo>=1.2\nbar>2.0\n") 31 | good_constraints = requirement.parse("foo===1.2.5\n") 32 | self.assertEqual( 33 | [], 34 | constraints.check_compatible(global_reqs, good_constraints) 35 | ) 36 | 37 | def test_constraint_below_range(self): 38 | global_reqs = requirement.parse("oslo.concurrency>=2.3.0\nbar>1.0\n") 39 | bad_constraints = requirement.parse("oslo.concurrency===2.2.0\n") 40 | results = constraints.check_compatible(global_reqs, bad_constraints) 41 | self.assertNotEqual([], results) 42 | 43 | def test_constraint_above_range(self): 44 | global_reqs = requirement.parse("foo>=1.2,<2.0\nbar>1.0\n") 45 | bad_constraints = requirement.parse("foo===2.0.1\n") 46 | results = constraints.check_compatible(global_reqs, bad_constraints) 47 | self.assertNotEqual([], results) 48 | 49 | 50 | class TestCheckFormat(testtools.TestCase): 51 | 52 | def test_ok(self): 53 | good_constraints = requirement.parse("foo===1.2.5\n") 54 | self.assertEqual( 55 | [], 56 | list(constraints.check_format(good_constraints)) 57 | ) 58 | 59 | def test_two_equals(self): 60 | bad_constraints = requirement.parse("foo==1.2.5\n") 61 | self.assertEqual( 62 | 1, 63 | len(list(constraints.check_format(bad_constraints))) 64 | ) 65 | 66 | 67 | class TestDenylistCoverage(testtools.TestCase): 68 | 69 | def test_constrained(self): 70 | global_reqs = requirement.parse("foo>=1.2\nbar>2.0\n") 71 | good_constraints = requirement.parse("foo===1.2.5\nbar==2.1") 72 | denylist = requirement.parse('flake8\nhacking') 73 | self.assertEqual( 74 | [], 75 | list(constraints.check_denylist_coverage( 76 | global_reqs, good_constraints, denylist, 'test')) 77 | ) 78 | 79 | def test_denylisted(self): 80 | global_reqs = requirement.parse("foo>=1.2\nbar>2.0\n") 81 | good_constraints = requirement.parse("foo===1.2.5\n") 82 | denylist = requirement.parse('flake8\nhacking\nbar') 83 | self.assertEqual( 84 | [], 85 | list(constraints.check_denylist_coverage( 86 | global_reqs, good_constraints, denylist, 
'test')) 87 | ) 88 | 89 | def test_both(self): 90 | global_reqs = requirement.parse("foo>=1.2\nbar>2.0\n") 91 | good_constraints = requirement.parse("foo===1.2.5\nbar>2.0") 92 | denylist = requirement.parse('flake8\nhacking\nbar') 93 | results = list(constraints.check_denylist_coverage( 94 | global_reqs, good_constraints, denylist, 'test')) 95 | self.assertEqual(1, len(results)) 96 | self.assertIn("'bar' appears in both", results[0]) 97 | 98 | def test_neither(self): 99 | global_reqs = requirement.parse("foo>=1.2\nbar>2.0\n") 100 | good_constraints = requirement.parse("foo===1.2.5\n") 101 | denylist = requirement.parse('flake8\nhacking') 102 | results = list(constraints.check_denylist_coverage( 103 | global_reqs, good_constraints, denylist, 'test')) 104 | self.assertEqual(1, len(results)) 105 | self.assertIn("'bar' appears in global-requirements.txt", results[0]) 106 | -------------------------------------------------------------------------------- /openstack_requirements/tests/test_edit_constraint.py: -------------------------------------------------------------------------------- 1 | # Licensed under the Apache License, Version 2.0 (the "License"); you may 2 | # not use this file except in compliance with the License. You may obtain 3 | # a copy of the License at 4 | # 5 | # http://www.apache.org/licenses/LICENSE-2.0 6 | # 7 | # Unless required by applicable law or agreed to in writing, software 8 | # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 9 | # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the 10 | # License for the specific language governing permissions and limitations 11 | # under the License. 12 | 13 | import io 14 | import os 15 | import textwrap 16 | 17 | import fixtures 18 | import testscenarios 19 | import testtools 20 | 21 | from openstack_requirements.cmds import edit_constraint as edit 22 | from openstack_requirements import requirement 23 | 24 | 25 | load_tests = testscenarios.load_tests_apply_scenarios 26 | 27 | 28 | class SmokeTest(testtools.TestCase): 29 | 30 | def test_make_url(self): 31 | stdout = io.StringIO() 32 | tmpdir = self.useFixture(fixtures.TempDir()).path 33 | constraints_path = os.path.join(tmpdir, 'name.txt') 34 | with open(constraints_path, 'wt') as f: 35 | f.write('bar===1\nfoo===1.0.2\nquux==3\n') 36 | rv = edit.main( 37 | [constraints_path, 'foo', '--', '-e /path/to/foo'], stdout) 38 | self.assertEqual(0, rv) 39 | content = open(constraints_path, 'rt').read() 40 | self.assertEqual('-e /path/to/foo\nbar===1\nquux==3\n', content) 41 | 42 | def test_edit_paths(self): 43 | stdout = io.StringIO() 44 | tmpdir = self.useFixture(fixtures.TempDir()).path 45 | constraints_path = os.path.join(tmpdir, 'name.txt') 46 | with open(constraints_path, 'wt') as f: 47 | f.write(textwrap.dedent("""\ 48 | file:///path/to/foo#egg=foo 49 | -e file:///path/to/bar#egg=bar 50 | """)) 51 | rv = edit.main( 52 | [constraints_path, 'foo', '--', '-e file:///path/to/foo#egg=foo'], 53 | stdout) 54 | self.assertEqual(0, rv) 55 | content = open(constraints_path, 'rt').read() 56 | self.assertEqual(textwrap.dedent("""\ 57 | -e file:///path/to/foo#egg=foo 58 | -e file:///path/to/bar#egg=bar 59 | """), content) 60 | 61 | 62 | class TestEdit(testtools.TestCase): 63 | 64 | def test_add(self): 65 | reqs = {} 66 | res = edit.edit(reqs, 'foo', 'foo==1.2') 67 | self.assertEqual(requirement.Requirements( 68 | [requirement.Requirement('', '', '', '', 'foo==1.2')]), res) 69 | 70 | def test_delete(self): 71 | reqs = 
requirement.parse('foo==1.2\n') 72 | res = edit.edit(reqs, 'foo', '') 73 | self.assertEqual(requirement.Requirements([]), res) 74 | 75 | def test_replace(self): 76 | reqs = requirement.parse('foo==1.2\n') 77 | res = edit.edit(reqs, 'foo', 'foo==1.3') 78 | self.assertEqual(requirement.Requirements( 79 | [requirement.Requirement('', '', '', '', 'foo==1.3')]), res) 80 | 81 | def test_replace_many(self): 82 | reqs = requirement.parse('foo==1.2;p\nfoo==1.3;q') 83 | res = edit.edit(reqs, 'foo', 'foo==1.3') 84 | self.assertEqual(requirement.Requirements( 85 | [requirement.Requirement('', '', '', '', 'foo==1.3')]), res) 86 | 87 | def test_replace_non_canonical(self): 88 | new_req = '-e file:///path#egg=foo_baz' 89 | reqs = requirement.parse("foo-baz===1.0.2\n") 90 | res = edit.edit(reqs, 'foo_baz', new_req) 91 | self.assertEqual(res, requirement.Requirements( 92 | [requirement.Requirement('', '', '', '', new_req)])) 93 | -------------------------------------------------------------------------------- /openstack_requirements/tests/test_generate.py: -------------------------------------------------------------------------------- 1 | # Licensed under the Apache License, Version 2.0 (the "License"); you may 2 | # not use this file except in compliance with the License. You may obtain 3 | # a copy of the License at 4 | # 5 | # http://www.apache.org/licenses/LICENSE-2.0 6 | # 7 | # Unless required by applicable law or agreed to in writing, software 8 | # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 9 | # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the 10 | # License for the specific language governing permissions and limitations 11 | # under the License. 12 | 13 | import os.path 14 | import subprocess 15 | 16 | import fixtures 17 | import testtools 18 | from testtools import matchers 19 | 20 | from openstack_requirements.cmds import generate 21 | 22 | 23 | class TestFreeze(testtools.TestCase): 24 | 25 | def test_freeze_smoke(self): 26 | # Use an arbitrary python, but make sure it has the venv standard lib. 27 | versions = ['/usr/bin/python3.%(v)s' % dict(v=v) for v in range(5, 10)] 28 | found = [v for v in versions if os.path.exists(v)] 29 | found_with_venv = [] 30 | for py in found: 31 | output = str(subprocess.check_output( 32 | [py, 33 | '-c', 34 | 'import pkgutil; [print(x) for x in pkgutil.iter_modules()]'] 35 | )) 36 | # Needs both venv and ensurepip 37 | if 'venv' in output and 'ensurepip' in output: 38 | found_with_venv.append(py) 39 | 40 | if len(found_with_venv) == 0: 41 | self.skipTest('Unable to find python that includes venv module') 42 | 43 | # Grab the latest version available as that is the most likely to 44 | # break. 45 | pyversion = found_with_venv[-1] 46 | req = self.useFixture(fixtures.TempDir()).path + '/r.txt' 47 | with open(req, 'wt') as output: 48 | output.write('fixtures==2.0.0') 49 | frozen = generate._freeze(req, pyversion) 50 | expected_version = pyversion[-3:] 51 | self.expectThat(frozen, matchers.HasLength(2)) 52 | self.expectThat(frozen[0], matchers.Equals(expected_version)) 53 | # There are multiple items in the dependency tree of fixtures. 54 | # Since this is a smoke test, just ensure fixtures is there. 
55 | self.expectThat(frozen[1], matchers.Contains(('fixtures', '2.0.0'))) 56 | 57 | 58 | class TestParse(testtools.TestCase): 59 | 60 | def test_parse(self): 61 | text = "linecache2==1.0.0\nargparse==1.2\n\n# fred\n" 62 | parsed = generate._parse_freeze(text) 63 | self.assertEqual( 64 | [('linecache2', '1.0.0'), ('argparse', '1.2')], parsed) 65 | 66 | def test_editable_banned(self): 67 | text = "-e git:..." 68 | self.assertRaises(Exception, generate._parse_freeze, text) # noqa 69 | 70 | 71 | class TestCombine(testtools.TestCase): 72 | 73 | def test_same_items(self): 74 | fixtures = [('fixtures', '1.2.0')] 75 | freeze_27 = ('2.7', fixtures) 76 | freeze_34 = ('3.4', fixtures) 77 | self.assertEqual( 78 | ['fixtures===1.2.0\n'], 79 | list(generate._combine_freezes([freeze_27, freeze_34]))) 80 | 81 | def test_distinct_items(self): 82 | freeze_27 = ('2.7', [('fixtures', '1.2.0')]) 83 | freeze_34 = ('3.4', [('fixtures', '1.2.0'), ('enum', '1.5.0')]) 84 | self.assertEqual( 85 | ["enum===1.5.0;python_version=='3.4'\n", 'fixtures===1.2.0\n'], 86 | list(generate._combine_freezes([freeze_27, freeze_34]))) 87 | 88 | def test_different_versions(self): 89 | freeze_27 = ('2.7', [('fixtures', '1.2.0')]) 90 | freeze_34 = ('3.4', [('fixtures', '1.5.0')]) 91 | self.assertEqual( 92 | ["fixtures===1.2.0;python_version<='2.7'\n", 93 | "fixtures===1.5.0;python_version>='3.4'\n"], 94 | list(generate._combine_freezes([freeze_27, freeze_34]))) 95 | 96 | def test_duplicate_pythons(self): 97 | with testtools.ExpectedException(Exception): 98 | list(generate._combine_freezes([('2.7', []), ('2.7', [])])) 99 | 100 | def test_denylist(self): 101 | denylist = ['Fixtures'] 102 | freeze_27 = ('2.7', [('fixtures', '1.2.0')]) 103 | freeze_34 = ('3.4', [('fixtures', '1.2.0'), ('enum', '1.5.0')]) 104 | self.assertEqual( 105 | ["enum===1.5.0;python_version=='3.4'\n"], 106 | list(generate._combine_freezes( 107 | [freeze_27, freeze_34], denylist=denylist))) 108 | 109 | def test_denylist_with_safe_name(self): 110 | denylist = ['flake8_docstrings'] 111 | freeze_27 = ('2.7', [('flake8-docstrings', '0.2.1.post1'), 112 | ('enum', '1.5.0')]) 113 | self.assertEqual( 114 | ['enum===1.5.0\n'], 115 | list(generate._combine_freezes( 116 | [freeze_27], denylist=denylist))) 117 | 118 | 119 | class Namespace(object): 120 | def __init__(self, **kwargs): 121 | self.__dict__.update(kwargs) 122 | 123 | 124 | class TestClone(testtools.TestCase): 125 | 126 | def test_py34_clone_py35(self): 127 | # Simulate an environment where we have python 3.4 data and need to 128 | # clone that to python 3.5 129 | options = Namespace(version_map={'3.4': set(['3.5']), 130 | '3.5': set(['3.4'])}) 131 | freeze_27 = ('2.7', [('dnspython', '1.15.0')]) 132 | freeze_34 = ('3.4', [('dnspython3', '1.12.0')]) 133 | freeze_35 = ('3.5', [('dnspython3', '1.12.0')]) 134 | 135 | freezes = [freeze_27, freeze_34] 136 | expected_freezes = [freeze_27, freeze_34, freeze_35] 137 | 138 | generate._clone_versions(freezes, options) 139 | 140 | self.assertEqual(expected_freezes, freezes) 141 | 142 | def test_py34_noclone_py35(self): 143 | # Simulate an environment where we have python 3.4 and python 3.5 data 144 | # so there is no need to clone. 
145 | options = Namespace(version_map={'3.4': set(['3.5']), 146 | '3.5': set(['3.4'])}) 147 | freeze_27 = ('2.7', [('dnspython', '1.15.0')]) 148 | freeze_34 = ('3.4', [('dnspython3', '1.12.0')]) 149 | freeze_35 = ('3.5', [('other-pkg', '1.0.0')]) 150 | 151 | freezes = [freeze_27, freeze_34, freeze_35] 152 | expected_freezes = [freeze_27, freeze_34, freeze_35] 153 | 154 | generate._clone_versions(freezes, options) 155 | 156 | self.assertEqual(expected_freezes, freezes) 157 | 158 | def test_py35_clone_py34(self): 159 | # Simulate an environment where we have python 3.5 data and need to 160 | # clone that to python 3.4 161 | options = Namespace(version_map={'3.4': set(['3.5']), 162 | '3.5': set(['3.4'])}) 163 | freeze_27 = ('2.7', [('dnspython', '1.15.0')]) 164 | freeze_34 = ('3.4', [('dnspython3', '1.12.0')]) 165 | freeze_35 = ('3.5', [('dnspython3', '1.12.0')]) 166 | 167 | freezes = [freeze_27, freeze_35] 168 | expected_freezes = [freeze_27, freeze_35, freeze_34] 169 | 170 | generate._clone_versions(freezes, options) 171 | 172 | self.assertEqual(expected_freezes, freezes) 173 | 174 | def test_py35_clone_py34_py36(self): 175 | # Simulate an environment where we have python 3.5 data and need to 176 | # clone that to python 3.4 177 | options = Namespace(version_map={'3.5': set(['3.4', '3.6'])}) 178 | freeze_27 = ('2.7', [('dnspython', '1.15.0')]) 179 | freeze_34 = ('3.4', [('dnspython3', '1.12.0')]) 180 | freeze_35 = ('3.5', [('dnspython3', '1.12.0')]) 181 | freeze_36 = ('3.6', [('dnspython3', '1.12.0')]) 182 | 183 | freezes = [freeze_27, freeze_35] 184 | expected_freezes = [freeze_27, freeze_35, freeze_34, freeze_36] 185 | 186 | generate._clone_versions(freezes, options) 187 | 188 | self.assertEqual(expected_freezes, freezes) 189 | -------------------------------------------------------------------------------- /openstack_requirements/tests/test_project.py: -------------------------------------------------------------------------------- 1 | # Licensed under the Apache License, Version 2.0 (the "License"); you may 2 | # not use this file except in compliance with the License. You may obtain 3 | # a copy of the License at 4 | # 5 | # http://www.apache.org/licenses/LICENSE-2.0 6 | # 7 | # Unless required by applicable law or agreed to in writing, software 8 | # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 9 | # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the 10 | # License for the specific language governing permissions and limitations 11 | # under the License. 
12 | 13 | import io 14 | import textwrap 15 | 16 | import fixtures 17 | import parsley 18 | import testscenarios 19 | import testtools 20 | from testtools import matchers 21 | 22 | from openstack_requirements import project 23 | from openstack_requirements import requirement 24 | from openstack_requirements.tests import common 25 | 26 | 27 | load_tests = testscenarios.load_tests_apply_scenarios 28 | 29 | 30 | class TestReadProject(testtools.TestCase): 31 | 32 | def test_pbr(self): 33 | root = self.useFixture(common.pbr_fixture).root 34 | proj = project.read(root) 35 | self.expectThat(proj['root'], matchers.Equals(root)) 36 | setup_py = open(root + '/setup.py', 'rt').read() 37 | self.expectThat(proj['setup.py'], matchers.Equals(setup_py)) 38 | setup_cfg = open(root + '/setup.cfg', 'rt').read() 39 | self.expectThat(proj['setup.cfg'], matchers.Equals(setup_cfg)) 40 | self.expectThat( 41 | proj['requirements'], 42 | matchers.KeysEqual('requirements.txt', 'test-requirements.txt')) 43 | 44 | def test_no_setup_py(self): 45 | root = self.useFixture(fixtures.TempDir()).path 46 | proj = project.read(root) 47 | self.expectThat( 48 | proj, matchers.Equals({'root': root, 'requirements': {}, 49 | 'lower-constraints.txt': None})) 50 | 51 | 52 | class TestProjectExtras(testtools.TestCase): 53 | 54 | def test_smoke(self): 55 | proj = {'setup.cfg': textwrap.dedent(u""" 56 | [extras] 57 | 1 = 58 | foo 59 | 2 = 60 | foo # fred 61 | bar 62 | """)} 63 | expected = { 64 | '1': '\nfoo', 65 | '2': '\nfoo # fred\nbar' 66 | } 67 | self.assertEqual(expected, project.extras(proj)) 68 | 69 | def test_none(self): 70 | proj = {'setup.cfg': u"[metadata]\n"} 71 | self.assertEqual({}, project.extras(proj)) 72 | 73 | def test_no_setup_cfg(self): 74 | proj = {} 75 | self.assertEqual({}, project.extras(proj)) 76 | 77 | 78 | class TestExtrasParsing(testtools.TestCase): 79 | 80 | def test_none(self): 81 | old_content = textwrap.dedent(u""" 82 | [metadata] 83 | # something something 84 | name = fred 85 | 86 | [entry_points] 87 | console_scripts = 88 | foo = bar:quux 89 | """) 90 | ini = project._extras_compiled(old_content).ini() 91 | self.assertEqual(ini, (old_content, None, '')) 92 | 93 | def test_no_eol(self): 94 | old_content = textwrap.dedent(u""" 95 | [metadata] 96 | # something something 97 | name = fred 98 | 99 | [entry_points] 100 | console_scripts = 101 | foo = bar:quux""") 102 | expected1 = textwrap.dedent(u""" 103 | [metadata] 104 | # something something 105 | name = fred 106 | 107 | [entry_points] 108 | console_scripts = 109 | """) 110 | suffix = ' foo = bar:quux' 111 | ini = project._extras_compiled(old_content).ini() 112 | self.assertEqual(ini, (expected1, None, suffix)) 113 | 114 | def test_two_extras_raises(self): 115 | old_content = textwrap.dedent(u""" 116 | [metadata] 117 | # something something 118 | name = fred 119 | 120 | [extras] 121 | a = b 122 | [extras] 123 | b = c 124 | 125 | [entry_points] 126 | console_scripts = 127 | foo = bar:quux 128 | """) 129 | with testtools.ExpectedException(parsley.ParseError): 130 | project._extras_compiled(old_content).ini() 131 | 132 | def test_extras(self): 133 | # We get an AST for extras we can use to preserve comments. 
134 | old_content = textwrap.dedent(u""" 135 | [metadata] 136 | # something something 137 | name = fred 138 | 139 | [extras] 140 | # comment1 141 | a = 142 | b 143 | c 144 | # comment2 145 | # comment3 146 | d = 147 | e 148 | # comment4 149 | 150 | [entry_points] 151 | console_scripts = 152 | foo = bar:quux 153 | """) 154 | prefix = textwrap.dedent(u""" 155 | [metadata] 156 | # something something 157 | name = fred 158 | 159 | """) 160 | suffix = textwrap.dedent(u"""\ 161 | [entry_points] 162 | console_scripts = 163 | foo = bar:quux 164 | """) 165 | extras = [ 166 | project._Comment('# comment1\n'), 167 | project._Extra('a', '\nb\nc\n'), 168 | project._Comment('# comment2\n'), 169 | project._Comment('# comment3\n'), 170 | project._Extra('d', '\ne\n'), 171 | project._Comment('# comment4\n')] 172 | ini = project._extras_compiled(old_content).ini() 173 | self.assertEqual(ini, (prefix, extras, suffix)) 174 | 175 | 176 | class TestMergeSetupCfg(testtools.TestCase): 177 | 178 | def test_merge_none(self): 179 | old_content = textwrap.dedent(u""" 180 | [metadata] 181 | # something something 182 | name = fred 183 | 184 | [entry_points] 185 | console_scripts = 186 | foo = bar:quux 187 | """) 188 | merged = project.merge_setup_cfg(old_content, {}) 189 | self.assertEqual(old_content, merged) 190 | 191 | def test_merge_extras(self): 192 | old_content = textwrap.dedent(u""" 193 | [metadata] 194 | name = fred 195 | 196 | [extras] 197 | # Comment 198 | a = 199 | b 200 | # comment 201 | c = 202 | d 203 | 204 | [entry_points] 205 | console_scripts = 206 | foo = bar:quux 207 | """) 208 | blank = requirement.Requirement('', '', '', '', '') 209 | r1 = requirement.Requirement( 210 | 'b', '', '>=1', "python_version=='2.7'", '') 211 | r2 = requirement.Requirement('d', '', '', '', '# BSD') 212 | reqs = { 213 | 'a': requirement.Requirements([blank, r1]), 214 | 'c': requirement.Requirements([blank, r2])} 215 | merged = project.merge_setup_cfg(old_content, reqs) 216 | expected = textwrap.dedent(u""" 217 | [metadata] 218 | name = fred 219 | 220 | [extras] 221 | # Comment 222 | a = 223 | b>=1:python_version=='2.7' 224 | # comment 225 | c = 226 | d # BSD 227 | 228 | [entry_points] 229 | console_scripts = 230 | foo = bar:quux 231 | """) 232 | self.assertEqual(expected, merged) 233 | 234 | 235 | class TestWriteProject(testtools.TestCase): 236 | 237 | def test_smoke(self): 238 | stdout = io.StringIO() 239 | root = self.useFixture(fixtures.TempDir()).path 240 | proj = {'root': root} 241 | actions = [ 242 | project.File('foo', '123\n'), 243 | project.File('bar', '456\n'), 244 | project.Verbose(u'fred')] 245 | project.write(proj, actions, stdout, True) 246 | foo = open(root + '/foo', 'rt').read() 247 | self.expectThat(foo, matchers.Equals('123\n')) 248 | bar = open(root + '/bar', 'rt').read() 249 | self.expectThat(bar, matchers.Equals('456\n')) 250 | self.expectThat(stdout.getvalue(), matchers.Equals('fred\n')) 251 | 252 | def test_non_verbose(self): 253 | stdout = io.StringIO() 254 | root = self.useFixture(fixtures.TempDir()).path 255 | proj = {'root': root} 256 | actions = [project.Verbose(u'fred')] 257 | project.write(proj, actions, stdout, False) 258 | self.expectThat(stdout.getvalue(), matchers.Equals('')) 259 | 260 | def test_bad_action(self): 261 | root = self.useFixture(fixtures.TempDir()).path 262 | stdout = io.StringIO() 263 | proj = {'root': root} 264 | actions = [('foo', 'bar')] 265 | with testtools.ExpectedException(Exception): 266 | project.write(proj, actions, stdout, True) 267 | 268 | def test_stdout(self): 
269 | stdout = io.StringIO() 270 | root = self.useFixture(fixtures.TempDir()).path 271 | proj = {'root': root} 272 | actions = [project.StdOut(u'fred\n')] 273 | project.write(proj, actions, stdout, True) 274 | self.expectThat(stdout.getvalue(), matchers.Equals('fred\n')) 275 | 276 | def test_errors(self): 277 | stdout = io.StringIO() 278 | root = self.useFixture(fixtures.TempDir()).path 279 | proj = {'root': root} 280 | actions = [project.Error(u'fred')] 281 | with testtools.ExpectedException(Exception): 282 | project.write(proj, actions, stdout, True) 283 | self.expectThat(stdout.getvalue(), matchers.Equals('fred\n')) 284 | -------------------------------------------------------------------------------- /openstack_requirements/tests/test_requirement.py: -------------------------------------------------------------------------------- 1 | # Licensed under the Apache License, Version 2.0 (the "License"); you may 2 | # not use this file except in compliance with the License. You may obtain 3 | # a copy of the License at 4 | # 5 | # http://www.apache.org/licenses/LICENSE-2.0 6 | # 7 | # Unless required by applicable law or agreed to in writing, software 8 | # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 9 | # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the 10 | # License for the specific language governing permissions and limitations 11 | # under the License. 12 | 13 | import textwrap 14 | 15 | import testscenarios 16 | import testtools 17 | 18 | from openstack_requirements import requirement 19 | 20 | 21 | load_tests = testscenarios.load_tests_apply_scenarios 22 | 23 | 24 | class TestParseRequirement(testtools.TestCase): 25 | 26 | dist_scenarios = [ 27 | ('package', dict( 28 | line='swift', 29 | req=requirement.Requirement('swift', '', '', '', ''))), 30 | ('specifier', dict( 31 | line='alembic>=0.4.1', 32 | req=requirement.Requirement('alembic', '', '>=0.4.1', '', ''))), 33 | ('specifiers', dict( 34 | line='alembic>=0.4.1,!=1.1.8', 35 | req=requirement.Requirement('alembic', '', '!=1.1.8,>=0.4.1', '', 36 | ''))), 37 | ('comment-only', dict( 38 | line='# foo', 39 | req=requirement.Requirement('', '', '', '', '# foo'))), 40 | ('comment', dict( 41 | line='Pint>=0.5 # BSD', 42 | req=requirement.Requirement('Pint', '', '>=0.5', '', '# BSD'))), 43 | ('comment-with-semicolon', dict( 44 | line='Pint>=0.5 # BSD;fred', 45 | req=requirement.Requirement('Pint', '', '>=0.5', '', '# BSD;fred'))), 46 | ('case', dict( 47 | line='Babel>=1.3', 48 | req=requirement.Requirement('Babel', '', '>=1.3', '', ''))), 49 | ('markers', dict( 50 | line="pywin32;sys_platform=='win32'", 51 | req=requirement.Requirement('pywin32', '', '', 52 | "sys_platform=='win32'", ''))), 53 | ('markers-with-comment', dict( 54 | line="Sphinx<=1.2; python_version=='2.7'# Sadface", 55 | req=requirement.Requirement('Sphinx', '', '<=1.2', 56 | "python_version=='2.7'", '# Sadface')))] 57 | url_scenarios = [ 58 | ('url', dict( 59 | line='file:///path/to/thing#egg=thing', 60 | req=requirement.Requirement('thing', 'file:///path/to/thing', '', '', 61 | ''), 62 | permit_urls=True)), 63 | ('oslo-url', dict( 64 | line='file:///path/to/oslo.thing#egg=oslo.thing', 65 | req=requirement.Requirement('oslo.thing', 66 | 'file:///path/to/oslo.thing', '', '', ''), 67 | permit_urls=True)), 68 | ('url-comment', dict( 69 | line='file:///path/to/thing#egg=thing # http://altpath#egg=boo', 70 | req=requirement.Requirement('thing', 'file:///path/to/thing', '', '', 71 | '# http://altpath#egg=boo'), 72 | 
permit_urls=True)), 73 | ('editable', dict( 74 | line='-e file:///path/to/bar#egg=bar', 75 | req=requirement.Requirement('bar', '-e file:///path/to/bar', '', '', 76 | ''), 77 | permit_urls=True)), 78 | ('editable_vcs_git', dict( 79 | line='-e git+http://github.com/path/to/oslo.bar#egg=oslo.bar', 80 | req=requirement.Requirement('oslo.bar', 81 | '-e git+http://github.com' 82 | '/path/to/oslo.bar', '', '', ''), 83 | permit_urls=True)), 84 | ('editable_vcs_git_ssh', dict( 85 | line='-e git+ssh://github.com/path/to/oslo.bar#egg=oslo.bar', 86 | req=requirement.Requirement('oslo.bar', 87 | '-e git+ssh://github.com' 88 | '/path/to/oslo.bar', '', '', ''), 89 | permit_urls=True)), 90 | ] 91 | scenarios = dist_scenarios + url_scenarios 92 | 93 | def test_parse(self): 94 | parsed = requirement.parse_line( 95 | self.line, permit_urls=getattr(self, 'permit_urls', False)) 96 | self.assertEqual(self.req, parsed) 97 | 98 | 99 | class TestParseRequirementFailures(testtools.TestCase): 100 | 101 | scenarios = [ 102 | ('url', dict(line='http://tarballs.openstack.org/oslo.config/' 103 | 'oslo.config-1.2.0a3.tar.gz#egg=oslo.config')), 104 | ('-e', dict(line='-e git+https://foo.com#egg=foo')), 105 | ('-f', dict(line='-f http://tarballs.openstack.org/'))] 106 | 107 | def test_does_not_parse(self): 108 | self.assertRaises(ValueError, requirement.parse_line, self.line) 109 | 110 | 111 | class TestToContent(testtools.TestCase): 112 | 113 | def test_smoke(self): 114 | reqs = requirement.to_content(requirement.Requirements( 115 | [requirement.Requirement( 116 | 'foo', '', '<=1', "python_version=='2.7'", '# BSD')]), 117 | marker_sep='!') 118 | self.assertEqual( 119 | ''.join(requirement._REQS_HEADER 120 | + ["foo<=1!python_version=='2.7' # BSD\n"]), 121 | reqs) 122 | 123 | def test_location(self): 124 | reqs = requirement.to_content(requirement.Requirements( 125 | [requirement.Requirement( 126 | 'foo', 'file://foo', '', "python_version=='2.7'", '# BSD')])) 127 | self.assertEqual( 128 | ''.join(requirement._REQS_HEADER 129 | + ["file://foo#egg=foo;python_version=='2.7' # BSD\n"]), 130 | reqs) 131 | 132 | 133 | class TestToReqs(testtools.TestCase): 134 | 135 | def test_editable(self): 136 | line = '-e file:///foo#egg=foo' 137 | reqs = list(requirement.to_reqs(line, permit_urls=True)) 138 | req = requirement.Requirement('foo', '-e file:///foo', '', '', '') 139 | self.assertEqual(reqs, [(req, line)]) 140 | 141 | def test_urls(self): 142 | line = 'file:///foo#egg=foo' 143 | reqs = list(requirement.to_reqs(line, permit_urls=True)) 144 | req = requirement.Requirement('foo', 'file:///foo', '', '', '') 145 | self.assertEqual(reqs, [(req, line)]) 146 | 147 | def test_not_urls(self): 148 | self.assertRaises( 149 | ValueError, list, requirement.to_reqs('file:///foo#egg=foo')) 150 | 151 | def test_multiline(self): 152 | content = textwrap.dedent("""\ 153 | oslo.config>=1.11.0 # Apache-2.0 154 | oslo.concurrency>=2.3.0 # Apache-2.0 155 | oslo.context>=0.2.0 # Apache-2.0 156 | """) 157 | reqs = requirement.parse(content) 158 | self.assertEqual( 159 | set(['oslo.config', 'oslo.concurrency', 'oslo.context']), 160 | set(reqs.keys()), 161 | ) 162 | 163 | def test_extras(self): 164 | content = textwrap.dedent("""\ 165 | oslo.config>=1.11.0 # Apache-2.0 166 | oslo.concurrency[fixtures]>=1.11.0 # Apache-2.0 167 | oslo.db[fixtures,mysql]>=1.11.0 # Apache-2.0 168 | """) 169 | reqs = requirement.parse(content) 170 | self.assertEqual( 171 | set(['oslo.config', 'oslo.concurrency', 'oslo.db']), 172 | set(reqs.keys()), 173 | ) 174 | 
self.assertEqual(reqs['oslo.config'][0][0].extras, frozenset(())) 175 | self.assertEqual(reqs['oslo.concurrency'][0][0].extras, 176 | frozenset(('fixtures',))) 177 | self.assertEqual(reqs['oslo.db'][0][0].extras, 178 | frozenset(('fixtures', 'mysql'))) 179 | self.assertCountEqual(reqs, 180 | ['oslo.config', 'oslo.concurrency', 'oslo.db']) 181 | 182 | 183 | class TestCanonicalName(testtools.TestCase): 184 | 185 | def test_underscores(self): 186 | self.assertEqual('foo-bar', requirement.canonical_name('Foo_bar')) 187 | 188 | 189 | class TestToDict(testtools.TestCase): 190 | 191 | def test_canonicalises(self): 192 | req = requirement.Requirement('Foo_bar', '', '', '', '') 193 | self.assertEqual( 194 | {'foo-bar': [(req, '')]}, requirement.to_dict([(req, '')])) 195 | 196 | 197 | class TestReqPolicy(testtools.TestCase): 198 | 199 | def test_requirements_policy_pass(self): 200 | content = textwrap.dedent("""\ 201 | cffi!=1.1.2 202 | other 203 | """) 204 | reqs = requirement.parse(content) 205 | policy_check = [x for x in requirement.check_reqs_bounds_policy(reqs)] 206 | self.assertEqual(len(policy_check), 0) 207 | 208 | def test_requirements_policy_fail(self): 209 | content = textwrap.dedent("""\ 210 | cffi>=1.1.1,!=1.1.0 211 | other>=1,>=2,!=1.1.0 212 | """) 213 | reqs = requirement.parse(content) 214 | self.assertEqual([ 215 | 'Requirement cffi should not include a >= specifier', 216 | 'Requirement other should not include a >= specifier'], 217 | sorted([x for x in requirement.check_reqs_bounds_policy(reqs)])) 218 | -------------------------------------------------------------------------------- /openstack_requirements/utils.py: -------------------------------------------------------------------------------- 1 | from openstack_requirements import requirement 2 | 3 | 4 | def read_requirements_file(filename): 5 | with open(filename, 'rt') as f: 6 | body = f.read() 7 | return requirement.parse(body) 8 | -------------------------------------------------------------------------------- /playbooks/drop-wheel-mirror.yaml: -------------------------------------------------------------------------------- 1 | - hosts: all 2 | 3 | tasks: 4 | - name: Drop wheel mirror from pip.conf 5 | become: yes 6 | lineinfile: 7 | path: /etc/pip.conf 8 | regexp: "^extra-index-url" 9 | state: absent 10 | -------------------------------------------------------------------------------- /playbooks/files/project-requirements-change.py: -------------------------------------------------------------------------------- 1 | #! /usr/bin/env python3 2 | # Copyright (C) 2011 OpenStack, LLC. 3 | # Copyright (c) 2013 Hewlett-Packard Development Company, L.P. 4 | # Copyright (c) 2013 OpenStack Foundation 5 | # 6 | # Licensed under the Apache License, Version 2.0 (the "License"); 7 | # you may not use this file except in compliance with the License. 8 | # You may obtain a copy of the License at 9 | # 10 | # http://www.apache.org/licenses/LICENSE-2.0 11 | # 12 | # Unless required by applicable law or agreed to in writing, software 13 | # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 14 | # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the 15 | # License for the specific language governing permissions and limitations 16 | # under the License. 
17 | 18 | import argparse 19 | import contextlib 20 | import os 21 | import re 22 | import shlex 23 | import shutil 24 | import subprocess 25 | import sys 26 | import tempfile 27 | 28 | from openstack_requirements import check # noqa 29 | from openstack_requirements import project # noqa 30 | from openstack_requirements import requirement # noqa 31 | 32 | 33 | PYTHON_3_BRANCH = re.compile(r'^stable\/[u-z].*') 34 | 35 | 36 | def run_command(cmd): 37 | print(cmd) 38 | cmd_list = shlex.split(str(cmd)) 39 | kwargs = {} 40 | if sys.version_info >= (3, ): 41 | kwargs = { 42 | 'encoding': 'utf-8', 43 | 'errors': 'surrogateescape', 44 | } 45 | p = subprocess.Popen(cmd_list, stdout=subprocess.PIPE, 46 | stderr=subprocess.PIPE, **kwargs) 47 | (out, err) = p.communicate() 48 | if p.returncode != 0: 49 | raise SystemError(err) 50 | return (out.strip(), err.strip()) 51 | 52 | 53 | _DEFAULT_REQS_DIR = os.path.expanduser( 54 | '~/src/opendev.org/openstack/requirements') 55 | 56 | 57 | def grab_args(): 58 | """Grab and return arguments""" 59 | parser = argparse.ArgumentParser( 60 | description="Check if project requirements have changed" 61 | ) 62 | parser.add_argument('--local', action='store_true', 63 | help='check local changes (not yet in git)') 64 | parser.add_argument('src_dir', help='directory to process') 65 | parser.add_argument('branch', nargs='?', default='master', 66 | help='target branch for diffs') 67 | parser.add_argument('--zc', help='what zuul cloner to call') 68 | parser.add_argument('--reqs', help='use a specified requirements tree', 69 | default=None) 70 | 71 | return parser.parse_args() 72 | 73 | 74 | @contextlib.contextmanager 75 | def tempdir(): 76 | try: 77 | reqroot = tempfile.mkdtemp() 78 | yield reqroot 79 | finally: 80 | shutil.rmtree(reqroot) 81 | 82 | 83 | def main(): 84 | args = grab_args() 85 | branch = args.branch 86 | reqdir = args.reqs 87 | 88 | print(sys.version_info) 89 | 90 | if reqdir is None: 91 | if args.local: 92 | print('selecting default requirements directory for local mode') 93 | reqdir = os.path.dirname( 94 | os.path.dirname( 95 | os.path.dirname( 96 | os.path.abspath(sys.argv[0])))) 97 | else: 98 | print('selecting default requirements directory for normal mode') 99 | reqdir = _DEFAULT_REQS_DIR 100 | 101 | print('Branch: {}'.format(branch)) 102 | print('Source: {}'.format(args.src_dir)) 103 | print('Requirements: {}'.format(reqdir)) 104 | 105 | os.chdir(args.src_dir) 106 | sha, _ = run_command('git log -n 1 --format=%H') 107 | print('Patch under test: {}'.format(sha)) 108 | 109 | # build a list of requirements from the global list in the 110 | # openstack/requirements project so we can match them to the changes 111 | with tempdir(): 112 | with open(reqdir + '/global-requirements.txt', 'rt') as f: 113 | global_reqs = check.get_global_reqs(f.read()) 114 | denylist = requirement.parse( 115 | open(reqdir + '/denylist.txt', 'rt').read()) 116 | backports_file = reqdir + '/backports.txt' 117 | if os.path.exists(backports_file): 118 | backports = requirement.parse(open(backports_file, 'rt').read()) 119 | else: 120 | backports = {} 121 | cwd = os.getcwd() 122 | # build a list of requirements in the proposed change, 123 | # and check them for style violations while doing so 124 | head_proj = project.read(cwd) 125 | head_reqs = check.RequirementsList(sha, head_proj) 126 | # Don't apply strict parsing rules to stable branches. 
127 | # Reasoning is: 128 | # - devstack etc protect us from functional issues 129 | # - we're backporting to stable, so guarding against 130 | # aesthetics and DRY concerns is not our business anymore 131 | # - if in future we have other not-functional linty style 132 | # things to add, we don't want them to affect stable 133 | # either. 134 | head_strict = not branch.startswith('stable/') 135 | head_reqs.process(strict=head_strict) 136 | # Starting with Ussuri and later, we only need to be strict about 137 | # Python 3 requirements. 138 | python_3_branch = head_strict or PYTHON_3_BRANCH.match(branch) 139 | 140 | failed = check.validate( 141 | head_reqs, 142 | denylist, 143 | global_reqs, 144 | list(backports.keys()), 145 | allow_3_only=python_3_branch, 146 | ) 147 | 148 | failed = ( 149 | check.validate_lower_constraints( 150 | head_reqs, 151 | head_proj['lower-constraints.txt'], 152 | denylist, 153 | ) 154 | or failed 155 | ) 156 | 157 | # report the results 158 | if failed or head_reqs.failed: 159 | print("*** Incompatible requirement found!") 160 | print("*** See https://docs.openstack.org/requirements/latest/") 161 | sys.exit(1) 162 | print("Updated requirements match openstack/requirements.") 163 | 164 | 165 | if __name__ == '__main__': 166 | main() 167 | -------------------------------------------------------------------------------- /playbooks/nodejs-pre.yaml: -------------------------------------------------------------------------------- 1 | - hosts: all 2 | roles: 3 | # package.json needs to run 'tox' as command, ensure that it's 4 | # installed and can be used globally. 5 | - role: ensure-tox 6 | ensure_global_symlinks: True 7 | -------------------------------------------------------------------------------- /playbooks/requirements-check.yaml: -------------------------------------------------------------------------------- 1 | - hosts: all 2 | roles: 3 | - ensure-pip 4 | - check-requirements 5 | -------------------------------------------------------------------------------- /projects.txt: -------------------------------------------------------------------------------- 1 | openstack/hacking 2 | openstack/pbr 3 | openstack/requestsexceptions 4 | openstack/shade 5 | openstack/ansible-config_template 6 | openstack/ansible-hardening 7 | openstack/ansible-role-python_venv_build 8 | openstack/automaton 9 | openstack/barbican 10 | openstack/barbican-tempest-plugin 11 | openstack/bifrost 12 | openstack/blazar 13 | openstack/blazar-dashboard 14 | openstack/blazar-nova 15 | openstack/blazar-tempest-plugin 16 | openstack/castellan 17 | openstack/cinder 18 | openstack/cinder-tempest-plugin 19 | openstack/cliff 20 | openstack/debtcollector 21 | openstack/designate 22 | openstack/designate-dashboard 23 | openstack/designate-tempest-plugin 24 | openstack/diskimage-builder 25 | openstack/etcd3gw 26 | openstack/freezer 27 | openstack/freezer-api 28 | openstack/freezer-tempest-plugin 29 | openstack/freezer-web-ui 30 | openstack/futurist 31 | openstack/glance 32 | openstack/glance_store 33 | openstack/glance-tempest-plugin 34 | openstack/heat 35 | openstack/heat-agents 36 | openstack/heat-dashboard 37 | openstack/heat-tempest-plugin 38 | openstack/heat-translator 39 | openstack/horizon 40 | openstack/i18n 41 | openstack/ironic 42 | openstack/ironic-inspector 43 | openstack/ironic-lib 44 | openstack/ironic-python-agent 45 | openstack/ironic-tempest-plugin 46 | openstack/ironic-ui 47 | openstack/keystone 48 | openstack/keystone-tempest-plugin 49 | openstack/keystoneauth 50 | 
openstack/keystonemiddleware 51 | openstack/kolla 52 | openstack/kolla-ansible 53 | openstack/kuryr 54 | openstack/kuryr-kubernetes 55 | openstack/kuryr-libnetwork 56 | openstack/kuryr-tempest-plugin 57 | openstack/ldappool 58 | openstack/magnum 59 | openstack/magnum-tempest-plugin 60 | openstack/magnum-ui 61 | openstack/manila 62 | openstack/manila-image-elements 63 | openstack/manila-tempest-plugin 64 | openstack/manila-ui 65 | openstack/masakari 66 | openstack/masakari-monitors 67 | openstack/metalsmith 68 | openstack/microversion-parse 69 | openstack/mistral 70 | openstack/mistral-dashboard 71 | openstack/mistral-lib 72 | openstack/mistral-tempest-plugin 73 | openstack/monasca-api 74 | openstack/monasca-common 75 | openstack/monasca-notification 76 | openstack/monasca-persister 77 | openstack/monasca-statsd 78 | openstack/monasca-tempest-plugin 79 | openstack/monasca-ui 80 | openstack/networking-bagpipe 81 | openstack/networking-baremetal 82 | openstack/networking-bgpvpn 83 | openstack/networking-generic-switch 84 | openstack/networking-sfc 85 | openstack/neutron 86 | openstack/neutron-dynamic-routing 87 | openstack/neutron-fwaas 88 | openstack/neutron-fwaas-dashboard 89 | openstack/neutron-lib 90 | openstack/neutron-tempest-plugin 91 | openstack/neutron-vpnaas 92 | openstack/neutron-vpnaas-dashboard 93 | openstack/nova 94 | openstack/octavia 95 | openstack/octavia-dashboard 96 | openstack/octavia-tempest-plugin 97 | openstack/openstack-ansible 98 | openstack/openstack-ansible-apt_package_pinning 99 | openstack/openstack-ansible-ceph_client 100 | openstack/openstack-ansible-galera_server 101 | openstack/openstack-ansible-haproxy_server 102 | openstack/openstack-ansible-lxc_container_create 103 | openstack/openstack-ansible-lxc_hosts 104 | openstack/openstack-ansible-memcached_server 105 | openstack/openstack-ansible-openstack_hosts 106 | openstack/openstack-ansible-openstack_openrc 107 | openstack/openstack-ansible-ops 108 | openstack/openstack-ansible-os_aodh 109 | openstack/openstack-ansible-os_barbican 110 | openstack/openstack-ansible-os_ceilometer 111 | openstack/openstack-ansible-os_cinder 112 | openstack/openstack-ansible-os_cloudkitty 113 | openstack/openstack-ansible-os_designate 114 | openstack/openstack-ansible-os_glance 115 | openstack/openstack-ansible-os_gnocchi 116 | openstack/openstack-ansible-os_heat 117 | openstack/openstack-ansible-os_horizon 118 | openstack/openstack-ansible-os_ironic 119 | openstack/openstack-ansible-os_keystone 120 | openstack/openstack-ansible-os_magnum 121 | openstack/openstack-ansible-os_neutron 122 | openstack/openstack-ansible-os_nova 123 | openstack/openstack-ansible-os_octavia 124 | openstack/openstack-ansible-os_rally 125 | openstack/openstack-ansible-os_swift 126 | openstack/openstack-ansible-os_tacker 127 | openstack/openstack-ansible-os_tempest 128 | openstack/openstack-ansible-os_trove 129 | openstack/openstack-ansible-plugins 130 | openstack/openstack-ansible-rabbitmq_server 131 | openstack/openstack-ansible-repo_server 132 | openstack/openstack-ansible-specs 133 | openstack/openstack-doc-tools 134 | openstack/openstackclient 135 | openstack/openstackdocstheme 136 | openstack/openstacksdk 137 | openstack/os-api-ref 138 | openstack/os-apply-config 139 | openstack/os-brick 140 | openstack/os-client-config 141 | openstack/os-collect-config 142 | openstack/os-refresh-config 143 | openstack/os-service-types 144 | openstack/os-testr 145 | openstack/os-traits 146 | openstack/os-vif 147 | openstack/osc-lib 148 | openstack/oslo.cache 149 | 
openstack/oslo.concurrency 150 | openstack/oslo.config 151 | openstack/oslo.context 152 | openstack/oslo.db 153 | openstack/oslo.i18n 154 | openstack/oslo.limit 155 | openstack/oslo.log 156 | openstack/oslo.messaging 157 | openstack/oslo.metrics 158 | openstack/oslo.middleware 159 | openstack/oslo.policy 160 | openstack/oslo.privsep 161 | openstack/oslo.reports 162 | openstack/oslo.rootwrap 163 | openstack/oslo.serialization 164 | openstack/oslo.service 165 | openstack/oslo.upgradecheck 166 | openstack/oslo.utils 167 | openstack/oslo.versionedobjects 168 | openstack/oslo.vmware 169 | openstack/oslotest 170 | openstack/osprofiler 171 | openstack/ovsdbapp 172 | openstack/placement 173 | openstack/pycadf 174 | openstack/pymod2pkg 175 | openstack/python-barbicanclient 176 | openstack/python-blazarclient 177 | openstack/python-brick-cinderclient-ext 178 | openstack/python-cinderclient 179 | openstack/python-cyborgclient 180 | openstack/python-designateclient 181 | openstack/python-freezerclient 182 | openstack/python-glanceclient 183 | openstack/python-heatclient 184 | openstack/python-ironic-inspector-client 185 | openstack/python-ironicclient 186 | openstack/python-keystoneclient 187 | openstack/python-magnumclient 188 | openstack/python-manilaclient 189 | openstack/python-masakariclient 190 | openstack/python-mistralclient 191 | openstack/python-monascaclient 192 | openstack/python-neutronclient 193 | openstack/python-novaclient 194 | openstack/python-octaviaclient 195 | openstack/python-openstackclient 196 | openstack/python-swiftclient 197 | openstack/python-tackerclient 198 | openstack/python-troveclient 199 | openstack/python-vitrageclient 200 | openstack/python-watcherclient 201 | openstack/python-zaqarclient 202 | openstack/python-zunclient 203 | openstack/requirements 204 | openstack/stevedore 205 | openstack/sushy 206 | openstack/sushy-tools 207 | openstack/swift 208 | openstack/tacker 209 | openstack/tacker-horizon 210 | openstack/tap-as-a-service 211 | openstack/taskflow 212 | openstack/tempest 213 | openstack/tooz 214 | openstack/tosca-parser 215 | openstack/trove 216 | openstack/trove-tempest-plugin 217 | openstack/virtualbmc 218 | openstack/vitrage 219 | openstack/vitrage-tempest-plugin 220 | openstack/watcher 221 | openstack/watcher-dashboard 222 | openstack/watcher-tempest-plugin 223 | openstack/zaqar 224 | openstack/zaqar-tempest-plugin 225 | openstack/zaqar-ui 226 | openstack/zun 227 | openstack/zun-tempest-plugin 228 | openstack/zun-ui 229 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | fixtures>=3.0.0 # Apache-2.0/BSD 2 | Parsley>=1.2 # MIT 3 | packaging!=20.5,!=20.6,!=20.7,>=16.5 # Apache-2.0 4 | requests>=2.14.2 # Apache-2.0 5 | PyYAML>=3.12 # MIT 6 | beagle>=0.2.1 # Apache-2.0 7 | setuptools!=24.0.0,!=34.0.0,!=34.0.1,!=34.0.2,!=34.0.3,!=34.1.0,!=34.1.1,!=34.2.0,!=34.3.0,!=34.3.1,!=34.3.2,!=36.2.0,>=21.0.0 # PSF/ZPL 8 | -------------------------------------------------------------------------------- /roles/check-requirements/README.rst: -------------------------------------------------------------------------------- 1 | Check that a project's requirements match the global requirements repo. 2 | 3 | **Role Variables** 4 | 5 | .. zuul:rolevar:: zuul_work_dir 6 | :default: {{ zuul.project.src_dir }} 7 | 8 | Directory holding the project to check. 9 | 10 | .. 
zuul:rolevar:: zuul_branch 11 | :default: {{ zuul.branch }} 12 | 13 | Branch to check. 14 | -------------------------------------------------------------------------------- /roles/check-requirements/defaults/main.yaml: -------------------------------------------------------------------------------- 1 | zuul_work_dir: "{{ zuul.project.src_dir }}" 2 | zuul_branch: "{{ zuul.branch }}" 3 | -------------------------------------------------------------------------------- /roles/check-requirements/tasks/main.yaml: -------------------------------------------------------------------------------- 1 | - name: Create virtual environment 2 | command: "{{ ensure_pip_virtualenv_command }} {{ zuul_work_dir }}/venv" 3 | 4 | - name: Install openstack_requirements 5 | command: "{{ zuul_work_dir }}/venv/bin/pip3 install {{ zuul.projects['opendev.org/openstack/requirements'].src_dir }}" 6 | 7 | - name: Run requirements check script 8 | # Use command module here instead of script since Zuul gets output 9 | # with command into job-output.txt file. 10 | # Command expects the file to be at the remote system - the system 11 | # running the tests. Ask zuul variables for the path. 12 | command: "{{ zuul_work_dir }}/venv/bin/python3 {{ zuul.projects['opendev.org/openstack/requirements'].src_dir }}/playbooks/files/project-requirements-change.py {{ zuul_work_dir }} {{ zuul_branch }}" 13 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [metadata] 2 | name = openstack_requirements 3 | summary = OpenStack python dependency management tools 4 | description_file = 5 | README.rst 6 | author = OpenStack 7 | author_email = openstack-discuss@lists.openstack.org 8 | home_page = https://docs.openstack.org/requirements/latest/ 9 | classifier = 10 | Environment :: OpenStack 11 | Intended Audience :: Information Technology 12 | Intended Audience :: System Administrators 13 | License :: OSI Approved :: Apache Software License 14 | Operating System :: POSIX :: Linux 15 | Operating System :: Microsoft :: Windows 16 | Operating System :: MacOS :: MacOS X 17 | Programming Language :: Python 18 | Programming Language :: Python :: 3 19 | Programming Language :: Python :: 3.9 20 | Programming Language :: Python :: 3.10 21 | Programming Language :: Python :: 3.11 22 | Programming Language :: Python :: 3.12 23 | 24 | [files] 25 | packages = 26 | openstack_requirements 27 | 28 | [entry_points] 29 | console_scripts = 30 | edit-constraints = openstack_requirements.cmds.edit_constraint:main 31 | generate-constraints = openstack_requirements.cmds.generate:main 32 | check-conflicts = openstack_requirements.cmds.check_conflicts:main 33 | validate-constraints = openstack_requirements.cmds.validate:main 34 | validate-projects = openstack_requirements.cmds.validate_projects:main 35 | normalize-requirements = openstack_requirements.cmds.normalize_requirements:main 36 | check-python2-support = openstack_requirements.cmds.check_py2:main 37 | check-constraints = openstack_requirements.cmds.check_exists:main 38 | build-lower-constraints = openstack_requirements.cmds.build_lower_constraints:main 39 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2013 Hewlett-Packard Development Company, L.P. 
2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 12 | # implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | 16 | # THIS FILE IS MANAGED BY THE GLOBAL REQUIREMENTS REPO - DO NOT EDIT 17 | import setuptools 18 | 19 | setuptools.setup( 20 | setup_requires=['pbr>=2.0.0'], 21 | pbr=True) 22 | -------------------------------------------------------------------------------- /test-requirements.txt: -------------------------------------------------------------------------------- 1 | # NOTE: These are requirements for testing the requirements project only 2 | # See global-requirements for the actual requirements list 3 | stestr>=1.0.0 # Apache-2.0 4 | testscenarios>=0.4 # Apache-2.0/BSD 5 | testtools>=2.2.0 # MIT 6 | virtualenv>=14.0.6 # MIT 7 | bashate>=0.5.1 # Apache-2.0 8 | -------------------------------------------------------------------------------- /tools/README.txt: -------------------------------------------------------------------------------- 1 | OpenStack Requirements tools. 2 | 3 | This directory contains a number of tools that are useful to the requirements core team and OpenStack 4 | developers. 5 | 6 | babel-test.sh 7 | ------------- 8 | A tool to check for regressions with new Babel releases. 9 | 10 | build_wheels.sh 11 | --------------- 12 | 13 | Generate wheels for all of the requirements, ignoring any packages 14 | that won't build wheels so we get as many as possible. This is meant 15 | to be used on a development box combined with devpi and a wheelhouse 16 | configuration setting for pip, such as described in 17 | https://www.berrange.com/posts/2014/11/14/faster-rebuilds-for-python-virtualenv-trees/ 18 | 19 | cap.py 20 | ------ 21 | 22 | Take the output of 'pip freeze' and use the installed versions to cap requirements. 23 | 24 | check-install.py 25 | ---------------- 26 | 27 | Used in the tox environment pip-install. Only installs requirements (as opposed to 28 | test-requirements) and verifies that all console-scripts have all modules 29 | needed. 30 | 31 | code-search.sh 32 | -------------- 33 | Assuming you have a set of local git repos, grep them all for interesting things. 34 | 35 | cruft.sh 36 | -------- 37 | 38 | This script, when run from the root directory of this repository, will search 39 | the default and feature branches of all projects listed in the projects.txt 40 | file for declared dependencies, then output a list of any entries in the 41 | global-requirements.txt file which are not actual dependencies of those 42 | projects. Old dependencies which were removed from projects or which were used 43 | only for projects which have since been removed should be cleaned up, but many 44 | entries likely represent recent additions which still have pending changes to 45 | add them to one or more projects. In most cases, git pickaxe will yield the 46 | answer. 47 | 48 | grep-all.sh 49 | ----------- 50 | 51 | List a requirements specification and constraint for a given library. 52 | 53 | noop-change.sh 54 | -------------- 55 | 56 | Generate bulk no-op changes in supplied projects.
Useful if we have a risky 57 | change in global-requirements or upper-constraints and we want to test impacted 58 | projects. 59 | 60 | publish_constraints.sh 61 | ---------------------- 62 | Used in the gate! Generate the constraints files from git for publishing to a 63 | static server. 64 | 65 | what-broke.py 66 | ------------- 67 | figure out what requirements change likely broke us. 68 | -------------------------------------------------------------------------------- /tools/babel-test.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -xe 2 | 3 | pybabel extract \ 4 | --add-comments Translators: \ 5 | --msgid-bugs-address="https://bugs.launchpad.net/openstack-i18n/" \ 6 | --project=requirements --version=1 \ 7 | -k "_C:1c,2" -k "_P:1,2" \ 8 | -o babel-test/test.pot babel-test 9 | 10 | pybabel extract --no-default-keywords \ 11 | --add-comments Translators: \ 12 | --msgid-bugs-address="https://bugs.launchpad.net/openstack-i18n/" \ 13 | --project=requirements --version=1 \ 14 | -k "_LE" \ 15 | -o babel-test/test-log-error.pot babel-test 16 | 17 | # Entries to ignore 18 | REGEX="(POT-Creation-Date|Generated-By|Copyright (C) |FIRST AUTHOR )" 19 | 20 | function diff_files { 21 | local expected=$1 22 | local testfile=$2 23 | local extra 24 | 25 | # grep fails if there's no content - which is fine here. 26 | set +e 27 | extra=$(diff -u0 $expected $testfile | \ 28 | egrep -v "$REGEX" |egrep -c "^([-+][^-+#])") 29 | set -e 30 | 31 | if [ $extra -ne 0 ] ; then 32 | echo "Generation of test.pot failed." 33 | echo "Extra content is:" 34 | diff -u0 $expected $testfile | egrep -v "$REGEX" 35 | exit 1 36 | fi 37 | } 38 | 39 | diff_files babel-test/expected.pot babel-test/test.pot 40 | diff_files babel-test/expected-log-error.pot babel-test/test-log-error.pot 41 | 42 | echo "Everything fine" 43 | -------------------------------------------------------------------------------- /tools/build_wheels.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); you may 4 | # not use this file except in compliance with the License. You may obtain 5 | # a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 11 | # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the 12 | # License for the specific language governing permissions and limitations 13 | # under the License. 14 | # 15 | # Generate wheels for all of the requirements, ignoring any packages 16 | # that won't build wheels so we get as many as possible. 
This is meant 17 | # to be used on a development box combined with devpi and a wheelhouse 18 | # configuration setting for pip, such as described in 19 | # https://www.berrange.com/posts/2014/11/14/faster-rebuilds-for-python-virtualenv-trees/ 20 | # 21 | # Usage: 22 | # 23 | # install pip for the version(s) of python you want 24 | # 25 | # use each of those versions of pip to install the wheel package 26 | # pip2.7 install wheel 27 | # pip3.3 install wheel 28 | # pip3.4 install wheel 29 | # 30 | # run this script, passing those versions on the command line: 31 | # 32 | # ./tools/build_wheels.sh 2.7 3.3 3.4 33 | 34 | versions="$*" 35 | 36 | if [ -z "$versions" ] ; then 37 | echo "ERROR: Usage: $0 " 1>&2 38 | echo "Example: $0 2.7 3.3 3.4" 1>&2 39 | exit 1 40 | fi 41 | 42 | grep -v '^$\|#' global-requirements.txt | while read req 43 | do 44 | echo "Building $req" 45 | for v in $versions 46 | do 47 | pip${v} wheel "$req" 48 | done 49 | done 50 | -------------------------------------------------------------------------------- /tools/cap.py: -------------------------------------------------------------------------------- 1 | #! /usr/bin/env python 2 | 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 11 | # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the 12 | # License for the specific language governing permissions and limitations 13 | # under the License. 14 | 15 | 16 | import argparse 17 | import re 18 | 19 | import pkg_resources 20 | 21 | overrides = dict() 22 | # List of overrides needed. Ignore version in pip-freeze and use the one here 23 | # instead. Example: 24 | # suds 0.4.1 isn't pip installable but is in distribution packages 25 | # overrides['suds'] = 'suds==0.4' 26 | # apt package of libvirt-python is lower then our minimum requirement 27 | # overrides['libvirt-python'] = None 28 | 29 | 30 | def cap(requirements, frozen): 31 | """Cap requirements to version in freeze. 32 | 33 | Go through every package in requirements and try to cap. 34 | 35 | Input: two arrays of lines. 36 | Output: Array of new lines. 37 | """ 38 | output = [] 39 | for line in requirements: 40 | try: 41 | req = pkg_resources.Requirement.parse(line) 42 | specifier = str(req.specifier) 43 | if any(op in specifier for op in ['==', '~=', '<']): 44 | # if already capped, continue 45 | output.append(line) 46 | continue 47 | except ValueError: 48 | # line was a comment, continue 49 | output.append(line) 50 | continue 51 | if req.project_name in overrides: 52 | new_line = overrides[req.project_name] 53 | if new_line: 54 | output.append(overrides[req.project_name]) 55 | else: 56 | output.append(line) 57 | continue 58 | # add cap 59 | new_cap = cap_requirement(req.project_name, frozen) 60 | if new_cap: 61 | output.append(pin(line, new_cap)) 62 | else: 63 | output.append(line) 64 | return output 65 | 66 | 67 | def pin(line, new_cap): 68 | """Add new cap into existing line 69 | 70 | Don't use pkg_resources so we can preserve the comments. 
71 | """ 72 | end = None 73 | use_comma = False 74 | parts = line.split(' #') 75 | if len(split(parts[0].strip())) > 1: 76 | use_comma = True 77 | if "#" in line: 78 | # if comment 79 | end = parts[1] 80 | # cap to new max version 81 | if end: 82 | new_end = "<=%s #%s" % (new_cap, end) 83 | else: 84 | new_end = "<=%s" % new_cap 85 | if use_comma is True: 86 | return "%s,%s" % (parts[0].strip(), new_end) 87 | else: 88 | return "%s%s" % (parts[0].strip(), new_end) 89 | 90 | 91 | def split(line): 92 | return re.split('[><=]', line) 93 | 94 | 95 | def cap_requirement(requirement, frozen): 96 | # Find current version of requirement in freeze 97 | specifier = frozen.get(requirement, None) 98 | if specifier: 99 | return split(str(specifier))[-1] 100 | return None 101 | 102 | 103 | def freeze(lines): 104 | """Parse lines from freeze file into a dict. 105 | 106 | Where k:v is project_name:specifier. 107 | """ 108 | freeze = dict() 109 | 110 | for line in lines: 111 | try: 112 | req = pkg_resources.Requirement.parse(line) 113 | freeze[req.project_name] = req.specifier 114 | except ValueError: 115 | # not a valid requirement, can be a comment, blank line etc 116 | continue 117 | return freeze 118 | 119 | 120 | def main(): 121 | parser = argparse.ArgumentParser( 122 | description="Take the output of " 123 | "'pip freeze' and use the installed versions to " 124 | "caps requirements.") 125 | parser.add_argument('requirements', help='requirements file input') 126 | parser.add_argument( 127 | 'freeze', 128 | help='output of pip freeze, taken from a full tempest job') 129 | args = parser.parse_args() 130 | with open(args.requirements) as f: 131 | requirements = [line.strip() for line in f.readlines()] 132 | with open(args.freeze) as f: 133 | frozen = freeze([line.strip() for line in f.readlines()]) 134 | for line in cap(requirements, frozen): 135 | print(line) 136 | 137 | 138 | if __name__ == '__main__': 139 | main() 140 | -------------------------------------------------------------------------------- /tools/check-install.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import configparser 4 | import importlib 5 | import re 6 | import sys 7 | 8 | 9 | def main(): 10 | errors = 0 11 | pattern = re.compile(r'^(.*?)\s*=\s*([^:]*?):.*$') 12 | config = configparser.ConfigParser() 13 | config.read('setup.cfg') 14 | console_scripts = config.get('entry_points', 'console_scripts') 15 | for script in console_scripts.split('\n'): 16 | match = pattern.match(script) 17 | if match: 18 | (script, module) = match.groups() 19 | try: 20 | importlib.import_module(module) 21 | except ImportError as err: 22 | print('Imports for %s failed:\n\t%s' % (script, err)) 23 | errors += 1 24 | return 1 if errors else 0 25 | 26 | 27 | if __name__ == '__main__': 28 | sys.exit(main()) 29 | -------------------------------------------------------------------------------- /tools/code-search.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | # Licensed under the Apache License, Version 2.0 (the "License"); you may 4 | # not use this file except in compliance with the License. You may obtain 5 | # a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 11 | # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the 12 | # License for the specific language governing permissions and limitations 13 | # under the License. 14 | 15 | declare -a projects 16 | in_projects=0 17 | base=$HOME 18 | 19 | while [ $# -gt 1 ] ; do 20 | case "$1" in 21 | --prefix) 22 | prefix=$2 23 | shift 1 24 | ;; 25 | --projects) 26 | in_projects=1 27 | ;; 28 | --) 29 | break 30 | ;; 31 | *) 32 | if [ "$in_projects" == 1 ] ; then 33 | projects+=($1) 34 | else 35 | echo Unknown arg/context >&2 36 | exit 1 37 | fi 38 | ;; 39 | esac 40 | shift 1 41 | done 42 | 43 | for prj in ${projects[@]} ; do 44 | ( 45 | cd $prj>/dev/null 2>&1 && \ 46 | git grep -HEin $@ 2>/dev/null|sed -e "s,^,${prj#$prefix}:,g" 47 | ) 48 | done 49 | -------------------------------------------------------------------------------- /tools/cruft.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -ex 2 | 3 | # Copyright 2015 OpenStack Foundation 4 | # 5 | # Licensed under the Apache License, Version 2.0 (the "License"); you may 6 | # not use this file except in compliance with the License. You may obtain 7 | # a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 13 | # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the 14 | # License for the specific language governing permissions and limitations 15 | # under the License. 16 | 17 | # This script, when run from the root directory of this repository, will 18 | # search the default and feature branches of all projects listed in the 19 | # projects.txt file for declared dependencies, then output a list of any 20 | # entries in the global-requirements.txt file which are not actual 21 | # dependencies of those projects. Old dependencies which were removed 22 | # from projects or which were used only for projects which have since 23 | # been removed should be cleaned up, but many entries likely represent 24 | # recent additions which still have pending changes to add them to one 25 | # or more projects. In most cases, git pickaxe will yield the answer. 26 | 27 | # Remove the raw list if a copy already exists, since we're going to 28 | # append to it in this loop. 29 | rm -f raw-requirements.txt 30 | for PROJECT in $(cat projects.txt); do 31 | # Reuse existing clones in case this is being rerun. 32 | if [ ! -d $PROJECT ]; then 33 | mkdir -p $PROJECT 34 | # In case this makes it into a CI job, use local copies. 35 | if [ -d /opt/git/$PROJECT/.git ]; then 36 | git clone file:///opt/git/$PROJECT $PROJECT 37 | else 38 | git clone https://git.openstack.org/$PROJECT.git $PROJECT 39 | fi 40 | fi 41 | pushd $PROJECT 42 | git remote update 43 | # Loop over the default (HEAD) and any feature branches. 44 | for BRANCH in $( 45 | git branch -a \ 46 | | grep '^ remotes/origin/\(feature/\|HEAD \)' \ 47 | | cut -d' ' -f3 48 | ); do 49 | git checkout $BRANCH 50 | # These are files which are considered by the update.py script, 51 | # so check them all for the sake of completeness. 52 | for FILE in \ 53 | requirements-py2.txt \ 54 | requirements-py3.txt \ 55 | requirements.txt \ 56 | test-requirements-py2.txt \ 57 | test-requirements-py3.txt \ 58 | test-requirements.txt \ 59 | tools/pip-requires \ 60 | tools/test-requires \ 61 | doc/requirements.txt 62 | do 63 | if [ -f $FILE ]; then 64 | # Add diagnostic comments to aid debugging. 
65 | echo -e "\n# -----\n# $PROJECT $BRANCH $FILE\n# -----" \ 66 | >> ${OLDPWD}/raw-requirements.txt 67 | cat $FILE >> ${OLDPWD}/raw-requirements.txt 68 | fi 69 | done 70 | done 71 | popd 72 | done 73 | 74 | # Generate a unique set of package names from the raw list of all 75 | # project requirements filtered for the same lines ignored by the 76 | # update.py script, lower-cased with hyphens normalized to underscores. 77 | sed -e '/^\($\|#\|http:\/\/tarballs.openstack.org\/\|-e\|-f\)/d' \ 78 | -e 's/^\([^<>=! ]*\).*/\L\1/' -e s/-/_/g raw-requirements.txt \ 79 | | sort -u > all-requirements.txt 80 | 81 | # From here on, xtrace gets uselessly noisy. 82 | set +x 83 | 84 | # Loop over the set of package names from the global requirements list. 85 | for CANDIDATE in $( 86 | sed -e '/^\($\|#\)/d' -e 's/^\([^<>=!; ]*\).*/\1/' global-requirements.txt 87 | ); do 88 | # Search for the package name in the set of project requirements, 89 | # normalizing hyphens to underscores, and output the package name if 90 | # not found. 91 | grep -iq ^$(echo $CANDIDATE | sed s/-/_/g)$ all-requirements.txt \ 92 | || echo $CANDIDATE 93 | done | sort > cruft-requirements.txt 94 | 95 | # Provide a helpful summary of the results. 96 | if [ -s cruft-requirements.txt ] ; then 97 | echo -e "\nCruft entries found in global-requirements.txt:\n" 98 | cat cruft-requirements.txt 99 | else 100 | echo -e "\nSomething must be wrong--I found no cruft!!!" 101 | fi 102 | -------------------------------------------------------------------------------- /tools/fix-lower-constraints.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 11 | # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the 12 | # License for the specific language governing permissions and limitations 13 | # under the License. 14 | """ 15 | Instructions: 16 | 17 | 1. virtualenv venv 18 | 2. source venv/bin/activate 19 | 3. pip install /path/to/local/copy/of/requirements/repository 20 | 4. cd /path/to/project/to/fix 21 | 5. .../requirements/tools/fix-lower-constraints.py > new-lc.txt 22 | 6. mv new-lc.txt lower-constraints.txt 23 | 7. Update the patch and resubmit it to gerrit. 24 | """ 25 | 26 | import io 27 | 28 | from openstack_requirements import requirement 29 | 30 | 31 | def read_file(name): 32 | with io.open(name, 'r', encoding='utf-8') as f: 33 | return requirement.parse(f.read()) 34 | 35 | 36 | requirements = read_file('requirements.txt') 37 | requirements.update(read_file('test-requirements.txt')) 38 | constraints = read_file('lower-constraints.txt') 39 | 40 | output = [] 41 | 42 | for const in constraints.values(): 43 | const = const[0][0] 44 | actual = const.specifiers.lstrip('=') 45 | name = const.package.lower() 46 | if name not in requirements: 47 | # Ignore secondary dependencies 48 | output.append(const.to_line()) 49 | continue 50 | for req, _ in requirements[name]: 51 | min = [ 52 | s 53 | for s in req.specifiers.split(',') 54 | if '>' in s 55 | ] 56 | if not min: 57 | # If there is no lower bound, assume the constraint is 58 | # right. 
59 | output.append(const.to_line()) 60 | continue 61 | required = min[0].lstrip('>=') 62 | if required != actual: 63 | output.append('{}=={}\n'.format( 64 | const.package, required)) 65 | else: 66 | output.append(const.to_line()) 67 | 68 | for line in sorted(output, key=lambda x: x.lower()): 69 | if not line.strip(): 70 | continue 71 | print(line.rstrip()) 72 | -------------------------------------------------------------------------------- /tools/functions: -------------------------------------------------------------------------------- 1 | # Licensed under the Apache License, Version 2.0 (the "License"); you may 2 | # not use this file except in compliance with the License. You may obtain 3 | # a copy of the License at 4 | # 5 | # http://www.apache.org/licenses/LICENSE-2.0 6 | # 7 | # Unless required by applicable law or agreed to in writing, software 8 | # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 9 | # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the 10 | # License for the specific language governing permissions and limitations 11 | # under the License. 12 | 13 | # Shared functions for shell scripts 14 | 15 | function enable_venv () { 16 | BASE="${1}" 17 | if [[ -z "${VIRTUAL_ENV}" ]]; then 18 | if [[ ! -d ${BASE}/.tox/venv ]]; then 19 | (cd ${BASE} && tox -e venv --notest > /dev/null) 20 | fi 21 | source ${BASE}/.tox/venv/bin/activate 22 | fi 23 | } 24 | 25 | # Search for requirements used in openstack/ repos 26 | function search_reqs () { 27 | beagle search --ignore-case --file '(.*requirement.*|setup.cfg)' "${1}" | \ 28 | grep "openstack/" | \ 29 | # Sometimes we get false positives from a package name being a 30 | # substring within another package. This filter isn't working right 31 | # though. This just means we might miss a package that isn't being 32 | # used. 33 | # grep "${1}[ |\!|>]" | \ 34 | grep -v "openstack.requirements" 35 | } 36 | 37 | # Get a list of all package names by filtering out comments, blank lines, and 38 | # any package modifiers like version constraints. 39 | function get_tracked_requirements () { 40 | reqs=$(sed 's/[!|>|<|=|;].*//g' global-requirements.txt | 41 | sed 's/ .*//g' | 42 | sed '/^#/d' | 43 | sed '/^$/d' | 44 | sort | uniq) 45 | } 46 | -------------------------------------------------------------------------------- /tools/get-health-report.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | # Licensed under the Apache License, Version 2.0 (the "License"); you may 4 | # not use this file except in compliance with the License. You may obtain 5 | # a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 11 | # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the 12 | # License for the specific language governing permissions and limitations 13 | # under the License. 
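Editorial aside: the get_tracked_requirements helper defined in tools/functions above is a fairly dense chain of sed expressions. The following is a rough Python equivalent of what that pipeline does; it is an illustration only and is not a file shipped in this repository.

import re

def tracked_requirements(path='global-requirements.txt'):
    """Rough Python rendering of the get_tracked_requirements() sed pipeline."""
    names = set()
    with open(path) as fh:
        for line in fh:
            line = line.strip()
            # mirrors sed '/^#/d' and '/^$/d': drop comments and blank lines
            if not line or line.startswith('#'):
                continue
            # mirrors 's/[!|>|<|=|;].*//g' and 's/ .*//g': cut the line at the
            # first version specifier, environment marker, or space
            name = re.split(r'[!><=; ]', line, maxsplit=1)[0]
            if name:
                names.add(name)
    return sorted(names)

Both forms reduce each global-requirements.txt entry to a bare, de-duplicated package name so that the loops in the scripts below can query one package at a time.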
14 | 15 | # Checks all of our tracked packages for any issues 16 | 17 | TOOLSDIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" 18 | BASEDIR=$(dirname ${TOOLSDIR}) 19 | 20 | source ${TOOLSDIR}/functions 21 | 22 | # Make sure we are using our venv 23 | enable_venv "${BASEDIR}" 24 | 25 | update= 26 | if [[ "$#" -eq 1 ]]; then 27 | update="${1}" 28 | fi 29 | 30 | # Save off our current timestamp for use later 31 | current=$(date +%s) 32 | 33 | # Loop through each package to get details and check for issues 34 | get_tracked_requirements 35 | for req in $reqs; do 36 | count=$(search_reqs ${req} | 37 | grep -v " openstack/${req} " | 38 | wc -l) 39 | 40 | metadata=$(curl -s -L "https://pypi.org/pypi/$req/json") 41 | summary=$(echo "${metadata}" | jq -r '.info.summary') 42 | last_release=$(echo "${metadata}" | jq -r '.info.version') 43 | release_date=$(echo "${metadata}" | jq -r ".releases.\"${last_release}\" | .[0].upload_time") 44 | 45 | # Print basic package information 46 | echo "${req}" 47 | if [[ "${summary}" != "" ]]; then 48 | echo " Summary: ${summary}" 49 | fi 50 | echo " Used by repos: ${count}" 51 | echo " Last release: ${last_release}" 52 | echo " Release date: ${release_date}" 53 | 54 | # Check for various things to warn about 55 | package_name=$(echo "${metadata}" | jq -r '.info.name') 56 | if [[ "${req}" != "${package_name}" ]]; then 57 | echo " WARNING: In g-r as ${req} but actual name is ${package_name}" 58 | fi 59 | 60 | py3=$(echo "${metadata}" | \ 61 | jq -r '.info.classifiers | .[]' | \ 62 | grep "Programming Language :: Python :: 3") 63 | if [[ -z ${py3} ]]; then 64 | echo " WARNING: No python 3 classifier in metadata" 65 | fi 66 | 67 | release=$(date -d $release_date +%s) 68 | seconds_since_release=$((current-release)) 69 | years_since_release=$((seconds_since_release/60/60/24/365)) 70 | message=$(echo "It's been ${years_since_release} years since last release") 71 | if [[ ${years_since_release} -gt 4 ]]; then 72 | echo " !!WARNING!! ${message}" 73 | elif [[ ${years_since_release} -gt 2 ]]; then 74 | echo " WARNING ${message}" 75 | fi 76 | done 77 | -------------------------------------------------------------------------------- /tools/grep-all.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | # Licensed under the Apache License, Version 2.0 (the "License"); you may 4 | # not use this file except in compliance with the License. You may obtain 5 | # a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 11 | # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the 12 | # License for the specific language governing permissions and limitations 13 | # under the License. 
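Editorial aside: get-health-report.sh above drives the PyPI JSON API with curl and jq. For readers who find the jq expressions hard to follow, this is a hedged Python sketch of the same per-package checks; it is illustrative only and is not a tool that exists in this repository.

import json
import urllib.request

def health_report(name):
    # Same endpoint the script queries with curl
    with urllib.request.urlopen('https://pypi.org/pypi/%s/json' % name) as resp:
        info = json.load(resp)['info']
    print(name)
    print('  Last release: %s' % info['version'])
    if info['name'] != name:
        print('  WARNING: tracked as %s but actual name is %s'
              % (name, info['name']))
    classifiers = info.get('classifiers') or []
    if not any(c.startswith('Programming Language :: Python :: 3')
               for c in classifiers):
        print('  WARNING: no python 3 classifier in metadata')

The shell script additionally counts how many repositories use the package and warns when the last release is more than two (or four) years old; those checks follow the same pattern.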
14 | 15 | # Note(tonyb): Expand HEAD into something that's hopefully more human 16 | # readable 17 | declare -a refs=($(git describe --always) origin/master) 18 | refs+=($(git branch --no-color -r --list 'origin/stable/*' | sort -r -t/ -k 3)) 19 | 20 | if [ "$1" == "--with-eol" ] ; then 21 | refs+=($(git tag --list '*-eol' | sort -r)) 22 | shift 1 23 | fi 24 | 25 | if [ $# -ne 1 ]; then 26 | echo "Usage: $0 dependency-name" 1>&2 27 | exit 1 28 | fi 29 | 30 | function search { 31 | git grep -hEi "^${1}[ =><]" ${2} -- ${3} 2>/dev/null 32 | } 33 | 34 | printf '\nRequirements\n------------\n' 35 | for ref in ${refs[@]}; do 36 | printf "%-22s: %s\n" $ref "$(search $1 $ref global-requirements.txt)" 37 | done 38 | 39 | printf '\nConstraints\n-----------\n' 40 | for ref in ${refs[@]}; do 41 | printf "%-22s: %s\n" $ref "$(search $1 $ref upper-constraints.txt)" 42 | done 43 | -------------------------------------------------------------------------------- /tools/lint.py: -------------------------------------------------------------------------------- 1 | #! /usr/bin/env python 2 | 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 11 | # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the 12 | # License for the specific language governing permissions and limitations 13 | # under the License. 14 | 15 | import os 16 | 17 | GLOBAL_REQS = os.path.join( 18 | os.path.dirname(os.path.realpath(__file__)), 19 | '..', 20 | 'global-requirements.txt', 21 | ) 22 | 23 | 24 | def sort() -> None: 25 | """Sort global-requirements, respecting sections.""" 26 | section_headers: dict[str, str] = {} 27 | section_deps: dict[str, list[tuple[str, str | None]]] = {} 28 | section: str = '' 29 | deps: list[tuple[str, str | None]] = [] 30 | comment: str = '' 31 | 32 | with open(GLOBAL_REQS) as fh: 33 | for line in fh.readlines(): 34 | if not line.strip(): 35 | continue 36 | 37 | if line.startswith('## section:'): 38 | if section: 39 | section_deps[section] = sorted( 40 | deps, key=lambda x: x[0].lower() 41 | ) 42 | deps = [] 43 | 44 | section = line.removeprefix('## section:') 45 | section_headers[section] = line 46 | continue 47 | 48 | if line.startswith('##'): 49 | section_headers[section] += line 50 | continue 51 | 52 | if line.startswith('#'): 53 | comment += line 54 | continue 55 | 56 | deps.append((line, comment or None)) 57 | comment = '' 58 | 59 | section_deps[section] = sorted( 60 | deps, key=lambda x: x[0].lower() 61 | ) 62 | 63 | with open(GLOBAL_REQS, 'w') as fh: 64 | for i, section in enumerate(section_deps): 65 | if i != 0: 66 | fh.write('\n') 67 | 68 | fh.write(section_headers[section]) 69 | fh.write('\n') 70 | 71 | for dep, dep_comment in section_deps[section]: 72 | if dep_comment: 73 | fh.write(dep_comment) 74 | 75 | fh.write(dep) 76 | 77 | 78 | if __name__ == '__main__': 79 | sort() 80 | -------------------------------------------------------------------------------- /tools/list-unused-packages.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | # Licensed under the Apache License, Version 2.0 (the "License"); you may 4 | # not use this file except in compliance with the License.
You may obtain 5 | # a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 11 | # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the 12 | # License for the specific language governing permissions and limitations 13 | # under the License. 14 | 15 | # Lists any packages in global-constraints that appear to no longer be used 16 | 17 | TOOLSDIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" 18 | BASEDIR=$(dirname ${TOOLSDIR}) 19 | 20 | source ${TOOLSDIR}/functions 21 | 22 | # Make sure we are using our venv 23 | enable_venv "${BASEDIR}" 24 | 25 | update= 26 | if [[ "$#" -eq 1 ]]; then 27 | update="${1}" 28 | fi 29 | 30 | # Loop through each package and check for its presence in any repo's 31 | # requirements files other than mentions in its own repo 32 | get_tracked_requirements 33 | for req in $reqs; do 34 | count=$(search_reqs ${req} | 35 | grep -v " openstack/${req} " | 36 | wc -l) 37 | if [[ ${count} -eq 0 ]]; then 38 | echo "${req}" 39 | 40 | # See if we should clean up the requirements files 41 | if [[ "${update}" == "--update" ]]; then 42 | sed -i "/${req}/d" global-requirements.txt 43 | sed -i "/${req}/d" upper-constraints.txt 44 | fi 45 | fi 46 | done 47 | -------------------------------------------------------------------------------- /tools/noop-change.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | # Licensed under the Apache License, Version 2.0 (the "License"); you may 4 | # not use this file except in compliance with the License. You may obtain 5 | # a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 11 | # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the 12 | # License for the specific language governing permissions and limitations 13 | # under the License. 14 | 15 | function cleanup { 16 | # Don't abort early if there's a problem in the clean up 17 | set +e 18 | git checkout $start_branch 19 | git branch -D ${topic} 20 | } 21 | 22 | function usage { 23 | ( 24 | if [ -n "$1" ] ; then 25 | echo $0 $1 26 | fi 27 | echo $0 '-p [project] -t [topic] -c [change] -s [style]' 28 | echo ' project: The directory for the openstack project' 29 | echo ' topic : The topic as passed to git review' 30 | echo ' change : The change that the no-op change depends on if any' 31 | echo ' style : the style of change [doc|python|releasenotes]' 32 | ) >&2 33 | exit 1 34 | } 35 | 36 | project='' 37 | topic='' 38 | change='' 39 | style='' 40 | verbose=0 41 | 42 | while getopts vp:t:c:s: opt ; do 43 | case $opt in 44 | p) 45 | project=${OPTARG/=} 46 | ;; 47 | t) 48 | topic=${OPTARG/=} 49 | ;; 50 | c) 51 | change=${OPTARG/=} 52 | ;; 53 | s) 54 | style=${OPTARG/=} 55 | ;; 56 | v) 57 | verbose=$((verbose + 1)) 58 | ;; 59 | \?) 60 | usage 61 | ;; 62 | esac 63 | done 64 | 65 | if [ -z "$project" ] ; then 66 | usage 'project missing!' 67 | fi 68 | 69 | if [ -z "$topic" ] ; then 70 | usage 'topic missing!' 
71 | # NOTE(tonyb): if the topic with white space and /'s removed == itself then it 72 | # didn't contain any bad characters 73 | elif [ "${topic/[ \/]/}" != "${topic}" ] ; then 74 | echo "topic [$topic] contains white space or /'s" 75 | exit 1 76 | fi 77 | 78 | # TODO(tonyb): Do we need to validate that change looks like a change ID? 79 | # With zuulv3 it could in fact be a url or anything so it'd be 80 | # hard to validate 81 | 82 | if [ -z "$style" ] ; then 83 | usage 'style missing!' 84 | elif [[ ! 'releasenotes doc python' =~ "$style" ]] ; then 85 | usage "style $style invalid" 86 | fi 87 | 88 | if [ $verbose -ge 1 ] ; then 89 | printf '%-10s: %s\n' 'Project' "$project" 90 | printf '%-10s: %s\n' 'Topic' "$topic" 91 | printf '%-10s: %s\n' 'Change' "$change" 92 | printf '%-10s: %s\n' 'Style' "$style" 93 | printf '%-10s: %s\n' 'Verbosity' "$verbose" 94 | fi 95 | 96 | [ $verbose -ge 2 ] && set -x 97 | 98 | cd $project 99 | 100 | # FIXME(tonyb): Save the current branch 101 | start_branch=$(git rev-parse --symbolic --abbrev-ref HEAD) 102 | if [ "$start_branch" == "$topic" ] ; then 103 | echo $0 Current git branch is the same as the topic, aborting >&2 104 | exit 1 105 | fi 106 | 107 | # NOTE(tonyb): git diff exits with 0 if the tree is clean 108 | if ! git diff --exit-code -s ; then 109 | echo $0 Current working tree is dirty, aborting >&2 110 | exit 1 111 | fi 112 | 113 | # The real work starts here so now let's get a bit careful and exit if a 114 | # command fails 115 | set -e 116 | 117 | git branch -D ${topic} || true 118 | # NOTE(tonyb): We don't really need to switch branches; we could do it all in 119 | # the current branch but this is easier. 120 | git checkout -b ${topic} -t origin/master 121 | 122 | # Install the clean up handler 123 | trap cleanup EXIT 124 | 125 | case "$style" in 126 | releasenotes|doc) 127 | file="${style}/source/index.rst" 128 | [ "$verbose" -ge 3 ] && git diff 129 | echo -e '\n\n.. # no-op test' >> $file 130 | git add $file 131 | ;; 132 | python) 133 | # TODO(tonyb): work out a 99% safe way to modify python code 134 | echo $0 python style change isn\'t finished 135 | # NOTE(tonyb): The pipeline works like: 136 | # Find all the __init__.py files that contain something. 137 | # We know this has to be code of some sort or they'd fail pep8. 138 | # Remove tests 139 | # Tests might trick the gate into using a subset of jobs 140 | # Sort by the 3rd path element. 141 | # project/dir/__init__.py this will mean that paths that don't have a 142 | # dir component will sort to the top.
This (I hope) means that we'll 143 | # prefer the project __init__.py if it exists 144 | # Grab only the first item 145 | # We could store this in an array and do something smarter if we wanted 146 | file=$(find * -type f -name __init__.py -not -empty | \ 147 | grep -v tests | \ 148 | sort -t / -k+3 |\ 149 | head -n 1) 150 | if [ -n "$file" ] ; then 151 | echo -e '\n\n# no-op test' >> ${file} 152 | [ "$verbose" -ge 3 ] && git diff 153 | git add $file 154 | else 155 | echo $0 failed to find file to patch for $style 156 | exit 1 157 | fi 158 | ;; 159 | esac 160 | 161 | commit_msg="WiP: Do not merge - $topic" 162 | if [ -n "$change" ] ; then 163 | commit_msg+=" 164 | 165 | Depends-On: $change" 166 | fi 167 | 168 | git commit -m "$commit_msg" 169 | git review -t ${topic} 170 | # TODO(tonyb): Check for vote-a-tron and -W the change if it's available 171 | -------------------------------------------------------------------------------- /tools/publish_constraints.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | # Licensed under the Apache License, Version 2.0 (the "License"); you may 4 | # not use this file except in compliance with the License. You may obtain 5 | # a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 11 | # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the 12 | # License for the specific language governing permissions and limitations 13 | # under the License. 14 | 15 | function get_from_git { 16 | ref=$1 17 | series=$2 18 | path=${3:-publish/constraints/upper} 19 | 20 | git show ${ref}:upper-constraints.txt > ${path}/${series}.txt 21 | } 22 | 23 | # Make the directory tree, don't fail if it already exists 24 | mkdir -p publish/constraints/upper 25 | # Clear out any stale files, don't fail if we just created it 26 | rm publish/constraints/upper/* || true 27 | 28 | case "$ZUUL_BRANCH" in 29 | stable/*) 30 | series=$(basename "$ZUUL_BRANCH") 31 | get_from_git origin/$ZUUL_BRANCH $series 32 | ;; 33 | master) 34 | # NOTE(tonyb): Publish EOL'd constraints files. We do this here as a 35 | # quick way to publish the data. It can be removed anytime after the first 36 | # successful run 37 | for tag in juno-eol kilo-eol liberty-eol mitaka-eol newton-eol ; do 38 | # trim the '-eol' 39 | series=${tag::-4} 40 | get_from_git $tag $series 41 | done 42 | 43 | for series in queens rocky ; do 44 | if ! git rev-parse origin/stable/$series ; then 45 | get_from_git origin/master $series 46 | fi 47 | done 48 | get_from_git origin/master master 49 | ;; 50 | esac 51 | -------------------------------------------------------------------------------- /tools/what-broke.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | # 3 | # Copyright 2015 Hewlett-Packard Development Company, L.P. 4 | # 5 | # Licensed under the Apache License, Version 2.0 (the "License"); you may 6 | # not use this file except in compliance with the License. You may obtain 7 | # a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 13 | # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the 14 | # License for the specific language governing permissions and limitations 15 | # under the License. 16 | 17 | """what-broke.py - figure out what requirements change likely broke us. 18 | 19 | Monday morning, 6am. Loading up zuul status page, and realize there is 20 | a lot of red in the gate. Get second cup of coffee. Oh, some library 21 | must have released a bad version. Man, what released recently? 22 | 23 | This script attempts to give that answer by programmatically providing 24 | a list of everything in global-requirements that released recently, in 25 | descending time order. 26 | 27 | This does *not* handle the 2nd order dependency problem (in order to 28 | do that we'd have to install the world as well, this is purely a 29 | metadata lookup tool). If we have regularly problematic 2nd order 30 | dependencies, add them to the list at the end in the code to be 31 | checked. 32 | 33 | """ 34 | 35 | import argparse 36 | import datetime 37 | import json 38 | import sys 39 | import urllib.request as urlreq 40 | 41 | import pkg_resources 42 | 43 | 44 | class Release(object): 45 | name = "" 46 | version = "" 47 | filename = "" 48 | released = "" 49 | 50 | def __init__(self, name, version, filename, released): 51 | self.name = name 52 | self.version = version 53 | self.filename = filename 54 | self.released = released 55 | 56 | def __repr__(self): 57 | return "<Release %s %s %s>" % (self.name, self.version, self.released) 58 | 59 | 60 | def _parse_pypi_released(datestr): 61 | return datetime.datetime.strptime(datestr, "%Y-%m-%dT%H:%M:%S") 62 | 63 | 64 | def _package_name(line): 65 | return pkg_resources.Requirement.parse(line).project_name 66 | 67 | 68 | def get_requirements(): 69 | reqs = [] 70 | with open('global-requirements.txt') as f: 71 | for line in f.readlines(): 72 | # skip the comment or empty lines 73 | if not line or line.startswith(('#', '\n')): 74 | continue 75 | # get rid of env markers, they are not relevant for our purposes. 76 | line = line.split(';')[0] 77 | reqs.append(_package_name(line)) 78 | return reqs 79 | 80 | 81 | def get_releases_for_package(name, since): 82 | 83 | """Get the release history from pypi 84 | 85 | Use the json API to get the release history from pypi. The 86 | returned json structure includes a 'releases' dictionary which has 87 | keys that are release numbers and the value is an array of 88 | uploaded files. 89 | 90 | While we don't have a 'release time' per se (only the upload time 91 | on each of the files), we'll consider the timestamp on the first 92 | source file found (which will be a .zip or tar.gz typically) to be 93 | 'release time'. This is inexact, but should be close enough for 94 | our purposes.
95 | 96 | """ 97 | f = urlreq.urlopen("https://pypi.org/pypi/%s/json" % name) 98 | jsondata = f.read() 99 | data = json.loads(jsondata) 100 | releases = [] 101 | for relname, rellist in data['releases'].items(): 102 | for rel in rellist: 103 | if rel['python_version'] == 'source': 104 | when = _parse_pypi_released(rel['upload_time']) 105 | # for speed, only care about when > since 106 | if when < since: 107 | continue 108 | 109 | releases.append( 110 | Release( 111 | name, 112 | relname, 113 | rel['filename'], 114 | when)) 115 | break 116 | return releases 117 | 118 | 119 | def get_releases_since(reqs, since): 120 | all_releases = [] 121 | for req in reqs: 122 | all_releases.extend(get_releases_for_package(req, since)) 123 | # return these in a sorted order from newest to oldest 124 | sorted_releases = sorted(all_releases, 125 | key=lambda x: x.released, 126 | reverse=True) 127 | return sorted_releases 128 | 129 | 130 | def parse_args(): 131 | parser = argparse.ArgumentParser( 132 | description=( 133 | 'List recent releases of items in global requirements ' 134 | 'to look for possible breakage')) 135 | parser.add_argument('-s', '--since', type=int, 136 | default=14, 137 | help='look back ``since`` days (default 14)') 138 | return parser.parse_args() 139 | 140 | 141 | def main(): 142 | opts = parse_args() 143 | since = datetime.datetime.today() - datetime.timedelta(days=opts.since) 144 | print("Looking for requirements releases since %s" % since) 145 | reqs = get_requirements() 146 | # additional sensitive requirements 147 | reqs.append('tox') 148 | reqs.append('pycparser') 149 | releases = get_releases_since(reqs, since) 150 | for rel in releases: 151 | print(rel) 152 | 153 | 154 | if __name__ == '__main__': 155 | sys.exit(main()) 156 | -------------------------------------------------------------------------------- /tox.ini: -------------------------------------------------------------------------------- 1 | [tox] 2 | minversion = 4.11.0 3 | envlist = validate,py3,pep8,pip-install 4 | ignore_basepython_conflict=true 5 | 6 | [testenv] 7 | basepython = python3 8 | usedevelop = True 9 | deps = -c{toxinidir}/upper-constraints.txt 10 | -r{toxinidir}/requirements.txt 11 | -r{toxinidir}/test-requirements.txt 12 | commands = 13 | stestr run {posargs} 14 | 15 | [testenv:py39-check-uc] 16 | allowlist_externals = 17 | check-conflicts 18 | basepython = python3.9 19 | deps = -r{toxinidir}/upper-constraints.txt 20 | commands = check-conflicts {toxinidir}/upper-constraints.txt {toxinidir}/upper-constraints-xfails.txt 21 | 22 | [testenv:py310-check-uc] 23 | basepython = python3.10 24 | deps = -r{toxinidir}/upper-constraints.txt 25 | commands = check-conflicts {toxinidir}/upper-constraints.txt {toxinidir}/upper-constraints-xfails.txt 26 | 27 | [testenv:py311-check-uc] 28 | basepython = python3.11 29 | deps = -r{toxinidir}/upper-constraints.txt 30 | commands = check-conflicts {toxinidir}/upper-constraints.txt {toxinidir}/upper-constraints-xfails.txt 31 | 32 | [testenv:py312-check-uc] 33 | basepython = python3.12 34 | deps = -r{toxinidir}/upper-constraints.txt 35 | commands = check-conflicts {toxinidir}/upper-constraints.txt {toxinidir}/upper-constraints-xfails.txt 36 | 37 | [testenv:py313-check-uc] 38 | basepython = python3.13 39 | deps = -r{toxinidir}/upper-constraints.txt 40 | commands = check-conflicts {toxinidir}/upper-constraints.txt {toxinidir}/upper-constraints-xfails.txt 41 | 42 | [testenv:venv] 43 | commands = {posargs} 44 | 45 | [testenv:generate] 46 | allowlist_externals = 47 |
generate-constraints 48 | description = Regenerates upper-constraints.txt 49 | # Generate needs an unconstrained install to get new dependencies 50 | deps = -r{toxinidir}/requirements.txt 51 | -r{toxinidir}/test-requirements.txt 52 | commands = generate-constraints {posargs: -d denylist.txt -r global-requirements.txt -p python3.9 -p python3.10 -p python3.11 -p python3.12 -p python3.13 > upper-constraints.txt} 53 | 54 | [testenv:validate] 55 | allowlist_externals = 56 | validate-constraints 57 | commands = 58 | validate-constraints {toxinidir}/global-requirements.txt {toxinidir}/upper-constraints.txt {toxinidir}/denylist.txt 59 | 60 | [testenv:validate-projects] 61 | allowlist_externals = 62 | validate-projects 63 | commands = validate-projects {toxinidir}/projects.txt 64 | 65 | # TODO remove once zuul reconfigured to run linters on gate 66 | [testenv:pep8] 67 | deps = {[testenv:linters]deps} 68 | allowlist_externals = 69 | bash 70 | commands = 71 | flake8 72 | bash -c "find {toxinidir}/tools \ 73 | -type f \ 74 | -name \*.sh \ 75 | -print0 | xargs -0 bashate -v -iE006,E010" 76 | bash -c 'sed -e "s,===,==," upper-constraints.txt > {envtmpdir}/safety-check.txt' 77 | -safety check --json -r {envtmpdir}/safety-check.txt 78 | 79 | [testenv:linters] 80 | description = Perform linting 81 | deps = 82 | hacking>=1.0.0 83 | bashate>=0.5.1 84 | safety 85 | allowlist_externals = 86 | bash 87 | commands = 88 | flake8 89 | bash -c "find {toxinidir}/tools \ 90 | -type f \ 91 | -name \*.sh \ 92 | -print0 | xargs -0 bashate -v -iE006,E010" 93 | bash -c 'sed -e "s,===,==," upper-constraints.txt > {envtmpdir}/safety-check.txt' 94 | -safety check --json -r {envtmpdir}/safety-check.txt 95 | 96 | [testenv:bindep] 97 | # Do not install any requirements. We want this to be fast and work even if 98 | # system dependencies are missing, since it's used to tell you what system 99 | # dependencies are missing! This also means that bindep must be installed 100 | # separately, outside of the requirements files, and develop mode disabled 101 | # explicitly to avoid unnecessarily installing the checked-out repo too (this 102 | # further relies on "tox.skipsdist = True" above). 103 | deps = bindep 104 | commands = bindep test 105 | usedevelop = False 106 | 107 | [testenv:docs] 108 | allowlist_externals = 109 | sphinx-build 110 | deps = -c{env:TOX_CONSTRAINTS_FILE:{toxinidir}/upper-constraints.txt} 111 | -r{toxinidir}/doc/requirements.txt 112 | commands = sphinx-build -W -b html doc/source doc/build/html 113 | 114 | [testenv:pip-install] 115 | recreate = True 116 | deps = . 117 | commands = python {toxinidir}/tools/check-install.py 118 | 119 | [testenv:requirements-check] 120 | allowlist_externals = 121 | {toxinidir}/playbooks/files/project-requirements-change.py 122 | deps = -r{toxinidir}/requirements.txt 123 | commands = 124 | {toxinidir}/playbooks/files/project-requirements-change.py --local {posargs} 125 | 126 | [testenv:babel] 127 | # Use the local upper-constraints.txt file 128 | allowlist_externals = 129 | {toxinidir}/tools/babel-test.sh 130 | deps = Babel 131 | commands = {toxinidir}/tools/babel-test.sh 132 | 133 | [flake8] 134 | exclude = .venv,.git,.tox,dist,doc,*egg,build 135 | -------------------------------------------------------------------------------- /upper-constraints-xfails.txt: -------------------------------------------------------------------------------- 1 | # Format is 2 | # ==; 3 | # e.g. pyopenssl==17.5.0;mitmproxy 4 | --------------------------------------------------------------------------------
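One closing note on upper-constraints-xfails.txt: the placeholder text in its format comment did not survive this export, but judging from the single example entry (pyopenssl==17.5.0;mitmproxy) each non-comment line pairs a pinned requirement with a note after a semicolon, and the file is consumed by check-conflicts in the *-check-uc tox environments shown above. A small illustrative parser, with the field names being assumptions rather than anything defined by this repository, could read it like this:

def parse_xfails(path='upper-constraints-xfails.txt'):
    """Parse '<package>==<version>;<note>' style lines (assumed format)."""
    xfails = []
    with open(path) as fh:
        for line in fh:
            line = line.strip()
            # skip the comment header and blank lines
            if not line or line.startswith('#'):
                continue
            spec, _, note = line.partition(';')
            package, _, version = spec.partition('==')
            xfails.append((package.strip(), version.strip(), note.strip()))
    return xfails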