├── autospec ├── keyid_blocklist ├── configure_blacklist ├── __init__.py ├── qt_modules ├── pkg_scan.py ├── download.py ├── license_translations ├── license_blacklist ├── translate.dic ├── logcheck.py ├── pypidata.py ├── check.py ├── license.py ├── git.py ├── specdescription.py ├── util.py ├── abireport.py ├── commitmessage.py ├── build.py └── ignored_commands ├── MANIFEST.in ├── .gitignore ├── requirements.txt ├── tests ├── testfiles │ └── pkg_integrity │ │ ├── tappy-0.9.2.tar.gz │ │ ├── xattr-0.9.1.tar.gz │ │ ├── quagga-1.1.0.tar.gz │ │ ├── SDL_gfx-2.0.25.tar.gz │ │ ├── bad_quagga-1.1.0.tar.gz │ │ ├── hoe-debugging-1.2.1.gem │ │ ├── pygobject-3.24.0.tar.xz │ │ ├── pkg-config-0.29.1.tar.gz │ │ ├── testkeys │ │ ├── 023A4420C7EC6914.pkey │ │ └── 6FE57CA8C1A4AEA6.pkey │ │ ├── qtspeech-everywhere-src-5.12.4.tar.xz │ │ ├── pkg-config-0.29.1.tar.gz.asc │ │ └── quagga-1.1.0.tar.gz.asc ├── test_general.py ├── README.rst ├── test_config.py ├── test_util.py ├── builderrors ├── test_download.py ├── test_check.py ├── test_tarball.py ├── test_commitmessage.py ├── test_abireport.py ├── test_specdescription.py └── test_license.py ├── .github ├── actions │ └── clearlinux-latest-action │ │ ├── Dockerfile │ │ ├── entrypoint.sh │ │ └── action.yml └── workflows │ └── tests.yml ├── setup.cfg ├── README.md ├── setup.py └── Makefile /autospec/keyid_blocklist: -------------------------------------------------------------------------------- 1 | 59FCF207FEA7F445 2 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include README.rst 2 | include LICENSE 3 | -------------------------------------------------------------------------------- /autospec/configure_blacklist: -------------------------------------------------------------------------------- 1 | whether double complex BLAS can be used 2 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | __pycache__ 2 | *.pyc 3 | *~ 4 | *.swp 5 | tags 6 | .coverage 7 | htmlcov 8 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | flake8>=3.4.0 2 | pycurl>=7.43.0 3 | toml>=0.9.0 4 | mock>=2.0.0 5 | coverage>=4.4.1 6 | requests>=2.18.4 7 | -------------------------------------------------------------------------------- /tests/testfiles/pkg_integrity/tappy-0.9.2.tar.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/clearlinux/autospec/master/tests/testfiles/pkg_integrity/tappy-0.9.2.tar.gz -------------------------------------------------------------------------------- /tests/testfiles/pkg_integrity/xattr-0.9.1.tar.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/clearlinux/autospec/master/tests/testfiles/pkg_integrity/xattr-0.9.1.tar.gz -------------------------------------------------------------------------------- /tests/testfiles/pkg_integrity/quagga-1.1.0.tar.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/clearlinux/autospec/master/tests/testfiles/pkg_integrity/quagga-1.1.0.tar.gz -------------------------------------------------------------------------------- 
/tests/testfiles/pkg_integrity/SDL_gfx-2.0.25.tar.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/clearlinux/autospec/master/tests/testfiles/pkg_integrity/SDL_gfx-2.0.25.tar.gz -------------------------------------------------------------------------------- /tests/testfiles/pkg_integrity/bad_quagga-1.1.0.tar.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/clearlinux/autospec/master/tests/testfiles/pkg_integrity/bad_quagga-1.1.0.tar.gz -------------------------------------------------------------------------------- /tests/testfiles/pkg_integrity/hoe-debugging-1.2.1.gem: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/clearlinux/autospec/master/tests/testfiles/pkg_integrity/hoe-debugging-1.2.1.gem -------------------------------------------------------------------------------- /tests/testfiles/pkg_integrity/pygobject-3.24.0.tar.xz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/clearlinux/autospec/master/tests/testfiles/pkg_integrity/pygobject-3.24.0.tar.xz -------------------------------------------------------------------------------- /tests/testfiles/pkg_integrity/pkg-config-0.29.1.tar.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/clearlinux/autospec/master/tests/testfiles/pkg_integrity/pkg-config-0.29.1.tar.gz -------------------------------------------------------------------------------- /tests/testfiles/pkg_integrity/testkeys/023A4420C7EC6914.pkey: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/clearlinux/autospec/master/tests/testfiles/pkg_integrity/testkeys/023A4420C7EC6914.pkey -------------------------------------------------------------------------------- /tests/testfiles/pkg_integrity/testkeys/6FE57CA8C1A4AEA6.pkey: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/clearlinux/autospec/master/tests/testfiles/pkg_integrity/testkeys/6FE57CA8C1A4AEA6.pkey -------------------------------------------------------------------------------- /tests/testfiles/pkg_integrity/qtspeech-everywhere-src-5.12.4.tar.xz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/clearlinux/autospec/master/tests/testfiles/pkg_integrity/qtspeech-everywhere-src-5.12.4.tar.xz -------------------------------------------------------------------------------- /.github/actions/clearlinux-latest-action/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM clearlinux:latest 2 | RUN swupd bundle-add package-builder python-extras 3 | COPY entrypoint.sh /entrypoint.sh 4 | ENTRYPOINT ["/entrypoint.sh"] 5 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [egg_info] 2 | tag_build = 3 | [pycodestyle] 4 | ignore = E501 5 | 6 | [coverage:run] 7 | # omit tests and site-packages content 8 | omit = tests/*,*site-packages*,*site.py 9 | 10 | [flake8] 11 | max-line-length = 199 12 | ignore = B902, E722, W503 13 | -------------------------------------------------------------------------------- /autospec/__init__.py: 
-------------------------------------------------------------------------------- 1 | """Autospec, an automated specfile generation utility.""" 2 | 3 | __all__ = ["abireport", "buildreq", "build", "config", "files", 4 | "git", "lang", "license", "patches", "specdescription", 5 | "tarball", "util", "commitmessage", "test", "patches"] 6 | -------------------------------------------------------------------------------- /.github/actions/clearlinux-latest-action/entrypoint.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -lx 2 | 3 | run_flake8() { 4 | make check 5 | } 6 | 7 | run_unittests() { 8 | make unittests-no-coverage 9 | } 10 | 11 | if t=$(type -t "$INPUT_TESTFUNC"); then 12 | if [ "$t" = "function" ]; then 13 | $INPUT_TESTFUNC 14 | fi 15 | fi 16 | -------------------------------------------------------------------------------- /.github/actions/clearlinux-latest-action/action.yml: -------------------------------------------------------------------------------- 1 | name: 'Clear in Docker' 2 | description: 'Run commands in the latest Clear Linux OS Docker image' 3 | inputs: 4 | testfunc: 5 | description: 'Test function to run' 6 | required: true 7 | default: 'run_flake8' 8 | runs: 9 | using: 'docker' 10 | image: 'Dockerfile' 11 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ## DISCONTINUATION OF PROJECT. 2 | 3 | This project will no longer be maintained by Intel. 4 | 5 | Intel will not provide or guarantee development of or support for this project, including but not limited to, maintenance, bug fixes, new releases or updates. Patches to this project are no longer accepted by Intel. If you have an ongoing need to use this project, are interested in independently developing it, or would like to maintain patches for the community, please create your own fork of the project. 
6 | 7 | Contact: webadmin@linux.intel.com 8 | -------------------------------------------------------------------------------- /.github/workflows/tests.yml: -------------------------------------------------------------------------------- 1 | name: Autospec Tests 2 | 3 | on: [push, pull_request] 4 | 5 | jobs: 6 | test_style: 7 | runs-on: ubuntu-latest 8 | name: Flake8 9 | timeout-minutes: 30 10 | steps: 11 | - name: Checkout 12 | uses: actions/checkout@v3.3.0 13 | - name: Run Tests 14 | uses: ./.github/actions/clearlinux-latest-action 15 | with: 16 | testfunc: run_flake8 17 | test_unit: 18 | runs-on: ubuntu-latest 19 | name: Unit 20 | timeout-minutes: 30 21 | steps: 22 | - name: Checkout 23 | uses: actions/checkout@v3.3.0 24 | - name: Run Tests 25 | uses: ./.github/actions/clearlinux-latest-action 26 | with: 27 | testfunc: run_unittests 28 | -------------------------------------------------------------------------------- /tests/test_general.py: -------------------------------------------------------------------------------- 1 | import subprocess 2 | import unittest 3 | 4 | 5 | class TestGeneral(unittest.TestCase): 6 | 7 | def test_ConfigParser_regressions(self): 8 | """ 9 | Make sure ConfigParser is always called with the required 10 | interpolation=None argument 11 | """ 12 | grep_cmd = ["grep", "-re", 13 | "ConfigParser(.*(^interpolation=None).*)", 14 | "autospec"] 15 | try: 16 | output = subprocess.check_output(grep_cmd).decode('utf-8') 17 | except subprocess.CalledProcessError as e: 18 | output = e.output.decode('utf-8') 19 | 20 | self.assertEqual(output.strip(), "") 21 | 22 | 23 | if __name__ == "__main__": 24 | unittest.main(buffer=True) 25 | -------------------------------------------------------------------------------- /tests/README.rst: -------------------------------------------------------------------------------- 1 | ================ 2 | Autospec Testing 3 | ================ 4 | 5 | Code Style 6 | ========== 7 | 8 | Autospec changes are scanned to vet code style issues with the ``flake8`` tool. 9 | To check for issues, run ``make check`` from the root of the autospec source 10 | tree, which executes ``flake8`` with appropriate arguments. 11 | 12 | Unit 13 | ==== 14 | 15 | Autospec ships with several test modules that correspond to individual modules 16 | from the toplevel ``autospec`` directory. 17 | 18 | Each module can be tested in isolation by running ``make test_``, where 19 | ```` corresponds to the module name. For example, ``make 20 | test_pkg_integrity`` runs unit tests for the ``pkg_integrity.py`` module. 21 | 22 | To run *all* unit tests, run ``make unittests``. If all tests pass, a code 23 | coverage report is also generated. 
24 | -------------------------------------------------------------------------------- /tests/testfiles/pkg_integrity/pkg-config-0.29.1.tar.gz.asc: -------------------------------------------------------------------------------- 1 | -----BEGIN PGP SIGNATURE----- 2 | Version: GnuPG v2 3 | 4 | iQIcBAABCAAGBQJW2ZtDAAoJEAI6RCDH7GkUc0IP/1bH7KEJdUM+lrGM1SOuNHdq 5 | 4VEwDp1II8abbBzHeGEXZ8p4+MwwSOYHFiy+NM1yldZkDXtqAlAqvIuEzc+PtgGd 6 | vFeNPb9infibNaEDK+zz4fcqJOSab1ZcQ/D3EIJXwKr5nIYP8RuCHu/zstf7o6R0 7 | /wnGWaAIB1+p9PxvUhMPMbBEQCw/cBzyZ2d7nApHF3b0OH2wM7P8VG8ot4cuglPq 8 | hzk27ZnrYeUDyUUhMRlL7sZZouJlSy/0OxsBK++tOjE6MiuAZhqtlSW+cFK4L7k/ 9 | q4eLodX7GtF0psSgTRjTk2ozdSIDkB2ccLBN6CzgCcbPrbcz4tVQqaQBcSd0mCl7 10 | RWAKmSye7p+CY8mIIOjdYm+KaQRmJMKDXs49hMycti22jnu5T2BM6O7MZpiY+cb3 11 | O2UKUXbVyX/cXKwTYwf4VMddxJKFaqYac+7n5qWbdwBjk9E5OC2ltz94taM1pxZ5 12 | 2jRtfyIb3s+Rj6M5cXI5UChrGqzMK6BmEbyZ0KbHAJ7Y0xvGqwydC6J+RwGIqRlp 13 | LmW3k1ggpUajoMcgq9KqJgVqo/9f4+6anADHRMNJ93MxR7h5BRQ1/GWSXpOzYsYv 14 | DnfQPrhc+z20m81qsvfUcBnN/k74yiDVqp3I/HrYGD+f8cXKPpBRESkPAXlUIcu2 15 | ALHZXEdBGcWdUrAIpJT2 16 | =SMbV 17 | -----END PGP SIGNATURE----- 18 | -------------------------------------------------------------------------------- /tests/testfiles/pkg_integrity/quagga-1.1.0.tar.gz.asc: -------------------------------------------------------------------------------- 1 | -----BEGIN PGP SIGNATURE----- 2 | Version: GnuPG v1 3 | 4 | iQJMBAABCAA2BQJYBh/XLxpodHRwczovL3d3dy5qYWttYS5vcmcvfnBhdWwvcGdw 5 | X3BvbGljeS0xLjEudHh0AAoJEG/lfKjBpK6mNOMP/irlUcU/4NQ6BQQbF7Vc90wo 6 | hmTH/zGAodnndxSplIGV63BQqmQr5KiSOp6tNQ3OlCIwLFWlr/LiJz+MeThaMtZx 7 | /mlrZptWIhbEcJzVa6efg8UGIYl2NC+QxFgEyezfCYSEmczd1qE3kiX+5WjcqVPH 8 | pVjisPwaAYa0FTCsAwBMUKyJr2K3sq5hSJR7DFDVnvH1DJ1xZd9871ZdDpZBHz2z 9 | GvR+IuZcWbYleH1ArFvT6cMokTpIUMvd33/+Gdpfu9fihzQQn199nN2LFZJzuEGV 10 | vH6+IZVTmtXb5U/sdtbWGaDv8eFLAWl2NH9VNdlVw5FbWOYRC4YNN39/hBy7s0po 11 | hvq33ZugC7JqPuje+4W1oF1T/dbRCIBmWUzKsoH8+3z4KJHdotSR0cU+TT+w6SJC 12 | QhF9TAbjrfbeJs0D0NnZrllDLiLGFgLl5yULzMjRDqKcgqE5+nvYBPXHPUCCPU7+ 13 | 59QOkPMsz/kZGV1lRzoUoxlM6V/phJRPU7jit10puiNij0c4peWbjVmTWDei3Xeu 14 | kHpck5wIAUFzAIXBUpVhYdLl/kxq654Qqthoci869ATZIKH/SOId9Y0K1GE6xO6H 15 | 7vQFUGn3dcdnbgapM+tpmC77EmV4BtzkTjcu2pX6s0qnI4P+M+zXS+7G96xP3zHN 16 | ZSFq45swEKd6SKbOYxcN 17 | =mt1F 18 | -----END PGP SIGNATURE----- 19 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | from setuptools import setup, find_packages 2 | 3 | import sys, os 4 | version = "1.2.0" 5 | 6 | def readme(): 7 | with open("README.rst") as f: 8 | return f.read() 9 | 10 | setup(name="autospec", 11 | description="Automated creation of RPM packaging", 12 | long_description=readme(), 13 | version = version, 14 | license = "GPLv3", 15 | packages = ["autospec"], 16 | package_data = { 17 | '': ['*.pl', '*.dic'], 18 | }, 19 | classifiers=[ 20 | 'Intended Audience :: Developers', 21 | 'Topic :: Software Development :: Build Tools', 22 | 'License :: OSI Approved :: GNU General Public License v3 (GPLv3)', 23 | # Python versions supported. 
24 | 'Programming Language :: Python :: 3 :: Only', 25 | 'Programming Language :: Python :: 3', 26 | 'Programming Language :: Python :: 3.2', 27 | 'Programming Language :: Python :: 3.3', 28 | 'Programming Language :: Python :: 3.4', 29 | 'Programming Language :: Python :: 3.5', 30 | 'Programming Language :: Python :: 3.6', 31 | ], 32 | include_package_data = True, 33 | ) 34 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | check: autospec/*.py 2 | @flake8 --ignore=B902,D100,I201 $^ 3 | 4 | test_download: 5 | PYTHONPATH=${CURDIR}/autospec python3 tests/test_download.py 6 | 7 | test_pkg_integrity: 8 | PYTHONPATH=${CURDIR}/autospec python3 tests/test_pkg_integrity.py 9 | 10 | test_tarball: 11 | PYTHONPATH=${CURDIR}/autospec python3 tests/test_tarball.py 12 | 13 | test_specfile: 14 | PYTHONPATH=${CURDIR}/autospec python3 tests/test_specfile.py 15 | 16 | test_abireport: 17 | PYTHONPATH=${CURDIR}/autospec python3 tests/test_abireport.py 18 | 19 | test_commitmessage: 20 | PYTHONPATH=${CURDIR}/autospec python3 tests/test_commitmessage.py 21 | 22 | test_files: 23 | PYTHONPATH=${CURDIR}/autospec python3 tests/test_files.py 24 | 25 | test_license: 26 | PYTHONPATH=${CURDIR}/autospec python3 tests/test_license.py 27 | 28 | test_config: 29 | PYTHONPATH=${CURDIR}/autospec python3 tests/test_config.py 30 | 31 | test_build: 32 | PYTHONPATH=${CURDIR}/autospec python3 tests/test_build.py 33 | 34 | test_buildreq: 35 | PYTHONPATH=${CURDIR}/autospec python3 tests/test_buildreq.py 36 | 37 | test_specdescription: 38 | PYTHONPATH=${CURDIR}/autospec python3 tests/test_specdescription.py 39 | 40 | test_count: 41 | PYTHONPATH=${CURDIR}/autospec python3 tests/test_count.py 42 | 43 | test_check: 44 | PYTHONPATH=${CURDIR}/autospec python3 tests/test_check.py 45 | 46 | test_util: 47 | PYTHONPATH=${CURDIR}/autospec python3 tests/test_util.py 48 | 49 | test_general: 50 | PYTHONPATH=${CURDIR}/autospec python3 tests/test_general.py 51 | 52 | unittests: 53 | PYTHONPATH=${CURDIR}/autospec coverage run -m unittest discover -b -s tests -p 'test_*.py' && coverage report 54 | 55 | unittests-no-coverage: 56 | PYTHONPATH=${CURDIR}/autospec python3 -m unittest discover -b -s tests -p 'test_*.py' 57 | 58 | coverage: 59 | coverage report -m 60 | -------------------------------------------------------------------------------- /autospec/qt_modules: -------------------------------------------------------------------------------- 1 | # This file maps the qmake entry for the QT or QT_PRIVATE variable to the 2 | # pkgconfig equivalent, to be added to BuildRequires. Please keep sorted. 
3 | # This file is sorted with LC_COLLATE=C 4 | # Format: , 5 | 3danimation, Qt63DAnimation 6 | 3dcore, Qt63DCore 7 | 3dextras, Qt63DExtras 8 | 3dinput, Qt63DInput 9 | 3dlogic, Qt63DLogic 10 | 3dquick, Qt63DQuick 11 | 3dquickanimation, Qt63DQuickAnimation 12 | 3dquickextras, Qt63DQuickExtras 13 | 3dquickinput, Qt63DQuickInput 14 | 3dquickrender, Qt63DQuickRender 15 | 3dquickscene2d, Qt63DQuickScene2D 16 | 3drender, Qt63DRender 17 | bluetooth, Qt6Bluetooth 18 | charts, Qt6Charts 19 | concurrent, Qt6Concurrent 20 | core, Qt6Core 21 | datavisualization, Qt6DataVisualization 22 | dbus, Qt6DBus 23 | declarative, Qt6Declarative 24 | designer, Qt6Designer 25 | gamepad, Qt6Gamepad 26 | gui, Qt6Gui 27 | help, Qt6Help 28 | location, Qt6Location 29 | multimedia, Qt6Multimedia 30 | multimediawidgets, Qt6MultimediaWidgets 31 | network, Qt6Network 32 | networkauth, Qt6NetworkAuth 33 | nfc, Qt6Nfc 34 | opengl, Qt6OpenGL 35 | openglextensions, Qt6OpenGLExtensions 36 | positioning, Qt6Positioning 37 | printsupport, Qt6PrintSupport 38 | purchasing, Qt6Purchasing 39 | qml, Qt6Qml 40 | qmltest, Qt6QuickTest 41 | quick, Qt6Quick 42 | quickcontrols2, Qt6QuickControls2 43 | quickwidgets, Qt6QuickWidgets 44 | remoteobjects, Qt6RemoteObjects 45 | script, Qt6Script 46 | scripttools, Qt6ScriptTools 47 | scxml, Qt6Scxml 48 | sensors, Qt6Sensors 49 | serialbus, Qt6SerialBus 50 | serialport, Qt6SerialPort 51 | sql, Qt6Sql 52 | svg, Qt6Svg 53 | testlib, Qt6Test 54 | texttospeech, Qt6TextToSpeech 55 | uitools, Qt6UiTools 56 | waylandclient, Qt6WaylandClient 57 | waylandcompositor, Qt6WaylandCompositor 58 | webchannel, Qt6WebChannel 59 | webengine, Qt6WebEngine 60 | webenginecore, Qt6WebEngineCore 61 | webenginewidgets, Qt6WebEngineWidgets 62 | webkit, Qt6WebKit 63 | webkitwidgets, Qt6WebKitWidgets 64 | websockets, Qt6WebSockets 65 | widgets, Qt6Widgets 66 | x11extras, Qt6X11Extras 67 | xml, Qt6Xml 68 | xmlpatterns, Qt6XmlPatterns 69 | -------------------------------------------------------------------------------- /autospec/pkg_scan.py: -------------------------------------------------------------------------------- 1 | # 2 | # pkg_scan.py - part of autospec 3 | # Copyright (C) 2017 Intel Corporation 4 | # 5 | # This program is free software: you can redistribute it and/or modify 6 | # it under the terms of the GNU General Public License as published by 7 | # the Free Software Foundation, either version 3 of the License, or 8 | # (at your option) any later version. 9 | # 10 | # This program is distributed in the hope that it will be useful, 11 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 12 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 13 | # GNU General Public License for more details. 14 | # 15 | # You should have received a copy of the GNU General Public License 16 | # along with this program. If not, see . 17 | # 18 | import subprocess 19 | 20 | import util 21 | 22 | 23 | def get_whatrequires(pkg, yum_conf): 24 | """ 25 | Write list of packages. 26 | 27 | Write packages that require the current package to a file 28 | using dnf repoquery what-requires and --recursive commands. 
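
    Illustrative call (package name and config path are placeholders):
        get_whatrequires("example-pkg", "/path/to/yum.conf")
    The recursive reverse-dependency list is written to a 'whatrequires'
    file in the current working directory.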
29 | """ 30 | # clean up dnf cache to avoid 'no more mirrors repo' error 31 | try: 32 | subprocess.check_output(['dnf', '--config', yum_conf, 33 | '--releasever', 'clear', 'clean', 'all']) 34 | except subprocess.CalledProcessError as err: 35 | util.print_warning("Unable to clean dnf repo: {}, {}".format(pkg, err)) 36 | return 37 | 38 | try: 39 | out = subprocess.check_output(['dnf', 'repoquery', 40 | '--config', yum_conf, 41 | '--releasever', 'clear', 42 | '--archlist=src', '--recursive', '--queryformat=%{NAME}', 43 | '--whatrequires', pkg]).decode('utf-8') 44 | 45 | except subprocess.CalledProcessError as err: 46 | util.print_warning("dnf repoquery whatrequires for {} failed with: {}".format(pkg, err)) 47 | return 48 | 49 | util.write_out('whatrequires', '# This file contains recursive sources that ' 50 | 'require this package\n' + out) 51 | -------------------------------------------------------------------------------- /autospec/download.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/true 2 | # 3 | # download.py - part of autospec 4 | # Copyright (C) 2018 Intel Corporation 5 | # 6 | # This program is free software: you can redistribute it and/or modify 7 | # it under the terms of the GNU General Public License as published by 8 | # the Free Software Foundation, either version 3 of the License, or 9 | # (at your option) any later version. 10 | # 11 | # This program is distributed in the hope that it will be useful, 12 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 13 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 14 | # GNU General Public License for more details. 15 | # 16 | # You should have received a copy of the GNU General Public License 17 | # along with this program. If not, see . 18 | # 19 | 20 | import os 21 | import sys 22 | from io import BytesIO 23 | 24 | import pycurl 25 | from util import print_fatal 26 | 27 | 28 | def do_curl(url, dest=None, post=None, is_fatal=False): 29 | """ 30 | Perform a curl operation for `url`. 31 | 32 | If `post` is set, a POST is performed for `url` with fields taken from the 33 | specified value. Otherwise a GET is performed for `url`. If `dest` is set, 34 | the curl response (if successful) is written to the specified path and the 35 | path is returned. Otherwise a successful response is returned as a BytesIO 36 | object. If `is_fatal` is `True` (`False` is the default), a GET failure, 37 | POST failure, or a failure to write to the path specified for `dest` 38 | results in the program exiting with an error. Otherwise, `None` is returned 39 | for any of those error conditions. 
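
    Illustrative usage (URL and destination path are placeholders):
        buf = do_curl("https://example.org/pkg.tar.gz")
        path = do_curl("https://example.org/pkg.tar.gz", dest="./pkg.tar.gz", is_fatal=True)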
40 | """ 41 | c = pycurl.Curl() 42 | c.setopt(c.URL, url) 43 | if post: 44 | c.setopt(c.POSTFIELDS, post) 45 | c.setopt(c.FOLLOWLOCATION, True) 46 | c.setopt(c.FAILONERROR, True) 47 | c.setopt(c.CONNECTTIMEOUT, 10) 48 | c.setopt(c.TIMEOUT, 600) 49 | c.setopt(c.LOW_SPEED_LIMIT, 1) 50 | c.setopt(c.LOW_SPEED_TIME, 10) 51 | buf = BytesIO() 52 | c.setopt(c.WRITEDATA, buf) 53 | try: 54 | c.perform() 55 | except pycurl.error as e: 56 | if is_fatal: 57 | print_fatal("Unable to fetch {}: {}".format(url, e)) 58 | sys.exit(1) 59 | return None 60 | finally: 61 | c.close() 62 | 63 | # write to dest if specified 64 | if dest: 65 | try: 66 | with open(dest, 'wb') as fp: 67 | fp.write(buf.getvalue()) 68 | except IOError as e: 69 | if os.path.exists(dest): 70 | os.unlink(dest) 71 | if is_fatal: 72 | print_fatal("Unable to write to {}: {}".format(dest, e)) 73 | sys.exit(1) 74 | return None 75 | 76 | if dest: 77 | return dest 78 | else: 79 | return buf 80 | -------------------------------------------------------------------------------- /autospec/license_translations: -------------------------------------------------------------------------------- 1 | # This file contains license strings and the actual licenses they are associated 2 | # with in the form: : 3 | # This file is sorted with LC_COLLATE=C 4 | # Lines beginning with '#' are ignored. 5 | # For strings that start with '#', escape the '#' as '\#'. 6 | 2-clause, BSD-2-Clause 7 | AGPL-3, AGPL-3.0 8 | APL-2.0, Apache-2.0 9 | APL2, Apache-2.0 10 | APL2.0, Apache-2.0 11 | ASL 2.0, Apache-2.0 12 | ASL-2, Apache-2.0 13 | ASL-2.0, Apache-2.0 14 | Apache 2.0, Apache-2.0 15 | Apache License 2.0, Apache-2.0 16 | Apache License, Version 2.0, Apache-2.0 17 | Apache, Apache-2.0 18 | Apache-2, Apache-2.0 19 | Apache2, Apache-2.0 20 | Apache2.0, Apache-2.0 21 | Apachev2, Apache-2.0 22 | Artistic-1.0+GPL-1.0, Artistic-1.0 GPL-1.0 23 | Artistic/GPL, Artistic-1.0-Perl GPL-1.0-or-later 24 | Artistic_2, Artistic-2.0 25 | BSD(3-clause), BSD-3-Clause 26 | BSD-2-clause, BSD-2-Clause 27 | BSD-3-clause, BSD-3-Clause 28 | BSD_2_clause, BSD-2-Clause 29 | BSD_3_clause, BSD-3-Clause 30 | Boost, BSL-1.0 31 | CC0, CC0-1.0 32 | CPL, CPL-1.0 33 | Common Public License Version 1.0, CPL-1.0 34 | Expat, MIT 35 | GFDL1.1, GFDL-1.1 36 | GPL (> 2), GPL-3.0+ 37 | GPL(==-2), GPL-2.0 38 | GPL(>=-2), GPL-2.0+ 39 | GPL(>=-2.0), GPL-2.0+ 40 | GPL(>=-2.1), GPL-2.0 41 | GPL(>=-3), GPL-3.0 42 | GPL(>=-3.0), GPL-3.0+ 43 | GPL(>=2), GPL-2.0+ 44 | GPL(>=3), GPL-3.0+ 45 | GPL-2+, GPL-2.0+ 46 | GPL-2, GPL-2.0 47 | GPL-2.0+, GPL-2.0+ 48 | GPL-2.0+LGPL-2.1, GPL-2.0 LGPL-2.1 49 | GPL-2.0, GPL-2.0 50 | GPL-2.0-or-later, GPL-2.0+ 51 | GPL-3+, GPL-3.0 52 | GPL-3, GPL-3.0 53 | GPL-3.0+, GPL-3.0+ 54 | GPL-3.0, GPL-3.0 55 | GPL2, GPL-2.0 56 | GPL3, GPL-3.0 57 | GPLV2, GPL-2.0 58 | GPLV3, GPL-3.0 59 | GPLv2+, GPL-2.0+ 60 | GPLv2, GPL-2.0 61 | GPLv3+, GPL-3.0+ 62 | GPLv3, GPL-3.0 63 | ISCL, ISC 64 | LGPL(>=-2), LGPL-2.0+ 65 | LGPL(>=-2.1), LGPL-2.1+ 66 | LGPL(>=-3), LGPL-3.0+ 67 | LGPL(>=2), LGPL-2.0+ 68 | LGPL-2, LGPL-2.0 69 | LGPL-2.0+, LGPL-2.0+ 70 | LGPL-2.1+, LGPL-2.1+ 71 | LGPL-2.1-or-later, LGPL-2.1+ 72 | LGPL-3+, LGPL-3.0+ 73 | LGPL-3, LGPL-3.0 74 | LGPLv2+, LGPL-2.0+ 75 | LGPLv2, LGPL-2.0 76 | LGPLv2.1+, LGPL-2.1+ 77 | LGPLv2.1, LGPL-2.1 78 | LGPLv3+, LGPL-3.0+ 79 | LGPLv3, LGPL-3.0 80 | MIT/X, MIT 81 | MPL-2, MPL-2.0 82 | MPL2, MPL-2.0 83 | MPLv1.1, MPL-1.1 84 | MPLv2, MPL-2.0 85 | MPLv2.0, MPL-2.0 86 | MPLv2.0,, MPL-2.0 87 | PSF, Python-2.0 88 | Perl, Artistic-1.0-Perl 89 | Python, Python-2.0 90 | VIM, 
Vim 91 | ZLIB, Zlib 92 | ZPL 2.1, ZPL-2.1 93 | ZPL, ZPL-2.0 94 | apache, Apache-2.0 95 | artistic2, Artistic-2.0 96 | artistic_2, Artistic-2.0 97 | gplv3, GPL-3.0 98 | http://creativecommons.org/licenses/BSD/, BSD-2-Clause 99 | http://opensource.org/licenses/MIT, MIT 100 | http://www.apache.org/licenses/LICENSE-2.0, Apache-2.0 101 | lgpl, LGPL-2.1 102 | mit, MIT 103 | perl, Artistic-1.0-Perl 104 | w3c, W3C 105 | zlib, Zlib 106 | zlib/libpng, zlib-acknowledgement 107 | -------------------------------------------------------------------------------- /autospec/license_blacklist: -------------------------------------------------------------------------------- 1 | # This file contains the list of strings in the license blacklist 2 | # in the form: 3 | # This file is sorted with LC_COLLATE=C 4 | # Lines beginning with '#' are ignored. 5 | # For strings that start with '#', escape the '#' as '\#'. 6 | % 7 | %license 8 | (LGPL) 9 | (new) 10 | (specified 11 | * 12 | + 13 | - 14 | -MIT 15 | -or- 16 | .git 17 | .md 18 | .mit 19 | .txt 20 | / 21 | 1-2-3 22 | 1.0 23 | 1.1 24 | 2.0 25 | 2BSD 26 | 3-Clause 27 | 3-clause 28 | 3BSD 29 | ========================= 30 | >=-2 31 | @CPACK_RPM_PACKAGE_LICENSE@ 32 | AGPLv3+ 33 | ALv2 34 | AND 35 | APL-2.0 36 | APPLICATION__TYPE 37 | ASL 38 | Artistic 39 | BSD 40 | BSD(2 41 | BSD(3 42 | BSD-2-Clause-Views 43 | BSD-3 44 | BSD-compatible 45 | BSD-derived(Repoze) 46 | BSD-like 47 | BSD-style 48 | BSDL 49 | BSD_3_clause 50 | BSDish 51 | BSL 52 | CC-BY 53 | Clause 54 | Commercial 55 | Corp. 56 | Distribution 57 | Domain 58 | Dual 59 | Dual GPL/BSD/CPL 60 | EPL 61 | Eclipse 62 | Expat(MIT/X11) 63 | Expat/MIT 64 | FOUNDATION 65 | Foundation 66 | FreeBSD 67 | GENERAL 68 | GFDL 69 | GNU 70 | GPL 71 | GPL+ 72 | GPL-1.0-only 73 | GPL/BSD 74 | GPL/BSD/CPL 75 | GPLv2.1 76 | General 77 | IBM 78 | Jupyter 79 | LESSER 80 | LGPL 81 | LGPL+BSD 82 | LGPL/MIT 83 | LICENCE 84 | Lesser 85 | Library 86 | Licences 87 | License 88 | License(2.0) 89 | License(==-2.0) 90 | License(>=-2) 91 | License(>=-2.0) 92 | License(LGPL) 93 | License(MIT) 94 | License, 95 | License-2 96 | License-2.0 97 | License-2.0(MPL-2.0) 98 | License-3(GPLv3) 99 | Licensing 100 | Lucent 101 | MIT-0 102 | MIT/Expat 103 | MPL 104 | Minpack 105 | Modified 106 | Mozilla 107 | Muddy-MIT 108 | N/A 109 | New 110 | OFL 111 | OR 112 | Open 113 | PIL 114 | PSF-2+ 115 | PUBLIC 116 | PYTHON 117 | Permission 118 | Public 119 | Revised 120 | SIL 121 | SIP 122 | SOFTWARE 123 | See 124 | Set 125 | Software 126 | Source 127 | Standard 128 | The 129 | This 130 | Two-clause 131 | UN 132 | UNKNOWN 133 | Unknown 134 | Unkown 135 | Unlimited 136 | VERSION-2 137 | Version 138 | Version-2.0 139 | Version-3 140 | WITH 141 | What 142 | \# 143 | a 144 | advertising 145 | and 146 | any 147 | bsd 148 | charge 149 | classifiers) 150 | clause) 151 | copy 152 | cryptsetup-OpenSSL-exception 153 | details 154 | domain 155 | domain. 156 | dual 157 | exceptions 158 | for 159 | free 160 | gpl 161 | granted 162 | hereby 163 | http://nmap.org/man/man-legal.html 164 | into 165 | is 166 | it 167 | later 168 | later(LGPLv2+) 169 | license 170 | licensing 171 | ndg/httpsclient/LICENCE 172 | new 173 | none 174 | obtaining 175 | of 176 | on 177 | open_source 178 | option 179 | option) 180 | or 181 | or(at 182 | others 183 | person 184 | public 185 | released 186 | software 187 | style 188 | terms 189 | the 190 | to 191 | under? 
192 | unencumbered 193 | unknown 194 | unrestricted 195 | uses 196 | using 197 | v1.0 198 | version 199 | version-2 200 | version-2.1 201 | with 202 | your 203 | | 204 | ~ 205 | -------------------------------------------------------------------------------- /autospec/translate.dic: -------------------------------------------------------------------------------- 1 | async_timeout=pypi-async_timeout 2 | babel=pypi-babel 3 | backports.ssl-match-hostname=pypi-backports.ssl_match_hostname 4 | barbicanclient-python=python-barbicanclient 5 | BeautifulSoup=pypi-beautifulsoup4 6 | cryptography-python=pypi-cryptography 7 | Crypto-python=pycrypt-python 8 | dateutil-python=pypi-python_dateutil 9 | dateutil.relativedelta-python=pypi-python_dateutil 10 | designateclient-python=python-designateclient 11 | django-python=pypi-django 12 | enum-python=enum34 13 | formencode=FormEncode 14 | Genshi-python=Genshi 15 | gi.overrides-python=pygobject-python 16 | gi-python=pygobject-python 17 | github.com/golang/glog=golang-github-golang-glog 18 | github.com/gorilla/context=golang-github-gorilla-context 19 | github.com/mattn/go-sqlite3=golang-github-mattn-go-sqlite3 20 | github.com/mitchellh/mapstructure=golang-github-mitchellh-mapstructure 21 | github.com/stretchr/testify/assert=golang-github-stretchr-testify 22 | glance-store=glance_store 23 | golang.org/x/net/context=golang-googlecode-go-net 24 | golang.org/x/net/netutil=golang-googlecode-go-net 25 | gopkg.in/check.v1=golang-github-go-check-check 26 | gopkg.in/yaml.v2=golang-github-go-yaml-yaml 27 | hacking-python=hacking 28 | httplib2-python=httplib2 29 | ironicclient-python=python-ironicclient 30 | jinja2=pypi-jinja2 31 | jinja2-python=pypi-jinja2 32 | Jinja2-python=pypi-jinja2 33 | jsonpath_rw_ext=jsonpath-rw-ext 34 | keystoneclient.auth-python=python-keystoneclient 35 | keystoneclient.v3-python=python-keystoneclient 36 | keystonemiddleware-python=keystonemiddleware 37 | lazy_object_proxy=lazy-object-proxy 38 | manilaclient-python=python-manilaclient 39 | mistralclient.api.v2-python=python-mistralclient 40 | netaddr-python=netaddr 41 | Opcodes=opcodes 42 | openstackclient.tests-python=python-openstackclient 43 | openstack.nose-plugin-python=openstack.nose_plugin 44 | os_brick.initiator-python=os-brick 45 | oslo_cache-python=oslo.cache 46 | oslo_config-python=oslo.config 47 | oslo.config-python=oslo.config 48 | oslo_policy-python=oslo.policy 49 | oslo_reports-python=oslo.reports 50 | oslo_versionedobjects-python=oslo.versionedobjects 51 | oslo_versionedobjects.tests-python=oslo.versionedobjects 52 | pep8-python=pypi-pep8 53 | posix-ipc-python=pypi-posix_ipc 54 | PrettyTable-python=pypi-prettytable 55 | pyaml-python=pypi-pyaml 56 | PyECLib-python=pyeclib 57 | pygments=pypi-pygments 58 | Pygments-python=pypi-pygments 59 | PyGObject=pygobject 60 | pylibmc-python=pylibmc 61 | PyOpenSSL=pypi-pyopenssl 62 | pyqt5=PyQt5 63 | pytest_runner=pytest-runner 64 | python-memcached-python=python-memcached 65 | pyyaml=PyYAML 66 | qtawesome=QtAwesome 67 | qtpy=QtPy 68 | restructuredtext-lint=restructuredtext_lint 69 | semantic-version=semantic_version 70 | setuptools-scm-python=pypi-setuptools_scm 71 | simplejson-python=simplejson 72 | sphinx=pypi-sphinx 73 | sqlalchemy=pypi-sqlalchemy 74 | sqlparse-python=pypi-sqlparse 75 | stevedore-python=stevedore 76 | testresources-python=testresources 77 | testscenarios-python=testscenarios 78 | websocket-client=websocket_client 79 | WebTest-python=pypi-webtest 80 | yaml-python=PyYAML 81 | zaqarclient.queues.v1-python=python-zaqarclient 
82 | zaqarclient.transport-python=python-zaqarclient 83 | -------------------------------------------------------------------------------- /tests/test_config.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | import config 3 | 4 | 5 | # Input for tarball.detect_build_from_url method tests 6 | # Structure: (url, build_pattern) 7 | BUILD_PAT_URL = [ 8 | ("https://cran.r-project.org/src/contrib/raster_3.0-12.tar.gz", "R"), 9 | ("https://ftp.osuosl.org/pub/cran/src/contrib/hexbin_1.28.5.tar.gz", "R"), 10 | ("http://pypi.debian.net/argparse/argparse-1.4.0.tar.gz", "distutils3"), 11 | ("https://pypi.python.org/packages/source/T/Tempita/Tempita-0.5.2.tar.gz", "distutils3"), 12 | ("https://cpan.metacpan.org/authors/id/T/TO/TODDR/IO-Tty-1.14.tar.gz", "cpan"), 13 | ("http://search.cpan.org/CPAN/authors/id/D/DS/DSKOLL/IO-stringy-2.111.tar.gz", "cpan"), 14 | ("https://pecl.php.net//get/lua-2.0.6.tgz", "phpize"), 15 | ] 16 | 17 | 18 | def detect_build_test_generator(url, build_pattern): 19 | """Create test for tarball.detect_build_from_url method.""" 20 | def generator(self): 21 | """Test template.""" 22 | conf = config.Config("") 23 | conf.detect_build_from_url(url) 24 | self.assertEqual(build_pattern, conf.default_pattern) 25 | 26 | return generator 27 | 28 | 29 | def create_dynamic_tests(): 30 | # Create tests for config.detect_build_from_url method. 31 | for url, build_pattern in BUILD_PAT_URL: 32 | test_name = 'test_pat_{}'.format(url) 33 | test = detect_build_test_generator(url, build_pattern) 34 | setattr(TestConfig, test_name, test) 35 | 36 | 37 | class TestConfig(unittest.TestCase): 38 | 39 | def test_set_build_pattern(self): 40 | """ 41 | Test set_build_pattern with sufficient pattern strength 42 | """ 43 | conf = config.Config("") 44 | conf.set_build_pattern("configure_ac", 1) 45 | self.assertEqual(conf.default_pattern, "configure_ac") 46 | self.assertEqual(conf.pattern_strength, 1) 47 | 48 | def test_set_build_pattern_low_strength(self): 49 | """ 50 | Test set_build_pattern with low pattern strength, nothing in the module 51 | should change 52 | """ 53 | conf = config.Config("") 54 | conf.pattern_strength = 2 55 | conf.set_build_pattern("configure_ac", 1) 56 | self.assertEqual(conf.default_pattern, "make") 57 | self.assertEqual(conf.pattern_strength, 2) 58 | 59 | def test_validate_extras_content_bad_glob(self): 60 | """ 61 | Test validate_extras_content with more than one glob in a directory 62 | """ 63 | conf = config.Config("") 64 | lines = ['/bad*path*/file'] 65 | new_lines = conf.validate_extras_content(lines, 'bad_glob') 66 | self.assertEqual(len(new_lines), 0) 67 | 68 | def test_validate_extras_content_good_single_glob(self): 69 | """ 70 | Test validate_extras_content with a single glob in a directory 71 | """ 72 | conf = config.Config("") 73 | lines = ['/good*/file'] 74 | new_lines = conf.validate_extras_content(lines, 'good_single_glob') 75 | self.assertEqual(new_lines, [lines[0].split('/')]) 76 | 77 | def test_validate_extras_content_good_multi_glob(self): 78 | """ 79 | Test validate_extras_content with a multiple valid globs 80 | """ 81 | conf = config.Config("") 82 | lines = ['/path1', '/good*/glob*/file', '/path3'] 83 | new_lines = conf.validate_extras_content(lines, 'good_multi_glob') 84 | self.assertEqual(new_lines, ['/path1', lines[1].split('/'), '/path3']) 85 | 86 | # Create dynamic tests 87 | create_dynamic_tests() 88 | 89 | if __name__ == '__main__': 90 | unittest.main(buffer=True) 91 | 
-------------------------------------------------------------------------------- /autospec/logcheck.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python3 2 | # 3 | # logcheck.py - part of autospec 4 | # Copyright (C) 2015 Intel Corporation 5 | # 6 | # This program is free software: you can redistribute it and/or modify 7 | # it under the terms of the GNU General Public License as published by 8 | # the Free Software Foundation, either version 3 of the License, or 9 | # (at your option) any later version. 10 | # 11 | # This program is distributed in the hope that it will be useful, 12 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 13 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 14 | # GNU General Public License for more details. 15 | # 16 | # You should have received a copy of the GNU General Public License 17 | # along with this program. If not, see . 18 | 19 | import os 20 | import re 21 | import sys 22 | 23 | from util import print_fatal, write_out 24 | 25 | 26 | def log_etc(lines): 27 | """Return the content of the START/etc ... END/etc section.""" 28 | etc = [] 29 | while True: 30 | line = next(lines) 31 | line = line.strip() 32 | if line == 'END/etc': 33 | break 34 | if line.startswith('+'): 35 | continue 36 | etc.append(line) 37 | return etc 38 | 39 | 40 | def logcheck(pkg_loc): 41 | """Try to discover configuration options that were automatically switched off.""" 42 | log = os.path.join(pkg_loc, 'results', 'build.log') 43 | if not os.path.exists(log): 44 | print('build log is missing, unable to perform logcheck.') 45 | return 46 | 47 | whitelist = [] 48 | file_dir = os.path.dirname(os.path.abspath(__file__)) 49 | file_path = os.path.join(file_dir, 'configure_whitelist') 50 | with open(file_path, "r") as whitelistf: 51 | for line in whitelistf: 52 | if line.startswith("#"): 53 | continue 54 | whitelist.append(line.rstrip()) 55 | 56 | blacklist = [] 57 | file_dir = os.path.dirname(os.path.abspath(__file__)) 58 | file_path = os.path.join(file_dir, 'configure_blacklist') 59 | with open(file_path, "r") as blacklistf: 60 | for line in blacklistf: 61 | if line.startswith("#"): 62 | continue 63 | blacklist.append(line.rstrip()) 64 | 65 | with open(log, 'r') as logf: 66 | lines = logf.readlines() 67 | 68 | pat = re.compile(r"^checking (?:for )?(.*?)\.\.\. 
no") 69 | misses = [] 70 | iter_lines = iter(lines) 71 | for line in iter_lines: 72 | if line.strip() == "START/etc": 73 | etc = log_etc(iter_lines) 74 | if etc: 75 | write_log(pkg_loc, "etc_files", etc) 76 | match = None 77 | m = pat.search(line) 78 | if m: 79 | match = m.group(1) 80 | 81 | if "none required" in line: 82 | match = None 83 | 84 | if "warning: format not a string literal" in line: 85 | match = line 86 | 87 | if not match or match in whitelist: 88 | continue 89 | 90 | if match in blacklist: 91 | print_fatal("Blacklisted configure-miss is forbidden: " + match) 92 | misses.append("Blacklisted configure-miss is forbidden: " + match) 93 | write_log(pkg_loc, 'configure_misses', misses) 94 | sys.exit(1) 95 | 96 | print("Configure miss: " + match) 97 | misses.append("Configure miss: " + match) 98 | 99 | if not misses: 100 | return 101 | 102 | write_log(pkg_loc, 'configure_misses', misses) 103 | 104 | 105 | def write_log(pkg_loc, fname, content): 106 | """Create log file with content.""" 107 | write_out(os.path.join(pkg_loc, fname), '\n'.join(sorted(content))) 108 | -------------------------------------------------------------------------------- /tests/test_util.py: -------------------------------------------------------------------------------- 1 | import subprocess 2 | import os 3 | import tempfile 4 | import unittest 5 | from unittest.mock import MagicMock, mock_open, patch 6 | 7 | import util 8 | 9 | 10 | def mock_gen(rv=None): 11 | def mock_f(*args, **kwargs): 12 | return rv 13 | 14 | return mock_f 15 | 16 | 17 | class TestUtil(unittest.TestCase): 18 | 19 | def test_call(self): 20 | """ 21 | Test call with default arguments, make sure it passes out the correct 22 | returncode 23 | """ 24 | call_backup = subprocess.call 25 | util.subprocess.call = mock_gen(rv=0) 26 | self.assertEqual(util.call('some command'), 0) 27 | util.subprocess.call = call_backup 28 | 29 | def test_call_check(self): 30 | """ 31 | Test call with check=True (default) and a bad returncode. Should raise a 32 | CalledProcessError 33 | """ 34 | call_backup = subprocess.call 35 | util.subprocess.call = mock_gen(rv=1) 36 | with self.assertRaises(subprocess.CalledProcessError): 37 | util.call('some command') 38 | 39 | util.subprocess.call = call_backup 40 | 41 | def test_call_no_check(self): 42 | """ 43 | Test call with check=False and a bad returncode, should return the 44 | returncode 45 | """ 46 | call_backup = subprocess.call 47 | util.subprocess.call = mock_gen(rv=1) 48 | self.assertEqual(util.call('some command', check=False), 1) 49 | util.subprocess.call = call_backup 50 | 51 | def test_translate(self): 52 | """ 53 | Spot-test the translate function with a package defined in 54 | translate.dic 55 | """ 56 | self.assertEqual(util.translate('dateutil-python'), 'pypi-python_dateutil') 57 | 58 | def test_binary_in_path(self): 59 | """ 60 | Test binary_in_path 61 | """ 62 | with tempfile.TemporaryDirectory() as tmpd: 63 | open(os.path.join(tmpd, 'testbin'), 'w').close() 64 | util.os.environ["PATH"] = tmpd 65 | self.assertTrue(util.binary_in_path('testbin')) 66 | self.assertEqual(util.os_paths, [tmpd]) 67 | 68 | def test__process_build_log_bad_patch(self): 69 | """ 70 | Test _process_build_log with a bad patch 71 | """ 72 | def isfile_mock(_): 73 | return True 74 | isfile_backup = util.os.path.isfile 75 | util.os.path.isfile = isfile_mock 76 | call_backup = util.call 77 | util.call = MagicMock() 78 | open_name = 'util.open_auto' 79 | content = "Patch #1 (bad.patch):\nHunk #1 FAILED at 1." 
80 | m_open = mock_open(read_data=content) 81 | with patch(open_name, m_open, create=True): 82 | util._process_build_log('filename') 83 | 84 | util.os.path.isfile = isfile_backup 85 | mock_call = util.call 86 | util.call = call_backup 87 | self.assertTrue(len(mock_call.mock_calls) == 3) 88 | 89 | def test_globlike_match(self): 90 | """ 91 | Test globlike_match 92 | """ 93 | match_name = ['a', 'b', 'c'] 94 | file_path = 'a/b' 95 | self.assertFalse(util.globlike_match(file_path, match_name)) 96 | 97 | match_name = ['a', 'c'] 98 | file_path = 'a/b' 99 | self.assertFalse(util.globlike_match(file_path, match_name)) 100 | 101 | match_name = ['a', 'bb*'] 102 | file_path = 'a/b' 103 | self.assertFalse(util.globlike_match(file_path, match_name)) 104 | 105 | match_name = ['a', 'b*'] 106 | file_path = 'a/ab' 107 | self.assertFalse(util.globlike_match(file_path, match_name)) 108 | 109 | match_name = ['a', '*a'] 110 | file_path = 'a/ab' 111 | self.assertFalse(util.globlike_match(file_path, match_name)) 112 | 113 | match_name = ['a', 'c*'] 114 | file_path = 'a/b' 115 | self.assertFalse(util.globlike_match(file_path, match_name)) 116 | 117 | match_name = ['a', '*c'] 118 | file_path = 'a/b' 119 | self.assertFalse(util.globlike_match(file_path, match_name)) 120 | 121 | match_name = ['a', 'b*'] 122 | file_path = 'a/b' 123 | self.assertTrue(util.globlike_match(file_path, match_name)) 124 | 125 | match_name = ['a', '*b'] 126 | file_path = 'a/b' 127 | self.assertTrue(util.globlike_match(file_path, match_name)) 128 | 129 | match_name = ['a', 'b'] 130 | file_path = 'a/b' 131 | self.assertTrue(util.globlike_match(file_path, match_name)) 132 | 133 | 134 | if __name__ == '__main__': 135 | unittest.main(buffer=True) 136 | -------------------------------------------------------------------------------- /autospec/pypidata.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | import json 4 | import os 5 | import subprocess 6 | import sys 7 | import tempfile 8 | 9 | import download 10 | import util 11 | 12 | 13 | def pip_env(): 14 | """Generate a copy of os.environ appropriate for pip.""" 15 | env = os.environ.copy() 16 | env["PYTHON_KEYRING_BACKEND"] = "keyring.backends.null.Keyring" 17 | return env 18 | 19 | 20 | def pkg_search(name): 21 | """Query the pypi json API for name and return True if found.""" 22 | query = f"https://pypi.org/pypi/{name}/json/" 23 | resp = download.do_curl(query) 24 | if resp is not None: 25 | return True 26 | else: 27 | return False 28 | 29 | 30 | def fixup_pypi_prefix(name): 31 | """Try and chop off the 'pypi-' or 'python-' prefix for names.""" 32 | name = name.lower().replace('-', '_') 33 | for prefix in ["pypi_", "python_"]: 34 | if name.startswith(prefix): 35 | name = name[len(prefix):] 36 | return name 37 | 38 | 39 | def get_pypi_name(name, miss=False): 40 | """Try and verify the pypi name for a given package name.""" 41 | # normalize the name for matching as pypi is case insensitve for search 42 | name = name.lower().replace('-', '_') 43 | # Common case is the name and the pypi name match 44 | if pkg_search(name): 45 | return name 46 | # Maybe we have a prefix 47 | name = fixup_pypi_prefix(name) 48 | if pkg_search(name): 49 | return name 50 | # Some cases where search fails (Sphinx) 51 | # Just try the name we were given 52 | if miss: 53 | return "" 54 | return name 55 | 56 | 57 | def _print_command_error(cmd, proc): 58 | if isinstance(cmd, list): 59 | cmd = " ".join(cmd) 60 | util.print_error(f"Command `{cmd}` failed:") 
61 | for line in proc.stderr.decode('utf-8', errors='surrogateescape').splitlines(): 62 | util.print_error(line) 63 | 64 | 65 | def get_pypi_metadata(name): 66 | """Get metadata for a pypi package.""" 67 | show = [] 68 | # Create virtenv to do the pip install (needed for pip show) 69 | with tempfile.TemporaryDirectory() as tdir: 70 | cmd = ["virtualenv", "--no-periodic-update", tdir] 71 | proc = subprocess.run(cmd, capture_output=True) 72 | if proc.returncode != 0: 73 | _print_command_error(cmd, proc) 74 | return "" 75 | cmd = f"source bin/activate && pip install {name.removeprefix('pypi_')}" 76 | proc = subprocess.run(cmd, cwd=tdir, shell=True, capture_output=True, 77 | env=pip_env()) 78 | if proc.returncode != 0: 79 | _print_command_error(cmd, proc) 80 | return "" 81 | cmd = f"source bin/activate &> /dev/null && pip show {name.removeprefix('pypi_')}" 82 | proc = subprocess.run(cmd, cwd=tdir, shell=True, capture_output=True, 83 | env=pip_env()) 84 | if proc.returncode != 0: 85 | _print_command_error(cmd, proc) 86 | return "" 87 | show = proc.stdout.decode('utf-8', errors='surrogateescape').splitlines() 88 | # Parse pip show for relevent information 89 | metadata = {} 90 | for line in show: 91 | if line.startswith("Name: "): 92 | # 'Name: pypi-name' 93 | # normalize names -> lowercase and dash to underscore 94 | metadata["name"] = line.split()[1].lower().replace('-', '_') 95 | elif line.startswith("Summary: "): 96 | # 'Summary: ' 97 | try: 98 | metadata["summary"] = line.split(maxsplit=1)[1] 99 | except IndexError: 100 | # No Summary (haven't seen this case before though) 101 | metadata["summary"] = "" 102 | elif line.startswith("Requires: "): 103 | # 'Requires: dep1, dep2 104 | # normalize names -> lowercase and dash to underscore 105 | try: 106 | reqs = [item.strip().lower().replace('-', '_') for item in line.split(maxsplit=1)[1].split(",")] 107 | except IndexError: 108 | # No Requires 109 | reqs = [] 110 | metadata["requires"] = reqs 111 | 112 | return json.dumps(metadata) 113 | 114 | 115 | def main(): 116 | """Standalone pypi metadata query entry point.""" 117 | pkg_name = sys.argv[1] 118 | pypi_name = get_pypi_name(pkg_name) 119 | if not pypi_name: 120 | util.print_fatal(f"Couldn't find {pkg_name} in pypi") 121 | sys.exit(1) 122 | pypi_metadata = get_pypi_metadata(pypi_name) 123 | print(pypi_metadata) 124 | 125 | 126 | if __name__ == '__main__': 127 | main() 128 | -------------------------------------------------------------------------------- /tests/builderrors: -------------------------------------------------------------------------------- 1 | ! 
swig is not installed|perl(swig) 2 | ExtUtils::Depends not installed|perl(ExtUtils::Depends) 3 | Could not find swig|swig 4 | Did not find swig|swig 5 | exec: swig: not found|swig 6 | mate-common.m4 not found|mate-common-dev 7 | -- Could NOT find swig|swig 8 | -- Could not find wpa_supplicant|wpa_supplicant 9 | -- Could not find xdg-open|xdg-utils 10 | -- swig not found.|swig 11 | etc etc /usr/bin/swig not found|swig 12 | /bin/ld: cannot find -lz|zlib-dev 13 | /usr/bin/env: swig: No such file or directory|swig 14 | /usr/bin/python: No module named swig|swig 15 | Add the installation prefix of "swig" to CMAKE_PREFIX_PATH|swig 16 | By not providing "Findswig.cmake" in CMAKE_MODULE_PATH this|swig 17 | By not providing "swig.cmake" in CMAKE_MODULE_PATH this project|swig 18 | C library 'swig' not found|swig 19 | CMake Error at cmake/modules/swig.cmake|swig 20 | Can't locate swig in @INC (you may need to install the swig module)|perl(swig) 21 | Cannot find swig|swig 22 | Checking for swig : not found|swig 23 | Checking for swig >= 1.2 : not found|swig 24 | Checking for swig >= 1.2 : not found|swig 25 | Checking for swig development files... No|swig 26 | Checking for swig...no|swig 27 | Checking for swig: not found|swig 28 | Could NOT find swig|swig 29 | Could not find wpa_supplicant|wpa_supplicant 30 | Could not find xdg-open|xdg-utils 31 | Could not find a package configuration file provided by "swig" etc etc|swig 32 | Could not find suitable distribution for Requirement.parse('swig')|swig 33 | Dependency swig found: NO|swig 34 | Dependency testpkg found: NO (tried pkgconfig and cmake)|pkgconfig(testpkg) 35 | Dependency testpkg found: NO (tried pkgconfig)|pkgconfig(testpkg) 36 | Download error on https://pypi.python.org/simple/swig/|pypi(swig) 37 | Downloading https://test.python.org/packages/pkg/t/swig/1.23|swig 38 | ERROR: dependencies 'swig' are not available for package 'something'|R-swig 39 | ERROR: dependencies 'swig', 'swig2' are not available for package 'something'|R-swig 40 | ERROR: dependencies ‘swig’ are not available for package ‘something’|R-swig 41 | ERROR: dependencies ‘swig’, ‘swig2’ are not available for package ‘something’|R-swig 42 | ERROR: dependency 'swig' is not available for package 'something'|R-swig 43 | ERROR: dependency ‘swig’ is not available for package ‘something’|R-swig 44 | Error: Unable to find swig|swig 45 | Error: package 'swig' required by|R-swig 46 | Error: package ‘swig’ required by|R-swig 47 | ImportError: No module named 'swig'|pypi(swig) 48 | ImportError: No module named swig|swig 49 | ImportError: No module named swig|pypi(swig) 50 | ImportError: cannot import name swig|swig 51 | ImportError: swig module missing|swig 52 | ImportError:..*: No module named swig|pypi(swig) 53 | ModuleNotFoundError: No module named swig|pypi(swig) 54 | ModuleNotFoundError: No module named 'foo-bar'|pypi(foo_bar) 55 | Native dependency 'swig' not found|pkgconfig(swig) 56 | No local packages or working download links found for swig|pypi(swig) 57 | No matching distribution found for swig|pypi(swig) 58 | No package 'swig' found|pkgconfig(swig) 59 | No rule to make target `swig', etc|swig 60 | Package 'swig', required by 'something', not found|pkgconfig(swig) 61 | Package which this enhances but not available for checking: 'swig'|R-swig 62 | Package which this enhances but not available for checking: ‘swig’|R-swig 63 | Perhaps you should add the directory containing `swig.pc'|pkgconfig(swig) 64 | Program swig found: NO|swig 65 | Target 't' can't be generated as 'swig' could not 
be found|swig 66 | Unable to `import swig`|swig 67 | Unable to find 'swig'|swig 68 | Warning: no usable swig found|swig 69 | Warning: prerequisite swig 8 not found.|perl(swig) 70 | You need swig to build this program.|swig 71 | "swig" with any of the following names|swig 72 | checking for library containing swig... no|swig 73 | checking for perl module swig 7... no|perl(swig) 74 | checking for something in swig... no|swig 75 | checking for swig in default path... not found|swig 76 | checking for swig support... no|swig 77 | checking for swig support... no|swig 78 | checking for swig with pkg-config... no|swig 79 | checking for swig... configure: error|swig 80 | checking for swig... no|swig 81 | checking for swig... not found|swig 82 | checking for swig... not_found|swig 83 | checking swig... no|swig 84 | configure: error: Cannot find swig. Make sure|swig 85 | configure: error: Unable to locate swig|swig 86 | configure: error: pkg-config missing swig|swig 87 | configure: error: swig is required to build|swig 88 | configure: error: swig not found|swig 89 | dependency testpkg found: NO (tried pkgconfig and cmake)|pkgconfig(testpkg) 90 | dependency testpkg found: NO (tried pkgconfig)|pkgconfig(testpkg) 91 | fatal error: swig: No such file or directory|swig 92 | make: swig: Command not found|swig 93 | make: help2man: No such file or directory|help2man 94 | swig 8 is required to configure this module; please install it or upgrade your CPAN/CPANPLUS shell.|swig 95 | swig tool not found or not executable|swig 96 | swig validation tool not found or not executable|swig 97 | swig: command not found|swig 98 | there is no package called 'swig'|R-swig 99 | there is no package called ‘swig’|R-swig 100 | unable to execute 'swig': No such file or directory|swig 101 | warning: failed to load external entity "/usr/share/sgml/docbook/xsl-stylesheets/something"|docbook-xml 102 | which: no swig in (/usr/bin/swig/)|swig 103 | you may need to install the swig module|perl(swig) 104 | -------------------------------------------------------------------------------- /tests/test_download.py: -------------------------------------------------------------------------------- 1 | from enum import Enum, auto 2 | import unittest 3 | from unittest.mock import patch, mock_open, call 4 | 5 | import pycurl 6 | 7 | import download 8 | 9 | 10 | class MockOpts(Enum): 11 | URL = auto() 12 | WRITEDATA = auto() 13 | POSTFIELDS = auto() 14 | FOLLOWLOCATION = auto() 15 | FAILONERROR = auto() 16 | CONNECTTIMEOUT = auto() 17 | TIMEOUT = auto() 18 | LOW_SPEED_LIMIT = auto() 19 | LOW_SPEED_TIME = auto() 20 | 21 | 22 | def init_curl_instance(mock_curl): 23 | instance = mock_curl.return_value 24 | instance.URL = MockOpts.URL 25 | instance.FOLLOWLOCATION = MockOpts.FOLLOWLOCATION 26 | instance.FAILONERROR = MockOpts.FAILONERROR 27 | instance.WRITEDATA = MockOpts.WRITEDATA 28 | instance.POSTFIELDS = MockOpts.POSTFIELDS 29 | return instance 30 | 31 | 32 | def test_opts(*opts): 33 | if not opts: 34 | raise Exception("no curl options specified") 35 | if len(opts) != 2: 36 | raise Exception("expected two args to setopt()") 37 | key, val = opts 38 | if key == MockOpts.WRITEDATA: 39 | val.write(b'foobar') 40 | 41 | 42 | class TestDownload(unittest.TestCase): 43 | 44 | @patch('download.pycurl.Curl') 45 | def test_download_get_success_no_dest(self, test_curl): 46 | """ 47 | Test successful GET request when dest is not set. 
48 | """ 49 | instance = init_curl_instance(test_curl) 50 | instance.setopt.side_effect = test_opts 51 | data = download.do_curl("foo") 52 | self.assertEqual(b'foobar', data.getvalue()) 53 | 54 | @patch('download.pycurl.Curl') 55 | def test_download_set_basic(self, test_curl): 56 | """ 57 | Test curl option settings set by default 58 | """ 59 | instance = init_curl_instance(test_curl) 60 | instance.setopt.side_effect = test_opts 61 | data = download.do_curl("foo") 62 | calls = [ 63 | call().setopt(MockOpts.URL, 'foo'), 64 | call().setopt(MockOpts.FOLLOWLOCATION, True), 65 | call().setopt(MockOpts.FAILONERROR, True), 66 | ] 67 | test_curl.assert_has_calls(calls) 68 | 69 | @patch('download.pycurl.Curl') 70 | def test_download_set_post(self, test_curl): 71 | """ 72 | Test setting of POSTFIELDS curl option 73 | """ 74 | instance = init_curl_instance(test_curl) 75 | instance.setopt.side_effect = test_opts 76 | data = download.do_curl("foo", post='postdata') 77 | calls = [ 78 | call().setopt(MockOpts.POSTFIELDS, 'postdata'), 79 | ] 80 | test_curl.assert_has_calls(calls) 81 | 82 | @patch('download.pycurl.Curl') 83 | def test_download_get_failure_no_dest(self, test_curl): 84 | """ 85 | Test failed GET request when dest is not set. 86 | """ 87 | instance = init_curl_instance(test_curl) 88 | instance.setopt.side_effect = test_opts 89 | instance.perform.side_effect = pycurl.error 90 | data = download.do_curl("foo") 91 | self.assertIsNone(data) 92 | 93 | @patch('download.sys.exit') 94 | @patch('download.pycurl.Curl') 95 | def test_download_get_failure_fatal(self, test_curl, test_exit): 96 | """ 97 | Test failed GET request when is_fatal is set. 98 | """ 99 | instance = init_curl_instance(test_curl) 100 | instance.setopt.side_effect = test_opts 101 | instance.perform.side_effect = pycurl.error 102 | data = download.do_curl("foo", is_fatal=True) 103 | test_exit.assert_called_once_with(1) 104 | 105 | @patch('download.open', new_callable=mock_open) 106 | @patch('download.pycurl.Curl') 107 | def test_download_get_success_dest(self, test_curl, test_open): 108 | """ 109 | Test successful GET request when dest is set. 110 | """ 111 | instance = init_curl_instance(test_curl) 112 | instance.setopt.side_effect = test_opts 113 | data = download.do_curl("foo", "testdest") 114 | test_open.assert_called_once_with('testdest', 'wb') 115 | test_open().write.assert_called_once_with(b'foobar') 116 | 117 | @patch('download.os.path.exists') 118 | @patch('download.open', new_callable=mock_open) 119 | @patch('download.pycurl.Curl') 120 | def test_download_get_write_fail_dest(self, test_curl, test_open, test_path): 121 | """ 122 | Test failure to write to dest after successful GET request. 123 | """ 124 | instance = init_curl_instance(test_curl) 125 | instance.setopt.side_effect = test_opts 126 | test_open.side_effect = IOError 127 | test_path.return_value = None 128 | data = download.do_curl("foo", "testdest") 129 | self.assertIsNone(data) 130 | 131 | @patch('download.sys.exit') 132 | @patch('download.os.path.exists') 133 | @patch('download.open') 134 | @patch('download.pycurl.Curl') 135 | def test_download_write_fail_fatal(self, test_curl, test_open, test_path, test_exit): 136 | """ 137 | Test fatal failure to write to dest after successful GET request. 
138 | """ 139 | instance = init_curl_instance(test_curl) 140 | instance.setopt.side_effect = test_opts 141 | test_open.side_effect = IOError 142 | test_path.return_value = None 143 | data = download.do_curl("foo", "testdest", is_fatal=True) 144 | test_exit.assert_called_once_with(1) 145 | 146 | @patch('download.os.unlink') 147 | @patch('download.os.path.exists') 148 | @patch('download.open') 149 | @patch('download.pycurl.Curl') 150 | def test_download_write_fail_remove_dest(self, test_curl, test_open, test_path, test_unlink): 151 | """ 152 | Test removal of dest following a write failure. 153 | """ 154 | instance = init_curl_instance(test_curl) 155 | instance.setopt.side_effect = test_opts 156 | test_open.side_effect = IOError 157 | test_path.return_value = True 158 | data = download.do_curl("foo", "testdest") 159 | test_path.assert_called_once_with("testdest") 160 | test_unlink.assert_called_once_with("testdest") 161 | 162 | 163 | if __name__ == '__main__': 164 | unittest.main(buffer=True) 165 | -------------------------------------------------------------------------------- /tests/test_check.py: -------------------------------------------------------------------------------- 1 | import os 2 | import tempfile 3 | import unittest 4 | from unittest.mock import mock_open, patch 5 | 6 | import buildreq 7 | import check 8 | import config 9 | import tarball 10 | 11 | def mock_generator(rv=None): 12 | def mock_f(*args, **kwargs): 13 | return rv 14 | 15 | return mock_f 16 | 17 | 18 | class TestTest(unittest.TestCase): 19 | backup_isfile = check.os.path.isfile 20 | 21 | @classmethod 22 | def setUpClass(self): 23 | self.open_name = 'check.util.open_auto' 24 | check.os.path.isfile = mock_generator(True) 25 | 26 | @classmethod 27 | def tearDownClass(self): 28 | check.os.path.isfile = self.backup_isfile 29 | 30 | def setUp(self): 31 | check.tests_config = '' 32 | 33 | def test_check_regression(self): 34 | """ 35 | Test check_regression 36 | """ 37 | def mock_parse_log(log): 38 | return ',120,100,20,0,0' 39 | 40 | parse_log_backup = check.count.parse_log 41 | check.count.parse_log = mock_parse_log 42 | m_open = mock_open() 43 | open_name = 'util.open' 44 | with patch(open_name, m_open, create=True): 45 | check.check_regression('pkgdir', False, -1) 46 | 47 | check.count.parse_log = parse_log_backup 48 | 49 | exp_call = unittest.mock.call().write('Total : 120\n' 50 | 'Pass : 100\n' 51 | 'Fail : 20\n' 52 | 'Skip : 0\n' 53 | 'XFail : 0\n') 54 | self.assertIn(exp_call, m_open.mock_calls) 55 | 56 | def test_check_regression_multi(self): 57 | """ 58 | Test check_regression with multiple results 59 | """ 60 | def mock_parse_log(log): 61 | return 'test-a,120,100,20,0,0\ntest-b,10,5,3,2,1' 62 | 63 | parse_log_backup = check.count.parse_log 64 | check.count.parse_log = mock_parse_log 65 | m_open = mock_open() 66 | open_name = 'util.open' 67 | with patch(open_name, m_open, create=True): 68 | check.check_regression('pkgdir', False, -1) 69 | 70 | check.count.parse_log = parse_log_backup 71 | 72 | exp_call = unittest.mock.call().write('Package : test-a\n' 73 | 'Total : 120\n' 74 | 'Pass : 100\n' 75 | 'Fail : 20\n' 76 | 'Skip : 0\n' 77 | 'XFail : 0\n' 78 | 'Package : test-b\n' 79 | 'Total : 10\n' 80 | 'Pass : 5\n' 81 | 'Fail : 3\n' 82 | 'Skip : 2\n' 83 | 'XFail : 1\n') 84 | self.assertIn(exp_call, m_open.mock_calls) 85 | 86 | def test_scan_for_tests_makecheck_in(self): 87 | """ 88 | Test scan_for_tests with makecheck suite 89 | """ 90 | reqs = buildreq.Requirements("") 91 | conf = config.Config("") 92 | tcontent = 
tarball.Content("", "", "", [], conf, "") 93 | listdir_backup = os.listdir 94 | check.os.listdir = mock_generator(['Makefile.in']) 95 | content = 'check:' 96 | m_open = mock_open(read_data=content) 97 | with patch(self.open_name, m_open, create=True): 98 | conf.default_pattern = "configure" 99 | check.scan_for_tests('pkgdir', conf, reqs, tcontent) 100 | 101 | check.os.listdir = listdir_backup 102 | self.assertEqual(check.tests_config, 103 | 'make %{?_smp_mflags} check') 104 | 105 | def test_scan_for_tests_makecheck_am(self): 106 | """ 107 | Test scan_for_tests with makecheck suite via Makefile.am 108 | """ 109 | reqs = buildreq.Requirements("") 110 | conf = config.Config("") 111 | tcontent = tarball.Content("", "", "", [], conf, "") 112 | listdir_backup = os.listdir 113 | check.os.listdir = mock_generator(['Makefile.am']) 114 | m_open = mock_open() 115 | with patch(self.open_name, m_open, create=True): 116 | conf.default_pattern = "configure_ac" 117 | check.scan_for_tests('pkgdir', conf, reqs, tcontent) 118 | 119 | check.os.listdir = listdir_backup 120 | self.assertEqual(check.tests_config, 121 | 'make %{?_smp_mflags} check') 122 | 123 | def test_scan_for_tests_perlcheck_PL(self): 124 | """ 125 | Test scan_for_tests with perlcheck suite 126 | """ 127 | reqs = buildreq.Requirements("") 128 | conf = config.Config("") 129 | tcontent = tarball.Content("", "", "", [], conf, "") 130 | listdir_backup = os.listdir 131 | check.os.listdir = mock_generator(['Makefile.PL']) 132 | conf.default_pattern = "cpan" 133 | check.scan_for_tests('pkgdir', conf, reqs, tcontent) 134 | check.os.listdir = listdir_backup 135 | self.assertEqual(check.tests_config, 'make TEST_VERBOSE=1 test') 136 | 137 | def test_scan_for_tests_perlcheck_in(self): 138 | """ 139 | Test scan_for_tests with perlcheck suite via Makefile.in 140 | """ 141 | reqs = buildreq.Requirements("") 142 | conf = config.Config("") 143 | tcontent = tarball.Content("", "", "", [], conf, "") 144 | listdir_backup = os.listdir 145 | check.os.listdir = mock_generator(['Makefile.in']) 146 | content = 'test:' 147 | m_open = mock_open(read_data=content) 148 | with patch(self.open_name, m_open, create=True): 149 | conf.default_pattern = "cpan" 150 | check.scan_for_tests('pkgdir', conf, reqs, tcontent) 151 | 152 | check.os.listdir = listdir_backup 153 | self.assertEqual(check.tests_config, 'make TEST_VERBOSE=1 test') 154 | 155 | def test_scan_for_tests_cmake(self): 156 | """ 157 | Test scan_for_tests with cmake suite 158 | """ 159 | reqs = buildreq.Requirements("") 160 | conf = config.Config("") 161 | tcontent = tarball.Content("", "", "", [], conf, "") 162 | listdir_backup = os.listdir 163 | check.os.listdir = mock_generator(['CMakeLists.txt']) 164 | content = 'enable_testing' 165 | m_open = mock_open(read_data=content) 166 | with patch(self.open_name, m_open, create=True): 167 | conf.default_pattern = "cmake" 168 | check.scan_for_tests('pkgdir', conf, reqs, tcontent) 169 | 170 | check.os.listdir = listdir_backup 171 | self.assertEqual(check.tests_config, 172 | 'cd clr-build; make test') 173 | 174 | def test_scan_for_tests_tox_requires(self): 175 | """ 176 | Test scan_for_tests with tox.ini in the files list, should add several 177 | build requirements 178 | """ 179 | reqs = buildreq.Requirements("") 180 | conf = config.Config("") 181 | tcontent = tarball.Content("", "", "", [], conf, "") 182 | listdir_backup = os.listdir 183 | check.os.listdir = mock_generator(['tox.ini']) 184 | check.scan_for_tests('pkgdir', conf, reqs, tcontent) 185 | check.os.listdir = 
listdir_backup 186 | self.assertEqual(reqs.buildreqs, 187 | set(['pypi-tox', 188 | 'pypi-pytest', 189 | 'pypi-virtualenv', 190 | 'pypi-pluggy', 191 | 'pypi(py)'])) 192 | 193 | 194 | if __name__ == "__main__": 195 | unittest.main(buffer=True) 196 | -------------------------------------------------------------------------------- /autospec/check.py: -------------------------------------------------------------------------------- 1 | #!/bin/true 2 | # 3 | # test.py - part of autospec 4 | # Copyright (C) 2015 Intel Corporation 5 | # 6 | # This program is free software: you can redistribute it and/or modify 7 | # it under the terms of the GNU General Public License as published by 8 | # the Free Software Foundation, either version 3 of the License, or 9 | # (at your option) any later version. 10 | # 11 | # This program is distributed in the hope that it will be useful, 12 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 13 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 14 | # GNU General Public License for more details. 15 | # 16 | # You should have received a copy of the GNU General Public License 17 | # along with this program. If not, see . 18 | # 19 | # Deduce and emmit the patterns for %check 20 | # 21 | 22 | import os 23 | import re 24 | 25 | import count 26 | import util 27 | 28 | tests_config = "" 29 | 30 | 31 | def check_regression(pkg_dir, skip_tests, test_round): 32 | """Check the build log for test regressions using the count module.""" 33 | if skip_tests: 34 | return 35 | 36 | log_path = os.path.join(pkg_dir, 'results', 'build.log') 37 | result = count.parse_log(log_path) 38 | if len(result) == 0 or result[0:2] == ',0': 39 | log_path = os.path.join(pkg_dir, 'results', f"round{test_round}-build.log") 40 | result = count.parse_log(log_path) 41 | 42 | titles = [('Package', 'package name', 1), 43 | ('Total', 'total tests', 1), 44 | ('Pass', 'total passing', 1), 45 | ('Fail', 'total failing', 0), 46 | ('Skip', 'tests skipped', 0), 47 | ('XFail', 'expected fail', 0)] 48 | res_str = "" 49 | for line in result.strip('\n').split('\n'): 50 | s_line = line.split(',') 51 | for idx, title in enumerate(titles): 52 | if s_line[idx]: 53 | if (s_line[idx] != '0') or (title[2] > 0): 54 | print("{}: {}".format(title[1], s_line[idx])) 55 | res_str += "{} : {}\n".format(title[0], s_line[idx]) 56 | 57 | util.write_out(os.path.join(pkg_dir, "testresults"), res_str) 58 | 59 | 60 | def scan_for_tests(src_dir, config, requirements, content): 61 | """Scan source directory for test files and set tests_config accordingly.""" 62 | global tests_config 63 | 64 | if config.config_opts.get('skip_tests') or tests_config: 65 | return 66 | 67 | make_command = "ninja" if config.config_opts.get('use_ninja') else "make" 68 | makeflags = "%{?_smp_mflags} " if config.parallel_build else "" 69 | make_check = "{} {}check".format(make_command, makeflags) 70 | cmake_check = "{} test".format(make_command) 71 | make_check_openmpi = "module load openmpi\nexport OMPI_MCA_rmaps_base_oversubscribe=1\n" \ 72 | "{} {}check\nmodule unload openmpi".format(make_command, makeflags) 73 | cmake_check_openmpi = "module load openmpi\nexport OMPI_MCA_rmaps_base_oversubscribe=1\n" \ 74 | "{} test\nmodule unload openmpi".format(make_command) 75 | 76 | if config.config_opts.get('allow_test_failures'): 77 | make_check_openmpi = "module load openmpi\nexport OMPI_MCA_rmaps_base_oversubscribe=1\n" \ 78 | "{} {}check || :\nmodule unload openmpi".format(make_command, makeflags) 79 | cmake_check_openmpi = "module load 
openmpi\nexport OMPI_MCA_rmaps_base_oversubscribe=1\n" \ 80 | "{} test || :\nmodule unload openmpi".format(make_command) 81 | 82 | perl_check = "{} TEST_VERBOSE=1 test".format(make_command) 83 | meson_check = "meson test -C builddir --print-errorlogs" 84 | if config.config_opts.get('allow_test_failures'): 85 | make_check += " || :" 86 | cmake_check += " || :" 87 | perl_check += " || :" 88 | meson_check += " || :" 89 | 90 | testsuites = { 91 | "makecheck": make_check, 92 | "perlcheck": perl_check, 93 | "cmake": "cd clr-build; " + cmake_check, 94 | "meson": meson_check, 95 | } 96 | if config.config_opts.get('32bit'): 97 | testsuites["makecheck"] += "\ncd ../build32;\n" + make_check + " || :" 98 | testsuites["cmake"] += "\ncd ../../build32/clr-build32;\n" + cmake_check + " || :" 99 | testsuites["meson"] += "\ncd ../build32;\n" + meson_check + " || :" 100 | if config.config_opts.get('use_avx2'): 101 | testsuites["makecheck"] += "\ncd ../buildavx2;\n" + make_check + " || :" 102 | testsuites["cmake"] += "\ncd ../../buildavx2/clr-build-avx2;\n" + cmake_check + " || :" 103 | testsuites["meson"] += "\ncd ../buildavx2;\n" + meson_check + " || :" 104 | if config.config_opts.get('use_avx512'): 105 | testsuites["makecheck"] += "\ncd ../buildavx512;\n" + make_check + " || :" 106 | testsuites["cmake"] += "\ncd ../../buildavx512/clr-build-avx512;\n" + cmake_check + " || :" 107 | testsuites["meson"] += "\ncd ../buildavx512;\n" + meson_check + " || :" 108 | if config.config_opts.get('use_apx'): 109 | testsuites["makecheck"] += "\ncd ../buildapx;\n" + make_check + " || :" 110 | testsuites["cmake"] += "\ncd ../../buildapx/clr-build-apx;\n" + cmake_check + " || :" 111 | testsuites["meson"] += "\ncd ../buildapx;\n" + meson_check + " || :" 112 | if config.config_opts.get('openmpi'): 113 | testsuites["makecheck"] += "\ncd ../build-openmpi;\n" + make_check_openmpi 114 | testsuites["cmake"] += "\ncd ../../build-openmpi/clr-build-openmpi;\n" + cmake_check_openmpi 115 | 116 | files = os.listdir(src_dir) 117 | 118 | if config.default_pattern == "cmake": 119 | makefile_path = os.path.join(src_dir, "CMakeLists.txt") 120 | if not os.path.isfile(makefile_path): 121 | return 122 | 123 | if "enable_testing" in util.open_auto(makefile_path).read(): 124 | tests_config = testsuites["cmake"] 125 | 126 | elif config.default_pattern in ["cpan", "configure", "configure_ac", "autogen"] and "Makefile.in" in files: 127 | makefile_path = os.path.join(src_dir, "Makefile.in") 128 | if os.path.isfile(makefile_path): 129 | with util.open_auto(makefile_path, 'r') as make_fp: 130 | lines = make_fp.readlines() 131 | for line in lines: 132 | if line.startswith("check:"): 133 | tests_config = testsuites["makecheck"] 134 | break 135 | if line.startswith("test:"): 136 | tests_config = testsuites["perlcheck"] 137 | break 138 | 139 | elif config.default_pattern in ["configure", "configure_ac", "autogen"] and "Makefile.am" in files: 140 | tests_config = testsuites["makecheck"] 141 | 142 | elif config.default_pattern in ["cpan"] and "Makefile.PL" in files: 143 | tests_config = testsuites["perlcheck"] 144 | 145 | elif config.default_pattern == "R": 146 | tests_config = "export _R_CHECK_FORCE_SUGGESTS_=false\n" \ 147 | "R CMD check --no-manual --no-examples --no-codoc . 
" \ 148 | "|| :" 149 | elif config.default_pattern == "meson": 150 | found_tests = False 151 | makefile_path = os.path.join(src_dir, "meson.build") 152 | if not os.path.isfile(makefile_path): 153 | return 154 | for dirpath, _, files in os.walk(src_dir): 155 | for f in files: 156 | if f == "meson.build": 157 | with util.open_auto(os.path.join(dirpath, f)) as fp: 158 | if any(re.search(r'^\s*test\s*\(.+', line) for line in fp): 159 | found_tests = True 160 | tests_config = testsuites["meson"] 161 | break 162 | if found_tests: 163 | break 164 | 165 | if "tox.ini" in files: 166 | requirements.add_buildreq("pypi-tox") 167 | requirements.add_buildreq("pypi-pytest") 168 | requirements.add_buildreq("pypi-virtualenv") 169 | requirements.add_buildreq("pypi-pluggy") 170 | requirements.add_buildreq("pypi(py)") 171 | 172 | 173 | def load_specfile(specfile): 174 | """Load the specfile object.""" 175 | specfile.tests_config = tests_config 176 | -------------------------------------------------------------------------------- /autospec/license.py: -------------------------------------------------------------------------------- 1 | #!/bin/true 2 | # 3 | # license.py - part of autospec 4 | # Copyright (C) 2015 Intel Corporation 5 | # 6 | # This program is free software: you can redistribute it and/or modify 7 | # it under the terms of the GNU General Public License as published by 8 | # the Free Software Foundation, either version 3 of the License, or 9 | # (at your option) any later version. 10 | # 11 | # This program is distributed in the hope that it will be useful, 12 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 13 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 14 | # GNU General Public License for more details. 15 | # 16 | # You should have received a copy of the GNU General Public License 17 | # along with this program. If not, see . 18 | # 19 | # Deduce the upstream version from the package content 20 | # based on both license file pattern matching and 21 | # exact matches on hashes of the COPYING file 22 | # 23 | 24 | import os 25 | import re 26 | import shlex 27 | import sys 28 | import urllib.parse 29 | 30 | import chardet 31 | import download 32 | import util 33 | 34 | default_license = "TO BE DETERMINED" 35 | 36 | licenses = [] 37 | license_files = [] 38 | hashes = dict() 39 | 40 | 41 | def process_licenses(lics, translations, blacklist): 42 | """Handle licenses string from the license server. 43 | 44 | The license server response may contain multiple space-separated licenses. 45 | Add each license individually. 46 | """ 47 | for lic in lics.split(): 48 | add_license(lic, translations, blacklist) 49 | 50 | 51 | def add_license(lic, translations, blacklist): 52 | """Add licenses from the server. 53 | 54 | Add license from license string lic after checking for duplication or 55 | presence in the blacklist. Returns False if no license were added, True 56 | otherwise. 
57 | """ 58 | lic = lic.strip().strip(',') 59 | result = False 60 | 61 | # Translate the license if a translation exists 62 | real_lic_str = translations.get(lic, lic) 63 | real_lics = real_lic_str.split() 64 | for real_lic in real_lics: 65 | if real_lic in blacklist: 66 | continue 67 | elif real_lic in licenses: 68 | result = True 69 | else: 70 | result = True 71 | licenses.append(real_lic) 72 | 73 | return result 74 | 75 | 76 | def decode_license(license): 77 | """Try and decode the license string.""" 78 | def try_with_charset(license, charset): 79 | if not charset: 80 | return 81 | 82 | try: 83 | return license.decode(charset) 84 | except UnicodeDecodeError: 85 | if charset in ('ISO-8859-1', 'ISO-8859-15'): 86 | if b'\xff' in license: 87 | return try_with_charset(license, 'ISO-8859-13') 88 | if b'\xd2' in license and b'\xd3' in license: 89 | return try_with_charset(license, 'mac_roman') 90 | 91 | return try_with_charset(license, chardet.detect(license)['encoding']) 92 | 93 | 94 | def license_from_copying_hash(copying, srcdir, config, name): 95 | """Add licenses based on the hash of the copying file.""" 96 | try: 97 | data = util.get_contents(copying) 98 | except FileNotFoundError: 99 | # LICENSE file is a bad symlink (qemu-4.2.0!) 100 | return 101 | 102 | if data.startswith(b'#!'): 103 | # Not a license if this is a script 104 | return 105 | 106 | data = decode_license(data) 107 | if not data: 108 | return 109 | 110 | hash_sum = util.get_sha1sum(copying) 111 | 112 | if config.license_fetch: 113 | values = {'hash': hash_sum, 'text': data, 'package': name} 114 | data = urllib.parse.urlencode(values) 115 | data = data.encode('utf-8') 116 | 117 | buffer = download.do_curl(config.license_fetch, post=data, is_fatal=True) 118 | response = buffer.getvalue() 119 | page = response.decode('utf-8').strip() 120 | if page: 121 | print("License : ", page, " (server) (", hash_sum, ")") 122 | process_licenses(page, config.license_translations, config.license_blacklist) 123 | 124 | if page != "none": 125 | # Strip the build source directory off the front 126 | lic_path = copying[len(srcdir):] 127 | # Strip any leading slashes 128 | while lic_path.startswith('/'): 129 | lic_path = lic_path[1:] 130 | lic_path = shlex.quote(lic_path) 131 | license_files.append(lic_path) 132 | hashes[lic_path] = hash_sum 133 | 134 | return 135 | 136 | if hash_sum in config.license_hashes: 137 | add_license(config.license_hashes[hash_sum], 138 | config.license_translations, 139 | config.license_blacklist) 140 | else: 141 | if not config.license_show: 142 | return 143 | util.print_warning("Unknown license {0} with hash {1}".format(copying, hash_sum)) 144 | hash_url = config.license_show % {'HASH': hash_sum} 145 | util.print_warning("Visit {0} to enter".format(hash_url)) 146 | 147 | 148 | def skip_license(license_path, config): 149 | """Check if a given license file path should be skipped.""" 150 | skip_name = False 151 | for skip in config.license_skips: 152 | # handle the common tempfile prefix and normalize for 153 | # skip lines without a starting '/' 154 | skip = skip if skip[0] != '' else skip[1:] 155 | skip_path = ['', 'tmp', '*'] + skip 156 | if util.globlike_match(license_path, skip_path): 157 | util.print_warning(f"Skip license detected for file at {license_path}") 158 | skip_name = True 159 | break 160 | return skip_name 161 | 162 | 163 | def scan_for_licenses(srcdir, config, pkg_name): 164 | """Scan the project directory for things we can use to guess a description and summary.""" 165 | targets = ["copyright", 166 | 
"copyright.txt", 167 | "apache-2.0", 168 | "artistic.txt", 169 | "libcurllicense", 170 | "gpl.txt", 171 | "gpl2.txt", 172 | "gplv2.txt", 173 | "notice", 174 | "copyrights", 175 | "about_bsd.txt"] 176 | # look for files that start with copying or licen[cs]e (but are 177 | # not likely scripts) or end with licen[cs]e 178 | target_pat = re.compile(r"^((copying)|(licen[cs]e)|(e[dp]l-v\d+))|(licen[cs]e)(\.(txt|xml))?|(intel simplified software license.*\.txt)$") 179 | for dirpath, dirnames, files in os.walk(srcdir): 180 | for name in files: 181 | if name.lower() in targets or target_pat.search(name.lower()): 182 | license_path = os.path.join(dirpath, name) 183 | if not skip_license(license_path, config): 184 | license_from_copying_hash(license_path, srcdir, config, pkg_name) 185 | # Also search for license texts in project trees that are 186 | # REUSE-compliant, or are in process of adopting this standard (for 187 | # example, KDE ecosystem packages). See https://reuse.software for 188 | # details. At a basic level, this layout requires a toplevel 189 | # `LICENSES` directory that includes separate files (with .txt 190 | # extension) for each license text that covers source code, data, 191 | # etc elsewhere in the project tree. A variant layout is currently 192 | # seen in the DPDK 20.11.3 tree, where the `LICENSES` directory is 193 | # named `license` instead. 194 | dirbase = os.path.basename(dirpath) 195 | if re.search(r'^(LICENSES|licenses?|licensing)$', dirbase) and re.search(r'\.txt$', name): 196 | license_path = os.path.join(dirpath, name) 197 | if not skip_license(license_path, config): 198 | license_from_copying_hash(license_path, srcdir, config, pkg_name) 199 | 200 | if not licenses: 201 | util.print_fatal(" Cannot find any license or a valid {}.license file!\n".format(pkg_name)) 202 | sys.exit(1) 203 | 204 | print("Licenses : ", " ".join(sorted(licenses))) 205 | 206 | 207 | def load_specfile(specfile): 208 | """Get licenses from the specfile content.""" 209 | specfile.licenses = licenses if licenses else [default_license] 210 | specfile.license_files = sorted(license_files) 211 | specfile.hashes = hashes 212 | -------------------------------------------------------------------------------- /autospec/git.py: -------------------------------------------------------------------------------- 1 | #!/bin/true 2 | # 3 | # git.py - part of autospec 4 | # Copyright (C) 2015 Intel Corporation 5 | # 6 | # This program is free software: you can redistribute it and/or modify 7 | # it under the terms of the GNU General Public License as published by 8 | # the Free Software Foundation, either version 3 of the License, or 9 | # (at your option) any later version. 10 | # 11 | # This program is distributed in the hope that it will be useful, 12 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 13 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 14 | # GNU General Public License for more details. 15 | # 16 | # You should have received a copy of the GNU General Public License 17 | # along with this program. If not, see . 
18 | # 19 | # Commit to git 20 | # 21 | 22 | import glob 23 | import os 24 | import subprocess 25 | import sys 26 | import tempfile 27 | 28 | from util import call, open_auto, write_out 29 | 30 | 31 | def get_autospec_info(): 32 | """Get the latest tag from autospec.""" 33 | path = os.path.dirname(sys.path[0]) 34 | git_out = tempfile.mkstemp()[1] 35 | try: 36 | call("git tag -l --sort=-v:refname", git_out, cwd=path) 37 | with open_auto(git_out) as gfile: 38 | tag = gfile.readlines()[0].strip() 39 | except Exception: 40 | tag = "" 41 | os.unlink(git_out) 42 | try: 43 | call('git log -1 --pretty=format:"%h"', git_out, cwd=path) 44 | with open_auto(git_out) as gfile: 45 | commit = gfile.readlines()[0].strip() 46 | except Exception: 47 | commit = "" 48 | os.unlink(git_out) 49 | return tag, commit 50 | 51 | 52 | def commit_to_git(config, name, success): 53 | """Update package's git tree for autospec managed changes.""" 54 | path = config.download_path 55 | call("git init -b main", stdout=subprocess.DEVNULL, cwd=path) 56 | 57 | # This config is used for setting the remote URI, so it is optional. 58 | if config.git_uri: 59 | try: 60 | call("git config --get remote.origin.url", cwd=path) 61 | except subprocess.CalledProcessError: 62 | upstream_uri = config.git_uri % {'NAME': name} 63 | call("git remote add origin %s" % upstream_uri, cwd=path) 64 | 65 | for config_file in config.config_files: 66 | call("git add %s" % config_file, cwd=path, check=False) 67 | for unit in config.sources["unit"]: 68 | call("git add %s" % unit, cwd=path) 69 | call("git add Makefile", cwd=path) 70 | call("git add upstream", cwd=path) 71 | call("bash -c 'shopt -s failglob; git add *.spec'", cwd=path) 72 | call("git add %s.tmpfiles" % name, check=False, stderr=subprocess.DEVNULL, cwd=path) 73 | call("git add %s.sysusers" % name, check=False, stderr=subprocess.DEVNULL, cwd=path) 74 | call("git add prep_prepend", check=False, stderr=subprocess.DEVNULL, cwd=path) 75 | call("git add pypi.json", check=False, stderr=subprocess.DEVNULL, cwd=path) 76 | call("git add build_prepend", check=False, stderr=subprocess.DEVNULL, cwd=path) 77 | call("git add make_prepend", check=False, stderr=subprocess.DEVNULL, cwd=path) 78 | call("git add install_prepend", check=False, stderr=subprocess.DEVNULL, cwd=path) 79 | call("git add install_append", check=False, stderr=subprocess.DEVNULL, cwd=path) 80 | call("git add series", check=False, stderr=subprocess.DEVNULL, cwd=path) 81 | # Add/remove version specific patch lists 82 | for filename in glob.glob('series.*'): 83 | base, version = filename.split('.', 1) 84 | if version in config.versions: 85 | call("git add {}".format(filename), check=False, stderr=subprocess.DEVNULL, cwd=path) 86 | else: 87 | call("git rm {}".format(filename), check=False, stderr=subprocess.DEVNULL, cwd=path) 88 | call("bash -c 'shopt -s failglob; git add -f *.asc'", check=False, stderr=subprocess.DEVNULL, cwd=path) 89 | call("bash -c 'shopt -s failglob; git add -f *.sig'", check=False, stderr=subprocess.DEVNULL, cwd=path) 90 | call("bash -c 'shopt -s failglob; git add -f *.sha256'", check=False, stderr=subprocess.DEVNULL, cwd=path) 91 | call("bash -c 'shopt -s failglob; git add -f *.sign'", check=False, stderr=subprocess.DEVNULL, cwd=path) 92 | call("bash -c 'shopt -s failglob; git add -f *.pkey'", check=False, stderr=subprocess.DEVNULL, cwd=path) 93 | call("git add configure", check=False, stderr=subprocess.DEVNULL, cwd=path) 94 | call("git add configure32", check=False, stderr=subprocess.DEVNULL, cwd=path) 95 | call("git 
add configure64", check=False, stderr=subprocess.DEVNULL, cwd=path) 96 | call("git add configure_avx2", check=False, stderr=subprocess.DEVNULL, cwd=path) 97 | call("git add configure_avx512", check=False, stderr=subprocess.DEVNULL, cwd=path) 98 | call("git add make_check_command", check=False, stderr=subprocess.DEVNULL, cwd=path) 99 | call("bash -c 'shopt -s failglob; git add *.patch'", check=False, stderr=subprocess.DEVNULL, cwd=path) 100 | call("bash -c 'shopt -s failglob; git add *.nopatch'", check=False, stderr=subprocess.DEVNULL, cwd=path) 101 | for item in config.transforms.values(): 102 | call("git add {}".format(item), check=False, stderr=subprocess.DEVNULL, cwd=path) 103 | call("git add release", cwd=path) 104 | call("git add symbols", check=False, stderr=subprocess.DEVNULL, cwd=path) 105 | call("git add symbols32", check=False, stderr=subprocess.DEVNULL, cwd=path) 106 | call("git add used_libs", check=False, stderr=subprocess.DEVNULL, cwd=path) 107 | call("git add used_libs32", check=False, stderr=subprocess.DEVNULL, cwd=path) 108 | call("git add testresults", check=False, stderr=subprocess.DEVNULL, cwd=path) 109 | call("git add profile_payload", check=False, stderr=subprocess.DEVNULL, cwd=path) 110 | call("git add options.conf", check=False, stderr=subprocess.DEVNULL, cwd=path) 111 | call("git add configure_misses", check=False, stderr=subprocess.DEVNULL, cwd=path) 112 | call("git add etc_files", check=False, stderr=subprocess.DEVNULL, cwd=path) 113 | call("git add whatrequires", check=False, stderr=subprocess.DEVNULL, cwd=path) 114 | call("git add description", check=False, stderr=subprocess.DEVNULL, cwd=path) 115 | call("git add attrs", check=False, stderr=subprocess.DEVNULL, cwd=path) 116 | 117 | # remove deprecated config files 118 | call("git rm make_install_append", check=False, stderr=subprocess.DEVNULL, cwd=path) 119 | call("git rm prep_append", check=False, stderr=subprocess.DEVNULL, cwd=path) 120 | call("git rm use_clang", check=False, stderr=subprocess.DEVNULL, cwd=path) 121 | call("git rm use_lto", check=False, stderr=subprocess.DEVNULL, cwd=path) 122 | call("git rm use_avx2", check=False, stderr=subprocess.DEVNULL, cwd=path) 123 | call("git rm fast-math", check=False, stderr=subprocess.DEVNULL, cwd=path) 124 | call("git rm broken_c++", check=False, stderr=subprocess.DEVNULL, cwd=path) 125 | call("git rm skip_test_suite", check=False, stderr=subprocess.DEVNULL, cwd=path) 126 | call("git rm optimize_size", check=False, stderr=subprocess.DEVNULL, cwd=path) 127 | call("git rm asneeded", check=False, stderr=subprocess.DEVNULL, cwd=path) 128 | call("git rm broken_parallel_build", check=False, stderr=subprocess.DEVNULL, cwd=path) 129 | call("git rm pgo", check=False, stderr=subprocess.DEVNULL, cwd=path) 130 | call("git rm unit_tests_must_pass", check=False, stderr=subprocess.DEVNULL, cwd=path) 131 | call("git rm funroll-loops", check=False, stderr=subprocess.DEVNULL, cwd=path) 132 | call("git rm keepstatic", check=False, stderr=subprocess.DEVNULL, cwd=path) 133 | call("git rm allow_test_failures", check=False, stderr=subprocess.DEVNULL, cwd=path) 134 | call("git rm no_autostart", check=False, stderr=subprocess.DEVNULL, cwd=path) 135 | call("git rm insecure_build", check=False, stderr=subprocess.DEVNULL, cwd=path) 136 | call("git rm conservative_flags", check=False, stderr=subprocess.DEVNULL, cwd=path) 137 | 138 | # add a gitignore 139 | ignorelist = [ 140 | ".*~", 141 | "*~", 142 | "*.info", 143 | "*.mod", 144 | "*.swp", 145 | ".repo-index", 146 | "*.log", 147 | 
"build.log.round*", 148 | "*.tar.*", 149 | "*.tgz", 150 | "!*.tar.*.*", 151 | "*.zip", 152 | "*.jar", 153 | "*.pom", 154 | "*.xml", 155 | "commitmsg", 156 | "results/", 157 | "rpms/", 158 | "for-review.txt", 159 | "" 160 | ] 161 | write_out(os.path.join(path, '.gitignore'), '\n'.join(ignorelist)) 162 | call("git add .gitignore", check=False, stderr=subprocess.DEVNULL, cwd=path) 163 | 164 | if success == 0: 165 | return 166 | 167 | call("git commit -a -F commitmsg ", cwd=path) 168 | call("rm commitmsg", cwd=path) 169 | -------------------------------------------------------------------------------- /tests/test_tarball.py: -------------------------------------------------------------------------------- 1 | import copy 2 | import os 3 | import unittest 4 | from collections import OrderedDict 5 | from unittest.mock import MagicMock, Mock, patch 6 | import build 7 | import config 8 | import files 9 | import tarball 10 | 11 | 12 | # Stores all test cases for dynamic tests. 13 | # In order to add more tests just add more elements to the lists provided below. 14 | 15 | CONTENT_PECL = [ 16 | 'package.xml', 17 | 'common-prefix/', 18 | 'common-prefix/md5/', 19 | 'common-prefix/md5/CMakeLists.txt', 20 | 'common-prefix/md5/md5.h', 21 | 'common-prefix/md5/md5hl.c', 22 | 'common-prefix/md5/md5cmp.c', 23 | 'common-prefix/md5/md5.c', 24 | 'common-prefix/md5/Makefile.am', 25 | 'common-prefix/md5/Makefile.in', 26 | 'common-prefix/jerror.c', 27 | 'common-prefix/sharedlib/', 28 | 'common-prefix/sharedlib/CMakeLists.txt', 29 | 'common-prefix/turbojpeg-mapfile', 30 | 'common-prefix/jdpostct.c', 31 | 'common-prefix/turbojpeg-jni.c', 32 | ] 33 | 34 | CONTENT_PREFIX = [ 35 | 'common-prefix/', 36 | 'common-prefix/md5/', 37 | 'common-prefix/md5/CMakeLists.txt', 38 | 'common-prefix/md5/md5.h', 39 | 'common-prefix/md5/md5hl.c', 40 | 'common-prefix/md5/md5cmp.c', 41 | 'common-prefix/md5/md5.c', 42 | 'common-prefix/md5/Makefile.am', 43 | 'common-prefix/md5/Makefile.in', 44 | 'common-prefix/jerror.c', 45 | 'common-prefix/sharedlib/', 46 | 'common-prefix/sharedlib/CMakeLists.txt', 47 | 'common-prefix/turbojpeg-mapfile', 48 | 'common-prefix/jdpostct.c', 49 | 'common-prefix/turbojpeg-jni.c', 50 | ] 51 | 52 | CONTENT_SUBDIR = [ 53 | 'dir1/', 54 | 'dir1/md5/', 55 | 'dir1/md5/CMakeLists.txt', 56 | 'dir1/md5/md5.h', 57 | 'dir1/md5/md5hl.c', 58 | 'dir1/md5/md5cmp.c', 59 | 'dir1/md5/md5.c', 60 | 'dir1/md5/Makefile.am', 61 | 'dir1/md5/Makefile.in', 62 | 'dir2/', 63 | 'dir2/jerror.c', 64 | 'dir2/sharedlib/', 65 | 'dir2/sharedlib/CMakeLists.txt', 66 | 'dir2/turbojpeg-mapfile', 67 | 'dir2/jdpostct.c', 68 | 'dir2/turbojpeg-jni.c', 69 | 'file.c' 70 | ] 71 | 72 | # Input for tarball.Source class tests. 
73 | # Structure: (url, destination, path, fake-content, source_type, prefix, subddir) 74 | SRC_CREATION = [ 75 | ("https://example/src-PECL.tar", "", "/tmp/src-PECL.tar", CONTENT_PECL, "tar", "common-prefix", None), 76 | ("https://example/src-non-PECL.tar", "", "/tmp/src-non-PECL.tar", CONTENT_PECL, "tar", "", "src-non-PECL"), 77 | ("https://example/src-prefix.zip", "", "/tmp/src-prefix.zip", CONTENT_PREFIX, "zip", "common-prefix", None), 78 | ("https://example/src-subdir.zip", "", "/tmp/src-subdir.zip", CONTENT_SUBDIR, "zip", "", "src-subdir"), 79 | ("https://example/src-prefix.tar", "", "/tmp/src-prefix.tar", CONTENT_PREFIX, "tar", "common-prefix", None), 80 | ("https://example/src-subdir.tar", "", "/tmp/src-subdir.tar", CONTENT_SUBDIR, "tar", "", "src-subdir"), 81 | ("https://example/src-no-extractable.tar", ":", "/tmp/src-no-extractable.tar", None, None, None, None), 82 | ] 83 | 84 | 85 | class MockSrcFile(): 86 | """Mock class for zipfile and tarfile.""" 87 | 88 | def __init__(self, path, mode): 89 | self.name = path 90 | self.mode = mode 91 | 92 | def __enter__(self): 93 | return self 94 | 95 | def __exit__(self, exc_type, exc_val, traceback): 96 | return False 97 | 98 | @classmethod 99 | def set_content(cls, content): 100 | # deep copy because the content is modified by Source 101 | cls.content = copy.deepcopy(content) 102 | 103 | def getnames(self): 104 | return self.content 105 | 106 | def namelist(self): 107 | return self.content 108 | 109 | 110 | def source_test_generator(url, destination, path, content, src_type, prefix, subdir): 111 | """Create test for tarball.Source class using generator template.""" 112 | 113 | @patch('tarball.tarfile.open', MockSrcFile) 114 | @patch('tarball.zipfile.ZipFile', MockSrcFile) 115 | @patch('tarball.tarfile.is_tarfile', Mock(return_value=True)) 116 | @patch('tarball.zipfile.is_zipfile', Mock(return_value=True)) 117 | def generator(self): 118 | """Test template.""" 119 | # Set fake content 120 | MockSrcFile.set_content(content) 121 | if os.path.basename(path) in ['src-PECL.tar']: 122 | src = tarball.Source(url, destination, path, 'phpize') 123 | else: 124 | src = tarball.Source(url, destination, path) 125 | self.assertEqual(src.type, src_type) 126 | self.assertEqual(src.prefix, prefix, f"fail for: {url}") 127 | self.assertEqual(src.subdir, subdir) 128 | 129 | return generator 130 | 131 | 132 | def name_and_version_test_generator(url, name, version): 133 | """Create test for tarball.name_and_version method.""" 134 | def generator(self): 135 | """Test template.""" 136 | conf = config.Config('/download/path') 137 | conf.parse_config_versions = Mock(return_value={}) 138 | # Test four different name/version states for tarball.Content, each in 139 | # a subtest. 
Test failures will print these state numbers for easy 140 | # identification: 141 | # 0 - no state 142 | # 1 - name only 143 | # 2 - version only 144 | # 3 - name and version 145 | for state in range(4): 146 | with self.subTest(state=state): 147 | name_arg = "" 148 | version_arg = "" 149 | if state == 1 or state == 3: 150 | name_arg = f"state.{name}" 151 | if state == 2 or state == 3: 152 | version_arg = f"state.{version}" 153 | content = tarball.Content(url, name_arg, version_arg, [], conf, '/tmp') 154 | content.config = conf 155 | pkg = build.Build() 156 | mgr = files.FileManager(conf, pkg) 157 | content.name_and_version(mgr) 158 | name_cmp = name 159 | version_cmp = version 160 | if state == 1 or state == 3: 161 | name_cmp = name_arg 162 | if state == 2 or state == 3: 163 | version_cmp = version_arg 164 | self.assertEqual(name_cmp, content.name) 165 | self.assertEqual(version_cmp, content.version) 166 | # redo without args and verify giturl is set correctly 167 | content.name = "" 168 | content.version = "" 169 | content.name_and_version(Mock()) 170 | if "github.com" in url: 171 | self.assertRegex(content.giturl, r"https://github.com/[^/]+/" + content.repo + ".git") 172 | 173 | return generator 174 | 175 | 176 | def create_dynamic_tests(): 177 | """Create dynamic tests based on content in lists and packageulrs file.""" 178 | # Create tests for tarball.Source class. 179 | for url, dest, path, content, src_type, prefix, subdir in SRC_CREATION: 180 | test_name = 'test_src_{}'.format(url) 181 | test = source_test_generator(url, dest, path, content, src_type, prefix, subdir) 182 | setattr(TestTarball, test_name, test) 183 | 184 | # Create tests for tarball.name_and_version method. 185 | with open('tests/packageurls', 'r') as pkgurls: 186 | for urlline in pkgurls.read().split('\n'): 187 | if not urlline or urlline.startswith('#'): 188 | continue 189 | (url, name, version) = urlline.split(',') 190 | test_name = 'test_name_ver_{}'.format(url) 191 | test = name_and_version_test_generator(url, name, version) 192 | setattr(TestTarball, test_name, test) 193 | 194 | 195 | class TestTarball(unittest.TestCase): 196 | """Main testing class for tarball.py. 
197 | 198 | This class would contain all static tests and dynamic tests for tarball.py 199 | """ 200 | 201 | def setUp(self): 202 | """Set up default values before start test.""" 203 | # Set strenght to 0 so it can be updated during tests 204 | conf = config.Config('/download/path') 205 | self.content = tarball.Content('', '', '', [], conf, '/tmp') 206 | conf.content = self.content 207 | 208 | @patch('tarball.os.path.isfile', Mock(return_value=True)) 209 | def test_set_gcov(self): 210 | """Test for tarball.set_gcov method.""" 211 | # Set up input values 212 | self.content.name = 'test' 213 | self.content.set_gcov() 214 | self.assertEqual(self.content.gcov_file, 'test.gcov') 215 | 216 | @patch('tarball.Source.set_prefix', Mock()) 217 | @patch('tarball.Source.extract', Mock()) 218 | def test_extract_sources(self): 219 | """Test for Content extract_sources method.""" 220 | # Set up input values 221 | main_src = tarball.Source('https://example1.tar', '', '/tmp') 222 | arch1_src = tarball.Source('https://example2.tar', '', '/tmp') 223 | arch2_src = tarball.Source('https://example3.tar', ':', '/tmp') 224 | arch3_src = tarball.Source('https://example4.tar', '', '/tmp') 225 | archives_src = [arch1_src, arch2_src, arch3_src] 226 | self.content.extract_sources(main_src, archives_src) 227 | # Sources with destination=':' should not be extracted, so method 228 | # should be called only 3 times. 229 | self.assertEqual(tarball.Source.extract.call_count, 3) 230 | 231 | 232 | # Create dynamic tests based on config file 233 | create_dynamic_tests() 234 | 235 | if __name__ == '__main__': 236 | unittest.main(buffer=True) 237 | -------------------------------------------------------------------------------- /autospec/specdescription.py: -------------------------------------------------------------------------------- 1 | #!/bin/true 2 | # 3 | # specdescription.py - part of autospec 4 | # Copyright (C) 2015 Intel Corporation 5 | # 6 | # This program is free software: you can redistribute it and/or modify 7 | # it under the terms of the GNU General Public License as published by 8 | # the Free Software Foundation, either version 3 of the License, or 9 | # (at your option) any later version. 10 | # 11 | # This program is distributed in the hope that it will be useful, 12 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 13 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 14 | # GNU General Public License for more details. 15 | # 16 | # You should have received a copy of the GNU General Public License 17 | # along with this program. If not, see . 
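# [Editor's aside: not part of autospec/specdescription.py] create_dynamic_tests() in
# tests/test_tarball.py above reads tests/packageurls as comma-separated
# "url,name,version" lines, skipping blanks and '#' comments; a hypothetical entry,
# invented purely for illustration, would be parsed like this:
example_urlline = "https://github.com/example/project/archive/v1.2.3.tar.gz,project,1.2.3"
url, name, version = example_urlline.split(',')   # expected name "project", version "1.2.3"
# [End of editor's aside]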
18 | # 19 | # objective 20 | # 21 | # create the following parts of the spec file 22 | # 23 | # %description 24 | # Summary 25 | # Group 26 | # %description 27 | # 28 | 29 | import os 30 | import re 31 | 32 | import license 33 | import util 34 | 35 | default_description = "No detailed description available" 36 | default_description_score = 0 37 | default_summary = "No detailed summary available" 38 | default_summary_score = 0 39 | 40 | 41 | def clean_license_string(lic): 42 | """Clean up license string by replacing substrings.""" 43 | if lic.find("same as") >= 0: 44 | return "" 45 | 46 | reps = [(" (", "("), 47 | (" v2", "-2"), 48 | (" v3", "-3"), 49 | (" 2", "-2"), 50 | (" 3", "-3"), 51 | (" <", "<"), 52 | (" >", ">"), 53 | ("= ", "="), 54 | ("GPL(>=-2)", "GPL-2.0+"), 55 | ("Modified", ""), 56 | ("OSI", ""), 57 | ("Approved", ""), 58 | ("Simplified", ""), 59 | ("file", ""), 60 | ("LICENSE", "")] 61 | 62 | for sub, rep in reps: 63 | lic = lic.replace(sub, rep) 64 | 65 | return lic 66 | 67 | 68 | def assign_summary(summary, score): 69 | """Assign summary to default_summary if score is greater than default_summary_score.""" 70 | global default_summary 71 | global default_summary_score 72 | if score > default_summary_score: 73 | default_summary = summary 74 | default_summary_score = score 75 | 76 | 77 | def assign_description(description, score): 78 | """Assign description to default_description if score is greater than default_description_score.""" 79 | global default_description 80 | global default_description_score 81 | if score > default_description_score: 82 | default_description = description 83 | default_description_score = score 84 | 85 | 86 | def description_from_spec(specfile, translations, blacklist): 87 | """Parse any existing RPM specfiles.""" 88 | try: 89 | with util.open_auto(specfile, 'r') as specfd: 90 | lines = specfd.readlines() 91 | except FileNotFoundError: 92 | return 93 | 94 | specdesc = "" 95 | section = False 96 | for line in lines: 97 | if line.startswith("#"): 98 | continue 99 | 100 | if line.startswith("%"): 101 | section = False 102 | 103 | excludes = ["Copyright", "see ", "("] 104 | if line.startswith("License:") and not any(e in line for e in excludes): 105 | splits = line.split(":")[1:] 106 | words = ":".join(splits).strip() 107 | if words in translations: 108 | if license.add_license(words, translations, blacklist): 109 | print("Added license from spec:", words) 110 | else: 111 | words = clean_license_string(words).split() 112 | for word in words: 113 | if ":" not in word and not word.startswith("@"): 114 | if license.add_license(word, translations, blacklist): 115 | print("Added license from spec:", word) 116 | 117 | if line.startswith("Summary: "): 118 | assign_summary(line[9:], 4) 119 | 120 | specdesc += line if section else "" 121 | # Check for %description after assigning the line to specdesc so the 122 | # %description string is not included 123 | if line.endswith("%description\n"): 124 | section = True 125 | 126 | if len(specdesc) > 10: 127 | assign_description(specdesc, 4) 128 | 129 | 130 | def description_from_pkginfo(pkginfo, translations, blacklist): 131 | """Parse existing package info files.""" 132 | try: 133 | with util.open_auto(pkginfo, 'r') as pkgfd: 134 | lines = pkgfd.readlines() 135 | except FileNotFoundError: 136 | return 137 | 138 | pkginfo = "" 139 | section = False 140 | for line in lines: 141 | if ":" in line and section: 142 | section = False 143 | 144 | excludes = ["Copyright", "see "] 145 | if line.lower().startswith("license:") and 
not any(e in line for e in excludes): 146 | splits = line.split(":")[1:] 147 | words = ":".join(splits).strip() 148 | if words in translations: 149 | if license.add_license(words, translations, blacklist): 150 | print("Added license from PKG-INFO:", words) 151 | else: 152 | words = clean_license_string(words).split() 153 | for word in words: 154 | if ":" not in word: 155 | if license.add_license(word, translations, blacklist): 156 | print("Added license from PKG-INFO:", word) 157 | 158 | for sub in ["Summary: ", "abstract: "]: 159 | if line.startswith(sub): 160 | assign_summary(line[len(sub):].strip(), 4) 161 | 162 | pkginfo += line if section else "" 163 | if line.startswith("Description:"): 164 | section = True 165 | 166 | if len(pkginfo) > 10: 167 | assign_description(pkginfo, 4) 168 | 169 | 170 | def summary_from_pkgconfig(pkgfile, package): 171 | """Parse pkgconfig files for Description: lines.""" 172 | try: 173 | with util.open_auto(pkgfile, "r") as pkgfd: 174 | lines = pkgfd.readlines() 175 | except FileNotFoundError: 176 | return 177 | 178 | score = 3 if package + ".pc" in pkgfile else 2 179 | for line in lines: 180 | if line.startswith("Description:"): 181 | assign_summary(line[13:], score) 182 | # Score will not increase, stop trying 183 | break 184 | 185 | 186 | def summary_from_R(pkgfile): 187 | """Parse DESCRIPTION file for Title: lines.""" 188 | try: 189 | with util.open_auto(pkgfile, "r") as pkgfd: 190 | lines = pkgfd.readlines() 191 | except FileNotFoundError: 192 | return 193 | 194 | for line in lines: 195 | if line.startswith("Title:"): 196 | assign_summary(line[7:], 3) 197 | # Score will not increase, stop trying 198 | break 199 | 200 | 201 | def skipline(line): 202 | """Skip boilerplate readme lines.""" 203 | if line.endswith("introduction"): 204 | return True 205 | 206 | skips = ["Copyright", 207 | "Free Software Foundation, Inc.", 208 | "Copying and distribution of", 209 | "are permitted in any", 210 | "notice and this notice", 211 | "README", 212 | "-*-"] 213 | return any(s in line for s in skips) 214 | 215 | 216 | def description_from_readme(readmefile): 217 | """Try to pick the first paragraph or two from the readme file.""" 218 | try: 219 | with util.open_auto(readmefile, "r") as readmefd: 220 | lines = readmefd.readlines() 221 | except FileNotFoundError: 222 | return 223 | 224 | section = False 225 | desc = "" 226 | for line in lines: 227 | if section and len(line) < 2 and len(desc) > 80: 228 | # If we are in a section and encounter a new line, break as long as 229 | # we already have a description > 80 characters. 
230 | break 231 | if not section and len(line) > 2: 232 | # Found the first paragraph hopefully 233 | section = True 234 | if section: 235 | # Copy all non-empty lines into the description 236 | if skipline(line) == 0 and len(line) > 2: 237 | desc = desc + line.strip() + "\n" 238 | 239 | score = 1.5 if readmefile.lower().endswith("readme") else 1 240 | assign_description(desc, score) 241 | 242 | 243 | def scan_for_description(package, dirn, translations, blacklist): 244 | """Scan the project directory for things we can use to guess a description and summary.""" 245 | test_pat = re.compile(r"tests?") 246 | dirpath_seen = "" 247 | for dirpath, dirnames, files in os.walk(dirn): 248 | if dirpath_seen != dirpath: 249 | dirpath_seen = dirpath 250 | dirnames[:] = [d for d in dirnames if not re.match(test_pat, d)] 251 | for name in files: 252 | if name.lower().endswith(".pdf"): 253 | continue 254 | if name.lower().endswith(".spec"): 255 | description_from_spec(os.path.join(dirpath, name), translations, blacklist) 256 | if name.lower().endswith("pkg-info"): 257 | description_from_pkginfo(os.path.join(dirpath, name), translations, blacklist) 258 | if name.lower().endswith("meta.yml"): 259 | description_from_pkginfo(os.path.join(dirpath, name), translations, blacklist) 260 | if name.lower().endswith("description"): 261 | description_from_pkginfo(os.path.join(dirpath, name), translations, blacklist) 262 | if name.lower().endswith(".pc"): 263 | summary_from_pkgconfig(os.path.join(dirpath, name), package) 264 | if name.startswith("DESCRIPTION"): 265 | summary_from_R(os.path.join(dirpath, name)) 266 | if name.lower().endswith(".pc.in"): 267 | summary_from_pkgconfig(os.path.join(dirpath, name), package) 268 | if name.lower().startswith("readme"): 269 | description_from_readme(os.path.join(dirpath, name)) 270 | 271 | print("Summary :", default_summary.strip()) 272 | 273 | 274 | def load_specfile(specfile, description, summary): 275 | """Load specfile with parse results.""" 276 | if description: 277 | specfile.default_desc = "\n".join(description) 278 | else: 279 | specfile.default_desc = default_description 280 | if summary: 281 | specfile.default_sum = summary[0] 282 | else: 283 | specfile.default_sum = default_summary 284 | -------------------------------------------------------------------------------- /autospec/util.py: -------------------------------------------------------------------------------- 1 | #!/bin/true 2 | # 3 | # util.py - part of autospec 4 | # Copyright (C) 2015 Intel Corporation 5 | # 6 | # This program is free software: you can redistribute it and/or modify 7 | # it under the terms of the GNU General Public License as published by 8 | # the Free Software Foundation, either version 3 of the License, or 9 | # (at your option) any later version. 10 | # 11 | # This program is distributed in the hope that it will be useful, 12 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 13 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 14 | # GNU General Public License for more details. 15 | # 16 | # You should have received a copy of the GNU General Public License 17 | # along with this program. If not, see . 
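# [Editor's aside: not part of autospec/util.py] A minimal sketch of the score-keeping in
# autospec/specdescription.py above: a new candidate only replaces the stored summary or
# description when it carries a strictly higher score (assuming the autospec sources and
# their dependencies are importable):
import specdescription

specdescription.assign_summary("Summary taken from a README", 1)
specdescription.assign_summary("Summary taken from PKG-INFO", 4)
specdescription.assign_summary("Another README summary", 1)   # ignored: 1 is not higher than 4
print(specdescription.default_summary)                        # "Summary taken from PKG-INFO"
# [End of editor's aside]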
18 | # 19 | 20 | import hashlib 21 | import os 22 | import re 23 | import shlex 24 | import subprocess 25 | import sys 26 | 27 | dictionary_filename = os.path.dirname(__file__) + "/translate.dic" 28 | dictionary = [line.strip() for line in open(dictionary_filename, 'r')] 29 | os_paths = None 30 | ERROR_FILE = 'pumpAutospec' 31 | ERROR_ENV = 'AUTOSPEC_UPDATE' 32 | 33 | 34 | def _log_error(error): 35 | write_out(ERROR_FILE, f"{error}\n", mode='a') 36 | 37 | 38 | def _commit_result(): 39 | if not os.path.isfile(ERROR_FILE): 40 | return 41 | call(f"git add {ERROR_FILE}", check=False, stderr=subprocess.DEVNULL) 42 | call(f"git commit {ERROR_FILE} -m 'Notes update'", check=False, stderr=subprocess.DEVNULL) 43 | call("git push", check=False, stderr=subprocess.DEVNULL) 44 | 45 | 46 | def _process_line(line, prev_line, current_patch, reported_patches, error): 47 | if m := re.match('^Patch #[0-9]+ .(?P.*).:', line): 48 | current_patch[0] = m.group('patch') 49 | 50 | if m := re.match('Hunk #[0-9]+ FAILED at [0-9]+', line): 51 | if current_patch[0] not in reported_patches: 52 | _log_error("Patch " + current_patch[0] + " does not apply") 53 | reported_patches[current_patch[0]] = True 54 | return True 55 | 56 | if m := re.match(".*can't find file to patch at input line ", line): 57 | if current_patch[0] not in reported_patches: 58 | _log_error("Patch " + current_patch[0] + " does not apply") 59 | reported_patches[current_patch[0]] = True 60 | return True 61 | 62 | if m := re.match('.*meson.build:[0-9]+:[0-9]+: ERROR: Unknown options: "(?P