├── .gitignore ├── administrative ├── requirements.txt ├── README.md ├── annual-ompi-github-committer-review.py └── wait-for-pr-ci-completion.py ├── jenkins ├── requirements-checkers.txt ├── whitespace-checker.sh ├── open-mpi.dist.create-tarball.build-manpages.sh ├── build-amis.sh ├── commit-email-checker.py ├── signed-off-by-checker.py ├── open-mpi.dist.create-tarball.build-srpm.sh ├── checker.py ├── open-mpi.pr.command-runner.groovy ├── open-mpi.dist.create-tarball.build-tarball.sh ├── packer.json ├── open-mpi-autotools-build.sh ├── open-mpi.dist.create-tarball.groovy ├── customize-ami.sh └── open-mpi-build-script.sh ├── nightly-tarball ├── run-with-autotools.sh ├── prrte-nightly-tarball ├── BuildFiler.py ├── pmix-nightly-tarball ├── BuilderUtils.py ├── openmpi-nightly-tarball ├── OMPIBuilder.py ├── hwloc-nightly-tarball ├── Coverity.py ├── MockBuildFiler.py ├── S3BuildFiler.py └── Builder.py ├── README.md ├── COPYING ├── migration ├── nightly-tarball-sync.py └── build-staged-tarball-migration.py └── dist ├── upload-release-to-s3.py └── uploadutils.py /.gitignore: -------------------------------------------------------------------------------- 1 | *.pyc 2 | -------------------------------------------------------------------------------- /administrative/requirements.txt: -------------------------------------------------------------------------------- 1 | PyGithub 2 | -------------------------------------------------------------------------------- /jenkins/requirements-checkers.txt: -------------------------------------------------------------------------------- 1 | gitpython 2 | -------------------------------------------------------------------------------- /jenkins/whitespace-checker.sh: -------------------------------------------------------------------------------- 1 | #! /usr/bin/env bash 2 | # 3 | # Copyright (c) 2019 Hewlett Packard Enterprise. All Rights Reserved. 4 | # 5 | # Additional copyrights may follow 6 | # 7 | # Check for white space violation in a given commit range. 8 | # Run on a PR to check whether it is introducing bad white space. 9 | # 10 | 11 | context=3 12 | config_file=.whitespace-checker-config.txt 13 | if [[ -r $config_file ]]; then 14 | exclude_dirs=`cat $config_file` 15 | else 16 | exclude_dirs='((opal/mca/hwloc/hwloc.*/hwloc)|/(libevent|pmix4x|treematch|romio))/' 17 | fi 18 | 19 | foundTab=0 20 | for file in $(git diff -l0 --name-only $1 $2 | grep -vE "($exclude_dirs)" | grep -E "(\.c|\.h)$") 21 | do 22 | git diff $1 $2 -- $file | grep -C $context -E "^\+.* +" 23 | if [[ $? -eq 0 ]] 24 | then 25 | foundTab=1 26 | fi 27 | done 28 | 29 | if [[ $foundTab -ne 0 ]] 30 | then 31 | exit 1 32 | fi 33 | -------------------------------------------------------------------------------- /nightly-tarball/run-with-autotools.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # 3 | # Copyright (c) 2017 Amazon.com, Inc. or its affiliates. All Rights 4 | # Reserved. 5 | # 6 | # Additional copyrights may follow 7 | # 8 | # Wrapper to start scripts under the right modules environment. It's 9 | # hard to make modules do something rational from Python, so use this 10 | # wrapper to provide the missing functionality. 11 | # 12 | 13 | if test "$#" -lt 2; then 14 | echo "usage: ./run-with-autotools.sh [options]" 15 | exit 1 16 | fi 17 | 18 | module_name="$1" 19 | shift 20 | program_name="$1" 21 | shift 22 | arguments=("$@") 23 | 24 | if ! 
type -t module > /dev/null 2>&1 ; then 25 | if test "$MODULESHOME" = ""; then 26 | if test -d ${HOME}/local/modules; then 27 | export MODULESHOME=${HOME}/local/modules 28 | else 29 | echo "Can't find \$MODULESHOME. Aborting." 30 | exit 1 31 | fi 32 | fi 33 | . ${MODULESHOME}/init/bash 34 | fi 35 | 36 | module unload autotools 37 | module load $module_name 38 | 39 | $program_name "${arguments[*]}" 40 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # General scripts used by the Open MPI infrastructure. 2 | 3 | A place for all the various cross-project scripts that used to live in 4 | the main ompi/ tree or, worse, in every source tree, each with its own 5 | bugs. Initially, this is mainly nightly build scripts. 6 | 7 | ## nightly-tarball/ 8 | 9 | Scripts for building the nightly tarballs for ompi, pmix, and hwloc. 10 | The scripts are designed to push to S3 (fronted by the CloudFront 11 | download.open-mpi.org URL), but can also scp to the web tree for 12 | www.open-mpi.org until the web bits are updated. 13 | 14 | ## migration/ 15 | 16 | Scripts for migrating bits of Open MPI's infrastructure from IU or 17 | HostGator to AWS. 18 | 19 | ## jenkins/ 20 | 21 | Scripts used for running the community Jenkins server 22 | (https://jenkins.open-mpi.org/). 23 | 24 | ## dist/ 25 | 26 | Scripts used to build releases, upload to s3, etc. The jenkins and 27 | nightly tarball scripts call these, in addition to direct human use. 28 | 29 | ## administrative/ 30 | 31 | Scripts used for administrative purposes. For example, a script to 32 | assist with the annual Github 'open-mpi' organization committer 33 | review. 34 | -------------------------------------------------------------------------------- /jenkins/open-mpi.dist.create-tarball.build-manpages.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # 3 | # Build man pages for Open MPI. 4 | # 5 | # usage build-manpages.sh 6 | # 7 | # Expected filesystem layout: 8 | # ${WORKSPACE}/ompi-scripts/ ompi-scripts checkout 9 | # ${WORKSPACE}/ompi/ ompi checkout @ target REF 10 | # ${WORKSPACE}/dist-files/ output of build 11 | 12 | set -e 13 | 14 | build_prefix=$1 15 | tarball=$2 16 | branch=$3 17 | 18 | echo "build_prefix: ${build_prefix}" 19 | echo "tarball: ${tarball}" 20 | echo "branch: ${branch}" 21 | 22 | aws s3 cp "${build_prefix}/${tarball}" "${WORKSPACE}/dist-files/${tarball}" 23 | tar xf ${WORKSPACE}/dist-files/${tarball} 24 | directory=`echo ${tarball} | sed -e 's/\(.*\)\.tar\..*/\1/'` 25 | cd ${directory} 26 | ../ompi/contrib/dist/make-html-man-pages.pl 27 | mkdir ${WORKSPACE}/dist-files/doc 28 | cp -rp man-page-generator/php ${WORKSPACE}/dist-files/doc/${branch} 29 | 30 | cd ${WORKSPACE}/dist-files 31 | docname="${directory}-doc.tar.gz" 32 | tar czf ${docname} doc/ 33 | aws s3 cp ${docname} s3://open-mpi-scratch/scratch/open-mpi-doc/${docname} 34 | 35 | echo "https://download.open-mpi.org/scratch/open-mpi-doc/${docname}" > ${WORKSPACE}/manpage-build-artifacts.txt 36 | -------------------------------------------------------------------------------- /jenkins/build-amis.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | # 3 | # Copyright (c) 2017 Amazon.com, Inc. or its affiliates. All Rights 4 | # Reserved. 5 | # 6 | # Additional copyrights may follow 7 | # 8 | # Build a new set of AMIs for Jenkins using Packer. 
This script 9 | # requires packer be installed, along with the packer.json and 10 | # customize-ami.sh scripts in this directory. 11 | # 12 | # Run this script from your laptop using an IAM role for the 13 | # ompi-aws-prod account with EC2 privileges or from aws.open-mpi.org 14 | # using the instance's role. 15 | # 16 | 17 | OPTIND=1 18 | packer_opts="" 19 | 20 | while getopts "h?a:l" opt; do 21 | case "$opt" in 22 | h|\?) 23 | echo "usage: build-amis.sh [-a <ami list>]" 24 | echo " -a Only build amis in ami list (comma separated)" 25 | echo " -l List ami names available for building" 26 | exit 1 27 | ;; 28 | a) 29 | packer_opts="--only ${OPTARG}" 30 | ;; 31 | l) 32 | ami_list=`sed -n -e 's/.*\"name\".*\"\(.*\)\".*/\1/p' packer.json | xargs` 33 | echo "Available amis: ${ami_list}" 34 | exit 0 35 | ;; 36 | esac 37 | done 38 | 39 | export BUILD_DATE=`date +%Y%m%d%H%M` 40 | export AWS_IAM_ROLE="jenkins-worker" 41 | 42 | packer build ${packer_opts} packer.json 43 | -------------------------------------------------------------------------------- /jenkins/commit-email-checker.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | import logging 4 | 5 | from git import Repo 6 | 7 | # Helper module for our git commit checkers 8 | import checker 9 | 10 | #-------------------------------------- 11 | 12 | result_messages = { 13 | 'all good' : { 'message' : 'All commits have good email addresses. Yay!', 14 | 'status' : 0, }, 15 | 'some good' : { 'message' : 'Some commits have bad email addresses', 16 | 'status' : 1, }, 17 | 'none good' : { 'message' : 'No commits have good email addresses', 18 | 'status' : 1, }, 19 | } 20 | 21 | #-------------------------------------- 22 | 23 | def _email_address_checker(commit, results): 24 | for type, email in (('author', commit.author.email), 25 | ('committer', commit.committer.email)): 26 | # Check for typical bad email addresses 27 | if ('root@' in email or 28 | 'localhost' in email or 29 | 'localdomain' in email): 30 | logging.error("Commit %s has an unspecific %s email address: %s" % (commit.hexsha, type, email)) 31 | results['bad'] += 1 32 | 33 | else: 34 | logging.info("Commit %s has a good %s email address: %s" % (commit.hexsha, type, email)) 35 | results['good'] += 1 36 | 37 | #-------------------------------------- 38 | # Call the main engine 39 | checker.run('Commit email address checker', _email_address_checker, 40 | result_messages) 41 | -------------------------------------------------------------------------------- /jenkins/signed-off-by-checker.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | import re 4 | import logging 5 | 6 | from git import Repo 7 | 8 | # Helper module for our git commit checkers 9 | import checker 10 | 11 | #-------------------------------------- 12 | 13 | result_messages = { 14 | 'all good' : { 'message' : 'All commits signed off. Yay!', 15 | 'status' : 0, }, 16 | 'some good' : { 'message' : 'Some commits not signed off', 17 | 'status' : 1, }, 18 | 'none good' : { 'message' : 'No commits are signed off', 19 | 'status' : 1, }, 20 | } 21 | 22 | _prog = re.compile("^Signed-off-by: (.+?) 
<(.+)>$", 23 | flags=re.MULTILINE) 24 | 25 | #-------------------------------------- 26 | 27 | def _signed_off_by_checker(commit, results): 28 | # Ignore merge commits 29 | if len(commit.parents) > 1: 30 | logging.info("Merge commit %s skipped" % (commit.hexsha)) 31 | return 32 | 33 | match = _prog.search(commit.message) 34 | if not match: 35 | results['bad'] += 1 36 | logging.error("Commit %s not signed off" % (commit.hexsha)) 37 | 38 | else: 39 | results['good'] += 1 40 | name = match.group(1) 41 | addr = match.group(2) 42 | logging.info("Commit %s properly signed off: %s <%s>" % (commit.hexsha, name, addr)) 43 | 44 | #-------------------------------------- 45 | # Call the main engine 46 | checker.run('Signed-off-by checker', _signed_off_by_checker, 47 | result_messages) 48 | -------------------------------------------------------------------------------- /jenkins/open-mpi.dist.create-tarball.build-srpm.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # 3 | # usage: build-srpm.sh \ 4 | # 5 | # 6 | # Script to build a source RPM from an existing tarball and upload 7 | # the output to the correct S3 bucket. 8 | # 9 | # Expected filesystem layout: 10 | # ${WORKSPACE}/ompi-scripts/ ompi-scripts checkout 11 | # ${WORKSPACE}/ompi/ ompi checkout @ target REF 12 | # ${WORKSPACE}/dist-files/ output of build 13 | 14 | set -e 15 | 16 | s3_prefix="$1" 17 | tarball="$2" 18 | branch_name="$3" 19 | build_date="$4" 20 | 21 | # guess release version from tarball, same way build_tarball does 22 | release_version=`echo ${tarball} | sed -e 's/.*openmpi-\(.*\)\.tar\.\(gz\|bz2\)/\1/'` 23 | 24 | # copy tarball back locally 25 | aws s3 cp "${s3_prefix}/open-mpi/${branch_name}/${tarball}" "${WORKSPACE}/dist-files/${tarball}" 26 | 27 | cp ompi/contrib/dist/linux/openmpi.spec . 28 | ompi/contrib/dist/linux/buildrpm.sh dist-files/${tarball} 29 | rpms=`find rpmbuild/SRPMS -name "*.rpm" -print` 30 | ${WORKSPACE}/ompi-scripts/dist/upload-release-to-s3.py \ 31 | --s3-base "${s3_prefix}" --project "open-mpi" --branch "${branch_name}" \ 32 | --version "${release_version}" --date "${build_date}" --yes \ 33 | --files $rpms 34 | srpm_name=`find rpmbuild/SRPMS -name "*.rpm" -print | head -n 1` 35 | # only want the filename, not the path to the file, since upload_release_to_s3.py will remove 36 | # the directory prefixes as well. 37 | basename "$srpm_name" > ${WORKSPACE}/srpm-name.txt 38 | -------------------------------------------------------------------------------- /administrative/README.md: -------------------------------------------------------------------------------- 1 | This Python script is useful in generating a spreadsheet for the 2 | annual Open MPI committers review. 3 | 4 | It uses the Github Python API to query all the users, repos, and teams 5 | in the "open-mpi" github.com organization. 6 | 7 | It generates a CSV with users as rows and repos as columns. 8 | 9 | A bunch of information is included for each user in an attempt to help 10 | identify each user. 11 | 12 | For each user, if that user has commit access to a given repo, the 13 | github team name that gives them commit access to that repo is listed 14 | in the corresponding cell. 15 | 16 | The rationale is that if you need to remove someone's access to a 17 | repo, that tells you exactly which Github team(s) to remove the user 18 | from in order to remove their commit access to that repo. 19 | 20 | To run this script: 21 | 22 | - You need to have "python3" in your path. 
23 | - "pip3 install pygithub" 24 | - Go to https://github.com/settings/tokens and make a personal access 25 | token with full permissions to the repo and org. 26 | - Save that token in a single-line text file named 'oauth-token.txt' 27 | in the same directory as this script (I didn't make the script 28 | feature-full, e.g., to accept a CLI arg telling where the token file 29 | is -- sorry). 30 | 31 | Running the script will generate "permissions.csv". 32 | 33 | I suggest uploading this CSV to Google Drive (e.g., in the shared Open 34 | MPI folder) and converting it to a Google Sheet. Then you can color 35 | the cells as appropriate, resize columns, wrap text, ...etc., and then 36 | ask the community to validate all the committers. 37 | -------------------------------------------------------------------------------- /nightly-tarball/prrte-nightly-tarball: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # 3 | # Copyright (c) 2017 Amazon.com, Inc. or its affiliates. All Rights 4 | # Reserved. 5 | # 6 | # Additional copyrights may follow 7 | # 8 | 9 | import OMPIBuilder 10 | import S3BuildFiler 11 | 12 | 13 | nightly_prefix='/mnt/data/nightly-tarball' 14 | config_data = { 'project_name' : 'PRRTE', 15 | 'project_short_name' : 'prrte', 16 | 'email_dest' : 'bbarrett@amazon.com', 17 | 'email_from' : 'mpiteam@aws.open-mpi.org', 18 | 'repository' : 'https://github.com/pmix/prrte.git', 19 | 'scratch_path' : nightly_prefix + '/scratch', 20 | 'failed_build_prefix' : 'failed-builds/', 21 | 'failed_build_url' : 'http://download.open-mpi.org/nightly/prrte/', 22 | 'autogen' : './autogen.pl', 23 | 'coverity' : { 'tool_dir' : nightly_prefix + '/coverity', 24 | 'tool_url' : 'https://scan.coverity.com/download/cxx/linux64', 25 | 'token_file' : nightly_prefix + '/coverity/prrte-token.txt', 26 | 'project_name' : 'open-mpi%2Fprrte', 27 | 'project_prefix' : 'prrte', 28 | 'configure_args' : '', 29 | 'make_args' : '-j 8', 30 | 'email' : 'rhc@open-mpi.org' }, 31 | 'branches' : { 'master' : { 'output_location' : 'master/', 32 | 'coverity' : False, 33 | 'max_count' : 7 }, 34 | }, 35 | } 36 | 37 | 38 | filer = S3BuildFiler.S3BuildFiler('open-mpi-nightly', 'nightly/prrte/') 39 | builder = OMPIBuilder.OMPIBuilder(config_data, filer) 40 | builder.run() 41 | -------------------------------------------------------------------------------- /COPYING: -------------------------------------------------------------------------------- 1 | Redistribution and use in source and binary forms, with or without 2 | modification, are permitted provided that the following conditions are 3 | met: 4 | 5 | - Redistributions of source code must retain the above copyright 6 | notice, this list of conditions and the following disclaimer. 7 | 8 | - Redistributions in binary form must reproduce the above copyright 9 | notice, this list of conditions and the following disclaimer listed 10 | in this license in the documentation and/or other materials 11 | provided with the distribution. 12 | 13 | - Neither the name of the copyright holders nor the names of its 14 | contributors may be used to endorse or promote products derived from 15 | this software without specific prior written permission. 16 | 17 | The copyright holders provide no reassurances that the source code 18 | provided does not infringe any patent, copyright, or any other 19 | intellectual property rights of third parties. 
The copyright holders 20 | disclaim any liability to any recipient for claims brought against 21 | recipient by any third party for infringement of that parties 22 | intellectual property rights. 23 | 24 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 25 | "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 26 | LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR 27 | A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT 28 | OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 29 | SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT 30 | LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 31 | DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 32 | THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 33 | (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 34 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 35 | -------------------------------------------------------------------------------- /nightly-tarball/BuildFiler.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright (c) 2017 Amazon.com, Inc. or its affiliates. All Rights 3 | # Reserved. 4 | # 5 | # Additional copyrights may follow 6 | # 7 | 8 | import logging 9 | 10 | 11 | logger = logging.getLogger('Builder.S3BuildFiler') 12 | 13 | 14 | class BuildFiler(object): 15 | """Abstraction for interacting with storage (S3, local, etc.) 16 | 17 | Base class for the BuildFiler. You probably don't want to use 18 | this implementation, as it's pretty much an abstract base class. 19 | Use the S3BuildFiler. 20 | 21 | """ 22 | 23 | def download_to_stream(self, filename): 24 | """Download to stream 25 | 26 | Gets the object at basename/filename and returns as a 27 | StreamObject, suitable for turning into a string via .read() 28 | or passing to JSON / YAML constructors. 29 | 30 | """ 31 | raise NotImplementedError 32 | 33 | 34 | def upload_from_stream(self, filename, data, properties = {}): 35 | """Upload from a stream 36 | 37 | Puts the stream information in data to an object at 38 | basename/filename. Data can be the output of json.dumps() or 39 | similar. 40 | 41 | """ 42 | raise NotImplementedError 43 | 44 | 45 | def download_to_file(self, remote_filename, local_filename): 46 | """Download to a file 47 | 48 | Download the object at basename/remote_filename to 49 | local_filename. 50 | 51 | """ 52 | raise NotImplementedError 53 | 54 | 55 | def upload_from_file(self, local_filename, remote_filename, properties = {}): 56 | """Upload a file 57 | 58 | Upload the local_file to the remote filename. 59 | """ 60 | raise NotImplementedError 61 | 62 | 63 | def delete(self, filename): 64 | """Delete file""" 65 | raise NotImplementedError 66 | 67 | 68 | def file_search(self, dirname, blob): 69 | """Search for file blob in dirname directory 70 | 71 | Search for all files in dirname matching blob. Returns a list 72 | of filenames that match the search. 73 | """ 74 | raise NotImplementedError 75 | -------------------------------------------------------------------------------- /nightly-tarball/pmix-nightly-tarball: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # 3 | # Copyright (c) 2017 Amazon.com, Inc. or its affiliates. All Rights 4 | # Reserved. 
5 | # 6 | # Additional copyrights may follow 7 | # 8 | 9 | import OMPIBuilder 10 | import S3BuildFiler 11 | 12 | 13 | nightly_prefix='/mnt/data/nightly-tarball' 14 | config_data = { 'project_name' : 'PMIx', 15 | 'project_short_name' : 'pmix', 16 | 'email_dest' : 'rhc@open-mpi.org', 17 | 'email_from' : 'mpiteam@aws.open-mpi.org', 18 | 'repository' : 'https://github.com/pmix/pmix.git', 19 | 'scratch_path' : nightly_prefix + '/scratch', 20 | 'failed_build_prefix' : 'failed-builds/', 21 | 'failed_build_url' : 'http://download.open-mpi.org/nightly/pmix/', 22 | 'autogen' : './autogen.pl', 23 | 'coverity' : { 'tool_dir' : nightly_prefix + '/coverity', 24 | 'tool_url' : 'https://scan.coverity.com/download/cxx/linux64', 25 | 'token_file' : nightly_prefix + '/coverity/pmix-token.txt', 26 | 'project_name' : 'open-mpi%2Fpmix', 27 | 'project_prefix' : 'pmix', 28 | 'configure_args' : '', 29 | 'make_args' : '-j 8', 30 | 'email' : 'rhc@open-mpi.org' }, 31 | 'branches' : { 'master' : { 'output_location' : 'master/', 32 | 'coverity' : True, 33 | 'max_count' : 7 }, 34 | 'v2.2' : { 'output_location' : 'v2.2.x', 35 | 'coverity' : False, 36 | 'max_count' : 7 }, 37 | 'v3.1' : { 'output_location' : 'v3.1.x', 38 | 'coverity' : False, 39 | 'max_count' : 7 }, 40 | }, 41 | } 42 | 43 | 44 | filer = S3BuildFiler.S3BuildFiler('open-mpi-nightly', 'nightly/pmix/') 45 | builder = OMPIBuilder.OMPIBuilder(config_data, filer) 46 | builder.run() 47 | -------------------------------------------------------------------------------- /nightly-tarball/BuilderUtils.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright (c) 2017 Amazon.com, Inc. or its affiliates. All Rights 3 | # Reserved. 4 | # 5 | # Additional copyrights may follow 6 | # 7 | 8 | import subprocess 9 | import logging 10 | import os 11 | import fileinput 12 | 13 | 14 | def logged_call(args, 15 | wrapper_args=None, 16 | log_file=None, 17 | err_log_len=20, 18 | env=None): 19 | """Wrapper around check_call to log output 20 | 21 | Wrap around check_call to capture stdout and stderr and save them 22 | in log_file (or command-output.txt) with the given environment. 23 | The amount of data saved from the capture and emitted to the log 24 | stream is dependent on the status of the logging system. 
25 | 26 | """ 27 | logger = logging.getLogger('Builder.BuildUtils') 28 | 29 | base_command = os.path.basename(args[0]) 30 | 31 | call_args = [] 32 | if wrapper_args != None: 33 | call_args.extend(wrapper_args) 34 | call_args.extend(args) 35 | 36 | logger.debug('Executing: %s' % (str(call_args))) 37 | logger.debug('cwd: %s' % (str(os.getcwd()))) 38 | 39 | if log_file != None: 40 | stdout_file = log_file 41 | else: 42 | stdout_file = '%s-output.txt' % (base_command) 43 | stdout = open(stdout_file, 'w') 44 | 45 | if env != None and 'CALL_DEBUG' in env: 46 | return 47 | 48 | try: 49 | subprocess.check_call(call_args, stdout=stdout, stderr=subprocess.STDOUT, env=env) 50 | except: 51 | stdout.close() 52 | logger.warn("Exceuting %s failed:" % (base_command)) 53 | if logger.getEffectiveLevel() == logging.DEBUG: 54 | # caller wanted all output anyway (for debug), so give 55 | # them everything 56 | for line in fileinput.input(stdout_file): 57 | logger.warn(line.rstrip('\n')) 58 | else: 59 | # caller wasn't going to get success output, so only give 60 | # the last err_log_len lines to keep emails rationally 61 | # sized 62 | output = open(stdout_file, 'r') 63 | lines = output.readlines() 64 | for line in lines[-err_log_len:]: 65 | logger.warn(line.rstrip('\n')) 66 | raise 67 | else: 68 | stdout.close() 69 | if logger.getEffectiveLevel() == logging.DEBUG: 70 | for line in fileinput.input(stdout_file): 71 | logger.debug(line.rstrip('\n')) 72 | -------------------------------------------------------------------------------- /jenkins/checker.py: -------------------------------------------------------------------------------- 1 | # Helper module used by the signed-off-by checker and email address 2 | # checker. 3 | # 4 | # This module handles all the common code stuff between the various 5 | # checkers. 6 | 7 | import os 8 | import sys 9 | import logging 10 | import argparse 11 | 12 | from git import Repo 13 | 14 | def run(checker_name, per_commit_callback, result_messages): 15 | logging.basicConfig(level=logging.INFO, stream=sys.stderr) 16 | logging.info("%s starting" % (checker_name)) 17 | 18 | argparser = argparse.ArgumentParser(description='Per-commit PR checker') 19 | argparser.add_argument('--status-msg-file', 20 | help='File in which to print the GitHub status message', 21 | type=str, required=True) 22 | argparser.add_argument('--gitdir', help='Git directory', type=str, 23 | required=True) 24 | argparser.add_argument('--base-branch', help='Merge base branch name', 25 | type=str, required=True) 26 | argparser.add_argument('--pr-branch', help='PR branch name', 27 | type=str, required=True) 28 | args = argparser.parse_args() 29 | args_dict = vars(args) 30 | 31 | base_branch = args_dict['base_branch'] 32 | pr_branch = args_dict['pr_branch'] 33 | clone_dir = args_dict['gitdir'] 34 | 35 | logging.info("Git clone: %s" % (clone_dir)) 36 | logging.info("PR branch: %s" % (pr_branch)) 37 | logging.info("Base branch: %s" % (base_branch)) 38 | 39 | #-------------------------------------- 40 | # Make a python object representing the Git repo 41 | repo = Repo(clone_dir) 42 | merge_base = repo.commit(base_branch) 43 | logging.info("Merge base: %s" % (merge_base.hexsha)) 44 | 45 | #-------------------------------------- 46 | # Iterate from the HEAD of the PR branch down to the merge base with 47 | # the base branch. 
48 | results = { 49 | 'good' : 0, 50 | 'bad' : 0, 51 | } 52 | 53 | for commit in repo.iter_commits(repo.commit(pr_branch)): 54 | if commit.binsha == merge_base.binsha: 55 | logging.info("Found the merge base %s: we're done" % (commit.hexsha)) 56 | break 57 | 58 | per_commit_callback(commit, results) 59 | 60 | #-------------------------------------- 61 | # Analyze what happened 62 | if results['good'] == 0 and results['bad'] == 0: 63 | msg = 'No commits -- nothing to do' 64 | status = 0 65 | elif results['good'] > 0 and results['bad'] == 0: 66 | msg = result_messages['all good']['message'] 67 | status = result_messages['all good']['status'] 68 | elif results['good'] > 0 and results['bad'] > 0: 69 | msg = result_messages['some good']['message'] 70 | status = result_messages['some good']['status'] 71 | else: 72 | msg = result_messages['none good']['message'] 73 | status = result_messages['none good']['status'] 74 | 75 | print(msg) 76 | with open(args_dict['status_msg_file'], 'w') as writer: 77 | writer.write(msg) 78 | 79 | exit(status) 80 | -------------------------------------------------------------------------------- /nightly-tarball/openmpi-nightly-tarball: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # 3 | # Copyright (c) 2017 Amazon.com, Inc. or its affiliates. All Rights 4 | # Reserved. 5 | # 6 | # Additional copyrights may follow 7 | # 8 | 9 | import OMPIBuilder 10 | import S3BuildFiler 11 | 12 | 13 | nightly_prefix='/mnt/data/nightly-tarball' 14 | config_data = { 'project_name' : 'Open MPI', 15 | 'project_short_name' : 'openmpi', 16 | 'project_very_short_name' : 'ompi', 17 | 'email_dest' : 'testing@lists.open-mpi.org', 18 | 'email_from' : 'mpiteam@aws.open-mpi.org', 19 | 'repository' : 'https://github.com/open-mpi/ompi.git', 20 | 'scratch_path' : nightly_prefix + '/scratch', 21 | 'failed_build_prefix' : 'failed-builds/', 22 | 'failed_build_url' : 'http://download.open-mpi.org/nightly/open-mpi', 23 | 'autogen' : './autogen.pl', 24 | 'coverity' : { 'tool_dir' : nightly_prefix + '/coverity', 25 | 'tool_url' : 'https://scan.coverity.com/download/cxx/linux64', 26 | 'token_file' : nightly_prefix + '/coverity/openmpi-token.txt', 27 | 'project_name' : 'Open+MPI', 28 | 'project_prefix' : 'openmpi', 29 | 'configure_args' : '--enable-debug --enable-mpi-fortran --enable-mpi-java --enable-oshmem --enable-oshmem-fortran --with-usnic --with-libfabric=/mnt/data/local-installs --with-ucx=/mnt/data/local-installs', 30 | 'make_args' : '-j 8', 31 | 'email' : 'jsquyres@cisco.com' }, 32 | 'branches' : { 'master' : { 'output_location' : 'master/', 33 | 'coverity' : True, 34 | 'max_count' : 7 }, 35 | 'v4.1.x' : { 'output_location' : 'v4.1.x/', 36 | 'coverity' : False, 37 | 'max_count' : 7 }, 38 | 'v4.0.x' : { 'output_location' : 'v4.0.x/', 39 | 'coverity' : False, 40 | 'max_count' : 7 }, 41 | 'v3.1.x' : { 'output_location' : 'v3.1.x/', 42 | 'coverity' : False, 43 | 'max_count' : 7 }, 44 | 'v3.0.x' : { 'output_location' : 'v3.0.x/', 45 | 'coverity' : False, 46 | 'max_count' : 7 }, 47 | 'v2.x' : { 'output_location' : 'v2.x/', 48 | 'coverity' : False, 49 | 'max_count' : 7 }, 50 | 'v2.0.x' : { 'output_location' : 'v2.0.x/', 51 | 'coverity' : False, 52 | 'max_count' : 7 }, 53 | 'v1.10' : { 'output_location' : 'v1.10/', 54 | 'coverity' : False, 55 | 'max_count' : 7 }, 56 | }, 57 | } 58 | 59 | 60 | filer = S3BuildFiler.S3BuildFiler('open-mpi-nightly', 'nightly/open-mpi/') 61 | builder = OMPIBuilder.OMPIBuilder(config_data, filer) 62 | builder.run() 
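# Each entry under 'branches' above maps a git branch name to its output
# subdirectory on S3, a flag for whether that branch gets a Coverity scan,
# and (presumably) how many nightly tarballs to retain.  A hypothetical
# example of picking up a new release branch, following the same pattern:
#
#   'v5.0.x' : { 'output_location' : 'v5.0.x/',
#                'coverity' : False,
#                'max_count' : 7 },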
63 | -------------------------------------------------------------------------------- /migration/nightly-tarball-sync.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | # 3 | # Copyright (c) 2017 Amazon.com, Inc. or its affiliates. All Rights 4 | # Reserved. 5 | # 6 | # Additional copyrights may follow 7 | # 8 | # usage: nightly-tarball-sync.py --project \ 9 | # --input-tree --output-tree 10 | # 11 | # Sync nightly tarballs from S3 into the HostGator web site (until 12 | # such time as we convert the website to pull from S3 directly). If 13 | # there are multiple builds between runs, all but the build that ends 14 | # up in latest_snapshot.txt will be missed. This script should be run 15 | # significantly more often than once a day as a result. Since the 16 | # builds are serialized and take > 10 minutes, the plan is to run 17 | # every 10 minutes. Checking latest_snapshot.txt is pretty low 18 | # overhead, so that shouldn't be a problem. 19 | # 20 | 21 | import os 22 | import argparse 23 | import time 24 | import json 25 | import shutil 26 | import urllib 27 | import urllib2 28 | import subprocess 29 | 30 | def sync_tree(project, input_path, output_path, branch): 31 | response = urllib2.urlopen('%s/%s/latest_snapshot.txt' % (input_path, branch)) 32 | s3_latest_snapshot = response.read().strip() 33 | 34 | try: 35 | local_file = open('%s/%s/latest_snapshot.txt' % (output_path, branch), 'r') 36 | local_latest_snapshot = local_file.read().strip() 37 | except: 38 | local_latest_snapshot = '' 39 | 40 | if s3_latest_snapshot == local_latest_snapshot: 41 | return 42 | 43 | # get info about new snapshot 44 | response = urllib2.urlopen('%s/%s/build-%s-%s.json' % (input_path, branch, 45 | project, s3_latest_snapshot)) 46 | data = json.load(response) 47 | 48 | # delete copies older than 7 days 49 | for filename in os.listdir('%s/%s' % (output_path, branch)): 50 | full_filename = '%s/%s/%s' % (output_path, branch, filename) 51 | if (filename.endswith(".txt") or filename.endswith(".php")): 52 | continue 53 | if (time.time() - os.path.getmtime(full_filename) > (7 * 24 * 60 * 60)): 54 | os.remove(full_filename) 55 | 56 | # copy files from new snapshot 57 | for file in data['files']: 58 | fileurl = urllib.URLopener() 59 | fileurl.retrieve('%s/%s/%s' % (input_path, branch, file), 60 | '%s/%s/%s' % (output_path, branch, file)) 61 | 62 | # generate md5sums and sha1sums 63 | os.chdir('%s/%s' % (output_path, branch)) 64 | output = open('md5sums.txt', 'w') 65 | subprocess.check_call(['md5sum *.tar.gz *.tar.bz2'], stdout=output, shell=True) 66 | output = open('sha1sums.txt', 'w') 67 | subprocess.check_call(['sha1sum *.tar.gz *.tar.bz2'], stdout=output, shell=True) 68 | 69 | # update snapshot file 70 | snapfile = open('latest_snapshot.txt', 'w') 71 | snapfile.write(s3_latest_snapshot) 72 | 73 | 74 | parser = argparse.ArgumentParser(description='Web tarball S3 staging') 75 | parser.add_argument('--project', help='project name (tarball prefix)', 76 | type=str, required=True) 77 | parser.add_argument('--input-path', help='input path to traverse', 78 | type=str, required=True) 79 | parser.add_argument('--output-path', help='scratch directory to stage for later s3 upload', 80 | type=str, required=True) 81 | parser.add_argument('branches', nargs='*', default=[], help='List of branches to build') 82 | args = parser.parse_args() 83 | 84 | args_dict = vars(args) 85 | 86 | for branch in args_dict['branches']: 87 | sync_tree(args_dict['project'], 
args_dict['input_path'], 88 | args_dict['output_path'], branch) 89 | 90 | -------------------------------------------------------------------------------- /jenkins/open-mpi.pr.command-runner.groovy: -------------------------------------------------------------------------------- 1 | // -*- groovy -*- 2 | // 3 | // Run a python script on a PR commit series to check 4 | // for things like signed-off-by or commit emails. 5 | // 6 | // 7 | // WORKSPACE Layout: 8 | // srcdir/ PR checking source tree 9 | // ompi-scripts/ ompi-scripts master checkout 10 | 11 | def builder_label = "headnode" 12 | def pr_context = env.context_name 13 | def script_name = "ompi-scripts/" + env.script_name 14 | def target_srcdir = "srcdir" 15 | 16 | // Start by immediately tagging this as in progress... 17 | setGitHubPullRequestStatus(context: pr_context, message: 'In progress', state: 'PENDING') 18 | 19 | node(builder_label) { 20 | stage('Source Checkout') { 21 | try { 22 | checkout_code(target_srcdir); 23 | } catch (err) { 24 | setGitHubPullRequestStatus(context: pr_context, 25 | message: "Internal Accounting Error", 26 | state: 'ERROR') 27 | throw(err) 28 | } 29 | } 30 | 31 | stage('Checking git commits'){ 32 | // There's no way to capture the stdout when the script fails, 33 | // so we have the script dump any output we want to use as the status 34 | // message in a file that is slurped up later. 35 | ret = sh(script: "python ${script_name} --status-msg-file checker-output.txt --gitdir ${target_srcdir} --base-branch origin/${env.GITHUB_PR_TARGET_BRANCH} --pr-branch origin/PR-${env.GITHUB_PR_NUMBER}", 36 | returnStatus: true) 37 | echo "script return code: ${ret}" 38 | 39 | // GitHub has three status states: 40 | // SUCCESS - everything is good 41 | // FAILURE - the tests functioned, but did not pass 42 | // ERROR - the tests did not function 43 | // We expect script error code 0 to map to SUCCESS, 1 to map to 44 | // FAILURE, and all others map to error. We do not expect to have 45 | // a useful status message on FAILURE. 46 | if (ret == 0 || ret == 1) { 47 | status_string = sh(script: "cat checker-output.txt", returnStdout: true) 48 | status_string = status_string.trim() 49 | if (ret == 0) { 50 | status_state = 'SUCCESS' 51 | } else { 52 | status_state = 'FAILURE' 53 | } 54 | } else { 55 | status_string = "Internal Accounting Error" 56 | status_state = 'ERROR' 57 | } 58 | 59 | setGitHubPullRequestStatus(context: pr_context, 60 | message: "${status_string}", 61 | state: "${status_state}") 62 | 63 | if (ret != 0) { 64 | currentBuild.result = 'FAILURE' 65 | } 66 | } 67 | } 68 | 69 | 70 | def checkout_code(target_srcdir) { 71 | pr_num = env.GITHUB_PR_NUMBER 72 | 73 | // Pull the refspecs for all the origin branches, as well as the PR 74 | // in question. We could be more specific with origin branches, but 75 | // that would be more work for only a little space savings. 76 | checkout(changelog: false, poll: false, 77 | scm: [$class: 'GitSCM', 78 | extensions: [[$class: 'RelativeTargetDirectory', 79 | relativeTargetDir: "${target_srcdir}"]], 80 | userRemoteConfigs: [[credentialsId: '6de58bf1-2619-4065-99bb-8d284b4691ce', 81 | refspec: "+refs/pull/${pr_num}/head:refs/remotes/origin/PR-${pr_num} +refs/heads/*:refs/remotes/origin/*", 82 | url: "${env.GITHUB_REPO_GIT_URL}"]]]) 83 | 84 | // Make sure we have the ompi-scripts repository on the build node as well. 85 | // scm is a provided global variable that points to the repository 86 | // configured in the Jenkins job for the pipeline source. 
Since the 87 | // pipeline and the helper scripts live in the same place, this is 88 | // perfect for us. We check this out on the worker nodes so that 89 | // the helper scripts are always available. 90 | checkout(changelog: false, poll: false, scm: scm) 91 | } 92 | -------------------------------------------------------------------------------- /jenkins/open-mpi.dist.create-tarball.build-tarball.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # 3 | # usage: build-tarball.sh 4 | # 5 | # First step in building a release tarball; takes a git checkout 6 | # and makes the tarballs, uploading the results to S3. Before 7 | # uploading, some basic sanity checks are performed to make sure 8 | # that the requested release in some way matches the actual 9 | # release. 10 | # 11 | # Expected filesystem layout: 12 | # ${WORKSPACE}/ompi-scripts/ ompi-scripts checkout 13 | # ${WORKSPACE}/ompi/ ompi checkout @ target REF 14 | # ${WORKSPACE}/dist-files/ output of build 15 | 16 | set -e 17 | 18 | build_type=$1 19 | ref=$2 20 | s3_prefix=$3 21 | build_date="$4" 22 | 23 | echo "build_type: ${build_type}" 24 | echo "ref: ${ref}" 25 | echo "s3_prefix: ${s3_prefix}" 26 | echo "build_date: ${build_date}" 27 | 28 | # If doing a release or pre-release, the given ref must be a tag. 29 | # search for the tag and chicken out if we can't ifnd it. 30 | if test "$build_type" = "release" -o "${build_type}" = "pre-release"; then 31 | (cd ompi; 32 | git tag -l ${ref}; 33 | if test "`git tag -l ${ref}`" = ""; then 34 | echo "Build target ${ref} does not appear to be a tag." 35 | exit 1 36 | fi) 37 | fi 38 | 39 | # This won't do anything rational on a ref that isn't a tag in 40 | # our usual format, but the check is only used for the release 41 | # and pre-release options, which should only build on tags, so 42 | # shrug. 43 | greek=`echo "$ref" | sed -e 's/v\?[0-9]\+\.[0-9]\+\.[0-9]\+//'` 44 | 45 | case "$build_type" in 46 | "release") 47 | greek_option="--no-greek" 48 | if test "$greek" != "" ; then 49 | echo "Found what appears to be a greek version in tag $ref of $greek." 50 | echo "Aborting because this doesn't look right." 51 | exit 1 52 | fi 53 | ;; 54 | "pre-release") 55 | greek_option="--greekonly" 56 | if test "$greek" = "" ; then 57 | echo "Did not find a greek version in tag $ref." 58 | echo "Aborting because this doesn't look right." 59 | exit 1 60 | fi 61 | ;; 62 | "scratch") 63 | greek_option="--greekonly" 64 | ;; 65 | *) 66 | echo "Unknown build type ${build_type}" 67 | exit 1 68 | ;; 69 | esac 70 | 71 | rm -rf dist-files 72 | mkdir dist-files 73 | (cd ompi ; contrib/dist/make_dist_tarball --no-git-update ${greek_option} --distdir ${WORKSPACE}/dist-files) 74 | 75 | # release version is the tarball version name, which is roughly what 76 | # is in the tarball's VERSION file. 77 | tarball=`ls -1 dist-files/openmpi-*.tar.gz` 78 | tarball=`basename ${tarball}` 79 | release_version=`echo ${tarball} | sed -e 's/.*openmpi-\(.*\)\.tar\.\(gz\|bz2\)/\1/'` 80 | if test "${release_version}" = "" ; then 81 | echo "Could not determine release version for ${tarball}" 82 | exit 1 83 | fi 84 | 85 | # branch_directory is the directory in S3/web pages for this release. 86 | # While the branch that releases come from might not follow a strict 87 | # format (I'm looking at you, v2.x), the web page names are always 88 | # v. 
89 | branch_directory=`echo "v${release_version}" | sed -e 's/\([0-9]\+\.[0-9]\+\).*/\1/'` 90 | 91 | # ref_version is the ref with any leading v stripped out, because 92 | # people aren't always great about tagging versions as v1.2.3 instead 93 | # of 1.2.3. 94 | ref_version=`echo ${REF} | sed -e 's/v\(.*\)/\1/'` 95 | 96 | # if we're not doing a scratch build, make sure that the tag version 97 | # matches the version produced 98 | if test "${build_type}" != "scratch" -a "${ref_version}" != "$release_version"; then 99 | echo "Build target version ${ref_version} does not match release tarball version ${release_version}." 100 | exit 1 101 | fi 102 | 103 | # release the files into s3 104 | dist_files=`ls dist-files/openmpi-*` 105 | ${WORKSPACE}/ompi-scripts/dist/upload-release-to-s3.py \ 106 | --s3-base "${s3_prefix}" --project "open-mpi" --branch "${branch_directory}" \ 107 | --version "${release_version}" --date "${build_date}" --yes \ 108 | --files $dist_files 109 | 110 | # need to save the tarball name and branch_directory for consumption 111 | # by the calling script 112 | echo "${tarball}" > build-tarball-filename.txt 113 | echo "${branch_directory}" > build-tarball-branch_directory.txt 114 | -------------------------------------------------------------------------------- /administrative/annual-ompi-github-committer-review.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | ''' 4 | Requirements: 5 | 6 | $ pip3 install pygithub 7 | 8 | Go to https://github.com/settings/tokens and make a personal access 9 | token with full permissions to the repo and org. 10 | 11 | ''' 12 | 13 | import csv 14 | 15 | from github import Github 16 | from pprint import pprint 17 | 18 | #-------------------------------------------------------------------- 19 | 20 | # Read the oAuth token file. 
21 | # (you will need to supply this file yourself -- see the comment at 22 | # the top of this file) 23 | token_filename = 'oauth-token.txt' 24 | with open(token_filename, 'r') as f: 25 | token = f.read().strip() 26 | g = Github(token) 27 | 28 | #-------------------------------------------------------------------- 29 | 30 | print("Getting open-mpi organization...") 31 | org = 'open-mpi' 32 | ompi_org = g.get_organization(org) 33 | 34 | #-------------------------------------------------------------------- 35 | 36 | print("Loading organization repos...") 37 | all_members = dict() 38 | repos = dict() 39 | for repo in ompi_org.get_repos(): 40 | print("Found Org Repo: {repo}".format(repo=repo.name)) 41 | 42 | # For each repo, get the teams on that repo 43 | repo_teams = dict() 44 | for team in repo.get_teams(): 45 | out = (" Found team on repo {org}/{repo}: {team} ({perm}) " 46 | .format(org=ompi_org.name, repo=repo.name, 47 | team=team.name, perm=team.permission)) 48 | # We only care about teams with push permissions 49 | if team.permission == "pull": 50 | print("{out} -- SKIPPED".format(out=out)) 51 | continue 52 | 53 | print(out) 54 | 55 | # Find all the members of this team 56 | team_members = dict() 57 | member_teams = dict() 58 | for member in team.get_members(): 59 | print(" Found member: {name}" 60 | .format(name=member.login)) 61 | team_members[member.id] = member 62 | 63 | if member.id not in all_members: 64 | all_members[member.id] = { 65 | 'member' : member, 66 | 'member_teams' : dict(), 67 | } 68 | 69 | # Find the member in the org and add this team to them 70 | all_members[member.id]['member_teams'][team.id] = team 71 | 72 | # Same the results 73 | repo_teams[team.id] = { 74 | 'team' : team, 75 | 'team_members' : team_members, 76 | } 77 | 78 | # Save the results 79 | repos[repo.id] = { 80 | 'repo' : repo, 81 | 'repo_teams' : repo_teams, 82 | } 83 | 84 | 85 | print("All the repos:") 86 | pprint(repos) 87 | pprint(all_members) 88 | 89 | #-------------------------------------------------------------------- 90 | 91 | # Pre-load the field names with info about the user and repo 92 | fieldnames = ['login', 'name', 'email', 'company'] 93 | 94 | # Add all the repo names 95 | for _, rentry in repos.items(): 96 | repo = rentry['repo'] 97 | fieldnames.append("{org}/{repo}" 98 | .format(org=ompi_org.login, 99 | repo=repo.name)) 100 | 101 | #-------------------------------------------------------------------- 102 | 103 | # Now write out the CSV 104 | outfile = 'permissions.csv' 105 | print("Writing: ".format(outfile=outfile)) 106 | with open(outfile, 'w', newline='') as csvfile: 107 | writer = csv.DictWriter(csvfile, fieldnames=fieldnames, 108 | quoting=csv.QUOTE_ALL) 109 | writer.writeheader() 110 | for mid, mentry in all_members.items(): 111 | member = mentry['member'] 112 | print(" Writing member: {member}".format(member=member.login)) 113 | 114 | # Initial entries about the user 115 | row = { 116 | 'login' : member.login, 117 | 'name' : member.name, 118 | 'email' : member.email, 119 | 'company' : member.company, 120 | } 121 | 122 | # Fill in values for each repo 123 | for _, rentry in repos.items(): 124 | repo = rentry['repo'] 125 | 126 | found = list() 127 | for tid, tentry in rentry['repo_teams'].items(): 128 | if tid in mentry['member_teams']: 129 | team = tentry['team'] 130 | found.append(team.name) 131 | 132 | row[repo.full_name] = ', '.join(found) 133 | 134 | writer.writerow(row) 135 | -------------------------------------------------------------------------------- 
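The committer-review script above assumes a working personal access token in oauth-token.txt. A minimal pre-flight sketch (hypothetical; not part of this repository) that uses the same PyGithub calls to confirm the token authenticates and can see the open-mpi organization before kicking off the full permissions crawl:

#!/usr/bin/env python3
# Hypothetical pre-flight check for annual-ompi-github-committer-review.py.
# Assumes oauth-token.txt sits in the current directory, as described in
# administrative/README.md.
from github import Github

with open('oauth-token.txt', 'r') as f:
    token = f.read().strip()

g = Github(token)
# If the token is bad, the first API call below raises a GithubException.
print("Authenticated as: {login}".format(login=g.get_user().login))
org = g.get_organization('open-mpi')
print("Organization {login} has {n} repos".format(login=org.login,
                                                  n=org.get_repos().totalCount))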
/jenkins/packer.json: -------------------------------------------------------------------------------- 1 | { 2 | "variables": { 3 | "subnet_id" : "{{env `AWS_SUBNET_ID`}}", 4 | "vpc_id" : "{{env `AWS_VPC_ID`}}", 5 | "build_date" : "{{env `BUILD_DATE`}}", 6 | "iam_role" : "{{env `AWS_IAM_ROLE`}}" 7 | }, 8 | "builders": [{ 9 | "type": "amazon-ebs", 10 | "name" : "AmazonLinux1", 11 | "region": "us-west-2", 12 | "source_ami": "ami-01f08ef3e76b957e5", 13 | "instance_type": "t3.micro", 14 | "ssh_username": "ec2-user", 15 | "ssh_pty" : true, 16 | "ami_name": "Jenkins Amazon Linux 1 {{user `build_date`}}", 17 | "associate_public_ip_address" : true, 18 | "sriov_support" : true, 19 | "ena_support" : true, 20 | "iam_instance_profile" : "{{user `iam_role`}}" 21 | },{ 22 | "type": "amazon-ebs", 23 | "name" : "AmazonLinux2", 24 | "region": "us-west-2", 25 | "source_ami": "ami-0d6621c01e8c2de2c", 26 | "instance_type": "t3.micro", 27 | "ssh_username": "ec2-user", 28 | "ssh_pty" : true, 29 | "ami_name": "Jenkins Amazon Linux 2 {{user `build_date`}}", 30 | "associate_public_ip_address" : true, 31 | "sriov_support" : true, 32 | "ena_support" : true, 33 | "iam_instance_profile" : "{{user `iam_role`}}" 34 | },{ 35 | "type": "amazon-ebs", 36 | "name" : "Ubuntu16.04", 37 | "region": "us-west-2", 38 | "source_ami": "ami-008c6427c8facbe08", 39 | "instance_type": "t3.micro", 40 | "ssh_username": "ubuntu", 41 | "ssh_pty" : true, 42 | "ami_name": "Jenkins Ubuntu 16.04 {{user `build_date`}}", 43 | "associate_public_ip_address" : true, 44 | "sriov_support" : true, 45 | "ena_support" : true, 46 | "iam_instance_profile" : "{{user `iam_role`}}" 47 | },{ 48 | "type": "amazon-ebs", 49 | "name" : "Ubuntu18.04", 50 | "region": "us-west-2", 51 | "source_ami": "ami-003634241a8fcdec0", 52 | "instance_type": "t3.micro", 53 | "ssh_username": "ubuntu", 54 | "ssh_pty" : true, 55 | "ami_name": "Jenkins Ubuntu 18.04 {{user `build_date`}}", 56 | "associate_public_ip_address" : true, 57 | "sriov_support" : true, 58 | "ena_support" : true, 59 | "iam_instance_profile" : "{{user `iam_role`}}" 60 | },{ 61 | "type": "amazon-ebs", 62 | "name" : "Ubuntu20.04", 63 | "region": "us-west-2", 64 | "source_ami": "ami-09dd2e08d601bff67", 65 | "instance_type": "t3.micro", 66 | "ssh_username": "ubuntu", 67 | "ssh_pty" : true, 68 | "ami_name": "Jenkins Ubuntu 20.04 {{user `build_date`}}", 69 | "associate_public_ip_address" : true, 70 | "sriov_support" : true, 71 | "ena_support" : true, 72 | "iam_instance_profile" : "{{user `iam_role`}}" 73 | },{ 74 | "type": "amazon-ebs", 75 | "name" : "RHEL77", 76 | "region": "us-west-2", 77 | "source_ami": "ami-00aa0a1b208ece144", 78 | "instance_type": "t3.micro", 79 | "ssh_username": "ec2-user", 80 | "ssh_pty" : true, 81 | "ami_name": "Jenkins RHEL 7.7 {{user `build_date`}}", 82 | "associate_public_ip_address" : true, 83 | "sriov_support" : true, 84 | "ena_support" : true, 85 | "iam_instance_profile" : "{{user `iam_role`}}" 86 | },{ 87 | "type": "amazon-ebs", 88 | "name" : "RHEL80", 89 | "region": "us-west-2", 90 | "source_ami": "ami-02f147dfb8be58a10", 91 | "instance_type": "t3.micro", 92 | "ssh_username": "ec2-user", 93 | "ssh_pty" : true, 94 | "ami_name": "Jenkins RHEL 8.0 {{user `build_date`}}", 95 | "associate_public_ip_address" : true, 96 | "sriov_support" : true, 97 | "ena_support" : true, 98 | "iam_instance_profile" : "{{user `iam_role`}}" 99 | },{ 100 | "type": "amazon-ebs", 101 | "name" : "SLES12sp5", 102 | "region": "us-west-2", 103 | "source_ami": "ami-0ef7bc560a2db93f8", 104 | "instance_type": 
"t3.micro", 105 | "ssh_username": "ec2-user", 106 | "ssh_pty" : true, 107 | "ami_name": "Jenkins SLES 12sp5 {{user `build_date`}}", 108 | "associate_public_ip_address" : true, 109 | "sriov_support" : true, 110 | "ena_support" : true, 111 | "iam_instance_profile" : "{{user `iam_role`}}" 112 | },{ 113 | "type": "amazon-ebs", 114 | "name" : "SLES15sp1", 115 | "region": "us-west-2", 116 | "source_ami": "ami-0fde69c9f9f78ae47", 117 | "instance_type": "t3.micro", 118 | "ssh_username": "ec2-user", 119 | "ssh_pty" : true, 120 | "ami_name": "Jenkins SLES 15sp1 {{user `build_date`}}", 121 | "associate_public_ip_address" : true, 122 | "sriov_support" : true, 123 | "ena_support" : true, 124 | "iam_instance_profile" : "{{user `iam_role`}}" 125 | },{ 126 | "type": "amazon-ebs", 127 | "name" : "FreeBSD11", 128 | "region": "us-west-2", 129 | "source_ami": "ami-6926f809", 130 | "instance_type": "t2.micro", 131 | "ssh_username": "ec2-user", 132 | "ssh_pty" : true, 133 | "ssh_timeout" : "10m", 134 | "ami_name": "Jenkins FreeBSD 11 {{user `build_date`}}", 135 | "associate_public_ip_address" : true, 136 | "sriov_support" : true, 137 | "ena_support" : true, 138 | "iam_instance_profile" : "{{user `iam_role`}}" 139 | }], 140 | "provisioners": [{ 141 | "type": "shell", 142 | "script" : "customize-ami.sh" 143 | }] 144 | } 145 | -------------------------------------------------------------------------------- /jenkins/open-mpi-autotools-build.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # 3 | # Install (building if necessary) the autotools packages necessary for 4 | # building a particular version of Open MPI. 5 | # 6 | # We build and save artifacts outside of the Jenkins workspace, which 7 | # is a little odd, but allows us to persist builds across jobs (and 8 | # avoid any job-specific naming problems in paths). We will write the 9 | # autotools installations into $HOME/autotools-setup/ unless otherwise 10 | # instructed. If JENKINS_AGENT_HOME is set, we will use that instead 11 | # of $HOME. 12 | # 13 | # In addition to building and saving artifacts (including to S3 for 14 | # EC2 instances), this script can be called for every job to create a 15 | # simlink from the built autotools into $WORKSPACE/autotools-install. 16 | # 17 | # usage: ./open-mpi-autotools-build.sh 18 | # 19 | 20 | # be lazy... 21 | set -e 22 | 23 | ompi_tree=$1 24 | dist_script=${ompi_tree}/contrib/dist/make_dist_tarball 25 | s3_path="s3://ompi-jenkins-config/autotools-builds" 26 | 27 | if test ! -n "${JENKINS_AGENT_HOME}" ; then 28 | JENKINS_AGENT_HOME=${HOME} 29 | fi 30 | autotools_root=${JENKINS_AGENT_HOME}/autotools-builds 31 | mkdir -p "${autotools_root}" 32 | 33 | if test ! -r ${dist_script} ; then 34 | echo "Can not read ${dist_script}. Aborting." 
35 | exit 1 36 | fi 37 | 38 | os=`uname -s` 39 | if test "${os}" = "Linux"; then 40 | eval "PLATFORM_ID=`sed -n 's/^ID=//p' /etc/os-release`" 41 | eval "VERSION_ID=`sed -n 's/^VERSION_ID=//p' /etc/os-release`" 42 | else 43 | PLATFORM_ID=`uname -s` 44 | VERSION_ID=`uname -r` 45 | fi 46 | 47 | if `echo ${NODE_NAME} | grep -q '^EC2'` ; then 48 | IS_EC2="yes" 49 | else 50 | IS_EC2="no" 51 | fi 52 | 53 | echo "==> Platform: $PLATFORM_ID" 54 | echo "==> Version: $VERSION_ID" 55 | echo "==> EC2: $IS_EC2" 56 | 57 | for pkg in AC AM LT M4 FLEX; do 58 | eval "${pkg}_VERSION=`sed -ne \"s/^${pkg}_TARGET_VERSION=\(.*\)/\1/p\" ${dist_script}`" 59 | done 60 | 61 | version_string="${AC_VERSION}-${AM_VERSION}-${LT_VERSION}-${M4_VERSION}-${FLEX_VERSION}" 62 | tarball_name="autotools-${PLATFORM_ID}_${VERSION_ID}-${version_string}.tar.gz" 63 | autotools_install_short="autotools-install-${version_string}" 64 | autotools_install="${autotools_root}/${autotools_install_short}" 65 | echo "==> Version string: ${version_string}" 66 | 67 | cd ${autotools_root} 68 | 69 | if test ${IS_EC2} = "yes" && test ! -d ${autotools_install} ; then 70 | if aws s3 cp ${s3_path}/${tarball_name} . >& /dev/null ; then 71 | echo "==> Downloaded build from S3" 72 | tar xf ${tarball_name} 73 | rm ${tarball_name} 74 | fi 75 | fi 76 | 77 | if test ! -d ${autotools_install} ; then 78 | echo "==> No build found ; building from scratch" 79 | 80 | autotools_srcdir="${autotools_root}/autotools-src.$$" 81 | 82 | mkdir -p ${autotools_srcdir} 83 | cd ${autotools_srcdir} 84 | 85 | export PATH=${autotools_install}/bin:${PATH} 86 | export LD_LIBRARY_PATH=${autotools_install}/lib:${LD_LIBRARY_PATH} 87 | 88 | curl -fO http://ftp.gnu.org/gnu/autoconf/autoconf-${AC_VERSION}.tar.gz 89 | tar xf autoconf-${AC_VERSION}.tar.gz 90 | (cd autoconf-${AC_VERSION} ; ./configure --prefix=${autotools_install} ; make install) 91 | 92 | curl -fO http://ftp.gnu.org/gnu/automake/automake-${AM_VERSION}.tar.gz 93 | tar xf automake-${AM_VERSION}.tar.gz 94 | (cd automake-${AM_VERSION} ; ./configure --prefix=${autotools_install} ; make install) 95 | 96 | curl -fO http://ftp.gnu.org/gnu/libtool/libtool-${LT_VERSION}.tar.gz 97 | tar xf libtool-${LT_VERSION}.tar.gz 98 | (cd libtool-${LT_VERSION} ; ./configure --prefix=${autotools_install} ; make install) 99 | 100 | curl -fO http://ftp.gnu.org/gnu/m4/m4-${M4_VERSION}.tar.gz 101 | tar xf m4-${M4_VERSION}.tar.gz 102 | (cd m4-${M4_VERSION} ; ./configure --prefix=${autotools_install} ; make install) 103 | 104 | # When flex moved from ftp.gnu.org to sourceforge to GitHub for 105 | # downloads, they dropped all the archive versions. Including the 106 | # one we say we require (sigh). So we archive that tarball 107 | # (stolen from a distro archive repository) for use. Hopefully, 108 | # one day, we will be able to update :). 109 | flex_tarball="flex-${FLEX_VERSION}.tar.gz" 110 | if ! curl -fO https://github.com/westes/flex/releases/download/v${FLEX_VERSION}/${flex_tarball} ; then 111 | curl -fO https://download.open-mpi.org/archive/flex/${flex_tarball} 112 | fi 113 | tar xf ${flex_tarball} 114 | (cd flex-${FLEX_VERSION} ; ./configure --prefix=${autotools_install} ; make install) 115 | 116 | cd ${autotools_root} 117 | 118 | # autotools_srcdir was unique to this process, so this is safe 119 | # even in a concurrent Jenkins jobs situation. 
120 | rm -rf ${autotools_srcdir} 121 | 122 | if test "$IS_EC2" = "yes" ; then 123 | echo "==> Archiving build to S3" 124 | tar czf ${tarball_name} ${autotools_install_short} 125 | aws s3 cp ${tarball_name} ${s3_path}/${tarball_name} 126 | rm ${tarball_name} 127 | fi 128 | fi 129 | 130 | echo "==> Symlinking ${autotools_install} to ${WORKSPACE}/autotools-install" 131 | ln -s ${autotools_install} ${WORKSPACE}/autotools-install 132 | -------------------------------------------------------------------------------- /nightly-tarball/OMPIBuilder.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright (c) 2017 Amazon.com, Inc. or its affiliates. All Rights 3 | # Reserved. 4 | # 5 | # Additional copyrights may follow 6 | # 7 | 8 | import Builder 9 | import S3BuildFiler 10 | import os 11 | import re 12 | import shutil 13 | import tempfile 14 | 15 | 16 | class OMPIBuilder(Builder.Builder): 17 | """Wrapper for some current oddities in OMPI project build files 18 | 19 | Currently, the create_tarball scripts have some, but not all, of 20 | the functionality needed to avoid too much special casing of the 21 | build system. We're going to fix up the create_tarball scripts in 22 | all the branches so that this code (possibly with the exception of 23 | the run_with_autotools bits) goes away, so put it here rather in 24 | the Builder class that will be used long term... 25 | 26 | """ 27 | 28 | def update_version_file(self): 29 | """Update version file in the OMPI/PMIx way 30 | 31 | Rewrite VERSION file, subsituting tarball_version and rep_rev 32 | based on computed values. 33 | """ 34 | branch = self._current_build['branch'] 35 | build_time = self._current_build['build_time'] 36 | githash = self._current_build['revision'] 37 | version_file = os.path.join(self._current_build['source_tree'], 'VERSION') 38 | 39 | self._current_build['version_string'] = '%s-%s-%s' % (branch, build_time, githash) 40 | self._logger.debug('version_string: %s' % (self._current_build['version_string'])) 41 | 42 | # sed in the new tarball_version= and repo_rev= lines in the VERSION file 43 | tarball_version_pattern = re.compile(r'^tarball_version=.*') 44 | repo_rev_pattern = re.compile(r'^rep_rev=.*') 45 | with tempfile.NamedTemporaryFile(mode='w', delete=False) as tmp_file: 46 | with open(version_file) as src_file: 47 | for line in src_file: 48 | line = tarball_version_pattern.sub('tarball_version=%s' % 49 | (self._current_build['version_string']), line) 50 | line = repo_rev_pattern.sub('repo_rev=%s' % (githash), line) 51 | tmp_file.write(line) 52 | shutil.copystat(version_file, tmp_file.name) 53 | shutil.move(tmp_file.name, version_file) 54 | 55 | 56 | def build(self): 57 | """Run OMPI-custom build step 58 | 59 | Call autogen ; configure ; make distcheck, but with the right 60 | $USER. It would be more awesome to use the make_dist_tarball 61 | script that is part of OMPI / PMIx, but it undoes any VERSION 62 | file changes we did in the update_version step. So do 63 | everything here instead until we can update the 64 | make_dist_tarball scripts. 65 | """ 66 | # currently can't use the build script because it always 67 | # rewrites the VERSION file. Need to fix that, and then can 68 | # kill off this function and use the tarball_builder. 
69 | branch_name = self._current_build['branch_name'] 70 | source_tree = self._current_build['source_tree'] 71 | cwd = os.getcwd() 72 | os.chdir(source_tree) 73 | try: 74 | # lie about our username in $USER so that autogen will skip all 75 | # .ompi_ignore'ed directories (i.e., so that we won't get 76 | # .ompi_unignore'ed) 77 | child_env = os.environ.copy() 78 | child_env['USER'] = self._config['project_very_short_name'] + 'builder' 79 | 80 | self.call([self._config['autogen']], build_call=True, env=child_env) 81 | self.call(['./configure'], build_call=True, env=child_env) 82 | 83 | # Do make distcheck (which will invoke config/distscript.csh to set 84 | # the right values in VERSION). distcheck does many things; we need 85 | # to ensure it doesn't pick up any other installs via LD_LIBRARY_PATH. 86 | # It may be a bit Draconian to totally clean LD_LIBRARY_PATH (i.e., we 87 | # may need something in there), but at least in the current building 88 | # setup, we don't. But be advised that this may need to change in the 89 | # future... 90 | child_env['LD_LIBRARY_PATH'] = '' 91 | self.call(['make', 'distcheck'], build_call=True, env=child_env) 92 | finally: 93 | os.chdir(cwd) 94 | 95 | 96 | def call(self, args, log_name=None, build_call=False, env=None): 97 | """OMPI wrapper around call 98 | 99 | Add wrapper to properly set up autotools for OMPI/PMIx/hwloc, 100 | then call the base Builder.call() 101 | """ 102 | if build_call: 103 | run_with_autotools = os.path.join(self._config['builder_tools'], 'run-with-autotools.sh') 104 | full_args = [run_with_autotools, 'autotools/%s-%s' % 105 | (self._config['project_very_short_name'], self._current_build['branch_name'])] 106 | full_args.extend(args) 107 | if log_name == None: 108 | log_name = os.path.basename(args[0]) 109 | else: 110 | full_args = args 111 | super(OMPIBuilder, self).call(full_args, log_name, env=env) 112 | -------------------------------------------------------------------------------- /nightly-tarball/hwloc-nightly-tarball: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # 3 | # Copyright (c) 2017 Amazon.com, Inc. or its affiliates. All Rights 4 | # Reserved. 
5 | # 6 | # Additional copyrights may follow 7 | # 8 | 9 | import OMPIBuilder 10 | import S3BuildFiler 11 | import os 12 | import re 13 | import shutil 14 | import tempfile 15 | import S3BuildFiler 16 | 17 | 18 | nightly_prefix='/mnt/data/nightly-tarball' 19 | config_data = { 'project_name' : 'hwloc', 20 | 'project_short_name' : 'hwloc', 21 | 'email_dest' : 'hwloc-commits@lists.open-mpi.org', 22 | 'email_from' : 'mpiteam@aws.open-mpi.org', 23 | 'repository' : 'https://github.com/open-mpi/hwloc.git', 24 | 'scratch_path' : nightly_prefix + '/scratch', 25 | 'failed_build_prefix' : 'failed-builds/', 26 | 'failed_build_url' : 'http://download.open-mpi.org/nightly/hwloc/', 27 | 'autogen' : './autogen.sh', 28 | 'coverity' : { 'tool_dir' : nightly_prefix + '/coverity', 29 | 'tool_url' : 'https://scan.coverity.com/download/cxx/linux64', 30 | 'token_file' : nightly_prefix + '/coverity/hwloc-token.txt', 31 | 'project_name' : 'hwloc', 32 | 'project_prefix' : 'hwloc', 33 | 'configure_args' : '', 34 | 'make_args' : '-j 8 check', 35 | 'email' : 'brice.goglin@labri.fr' }, 36 | 'branches' : { 'master' : { 'output_location' : 'master/', 37 | 'coverity' : False, 38 | 'max_count' : 7 }, 39 | 'v2.0' : { 'output_location' : 'v2.0/', 40 | 'coverity' : False, 41 | 'max_count' : 7 }, 42 | 'v1.11' : { 'output_location' : 'v1.11/', 43 | 'coverity' : False, 44 | 'max_count' : 7 }, 45 | }, 46 | } 47 | 48 | 49 | class HwlocBuilder(OMPIBuilder.OMPIBuilder): 50 | """hwloc Custom Bulder class 51 | 52 | Hwloc does things really differently. Have a custom OMPIBuilder 53 | until we can fix the make_tarball script. 54 | 55 | """ 56 | def update_version_file(self): 57 | branch = self._current_build['branch'] 58 | build_time = self._current_build['build_time'] 59 | githash = self._current_build['revision'] 60 | version_file = os.path.join(self._current_build['source_tree'], 'VERSION') 61 | 62 | self._current_build['version_string'] = '%s-%s-%s' % (branch, build_time, githash) 63 | self._logger.debug('version_string: %s' % (self._current_build['version_string'])) 64 | 65 | # sed in the new tarball_version= and repo_rev= lines in the VERSION file 66 | snapshot_pattern = re.compile(r'^snapshot=.*') 67 | snapshot_version_pattern = re.compile(r'^snapshot_version=.*') 68 | with tempfile.NamedTemporaryFile(mode='w', delete=False) as tmp_file: 69 | with open(version_file) as src_file: 70 | for line in src_file: 71 | line = snapshot_version_pattern.sub('snapshot_version=%s' % 72 | (self._current_build['version_string']), line) 73 | line = snapshot_pattern.sub('snapshot=1', line) 74 | tmp_file.write(line) 75 | shutil.copystat(version_file, tmp_file.name) 76 | shutil.move(tmp_file.name, version_file) 77 | 78 | 79 | def build(self): 80 | # currently can't use the build script because it always 81 | # rewrites the VERSION file. Need to fix that, and then can 82 | # kill off this function and use the tarball_builder. 
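        # Illustration with hypothetical values: for hwloc,
        # update_version_file() above rewrites the snapshot lines of the
        # VERSION file instead of tarball_version/repo_rev, e.g.
        #     snapshot_version=master-201707042200-abc1234
        #     snapshot=1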
83 | branch_name = self._current_build['branch_name'] 84 | source_tree = self._current_build['source_tree'] 85 | cwd = os.getcwd() 86 | os.chdir(source_tree) 87 | try: 88 | # lie about our username in $USER so that autogen will skip all 89 | # .ompi_ignore'ed directories (i.e., so that we won't get 90 | # .ompi_unignore'ed) 91 | child_env = os.environ.copy() 92 | child_env['USER'] = self._config['project_very_short_name'] + 'builder' 93 | 94 | self.call([self._config['autogen']], build_call=True, env=child_env) 95 | self.call(['./configure'], build_call=True, env=child_env) 96 | 97 | # Do make distcheck (which will invoke config/distscript.csh to set 98 | # the right values in VERSION). distcheck does many things; we need 99 | # to ensure it doesn't pick up any other installs via LD_LIBRARY_PATH. 100 | # It may be a bit Draconian to totally clean LD_LIBRARY_PATH (i.e., we 101 | # may need something in there), but at least in the current building 102 | # setup, we don't. But be advised that this may need to change in the 103 | # future... 104 | child_env['LD_LIBRARY_PATH'] = '' 105 | self.call(['make', 'doc'], build_call=True, env=child_env) 106 | self.call(['make', 'distcheck'], build_call=True, env=child_env) 107 | finally: 108 | os.chdir(cwd) 109 | 110 | 111 | filer = S3BuildFiler.S3BuildFiler('open-mpi-nightly', 'nightly/hwloc/') 112 | builder = HwlocBuilder(config_data, filer) 113 | builder.run() 114 | -------------------------------------------------------------------------------- /dist/upload-release-to-s3.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | # 3 | # Copyright (c) 2018 Amazon.com, Inc. or its affiliates. All Rights 4 | # Reserved. 5 | # 6 | # Additional copyrights may follow 7 | # 8 | # This script is used to upload new release artifacts to the Open MPI 9 | # organization's S3 release bucket. See ./upload-release-to-s3.py 10 | # --help for more information on command line arguments. 11 | # 12 | # In general, the usage flow looks something like: 13 | # * Log into aws.open-mpi.org 14 | # * Clone https://github.com/bwbarrett/ompi-www.git somewhere in 15 | # your ~/www directory 16 | # * Go to https://aws.open-mpi.org/~[your userid]// and 17 | # make sure it is working 18 | # * Use upload-release-to-s3.py to push release artifacts 19 | # * Edit software///version.inc to make 20 | # the release "live" 21 | # * Visit the web site and make sure the right things appeared. 22 | # * Commit web page changes. The HostGator site syncs from GitHub 23 | # every 15 minutes. 24 | # 25 | 26 | import argparse 27 | import boto3 28 | import botocore 29 | # stupid python versions 30 | try: 31 | from urllib.parse import urlparse 32 | except ImportError: 33 | from urlparse import urlparse 34 | import sys 35 | import re 36 | import os 37 | import dateutil 38 | import time 39 | import uploadutils 40 | 41 | 42 | default_s3_base = 's3://open-mpi-release/release' 43 | default_region = 'us-west-2' 44 | 45 | 46 | def arg_check_copy(target, source, name): 47 | if not name in source: 48 | print('%s not specified but is required either because --yes was specified' % (name)) 49 | print('or one of --project, --branch, --version, or --date was specified.') 50 | exit(1) 51 | target[name] = source[name] 52 | 53 | 54 | parser = argparse.ArgumentParser(description='Upload project release to S3', 55 | epilog='If any of --project, --base, --version, ' + 56 | 'or --date are specified, all 4 options must be ' + 57 | 'specified. 
If none are specified, the script ' + 58 | 'will attempt to guess the options.') 59 | parser.add_argument('--region', 60 | help='Default AWS region', 61 | type=str, required=False, default=default_region) 62 | parser.add_argument('--s3-base', 63 | help='S3 base URL. Optional, defaults to s3://open-mpi-release/release', 64 | type=str, required=False, default=default_s3_base) 65 | parser.add_argument('--project', 66 | help='Project (open-mpi, hwloc, etc.) for release being pushed', 67 | type=str, required=False) 68 | parser.add_argument('--branch', 69 | help='Release branch for release', 70 | type=str, required=False) 71 | parser.add_argument('--version', 72 | help='Version for release', 73 | type=str, required=False) 74 | parser.add_argument('--date', 75 | help='Specify release date, in the local timezone', 76 | type=str, required=False) 77 | parser.add_argument('--yes', 78 | help='Assume yes to go/no go question. Note that you must ' + 79 | 'specify --s3-base, --project, --branch, and --date ' + 80 | 'explicitly when using --yes and --yes will cause the upload ' + 81 | 'to fail if files would be overwritten.', 82 | action='store_true', required=False) 83 | parser.add_argument('--files', 84 | help='space separated list of files to upload', 85 | type=str, required=True, nargs='*') 86 | args = parser.parse_args() 87 | args_dict = vars(args) 88 | 89 | # split the s3 URL into bucket and path, which is what Boto3 expects 90 | parts = urlparse(args_dict['s3_base']) 91 | if parts.scheme != 's3': 92 | print('unexpected URL format for s3-base. Expected scheme s3, got %s' % parts.scheme) 93 | exit(1) 94 | bucket_name = parts.netloc 95 | key_prefix = parts.path.lstrip('/') 96 | 97 | if len(args_dict['files']) < 1: 98 | print('No files specified. Stopping.') 99 | exit(1) 100 | 101 | if (args_dict['project'] == None or args_dict['branch'] == None 102 | or args_dict['version'] == None) or args_dict['date'] == None: 103 | if args_dict['yes']: 104 | print('Can not use --yes option unless --project, --branch, --version, ' + 105 | 'and --date are also set.') 106 | exit(1) 107 | releaseinfo = uploadutils.parse_versions(args_dict['files']) 108 | else: 109 | releaseinfo = {} 110 | 111 | arg_check_copy(releaseinfo, args_dict, 'project') 112 | arg_check_copy(releaseinfo, args_dict, 'branch') 113 | arg_check_copy(releaseinfo, args_dict, 'version') 114 | arg_check_copy(releaseinfo, args_dict, 'date') 115 | 116 | # add the basename based on the project name (because we screwed 117 | # up the name of the project in S3 for Open MPI) 118 | if releaseinfo['project'] == 'open-mpi': 119 | releaseinfo['basename'] = 'openmpi' 120 | else: 121 | releaseinfo['basename'] = releaseinfo['project'] 122 | 123 | # convert the date into a unix time 124 | release_timetuple = dateutil.parser.parse(releaseinfo['date']).timetuple() 125 | releaseinfo['build_unix_time'] = int(time.mktime(release_timetuple)) 126 | 127 | prompt = 'ALWAYS_PROMPT' 128 | if args_dict['yes']: 129 | prompt = 'NO_OVERWRITE' 130 | 131 | s3_client = boto3.client('s3', args_dict['region']) 132 | uploadutils.upload_files(s3_client, bucket_name, key_prefix, 133 | releaseinfo, args_dict['files'], prompt) 134 | -------------------------------------------------------------------------------- /nightly-tarball/Coverity.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # 3 | # Copyright (c) 2017 Amazon.com, Inc. or its affiliates. All Rights 4 | # Reserved. 
5 | # 6 | # Additional copyrights may follow 7 | # 8 | 9 | import os 10 | import sys 11 | import re 12 | import argparse 13 | import logging 14 | import time 15 | import shlex 16 | import shutil 17 | import urllib 18 | import requests 19 | import BuilderUtils 20 | 21 | 22 | def run_coverity_internal(logger, build_root, source_tarball, config): 23 | # read the token file 24 | file = open(config['token_file'], 'r') 25 | token = file.readline().rstrip('\n') 26 | 27 | # get the tool 28 | if not os.path.isdir(config['tool_dir']): 29 | os.makedirs(config['tool_dir']) 30 | os.chdir(config['tool_dir']) 31 | timestamp = 0 32 | if os.path.exists('coverity_tool.tgz'): 33 | timestamp = os.stat('coverity_tool.tgz').st_mtime 34 | if (timestamp + (24 * 3600)) > int(time.time()): 35 | logger.debug('Reusing existing tarball') 36 | else: 37 | logger.debug('Downloading %s' % (config['tool_url'])) 38 | urllib.urlretrieve(config['tool_url'], 'coverity_tool.tgz', 39 | data='token=%s&project=%s' % (token, config['project_name'])) 40 | 41 | # make sure we have a build root 42 | if not os.path.isdir(build_root): 43 | os.makedirs(build_root) 44 | os.chdir(build_root) 45 | 46 | # The name of the top-level directory in the tarball changes every 47 | # time Coverity releases a new version of the tool. So search 48 | # around and hope we find something. 49 | logger.debug('Expanding coverity_tool.tgz') 50 | BuilderUtils.logged_call(['tar', 'xf', os.path.join(config['tool_dir'], 'coverity_tool.tgz')], 51 | log_file=os.path.join(build_root, 'coverity-tools-untar-output.txt')) 52 | cov_path='' 53 | for file in os.listdir(build_root): 54 | if file.startswith('cov-'): 55 | cov_path = os.path.join(build_root, file, 'bin') 56 | break 57 | logger.debug('Found Coverity path %s' % (cov_path)) 58 | 59 | child_env = os.environ.copy() 60 | child_env['PATH'] = cov_path + ':' + child_env['PATH'] 61 | 62 | logger.debug('Extracting build tarball: %s' % (source_tarball)) 63 | BuilderUtils.logged_call(['tar', 'xf', source_tarball], 64 | log_file=os.path.join(build_root, 'coverity-source-untar-output.txt')) 65 | 66 | # guess the directory based on the tarball name. Don't worry 67 | # about the exception, because we want out in that case anyway... 
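    # For example (hypothetical names): with project_prefix 'openmpi' and a
    # source tarball named 'openmpi-4.0.1rc1.tar.bz2', the regex below yields
    # build_version '4.0.1rc1' and srcdir 'openmpi-4.0.1rc1'.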
68 | build_version = re.search('^' + config['project_prefix'] + '-(.*)\.tar\..*$', 69 | os.path.basename(source_tarball)).group(1) 70 | srcdir = config['project_prefix'] + '-' + build_version 71 | os.chdir(srcdir) 72 | 73 | logger.debug('coverity configure') 74 | args = ['./configure'] 75 | if 'configure_args' in config: 76 | args.extend(shlex.split(config['configure_args'])) 77 | BuilderUtils.logged_call(args, env=child_env, 78 | log_file=os.path.join(build_root, 'coverity-configure-output.txt')) 79 | 80 | logger.debug('coverity build') 81 | args = ['cov-build', '--dir', 'cov-int', 'make'] 82 | if 'make_args' in config: 83 | args.extend(shlex.split(config['make_args'])) 84 | BuilderUtils.logged_call(args, env=child_env, 85 | log_file=os.path.join(build_root, 'coverity-make-output.txt')) 86 | 87 | logger.debug('bundling results') 88 | results_tarball = os.path.join(build_root, 'analyzed.tar.bz2') 89 | BuilderUtils.logged_call(['tar', 'jcf', results_tarball, 'cov-int'], 90 | log_file=os.path.join(build_root, 'coverity-results-tar-output.txt')) 91 | 92 | logger.debug('submitting results') 93 | url = 'https://scan.coverity.com/builds?project=' + config['project_name'] 94 | files = { 'file': open(results_tarball, 'rb') } 95 | values = { 'email' : config['email'], 96 | 'version' : build_version, 97 | 'description' : 'nightly-master', 98 | 'token' : token } 99 | r = requests.post(url, files=files, data=values) 100 | r.raise_for_status() 101 | 102 | 103 | def run_coverity(logger, build_root, source_tarball, config): 104 | """Run coverity test and submit results 105 | 106 | Run Coverity test and submit results to their server. Can be run 107 | either standalone (with a tarball as a target) or integrated into 108 | the Builder class. 109 | 110 | """ 111 | cwd = os.getcwd() 112 | try: 113 | run_coverity_internal(logger, build_root, source_tarball, config) 114 | finally: 115 | os.chdir(cwd) 116 | 117 | 118 | if __name__ == '__main__': 119 | config = { 'tool_url' : 'https://scan.coverity.com/download/cxx/linux64', 120 | 'log_level' : 'INFO' } 121 | 122 | parser = argparse.ArgumentParser(description='Coverity submission script for Open MPI related projects') 123 | parser.add_argument('--log-level', help='Log level.', type=str, 124 | choices=['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL']) 125 | parser.add_argument('--build-root', 126 | help='Directory to use as base of build tree.', 127 | type=str) 128 | parser.add_argument('--source-tarball', 129 | help='Tarball to submit for analysis', 130 | type=str) 131 | parser.add_argument('--tool-dir', 132 | help='Directory in which to store downloaded tool (for reuse)', 133 | type=str) 134 | parser.add_argument('--tool-url', 135 | help='URL for downloading Coverity tool', 136 | type=str) 137 | parser.add_argument('--project-name', 138 | help='Coverity project name', 139 | type=str) 140 | parser.add_argument('--project-prefix', 141 | help='prefix of the tarball directory', 142 | type=str) 143 | parser.add_argument('--token-file', 144 | help='File containing the Coverity token for project', 145 | type=str) 146 | parser.add_argument('--configure-args', 147 | help='Configuration arguments for source tarball', 148 | type=str) 149 | parser.add_argument('--make-args', 150 | help='Build arguments for source tarball', 151 | type=str) 152 | parser.add_argument('--email', 153 | help='Coverity submission email address', 154 | type=str) 155 | 156 | for key, value in vars(parser.parse_args()).iteritems(): 157 | if not value == None: 158 | config[key] = value 159 | 160 | 
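    # Example standalone invocation (hypothetical paths and values):
    #
    #   ./Coverity.py --build-root /tmp/coverity-build \
    #       --source-tarball /tmp/openmpi-master-201707042200-abc1234.tar.bz2 \
    #       --tool-dir /tmp/coverity-tool --project-name open-mpi \
    #       --project-prefix openmpi --token-file ~/openmpi-token.txt \
    #       --email someone@example.com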
logging.basicConfig() 161 | logger = logging.getLogger() 162 | logger.setLevel(config['log_level']) 163 | 164 | run_coverity(logger, config['build_root'], config['source_tarball'], config) 165 | -------------------------------------------------------------------------------- /jenkins/open-mpi.dist.create-tarball.groovy: -------------------------------------------------------------------------------- 1 | // -*- groovy -*- 2 | // 3 | // Build an Open MPI dist release 4 | // 5 | // 6 | // WORKSPACE Layout: 7 | // dist-files/ Output of build 8 | // autotools-install/ Autotools install for the builder 9 | // ompi/ Open MPI source tree 10 | // ompi-scripts/ ompi-scripts master checkout 11 | // rpmbuild/ Where RPMs go to die... 12 | 13 | import java.text.SimpleDateFormat 14 | 15 | def rpm_builder = 'amazon_linux_2' 16 | def manpage_builder = 'ubuntu_18.04' 17 | 18 | def release_version 19 | def branch 20 | def tarball 21 | def srpm_name 22 | def s3_prefix 23 | def download_prefix 24 | def build_type = env.Build_type.toLowerCase() 25 | def dateFormat = new SimpleDateFormat("MM/dd/yyyy HH:mm") 26 | def date = new Date() 27 | def build_date = dateFormat.format(date) 28 | 29 | currentBuild.displayName = "#${currentBuild.number} - ${build_type} - ${env.REF}" 30 | currentBuild.description = "Build type: ${build_type}
\nRef: ${env.REF}
\nBuild date: ${build_date}
\n" 31 | 32 | // Step 1: Build a release tarball and RPM. Needs to be on an 33 | // RPM-based system, and easier to do it all in serial on one node. 34 | node(rpm_builder) { 35 | stage('Source Checkout') { 36 | checkout_code(); 37 | } 38 | 39 | stage('Installing Dependencies') { 40 | // because we build tags or hashes, not just branch heads, 41 | // there's not a great way to do the branch -> Autotools 42 | // version matching. So instead grab them from the 43 | // make_dist_tarball script 44 | sh '/bin/bash ompi-scripts/jenkins/open-mpi-autotools-build.sh ompi' 45 | } 46 | 47 | // Build the initial tarball, verify that the resulting tarball 48 | // has a version that matches the tag if we're building Release or 49 | // Pre-Release tarballs. Scratch tarballs have a much looser 50 | // requirement, because scratch. 51 | stage('Build Tarball') { 52 | withEnv(["PATH+AUTOTOOLS=${WORKSPACE}/autotools-install/bin", 53 | "LD_LIBRARY_PATH+AUTOTOOLS=${WORKSPACE}/autotools-install/lib"]) { 54 | def greek_option = "" 55 | switch (build_type) { 56 | case "release": 57 | greek_option = "--no-greek" 58 | s3_prefix="s3://open-mpi-release/release" 59 | download_prefix="https://download.open-mpi.org/release" 60 | break 61 | case "pre-release": 62 | greek_option = "--greekonly" 63 | s3_prefix="s3://open-mpi-release/release" 64 | download_prefix="https://download.open-mpi.org/release" 65 | break 66 | case "scratch": 67 | greek_option = "--greekonly" 68 | def uuid = UUID.randomUUID().toString() 69 | s3_prefix="s3://open-mpi-scratch/scratch/${uuid}" 70 | download_prefix="https://download.open-mpi.org/scratch/${uuid}" 71 | break 72 | default: 73 | error("Unknown build type ${env.Build_type}") 74 | break 75 | } 76 | 77 | sh "/bin/bash ompi-scripts/jenkins/open-mpi.dist.create-tarball.build-tarball.sh ${build_type} ${env.REF} ${s3_prefix} \"${build_date}\"" 78 | 79 | // if we just call File to read the file, it will look on 80 | // master's filesystem, instead of on this node. So use 81 | // a shell instead. 82 | tarball = sh(script: "cat build-tarball-filename.txt", 83 | returnStdout: true).trim() 84 | branch = sh(script: "cat build-tarball-branch_directory.txt", 85 | returnStdout: true).trim() 86 | build_prefix="${s3_prefix}/open-mpi/${branch}" 87 | download_prefix="${download_prefix}/open-mpi/${branch}" 88 | currentBuild.description="${currentBuild.description}Tarball: ${download_prefix}/${tarball}
\n" 89 | } 90 | } 91 | 92 | stage('Build Source RPM') { 93 | prep_rpm_environment() 94 | sh "/bin/bash ompi-scripts/jenkins/open-mpi.dist.create-tarball.build-srpm.sh ${s3_prefix} ${tarball} ${branch} \"${build_date}\"" 95 | srpm_name = sh(returnStdout:true, script: 'cat ${WORKSPACE}/srpm-name.txt').trim() 96 | currentBuild.description="${currentBuild.description}SRC RPM: ${download_prefix}/${srpm_name}
\n" 97 | } 98 | } 99 | 100 | // Run a bunch of different tests in parallel 101 | parallel ( 102 | "man pages" : { 103 | node(manpage_builder) { 104 | stage('Build Man Pages') { 105 | checkout_code(); 106 | sh "ls -lR . ; /bin/bash ompi-scripts/jenkins/open-mpi.dist.create-tarball.build-manpages.sh ${build_prefix} ${tarball} ${branch}" 107 | artifacts = sh(returnStdout:true, script:'cat ${WORKSPACE}/manpage-build-artifacts.txt').trim() 108 | currentBuild.description="${currentBuild.description}Manpages: ${artifacts}
\n" 109 | } 110 | } 111 | }, 112 | 113 | "tarball distcheck" : { 114 | node(rpm_builder) { 115 | stage('Tarball Distcheck') { 116 | remove_build_directory('openmpi-*') 117 | sh """aws s3 cp ${build_prefix}/${tarball} ${tarball} 118 | tar xf ${tarball} 119 | cd openmpi-* 120 | ./configure 121 | make distcheck""" 122 | } 123 | } 124 | }, 125 | 126 | "rpm test suites" : { 127 | node(rpm_builder) { 128 | stage('RPM Build') { 129 | prep_rpm_environment() 130 | sh """aws s3 cp ${build_prefix}/${srpm_name} ${srpm_name} 131 | rpmbuild --rebuild ${srpm_name} 132 | bin_rpm_name=`find ${WORKSPACE}/rpmbuild/RPMS -name "*.rpm" -print` 133 | sudo rpm -Uvh \${bin_rpm_name}""" 134 | } 135 | } 136 | }, 137 | 138 | "tarball test suites" : { 139 | node('gcc5') { 140 | stage('Tarball Test Build') { 141 | remove_build_directory('openmpi-*') 142 | sh """aws s3 cp ${build_prefix}/${tarball} ${tarball} 143 | tar xf ${tarball} 144 | cd openmpi-* 145 | ./configure --prefix=$WORKSPACE/openmpi-install 146 | make -j 8 all 147 | make check 148 | make install""" 149 | } 150 | } 151 | } 152 | ) 153 | 154 | 155 | def prep_rpm_environment() { 156 | sh """rm -rf ${WORKSPACE}/rpmbuild ; mkdir -p ${WORKSPACE}/rpmbuild/{BUILD,RPMS,SOURCES,SPECS,SRPMS} ; rm -f ~/.rpmmacros ; echo \"%_topdir ${WORKSPACE}/rpmbuild\" > ~/.rpmmacros""" 157 | } 158 | 159 | 160 | // delete build directory (relative to WORKSPACE), dealing with the autotools silly permissions 161 | def remove_build_directory(dirname) { 162 | sh """if ls -1 ${dirname} ; then 163 | chmod -R u+w ${dirname} 164 | rm -rf ${dirname} 165 | fi""" 166 | } 167 | 168 | 169 | def checkout_code() { 170 | checkout(changelog: false, poll: false, 171 | scm: [$class: 'GitSCM', branches: [[name: '$REF']], 172 | doGenerateSubmoduleConfigurations: false, 173 | extensions: [[$class: 'WipeWorkspace'], 174 | [$class: 'RelativeTargetDirectory', 175 | relativeTargetDir: 'ompi']], 176 | submoduleCfg: [], 177 | userRemoteConfigs: [[credentialsId: '6de58bf1-2619-4065-99bb-8d284b4691ce', 178 | url: 'https://github.com/open-mpi/ompi/']]]) 179 | // scm is a provided global variable that points to the repository 180 | // configured in the Jenkins job for the pipeline source. Since the 181 | // pipeline and the helper scripts live in the same place, this is 182 | // perfect for us. We check this out on the worker nodes so that 183 | // the helper scripts are always available. 184 | checkout(changelog: false, poll: false, scm: scm) 185 | } 186 | -------------------------------------------------------------------------------- /nightly-tarball/MockBuildFiler.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # 3 | # Copyright (c) 2017 Amazon.com, Inc. or its affiliates. All Rights 4 | # Reserved. 5 | # 6 | # Additional copyrights may follow 7 | 8 | import BuildFiler 9 | import unittest 10 | import logging 11 | import time 12 | import os 13 | import shutil 14 | import tempfile 15 | import errno 16 | import glob 17 | 18 | 19 | logger = logging.getLogger('Builder.MockBuildFiler') 20 | 21 | 22 | class MockBuildFiler(BuildFiler.BuildFiler): 23 | """Mock BuildFiler 24 | 25 | Used for unit tests. Use with care. Files are stored in a temp 26 | directory unique to the instance of MockBuildFiler and destroyed 27 | when the instance is destroyed. 
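    A minimal illustrative use (hypothetical file names):

        filer = MockBuildFiler()
        filer.upload_from_stream('builds/build-foo.json', '{"valid": true}')
        data = filer.download_to_stream('builds/build-foo.json').read()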
28 | 29 | """ 30 | 31 | def __init__(self, basename=None, clean_on_delete=True): 32 | logger.debug("-> creating LocalBuildFiler") 33 | testtime = str(time.time()) 34 | self._stream_map = { } 35 | self._file_map = { } 36 | if basename == None: 37 | self._basename = tempfile.mkdtemp() 38 | else: 39 | self._basename = os.path.expandvars(basename) 40 | self._clean_on_delete = clean_on_delete 41 | 42 | 43 | # Yes, I know, del is evil. But it's actually what I want here. 44 | # Don't really care about garbage collection (I think) 45 | def __del__(self): 46 | if self._clean_on_delete: 47 | logger.debug("-> Cleaning tree %s", self._basename) 48 | shutil.rmtree(self._basename) 49 | 50 | 51 | def download_to_stream(self, filename): 52 | """Download to stream 53 | 54 | Gets the object at basename/filename and returns as a 55 | StreamObject, suitable for turning into a string via .read() 56 | or passing to JSON / YAML constructors. 57 | 58 | """ 59 | logger.debug("-> downloading to stream: " + filename) 60 | pathname = os.path.join(self._basename, filename) 61 | return open(pathname, "r") 62 | 63 | 64 | def upload_from_stream(self, filename, data, properties = {}): 65 | """Upload from a stream 66 | 67 | Puts the stream information in data to an object at 68 | basename/filename. Data can be the output of json.dumps() or 69 | similar. 70 | 71 | """ 72 | logger.debug("-> uploading from stream: " + filename) 73 | pathname = os.path.join(self._basename, filename) 74 | dirname = os.path.dirname(pathname) 75 | if not os.access(dirname, os.F_OK): 76 | os.makedirs(dirname) 77 | with open(pathname, "w") as text_file: 78 | text_file.write(data) 79 | 80 | 81 | def download_to_file(self, remote_filename, local_filename): 82 | """Download to a file 83 | 84 | Download the object at basename/remote_filename to 85 | local_filename. like delete(), this file is provided mainly 86 | for unit testing. 87 | 88 | """ 89 | logger.debug("-> downloading to file, remote: " + remote_filename 90 | + " local: " + local_filename) 91 | remote_pathname = os.path.join(self._basename, remote_filename) 92 | shutil.copyfile(remote_pathname, local_filename) 93 | 94 | 95 | def upload_from_file(self, local_filename, remote_filename, properties = {}): 96 | """Upload a file 97 | 98 | Upload the local_file to S3 as basename/remote_filename. 99 | """ 100 | logger.debug("-> uploading from file, remote: " + remote_filename 101 | + " local: " + local_filename) 102 | remote_pathname = os.path.join(self._basename, remote_filename) 103 | dirname = os.path.dirname(remote_pathname) 104 | if not os.access(dirname, os.F_OK): 105 | os.makedirs(dirname) 106 | shutil.copyfile(local_filename, remote_pathname) 107 | 108 | 109 | def delete(self, filename): 110 | """Delete file 111 | 112 | This is not necessary before uploading a build history file 113 | over an existing file, but is provided mainly for unit 114 | testing. 115 | 116 | """ 117 | logger.debug("-> deleting build history " + filename) 118 | pathname = os.path.join(self._basename, filename) 119 | os.remove(pathname) 120 | 121 | 122 | def file_search(self, dirname, blob): 123 | """Search for file blob in dirname directory 124 | 125 | Search for all files in dirname matching blob. Returns a list 126 | of filenames that match the search. 
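        For example (illustrative names), file_search('master', 'openmpi-*.tar.gz')
        returns the full paths of any matching files under the filer's
        temporary base directory.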
127 | """ 128 | remote_pathname = os.path.join(self._basename, dirname, blob) 129 | retval = glob.glob(remote_pathname) 130 | logger.debug("retval: %s" % (str(retval))) 131 | return retval 132 | 133 | 134 | class MockBuildFilerTest(unittest.TestCase): 135 | def setUp(self): 136 | self._tempdir = tempfile.mkdtemp() 137 | 138 | 139 | def tearDown(self): 140 | shutil.rmtree(self._tempdir) 141 | pass 142 | 143 | 144 | def test_destructor(self): 145 | filer = MockBuildFiler() 146 | 147 | 148 | def test_stream_bad_get(self): 149 | filer = MockBuildFiler() 150 | try: 151 | filer.download_to_stream("file-that-should-not-exist.txt") 152 | except IOError as e: 153 | if e.errno != errno.ENOENT: 154 | raise 155 | else: 156 | self.fail() 157 | 158 | 159 | def test_stream_read_write(self): 160 | filename = "foo/test-abc.txt" 161 | input_string = "I love me some unit tests.\n" 162 | filer = MockBuildFiler() 163 | 164 | filer.upload_from_stream(filename, input_string) 165 | 166 | body = filer.download_to_stream(filename) 167 | output_string = body.read() 168 | 169 | filer.delete(filename) 170 | 171 | self.assertEqual(input_string, output_string, 172 | input_string + " != " + output_string) 173 | 174 | try: 175 | filer.download_to_stream(filename) 176 | except IOError as e: 177 | if e.errno != errno.ENOENT: 178 | raise 179 | else: 180 | self.fail() 181 | 182 | 183 | def test_file_bad_get(self): 184 | pathname = os.path.join(self._tempdir, "foobar.txt") 185 | 186 | filer = MockBuildFiler() 187 | try: 188 | filer.download_to_file("read-only/file-that-should-not-exist.txt", 189 | pathname) 190 | except IOError as e: 191 | if e.errno != errno.ENOENT: 192 | raise 193 | else: 194 | self.fail() 195 | 196 | 197 | def test_file_read_write(self): 198 | remote_filename = "foo/test-abc.txt" 199 | pathname = os.path.join(self._tempdir, "foobar.txt") 200 | input_string = "I love me some unit tests.\n" 201 | filer = MockBuildFiler() 202 | 203 | with open(pathname, "w") as text_file: 204 | text_file.write(input_string) 205 | filer.upload_from_file(pathname, remote_filename) 206 | 207 | os.remove(pathname) 208 | 209 | try: 210 | body = filer.download_to_stream(remote_filename) 211 | except: 212 | filer.delete(remote_filename) 213 | raise 214 | 215 | try: 216 | filer.download_to_file(remote_filename, pathname) 217 | except: 218 | filer.delete(remote_filename) 219 | raise 220 | 221 | filer.delete(remote_filename) 222 | 223 | output_string = body.read() 224 | self.assertEqual(input_string, output_string, 225 | input_string + " != " + output_string) 226 | 227 | with open(pathname, 'r') as data: 228 | text=data.read() 229 | self.assertEqual(text, output_string, 230 | input_string + " != " + text) 231 | 232 | 233 | if __name__ == '__main__': 234 | unittest.main() 235 | -------------------------------------------------------------------------------- /migration/build-staged-tarball-migration.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | # 3 | # Copyright (c) 2017 Amazon.com, Inc. or its affiliates. All Rights 4 | # Reserved. 5 | # 6 | # Additional copyrights may follow 7 | # 8 | # usage: build-staged-tarball-migration.py --input-path IN --output-path OUT 9 | # 10 | # Builds a staged tree of tarballs/srpms from the historical Open MPI 11 | # layout into the directory structure used for S3 hosting (which is 12 | # *always* release/// or 13 | # nightly///). 
It doesn't re-arrange the 14 | # directories, but instead verifies the various hashes, computes an 15 | # accurate timestamp from the build artifacts themselves (because the 16 | # curren tree has tarballs copied without timestamp preservation) and 17 | # builds the build-*.json files for the S3 scheme the projects are 18 | # using. 19 | # 20 | # This script doesn't push anything into S3 (giving you a chance to 21 | # undo any directory structure before the push). The AWS CLI has a 22 | # nice S3 copy interface for pushing a directory tree. After 23 | # organizing into two directory structures (nightly and release), the 24 | # initial push was run with: 25 | # 26 | # % aws --region us-east-1 s3 cp \ 27 | # s3://open-mpi-release/release/ --recursive 28 | # % aws --region us-east-1 s3 cp \ 29 | # s3://open-mpi-nightly/nightly/ --recursive 30 | # 31 | 32 | import os 33 | import re 34 | import tarfile 35 | import argparse 36 | import time 37 | import json 38 | import hashlib 39 | import shutil 40 | 41 | def compute_hashes(filename): 42 | """Helper function to compute MD5 and SHA1 hashes""" 43 | retval = {} 44 | md5 = hashlib.md5() 45 | sha1 = hashlib.sha1() 46 | with open(filename, 'rb') as f: 47 | while True: 48 | data = f.read(64 * 1024) 49 | if not data: 50 | break 51 | md5.update(data) 52 | sha1.update(data) 53 | retval['md5'] = md5.hexdigest() 54 | retval['sha1'] = sha1.hexdigest() 55 | return retval 56 | 57 | 58 | def do_migrate(input_path, output_path): 59 | for root, dirs, files in os.walk(input_path, topdown=False): 60 | for name in files: 61 | output_root = root 62 | if os.path.basename(root) == 'downloads': 63 | output_root = os.path.dirname(output_root) 64 | output_dir = os.path.join(output_path, output_root) 65 | 66 | if name == 'latest_snapshot.txt': 67 | continue 68 | 69 | pattern = '\.dmg\.gz|\.exe|\.tar\.gz|\.tar\.bz2|-[0-9]+\.src\.rpm|\.zip' 70 | if re.search(pattern, name): 71 | base_filename = re.sub(pattern, '', name) 72 | full_filename = os.path.join(root, name) 73 | 74 | print("==> %s" % (full_filename)) 75 | 76 | # clean up Open MPI windows names 77 | if re.search('\.exe', name) : 78 | version_search = re.search('OpenMPI_v(.*)-.*', base_filename) 79 | if version_search: 80 | base_filename = 'openmpi-' + version_search.group(1) 81 | else: 82 | print("--> no joy %s" % base_filename) 83 | continue 84 | 85 | # clean up hwloc windows names 86 | if re.search('\.zip', name): 87 | version_search = re.search('(hwloc|libtopology)-win.*-build-(.*)', base_filename) 88 | if version_search: 89 | base_filename = '%s-%s' % (version_search.group(1), version_search.group(2)) 90 | else: 91 | print("--> no joy %s" % base_filename) 92 | continue 93 | 94 | # skip the bad tarballs entirely... 95 | if re.search('\.tar\.', name): 96 | try: 97 | tar = tarfile.open(full_filename) 98 | except: 99 | continue 100 | 101 | # build info json files are named 102 | # build-.json, which hopefully is 103 | # unique enough (given that it should be unique enough 104 | # for the actual tarball). 
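                # Example with a hypothetical file: a tarball named
                # 'openmpi-2.1.0rc1.tar.bz2' gives base_filename
                # 'openmpi-2.1.0rc1' and buildfile 'build-openmpi-2.1.0rc1.json'.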
105 | buildfile = 'build-%s.json' % (base_filename) 106 | 107 | build_pathname = os.path.join(output_path, output_root, buildfile) 108 | try: 109 | with open(build_pathname, 'r') as fh: 110 | builddata = json.load(fh) 111 | except: 112 | builddata = {} 113 | branch = os.path.basename(output_root) 114 | version_search = re.search('.*-.*-[0-9]+-(.*)', base_filename) 115 | if version_search: 116 | revision = version_search.group(1) 117 | else: 118 | revision = '' 119 | builddata['branch'] = branch 120 | builddata['valid'] = True 121 | # revision is only used for comparing nightly 122 | # build versions. If the tarball name doesn't 123 | # match the git-based nightly tarball version, set 124 | # revision to empty, as that will cause a rebuild 125 | # (since, by definition, we're not at the latest. 126 | builddata['revision'] = revision 127 | builddata['build_unix_time'] = 0 128 | builddata['delete_on'] = 0 129 | builddata['files'] = {} 130 | 131 | if builddata['build_unix_time'] == 0 and re.search('\.tar\.', name): 132 | try: 133 | tar = tarfile.open(full_filename) 134 | except: 135 | print("tar file %s looks invalid" % (full_filename)) 136 | else: 137 | # many tarballs had their ctime and mtime 138 | # changed in the migration from IU to 139 | # hostgator. So look at the top level 140 | # directory in the tarball instead. 141 | builddata['build_unix_time'] = tar.getmembers()[0].mtime 142 | 143 | hashes = compute_hashes(full_filename) 144 | info = os.stat(full_filename) 145 | builddata['files'][name] = {} 146 | builddata['files'][name]['sha1'] = hashes['sha1'] 147 | builddata['files'][name]['md5'] = hashes['md5'] 148 | builddata['files'][name]['size'] = info.st_size 149 | 150 | # verify the md5sums / sha1sums are sane.. 151 | verify = 0 152 | with open(os.path.join(root, 'md5sums.txt')) as f: 153 | content = f.readlines() 154 | for line in content: 155 | entry = line.split() 156 | if len(entry) != 2: 157 | continue 158 | if name == entry[1]: 159 | if hashes['md5'] == entry[0]: 160 | verify = verify + 1 161 | break 162 | else: 163 | raise Exception("hash mismatch %s %s" % (entry[0], hashesh['md5'])) 164 | with open(os.path.join(root, 'sha1sums.txt')) as f: 165 | content = f.readlines() 166 | for line in content: 167 | entry = line.split() 168 | if len(entry) != 2: 169 | continue 170 | if name == entry[1]: 171 | if hashes['sha1'] == entry[0]: 172 | verify = verify + 1 173 | break 174 | else: 175 | raise Exception("hash mismatch %s %s" % (entry[0], hashesh['sha1'])) 176 | if verify != 2: 177 | raise Exception("Hash verification failure on %s" % (name)) 178 | 179 | # make sure the directory exists... 
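                # The build-*.json written below ends up looking roughly like
                # this (hypothetical values):
                #   {"branch": "v2.1", "valid": true, "revision": "",
                #    "build_unix_time": 1487000000, "delete_on": 0,
                #    "files": {"openmpi-2.1.0rc1.tar.bz2":
                #              {"sha1": "...", "md5": "...", "size": 9815397}}}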
180 | if not os.access(output_dir, os.F_OK): 181 | os.makedirs(output_dir) 182 | 183 | with open(build_pathname, 'w') as fh: 184 | json.dump(builddata, fh) 185 | 186 | shutil.copyfile(full_filename, 187 | os.path.join(output_dir, name)) 188 | 189 | parser = argparse.ArgumentParser(description='Web tarball S3 staging') 190 | parser.add_argument('--input-path', help='input path to traverse', 191 | type=str, required=True) 192 | parser.add_argument('--output-path', help='scratch directory to stage for later s3 upload', 193 | type=str, required=True) 194 | args = parser.parse_args() 195 | 196 | args_dict = vars(args) 197 | 198 | do_migrate(args_dict['input_path'], args_dict['output_path']) 199 | -------------------------------------------------------------------------------- /jenkins/customize-ami.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | # 3 | # Copyright (c) 2017 Amazon.com, Inc. or its affiliates. All Rights 4 | # Reserved. 5 | # 6 | # Additional copyrights may follow 7 | # 8 | # Script to take a normal EC2 AMI and make it OMPI Jenkins-ified, 9 | # intended to be run on a stock instance. This script should probably 10 | # not be called directly (except when debugging), but instead called 11 | # from the packer.json file included in this directory. Packer will 12 | # automate creating all current AMIs, using this script to configure 13 | # all the in-AMI bits. 14 | # 15 | # It is recommended that you use build-amis.sh to build a current set 16 | # of AMIs; see build-amis.sh for usage details. 17 | # 18 | 19 | set -e 20 | 21 | labels="ec2" 22 | 23 | os=`uname -s` 24 | if test "${os}" = "Linux"; then 25 | eval "PLATFORM_ID=`sed -n 's/^ID=//p' /etc/os-release`" 26 | eval "VERSION_ID=`sed -n 's/^VERSION_ID=//p' /etc/os-release`" 27 | else 28 | PLATFORM_ID=`uname -s` 29 | VERSION_ID=`uname -r` 30 | fi 31 | 32 | echo "==> Platform: $PLATFORM_ID" 33 | echo "==> Version: $VERSION_ID" 34 | 35 | OPTIND=1 # Reset in case getopts has been used previously in the shell. 36 | run_test=0 # -b runs an ompi build test; useful for testing new AMIs 37 | clean_ami=1 # -t enables testing mode, where the AMI isn't cleaned up 38 | # after the test (so remote logins still work) 39 | 40 | while getopts "h?tb" opt; do 41 | case "$opt" in 42 | h|\?) 43 | echo "usage: customize-ami.sh [-t]" 44 | exit 1 45 | ;; 46 | b) 47 | run_test=1 48 | ;; 49 | t) 50 | clean_ami=0 51 | ;; 52 | esac 53 | done 54 | 55 | echo "==> Sleeping 2 minutes" 56 | # Some systems run package updates during boot. Avoid yum/apt/zypper 57 | # lock conflicts by waiting a couple minutes. 58 | sleep 120 59 | 60 | echo "==> Installing packages" 61 | 62 | case $PLATFORM_ID in 63 | rhel|centos) 64 | # RHEL's default repos only include the "base" compiler 65 | # version, so don't worry about script version 66 | # differentiation. 
67 | # gcc = 4.8.5 68 | sudo yum -y update 69 | sudo yum -y group install "Development Tools" 70 | sudo yum -y install libevent hwloc hwloc-libs java gdb 71 | labels="${labels} linux rhel ${VERSION_ID}" 72 | case $VERSION_ID in 73 | 7.*) 74 | sudo yum -y install gcc gcc-c++ gcc-gfortran 75 | labels="${labels} gcc48" 76 | ;; 77 | 8.*) 78 | sudo yum -y install python3.8 \ 79 | gcc gcc-c++ gcc-gfortran 80 | sudo alternatives --set python /usr/bin/python3 81 | labels="${labels} gcc8" 82 | ;; 83 | *) 84 | echo "ERROR: Unknown version ${PLATFORM_ID} ${VERSION_ID}" 85 | exit 1 86 | ;; 87 | esac 88 | (cd /tmp && \ 89 | curl "https://s3.amazonaws.com/aws-cli/awscli-bundle.zip" -o "awscli-bundle.zip" && \ 90 | unzip awscli-bundle.zip && \ 91 | sudo ./awscli-bundle/install -i /usr/local/aws -b /usr/local/bin/aws && \ 92 | rm -rf awscli-bundle*) 93 | ;; 94 | amzn) 95 | sudo yum -y update 96 | sudo yum -y groupinstall "Development Tools" 97 | sudo yum -y install libevent-devel hwloc-devel \ 98 | java-1.8.0-openjdk-devel java-1.8.0-openjdk \ 99 | gdb 100 | labels="${labels} linux" 101 | case $VERSION_ID in 102 | 2016.09|2017.03|2017.09|2018.03) 103 | # clang == 3.6.2 104 | sudo yum -y groupinstall "Java Development" 105 | sudo yum -y install gcc44 gcc44-c++ gcc44-gfortran \ 106 | gcc48 gcc48-c++ gcc48-gfortran clang \ 107 | python27-mock python27-boto python27-boto3 108 | sudo alternatives --set java /usr/lib/jvm/jre-1.8.0-openjdk.x86_64/bin/java 109 | labels="${labels} amazon_linux_1 gcc44 gcc48 clang36" 110 | ;; 111 | 2) 112 | sudo yum -y install clang hwloc-devel \ 113 | python2-pip python2 python2-boto3 114 | sudo pip install mock 115 | labels="${labels} amazon_linux_2 gcc7 clang7" 116 | ;; 117 | *) 118 | echo "ERROR: Unknown version ${PLATFORM_ID} ${VERSION_ID}" 119 | exit 1 120 | ;; 121 | esac 122 | ;; 123 | ubuntu) 124 | sudo apt-get update 125 | sudo apt-get -y upgrade 126 | sudo apt-get -y install build-essential gfortran \ 127 | autoconf automake libtool flex hwloc libhwloc-dev git \ 128 | default-jre awscli python-mock rman pandoc 129 | labels="${labels} linux ubuntu_${VERSION_ID} pandoc" 130 | case $VERSION_ID in 131 | 14.04) 132 | sudo apt-get -y install python-boto3 python-mock \ 133 | gcc-4.4 g++-4.4 gfortran-4.4 \ 134 | gcc-4.6 g++-4.6 gfortran-4.6 \ 135 | gcc-4.7 g++-4.7 gfortran-4.7 \ 136 | gcc-4.8 g++-4.8 gfortran-4.8 \ 137 | clang-3.6 clang-3.7 clang-3.8 138 | labels="${labels} gcc44 gcc46 gcc47 gcc48 clang36 clang37 clang38" 139 | ;; 140 | 16.04) 141 | sudo apt-get -y install python-boto3 python-mock \ 142 | gcc-4.7 g++-4.7 gfortran-4.7 \ 143 | gcc-4.8 g++-4.8 gfortran-4.8 \ 144 | gcc-4.9 g++-4.9 gfortran-4.9 \ 145 | clang-3.6 clang-3.7 clang-3.8 \ 146 | gcc-multilib g++-multilib gfortran-multilib 147 | labels="${labels} gcc47 gcc48 gcc49 gcc5 clang36 clang37 clang38 32bit_builds" 148 | ;; 149 | 18.04) 150 | sudo apt-get -y install \ 151 | python-boto3 \ 152 | gcc-4.8 g++-4.8 gfortran-4.8 \ 153 | gcc-5 g++-5 gfortran-5 \ 154 | gcc-6 g++-6 gfortran-6 \ 155 | gcc-7 g++-7 gfortran-7 \ 156 | gcc-8 g++-8 gfortran-8 \ 157 | clang-3.9 clang-4.0 clang-5.0 clang-6.0 \ 158 | clang-7 clang-8 clang-9 \ 159 | gcc-multilib g++-multilib gfortran-multilib 160 | labels="${labels} gcc48 gcc5 gcc6 gcc7 gcc8 clang39 clang40 clang50 clang60 clang7 clang8 clang9 32bit_builds" 161 | ;; 162 | 20.04) 163 | sudo apt-get -y install \ 164 | python-is-python3 python3-boto3 python3-mock \ 165 | gcc-7 g++-7 gfortran-7 \ 166 | gcc-8 g++-8 gfortran-8 \ 167 | gcc-9 g++-9 gfortran-9 \ 168 | gcc-10 g++-10 gfortran-10 
\ 169 | clang-6.0 clang-7 clang-8 clang-9 clang-10 \ 170 | gcc-multilib g++-multilib gfortran-multilib 171 | labels="${labels} gcc7 gcc8 gcc9 gcc10 clang60 clang7 clang8 clang9 clang10 32bit_builds" 172 | ;; 173 | *) 174 | echo "ERROR: Unknown version ${PLATFORM_ID} ${VERSION_ID}" 175 | exit 1 176 | ;; 177 | esac 178 | ;; 179 | sles) 180 | sudo zypper -n update 181 | sudo zypper -n install gcc gcc-c++ gcc-fortran \ 182 | autoconf automake libtool flex make gdb 183 | labels="${labels} linux sles_${VERSION_ID}" 184 | case $VERSION_ID in 185 | 12.*) 186 | # gcc5 == 5.3.1 187 | # gcc6 == 6.2.1 188 | sudo zypper -n install \ 189 | hwloc-devel \ 190 | python-boto python-boto3 python-mock \ 191 | gcc48 gcc48-c++ gcc48-fortran \ 192 | gcc5 gcc5-c++ gcc5-fortran \ 193 | gcc6 gcc6-c++ gcc6-fortran 194 | labels="${labels} gcc48 gcc5 gcc6" 195 | ;; 196 | 15.*) 197 | sudo zypper -n install \ 198 | python3-boto python3-boto3 python3-mock \ 199 | gcc7 gcc7-c++ gcc7-fortran \ 200 | gcc8 gcc8-c++ gcc8-fortran \ 201 | gcc9 gcc9-c++ gcc9-fortran 202 | sudo ln -s /usr/bin/python3 /usr/bin/python 203 | labels="${labels} gcc7 gcc8 gcc9" 204 | ;; 205 | *) 206 | echo "ERROR: Unknown version ${PLATFORM_ID} ${VERSION_ID}" 207 | exit 1 208 | ;; 209 | esac 210 | # No java shipped in SLES by default... 211 | jre_file=jre-8u121-linux-x64.rpm 212 | aws s3 cp s3://ompi-jenkins-config/${jre_file} /tmp/${jre_file} 213 | sudo rpm -i /tmp/${jre_file} 214 | ;; 215 | FreeBSD) 216 | su -m root -c 'pkg install -y sudo' 217 | if ! grep -q '^%wheel ALL=(ALL) NOPASSWD: ALL' /usr/local/etc/sudoers ; then 218 | echo "--> Updating sudoers" 219 | su -m root -c 'echo "%wheel ALL=(ALL) NOPASSWD: ALL" >> /usr/local/etc/sudoers' 220 | else 221 | echo "--> Skipping sudoers update" 222 | fi 223 | sudo pkg install -y openjdk8 autoconf automake libtool gcc wget curl git 224 | if ! grep -q '/dev/fd' /etc/fstab ; then 225 | echo "Adding /dev/fd entry to /etc/fstab" 226 | sudo sh -c 'echo "fdesc /dev/fd fdescfs rw 0 0" >> /etc/fstab' 227 | fi 228 | if ! grep -q '/proc' /etc/fstab ; then 229 | echo "Adding /proc entry to /etc/fstab" 230 | sudo sh -c 'echo "proc /proc procfs rw 0 0 " >> /etc/fstab' 231 | fi 232 | ;; 233 | *) 234 | echo "ERROR: Unkonwn platform ${PLATFORM_ID}" 235 | exit 1 236 | esac 237 | 238 | if test $run_test != 0; then 239 | # for these tests, fail the script if a test fails 240 | set -e 241 | echo "==> Running Compile test" 242 | cd 243 | git clone --recurse-submodules https://github.com/open-mpi/ompi.git 244 | cd ompi 245 | ./autogen.pl 246 | ./configure --prefix=$HOME/install 247 | make -j 4 all 248 | make check 249 | make install 250 | cd $HOME 251 | rm -rf ${HOME}/ompi ${HOME}/install 252 | echo "==> SUCCESS! Open MPI compiled!" 253 | fi 254 | 255 | if test "${clean_ami}" != "0" ; then 256 | echo "==> Cleaning instance" 257 | 258 | if test "${PLATFORM_ID}" = "FreeBSD" ; then 259 | sudo touch /firstboot 260 | fi 261 | 262 | rm -rf ${HOME}/.ssh ${HOME}/.history ${HOME}/.bash_history ${HOME}/.sudo_as_admin_successful ${HOME}/.cache ${HOME}/.oracle_jre_usage 263 | sudo rm -rf /var/log/* 264 | sudo rm -f /etc/ssh/ssh_host* 265 | sudo rm -rf /root/* ~root/.ssh ~root/.history ~root/.bash_history 266 | echo "Recommended labels: ${labels}" 267 | else 268 | echo "Skipped cleaning instance. Do not use to build AMI!" 269 | fi 270 | 271 | echo "==> All done!" 
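# Example of exercising this script by hand on a throw-away instance
# (normally Packer drives it via packer.json; hypothetical invocation):
#
#   ./customize-ami.sh -b -t    # run the OMPI build test, skip the cleanup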
272 | 273 | 274 | # cleanup phase 275 | -------------------------------------------------------------------------------- /jenkins/open-mpi-build-script.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | # abort on error 4 | set -e 5 | 6 | # sigh; this probably isn't the most user friendly thing I've ever done... 7 | for var in "$@"; do 8 | eval $@ 9 | done 10 | 11 | # 12 | # Start by figuring out what we are... 13 | # 14 | os=`uname -s` 15 | if test "${os}" = "Linux"; then 16 | eval "PLATFORM_ID=`sed -n 's/^ID=//p' /etc/os-release`" 17 | eval "VERSION_ID=`sed -n 's/^VERSION_ID=//p' /etc/os-release`" 18 | else 19 | PLATFORM_ID=`uname -s` 20 | VERSION_ID=`uname -r` 21 | fi 22 | 23 | echo "--> platform: $PLATFORM_ID" 24 | echo "--> version: $VERSION_ID" 25 | 26 | AUTOGEN_ARGS= 27 | CONFIGURE_ARGS= 28 | MAKE_ARGS= 29 | MAKE_J="-j 8" 30 | PREFIX="${WORKSPACE}/install" 31 | 32 | # 33 | # If they exist, use installed autotools 34 | # 35 | if test -n "${JENKINS_AGENT_HOME}" ; then 36 | base_dir=${JENKINS_AGENT_HOME} 37 | else 38 | base_dir=${HOME} 39 | fi 40 | AUTOTOOLS=${base_dir}/software/autotools-2.69-1.15.0-2.4.6/bin 41 | if test -d ${AUTOTOOLS} ; then 42 | export PATH=${AUTOTOOLS}:${PATH} 43 | fi 44 | 45 | # 46 | # See if builder provided a compiler we should use, and translate it 47 | # to CONFIGURE_ARGS 48 | # 49 | case ${PLATFORM_ID} in 50 | rhel) 51 | case "$Compiler" in 52 | gcc48|"") 53 | echo "--> Using default compilers" 54 | ;; 55 | *) 56 | echo "Unsupported compiler ${Compiler}. Aborting" 57 | exit 1 58 | ;; 59 | esac 60 | ;; 61 | amzn) 62 | case "$Compiler" in 63 | "") 64 | echo "--> Using default compilers" 65 | ;; 66 | gcc44) 67 | CONFIGURE_ARGS="CC=gcc44 CXX=g++44 FC=gfortran44" 68 | ;; 69 | gcc48) 70 | CONFIGURE_ARGS="CC=gcc48 CXX=g++48 FC=gfortran48" 71 | ;; 72 | clang36) 73 | CONFIGURE_ARGS="CC=clang CXX=clang++ --disable-mpi-fortran" 74 | ;; 75 | *) 76 | echo "Unsupported compiler ${Compiler}. Aborting" 77 | exit 1 78 | ;; 79 | esac 80 | ;; 81 | ubuntu) 82 | # On Ubuntu, gcc 4.x was packaged as major.minor version 83 | # packages. 5.x and later was packaged as major version only. 84 | # Clang 6.x and earlier was packaged as major.minor, while 85 | # Clang 7 and lager was packaged as major version only. 86 | case "$Compiler" in 87 | "") 88 | echo "--> Using default compilers" 89 | ;; 90 | gcc4*) 91 | version=`echo "$Compiler" | sed -e 's/gcc4\([0-9]*\)/4.\1/'` 92 | CONFIGURE_ARGS="CC=gcc-${version} CXX=g++-${version} FC=gfortran-${version}" 93 | ;; 94 | gcc*) 95 | version=`echo "$Compiler" | sed -e 's/gcc\([0-9]*\)/\1/'` 96 | CONFIGURE_ARGS="CC=gcc-${version} CXX=g++-${version} FC=gfortran-${version}" 97 | ;; 98 | clang3*|clang4*|clang5*|clang6*) 99 | version=`echo "$Compiler" | sed -e 's/clang\([0-9]\)\([0-9]*\)/\1.\2/'` 100 | CONFIGURE_ARGS="CC=clang-${version} CXX=clang++-${version} --disable-mpi-fortran" 101 | ;; 102 | clang*) 103 | version=`echo "$Compiler" | sed -e 's/clang\([0-9]*\)/\1/'` 104 | CONFIGURE_ARGS="CC=clang-${version} CXX=clang++-${version} --disable-mpi-fortran" 105 | ;; 106 | *) 107 | echo "Unsupported compiler ${Compiler}. 
Aborting" 108 | exit 1 109 | ;; 110 | esac 111 | ;; 112 | sles) 113 | case "$Compiler" in 114 | "") 115 | echo "--> Using default compilers" 116 | ;; 117 | gcc48) 118 | CONFIGURE_ARGS="CC=gcc-48 CXX=g++-48 FC=gfortran-48" 119 | ;; 120 | gcc5) 121 | CONFIGURE_ARGS="CC=gcc-5 CXX=g++-5 FC=gfortran-5" 122 | ;; 123 | gcc6) 124 | CONFIGURE_ARGS="CC=gcc-6 CXX=g++-6 FC=gfortran-6" 125 | ;; 126 | *) 127 | echo "Unsupported compiler ${Compiler}. Aborting" 128 | exit 1 129 | ;; 130 | esac 131 | ;; 132 | FreeBSD) 133 | CONFIGURE_ARGS="LDFLAGS=-Wl,-rpath,/usr/local/lib/gcc5 --with-wrapper-ldflags=-Wl,-rpath,/usr/local/lib/gcc5" 134 | ;; 135 | esac 136 | 137 | echo "--> Compiler setup: $CONFIGURE_ARGS" 138 | 139 | # 140 | # Add any Autogen or Configure arguments provided by the builder job 141 | # 142 | if test "$AUTOGEN_OPTIONS" != ""; then 143 | # special case, to work around the fact that Open MPI can't build 144 | # when there's a space in the build path name (sigh) 145 | if test "$AUTOGEN_OPTIONS" = "--no-orte"; then 146 | AUTOGEN_OPTIONS="--no-orte --no-ompi" 147 | fi 148 | echo "--> Adding autogen arguments: $AUTOGEN_OPTIONS" 149 | AUTOGEN_ARGS="${AUTOGEN_ARGS} ${AUTOGEN_OPTIONS}" 150 | fi 151 | 152 | if test "$CONFIGURE_OPTIONS" != ""; then 153 | echo "--> Adding configure arguments: $CONFIGURE_OPTIONS" 154 | CONFIGURE_ARGS="${CONFIGURE_ARGS} ${CONFIGURE_OPTIONS}" 155 | fi 156 | 157 | # 158 | # Build. 159 | # 160 | cd "${WORKSPACE}/src" 161 | 162 | sha1=`git rev-parse HEAD` 163 | echo "--> Building commit ${sha1}" 164 | 165 | if test -f autogen.pl; then 166 | echo "--> running ./autogen.pl ${AUTOGEN_ARGS}" 167 | ./autogen.pl ${AUTOGEN_ARGS} 168 | else 169 | if test "${AUTOGEN_ARGS}" != ""; then 170 | echo "--> Being a coward and not running with special autogen arguments and autogen.sh" 171 | exit 1 172 | else 173 | echo "--> running ./atogen.sh" 174 | ./autogen.sh 175 | fi 176 | fi 177 | 178 | # note: we can't do this until after autogen, because we need to run 179 | # ./configure --help. In prep for 5.0, we added a developer 180 | # requirement for pandoc to build (not required for dist tarballs), 181 | # with an explicit option to disable. 182 | if `which pandoc > /dev/null 2>&1` ; then 183 | echo "--> Found pandoc. Allowing default manpage behavior" 184 | else 185 | if `./configure --help | grep -q disable-man-pages` ; then 186 | echo "--> No pandoc and configure supports --disable-man-pages" 187 | CONFIGURE_ARGS="${CONFIGURE_ARGS} --disable-man-pages" 188 | else 189 | echo "--> No pandoc and no --disable-man-pages. Allowing default manpage behavior" 190 | fi 191 | fi 192 | 193 | echo "--> running ./configure --prefix=\"${PREFIX}\" ${CONFIGURE_ARGS}" 194 | if ! ./configure --prefix="${PREFIX}" ${CONFIGURE_ARGS}; then 195 | echo "./configure --prefix=\"${PREFIX}\" ${CONFIGURE_ARGS} failed, ABORTING !" 196 | if test -f config.log; then 197 | echo "config.log content :" 198 | cat config.log 199 | else 200 | echo "no config.log was generated" 201 | fi 202 | exit 1 203 | fi 204 | 205 | # shortcut for the distcheck case, as it won't run any tests beyond 206 | # the build-in make check tests. 
207 | if test "${MAKE_DISTCHECK}" != ""; then 208 | echo "--> running make ${MAKE_ARGS} distcheck" 209 | make ${MAKE_ARGS} distcheck 210 | exit 0 211 | fi 212 | 213 | echo "--> running make ${MAKE_J} ${MAKE_ARGS} all" 214 | make ${MAKE_J} ${MAKE_ARGS} all 215 | echo "--> running make check" 216 | make ${MAKE_ARGS} check 217 | echo "--> running make install" 218 | make ${MAKE_ARGS} install 219 | 220 | export PATH="${PREFIX}/bin":${PATH} 221 | 222 | case "$AUTOGEN_OPTIONS" in 223 | *--no-ompi*) 224 | echo "--> Skipping MPI tests due to --no-ompi" 225 | exit 0 226 | ;; 227 | esac 228 | 229 | echo "--> running ompi_info" 230 | ompi_info 231 | 232 | echo "--> running make all in examples" 233 | cd "${WORKSPACE}/src/examples" 234 | make ${MAKE_ARGS} all 235 | cd .. 236 | 237 | # it's hard to determine what the failure was and there's no printing 238 | # of error code with set -e, so for the tests, we do per-command 239 | # checking... 240 | set +e 241 | 242 | run_example() { 243 | example=`basename ${2}` 244 | echo "--> Running example: $example" 245 | ${1} ${2} 246 | ret=$? 247 | if test ${ret} -ne 0 ; then 248 | echo "Example failed: ${ret}" 249 | echo "Command was: ${1} ${2}" 250 | exit ${ret} 251 | fi 252 | } 253 | 254 | if test "${MPIRUN_MODE}" != "none"; then 255 | echo "--> running examples" 256 | echo "localhost cpu=2" > "${WORKSPACE}/hostfile" 257 | # Note: using perl here because figuring out a portable sed regexp 258 | # proved to be a little challenging. 259 | mpirun_version=`"${WORKSPACE}/install/bin/mpirun" --version | perl -wnE 'say $1 if /mpirun [^\d]*(\d+.\d+)/'` 260 | echo "--> mpirun version: ${mpirun_version}" 261 | case ${mpirun_version} in 262 | 1.*|2.0*) 263 | exec="timeout -s SIGSEGV 3m mpirun -hostfile ${WORKSPACE}/hostfile -np 2 " 264 | ;; 265 | *) 266 | exec="timeout -s SIGSEGV 4m mpirun --get-stack-traces --timeout 180 --hostfile ${WORKSPACE}/hostfile -np 2 " 267 | ;; 268 | esac 269 | run_example "${exec}" ./examples/hello_c 270 | run_example "${exec}" ./examples/ring_c 271 | run_example "${exec}" ./examples/connectivity_c 272 | if ompi_info --parsable | grep -q bindings:cxx:yes >/dev/null; then 273 | echo "--> running C++ examples" 274 | run_example "${exec}" ./examples/hello_cxx 275 | run_example "${exec}" ./examples/ring_cxx 276 | else 277 | echo "--> skipping C++ examples" 278 | fi 279 | if ompi_info --parsable | grep -q bindings:mpif.h:yes >/dev/null; then 280 | echo "--> running mpif examples" 281 | run_example "${exec}" ./examples/hello_mpifh 282 | run_example "${exec}" ./examples/ring_mpifh 283 | else 284 | echo "--> skipping mpif examples" 285 | fi 286 | if ompi_info --parsable | egrep -q bindings:use_mpi:\"\?yes >/dev/null; then 287 | echo "--> running usempi examples" 288 | run_example "${exec}" ./examples/hello_usempi 289 | run_example "${exec}" ./examples/ring_usempi 290 | else 291 | echo "--> skipping usempi examples" 292 | fi 293 | if ompi_info --parsable | grep -q bindings:use_mpi_f08:yes >/dev/null; then 294 | echo "--> running usempif08 examples" 295 | run_example "${exec}" ./examples/hello_usempif08 296 | run_example "${exec}" ./examples/ring_usempif08 297 | else 298 | echo "--> skipping usempif08 examples" 299 | fi 300 | else 301 | echo "--> Skipping examples (MPIRUN_MODE = none)" 302 | fi 303 | 304 | echo "--> All done!" 
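# (Illustrative: the Jenkins builder job passes its settings as VAR=value
#  arguments, which the eval loop at the top turns into shell variables,
#  e.g. something like:
#      ./open-mpi-build-script.sh Compiler=gcc7 MPIRUN_MODE=none
#  or, for a distcheck-only run:
#      ./open-mpi-build-script.sh MAKE_DISTCHECK=yes)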
305 | -------------------------------------------------------------------------------- /nightly-tarball/S3BuildFiler.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # 3 | # Copyright (c) 2017 Amazon.com, Inc. or its affiliates. All Rights 4 | # Reserved. 5 | # 6 | # Additional copyrights may follow 7 | # 8 | 9 | import BuildFiler 10 | import unittest 11 | import logging 12 | import boto3 13 | import botocore 14 | import time 15 | import os 16 | import errno 17 | import re 18 | 19 | 20 | logger = logging.getLogger('Builder.S3BuildFiler') 21 | 22 | 23 | class S3BuildFiler(BuildFiler.BuildFiler): 24 | """S3 Implementation of the BuildFiler 25 | 26 | S3 implementation of the BuildFiler. Assumes that the current 27 | environment is already setup with the required AWS permissions, 28 | according to Boto3's credentials search path: 29 | http://boto3.readthedocs.io/en/latest/guide/configuration.html 30 | 31 | """ 32 | 33 | def __init__(self, Bucket, Basename): 34 | logger.debug("-> creating S3BuildFiler with bucket_name=", Bucket, 35 | " base_name=", Basename) 36 | self._bucket = Bucket 37 | self._basename = Basename 38 | self._s3 = boto3.client('s3') 39 | 40 | 41 | def download_to_stream(self, filename): 42 | """Download to stream 43 | 44 | Gets the object at basename/filename and returns as a 45 | StreamObject, suitable for turning into a string via .read() 46 | or passing to JSON / YAML constructors. 47 | 48 | """ 49 | logger.debug("-> downloading to stream: " + filename) 50 | key = self._basename + filename 51 | try: 52 | response = self._s3.get_object(Bucket=self._bucket, Key=key) 53 | except botocore.exceptions.ClientError as e: 54 | code = e.response['Error']['Code'] 55 | if code == "NoSuchKey" or code == "NoSuchBucket": 56 | raise IOError(errno.ENOENT, os.strerror(errno.ENOENT), filename) 57 | else: 58 | raise 59 | return response['Body'] 60 | 61 | 62 | def upload_from_stream(self, filename, data, properties = {}): 63 | """Upload from a stream 64 | 65 | Puts the stream information in data to an object at 66 | basename/filename. Data can be the output of json.dumps() or 67 | similar. 68 | 69 | """ 70 | logger.debug("-> uploading from stream: " + filename) 71 | key = self._basename + filename 72 | try: 73 | if len(properties) > 0: 74 | self._s3.put_object(Bucket=self._bucket, Key=key, Body=data, 75 | Metadata=properties) 76 | else: 77 | self._s3.put_object(Bucket=self._bucket, Key=key, Body=data) 78 | except botocore.exceptions.ClientError as e: 79 | code = e.response['Error']['Code'] 80 | if code == "NoSuchBucket": 81 | raise IOError(errno.ENOENT, os.strerror(errno.ENOENT), filename) 82 | else: 83 | raise 84 | 85 | 86 | def download_to_file(self, remote_filename, local_filename): 87 | """Download to a file 88 | 89 | Download the object at basename/remote_filename to 90 | local_filename. 
91 | 92 | """ 93 | logger.debug("-> downloading to file, remote: " + remote_filename 94 | + " local: " + local_filename) 95 | key = self._basename + remote_filename 96 | try: 97 | self._s3.download_file(self._bucket, key, local_filename) 98 | except botocore.exceptions.ClientError as e: 99 | code = e.response['Error']['Code'] 100 | if code == "NoSuchKey" or code == "NoSuchBucket" or code == "404": 101 | raise IOError(errno.ENOENT, os.strerror(errno.ENOENT), 102 | remote_filename) 103 | else: 104 | raise 105 | 106 | 107 | def upload_from_file(self, local_filename, remote_filename, properties = {}): 108 | """Upload a file 109 | 110 | Upload the local_file to S3 as basename/remote_filename. 111 | """ 112 | logger.debug("-> uploading from file, remote: " + remote_filename 113 | + " local: " + local_filename) 114 | key = self._basename + remote_filename 115 | try: 116 | self._s3.upload_file(local_filename, self._bucket, key) 117 | except botocore.exceptions.ClientError as e: 118 | code = e.response['Error']['Code'] 119 | if code == "NoSuchBucket": 120 | raise IOError(errno.ENOENT, os.strerror(errno.ENOENT), 121 | remote_filename) 122 | else: 123 | raise 124 | 125 | 126 | def delete(self, filename): 127 | """Delete file 128 | 129 | Delete file on remote path. Note that S3 delete has much 130 | eventual consistency, so you may still find the file 131 | immediately after a delete. But it will be deleted 132 | eventually. 133 | 134 | """ 135 | logger.debug("-> deleting file: " + filename) 136 | key = self._basename + filename 137 | try: 138 | self._s3.delete_object(Bucket=self._bucket, Key=key) 139 | except botocore.exceptions.ClientError as e: 140 | code = e.response['Error']['Code'] 141 | if code == "NoSuchKey" or code == "NoSuchBucket": 142 | raise IOError(errno.ENOENT, os.strerror(errno.ENOENT), filename) 143 | else: 144 | raise 145 | 146 | 147 | def file_search(self, dirname, blob): 148 | """Search for file blob in dirname directory 149 | 150 | Search for all files in dirname matching blob. Returns a list 151 | of filenames that match the search. This is not the most 152 | efficient implementation, but there's no great way to search 153 | S3other than searching the directory and then running a regex 154 | match. So this will be rather inefficient in very large 155 | directories. 156 | 157 | """ 158 | full_prefix = self._basename + dirname 159 | logger.debug('-> search directory %s, blob %s' % (full_prefix, blob)) 160 | regex = re.sub('\.', '\.', blob) 161 | regex = re.sub('\*', '.*', regex) 162 | retval = [] 163 | results = self._s3.list_objects(Bucket=self._bucket, Prefix=full_prefix) 164 | if not 'Contents' in results: 165 | return [] 166 | blobs = results['Contents'] 167 | for blob in blobs: 168 | blobname = blob['Key'] 169 | # BWB: fix me! 
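            # (Note: the loop variable shadows the 'blob' argument above, and
            # the glob-to-regex translation is unanchored, so re.search() will
            # match the pattern anywhere in the key name.)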
170 | if re.search(regex, blobname) != None: 171 | short_blobname = re.sub(self._basename, '', blobname) 172 | retval.append(short_blobname) 173 | return retval 174 | 175 | 176 | class S3BuildFilerTest(unittest.TestCase): 177 | _bucket = "ompi-s3buildfiler-test" 178 | _basename = "" 179 | _testtime = str(time.time()) 180 | 181 | def test_bad_bucket(self): 182 | filer = S3BuildFiler("this-is-a-random-bucket", self._basename) 183 | try: 184 | filer.download_to_stream("read-only/file-that-should-not-exist.txt") 185 | except IOError as e: 186 | if e.errno != errno.ENOENT: 187 | raise 188 | else: 189 | self.fail() 190 | 191 | 192 | def test_stream_bad_get(self): 193 | filer = S3BuildFiler(self._bucket, self._basename) 194 | try: 195 | filer.download_to_stream("read-only/file-that-should-not-exist.txt") 196 | except IOError as e: 197 | if e.errno != errno.ENOENT: 198 | raise 199 | else: 200 | self.fail() 201 | 202 | 203 | def test_stream_good_get(self): 204 | filer = S3BuildFiler(self._bucket, self._basename) 205 | body = filer.download_to_stream("read-only/file-that-should-exist.txt") 206 | text = body.read() 207 | self.assertEqual(text, "This is a test!\n", "Broken text:" + text) 208 | 209 | 210 | def test_stream_read_write(self): 211 | filename = "cleaned-nightly/" + self._testtime + "/test-abc.txt" 212 | 213 | filer = S3BuildFiler(self._bucket, self._basename) 214 | input_string = "I love me some unit tests.\n" 215 | filer.upload_from_stream(filename, input_string) 216 | body = filer.download_to_stream(filename) 217 | output_string = body.read() 218 | filer.delete(filename) 219 | self.assertEqual(input_string, output_string, 220 | input_string + " != " + output_string) 221 | 222 | 223 | def test_file_bad_get(self): 224 | pathname = "/tmp/test-" + self._testtime + ".txt" 225 | 226 | filer = S3BuildFiler(self._bucket, self._basename) 227 | try: 228 | filer.download_to_file("read-only/file-that-should-not-exist.txt", 229 | pathname) 230 | except IOError as e: 231 | if e.errno != errno.ENOENT: 232 | raise 233 | else: 234 | self.fail() 235 | 236 | 237 | def test_file_good_get(self): 238 | pathname = "/tmp/test-" + self._testtime + ".txt" 239 | 240 | filer = S3BuildFiler(self._bucket, self._basename) 241 | filer.download_to_file("read-only/file-that-should-exist.txt", 242 | pathname) 243 | with open(pathname, 'r') as data: 244 | text=data.read() 245 | self.assertEqual(text, "This is a test!\n", "Broken text:" + text) 246 | 247 | 248 | def test_file_read_write(self): 249 | remote_filename = "cleaned-nightly/" + self._testtime + "/test-abc.txt" 250 | pathname = "/tmp/test-" + self._testtime + ".txt" 251 | input_string = "I love me some unit tests.\n" 252 | 253 | filer = S3BuildFiler(self._bucket, self._basename) 254 | with open(pathname, "w") as text_file: 255 | text_file.write(input_string) 256 | filer.upload_from_file(pathname, remote_filename) 257 | 258 | os.remove(pathname) 259 | 260 | try: 261 | body = filer.download_to_stream(remote_filename) 262 | except: 263 | filer.delete(remote_filename) 264 | raise 265 | 266 | try: 267 | filer.download_to_file(remote_filename, pathname) 268 | except: 269 | filer.delete(remote_filename) 270 | raise 271 | 272 | filer.delete(remote_filename) 273 | 274 | output_string = body.read() 275 | self.assertEqual(input_string, output_string, 276 | input_string + " != " + output_string) 277 | 278 | with open(pathname, 'r') as data: 279 | text=data.read() 280 | self.assertEqual(text, output_string, 281 | input_string + " != " + text) 282 | 283 | 284 | if __name__ == 
'__main__': 285 | unittest.main() 286 | -------------------------------------------------------------------------------- /administrative/wait-for-pr-ci-completion.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # 3 | # Copyright (c) 2018 Jeff Squyres. All rights reserved. 4 | # 5 | # Additional copyrights may follow 6 | # 7 | # $HEADER$ 8 | # 9 | # Redistribution and use in source and binary forms, with or without 10 | # modification, are permitted provided that the following conditions are 11 | # met: 12 | # 13 | # - Redistributions of source code must retain the above copyright 14 | # notice, this list of conditions and the following disclaimer. 15 | # 16 | # - Redistributions in binary form must reproduce the above copyright 17 | # notice, this list of conditions and the following disclaimer listed 18 | # in this license in the documentation and/or other materials 19 | # provided with the distribution. 20 | # 21 | # - Neither the name of the copyright holders nor the names of its 22 | # contributors may be used to endorse or promote products derived from 23 | # this software without specific prior written permission. 24 | # 25 | # The copyright holders provide no reassurances that the source code 26 | # provided does not infringe any patent, copyright, or any other 27 | # intellectual property rights of third parties. The copyright holders 28 | # disclaim any liability to any recipient for claims brought against 29 | # recipient by any third party for infringement of that parties 30 | # intellectual property rights. 31 | # 32 | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 33 | # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 34 | # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR 35 | # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT 36 | # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 37 | # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT 38 | # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 39 | # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 40 | # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 41 | # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 42 | # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 43 | # 44 | 45 | '''This script waits for all the CI on a given PR to complete. 46 | 47 | You typically want to run some kind of notifier after this script 48 | completes to let you know that all the CI has completed. For example: 49 | 50 | $ ./wait-for-pr-ci-completion.py \ 51 | --pr https://github.com/open-mpi/ompi/pull/5731; \ 52 | pushover CI for PR5731 is done 53 | 54 | where "pushover" is a notifier script that I use to send a push 55 | notification to my phone. I.e., the 'pushover' script will execute 56 | when the CI for PR 5731 completes. 57 | 58 | ----- 59 | 60 | # Requirements: 61 | 62 | 1. You need the PyGithub python module 63 | 2. 
You need a GitHub personal access token to use the GitHub API 64 | (through the PyGithub python module) 65 | 66 | ----- 67 | 68 | ## Installing PyGithub: 69 | 70 | $ pip3 install pygithub 71 | 72 | Docs: 73 | 74 | https://github.com/PyGithub/PyGithub 75 | http://pygithub.readthedocs.io/ 76 | 77 | ## Getting a Github personal access token 78 | 79 | Go to https://github.com/settings/tokens and make a personal access 80 | token with full permissions to the repo and org. 81 | 82 | You can pass the oauth token to this script in one of 3 ways: 83 | 84 | 1. Name the file 'oauth-token.txt' and have it in the PWD when you run 85 | this script. 86 | 2. Pass the filename of the token via --oauth-file CLI options. 87 | 3. Set the env variable GITHUB_OAUTH_TOKEN with the filename of your 88 | oauth token (pro tip: if you set it to the absolute filename, it 89 | will be found no matter what directory you run this script from). 90 | 91 | ''' 92 | 93 | import os 94 | import time 95 | import http 96 | import logging 97 | import argparse 98 | import requests 99 | 100 | from github import Github 101 | from urllib.parse import urlparse 102 | from datetime import datetime 103 | 104 | #-------------------------------------------------------------------- 105 | 106 | default_delay = 60 107 | real_default_oauth_file = 'oauth-token.txt' 108 | 109 | if 'GITHUB_OAUTH_TOKEN' in os.environ: 110 | default_oauth_file = os.environ['GITHUB_OAUTH_TOKEN'] 111 | else: 112 | default_oauth_file = real_default_oauth_file 113 | 114 | #-------------------------------------------------------------------- 115 | 116 | # Parse the CLI options 117 | 118 | parser = argparse.ArgumentParser(description='Github actions.') 119 | 120 | parser.add_argument('--pr', help='URL of PR') 121 | parser.add_argument('--debug', action='store_true', help='Be really verbose') 122 | parser.add_argument('--delay', default=default_delay, 123 | help='Delay this many seconds between checking') 124 | parser.add_argument('--oauth-file', default=default_oauth_file, 125 | help='Filename containinig OAuth token to access Github (default is "{file}")' 126 | .format(file=default_oauth_file)) 127 | 128 | args = parser.parse_args() 129 | 130 | # Sanity check the CLI args 131 | 132 | if not args.pr: 133 | print("Must specify a PR URL via --pr") 134 | exit(1) 135 | 136 | if not os.path.exists(args.oauth_file): 137 | print("Cannot find oauth token file: {filename}" 138 | .format(filename=args.oauth_file)) 139 | exit(1) 140 | 141 | #-------------------------------------------------------------------- 142 | 143 | delay = args.delay 144 | 145 | # Read the oAuth token file. 
146 | # (you will need to supply this file yourself -- see the comment at 147 | # the top of this file) 148 | with open(args.oauth_file, 'r') as f: 149 | token = f.read().strip() 150 | 151 | #-------------------------------------------------------------------- 152 | 153 | log = logging.getLogger('GithubPRwaiter') 154 | level = logging.INFO 155 | if args.debug: 156 | level = logging.DEBUG 157 | log.setLevel(level) 158 | 159 | ch = logging.StreamHandler() 160 | ch.setLevel(level) 161 | 162 | format = '%(asctime)s %(levelname)s: %(message)s' 163 | formatter = logging.Formatter(format) 164 | 165 | ch.setFormatter(formatter) 166 | 167 | log.addHandler(ch) 168 | 169 | #-------------------------------------------------------------------- 170 | 171 | # Pick apart the URL 172 | parts = urlparse(args.pr) 173 | path = parts.path 174 | vals = path.split('/') 175 | org = vals[1] 176 | repo = vals[2] 177 | pull = vals[3] 178 | num = vals[4] 179 | 180 | if parts.hostname != "github.com": 181 | log.debug("Logging in to Github Enterprise server {hostname}" 182 | .format(hostname=parts.hostname)) 183 | g = Github(base_url=("https://{hostname}/api/v3" 184 | .format(hostname=parts.hostname)), 185 | login_or_token=token) 186 | else: 187 | g = Github(token) 188 | 189 | full_name = os.path.join(org, repo) 190 | log.debug("Getting repo {r}...".format(r=full_name)) 191 | repo = g.get_repo(full_name) 192 | 193 | log.debug("Getting PR {pr}...".format(pr=num)) 194 | pr = repo.get_pull(int(num)) 195 | 196 | log.info("PR {num}: {title}".format(num=num, title=pr.title)) 197 | log.info("PR {num} is {state}".format(num=num, state=pr.state)) 198 | if pr.state != "open": 199 | log.info("Nothing to do!".format(num=num)) 200 | exit(0) 201 | 202 | log.debug("PR head is {sha}".format(sha=pr.head.sha)) 203 | 204 | log.debug("Getting commits...") 205 | commits = pr.get_commits() 206 | 207 | # Find the HEAD commit -- that's where the most recent statuses will be 208 | head_commit = None 209 | for c in commits: 210 | if c.sha == pr.head.sha: 211 | log.debug("Found HEAD commit: {sha}".format(sha=c.sha)) 212 | head_commit = c 213 | break 214 | 215 | if not head_commit: 216 | log.error("Did not find HEAD commit (!)") 217 | log.error("That's unexpected -- I'm going to abort...") 218 | exit(1) 219 | 220 | #-------------------------------------------------------------------- 221 | 222 | # Main loop 223 | 224 | done = False 225 | succeeded = None 226 | failed = None 227 | statuses = dict() 228 | while not done: 229 | # There can be a bunch of statuses from the same context. Take 230 | # only the *chronologically-last* status from each context. 231 | 232 | # Note: put both the "head_commit.get_statuses()" *and* the "for s 233 | # in github_statuses" in the try block because some empirical 234 | # testing shows that pygithub may be obtaining statuses lazily 235 | # during the for loop (i.e., not during .get_statuses()). 236 | try: 237 | github_statuses = head_commit.get_statuses() 238 | for s in github_statuses: 239 | save = False 240 | if s.context not in statuses: 241 | save = True 242 | log.info("Found new {state} CI: {context} ({desc})" 243 | .format(context=s.context, state=s.state, 244 | desc=s.description)) 245 | else: 246 | # s.updated_at is a python datetime. Huzzah! 
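                    # A plain ">" comparison therefore keeps only the
                    # chronologically-newest status seen for this context.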
247 | if s.updated_at > statuses[s.context].updated_at: 248 | log.info("Found update {state} CI: {context} ({desc})" 249 | .format(context=s.context, state=s.state, 250 | desc=s.description)) 251 | save = True 252 | 253 | if save: 254 | statuses[s.context] = s 255 | 256 | except ConnectionResetError: 257 | log.error("Got Connection Reset. Sleeping and trying again...") 258 | time.sleep(5) 259 | continue 260 | except requests.exceptions.ConnectionError: 261 | log.error("Got Connection error. Sleeping and trying again...") 262 | time.sleep(5) 263 | continue 264 | except http.client.RemoteDisconnected: 265 | log.error("Got http Remote Disconnected. Sleeping and trying again...") 266 | time.sleep(5) 267 | continue 268 | except requests.exceptions.RemotedDisconnected: 269 | log.error("Got requests Remote Disconnected. Sleeping and trying again...") 270 | time.sleep(5) 271 | continue 272 | 273 | done = True 274 | succeeded = list() 275 | failed = list() 276 | for context,status in statuses.items(): 277 | if status.state == 'success': 278 | succeeded.append(status) 279 | elif status.state == 'failure': 280 | failed.append(status) 281 | elif status.state == 'pending': 282 | log.debug("Still waiting for {context}: {desc}" 283 | .format(context=context, 284 | desc=status.description)) 285 | done = False 286 | else: 287 | log.warning("Got unknown status state: {state}" 288 | .format(state=status.state)) 289 | exit(1) 290 | 291 | if not done: 292 | log.debug("Waiting {delay} seconds...".format(delay=delay)) 293 | time.sleep(delay) 294 | 295 | log.info("All CI statuses are complete:") 296 | for s in succeeded: 297 | log.info("PASSED {context}: {desc}" 298 | .format(context=s.context, 299 | desc=s.description.strip())) 300 | for s in failed: 301 | log.info("FAILED {context}: {desc}" 302 | .format(context=s.context, 303 | desc=s.description.strip())) 304 | exit(0) 305 | -------------------------------------------------------------------------------- /dist/uploadutils.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | # 3 | # Copyright (c) 2018 Amazon.com, Inc. or its affiliates. All Rights 4 | # Reserved. 5 | # 6 | # Additional copyrights may follow 7 | # 8 | 9 | import boto3 10 | import botocore 11 | import sys 12 | import re 13 | import os 14 | import json 15 | import tarfile 16 | import hashlib 17 | from io import StringIO 18 | import datetime 19 | import unittest 20 | import mock 21 | import posix 22 | 23 | def __unique_assign(releaseinfo, key, value): 24 | if not key in releaseinfo: 25 | releaseinfo[key] = value 26 | elif releaseinfo[key] != value: 27 | raise Exception('Found files from two %ss: %s %s' % 28 | (key, releaseinfo[key], value)) 29 | 30 | 31 | def __compute_hashes(filename): 32 | """Helper function to compute MD5 and SHA1 hashes""" 33 | retval = {} 34 | md5 = hashlib.md5() 35 | sha1 = hashlib.sha1() 36 | sha256 = hashlib.sha256() 37 | with open(filename, 'rb') as f: 38 | while True: 39 | data = f.read(64 * 1024) 40 | if not data: 41 | break 42 | md5.update(data) 43 | sha1.update(data) 44 | sha256.update(data) 45 | retval['md5'] = md5.hexdigest() 46 | retval['sha1'] = sha1.hexdigest() 47 | retval['sha256'] = sha256.hexdigest() 48 | return retval 49 | 50 | 51 | def __query_yes_no(question, default="yes"): 52 | """Ask a yes/no question via input() and return their answer. 53 | 54 | "question" is a string that is presented to the user. 55 | "default" is the presumed answer if the user just hits . 
56 | It must be "yes" (the default), "no" or None (meaning 57 | an answer is required of the user). 58 | 59 | The "answer" return value is True for "yes" or False for "no". 60 | """ 61 | valid = {"yes": True, "y": True, "ye": True, 62 | "no": False, "n": False} 63 | if default is None: 64 | prompt = " [y/n] " 65 | elif default == "yes": 66 | prompt = " [Y/n] " 67 | elif default == "no": 68 | prompt = " [y/N] " 69 | else: 70 | raise ValueError("invalid default answer: '%s'" % default) 71 | 72 | while True: 73 | sys.stdout.write(question + prompt) 74 | choice = input().lower() 75 | if default is not None and choice == '': 76 | return valid[default] 77 | elif choice in valid: 78 | return valid[choice] 79 | else: 80 | sys.stdout.write("Please respond with 'yes' or 'no' " 81 | "(or 'y' or 'n').\n") 82 | 83 | 84 | def parse_versions(filelist): 85 | """Parse the project name, branch, file basename, and version name from a file list 86 | 87 | We're pretty conservative in this function, because it's an 88 | optimization over specifying a bunch of command linke arguments 89 | explicitly. Add projects / regexes as necessary... 90 | """ 91 | 92 | releaseinfo = {} 93 | build_unix_time = 0 94 | 95 | for filename in filelist: 96 | if re.search(r'openmpi|OpenMPI', filename): 97 | m = re.search(r'openmpi\-([0-9a-zA-Z\.]+)(?:\.tar|\-[0-9]+\.src\.rpm|\.dmg.gz)', 98 | filename) 99 | if m == None: 100 | m = re.search(r'OpenMPI_v([0-9a-zA-Z\.]+)\-[0-9]+_win', filename) 101 | if m == None: 102 | raise Exception('Could not parse Open MPI filename: %s' % (filename)) 103 | 104 | # yes, we mean open-mpi for the project. We perhaps were 105 | # silly in naming the branch in S3. 106 | __unique_assign(releaseinfo, 'basename', 'openmpi') 107 | __unique_assign(releaseinfo, 'project', 'open-mpi') 108 | __unique_assign(releaseinfo, 'version', m.group(1)) 109 | 110 | elif re.search('^hwloc-', filename): 111 | m = re.search(r'hwloc\-([0-9a-zA-Z\.]+)(?:\.tar|\-[0-9]+\.src\.rpm)', 112 | filename) 113 | if m == None: 114 | m = re.search(r'hwloc-win[0-9]+-build-([0-9a-zA-Z\.]+)\.zip', filename) 115 | if m == None: 116 | raise Exception('Could not parse hwloc filename: %s' % (filename)) 117 | 118 | __unique_assign(releaseinfo, 'basename', 'hwloc') 119 | __unique_assign(releaseinfo, 'project', 'hwloc') 120 | __unique_assign(releaseinfo, 'version', m.group(1)) 121 | 122 | else: 123 | raise Exception('Could not parse %s' % (filename)) 124 | 125 | m = re.search(r'^[0-9]+\.[0-9]+', releaseinfo['version']) 126 | if m == None: 127 | raise Exception('Could not parse version %s' % (releaseinfo['version'])) 128 | __unique_assign(releaseinfo, 'branch', 'v%s' % (m.group(0))) 129 | 130 | if build_unix_time == 0 and re.search('\.tar\.', filename): 131 | try: 132 | tar = tarfile.open(filename) 133 | except: 134 | raise 135 | else: 136 | # rather than look at the ctime and mtime of the 137 | # tarball (which may change as tarballs are copied 138 | # around), look at the top level directory (first 139 | # entry in the tarball) for a mtime. 140 | build_unix_time = tar.getmembers()[0].mtime 141 | 142 | if build_unix_time != 0: 143 | releaseinfo['build_unix_time'] = build_unix_time 144 | 145 | return releaseinfo 146 | 147 | 148 | def upload_files(s3_client, s3_bucket, s3_key_prefix, release_info, files, prompt): 149 | # first, verify that the key_prefix exists. We are chicken here 150 | # and won't create it. 
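    # (A KeyCount of zero from list_objects_v2 is treated as "prefix does
    # not exist"; the open-mpi-scratch bucket is exempted from this check
    # below.)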
151 | result = s3_client.list_objects_v2(Bucket = s3_bucket, 152 | Prefix = s3_key_prefix) 153 | if s3_bucket != 'open-mpi-scratch' and result['KeyCount'] == 0: 154 | raise Exception('s3://%s/%s does not appear to be a valid prefix.' % 155 | (s3_bucket, full_key_prefix)) 156 | 157 | # figure out if project and branch exist... 158 | new = "" 159 | project_key_path = '%s/%s' % (s3_key_prefix, release_info['project']) 160 | branch_key_path = '%s/%s' % (project_key_path, release_info['branch']) 161 | 162 | # print some release info 163 | print('Upload path: s3://%s/%s' % (s3_bucket, branch_key_path)) 164 | print('Project: %s' % release_info['project']) 165 | print('Version: %s' % release_info['version']) 166 | print('Branch: %s' % release_info['branch']) 167 | print('Date: %s' % datetime.datetime.fromtimestamp(release_info['build_unix_time'])) 168 | 169 | branch_result = s3_client.list_objects_v2(Bucket = s3_bucket, 170 | Prefix = branch_key_path) 171 | if branch_result['KeyCount'] == 0: 172 | project_result = s3_client.list_objects_v2(Bucket = s3_bucket, 173 | Prefix = project_key_path) 174 | if project_result['KeyCount'] == 0: 175 | print(' * New project %s and branch %s' % 176 | (release_info['project'], release_info['branch'])) 177 | else: 178 | print(' * New branch %s' % (release_info['branch'])) 179 | 180 | # and check for existing release 181 | build_filename = '%s/build-%s-%s.json' % (branch_key_path, release_info['basename'], 182 | release_info['version']) 183 | try: 184 | response = s3_client.get_object(Bucket = s3_bucket, Key = build_filename) 185 | buildinfo = json.load(response['Body']) 186 | buildinfo_found = True 187 | except botocore.exceptions.ClientError as e: 188 | code = e.response['Error']['Code'] 189 | if code == 'NoSuchKey': 190 | buildinfo_found = False 191 | else: 192 | raise 193 | buildinfo = {} 194 | buildinfo['files'] = {} 195 | 196 | # check if we would overwrite a file and verify that would be ok... 197 | will_overwrite = False 198 | if buildinfo_found: 199 | print('Existing release found for %s %s' % 200 | (release_info['basename'], release_info['version'])) 201 | 202 | print(' * Existing files that will not change:') 203 | for filename in buildinfo['files']: 204 | if not filename in files: 205 | print(' - %s' % filename) 206 | 207 | print(' * Existing files that will be overwritten:') 208 | for filename in buildinfo['files']: 209 | if filename in files: 210 | will_overwrite = True 211 | print(' - %s' % filename) 212 | 213 | print(' * New files:') 214 | for filename in files: 215 | filename = os.path.basename(filename) 216 | if not filename in buildinfo['files']: 217 | print(' - %s' % filename) 218 | else: 219 | print('New release for %s %s' % 220 | (release_info['basename'], release_info['version'])) 221 | print(' * Files to upload:') 222 | for filename in files: 223 | filename = os.path.basename(filename) 224 | print(' - %s' % filename) 225 | 226 | print('') 227 | if prompt == 'ALWAYS_PROMPT': 228 | if not __query_yes_no('Continue?', 'no'): 229 | print('Aborting due to user selection') 230 | return 231 | elif prompt == 'NO_OVERWRITE': 232 | if will_overwrite: 233 | print('Aborting due to --yes and file overwrite') 234 | return 235 | elif prompt == 'NEVER_PROMPT': 236 | pass 237 | elif prompt == 'ASSUME_NO': 238 | print('Aborting due to ASSUME_NO') 239 | return 240 | else: 241 | raise Exception('Unknown Prompt value %d' % prompt) 242 | 243 | # build a build-info structure for the release, possibly building 244 | # on the old one... 
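    # With illustrative values, the resulting structure looks roughly like:
    #   { "branch": "v1.4", "valid": true, "revision": "1.4.0",
    #     "build_unix_time": 12345, "delete_on": 0,
    #     "files": { "openmpi-1.4.0.tar.bz2":
    #                  { "md5": "...", "sha1": "...", "sha256": "...", "size": 123 } } }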
245 | buildinfo['branch'] = release_info['branch'] 246 | buildinfo['valid'] = True 247 | buildinfo['revision'] = release_info['version'] 248 | buildinfo['build_unix_time'] = release_info['build_unix_time'] 249 | buildinfo['delete_on'] = 0 250 | 251 | for filename in files: 252 | info = os.stat(filename) 253 | hashes = __compute_hashes(filename) 254 | fileinfo = {} 255 | fileinfo['sha1'] = hashes['sha1'] 256 | fileinfo['sha256'] = hashes['sha256'] 257 | fileinfo['md5'] = hashes['md5'] 258 | fileinfo['size'] = info.st_size 259 | buildinfo['files'][os.path.basename(filename)] = fileinfo 260 | 261 | for filename in files: 262 | target_name = '%s/%s' % (branch_key_path, os.path.basename(filename)) 263 | s3_client.upload_file(filename, s3_bucket, target_name) 264 | 265 | buildinfo_str = json.dumps(buildinfo) 266 | s3_client.put_object(Bucket = s3_bucket, Key = build_filename, 267 | Body = buildinfo_str) 268 | 269 | 270 | ###################################################################### 271 | # 272 | # Unit Test Code 273 | # 274 | ###################################################################### 275 | def _test_stat(filename): 276 | info = posix.stat_result((0, 0, 0, 0, 0, 0, 987654, 0, 0, 0)) 277 | return info 278 | 279 | def _test_compute_hashes(filename): 280 | retval = {} 281 | retval['md5'] = "ABC" 282 | retval['sha1'] = "ZYX" 283 | return retval 284 | 285 | 286 | class _test_tarfile(): 287 | def __init__(self): 288 | pass 289 | 290 | def getmembers(self): 291 | info = tarfile.TarInfo 292 | info.mtime = 12345 293 | return [info] 294 | 295 | @classmethod 296 | def open(cls, filename): 297 | return _test_tarfile() 298 | 299 | 300 | class parse_versions_tests(unittest.TestCase): 301 | @mock.patch('tarfile.open', _test_tarfile.open) 302 | def test_ompi_release(self): 303 | filelist = ["openmpi-1.4.0.tar.gz", 304 | "openmpi-1.4.0.tar.bz2", 305 | "openmpi-1.4.0-1.src.rpm"] 306 | releaseinfo = parse_versions(filelist) 307 | self.assertEqual(releaseinfo['project'], "open-mpi", 308 | releaseinfo['project'] + " != open-mpi") 309 | self.assertEqual(releaseinfo['basename'], "openmpi", 310 | releaseinfo['basename'] + " != openmpi") 311 | self.assertEqual(releaseinfo['branch'], "v1.4", 312 | releaseinfo['branch'] + " != v1.4") 313 | self.assertEqual(releaseinfo['version'], "1.4.0", 314 | releaseinfo['version'] + " != 1.4.0") 315 | self.assertEqual(releaseinfo['build_unix_time'], 12345, 316 | str(releaseinfo['build_unix_time']) + " != 12345") 317 | 318 | @mock.patch('tarfile.open', _test_tarfile.open) 319 | def test_ompi_release_second_srpm(self): 320 | filelist = ["openmpi-1.4.0.tar.gz", 321 | "openmpi-1.4.0.tar.bz2", 322 | "openmpi-1.4.0-2.src.rpm"] 323 | releaseinfo = parse_versions(filelist) 324 | self.assertEqual(releaseinfo['project'], "open-mpi", 325 | releaseinfo['project'] + " != open-mpi") 326 | self.assertEqual(releaseinfo['basename'], "openmpi", 327 | releaseinfo['basename'] + " != openmpi") 328 | self.assertEqual(releaseinfo['branch'], "v1.4", 329 | releaseinfo['branch'] + " != v1.4") 330 | self.assertEqual(releaseinfo['version'], "1.4.0", 331 | releaseinfo['version'] + " != 1.4.0") 332 | 333 | @mock.patch('tarfile.open', _test_tarfile.open) 334 | def test_ompi_binaries(self): 335 | filelist = ["openmpi-1.4.0.tar.gz", 336 | "openmpi-1.4.0.tar.bz2", 337 | "openmpi-1.4.0-1.src.rpm", 338 | "openmpi-1.4.0.dmg.gz", 339 | "OpenMPI_v1.4.0-1_win64.exe"] 340 | releaseinfo = parse_versions(filelist) 341 | self.assertEqual(releaseinfo['project'], "open-mpi", 342 | releaseinfo['project'] + " != 
open-mpi") 343 | self.assertEqual(releaseinfo['basename'], "openmpi", 344 | releaseinfo['basename'] + " != openmpi") 345 | self.assertEqual(releaseinfo['branch'], "v1.4", 346 | releaseinfo['branch'] + " != v1.4") 347 | self.assertEqual(releaseinfo['version'], "1.4.0", 348 | releaseinfo['version'] + " != 1.4.0") 349 | 350 | @mock.patch('tarfile.open', _test_tarfile.open) 351 | def test_ompi_prerelease(self): 352 | filelist = ["openmpi-1.4.0rc1.tar.gz", 353 | "openmpi-1.4.0rc1.tar.bz2", 354 | "openmpi-1.4.0rc1-1.src.rpm"] 355 | releaseinfo = parse_versions(filelist) 356 | self.assertEqual(releaseinfo['project'], "open-mpi", 357 | releaseinfo['project'] + " != open-mpi") 358 | self.assertEqual(releaseinfo['basename'], "openmpi", 359 | releaseinfo['basename'] + " != openmpi") 360 | self.assertEqual(releaseinfo['branch'], "v1.4", 361 | releaseinfo['branch'] + " != v1.4") 362 | self.assertEqual(releaseinfo['version'], "1.4.0rc1", 363 | releaseinfo['version'] + " != 1.4.0rc1") 364 | 365 | @mock.patch('tarfile.open', _test_tarfile.open) 366 | def test_ompi_mixed_versions(self): 367 | filelist = ["openmpi-1.4.0.tar.gz", 368 | "openmpi-1.4.1.tar.bz2", 369 | "openmpi-1.4.0-1.src.rpm"] 370 | try: 371 | releaseinfo = parse_versions(filelist) 372 | except Exception as e: 373 | pass 374 | else: 375 | self.fail() 376 | 377 | @mock.patch('tarfile.open', _test_tarfile.open) 378 | def test_hwloc_release(self): 379 | filelist = ["hwloc-1.4.0.tar.gz", 380 | "hwloc-1.4.0.tar.bz2", 381 | "hwloc-win32-build-1.4.0.zip", 382 | "hwloc-win64-build-1.4.0.zip"] 383 | releaseinfo = parse_versions(filelist) 384 | self.assertEqual(releaseinfo['project'], "hwloc", 385 | releaseinfo['project'] + " != hwloc") 386 | self.assertEqual(releaseinfo['basename'], "hwloc", 387 | releaseinfo['basename'] + " != hwloc") 388 | self.assertEqual(releaseinfo['branch'], "v1.4", 389 | releaseinfo['branch'] + " != v1.4") 390 | self.assertEqual(releaseinfo['version'], "1.4.0", 391 | releaseinfo['version'] + " != 1.4.0") 392 | 393 | @mock.patch('tarfile.open', _test_tarfile.open) 394 | def test_hwloc_prerelease(self): 395 | filelist = ["hwloc-1.4.0rc1.tar.gz", 396 | "hwloc-1.4.0rc1.tar.bz2", 397 | "hwloc-win32-build-1.4.0rc1.zip", 398 | "hwloc-win64-build-1.4.0rc1.zip"] 399 | releaseinfo = parse_versions(filelist) 400 | self.assertEqual(releaseinfo['project'], "hwloc", 401 | releaseinfo['project'] + " != hwloc") 402 | self.assertEqual(releaseinfo['basename'], "hwloc", 403 | releaseinfo['basename'] + " != hwloc") 404 | self.assertEqual(releaseinfo['branch'], "v1.4", 405 | releaseinfo['branch'] + " != v1.4") 406 | self.assertEqual(releaseinfo['version'], "1.4.0rc1", 407 | releaseinfo['version'] + " != 1.4.0rc1") 408 | 409 | @mock.patch('tarfile.open', _test_tarfile.open) 410 | def test_hwloc_mixed_versions(self): 411 | filelist = ["hwloc-1.4.0.tar.gz", 412 | "hwloc-1.4.1.tar.bz2", 413 | "hwloc-win32-build-1.4.0.zip", 414 | "hwloc-win64-build-1.4.0.zip"] 415 | try: 416 | releaseinfo = parse_versions(filelist) 417 | except Exception as e: 418 | pass 419 | else: 420 | self.fail() 421 | 422 | @mock.patch('tarfile.open', _test_tarfile.open) 423 | def test_hwloc_mixed_versions2(self): 424 | filelist = ["hwloc-1.4.0.tar.gz", 425 | "hwloc-1.4.0.tar.bz2", 426 | "hwloc-win32-build-1.4.1.zip", 427 | "hwloc-win64-build-1.4.0.zip"] 428 | try: 429 | releaseinfo = parse_versions(filelist) 430 | except Exception as e: 431 | pass 432 | else: 433 | self.fail() 434 | 435 | # we didn't teach the parser about netloc (because it's dead), so 436 | # this should fail 
437 | def test_netloc(self): 438 | filelist = ["netloc-1.4.0.tar.gz", 439 | "netloc-1.4.0.tar.bz2"] 440 | try: 441 | releaseinfo = parse_versions(filelist) 442 | except Exception as e: 443 | pass 444 | else: 445 | self.fail() 446 | 447 | 448 | class upload_files_tests(unittest.TestCase): 449 | class test_s3_client(): 450 | def __init__(self, path, Existing = False): 451 | self._readcount = 0 452 | self._file_write_list = [] 453 | self._stream_write = "" 454 | self._path = path 455 | self._existing = Existing 456 | 457 | 458 | def get_object(self, Bucket, Key): 459 | self._readcount += 1 460 | result = {} 461 | 462 | if not self._existing or Key != self._path + 'build-openmpi-100.0.0rho1.json': 463 | response = {} 464 | response['Error'] = {} 465 | response['Error']['Code'] = 'NoSuchKey' 466 | raise botocore.exceptions.ClientError(response, 'get_object') 467 | 468 | buildinfo = {} 469 | buildinfo['branch'] = 'v100.0' 470 | buildinfo['valid'] = True 471 | buildinfo['revision'] = '100.0.0rho1' 472 | buildinfo['build_unix_time'] = 314314 473 | buildinfo['delete_on'] = 0 474 | buildinfo['files'] = {} 475 | fileinfo = {} 476 | fileinfo['sha1'] = 'abc' 477 | fileinfo['md5'] = 'zyx' 478 | fileinfo['size'] = 1024 479 | buildinfo['files']['openmpi-100.0.0rho1.tar.bz2'] = fileinfo 480 | result['Body'] = StringIO(json.dumps(buildinfo)) 481 | 482 | return result 483 | 484 | 485 | def list_objects_v2(self, Bucket, Prefix): 486 | self._readcount += 1 487 | result = {} 488 | 489 | if self._path.startswith(Prefix): 490 | result['KeyCount'] = 1 491 | else: 492 | result['KeyCount'] = 0 493 | return result 494 | 495 | 496 | def upload_file(self, Filename, Bucket, Key): 497 | assert(Key.startswith(self._path)) 498 | self._file_write_list.append(Key) 499 | 500 | 501 | def put_object(self, Bucket, Key, Body): 502 | assert(Key.startswith(self._path)) 503 | self._file_write_list.append(Key) 504 | self._stream_write += Body 505 | 506 | 507 | def get_readcount(self): 508 | return self._readcount 509 | 510 | 511 | def get_write_list(self): 512 | return self._file_write_list 513 | 514 | 515 | def get_write_stream(self): 516 | return self._stream_write 517 | 518 | 519 | @mock.patch('os.stat', _test_stat) 520 | @mock.patch('__main__.__compute_hashes', _test_compute_hashes) 521 | def test_new_buildinfo(self): 522 | releaseinfo = {} 523 | releaseinfo['project'] = 'open-mpi' 524 | releaseinfo['branch'] = 'v100.0' 525 | releaseinfo['version'] = '100.0.0rho1' 526 | releaseinfo['basename'] = 'openmpi' 527 | releaseinfo['build_unix_time'] = 12345 528 | 529 | files = ['openmpi-100.0.0rho1.tar.gz', 'openmpi-100.0.0rho1.tar.bz2'] 530 | 531 | client = self.test_s3_client("scratch/open-mpi/v100.0/", Existing = False) 532 | 533 | upload_files(client, 'open-mpi-scratch', 'scratch', 534 | releaseinfo, files, 'NO_OVERWRITE') 535 | self.assertEqual(client.get_readcount(), 3, 536 | "readcount was %d, expected 3" % (client.get_readcount())) 537 | self.assertEqual(len(client.get_write_list()), 3, 538 | "Unexpected write list length: %s" % str(client.get_write_list())) 539 | buildinfo = json.loads(client.get_write_stream()) 540 | self.assertEqual(len(buildinfo['files']), 2, 541 | 'Unexpected files length: %s' % str(buildinfo['files'])) 542 | 543 | 544 | def test_existing_buildinfo_nocontinue(self): 545 | releaseinfo = {} 546 | releaseinfo['project'] = 'open-mpi' 547 | releaseinfo['branch'] = 'v100.0' 548 | releaseinfo['version'] = '100.0.0rho1' 549 | releaseinfo['basename'] = 'openmpi' 550 | releaseinfo['build_unix_time'] = 1 551 | 552 | 
files = ['openmpi-100.0.0rho1.tar.gz'] 553 | 554 | client = self.test_s3_client("scratch/open-mpi/v100.0/", Existing = True) 555 | 556 | upload_files(client, 'open-mpi-scratch', 'scratch', 557 | releaseinfo, files, 'ASSUME_NO') 558 | self.assertEqual(client.get_readcount(), 3, 559 | "readcount was %d, expected 3" % (client.get_readcount())) 560 | self.assertEqual(len(client.get_write_list()), 0, 561 | "Unexpected write list length: %s" % str(client.get_write_list())) 562 | 563 | 564 | @mock.patch('os.stat', _test_stat) 565 | @mock.patch('__main__.__compute_hashes', _test_compute_hashes) 566 | def test_existing_buildinfo_nooverlap(self): 567 | releaseinfo = {} 568 | releaseinfo['project'] = 'open-mpi' 569 | releaseinfo['branch'] = 'v100.0' 570 | releaseinfo['version'] = '100.0.0rho1' 571 | releaseinfo['basename'] = 'openmpi' 572 | releaseinfo['build_unix_time'] = 1 573 | 574 | files = ['openmpi-100.0.0rho1.tar.gz'] 575 | 576 | client = self.test_s3_client("scratch/open-mpi/v100.0/", Existing = True) 577 | 578 | upload_files(client, 'open-mpi-scratch', 'scratch', 579 | releaseinfo, files, 'NO_OVERWRITE') 580 | 581 | self.assertEqual(client.get_readcount(), 3, 582 | "readcount was %d, expected 3" % (client.get_readcount())) 583 | self.assertEqual(len(client.get_write_list()), 2, 584 | "Unexpected write list length: %s" % str(client.get_write_list())) 585 | buildinfo = json.loads(client.get_write_stream()) 586 | self.assertEqual(len(buildinfo['files']), 2, 587 | 'Unexpected files length: %s' % str(buildinfo['files'])) 588 | 589 | 590 | @mock.patch('os.stat', _test_stat) 591 | @mock.patch('__main__.__compute_hashes', _test_compute_hashes) 592 | def test_existing_buildinfo_overlap_ok(self): 593 | releaseinfo = {} 594 | releaseinfo['project'] = 'open-mpi' 595 | releaseinfo['branch'] = 'v100.0' 596 | releaseinfo['version'] = '100.0.0rho1' 597 | releaseinfo['basename'] = 'openmpi' 598 | releaseinfo['build_unix_time'] = 1 599 | 600 | files = ['openmpi-100.0.0rho1.tar.gz', 'openmpi-100.0.0rho1.tar.bz2'] 601 | 602 | client = self.test_s3_client("scratch/open-mpi/v100.0/", Existing = True) 603 | 604 | upload_files(client, 'open-mpi-scratch', 'scratch', 605 | releaseinfo, files, 'NEVER_PROMPT') 606 | self.assertEqual(client.get_readcount(), 3, 607 | "readcount was %d, expected 3" % (client.get_readcount())) 608 | self.assertEqual(len(client.get_write_list()), 3, 609 | "Unexpected write list length: %s" % str(client.get_write_list())) 610 | buildinfo = json.loads(client.get_write_stream()) 611 | self.assertEqual(len(buildinfo['files']), 2, 612 | 'Unexpected files length: %s' % str(buildinfo['files'])) 613 | 614 | 615 | @mock.patch('os.stat', _test_stat) 616 | @mock.patch('__main__.__compute_hashes', _test_compute_hashes) 617 | def test_existing_buildinfo_overlap_fail(self): 618 | releaseinfo = {} 619 | releaseinfo['project'] = 'open-mpi' 620 | releaseinfo['branch'] = 'v100.0' 621 | releaseinfo['version'] = '100.0.0rho1' 622 | releaseinfo['basename'] = 'openmpi' 623 | releaseinfo['build_unix_time'] = 1 624 | 625 | files = ['openmpi-100.0.0rho1.tar.gz', 'openmpi-100.0.0rho1.tar.bz2'] 626 | 627 | client = self.test_s3_client("scratch/open-mpi/v100.0/", Existing = True) 628 | 629 | upload_files(client, 'open-mpi-scratch', 'scratch', 630 | releaseinfo, files, 'NO_OVERWRITE') 631 | self.assertEqual(client.get_readcount(), 3, 632 | "readcount was %d, expected 3" % (client.get_readcount())) 633 | self.assertEqual(len(client.get_write_list()), 0, 634 | "Unexpected write list length: %s" % 
str(client.get_write_list())) 635 | 636 | 637 | if __name__ == '__main__': 638 | unittest.main() 639 | -------------------------------------------------------------------------------- /nightly-tarball/Builder.py: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright (c) 2017-2019 Amazon.com, Inc. or its affiliates. All Rights 3 | # Reserved. 4 | # Copyright (c) 2081 Cisco Systems, Inc. All rights reserved. 5 | # 6 | # Additional copyrights may follow 7 | # 8 | 9 | import argparse 10 | import logging 11 | import os 12 | import json 13 | import hashlib 14 | import time 15 | import datetime 16 | import shutil 17 | import subprocess 18 | import fileinput 19 | import Coverity 20 | import BuilderUtils 21 | import smtplib 22 | from email.mime.text import MIMEText 23 | from git import Repo, exc 24 | from enum import Enum 25 | 26 | 27 | def compute_hashes(filename): 28 | """Helper function to compute MD5 and SHA1 hashes""" 29 | retval = {} 30 | md5 = hashlib.md5() 31 | sha1 = hashlib.sha1() 32 | sha256 = hashlib.sha256() 33 | with open(filename, 'rb') as f: 34 | while True: 35 | data = f.read(64 * 1024) 36 | if not data: 37 | break 38 | md5.update(data) 39 | sha1.update(data) 40 | sha256.update(data) 41 | retval['md5'] = md5.hexdigest() 42 | retval['sha1'] = sha1.hexdigest() 43 | retval['sha256'] = sha256.hexdigest() 44 | return retval 45 | 46 | 47 | # a note on paths used in the Builder... 48 | # 49 | # config['scratch_path'] : 50 | # config['project_path'] : / 51 | # current_build['build_root'] : //-/ 52 | # current_build['source_tree'] : //-/[repo] 53 | class Builder(object): 54 | """Build one or more branches of a git repo 55 | 56 | Core class of a nightly build system (possibly to be extended into 57 | a release build system as well). User callable functions are the 58 | object constructor as well as run() 59 | 60 | """ 61 | 62 | _base_options = { 'email_log_level' : 'INFO', 63 | 'console_log_level' : 'CRITICAL', 64 | 'scratch_path' : '${TMPDIR}' } 65 | 66 | class BuildResult(Enum) : 67 | SUCCESS = 1 68 | FAILED = 2 69 | SKIPPED = 3 70 | 71 | 72 | def __init__(self, config, filer): 73 | """Create a Builder object 74 | 75 | Create a builder object, which will build most simple 76 | projects. Projects with more complicated needs will likely 77 | want to override the add_arguments(), call(), and 78 | find_build_artifacts() functions. In the case of 79 | add_arguments() and call(), it is highly recommended 80 | that functions provided by a subclass of Builder call into the 81 | Builder functions to do the actual work. 82 | 83 | """ 84 | self._logger = None 85 | self._current_build = {} 86 | self._config = self._base_options.copy() 87 | self._config.update(config) 88 | self._filer = filer 89 | self._parser = argparse.ArgumentParser(description='Nightly build script for Open MPI related projects') 90 | self.add_arguments(self._parser) 91 | # copy arguments into options, assuming they were specified 92 | for key, value in vars(self._parser.parse_args()).items(): 93 | if not value == None: 94 | self._config[key] = value 95 | # special case hack... expand out scratch_path 96 | self._config['scratch_path'] = os.path.expandvars(self._config['scratch_path']) 97 | self._config['project_path'] = os.path.join(self._config['scratch_path'], 98 | self._config['project_short_name']) 99 | self._config['builder_tools'] = os.path.dirname(os.path.realpath(__file__)) 100 | 101 | # special hack for OMPI being inconsistent in short names.... 
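        # (if the project config does not provide a separate "very short"
        # name, fall back to using the short name for both)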
102 | if not 'project_very_short_name' in self._config: 103 | self._config['project_very_short_name'] = self._config['project_short_name'] 104 | 105 | if not os.path.exists(self._config['scratch_path']): 106 | os.makedirs(self._config['scratch_path']) 107 | 108 | # logging initialization. Logging will work after this point. 109 | self._logger = logging.getLogger("Builder") 110 | # while we use the handler levels to limit output, the 111 | # effective level is the lowest of the handlers and the base 112 | # logger output. There's a switch in the output function of 113 | # the call() utility to dump all output on debug, so be a 114 | # little careful about setting debug level output on the 115 | # logger to avoid that path being activated all the time. 116 | if self._config['console_log_level'] == 'DEBUG' or self._config['email_log_level'] == 'DEBUG': 117 | self._logger.setLevel(logging.DEBUG) 118 | else: 119 | self._logger.setLevel(logging.INFO) 120 | 121 | ch = logging.StreamHandler() 122 | ch.setLevel(self._config['console_log_level']) 123 | ch.setFormatter(logging.Formatter('%(levelname)s: %(message)s')) 124 | self._logger.addHandler(ch) 125 | 126 | self._config['log_file'] = os.path.join(self._config['scratch_path'], 127 | 'builder-output-%d.log' % (int(time.time()))) 128 | 129 | self._fh = logging.FileHandler(self._config['log_file'], 'w') 130 | self._fh.setLevel(self._config['email_log_level']) 131 | self._fh.setFormatter(logging.Formatter('%(message)s')) 132 | self._logger.addHandler(self._fh) 133 | 134 | 135 | def __del__(self): 136 | # delete the log file, since it doesn't auto-clean (we're only 137 | # using it for email, so no one will miss it) 138 | if self._logger != None: 139 | self._logger.removeHandler(self._fh) 140 | self._fh.close() 141 | os.remove(self._config['log_file']) 142 | 143 | 144 | def add_arguments(self, parser): 145 | """Add options for command line arguments 146 | 147 | Called during initialization of the class in order to add any 148 | required arguments to the options parser. Builder classes can 149 | provide their own add_arguments call, but should call the base 150 | add_arguments() in order to get the base set of options added 151 | to the parser. 152 | 153 | """ 154 | self._parser.add_argument('--console-log-level', 155 | help='Console Log level (default: CRITICAL).', type=str, 156 | choices=['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL']) 157 | self._parser.add_argument('--email-log-level', 158 | help='Email Log level (default: INFO).', type=str, 159 | choices=['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL']) 160 | self._parser.add_argument('--scratch-path', 161 | help='Directory to use as base of build tree.', 162 | type=str) 163 | 164 | 165 | def run(self): 166 | """Do all the real work of the Builder 167 | 168 | Other than __init__(), this is the real API for the Builder 169 | class. This function will execute every build described by 170 | the configuration passed to __init__(). Internally, it uses a 171 | helper function run_single_build() to execute each build. The 172 | only real logic in this function (other than iterating over 173 | keys and calling single_build) is to write the summary output 174 | / send emails). 
175 | 176 | """ 177 | self._logger.info("Branches: %s", str(self._config['branches'].keys())) 178 | good_builds = [] 179 | failed_builds = [] 180 | skipped_builds = [] 181 | 182 | for branch_name in self._config['branches']: 183 | try: 184 | result = self.run_single_build(branch_name) 185 | if result == Builder.BuildResult.SUCCESS: 186 | good_builds.append(branch_name) 187 | elif result == Builder.BuildResult.FAILED: 188 | failed_builds.append(branch_name) 189 | elif result == Builder.BuildResult.SKIPPED: 190 | skipped_builds.append(branch_name) 191 | except Exception as e: 192 | self._logger.error("run_single_build(%s) threw exception %s: %s" % 193 | (branch_name, str(type(e)), str(e))) 194 | failed_builds.append(branch_name) 195 | # if run_single_build throws an exception, we should 196 | # not continue trying to run, but should just do the 197 | # cleanup work 198 | break 199 | 200 | # Generate results output for email 201 | body = "Successful builds: %s\n" % (str(good_builds)) 202 | body += "Skipped builds: %s\n" % (str(skipped_builds)) 203 | body += "Failed builds: %s\n" % (str(failed_builds)) 204 | if len(failed_builds) > 0: 205 | subject = "%s nightly build: FAILURE" % (self._config['project_name']) 206 | else: 207 | subject = "%s nightly build: SUCCESS" % (self._config['project_name']) 208 | body += "\n=== Build output ===\n\n" 209 | body += open(self._config['log_file'], 'r').read() 210 | body += "\nYour friendly daemon,\nCyrador\n" 211 | 212 | msg = MIMEText(body) 213 | msg['Subject'] = subject 214 | msg['From'] = self._config['email_from'] 215 | msg['To'] = self._config['email_dest'] 216 | 217 | s = smtplib.SMTP('localhost') 218 | s.sendmail(self._config['email_from'], [self._config['email_dest']], msg.as_string()) 219 | s.quit() 220 | 221 | 222 | def run_single_build(self, branch_name): 223 | """Run a single branch build 224 | 225 | All the logic required to run a single build. This function 226 | should not raise an exception unless all follow-on builds 227 | should be skipped. 228 | 229 | """ 230 | self._logger.info("\nStarting build for " + branch_name) 231 | self._current_build = { "status" : 0, 232 | "branch_name" : branch_name } 233 | retval = Builder.BuildResult.SUCCESS 234 | 235 | remote_repository = self._config['repository'] 236 | 237 | now = time.time() 238 | self._current_build['build_unix_time'] = int(now) 239 | self._current_build['build_time'] = self.generate_build_time(now) 240 | build_root = os.path.join(self._config['project_path'], 241 | branch_name + "-" + self._current_build['build_time']) 242 | source_tree = os.path.join(build_root, 243 | os.path.basename(remote_repository)) 244 | 245 | self._current_build['remote_repository'] = remote_repository 246 | self._current_build['build_root'] = build_root 247 | self._current_build['source_tree'] = source_tree 248 | self._current_build['branch'] = branch_name 249 | 250 | build_history = self.get_build_history() 251 | if len(build_history) > 0: 252 | # this is really kind of awful, but build_history keys are 253 | # unix timestamps of the build. Find the last timestamp, 254 | # and that's the last build. Then look at the revision to 255 | # get the revision id of that build. 
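            # (equivalently: build_history[max(build_history)]['revision'])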
256 | last_version = build_history[sorted(build_history.keys())[-1:][0]]['revision'] 257 | else: 258 | last_version = '' 259 | 260 | self.prepare_source_tree() 261 | try: 262 | if last_version == self._current_build['revision']: 263 | self._logger.info("Build for revision %s already exists, skipping.", 264 | self._current_build['revision']) 265 | retval = Builder.BuildResult.SKIPPED 266 | else: 267 | self._logger.info("Found new revision %s", 268 | self._current_build['revision']) 269 | 270 | self.update_version_file() 271 | self.build() 272 | self.find_build_artifacts() 273 | if ('coverity' in self._config['branches'][branch_name] 274 | and self._config['branches'][branch_name]['coverity'] 275 | and len(self._current_build['artifacts']) > 0): 276 | try: 277 | Coverity.run_coverity(self._logger, 278 | self._current_build['build_root'], 279 | os.path.join(self._current_build['source_tree'], 280 | self._current_build['artifacts'].keys()[0]), 281 | self._config['coverity']) 282 | except Exception as e: 283 | self._logger.error("ERROR: Coverity submission failed: %s" 284 | % (str(e))) 285 | else: 286 | self._logger.info("Successfully submitted Coverity build") 287 | self.publish_build_artifacts() 288 | self._logger.info("%s build of revision %s completed successfully" % 289 | (branch_name, self._current_build['revision'])) 290 | except Exception as e: 291 | self._logger.error("FAILURE: %s: %s" 292 | % (str(type(e)), str(e))) 293 | self.publish_failed_build() 294 | retval = Builder.BuildResult.FAILED 295 | finally: 296 | self.cleanup() 297 | self.remote_cleanup(build_history) 298 | return retval 299 | 300 | 301 | def generate_build_time(self, build_unix_time): 302 | """Helper function to format time strings from unix time""" 303 | return datetime.datetime.utcfromtimestamp(build_unix_time).strftime("%Y%m%d%H%M") 304 | 305 | 306 | def generate_build_history_filename(self, branch_name, build_unix_time, revision): 307 | """Helper function to build filename 308 | 309 | The build history file represents a single build, and has to 310 | have an agreed-upon naming convention between both the builder 311 | script and the web pages that will consume the output. 312 | Override if build--.json 313 | is not sufficient for your project. 314 | 315 | """ 316 | build_time = self.generate_build_time(build_unix_time) 317 | return os.path.join(self._config['branches'][branch_name]['output_location'], 318 | "build-%s-%s-%s-%s.json" % (self._config['project_short_name'], 319 | branch_name, 320 | build_time, 321 | revision)) 322 | 323 | 324 | def get_build_history(self): 325 | """Helper function to list all known builds for the current branch 326 | 327 | Pull all known builds from the remote storage and return an 328 | array of the build history objects for the current branch. 329 | Returns an empty list if there are no known builds for the 330 | current branch. 
331 | 332 | """ 333 | branch_name = self._current_build['branch_name'] 334 | dirname = self._config['branches'][branch_name]['output_location'] 335 | builds = self._filer.file_search(dirname, "build-*.json") 336 | build_history = {} 337 | for build in builds: 338 | self._logger.debug("looking at data file %s" % build) 339 | stream = self._filer.download_to_stream(build) 340 | data = json.load(stream) 341 | if not 'build_unix_time' in data: 342 | continue 343 | if not 'branch' in data: 344 | continue 345 | data_build_unix_time = data['build_unix_time'] 346 | data_branch_name = data['branch'] 347 | if data_branch_name == branch_name: 348 | build_history[data_build_unix_time] = data 349 | return build_history 350 | 351 | 352 | def prepare_source_tree(self): 353 | """Build a local source tree for the current branch 354 | 355 | Builds the current tree, including building all parent 356 | directories, checks out the source for the current branch, and 357 | sets _current_build['revision'] to the revision of the HEAD 358 | for the current branch. 359 | 360 | """ 361 | branch_name = self._current_build['branch_name'] 362 | remote_repository = self._current_build['remote_repository'] 363 | source_tree = self._current_build['source_tree'] 364 | branch = self._current_build['branch'] 365 | 366 | # assume that the build tree doesn't exist. Makedirs will 367 | # throw an exception if it does. 368 | self._logger.debug("Making build tree: " + os.path.dirname(source_tree)) 369 | os.makedirs(os.path.dirname(source_tree)) 370 | 371 | # get an up-to-date git repository 372 | self._logger.debug("Cloning from " + remote_repository) 373 | repo = Repo.clone_from(remote_repository, source_tree) 374 | 375 | # switch to the right branch and reset the HEAD to be 376 | # origin//HEAD 377 | self._logger.debug("Switching to branch: " + branch) 378 | if not branch in repo.heads: 379 | # TODO: Can we avoid calling into repo.git here? 380 | repo.git.checkout('origin/' + branch, b=branch) 381 | repo.head.reference = repo.refs['origin/' + branch] 382 | 383 | # And pull in all the right submodules 384 | repo.submodule_update(recursive = True) 385 | 386 | # wish I could figure out how to do this without resorting to 387 | # shelling out to git :/ 388 | self._current_build['revision'] = repo.git.rev_parse(repo.head.object.hexsha, short=7) 389 | 390 | 391 | def update_version_file(self): 392 | """Hook to update version file if needed before the actual build step. 393 | 394 | Most projects have custom methods of updating the version used 395 | by the build process before making a nightly tarball (so that 396 | different revisions are evident by the tarball name / build 397 | version). Projects should provide a customized version of 398 | this function if necessary. Default action is to do 399 | nothing. 400 | 401 | """ 402 | pass 403 | 404 | 405 | def build(self): 406 | """Execute building the tarball. 407 | 408 | Most projects have a helper script for building tarballs. If 409 | the key 'tarball_builder' is present in the config, this 410 | function will execute the tarball_builder. Otherwise, it will 411 | run autoreconf -if; ./configure ; make distcheck. 
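
        As a purely illustrative (hypothetical) example, a project config
        could carry an entry such as:

            'tarball_builder': ['./contrib/build-tarball.sh']

        and that command would then be run from the top of the checked-out
        source tree instead of the default sequence.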
412 | 
413 |         """
414 |         branch_name = self._current_build['branch_name']
415 |         source_tree = self._current_build['source_tree']
416 |         cwd = os.getcwd()
417 |         os.chdir(source_tree)
418 |         try:
419 |             if 'tarball_builder' in self._config:
420 |                 self.call(self._config['tarball_builder'], build_call=True)
421 |             else:
422 |                 self.call(["autoreconf", "-if"], build_call=True)
423 |                 self.call(["./configure"], build_call=True)
424 |                 self.call(["make", "distcheck"], build_call=True)
425 |         finally:
426 |             os.chdir(cwd)
427 | 
428 | 
429 |     def call(self, args, log_name=None, build_call=False, env=None):
430 |         """Modify shell executable string before calling
431 | 
432 |         Some projects (like Open MPI) use shell modules to configure
433 |         the environment properly for a build.  The easiest way to
434 |         support that use case is a shell wrapper function that
435 |         properly configures the environment.  This function provides a
436 |         hook which can be used to add the shell wrapper function into
437 |         the call arguments, resulting in the build system having the
438 |         right environment at execution time.  The default is to call
439 |         args directly.
440 | 
441 |         """
442 |         if log_name is None:
443 |             log_file = args[0]
444 |         else:
445 |             log_file = log_name
446 |         log_file = os.path.join(self._current_build['build_root'], log_file + "-output.txt")
447 |         BuilderUtils.logged_call(args, log_file=log_file, env=env)
448 | 
449 | 
450 |     def find_build_artifacts(self):
451 |         """Pick up any build artifacts from the build step
452 | 
453 |         Records the build artifacts from the build step in
454 |         _current_build['artifacts'], keyed by file name relative to
455 |         source_tree.  The Builder.find_build_artifacts()
456 |         implementation will search for any .tar.gz and .tar.bz2 files
457 |         in the top level of the build tree.  Override if the project
458 |         builder can be more specific.
459 | 
460 |         """
461 |         self._current_build['artifacts'] = {}
462 |         source_tree = self._current_build['source_tree']
463 |         for file in os.listdir(source_tree):
464 |             if file.endswith(".tar.gz") or file.endswith(".tar.bz2"):
465 |                 filename = os.path.join(source_tree, file)
466 |                 info = os.stat(filename)
467 |                 hashes = compute_hashes(filename)
468 |                 self._current_build['artifacts'][file] = {}
469 |                 self._current_build['artifacts'][file]['sha1'] = hashes['sha1']
470 |                 self._current_build['artifacts'][file]['sha256'] = hashes['sha256']
471 |                 self._current_build['artifacts'][file]['md5'] = hashes['md5']
472 |                 self._current_build['artifacts'][file]['size'] = info.st_size
473 |                 self._logger.debug("Found artifact %s, size: %d, md5: %s, sha1: %s, sha256: %s"
474 |                                    % (file, info.st_size, hashes['md5'], hashes['sha1'], hashes['sha256']))
475 | 
476 | 
477 |     def publish_build_artifacts(self):
478 |         """Publish any successful build artifacts
479 | 
480 |         Publish any build artifacts found by find_build_artifacts and
481 |         create / publish the build history blob for the artifacts.
482 |         This function also creates the "latest_snapshot.txt" file,
483 |         on the assumption that the current build is, in fact, the latest.
484 | 
485 |         """
486 |         branch_name = self._current_build['branch_name']
487 | 
488 |         build_data = {}
489 |         build_data['branch'] = self._current_build['branch']
490 |         build_data['valid'] = True
491 |         build_data['revision'] = self._current_build['revision']
492 |         build_data['build_unix_time'] = self._current_build['build_unix_time']
493 |         build_data['delete_on'] = 0
494 |         build_data['files'] = {}
495 | 
496 |         for build in self._current_build['artifacts']:
497 |             local_filename = os.path.join(self._current_build['source_tree'],
498 |                                           build)
499 |             remote_filename = os.path.join(self._config['branches'][branch_name]['output_location'],
500 |                                            build)
501 |             self._logger.debug("Publishing file %s (local: %s, remote: %s)" %
502 |                                (build, local_filename, remote_filename))
503 |             self._filer.upload_from_file(local_filename, remote_filename)
504 |             build_data['files'][build] = self._current_build['artifacts'][build]
505 | 
506 |         datafile = self.generate_build_history_filename(self._current_build['branch_name'],
507 |                                                         self._current_build['build_unix_time'],
508 |                                                         self._current_build['revision'])
509 |         self._filer.upload_from_stream(datafile, json.dumps(build_data), {'Cache-Control': 'max-age=600'})
510 | 
511 |         latest_filename = os.path.join(self._config['branches'][branch_name]['output_location'],
512 |                                        'latest_snapshot.txt')
513 |         version_string = self._current_build['version_string'] + '\n'
514 |         self._filer.upload_from_stream(latest_filename, version_string, {'Cache-Control': 'max-age=600'})
515 | 
516 | 
517 |     def update_build_history(self, build_history):
518 |         """Update any build histories that need expiring
519 | 
520 |         Deletion of build histories / artifacts is a two step process.
521 |         First, when there are more than config['max_count'] builds
522 |         found, the oldest N are expired to get under max_count.
523 |         Expired builds are not immediately deleted.  Instead, they
524 |         have their valid field set to false and a delete_on time set
525 |         to 24 hours from now.  This is to give the web front end time
526 |         to see the update and stop publishing the now-expired builds.
527 |         Second, builds with a delete_on time in the past are deleted
528 |         from the remote archive.  This function handles moving builds
529 |         from "valid" to "expired", and remote_cleanup() handles the
530 |         deletion case.
531 | 
532 |         """
533 |         branch_name = self._current_build['branch_name']
534 | 
535 |         # set builds past max_count to invalid and set an expiration
536 |         # if one isn't already set.  Note that this isn't quite right,
537 |         # as we'll count already invalid builds against max_count, but
538 |         # unless builds are added to the build_history out of order
539 |         # (which would be an entertaining causality problem), the
540 |         # effect is the same, and this is way less code.
541 |         if 'max_count' in self._config['branches'][branch_name]:
542 |             max_count = self._config['branches'][branch_name]['max_count']
543 |         else:
544 |             max_count = 10
545 |         builds = sorted(build_history[branch_name]['builds'].keys())
546 |         if len(builds) > max_count:
547 |             expire_builds = builds[0:len(builds) - max_count]
548 |             for key in expire_builds:
549 |                 if not build_history[branch_name]['builds'][key]['valid']:
550 |                     continue
551 |                 build_history[branch_name]['builds'][key]['valid'] = False
552 |                 build_history[branch_name]['builds'][key]['delete_on'] = int(time.time()) + (24 * 60 * 60)
553 |                 self._logger.debug("Expiring build %s" % (key))
554 | 
555 | 
556 |     def publish_failed_build(self):
557 |         """Deal with a failed build
558 | 
559 |         Builds fail.  It happens to the best of us.  This function is
560 |         called when something in the build failed (any step, from code
561 |         checkout to finding build artifacts).  This function will
562 |         create a tarball of the build directory and publish it so that
563 |         future generations may see what went wrong and learn from our
564 |         mistakes.  After making a tarball of the directory, it uploads
565 |         the tarball to the remote storage and sets
566 |         ._current_build['failed tarball'] to the name (relative to
567 |         ._config['failed_build_prefix']) where the tarball was
568 |         uploaded.
569 | 
570 |         """
571 |         if 'failed_build_prefix' not in self._config:
572 |             self._logger.warn("failed_build_prefix not set in config; not saving failed build info")
573 |             return
574 | 
575 |         branch_name = self._current_build['branch_name']
576 |         self._logger.debug("publishing failed build for %s" % (branch_name))
577 |         failed_tarball_name = "%s-%s-%s-failed.tar.gz" % (self._config['project_short_name'],
578 |                                                           branch_name,
579 |                                                           self._current_build['build_time'])
580 |         failed_tarball_path = os.path.join(self._config['project_path'],
581 |                                            failed_tarball_name)
582 |         cwd = os.getcwd()
583 |         os.chdir(self._current_build['build_root'])
584 |         try:
585 |             self.call(["tar", "czf", failed_tarball_path, "."],
586 |                       log_name="failed-tarball-tar")
587 |         finally:
588 |             os.chdir(cwd)
589 |         remote_filename = os.path.join(self._config['failed_build_prefix'],
590 |                                        failed_tarball_name)
591 | 
592 |         self._filer.upload_from_file(failed_tarball_path, remote_filename)
593 |         os.remove(failed_tarball_path)
594 | 
595 |         self._logger.warn('Build artifacts available at: %s' %
596 |                           (self._config['failed_build_url'] + remote_filename))
597 | 
598 | 
599 |     def cleanup(self):
600 |         """Clean up after ourselves
601 | 
602 |         If your builder subclass does anything crazy in the previous
603 |         steps, override here.  Otherwise, deleting everything in the
604 |         build directory should be sufficient.
605 | 
606 |         """
607 |         dirpath = self._config['project_path']
608 |         self._logger.debug("Deleting directory: %s" % (dirpath))
609 |         # deal with "make distcheck"'s stupid permissions.  Exception
610 |         # handling is inside the loop so that we do not skip some
611 |         # files on an error.  os.chmod will throw an error if it
612 |         # tries to follow a dangling symlink.
613 |         for root, dirs, files in os.walk(dirpath):
614 |             for momo in dirs:
615 |                 try:
616 |                     os.chmod(os.path.join(root, momo), 0o700)
617 |                 except OSError:
618 |                     pass
619 |             for momo in files:
620 |                 try:
621 |                     os.chmod(os.path.join(root, momo), 0o700)
622 |                 except OSError:
623 |                     pass
624 |         shutil.rmtree(dirpath)
625 | 
626 | 
627 |     def remote_cleanup(self, build_history):
628 |         """Clean up old builds on remote storage"""
629 |         now = int(time.time())
630 |         branch_name = self._current_build['branch_name']
631 | 
632 |         # set builds past max_count to invalid and set an expiration
633 |         # if one isn't already set.  Note that this isn't quite right,
634 |         # as we'll count already invalid builds against max_count, but
635 |         # unless builds are added to the build_history out of order
636 |         # (which would be an entertaining causality problem), the
637 |         # effect is the same, and this is way less code.  Also, this
638 |         # is a little racy as hell, given there's no locking on
639 |         # simultaneous builds, but the worst case should be that the
640 |         # server ends up with a few too many valid builds.
641 |         if 'max_count' in self._config['branches'][branch_name]:
642 |             max_count = self._config['branches'][branch_name]['max_count']
643 |         else:
644 |             max_count = 10
645 |         builds = sorted(build_history.keys())
646 |         if len(builds) > max_count:
647 |             builds = builds[0:len(builds) - max_count]
648 |             for key in builds:
649 |                 if not build_history[key]['valid']:
650 |                     continue
651 |                 build_history[key]['valid'] = False
652 |                 # expire in one day
653 |                 build_history[key]['delete_on'] = now + (24 * 60 * 60)
654 |                 self._logger.debug("Expiring build %s" % (key))
655 |                 filename = self.generate_build_history_filename(build_history[key]['branch'],
656 |                                                                 build_history[key]['build_unix_time'],
657 |                                                                 build_history[key]['revision'])
658 |                 self._filer.upload_from_stream(filename,
659 |                                                json.dumps(build_history[key]), {'Cache-Control': 'max-age=600'})
660 | 
661 |         for build in build_history.keys():
662 |             delete_on = build_history[build]['delete_on']
663 |             if delete_on != 0 and delete_on < int(time.time()):
664 |                 self._logger.debug("Removing build %s" % (build))
665 |                 for name in build_history[build]['files'].keys():
666 |                     dirname = self._config['branches'][branch_name]['output_location']
667 |                     pathname = os.path.join(dirname, name)
668 |                     self._logger.debug("Removing file %s" % (pathname))
669 |                     self._filer.delete(pathname)
670 |                 datafile = self.generate_build_history_filename(build_history[build]['branch'],
671 |                                                                 build_history[build]['build_unix_time'],
672 |                                                                 build_history[build]['revision'])
673 |                 self._logger.debug("Removing data file %s" % (datafile))
674 |                 self._filer.delete(datafile)
675 | 
676 |         # as a (maybe temporary?) hack, generate md5sum.txt and
677 |         # sha1sum.txt files for all valid builds.  Do this in
678 |         # remote_cleanup rather than update_build_history so that it
679 |         # gets regenerated whenever files go invalid/removed, rather
680 |         # than just when new builds are created.
681 |         md5sum_string = ''
682 |         sha1sum_string = ''
683 |         for build in build_history.keys():
684 |             if not build_history[build]['valid']:
685 |                 continue
686 |             for filename in build_history[build]['files'].keys():
687 |                 filedata = build_history[build]['files'][filename]
688 |                 md5sum_string += '%s %s\n' % (filedata['md5'], filename)
689 |                 sha1sum_string += '%s %s\n' % (filedata['sha1'], filename)
690 |         output_base = self._config['branches'][branch_name]['output_location']
691 |         self._filer.upload_from_stream(os.path.join(output_base, 'md5sums.txt'),
692 |                                        md5sum_string)
693 |         self._filer.upload_from_stream(os.path.join(output_base, 'sha1sums.txt'),
694 |                                        sha1sum_string)
695 | 
--------------------------------------------------------------------------------
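A brief illustrative note on how Builder.py is meant to be used: it is the
generic driver, and the per-project scripts in nightly-tarball/ (such as
OMPIBuilder.py) subclass it and override the hooks described in the
docstrings above (update_version_file(), build(), call(),
find_build_artifacts()).  The sketch below is hypothetical only -- the class
name, VERSION file path, and wrapper script are invented for illustration
and are not taken from the real project builders; it assumes the module
layout of this directory (from Builder import Builder).

    import os

    from Builder import Builder


    class ExampleBuilder(Builder):
        """Hypothetical project-specific builder, shown only to illustrate
        which hooks a project would normally override."""

        def update_version_file(self):
            # Stamp the current git revision into a (made-up) VERSION file
            # so the nightly tarball name reflects the revision being built.
            version_file = os.path.join(self._current_build['source_tree'],
                                        'VERSION')
            self._logger.info("Updating %s for revision %s",
                              version_file, self._current_build['revision'])
            # ... project-specific rewrite of version_file would go here ...

        def call(self, args, log_name=None, build_call=False, env=None):
            # Route build-time commands through a (hypothetical) wrapper
            # script that sets up the right environment, as described in
            # Builder.call()'s docstring.
            if build_call:
                args = ['./example-env-wrapper.sh'] + args
            super(ExampleBuilder, self).call(args, log_name=log_name,
                                             build_call=build_call, env=env)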