├── .github ├── PULL_REQUEST_TEMPLATE.md └── workflows │ └── test_workflow.yml ├── LICENSE ├── NOTICE ├── README.md ├── __init__.py ├── __main__.py ├── build_pyoptsparse.py └── pyproject.toml /.github/PULL_REQUEST_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | ### Summary 2 | 3 | Summary of PR. 4 | 5 | ### Related Issues 6 | 7 | - Resolves # 8 | 9 | ### Backwards incompatibilities 10 | 11 | None 12 | 13 | ### New Dependencies 14 | 15 | None 16 | -------------------------------------------------------------------------------- /.github/workflows/test_workflow.yml: -------------------------------------------------------------------------------- 1 | name: Tests 2 | 3 | on: 4 | # Trigger on push or pull request events for the master branch 5 | push: 6 | branches: [ master ] 7 | pull_request: 8 | branches: [ master ] 9 | 10 | # Run the workflow Sundays at 0400 UTC 11 | schedule: 12 | - cron: '0 4 * * 0' 13 | 14 | # Allow running the workflow manually from the Actions tab 15 | workflow_dispatch: 16 | inputs: 17 | run_name: 18 | description: 'Name of workflow run as it will appear under Actions tab (optional):' 19 | type: string 20 | required: false 21 | default: '' 22 | job_name: 23 | description: 'Select a job from the matrix to run (default: all jobs)' 24 | type: choice 25 | options: 26 | - '' 27 | - 'Ubuntu Default' 28 | - 'Ubuntu Default, no MPI' 29 | - 'Ubuntu Default, Numpy 2.x' 30 | - 'MacOS Default (Intel)' 31 | - 'MacOS Default (ARM), NumPy 2.x' 32 | - 'Ubuntu Latest' 33 | - 'Ubuntu Oldest' 34 | - 'Ubuntu Default, no MPI, forced build' 35 | - 'MacOS Default, no MPI, forced build' 36 | - 'Ubuntu Latest, no MPI, forced build' 37 | required: false 38 | default: '' 39 | debug_enabled: 40 | description: 'Run the build with tmate debugging enabled (https://github.com/marketplace/actions/debugging-with-tmate)' 41 | type: boolean 42 | required: false 43 | default: false 44 | 45 | run-name: ${{ inputs.run_name }} 46 | 47 
| jobs: 48 | 49 | tests: 50 | 51 | timeout-minutes: 30 52 | 53 | strategy: 54 | fail-fast: false 55 | matrix: 56 | include: 57 | 58 | # test default pyoptsparse on Ubuntu, Numpy 1.x 59 | - NAME: Ubuntu Default 60 | OS: ubuntu-latest 61 | PY: '3.12' 62 | NUMPY: '1.26' 63 | SCIPY: '1.13' 64 | MPI: true 65 | PYOPTSPARSE: 'default' 66 | # PAROPT: true 67 | SNOPT: 7.7 68 | 69 | # test default pyoptsparse on Ubuntu, NumPy 1.x, no MPI 70 | - NAME: Ubuntu Default, no MPI 71 | OS: ubuntu-latest 72 | PY: '3.12' 73 | NUMPY: '1.26' 74 | SCIPY: '1.13' 75 | PYOPTSPARSE: 'default' 76 | SNOPT: 7.7 77 | 78 | # test default pyoptsparse on Ubuntu, Numpy 2.x 79 | - NAME: Ubuntu Default, Numpy 2.x 80 | OS: ubuntu-latest 81 | PY: '3.13' 82 | NUMPY: '2.2' 83 | SCIPY: '1.15' 84 | MPI: true 85 | PYOPTSPARSE: 'default' 86 | # PAROPT: true 87 | SNOPT: 7.7 88 | 89 | # test default pyoptsparse on MacOS Legacy (Intel), NumPy 1.x 90 | - NAME: MacOS Default (Intel) 91 | OS: macos-13 92 | PY: '3.11' 93 | NUMPY: '1.26' 94 | SCIPY: '1.13' 95 | MPI: true 96 | PYOPTSPARSE: 'default' 97 | # PAROPT: true 98 | SNOPT: 7.7 99 | 100 | # test default pyoptsparse on MacOS latest (ARM), NumPy 2.x 101 | - NAME: MacOS Default (ARM), NumPy 2.x 102 | OS: macos-latest 103 | PY: '3.12' 104 | NUMPY: '2.2' 105 | SCIPY: '1.15' 106 | MPI: true 107 | PYOPTSPARSE: 'default' 108 | SNOPT: 7.7 109 | 110 | # test latest release of pyoptsparse, NumPy 2.x 111 | - NAME: Ubuntu Latest 112 | OS: ubuntu-latest 113 | PY: 3.13 114 | NUMPY: 2.2 115 | SCIPY: 1.15 116 | MPI: true 117 | PYOPTSPARSE: 'latest' 118 | # PAROPT: true 119 | SNOPT: 7.7 120 | 121 | # test oldest supported version of pyoptsparse 122 | - NAME: Ubuntu Oldest 123 | OS: ubuntu-latest 124 | PY: 3.8 125 | NUMPY: 1.22 126 | SCIPY: 1.7 127 | PYOPTSPARSE: 'v2.9.0' 128 | NO_IPOPT: true 129 | SNOPT: 7.2 130 | 131 | # test default pyoptsparse on Ubuntu without MPI with forced build, NumPy 1.x 132 | - NAME: Ubuntu Default, no MPI, forced build 133 | OS: ubuntu-latest 134 | 
PY: '3.12' 135 | NUMPY: '1.26' 136 | SCIPY: '1.13' 137 | PYOPTSPARSE: 'default' 138 | SNOPT: 7.7 139 | FORCE_BUILD: true 140 | 141 | # test default pyoptsparse on MacOS without MPI with forced build 142 | - NAME: MacOS Default, no MPI, forced build 143 | OS: macos-13 144 | PY: '3.11' 145 | NUMPY: '1.26' 146 | SCIPY: '1.13' 147 | PYOPTSPARSE: 'default' 148 | SNOPT: 7.7 149 | FORCE_BUILD: true 150 | XCODE: '14.2' 151 | 152 | # test latest pyoptsparse without MPI with forced build 153 | - NAME: Ubuntu Latest, no MPI, forced build 154 | OS: ubuntu-latest 155 | PY: 3.13 156 | NUMPY: 2.2 157 | SCIPY: 1.15 158 | PYOPTSPARSE: 'latest' 159 | SNOPT: 7.7 160 | FORCE_BUILD: true 161 | 162 | runs-on: ${{ matrix.OS }} 163 | 164 | name: ${{ matrix.NAME }} 165 | 166 | defaults: 167 | run: 168 | shell: bash -l {0} 169 | 170 | steps: 171 | - name: Display run details 172 | run: | 173 | echo "=============================================================" 174 | echo "Run #${GITHUB_RUN_NUMBER}" 175 | echo "Run ID: ${GITHUB_RUN_ID}" 176 | echo "Testing: ${GITHUB_REPOSITORY}" 177 | echo "Triggered by: ${GITHUB_EVENT_NAME}" 178 | echo "Initiated by: ${GITHUB_ACTOR}" 179 | echo "=============================================================" 180 | 181 | - name: Exit if this job was not selected 182 | if: github.event_name == 'workflow_dispatch' && inputs.job_name != '' && inputs.job_name != matrix.NAME 183 | uses: actions/github-script@v7 184 | with: 185 | script: core.setFailed('The ${{ matrix.NAME }} job was not included in the run, exiting...'); 186 | 187 | - name: Create SSH key 188 | if: matrix.SNOPT 189 | env: 190 | SSH_PRIVATE_KEY: ${{ secrets.SSH_PRIVATE_KEY }} 191 | SSH_KNOWN_HOSTS: ${{ secrets.SSH_KNOWN_HOSTS }} 192 | run: | 193 | mkdir -p ~/.ssh/ 194 | echo "$SSH_PRIVATE_KEY" > ~/.ssh/id_rsa 195 | sudo chmod 600 ~/.ssh/id_rsa 196 | echo "$SSH_KNOWN_HOSTS" > ~/.ssh/known_hosts 197 | 198 | - name: Checkout code 199 | uses: actions/checkout@v4 200 | 201 | - name: Setup conda 202 | 
uses: conda-incubator/setup-miniconda@v3 203 | with: 204 | miniconda-version: latest 205 | python-version: ${{ matrix.PY }} 206 | channels: conda-forge 207 | 208 | - name: Install 209 | run: | 210 | conda install numpy=${{ matrix.NUMPY }} scipy=${{ matrix.SCIPY }} -q -y 211 | 212 | conda install cython swig compilers cmake meson liblapack openblas -q -y 213 | 214 | if [[ "${{ matrix.XCODE }}" ]]; then 215 | sudo xcode-select -s "/Applications/Xcode_${{ matrix.XCODE }}.app" 216 | fi 217 | 218 | echo "=============================================================" 219 | echo "Install build_pyoptsparse" 220 | echo "=============================================================" 221 | python -m pip install . 222 | 223 | - name: Install MPI 224 | if: matrix.MPI 225 | run: | 226 | echo "=============================================================" 227 | echo "Install MPI" 228 | echo "=============================================================" 229 | conda install openmpi-mpicc mpi4py -q -y 230 | 231 | echo "PRTE_MCA_rmaps_default_mapping_policy=:oversubscribe" >> $GITHUB_ENV 232 | echo "OMPI_MCA_rmaps_base_oversubscribe=1" >> $GITHUB_ENV 233 | echo "OMPI_MCA_btl=^openib" >> $GITHUB_ENV 234 | 235 | - name: Display environment before 236 | run: | 237 | conda info 238 | conda list 239 | 240 | echo "=============================================================" 241 | echo "Check installed versions of Python, Numpy and Scipy" 242 | echo "=============================================================" 243 | python -c "import sys; assert str(sys.version).startswith(str(${{ matrix.PY }})), \ 244 | f'Python version {sys.version} is not the requested version (${{ matrix.PY }})'" 245 | 246 | python -c "import numpy; assert str(numpy.__version__).startswith(str(${{ matrix.NUMPY }})), \ 247 | f'Numpy version {numpy.__version__} is not the requested version (${{ matrix.NUMPY }})'" 248 | 249 | python -c "import scipy; assert str(scipy.__version__).startswith(str(${{ matrix.SCIPY }})), 
\ 250 | f'Scipy version {scipy.__version__} is not the requested version (${{ matrix.SCIPY }})'" 251 | 252 | - name: Build pyOptSparse 253 | run: | 254 | echo "=============================================================" 255 | echo "Build pyoptsparse" 256 | echo "=============================================================" 257 | 258 | if [[ "${{ matrix.PYOPTSPARSE }}" == "default" ]]; then 259 | BRANCH="" 260 | elif [[ "${{ matrix.PYOPTSPARSE }}" == "latest" ]]; then 261 | LATEST_URL=`curl -fsSLI -o /dev/null -w %{url_effective} https://github.com/mdolab/pyoptsparse/releases/latest` 262 | LATEST_VER=`echo $LATEST_URL | awk '{split($0,a,"/tag/"); print a[2]}'` 263 | BRANCH="-b $LATEST_VER" 264 | else 265 | BRANCH="-b ${{ matrix.PYOPTSPARSE }}" 266 | fi 267 | 268 | if [[ "${{ matrix.PAROPT }}" ]]; then 269 | PAROPT="-a" 270 | fi 271 | 272 | if [[ "${{ matrix.SNOPT }}" == "7.7" && "${{ secrets.SNOPT_LOCATION_77 }}" ]]; then 273 | echo " > Secure copying SNOPT 7.7 over SSH" 274 | mkdir SNOPT 275 | scp -qr ${{ secrets.SNOPT_LOCATION_77 }} SNOPT 276 | SNOPT="-s SNOPT/src" 277 | elif [[ "${{ matrix.SNOPT }}" == "7.2" && "${{ secrets.SNOPT_LOCATION_72 }}" ]]; then 278 | echo " > Secure copying SNOPT 7.2 over SSH" 279 | mkdir SNOPT 280 | scp -qr ${{ secrets.SNOPT_LOCATION_72 }} SNOPT 281 | SNOPT="-s SNOPT/source" 282 | elif [[ "${{ matrix.SNOPT }}" ]]; then 283 | echo "SNOPT version ${{ matrix.SNOPT }} was requested but source is not available" 284 | fi 285 | 286 | if [[ "${{ matrix.LINEAR_SOLVER }}" == "hsl" ]]; then 287 | if [[ "${{ secrets.HSL_LOCATION }}" ]]; then 288 | scp -q ${{ secrets.HSL_LOCATION }} hsl.tar.gz 289 | LINEAR_SOLVER="-l hsl -t hsl.tar.gz" 290 | else 291 | echo "---------------------------------------------------------------------------" 292 | echo "HSL was requested but source is not available, using default linear solver." 
293 | echo "---------------------------------------------------------------------------" 294 | fi 295 | elif [[ "${{ matrix.LINEAR_SOLVER }}" == "pardiso" ]]; then 296 | echo "-------------------------------------------------------------------------------" 297 | echo "Pardiso requires Intel compilers, which are not installed. The build will fail." 298 | echo "-------------------------------------------------------------------------------" 299 | LINEAR_SOLVER="-l pardiso" 300 | fi 301 | 302 | if [[ "${{ matrix.NO_IPOPT }}" ]]; then 303 | NO_IPOPT="--no-ipopt" 304 | fi 305 | 306 | if [[ "${{ matrix.FORCE_BUILD }}" ]]; then 307 | FORCE_BUILD="--force-build" 308 | fi 309 | 310 | echo "build_pyoptsparse -v $BRANCH $FORCE_BUILD $PAROPT $SNOPT $NO_IPOPT $LINEAR_SOLVER" 311 | build_pyoptsparse -v $BRANCH $FORCE_BUILD $PAROPT $SNOPT $NO_IPOPT $LINEAR_SOLVER -d 312 | 313 | echo "BRANCH=${BRANCH}" >> $GITHUB_ENV 314 | 315 | - name: Display build log 316 | if: failure() 317 | run: | 318 | for f in $(find /tmp/ -name 'meson-log.txt'); do 319 | echo "=============================================================" 320 | echo $f 321 | echo "=============================================================" 322 | cat $f 323 | done 324 | if test -d /private/var/folders; then 325 | for f in $(find /private/var/folders/ -name 'meson-log.txt'); do 326 | echo "=============================================================" 327 | echo $f 328 | echo "=============================================================" 329 | cat $f 330 | done 331 | for f in $(find /private/var/folders/ -name 'compile.log'); do 332 | echo "=============================================================" 333 | echo $f 334 | echo "=============================================================" 335 | cat $f 336 | done 337 | fi 338 | 339 | # Enable tmate debugging of manually-triggered workflows if the input option was provided 340 | # 341 | # To access the terminal through the web-interface: 342 | # 1. 
Click on the web-browser link printed out in this action from the github 343 | # workflow terminal 344 | # 2. Press cntrl + c in the new tab that opens up to reveal the terminal 345 | # 3. To activate the conda environment run: 346 | # $ source $CONDA/etc/profile.d/conda.sh 347 | # $ conda activate test 348 | - name: Setup tmate session 349 | if: github.event_name == 'workflow_dispatch' && inputs.debug_enabled && inputs.job_name == matrix.NAME 350 | uses: mxschmitt/action-tmate@v3 351 | with: 352 | limit-access-to-actor: true 353 | 354 | - name: Setup tmate session after failure 355 | if: github.event_name == 'workflow_dispatch' && inputs.debug_enabled && inputs.job_name == matrix.NAME && failure() 356 | uses: mxschmitt/action-tmate@v3 357 | with: 358 | limit-access-to-actor: true 359 | 360 | - name: Display environment after 361 | run: | 362 | conda info 363 | conda list 364 | 365 | echo "=============================================================" 366 | echo "Check installed versions of Python, Numpy and Scipy" 367 | echo "=============================================================" 368 | python -c "import sys; assert str(sys.version).startswith(str(${{ matrix.PY }})), \ 369 | f'Python version {sys.version} is not the requested version (${{ matrix.PY }})'" 370 | 371 | python -c "import numpy; assert str(numpy.__version__).startswith(str(${{ matrix.NUMPY }})), \ 372 | f'Numpy version {numpy.__version__} is not the requested version (${{ matrix.NUMPY }})'" 373 | 374 | python -c "import scipy; assert str(scipy.__version__).startswith(str(${{ matrix.SCIPY }})), \ 375 | f'Scipy version {scipy.__version__} is not the requested version (${{ matrix.SCIPY }})'" 376 | 377 | - name: Run tests 378 | run: | 379 | python -m pip install testflo parameterized six 380 | 381 | echo "=============================================================" 382 | echo "Run tests from pyoptsparse repository" 383 | echo "=============================================================" 384 | 
unset DYLD_LIBRARY_PATH 385 | 386 | if [[ "$BRANCH" == "" ]]; then 387 | BRANCH=`python -c "from build_pyoptsparse import build_info; print(build_info['pyoptsparse']['branch'])"` 388 | BRANCH="-b $BRANCH" 389 | fi 390 | 391 | echo "git clone $BRANCH https://github.com/mdolab/pyoptsparse" 392 | git clone $BRANCH https://github.com/mdolab/pyoptsparse 393 | 394 | cd pyoptsparse/test*/ 395 | testflo --pre_announce --timeout=120 --show_skipped . 396 | 397 | - name: Audit dependencies 398 | id: audit 399 | continue-on-error: true 400 | run: | 401 | python -m pip install pip-audit 402 | 403 | echo "=============================================================" 404 | echo "Scan environment for packages with known vulnerabilities" 405 | echo "=============================================================" 406 | python -m pip_audit 407 | 408 | - name: Slack audit warnings 409 | if: steps.audit.outcome == 'failure' && matrix.NAME != 'Ubuntu Oldest' 410 | uses: act10ns/slack@v2.0.0 411 | with: 412 | webhook-url: ${{ secrets.SLACK_WEBHOOK_URL }} 413 | status: 'warning' 414 | message: | 415 | pip-audit detected vulnerabilities. 416 | ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }} 417 | 418 | - name: Notify slack 419 | uses: act10ns/slack@v2.0.0 420 | with: 421 | webhook-url: ${{ secrets.SLACK_WEBHOOK_URL }} 422 | status: ${{ job.status }} 423 | if: failure() 424 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | 2 | Apache License 3 | Version 2.0, January 2004 4 | http://www.apache.org/licenses/ 5 | 6 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 7 | 8 | 1. Definitions. 9 | 10 | "License" shall mean the terms and conditions for use, reproduction, 11 | and distribution as defined by Sections 1 through 9 of this document. 
12 | 13 | "Licensor" shall mean the copyright owner or entity authorized by 14 | the copyright owner that is granting the License. 15 | 16 | "Legal Entity" shall mean the union of the acting entity and all 17 | other entities that control, are controlled by, or are under common 18 | control with that entity. For the purposes of this definition, 19 | "control" means (i) the power, direct or indirect, to cause the 20 | direction or management of such entity, whether by contract or 21 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 22 | outstanding shares, or (iii) beneficial ownership of such entity. 23 | 24 | "You" (or "Your") shall mean an individual or Legal Entity 25 | exercising permissions granted by this License. 26 | 27 | "Source" form shall mean the preferred form for making modifications, 28 | including but not limited to software source code, documentation 29 | source, and configuration files. 30 | 31 | "Object" form shall mean any form resulting from mechanical 32 | transformation or translation of a Source form, including but 33 | not limited to compiled object code, generated documentation, 34 | and conversions to other media types. 35 | 36 | "Work" shall mean the work of authorship, whether in Source or 37 | Object form, made available under the License, as indicated by a 38 | copyright notice that is included in or attached to the work 39 | (an example is provided in the Appendix below). 40 | 41 | "Derivative Works" shall mean any work, whether in Source or Object 42 | form, that is based on (or derived from) the Work and for which the 43 | editorial revisions, annotations, elaborations, or other modifications 44 | represent, as a whole, an original work of authorship. For the purposes 45 | of this License, Derivative Works shall not include works that remain 46 | separable from, or merely link (or bind by name) to the interfaces of, 47 | the Work and Derivative Works thereof. 
48 | 49 | "Contribution" shall mean any work of authorship, including 50 | the original version of the Work and any modifications or additions 51 | to that Work or Derivative Works thereof, that is intentionally 52 | submitted to Licensor for inclusion in the Work by the copyright owner 53 | or by an individual or Legal Entity authorized to submit on behalf of 54 | the copyright owner. For the purposes of this definition, "submitted" 55 | means any form of electronic, verbal, or written communication sent 56 | to the Licensor or its representatives, including but not limited to 57 | communication on electronic mailing lists, source code control systems, 58 | and issue tracking systems that are managed by, or on behalf of, the 59 | Licensor for the purpose of discussing and improving the Work, but 60 | excluding communication that is conspicuously marked or otherwise 61 | designated in writing by the copyright owner as "Not a Contribution." 62 | 63 | "Contributor" shall mean Licensor and any individual or Legal Entity 64 | on behalf of whom a Contribution has been received by Licensor and 65 | subsequently incorporated within the Work. 66 | 67 | 2. Grant of Copyright License. Subject to the terms and conditions of 68 | this License, each Contributor hereby grants to You a perpetual, 69 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 70 | copyright license to reproduce, prepare Derivative Works of, 71 | publicly display, publicly perform, sublicense, and distribute the 72 | Work and such Derivative Works in Source or Object form. 73 | 74 | 3. Grant of Patent License. 
Subject to the terms and conditions of 75 | this License, each Contributor hereby grants to You a perpetual, 76 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 77 | (except as stated in this section) patent license to make, have made, 78 | use, offer to sell, sell, import, and otherwise transfer the Work, 79 | where such license applies only to those patent claims licensable 80 | by such Contributor that are necessarily infringed by their 81 | Contribution(s) alone or by combination of their Contribution(s) 82 | with the Work to which such Contribution(s) was submitted. If You 83 | institute patent litigation against any entity (including a 84 | cross-claim or counterclaim in a lawsuit) alleging that the Work 85 | or a Contribution incorporated within the Work constitutes direct 86 | or contributory patent infringement, then any patent licenses 87 | granted to You under this License for that Work shall terminate 88 | as of the date such litigation is filed. 89 | 90 | 4. Redistribution. 
You may reproduce and distribute copies of the 91 | Work or Derivative Works thereof in any medium, with or without 92 | modifications, and in Source or Object form, provided that You 93 | meet the following conditions: 94 | 95 | (a) You must give any other recipients of the Work or 96 | Derivative Works a copy of this License; and 97 | 98 | (b) You must cause any modified files to carry prominent notices 99 | stating that You changed the files; and 100 | 101 | (c) You must retain, in the Source form of any Derivative Works 102 | that You distribute, all copyright, patent, trademark, and 103 | attribution notices from the Source form of the Work, 104 | excluding those notices that do not pertain to any part of 105 | the Derivative Works; and 106 | 107 | (d) If the Work includes a "NOTICE" text file as part of its 108 | distribution, then any Derivative Works that You distribute must 109 | include a readable copy of the attribution notices contained 110 | within such NOTICE file, excluding those notices that do not 111 | pertain to any part of the Derivative Works, in at least one 112 | of the following places: within a NOTICE text file distributed 113 | as part of the Derivative Works; within the Source form or 114 | documentation, if provided along with the Derivative Works; or, 115 | within a display generated by the Derivative Works, if and 116 | wherever such third-party notices normally appear. The contents 117 | of the NOTICE file are for informational purposes only and 118 | do not modify the License. You may add Your own attribution 119 | notices within Derivative Works that You distribute, alongside 120 | or as an addendum to the NOTICE text from the Work, provided 121 | that such additional attribution notices cannot be construed 122 | as modifying the License. 
123 | 124 | You may add Your own copyright statement to Your modifications and 125 | may provide additional or different license terms and conditions 126 | for use, reproduction, or distribution of Your modifications, or 127 | for any such Derivative Works as a whole, provided Your use, 128 | reproduction, and distribution of the Work otherwise complies with 129 | the conditions stated in this License. 130 | 131 | 5. Submission of Contributions. Unless You explicitly state otherwise, 132 | any Contribution intentionally submitted for inclusion in the Work 133 | by You to the Licensor shall be under the terms and conditions of 134 | this License, without any additional terms or conditions. 135 | Notwithstanding the above, nothing herein shall supersede or modify 136 | the terms of any separate license agreement you may have executed 137 | with Licensor regarding such Contributions. 138 | 139 | 6. Trademarks. This License does not grant permission to use the trade 140 | names, trademarks, service marks, or product names of the Licensor, 141 | except as required for reasonable and customary use in describing the 142 | origin of the Work and reproducing the content of the NOTICE file. 143 | 144 | 7. Disclaimer of Warranty. Unless required by applicable law or 145 | agreed to in writing, Licensor provides the Work (and each 146 | Contributor provides its Contributions) on an "AS IS" BASIS, 147 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 148 | implied, including, without limitation, any warranties or conditions 149 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 150 | PARTICULAR PURPOSE. You are solely responsible for determining the 151 | appropriateness of using or redistributing the Work and assume any 152 | risks associated with Your exercise of permissions under this License. 153 | 154 | 8. Limitation of Liability. 
In no event and under no legal theory, 155 | whether in tort (including negligence), contract, or otherwise, 156 | unless required by applicable law (such as deliberate and grossly 157 | negligent acts) or agreed to in writing, shall any Contributor be 158 | liable to You for damages, including any direct, indirect, special, 159 | incidental, or consequential damages of any character arising as a 160 | result of this License or out of the use or inability to use the 161 | Work (including but not limited to damages for loss of goodwill, 162 | work stoppage, computer failure or malfunction, or any and all 163 | other commercial damages or losses), even if such Contributor 164 | has been advised of the possibility of such damages. 165 | 166 | 9. Accepting Warranty or Additional Liability. While redistributing 167 | the Work or Derivative Works thereof, You may choose to offer, 168 | and charge a fee for, acceptance of support, warranty, indemnity, 169 | or other liability obligations and/or rights consistent with this 170 | License. However, in accepting such obligations, You may act only 171 | on Your own behalf and on Your sole responsibility, not on behalf 172 | of any other Contributor, and only if You agree to indemnify, 173 | defend, and hold each Contributor harmless for any liability 174 | incurred by, or claims asserted against, such Contributor by reason 175 | of your accepting any such warranty or additional liability. 176 | 177 | END OF TERMS AND CONDITIONS 178 | 179 | -------------------------------------------------------------------------------- /NOTICE: -------------------------------------------------------------------------------- 1 | Copyright 2023 OpenMDAO 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this software except in compliance with the License. 
5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 14 | 15 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # build_pyoptsparse 2 | This script was written to overcome the complexities of building pyOptSparse and IPOPT. It can download and install IPOPT and other dependencies and pyOptSparse itself. This behavior can be adjusted with various command-line switches. 3 | 4 | **conda** is supported and will be used if it has been activated, unless disabled by command line arguments. In either case, software will be installed under the virtual environment folder. If a conda environment is active and **mamba** is available, it will be used to install/uninstall to improve performance. 5 | 6 | Alternatively, if a **venv** environment is active, the script will install to that virtual environment's folder. 7 | 8 | For dependencies that require building, temporary directories are used then removed by default after the item has been installed. 9 | 10 | By default, MUMPS is used as the linear solver, but if HSL or PARDISO are available, one of those can be selected instead. 11 | 12 | The script checks the environment by testing for commands that are required to build or install pyOptSparse and its dependencies. 13 | 14 | If you have a previous installation of pyOptSparse and its dependencies and are encountering errors when running this script, try using the --uninstall switch first to remove old include/library files. 15 | 16 | To install: 17 | 1. 
Activate your virtual environment 18 | 2. Git clone the repository 19 | 3. Run `python -m pip install ./build_pyoptsparse`. 20 | If ParOpt support is desired, run `python -m pip install './build_pyoptsparse[paropt]'` 21 | 22 | ## Usage 23 | ``` 24 | usage: build_pyoptsparse [-h] [-a] [-b BRANCH] [-c CONDA_CMD] [-d] [-e] [-f] [-k] [-i] 25 | [-l {mumps,hsl,pardiso}] [-m] [-n] [-o] [-p PREFIX] [-s SNOPT_DIR] 26 | [-t HSL_TAR_FILE] [-u] [-v] 27 | 28 | Download, configure, build, and/or install pyOptSparse with dependencies. 29 | Temporary working directories are created, which are removed after 30 | installation unless -d is used. 31 | 32 | When running with a conda environment active, all packages that can be installed 33 | with conda will be, except when command line arguments modify this behavior. If 34 | found, mamba will be used to install/uninstall unless -m is invoked. 35 | 36 | 37 | options: 38 | -h, --help show this help message and exit 39 | -a, --paropt Add ParOpt support. Default: no ParOpt 40 | -b BRANCH, --branch BRANCH 41 | pyOptSparse git branch. Default: v2.9.2 42 | -c CONDA_CMD, --conda-cmd CONDA_CMD 43 | Command to install packages with if conda is used. Default: conda 44 | -d, --no-delete Do not erase the build directories after completion. 45 | -e, --ignore-conda Do not install conda packages, install under conda environment, or 46 | uninstall from the conda environment. 47 | -f, --force-build Build/rebuild packages even if found to be installed or can be installed 48 | with conda. 49 | -k, --no-sanity-check 50 | Skip the sanity checks. 51 | -i, --intel Build with the Intel compiler suite instead of GNU. 52 | -l {mumps,hsl,pardiso}, --linear-solver {mumps,hsl,pardiso} 53 | Which linear solver to use with IPOPT. Default: mumps 54 | -m, --ignore-mamba Do not use mamba to install conda packages. Default: Use mamba if found 55 | -n, --no-install Prepare, but do not build/install pyOptSparse itself. 
Default: 56 | install 57 | -o, --no-ipopt Do not install IPOPT. Default: install IPOPT 58 | -p PREFIX, --prefix PREFIX 59 | Where to install if not a conda/venv environment. Default: 60 | $HOME/pyoptsparse 61 | -s SNOPT_DIR, --snopt-dir SNOPT_DIR 62 | Include SNOPT from SNOPT-DIR. Default: no SNOPT 63 | -t HSL_TAR_FILE, --hsl-tar-file HSL_TAR_FILE 64 | If HSL is the linear solver, use this as the path to the tar file of the 65 | HSL source. E.g. -t ../../coinhsl-archive-2014.01.17.tar.gz 66 | -u, --uninstall Attempt to remove an installation previously built from source (using the 67 | same --prefix) and/or installed with conda in the same environment, then 68 | exit. Default: Do not uninstall 69 | -v, --verbose Show output from git, configure, make, conda, etc. and expand all 70 | environment variables. 71 | 72 | NOTES: 73 | When using HSL as the linear solver, the source code tar file can be obtained 74 | from http://www.hsl.rl.ac.uk/ipopt/ 75 | If PARDISO is selected as the linear solver, the Intel compiler suite with MKL 76 | must be available. 77 | 78 | Examples: 79 | build_pyoptsparse 80 | build_pyoptsparse --intel --linear-solver=pardiso 81 | build_pyoptsparse -l hsl -n -t ../../coinhsl-archive-2014.01.17.tar.gz 82 | ``` 83 | -------------------------------------------------------------------------------- /__init__.py: -------------------------------------------------------------------------------- 1 | __version__ = '2.0.12' 2 | -------------------------------------------------------------------------------- /__main__.py: -------------------------------------------------------------------------------- 1 | from . 
import perform_install 2 | 3 | if __name__ == '__main__': 4 | perform_install() 5 | -------------------------------------------------------------------------------- /build_pyoptsparse.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | import argparse 3 | import os 4 | import platform 5 | import re 6 | import shutil 7 | import sys 8 | import subprocess 9 | import tarfile 10 | from pathlib import Path, PurePath 11 | import tempfile 12 | from colors import * 13 | from shutil import which 14 | from packaging.version import parse 15 | import numpy 16 | 17 | 18 | # Default options that the user can change with command line switches 19 | opts = { 20 | 'prefix': str(Path(Path.home() / 'pyoptsparse')), 21 | 'linear_solver': 'mumps', 22 | 'build_pyoptsparse': True, 23 | 'intel_compiler_suite': False, 24 | 'snopt_dir': None, 25 | 'hsl_tar_file': None, 26 | 'include_paropt': False, 27 | 'include_ipopt': True, 28 | 'keep_build_dir': False, 29 | 'check_sanity': True, 30 | 'conda_cmd': 'conda', 31 | 'force_build': False, 32 | 'ignore_conda': False, 33 | 'ignore_mamba': False, 34 | 'verbose': False, 35 | 'compile_required': True, # Not set directly by the user, but determined from other options 36 | 'uninstall': False, 37 | 'pyoptsparse_version': None, # Parsed pyOptSparse version, set by finish_setup() 38 | 'make_name': 'make', 39 | 'fall_back': False, 40 | 'pip_cmd': 'pip' 41 | } 42 | 43 | # Information about the host, status, and constants 44 | sys_info = { 45 | 'gcc_major_ver': -1, 46 | 'gcc_is_apple_clang': False, 47 | 'line_color': 'white', 48 | 'msg_color': 'gray', 49 | 'gnu_sanity_check_done': False, 50 | 'python_sanity_check_done': False, 51 | 'compile_cores': int(os.cpu_count()/2), 52 | 'sys_name': platform.system(), 53 | 'conda_activate_dir': None, 54 | 'conda_deactivate_dir': None, 55 | 'conda_env_script': 'pyoptsparse_lib.sh', 56 | 'conda_forge_available': False 57 | } 58 | 59 | # Where to find each 
package, which branch to use if obtained by git, 60 | # and which include file to test to see if it's already installed 61 | build_info = { 62 | 'metis': { 63 | 'branch': 'releases/2.0.0', 64 | 'url': 'https://github.com/coin-or-tools/ThirdParty-Metis.git', 65 | 'src_lib_glob': 'libcoinmetis*', 66 | 'include_subdir': 'metis', 67 | 'include_file': 'metis.h' 68 | }, 69 | 'mumps': { 70 | 'branch': 'releases/3.0.2', 71 | 'url': 'https://github.com/coin-or-tools/ThirdParty-Mumps.git', 72 | 'src_lib_glob': 'libcoinmumps*', 73 | 'include_subdir': 'mumps', 74 | 'include_file': 'mumps_c_types.h' 75 | }, 76 | 'ipopt': { 77 | 'branch': 'releases/3.14.7', 78 | 'url': 'https://github.com/coin-or/Ipopt.git', 79 | 'src_lib_glob': 'lib*ipopt*', 80 | 'include_subdir': '.', 81 | 'include_glob_list': ['Ip*.hpp', 'Sens*.hpp', 'Ip*.h', 'Ip*.inc'], 82 | 'include_file': 'IpoptConfig.h' 83 | }, 84 | 'pyoptsparse': { 85 | 'branch': 'v2.10.1' if parse(numpy.__version__) < parse('2.0') else 'v2.13.1', 86 | 'url': 'https://github.com/mdolab/pyoptsparse.git', 87 | }, 88 | 'hsl': { 89 | 'branch': 'releases/2.2.1', 90 | 'url': 'https://github.com/coin-or-tools/ThirdParty-HSL', 91 | 'src_lib_glob': 'libcoinhsl*', 92 | 'include_subdir': 'hsl', 93 | 'include_file': 'CoinHslConfig.h' 94 | }, 95 | 'paropt': { 96 | 'branch': 'v2.1.4', 97 | 'url': 'https://github.com/smdogroup/paropt.git', 98 | 'src_lib_glob': 'libparopt*', 99 | } 100 | } 101 | 102 | def process_command_line(): 103 | """ Validate command line arguments and update options, or print usage and exit. """ 104 | parser = argparse.ArgumentParser( 105 | formatter_class=argparse.RawDescriptionHelpFormatter, 106 | description=''' 107 | Download, configure, build, and/or install pyOptSparse with dependencies. 108 | Temporary working directories are created, which are removed after 109 | installation unless -d is used. 
110 | 111 | When running with a conda environment active, all packages that can be installed 112 | with conda will be, except when command line arguments modify this behavior. If 113 | found, mamba will be used to install/uninstall unless -m is invoked. 114 | ''', 115 | epilog=''' 116 | NOTES: 117 | When using HSL as the linear solver, the source code tar file can be obtained 118 | from http://www.hsl.rl.ac.uk/ipopt/ 119 | If PARDISO is selected as the linear solver, the Intel compiler suite with MKL 120 | must be available. 121 | 122 | Examples: 123 | build_pyoptsparse 124 | build_pyoptsparse --intel --linear-solver=pardiso 125 | build_pyoptsparse -l hsl -n -t ../../coinhsl-archive-2014.01.17.tar.gz 126 | ''' 127 | ) 128 | parser.add_argument("-a", "--paropt", 129 | help="Add ParOpt support. Default: no ParOpt", 130 | action="store_true", 131 | default=opts['include_paropt']) 132 | parser.add_argument("-b", "--branch", 133 | help=f"pyOptSparse release. \ 134 | Default: {build_info['pyoptsparse']['branch']}", 135 | default=build_info['pyoptsparse']['branch']) 136 | parser.add_argument("-c", "--conda-cmd", 137 | help=f"Command to install packages with if conda is used. 
\ 138 | Default: {opts['conda_cmd']}") 139 | parser.add_argument("-d", "--no-delete", 140 | help="Do not erase the build directories after completion.", 141 | action="store_true", 142 | default=opts['keep_build_dir']) 143 | parser.add_argument("-e", "--ignore-conda", 144 | help="Do not install conda packages, install under conda environment, \ 145 | or uninstall from the conda environment.", 146 | action="store_true", 147 | default=opts['ignore_conda']) 148 | parser.add_argument("-f", "--force-build", 149 | help="Build/rebuild packages even if found to be installed or \ 150 | can be installed with conda.", 151 | action="store_true", 152 | default=opts['force_build']) 153 | parser.add_argument("-g", "--fall-back", 154 | help="If a conda package fails to install, fall back to \ 155 | building from source. If selected, the build environment is \ 156 | tested in case it is needed.", 157 | action="store_true", 158 | default=opts['fall_back']) 159 | parser.add_argument("-k", "--no-sanity-check", 160 | help="Skip the sanity checks.", 161 | action="store_true", 162 | default=not opts['check_sanity']) 163 | parser.add_argument("-i", "--intel", 164 | help="Build with the Intel compiler suite instead of GNU.", 165 | action="store_true", 166 | default=opts['intel_compiler_suite']) 167 | parser.add_argument("-l", "--linear-solver", 168 | help="Which linear solver to use with IPOPT. Default: mumps", 169 | choices=['mumps', 'hsl', 'pardiso'], 170 | default=opts['linear_solver']) 171 | parser.add_argument("-m", "--ignore-mamba", 172 | help="Do not use mamba to install conda packages. \ 173 | Default: Use mamba if found", 174 | action="store_true", 175 | default=opts['ignore_mamba']) 176 | parser.add_argument("-n", "--no-install", 177 | help=f"Prepare, but do {yellow('not')} build/install pyOptSparse itself. 
\ 178 | Default: install", 179 | action="store_true", 180 | default=not opts['build_pyoptsparse']) 181 | parser.add_argument("-o", "--no-ipopt", 182 | help="Do not install IPOPT. Default: install IPOPT", 183 | action="store_true", 184 | default=not opts['include_ipopt']), 185 | parser.add_argument("-p", "--prefix", 186 | help=f"Where to install if not a conda/venv environment. Default: {opts['prefix']}", 187 | default=opts['prefix']) 188 | parser.add_argument("--pip-cmd", 189 | help=f"pip command to use. Set to to --pip-cmd='uv pip' if using uv. Default: {opts['pip_cmd']}", 190 | default=opts['pip_cmd']) 191 | parser.add_argument("-s", "--snopt-dir", 192 | help="Include SNOPT from SNOPT-DIR. Default: no SNOPT", 193 | default=opts['snopt_dir']) 194 | parser.add_argument("-t", "--hsl-tar-file", 195 | help="If HSL is the linear solver, use this as the path \ 196 | to the tar file of the HSL source. \ 197 | E.g. -t ../../coinhsl-archive-2014.01.17.tar.gz", 198 | default=opts['hsl_tar_file']) 199 | parser.add_argument("-u", "--uninstall", 200 | help="Attempt to remove include/lib files previously built from source \ 201 | (using the same --prefix) and/or installed with conda in the same \ 202 | environment, then exit. Default: Do not uninstall", 203 | action="store_true", 204 | default=opts['uninstall']) 205 | parser.add_argument("-v", "--verbose", 206 | help="Show output from git, configure, make, conda, etc. 
and expand \ 207 | all environment variables.", 208 | action="store_true", 209 | default=opts['verbose']) 210 | 211 | args = parser.parse_args() 212 | 213 | # Update options with user selections 214 | opts['include_paropt'] = args.paropt 215 | opts['include_ipopt'] = not args.no_ipopt 216 | build_info['pyoptsparse']['branch'] = args.branch 217 | 218 | # Determine conda settings 219 | opts['ignore_conda'] = args.ignore_conda 220 | opts['ignore_mamba'] = args.ignore_mamba 221 | 222 | if opts['ignore_conda'] is False and conda_is_active(): 223 | if args.conda_cmd is not None: 224 | opts['conda_cmd'] = args.conda_cmd 225 | else: 226 | if opts['ignore_mamba'] is True or which('mamba') is None: 227 | opts['conda_cmd'] = 'conda' 228 | else: 229 | opts['conda_cmd'] = 'mamba' 230 | 231 | # Make sure conda forge channel is available 232 | if args.uninstall is False: 233 | note('Checking for conda-forge') 234 | cmd_list=['info','--unsafe-channels'] 235 | result = run_conda_cmd(cmd_list) 236 | 237 | if re.search(r'conda.*forge', result.stdout) is not None: 238 | sys_info['conda_forge_available'] = True 239 | note_ok() 240 | else: 241 | print(f'{yellow("WARNING")}: The conda-forge channel is not configured, cannot \ 242 | install conda packages. 
Falling back to building from source.') 243 | opts['compile_required'] = True 244 | 245 | opts['keep_build_dir'] = args.no_delete 246 | opts['force_build'] = args.force_build 247 | opts['fall_back'] = args.fall_back 248 | opts['check_sanity'] = not args.no_sanity_check 249 | opts['linear_solver'] = args.linear_solver 250 | if opts['linear_solver'] == 'pardiso': 251 | opts['intel_compiler_suite'] = True 252 | else: 253 | opts['intel_compiler_suite'] = args.intel 254 | 255 | opts['prefix'] = args.prefix 256 | opts['build_pyoptsparse'] = not args.no_install 257 | opts['pip_cmd'] = args.pip_cmd 258 | opts['snopt_dir'] = args.snopt_dir 259 | opts['hsl_tar_file'] = args.hsl_tar_file 260 | opts['verbose'] = args.verbose 261 | opts['uninstall'] = args.uninstall 262 | 263 | def announce(msg:str): 264 | """ 265 | Print an important message in color with a line above and below. 266 | 267 | Parameters 268 | ---------- 269 | msg : str 270 | The information to be printed. 271 | """ 272 | print(color(f' {msg} '.center(79, '-'), '#d0d0d0', 'blue')) 273 | 274 | def note(msg:str): 275 | """ 276 | Print a quick status message. If not in verbose mode, do not terminate with 277 | a newline because the result of the operation will print after. 278 | 279 | Parameters 280 | ---------- 281 | msg : str 282 | The information to be printed. 283 | """ 284 | if opts['verbose'] is False: 285 | print(msg, end="... ") 286 | sys.stdout.flush() 287 | else: 288 | print(msg + '...') 289 | 290 | def note_ok(): 291 | """ Print a green OK message to follow up a note() with. """ 292 | if opts['verbose'] is False: 293 | print(green('OK')) 294 | 295 | def note_failed(): 296 | """ Print a red failure message to follow up a note() with. """ 297 | if opts['verbose'] is False: 298 | print(red('failed')) 299 | 300 | def code(msg:str)->str: 301 | """ 302 | Create a message with "code" hilighting. 303 | 304 | Parameters 305 | ---------- 306 | msg : str 307 | The information to be printed. 
308 | """ 309 | return color(msg, 'orange', style='underline') 310 | 311 | def try_fallback(pkg:str, e:Exception): 312 | """ 313 | When a conda package installation fails, this is called to print status info 314 | and determine whether it's possible to try building from source. 315 | 316 | Parameters 317 | ---------- 318 | pkg : str 319 | The name of the package that failed to install. 320 | e : Exception 321 | That exception that was caught when the failure occurred. 322 | """ 323 | 324 | note_failed() 325 | if opts['fall_back'] is True: 326 | print(yellow(f'Installing {pkg} with conda failed, falling back to source build.')) 327 | else: 328 | print(yellow('Use the --fall-back switch to build source on failed conda installs.')) 329 | raise e 330 | 331 | def initialize(): 332 | """ Perform a collection of setup tasks """ 333 | global dir_stack 334 | dir_stack = [] 335 | 336 | if conda_is_active() and (opts['ignore_conda'] is False): 337 | opts['prefix']=os.environ['CONDA_PREFIX'] 338 | sys_info['conda_activate_dir'] = Path(opts['prefix']) / 'etc' / 'conda' / 'activate.d' 339 | sys_info['conda_deactivate_dir'] = Path(opts['prefix']) / 'etc' / 'conda' / 'deactivate.d' 340 | elif venv_is_active(): 341 | opts['prefix']=os.environ['VIRTUAL_ENV'] 342 | 343 | def conda_is_active() -> bool: 344 | """ Determine if a conda environment is active. """ 345 | return ('CONDA_PREFIX' in os.environ) 346 | 347 | def allow_install_with_conda() -> bool: 348 | """ Determine if we can install with conda. """ 349 | return conda_is_active() and (opts['ignore_conda'] is False) and \ 350 | (sys_info['conda_forge_available'] is True) 351 | 352 | def venv_is_active() -> bool: 353 | """ Determine if a Python virtual environment is active. """ 354 | return ('VIRTUAL_ENV' in os.environ) 355 | 356 | def subst_env_for_path(path:str)->str: 357 | """ 358 | If a well-known env var is the initial part of the path, substitute the name 359 | of that var to make it easier to read. 

    Parameters
    ----------
    path : str
        The path to check for environment variables.

    Returns
    -------
    str
        The possibly updated path.
    """

    # In verbose mode, show the real expanded path instead of abbreviating.
    if opts['verbose'] is True: return path

    for testvar in ['TMPDIR', 'TMP_DIR', 'TEMP_DIR', 'CONDA_PREFIX', 'VIRTUAL_ENV']:
        # re.match() anchors at the start of the string, so this only fires
        # when the path begins with the variable's value.
        if testvar in os.environ and re.match(os.environ[testvar], path) is not None:
            new_path = PurePath(re.sub(os.environ[testvar], f'${testvar}/', path))
            return str(new_path)

    return path

def run_cmd(cmd_list, do_check=True, raise_error=True) -> subprocess.CompletedProcess:
    """
    Run a command with provided arguments. Hide output unless there's an error
    or verbose mode is enabled.

    Parameters
    ----------
    cmd_list : list
        Each token of the command line is a separate member of the list.

    do_check : bool
        If true, a non-zero exit status raises CalledProcessError, which is
        handled below.

    raise_error: bool
        Only matters if do_check is true. If true, re-raise the exception when
        the process returns a non-zero status. If false, swallow it, in which
        case None is returned instead of a CompletedProcess.

    Returns
    -------
    subprocess.CompletedProcess
        The result of the finished command, or None when the command failed
        and raise_error is false.
    """
    result = None

    try:
        result = subprocess.run(cmd_list, check=do_check, capture_output=True, text=True)
    except subprocess.CalledProcessError as inst:
        if opts['verbose'] is True:
            print(inst.stdout, inst.stderr)
        if raise_error is True:
            raise inst

    if opts['verbose'] is True and result is not None:
        print(result.stdout, result.stderr)

    return result

def check_make(errors:list):
    """
    Find the best make command and test its viability.

    Parameters
    ----------
    errors : list
        Accumulated pre-check error messages.
427 | """ 428 | 429 | if 'MAKE' in os.environ: 430 | opts['make_name'] = os.environ['MAKE'] 431 | elif which('gmake') is not None: 432 | opts['make_name'] = 'gmake' 433 | 434 | if find_required_command(opts['make_name'], errors): 435 | # If the make command is found, test whether it's GNU make 436 | cmd_list=[opts['make_name'], '--version'] 437 | result = subprocess.run(cmd_list, check=False, capture_output=True, text=True) 438 | 439 | if str(result.stdout).find('GNU Make') == -1: 440 | print(f'{yellow("WARNING")}: {opts["make_name"]} is not GNU Make. ' 441 | 'Source code builds may fail.') 442 | 443 | def make_install(parallel_procs:int=sys_info['compile_cores'], make_args = None, do_install=True): 444 | """ 445 | Run 'make' followed by 'make install' in the current directory. 446 | 447 | Parameters 448 | ---------- 449 | parallel_procs : int 450 | Start this many parallel make processes. Defaults to half of the system cores. 451 | Some packages fail when built in parallel, so 1 should be used in those cases. 452 | """ 453 | note('Building') 454 | os.environ['MAKEFLAGS'] = f'-j {str(parallel_procs)}' 455 | make_cmd=[opts['make_name']] 456 | if make_args is not None: 457 | make_cmd.extend(make_args) 458 | run_cmd(cmd_list=make_cmd) 459 | note_ok() 460 | 461 | if do_install is True: 462 | note('Installing') 463 | run_cmd(cmd_list=[opts['make_name'],'install']) 464 | note_ok() 465 | 466 | def run_conda_cmd(cmd_args): 467 | """ 468 | Shorthand for performing a conda operation. 469 | 470 | Parameters 471 | ---------- 472 | cmd_list : list 473 | Each token of the command line is a separate member of the list. The conda 474 | executable name is prepended, so should not be included in the list. 475 | 476 | Returns 477 | ------- 478 | subprocess.CompletedProcess 479 | The result of the finished command. 
480 | """ 481 | cmd_list = [opts['conda_cmd']] 482 | cmd_list.extend(cmd_args) 483 | return run_cmd(cmd_list) 484 | 485 | def pip_install(pip_install_args, pkg_desc='packages'): 486 | """ 487 | Shorthand for performing a 'pip install' operation. 488 | 489 | Parameters 490 | ---------- 491 | pip_install_args : list 492 | Each token of the command line is a separate member of the list. The 493 | is prepended with 'python -m pip install'; '-q' is added when not verbose. 494 | """ 495 | cmd_list = opts['pip_cmd'].split() + ['install'] 496 | if opts['verbose'] is False: 497 | cmd_list.append('-q') 498 | cmd_list.extend(pip_install_args) 499 | note(f'Installing {pkg_desc} with {opts["pip_cmd"]}') 500 | run_cmd(cmd_list) 501 | note_ok() 502 | 503 | def install_conda_pkg(pkg_name:str): 504 | """ 505 | Shorthand for performing a 'conda install' operation for a single package. 506 | 507 | Parameters 508 | ---------- 509 | pkg_name : str 510 | The name of the package to install. 511 | """ 512 | note(f'Installing {pkg_name.upper()} with conda') 513 | install_args = ['install', '-q', '-y', pkg_name] 514 | run_conda_cmd(cmd_args=install_args) 515 | note_ok() 516 | 517 | def pushd(dirname): 518 | """ 519 | Preserve the current directory name in a stack, then change to the specified directory. 520 | 521 | Parameters 522 | ---------- 523 | dirname : str 524 | The absolute or relative name of the folder to change to. 525 | """ 526 | dir_stack.append(str(Path.cwd())) 527 | os.chdir(dirname) 528 | print(f'Changed directory to {code(str(subst_env_for_path(dirname)))}') 529 | 530 | def popd(): 531 | """ Change to the top directory name on the stack of names. """ 532 | dirname = dir_stack.pop() 533 | os.chdir(dirname) 534 | print(f'Changed directory back to {code(subst_env_for_path(dirname))}') 535 | 536 | def get_coin_inc_dir()->str: 537 | """ 538 | Determine what the path to the MUMPS/METIS/IPOPT include directory is, if it exists. 
539 | 540 | Returns 541 | ------- 542 | str 543 | The absolute path to the correct existing directory, or None if not found. 544 | """ 545 | coin_inc_dirs = ['coin-or', 'coin'] 546 | for coin_dir in coin_inc_dirs: 547 | coin_path = Path(opts["prefix"]) / 'include' / coin_dir 548 | if coin_path.is_dir(): 549 | return str(coin_path) 550 | 551 | return None 552 | 553 | def get_coin_lib_name(pkg:str)->str: 554 | """ 555 | Determine whether the required lib starts with 'lib' or 'libcoin'. 556 | 557 | Parameters 558 | ---------- 559 | pkg : str 560 | The name of the library to test. 561 | 562 | Returns 563 | ------- 564 | str 565 | The pkg parameter prefaced with either 'coin' or nothing. 566 | """ 567 | lib_vars = ['coin', ''] 568 | 569 | for lv in lib_vars: 570 | lib_glob = f"lib{lv}{pkg}*" 571 | found_libs = sorted(Path(f"{opts['prefix']}/lib").glob(lib_glob)) 572 | if len(found_libs) > 0: 573 | return f'{lv}{pkg}' 574 | 575 | return None 576 | 577 | def git_clone(build_key:str, auto_delete:bool=True): 578 | """ 579 | Create a temporary directory, change to it, and clone the repository associated 580 | with the specified package key. 581 | 582 | Parameters 583 | ---------- 584 | build_key : str 585 | A key in the build_info dict with info about the selected package. 586 | auto_delete : bool 587 | Override the 'keep_build_dir' setting. Auto-delete if true, leave if false. 588 | 589 | Returns 590 | ------- 591 | context manager OR str 592 | When the 'keep_build_dir' option is False, an object with info about the directory, 593 | which causes the directory to be cleaned up and removed when it goes out of scope. 594 | When the 'keep_build_dir' option is True, returns a str with the name of the folder. 
595 | """ 596 | d = build_info[build_key] 597 | announce(f'Building {build_key.upper()} from source code') 598 | if opts['keep_build_dir'] is True or auto_delete is False: 599 | build_dir = tempfile.mkdtemp() 600 | dir_name = build_dir 601 | print(f"Remember to delete {code(subst_env_for_path(dir_name))} afterwards.") 602 | else: 603 | build_dir = tempfile.TemporaryDirectory() 604 | dir_name = build_dir.name 605 | 606 | note(f'Cloning {d["url"]}') 607 | run_cmd(cmd_list=['git', 'clone', '-q', d['url'], dir_name]) 608 | note_ok() 609 | pushd(dir_name) 610 | 611 | if d["branch"]: 612 | # We don't care about the "detached HEAD" warning: 613 | run_cmd(cmd_list=['git', 'config', '--local', 'advice.detachedHead', 'false']) 614 | note(f'Checking out branch {d["branch"]}') 615 | run_cmd(cmd_list=['git', 'checkout', '-q', d['branch']]) 616 | 617 | return build_dir 618 | 619 | def allow_build(build_key:str) -> bool: 620 | """ 621 | Determine whether the specified package should be built from source. 622 | 623 | Parameters 624 | ---------- 625 | build_key : str 626 | A key in the build_info dict with info about the selected package. 627 | 628 | Returns 629 | ------- 630 | bool 631 | True if the package is not yet installed or force_build is true, false if already built. 632 | """ 633 | coin_dir = get_coin_inc_dir() 634 | if coin_dir is None: 635 | build_ok = True 636 | else: 637 | d = build_info[build_key] 638 | include_file = Path(coin_dir) / d['include_subdir'] / d['include_file'] 639 | build_ok = opts['force_build'] or not include_file.is_file() 640 | 641 | if build_ok is False: 642 | print(f"{build_key.upper()} is already installed under {opts['prefix']}, {yellow('skipping build')}.") 643 | 644 | return build_ok 645 | 646 | def install_metis_from_src(): 647 | """ Git clone the METIS repo, build the library, and install it and the include files. 
""" 648 | if not allow_build('metis'): 649 | return 650 | 651 | metis_dir = git_clone('metis') 652 | os.environ['METIS_DIR'] = metis_dir if isinstance(metis_dir, str) else metis_dir.name 653 | 654 | run_cmd(['./get.Metis']) 655 | os.environ['CFLAGS'] = '-Wno-implicit-function-declaration' 656 | note("Running configure") 657 | run_cmd(cmd_list=['./configure', f'--prefix={opts["prefix"]}']) 658 | note_ok() 659 | make_install() 660 | popd() 661 | 662 | def install_metis(): 663 | """ Install METIS either through conda or building. """ 664 | if allow_install_with_conda() and opts['force_build'] is False: 665 | try: 666 | install_conda_pkg('metis') 667 | os.environ['METIS_DIR'] = os.environ['CONDA_PREFIX'] 668 | return 669 | except Exception as e: 670 | try_fallback('METIS', e) 671 | 672 | install_metis_from_src() 673 | 674 | def get_common_solver_config_cmd(): 675 | """ Gets common configuration options for Mumps and HSL solvers. """ 676 | 677 | coin_dir = get_coin_inc_dir() 678 | cflags = f'-w -I{opts["prefix"]}/include -I{coin_dir} -I{coin_dir}/metis' 679 | fcflags = cflags 680 | if sys_info['gcc_major_ver'] >= 10: 681 | fcflags = '-fallow-argument-mismatch ' + fcflags 682 | 683 | metis_lib = get_coin_lib_name('metis') 684 | metis_lflags = f'-L{opts["prefix"]}/lib -l{metis_lib}' 685 | if platform.system() == "Linux": 686 | metis_lflags += ' -lm' 687 | 688 | config_opts = [ 689 | '--with-metis', 690 | f'--with-metis-lflags={metis_lflags}', 691 | f'--with-metis-cflags={cflags}', 692 | f'--prefix={opts["prefix"]}', 693 | f'CFLAGS={cflags}', 694 | f'FCFLAGS={fcflags}' 695 | ] 696 | 697 | # Disable OpenMP support if we are on macOS building with Apple Clang. 
698 | if sys_info['gcc_is_apple_clang']: 699 | config_opts.append('--disable-openmp') 700 | 701 | cnf_cmd_list = ['./configure'] 702 | cnf_cmd_list.extend(config_opts) 703 | return cnf_cmd_list 704 | 705 | def install_mumps_from_src(): 706 | """ Git clone the MUMPS repo, build the library, and install it and the include files. """ 707 | if not allow_build('mumps'): 708 | return 709 | 710 | build_dir = git_clone('mumps') 711 | run_cmd(['./get.Mumps']) 712 | 713 | cnf_cmd_list = get_common_solver_config_cmd() 714 | 715 | note("Running configure") 716 | run_cmd(cmd_list=cnf_cmd_list) 717 | note_ok() 718 | 719 | make_install(1) # MUMPS build can fail with parallel make 720 | popd() 721 | 722 | def install_paropt_from_src(): 723 | """ 724 | Git clone the PAROPT repo, build the library, and install it and the include files. 725 | """ 726 | build_dir = git_clone('paropt') 727 | 728 | # Use build defaults as per ParOpt instructions: 729 | Path('Makefile.in.info').rename('Makefile.in') 730 | make_vars = [f'PAROPT_DIR={Path.cwd()}'] 731 | if sys_info['sys_name'] == 'Darwin': 732 | make_vars.extend(['SO_EXT=dylib', 'SO_LINK_FLAGS=-fPIC -dynamiclib -undefined dynamic_lookup', 733 | f'METIS_INCLUDE=-I{os.environ["METIS_DIR"]}/include/', 734 | f'METIS_LIB=-L{os.environ["METIS_DIR"]}/lib/', 735 | '-lmetis']) 736 | else: 737 | make_vars.extend(['SO_EXT=so', 'SO_LINK_FLAGS=-fPIC -shared', 738 | f'METIS_INCLUDE=-I{os.environ["METIS_DIR"]}/include/', 739 | f'METIS_LIB=-L{os.environ["METIS_DIR"]}/lib/', 740 | '-lmetis']) 741 | 742 | make_install(make_args=make_vars, do_install=False) 743 | pip_install(['./'], pkg_desc='paropt') 744 | 745 | lib_dest_dir = str(Path(opts['prefix']) / 'lib') 746 | note(f'Copying library files to {code(subst_env_for_path(lib_dest_dir))}') 747 | lib_files = sorted(Path('lib').glob('libparopt*')) 748 | for lib in lib_files: 749 | shutil.copy2(str(lib), lib_dest_dir) 750 | note_ok() 751 | 752 | popd() 753 | 754 | def 
install_ipopt_from_src(config_opts:list=None):
    """
    Git clone the IPOPT repo, build the library, and install it and the include files.

    No-op when IPOPT is already installed (unless forced) or excluded with -o.

    Parameters
    ----------
    config_opts : list
        Additional options to use with the IPOPT configure script.
    """
    if not allow_build('ipopt') or opts['include_ipopt'] is False:
        return

    build_dir = git_clone('ipopt')
    cnf_cmd_list = ['./configure', f'--prefix={opts["prefix"]}', '--disable-java']

    # Don't accidentally use PARDISO if it wasn't selected:
    if opts['linear_solver'] != 'pardiso': cnf_cmd_list.append('--disable-pardisomkl')

    if config_opts is not None: cnf_cmd_list.extend(config_opts)
    note("Running configure")
    run_cmd(cmd_list=cnf_cmd_list)
    note_ok()
    make_install()
    popd()

def install_ipopt(config_opts:list=None):
    """
    Install IPOPT either through conda or building.

    For pyOptSparse 2.14+ the cyipopt Python bindings are installed as well.

    Parameters
    ----------
    config_opts : list
        Additional options to use with the IPOPT configure script if building.
    """
    if opts['pyoptsparse_version'] >= parse('2.14'):
        # Expose our prefix's pkgconfig dir, preserving any existing
        # PKG_CONFIG_PATH entries — presumably so the IPOPT install can be
        # located by cyipopt's build (TODO confirm).
        pkg_path = os.environ['PKG_CONFIG_PATH'] + ':' if 'PKG_CONFIG_PATH' in os.environ else ''
        pkg_dir = Path(opts['prefix']) / 'lib' / 'pkgconfig'
        os.environ['PKG_CONFIG_PATH'] = pkg_path + str(pkg_dir)

    if allow_install_with_conda() and opts['force_build'] is False:
        try:
            install_conda_pkg('ipopt')
            if opts['pyoptsparse_version'] >= parse('2.14'):
                install_conda_pkg('cyipopt')
            return
        except Exception as e:
            # Either falls through to the source build or re-raises.
            try_fallback('IPOPT', e)

    install_ipopt_from_src(config_opts=config_opts)
    if opts['pyoptsparse_version'] >= parse('2.14'):
        pip_install(['cyipopt', '--use-pep517'], pkg_desc='cyipopt')

def install_mumps():
    """ Install MUMPS either through conda or building.
""" 808 | if allow_install_with_conda() and opts['force_build'] is False: 809 | try: 810 | install_conda_pkg('mumps-include') 811 | install_conda_pkg('mumps-seq') 812 | install_conda_pkg('mumps-mpi') 813 | return 814 | except Exception as e: 815 | try_fallback('MUMPS', e) 816 | 817 | install_mumps_from_src() 818 | 819 | def install_with_mumps(): 820 | """ Install METIS, MUMPS, and IPOPT. """ 821 | install_metis() 822 | install_mumps() 823 | 824 | if opts['include_ipopt'] is True: 825 | # Get this info in case we need to build IPOPT from source 826 | coin_dir = get_coin_inc_dir() 827 | 828 | mumps_lib = get_coin_lib_name('mumps') 829 | ipopt_opts = [ 830 | '--with-mumps', 831 | f'--with-mumps-lflags=-L{opts["prefix"]}/lib -l{mumps_lib}', 832 | f'--with-mumps-cflags=-I{coin_dir}/mumps', 833 | '--without-asl', 834 | '--without-hsl' 835 | ] 836 | 837 | install_ipopt(config_opts=ipopt_opts) 838 | 839 | def install_hsl_from_src(): 840 | """ Build HSL from the user-supplied source tar file. """ 841 | if not allow_build('hsl'): 842 | return 843 | 844 | build_dir = git_clone('hsl') 845 | 846 | # Extract the HSL tar file and rename the folder to 'coinhsl' 847 | # First, determine the name of the top-level folder: 848 | with tarfile.open(opts['hsl_tar_file'], 'r') as tf: 849 | hsl_dir_name = tf.getnames()[0] 850 | run_cmd(cmd_list=['tar', 'xf', opts['hsl_tar_file']]) # Extract 851 | Path(hsl_dir_name).rename('coinhsl') # Rename 852 | 853 | cnf_cmd_list = get_common_solver_config_cmd() 854 | 855 | note("Running configure") 856 | run_cmd(cmd_list=cnf_cmd_list) 857 | note_ok() 858 | make_install() 859 | popd() 860 | 861 | def install_with_hsl(): 862 | """ Install pyOptSparse using the HSL linear solver """ 863 | install_metis() 864 | install_hsl_from_src() 865 | 866 | coin_dir = get_coin_inc_dir() 867 | metis_lib = get_coin_lib_name('metis') 868 | ipopt_opts = [ 869 | '--with-hsl', 870 | f'--with-hsl-lflags=-L{opts["prefix"]}/lib -lcoinhsl -l{metis_lib}', 871 | 
f'--with-hsl-cflags=-I{coin_dir}/hsl', 872 | '--disable-linear-solver-loader' 873 | ] 874 | install_ipopt_from_src(config_opts=ipopt_opts) 875 | install_pyoptsparse_from_src() 876 | 877 | def install_with_pardiso(): 878 | """ Build IPOPT with the PARDISO linear solver. """ 879 | # install_ipopt_from_src(config_opts=['--with-lapack=-mkl']) 880 | install_ipopt_from_src() 881 | 882 | # pyOptSparse doesn't do well with Intel compilers, so unset: 883 | # select_gnu_compiler() 884 | 885 | install_pyoptsparse_from_src() 886 | 887 | def copy_snopt_files(build_dirname): 888 | """ 889 | Copy SNOPT source files into the pyOptSparse build dir, excluding snopth.f. 890 | 891 | Parameters 892 | ---------- 893 | build_dirname : str 894 | The directory where pyOptSparse is being built/installed from. 895 | """ 896 | note('Copying SNOPT source files') 897 | snoptc_f_list = sorted(Path(opts['snopt_dir']).rglob('snoptc.f')) 898 | all_snopt_files = sorted(Path(snoptc_f_list[0]).parent.glob('*')) 899 | 900 | dest_dir = str(Path(build_dirname) / 'pyoptsparse' / 'pySNOPT' / 'source') 901 | 902 | exclude_snopth_f = re.compile('.*snopth.f') 903 | for sfile in all_snopt_files: 904 | src_file = str(sfile) 905 | # copy source files, exclude any directories (e.g. f2py/) 906 | if not exclude_snopth_f.match(src_file) and not sfile.is_dir(): 907 | shutil.copy2(src_file, dest_dir) 908 | 909 | note_ok() 910 | 911 | def patch_pyoptsparse_src(): 912 | """ Some versions of pyOptSparse need to be modified slightly to build correctly. 
""" 913 | 914 | if opts['pyoptsparse_version'] < parse('2.6.3'): 915 | pushd("pyoptsparse/pyIPOPT") 916 | note("Patching for versions < 2.6.3") 917 | 918 | setup_py_path = Path("setup.py") 919 | pos_setup_py = open(str(setup_py_path)) 920 | data = pos_setup_py.read() 921 | pos_setup_py.close() 922 | setup_py_path.rename('setup.py.orig') 923 | 924 | new_data = re.sub(r'libraries=.+,', 'libraries=["ipopt"],', data) 925 | 926 | with open(str(setup_py_path), 'w') as setup_py_path: 927 | setup_py_path.write(new_data) 928 | 929 | note_ok() 930 | popd() 931 | 932 | def install_pyoptsparse_from_src(): 933 | """ Git clone the pyOptSparse repo and use pip to install it. """ 934 | pip_install(['Cython'], pkg_desc='Cython') 935 | 936 | # First, build PAROPT if selected: 937 | if opts['include_paropt'] is True: 938 | install_paropt_from_src() 939 | 940 | build_dir = git_clone('pyoptsparse', opts['build_pyoptsparse']) 941 | 942 | if opts['include_ipopt'] is True: 943 | os.environ['IPOPT_INC'] = get_coin_inc_dir() 944 | os.environ['IPOPT_LIB'] = str(Path(opts["prefix"]) / 'lib') 945 | os.environ['IPOPT_DIR'] = str(Path(opts["prefix"])) 946 | os.environ['CFLAGS'] = '-Wno-implicit-function-declaration -std=c99' 947 | 948 | # Pull in SNOPT source: 949 | if opts['snopt_dir'] is not None: 950 | build_dir_str = build_dir if isinstance(build_dir, str) else build_dir.name 951 | copy_snopt_files(build_dir_str) 952 | 953 | if opts['build_pyoptsparse'] is True: 954 | patch_pyoptsparse_src() 955 | pip_install(pip_install_args=['--no-cache-dir', './'], pkg_desc='pyoptsparse') 956 | else: 957 | announce('Not building pyOptSparse by request') 958 | if opts['include_ipopt'] is True: 959 | print(f""" 960 | Make sure to set these environment variables before building it yourself: 961 | 962 | {code(f'export IPOPT_INC={subst_env_for_path(os.environ["IPOPT_INC"])}')} 963 | {code(f'export IPOPT_LIB={subst_env_for_path(os.environ["IPOPT_LIB"])}')} 964 | """) 965 | 966 | popd() 967 | 968 | def 
uninstall_built_item(build_key:str): 969 | """ Uninstall a specific item that was previously built from source code. """ 970 | d = build_info[build_key] 971 | 972 | if 'include_subdir' in d: 973 | inc_dir = Path(opts['prefix']) / 'include' / 'coin-or' / d['include_subdir'] 974 | if 'include_glob_list' in d: 975 | # If there's a list of glob patterns, remove found files individually instead 976 | # of removing an entire include subdirectory: 977 | note(f'Removing {build_key.upper()} include files') 978 | 979 | for glob_item in d['include_glob_list']: 980 | for inc_file in sorted(Path(inc_dir).glob(glob_item)): 981 | Path(inc_file).unlink() 982 | 983 | try: 984 | inc_dir.rmdir() 985 | except: 986 | pass 987 | 988 | note_ok() 989 | else: 990 | # If there's no chance that other include files will be installed in the same 991 | # folder, just remove the whole subdirectory. 992 | if inc_dir.is_dir(): 993 | note(f'Removing {build_key.upper()} include directory') 994 | shutil.rmtree(inc_dir) 995 | note_ok() 996 | 997 | # Remove individual library files. 998 | if 'src_lib_glob' in d: 999 | lib_dir = Path(opts['prefix']) / 'lib' 1000 | lib_file_list = sorted(lib_dir.glob(d['src_lib_glob'])) 1001 | if len(lib_file_list) > 0: 1002 | note(f'Removing {build_key.upper()} library files') 1003 | for lib_file in lib_file_list: 1004 | Path(lib_file).unlink() 1005 | note_ok() 1006 | 1007 | def uninstall_paropt_and_pyoptsparse(): 1008 | """ Both ParOpt and pyOptSparse were installed with pip. """ 1009 | # Uninstall pyOptSparse 1010 | note('Removing pyOptSparse') 1011 | run_cmd(cmd_list=['pip','uninstall','-y','pyOptSparse'], do_check=False) 1012 | note_ok() 1013 | 1014 | note('Removing PAROPT package') 1015 | run_cmd(cmd_list=['pip','uninstall','-y','paropt'], do_check=False) 1016 | note_ok() 1017 | uninstall_built_item('paropt') 1018 | 1019 | def remove_conda_scripts(): 1020 | """ Remove the conda activate/deactivate scripts if they exist. 
""" 1021 | if conda_is_active() and opts['ignore_conda'] is False: 1022 | note("Removing conda activate/deactivate scripts") 1023 | act_path = Path(sys_info['conda_activate_dir']) / sys_info['conda_env_script'] 1024 | if act_path.is_file(): act_path.unlink() 1025 | 1026 | deact_path = Path(sys_info['conda_deactivate_dir']) / sys_info['conda_env_script'] 1027 | if deact_path.is_file(): deact_path.unlink() 1028 | note_ok() 1029 | 1030 | def uninstall_built(): 1031 | """ Attempt to remove files that were previously installed when building from source. """ 1032 | uninstall_paropt_and_pyoptsparse() 1033 | 1034 | for build_key in ['ipopt', 'hsl', 'mumps', 'metis']: 1035 | uninstall_built_item(build_key) 1036 | 1037 | if opts['ignore_conda'] is False: remove_conda_scripts() 1038 | 1039 | def uninstall_conda_pkgs(): 1040 | """ Attempt to remove packages previously installed by conda. """ 1041 | 1042 | if conda_is_active(): 1043 | for pkg in ['ipopt','mumps','mumps-include','mumps-seq','mumps-mpi','metis']: 1044 | note(f"Removing {pkg.upper()} conda package") 1045 | run_cmd(cmd_list=[opts['conda_cmd'],'uninstall','-y',pkg], do_check=False) 1046 | note_ok() 1047 | 1048 | def display_environment(): 1049 | announce("Relevant environment variables") 1050 | for ev in ['CONDA_PREFIX','VIRTUAL_ENV','TMPDIR','TMP_DIR','TEMP_DIR']: 1051 | if ev in os.environ: 1052 | print(f'{cyan(ev)}: {code(os.environ[ev])}') 1053 | 1054 | def check_library(libname:str, raise_on_failure=True): 1055 | """ 1056 | Determine whether the specified library is available for linking. 1057 | 1058 | Parameters 1059 | ---------- 1060 | libname : str 1061 | The name of the library without the preceding 'lib' or '.a/.so.*/.dll' extension. 
1062 | """ 1063 | 1064 | build_dir = tempfile.TemporaryDirectory() 1065 | pushd(build_dir.name) 1066 | 1067 | note(f'Checking for library: {libname}') 1068 | with open('hello.c', 'w', encoding="utf-8") as f: 1069 | f.write('#include \nint main() {\nprintf("cc works!\\n");\nreturn 0;\n}\n') 1070 | 1071 | result = run_cmd(cmd_list=[os.environ['CC'], '-o', 'hello_c', 'hello.c', f'-l{libname}'], 1072 | raise_error=False) 1073 | 1074 | success = (result is not None and result.returncode == 0) 1075 | if success is True: 1076 | note_ok() 1077 | else: 1078 | print(red('not found')) 1079 | if raise_on_failure is True: 1080 | raise RuntimeError(f'Cannot continue without {libname} library.') 1081 | 1082 | popd() 1083 | 1084 | return success 1085 | 1086 | def check_compiler_sanity(): 1087 | """ Build and run programs written in C, C++, and FORTRAN to test the compilers. """ 1088 | build_dir = tempfile.TemporaryDirectory() 1089 | pushd(build_dir.name) 1090 | 1091 | note(f'Testing {os.environ["CC"]}') 1092 | with open('hello.c', 'w', encoding="utf-8") as f: 1093 | f.write('#include \nint main() {\nprintf("cc works!\\n");\nreturn 0;\n}\n') 1094 | 1095 | run_cmd(cmd_list=[os.environ['CC'], '-o', 'hello_c', 'hello.c']) 1096 | run_cmd(cmd_list=['./hello_c']) 1097 | note_ok() 1098 | 1099 | note(f'Testing {os.environ["CXX"]}') 1100 | with open('hello.cc', 'w', encoding="utf-8") as f: 1101 | f.write('#include \nint main() {\nstd::cout << "c++ works!" 
<< std::endl;\nreturn 0;\n}\n') 1102 | 1103 | run_cmd(cmd_list=[os.environ['CXX'], '-o', 'hello_cxx', 'hello.cc']) 1104 | run_cmd(cmd_list=['./hello_cxx']) 1105 | note_ok() 1106 | 1107 | if opts['include_paropt']: 1108 | note(f'Testing mpicxx') 1109 | run_cmd(cmd_list=['mpicxx', '-o', 'hello_cxx_mpi', 'hello.cc']) 1110 | run_cmd(cmd_list=['./hello_cxx_mpi']) 1111 | note_ok() 1112 | 1113 | note(f'Testing {os.environ["FC"]}') 1114 | with open('hello.f90', 'w', encoding="utf-8") as f: 1115 | f.write("program hello\n print *, 'fortran works!'\nend program hello") 1116 | 1117 | run_cmd(cmd_list=[os.environ['FC'], '-o', 'hello_f', 'hello.f90']) 1118 | run_cmd(cmd_list=['./hello_f']) 1119 | note_ok() 1120 | 1121 | popd() 1122 | 1123 | def find_required_command(cmd:str, errors:list): 1124 | """ 1125 | Determine if the command name is in the PATH. 1126 | 1127 | Parameters 1128 | ---------- 1129 | cmd : str 1130 | The name of the command to look for. 1131 | errors : list 1132 | Accumulated pre-check error messages. 1133 | """ 1134 | cmd_path = which(cmd) 1135 | if cmd_path is None: 1136 | errors.append(f"{red('ERROR')}: Required command {yellow(cmd)} not found.") 1137 | return False 1138 | elif opts['verbose'] is True: 1139 | print(f"{green('FOUND')}: {cmd} is {cmd_path}") 1140 | 1141 | return True 1142 | 1143 | def check_sanity(): 1144 | """ Determine if all the required commands are there and can build if necessary. """ 1145 | announce("Testing build environment functionality. 
Can be skipped with -k.") 1146 | 1147 | errors = [] 1148 | required_cmds = [] 1149 | 1150 | print(f'Using {code(subst_env_for_path(opts["prefix"]))} for install prefix') 1151 | 1152 | if conda_is_active() and (opts['ignore_conda'] is False): 1153 | cpre = os.environ['CONDA_PREFIX'] 1154 | if re.search('intelpython', cpre) is not None: 1155 | print(f""" 1156 | {yellow("WARNING")}: $CONDA_PREFIX points to: 1157 | {' ' * 9 + code(cpre)} 1158 | This is associated with Intel OneAPI and may cause the installation to fail. 1159 | If it does, set up Intel OneAPI {yellow('before')} activating your conda env. 1160 | """[1:]) 1161 | 1162 | if opts['compile_required'] is True or opts['fall_back'] is True: 1163 | check_make(errors) 1164 | required_cmds.extend(['git', os.environ['CC'], os.environ['CXX'], os.environ['FC']]) 1165 | if opts['build_pyoptsparse'] is True: 1166 | required_cmds.extend(['swig']) 1167 | 1168 | if opts['compile_required'] is False: 1169 | required_cmds.append(opts['conda_cmd']) 1170 | 1171 | if opts['hsl_tar_file'] is not None: 1172 | required_cmds.append('tar') 1173 | if not Path(opts['hsl_tar_file']).is_file(): 1174 | errors.append(f"{red('ERROR')}: HSL tar file {yellow(opts['hsl_tar_file'])} does not exist.") 1175 | 1176 | if opts['include_paropt'] is True: 1177 | required_cmds.append('mpicxx') 1178 | if opts['pyoptsparse_version'] < parse('2.1.2'): 1179 | errors.append(f"{red('ERROR')}: PAROPT is only supported by pyOptSparse {yellow('v2.1.2')} or newer.") 1180 | 1181 | if opts['snopt_dir'] is not None: 1182 | if not Path(opts['snopt_dir']).is_dir(): 1183 | errors.append(f"{red('ERROR')}: SNOPT folder {yellow(opts['snopt_dir'])} does not exist.") 1184 | 1185 | for cmd in required_cmds: 1186 | find_required_command(cmd, errors) 1187 | 1188 | if len(errors) > 0: 1189 | for err in errors: 1190 | print(err) 1191 | 1192 | exit(1) 1193 | 1194 | if opts['compile_required'] is True or opts['fall_back'] is True: 1195 | check_compiler_sanity() 1196 | 
check_library('lapack') 1197 | check_library('blas') 1198 | 1199 | if opts['build_pyoptsparse'] is True: 1200 | if check_library('openblas', raise_on_failure=False) is False: 1201 | print(f"{yellow('WARNING')}: openblas missing. Required to build scipy on uncommon platforms.") 1202 | 1203 | def select_intel_compilers(): 1204 | """ Set environment variables to use Intel compilers. """ 1205 | os.environ['CC'] = os.environ.get('CC', 'icc') 1206 | os.environ['CXX'] = os.environ.get('CXX', 'icpc') 1207 | os.environ['FC'] = os.environ.get('FC', 'ifort') 1208 | sys_info['gcc_major_ver'] = -1 1209 | sys_info['gcc_is_apple_clang'] = False 1210 | 1211 | def select_gnu_compilers(): 1212 | """ Set environment variables to use GNU compilers. """ 1213 | os.environ['CC'] = os.environ.get('CC', 'gcc') 1214 | os.environ['CXX'] = os.environ.get('CXX', 'g++') 1215 | os.environ['FC'] = os.environ.get('FC', 'gfortran') 1216 | gcc_ver = subprocess.run(['gcc', '-dumpversion'], capture_output=True) 1217 | sys_info['gcc_major_ver'] = int(gcc_ver.stdout.decode('UTF-8').split('.')[0]) 1218 | gcc_version = subprocess.run(['gcc', '--version'], capture_output=True) 1219 | sys_info['gcc_is_apple_clang'] = 'Apple clang' in gcc_version.stdout.decode('UTF-8') 1220 | 1221 | def finish_setup(): 1222 | """ Finalize settings based on provided options and environment state. 
""" 1223 | if opts['intel_compiler_suite'] is True: 1224 | select_intel_compilers() 1225 | else: 1226 | select_gnu_compilers() 1227 | 1228 | # Determine whether any compiling will actually be performed 1229 | opts['compile_required'] = opts['build_pyoptsparse'] is True or \ 1230 | not (allow_install_with_conda() and opts['snopt_dir'] is None and \ 1231 | opts['include_paropt'] is False and opts['hsl_tar_file'] is None) 1232 | 1233 | # Set an option with the parsed pyOptSparse version 1234 | pos_ver_str = build_info['pyoptsparse']['branch'] 1235 | if pos_ver_str[:1] == 'v': pos_ver_str = pos_ver_str[1:] # Drop the initial v 1236 | opts['pyoptsparse_version'] = parse(pos_ver_str) 1237 | 1238 | # Change snopt_dir to an absolute path 1239 | if opts['snopt_dir'] is not None: 1240 | opts['snopt_dir'] = str(Path(opts['snopt_dir']).resolve()) 1241 | 1242 | if opts['hsl_tar_file'] is not None: 1243 | opts['hsl_tar_file'] = str(Path(opts['hsl_tar_file']).resolve()) 1244 | 1245 | display_environment() 1246 | 1247 | if opts['check_sanity']: 1248 | check_sanity() 1249 | 1250 | def install_conda_scripts(var_name:str, lib_dir:Path): 1251 | """ 1252 | Create conda activate/deactivate scripts to set dynamic linker search path. 1253 | 1254 | Parameters 1255 | ---------- 1256 | var_name : str 1257 | The name of the dynamic linker environment variable. 1258 | lib_dir : Path 1259 | The location of the shared library files. 
1260 | """ 1261 | bash_path = which('bash') 1262 | 1263 | sys_info['conda_activate_dir'].mkdir(parents=True, exist_ok=True) 1264 | act_file_name = str(sys_info['conda_activate_dir'] / sys_info['conda_env_script']) 1265 | with open(act_file_name, 'w', encoding="utf-8") as f: 1266 | f.write( 1267 | f"""#!{bash_path} 1268 | if [ -z "${var_name}" ]; then 1269 | export {var_name}="{str(lib_dir)}" 1270 | else 1271 | # Preserve previous settings 1272 | export OLD_{var_name}="${var_name}" 1273 | export {var_name}="{str(lib_dir)}:${var_name}" 1274 | fi 1275 | """) 1276 | 1277 | sys_info['conda_deactivate_dir'].mkdir(parents=True, exist_ok=True) 1278 | deact_file_name = str(sys_info['conda_deactivate_dir'] / sys_info['conda_env_script']) 1279 | with open(deact_file_name, 'w', encoding="utf-8") as f: 1280 | f.write( 1281 | f"""#!{bash_path} 1282 | if [ -z "$OLD_{var_name}" ]; then 1283 | unset {var_name} 1284 | else 1285 | # Restore previous setting 1286 | {var_name}="$OLD_{var_name}" 1287 | unset OLD_{var_name} 1288 | fi 1289 | """) 1290 | 1291 | print( 1292 | f"""Your {cyan(os.environ['CONDA_DEFAULT_ENV'])} conda environment has been updated to automatically 1293 | set the {yellow(var_name)} environment variable when activated. 1294 | 1295 | This setting is found in the following files: 1296 | {code(subst_env_for_path(act_file_name))} 1297 | {code(subst_env_for_path(deact_file_name))} 1298 | 1299 | Run {code(f'source {subst_env_for_path(act_file_name)}')} to set it now. 1300 | """) 1301 | 1302 | def post_build_success(): 1303 | """ Announce successful build and print some instructions. 
""" 1304 | announce("The pyOptSparse build is complete") 1305 | 1306 | lib_dir = Path(opts['prefix']) / 'lib' 1307 | if sys_info['sys_name'] == 'Darwin': 1308 | var_name = 'DYLD_LIBRARY_PATH' 1309 | else: 1310 | var_name = 'LD_LIBRARY_PATH' 1311 | 1312 | if allow_install_with_conda(): 1313 | install_conda_scripts(var_name, lib_dir) 1314 | else: 1315 | if opts['include_ipopt'] is True: 1316 | print( 1317 | f"""{yellow('NOTE')}: Set the following environment variable before using this installation: 1318 | 1319 | {code(f'export {var_name}={subst_env_for_path(str(lib_dir))}')} 1320 | 1321 | Otherwise, you may encounter errors such as: 1322 | "pyOptSparse Error: There was an error importing the compiled IPOPT module" 1323 | """) 1324 | 1325 | announce('SUCCESS!') 1326 | exit(0) 1327 | 1328 | def perform_install(): 1329 | """ Initiate all the required actions in the script. """ 1330 | process_command_line() 1331 | initialize() 1332 | 1333 | if opts['uninstall']: 1334 | announce('Uninstalling pyOptSparse and related packages') 1335 | print(f'{yellow("NOTE:")} Some items may be listed even if not installed.') 1336 | if opts['ignore_conda'] is False: uninstall_conda_pkgs() 1337 | uninstall_built() 1338 | exit(0) 1339 | 1340 | finish_setup() 1341 | 1342 | announce('Beginning installation') 1343 | 1344 | if opts['linear_solver'] == 'mumps': 1345 | install_with_mumps() 1346 | install_pyoptsparse_from_src() 1347 | elif opts['linear_solver'] == 'pardiso': 1348 | install_with_pardiso() 1349 | elif opts['linear_solver'] == 'hsl': 1350 | install_with_hsl() 1351 | 1352 | post_build_success() 1353 | 1354 | if __name__ == "__main__": 1355 | perform_install() 1356 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ["hatchling"] 3 | build-backend = "hatchling.build" 4 | 5 | [project] 6 | name = "build-pyoptsparse" 7 | dynamic 
= ["version"] 8 | description = "Automated installer for pyOptSparse" 9 | readme = "README.md" 10 | license = "Apache-2.0" 11 | requires-python = ">=3.7" 12 | authors = [ 13 | { name = "OpenMDAO Team", email = "openmdao@openmdao.org" }, 14 | ] 15 | dependencies = [ 16 | "ansicolors", 17 | "numpy", 18 | "cython", 19 | "packaging", 20 | "sqlitedict", 21 | ] 22 | 23 | [project.optional-dependencies] 24 | paropt = [ 25 | "mpi4py", 26 | ] 27 | 28 | [project.scripts] 29 | build-pyoptsparse = "build_pyoptsparse:perform_install" 30 | build_pyoptsparse = "build_pyoptsparse:perform_install" 31 | 32 | [project.urls] 33 | Homepage = "http://openmdao.org" 34 | 35 | [tool.hatch.version] 36 | path = "__init__.py" 37 | 38 | [tool.hatch.build.targets.sdist] 39 | include = [ 40 | "/", 41 | ] 42 | --------------------------------------------------------------------------------