├── .gitattributes
├── .github
│   ├── dependabot.yml
│   └── workflows
│       ├── pre_commit.yml
│       ├── python_package.yml
│       └── release_me.yml
├── .gitignore
├── .pre-commit-config.yaml
├── .pylintrc
├── CHANGELOG.md
├── LICENSE
├── MANIFEST.in
├── README.md
├── docs
│   ├── Makefile
│   ├── make.bat
│   ├── setup.cfg
│   ├── source
│   │   ├── conf.py
│   │   └── index.rst
│   └── versioneer.py
├── mypy.ini
├── requirements.txt
├── setup.cfg
├── setup.py
├── tobascco
│   ├── __init__.py
│   ├── _version.py
│   ├── atoms.py
│   ├── builder.py
│   ├── cifer.py
│   ├── config.py
│   ├── connectpoints.py
│   ├── createinput.py
│   ├── csv.py
│   ├── data
│   │   ├── arc
│   │   │   ├── epinet_sqc_nets.arc
│   │   │   ├── iza_all.arc
│   │   │   ├── iza_extra.arc
│   │   │   ├── rcsr_0.6.0.arc
│   │   │   ├── zeolites.arc
│   │   │   └── zeolites.archived.arc
│   │   └── sbu
│   │       ├── all_non_pillared_met.dat
│   │       └── all_organic.dat
│   ├── defaults.ini
│   ├── element_properties.py
│   ├── generator.py
│   ├── glog.py
│   ├── linalg.py
│   ├── net.py
│   ├── sbu.py
│   ├── src
│   │   ├── Makefile
│   │   ├── graph.c
│   │   ├── graph.h
│   │   ├── main.c
│   │   ├── pyoptim.cpp
│   │   ├── setup.py
│   │   ├── test.arc
│   │   └── test.py
│   ├── structure.py
│   ├── tobascco.py
│   └── visualizer.py
└── versioneer.py

/.gitattributes:
--------------------------------------------------------------------------------
1 | *.arc linguist-vendored
2 | tobascco/_version.py export-subst
--------------------------------------------------------------------------------
/.github/dependabot.yml:
--------------------------------------------------------------------------------
1 | version: 2
2 | updates:
3 |   - package-ecosystem: "pip"
4 |     directory: "/"
5 |     schedule:
6 |       interval: "daily"
--------------------------------------------------------------------------------
/.github/workflows/pre_commit.yml:
--------------------------------------------------------------------------------
1 | name: pre-commit
2 | 
3 | on:
4 |   pull_request:
5 |     branches: [master]
6 |   push:
7 |     branches: [master]
8 | 
9 | jobs:
10 |   pre-commit:
11 |     strategy:
12 |       matrix:
13 |         python-version: ["3.8"]
14 |     runs-on: ubuntu-latest
15 |     steps:
16 |       - uses: actions/checkout@v2
17 |       - name: Set up Python ${{ matrix.python-version }}
18 |         uses: actions/setup-python@v2
19 |         with:
20 |           python-version: ${{ matrix.python-version }}
21 |       - name: Install dependencies
22 |         run: |
23 |           python -m pip install --upgrade pip
24 |           pip install -e .[pre-commit,testing]
25 |           pip freeze
26 |       - name: Run pre-commit
27 |         run:
28 |           pre-commit run --all-files || ( git status --short ; git diff ; exit 1 )
--------------------------------------------------------------------------------
/.github/workflows/python_package.yml:
--------------------------------------------------------------------------------
1 | # This workflow will install Python dependencies, run tests and lint with a variety of Python versions
2 | # For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions
3 | 
4 | name: Python package
5 | 
6 | on:
7 |   pull_request:
8 |     branches: [master]
9 |   push:
10 |     branches: [master]
11 | 
12 | jobs:
13 |   build:
14 |     strategy:
15 |       matrix:
16 |         os: [ubuntu-latest, macOS-latest]
17 |         python-version: [3.7, 3.8]
18 |     runs-on: ${{ matrix.os }}
19 | 
20 |     steps:
21 |       - uses: actions/checkout@v2
22 |       - name: Set up Python ${{ matrix.python-version }}
23 |         uses: actions/setup-python@v2
24 |         with:
25 |           python-version: ${{ matrix.python-version }}
26 |       - name: Install libomp (OpenMP runtime, needed on macOS)
27 |         run: |
28 |           if [ "$RUNNER_OS" == "macOS" ]; then
29 |             brew install libomp;
30 |           fi
31 |       - name: Install dependencies
32 |         run: |
33 |           python -m pip install --upgrade pip
34 |           pip install -e .[testing,pre-commit]
35 |       - name: Test with pytest
36 |         run: |
37 |           pytest
38 |       - name: coverage
39 |         run: pytest --cov=./ --cov-report=xml
40 | 
41 |       - name: Upload coverage to Codecov
42 |         uses: codecov/codecov-action@v1
43 |         with:
44 |           token: ${{ secrets.CODECOV_TOKEN }}
45 |           file: ./coverage.xml
46 |           flags: unittests
--------------------------------------------------------------------------------
/.github/workflows/release_me.yml:
--------------------------------------------------------------------------------
1 | on:
2 |   push:
3 |     branches:
4 |       - master
5 | name: release-please
6 | jobs:
7 |   release-please:
8 |     runs-on: ubuntu-latest
9 |     steps:
10 |       - uses: GoogleCloudPlatform/release-please-action@v2.6.0
11 |         with:
12 |           token: ${{ secrets.GITHUB_TOKEN }}
13 |           release-type: python
14 |           package-name: tobascco
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | 
2 | # Created by https://www.gitignore.io/api/vim,linux,macos,python,visualstudio,jupyternotebooks
3 | # Edit at https://www.gitignore.io/?templates=vim,linux,macos,python,visualstudio,jupyternotebooks
4 | 
5 | ### JupyterNotebooks ###
6 | # gitignore template for Jupyter Notebooks
7 | # website: http://jupyter.org/
8 | 
9 | .ipynb_checkpoints
10 | */.ipynb_checkpoints/*
11 | 
12 | # IPython
13 | profile_default/
14 | ipython_config.py
15 | 
16 | # Remove previous ipynb_checkpoints
17 | #   git rm -r .ipynb_checkpoints/
18 | 
19 | ### Linux ###
20 | *~
21 | 
22 | # temporary files which can be created if a process still has a handle open of a deleted file
23 | .fuse_hidden*
24 | 
25 | # KDE directory preferences
26 | .directory
27 | 
28 | # Linux trash folder which might appear on any partition or disk
29 | .Trash-*
30 | 
31 | # .nfs files are created when an open file is removed but is still being accessed
32 | .nfs*
33 | 
34 | ### macOS ###
35 | # General
36 | .DS_Store
37 | .AppleDouble
38 | .LSOverride
39 | 
40 | # Icon must end with two \r
41 | Icon
42 | 
43 | # Thumbnails
44 | ._*
45 | 
46 | # Files that might appear in the root of a volume
47 | .DocumentRevisions-V100
48 | .fseventsd
49 | .Spotlight-V100
50 | .TemporaryItems
51 | .Trashes
52 | .VolumeIcon.icns
53 | .com.apple.timemachine.donotpresent
54 | 
55 | # Directories potentially created on remote AFP share
56 | .AppleDB
57 | .AppleDesktop
58 | Network Trash Folder
59 | Temporary Items
60 | .apdisk
61 | 
62 | ### Python ###
63 | # Byte-compiled / optimized / DLL files
64 | __pycache__/
65 | *.py[cod]
66 | *$py.class
67 | 
68 | # C extensions
69 | *.so
70 | 
71 | # Distribution / packaging
72 | .Python
73 | build/
74 | develop-eggs/
75 | dist/
76 | downloads/
77 | eggs/
78 | .eggs/
79 | lib/
80 | lib64/
81 | parts/
82 | sdist/
83 | var/
84 | wheels/
85 | pip-wheel-metadata/
86 | share/python-wheels/
87 | *.egg-info/
88 | .installed.cfg
89 | *.egg
90 | MANIFEST
91 | 
92 | # PyInstaller
93 | #  Usually these files are written by a python script from a template
94 | #  before PyInstaller builds the exe, so as to inject date/other infos into it.
95 | *.manifest 96 | *.spec 97 | 98 | # Installer logs 99 | pip-log.txt 100 | pip-delete-this-directory.txt 101 | 102 | # Unit test / coverage reports 103 | htmlcov/ 104 | .tox/ 105 | .nox/ 106 | .coverage 107 | .coverage.* 108 | .cache 109 | nosetests.xml 110 | coverage.xml 111 | *.cover 112 | .hypothesis/ 113 | .pytest_cache/ 114 | 115 | # Translations 116 | *.mo 117 | *.pot 118 | 119 | # Scrapy stuff: 120 | .scrapy 121 | 122 | 123 | # PyBuilder 124 | target/ 125 | 126 | # pyenv 127 | .python-version 128 | 129 | # pipenv 130 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 131 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 132 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 133 | # install all needed dependencies. 134 | #Pipfile.lock 135 | 136 | # celery beat schedule file 137 | celerybeat-schedule 138 | 139 | # SageMath parsed files 140 | *.sage.py 141 | 142 | # Spyder project settings 143 | .spyderproject 144 | .spyproject 145 | 146 | # Rope project settings 147 | .ropeproject 148 | 149 | # Mr Developer 150 | .mr.developer.cfg 151 | .project 152 | .pydevproject 153 | 154 | # mkdocs documentation 155 | /site 156 | 157 | # mypy 158 | .mypy_cache/ 159 | .dmypy.json 160 | dmypy.json 161 | 162 | # Pyre type checker 163 | .pyre/ 164 | 165 | ### Vim ### 166 | # Swap 167 | [._]*.s[a-v][a-z] 168 | [._]*.sw[a-p] 169 | [._]s[a-rt-v][a-z] 170 | [._]ss[a-gi-z] 171 | [._]sw[a-p] 172 | 173 | # Session 174 | Session.vim 175 | Sessionx.vim 176 | 177 | # Temporary 178 | .netrwhist 179 | 180 | # Auto-generated tag files 181 | tags 182 | 183 | # Persistent undo 184 | [._]*.un~ 185 | 186 | # Coc configuration directory 187 | .vim 188 | 189 | ### VisualStudio ### 190 | ## Ignore Visual Studio temporary files, build results, and 191 | ## files generated by popular Visual Studio add-ons. 
192 | ## 193 | ## Get latest from https://github.com/github/gitignore/blob/master/VisualStudio.gitignore 194 | 195 | # User-specific files 196 | *.rsuser 197 | *.suo 198 | *.user 199 | *.userosscache 200 | *.sln.docstates 201 | 202 | # User-specific files (MonoDevelop/Xamarin Studio) 203 | *.userprefs 204 | 205 | # Mono auto generated files 206 | mono_crash.* 207 | 208 | # Build results 209 | [Dd]ebug/ 210 | [Dd]ebugPublic/ 211 | [Rr]elease/ 212 | [Rr]eleases/ 213 | x64/ 214 | x86/ 215 | [Aa][Rr][Mm]/ 216 | [Aa][Rr][Mm]64/ 217 | bld/ 218 | [Bb]in/ 219 | [Oo]bj/ 220 | [Ll]og/ 221 | 222 | # Visual Studio 2015/2017 cache/options directory 223 | .vs/ 224 | # Uncomment if you have tasks that create the project's static files in wwwroot 225 | #wwwroot/ 226 | 227 | # Visual Studio 2017 auto generated files 228 | Generated\ Files/ 229 | 230 | # MSTest test Results 231 | [Tt]est[Rr]esult*/ 232 | [Bb]uild[Ll]og.* 233 | 234 | # NUnit 235 | *.VisualState.xml 236 | TestResult.xml 237 | nunit-*.xml 238 | 239 | # Build Results of an ATL Project 240 | [Dd]ebugPS/ 241 | [Rr]eleasePS/ 242 | dlldata.c 243 | 244 | # Benchmark Results 245 | BenchmarkDotNet.Artifacts/ 246 | 247 | # .NET Core 248 | project.lock.json 249 | project.fragment.lock.json 250 | artifacts/ 251 | 252 | # StyleCop 253 | StyleCopReport.xml 254 | 255 | # Files built by Visual Studio 256 | *_i.c 257 | *_p.c 258 | *_h.h 259 | *.ilk 260 | *.obj 261 | *.iobj 262 | *.pch 263 | *.pdb 264 | *.ipdb 265 | *.pgc 266 | *.pgd 267 | *.rsp 268 | *.sbr 269 | *.tlb 270 | *.tli 271 | *.tlh 272 | *.tmp 273 | *.tmp_proj 274 | *_wpftmp.csproj 275 | *.log 276 | *.vspscc 277 | *.vssscc 278 | .builds 279 | *.pidb 280 | *.svclog 281 | *.scc 282 | 283 | # Chutzpah Test files 284 | _Chutzpah* 285 | 286 | # Visual C++ cache files 287 | ipch/ 288 | *.aps 289 | *.ncb 290 | *.opendb 291 | *.opensdf 292 | *.sdf 293 | *.cachefile 294 | *.VC.db 295 | *.VC.VC.opendb 296 | 297 | # Visual Studio profiler 298 | *.psess 299 | *.vsp 300 | *.vspx 301 | *.sap 302 | 303 | # Visual Studio Trace Files 304 | *.e2e 305 | 306 | # TFS 2012 Local Workspace 307 | $tf/ 308 | 309 | # Guidance Automation Toolkit 310 | *.gpState 311 | 312 | # ReSharper is a .NET coding add-in 313 | _ReSharper*/ 314 | *.[Rr]e[Ss]harper 315 | *.DotSettings.user 316 | 317 | # JustCode is a .NET coding add-in 318 | .JustCode 319 | 320 | # TeamCity is a build add-in 321 | _TeamCity* 322 | 323 | # DotCover is a Code Coverage Tool 324 | *.dotCover 325 | 326 | # AxoCover is a Code Coverage Tool 327 | .axoCover/* 328 | !.axoCover/settings.json 329 | 330 | # Visual Studio code coverage results 331 | *.coverage 332 | *.coveragexml 333 | 334 | # NCrunch 335 | _NCrunch_* 336 | .*crunch*.local.xml 337 | nCrunchTemp_* 338 | 339 | # MightyMoose 340 | *.mm.* 341 | AutoTest.Net/ 342 | 343 | # Web workbench (sass) 344 | .sass-cache/ 345 | 346 | # Installshield output folder 347 | [Ee]xpress/ 348 | 349 | # DocProject is a documentation generator add-in 350 | DocProject/buildhelp/ 351 | DocProject/Help/*.HxT 352 | DocProject/Help/*.HxC 353 | DocProject/Help/*.hhc 354 | DocProject/Help/*.hhk 355 | DocProject/Help/*.hhp 356 | DocProject/Help/Html2 357 | DocProject/Help/html 358 | 359 | # Click-Once directory 360 | publish/ 361 | 362 | # Publish Web Output 363 | *.[Pp]ublish.xml 364 | *.azurePubxml 365 | # Note: Comment the next line if you want to checkin your web deploy settings, 366 | # but database connection strings (with potential passwords) will be unencrypted 367 | *.pubxml 368 | *.publishproj 369 | 370 | # Microsoft Azure 
Web App publish settings. Comment the next line if you want to 371 | # checkin your Azure Web App publish settings, but sensitive information contained 372 | # in these scripts will be unencrypted 373 | PublishScripts/ 374 | 375 | # NuGet Packages 376 | *.nupkg 377 | # NuGet Symbol Packages 378 | *.snupkg 379 | # The packages folder can be ignored because of Package Restore 380 | **/[Pp]ackages/* 381 | # except build/, which is used as an MSBuild target. 382 | !**/[Pp]ackages/build/ 383 | # Uncomment if necessary however generally it will be regenerated when needed 384 | #!**/[Pp]ackages/repositories.config 385 | # NuGet v3's project.json files produces more ignorable files 386 | *.nuget.props 387 | *.nuget.targets 388 | 389 | # Microsoft Azure Build Output 390 | csx/ 391 | *.build.csdef 392 | 393 | # Microsoft Azure Emulator 394 | ecf/ 395 | rcf/ 396 | 397 | # Windows Store app package directories and files 398 | AppPackages/ 399 | BundleArtifacts/ 400 | Package.StoreAssociation.xml 401 | _pkginfo.txt 402 | *.appx 403 | *.appxbundle 404 | *.appxupload 405 | 406 | # Visual Studio cache files 407 | # files ending in .cache can be ignored 408 | *.[Cc]ache 409 | # but keep track of directories ending in .cache 410 | !?*.[Cc]ache/ 411 | 412 | # Others 413 | ClientBin/ 414 | ~$* 415 | *.dbmdl 416 | *.dbproj.schemaview 417 | *.jfm 418 | *.pfx 419 | *.publishsettings 420 | orleans.codegen.cs 421 | 422 | # Including strong name files can present a security risk 423 | # (https://github.com/github/gitignore/pull/2483#issue-259490424) 424 | #*.snk 425 | 426 | # Since there are multiple workflows, uncomment next line to ignore bower_components 427 | # (https://github.com/github/gitignore/pull/1529#issuecomment-104372622) 428 | #bower_components/ 429 | 430 | # RIA/Silverlight projects 431 | Generated_Code/ 432 | 433 | # Backup & report files from converting an old project file 434 | # to a newer Visual Studio version. Backup files are not needed, 435 | # because we have git ;-) 436 | _UpgradeReport_Files/ 437 | Backup*/ 438 | UpgradeLog*.XML 439 | UpgradeLog*.htm 440 | ServiceFabricBackup/ 441 | *.rptproj.bak 442 | 443 | # SQL Server files 444 | *.mdf 445 | *.ldf 446 | *.ndf 447 | 448 | # Business Intelligence projects 449 | *.rdl.data 450 | *.bim.layout 451 | *.bim_*.settings 452 | *.rptproj.rsuser 453 | *- [Bb]ackup.rdl 454 | *- [Bb]ackup ([0-9]).rdl 455 | *- [Bb]ackup ([0-9][0-9]).rdl 456 | 457 | # Microsoft Fakes 458 | FakesAssemblies/ 459 | 460 | # GhostDoc plugin setting file 461 | *.GhostDoc.xml 462 | 463 | # Node.js Tools for Visual Studio 464 | .ntvs_analysis.dat 465 | node_modules/ 466 | 467 | # Visual Studio 6 build log 468 | *.plg 469 | 470 | # Visual Studio 6 workspace options file 471 | *.opt 472 | 473 | # Visual Studio 6 auto-generated workspace file (contains which files were open etc.) 
474 | *.vbw 475 | 476 | # Visual Studio LightSwitch build output 477 | **/*.HTMLClient/GeneratedArtifacts 478 | **/*.DesktopClient/GeneratedArtifacts 479 | **/*.DesktopClient/ModelManifest.xml 480 | **/*.Server/GeneratedArtifacts 481 | **/*.Server/ModelManifest.xml 482 | _Pvt_Extensions 483 | 484 | # Paket dependency manager 485 | .paket/paket.exe 486 | paket-files/ 487 | 488 | # FAKE - F# Make 489 | .fake/ 490 | 491 | # CodeRush personal settings 492 | .cr/personal 493 | 494 | # Python Tools for Visual Studio (PTVS) 495 | *.pyc 496 | 497 | # Cake - Uncomment if you are using it 498 | # tools/** 499 | # !tools/packages.config 500 | 501 | # Tabs Studio 502 | *.tss 503 | 504 | # Telerik's JustMock configuration file 505 | *.jmconfig 506 | 507 | # BizTalk build output 508 | *.btp.cs 509 | *.btm.cs 510 | *.odx.cs 511 | *.xsd.cs 512 | 513 | # OpenCover UI analysis results 514 | OpenCover/ 515 | 516 | # Azure Stream Analytics local run output 517 | ASALocalRun/ 518 | 519 | # MSBuild Binary and Structured Log 520 | *.binlog 521 | 522 | # NVidia Nsight GPU debugger configuration file 523 | *.nvuser 524 | 525 | # MFractors (Xamarin productivity tool) working folder 526 | .mfractor/ 527 | 528 | # Local History for Visual Studio 529 | .localhistory/ 530 | 531 | # BeatPulse healthcheck temp database 532 | healthchecksdb 533 | 534 | # Backup folder for Package Reference Convert tool in Visual Studio 2017 535 | MigrationBackup/ 536 | 537 | # End of https://www.gitignore.io/api/vim,linux,macos,python,visualstudio,jupyternotebooks 538 | 539 | *.pkl 540 | *.npy 541 | *.joblib 542 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | repos: 2 | - repo: https://github.com/pre-commit/pre-commit-hooks 3 | rev: v2.2.3 4 | hooks: 5 | - id: trailing-whitespace 6 | - id: check-yaml 7 | name: Check Yaml 8 | description: This hook checks yaml files for parseable syntax. 9 | entry: check-yaml 10 | language: python 11 | types: [yaml] 12 | - id: detect-private-key 13 | name: Detect Private Key 14 | description: Detects the presence of private keys 15 | entry: detect-private-key 16 | language: python 17 | types: [text] 18 | - id: end-of-file-fixer 19 | name: Fix End of Files 20 | description: Ensures that a file is either empty, or ends with one newline. 
21 |       entry: end-of-file-fixer
22 |       language: python
23 |       types: [text]
24 |     - id: fix-encoding-pragma
25 |       name: Fix python encoding pragma
26 |       language: python
27 |       entry: fix-encoding-pragma
28 |       description: "Add # -*- coding: utf-8 -*- to the top of python files"
29 |       types: [python]
30 |     - id: mixed-line-ending
31 |       name: Mixed line ending
32 |       description: Replaces or checks mixed line ending
33 |       entry: mixed-line-ending
34 |       language: python
35 |       types: [text]
36 | 
37 |   # - repo: https://github.com/pre-commit/mirrors-mypy
38 |   #   rev: ""  # Use the sha / tag you want to point at
39 |   #   hooks:
40 |   #     - id: mypy
41 |   #       exclude: (versioneer.py|_version.py|^docs/)
42 | 
43 |   - repo: https://github.com/psf/black
44 |     rev: 20.8b1
45 |     hooks:
46 |       - id: black
47 |         language_version: python3
48 |         name: black
49 |         exclude: versioneer.py|tobascco/_version.py|^docs/
50 |         entry: black
51 |         types: [python]
52 | 
53 |   - repo: https://github.com/pre-commit/mirrors-isort
54 |     rev: "v5.6.4"  # Use the revision sha / tag you want to point at
55 |     hooks:
56 |       - id: isort
57 |         name: isort
58 |         language: system
59 |         exclude: versioneer.py|tobascco/_version.py|^docs/
60 |         types: [python]
61 |         entry: isort
62 | 
63 |   - repo: local
64 |     hooks:
65 |       - id: pylint
66 |         language: system
67 |         types: [file, python]
68 |         exclude: versioneer.py|tobascco/_version.py|^docs
69 |         name: pylint
70 |         entry: pylint
--------------------------------------------------------------------------------
/.pylintrc:
--------------------------------------------------------------------------------
1 | [MASTER]
2 | ignore=docs
3 | ignore_patterns=versioneer.py,*version.py
4 | 
5 | 
6 | [MESSAGES CONTROL]
7 | disable = C0330, C0326, locally-disabled, fixme
8 | 
9 | 
10 | [format]
11 | max-line-length = 88
12 | 
13 | 
14 | [SIMILARITIES]
15 | 
16 | # Minimum lines number of a similarity.
17 | min-similarity-lines=10
--------------------------------------------------------------------------------
/CHANGELOG.md:
--------------------------------------------------------------------------------
1 | # Changelog
2 | 
3 | ## 0.1.0 (2021-03-26)
4 | 
5 | 
6 | ### Documentation
7 | 
8 | * move readme out again ([3c1f438](https://www.github.com/peteboyd/tobascco/commit/3c1f43800e9f5a849bcbf0f9a9b640ce7b481463))
9 | * start setting up sphinx ([671b456](https://www.github.com/peteboyd/tobascco/commit/671b4563b658d62168440556304ec4a2cdb71f1d))
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 |                     GNU GENERAL PUBLIC LICENSE
2 |                        Version 2, June 1991
3 | 
4 |  Copyright (C) 1989, 1991 Free Software Foundation, Inc.,
5 |  51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
6 |  Everyone is permitted to copy and distribute verbatim copies
7 |  of this license document, but changing it is not allowed.
8 | 
9 |                             Preamble
10 | 
11 |   The licenses for most software are designed to take away your
12 | freedom to share and change it.  By contrast, the GNU General Public
13 | License is intended to guarantee your freedom to share and change free
14 | software--to make sure the software is free for all its users.  This
15 | General Public License applies to most of the Free Software
16 | Foundation's software and to any other program whose authors commit to
17 | using it.  (Some other Free Software Foundation software is covered by
18 | the GNU Lesser General Public License instead.)
You can apply it to 19 | your programs, too. 20 | 21 | When we speak of free software, we are referring to freedom, not 22 | price. Our General Public Licenses are designed to make sure that you 23 | have the freedom to distribute copies of free software (and charge for 24 | this service if you wish), that you receive source code or can get it 25 | if you want it, that you can change the software or use pieces of it 26 | in new free programs; and that you know you can do these things. 27 | 28 | To protect your rights, we need to make restrictions that forbid 29 | anyone to deny you these rights or to ask you to surrender the rights. 30 | These restrictions translate to certain responsibilities for you if you 31 | distribute copies of the software, or if you modify it. 32 | 33 | For example, if you distribute copies of such a program, whether 34 | gratis or for a fee, you must give the recipients all the rights that 35 | you have. You must make sure that they, too, receive or can get the 36 | source code. And you must show them these terms so they know their 37 | rights. 38 | 39 | We protect your rights with two steps: (1) copyright the software, and 40 | (2) offer you this license which gives you legal permission to copy, 41 | distribute and/or modify the software. 42 | 43 | Also, for each author's protection and ours, we want to make certain 44 | that everyone understands that there is no warranty for this free 45 | software. If the software is modified by someone else and passed on, we 46 | want its recipients to know that what they have is not the original, so 47 | that any problems introduced by others will not reflect on the original 48 | authors' reputations. 49 | 50 | Finally, any free program is threatened constantly by software 51 | patents. We wish to avoid the danger that redistributors of a free 52 | program will individually obtain patent licenses, in effect making the 53 | program proprietary. To prevent this, we have made it clear that any 54 | patent must be licensed for everyone's free use or not licensed at all. 55 | 56 | The precise terms and conditions for copying, distribution and 57 | modification follow. 58 | 59 | GNU GENERAL PUBLIC LICENSE 60 | TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION 61 | 62 | 0. This License applies to any program or other work which contains 63 | a notice placed by the copyright holder saying it may be distributed 64 | under the terms of this General Public License. The "Program", below, 65 | refers to any such program or work, and a "work based on the Program" 66 | means either the Program or any derivative work under copyright law: 67 | that is to say, a work containing the Program or a portion of it, 68 | either verbatim or with modifications and/or translated into another 69 | language. (Hereinafter, translation is included without limitation in 70 | the term "modification".) Each licensee is addressed as "you". 71 | 72 | Activities other than copying, distribution and modification are not 73 | covered by this License; they are outside its scope. The act of 74 | running the Program is not restricted, and the output from the Program 75 | is covered only if its contents constitute a work based on the 76 | Program (independent of having been made by running the Program). 77 | Whether that is true depends on what the Program does. 78 | 79 | 1. 
You may copy and distribute verbatim copies of the Program's 80 | source code as you receive it, in any medium, provided that you 81 | conspicuously and appropriately publish on each copy an appropriate 82 | copyright notice and disclaimer of warranty; keep intact all the 83 | notices that refer to this License and to the absence of any warranty; 84 | and give any other recipients of the Program a copy of this License 85 | along with the Program. 86 | 87 | You may charge a fee for the physical act of transferring a copy, and 88 | you may at your option offer warranty protection in exchange for a fee. 89 | 90 | 2. You may modify your copy or copies of the Program or any portion 91 | of it, thus forming a work based on the Program, and copy and 92 | distribute such modifications or work under the terms of Section 1 93 | above, provided that you also meet all of these conditions: 94 | 95 | a) You must cause the modified files to carry prominent notices 96 | stating that you changed the files and the date of any change. 97 | 98 | b) You must cause any work that you distribute or publish, that in 99 | whole or in part contains or is derived from the Program or any 100 | part thereof, to be licensed as a whole at no charge to all third 101 | parties under the terms of this License. 102 | 103 | c) If the modified program normally reads commands interactively 104 | when run, you must cause it, when started running for such 105 | interactive use in the most ordinary way, to print or display an 106 | announcement including an appropriate copyright notice and a 107 | notice that there is no warranty (or else, saying that you provide 108 | a warranty) and that users may redistribute the program under 109 | these conditions, and telling the user how to view a copy of this 110 | License. (Exception: if the Program itself is interactive but 111 | does not normally print such an announcement, your work based on 112 | the Program is not required to print an announcement.) 113 | 114 | These requirements apply to the modified work as a whole. If 115 | identifiable sections of that work are not derived from the Program, 116 | and can be reasonably considered independent and separate works in 117 | themselves, then this License, and its terms, do not apply to those 118 | sections when you distribute them as separate works. But when you 119 | distribute the same sections as part of a whole which is a work based 120 | on the Program, the distribution of the whole must be on the terms of 121 | this License, whose permissions for other licensees extend to the 122 | entire whole, and thus to each and every part regardless of who wrote it. 123 | 124 | Thus, it is not the intent of this section to claim rights or contest 125 | your rights to work written entirely by you; rather, the intent is to 126 | exercise the right to control the distribution of derivative or 127 | collective works based on the Program. 128 | 129 | In addition, mere aggregation of another work not based on the Program 130 | with the Program (or with a work based on the Program) on a volume of 131 | a storage or distribution medium does not bring the other work under 132 | the scope of this License. 133 | 134 | 3. 
You may copy and distribute the Program (or a work based on it, 135 | under Section 2) in object code or executable form under the terms of 136 | Sections 1 and 2 above provided that you also do one of the following: 137 | 138 | a) Accompany it with the complete corresponding machine-readable 139 | source code, which must be distributed under the terms of Sections 140 | 1 and 2 above on a medium customarily used for software interchange; or, 141 | 142 | b) Accompany it with a written offer, valid for at least three 143 | years, to give any third party, for a charge no more than your 144 | cost of physically performing source distribution, a complete 145 | machine-readable copy of the corresponding source code, to be 146 | distributed under the terms of Sections 1 and 2 above on a medium 147 | customarily used for software interchange; or, 148 | 149 | c) Accompany it with the information you received as to the offer 150 | to distribute corresponding source code. (This alternative is 151 | allowed only for noncommercial distribution and only if you 152 | received the program in object code or executable form with such 153 | an offer, in accord with Subsection b above.) 154 | 155 | The source code for a work means the preferred form of the work for 156 | making modifications to it. For an executable work, complete source 157 | code means all the source code for all modules it contains, plus any 158 | associated interface definition files, plus the scripts used to 159 | control compilation and installation of the executable. However, as a 160 | special exception, the source code distributed need not include 161 | anything that is normally distributed (in either source or binary 162 | form) with the major components (compiler, kernel, and so on) of the 163 | operating system on which the executable runs, unless that component 164 | itself accompanies the executable. 165 | 166 | If distribution of executable or object code is made by offering 167 | access to copy from a designated place, then offering equivalent 168 | access to copy the source code from the same place counts as 169 | distribution of the source code, even though third parties are not 170 | compelled to copy the source along with the object code. 171 | 172 | 4. You may not copy, modify, sublicense, or distribute the Program 173 | except as expressly provided under this License. Any attempt 174 | otherwise to copy, modify, sublicense or distribute the Program is 175 | void, and will automatically terminate your rights under this License. 176 | However, parties who have received copies, or rights, from you under 177 | this License will not have their licenses terminated so long as such 178 | parties remain in full compliance. 179 | 180 | 5. You are not required to accept this License, since you have not 181 | signed it. However, nothing else grants you permission to modify or 182 | distribute the Program or its derivative works. These actions are 183 | prohibited by law if you do not accept this License. Therefore, by 184 | modifying or distributing the Program (or any work based on the 185 | Program), you indicate your acceptance of this License to do so, and 186 | all its terms and conditions for copying, distributing or modifying 187 | the Program or works based on it. 188 | 189 | 6. Each time you redistribute the Program (or any work based on the 190 | Program), the recipient automatically receives a license from the 191 | original licensor to copy, distribute or modify the Program subject to 192 | these terms and conditions. 
You may not impose any further 193 | restrictions on the recipients' exercise of the rights granted herein. 194 | You are not responsible for enforcing compliance by third parties to 195 | this License. 196 | 197 | 7. If, as a consequence of a court judgment or allegation of patent 198 | infringement or for any other reason (not limited to patent issues), 199 | conditions are imposed on you (whether by court order, agreement or 200 | otherwise) that contradict the conditions of this License, they do not 201 | excuse you from the conditions of this License. If you cannot 202 | distribute so as to satisfy simultaneously your obligations under this 203 | License and any other pertinent obligations, then as a consequence you 204 | may not distribute the Program at all. For example, if a patent 205 | license would not permit royalty-free redistribution of the Program by 206 | all those who receive copies directly or indirectly through you, then 207 | the only way you could satisfy both it and this License would be to 208 | refrain entirely from distribution of the Program. 209 | 210 | If any portion of this section is held invalid or unenforceable under 211 | any particular circumstance, the balance of the section is intended to 212 | apply and the section as a whole is intended to apply in other 213 | circumstances. 214 | 215 | It is not the purpose of this section to induce you to infringe any 216 | patents or other property right claims or to contest validity of any 217 | such claims; this section has the sole purpose of protecting the 218 | integrity of the free software distribution system, which is 219 | implemented by public license practices. Many people have made 220 | generous contributions to the wide range of software distributed 221 | through that system in reliance on consistent application of that 222 | system; it is up to the author/donor to decide if he or she is willing 223 | to distribute software through any other system and a licensee cannot 224 | impose that choice. 225 | 226 | This section is intended to make thoroughly clear what is believed to 227 | be a consequence of the rest of this License. 228 | 229 | 8. If the distribution and/or use of the Program is restricted in 230 | certain countries either by patents or by copyrighted interfaces, the 231 | original copyright holder who places the Program under this License 232 | may add an explicit geographical distribution limitation excluding 233 | those countries, so that distribution is permitted only in or among 234 | countries not thus excluded. In such case, this License incorporates 235 | the limitation as if written in the body of this License. 236 | 237 | 9. The Free Software Foundation may publish revised and/or new versions 238 | of the General Public License from time to time. Such new versions will 239 | be similar in spirit to the present version, but may differ in detail to 240 | address new problems or concerns. 241 | 242 | Each version is given a distinguishing version number. If the Program 243 | specifies a version number of this License which applies to it and "any 244 | later version", you have the option of following the terms and conditions 245 | either of that version or of any later version published by the Free 246 | Software Foundation. If the Program does not specify a version number of 247 | this License, you may choose any version ever published by the Free Software 248 | Foundation. 249 | 250 | 10. 
If you wish to incorporate parts of the Program into other free
251 | programs whose distribution conditions are different, write to the author
252 | to ask for permission.  For software which is copyrighted by the Free
253 | Software Foundation, write to the Free Software Foundation; we sometimes
254 | make exceptions for this.  Our decision will be guided by the two goals
255 | of preserving the free status of all derivatives of our free software and
256 | of promoting the sharing and reuse of software generally.
257 | 
258 |                             NO WARRANTY
259 | 
260 |   11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY
261 | FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW.  EXCEPT WHEN
262 | OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES
263 | PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED
264 | OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
265 | MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.  THE ENTIRE RISK AS
266 | TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU.  SHOULD THE
267 | PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING,
268 | REPAIR OR CORRECTION.
269 | 
270 |   12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
271 | WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR
272 | REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES,
273 | INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING
274 | OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED
275 | TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY
276 | YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER
277 | PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE
278 | POSSIBILITY OF SUCH DAMAGES.
279 | 
280 |                      END OF TERMS AND CONDITIONS
281 | 
282 |             How to Apply These Terms to Your New Programs
283 | 
284 |   If you develop a new program, and you want it to be of the greatest
285 | possible use to the public, the best way to achieve this is to make it
286 | free software which everyone can redistribute and change under these terms.
287 | 
288 |   To do so, attach the following notices to the program.  It is safest
289 | to attach them to the start of each source file to most effectively
290 | convey the exclusion of warranty; and each file should have at least
291 | the "copyright" line and a pointer to where the full notice is found.
292 | 
293 |     <one line to give the program's name and a brief idea of what it does.>
294 |     Copyright (C) <year>  <name of author>
295 | 
296 |     This program is free software; you can redistribute it and/or modify
297 |     it under the terms of the GNU General Public License as published by
298 |     the Free Software Foundation; either version 2 of the License, or
299 |     (at your option) any later version.
300 | 
301 |     This program is distributed in the hope that it will be useful,
302 |     but WITHOUT ANY WARRANTY; without even the implied warranty of
303 |     MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
304 |     GNU General Public License for more details.
305 | 
306 |     You should have received a copy of the GNU General Public License along
307 |     with this program; if not, write to the Free Software Foundation, Inc.,
308 |     51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
309 | 
310 | Also add information on how to contact you by electronic and paper mail.
311 | 
312 | If the program is interactive, make it output a short notice like this
313 | when it starts in an interactive mode:
314 | 
315 |     Gnomovision version 69, Copyright (C) year name of author
316 |     Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
317 |     This is free software, and you are welcome to redistribute it
318 |     under certain conditions; type `show c' for details.
319 | 
320 | The hypothetical commands `show w' and `show c' should show the appropriate
321 | parts of the General Public License.  Of course, the commands you use may
322 | be called something other than `show w' and `show c'; they could even be
323 | mouse-clicks or menu items--whatever suits your program.
324 | 
325 | You should also get your employer (if you work as a programmer) or your
326 | school, if any, to sign a "copyright disclaimer" for the program, if
327 | necessary.  Here is a sample; alter the names:
328 | 
329 |   Yoyodyne, Inc., hereby disclaims all copyright interest in the program
330 |   `Gnomovision' (which makes passes at compilers) written by James Hacker.
331 | 
332 |   <signature of Ty Coon>, 1 April 1989
333 |   Ty Coon, President of Vice
334 | 
335 | This General Public License does not permit incorporating your program into
336 | proprietary programs.  If your program is a subroutine library, you may
337 | consider it more useful to permit linking proprietary applications with the
338 | library.  If this is what you want to do, use the GNU Lesser General
339 | Public License instead of this License.
340 | 
--------------------------------------------------------------------------------
/MANIFEST.in:
--------------------------------------------------------------------------------
1 | include versioneer.py
2 | include tobascco/_version.py
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # tobascco
2 | 
3 | ## Installation
4 | 
5 | You need to first install openbabel and nlopt (we're working on a conda recipe to skip this step):
6 | 
7 | ```
8 | conda install -c conda-forge openbabel nlopt
9 | ```
10 | 
11 | Then you can install the package:
12 | 
13 | ```
14 | pip install git+https://github.com/peteboyd/tobascco
15 | ```
16 | 
17 | This automatically installs a run script that is appropriate for most use cases.
18 | 
19 | ## Usage
20 | 
21 | ### Assembling MOFs
22 | 
23 | ### SBU databases
24 | 
25 | A key part of the code is the set of SBU databases (metal nodes, organic linkers); some defaults are shipped with the package. These databases are currently file based, i.e., plain text files that are parsed on every run of the code and to which new SBUs are appended.
26 | 
27 | #### Extending the SBU database
28 | 
29 | New entries can be added to the SBU database using the `createinput.py` module (or the job type `create_sbu_input_files=True` in an input file), as sketched below.
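30 | 
31 | For illustration only, an input file enabling this job type might look like the sketch below; the `[job]` section name is a placeholder, `create_sbu_input_files` is the only option taken from this README, and the packaged `defaults.ini` lists the sections and options that are actually supported:
32 | 
33 | ```
34 | # hypothetical sketch; check defaults.ini for the real section and option names
35 | [job]
36 | create_sbu_input_files = True
37 | ```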
38 | 
39 | ## Reference
40 | 
41 | If you use this code, please cite [Boyd, P. G.; Woo, T. K. A Generalized Method for Constructing Hypothetical Nanoporous Materials of Any Net Topology from Graph Theory. CrystEngComm 2016, 18 (21), 3777–3792.](https://pubs.rsc.org/--/content/articlelanding/2016/ce/c6ce00407e)
42 | 
--------------------------------------------------------------------------------
/docs/Makefile:
--------------------------------------------------------------------------------
1 | # Minimal makefile for Sphinx documentation
2 | #
3 | 
4 | # You can set these variables from the command line, and also
5 | # from the environment for the first two.
6 | SPHINXOPTS    ?=
7 | SPHINXBUILD   ?= sphinx-build
8 | SOURCEDIR     = source
9 | BUILDDIR      = build
10 | 
11 | # Put it first so that "make" without argument is like "make help".
12 | help:
13 | 	@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
14 | 
15 | .PHONY: help Makefile
16 | 
17 | # Catch-all target: route all unknown targets to Sphinx using the new
18 | # "make mode" option.  $(O) is meant as a shortcut for $(SPHINXOPTS).
19 | %: Makefile
20 | 	@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
--------------------------------------------------------------------------------
/docs/make.bat:
--------------------------------------------------------------------------------
1 | @ECHO OFF
2 | 
3 | pushd %~dp0
4 | 
5 | REM Command file for Sphinx documentation
6 | 
7 | if "%SPHINXBUILD%" == "" (
8 | 	set SPHINXBUILD=sphinx-build
9 | )
10 | set SOURCEDIR=source
11 | set BUILDDIR=build
12 | 
13 | if "%1" == "" goto help
14 | 
15 | %SPHINXBUILD% >NUL 2>NUL
16 | if errorlevel 9009 (
17 | 	echo.
18 | 	echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
19 | 	echo.installed, then set the SPHINXBUILD environment variable to point
20 | 	echo.to the full path of the 'sphinx-build' executable. Alternatively you
21 | 	echo.may add the Sphinx directory to PATH.
22 | 	echo.
23 | 	echo.If you don't have Sphinx installed, grab it from
24 | 	echo.http://sphinx-doc.org/
25 | 	exit /b 1
26 | )
27 | 
28 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
29 | goto end
30 | 
31 | :help
32 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
33 | 
34 | :end
35 | popd
--------------------------------------------------------------------------------
/docs/setup.cfg:
--------------------------------------------------------------------------------
1 | 
2 | # See the docstring in versioneer.py for instructions. Note that you must
3 | # re-run 'versioneer.py setup' after changing this section, and commit the
4 | # resulting files.
5 | 
6 | [versioneer]
7 | #VCS = git
8 | #style = pep440
9 | #versionfile_source =
10 | #versionfile_build =
11 | #tag_prefix =
12 | #parentdir_prefix =
--------------------------------------------------------------------------------
/docs/source/conf.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Configuration file for the Sphinx documentation builder.
3 | #
4 | # This file only contains a selection of the most common options. For a full
5 | # list see the documentation:
6 | # https://www.sphinx-doc.org/en/master/usage/configuration.html
7 | 
8 | # -- Path setup --------------------------------------------------------------
9 | 
10 | # If extensions (or modules to document with autodoc) are in another directory,
11 | # add these directories to sys.path here. If the directory is relative to the
12 | # documentation root, use os.path.abspath to make it absolute, like shown here.
13 | #
14 | # import os
15 | # import sys
16 | # sys.path.insert(0, os.path.abspath('.'))
17 | 
18 | 
19 | # -- Project information -----------------------------------------------------
20 | 
21 | project = 'tobascco'
22 | copyright = '2021, Peter Boyd'
23 | author = 'Peter Boyd'
24 | 
25 | 
26 | # -- General configuration ---------------------------------------------------
27 | 
28 | # Add any Sphinx extension module names here, as strings. They can be
29 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
30 | # ones.
31 | extensions = [
32 | ]
33 | 
34 | # Add any paths that contain templates here, relative to this directory.
35 | templates_path = ['_templates']
36 | 
37 | # List of patterns, relative to source directory, that match files and
38 | # directories to ignore when looking for source files.
39 | # This pattern also affects html_static_path and html_extra_path.
40 | exclude_patterns = []
41 | 
42 | 
43 | # -- Options for HTML output -------------------------------------------------
44 | 
45 | # The theme to use for HTML and HTML Help pages.  See the documentation for
46 | # a list of builtin themes.
47 | #
48 | html_theme = 'alabaster'
49 | 
50 | # Add any paths that contain custom static files (such as style sheets) here,
51 | # relative to this directory. They are copied after the builtin static files,
52 | # so a file named "default.css" will overwrite the builtin "default.css".
53 | html_static_path = ['_static']
--------------------------------------------------------------------------------
/docs/source/index.rst:
--------------------------------------------------------------------------------
1 | .. tobascco documentation master file, created by
2 |    sphinx-quickstart on Sat Jan  9 00:08:46 2021.
3 |    You can adapt this file completely to your liking, but it should at least
4 |    contain the root `toctree` directive.
5 | 
6 | Welcome to tobascco's documentation!
7 | ====================================
8 | 
9 | .. toctree::
10 |    :maxdepth: 2
11 |    :caption: Contents:
12 | 
13 | 
14 | 
15 | Indices and tables
16 | ==================
17 | 
18 | * :ref:`genindex`
19 | * :ref:`modindex`
20 | * :ref:`search`
--------------------------------------------------------------------------------
/mypy.ini:
--------------------------------------------------------------------------------
1 | [mypy-tobascco._version]
2 | ignore_errors = True
3 | 
4 | [mypy-versioneer]
5 | ignore_errors = True
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | sympy
2 | numpy
3 | rdkit-pypi
4 | networkx
5 | matplotlib
6 | spglib
--------------------------------------------------------------------------------
/setup.cfg:
--------------------------------------------------------------------------------
1 | [versioneer]
2 | VCS = git
3 | style = pep440
4 | versionfile_source = tobascco/_version.py
5 | versionfile_build = tobascco/_version.py
6 | tag_prefix =
7 | parentdir_prefix =
8 | 
9 | [isort]
10 | multi_line_output = 3
11 | include_trailing_comma = True
12 | force_grid_wrap = 0
13 | use_parentheses = True
14 | ensure_newline_before_comments = True
15 | line_length = 88
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | import os
3 | 
4 | from setuptools import find_packages, setup
5 | from numpy.distutils.core import Extension
6 | from numpy.distutils.misc_util import get_numpy_include_dirs
7 | 
8 | import versioneer
9 | 
10 | include_dirs = [os.path.join(os.getcwd(), "tobascco", "src")]
11 | 
12 | 
13 | with open("requirements.txt", "r") as fh:
14 |     REQUIREMENTS = fh.readlines()
15 | 
16 | 
17 | with open("README.md", encoding="utf-8") as fh:
18 |     LONG_DESCRIPTION = fh.read()
19 | 
20 | setup(
21 |     name="tobascco",
22 |     version=versioneer.get_version(),
23 |     cmdclass=versioneer.get_cmdclass(),
24 |     description="Assembles MOFs",
25 |     setup_requires=["numpy"],
26 |     long_description=LONG_DESCRIPTION,
27 |     long_description_content_type="text/markdown",
28 |     packages=find_packages(include=["tobascco", "tobascco.*"]),
29 |     url="https://github.com/peteboyd/tobascco",
30 |     license="Apache 2.0",
31 |     install_requires=REQUIREMENTS,
32 |     extras_require={
33 |         "testing": ["pytest==6.*", "pytest-cov>=2,<4"],
34 |         "docs": [
35 |             "sphinx>=3,<5",
36 |             "sphinx-book-theme==0.*",
37 |             "sphinx-autodoc-typehints==1.*",
38 |             "sphinx-copybutton==0.*",
39 |         ],
40 |         "pre-commit": [
41 |             "pre-commit==2.*",
42 |             "pylint==2.*",
43 |             "isort==5.*",
44 |         ],
45 |         "dev": [
46 |             "versioneer==0.*",
47 |             "black>=20,<23",
48 |         ],
49 |     },
50 |     author="Peter Boyd",
51 |     author_email="peter.g.boyd@gmail.com",
52 |     classifiers=[
53 |         "Programming Language :: Python :: 3",
54 |         "Programming Language :: Python :: 3 :: Only",
55 |         "Programming Language :: Python :: 3.7",
56 |         "Programming Language :: Python :: 3.8",
57 |         "Development Status :: 4 - Beta",
58 |         "Intended Audience :: Science/Research",
59 |         "License :: OSI Approved :: Apache Software License",
60 |         "Operating System :: OS Independent",
61 |         "Topic :: Scientific/Engineering",
62 |         "Topic :: Scientific/Engineering :: Physics",
63 |         "Topic :: Scientific/Engineering :: Chemistry",
64 |         "Topic :: Software Development :: Libraries :: Python Modules",
65 |         "Topic :: Scientific/Engineering :: Artificial Intelligence",
66 |     ],
67 | 
ext_modules=[ 68 | Extension( 69 | "_nloptimize", 70 | include_dirs=include_dirs + get_numpy_include_dirs(), 71 | sources=[os.path.join(os.getcwd(), "tobascco", "src", "pyoptim.cpp")], 72 | language="c++", 73 | libraries=["nlopt"], 74 | extra_link_args=["-O"], 75 | ) 76 | ], 77 | ) 78 | -------------------------------------------------------------------------------- /tobascco/__init__.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | from ._version import get_versions 4 | 5 | __version__ = get_versions()["version"] 6 | del get_versions 7 | -------------------------------------------------------------------------------- /tobascco/_version.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | # This file helps to compute a version number in source trees obtained from 4 | # git-archive tarball (such as those provided by githubs download-from-tag 5 | # feature). Distribution tarballs (built by setup.py sdist) and build 6 | # directories (produced by setup.py build) will contain a much shorter file 7 | # that just contains the computed version number. 8 | 9 | # This file is released into the public domain. Generated by 10 | # versioneer-0.19 (https://github.com/python-versioneer/python-versioneer) 11 | 12 | """Git implementation of _version.py.""" 13 | 14 | import errno 15 | import os 16 | import re 17 | import subprocess 18 | import sys 19 | 20 | 21 | def get_keywords(): 22 | """Get the keywords needed to look up the version information.""" 23 | # these strings will be replaced by git during git-archive. 24 | # setup.py/versioneer.py will grep for the variable names, so they must 25 | # each be defined on a line of their own. _version.py will just call 26 | # get_keywords(). 
27 | git_refnames = " (HEAD -> master)" 28 | git_full = "76bceb151747a49ffacd88a372a7e869c208f79b" 29 | git_date = "2022-02-05 19:02:00 -0500" 30 | keywords = {"refnames": git_refnames, "full": git_full, "date": git_date} 31 | return keywords 32 | 33 | 34 | class VersioneerConfig: 35 | """Container for Versioneer configuration parameters.""" 36 | 37 | 38 | def get_config(): 39 | """Create, populate and return the VersioneerConfig() object.""" 40 | # these strings are filled in when 'setup.py versioneer' creates 41 | # _version.py 42 | cfg = VersioneerConfig() 43 | cfg.VCS = "git" 44 | cfg.style = "pep440" 45 | cfg.tag_prefix = "" 46 | cfg.parentdir_prefix = "" 47 | cfg.versionfile_source = "tobascco/_version.py" 48 | cfg.verbose = False 49 | return cfg 50 | 51 | 52 | class NotThisMethod(Exception): 53 | """Exception raised if a method is not valid for the current scenario.""" 54 | 55 | 56 | LONG_VERSION_PY = {} 57 | HANDLERS = {} 58 | 59 | 60 | def register_vcs_handler(vcs, method): # decorator 61 | """Create decorator to mark a method as the handler of a VCS.""" 62 | def decorate(f): 63 | """Store f in HANDLERS[vcs][method].""" 64 | if vcs not in HANDLERS: 65 | HANDLERS[vcs] = {} 66 | HANDLERS[vcs][method] = f 67 | return f 68 | return decorate 69 | 70 | 71 | def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, 72 | env=None): 73 | """Call the given command(s).""" 74 | assert isinstance(commands, list) 75 | p = None 76 | for c in commands: 77 | try: 78 | dispcmd = str([c] + args) 79 | # remember shell=False, so use git.cmd on windows, not just git 80 | p = subprocess.Popen([c] + args, cwd=cwd, env=env, 81 | stdout=subprocess.PIPE, 82 | stderr=(subprocess.PIPE if hide_stderr 83 | else None)) 84 | break 85 | except EnvironmentError: 86 | e = sys.exc_info()[1] 87 | if e.errno == errno.ENOENT: 88 | continue 89 | if verbose: 90 | print("unable to run %s" % dispcmd) 91 | print(e) 92 | return None, None 93 | else: 94 | if verbose: 95 | print("unable to find command, tried %s" % (commands,)) 96 | return None, None 97 | stdout = p.communicate()[0].strip().decode() 98 | if p.returncode != 0: 99 | if verbose: 100 | print("unable to run %s (error)" % dispcmd) 101 | print("stdout was %s" % stdout) 102 | return None, p.returncode 103 | return stdout, p.returncode 104 | 105 | 106 | def versions_from_parentdir(parentdir_prefix, root, verbose): 107 | """Try to determine the version from the parent directory name. 108 | 109 | Source tarballs conventionally unpack into a directory that includes both 110 | the project name and a version string. We will also support searching up 111 | two directory levels for an appropriately named parent directory 112 | """ 113 | rootdirs = [] 114 | 115 | for i in range(3): 116 | dirname = os.path.basename(root) 117 | if dirname.startswith(parentdir_prefix): 118 | return {"version": dirname[len(parentdir_prefix):], 119 | "full-revisionid": None, 120 | "dirty": False, "error": None, "date": None} 121 | else: 122 | rootdirs.append(root) 123 | root = os.path.dirname(root) # up a level 124 | 125 | if verbose: 126 | print("Tried directories %s but none started with prefix %s" % 127 | (str(rootdirs), parentdir_prefix)) 128 | raise NotThisMethod("rootdir doesn't start with parentdir_prefix") 129 | 130 | 131 | @register_vcs_handler("git", "get_keywords") 132 | def git_get_keywords(versionfile_abs): 133 | """Extract version information from the given file.""" 134 | # the code embedded in _version.py can just fetch the value of these 135 | # keywords. 
When used from setup.py, we don't want to import _version.py, 136 | # so we do it with a regexp instead. This function is not used from 137 | # _version.py. 138 | keywords = {} 139 | try: 140 | f = open(versionfile_abs, "r") 141 | for line in f.readlines(): 142 | if line.strip().startswith("git_refnames ="): 143 | mo = re.search(r'=\s*"(.*)"', line) 144 | if mo: 145 | keywords["refnames"] = mo.group(1) 146 | if line.strip().startswith("git_full ="): 147 | mo = re.search(r'=\s*"(.*)"', line) 148 | if mo: 149 | keywords["full"] = mo.group(1) 150 | if line.strip().startswith("git_date ="): 151 | mo = re.search(r'=\s*"(.*)"', line) 152 | if mo: 153 | keywords["date"] = mo.group(1) 154 | f.close() 155 | except EnvironmentError: 156 | pass 157 | return keywords 158 | 159 | 160 | @register_vcs_handler("git", "keywords") 161 | def git_versions_from_keywords(keywords, tag_prefix, verbose): 162 | """Get version information from git keywords.""" 163 | if not keywords: 164 | raise NotThisMethod("no keywords at all, weird") 165 | date = keywords.get("date") 166 | if date is not None: 167 | # Use only the last line. Previous lines may contain GPG signature 168 | # information. 169 | date = date.splitlines()[-1] 170 | 171 | # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant 172 | # datestamp. However we prefer "%ci" (which expands to an "ISO-8601 173 | # -like" string, which we must then edit to make compliant), because 174 | # it's been around since git-1.5.3, and it's too difficult to 175 | # discover which version we're using, or to work around using an 176 | # older one. 177 | date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) 178 | refnames = keywords["refnames"].strip() 179 | if refnames.startswith("$Format"): 180 | if verbose: 181 | print("keywords are unexpanded, not using") 182 | raise NotThisMethod("unexpanded keywords, not a git-archive tarball") 183 | refs = set([r.strip() for r in refnames.strip("()").split(",")]) 184 | # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of 185 | # just "foo-1.0". If we see a "tag: " prefix, prefer those. 186 | TAG = "tag: " 187 | tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)]) 188 | if not tags: 189 | # Either we're using git < 1.8.3, or there really are no tags. We use 190 | # a heuristic: assume all version tags have a digit. The old git %d 191 | # expansion behaves like git log --decorate=short and strips out the 192 | # refs/heads/ and refs/tags/ prefixes that would let us distinguish 193 | # between branches and tags. By ignoring refnames without digits, we 194 | # filter out many common branch names like "release" and 195 | # "stabilization", as well as "HEAD" and "master". 196 | tags = set([r for r in refs if re.search(r'\d', r)]) 197 | if verbose: 198 | print("discarding '%s', no digits" % ",".join(refs - tags)) 199 | if verbose: 200 | print("likely tags: %s" % ",".join(sorted(tags))) 201 | for ref in sorted(tags): 202 | # sorting will prefer e.g. 
"2.0" over "2.0rc1" 203 | if ref.startswith(tag_prefix): 204 | r = ref[len(tag_prefix):] 205 | if verbose: 206 | print("picking %s" % r) 207 | return {"version": r, 208 | "full-revisionid": keywords["full"].strip(), 209 | "dirty": False, "error": None, 210 | "date": date} 211 | # no suitable tags, so version is "0+unknown", but full hex is still there 212 | if verbose: 213 | print("no suitable tags, using unknown + full revision id") 214 | return {"version": "0+unknown", 215 | "full-revisionid": keywords["full"].strip(), 216 | "dirty": False, "error": "no suitable tags", "date": None} 217 | 218 | 219 | @register_vcs_handler("git", "pieces_from_vcs") 220 | def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): 221 | """Get version from 'git describe' in the root of the source tree. 222 | 223 | This only gets called if the git-archive 'subst' keywords were *not* 224 | expanded, and _version.py hasn't already been rewritten with a short 225 | version string, meaning we're inside a checked out source tree. 226 | """ 227 | GITS = ["git"] 228 | if sys.platform == "win32": 229 | GITS = ["git.cmd", "git.exe"] 230 | 231 | out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root, 232 | hide_stderr=True) 233 | if rc != 0: 234 | if verbose: 235 | print("Directory %s not under git control" % root) 236 | raise NotThisMethod("'git rev-parse --git-dir' returned error") 237 | 238 | # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] 239 | # if there isn't one, this yields HEX[-dirty] (no NUM) 240 | describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty", 241 | "--always", "--long", 242 | "--match", "%s*" % tag_prefix], 243 | cwd=root) 244 | # --long was added in git-1.5.5 245 | if describe_out is None: 246 | raise NotThisMethod("'git describe' failed") 247 | describe_out = describe_out.strip() 248 | full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root) 249 | if full_out is None: 250 | raise NotThisMethod("'git rev-parse' failed") 251 | full_out = full_out.strip() 252 | 253 | pieces = {} 254 | pieces["long"] = full_out 255 | pieces["short"] = full_out[:7] # maybe improved later 256 | pieces["error"] = None 257 | 258 | # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] 259 | # TAG might have hyphens. 260 | git_describe = describe_out 261 | 262 | # look for -dirty suffix 263 | dirty = git_describe.endswith("-dirty") 264 | pieces["dirty"] = dirty 265 | if dirty: 266 | git_describe = git_describe[:git_describe.rindex("-dirty")] 267 | 268 | # now we have TAG-NUM-gHEX or HEX 269 | 270 | if "-" in git_describe: 271 | # TAG-NUM-gHEX 272 | mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) 273 | if not mo: 274 | # unparseable. Maybe git-describe is misbehaving? 
275 | pieces["error"] = ("unable to parse git-describe output: '%s'" 276 | % describe_out) 277 | return pieces 278 | 279 | # tag 280 | full_tag = mo.group(1) 281 | if not full_tag.startswith(tag_prefix): 282 | if verbose: 283 | fmt = "tag '%s' doesn't start with prefix '%s'" 284 | print(fmt % (full_tag, tag_prefix)) 285 | pieces["error"] = ("tag '%s' doesn't start with prefix '%s'" 286 | % (full_tag, tag_prefix)) 287 | return pieces 288 | pieces["closest-tag"] = full_tag[len(tag_prefix):] 289 | 290 | # distance: number of commits since tag 291 | pieces["distance"] = int(mo.group(2)) 292 | 293 | # commit: short hex revision ID 294 | pieces["short"] = mo.group(3) 295 | 296 | else: 297 | # HEX: no tags 298 | pieces["closest-tag"] = None 299 | count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"], 300 | cwd=root) 301 | pieces["distance"] = int(count_out) # total number of commits 302 | 303 | # commit date: see ISO-8601 comment in git_versions_from_keywords() 304 | date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"], 305 | cwd=root)[0].strip() 306 | # Use only the last line. Previous lines may contain GPG signature 307 | # information. 308 | date = date.splitlines()[-1] 309 | pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) 310 | 311 | return pieces 312 | 313 | 314 | def plus_or_dot(pieces): 315 | """Return a + if we don't already have one, else return a .""" 316 | if "+" in pieces.get("closest-tag", ""): 317 | return "." 318 | return "+" 319 | 320 | 321 | def render_pep440(pieces): 322 | """Build up version string, with post-release "local version identifier". 323 | 324 | Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you 325 | get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty 326 | 327 | Exceptions: 328 | 1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty] 329 | """ 330 | if pieces["closest-tag"]: 331 | rendered = pieces["closest-tag"] 332 | if pieces["distance"] or pieces["dirty"]: 333 | rendered += plus_or_dot(pieces) 334 | rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) 335 | if pieces["dirty"]: 336 | rendered += ".dirty" 337 | else: 338 | # exception #1 339 | rendered = "0+untagged.%d.g%s" % (pieces["distance"], 340 | pieces["short"]) 341 | if pieces["dirty"]: 342 | rendered += ".dirty" 343 | return rendered 344 | 345 | 346 | def render_pep440_pre(pieces): 347 | """TAG[.post0.devDISTANCE] -- No -dirty. 348 | 349 | Exceptions: 350 | 1: no tags. 0.post0.devDISTANCE 351 | """ 352 | if pieces["closest-tag"]: 353 | rendered = pieces["closest-tag"] 354 | if pieces["distance"]: 355 | rendered += ".post0.dev%d" % pieces["distance"] 356 | else: 357 | # exception #1 358 | rendered = "0.post0.dev%d" % pieces["distance"] 359 | return rendered 360 | 361 | 362 | def render_pep440_post(pieces): 363 | """TAG[.postDISTANCE[.dev0]+gHEX] . 364 | 365 | The ".dev0" means dirty. Note that .dev0 sorts backwards 366 | (a dirty tree will appear "older" than the corresponding clean one), 367 | but you shouldn't be releasing software with -dirty anyways. 368 | 369 | Exceptions: 370 | 1: no tags. 
0.postDISTANCE[.dev0] 371 | """ 372 | if pieces["closest-tag"]: 373 | rendered = pieces["closest-tag"] 374 | if pieces["distance"] or pieces["dirty"]: 375 | rendered += ".post%d" % pieces["distance"] 376 | if pieces["dirty"]: 377 | rendered += ".dev0" 378 | rendered += plus_or_dot(pieces) 379 | rendered += "g%s" % pieces["short"] 380 | else: 381 | # exception #1 382 | rendered = "0.post%d" % pieces["distance"] 383 | if pieces["dirty"]: 384 | rendered += ".dev0" 385 | rendered += "+g%s" % pieces["short"] 386 | return rendered 387 | 388 | 389 | def render_pep440_old(pieces): 390 | """TAG[.postDISTANCE[.dev0]] . 391 | 392 | The ".dev0" means dirty. 393 | 394 | Exceptions: 395 | 1: no tags. 0.postDISTANCE[.dev0] 396 | """ 397 | if pieces["closest-tag"]: 398 | rendered = pieces["closest-tag"] 399 | if pieces["distance"] or pieces["dirty"]: 400 | rendered += ".post%d" % pieces["distance"] 401 | if pieces["dirty"]: 402 | rendered += ".dev0" 403 | else: 404 | # exception #1 405 | rendered = "0.post%d" % pieces["distance"] 406 | if pieces["dirty"]: 407 | rendered += ".dev0" 408 | return rendered 409 | 410 | 411 | def render_git_describe(pieces): 412 | """TAG[-DISTANCE-gHEX][-dirty]. 413 | 414 | Like 'git describe --tags --dirty --always'. 415 | 416 | Exceptions: 417 | 1: no tags. HEX[-dirty] (note: no 'g' prefix) 418 | """ 419 | if pieces["closest-tag"]: 420 | rendered = pieces["closest-tag"] 421 | if pieces["distance"]: 422 | rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) 423 | else: 424 | # exception #1 425 | rendered = pieces["short"] 426 | if pieces["dirty"]: 427 | rendered += "-dirty" 428 | return rendered 429 | 430 | 431 | def render_git_describe_long(pieces): 432 | """TAG-DISTANCE-gHEX[-dirty]. 433 | 434 | Like 'git describe --tags --dirty --always -long'. 435 | The distance/hash is unconditional. 436 | 437 | Exceptions: 438 | 1: no tags. HEX[-dirty] (note: no 'g' prefix) 439 | """ 440 | if pieces["closest-tag"]: 441 | rendered = pieces["closest-tag"] 442 | rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) 443 | else: 444 | # exception #1 445 | rendered = pieces["short"] 446 | if pieces["dirty"]: 447 | rendered += "-dirty" 448 | return rendered 449 | 450 | 451 | def render(pieces, style): 452 | """Render the given version pieces into the requested style.""" 453 | if pieces["error"]: 454 | return {"version": "unknown", 455 | "full-revisionid": pieces.get("long"), 456 | "dirty": None, 457 | "error": pieces["error"], 458 | "date": None} 459 | 460 | if not style or style == "default": 461 | style = "pep440" # the default 462 | 463 | if style == "pep440": 464 | rendered = render_pep440(pieces) 465 | elif style == "pep440-pre": 466 | rendered = render_pep440_pre(pieces) 467 | elif style == "pep440-post": 468 | rendered = render_pep440_post(pieces) 469 | elif style == "pep440-old": 470 | rendered = render_pep440_old(pieces) 471 | elif style == "git-describe": 472 | rendered = render_git_describe(pieces) 473 | elif style == "git-describe-long": 474 | rendered = render_git_describe_long(pieces) 475 | else: 476 | raise ValueError("unknown style '%s'" % style) 477 | 478 | return {"version": rendered, "full-revisionid": pieces["long"], 479 | "dirty": pieces["dirty"], "error": None, 480 | "date": pieces.get("date")} 481 | 482 | 483 | def get_versions(): 484 | """Get version information or return default if unable to do so.""" 485 | # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have 486 | # __file__, we can work backwards from there to the root. 
Some 487 | # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which 488 | # case we can only use expanded keywords. 489 | 490 | cfg = get_config() 491 | verbose = cfg.verbose 492 | 493 | try: 494 | return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, 495 | verbose) 496 | except NotThisMethod: 497 | pass 498 | 499 | try: 500 | root = os.path.realpath(__file__) 501 | # versionfile_source is the relative path from the top of the source 502 | # tree (where the .git directory might live) to this file. Invert 503 | # this to find the root from __file__. 504 | for i in cfg.versionfile_source.split('/'): 505 | root = os.path.dirname(root) 506 | except NameError: 507 | return {"version": "0+unknown", "full-revisionid": None, 508 | "dirty": None, 509 | "error": "unable to find root of source tree", 510 | "date": None} 511 | 512 | try: 513 | pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose) 514 | return render(pieces, cfg.style) 515 | except NotThisMethod: 516 | pass 517 | 518 | try: 519 | if cfg.parentdir_prefix: 520 | return versions_from_parentdir(cfg.parentdir_prefix, root, verbose) 521 | except NotThisMethod: 522 | pass 523 | 524 | return {"version": "0+unknown", "full-revisionid": None, 525 | "dirty": None, 526 | "error": "unable to compute version", "date": None} 527 | -------------------------------------------------------------------------------- /tobascco/atoms.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | import numpy as np 3 | 4 | from .element_properties import WEIGHT 5 | 6 | __all__ = ["Atom"] 7 | 8 | 9 | class Atom(object): 10 | """Basic atom class for the generation of structures.""" 11 | 12 | def __init__(self, element=None): 13 | null = np.array([0.0, 0.0, 0.0, 1.0]) 14 | self.element = element 15 | # atom index 16 | self.index = 0 17 | # the index of the SBU this atom is associated with. 18 | self.sbu_index = None 19 | # order of the SBU in which it is placed in the unit cell. 20 | self.sbu_order = None 21 | self.sbu_metal = False 22 | # is this an atom that connects to another SBU? 
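        # (editor's sketch; an assumption from the surrounding code, not verified
        # against structure.py) sbu_bridge appears to collect the identifiers of
        # the connect points this atom bonds through, e.g. [1, 3].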
23 | self.sbu_bridge = [] 24 | self.force_field_type = None 25 | self.coordinates = null.copy() 26 | self.neighbours = [] 27 | 28 | def scaled_pos(self, inv_cell): 29 | return np.dot(self.coordinates[:3], inv_cell) 30 | 31 | def in_cell_scaled(self, inv_cell): 32 | return np.array([i % 1 for i in self.scaled_pos(inv_cell)]) 33 | 34 | def in_cell(self, cell, inv_cell): 35 | return np.dot(self.in_cell_scaled(inv_cell), cell) 36 | 37 | @property 38 | def mass(self): 39 | return WEIGHT[self.element] 40 | 41 | def from_config_ff(self, line): 42 | """Parse data from old config file format""" 43 | line = line.strip().split() 44 | self.element = line[0] 45 | self.force_field_type = line[1] 46 | for i, c in enumerate(line[2:]): 47 | self.coordinates[i] = float(c) 48 | 49 | def from_config(self, line): 50 | """New config file format, just element, x, y, z""" 51 | line = line.strip().split() 52 | self.element = line[0] 53 | for i, c in enumerate(line[1:]): 54 | self.coordinates[i] = float(c) 55 | 56 | def rotate(self, R): 57 | self.coordinates[:3] = np.dot(R[:3, :3], self.coordinates[:3]) 58 | 59 | def translate(self, vector): 60 | self.coordinates[:3] += vector 61 | 62 | @property 63 | def x(self): 64 | return self.coordinates[0] 65 | 66 | @property 67 | def y(self): 68 | return self.coordinates[1] 69 | 70 | @property 71 | def z(self): 72 | return self.coordinates[2] 73 | -------------------------------------------------------------------------------- /tobascco/cifer.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from datetime import date 3 | 4 | __all__ = ["CIF"] 5 | 6 | 7 | class CIF(object): 8 | def __init__(self, name="structure", file=None): 9 | self.name = name 10 | self._data = {} 11 | self._headings = {} 12 | self._element_labels = {} 13 | self.non_loops = ["data", "cell", "sym", "end"] 14 | self.block_order = ["data", "sym", "sym_loop", "cell", "atoms", "bonds"] 15 | if file is not None: 16 | self._readfile(file) 17 | 18 | def _readfile(self, filename): 19 | filestream = open(filename, "r") 20 | filelines = filestream.readlines() 21 | blocks = [] 22 | loopcount = 0 23 | loopentries = {} 24 | loopread = False 25 | blockread = False 26 | self.block_order = [] 27 | 28 | for line in filelines: 29 | line = line.replace("\n", "") 30 | if line.startswith("data_"): 31 | self.name = line[5:] 32 | self.insert_block_order("data") 33 | self.add_data("data", data_=self.name) 34 | 35 | if loopread and line.startswith("_"): 36 | loopentries[loopcount].append(line) 37 | 38 | elif loopread and not line.startswith("_"): 39 | loopread = False 40 | blockread = True 41 | 42 | elif not loopread and line.startswith("_"): 43 | block = self.get_non_loop_block(line) 44 | self.insert_block_order(block) 45 | # hopefully all non-loop entries are just single value entries, 46 | # otherwise this is invalid. 
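                # (editor's note, illustrative) e.g. "_cell_length_a   25.6690(0)"
                # splits into key "_cell_length_a" and val "25.6690(0)"; the "(0)"
                # uncertainty suffix is stripped just below.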
47 | try: 48 | key, val = line.split() 49 | except ValueError: 50 | key, val = line.split()[:2] 51 | if val.endswith("(0)"): 52 | val = val[:-3] 53 | self.add_data(block, **{key: self.general_label(val)}) 54 | 55 | if blockread and ( 56 | line.startswith("loop_") or line.startswith("_") or not line 57 | ): 58 | blockread = False 59 | 60 | if line == "loop_": 61 | loopcount += 1 62 | loopentries[loopcount] = [] 63 | loopread = True 64 | blockread = False 65 | self.insert_block_order(loopcount) 66 | 67 | if blockread: 68 | split_line = line.split() 69 | assert len(loopentries[loopcount]) == len(split_line) 70 | for key, val in zip(loopentries[loopcount], split_line): 71 | self.add_data(loopcount, **{key: self.general_label(val)}) 72 | 73 | filestream.close() 74 | 75 | def get_time(self): 76 | t = date.today() 77 | return t.strftime("%A %d %B %Y") 78 | 79 | def insert_block_order(self, name, index=None): 80 | """Adds a block to the cif file in a specified order, unless index is specified, 81 | will not override existing order""" 82 | if index is None and name in self.block_order: 83 | return 84 | elif index is None and name not in self.block_order: 85 | index = len(self.block_order) 86 | elif ( 87 | index is not None 88 | and name in self.block_order 89 | and index < len(self.block_order) 90 | ): 91 | old = self.block_order.index(name) 92 | self.block_order.pop(old) 93 | elif ( 94 | index is not None 95 | and name in self.block_order 96 | and index >= len(self.block_order) 97 | ): 98 | old = self.block_order.index(name) 99 | self.block_order.pop(old) 100 | index = len(self.block_order) 101 | self.block_order = self.block_order[:index] + [name] + self.block_order[index:] 102 | 103 | def add_data(self, block, **kwargs): 104 | self._headings.setdefault(block, []) 105 | for key, val in kwargs.items(): 106 | try: 107 | self._data[key].append(val) 108 | except KeyError: 109 | self._headings[block].append(key) 110 | if block in self.non_loops: 111 | self._data[key] = val 112 | else: 113 | self._data[key] = [val] 114 | 115 | def get_element_label(self, el): 116 | self._element_labels.setdefault(el, 0) 117 | self._element_labels[el] += 1 118 | return el + str(self._element_labels[el]) 119 | 120 | def __str__(self): 121 | line = "" 122 | for block in self.block_order: 123 | heads = self._headings[block] 124 | if block in self.non_loops: 125 | vals = zip( 126 | [CIF.label(i) for i in heads], [self._data[i] for i in heads] 127 | ) 128 | else: 129 | line += "loop_\n" + "\n".join([CIF.label(i) for i in heads]) + "\n" 130 | vals = zip(*[self._data[i] for i in heads]) 131 | for ll in vals: 132 | line += "".join(ll) + "\n" 133 | return line 134 | 135 | def get_non_loop_block(self, line): 136 | if line.startswith("_cell"): 137 | return "cell" 138 | elif line.startswith("_symmetry"): 139 | return "sym" 140 | elif line.startswith("_audit"): 141 | return "data" 142 | 143 | # terrible idea for formatting.. 
but oh well :) 144 | @staticmethod 145 | def atom_site_fract_x(x): 146 | return "%10.5f " % (x) 147 | 148 | @staticmethod 149 | def atom_site_fract_y(x): 150 | return "%10.5f " % (x) 151 | 152 | @staticmethod 153 | def atom_site_fract_z(x): 154 | return "%10.5f " % (x) 155 | 156 | @staticmethod 157 | def atom_site_label(x): 158 | return "%-7s " % (x) 159 | 160 | @staticmethod 161 | def atom_site_type_symbol(x): 162 | return "%-6s " % (x) 163 | 164 | @staticmethod 165 | def atom_site_description(x): 166 | return "%-5s " % (x) 167 | 168 | @staticmethod 169 | def geom_bond_atom_site_label_1(x): 170 | return "%-7s " % (x) 171 | 172 | @staticmethod 173 | def geom_bond_atom_site_label_2(x): 174 | return "%-7s " % (x) 175 | 176 | @staticmethod 177 | def geom_bond_distance(x): 178 | return "%7.3f " % (x) 179 | 180 | @staticmethod 181 | def geom_bond_site_symmetry_2(x): 182 | return "%-5s " % (x) 183 | 184 | @staticmethod 185 | def ccdc_geom_bond_type(x): 186 | return "%5s " % (x) 187 | 188 | @staticmethod 189 | def cell_length_a(x): 190 | return "%-7.4f " % (x) 191 | 192 | @staticmethod 193 | def cell_length_b(x): 194 | return "%-7.4f " % (x) 195 | 196 | @staticmethod 197 | def cell_length_c(x): 198 | return "%-7.4f " % (x) 199 | 200 | @staticmethod 201 | def cell_angle_alpha(x): 202 | return "%-7.4f " % (x) 203 | 204 | @staticmethod 205 | def cell_angle_beta(x): 206 | return "%-7.4f " % (x) 207 | 208 | @staticmethod 209 | def cell_angle_gamma(x): 210 | return "%-7.4f " % (x) 211 | 212 | @staticmethod 213 | def atom_site_fragment(x): 214 | return "%-4i " % (x) 215 | 216 | @staticmethod 217 | def atom_site_constraints(x): 218 | return "%-4i " % (x) 219 | 220 | @staticmethod 221 | def label(x): 222 | """special cases""" 223 | if x == "data_": 224 | return x 225 | elif x == "_symmetry_space_group_name_H_M": 226 | # replace H_M with H-M. 227 | x = x[:28] + "-" + x[29:] 228 | return "%-34s" % (x) 229 | 230 | @staticmethod 231 | def general_label(x): 232 | return "%s " % (x) 233 | -------------------------------------------------------------------------------- /tobascco/config.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | import configparser 3 | import optparse 4 | import os 5 | import re 6 | import sys 7 | from ast import literal_eval 8 | from io import StringIO 9 | from logging import error 10 | from optparse import OptionParser 11 | from pathlib import Path 12 | 13 | from . 
import __version__ 14 | 15 | THIS_DIR = os.path.dirname(os.path.realpath(__file__)) 16 | ARC_DEFAULT = os.path.join( 17 | os.path.dirname(os.path.realpath(__file__)), "data", "arc", "rcsr_0.6.0.arc" 18 | ) 19 | SBU_DEFAULT = os.path.join( 20 | os.path.dirname(os.path.realpath(__file__)), 21 | "data", 22 | "sbu", 23 | ) 24 | 25 | __all__ = ["Options", "ARC_DEFAULT", "SBU_DEFAULT"] 26 | 27 | 28 | class Options(object): 29 | def __init__(self): 30 | 31 | self._command_options() 32 | self.job = configparser.SafeConfigParser() 33 | self.csv = None 34 | self._set_paths() 35 | self._load_defaults() 36 | self._load_job() 37 | self._set_attr() 38 | self.sbu_files = [SBU_DEFAULT] 39 | self.topology_files = [ARC_DEFAULT] 40 | 41 | def _set_paths(self): 42 | if __name__ != "__main__": 43 | self.script_dir = THIS_DIR 44 | else: 45 | self.script_dir = os.path.abspath(sys.path[0]) 46 | self.job_dir = os.getcwd() 47 | self.jobname = Path(self.input_file).stem 48 | # TODO: add command line argument to search here for database files 49 | self.dot_dir = os.path.join(os.path.expanduser("~"), ".sbus") 50 | 51 | def _command_options(self): 52 | """Load data from the command line.""" 53 | 54 | usage = "%prog [options] input_file" 55 | version = "%prog " + "%f" % (__version__) 56 | parser = OptionParser(usage=usage, version=version) 57 | group = optparse.OptionGroup(parser, "Verbosity Options") 58 | group.add_option( 59 | "-s", 60 | "--silent", 61 | action="store_true", 62 | dest="silent", 63 | help="Print nothing to the console.", 64 | ) 65 | group.add_option( 66 | "-q", 67 | "--quiet", 68 | action="store_true", 69 | dest="quiet", 70 | help="Print only warnings and errors.", 71 | ) 72 | group.add_option( 73 | "-v", 74 | "--verbose", 75 | action="store_true", 76 | dest="verbose", 77 | help="Print everything to the console.", 78 | ) 79 | parser.add_option_group(group) 80 | (self.cmd_options, local_args) = parser.parse_args() 81 | 82 | if not local_args: 83 | parser.print_help() 84 | sys.exit(1) 85 | elif len(local_args) != 1: 86 | error("Only one argument required, the input file") 87 | sys.exit(1) 88 | else: 89 | self.input_file = os.path.abspath(local_args[0]) 90 | 91 | def _load_defaults(self): 92 | default_path = os.path.join(self.script_dir, "defaults.ini") 93 | try: 94 | with open(default_path, "r") as handle: 95 | default = handle.read() 96 | if not "[defaults]" in default.lower(): 97 | default = "[defaults]\n" + default 98 | default = StringIO(default) 99 | except IOError: 100 | error("Error loading defaults.ini") 101 | default = StringIO("[defaults]\n") 102 | self.job.readfp(default) 103 | 104 | def _load_job(self): 105 | """Load data from the local job name.""" 106 | if self.input_file is not None: 107 | try: 108 | with open(self.input_file, "r") as handle: 109 | job = handle.read() 110 | if not "[job]" in job.lower(): 111 | job = "[job]\n" + job 112 | job = StringIO(job) 113 | except IOError: 114 | job = StringIO("[job]\n") 115 | else: 116 | job = StringIO("[job]\n") 117 | self.job.readfp(job) 118 | 119 | def _set_attr(self): 120 | """Sets attributes to the base class. default options are over-written 121 | by job-specific options. 
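        For example, an overlap_tolerance given in the [job] section of the
        input file replaces the one read from defaults.ini, because the "job"
        items are applied after the "defaults" items below.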
122 | 123 | """ 124 | for key, value in self.job.items("defaults"): 125 | value = self.get_val("defaults", key) 126 | setattr(self, key, value) 127 | for key, value in self.job.items("job"): 128 | value = self.get_val("job", key) 129 | setattr(self, key, value) 130 | for key, value in self.cmd_options.__dict__.items(): 131 | setattr(self, key, value) 132 | 133 | def get_val(self, section, key): 134 | """Returns the proper type based on the key used.""" 135 | # known booleans 136 | booleans = [ 137 | "verbose", 138 | "quiet", 139 | "silent", 140 | "create_sbu_input_files", 141 | "calc_sbu_surface_area", 142 | "calc_max_sbu_span", 143 | "show_barycentric_net_only", 144 | "show_embedded_net", 145 | "get_run_info", 146 | "find_symmetric_h", 147 | "store_net", 148 | "use_builds", 149 | "save_builds", 150 | "count_edges_along_lattice_dirs", 151 | ] 152 | floats = [ 153 | "overlap_tolerance", 154 | "sbu_bond_length", 155 | "cell_vol_tolerance", 156 | "symmetry_precision", 157 | "opt_parameter_tol", 158 | "opt_function_tol", 159 | "third_dimension", 160 | ] 161 | integers = [ 162 | "organic_sbu_per_structure", 163 | "metal_sbu_per_structure", 164 | "max_structures", 165 | "max_edge_count", 166 | "min_edge_count", 167 | ] 168 | lists = [ 169 | "topologies", 170 | "sbu_files", 171 | "topology_files", 172 | "organic_sbus", 173 | "metal_sbus", 174 | "build_files", 175 | "ignore_topologies", 176 | ] 177 | tuple_of_tuples = ["sbu_combinations"] 178 | 179 | if key in booleans: 180 | try: 181 | val = self.job.getboolean(section, key) 182 | except ValueError: 183 | val = False 184 | # known integers 185 | elif key in integers: 186 | try: 187 | val = self.job.getint(section, key) 188 | except ValueError: 189 | val = 0 190 | # known floats 191 | elif key in floats: 192 | try: 193 | val = self.job.getfloat(section, key) 194 | except ValueError: 195 | val = 0.0 196 | except TypeError: 197 | val = None 198 | # known lists 199 | elif key in lists: 200 | p = re.compile("[,;\s]+") 201 | val = p.split(self.job.get(section, key)) 202 | try: 203 | val = [int(i) for i in val if i] 204 | except ValueError: 205 | val = [i for i in val if i] 206 | 207 | # tuple of tuples. 208 | elif key in tuple_of_tuples: 209 | val = ( 210 | literal_eval(self.job.get(section, key)) 211 | if self.job.get(section, key) 212 | else None 213 | ) 214 | # failsafe if only one tuple is presented, need to embed it. 215 | if val is not None: 216 | if isinstance(val[0], int) or isinstance(val[0], float): 217 | val = [val] 218 | else: 219 | val = self.job.get(section, key) 220 | return val 221 | 222 | def Terminate(errcode=None): 223 | if errcode is None: 224 | info("TopCryst terminated normally") 225 | else: 226 | warning("TopCryst terminated with errors!") 227 | sys.exit() 228 | -------------------------------------------------------------------------------- /tobascco/connectpoints.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | import warnings 3 | 4 | import numpy as np 5 | 6 | __all__ = ["ConnectPoint"] 7 | 8 | 9 | class ConnectPoint(object): 10 | def __init__(self): 11 | """Origin describes the point of intersection of two parameters, 12 | z describes the vector pointing along the bond (parallel), 13 | y describes a vector perpendicular to z for alignment purposes. 
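        A minimal sketch (editor's example): from_config("1  0.0 0.0 0.0  0.0 0.0 1.0")
        places the point at the origin with its bond vector z along (0, 0, 1);
        the optional y vector and symmetry flags are simply absent here.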
14 | """ 15 | null = np.array([0.0, 0.0, 0.0, 1.0]) 16 | self.identifier = None 17 | self.origin = np.zeros(4) 18 | self.y = null.copy() 19 | self.z = null.copy() 20 | # flag to determine if the point has been attached 21 | self.connected = False 22 | # order which the SBU was placed in the Structure 23 | self.sbu_vertex = None 24 | self.bonded_cp_vertex = None 25 | self.constraint = None 26 | self.special = None 27 | self.symmetry = 1 28 | self.vertex_assign = None 29 | 30 | def set_sbu_vertex(self, val): 31 | assert self.sbu_vertex is None 32 | self.sbu_vertex = val 33 | 34 | def from_config(self, line): 35 | """ Obtain the connectivity information from the config .ini file.""" 36 | line = line.strip().split() 37 | self.identifier = int(line[0]) 38 | # obtain the coordinate information. 39 | self.origin[:3] = np.array([float(x) for x in line[1:4]]) 40 | try: 41 | self.z[:3] = np.array([float(x) for x in line[4:7]]) 42 | except ValueError: 43 | warnings.warn( 44 | "Improper formatting of input SBU file! cannot find the" 45 | + "connecting vector for bond %i." % (self.identifier) 46 | + "Catastrophic errors in the bonding will ensue!" 47 | ) 48 | try: 49 | self.y[:3] = np.array([float(x) for x in line[7:10]]) 50 | except ValueError: 51 | # Y not needed at the moment. 52 | pass 53 | if len(line) == 12: 54 | try: 55 | self.symmetry = int(line[10]) 56 | except ValueError: 57 | self.symmetry = 1 58 | try: 59 | self.special = int(line[11]) 60 | except ValueError: 61 | self.special = None 62 | self._normalize() 63 | 64 | def _normalize(self): 65 | """Normalize the y and z vectors""" 66 | self.z[:3] = self.z[:3] / np.linalg.norm(self.z[:3]) 67 | # self.y[:3] = self.y[:3]/np.linalg.norm(self.y[:3]) 68 | 69 | def rotate(self, R): 70 | self.origin = np.dot(R, self.origin) 71 | self.y[:3] = np.dot(R[:3, :3], self.y[:3]) 72 | self.z[:3] = np.dot(R[:3, :3], self.z[:3]) 73 | 74 | def translate(self, vector): 75 | self.origin[:3] += vector 76 | 77 | def __mul__(self, val): 78 | self.origin[:3] *= val 79 | self.z[:3] *= val 80 | -------------------------------------------------------------------------------- /tobascco/createinput.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | import os 3 | from logging import info 4 | from sys import version_info 5 | 6 | from openbabel import openbabel as ob 7 | 8 | from .element_properties import ATOMIC_NUMBER 9 | 10 | __all__ = ["InputSBU"] 11 | 12 | 13 | def clean(name, ext): 14 | size = len(ext) + 1 15 | if name[-size:] == "." + ext: 16 | return name[:-size] 17 | return name 18 | 19 | 20 | class InputSBU(object): 21 | """Contains the necessary information to produce an input for 22 | Genstruct. This input file is a necessary step in case bonding 23 | flags or symmetry are incorrect.""" 24 | 25 | def __init__(self, filename, ext): 26 | self.data = { 27 | "name": "", 28 | "index": "", 29 | "metal": "", 30 | "topology": "", 31 | "parent": "", 32 | "atomic_info": "", 33 | "bond_table": "", 34 | "connectivity": "", 35 | "connect_flag": "", 36 | "connect_sym": "", 37 | } 38 | name = os.path.split(filename)[-1] 39 | self.name = clean(name, ext) 40 | self.update(name=self.name) 41 | # may be a source of error.. 
untested 42 | obConversion = ob.OBConversion() 43 | obConversion.SetInAndOutFormats(ext, "pdb") 44 | self.mol = ob.OBMol() 45 | if version_info.major >= 3: 46 | # self.mol = next(pybel.readfile(ext, filename)) 47 | obConversion.ReadFile(self.mol, filename) 48 | else: 49 | obConversion.ReadFile(self.mol, filename) 50 | # self.mol = pybel.readfile(ext, filename).next() 51 | self._reset_formal_charges() 52 | 53 | def get_index(self): 54 | ind = self.name[:] 55 | if "s" == ind[-1:]: 56 | ind = ind[:-1] 57 | if "m" == ind[-1:]: 58 | ind = ind[:-1] 59 | try: 60 | ind = int(ind.lstrip("index")) 61 | except ValueError: 62 | ind = 0 63 | self.update(index=str(ind)) 64 | 65 | def get_metal(self): 66 | if "m" in self.name[-2:]: 67 | self.update(metal="True") 68 | else: 69 | self.update(metal="False") 70 | 71 | def special(self): 72 | """If the mol file ends with an 's', this will interpret 73 | it as a child SBU, the parent will be the mol name before the 's'""" 74 | if "s" in self.name[-1:]: 75 | self.update(parent=self.name[:-1]) 76 | 77 | def set_topology(self, top): 78 | self.update(topology=top) 79 | 80 | def add_data(self, **kwargs): 81 | self.data.update(kwargs) 82 | 83 | def update(self, **kwargs): 84 | for key, val in kwargs.items(): 85 | self.data[key] += val 86 | 87 | def _reset_formal_charges(self): 88 | """Set all formal charges to zero, this is how special 89 | information will be passed to oBMol objects.""" 90 | for atom in ob.OBMolAtomIter(self.mol): 91 | atom.SetFormalCharge(0) 92 | 93 | def _remove_atoms(self, *args): 94 | for obatom in args: 95 | self.mol.DeleteAtom(obatom) 96 | 97 | def get_connect_info(self): 98 | """Grab all the atoms which are flagged by this program to be 99 | connectivity points. Namely, Xe, Y, and Rn. Ac series 100 | elements are replacement Xe atoms for special bonding purposes. 
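        Illustrative (editor's note): an Xe connect point bonded to a Y atom
        (net vector) and an Rn atom (bond vector) records both directions in
        the connectivity table; the Xe, Y and Rn dummies are then deleted.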
101 | """ 102 | special, remove = [], [] 103 | connect_index = 0 104 | for ind, atom in enumerate(ob.OBMolAtomIter(self.mol)): 105 | N = atom.GetAtomicNum() 106 | if N == 54 or (N >= 89 and N <= 102): 107 | connect_index += 1 108 | con_line = "%4i " % (connect_index) 109 | X = "%12.4f %8.4f %8.4f" % (atom.GetX(), atom.GetY(), atom.GetZ()) 110 | if N >= 89 and N <= 102: 111 | special.append((connect_index, N % 89 + 1)) 112 | net_vector, bond_vector = "", "" 113 | for neighbour in ob.OBAtomAtomIter(atom): 114 | x = neighbour.GetX() - atom.GetX() 115 | y = neighbour.GetY() - atom.GetY() 116 | z = neighbour.GetZ() - atom.GetZ() 117 | if neighbour.GetAtomicNum() == 39: 118 | net_atom = neighbour 119 | net_vector = "%12.4f %8.4f %8.4f" % (x, y, z) 120 | remove.append(net_atom) 121 | elif neighbour.GetAtomicNum() == 86: 122 | bond_atom = neighbour 123 | bond_vector = "%12.4f %8.4f %8.4f" % (x, y, z) 124 | remove.append(bond_atom) 125 | else: 126 | # TEMP if Rn does not exist 127 | bond_vector = "%12.4f %8.4f %8.4f" % (-x, -y, -z) 128 | neighbour.SetFormalCharge(connect_index) 129 | id = neighbour.GetIdx() 130 | con_line += "".join([X, bond_vector, net_vector, "\n"]) 131 | self.update(connectivity=con_line) 132 | remove.append(atom) 133 | 134 | self._remove_atoms(*remove) 135 | 136 | # include special considerations 137 | for (i, spec) in special: 138 | if spec == 2: 139 | bond_partner = 1 140 | elif spec == 1: 141 | bond_partner = 2 142 | else: 143 | bond_partner = 0 144 | const_line = "%5i%5i%5i\n" % (i, spec, bond_partner) 145 | self.update(connect_flag=const_line) 146 | 147 | def get_atom_info(self): 148 | for atom in ob.OBMolAtomIter(self.mol): 149 | N = atom.GetAtomicNum() 150 | element = ATOMIC_NUMBER[N] 151 | coordlines = "%4s %-6s %8.4f %8.4f %8.4f\n" % ( 152 | element, 153 | self._get_ff_type(atom), 154 | atom.GetX(), 155 | atom.GetY(), 156 | atom.GetZ(), 157 | ) 158 | self.update(atomic_info=coordlines) 159 | if atom.GetFormalCharge() != 0: 160 | conn_atom = str(atom.GetFormalCharge()) + "C" 161 | order = "S" # currently set to a single bond. 
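                # (editor's note, illustrative) an atom at OB index 5 flagged with
                # formal charge 2 emits the table row "   4  2C   S".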
162 | tableline = "%4i%4s%4s\n" % (atom.GetIdx() - 1, conn_atom, order) 163 | self.update(bond_table=tableline) 164 | 165 | def get_bond_info(self): 166 | for bond in ob.OBMolBondIter(self.mol): 167 | start_idx = bond.GetBeginAtomIdx() 168 | end_idx = bond.GetEndAtomIdx() 169 | type = self.return_bondtype(bond) 170 | line = "%4i%4i%4s\n" % (start_idx - 1, end_idx - 1, type) 171 | self.update(bond_table=line) 172 | 173 | def return_bondtype(self, bond): 174 | start_atom = bond.GetBeginAtom() 175 | end_atom = bond.GetEndAtom() 176 | order = bond.GetBondOrder() 177 | # if bond.IsSingle(): 178 | if order == 1: 179 | return "S" 180 | # elif bond.IsDouble(): 181 | if order == 2: 182 | return "D" 183 | # elif bond.IsTriple(): 184 | if order == 3: 185 | return "T" 186 | elif bond.IsAromatic(): 187 | return "A" 188 | elif ( 189 | start_atom.GetType()[-1] == "R" 190 | and end_atom.GetType()[-1] == "R" 191 | and start_atom.ExplicitHydrogenCount() == 1 192 | and end_atom.ExplicitHydrogenCount() == 1 193 | ): 194 | return "A" 195 | elif bond.IsAmide(): 196 | return "Am" 197 | 198 | def set_uff(self): 199 | """Adds UFF atomtyping to the openbabel molecule description""" 200 | uff = ob.OBForceField_FindForceField("uff") 201 | uff.Setup(self.mol) 202 | uff.GetAtomTypes(self.mol) 203 | 204 | def _get_ff_type(self, pyatom): 205 | return pyatom.GetData("FFAtomType").GetValue() 206 | 207 | def __str__(self): 208 | line = "[%(name)s]\nindex = %(index)s\nmetal = %(metal)s\n" % (self.data) 209 | line += "topology = %(topology)s\n" % (self.data) 210 | if self.data["parent"]: 211 | line += "parent = %(parent)s\n" % (self.data) 212 | line += "atoms = \n%(atomic_info)stable = \n" % (self.data) 213 | line += "%(bond_table)sconnectivity = \n%(connectivity)s" % (self.data) 214 | if self.data["connect_flag"]: 215 | line += "connect_flag = \n%(connect_flag)s" % (self.data) 216 | if self.data["connect_sym"]: 217 | line += "connect_sym = \n%(connect_sym)s" % (self.data) 218 | return line 219 | 220 | 221 | class SBUFileRead(object): 222 | def __init__(self, options): 223 | self.options = options 224 | self.sbus = [] 225 | 226 | def read_sbu_files(self): 227 | files = [] 228 | ext_len = len(self.options.file_extension.strip()) 229 | if self.options.sbu_files: 230 | for sbuf in self.options.sbu_files: 231 | if sbuf[-ext_len:] == "." + self.options.file_extension: 232 | files.append(sbuf) 233 | else: 234 | if os.path.isdir(os.path.abspath(sbuf)): 235 | for j in os.listdir(os.path.abspath(sbuf)): 236 | if j[-ext_len:] == "." 
+ self.options.file_extension: 237 | files.append(os.path.join(os.path.abspath(sbuf), j)) 238 | 239 | else: 240 | files = [ 241 | j 242 | for j in os.listdir(os.getcwd()) 243 | if j.endswith(self.options.file_extension) 244 | ] 245 | for f in files: 246 | info("Reading: %s" % (os.path.basename(f))) 247 | s = InputSBU(f, self.options.file_extension) 248 | s.get_index() 249 | s.get_metal() 250 | s.special() 251 | if self.options.topologies: 252 | s.set_topology(self.options.topologies[0]) 253 | else: 254 | s.set_topology("None") 255 | s.set_uff() 256 | s.get_connect_info() 257 | s.get_atom_info() 258 | s.get_bond_info() 259 | 260 | self.sbus.append(s) 261 | 262 | def sort_sbus(self): 263 | """Put metals first, then organics in order of their indices""" 264 | metals, organics = [], [] 265 | for sbu in self.sbus: 266 | sbu_ind = int(sbu.data["index"]) 267 | if sbu.data["metal"] == "True": 268 | metals.append((sbu_ind, sbu)) 269 | else: 270 | organics.append((sbu_ind, sbu)) 271 | 272 | self.sbus = [i[1] for i in sorted(metals)] + [i[1] for i in sorted(organics)] 273 | 274 | def write_file(self): 275 | filename = os.path.join(self.options.job_dir, self.options.jobname) + ".out" 276 | info("writing SBU file to %s" % (filename)) 277 | with open(filename, "w") as handle: 278 | for sbu in self.sbus: 279 | handle.writelines(str(sbu)) 280 | -------------------------------------------------------------------------------- /tobascco/csv.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | import os 3 | 4 | __all__ = ["CSV"] 5 | 6 | 7 | class CSV(object): 8 | def __init__(self, name, _READ=False): 9 | self._data = {} 10 | self._headings = [] 11 | if not _READ: 12 | self.filename = self.get_filename(name) 13 | else: 14 | self.filename = name 15 | self.read() 16 | 17 | def get_filename(self, name): 18 | if name[-4:] == ".csv": 19 | base = name[:-4] 20 | else: 21 | base = name 22 | 23 | filename = base 24 | count = 0 25 | while os.path.isfile(filename + ".csv"): 26 | count += 1 27 | filename = base + ".%d" % count 28 | return filename + ".csv" 29 | 30 | def add_data(self, **kwargs): 31 | # head_dic = {} 32 | for key, val in kwargs.items(): 33 | # head_dic.setdefault(key, 0) 34 | # if key in head_dic.keys(): 35 | # head_dic[key] += 1 36 | # key = "%s.%i"%(key, head_dic[key]) 37 | if key in self._headings: 38 | self._data.setdefault(key, []).append(val) 39 | else: 40 | print("%s not in the headings! Ignoring data!" % (key)) 41 | 42 | @property 43 | def item_count(self): 44 | lengths = [] 45 | keyss = [] 46 | for key, val in self._data.items(): 47 | keyss.append(key) 48 | lengths.append(len(val)) 49 | # for i, j in zip(keyss, lengths): 50 | # print i,j 51 | assert all([x == lengths[0] for x in lengths]) 52 | if lengths: 53 | return lengths[0] 54 | return 0 55 | 56 | def set_headings(self, *args): 57 | head_dic = {} 58 | for head in self._headings: 59 | name = ".".join(head.split(".")[:-1]) 60 | head_dic.setdefault(name, 0) 61 | head_dic[name] += 1 62 | 63 | for arg in args: 64 | head_dic.setdefault(arg, 0) 65 | if arg in head_dic.keys(): 66 | head_dic[arg] += 1 67 | arg = "%s.%i" % (arg, head_dic[arg]) 68 | self._headings.append(arg) 69 | 70 | def write(self, filename=None): 71 | if filename is None: 72 | f = open(self.filename, "w") 73 | else: 74 | f = open(self.get_filename(self.filename), "w") 75 | # remove the tracking numbers for the final file writing. 
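        # (editor's note, illustrative) a heading stored internally as
        # "MOFname.1" is written back out as "MOFname".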
76 | heads = [".".join(i.split(".")[:-1]) for i in self._headings] 77 | lines = "%s\n" % (",".join(heads)) 78 | for k in range(self.item_count): 79 | lines += "%s\n" % ( 80 | ",".join( 81 | [self.to_str(self._data[i][k]).strip() for i in self._headings] 82 | ) 83 | ) 84 | 85 | f.writelines(lines) 86 | f.close() 87 | 88 | def to_str(self, val): 89 | if isinstance(val, bool):  # bool first: bool is a subclass of int 90 | return "%d" % val 91 | elif isinstance(val, str): 92 | return val 93 | elif isinstance(val, int): 94 | return "%i" % val 95 | elif isinstance(val, float): 96 | return "%12.6f" % val 97 | 98 | def read(self): 99 | with open(self.filename, "r") as f: 100 | for index, line in enumerate(f): 101 | line = line.strip() 102 | if index == 0: 103 | self.set_headings(*[j for j in line.split(",") if j]) 104 | else: 105 | self.add_data( 106 | **{ 107 | j: i 108 | for j, i in zip( 109 | self._headings, 110 | [k for k in line.split(",") if not k.startswith("#")], 111 | ) 112 | } 113 | ) 114 | 115 | def iter_key_vals(self): 116 | vals = [self._data[j] for j in self._headings] 117 | for k in zip(*vals): 118 | yield (zip(self._headings, k)) 119 | 120 | def keys(self): 121 | return self._data.keys() 122 | 123 | def vals(self): 124 | return self._data.values() 125 | 126 | def items(self): 127 | return self._data.items() 128 | 129 | def __getitem__(self, key): 130 | try: 131 | return self._data[key] 132 | except KeyError: 133 | # print self._data 134 | print("Error no such key, %s, found in data" % key) 135 | return None 136 | 137 | def get_row(self, row): 138 | """Returns row of data ordered by heading sequence""" 139 | try: 140 | return [self._data[k][row] for k in self._headings] 141 | except KeyError: 142 | return None 143 | 144 | def mofname_dic(self): 145 | """return a dictionary where the MOFnames are the keys which contain a 146 | dictionary with the remaining headers specific to those MOFname values, 147 | only works if MOFname is a header.""" 148 | dic = {} 149 | try: 150 | # MOFname.1 because we add integers for redundant column headers. 
151 | self._data["MOFname.1"] 152 | except KeyError: 153 | print("No MOFname key - returning an empty dictionary") 154 | return dic 155 | # remaining headers 156 | heads = [i for i in self._headings if i != "MOFname.1"] 157 | for i, name in enumerate(self._data["MOFname.1"]): 158 | name = self.clean(name) 159 | dic.setdefault(name, {}) 160 | for j in heads: 161 | try: 162 | dic[name][j] = self._data[j][i] 163 | except IndexError: 164 | print(i, len(self._data[j])) 165 | print("No data associated with %s, removing from object" % (name)) 166 | dic.pop(name) 167 | break 168 | return dic 169 | 170 | def clean(self, name): 171 | if name.startswith("./run_x"): 172 | name = name[10:] 173 | elif name.startswith("run_x"): 174 | name = name[8:] 175 | if name.endswith(".cif"): 176 | name = name[:-4] 177 | elif name.endswith(".niss"): 178 | name = name[:-5] 179 | elif name.endswith(".out-CO2.csv"): 180 | name = name[:-12] 181 | elif name.endswith("-CO2.csv"): 182 | name = name[:-8] 183 | elif name.endswith(".flog"): 184 | name = name[:-5] 185 | elif name.endswith(".out.cif"): 186 | name = name[:-8] 187 | elif name.endswith(".tar"): 188 | name = name[:-4] 189 | elif name.endswith(".db"): 190 | name = name[:-3] 191 | elif name.endswith(".faplog"): 192 | name = name[:-7] 193 | elif name.endswith(".db.bak"): 194 | name = name[:-7] 195 | elif name.endswith(".csv"): 196 | name = name[:-4] 197 | if name.endswith(".out"): 198 | name = name[:-4] 199 | return name 200 | 201 | def remove(self, index): 202 | if isinstance(index, list): 203 | for k in reversed(sorted(index)): 204 | for j in self._headings: 205 | self._data[j].pop(k) 206 | else: 207 | for j in self._headings: 208 | self._data[j].pop(index) 209 | 210 | @property 211 | def size(self): 212 | return len(self._data[self._headings[0]]) 213 | -------------------------------------------------------------------------------- /tobascco/defaults.ini: -------------------------------------------------------------------------------- 1 | # Defaults 2 | # flag for detecting symmetrically equivalent hydrogens in the MOFs 3 | find_symmetric_h = False 4 | # symmetry precision in angstroms 5 | symmetry_precision = 1.e-5 6 | # set to True, and the program will create SBU input files and then exit, no structures will be tried 7 | create_sbu_input_files = False 8 | # file extension for the sbu input files 9 | file_extension = mol 10 | # request only specific topologies to be built (blank builds all in the db) 11 | # doubles as the topology set for SBU input files 12 | topologies = 13 | # topologies to ignore. These will be removed from the topology queue, even if you 14 | # specifically requested them in the 'topologies' directive 15 | ignore_topologies = 16 | # number of metal SBUs tried per structure (NB: set to 1, might break otherwise) 17 | metal_sbu_per_structure = 1 18 | # number of organic SBUs tried per structure 19 | organic_sbu_per_structure = 1 20 | # Request only the organic SBUs with indices be built. Comma or space delimited 21 | organic_sbus = 22 | # Request only the metal SBUs with indices be built. Comma or space delimited 23 | metal_sbus = 24 | # Request only the following combinations be tried, overrides organic_sbus and metal_sbus 25 | # NOTE: currently only one metal is read as the first index, the rest organic. 26 | # tuples must be in the order: (metal index, organic index, organic..) 27 | # lists of tuples can be included. (1,2,3),(4,5,6) etc. 
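# e.g. (editor's illustration) sbu_combinations = (1, 2), (1, 3) tries metal 1
# with organic 2, then metal 1 with organic 3; a lone tuple such as (1, 2) is
# also accepted and is wrapped in a list when parsed.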
28 | sbu_combinations = 29 | # overlap tolerance is a scaling factor multiplied by the pairwise addition of 30 | # Van der Waals radii of each atom. This value is used to test bonded atom 31 | # distances as well. Currently set to 1 Angstrom * tol. 32 | overlap_tolerance = 0.4 33 | # set the maximum number of structures to build for a given combination of SBUs 34 | max_structures = 1 35 | # surface area calculation will compute the surface areas of all SBUs and report 36 | # to a .csv file. This will be conducted before any structure generation. 37 | calc_sbu_surface_area = False 38 | # calc the max distance between each SBU's connect points and report to a .csv file. 39 | calc_max_sbu_span = False 40 | # assign the bond length between SBUs when fitting the net 41 | sbu_bond_length = 1.5 42 | # verbosity options, can be set here or on the command line. Command line will override these 43 | # options 44 | # the volume of the resulting unit cell must not be lower than x * the cell lengths 45 | cell_vol_tolerance = 0.1 46 | # request a visualisation of the barycentric embedding of the nets, then exit. 47 | # this will ignore all SBU combinations and embedding attempts. 48 | show_barycentric_net_only = False 49 | # visualize the underlying embedded net of the constructed MOF. NB: this includes nodes 50 | # assigned to SBU connect points, so it may look messy 51 | show_embedded_net = False 52 | # print out a .csv file with all the run information in it 53 | get_run_info = False 54 | # max edge length of net to use to build MOFs. There is a polynomial time cost related to 55 | # building MOFs with N edges 56 | max_edge_count = 1000 57 | # min edge length of net to use to build MOFs. This can be kept at zero, but if more 58 | # expensive nets are calculated, this can be used to ignore the smaller nets. 59 | min_edge_count = 0 60 | # store a reduced representation of the net, keeping only the vertices of the SBUs 61 | # and not the connect points. Edges are between the SBU centre of masses 62 | store_net = False 63 | # global optimisation, which is run before the local. If left blank, no global optimisation 64 | # will occur. NOTE: I have never experienced success with any of these global optimisers! 65 | # options are: {direct, direct-l, crs2, mlsl, mlsl-lds, stogo, stogo-rand, isres, esch} 66 | 67 | # note, the mlsl, and mlsl-lds options require a local optimiser, the one specified by 68 | # local_optim_code will be used for this purpose. 69 | global_optimiser = 70 | # local optimisation function 71 | # options are: {cobyla, bobyqa, praxis, nelder-mead, sbplx, mma, ccsa, slsqp, lbfgs, newton, 72 | # newton-precond, netwon-precond-restart, newton-restart, var2, var1} 73 | 74 | # NOTE: I have had the most success with lbfgs. However the algorithm requires a gradient based 75 | # calculation to determine the proceeding steps. This is currently done with a finite difference 76 | # method which is subject to round-off errors, and can sometimes produce poor net optimisations. 77 | local_optimiser = lbfgs 78 | # factor for function minimisation tolerance. This value will determine when the 79 | # optimisation routine is converged based on the relative change of the input 80 | # variables (namely the cell parameter and the cocycle lattice) 81 | opt_parameter_tol = 1e-5 82 | # factor for function minimisation tolerance. 
This is a relative change value, 83 | # so the optimisation of the topology will be considered converged when the 84 | # objective function change is below this value 85 | opt_function_tol = 1e-10 86 | # use builds of a net if they exist, i.e. it's embedded structure, and the SBU assignments. 87 | # this makes it easy to just snap new SBUs with the same geometry to the appropriate 88 | # nodes with the same bonding informtaion. 89 | # WARNING: Will only work for SBUs with the same geometry and connectivity! 90 | use_builds = False 91 | # save builds writes the builds as a pickle file after job termination. 92 | # this can be opened when running topcryst again 93 | save_builds = False 94 | # file search for builds to load 95 | build_files = 96 | # third dimension only applies to 2D nets, this is the length of the c vector 97 | # when the cif file is made 98 | third_dimension = 10 99 | # count the number of edges along each lattice direction 100 | count_edges_along_lattice_dirs = False 101 | 102 | verbose = False 103 | quiet = False 104 | silent = True 105 | -------------------------------------------------------------------------------- /tobascco/element_properties.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | WEIGHT = { 3 | None: 0.00000, 4 | "H": 1.00794, 5 | "He": 4.002602, 6 | "Li": 6.941, 7 | "Be": 9.012182, 8 | "B": 10.811, 9 | "C": 12.0107, 10 | "N": 14.0067, 11 | "O": 15.9994, 12 | "F": 18.9984032, 13 | "Ne": 20.1797, 14 | "Na": 22.98976928, 15 | "Mg": 24.3050, 16 | "Al": 26.9815386, 17 | "Si": 28.0855, 18 | "P": 30.973762, 19 | "S": 32.065, 20 | "Cl": 35.453, 21 | "Ar": 39.948, 22 | "K": 39.0983, 23 | "Ca": 40.078, 24 | "Sc": 44.955912, 25 | "Ti": 47.867, 26 | "V": 50.9415, 27 | "Cr": 51.9961, 28 | "Mn": 54.938045, 29 | "Fe": 55.845, 30 | "Co": 58.933195, 31 | "Ni": 58.6934, 32 | "Cu": 63.546, 33 | "Zn": 65.38, 34 | "Ga": 69.723, 35 | "Ge": 72.64, 36 | "As": 74.92160, 37 | "Se": 78.96, 38 | "Br": 79.904, 39 | "Kr": 83.798, 40 | "Rb": 85.4678, 41 | "Sr": 87.62, 42 | "Y": 88.90585, 43 | "Zr": 91.224, 44 | "Nb": 92.90638, 45 | "Mo": 95.96, 46 | "Tc": 98, 47 | "Ru": 101.07, 48 | "Rh": 102.90550, 49 | "Pd": 106.42, 50 | "Ag": 107.8682, 51 | "Cd": 112.411, 52 | "In": 114.818, 53 | "Sn": 118.710, 54 | "Sb": 121.760, 55 | "Te": 127.60, 56 | "I": 126.90447, 57 | "Xe": 131.293, 58 | "Cs": 132.9054519, 59 | "Ba": 137.327, 60 | "La": 138.90547, 61 | "Ce": 140.116, 62 | "Pr": 140.90765, 63 | "Nd": 144.242, 64 | "Pm": 145, 65 | "Sm": 150.36, 66 | "Eu": 151.964, 67 | "Gd": 157.25, 68 | "Tb": 158.92535, 69 | "Dy": 162.500, 70 | "Ho": 164.93032, 71 | "Er": 167.259, 72 | "Tm": 168.93421, 73 | "Yb": 173.054, 74 | "Lu": 174.9668, 75 | "Hf": 178.49, 76 | "Ta": 180.94788, 77 | "W": 183.84, 78 | "Re": 186.207, 79 | "Os": 190.23, 80 | "Ir": 192.217, 81 | "Pt": 195.084, 82 | "Au": 196.966569, 83 | "Hg": 200.59, 84 | "Tl": 204.3833, 85 | "Pb": 207.2, 86 | "Bi": 208.98040, 87 | "Po": 209, 88 | "At": 210, 89 | "Rn": 222, 90 | "Fr": 223, 91 | "Ra": 226, 92 | "Ac": 227, 93 | "Th": 232.03806, 94 | "Pa": 231.03588, 95 | "U": 238.02891, 96 | "Np": 237, 97 | "Pu": 244, 98 | "Am": 243, 99 | "Cm": 247, 100 | "Bk": 247, 101 | "Cf": 251, 102 | "Es": 252, 103 | "Fm": 257, 104 | "Md": 258, 105 | "No": 259, 106 | "Lr": 262, 107 | "Rf": 265, 108 | "Db": 268, 109 | "Sg": 271, 110 | "Bh": 272, 111 | "Hs": 270, 112 | "Mt": 276, 113 | "Ds": 281, 114 | "Rg": 280, 115 | "Cn": 285, 116 | "Uut": 284, 117 | "Uuq": 289, 118 | "Uup": 288, 119 | "Uuh": 293, 120 | 
"Uuo": 294, 121 | "X": 0, 122 | "Y": 0, 123 | "Z": 0, 124 | "G": 0, 125 | } 126 | 127 | # If this is a nice list we can just .index or [slice] to get atomic numbers 128 | ATOMIC_NUMBER = [ 129 | "ZERO", 130 | "H", 131 | "He", 132 | "Li", 133 | "Be", 134 | "B", 135 | "C", 136 | "N", 137 | "O", 138 | "F", 139 | "Ne", 140 | "Na", 141 | "Mg", 142 | "Al", 143 | "Si", 144 | "P", 145 | "S", 146 | "Cl", 147 | "Ar", 148 | "K", 149 | "Ca", 150 | "Sc", 151 | "Ti", 152 | "V", 153 | "Cr", 154 | "Mn", 155 | "Fe", 156 | "Co", 157 | "Ni", 158 | "Cu", 159 | "Zn", 160 | "Ga", 161 | "Ge", 162 | "As", 163 | "Se", 164 | "Br", 165 | "Kr", 166 | "Rb", 167 | "Sr", 168 | "Y", 169 | "Zr", 170 | "Nb", 171 | "Mo", 172 | "Tc", 173 | "Ru", 174 | "Rh", 175 | "Pd", 176 | "Ag", 177 | "Cd", 178 | "In", 179 | "Sn", 180 | "Sb", 181 | "Te", 182 | "I", 183 | "Xe", 184 | "Cs", 185 | "Ba", 186 | "La", 187 | "Ce", 188 | "Pr", 189 | "Nd", 190 | "Pm", 191 | "Sm", 192 | "Eu", 193 | "Gd", 194 | "Tb", 195 | "Dy", 196 | "Ho", 197 | "Er", 198 | "Tm", 199 | "Yb", 200 | "Lu", 201 | "Hf", 202 | "Ta", 203 | "W", 204 | "Re", 205 | "Os", 206 | "Ir", 207 | "Pt", 208 | "Au", 209 | "Hg", 210 | "Tl", 211 | "Pb", 212 | "Bi", 213 | "Po", 214 | "At", 215 | "Rn", 216 | "Fr", 217 | "Ra", 218 | "Ac", 219 | "Th", 220 | "Pa", 221 | "U", 222 | "Np", 223 | "Pu", 224 | "Am", 225 | "Cm", 226 | "Bk", 227 | "Cf", 228 | "Es", 229 | "Fm", 230 | "Md", 231 | "No", 232 | "Lr", 233 | "Rf", 234 | "Db", 235 | "Sg", 236 | "Bh", 237 | "Hs", 238 | "Mt", 239 | "Ds", 240 | "Rg", 241 | "Cn", 242 | "Uut", 243 | "Uuq", 244 | "Uup", 245 | "Uuh", 246 | "Uuo", 247 | ] 248 | 249 | UFF = { 250 | "H": (2.5711, 0.0440), 251 | "He": (2.1043, 0.0560), 252 | "Li": (2.1836, 0.0250), 253 | "Be": (2.4455, 0.0850), 254 | "B": (3.6375, 0.1800), 255 | "C": (3.4309, 0.1050), 256 | "N": (3.2607, 0.0690), 257 | "O": (3.1181, 0.0600), 258 | "F": (2.9970, 0.0500), 259 | "Ne": (2.8892, 0.0420), 260 | "Na": (2.6576, 0.0300), 261 | "Mg": (2.6914, 0.1110), 262 | "Al": (4.0082, 0.5050), 263 | "Si": (3.8264, 0.4020), 264 | "P": (3.6946, 0.3050), 265 | "S": (3.5948, 0.2740), 266 | "Cl": (3.5164, 0.2270), 267 | "Ar": (3.4460, 0.1850), 268 | "K": (3.3961, 0.0350), 269 | "Ca": (3.0282, 0.2380), 270 | "Sc": (2.9355, 0.0190), 271 | "Ti": (2.8286, 0.0170), 272 | "V": (2.8010, 0.0160), 273 | "Cr": (2.6932, 0.0150), 274 | "Mn": (2.6380, 0.0130), 275 | "Fe": (2.5943, 0.0130), 276 | "Co": (2.5587, 0.0140), 277 | "Ni": (2.5248, 0.0150), 278 | "Cu": (3.1137, 0.0050), 279 | "Zn": (2.4616, 0.1240), 280 | "Ga": (3.9048, 0.4150), 281 | "Ge": (3.8130, 0.3790), 282 | "As": (3.7685, 0.3090), 283 | "Se": (3.7462, 0.2910), 284 | "Br": (3.7320, 0.2510), 285 | "Kr": (3.6892, 0.2200), 286 | "Rb": (3.6652, 0.0400), 287 | "Sr": (3.2438, 0.2350), 288 | "Y": (2.9801, 0.0720), 289 | "Zr": (2.7832, 0.0690), 290 | "Nb": (2.8197, 0.0590), 291 | "Mo": (2.7190, 0.0560), 292 | "Tc": (2.6709, 0.0480), 293 | "Ru": (2.6397, 0.0560), 294 | "Rh": (2.6094, 0.0530), 295 | "Pd": (2.5827, 0.0480), 296 | "Ag": (2.8045, 0.0360), 297 | "Cd": (2.5373, 0.2280), 298 | "In": (3.9761, 0.5990), 299 | "Sn": (3.9128, 0.5670), 300 | "Sb": (3.9378, 0.4490), 301 | "Te": (3.9823, 0.3980), 302 | "I": (4.0090, 0.3390), 303 | "Xe": (3.9235, 0.3320), 304 | "Cs": (4.0242, 0.0450), 305 | "Ba": (3.2990, 0.3640), 306 | "La": (3.1377, 0.0170), 307 | "Ce": (3.1680, 0.0130), 308 | "Pr": (3.2126, 0.0100), 309 | "Nd": (3.1850, 0.0100), 310 | "Pm": (3.1600, 0.0090), 311 | "Sm": (3.1360, 0.0080), 312 | "Eu": (3.1119, 0.0080), 313 | "Gd": (3.0005, 0.0090), 314 | "Tb": (3.0745, 
0.0070), 315 | "Dy": (3.0540, 0.0070), 316 | "Ho": (3.0371, 0.0070), 317 | "Er": (3.0210, 0.0070), 318 | "Tm": (3.0059, 0.0060), 319 | "Yb": (2.9890, 0.2280), 320 | "Lu": (3.2429, 0.0410), 321 | "Hf": (2.7983, 0.0720), 322 | "Ta": (2.8241, 0.0810), 323 | "W": (2.7342, 0.0670), 324 | "Re": (2.6317, 0.0660), 325 | "Os": (2.7796, 0.0370), 326 | "Ir": (2.5302, 0.0730), 327 | "Pt": (2.4535, 0.0800), 328 | "Au": (2.9337, 0.0390), 329 | "Hg": (2.4099, 0.3850), 330 | "Tl": (3.8727, 0.6800), 331 | "Pb": (3.8282, 0.6630), 332 | "Bi": (3.8932, 0.5180), 333 | "Po": (4.1952, 0.3250), 334 | "At": (4.2318, 0.2840), 335 | "Rn": (4.2451, 0.2480), 336 | "Fr": (4.3654, 0.0500), 337 | "Ra": (3.2758, 0.4040), 338 | "Ac": (3.0985, 0.0330), 339 | "Th": (3.0255, 0.0260), 340 | "Pa": (3.0504, 0.0220), 341 | "U": (3.0246, 0.0220), 342 | "Np": (3.0504, 0.0190), 343 | "Pu": (3.0504, 0.0160), 344 | "Am": (3.0121, 0.0140), 345 | "Cm": (2.9631, 0.0130), 346 | "Bk": (2.9747, 0.0130), 347 | "Cf": (2.9515, 0.0130), 348 | "Es": (2.9391, 0.0120), 349 | "Fm": (2.9275, 0.0120), 350 | "Md": (2.9168, 0.0110), 351 | "No": (2.8936, 0.0110), 352 | "Lr": (2.8829, 0.0110), 353 | } 354 | 355 | Radii = { 356 | "H": 1.10, 357 | "He": 1.40, 358 | "Li": 1.81, 359 | "Be": 1.53, 360 | "B": 1.92, 361 | "C": 1.70, 362 | "N": 1.55, 363 | "O": 1.52, 364 | "F": 1.47, 365 | "Ne": 1.54, 366 | "Na": 2.27, 367 | "Mg": 1.73, 368 | "Al": 1.84, 369 | "Si": 2.10, 370 | "P": 1.80, 371 | "S": 1.80, 372 | "Cl": 1.75, 373 | "Ar": 1.88, 374 | "K": 2.75, 375 | "Ca": 2.31, 376 | "Ga": 1.87, 377 | "Ge": 2.11, 378 | "As": 1.85, 379 | "Se": 1.90, 380 | "Br": 1.83, 381 | "Kr": 2.02, 382 | "Rb": 3.03, 383 | "Sr": 2.49, 384 | "In": 1.93, 385 | "Sn": 2.17, 386 | "Sb": 2.06, 387 | "Te": 2.06, 388 | "I": 1.98, 389 | "Xe": 2.16, 390 | "Cs": 3.43, 391 | "Ba": 2.68, 392 | "Tl": 1.96, 393 | "Pb": 2.02, 394 | "Bi": 2.07, 395 | "Po": 1.97, 396 | "At": 2.02, 397 | "Rn": 2.20, 398 | "Fr": 3.48, 399 | "Ra": 2.83, 400 | # Transition metals. 
4s means the radius of the 4s shell was used, 401 | # B means taken from Bondi DOI: 10.1246/bcsj.20100166 402 | "Sc": 2.08, # 4s 403 | "Ti": 1.99, # 4s 404 | "V": 1.91, # 4s 405 | "Cr": 1.92, # 4s 406 | "Mn": 1.77, # 4s 407 | "Fe": 1.71, # 4s 408 | "Co": 1.65, # 4s 409 | "Ni": 1.63, # B 410 | "Cu": 1.40, # B 411 | "Zn": 1.39, # B 412 | "Y": 2.23, # 5s 413 | "Zr": 2.12, # 5s 414 | "Nb": 2.03, # 5s 415 | "Mo": 1.95, # 5s 416 | "Tc": 1.89, # 5s 417 | "Ru": 1.89, # 5s 418 | "Rh": 1.86, # 5s 419 | "Pd": 1.63, # B 420 | "Ag": 1.72, # B 421 | "Cd": 1.58, # B 422 | } 423 | 424 | 425 | METALS = [ 426 | "Li", 427 | "Na", 428 | "K", 429 | "Rb", 430 | "Cs", 431 | "Be", 432 | "Mg", 433 | "Ca", 434 | "Sr", 435 | "Ba", 436 | "Sc", 437 | "Y", 438 | "Lu", 439 | "Ti", 440 | "Zr", 441 | "Hf", 442 | "V", 443 | "Nb", 444 | "Ta", 445 | "Cr", 446 | "Mo", 447 | "W", 448 | "Mn", 449 | "Tc", 450 | "Re", 451 | "Fe", 452 | "Ru", 453 | "Os", 454 | "Co", 455 | "Rh", 456 | "Ir", 457 | "Ni", 458 | "Pd", 459 | "Pt", 460 | "Cu", 461 | "Ag", 462 | "Au", 463 | "Zn", 464 | "Cd", 465 | "Hg", 466 | "Al", 467 | "Ga", 468 | "In", 469 | "Tl", 470 | "Sn", 471 | "Pb", 472 | "La", 473 | "Ce", 474 | "Pr", 475 | "Nd", 476 | "Pm", 477 | "Sm", 478 | "Eu", 479 | "Gd", 480 | "Tb", 481 | "Dy", 482 | "Ho", 483 | "Er", 484 | "Tm", 485 | "Yb", 486 | ] 487 | -------------------------------------------------------------------------------- /tobascco/generator.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | import itertools 3 | 4 | from .sbu import SBU_list 5 | 6 | 7 | class Generate(object): 8 | """Takes as input a sequence of sbus, and returns 9 | build orders to make structures. 10 | 11 | """ 12 | 13 | def __init__(self, options, sbu_list): 14 | self.options = options 15 | self.sbus = SBU_list(sbu_list) 16 | 17 | def generate_sbu_combinations(self, incidence=None, N=None): 18 | if N is None: 19 | N = ( 20 | self.options.metal_sbu_per_structure 21 | + self.options.organic_sbu_per_structure 22 | ) 23 | for i in itertools.combinations_with_replacement(self.sbus.list, N): 24 | if self._valid_sbu_combination(incidence, i): 25 | yield tuple(i) 26 | 27 | def combinations_from_options(self): 28 | """Just return the tuples in turn.""" 29 | combs = [] 30 | Nmetals = self.options.metal_sbu_per_structure 31 | for combo in self.options.sbu_combinations: 32 | # first sbus have to be metals. 
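            # A sketch of the assumed option format (values are invented):
            # `sbu_combinations` is an iterable of identifier tuples, e.g.
            #     sbu_combinations = [(1, 2), (4, 2, 3)]
            # where the first Nmetals entries are metal SBU identifiers and
            # the remainder are organic SBU identifiers, as indexed below.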
33 |             met = []
34 |             for i in range(Nmetals):
35 |                 met.append(self.sbus.get(combo[i], _METAL=True))
36 |             combs.append(tuple(met + [self.sbus.get(i) for i in combo[Nmetals:]]))
37 |         return combs
38 | 
39 |     def _valid_sbu_combination(self, incidence, sbu_set):
40 |         """Check that the combination contains the required number of metal
41 |         SBUs and, if an incidence list is given, that the SBU degrees match it."""
42 |         if incidence is None:
43 |             return (
44 |                 len([i for i in sbu_set if i.is_metal])
45 |                 == self.options.metal_sbu_per_structure
46 |             )
47 |         else:
48 |             if set(i.degree for i in sbu_set) == set(incidence):  # NB: set comparison ignores multiplicity
49 |                 return (
50 |                     len([i for i in sbu_set if i.is_metal])
51 |                     == self.options.metal_sbu_per_structure
52 |                 )
53 |             else:
54 |                 return False
55 | 
56 |     def linear_in_combo(self, combo):
57 |         for i in combo:
58 |             for j in self.sbus.list:
59 |                 if j == i:
60 |                     if j.linear or j.two_connected:
61 |                         return True
62 |         return False
63 | 
64 |     def yield_linear_org_sbu(self, combo):
65 |         for i in self.sbus.list:
66 |             if (i.linear or i.two_connected) and not i.is_metal:
67 |                 ret = list(combo) + [i]
68 |                 yield tuple(ret)
69 | 
70 |     @property
71 |     def linear_sbus_exist(self):
72 |         try:
73 |             return self._linear_exist
74 |         except AttributeError:
75 |             self._linear_exist = False
76 |             for i in self.sbus.list:
77 |                 # not necessarily linear, but 2-c SBUs are OK for this function
78 |                 if i.linear or i.two_connected:
79 |                     self._linear_exist = True
80 |                     break
81 |             return self._linear_exist
82 | 
83 |     def _valid_bond_pair(self, pair):
84 |         """Determine if two SBUs can be bonded. Returns True if their
85 |         connect points carry matching special/constraint flags; when no
86 |         flags are set, a (metal|organic) pair is required.
87 |         """
88 |         (sbu1, cp1), (sbu2, cp2) = pair
89 |         if all(
90 |             [
91 |                 i is None
92 |                 for i in [cp1.special, cp2.special, cp1.constraint, cp2.constraint]
93 |             ]
94 |         ):
95 |             return sbu1.is_metal != sbu2.is_metal
96 | 
97 |         return (cp1.special == cp2.constraint) and (cp2.special == cp1.constraint)
98 | 
--------------------------------------------------------------------------------
/tobascco/glog.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | MPIsize = 0
3 | MPIrank = 0
4 | import copy
5 | import logging
6 | import os
7 | import sys
8 | import textwrap
9 | from logging import error, info, warning
10 | 
11 | from . import config
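# NB: MPIsize and MPIrank above are module-level placeholders; this module
# performs no MPI detection itself, so the ".rank%i" log-file suffix below is
# only produced if a caller patches these values to a real communicator size.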
12 | 
13 | 
14 | class Log(object):
15 |     def __init__(self, options):
16 |         self.options = options
17 |         self._init_logging()
18 | 
19 |     def _init_logging(self):
20 |         if self.options.silent:
21 |             stdout_level = logging.CRITICAL
22 |             file_level = logging.INFO
23 |         elif self.options.quiet:  # currently identical to "silent"
24 |             stdout_level = logging.CRITICAL
25 |             file_level = logging.INFO
26 |         elif self.options.verbose:
27 |             stdout_level = logging.DEBUG
28 |             file_level = logging.DEBUG
29 |         else:
30 |             stdout_level = logging.INFO
31 |             file_level = logging.INFO
32 | 
33 |         MPIstr = ""
34 |         if MPIsize > 0:
35 |             MPIstr = ".rank%i" % MPIrank
36 |         logging.basicConfig(
37 |             level=file_level,
38 |             format="[%(asctime)s] %(levelname)s %(message)s",
39 |             datefmt="%Y%m%d %H:%M:%S",
40 |             filename=os.path.join(
41 |                 self.options.job_dir, self.options.jobname + MPIstr + ".log"
42 |             ),
43 |             filemode="a",
44 |         )
45 |         logging.addLevelName(10, "--")
46 |         logging.addLevelName(20, ">>")
47 |         logging.addLevelName(30, "**")
48 |         logging.addLevelName(40, "!!")
49 |         logging.addLevelName(50, "XX")
50 | 
51 |         console = ColouredConsoleHandler(sys.stdout)
52 |         console.setLevel(stdout_level)
53 |         formatter = logging.Formatter("%(levelname)s %(message)s")
54 |         console.setFormatter(formatter)
55 |         logging.getLogger("").addHandler(console)
56 | 
57 | 
58 | class ColouredConsoleHandler(logging.StreamHandler):
59 |     """Makes colourised and wrapped output for the console."""
60 | 
61 |     def emit(self, record):
62 |         """Colourise and emit a record."""
63 |         # Need to make an actual copy of the record
64 |         # to prevent altering the message for other loggers
65 |         myrecord = copy.copy(record)
66 |         levelno = myrecord.levelno
67 |         if levelno >= 50:  # CRITICAL / FATAL
68 |             front = "\033[30;41m"  # black/red
69 |             text = "\033[30;41m"  # black/red
70 |         elif levelno >= 40:  # ERROR
71 |             front = "\033[30;41m"  # black/red
72 |             text = "\033[1;31m"  # bright red
73 |         elif levelno >= 30:  # WARNING
74 |             front = "\033[30;43m"  # black/yellow
75 |             text = "\033[1;33m"  # bright yellow
76 |         elif levelno >= 20:  # INFO
77 |             front = "\033[30;42m"  # black/green
78 |             text = "\033[1m"  # bright
79 |         elif levelno >= 10:  # DEBUG
80 |             front = "\033[30;46m"  # black/cyan
81 |             text = "\033[0m"  # normal
82 |         else:  # NOTSET and anything else
83 |             front = "\033[0m"  # normal
84 |             text = "\033[0m"  # normal
85 | 
86 |         myrecord.levelname = "%s%s\033[0m" % (front, myrecord.levelname)
87 |         myrecord.msg = (
88 |             textwrap.fill(
89 |                 myrecord.msg,
90 |                 initial_indent=text,
91 |                 width=76,
92 |                 subsequent_indent="\033[0m %s" % text,
93 |             )
94 |             + "\033[0m"
95 |         )
96 |         logging.StreamHandler.emit(self, myrecord)
97 | 
98 | 
99 | def main():
100 |     options = config.Options()
101 |     log = Log(options)
102 |     info("this is a logging test")
103 |     error("this is a logging test")
104 |     warning("this is a logging test")
105 | 
106 | 
107 | if __name__ == "__main__":
108 |     main()
109 | 
--------------------------------------------------------------------------------
/tobascco/linalg.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | import numpy as np
3 | 
4 | RAD2DEG = 180.0 / np.pi
5 | DEG2RAD = np.pi / 180.0
6 | 
7 | 
8 | def calc_angle(v1, v2):
9 |     """Returns the angle in radians between vectors 'v1' and 'v2'"""
10 |     v1_u = v1[:3] / np.linalg.norm(v1[:3])
11 |     v2_u = v2[:3] / np.linalg.norm(v2[:3])
12 |     # Note: the exact test (v1_u == v2_u).all() fails here because of
13 |     # floating-point rounding, so we compare with np.allclose instead
14 |     # and fall back to the special cases below.
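    # Sanity check of the convention used here (the values are what numpy
    # should return; the ellipsis marks truncated digits):
    #   >>> calc_angle(np.array([1.0, 0.0, 0.0]), np.array([0.0, 1.0, 0.0]))
    #   1.5707963...   # pi / 2
    #   >>> calc_angle(np.array([1.0, 0.0, 0.0]), np.array([-1.0, 0.0, 0.0]))
    #   3.1415926...   # pi, via the anti-parallel shortcut below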
15 |     if np.allclose(v1_u, v2_u):
16 |         return 0.0
17 |     elif np.allclose(v1_u, -v2_u):
18 |         return np.pi
19 |     angle = np.arccos(np.dot(v1_u, v2_u))
20 |     if np.isnan(angle):
21 |         if np.allclose(v1_u, v2_u):
22 |             return 0.0
23 |         else:
24 |             return np.pi
25 |     return angle
26 | 
27 | 
28 | def calc_axis(v1, v2):
29 |     v1_u = v1[:3] / np.linalg.norm(v1[:3])
30 |     v2_u = v2[:3] / np.linalg.norm(v2[:3])
31 |     if np.allclose(v1_u, v2_u) or np.allclose(v1_u, -v2_u):
32 |         return np.array([1.0, 0.0, 0.0])
33 |     a = np.cross(v1_u, v2_u)
34 |     return a / np.linalg.norm(a)
35 | 
36 | 
37 | def rotation_from_omega(w):
38 |     # NB: assumes w is non-zero; theta == 0 would divide by zero below.
39 |     theta = np.linalg.norm(w)
40 |     omega_x = np.array([[0.0, -w[2], w[1]], [w[2], 0.0, -w[0]], [-w[1], w[0], 0.0]])
41 |     R = np.identity(4)
42 |     M = (
43 |         np.identity(3)
44 |         + np.sin(theta) / theta * omega_x
45 |         + (1.0 - np.cos(theta)) / (theta * theta) * np.linalg.matrix_power(omega_x, 2)
46 |     )
47 |     R[:3, :3] = M
48 |     return R
49 | 
50 | 
51 | def rotation_from_vectors(v1, v2, point=None):
52 |     """Obtain the rotation matrix that maps the set of
53 |     vectors v1 onto the set of vectors v2 (an SVD/Kabsch
54 |     superposition).
55 | 
56 |     """
57 | 
58 |     # v2 = transformed, v1 = neutral
59 |     ua = np.array([np.mean(v1.T[0]), np.mean(v1.T[1]), np.mean(v1.T[2])])
60 |     ub = np.array([np.mean(v2.T[0]), np.mean(v2.T[1]), np.mean(v2.T[2])])
61 | 
62 |     Covar = np.dot((v2 - ub).T, (v1 - ua))
63 | 
64 |     try:
65 |         u, s, v = np.linalg.svd(Covar)
66 |         uv = np.dot(u, v)
67 |         d = np.identity(3)
68 |         d[2, 2] = np.linalg.det(uv)  # ensures non-reflected solution
69 |         M = np.dot(np.dot(u, d), v)
70 |         R = np.identity(4)
71 |         R[:3, :3] = M
72 |         if point is not None:
73 |             R[:3, 3] = point - np.dot(M, point)  # translation column, cf. rotation_matrix()
74 |         return R
75 |     except np.linalg.LinAlgError:
76 |         return np.identity(4)
77 | 
78 | 
79 | def rotation_matrix(axis, angle, point=None):
80 |     """
81 |     returns a 4x4 homogeneous rotation matrix based on the
82 |     provided axis and angle
83 |     """
84 |     axis = np.array(axis)
85 |     axis = axis / np.linalg.norm(axis)
86 |     a = np.cos(angle / 2.0)
87 |     b, c, d = -axis * np.sin(angle / 2.0)
88 | 
89 |     R = np.array(
90 |         [
91 |             [a * a + b * b - c * c - d * d, 2 * (b * c - a * d), 2 * (b * d + a * c)],
92 |             [2 * (b * c + a * d), a * a + c * c - b * b - d * d, 2 * (c * d - a * b)],
93 |             [2 * (b * d - a * c), 2 * (c * d + a * b), a * a + d * d - b * b - c * c],
94 |         ]
95 |     )
96 | 
97 |     M = np.identity(4)
98 |     M[:3, :3] = R
99 |     if point is not None:
100 |         # rotation not around origin
101 |         point = np.array(point[:3], dtype=np.float64, copy=False)
102 |         M[:3, 3] = point - np.dot(R, point)
103 |     return M
104 | 
105 | 
106 | def normalized_vectors(array):
107 |     _array = np.array(array)
108 |     norms = np.sqrt(np.einsum("...i,...i", _array, _array))
109 |     # norms = np.apply_along_axis(np.linalg.norm, 0, _array)
110 |     # ret = _array.T / norms.reshape(-1, 1)
111 |     return _array / norms[:, None]
112 | 
113 | 
114 | def central_moment(weights, vects, mean):
115 |     """Obtain the central moments"""
116 |     mx, my, mz = mean
117 |     dic = {}
118 | 
119 |     def moment(l, m, n):
120 |         try:
121 |             return dic[(l, m, n)]
122 |         except KeyError:
123 |             mom = 0.0
124 |             for ind, (x, y, z) in enumerate(vects):
125 |                 mom += (
126 |                     ((x - mx) ** l) * ((y - my) ** m) * ((z - mz) ** n) * weights[ind]
127 |                 )
128 |             dic[(l, m, n)] = mom
129 |             return mom
130 | 
131 |     return moment
132 | 
133 | 
134 | def raw_moment(weights, vects):
135 |     dic = {}
136 | 
137 |     def moment(l, m, n):
138 |         try:
139 |             return dic[(l, m, n)]
140 |         except KeyError:
141 |             mom = 0.0
142 |             for ind, (x, y, z) in enumerate(vects):
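                # These accumulators are the standard raw geometric moments,
                #     m(l, m, n) = sum_i w_i * x_i**l * y_i**m * z_i**n,
                # memoised in `dic`; central_moment() above computes the same
                # sum with the mean subtracted from each coordinate first.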
143 | mom += (x ** l) * (y ** m) * (z ** n) * weights[ind] 144 | dic[(l, m, n)] = mom 145 | return mom 146 | 147 | return moment 148 | 149 | 150 | def elipsoid_vol(cm): 151 | mat = np.matrix( 152 | [ 153 | [cm(2, 0, 0), cm(1, 1, 0), cm(1, 0, 1)], 154 | [cm(1, 1, 0), cm(0, 2, 0), cm(0, 1, 1)], 155 | [cm(1, 0, 1), cm(0, 1, 1), cm(0, 0, 2)], 156 | ] 157 | ) 158 | vol = (np.pi * 4.0 / 3.0 * np.linalg.det(mat)) ** (1.0 / 3.0) 159 | if np.isnan(vol): 160 | return 0.0 161 | return vol 162 | 163 | 164 | def r_gyr(cm): 165 | return np.sqrt((cm(2, 0, 0) + cm(0, 2, 0) + cm(0, 0, 2)) / (3.0 * cm(0, 0, 0))) 166 | 167 | 168 | def get_CI(cm): 169 | r = r_gyr(cm) 170 | s3 = 1.0 / ((cm(0, 0, 0) ** 3) * r ** 9) 171 | s4 = 1.0 / ((cm(0, 0, 0) ** 4) * r ** 9) 172 | # second order 173 | a1 = cm(0, 0, 2) - cm(0, 2, 0) 174 | a2 = cm(0, 2, 0) - cm(2, 0, 0) 175 | a3 = cm(2, 0, 0) - cm(0, 0, 2) 176 | # third order 177 | b1 = cm(0, 2, 1) - cm(2, 0, 1) 178 | b2 = cm(1, 0, 2) - cm(1, 2, 0) 179 | b3 = cm(2, 1, 0) - cm(0, 1, 2) 180 | b4 = cm(0, 0, 3) - cm(2, 0, 1) - 2.0 * cm(0, 2, 1) 181 | b5 = cm(0, 3, 0) - cm(0, 1, 2) - 2.0 * cm(2, 1, 0) 182 | b6 = cm(3, 0, 0) - cm(1, 2, 0) - 2.0 * cm(1, 0, 2) 183 | b7 = cm(0, 2, 1) - cm(0, 0, 3) + 2.0 * cm(2, 0, 1) 184 | b8 = cm(1, 0, 2) - cm(3, 0, 0) + 2.0 * cm(1, 2, 0) 185 | b9 = cm(2, 1, 0) - cm(0, 3, 0) + 2.0 * cm(0, 1, 2) 186 | b10 = cm(0, 2, 1) + cm(2, 0, 1) - 3.0 * cm(0, 0, 3) 187 | b11 = cm(0, 1, 2) + cm(2, 1, 0) - 3.0 * cm(0, 3, 0) 188 | b12 = cm(1, 0, 2) + cm(1, 2, 0) - 3.0 * cm(3, 0, 0) 189 | b13 = cm(0, 2, 1) + cm(0, 0, 3) + 3.0 * cm(2, 0, 1) 190 | b14 = cm(1, 0, 2) + cm(3, 0, 0) + 3.0 * cm(1, 2, 0) 191 | b15 = cm(2, 1, 0) + cm(0, 3, 0) + 3.0 * cm(0, 1, 2) 192 | b16 = cm(0, 1, 2) + cm(0, 3, 0) + 3.0 * cm(2, 1, 0) 193 | b17 = cm(2, 0, 1) + cm(0, 0, 3) + 3.0 * cm(0, 2, 1) 194 | b18 = cm(1, 2, 0) + cm(3, 0, 0) + 3.0 * cm(1, 0, 2) 195 | # fourth order 196 | g1 = cm(0, 2, 2) - cm(4, 0, 0) 197 | g2 = cm(2, 0, 2) - cm(0, 4, 0) 198 | g3 = cm(2, 2, 0) - cm(0, 0, 4) 199 | g4 = cm(1, 1, 2) + cm(1, 3, 0) + cm(3, 1, 0) 200 | g5 = cm(1, 2, 1) + cm(1, 0, 3) + cm(3, 0, 1) 201 | g6 = cm(2, 1, 1) + cm(0, 1, 3) + cm(0, 3, 1) 202 | g7 = cm(0, 2, 2) - cm(2, 2, 0) + cm(0, 0, 4) - cm(4, 0, 0) 203 | g8 = cm(2, 0, 2) - cm(0, 2, 2) + cm(4, 0, 0) - cm(0, 4, 0) 204 | g9 = cm(2, 2, 0) - cm(2, 0, 2) + cm(0, 4, 0) - cm(0, 0, 4) 205 | 206 | CI = 4.0 * s3 * ( 207 | cm(1, 1, 0) 208 | * ( 209 | cm(0, 2, 1) * (3.0 * g2 - 2.0 * g3 - g1) 210 | - cm(2, 0, 1) * (3.0 * g1 - 2.0 * g3 - g2) 211 | + b12 * g5 212 | - b11 * g6 213 | + cm(0, 0, 3) * g8 214 | ) 215 | + cm(1, 0, 1) 216 | * ( 217 | cm(2, 1, 0) * (3.0 * g1 - 2.0 * g2 - g3) 218 | - cm(0, 1, 2) * (3.0 * g3 - 2.0 * g2 - g1) 219 | + b10 * g6 220 | - b12 * g4 221 | + cm(0, 3, 0) * g7 222 | ) 223 | + cm(0, 1, 1) 224 | * ( 225 | cm(1, 0, 2) * (3.0 * g3 - 2.0 * g1 - g2) 226 | - cm(1, 2, 0) * (3.0 * g2 - 2.0 * g1 - g3) 227 | + b11 * g4 228 | - b10 * g5 229 | + cm(3, 0, 0) * g9 230 | ) 231 | + cm(0, 0, 2) * (b18 * g6 - b15 * g5 - 2.0 * (cm(1, 1, 1) * g8 + b1 * g4)) 232 | + cm(0, 2, 0) * (b17 * g4 - b14 * g6 - 2.0 * (cm(1, 1, 1) * g7 + b3 * g5)) 233 | + cm(2, 0, 0) * (b16 * g5 - b13 * g4 - 2.0 * (cm(1, 1, 1) * g9 + b2 * g6)) 234 | ) - 16.0 * s4 * ( 235 | cm(0, 1, 1) * a2 * a3 * b2 236 | + cm(1, 0, 1) * a1 * a2 * b3 237 | + cm(1, 1, 0) * a1 * a3 * b1 238 | - cm(1, 1, 1) * a1 * a2 * a3 239 | - cm(0, 1, 1) 240 | * cm(0, 1, 1) 241 | * (cm(1, 1, 1) * a1 - cm(0, 1, 1) * b2 - cm(1, 0, 1) * b5 - cm(1, 1, 0) * b7) 242 | - cm(1, 0, 1) 243 | * cm(1, 0, 1) 244 | * (cm(1, 1, 1) 
* a3 - cm(1, 0, 1) * b3 - cm(1, 1, 0) * b4 - cm(0, 1, 1) * b8) 245 | - cm(1, 1, 0) 246 | * cm(1, 1, 0) 247 | * (cm(1, 1, 1) * a2 - cm(1, 1, 0) * b1 - cm(0, 1, 1) * b6 - cm(1, 0, 1) * b9) 248 | + cm(0, 1, 1) 249 | * cm(1, 0, 1) 250 | * (cm(0, 0, 2) * b1 + cm(0, 2, 0) * b4 + cm(2, 0, 0) * b7) 251 | + cm(0, 1, 1) 252 | * cm(1, 1, 0) 253 | * (cm(0, 2, 0) * b3 + cm(2, 0, 0) * b5 + cm(0, 0, 2) * b9) 254 | + cm(1, 0, 1) 255 | * cm(1, 1, 0) 256 | * (cm(2, 0, 0) * b2 + cm(0, 0, 2) * b6 + cm(0, 2, 0) * b8) 257 | ) 258 | 259 | return CI 260 | -------------------------------------------------------------------------------- /tobascco/sbu.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | import configparser 3 | import itertools 4 | from logging import debug, error 5 | 6 | import rdkit.Chem 7 | import numpy as np 8 | from numpy import arccos, cos, pi, sin 9 | from scipy.spatial import distance 10 | 11 | from .atoms import Atom 12 | from .connectpoints import ConnectPoint 13 | from .element_properties import Radii 14 | 15 | class SBU_list(object): 16 | def __init__(self, sbu_list): 17 | self.list = sbu_list 18 | self._truncate() 19 | 20 | def _truncate(self): 21 | trunc = [] 22 | for sbu1, sbu2 in itertools.combinations(self.list, 2): 23 | if sbu1.parent == sbu2.name: 24 | trunc.append(self.list.index(sbu1)) 25 | sbu2.children.append(sbu1) 26 | elif sbu2.parent == sbu1.name: 27 | trunc.append(self.list.index(sbu2)) 28 | sbu1.children.append(sbu2) 29 | for k in reversed(sorted(trunc)): 30 | del self.list[k] 31 | 32 | def get(self, identifier, _METAL=False): 33 | """Produces the SBU with the identifier provided, this filters between 34 | organic and metal SBUs""" 35 | for sbu in self.list: 36 | if sbu.identifier == identifier and sbu.is_metal == _METAL: 37 | return sbu 38 | raise Exception("Could not find the SBU with the identifier %s" % (identifier)) 39 | 40 | def getall(self, identifier): 41 | """Produces the SBU with the target identifier regardless of being 42 | metal or organic.""" 43 | for sbu in self.list: 44 | if sbu.identifier == identifier: 45 | return sbu 46 | raise Exception("Could not find the SBU with the identifier %s" % (identifier)) 47 | 48 | 49 | class SBU(rdkit.Chem.rdchem.RWMol): 50 | """Contains atom information, connectivity of a Secondary Building Unit.""" 51 | 52 | def __init__(self, name=None): 53 | self.name = name 54 | self.identifier = 0 55 | self.index = 0 56 | self.topology = None 57 | self.charge = 0.0 58 | self.parent = None 59 | self.is_metal = False 60 | self.atoms = [] 61 | # child SBUs which are associated with this one through self.parent 62 | self.children = [] 63 | self.bonds = {} 64 | self.connect_points = [] 65 | self.edge_assignments = [] 66 | self.vertex_id = None 67 | 68 | def from_config(self, section, cfgdic): 69 | """take atom and connectivity information from a config file""" 70 | 71 | self.name = section 72 | self.identifier = cfgdic.getint(section, "index") 73 | try: 74 | self.charge = cfgdic.getfloat(section, "charge") 75 | except configparser.NoOptionError: 76 | # charge not specified in the input file. 
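            # For orientation, a minimal (entirely hypothetical) SBU section
            # that this parser would accept looks roughly like:
            #     [ExampleSBU]
            #     index = 1
            #     metal = True
            #     atoms =
            #         Cu 0.00000 0.00000 0.00000
            #     table =
            #         c0 0 S
            #     connectivity =
            #         0 ...
            # The option names mirror the has_option/get calls below; the
            # values are invented purely for illustration.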
77 |             pass
78 |         try:
79 |             self.topology = cfgdic.get(section, "topology")
80 |         except configparser.NoOptionError:
81 |             # topologies are deprecated for topcryst
82 |             pass
83 |         self.is_metal = cfgdic.getboolean(section, "metal")
84 |         if cfgdic.has_option(section, "parent"):
85 |             self.parent = cfgdic.get(section, "parent")
86 |         # read in atom information
87 |         # "coordinates" is deprecated, but kept for backwards compatibility
88 |         if cfgdic.has_option(section, "coordinates"):
89 |             atom_info = cfgdic.get(section, "coordinates").strip().splitlines()
90 |         elif cfgdic.has_option(section, "atoms"):
91 |             atom_info = cfgdic.get(section, "atoms").strip().splitlines()
92 | 
93 |         for idx, atom_line in enumerate(atom_info):
94 |             split_atom_line = atom_line.split()
95 |             newatom = Atom()
96 |             newatom.index = idx
97 |             newatom.sbu_index = self.identifier
98 |             newatom.sbu_metal = self.is_metal
99 |             if len(split_atom_line) == 5:
100 |                 newatom.from_config_ff(atom_line)
101 |             elif len(split_atom_line) == 4:
102 |                 newatom.from_config(atom_line)
103 |             self.atoms.append(newatom)
104 | 
105 |         # bonding table
106 |         if cfgdic.has_option(section, "table"):
107 |             for table_line in cfgdic.get(section, "table").strip().splitlines():
108 |                 bond = table_line.strip().split()
109 |                 # add the bonding information
110 |                 # first two cases are for bonding to connecting points
111 |                 if "c" in bond[0].lower():
112 |                     connect_ind = int(bond[0].lower().strip("c"))
113 |                     atom_ind = int(bond[1])
114 |                     self.atoms[atom_ind].sbu_bridge.append(connect_ind)
115 |                 elif "c" in bond[1].lower():
116 |                     # same case, with the connect point in the second column
117 |                     connect_ind = int(bond[1].lower().strip("c"))
118 |                     atom_ind = int(bond[0])
119 |                     self.atoms[atom_ind].sbu_bridge.append(connect_ind)
120 |                 else:
121 |                     b = tuple(sorted([int(bond[0]), int(bond[1])]))
122 |                     self.bonds[b] = bond[2]
123 | 
124 |         if not self.bonds:
125 |             debug(
126 |                 "No bonding found in input file for %s," % self.name
127 |                 + " so bonding will not be reported"
128 |             )
129 |         # Connect points
130 |         for idx, cp_line in enumerate(
131 |             cfgdic.get(section, "connectivity").strip().splitlines()
132 |         ):
133 |             connect_point = ConnectPoint()
134 |             connect_point.from_config(cp_line)
135 |             self.connect_points.append(connect_point)
136 | 
137 |         # check for constraints
138 |         if cfgdic.has_option(section, "bond_constraints"):
139 |             const_lines = cfgdic.get(section, "bond_constraints").strip().splitlines()
140 |             for constraint in const_lines:
141 |                 constraint = constraint.split()
142 |                 id = int(constraint[0])
143 |                 con = int(constraint[1])
144 |                 cp = self.get_cp(id)
145 |                 cp.constraint = con
146 | 
147 |         # new special/constraint section
148 |         elif cfgdic.has_option(section, "connect_flag"):
149 |             const_lines = cfgdic.get(section, "connect_flag").strip().splitlines()
150 |             for constraint in const_lines:
151 |                 id, special, const = [int(i) for i in constraint.split()]
152 |                 cp = self.get_cp(id)
153 |                 cp.special = special
154 |                 cp.constraint = const
155 | 
156 |         # new symmetry flag stuff
157 |         if cfgdic.has_option(section, "connect_sym"):
158 |             sym_lines = cfgdic.get(section, "connect_sym").strip().splitlines()
159 |             for sym in sym_lines:
160 |                 id, sym_flag = [int(i) for i in sym.split()]
161 |                 cp = self.get_cp(id)
162 |                 cp.symmetry = sym_flag
163 | 
164 |     def update_atoms(self, index_base, order):
165 |         self.bonds = {
166 |             (i + index_base, j + index_base): val for (i, j), val in self.bonds.items()
167 |         }
168 |         for atom in self.atoms:
169 |             atom.index += index_base
170 |             atom.sbu_order = order
171 | 
172 |     def rotate(self, 
rotation_matrix): 173 | """Apply the rotation matrix to the coordinates and connect_points in 174 | the SBU.""" 175 | # rotate the connect points 176 | [c.rotate(rotation_matrix) for c in self.connect_points] 177 | 178 | # rotate the atoms 179 | [a.rotate(rotation_matrix) for a in self.atoms] 180 | 181 | def translate(self, v): 182 | if self.two_connected and not self.linear: 183 | vector = v - self.closest_midpoint 184 | else: 185 | vector = v - self.COM[:3] 186 | [c.translate(vector) for c in self.connect_points] 187 | [i.translate(vector) for i in self.atoms] 188 | 189 | def calc_neighbours(self, radii=None): 190 | """Determines atom neighbours, based on bonding, and the supplied radii.""" 191 | atom_combos = itertools.combinations(range(len(self.atoms)), 2) 192 | atom_coordinates = np.array([atom.coordinates[:3] for atom in self.atoms]) 193 | dist_matrix = distance.cdist(atom_coordinates, atom_coordinates) 194 | for atid1, atid2 in atom_combos: 195 | atom1 = self.atoms[atid1] 196 | atom2 = self.atoms[atid2] 197 | 198 | # determine if neighbours 199 | bid1 = atom1.index 200 | bid2 = atom2.index 201 | btest = tuple(sorted([bid1, bid2])) 202 | # loops over Null if self.bonds = [] (i.e. no bonding info in the input file) 203 | dist = dist_matrix[atid1, atid2] 204 | for (i, j), btype in self.bonds.items(): 205 | if btest == (i, j): 206 | # append neighbours 207 | atom1.neighbours.append((dist, atid2)) 208 | atom2.neighbours.append((dist, atid1)) 209 | if radii is None: 210 | # keep a neighbour list of up to 3*(radii) of each atom. 211 | if dist_matrix[atid1, atid2] < 3.0 * ( 212 | Radii[atom1.element] + Radii[atom2.element] 213 | ): 214 | atom1.neighbours.append((dist, atid2)) 215 | atom2.neighbours.append((dist, atid1)) 216 | else: 217 | if dist_matrix[atid1, atid2] < radii: 218 | atom1.neighbours.append((dist, atid2)) 219 | atom2.neighbours.append((dist, atid1)) 220 | atom1.neighbours = list(set(atom1.neighbours)) 221 | atom2.neighbours = list(set(atom2.neighbours)) 222 | for atom in self.atoms: 223 | # sort in order of increasing distance 224 | atom.neighbours = sorted(atom.neighbours)[:] 225 | 226 | @property 227 | def closest_midpoint(self): 228 | """The 'intersection' between the first two connect points 229 | of the SBU. (or the closest point). 230 | """ 231 | # Calculate denominator 232 | A = self.connect_points[0].z[:3] 233 | B = self.connect_points[1].z[:3] 234 | _A = A / np.linalg.norm(A) 235 | _B = B / np.linalg.norm(B) 236 | cross = np.cross(_A, _B) 237 | denom = np.linalg.norm(cross) ** 2 238 | # If denominator is 0, the lines are parallel 239 | if denom == 0: 240 | return None 241 | a0 = self.connect_points[0].origin[:3] 242 | b0 = self.connect_points[1].origin[:3] 243 | 244 | t = b0 - a0 245 | det0 = np.linalg.det([t, _B, cross]) 246 | det1 = np.linalg.det([t, _A, cross]) 247 | 248 | t0 = det0 / denom 249 | t1 = det1 / denom 250 | 251 | pA = a0 + (_A * t0) 252 | pB = b0 + (_B * t1) 253 | d = np.linalg.norm(pA - pB) 254 | if d > 2.0: 255 | debug( 256 | "The distance between the line tracing the connection sites " 257 | + "of SBU %s is pretty big! 
%9.5f Angstroms" % (self.name, d) 258 | ) 259 | 260 | self._midpoint = (pA + pB) / 2.0 261 | 262 | v1 = self.connect_points[0].origin[:3] - self._midpoint 263 | v2 = self.connect_points[1].origin[:3] - self._midpoint 264 | v1 = v1 / np.linalg.norm(v1) 265 | v2 = v2 / np.linalg.norm(v2) 266 | cp1 = self.connect_points[1] 267 | cp0 = self.connect_points[0] 268 | return self._midpoint 269 | 270 | @property 271 | def linear(self): 272 | """Return true if linear else false""" 273 | if len(self.connect_points) != 2: 274 | return False 275 | if np.allclose( 276 | self.connect_points[0].z[:3], -self.connect_points[1].z[:3], atol=1e-2 277 | ): 278 | return True 279 | return False 280 | 281 | @property 282 | def two_connected(self): 283 | """Return true if only two connection sites""" 284 | return len(self.connect_points) == 2 285 | 286 | @property 287 | def degree(self): 288 | return len(self.connect_points) 289 | 290 | @property 291 | def COM(self): 292 | return np.average( 293 | np.array([atom.coordinates for atom in self.atoms]), 294 | axis=0, 295 | weights=np.array([atom.mass for atom in self.atoms]), 296 | ) 297 | 298 | @property 299 | def centre_of_atoms(self): 300 | return np.average(np.array([atom.coordinates for atom in self.atoms]), axis=0) 301 | 302 | @property 303 | def surface_area(self, probe=1.82, resolution=0.03): 304 | """Computes surface area. Currently uses default resolution of 0.03 A^2 305 | and an N2 probe radii of 1.82 A""" 306 | # make sure we are including neighbours with the correct probe size! 307 | self.calc_neighbours() 308 | xyz = [] 309 | surface_area = 0.0 310 | for atom in self.atoms: 311 | ncount = 0 312 | radii = Radii[atom.element] + probe 313 | atom_sa = 4.0 * pi * (radii ** 2) 314 | nsamples = int(atom_sa / resolution) 315 | phi = np.random.random(nsamples) * pi 316 | costheta = np.random.random(nsamples) * 2.0 - 1.0 317 | theta = arccos(costheta) 318 | points = ( 319 | np.array( 320 | [sin(theta) * cos(phi), sin(theta) * sin(phi), cos(theta)] 321 | ).transpose() 322 | * radii 323 | + atom.coordinates[:3] 324 | ) 325 | for point in points: 326 | for dist, atid in atom.neighbours: 327 | n_atom = self.atoms[atid] 328 | n_radii = Radii[n_atom.element] + probe 329 | if dist > radii + n_radii: 330 | # neighbours are sorted by distance. 331 | # the neighbour atoms are too far apart - include this point. 
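                        # Monte Carlo accounting (sketch): each of the
                        # `nsamples` points carries atom_sa / nsamples of this
                        # atom's sphere area, so the accessible area reduces to
                        #     SA_atom ~= atom_sa * ncount / nsamples,
                        # which is exactly what the accumulator after this
                        # loop sums over all atoms.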
332 |                         ncount += 1
333 |                         xyz.append((atom.element, point))
334 |                         break
335 |                     elif np.linalg.norm(point - n_atom.coordinates[:3]) < n_radii:
336 |                         # collision with point
337 |                         break
338 |                 else:
339 |                     ncount += 1
340 |                     xyz.append((atom.element, point))
341 | 
342 |             surface_area += (atom_sa * ncount) / nsamples
343 |         return surface_area
344 | 
345 |     @property
346 |     def max_span(self):
347 |         cp_coords = [cp.origin[:3] for cp in self.connect_points]
348 |         dist_matrix = distance.cdist(cp_coords, cp_coords)
349 |         max_dist = 0.0
350 |         for cp1, cp2 in itertools.combinations(range(len(self.connect_points)), 2):
351 |             if dist_matrix[cp1, cp2] > max_dist:
352 |                 max_dist = dist_matrix[cp1, cp2]
353 |         return max_dist
354 | 
355 |     @property
356 |     def moment_of_inertia(self):
357 |         try:
358 |             return self.I
359 |         except AttributeError:
360 |             moi = np.empty((3, 3))
361 |             rsq = np.zeros((3, 3))
362 |             self.I = np.empty((3, 3))
363 |             for (i, j), val in np.ndenumerate(moi):
364 |                 val = 0.0
365 |                 for atom in self.atoms:
366 |                     val += atom.mass * (atom.coordinates[i] - self.COM[i]) * (
367 |                         atom.coordinates[j] - self.COM[j]
368 |                     )
369 |                 moi[i, j] = val
370 |                 rval = np.identity(3)[i, j] * val
371 |                 rsq[0, 0] += rval
372 |                 rsq[1, 1] += rval
373 |                 rsq[2, 2] += rval
374 |             self.I = rsq - moi
375 |             return self.I
376 | 
377 |     @property
378 |     def approximate_ellipsoid_volume(self):
379 |         # PCA.
380 |         coords = np.array([j.coordinates[:3] for j in self.atoms])
381 |         cov_mat = np.cov((coords - self.COM[:3]).T)
382 |         eig_val, eig_vec = np.linalg.eig(cov_mat)
383 |         # sf = float(len(self)) - 1.
384 |         # eig_vec*=sf
385 |         tformed = np.dot(coords, eig_vec)
386 |         r = np.empty(3)
387 |         r[0] = max(
388 |             ([abs(x - y) for x, y in itertools.combinations(tformed[:, 0], 2)])
389 |         )  # max distance in the first axis
390 |         r[1] = max(
391 |             ([abs(x - y) for x, y in itertools.combinations(tformed[:, 1], 2)])
392 |         )  # max distance in the second axis
393 |         r[2] = max(
394 |             ([abs(x - y) for x, y in itertools.combinations(tformed[:, 2], 2)])
395 |         )  # max distance in the third axis
396 |         for id, v in enumerate(eig_vec.T):
397 |             line = ""
398 |             atom = "H"
399 |             line += "%s %12.5f %12.5f %12.5f " % (
400 |                 tuple([atom] + self.centre_of_atoms[:3].tolist())
401 |             )
402 |             line += "atom_vector %12.5f %12.5f %12.5f " % (tuple((r[id]) * v[:3]))
403 |             # print(line)
404 |         # minimum R distance is the carbon radius.
405 |         R0 = Radii["C"]
406 |         r1 = r[0] / 2.0 if r[0] / 2.0 >= R0 else R0
407 |         r2 = r[1] / 2.0 if r[1] / 2.0 >= R0 else R0
408 |         r3 = r[2] / 2.0 if r[2] / 2.0 >= R0 else R0
409 |         return 4.0 * np.pi * r1 * r2 * r3 / 3.0
410 | 
411 |     def get_cp(self, identifier):
412 |         for cp in self.connect_points:
413 |             if identifier == cp.identifier:
414 |                 return cp
415 |         error(
416 |             "%i not in the connecting points! " % (identifier)
" % (identifier) 417 | + ", ".join([str(i.identifier) for i in self.connect_points]) 418 | ) 419 | 420 | def __len__(self): 421 | return len(self.atoms) 422 | 423 | def __str__(self): 424 | """Return an .xyz format of the SBU.""" 425 | line = "" 426 | for cp in self.connect_points: 427 | if cp.connected: 428 | atom = "He" 429 | else: 430 | atom = "H" 431 | line += "%s %12.5f %12.5f %12.5f " % ( 432 | tuple([atom] + cp.origin[:3].tolist()) 433 | ) 434 | line += "atom_vector %12.5f %12.5f %12.5f " % (tuple(cp.z[:3])) 435 | line += "atom_vector %12.5f %12.5f %12.5f\n" % (tuple(cp.y[:3])) 436 | 437 | for atom in self.atoms: 438 | line += "%s %12.5f %12.5f %12.5f\n" % ( 439 | tuple([atom.element] + [i for i in atom.coordinates[:3]]) 440 | ) 441 | 442 | return line 443 | -------------------------------------------------------------------------------- /tobascco/src/Makefile: -------------------------------------------------------------------------------- 1 | CC = g++ 2 | CFLAGS = -Wall -g 3 | LDFLAGS = -lm 4 | 5 | graph.o : graph.c graph.h 6 | ${CC} ${CFLAGS} -c graph.c 7 | 8 | all : graph.o 9 | ${CC} ${CFLAGS} graph.o main.c ${LDFLAGS} -o main 10 | 11 | clean: 12 | rm -f *.o main 13 | -------------------------------------------------------------------------------- /tobascco/src/graph.c: -------------------------------------------------------------------------------- 1 | #include "graph.h" 2 | #include 3 | #include 4 | #include //std::find 5 | #include //std::begin, std::end 6 | 7 | void Graph::init_edges(int size) 8 | { 9 | edges = new Edge[size]; 10 | _size = size; 11 | } 12 | 13 | void Graph::init_vertices(int order) 14 | { 15 | vertices = new Vertex[order]; 16 | for (int j = 0; j < order; j++) 17 | { 18 | vertices[j].set_index(j); 19 | } 20 | _order = order; 21 | } 22 | 23 | void Graph::add_edge(int i, int j, std::vector volt) 24 | { 25 | edges[_order].init_edge(i, j, volt); 26 | _order++; 27 | } 28 | 29 | void Graph::set_vertices() 30 | { 31 | //assuming all edges have been read in 32 | int vi, vj; 33 | int vmax = 0; 34 | if (_size == 0) 35 | return; 36 | for (int i = 0; i < _size; i++) 37 | { 38 | //init screen for max vertices 39 | if (edges[i].from() > vmax) 40 | { 41 | vmax = edges[i].from(); 42 | } 43 | if (edges[i].to() > vmax) 44 | { 45 | vmax = edges[i].to(); 46 | } 47 | } 48 | init_vertices(vmax + 1); //+1 to account for the 0th index 49 | for (int i = 0; i < _size; i++) 50 | { 51 | //add neighbour info 52 | vi = edges[i].from(); 53 | vj = edges[i].to(); 54 | vertices[vi].setNeighbour(vj); 55 | vertices[vj].setNeighbour(vi); 56 | } 57 | } 58 | 59 | void Edge::init_edge(int f, int t, std::vector volt) 60 | { 61 | _f = f; 62 | _t = t; 63 | voltage = volt; 64 | } 65 | 66 | void Vertex::setNeighbour(int id) 67 | { 68 | bool push = true; 69 | for (int i = 0; i < _degree; i++) 70 | { 71 | if (_neighbours[i] == id) 72 | push = false; 73 | } 74 | if (push) 75 | { 76 | _neighbours.push_back(id); 77 | _degree++; 78 | } 79 | } 80 | 81 | int Vertex::getNeighbour(int id) 82 | { 83 | return _neighbours[id]; 84 | } 85 | 86 | //return array of edges which are incident on 87 | // this vertex. 
88 | // regardless of whether they point in or out of the vertex)
89 | int *Graph::get_connected_edges(int v)
90 | {
91 |     int size = vertices[v].degree(), ind = 0;
92 |     int *edge_container = new int[size];
93 |     for (int ii = 0; ii < _size; ii++)
94 |     {
95 |         if ((edges[ii].from() == v) || (edges[ii].to() == v))
96 |         {
97 |             edge_container[ind] = ii;
98 |             ind++;
99 |         }
100 |     }
101 |     return edge_container;
102 | }
103 | 
104 | //Prim's Minimum Spanning Tree algorithm
105 | void Graph::MinimumSpanningTree(std::vector<int> &pool, std::vector<int> &used, int i)
106 | {
107 |     //std::cout<<"Vertex: "<
--------------------------------------------------------------------------------
/tobascco/src/graph.h:
--------------------------------------------------------------------------------
11 | #include <string>
12 | #include <vector>
13 | 
14 | class Edge
15 | {
16 |     std::vector<int> voltage;
17 |     int _f, _t;
18 | 
19 | public:
20 |     Edge(int f, int t)
21 |     {
22 |         _f = f;
23 |         _t = t;
24 |     }
25 |     Edge(int f, int t, std::vector<int> volt) { init_edge(f, t, volt); }
26 |     Edge() : _f(0), _t(0) {}
27 |     ~Edge(){};
28 |     void init_edge(int f, int t, std::vector<int> volt);
29 |     int from() { return _f; }
30 |     int to() { return _t; }
31 | };
32 | 
33 | class Vertex
34 | {
35 |     int _i, _degree;
36 |     std::vector<int> _neighbours;
37 | 
38 | public:
39 |     void set_index(int id) { _i = id; }
40 |     int index() { return _i; }
41 |     int degree() { return _degree; }
42 |     Vertex() : _i(0), _degree(0) {}
43 |     Vertex(int id) : _degree(0) { _i = id; }
44 |     ~Vertex(){};
45 |     void setNeighbour(int j);
46 |     int getNeighbour(int j);
47 |     std::vector<int>::iterator neighbour_it() { return _neighbours.begin(); }
48 |     std::vector<int>::iterator neighbour_end() { return _neighbours.end(); }
49 | };
50 | 
51 | class Graph
52 | {
53 | private:
54 |     std::string name;
55 |     Edge *edges;
56 |     Vertex *vertices;
57 |     int _size;  //size == number of edges in graph
58 |     int _order; //order == number of vertices in graph
59 | 
60 | public:
61 |     void setName(std::string s) { name = s; }
62 |     std::string getName() { return name; }
63 |     Graph() : _size(0), _order(0) {}
64 |     Graph(const std::string s) : _size(0), _order(0) { setName(s); }
65 |     ~Graph();
66 |     void init_edges(int size);
67 |     void init_vertices(int order);
68 |     void set_vertices();
69 |     int size() { return _size; }
70 |     int order() { return _order; }
71 |     void add_edge(int i, int j, std::vector<int> volt);
72 |     int *get_connected_edges(int vertex);
73 |     Vertex get_vertex(int i) { return vertices[i]; }
74 |     void MinimumSpanningTree(std::vector<int> &, std::vector<int> &, int);
75 | };
76 | 
--------------------------------------------------------------------------------
/tobascco/src/main.c:
--------------------------------------------------------------------------------
1 | #include <iostream>
2 | #include <fstream>
3 | #include <sstream>
4 | #include <string>
5 | #include <vector>
6 | #include <cstdlib>
7 | #include <cstring>
8 | #include "graph.h"
9 | 
10 | 
11 | std::vector<std::string> &split(const std::string &s, char delim, std::vector<std::string> &elems){
12 |     std::stringstream ss(s);
13 |     std::string item;
14 |     while (std::getline(ss, item, delim)) {
15 |         if (!item.empty())
16 |             elems.push_back(item);
17 |     }
18 |     return elems;
19 | }
20 | 
21 | std::vector<std::string> split(const std::string &s, char delim) {
22 |     std::vector<std::string> elems;
23 |     split(s, delim, elems);
24 |     return elems;
25 | }
26 | 
27 | void readfile(const char* filename, Graph &g){
28 |     std::ifstream f(filename);
29 |     std::string line;
30 |     std::vector<std::string> tok;
31 |     int u;
32 |     while(std::getline(f, line)){
33 |         tok = split(line, ' ');
34 |         if (tok[0] == "id"){
35 |             g.setName(tok[1]);
36 |         }
37 |         else if (tok[0] == "key"){
38 |             u = tok.size();
39 |             int nedge = ( (u - 2) / 5);
40 |             g.init_edges(nedge);
41 |             for (int j=2; j < u; j+=5){
42 |                 int v1 = atoi(tok[j].c_str()) - 1;
43 |                 int v2 = 
atoi(tok[j+1].c_str()) - 1; 44 | int vv[3] = { atoi(tok[j+2].c_str()), 45 | atoi(tok[j+3].c_str()), 46 | atoi(tok[j+4].c_str()) }; 47 | std::vector volt(&vv[0], &vv[0]+3); 48 | g.add_edge(v1, v2, volt); 49 | } 50 | 51 | } 52 | 53 | } 54 | std::cout<<"Graph topology: "< tree_nodes, tree_edges; 66 | int kk = 0; 67 | //min spanning tree works, but not sure if the recursive function can 'back out' like the python equivalent. 68 | g.MinimumSpanningTree(tree_nodes, tree_edges, kk); 69 | std::cout< 3 | #include 4 | #include 5 | #include 6 | #include 7 | #include 8 | #include 9 | #include 10 | 11 | void create_full_rep(int, int, double**, int, int, const double*, double**); 12 | static PyObject * nloptimize(PyObject *self, PyObject *args); 13 | void forward_difference_grad(double*, const double*, double, void*, double); 14 | void central_difference_grad(double*, const double* , void*, double); 15 | double objectivefunc(unsigned, const double*, double*, void*); 16 | double objectivefunc2D(unsigned, const double*, double*, void*); 17 | void central_difference_grad2D(double*, const double* , void*, double); 18 | void create_metric_tensor(int, const double*, double*); 19 | void create_metric_tensor2D(int, const double*, double*); 20 | double * get1darrayd(PyArrayObject*); 21 | int * get1darrayi(PyArrayObject*); 22 | double ** get2darrayd(PyArrayObject*); 23 | double ** construct2darray(int rows, int cols); 24 | void free_2d_array(double**, int); 25 | 26 | static PyMethodDef functions[] = { 27 | {"nloptimize", (PyCFunction)nloptimize, METH_VARARGS, NULL}, 28 | {NULL, NULL} 29 | }; 30 | 31 | #if PY_MAJOR_VERSION >= 3 32 | #define MOD_ERROR_VAL NULL 33 | #define MOD_SUCCESS_VAL(val) val 34 | #define MOD_INIT(name) PyMODINIT_FUNC PyInit_##name(void) 35 | #define MOD_DEF(ob,name,doc,methods)\ 36 | static struct PyModuleDef moduledef = { \ 37 | PyModuleDef_HEAD_INIT, name, doc, -1, methods, };\ 38 | ob = PyModule_Create(&moduledef);\ 39 | import_array(); 40 | #define PyInt_FromLong PyLong_FromLong 41 | #define PyString_AsString PyBytes_AsString 42 | #define PyInt_AsLong PyLong_AsLong 43 | #else 44 | #define MOD_ERROR_VAL 45 | #define MOD_SUCCESS_VAL(val) 46 | #define MOD_INIT(name) PyMODINIT_FUNC init##name(void) 47 | #define MOD_DEF(ob,name,doc,methods) \ 48 | ob = Py_InitModule3(name,methods,doc); \ 49 | import_array(); 50 | #endif 51 | 52 | MOD_INIT(_nloptimize) 53 | { 54 | PyObject *m; 55 | MOD_DEF(m, "_nloptimize", "", functions); 56 | if (m==NULL) 57 | return MOD_ERROR_VAL; 58 | 59 | return MOD_SUCCESS_VAL(m); 60 | } 61 | 62 | struct data_info{ 63 | int rep_size, cycle_size, nz_size, x_size; 64 | int B_shape, ndim, diag_ind; 65 | int start; 66 | int *_zi, *_zj; 67 | double ** _cycle_cocycle_I; 68 | double ** _cycle_rep; 69 | double ** _ip_mat; 70 | double ** rep; 71 | double ** edge_vectors; 72 | double ** edge_vectors_T; 73 | double ** M1; 74 | double * Z; 75 | double * diag; 76 | double * diag2; 77 | double * farray; 78 | double * barray; 79 | double * stored_dp; 80 | }; 81 | 82 | double compute_inner_product_fast(const double*, data_info); 83 | double compute_inner_product_fast2D(const double*, data_info); 84 | void jacobian3D_sums(double*, const double* , data_info); 85 | 86 | static PyObject * nloptimize(PyObject *self, PyObject *args) 87 | { 88 | 89 | int ndim; 90 | int diag_ind; 91 | double minf; /* the minimum objective value, upon return */ 92 | double xrel; /* the relative tolerance for the input variables */ 93 | double frel; /* the relative tolerance for the function change */ 94 | 
data_info data; 95 | nlopt_result retval; /*Return value from nlopt: 1 = GENERAL SUCCESS 96 | 2 = STOPVAL REACHED 97 | 3 = FTOL REACHED 98 | 4 = XTOL REACHED 99 | 5 = MAXEVAL REACHED 100 | 6 = MAXTIME REACHED*/ 101 | nlopt_algorithm global; //global optimizer 102 | nlopt_algorithm local; //local optimizer 103 | PyArrayObject* lower_bounds; 104 | PyArrayObject* upper_bounds; 105 | PyArrayObject* init_x; 106 | PyArrayObject* array_x; 107 | PyArrayObject* inner_product_matrix; 108 | PyArrayObject* cycle_rep; 109 | PyArrayObject* cycle_cocycle_I; 110 | PyArrayObject* zero_indi, *zero_indj; 111 | PyObject* pgoptim, *ploptim; 112 | //read in all the parameters 113 | if (!PyArg_ParseTuple(args, "iiOOOOOOOOddOO", 114 | &ndim, 115 | &diag_ind, 116 | &lower_bounds, 117 | &upper_bounds, 118 | &init_x, 119 | &cycle_rep, 120 | &cycle_cocycle_I, 121 | &inner_product_matrix, 122 | &zero_indi, 123 | &zero_indj, 124 | &xrel, 125 | &frel, 126 | &pgoptim, 127 | &ploptim)){ 128 | return NULL; 129 | }; 130 | nlopt_opt opt, local_opt; 131 | double *x, *lb, *ub; 132 | std::string goptim=PyString_AsString(pgoptim); 133 | std::string loptim=PyString_AsString(ploptim); 134 | lb = get1darrayd(lower_bounds); 135 | ub = get1darrayd(upper_bounds); 136 | x = get1darrayd(init_x); 137 | npy_intp* tt; 138 | tt = PyArray_DIMS(init_x); 139 | data.x_size = (int)tt[0]; 140 | data._cycle_cocycle_I = get2darrayd(cycle_cocycle_I); 141 | data._cycle_rep = get2darrayd(cycle_rep); 142 | data._ip_mat = get2darrayd(inner_product_matrix); 143 | data._zi = get1darrayi(zero_indi); 144 | data._zj = get1darrayi(zero_indj); 145 | //PyObject_Print((PyObject*)zero_indj, stdout, 0); 146 | data.ndim = ndim; 147 | data.diag_ind = diag_ind; 148 | tt = PyArray_DIMS(zero_indi); 149 | data.nz_size = (int)tt[0]; 150 | tt = PyArray_DIMS(cycle_rep); 151 | data.cycle_size = (int)tt[0]; 152 | data.rep_size = (int)tt[0] + data.x_size/data.ndim; 153 | data.rep = construct2darray(data.rep_size, data.ndim); 154 | tt = PyArray_DIMS(cycle_cocycle_I); 155 | data.B_shape = (int) tt[0]; 156 | // B_I * rep = edge_vectors 157 | // edge_vectors * metric_tensor = first_product 158 | // first_product * edge_vectors.T = inner_product 159 | // 160 | // piecewise calculation of (inner_product[i][j] - _ip_mat[i][j])^2 161 | // summation of squared errors = return val. 
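    // Equivalently, a sketch of the same objective in matrix form: with
    // E = B_I * rep and the metric tensor M assembled from the parameter
    // vector x, the optimizers below minimise
    //     f(x) = sum over the stored (i, j) pairs of ((E M E^T)_ij - ip_mat_ij)^2,
    // which is what objectivefunc / objectivefunc2D return.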
162 | data.edge_vectors = construct2darray(data.B_shape, data.ndim); 163 | data.edge_vectors_T = construct2darray(data.ndim, data.B_shape); 164 | if(ndim == 3){ 165 | data.Z = (double*)malloc(sizeof(double) * 6); 166 | data.start = 6; 167 | } 168 | else if(data.ndim == 2){ 169 | data.Z = (double*)malloc(sizeof(double) * 3); 170 | data.start = 3; 171 | } 172 | data.M1 = construct2darray(data.B_shape, data.ndim); 173 | data.farray = (double*)malloc(sizeof(double) * data.x_size); 174 | data.barray = (double*)malloc(sizeof(double) * data.x_size); 175 | data.diag = (double*)malloc(sizeof(double) * data.B_shape); 176 | data.diag2 = (double*)malloc(sizeof(double) * data.B_shape); 177 | data.stored_dp = (double*)malloc(sizeof(double) * data.nz_size); 178 | //initialize the local and global optimizers, so the compilation doesn't spew 179 | // out useless warnings 180 | global=NLOPT_GN_DIRECT; 181 | local=NLOPT_LD_LBFGS; 182 | //determine local optimizer 183 | if (loptim == "cobyla")local=NLOPT_LN_COBYLA; 184 | else if (loptim == "bobyqa")local=NLOPT_LN_BOBYQA; 185 | else if (loptim == "newoua")local=NLOPT_LN_NEWUOA_BOUND; 186 | else if (loptim == "praxis")local=NLOPT_LN_PRAXIS; 187 | else if (loptim == "nelder-mead")local=NLOPT_LN_NELDERMEAD; 188 | else if (loptim == "mma")local=NLOPT_LD_MMA; 189 | else if (loptim == "ccsa")local=NLOPT_LD_CCSAQ; 190 | else if (loptim == "slsqp")local=NLOPT_LD_SLSQP; 191 | else if (loptim == "lbfgs")local=NLOPT_LD_LBFGS; 192 | else if (loptim == "newton")local=NLOPT_LD_TNEWTON; 193 | else if (loptim == "newton-restart")local=NLOPT_LD_TNEWTON_RESTART; 194 | else if (loptim == "newton-precond")local=NLOPT_LD_TNEWTON_PRECOND; 195 | else if (loptim == "newton-precond-restart")local=NLOPT_LD_TNEWTON_PRECOND_RESTART; 196 | else if (loptim == "var1")local=NLOPT_LD_VAR1; 197 | else if (loptim == "var2")local=NLOPT_LD_VAR2; 198 | 199 | //GLOBAL OPTIMIZER*********************************** 200 | if (!goptim.empty()){ 201 | if (goptim == "direct")global=NLOPT_GN_DIRECT; 202 | else if (goptim == "direct-l")global=NLOPT_GN_DIRECT_L; 203 | //else if (goptim == "direct-l-rand")global=NLOPT_GLOBAL_DIRECT_L_RAND; 204 | //else if (goptim == "direct-noscale")global=NLOPT_GLOBAL_DIRECT_NOSCAL; 205 | //else if (goptim == "direct-l-noscale")global=NLOPT_GLOBAL_DIRECT_L_NOSCAL; 206 | //else if (goptim == "direct-l-rand-noscale")global=NLOPT_GLOBAL_DIRECT_L_RAND_NOSCAL; 207 | else if (goptim == "crs2")global=NLOPT_GN_CRS2_LM; 208 | else if (goptim == "stogo")global=NLOPT_GD_STOGO; 209 | else if (goptim == "stogo-rand")global=NLOPT_GD_STOGO_RAND; 210 | else if (goptim == "isres")global=NLOPT_GN_ISRES; 211 | else if (goptim == "esch")global=NLOPT_GN_ESCH; 212 | else if (goptim == "mlsl")global=NLOPT_G_MLSL; 213 | else if (goptim == "mlsl-lds")global=NLOPT_G_MLSL_LDS; 214 | opt = nlopt_create(global, data.x_size); 215 | // create local optimizer for the mlsl algorithms. 
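    // (MLSL is a multistart scheme: NLopt requires a subsidiary local
    // optimizer to be attached via nlopt_set_local_optimizer, as done here,
    // before the mlsl/mlsl-lds algorithms will run.)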
216 | if ((goptim == "mlsl") || (goptim == "mlsl-lds")){ 217 | local_opt = nlopt_create(local, data.x_size); 218 | nlopt_set_local_optimizer(opt, local_opt); 219 | } 220 | if(ndim==3){ 221 | nlopt_set_min_objective(opt, objectivefunc, &data); 222 | } 223 | else if(ndim==2){ 224 | nlopt_set_min_objective(opt, objectivefunc2D, &data); 225 | } 226 | 227 | nlopt_set_lower_bounds(opt, lb); 228 | nlopt_set_upper_bounds(opt, ub); 229 | nlopt_set_xtol_rel(opt, xrel); // set absolute tolerance on the change in the input parameters 230 | nlopt_set_ftol_abs(opt, frel); // set absolute tolerance on the change in the objective funtion 231 | retval = nlopt_optimize(opt, x, &minf); 232 | if (retval < 0) { 233 | printf("global nlopt failed!\n"); 234 | } 235 | nlopt_destroy(opt); 236 | } 237 | /*else{ 238 | printf("No global optimisation requested, preparing local optimiser\n"); 239 | }*/ 240 | //END GLOBAL OPTIMIZER******************************* 241 | 242 | //LOCAL OPTIMIZER*********************************** 243 | opt = nlopt_create(local, data.x_size); 244 | nlopt_set_vector_storage(opt, 10000); // for quasi-newton algorithms, how many gradients to store 245 | if(ndim==3){ 246 | nlopt_set_min_objective(opt, objectivefunc, &data); 247 | } 248 | else if(ndim==2){ 249 | nlopt_set_min_objective(opt, objectivefunc2D, &data); 250 | } 251 | nlopt_set_lower_bounds(opt, lb); 252 | nlopt_set_upper_bounds(opt, ub); 253 | nlopt_set_xtol_rel(opt, xrel); // set absolute tolerance on the change in the input parameters 254 | nlopt_set_ftol_abs(opt, frel); // set absolute tolerance on the change in the objective funtion 255 | retval = nlopt_optimize(opt, x, &minf); 256 | if (retval < 0) { 257 | printf("nlopt failed!\n"); 258 | Py_INCREF(Py_None); 259 | return(Py_None); 260 | } 261 | //END LOCAL OPTIMIZER******************************* 262 | void* xptr; 263 | npy_intp dim = data.x_size; 264 | PyObject* val; 265 | array_x = (PyArrayObject*) PyArray_SimpleNew(1, &dim, NPY_DOUBLE); 266 | 267 | for (int i=0; i= len(atoms2) else atoms2 93 | bond_atoms = atoms2 if len(atoms2) <= len(atoms1) else atoms1 94 | for atom in base_atoms: 95 | if bond_atoms: 96 | shifted_coords = self.min_img(atom, bond_atoms) 97 | dist = distance.cdist([atom.coordinates[:3]], shifted_coords) 98 | dist = dist[0].tolist() 99 | bond_atom = bond_atoms[dist.index(min(dist))] 100 | self.bonds.update({tuple(sorted((atom.index, bond_atom.index))): "S"}) 101 | 102 | def _compute_bond_info(self): 103 | """Update bonds to contain bond type, distances, and min img 104 | shift.""" 105 | supercells = np.array(list(itertools.product((-1, 0, 1), repeat=3))) 106 | unit_repr = np.array([5, 5, 5], dtype=int) 107 | for (at1, at2), val in self.bonds.items(): 108 | atom1 = self.atoms[at1] 109 | atom2 = self.atoms[at2] 110 | fcoords = atom2.scaled_pos(self.cell.inverse) + supercells 111 | coords = [] 112 | for j in fcoords: 113 | coords.append(np.dot(j, self.cell.lattice)) 114 | coords = np.array(coords) 115 | dists = distance.cdist([atom1.coordinates[:3]], coords) 116 | dists = dists[0].tolist() 117 | image = dists.index(min(dists)) 118 | dist = min(dists) 119 | sym = ( 120 | "." 121 | if all([i == 0 for i in supercells[image]]) 122 | else "1_%i%i%i" 123 | % (tuple(np.array(supercells[image], dtype=int) + unit_repr)) 124 | ) 125 | self.bonds[(at1, at2)] = (val, dist, sym) 126 | 127 | def compute_overlap(self): 128 | """Determines if there is atomistic overlap. 
Includes periodic 129 | boundary considerations.""" 130 | for id, atom in enumerate(self.atoms): 131 | elem1 = atom.element 132 | non_bonded = [ 133 | i.index 134 | for i in self.atoms[id:] 135 | if tuple(sorted((atom.index, i.index))) not in self.bonds.keys() 136 | ] 137 | bonded = [ 138 | i.index 139 | for i in self.atoms[id:] 140 | if tuple(sorted((atom.index, i.index))) in self.bonds.keys() 141 | ] 142 | indices = [i.index for i in self.atoms[id:]] 143 | shifted_vectors = self.min_img(atom, self.atoms[id:]) 144 | dist_mat = distance.cdist([atom.coordinates[:3]], shifted_vectors) 145 | for (atom1, atom2), dist in np.ndenumerate(dist_mat): 146 | id2 = indices[atom2] 147 | elem2 = self.atoms[id2].element 148 | if ( 149 | (id != id2) 150 | and (id2 in non_bonded) 151 | and (Radii[elem1] + Radii[elem2]) * self.options.overlap_tolerance 152 | > dist 153 | ): 154 | return True 155 | elif ( 156 | (id != id2) 157 | and (id2 in bonded) 158 | and 1.0 * self.options.overlap_tolerance > dist 159 | ): 160 | return True 161 | return False 162 | 163 | def min_img(self, atom, atoms): 164 | """Orient all atoms to within the minimum image 165 | of the provided atom.""" 166 | sc_atom = atom.scaled_pos(self.cell.inverse) 167 | shifted_coords = [] 168 | for at in atoms: 169 | scaled = at.scaled_pos(self.cell.inverse) 170 | shift = np.around(sc_atom - scaled) 171 | shifted_coords.append(np.dot((scaled + shift), self.cell.lattice)) 172 | return shifted_coords 173 | 174 | def write_cif(self): 175 | """Write structure information to a cif file.""" 176 | self._compute_bond_info() 177 | c = CIF(name=self.name) 178 | # c.insert_block_order("fragment", 4) 179 | labels = [] 180 | # data block 181 | c.add_data("data", data_=self.name) 182 | c.add_data("data", _audit_creation_date=CIF.label(c.get_time())) 183 | c.add_data( 184 | "data", 185 | _audit_creation_method=CIF.label("TopCryst v.%s" % (self.options.version)), 186 | ) 187 | if self.charge: 188 | c.add_data( 189 | "data", 190 | _chemical_properties_physical="net charge is %12.5f" % (self.charge), 191 | ) 192 | 193 | # sym block 194 | c.add_data("sym", _symmetry_space_group_name_H_M=CIF.label("P1")) 195 | c.add_data("sym", _symmetry_Int_Tables_number=CIF.label("1")) 196 | c.add_data("sym", _symmetry_cell_setting=CIF.label("triclinic")) 197 | 198 | # sym loop block 199 | c.add_data("sym_loop", _symmetry_equiv_pos_as_xyz=CIF.label("'x, y, z'")) 200 | 201 | # cell block 202 | c.add_data("cell", _cell_length_a=CIF.cell_length_a(self.cell.a)) 203 | c.add_data("cell", _cell_length_b=CIF.cell_length_b(self.cell.b)) 204 | c.add_data("cell", _cell_length_c=CIF.cell_length_c(self.cell.c)) 205 | c.add_data("cell", _cell_angle_alpha=CIF.cell_angle_alpha(self.cell.alpha)) 206 | c.add_data("cell", _cell_angle_beta=CIF.cell_angle_beta(self.cell.beta)) 207 | c.add_data("cell", _cell_angle_gamma=CIF.cell_angle_gamma(self.cell.gamma)) 208 | 209 | # for name, order in self.fragments: 210 | # c.add_data("fragment", _chemical_identifier=CIF.label(order), 211 | # _chemical_name=CIF.label(name)) 212 | # atom block 213 | element_counter = {} 214 | if self.options.find_symmetric_h: 215 | # find symmetry 216 | sym = Symmetry(self.options) 217 | sym.add_structure(self) 218 | sym.refine_cell() 219 | h_equiv = sym.get_equivalent_hydrogens() 220 | self.space_group_name = sym.get_space_group_name() 221 | self.space_group_number = sym.get_space_group_number() 222 | 223 | for id, atom in enumerate(self.atoms): 224 | label = c.get_element_label(atom.element) 225 | labels.append(label) 226 | 
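            # The add_data calls below emit one row per atom into the CIF
            # "atoms" loop; a finished row looks roughly like this
            # (values invented):
            #     C1  C  C_R  0.12345  0.45678  0.89012
            # i.e. label, element, force-field type, fractional coordinates.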
c.add_data("atoms", _atom_site_label=CIF.atom_site_label(label)) 227 | c.add_data( 228 | "atoms", _atom_site_type_symbol=CIF.atom_site_type_symbol(atom.element) 229 | ) 230 | c.add_data( 231 | "atoms", 232 | _atom_site_description=CIF.atom_site_description(atom.force_field_type), 233 | ) 234 | # c.add_data("atoms", _atom_site_fragment=CIF.atom_site_fragment(atom.sbu_order)) 235 | if self.options.find_symmetric_h: 236 | if atom.element == "H": 237 | symconst = h_equiv[id] 238 | else: 239 | symconst = -1 240 | c.add_data( 241 | "atoms", _atom_site_constraints=CIF.atom_site_constraints(symconst) 242 | ) 243 | fc = atom.scaled_pos(self.cell.inverse) 244 | c.add_data("atoms", _atom_site_fract_x=CIF.atom_site_fract_x(fc[0])) 245 | c.add_data("atoms", _atom_site_fract_y=CIF.atom_site_fract_y(fc[1])) 246 | c.add_data("atoms", _atom_site_fract_z=CIF.atom_site_fract_z(fc[2])) 247 | 248 | # bond block 249 | for (at1, at2), (type, dist, sym) in self.bonds.items(): 250 | label1 = labels[at1] 251 | label2 = labels[at2] 252 | c.add_data( 253 | "bonds", 254 | _geom_bond_atom_site_label_1=CIF.geom_bond_atom_site_label_1(label1), 255 | ) 256 | c.add_data( 257 | "bonds", 258 | _geom_bond_atom_site_label_2=CIF.geom_bond_atom_site_label_2(label2), 259 | ) 260 | c.add_data("bonds", _geom_bond_distance=CIF.geom_bond_distance(dist)) 261 | c.add_data( 262 | "bonds", _geom_bond_site_symmetry_2=CIF.geom_bond_site_symmetry_2(sym) 263 | ) 264 | c.add_data("bonds", _ccdc_geom_bond_type=CIF.ccdc_geom_bond_type(type)) 265 | 266 | file = open("%s.cif" % self.name, "w") 267 | file.writelines(str(c)) 268 | file.close() 269 | 270 | 271 | class Cell(object): 272 | """contains periodic vectors for the structure.""" 273 | 274 | def __init__(self): 275 | self.basis = 0 276 | self.lattice = np.identity(3) 277 | self.nlattice = np.zeros((3, 3)) 278 | 279 | @property 280 | def inverse(self): 281 | try: 282 | return self._ilattice 283 | except AttributeError: 284 | self._ilattice = np.array(np.matrix(self.lattice).I) 285 | return self._ilattice 286 | 287 | def add(self, index, vector): 288 | """Adds a periodic vector to the lattice.""" 289 | self.lattice[index][:] = vector.copy() 290 | self.nlattice[index][:] = vector.copy() / np.linalg.norm(vector) 291 | 292 | def to_xyz(self): 293 | """Returns a list of the strings""" 294 | lines = [] 295 | for vector in self.lattice: 296 | lines.append("atom_vector %12.5f %12.5f %12.5f\n" % (tuple(vector))) 297 | 298 | return lines 299 | 300 | def __mkparam(self): 301 | """Update the parameters to match the cell.""" 302 | self._params = np.zeros(6) 303 | # cell lengths 304 | self._params[0:3] = [np.linalg.norm(i) for i in self.lattice][:] 305 | # angles in rad 306 | self._params[3:6] = [ 307 | calc_angle(i, j) 308 | for i, j in reversed(list(itertools.combinations(self.lattice, 2))) 309 | ] 310 | 311 | def mkcell(self, params): 312 | """Update the cell representation to match the parameters. 
Currently only 313 | builds a 3d cell.""" 314 | self._params = params 315 | a_mag, b_mag, c_mag = params[:3] 316 | alpha, beta, gamma = params[3:] 317 | a_vec = np.array([a_mag, 0.0, 0.0]) 318 | b_vec = np.array([b_mag * np.cos(gamma), b_mag * np.sin(gamma), 0.0]) 319 | c_x = c_mag * np.cos(beta) 320 | c_y = c_mag * (np.cos(alpha) - np.cos(gamma) * np.cos(beta)) / np.sin(gamma) 321 | c_vec = np.array([c_x, c_y, (c_mag ** 2 - c_x ** 2 - c_y ** 2) ** 0.5]) 322 | self.lattice = np.array([a_vec, b_vec, c_vec]) 323 | 324 | @property 325 | def a(self): 326 | """Magnitude of cell a vector.""" 327 | return self._params[0] 328 | 329 | @property 330 | def b(self): 331 | """Magnitude of cell b vector.""" 332 | return self._params[1] 333 | 334 | @property 335 | def c(self): 336 | """Magnitude of cell c vector.""" 337 | return self._params[2] 338 | 339 | @property 340 | def alpha(self): 341 | """Cell angle alpha.""" 342 | return self._params[3] * RAD2DEG 343 | 344 | @property 345 | def beta(self): 346 | """Cell angle beta.""" 347 | return self._params[4] * RAD2DEG 348 | 349 | @property 350 | def gamma(self): 351 | """Cell angle gamma.""" 352 | return self._params[5] * RAD2DEG 353 | 354 | 355 | class Symmetry(object): 356 | def __init__(self, options): 357 | self.options = options 358 | self._symprec = options.symmetry_precision 359 | self._lattice = None 360 | self._inv_latt = None 361 | self._scaled_coords = None 362 | self._element_symbols = None 363 | self.dataset = {} 364 | 365 | def add_structure(self, structure): 366 | self._lattice = structure.cell.lattice.copy() 367 | self._inv_latt = structure.cell.inverse.copy() 368 | self._scaled_coords = np.array( 369 | [atom.in_cell_scaled(self._inv_latt) for atom in structure.atoms] 370 | ) 371 | self._angle_tol = -1.0 372 | self._element_symbols = [atom.element for atom in structure.atoms] 373 | self._numbers = np.array( 374 | [ATOMIC_NUMBER.index(i) for i in self._element_symbols] 375 | ) 376 | 377 | def refine_cell(self): 378 | """ 379 | get refined data from symmetry finding 380 | """ 381 | # Temporary storage of structure info 382 | _lattice = self._lattice.T.copy() 383 | _scaled_coords = self._scaled_coords.copy() 384 | _symprec = self._symprec 385 | _angle_tol = self._angle_tol 386 | _numbers = self._numbers.copy() 387 | 388 | keys = ( 389 | "number", 390 | "international", 391 | "hall", 392 | "transformation_matrix", 393 | "origin_shift", 394 | "rotations", 395 | "translations", 396 | "wyckoffs", 397 | "equivalent_atoms", 398 | ) 399 | dataset = {} 400 | 401 | dataset["number"] = 0 402 | while dataset["number"] == 0: 403 | 404 | # refine cell 405 | num_atom = len(_scaled_coords) 406 | ref_lattice = _lattice.copy() 407 | ref_pos = np.zeros((num_atom * 4, 3), dtype=float) 408 | ref_pos[:num_atom] = _scaled_coords.copy() 409 | ref_numbers = np.zeros(num_atom * 4, dtype=int) 410 | ref_numbers[:num_atom] = _numbers.copy() 411 | num_atom_bravais = spg.refine_cell( 412 | ref_lattice, ref_pos, ref_numbers, num_atom, _symprec, _angle_tol 413 | ) 414 | for key, data in zip( 415 | keys, 416 | spg.get_symmetry_dataset( 417 | ( 418 | ref_lattice.copy(), 419 | ref_pos[:num_atom_bravais].copy(), 420 | ref_numbers[:num_atom_bravais].copy(), 421 | ), 422 | _symprec, 423 | _angle_tol, 424 | ), 425 | ): 426 | dataset[key] = data 427 | 428 | _symprec = _symprec * 0.5 429 | 430 | # an error occured with met9, org1, org9 whereby no 431 | # symmetry info was being printed for some reason. 432 | # thus a check is done after refining the structure. 
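        # In short: spglib is retried with a progressively halved tolerance
        # (_symprec *= 0.5 each pass) until it reports a non-zero space group
        # number; the guard below re-checks the result before it is stored.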
    def refine_cell(self):
        """
        Get refined data from symmetry finding.
        """
        # Temporary storage of structure info
        _lattice = self._lattice.T.copy()
        _scaled_coords = self._scaled_coords.copy()
        _symprec = self._symprec
        _angle_tol = self._angle_tol
        _numbers = self._numbers.copy()

        keys = (
            "number",
            "international",
            "hall",
            "transformation_matrix",
            "origin_shift",
            "rotations",
            "translations",
            "wyckoffs",
            "equivalent_atoms",
        )
        dataset = {}

        dataset["number"] = 0
        while dataset["number"] == 0:

            # refine cell: the position/number arrays are padded to 4x the
            # atom count; refine_cell fills them in place and returns the
            # refined atom count.
            num_atom = len(_scaled_coords)
            ref_lattice = _lattice.copy()
            ref_pos = np.zeros((num_atom * 4, 3), dtype=float)
            ref_pos[:num_atom] = _scaled_coords.copy()
            ref_numbers = np.zeros(num_atom * 4, dtype=int)
            ref_numbers[:num_atom] = _numbers.copy()
            num_atom_bravais = spg.refine_cell(
                ref_lattice, ref_pos, ref_numbers, num_atom, _symprec, _angle_tol
            )
            for key, data in zip(
                keys,
                spg.get_symmetry_dataset(
                    (
                        ref_lattice.copy(),
                        ref_pos[:num_atom_bravais].copy(),
                        ref_numbers[:num_atom_bravais].copy(),
                    ),
                    _symprec,
                    _angle_tol,
                ),
            ):
                dataset[key] = data

            _symprec = _symprec * 0.5

        # An error occurred with met9, org1 and org9 whereby no symmetry
        # info was printed for some reason, so a check is done after
        # refining the structure.

        if dataset["number"] == 0:
            warnings.warn("Bad symmetry found!", RuntimeWarning)
        else:
            self.dataset["number"] = dataset["number"]
            self.dataset["international"] = dataset["international"].strip()
            self.dataset["hall"] = dataset["hall"].strip()
            self.dataset["transformation_matrix"] = np.array(
                dataset["transformation_matrix"]
            )
            self.dataset["origin_shift"] = np.array(dataset["origin_shift"])
            self.dataset["rotations"] = np.array(dataset["rotations"])
            self.dataset["translations"] = np.array(dataset["translations"])
            letters = "0abcdefghijklmnopqrstuvwxyz"
            try:
                self.dataset["wyckoffs"] = [letters[x] for x in dataset["wyckoffs"]]
            except IndexError:
                print(dataset["wyckoffs"])
            self.dataset["equivalent_atoms"] = np.array(dataset["equivalent_atoms"])
            self._lattice = ref_lattice.T.copy()
            self._scaled_coords = ref_pos[:num_atom_bravais].copy()
            self._numbers = ref_numbers[:num_atom_bravais].copy()
            self._element_symbols = [
                ATOMIC_NUMBER[i] for i in ref_numbers[:num_atom_bravais]
            ]

    def get_space_group_name(self):
        return self.dataset["international"]

    def get_space_group_operations(self):
        return [
            self.convert_to_string((r, t))
            for r, t in zip(self.dataset["rotations"], self.dataset["translations"])
        ]

    def get_space_group_number(self):
        return self.dataset["number"]

    def get_equiv_atoms(self):
        """Returns a list where each entry is the index of the
        corresponding asymmetric atom. If P1 is assumed, this is just
        the range of the atoms."""
        return self.dataset["equivalent_atoms"]

    def get_equivalent_hydrogens(self):
        at_equiv = self.get_equiv_atoms()
        h_equiv = {}
        h_id = list(
            set(
                [i for id, i in enumerate(at_equiv) if self._element_symbols[id] == "H"]
            )
        )
        for id, i in enumerate(self._element_symbols):
            if i == "H":
                h_equiv[id] = h_id.index(at_equiv[id])
        return h_equiv
--------------------------------------------------------------------------------
/tobascco/visualizer.py:
--------------------------------------------------------------------------------
# -*- coding: utf-8 -*-

import itertools
from logging import info

import matplotlib.pyplot as plt
import numpy as np

# Registers the "3d" projection on older matplotlib versions;
# harmless (and unused directly) on newer ones.
from mpl_toolkits.mplot3d import Axes3D  # noqa: F401


class GraphPlot(object):
    def __init__(self, net, two_dimensional=False):
        self.fig, self.ax = plt.subplots()
        self.net = net
        self.fontsize = 20
        self.two_dimensional = two_dimensional
        if two_dimensional:
            self.cell = np.identity(2)
            self.params = net.get_2d_params()
            self.__mkcell()
            self.plot_2d_cell()
        else:
            self.ax = self.fig.add_subplot(111, projection="3d")
            self.params = net.get_3d_params()
            self.cell = np.identity(3)
            self.__mkcell()
            self.plot_3d_cell()

    def plot_2d_cell(self, origin=np.zeros(2), colour="b"):
        xyz_a = (self.cell[0] + origin) / 2.0
        xyz_b = (self.cell[1] + origin) / 2.0
        self.fig.text(xyz_a[0], xyz_a[1], "a", fontsize=self.fontsize)
        self.fig.text(xyz_b[0], xyz_b[1], "b", fontsize=self.fontsize)
        all_points = [
            np.sum(a, axis=0) + origin for a in list(self.powerset(self.cell)) if a
        ]
        all_points.append(origin)
        for s, e in itertools.combinations(np.array(all_points), 2):
            if any([self.zero_cross(s - e, i) for i in self.cell]):
                self.ax.plot(*zip(s, e), color=colour)
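
    # Cell outlines are drawn by generating every corner of the
    # parallelepiped (sums over subsets of the cell vectors, via
    # powerset() below) and connecting each pair of corners whose
    # difference is parallel to a cell vector (zero_cross() below).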
    def plot_3d_cell(self, origin=np.zeros(3), colour="b"):

        # add axes labels
        xyz_a = (self.cell[0] + origin) / 2.0
        xyz_b = (self.cell[1] + origin) / 2.0
        xyz_c = (self.cell[2] + origin) / 2.0
        self.ax.text(
            xyz_a[0],
            xyz_a[1],
            xyz_a[2] + 0.02,
            "a",
            fontsize=self.fontsize,
            color=colour,
        )
        self.ax.text(
            xyz_b[0], xyz_b[1], xyz_b[2], "b", fontsize=self.fontsize, color=colour
        )
        self.ax.text(
            xyz_c[0] + 0.02,
            xyz_c[1],
            xyz_c[2],
            "c",
            fontsize=self.fontsize,
            color=colour,
        )

        all_points = [
            np.sum(a, axis=0) + origin for a in list(self.powerset(self.cell)) if a
        ]
        all_points.append(origin)
        for s, e in itertools.combinations(np.array(all_points), 2):
            if any([self.zero_cross(s - e, i) for i in self.cell]):
                self.ax.plot3D(*zip(s, e), color=colour)

    def add_point(self, p=np.zeros(3), label=None, colour="r"):
        if self.two_dimensional:
            tp = p + np.array([0.005, 0.005])
        else:
            tp = p + np.array([0.05, -0.05, 0.05])
        pp = np.dot(p.copy(), self.cell)
        tp = np.dot(tp, self.cell)
        try:
            self.ax.scatter(*pp, c=colour, lw=0)
        except TypeError:
            pp = pp.tolist()
            self.ax.scatter(pp, c=colour, lw=0)
        if label:
            # hard-coded relabelling of specific vertices for figures
            if label == "1":
                label = "A"
            elif label == "4":
                label = "C"
            elif label == "9":
                label = "B"
            elif label == "14":
                label = "D"
            elif label == "19":
                label = "E"
            self.ax.text(*tp, s=label, fontsize=self.fontsize, color=colour)
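
    # Example (a sketch; ``gp`` is assumed to be a populated GraphPlot):
    #
    #   gp.add_point(p=np.array([0.5, 0.5, 0.5]), label="1", colour="k")
    #
    # Points are given in fractional coordinates and mapped through
    # self.cell before plotting.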
    def add_edge(self, vector, origin=np.zeros(3), label=None, colour="g"):
        """Draws an edge from origin along vector (both in fractional
        coordinates). Intended to account for periodic boundaries by
        splitting an edge where it intersects the boundary plane; the
        current implementation draws the full segment.

        """
        p = origin + vector
        p1 = np.dot(origin, self.cell)
        p2 = np.dot(p, self.cell)
        self.ax.plot3D(*zip(p2, p1), color=colour)
        if label:
            pp = (p2 + p1) * 0.5
            self.ax.text(*pp, s=label, fontsize=self.fontsize)

    def __mkcell(self):
        """Update the cell representation to match the parameters."""
        if self.two_dimensional:
            a_mag, b_mag = self.params[:2]
            gamma = self.params[2]
            a_vec = np.array([a_mag, 0.0])
            b_vec = np.array([b_mag * np.cos(gamma), b_mag * np.sin(gamma)])
            self.cell = np.array([a_vec, b_vec])
        else:
            a_mag, b_mag, c_mag = self.params[:3]
            alpha, beta, gamma = self.params[3:]
            a_vec = np.array([a_mag, 0.0, 0.0])
            b_vec = np.array([b_mag * np.cos(gamma), b_mag * np.sin(gamma), 0.0])
            c_x = c_mag * np.cos(beta)
            c_y = c_mag * (np.cos(alpha) - np.cos(gamma) * np.cos(beta)) / np.sin(gamma)
            c_vec = np.array([c_x, c_y, (c_mag ** 2 - c_x ** 2 - c_y ** 2) ** 0.5])
            self.cell = np.array([a_vec, b_vec, c_vec])

    def powerset(self, iterable):
        s = list(iterable)
        return itertools.chain.from_iterable(
            itertools.combinations(s, r) for r in range(len(s) + 1)
        )

    def zero_cross(self, vector1, vector2):
        vector1 = vector1 / np.linalg.norm(vector1)
        vector2 = vector2 / np.linalg.norm(vector2)
        return np.allclose(np.zeros(3), np.cross(vector1, vector2), atol=0.01)

    def point_of_intersection(self, p_edge, edge, p_plane, plane_vec1, plane_vec2):
        """
        Returns the point of intersection between an edge and a plane.
        p_edge is a point on the edge vector
        edge is the vector direction
        p_plane is a point on the plane
        plane_vec1 represents one of the vector directions of the plane
        plane_vec2 represents the second vector of the plane

        """
        n = np.cross(plane_vec1, plane_vec2)
        n = n / np.linalg.norm(n)
        l = edge / np.linalg.norm(edge)

        ldotn = np.dot(l, n)
        pdotn = np.dot(p_plane - p_edge, n)
        if ldotn == 0.0:
            # edge is parallel to the plane: no unique intersection
            return np.zeros(3)
        if pdotn == 0.0:
            # edge starts on the plane
            return p_edge
        return pdotn / ldotn * l + p_edge

    def view_graph(self):
        self.net.graph.show(edge_labels=True)
        info("Wait for Xwindow, then press [Enter]")
        input("")

    def view_placement(self, init=(0.0, 0.0, 0.0), edge_labels=True, sbu_only=[]):
        init = np.array(init)
        # set the first node down at the init position
        V = self.net.graph.nodes()[0]
        V = "1"  # NB: hard-coded start vertex overrides the lookup above
        edges = self.net.out_edges(V) + self.net.in_edges(V)
        unit_cell_vertices = self.net.vertex_positions(edges, [], pos={V: init})
        for key, value in unit_cell_vertices.items():
            if (sbu_only and key in sbu_only) or (not sbu_only):
                # relabel selected vertices for display only; keep the
                # original key for the graph lookups below
                if key == "1":
                    display = "A"
                elif key == "19":
                    display = "E"
                elif key == "4":
                    display = "C"
                elif key == "14":
                    display = "D"
                elif key == "9":
                    display = "B"
                else:
                    display = key
                self.add_point(p=np.array(value), label=display, colour="k")
            elif sbu_only and key not in sbu_only:
                self.add_point(
                    p=np.array(value), label=None, colour="#FF6600"
                )  # Blaze Orange
            for edge in self.net.out_edges(key):
                ind = self.net.get_index(edge)
                arc = np.array(self.net.lattice_arcs)[ind]
                el = None
                if edge_labels:
                    el = edge[2]
                self.add_edge(arc, origin=np.array(value), label=el)
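            # incoming edges: negate the arc so the drawn segment still
            # emanates from this vertex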
            for edge in self.net.in_edges(key):
                ind = self.net.get_index(edge)
                arc = -np.array(self.net.lattice_arcs)[ind]
                el = None
                if edge_labels:
                    el = edge[2]
                self.add_edge(arc, origin=np.array(value), label=el)
        mx = max(self.params[:3])
        self.ax.set_xlim3d(-1, mx)
        self.ax.set_ylim3d(0, mx)
        self.ax.set_zlim3d(0, mx)
        self.ax.view_init(elev=17, azim=-96)
        plt.axis("off")
        plt.savefig("name.png", dpi=900)
        # to render a rotation series instead:
        # for ii in range(0, 360, 10):
        #     self.ax.view_init(elev=10.0, azim=ii)
        #     plt.savefig("name.%i.png" % ii, dpi=600)
--------------------------------------------------------------------------------