├── .gitattributes
├── .github
├── ISSUE_TEMPLATE
│ ├── bug_report.md
│ └── feature_request.md
└── workflows
│ └── python-publish.yml
├── .gitignore
├── .travis.yml
├── CHANGELOG.md
├── LICENSE.txt
├── README.rst
├── docs_gen
├── Makefile
├── docs_gen.sh
├── requirements.txt
└── source
│ ├── .ipynb_checkpoints
│ └── quickstart-checkpoint.ipynb
│ ├── cobamp.algorithms.rst
│ ├── cobamp.analysis.rst
│ ├── cobamp.core.rst
│ ├── cobamp.nullspace.rst
│ ├── cobamp.rst
│ ├── cobamp.utilities.rst
│ ├── cobamp.wrappers.rst
│ ├── conf.py
│ ├── index.rst
│ ├── installation.rst
│ ├── modules.rst
│ ├── quickstart.ipynb
│ └── requirements.txt
├── examples
├── GBconsensus_resources
│ └── gb_plots.py
├── ecoli_core_model.py
├── gbconsensus.py
├── gbconsensus_model_compression.py
├── iAF1260_resources
│ ├── iAF1260_comp_bound_map.txt
│ ├── iAF1260_comp_exclusions.txt
│ ├── iAF1260_comp_metnames.txt
│ ├── iAF1260_comp_orx_map.txt
│ ├── iAF1260_comp_rxnames.txt
│ ├── iAF1260_comp_stoich.csv
│ └── iAF1260_comp_synthlethals.txt
└── iaf1260.py
├── notice.txt
├── requirements.txt
├── setup.py
├── src
└── cobamp
│ ├── __init__.py
│ ├── algorithms
│ ├── __init__.py
│ └── kshortest.py
│ ├── analysis
│ ├── __init__.py
│ ├── frequency.py
│ ├── graph.py
│ └── plotting.py
│ ├── core
│ ├── __init__.py
│ ├── cb_analysis.py
│ ├── linear_systems.py
│ ├── models.py
│ ├── optimization.py
│ └── transformer.py
│ ├── gpr
│ ├── __init__.py
│ ├── core.py
│ └── integration.py
│ ├── nullspace
│ ├── __init__.py
│ ├── nullspace.py
│ └── subset_reduction.py
│ ├── utilities
│ ├── __init__.py
│ ├── context.py
│ ├── file_io.py
│ ├── hash.py
│ ├── linear_system_diagnostics.py
│ ├── parallel.py
│ ├── postfix_expressions.py
│ ├── printing.py
│ ├── property_management.py
│ ├── set.py
│ ├── test.py
│ └── tree.py
│ └── wrappers
│ ├── __init__.py
│ ├── cobamp.py
│ ├── cobra.py
│ ├── core.py
│ ├── external_wrappers.py
│ ├── framed.py
│ └── method_wrappers.py
└── tests
├── ecoli_compression_test.py
├── linear_system_optimizer_test.py
├── parallel_simulation_test.py
├── property_dictionary_test.py
├── resources
├── toy_efm_mcs.png
└── toy_network.png
├── test_gmcs_toy.py
├── toy_model_test.py
└── toy_model_test_with_api.py
/.gitattributes:
--------------------------------------------------------------------------------
1 | docs_gen/* linguist-generated=true
2 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/bug_report.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Bug report
3 | about: Create a report to help us improve
4 | title: ''
5 | labels: ''
6 | assignees: ''
7 |
8 | ---
9 |
10 | **Describe the bug**
11 | A clear and concise description of what the bug is.
12 |
13 | **To Reproduce**
14 | Steps to reproduce the behavior:
15 | 1. Go to '...'
16 | 2. Click on '....'
17 | 3. Scroll down to '....'
18 | 4. See error
19 |
20 | **Expected behavior**
21 | A clear and concise description of what you expected to happen.
22 |
23 | **Screenshots**
24 | If applicable, add screenshots to help explain your problem.
25 |
26 | **Desktop (please complete the following information):**
27 | - OS: [e.g. iOS]
28 | - Browser [e.g. chrome, safari]
29 | - Version [e.g. 22]
30 |
31 | **Smartphone (please complete the following information):**
32 | - Device: [e.g. iPhone6]
33 | - OS: [e.g. iOS8.1]
34 | - Browser [e.g. stock browser, safari]
35 | - Version [e.g. 22]
36 |
37 | **Additional context**
38 | Add any other context about the problem here.
39 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/feature_request.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Feature request
3 | about: Suggest an idea for this project
4 | title: ''
5 | labels: ''
6 | assignees: ''
7 |
8 | ---
9 |
10 | **Is your feature request related to a problem? Please describe.**
11 | A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
12 |
13 | **Describe the solution you'd like**
14 | A clear and concise description of what you want to happen.
15 |
16 | **Describe alternatives you've considered**
17 | A clear and concise description of any alternative solutions or features you've considered.
18 |
19 | **Additional context**
20 | Add any other context or screenshots about the feature request here.
21 |
--------------------------------------------------------------------------------
/.github/workflows/python-publish.yml:
--------------------------------------------------------------------------------
1 | # This workflows will upload a Python Package using Twine when a release is created
2 | # For more information see: https://help.github.com/en/actions/language-and-framework-guides/using-python-with-github-actions#publishing-to-package-registries
3 |
4 | name: Upload Python Package
5 |
6 | on:
7 | release:
8 | types: [created]
9 |
10 | jobs:
11 | deploy:
12 |
13 | runs-on: ubuntu-latest
14 |
15 | steps:
16 | - uses: actions/checkout@v2
17 | - name: Set up Python
18 | uses: actions/setup-python@v2
19 | with:
20 | python-version: '3.x'
21 | - name: Install dependencies
22 | run: |
23 | python -m pip install --upgrade pip
24 | pip install setuptools wheel twine
25 | - name: Build and publish
26 | env:
27 | TWINE_USERNAME: ${{ secrets.PYPI_USERNAME }}
28 | TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }}
29 | run: |
30 | python setup.py sdist bdist_wheel
31 | twine upload dist/*
32 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | .idea
2 | .ipynb_checkpoints
3 | .eggs
4 | gb
5 | old
6 | /gurobi.log
7 | /inspyred.log
8 | build
9 | dist
10 | pypi
11 |
--------------------------------------------------------------------------------
/.travis.yml:
--------------------------------------------------------------------------------
1 | language: python
2 | python:
3 | - "3.5"
4 | - "3.6" # current default Python on Travis CI
5 | - "3.7"
6 | - "3.8"
7 | # command to install dependencies
8 | install:
9 | - pip install "."
10 | # command to run tests
11 | script:
12 | - python tests/toy_model_test.py
13 |
--------------------------------------------------------------------------------
/CHANGELOG.md:
--------------------------------------------------------------------------------
1 | # Changelog
2 | Major changes between versions will be documented on this file.
3 |
4 | ## [0.2.1] - 2020-10-27
5 |
6 | ### Added
7 | - Constraint-based simulator class that facilitates batch simulations on the same model with
8 | the possibility of changing environmental conditions and objective functions
9 |
10 | ### Changed
11 | - Wrappers module now automatically loads readers that can be used - this is done by attempting
12 | to import each associated package.
13 |
14 | ### Removed
15 | - GIMME and CORSO model objects, now appropriately a part of the troppo framework
16 |
17 |
18 | ## [0.2.0] - 2020-09-08
19 | ### Added
20 | - ConstraintBasedModel simplification based on FVA
21 | - GPR evaluation class that detaches GPR evaluation from models and readers
22 | - Genetic minimal cut sets
23 | - Parameters to ignore GPRs that cannot be parsed due to computational demand
24 | - Method to identify boundary reactions on ConstraintBasedModels
25 | - EFM-based methods can now include a "non_produced" parameter for metabolites that are optionally consumed but
26 | never produced
27 | - With expressions can now be used on ConstraintBasedModels to allow context on reaction and metabolite changes
28 |
29 | ### Changed
30 | - Bugfixes on GPR evaluation and reading
31 | - Singular add/remove methods for reactions/metabolites to be deprecated in the future
32 |
33 | ## [0.1.5] - 2019-12-06 (1 year anniversary!)
34 | ### Added
35 | - Benders decomposition solver (this is a very experimental feature)
36 | - Multiprocessing utilities (mp_utils)
37 | ### Changed
38 | - Several bugfixes and additional features for LinearSystem classes
39 |
40 | ## [0.1.4] - 2019-10-14
41 | ### Changed
42 | - Version revision
43 |
44 | ## [0.1.4b1] - 2019-10-10
45 | ### Added
46 | - Memory and thread setting parameters on LinearSystem / algorithm classes
47 |
48 | ## [0.1.3] - 2019-10-09
49 | ### Changed
50 | - Bugfixes concerning GPR reading (Gene names had an additional underscore character)
51 | - Normalized LinearSystem class argument signature
52 | - Linear systems with lower and upper bounds now properly check for irreversible reactions in the opposite direction
53 |
54 | ## [0.1.2] - 2019-09-23
55 | ### Added
56 | - Gene protein reaction rule evaluation module (cobamp.gpr.evaluators). Still a work in
57 | progress but the aim is to eventually move all GPR code into this module
58 | upon releasing a major revision
59 | - New dependency (boolean.py)
60 | ### Changed
61 | - Fixed errors in the readthedocs quickstart example
62 | - Corrected some assorted bugs on the ConstraintBasedModel and KShortest classes
63 | ### Notice:
64 | - This should be the last release without CI/CD
65 |
66 | ## [0.1.1] - 2019-06-07
67 | ### Changed
68 | - Several bug fixes
69 | - KShortest algorithm now adds constraints iteratively to avoid memory errors
70 | - Several improvements to the ConstraintBasedModel class
71 |
72 | ## [0.1.0] - 2019-04-29
73 | ### Added
74 | - Big-M indicators for solvers without a dedicated indicator constraint abstraction
75 |
76 | ## [0.1.0rc1] - 2019-04-18
77 | ### Added
78 | - SCIPY MAT format model reader
79 | - Gene-protein-rule support with gene expression data integration functions
80 | - Analysis functions (mainly frequency and graphs) with some plotting capability
81 | - Transformer classes for algorithms that alter a metabolic network,
82 | guaranteeing mapping between the reactions of both
83 | - Higher-level classes (linear_systems module) for semi-efficient definition of LP problems based on Optlang
84 | - Classes for evaluating and converting into postfix type arithmetic and boolean expressions
85 |
86 | ### Changed
87 | - Major code refactor. Module structure drastically changed and reorganized.
88 | - Many bugfixes
89 |
90 | ## [0.0.2] - 2018-12-20
91 | ### Added
92 | - Basic metabolic model classes for when an SBML file is not available or necessary
93 | - Elementary flux pattern enumeration using the K-Shortest algorithm (as a wrapper)
94 |
95 | ### Changed
96 | - Entire package structure to accommodate other types of algorithms beyond the K-Shortest one
97 |
98 | ### Removed
99 | - Some modules that were too specific for the changes above
100 |
101 | ## [0.0.1] - 2018-12-04
102 | ### Added
103 |
104 | - Base code for K-Shortest enumeration of EFMs and MCSs
105 | - Core framework architecture (linear systems, algorithms and wrappers)
106 | - COBRApy and framed model object readers
--------------------------------------------------------------------------------
/README.rst:
--------------------------------------------------------------------------------
1 | |License| |PyPI version| |RTD version|
2 |
3 | CoBAMP
4 | ============
5 |
6 | *CoBAMP* (Constraint-Based Analysis of Metabolic Pathways) is a Python package containing pathway analysis methods
7 | for use with constraint-based metabolic models. The main purpose is to provide a framework that is both modular and
8 | flexible enough to be integrated in other packages (such as cobrapy, framed or cameo) that already implement generic
9 | data structures for metabolic models.
10 |
11 | CoBAMP depends on *optlang* (https://github.com/biosustain/optlang) for solving (mixed-integer) linear programming
12 | problems, and thus, requires a compatible solver and Python dependency installed from the following list:
13 |
14 | - `cplex `__ (preferred)
15 | - `gurobi `__ (no explicit indicator variables)
16 | - `glpk `__ (no explicit indicator variables or solution pools)
17 |
18 | Current methods include:
19 | - Elementary flux modes: K-Shortest algorithm
20 | - Minimal cut sets: MCSEnumerator approach
21 | - Elementary flux patterns: K-Shortest algorithm
22 |
23 |
24 | Documentation
25 | ~~~~~~~~~~~~~
26 | Documentation available at https://cobamp.readthedocs.io/
27 |
28 |
29 | Installation from PyPI (stable releases)
30 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
31 |
32 | pip install cobamp
33 |
34 |
35 | Credits and License
36 | ~~~~~~~~~~~~~~~~~~~
37 |
38 | Developed at the Centre of Biological Engineering, University of Minho
39 |
40 | Please refer to this work through this `publication `__ by Vieira and Rocha (2019):
41 |
42 | - CoBAMP: a Python framework for metabolic pathway analysis in constraint-based models, *Bioinformatics*, btz598
43 |
44 | Released under the GNU General Public License (version 3.0).
45 |
46 |
47 | .. |License| image:: https://img.shields.io/badge/license-GPL%20v3.0-blue.svg
48 | :target: https://opensource.org/licenses/GPL-3.0
49 | .. |PyPI version| image:: https://badge.fury.io/py/cobamp.svg
50 | :target: https://badge.fury.io/py/cobamp
51 | .. |RTD version| image:: https://readthedocs.org/projects/cobamp/badge/?version=latest&style=plastic
52 | :target: https://cobamp.readthedocs.io/
53 |
--------------------------------------------------------------------------------
/docs_gen/Makefile:
--------------------------------------------------------------------------------
1 | # Minimal makefile for Sphinx documentation
2 | #
3 |
4 | # You can set these variables from the command line.
5 | SPHINXOPTS =
6 | SPHINXBUILD = sphinx-build
7 | SOURCEDIR = source
8 | BUILDDIR = ../docs/
9 |
10 | # Put it first so that "make" without argument is like "make help".
11 | help:
12 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
13 |
14 | .PHONY: help Makefile
15 |
16 | # Catch-all target: route all unknown targets to Sphinx using the new
17 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
18 | %: Makefile
19 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
20 |
--------------------------------------------------------------------------------
/docs_gen/docs_gen.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | sphinx-apidoc -o ./source/ ././../src/
4 | make html
5 |
--------------------------------------------------------------------------------
/docs_gen/requirements.txt:
--------------------------------------------------------------------------------
1 | nbsphinx
--------------------------------------------------------------------------------
/docs_gen/source/cobamp.algorithms.rst:
--------------------------------------------------------------------------------
1 | cobamp.algorithms package
2 | =========================
3 |
4 | Submodules
5 | ----------
6 |
7 | cobamp.algorithms.kshortest module
8 | ----------------------------------
9 |
10 | .. automodule:: cobamp.algorithms.kshortest
11 | :members:
12 | :undoc-members:
13 | :show-inheritance:
14 |
15 |
16 | Module contents
17 | ---------------
18 |
19 | .. automodule:: cobamp.algorithms
20 | :members:
21 | :undoc-members:
22 | :show-inheritance:
23 |
--------------------------------------------------------------------------------
/docs_gen/source/cobamp.analysis.rst:
--------------------------------------------------------------------------------
1 | cobamp.analysis package
2 | =======================
3 |
4 | Submodules
5 | ----------
6 |
7 | cobamp.analysis.frequency module
8 | --------------------------------
9 |
10 | .. automodule:: cobamp.analysis.frequency
11 | :members:
12 | :undoc-members:
13 | :show-inheritance:
14 |
15 | cobamp.analysis.graph module
16 | ----------------------------
17 |
18 | .. automodule:: cobamp.analysis.graph
19 | :members:
20 | :undoc-members:
21 | :show-inheritance:
22 |
23 | cobamp.analysis.plotting module
24 | -------------------------------
25 |
26 | .. automodule:: cobamp.analysis.plotting
27 | :members:
28 | :undoc-members:
29 | :show-inheritance:
30 |
31 |
32 | Module contents
33 | ---------------
34 |
35 | .. automodule:: cobamp.analysis
36 | :members:
37 | :undoc-members:
38 | :show-inheritance:
39 |
--------------------------------------------------------------------------------
/docs_gen/source/cobamp.core.rst:
--------------------------------------------------------------------------------
1 | cobamp.core package
2 | ===================
3 |
4 | Submodules
5 | ----------
6 |
7 | cobamp.core.linear\_systems module
8 | ----------------------------------
9 |
10 | .. automodule:: cobamp.core.linear_systems
11 | :members:
12 | :undoc-members:
13 | :show-inheritance:
14 |
15 | cobamp.core.models module
16 | -------------------------
17 |
18 | .. automodule:: cobamp.core.models
19 | :members:
20 | :undoc-members:
21 | :show-inheritance:
22 |
23 | cobamp.core.optimization module
24 | -------------------------------
25 |
26 | .. automodule:: cobamp.core.optimization
27 | :members:
28 | :undoc-members:
29 | :show-inheritance:
30 |
31 | cobamp.core.transformer module
32 | ------------------------------
33 |
34 | .. automodule:: cobamp.core.transformer
35 | :members:
36 | :undoc-members:
37 | :show-inheritance:
38 |
39 |
40 | Module contents
41 | ---------------
42 |
43 | .. automodule:: cobamp.core
44 | :members:
45 | :undoc-members:
46 | :show-inheritance:
47 |
--------------------------------------------------------------------------------
/docs_gen/source/cobamp.nullspace.rst:
--------------------------------------------------------------------------------
1 | cobamp.nullspace package
2 | ========================
3 |
4 | Submodules
5 | ----------
6 |
7 | cobamp.nullspace.nullspace module
8 | ---------------------------------
9 |
10 | .. automodule:: cobamp.nullspace.nullspace
11 | :members:
12 | :undoc-members:
13 | :show-inheritance:
14 |
15 | cobamp.nullspace.subset\_reduction module
16 | -----------------------------------------
17 |
18 | .. automodule:: cobamp.nullspace.subset_reduction
19 | :members:
20 | :undoc-members:
21 | :show-inheritance:
22 |
23 |
24 | Module contents
25 | ---------------
26 |
27 | .. automodule:: cobamp.nullspace
28 | :members:
29 | :undoc-members:
30 | :show-inheritance:
31 |
--------------------------------------------------------------------------------
/docs_gen/source/cobamp.rst:
--------------------------------------------------------------------------------
1 | cobamp package
2 | ==============
3 |
4 | Subpackages
5 | -----------
6 |
7 | .. toctree::
8 |
9 | cobamp.algorithms
10 | cobamp.analysis
11 | cobamp.core
12 | cobamp.nullspace
13 | cobamp.utilities
14 | cobamp.wrappers
15 |
16 | Module contents
17 | ---------------
18 |
19 | .. automodule:: cobamp
20 | :members:
21 | :undoc-members:
22 | :show-inheritance:
23 |
--------------------------------------------------------------------------------
/docs_gen/source/cobamp.utilities.rst:
--------------------------------------------------------------------------------
1 | cobamp.utilities package
2 | ========================
3 |
4 | Submodules
5 | ----------
6 |
7 | cobamp.utilities.file\_utils module
8 | -----------------------------------
9 |
10 | .. automodule:: cobamp.utilities.file_utils
11 | :members:
12 | :undoc-members:
13 | :show-inheritance:
14 |
15 | cobamp.utilities.postfix\_expressions module
16 | --------------------------------------------
17 |
18 | .. automodule:: cobamp.utilities.postfix_expressions
19 | :members:
20 | :undoc-members:
21 | :show-inheritance:
22 |
23 | cobamp.utilities.property\_management module
24 | --------------------------------------------
25 |
26 | .. automodule:: cobamp.utilities.property_management
27 | :members:
28 | :undoc-members:
29 | :show-inheritance:
30 |
31 | cobamp.utilities.set\_utils module
32 | ----------------------------------
33 |
34 | .. automodule:: cobamp.utilities.set_utils
35 | :members:
36 | :undoc-members:
37 | :show-inheritance:
38 |
39 | cobamp.utilities.test\_utils module
40 | -----------------------------------
41 |
42 | .. automodule:: cobamp.utilities.test_utils
43 | :members:
44 | :undoc-members:
45 | :show-inheritance:
46 |
47 |
48 | Module contents
49 | ---------------
50 |
51 | .. automodule:: cobamp.utilities
52 | :members:
53 | :undoc-members:
54 | :show-inheritance:
55 |
--------------------------------------------------------------------------------
/docs_gen/source/cobamp.wrappers.rst:
--------------------------------------------------------------------------------
1 | cobamp.wrappers package
2 | =======================
3 |
4 | Submodules
5 | ----------
6 |
7 | cobamp.wrappers.external\_wrappers module
8 | -----------------------------------------
9 |
10 | .. automodule:: cobamp.wrappers.external_wrappers
11 | :members:
12 | :undoc-members:
13 | :show-inheritance:
14 |
15 | cobamp.wrappers.method\_wrappers module
16 | ---------------------------------------
17 |
18 | .. automodule:: cobamp.wrappers.method_wrappers
19 | :members:
20 | :undoc-members:
21 | :show-inheritance:
22 |
23 |
24 | Module contents
25 | ---------------
26 |
27 | .. automodule:: cobamp.wrappers
28 | :members:
29 | :undoc-members:
30 | :show-inheritance:
31 |
--------------------------------------------------------------------------------
/docs_gen/source/conf.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | #
3 | # Configuration file for the Sphinx documentation builder.
4 | #
5 | # This file does only contain a selection of the most common options. For a
6 | # full list see the documentation:
7 | # http://www.sphinx-doc.org/en/master/config
8 |
9 | # -- Path setup --------------------------------------------------------------
10 |
11 | # If extensions (or modules to document with autodoc) are in another directory,
12 | # add these directories to sys.path here. If the directory is relative to the
13 | # documentation root, use os.path.abspath to make it absolute, like shown here.
14 | #
15 | import os
16 | import sys
17 | sys.path.insert(0, os.path.abspath('../../src/'))
18 |
19 |
20 | # -- Project information -----------------------------------------------------
21 |
22 | project = 'cobamp'
23 | copyright = '2018, Vítor Vieira'
24 | author = 'Vítor Vieira'
25 |
26 | # The short X.Y version
27 | version = ''
28 | # The full version, including alpha/beta/rc tags
29 | release = '0.0.1'
30 |
31 |
32 | # -- General configuration ---------------------------------------------------
33 |
34 | # If your documentation needs a minimal Sphinx version, state it here.
35 | #
36 | # needs_sphinx = '1.0'
37 |
38 | # Add any Sphinx extension module names here, as strings. They can be
39 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
40 | # ones.
41 | extensions = [
42 | 'sphinx.ext.autodoc',
43 | 'sphinx.ext.doctest',
44 | 'sphinx.ext.intersphinx',
45 | 'sphinx.ext.coverage',
46 | 'sphinx.ext.imgmath',
47 | 'sphinx.ext.viewcode',
48 | 'sphinx.ext.githubpages',
49 | 'nbsphinx'
50 | ]
51 |
52 | # Add any paths that contain templates here, relative to this directory.
53 | templates_path = ['_templates']
54 |
55 | # The suffix(es) of source filenames.
56 | # You can specify multiple suffix as a list of string:
57 | #
58 | # source_suffix = ['.rst', '.md']
59 | source_suffix = '.rst'
60 |
61 | # The master toctree document.
62 | master_doc = 'index'
63 |
64 | # The language for content autogenerated by Sphinx. Refer to documentation
65 | # for a list of supported languages.
66 | #
67 | # This is also used if you do content translation via gettext catalogs.
68 | # Usually you set "language" from the command line for these cases.
69 | language = None
70 |
71 | # List of patterns, relative to source directory, that match files and
72 | # directories to ignore when looking for source files.
73 | # This pattern also affects html_static_path and html_extra_path.
74 | exclude_patterns = ['.ipynb_checkpoints']
75 |
76 | # The name of the Pygments (syntax highlighting) style to use.
77 | pygments_style = None
78 |
79 |
80 | # -- Options for HTML output -------------------------------------------------
81 |
82 | # The theme to use for HTML and HTML Help pages. See the documentation for
83 | # a list of builtin themes.
84 | #
85 | html_theme = "sphinx_rtd_theme"
86 |
87 | # Theme options are theme-specific and customize the look and feel of a theme
88 | # further. For a list of options available for each theme, see the
89 | # documentation.
90 | #
91 | # html_theme_options = {}
92 |
93 | # Add any paths that contain custom static files (such as style sheets) here,
94 | # relative to this directory. They are copied after the builtin static files,
95 | # so a file named "default.css" will overwrite the builtin "default.css".
96 | html_static_path = ['_static']
97 |
98 | # Custom sidebar templates, must be a dictionary that maps document names
99 | # to template names.
100 | #
101 | # The default sidebars (for documents that don't match any pattern) are
102 | # defined by theme itself. Builtin themes are using these templates by
103 | # default: ``['localtoc.html', 'relations.html', 'sourcelink.html',
104 | # 'searchbox.html']``.
105 | #
106 | # html_sidebars = {}
107 |
108 |
109 | # -- Options for HTMLHelp output ---------------------------------------------
110 |
111 | # Output file base name for HTML help builder.
112 | htmlhelp_basename = 'cobampdoc'
113 |
114 |
115 | # -- Options for LaTeX output ------------------------------------------------
116 |
117 | latex_elements = {
118 | # The paper size ('letterpaper' or 'a4paper').
119 | #
120 | # 'papersize': 'letterpaper',
121 |
122 | # The font size ('10pt', '11pt' or '12pt').
123 | #
124 | # 'pointsize': '10pt',
125 |
126 | # Additional stuff for the LaTeX preamble.
127 | #
128 | # 'preamble': '',
129 |
130 | # Latex figure (float) alignment
131 | #
132 | # 'figure_align': 'htbp',
133 | }
134 |
135 | # Grouping the document tree into LaTeX files. List of tuples
136 | # (source start file, target name, title,
137 | # author, documentclass [howto, manual, or own class]).
138 | latex_documents = [
139 | (master_doc, 'cobamp.tex', 'cobamp Documentation',
140 | 'Vítor Vieira', 'manual'),
141 | ]
142 |
143 |
144 | # -- Options for manual page output ------------------------------------------
145 |
146 | # One entry per manual page. List of tuples
147 | # (source start file, name, description, authors, manual section).
148 | man_pages = [
149 | (master_doc, 'cobamp', 'cobamp Documentation',
150 | [author], 1)
151 | ]
152 |
153 |
154 | # -- Options for Texinfo output ----------------------------------------------
155 |
156 | # Grouping the document tree into Texinfo files. List of tuples
157 | # (source start file, target name, title, author,
158 | # dir menu entry, description, category)
159 | texinfo_documents = [
160 | (master_doc, 'cobamp', 'cobamp Documentation',
161 | author, 'cobamp', 'One line description of project.',
162 | 'Miscellaneous'),
163 | ]
164 |
165 |
166 | # -- Options for Epub output -------------------------------------------------
167 |
168 | # Bibliographic Dublin Core info.
169 | epub_title = project
170 |
171 | # The unique identifier of the text. This can be a ISBN number
172 | # or the project homepage.
173 | #
174 | # epub_identifier = ''
175 |
176 | # A unique identification for the text.
177 | #
178 | # epub_uid = ''
179 |
180 | # A list of files that should not be packed into the epub file.
181 | epub_exclude_files = ['search.html']
182 |
183 |
184 | # -- Extension configuration -------------------------------------------------
185 |
186 | # -- Options for intersphinx extension ---------------------------------------
187 |
188 | # Example configuration for intersphinx: refer to the Python standard library.
189 | intersphinx_mapping = {'https://docs_gen.python.org/': None}
--------------------------------------------------------------------------------
/docs_gen/source/index.rst:
--------------------------------------------------------------------------------
1 | .. cobamp documentation master file, created by
2 | sphinx-quickstart on Wed Oct 31 12:04:38 2018.
3 | You can adapt this file completely to your liking, but it should at least
4 | contain the root `toctree` directive.
5 |
6 |
7 | Welcome to cobamp's documentation!
8 | ========================================
9 |
10 | .. toctree::
11 | :maxdepth: 2
12 | :caption: Contents:
13 |
14 | installation
15 | quickstart
16 | modules
17 |
18 | .. automodule:: cobamp
19 | :members:
20 |
21 |
22 | Indices and tables
23 | ==================
24 |
25 | * :ref:`genindex`
26 | * :ref:`modindex`
27 | * :ref:`search`
28 |
--------------------------------------------------------------------------------
/docs_gen/source/installation.rst:
--------------------------------------------------------------------------------
1 |
2 | ************
3 | Installation
4 | ************
5 |
6 | Basic requirements
7 | ==================
8 |
9 | * Python 3.x
10 | * `CPLEX `_ along with its Python wrapper installed in your current distribution (please note that the Python version must be compatible with CPLEX)
11 |
12 | Optional requirements
13 | =====================
14 |
15 | For easier model loading and analysis (using constraint-based methods), the following libraries can be used:
16 |
17 | * `cobrapy `_
18 | * `framed `_
19 |
20 | Additionally, the `escher `_ library can be used to display elementary flux modes on metabolic maps and the `networkx `_ library can also be used to plot trees generated from EFM or MCS enumeration.
21 |
22 | Via pip
23 | =======
24 |
25 | The easiest method is to use pip to `install the package from PyPI `_::
26 |
27 | pip install cobamp
28 |
29 | From source
30 | ===========
31 |
32 | * Download the latest source files from github
33 | * Unpack the source files into a directory of your choosing
34 | * Open the operating system's command-line interface
35 | * Change into the source file directory
36 | * Run the following command ::
37 |
38 | python setup.py install
39 |
40 | It is highly recommended that this package along with its requirements are installed in a separate Python environment.
41 | Tools such as `virtualenv `_ or `conda `_ can be used to create Python environments.
42 |
43 |
--------------------------------------------------------------------------------
/docs_gen/source/modules.rst:
--------------------------------------------------------------------------------
1 | src
2 | ===
3 |
4 | .. toctree::
5 | :maxdepth: 4
6 |
7 | cobamp
8 |
--------------------------------------------------------------------------------
/docs_gen/source/requirements.txt:
--------------------------------------------------------------------------------
1 | nbsphinx
2 | ipykernel
3 | cobamp
4 | numpy
5 | pandas
6 | scipy
--------------------------------------------------------------------------------
/examples/GBconsensus_resources/gb_plots.py:
--------------------------------------------------------------------------------
1 | from cobamp.utilities.file_io import read_pickle
2 | from cobamp.utilities.tree_analysis import *
3 | from itertools import chain
4 | import matplotlib.pyplot as plt
5 | import networkx as nx
6 |
def generate_efm_results_tree(efm_sets, ignore_greater_than=10, pruning_level=6, merge_dupes=False):
    """Build a compressed (and optionally pruned) tree summarising EFM reaction sets.

    Args:
        efm_sets: iterable of sets, each holding the reaction identifiers of one EFM.
        ignore_greater_than: drop compressed nodes holding more than this many
            values (a falsy value disables this step).
        pruning_level: depth at which the tree is probabilistically pruned,
            cutting leaves (a falsy value disables pruning).
        merge_dupes: when True, merge nodes that carry duplicate values.

    Returns:
        The root node of the resulting tree.
    """
    tree = Tree('ROOT')
    fill_tree(tree, efm_sets)
    compress_linear_paths(tree)
    if ignore_greater_than:
        ignore_compressed_nodes_by_size(tree, ignore_greater_than)

    def _stringify(value):
        # Compressed nodes carry lists of names: join them sorted, one per line.
        # None values are mapped to the literal string "None".
        if isinstance(value, list):
            return '\n'.join(sorted(value))
        return value if value is not None else "None"

    apply_fx_to_all_node_values(tree, _stringify)
    if pruning_level:
        probabilistic_tree_prune(tree, target_level=pruning_level, cut_leaves=True, name_separator='\n')
        compress_linear_paths(tree)
    if merge_dupes:
        merge_duplicate_nodes(tree)
    return tree
27 |
def draw_graph(root, write_path, unique=False):
    """Render the tree rooted at *root* as a networkx digraph; optionally save it.

    Args:
        root: tree whose nodes/edges are copied into a networkx DiGraph.
        write_path: output path for the figure; only used when it is a string.
        unique: forwarded to populate_nx_graph as the unique_nodes flag.
    """
    digraph = nx.DiGraph()
    populate_nx_graph(root, digraph, unique_nodes=unique)
    print('NetworkX recognizes this as a tree?', nx.is_tree(digraph))
    # Graphviz layout requires pydot to be installed alongside networkx.
    layout = nx.nx_pydot.graphviz_layout(digraph)
    plt.figure(figsize=(50, 40))
    nx.draw_networkx_nodes(digraph, layout, node_size=0)
    nx.draw_networkx_edges(digraph, layout, alpha=0.5, arrowsize=10)
    nx.draw_networkx_labels(digraph, layout, font_size=18, font_color='red')
    if isinstance(write_path, str):
        plt.savefig(write_path)
45 |
if __name__ == '__main__':
	# Decoded EFMs: one dict per EFM, mapping (possibly merged) reaction keys to fluxes.
	efms = read_pickle(
		'/home/skapur/MEOCloud/Projectos/MCSEnumeratorPython/examples/GBconsensus_resources/EFMs/efms_glc_uptake_decoded.pkl')

	# Split merged reaction keys ('..._and_...') into individual reactions that all
	# share the same flux value, dropping each token's 2-character prefix.
	efms_orig = []
	for efm in efms:
		expanded = {}
		for key, flux in efm.items():
			for token in key.split('_and_'):
				expanded[token[2:]] = flux
		efms_orig.append(expanded)

	efm_sets = [set(efm) for efm in efms_orig]

	tree = generate_efm_results_tree(
		efm_sets=efm_sets,
		ignore_greater_than=10,
		pruning_level=10,
		merge_dupes=False
	)

	draw_graph(tree, 'test_graph.pdf', unique=False)
61 |
--------------------------------------------------------------------------------
/examples/ecoli_core_model.py:
--------------------------------------------------------------------------------
import urllib
import urllib.request  # plain 'import urllib' does not guarantee the request submodule is loaded

import cobra
import escher

from src.cobamp.utilities.external_wrappers import KShortestEFMEnumeratorWrapper
5 |
# Download the E. coli core model from the BiGG database and parse it with cobra.
# NOTE(review): urlretrieve needs urllib.request to be importable; plain
# 'import urllib' above does not guarantee that -- confirm on a clean interpreter.
model_url = "http://bigg.ucsd.edu/static/models/e_coli_core.xml"
model_path, model_content = urllib.request.urlretrieve(model_url)
model = cobra.io.sbml3.read_sbml_model(model_path)
9 |
def display_efms_escher(efm):
	"""Show one EFM as reaction data on the Escher e_coli_core map (notebook only).

	Args:
		efm: mapping of reaction ids to flux values for a single EFM.
	"""
	builder = escher.Builder(
		map_name='e_coli_core.Core metabolism',
		hide_secondary_metabolites=True,
		reaction_data=efm,
	)
	builder.display_in_notebook(js_source='local')
21 |
22 |
if __name__ == '__main__':
	# Enumerate elementary flux modes that consume glucose and produce succinate.
	ksefm = KShortestEFMEnumeratorWrapper(
		model=model,
		non_consumed=[],
		consumed=['glc__D_e'],
		produced=['succ_e'],
		algorithm_type=KShortestEFMEnumeratorWrapper.ALGORITHM_TYPE_POPULATE,
		stop_criteria=100
	)

	# Bug fix: get_enumerator is a method returning a generator -- it must be called,
	# otherwise next() below would fail on the bound method object.
	enumerator = ksefm.get_enumerator()

	# POPULATE yields one batch per solution size; pull batches until a non-empty
	# one arrives.
	efm_list = []
	while not efm_list:
		efm_list += next(enumerator)

	# Bug fix: the original referenced an undefined name 'efm' (NameError);
	# display the first EFM that was found.
	display_efms_escher(efm_list[0])
--------------------------------------------------------------------------------
/examples/gbconsensus.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import pickle
3 | import pandas as pd
4 | import math
5 |
6 | from itertools import product
7 | from cobamp.efm_enumeration.kshortest_efms import KShortestEFMAlgorithm
8 | from cobamp.linear_systems.linear_systems import DualLinearSystem, IrreversibleLinearSystem, SimpleLinearSystem
9 | from cobamp.linear_systems.optimization import LinearSystemOptimizer
10 | from cobamp.mcs_enumeration.intervention_problem import *
11 | from cobamp.utilities.file_io import pickle_object
12 | import cobamp.efm_enumeration.kshortest_efm_properties as kp
13 |
14 | #os.chdir('/home/skapur/Workspaces/PyCharm/cobamp')
15 |
def decode_mcs(solutions):
	"""Decode K-shortest dual solutions into tuples of original reaction names.

	Each solution's active indicator variables index compressed reactions; every
	compressed reaction expands (via the module-level ``orx_map``) to one or more
	original reactions, so each cut set becomes the cartesian product of its
	members' original names.

	Args:
		solutions: iterable of K-shortest solution objects exposing
			``get_active_indicator_varids()``.

	Returns:
		list of tuples, each a minimal cut set in original reaction names.
	"""
	# Bug fix: 'chain' was never imported explicitly at module level (only
	# 'product' is); it only worked via the wildcard import of
	# intervention_problem. Import both locally so this function is self-reliant.
	from itertools import chain, product
	return list(chain(
		*[list(product(*[orx_map[rx_names[i]] for i in lethal.get_active_indicator_varids()])) for lethal in
		  solutions]))
24 |
25 |
# Stoichiometric matrix of the compressed GBconsensus model (metabolites x reactions).
S = np.genfromtxt('examples/GBconsensus_resources/GBconsensus_comp_stoich.csv', delimiter=',')

# Medium composition: presumably a mapping of drain reaction names to bound pairs
# -- TODO confirm against the pickle's producer.
with open('examples/GBconsensus_resources/GBconsensus_comp_media.pkl', 'rb') as f:
	media = pickle.load(f)

# Reaction names excluded from enumeration, one per line.
with open('examples/GBconsensus_resources/GBconsensus_comp_exclusions.txt', 'r') as f:
	singles = [s.strip() for s in f.readlines()]

# Reaction names aligned with the columns of S.
with open('examples/GBconsensus_resources/GBconsensus_comp_rxnames.txt', 'r') as f:
	rx_names = [s.strip() for s in f.readlines()]

# Metabolite names aligned with the rows of S.
with open('examples/GBconsensus_resources/GBconsensus_comp_metnames.txt', 'r') as f:
	met_names = [s.strip() for s in f.readlines()]

# 'name=lb,ub' lines -> reaction name to [lower, upper] bound list.
with open('examples/GBconsensus_resources/GBconsensus_comp_bound_map.txt', 'r') as f:
	bound_map = {k: [float(n) for n in v.split(',')] for k, v in
				 dict([s.strip().split('=') for s in f.readlines()]).items()}

# 'compressed=orig1,orig2,...' lines -> compressed reaction to its original reactions.
with open('examples/GBconsensus_resources/GBconsensus_comp_orx_map.txt', 'r') as f:
	orx_map = {k: [n for n in v.split(',')] for k, v in dict([s.strip().split('=') for s in f.readlines()]).items()}

# Irreversible reactions: columns whose lower bound is non-negative.
irrev = np.where(np.array([bound_map[r][0] >= 0 for r in rx_names]))[0]
# Excluded singles mapped back to compressed-column indices (singleton lists).
# NOTE(review): 'chain' is not imported explicitly in this file -- presumably it
# leaks in through the wildcard import of intervention_problem; confirm.
exclusions = [[rx_names.index([k for k, v in orx_map.items() if s in v][0])] for s in singles if
			  s in list(chain(*orx_map.values()))]
biomass_index = rx_names.index('R_biomass_reaction')
glc_index = rx_names.index('R_EX_glc_e')
52 |
53 |
def enumerate_lethals():
	"""Enumerate minimal cut sets disabling growth under the medium constraints.

	Builds the dual system of the compressed model, iterates the K-shortest
	algorithm one solution at a time, and returns the solutions decoded into
	original reaction names.

	Returns:
		list of tuples of original reaction names (see decode_mcs).
	"""
	config = kp.KShortestProperties()
	config[kp.K_SHORTEST_MPROPERTY_METHOD] = kp.K_SHORTEST_METHOD_ITERATE
	config[kp.K_SHORTEST_OPROPERTY_MAXSOLUTIONS] = 1

	# Target space: biomass flux above 0.001 plus every medium bound that maps
	# onto a reaction of the compressed model (an upper bound of 0 means unbounded).
	targets = [DefaultFluxbound(0.001, None, biomass_index)]
	for name, bounds in media.items():
		if name in rx_names:
			upper = bounds[1] if bounds[1] != 0 else None
			targets.append(DefaultFluxbound(bounds[0], upper, rx_names.index(name)))

	T, b = InterventionProblem(S).generate_target_matrix(targets)
	dual_system = DualLinearSystem(S, irrev, T, b)

	solver = KShortestEFMAlgorithm(config)
	solutions = list(solver.enumerate(dual_system, exclusions))
	return decode_mcs(solutions)
71 |
def optimize_model():
	"""Maximise biomass production under the medium constraints (FBA).

	Returns:
		The optimal flux through 'R_biomass_reaction'.

	Bug fix: the original computed this value and discarded it, so the function
	always returned None and had no observable effect for callers.
	"""
	# Per-reaction bounds: medium bounds take precedence over the model defaults.
	lower_bounds = [bound_map[k][0] if k not in media.keys() else media[k][0] for k in rx_names]
	upper_bounds = [bound_map[k][1] if k not in media.keys() else media[k][1] for k in rx_names]
	# Variable names truncated to 128 characters (solver name-length limit).
	lin_sys = SimpleLinearSystem(S, lower_bounds, upper_bounds, [n[:128] for n in rx_names])
	lso = LinearSystemOptimizer(lin_sys)
	sol = lso.optimize([('R_biomass_reaction', 1)], False)
	return sol.var_values()['R_biomass_reaction']
77 |
def find_net_conversion(S, efm, met_names, tol=1e-9):
	"""Compute the net metabolite conversion achieved by an EFM.

	For every metabolite touched by the EFM's reactions, accumulates
	flux * stoichiometric coefficient; balances within *tol* of zero are dropped
	(internally balanced metabolites), the rest are rounded.

	Args:
		S: stoichiometric matrix (metabolites x reactions) as a numpy array.
		efm: mapping of reaction (column) index to flux value.
		met_names: metabolite names aligned with the rows of S.
		tol: absolute tolerance below which a balance counts as zero.

	Returns:
		dict mapping metabolite name to its rounded net production (+) or
		consumption (-).
	"""
	# Decimal places implied by the tolerance (e.g. 1e-9 -> 9 digits).
	precision = abs(round(math.log(tol, 10)))
	balances = {}
	for reaction, flux in efm.items():
		for metabolite in np.nonzero(S[:, reaction])[0]:
			balances[metabolite] = balances.get(metabolite, 0) + flux * S[metabolite, reaction]
	return {met_names[m]: round(total, precision) for m, total in balances.items() if abs(total) > tol}
99 |
def enumerate_efms():
	"""Enumerate EFMs (up to size 15) of the internal network that produce biomass,
	pickle the results and export a summary CSV. Returns the string-decoded EFMs.
	"""
	# Metabolite row indices touched by a given collection of reaction columns.
	meta_id_from_drain = lambda x: np.nonzero(S[:,list(x)])[0]

	configuration = kp.KShortestProperties()
	configuration[kp.K_SHORTEST_MPROPERTY_METHOD] = kp.K_SHORTEST_METHOD_POPULATE
	configuration[kp.K_SHORTEST_OPROPERTY_MAXSIZE] = 15

	# Exchange (drain) reactions among the excluded singles.
	drains = set([s for s in singles if "R_EX" in s])

	# Partition medium drains by the sign of their upper bound.
	consumed = set([rx_names.index(k) for k in set(media.keys()) & set(rx_names) if media[k][1] <= 0])
	produced = set([rx_names.index(k) for k in set(media.keys()) & set(rx_names) if media[k][1] > 0])
	non_consumed = set([rx_names.index(k) for k in drains if k in rx_names]) - consumed - produced

	# Map drain reaction indices to the metabolite rows they exchange;
	# consumed metabolites are paired with the negated lower bound of their drain.
	consumed_meta = list(zip(meta_id_from_drain(consumed), [-media[rx_names[i]][0] for i in consumed]))
	produced_meta = meta_id_from_drain(produced)
	nc_meta = meta_id_from_drain(non_consumed)

	# Internal network: every column except the drains.
	drains_id = sorted([rx_names.index(k) for k in drains if k in rx_names])
	not_drains_id = [i for i in range(S.shape[1]) if i not in drains_id]

	S_int = S[:, not_drains_id]
	rx_names_int = [rx_names[i] for i in not_drains_id]
	irrev_int = np.where(np.array([bound_map[r][0] >= 0 for r in rx_names_int]))[0]

	biomass_index = rx_names_int.index('R_biomass_reaction')

	# Append a pseudo-metabolite produced 1:1 by the biomass reaction so that
	# biomass production can be required as a 'produced' metabolite below.
	S_int_biomass_meta = np.zeros([1,S_int.shape[1]])

	S_int_biomass_meta[0, biomass_index] = 1

	S_int_final = np.vstack([S_int, S_int_biomass_meta])
	met_names_int = met_names + ['biomass_c']

	# NOTE(review): produced_meta computed above is overwritten here, and only the
	# 8th consumed metabolite (index 7, presumably glucose) is kept as the consumed
	# constraint -- confirm this hard-coded index is intentional.
	produced_meta = [(met_names_int.index('biomass_c'), 1)]
	#consumed_meta = [met_names_int.index('M_glc__D_e')]
	irreversible_system = IrreversibleLinearSystem(S_int_final, irrev_int, nc_meta, [consumed_meta[7]], [])
	algorithm = KShortestEFMAlgorithm(configuration)

	efms = algorithm.enumerate(irreversible_system)
	# NOTE(review): 'efms' is iterated by three comprehensions below; if enumerate()
	# returns a generator rather than a list, the later comprehensions would see it
	# exhausted -- verify it returns a list.
	decoded_efms = [{rx_names_int[i]:v for i,v in efm.attribute_value(efm.SIGNED_VALUE_MAP).items() if v != 0} for efm in efms]
	decoded_efms_index = [{i:v for i,v in efm.attribute_value(efm.SIGNED_VALUE_MAP).items() if v != 0} for efm in efms]
	decoded = [' | '.join([rx_names_int[i] for i in efmi.get_active_indicator_varids()]) for efmi in efms]
	net_conversions = [find_net_conversion(S_int_final, efm, met_names_int) for efm in decoded_efms_index]


	efm_set_name = 'efms_glc_uptake_'
	save_folder = 'examples/GBconsensus_resources/EFMs/'

	# Persist the enumerated EFMs and their net conversions.
	pickle_object(decoded_efms, save_folder+efm_set_name+"decoded.pkl")
	pickle_object(decoded_efms_index, save_folder+efm_set_name+"decoded_index.pkl")
	pickle_object(net_conversions, save_folder+efm_set_name+"net_conversions.pkl")

	#with open('examples/GBconsensus_resources/EFMs/efms_no_media.pkl','w') as f:
	#	f.write('\n'.join([','.join(d) for d in decoded]))
	#pickle_object(decoded_efms_index, 'examples/GBconsensus_resources/EFMs/efms_no_media_index.pkl')
	# pickle_object(decoded_efms, 'examples/GBconsensus_resources/EFMs/glc_uptake_kmax12.pkl')

	# NOTE(review): this re-reads a '.pkl' file in TEXT mode and overwrites
	# 'decoded_efms' with raw strings, while the DataFrame below pairs them with the
	# in-memory 'decoded_efms_index' -- verify the two stay aligned and that this
	# file is actually plain text despite the extension.
	with open('examples/GBconsensus_resources/EFMs/efms_glc_uptake.pkl', 'r') as f:
		decoded_efms = f.read().replace(',',';').split('\n')

	df_dict = {'EFM':[' | '.join(efm) for efm in decoded_efms], 'Conversion':[' | '.join(sorted([str(v)+" "+r for r,v in find_net_conversion(S_int_final, efm, met_names_int).items()])) for efm in decoded_efms_index]}

	df = pd.DataFrame.from_dict(df_dict)
	df.to_csv(save_folder+efm_set_name+"dataframe.csv")
	return decoded
165 |
166 |
if __name__ == '__main__':
	# Run the EFM enumeration pipeline (writes pickles and a CSV as side effects).
	efm = enumerate_efms()
--------------------------------------------------------------------------------
/examples/gbconsensus_model_compression.py:
--------------------------------------------------------------------------------
1 | from framed.io.sbml import *
2 | from framed.cobra import variability,simulation
3 | from framed.convex.subset_reduction import *
4 | from cobamp.utilities.file_io import read_pickle, pickle_object
5 |
# NOTE(review): hard-coded developer paths; this script only runs on that machine.
CONSENSUS_MODEL_PATH = '/home/skapur/MEOCloud/Projectos/PhDThesis/Material/Models/Consensus/ConsensusModel.xml'

model = load_cbmodel(CONSENSUS_MODEL_PATH)

biomass_rx = "R_biomass_reaction"
# Keep every exchange reaction plus biomass as individual (uncompressed) reactions.
drains = [r for r in model.reactions if 'R_EX_' in r] + [biomass_rx]
# Medium constraints used for both the FVA pre-screen and the FBA sanity checks.
media = read_pickle('/home/skapur/Workspaces/PyCharm/cobamp/examples/GBconsensus_resources/GBconsensus_media.pkl')

# Reactions that cannot carry flux under the medium, detected via FVA.
fva_blocked = variability.blocked_reactions(model, constraints=media)


# Compressed model: blocked reactions removed, drains kept as single reactions.
cmodel = generate_reduced_model(model, to_exclude=fva_blocked, to_keep_single=drains)


# Sanity check: the biomass FBA optimum should match between the original and
# the compressed model.
wt_orig = simulation.FBA(model, objective={biomass_rx:1}, constraints=media)
wt_comp = simulation.FBA(cmodel, objective={biomass_rx:1}, constraints=media)
22 |
--------------------------------------------------------------------------------
/examples/iAF1260_resources/iAF1260_comp_exclusions.txt:
--------------------------------------------------------------------------------
1 | R_DM_4HBA
2 | R_DM_5DRIB
3 | R_DM_AACALD
4 | R_DM_HMFURN
5 | R_DM_OXAM
6 | R_EX_12ppd_R_e
7 | R_EX_12ppd_S_e
8 | R_EX_14glucan_e
9 | R_EX_15dap_e
10 | R_EX_23camp_e
11 | R_EX_23ccmp_e
12 | R_EX_23cgmp_e
13 | R_EX_23cump_e
14 | R_EX_23dappa_e
15 | R_EX_26dap_M_e
16 | R_EX_2ddglcn_e
17 | R_EX_34dhpac_e
18 | R_EX_3amp_e
19 | R_EX_3cmp_e
20 | R_EX_3gmp_e
21 | R_EX_3hcinnm_e
22 | R_EX_3hpppn_e
23 | R_EX_3ump_e
24 | R_EX_4abut_e
25 | R_EX_4hoxpacd_e
26 | R_EX_5dglcn_e
27 | R_EX_LalaDgluMdapDala_e
28 | R_EX_LalaDgluMdap_e
29 | R_EX_ac_e
30 | R_EX_acac_e
31 | R_EX_acald_e
32 | R_EX_acgal1p_e
33 | R_EX_acgal_e
34 | R_EX_acgam1p_e
35 | R_EX_acgam_e
36 | R_EX_acmana_e
37 | R_EX_acmum_e
38 | R_EX_acnam_e
39 | R_EX_acolipa_e
40 | R_EX_acser_e
41 | R_EX_ade_e
42 | R_EX_adn_e
43 | R_EX_adocbl_e
44 | R_EX_ag_e
45 | R_EX_agm_e
46 | R_EX_akg_e
47 | R_EX_ala_B_e
48 | R_EX_ala_D_e
49 | R_EX_ala_L_e
50 | R_EX_alaala_e
51 | R_EX_all_D_e
52 | R_EX_alltn_e
53 | R_EX_amp_e
54 | R_EX_anhgm_e
55 | R_EX_arab_L_e
56 | R_EX_arbtn_e
57 | R_EX_arbtn_fe3_e
58 | R_EX_arg_L_e
59 | R_EX_ascb_L_e
60 | R_EX_asn_L_e
61 | R_EX_aso3_e
62 | R_EX_asp_L_e
63 | R_EX_but_e
64 | R_EX_butso3_e
65 | R_EX_ca2_e
66 | R_EX_cbi_e
67 | R_EX_cbl1_e
68 | R_EX_cd2_e
69 | R_EX_cgly_e
70 | R_EX_chol_e
71 | R_EX_cit_e
72 | R_EX_cl_e
73 | R_EX_cmp_e
74 | R_EX_co2_e
75 | R_EX_cobalt2_e
76 | R_EX_colipa_e
77 | R_EX_cpgn_e
78 | R_EX_cpgn_un_e
79 | R_EX_crn_e
80 | R_EX_csn_e
81 | R_EX_cu2_e
82 | R_EX_cu_e
83 | R_EX_cyan_e
84 | R_EX_cynt_e
85 | R_EX_cys_D_e
86 | R_EX_cys_L_e
87 | R_EX_cytd_e
88 | R_EX_dad_2_e
89 | R_EX_damp_e
90 | R_EX_dca_e
91 | R_EX_dcmp_e
92 | R_EX_dcyt_e
93 | R_EX_ddca_e
94 | R_EX_dgmp_e
95 | R_EX_dgsn_e
96 | R_EX_dha_e
97 | R_EX_dimp_e
98 | R_EX_din_e
99 | R_EX_dms_e
100 | R_EX_dmso_e
101 | R_EX_dopa_e
102 | R_EX_dtmp_e
103 | R_EX_dump_e
104 | R_EX_duri_e
105 | R_EX_eca4colipa_e
106 | R_EX_enlipa_e
107 | R_EX_enter_e
108 | R_EX_etha_e
109 | R_EX_ethso3_e
110 | R_EX_etoh_e
111 | R_EX_f6p_e
112 | R_EX_fald_e
113 | R_EX_fe2_e
114 | R_EX_fe3_e
115 | R_EX_fe3dcit_e
116 | R_EX_fe3dhbzs_e
117 | R_EX_fe3hox_e
118 | R_EX_fe3hox_un_e
119 | R_EX_fecrm_e
120 | R_EX_fecrm_un_e
121 | R_EX_feenter_e
122 | R_EX_feoxam_e
123 | R_EX_feoxam_un_e
124 | R_EX_for_e
125 | R_EX_fru_e
126 | R_EX_frulys_e
127 | R_EX_fruur_e
128 | R_EX_fuc_L_e
129 | R_EX_fum_e
130 | R_EX_g1p_e
131 | R_EX_g3pc_e
132 | R_EX_g3pe_e
133 | R_EX_g3pg_e
134 | R_EX_g3pi_e
135 | R_EX_g3ps_e
136 | R_EX_g6p_e
137 | R_EX_gal1p_e
138 | R_EX_gal_bD_e
139 | R_EX_gal_e
140 | R_EX_galct_D_e
141 | R_EX_galctn_D_e
142 | R_EX_galctn_L_e
143 | R_EX_galt_e
144 | R_EX_galur_e
145 | R_EX_gam6p_e
146 | R_EX_gam_e
147 | R_EX_gbbtn_e
148 | R_EX_gdp_e
149 | R_EX_glc_e
150 | R_EX_glcn_e
151 | R_EX_glcr_e
152 | R_EX_glcur1p_e
153 | R_EX_glcur_e
154 | R_EX_gln_L_e
155 | R_EX_glu_L_e
156 | R_EX_gly_e
157 | R_EX_glyald_e
158 | R_EX_glyb_e
159 | R_EX_glyc2p_e
160 | R_EX_glyc3p_e
161 | R_EX_glyc_R_e
162 | R_EX_glyc_e
163 | R_EX_glyclt_e
164 | R_EX_gmp_e
165 | R_EX_gsn_e
166 | R_EX_gthox_e
167 | R_EX_gthrd_e
168 | R_EX_gtp_e
169 | R_EX_gua_e
170 | R_EX_h2_e
171 | R_EX_h2o2_e
172 | R_EX_h2o_e
173 | R_EX_h2s_e
174 | R_EX_h_e
175 | R_EX_hacolipa_e
176 | R_EX_halipa_e
177 | R_EX_hdca_e
178 | R_EX_hdcea_e
179 | R_EX_hg2_e
180 | R_EX_his_L_e
181 | R_EX_hom_L_e
182 | R_EX_hxa_e
183 | R_EX_hxan_e
184 | R_EX_idon_L_e
185 | R_EX_ile_L_e
186 | R_EX_imp_e
187 | R_EX_indole_e
188 | R_EX_inost_e
189 | R_EX_ins_e
190 | R_EX_isetac_e
191 | R_EX_k_e
192 | R_EX_kdo2lipid4_e
193 | R_EX_lac_D_e
194 | R_EX_lac_L_e
195 | R_EX_lcts_e
196 | R_EX_leu_L_e
197 | R_EX_lipa_cold_e
198 | R_EX_lipa_e
199 | R_EX_lys_L_e
200 | R_EX_lyx_L_e
201 | R_EX_mal_D_e
202 | R_EX_mal_L_e
203 | R_EX_malt_e
204 | R_EX_malthx_e
205 | R_EX_maltpt_e
206 | R_EX_malttr_e
207 | R_EX_maltttr_e
208 | R_EX_man6p_e
209 | R_EX_man_e
210 | R_EX_manglyc_e
211 | R_EX_melib_e
212 | R_EX_met_D_e
213 | R_EX_met_L_e
214 | R_EX_metsox_R_L_e
215 | R_EX_metsox_S_L_e
216 | R_EX_mg2_e
217 | R_EX_minohp_e
218 | R_EX_mmet_e
219 | R_EX_mn2_e
220 | R_EX_mnl_e
221 | R_EX_mobd_e
222 | R_EX_mso3_e
223 | R_EX_n2o_e
224 | R_EX_na1_e
225 | R_EX_nac_e
226 | R_EX_nh4_e
227 | R_EX_ni2_e
228 | R_EX_nmn_e
229 | R_EX_no2_e
230 | R_EX_no3_e
231 | R_EX_no_e
232 | R_EX_o16a4colipa_e
233 | R_EX_o2_e
234 | R_EX_o2s_e
235 | R_EX_ocdca_e
236 | R_EX_ocdcea_e
237 | R_EX_octa_e
238 | R_EX_orn_e
239 | R_EX_orot_e
240 | R_EX_pacald_e
241 | R_EX_peamn_e
242 | R_EX_phe_L_e
243 | R_EX_pheme_e
244 | R_EX_pi_e
245 | R_EX_pnto_R_e
246 | R_EX_ppa_e
247 | R_EX_ppal_e
248 | R_EX_pppn_e
249 | R_EX_ppt_e
250 | R_EX_pro_L_e
251 | R_EX_progly_e
252 | R_EX_psclys_e
253 | R_EX_pser_L_e
254 | R_EX_ptrc_e
255 | R_EX_pyr_e
256 | R_EX_r5p_e
257 | R_EX_rib_D_e
258 | R_EX_rmn_e
259 | R_EX_sbt_D_e
260 | R_EX_ser_D_e
261 | R_EX_ser_L_e
262 | R_EX_skm_e
263 | R_EX_so2_e
264 | R_EX_so3_e
265 | R_EX_so4_e
266 | R_EX_spmd_e
267 | R_EX_succ_e
268 | R_EX_sucr_e
269 | R_EX_sulfac_e
270 | R_EX_tartr_L_e
271 | R_EX_taur_e
272 | R_EX_tcynt_e
273 | R_EX_thm_e
274 | R_EX_thr_L_e
275 | R_EX_thrp_e
276 | R_EX_thym_e
277 | R_EX_thymd_e
278 | R_EX_tma_e
279 | R_EX_tmao_e
280 | R_EX_tre_e
281 | R_EX_trp_L_e
282 | R_EX_tsul_e
283 | R_EX_ttdca_e
284 | R_EX_ttdcea_e
285 | R_EX_tungs_e
286 | R_EX_tym_e
287 | R_EX_tyr_L_e
288 | R_EX_tyrp_e
289 | R_EX_uacgam_e
290 | R_EX_udpacgal_e
291 | R_EX_udpg_e
292 | R_EX_udpgal_e
293 | R_EX_udpglcur_e
294 | R_EX_ump_e
295 | R_EX_ura_e
296 | R_EX_urea_e
297 | R_EX_uri_e
298 | R_EX_val_L_e
299 | R_EX_xan_e
300 | R_EX_xmp_e
301 | R_EX_xtsn_e
302 | R_EX_xyl_D_e
303 | R_EX_xylu_L_e
304 | R_EX_zn2_e
305 | R_ACALDtpp
306 | R_ACONIs
307 | R_AOBUTDs
308 | R_ARBTNexs
309 | R_ATPHs
310 | R_CO2tpp
311 | R_CPGNexs
312 | R_DATPHs
313 | R_DHPTDCs
314 | R_FALDtpp
315 | R_FALGTHLs
316 | R_FE3HOXexs
317 | R_FECRMexs
318 | R_FEENTERexs
319 | R_FEOXAMexs
320 | R_G5SADs
321 | R_GLYCtpp
322 | R_GTPHs
323 | R_H2Otex
324 | R_H2Otpp
325 | R_H2St1pp
326 | R_H2tpp
327 | R_METOX1s
328 | R_METOX2s
329 | R_N2Otpp
330 | R_NH4tpp
331 | R_NOtpp
332 | R_O2tpp
333 | R_SO2tpp
334 | R_ATPM
335 | R_Ec_biomass_iAF1260_core_59p81M
--------------------------------------------------------------------------------
/examples/iAF1260_resources/iAF1260_comp_metnames.txt:
--------------------------------------------------------------------------------
1 | M_10fthf_c
2 | M_12dgr120_c
3 | M_12dgr140_c
4 | M_12dgr141_c
5 | M_12dgr160_c
6 | M_12dgr161_c
7 | M_12dgr161_p
8 | M_12dgr180_c
9 | M_12dgr181_c
10 | M_12dgr181_p
11 | M_12ppd_R_c
12 | M_12ppd_R_e
13 | M_1pyr5c_c
14 | M_23dhb_c
15 | M_23dhbzs_c
16 | M_26dap_M_c
17 | M_2aobut_c
18 | M_2dr1p_c
19 | M_2h3oppan_c
20 | M_2mahmp_c
21 | M_2ohph_c
22 | M_2oph_c
23 | M_2pg_c
24 | M_3hmrsACP_c
25 | M_3ig3p_c
26 | M_3mob_c
27 | M_3pg_c
28 | M_4abut_c
29 | M_4abut_e
30 | M_4abut_p
31 | M_4ampm_c
32 | M_4hba_c
33 | M_5dglcn_c
34 | M_5dglcn_e
35 | M_5drib_c
36 | M_6pgc_c
37 | M_ACP_c
38 | M_LalaDgluMdapDala_c
39 | M_LalaDgluMdapDala_e
40 | M_LalaDgluMdapDala_p
41 | M_LalaDgluMdap_c
42 | M_LalaDgluMdap_e
43 | M_LalaDgluMdap_p
44 | M_aact_c
45 | M_acACP_c
46 | M_ac_c
47 | M_ac_e
48 | M_ac_p
49 | M_acald_c
50 | M_acald_e
51 | M_acald_p
52 | M_accoa_c
53 | M_acg5sa_c
54 | M_acgam_c
55 | M_acolipa_e
56 | M_acser_c
57 | M_acser_e
58 | M_actACP_c
59 | M_ade_c
60 | M_ade_e
61 | M_adn_c
62 | M_adn_e
63 | M_adn_p
64 | M_adp_c
65 | M_agm_c
66 | M_agm_e
67 | M_agm_p
68 | M_ahcys_c
69 | M_aicar_c
70 | M_air_c
71 | M_akg_c
72 | M_akg_e
73 | M_ala_D_c
74 | M_ala_D_e
75 | M_ala_D_p
76 | M_ala_L_c
77 | M_alaala_c
78 | M_alaala_e
79 | M_alaala_p
80 | M_alltn_c
81 | M_alltn_e
82 | M_amet_c
83 | M_amp_c
84 | M_anhgm3p_c
85 | M_anhgm3p_p
86 | M_anhgm4p_c
87 | M_anhgm4p_p
88 | M_anhgm_c
89 | M_anhgm_e
90 | M_anhgm_p
91 | M_anhm3p_c
92 | M_anhm4p_c
93 | M_anhm_c
94 | M_arbtn_c
95 | M_arbtn_e
96 | M_arbtn_fe3_c
97 | M_arbtn_fe3_e
98 | M_arg_L_c
99 | M_arg_L_e
100 | M_arg_L_p
101 | M_asn_L_c
102 | M_asn_L_e
103 | M_asn_L_p
104 | M_asp_L_c
105 | M_aspsa_c
106 | M_atp_c
107 | M_bglycogen_c
108 | M_but2eACP_c
109 | M_butACP_c
110 | M_ca2_c
111 | M_ca2_e
112 | M_ca2_p
113 | M_cbp_c
114 | M_cd2_c
115 | M_cd2_p
116 | M_cddec5eACP_c
117 | M_cdp_c
118 | M_cdpdddecg_c
119 | M_cdpdhdec9eg_c
120 | M_cdpdhdecg_c
121 | M_cdpdodec11eg_c
122 | M_cdpdodecg_c
123 | M_cdpdtdec7eg_c
124 | M_cdpdtdecg_c
125 | M_cgly_e
126 | M_cgly_p
127 | M_chor_c
128 | M_cit_c
129 | M_ckdo_c
130 | M_cl_c
131 | M_cl_e
132 | M_cmp_c
133 | M_co2_c
134 | M_co2_e
135 | M_co2_p
136 | M_coa_c
137 | M_cobalt2_c
138 | M_cobalt2_e
139 | M_cobalt2_p
140 | M_colipa_e
141 | M_colipa_p
142 | M_cpgn_c
143 | M_cpgn_e
144 | M_cpgn_un_c
145 | M_cpgn_un_e
146 | M_cpppg3_c
147 | M_ctp_c
148 | M_cu2_c
149 | M_cu2_e
150 | M_cu2_p
151 | M_cys_L_c
152 | M_cys_L_e
153 | M_cys_L_p
154 | M_cytd_c
155 | M_cytd_e
156 | M_cytd_p
157 | M_dad_2_c
158 | M_dadp_c
159 | M_damp_c
160 | M_datp_c
161 | M_dcaACP_c
162 | M_dcdp_c
163 | M_dctp_c
164 | M_ddcaACP_c
165 | M_ddca_c
166 | M_ddca_p
167 | M_dgdp_c
168 | M_dgmp_c
169 | M_dgsn_c
170 | M_dgtp_c
171 | M_dha_c
172 | M_dha_e
173 | M_dhap_c
174 | M_dhf_c
175 | M_dhor_S_c
176 | M_dhptd_c
177 | M_din_c
178 | M_ditp_c
179 | M_dmpp_c
180 | M_dtdp4d6dg_c
181 | M_dtdp_c
182 | M_dtmp_c
183 | M_dttp_c
184 | M_dudp_c
185 | M_dump_c
186 | M_duri_c
187 | M_dutp_c
188 | M_e4p_c
189 | M_eca4colipa_e
190 | M_enlipa_e
191 | M_enlipa_p
192 | M_enter_c
193 | M_enter_e
194 | M_etha_e
195 | M_etha_p
196 | M_etoh_e
197 | M_f6p_c
198 | M_fad_c
199 | M_fadh2_c
200 | M_fdp_c
201 | M_fe2_c
202 | M_fe2_e
203 | M_fe2_p
204 | M_fe3_c
205 | M_fe3_e
206 | M_fe3_p
207 | M_fe3hox_c
208 | M_fe3hox_e
209 | M_fe3hox_un_c
210 | M_fe3hox_un_e
211 | M_fecrm_c
212 | M_fecrm_e
213 | M_fecrm_un_c
214 | M_fecrm_un_e
215 | M_feenter_c
216 | M_feenter_e
217 | M_feoxam_c
218 | M_feoxam_e
219 | M_feoxam_un_c
220 | M_feoxam_un_e
221 | M_fgam_c
222 | M_fmn_c
223 | M_fmnh2_c
224 | M_for_c
225 | M_fum_c
226 | M_fum_e
227 | M_fum_p
228 | M_g1p_c
229 | M_g3p_c
230 | M_g3pe_e
231 | M_g3pe_p
232 | M_g3pg_e
233 | M_g3pg_p
234 | M_g6p_c
235 | M_gam6p_c
236 | M_gar_c
237 | M_gdp_c
238 | M_gdpmann_c
239 | M_glc_D_c
240 | M_glc_D_e
241 | M_glc_D_p
242 | M_glcn_c
243 | M_glcn_e
244 | M_glcn_p
245 | M_gln_L_c
246 | M_glu5sa_c
247 | M_glu_L_c
248 | M_glu_L_e
249 | M_glu_L_p
250 | M_glx_c
251 | M_gly_c
252 | M_glyald_e
253 | M_glyc3p_c
254 | M_glyc3p_e
255 | M_glyc3p_p
256 | M_glyc_R_c
257 | M_glyc_R_e
258 | M_glyc_c
259 | M_glyc_e
260 | M_glyc_p
261 | M_glyclt_c
262 | M_glyclt_e
263 | M_glyclt_p
264 | M_glycogen_c
265 | M_gmp_c
266 | M_grxox_c
267 | M_grxrd_c
268 | M_gsn_c
269 | M_gthrd_c
270 | M_gthrd_e
271 | M_gthrd_p
272 | M_gtp_c
273 | M_gua_c
274 | M_gua_e
275 | M_gua_p
276 | M_h2_c
277 | M_h2_e
278 | M_h2_p
279 | M_h2mb4p_c
280 | M_h2o2_c
281 | M_h2o_c
282 | M_h2o_e
283 | M_h2o_p
284 | M_h2s_c
285 | M_h2s_e
286 | M_h2s_p
287 | M_h_c
288 | M_h_e
289 | M_h_p
290 | M_hco3_c
291 | M_hcys_L_c
292 | M_hdca_c
293 | M_hdca_p
294 | M_hdcea_c
295 | M_hdcea_p
296 | M_hdeACP_c
297 | M_hexACP_c
298 | M_his_L_c
299 | M_his_L_e
300 | M_his_L_p
301 | M_hmfurn_c
302 | M_hom_L_c
303 | M_hom_L_e
304 | M_hpyr_c
305 | M_hxa_c
306 | M_hxan_c
307 | M_hxan_e
308 | M_hxcoa_c
309 | M_iasp_c
310 | M_ichor_c
311 | M_idon_L_c
312 | M_idon_L_e
313 | M_ile_L_c
314 | M_ile_L_e
315 | M_ile_L_p
316 | M_imp_c
317 | M_indole_c
318 | M_indole_e
319 | M_indole_p
320 | M_ins_c
321 | M_ins_e
322 | M_ins_p
323 | M_ipdp_c
324 | M_itp_c
325 | M_k_c
326 | M_k_e
327 | M_k_p
328 | M_kdo2lipid4_c
329 | M_kdo2lipid4_e
330 | M_lac_D_c
331 | M_lac_D_e
332 | M_leu_L_c
333 | M_leu_L_e
334 | M_leu_L_p
335 | M_lipa_c
336 | M_lipa_cold_e
337 | M_lipa_e
338 | M_lipa_p
339 | M_lys_L_c
340 | M_lys_L_e
341 | M_lys_L_p
342 | M_malACP_c
343 | M_mal_L_c
344 | M_man1p_c
345 | M_man6p_c
346 | M_man_c
347 | M_met_L_c
348 | M_metsox_R_L_c
349 | M_metsox_S_L_c
350 | M_mg2_c
351 | M_mg2_e
352 | M_mg2_p
353 | M_mlthf_c
354 | M_mn2_c
355 | M_mn2_e
356 | M_mn2_p
357 | M_mobd_c
358 | M_mobd_e
359 | M_mql8_c
360 | M_mqn8_c
361 | M_mthgxl_c
362 | M_murein3p3p_p
363 | M_murein4p3p_p
364 | M_murein4p4p_p
365 | M_murein4px4p_p
366 | M_murein5p3p_p
367 | M_murein5p4p_p
368 | M_murein5p5p_p
369 | M_murein5px3p_p
370 | M_murein5px4p_p
371 | M_myrsACP_c
372 | M_na1_c
373 | M_na1_p
374 | M_nad_c
375 | M_nadh_c
376 | M_nadp_c
377 | M_nadph_c
378 | M_ncam_c
379 | M_nh4_c
380 | M_nh4_e
381 | M_nh4_p
382 | M_ni2_c
383 | M_ni2_p
384 | M_nicrnt_c
385 | M_nmn_c
386 | M_o2_c
387 | M_o2_e
388 | M_o2_p
389 | M_o2s_c
390 | M_oaa_c
391 | M_ocACP_c
392 | M_ocdca_c
393 | M_ocdca_p
394 | M_ocdcea_c
395 | M_ocdcea_p
396 | M_orn_c
397 | M_orn_e
398 | M_orn_p
399 | M_orot_c
400 | M_oxam_c
401 | M_pa120_c
402 | M_pa120_p
403 | M_pa140_c
404 | M_pa140_p
405 | M_pa141_c
406 | M_pa141_p
407 | M_pa160_c
408 | M_pa160_p
409 | M_pa161_c
410 | M_pa161_p
411 | M_pa180_c
412 | M_pa180_p
413 | M_pa181_c
414 | M_pa181_p
415 | M_palmACP_c
416 | M_pap_c
417 | M_paps_c
418 | M_pdx5p_c
419 | M_pe120_c
420 | M_pe120_p
421 | M_pe140_c
422 | M_pe140_p
423 | M_pe141_c
424 | M_pe141_p
425 | M_pe160_c
426 | M_pe160_p
427 | M_pe161_c
428 | M_pe161_p
429 | M_pe180_c
430 | M_pe180_p
431 | M_pe181_c
432 | M_pe181_p
433 | M_pep_c
434 | M_pg120_c
435 | M_pg120_p
436 | M_pg140_c
437 | M_pg140_p
438 | M_pg141_c
439 | M_pg141_p
440 | M_pg160_c
441 | M_pg160_p
442 | M_pg161_c
443 | M_pg161_p
444 | M_pg180_c
445 | M_pg180_p
446 | M_pg181_c
447 | M_pg181_p
448 | M_pgp120_c
449 | M_pgp140_c
450 | M_pgp141_c
451 | M_pgp160_c
452 | M_pgp161_c
453 | M_pgp180_c
454 | M_pgp181_c
455 | M_phe_L_c
456 | M_phe_L_e
457 | M_pheme_c
458 | M_pheme_e
459 | M_pi_c
460 | M_pi_e
461 | M_pi_p
462 | M_ppa_c
463 | M_ppcoa_c
464 | M_ppgpp_c
465 | M_pphn_c
466 | M_ppi_c
467 | M_ppp9_c
468 | M_pppg9_c
469 | M_pppi_c
470 | M_pro_L_c
471 | M_pro_L_e
472 | M_pro_L_p
473 | M_prpp_c
474 | M_ptrc_c
475 | M_ptrc_e
476 | M_ptrc_p
477 | M_pyam5p_c
478 | M_pydx5p_c
479 | M_pyr_c
480 | M_pyr_e
481 | M_q8_c
482 | M_q8h2_c
483 | M_r1p_c
484 | M_r5p_c
485 | M_rbflvrd_c
486 | M_ribflv_c
487 | M_ru5p_D_c
488 | M_s7p_c
489 | M_ser_L_c
490 | M_ser_L_e
491 | M_ser_L_p
492 | M_sheme_c
493 | M_so3_c
494 | M_so4_c
495 | M_so4_e
496 | M_succ_c
497 | M_succ_e
498 | M_succ_p
499 | M_succoa_c
500 | M_sucsal_c
501 | M_t3c5ddeceACP_c
502 | M_t3c7mrseACP_c
503 | M_t3c9palmeACP_c
504 | M_tddec2eACP_c
505 | M_tdeACP_c
506 | M_tdec2eACP_c
507 | M_thex2eACP_c
508 | M_thf_c
509 | M_thmmp_c
510 | M_thmpp_c
511 | M_thr_L_c
512 | M_thr_L_e
513 | M_thr_L_p
514 | M_thymd_c
515 | M_thymd_e
516 | M_thymd_p
517 | M_tmrs2eACP_c
518 | M_toct2eACP_c
519 | M_tpalm2eACP_c
520 | M_trdox_c
521 | M_trdrd_c
522 | M_trp_L_c
523 | M_ttdca_c
524 | M_ttdca_p
525 | M_ttdcea_c
526 | M_ttdcea_p
527 | M_tyr_L_c
528 | M_tyr_L_e
529 | M_uaagmda_c
530 | M_uacgam_c
531 | M_uamr_c
532 | M_udcpdp_c
533 | M_udcpp_c
534 | M_udcpp_p
535 | M_udp_c
536 | M_udpg_c
537 | M_ugmd_c
538 | M_ump_c
539 | M_uppg3_c
540 | M_ura_c
541 | M_ura_e
542 | M_ura_p
543 | M_urdglyc_c
544 | M_urea_e
545 | M_uri_c
546 | M_uri_e
547 | M_uri_p
548 | M_utp_c
549 | M_val_L_c
550 | M_val_L_e
551 | M_val_L_p
552 | M_xan_c
553 | M_xan_e
554 | M_xan_p
555 | M_xmp_c
556 | M_xtp_c
557 | M_xtsn_c
558 | M_xtsn_e
559 | M_xu5p_D_c
560 | M_zn2_c
561 | M_zn2_e
562 | M_zn2_p
--------------------------------------------------------------------------------
/examples/iAF1260_resources/iAF1260_comp_synthlethals.txt:
--------------------------------------------------------------------------------
1 | R_3HAD160_and_R_3OAR160_and_R_3OAS160_and_R_ADCL_and_R_ADCS_and_R_AMPMS2_and_R_APRAUR_and_R_ASP1DC_and_R_CA2tex_and_R_CDPMEK_and_R_CHRPL_and_R_CLt3_2pp_and_R_CLtex_and_R_COBALT2tex_and_R_CU2tex_and_R_CYSTL_and_R_DB4PS_and_R_DHFS_and_R_DHNPA2_and_R_DHPPDA2_and_R_DHPS2_and_R_DMATT_and_R_DNMPPA_and_R_DNTPPA_and_R_DPCOAK_and_R_DPR_and_R_DXPRIi_and_R_DXPS_and_R_E4PD_and_R_FMNAT_and_R_GCALDD_and_R_GRTT_and_R_GTPCI_and_R_GTPCII2_and_R_HBZOPT_and_R_HPPK2_and_R_HSST_and_R_Ktex_and_R_MECDPDH2_and_R_MECDPS_and_R_MEPCT_and_R_METS_and_R_MG2tex_and_R_MNtex_and_R_MOBDabcpp_and_R_MOBDtex_and_R_MOHMT_and_R_MTHFR2_and_R_NNDPR_and_R_OCTDPS_and_R_OHPBAT_and_R_OPHBDC_and_R_PANTS_and_R_PDX5PS_and_R_PERD_and_R_PMDPHT_and_R_PNTK_and_R_PPCDC_and_R_PPNCL2_and_R_PTPATi_and_R_QULNS_and_R_RBFK_and_R_RBFSa_and_R_RBFSb_and_R_SHCHD2_and_R_SHCHF_and_R_SHSL1_and_R_THZPSN_and_R_TMPPP_and_R_UDCPDPS_and_R_UPP3MT_and_R_Zn2tex
2 | R_ACCOAC_and_R_MCOATA
3 | R_NDPK5
4 | R_O2tex
5 | R_ACLS_and_R_DHAD1_and_R_KARA1
6 | R_HCO3E
7 | R_SERAT
8 | R_GLCtex
9 | R_DAPDC
10 | R_UMPK
11 | R_NDPK2
12 | R_G1PACT_and_R_PGAMT_and_R_UAGDP
13 | R_MTHFC_and_R_MTHFD
14 | R_3HAD100_and_R_3HAD40_and_R_3HAD60_and_R_3HAD80_and_R_3OAR100_and_R_3OAR40_and_R_3OAR60_and_R_3OAR80_and_R_3OAS100_and_R_3OAS60_and_R_3OAS80
15 | R_3HAD120_and_R_3OAR120_and_R_3OAS120
16 | R_3OAR140_and_R_3OAS140
17 | R_A5PISO_and_R_KDOCT2_and_R_KDOPP_and_R_KDOPS
18 | R_PItex
19 | R_LPADSS_and_R_MOAT_and_R_MOAT2_and_R_TDSK_and_R_U23GAAT_and_R_UAGAAT_and_R_UHGADA_and_R_USHD
20 | R_3HAD140
21 | R_3HAD121_and_R_3HAD141_and_R_3HAD161_and_R_3OAR121_and_R_3OAR141_and_R_3OAR161_and_R_3OAS121_and_R_3OAS141_and_R_3OAS161_and_R_T2DECAI
22 | R_GLNS
23 | R_GF6PTA
24 | R_ASPTA
25 | R_CS
26 | R_NH4tex
27 | R_ASAD_and_R_ASPK
28 | R_GLUPRT_and_R_PRAGSr_and_R_PRAIS_and_R_PRFGS
29 | R_HSK_and_R_THRS
30 | R_IPMD_and_R_IPPMIa_and_R_IPPMIb_and_R_IPPS_and_R_LEUTAi_and_R_OMCDC
31 | R_ADSL2r_and_R_AIRC2_and_R_AIRC3_and_R_PRASCSi
32 | R_HSDy
33 | R_ACONTa_and_R_ACONTb_and_R_ICDHyr
34 | R_AICART_and_R_IMPC
35 | R_GMPS2
36 | R_ASPCT_and_R_DHORTS_and_R_OMPDC_and_R_ORPT
37 | R_CHORM
38 | R_ADSL1r_and_R_ADSS
39 | R_ARGSL_and_R_ARGSS_and_R_OCBT
40 | R_G3PD2
41 | R_ACODA_and_R_ACOTA
42 | R_ACGK_and_R_ACGS_and_R_AGPR
43 | R_SO4tex_and_R_SULabcpp
44 | R_ADSK_and_R_BPNT_and_R_SADT2_and_R_SULRi
45 | R_CYSS
46 | R_ACHBS_and_R_DHAD2_and_R_ILETA_and_R_KARA2_and_R_THRD_L
47 | R_GK1
48 | R_P5CR
49 | R_DAPE_and_R_DHDPRy_and_R_DHDPS_and_R_SDPDS_and_R_SDPTA_and_R_THDPS
50 | R_CHORS_and_R_DDPA_and_R_DHQS_and_R_DHQTi_and_R_PSCVT_and_R_SHK3Dr_and_R_SHKK
51 | R_CTPS2
52 | R_ALATA_L
53 | R_PSD161_and_R_PSSA161
54 | R_DASYN161
55 | R_AGPAT161_and_R_G3PAT161
56 | R_PHETA1_and_R_PPNDH
57 | R_PPND_and_R_TYRTA
58 | R_ATPPRT_and_R_HISTD_and_R_HISTP_and_R_HSTPT_and_R_IG3PS_and_R_IGPDH_and_R_PRAMPC_and_R_PRATPP_and_R_PRMICI
59 | R_PSD160_and_R_PSSA160
60 | R_AGPAT160_and_R_G3PAT160
61 | R_DASYN160
62 | R_PE161abcpp
63 | R_ANPRT_and_R_ANS_and_R_IGPS_and_R_PRAIi
64 | R_PE160abcpp
65 | R_GLUR_and_R_UAAGDS_and_R_UAMAGS_and_R_UAMAS
66 | R_TMDS
67 | R_PAPPT3_and_R_UAGCVT_and_R_UAGPT3_and_R_UAPGR_and_R_UDCPDP_and_R_UGMDDS
68 | R_DHFR
69 | R_K2L4Aabcpp_and_R_K2L4Atex
70 | R_MPTG
71 | R_MCTP1App
72 | R_ALAALAr
73 | R_NDPK7
74 | R_NDPK8
75 | R_NDPK4
76 | R_ALAR
77 | R_DTMPK
78 | R_CAt6pp
79 | R_CU2tpp
80 | R_COBALT2tpp
81 | R_NADS1_and_R_NNATr
82 | R_NADK
83 | R_AHCYSNS_and_R_RHCCE
84 | R_METAT
85 | R_FCLT_and_R_UPPDC1
86 | R_G1SAT_and_R_GLUTRR_and_R_GLUTRS_and_R_HMBS_and_R_PPBNGS_and_R_UPP3S
87 | R_TMPK
88 | R_PMPK
89 | R_PPC
--------------------------------------------------------------------------------
/examples/iaf1260.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import os
3 | from itertools import product
4 | from cobamp.efm_enumeration.kshortest_efms import KShortestEFMAlgorithm
5 | from cobamp.linear_systems.linear_systems import DualLinearSystem
6 | from cobamp.mcs_enumeration.intervention_problem import *
7 | import cobamp.efm_enumeration.kshortest_efm_properties as kp
8 |
# NOTE(review): hard-coded developer path; the script only runs from this machine.
os.chdir('/home/skapur/Workspaces/PyCharm/cobamp/examples/iAF1260_resources')

# Stoichiometric matrix of the compressed iAF1260 model (metabolites x reactions).
S = np.genfromtxt('iAF1260_comp_stoich.csv', delimiter=',')

# Reaction names excluded from enumeration, one per line.
with open('iAF1260_comp_exclusions.txt','r') as f:
	singles = [s.strip() for s in f.readlines()]

# Reaction names aligned with the columns of S.
with open('iAF1260_comp_rxnames.txt','r') as f:
	rx_names = [s.strip() for s in f.readlines()]

# Metabolite names aligned with the rows of S.
with open('iAF1260_comp_metnames.txt','r') as f:
	met_names = [s.strip() for s in f.readlines()]

# 'name=lb,ub' lines -> reaction name to [lower, upper] bound list.
with open('iAF1260_comp_bound_map.txt','r') as f:
	bound_map = {k:[float(n) for n in v.split(',')]for k,v in dict([s.strip().split('=') for s in f.readlines()]).items()}

# 'compressed=orig1,orig2,...' lines -> compressed reaction to original reactions.
with open('iAF1260_comp_orx_map.txt','r') as f:
	orx_map = {k:[n for n in v.split(',')]for k,v in dict([s.strip().split('=') for s in f.readlines()]).items()}

# Irreversible reactions: columns whose lower bound is non-negative.
irrev = np.where(np.array([bound_map[r][0] >= 0 for r in rx_names]))[0]
# Excluded singles mapped back to compressed-column indices (singleton lists).
# NOTE(review): 'chain' is not imported explicitly in this file -- presumably it
# leaks in through the wildcard import of intervention_problem above; confirm.
exclusions = [[rx_names.index([k for k,v in orx_map.items() if s in v][0])] for s in singles if s in list(chain(*orx_map.values()))]
biomass_index = rx_names.index('R_Ec_biomass_iAF1260_core_59p81M')
atpm_index = rx_names.index('R_ATPM')
glc_index = rx_names.index('R_EX_glc_e')

# K-shortest setup: populate all cut sets of size 1.
configuration = kp.KShortestProperties()
configuration[kp.K_SHORTEST_MPROPERTY_METHOD] = kp.K_SHORTEST_METHOD_POPULATE
configuration[kp.K_SHORTEST_OPROPERTY_MAXSIZE] = 1

# Target space to disrupt: growth above 0.0001, glucose uptake limited to 20,
# ATP maintenance fixed at 8.39.
problem = InterventionProblem(S)
T, b = problem.generate_target_matrix([
	DefaultFluxbound(0.0001, None, biomass_index),
	DefaultFluxbound(-20, None, glc_index),
	DefaultFluxbound(8.39, 8.39, atpm_index)])

dual_system = DualLinearSystem(S, irrev, T, b)

algorithm = KShortestEFMAlgorithm(configuration)

lethals = list(algorithm.enumerate(dual_system, exclusions))
49 |
50 | def decode_solutions(solutions):
51 | """
52 | Args:
53 | solutions:
54 | """
55 | return list(chain(*[list(product(*[orx_map[rx_names[i]] for i in lethal.get_active_indicator_varids()])) for lethal in solutions]))
56 |
57 | decoded = decode_solutions(lethals)
58 |
59 | len(decoded)
--------------------------------------------------------------------------------
/notice.txt:
--------------------------------------------------------------------------------
1 | this is the dev branch!
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | indexed==1.2.1
2 | pandas==1.3.5
3 | scipy==1.7.3
4 | numpy==1.21.6
5 | matplotlib==3.5.2
6 | optlang==1.5.2
7 | pathos==0.2.9
8 | boolean.py==3.8
9 |
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
from setuptools import setup, find_packages

# Packaging metadata for cobamp. The project uses a src-layout, so both
# package_dir and find_packages point at src/.
setup(
    name = 'cobamp',
    version = '0.2.2',
    package_dir = {'':'src'},
    packages = find_packages('src'),
    # Pinned runtime dependencies; keep in sync with the top-level requirements.txt
    install_requires = ["indexed==1.2.1",
                        "pandas==1.3.5",
                        "scipy==1.7.3",
                        "numpy==1.21.6",
                        "matplotlib==3.5.2",
                        "optlang==1.5.2",
                        "pathos==0.2.9",
                        "boolean.py==3.8"],

    author = 'Vítor Vieira',
    author_email = 'vvieira@ceb.uminho.pt',
    description = 'cobamp - pathway analysis methods for genome-scale metabolic models',
    license = 'GNU General Public License v3.0',
    keywords = 'pathway analysis metabolic model',
    url = 'https://github.com/BioSystemsUM/cobamp',
    # README.rst doubles as the PyPI long description
    long_description = open('README.rst').read(),
    classifiers = [
        'Development Status :: 4 - Beta',
        'Topic :: Scientific/Engineering :: Bio-Informatics',
        'Intended Audience :: Science/Research',
        'Programming Language :: Python :: 3.6',
        'License :: OSI Approved :: GNU General Public License v3 (GPLv3)',
        'Topic :: Software Development :: Libraries :: Python Modules'
    ],
)
33 |
--------------------------------------------------------------------------------
/src/cobamp/__init__.py:
--------------------------------------------------------------------------------
1 | from . import algorithms, analysis, core, nullspace, utilities, wrappers
2 |
--------------------------------------------------------------------------------
/src/cobamp/algorithms/__init__.py:
--------------------------------------------------------------------------------
1 | """
2 | Module containing elementary flux mode enumeration methods
3 | """
4 |
5 | from . import kshortest
6 | from .kshortest import KShortestEFMAlgorithm
7 |
--------------------------------------------------------------------------------
/src/cobamp/analysis/__init__.py:
--------------------------------------------------------------------------------
1 | from . import frequency, graph, plotting
2 |
--------------------------------------------------------------------------------
/src/cobamp/analysis/frequency.py:
--------------------------------------------------------------------------------
1 | from collections import Counter
2 | from itertools import combinations, chain
3 |
4 | import pandas as pd
5 |
6 | from cobamp.analysis.plotting import display_heatmap
7 |
8 |
def get_frequency_dataframe(pathway_dict, k_min=1, k_max=1):
    """
    Count, per pathway group, how often each reaction (k=1) or combination of
    reactions (k>1) occurs across that group's pathways.

    Parameters
    ----------
    pathway_dict: dict mapping a group identifier to an iterable of pathways,
        each pathway being an iterable of reaction names.
    k_min: smallest combination size to count (default 1).
    k_max: largest combination size to count (default 1).

    Returns a pandas DataFrame with one column per group and one row per
    combination label (members joined by single spaces).
    """

    def _get_possible_combinations(pathway):
        # sorted() makes the joined label deterministic. The previous
        # frozenset-based join depended on hash iteration order, which can
        # differ between interpreter runs for k >= 2 (hash randomization).
        return list(
            chain(*[[' '.join(sorted(set(c))) for c in combinations(pathway, k)] for k in range(k_min, k_max + 1)]))

    def _get_reaction_frequencies(pathways):
        c = Counter()
        for pathway in pathways:
            c.update(_get_possible_combinations(pathway))
        return c

    return pd.DataFrame(
        {ident: _get_reaction_frequencies(pathways) for ident, pathways in pathway_dict.items()})
22 |
23 |
if __name__ == '__main__':
    # Demo: generate random pathway ("EFM") groups and plot their reaction
    # frequency table as a heatmap.
    n_reactions = 30
    efm_size_range = (1, 20)  # min/max reactions per random EFM
    efm_number = 20  # EFMs per group
    efm_group_number = 5


    def generate_random_efms(n_reactions, efm_size_range, efm_number, efm_group_number):
        # Builds random reaction/group names and random EFM sets for the demo.
        from random import randint

        def random_slightly_readable_string_generator(length):
            # Alternates vowels and consonants so generated names are readable.
            s = ""
            vwls = ['a', 'e', 'i', 'o', 'u']
            vwl_flag = bool(randint(0, 1))
            while len(s) < length:
                some_char = chr(randint(97, 122))
                if (some_char in vwls and vwl_flag) or (some_char not in vwls and not vwl_flag):
                    s += some_char
                    vwl_flag = not vwl_flag
            return s

        reaction_names = [random_slightly_readable_string_generator(randint(4, 10)) for _ in range(n_reactions)]
        group_names = [random_slightly_readable_string_generator(randint(10, 15)) for _ in range(n_reactions)]

        # Each group maps to efm_number random reaction sets
        efm_groups = {
            group_names[j]: [set([reaction_names[randint(0, n_reactions - 1)] for i in range(randint(*efm_size_range))])
                             for
                             _ in range(efm_number)] for j in range(efm_group_number)}

        return reaction_names, group_names, efm_groups


    reaction_names, group_names, efm_groups = generate_random_efms(n_reactions, efm_size_range, efm_number,
                                                                   efm_group_number)

    df = get_frequency_dataframe(efm_groups)
    display_heatmap(df)
60 |
--------------------------------------------------------------------------------
/src/cobamp/analysis/graph.py:
--------------------------------------------------------------------------------
1 | from cobamp.utilities.tree import Tree
2 |
3 |
def compress_linear_paths(tree):
    """
    Collapse every chain of single-child nodes into one node whose value is
    the list of all values along the chain. The tree is modified in place.

    Parameters
    ----------
    tree: A Tree instance.
    -------

    """
    if tree.is_leaf():
        return
    # Absorb single-child descendants into this node until it branches or
    # becomes a leaf.
    while len(tree.children) == 1:
        if type(tree.value) is not list:
            tree.value = [tree.value]
        only_child = tree.children[0]
        tree.value.append(only_child.value)
        tree.children = only_child.children
    for subtree in tree.children:
        compress_linear_paths(subtree)
25 |
26 |
def ignore_compressed_nodes_by_size(tree, size):
    """
    Replace previously compressed child nodes (list-valued) holding more than
    `size` elements with a single terminal node valued "REMAINING".

    Parameters
    ----------
    tree: A Tree instance
    size: An integer with the size threshold
    -------

    """
    for subtree in tree.children:
        oversized = isinstance(subtree.value, list) and len(subtree.value) > size
        if oversized:
            subtree.value = 'REMAINING'
            subtree.children = []
        else:
            ignore_compressed_nodes_by_size(subtree, size)
44 |
45 |
def probabilistic_tree_prune(tree, target_level, current_level=0, cut_leaves=False, name_separator=' and '):
    """
    Cuts a tree's nodes under a certain height (`target_level`) and converts ensuing nodes into a single one whose value
    represents the relative frequency of an element in the nodes below. Requires values on the extra_info field.
    Parameters
    ----------
    tree: A Tree instance
    target_level: An int representing the level at which the tree will be cut
    current_level: The current level of the tree (int). Default is 0 for root nodes.
    cut_leaves: A boolean indicating whether the node at the target level is excluded or displays probabilities.
    name_separator: Separator to use when representing multiple elements

    Returns True when this call was made at exactly the target level.
    -------

    """
    if current_level < target_level:
        # Above the cut height: just descend. (The original code also tested
        # `target_level == current_level and cut_leaves` inside this branch,
        # which can never hold when current_level < target_level; that
        # unreachable branch has been removed without changing behavior.)
        for child in tree.children:
            probabilistic_tree_prune(child, target_level, current_level + 1, cut_leaves, name_separator)
    else:
        # At (or below) the target level: compress the subtree into frequency
        # data, then either hide it ("REMAINING") or render the probabilities.
        probabilistic_tree_compression(tree, name_separator=name_separator)
        tree.value = "REMAINING" if cut_leaves else [str(k) + "=" + str(tree.value[k]) for k in
                                                     sorted(tree.value, key=tree.value.get)]
    return target_level == current_level
72 |
73 |
def probabilistic_tree_compression(tree, data=None, total_count=None, name_separator=' and '):
    """
    Compresses a node and subsequent children by removing them and modifying the value to a dictionary with the relative
    frequency of each element in the subsequent nodes. Requires values on the extra_info field.

    Parameters
    ----------
    tree: A Tree instance
    data: Local count if not available in extra_info
    total_count: Total amount of sets if not available in extra_info
    name_separator: Separator to use when representing multiple elements

    NOTE(review): `data` and `total_count` must be supplied together — passing
    only one of them sends execution to the accumulation branch with the other
    still None; confirm callers always pass both or neither.
    -------

    """
    if data is None and total_count is None:
        # Root call: the root's extra_info is taken as the total population
        # size, and the root's own value counts with relative frequency 1.
        total_count = int(tree.extra_info)
        data = {name_separator.join(tree.value) if isinstance(tree.value, list) else tree.value: 1}
        for child in tree.children:
            probabilistic_tree_compression(child, data, total_count, name_separator)
        # Collapse the whole subtree into this single frequency-dict node.
        tree.value = data
        tree.children = []
    else:
        # Recursive call: accumulate this node's share of the population.
        local_proportion = int(tree.extra_info) / total_count
        key = name_separator.join(tree.value) if isinstance(tree.value, list) else tree.value
        if key not in data.keys():
            data[key] = local_proportion
        else:
            data[key] += local_proportion
        for child in tree.children:
            probabilistic_tree_compression(child, data, total_count, name_separator)
105 |
def pretty_print_tree(tree, write_path=None):
    """
    Render a Tree instance and all of its descendants as an indented ASCII
    outline, one node per line.

    Parameters
    ----------
    tree: A Tree instance
    write_path: Path to store a text file. Use None if the string is not to be stored in a file.

    Returns the rendered string.
    -------

    """
    lines = []

    def _render(node, prefix):
        # Each level adds a "|<tab>" gutter before the node marker.
        lines.append(prefix + "|-- " + repr(node))
        for child in node.children:
            _render(child, prefix + "|\t")

    _render(tree, '')
    rendered = '\n'.join(lines)

    if write_path is not None:
        with open(write_path, 'w') as handle:
            handle.write(rendered)

    return rendered
136 |
137 |
def apply_fx_to_all_node_values(tree, fx):
    """
    Replace each node's value with fx(value) for the whole subtree rooted at
    `tree`, visiting nodes in depth-first preorder. Modifies nodes in place.
    Parameters
    ----------
    tree: A Tree instance
    fx: A function to apply
    -------

    """
    # Explicit stack; pushing children reversed preserves preorder.
    pending = [tree]
    while pending:
        node = pending.pop()
        node.value = fx(node.value)
        pending.extend(reversed(node.children))
151 |
152 |
def find_all_tree_nodes(tree):
    """
    Parameters
    ----------
    tree: A Tree instance.

    Returns a list of every node in the subtree rooted at `tree` (including
    `tree` itself) in depth-first preorder.
    -------

    """
    collected = []
    stack = [tree]
    while stack:
        node = stack.pop()
        collected.append(node)
        # reversed() keeps the left-to-right child visiting order.
        stack.extend(reversed(node.children))
    return collected
172 |
173 |
def merge_duplicate_nodes(tree):
    """
    Merges all nodes with similar values, replacing every instance reference of all nodes with the same object if its
    value is identical

    NOTE(review): the shared replacement nodes are created as Tree(conv_key(value)),
    so merged nodes end up holding the *string* form of the original value, and a
    shared node's children are overwritten each time it is encountered. Confirm
    this is the intended behaviour before relying on the merged values/children.

    Parameters
    ----------
    tree: A Tree instance
    -------

    """
    all_nodes = find_all_tree_nodes(tree)
    # Canonical string key: lists are sorted before stringification so that
    # permutations of the same elements compare equal.
    conv_key = lambda x: str(sorted(x) if isinstance(x, list) else x)
    unique_keys = [conv_key(k.value) for k in all_nodes]
    unique_node_map = {k: Tree(k) for k in unique_keys}

    def __merge_duplicate_nodes(tree):
        # Rewire each child reference to the single shared node for its key.
        new_children = []
        for child in tree.children:
            grandchildren = child.children
            new_child = unique_node_map[conv_key(child.value)]
            new_child.children = grandchildren
            new_children.append(new_child)
        tree.children = new_children
        for child in tree.children:
            __merge_duplicate_nodes(child)

    __merge_duplicate_nodes(tree)
202 |
203 |
def populate_nx_graph(tree, G, previous=None, name_separator='\n', unique_nodes=True, node_dict=None):
    """
    Walk the tree and add one edge per parent/child pair to `G` (any object
    exposing add_edge). With unique_nodes, every label gets a "_<n>" suffix so
    repeated values become distinct graph nodes.

    Parameters
    ----------
    tree: A Tree instance
    G: graph-like object with an add_edge(u, v) method
    previous: (parent label, parent key) tuple for recursive calls, else None
    name_separator: joins list-valued node values into a single label
    unique_nodes: when True, suffix labels with an occurrence counter
    node_dict: occurrence counts per label key (internal, built when None)
    """
    if node_dict is None:
        node_dict = {}
    if unique_nodes:
        if type(tree.value) == list:
            node_value_key = name_separator.join(tree.value)
        else:
            node_value_key = str(tree.value)
        seen_before = node_dict.get(node_value_key)
        if seen_before is None:
            node_dict[node_value_key] = 1
            node_value = node_value_key + "_" + '0'
        else:
            node_value = node_value_key + "_" + str(seen_before)
            node_dict[node_value_key] += 1
    else:
        node_value = tree.value
        node_value_key = tree.value
    if previous is not None:
        previous_node, previous_key = previous
        G.add_edge(previous_node, node_value)
    if not tree.is_leaf():
        for child in tree.children:
            populate_nx_graph(child, G, previous=(node_value, node_value_key), name_separator=name_separator,
                              unique_nodes=unique_nodes, node_dict=node_dict)
226 |
--------------------------------------------------------------------------------
/src/cobamp/analysis/plotting.py:
--------------------------------------------------------------------------------
1 | import matplotlib
2 | import matplotlib.pyplot as plt
3 | import numpy as np
4 |
5 |
6 | # code from matplotlib's documentation
7 | # at https://matplotlib.org/gallery/images_contours_and_fields/image_annotated_heatmap.html
8 |
def heatmap(data, row_labels, col_labels, ax=None,
            cbar_kw=None, cbarlabel="", **kwargs):
    """
    Create a heatmap from a numpy array and two lists of labels.

    Arguments:
        data : A 2D numpy array of shape (N,M)
        row_labels : A list or array of length N with the labels
                     for the rows
        col_labels : A list or array of length M with the labels
                     for the columns
    Optional arguments:
        ax : A matplotlib.axes.Axes instance to which the heatmap
             is plotted. If not provided, use current axes or
             create a new one.
        cbar_kw : A dictionary with arguments to
                  :meth:`matplotlib.Figure.colorbar`. Defaults to {}.
        cbarlabel : The label for the colorbar
    All other arguments are directly passed on to the imshow call.

    Returns the (AxesImage, Colorbar) pair.
    """
    # A fresh dict per call: the previous `cbar_kw={}` default was a shared
    # mutable default argument.
    if cbar_kw is None:
        cbar_kw = {}

    if not ax:
        ax = plt.gca()

    # Plot the heatmap
    im = ax.imshow(data, **kwargs)

    # Create colorbar
    cbar = ax.figure.colorbar(im, ax=ax, **cbar_kw)
    cbar.ax.set_ylabel(cbarlabel, rotation=-90, va="bottom")

    # We want to show all ticks...
    ax.set_xticks(np.arange(data.shape[1]))
    ax.set_yticks(np.arange(data.shape[0]))
    # ... and label them with the respective list entries.
    ax.set_xticklabels(col_labels)
    ax.set_yticklabels(row_labels)

    # Let the horizontal axes labeling appear on top.
    ax.tick_params(top=True, bottom=False,
                   labeltop=True, labelbottom=False)

    # Rotate the tick labels and set their alignment.
    plt.setp(ax.get_xticklabels(), rotation=-30, ha="right",
             rotation_mode="anchor")

    # Turn spines off and create white grid.
    for edge, spine in ax.spines.items():
        spine.set_visible(False)

    ax.set_xticks(np.arange(data.shape[1] + 1) - .5, minor=True)
    ax.set_yticks(np.arange(data.shape[0] + 1) - .5, minor=True)
    ax.grid(which="minor", color="w", linestyle='-', linewidth=3)
    ax.tick_params(which="minor", bottom=False, left=False)

    return im, cbar
65 |
66 |
def annotate_heatmap(im, data=None, valfmt="{x:.2f}",
                     textcolors=("black", "white"),
                     threshold=None, **textkw):
    """
    A function to annotate a heatmap.

    Arguments:
        im : The AxesImage to be labeled.
    Optional arguments:
        data : Data used to annotate. If None, the image's data is used.
        valfmt : The format of the annotations inside the heatmap.
                 This should either use the string format method, e.g.
                 "$ {x:.2f}", or be a :class:`matplotlib.ticker.Formatter`.
        textcolors : A sequence of two color specifications. The first is
                     used for values below a threshold, the second for those
                     above. (Default is now an immutable tuple rather than a
                     shared mutable list.)
        threshold : Value in data units according to which the colors from
                    textcolors are applied. If None (the default) uses the
                    middle of the colormap as separation.

    Further arguments are passed on to the created text labels.

    Returns the list of created Text artists.
    """

    if not isinstance(data, (list, np.ndarray)):
        data = im.get_array()

    # Normalize the threshold to the images color range.
    if threshold is not None:
        threshold = im.norm(threshold)
    else:
        threshold = im.norm(data.max()) / 2.

    # Set default alignment to center, but allow it to be
    # overwritten by textkw.
    kw = dict(horizontalalignment="center",
              verticalalignment="center")
    kw.update(textkw)

    # Get the formatter in case a string is supplied
    if isinstance(valfmt, str):
        valfmt = matplotlib.ticker.StrMethodFormatter(valfmt)

    # Loop over the data and create a `Text` for each "pixel".
    # Change the text's color depending on the data.
    texts = []
    for i in range(data.shape[0]):
        for j in range(data.shape[1]):
            # The boolean comparison indexes the two-color sequence (0 or 1).
            kw.update(color=textcolors[im.norm(data[i, j]) > threshold])
            text = im.axes.text(j, i, valfmt(data[i, j], None), **kw)
            texts.append(text)

    return texts
119 |
120 |
def display_heatmap(df):
    """Render a DataFrame as an annotated reaction-frequency heatmap and show it."""
    values = df.values

    fig, ax = plt.subplots(figsize=(6, 15))
    im, cbar = heatmap(values, df.index.tolist(), df.columns.tolist(), ax=ax,
                       cmap="YlGn", cbarlabel="reaction frequency [%]")
    texts = annotate_heatmap(im, valfmt="{x:.1f}%")

    fig.tight_layout()
    plt.show()
133 |
--------------------------------------------------------------------------------
/src/cobamp/core/__init__.py:
--------------------------------------------------------------------------------
1 | from . import linear_systems, models, optimization, transformer, cb_analysis
2 |
--------------------------------------------------------------------------------
/src/cobamp/core/cb_analysis.py:
--------------------------------------------------------------------------------
1 | '''
2 | Implementation of the fast flux variability analysis method by Gudmundsson and Thiele (Computationally efficient
3 | flux variability analysis - BMC Bioinformatics 2010 11:489).
4 |
5 | Some parts have been heavily inspired by the talented people supporting cobrapy.
6 |
7 | '''
8 |
9 | from numpy import zeros
10 | from pathos.multiprocessing import cpu_count
11 | from pathos.pools import _ProcessPool
12 |
13 | from functools import reduce, partial
14 | from cobamp.core.optimization import LinearSystemOptimizer
15 |
16 |
def _fva_initializer(linear_system, sense, gamma):
    # Process-pool initializer: stash shared state in module-level globals so
    # each worker builds a single optimizer and _fva_iteration can reuse it
    # for every reaction index it is handed.
    global _linear_system
    global _opt
    global _sense
    global _lenrx
    global _gamma
    _linear_system = linear_system
    # number of reactions = columns of the stoichiometric matrix
    _lenrx = _linear_system.get_stoich_matrix_shape()[1]
    # the problem was already built by the caller, hence build=False
    _opt = LinearSystemOptimizer(_linear_system, build=False)
    _sense = sense  # objective sense flag forwarded to set_objective (True fills minima — see run())
    _gamma = gamma  # optimum fraction; only used by the commented-out constraint experiment below
28 |
29 |
def _fva_iteration(i):
    # Worker task: optimize the flux of reaction i in the direction given by
    # _sense, under the constraints installed by FluxVariabilityAnalysis.run.
    global _linear_system
    global _opt
    global _sense
    global _gamma
    global _lenrx
    # print('Iterating for ',i)
    # Objective vector selecting only reaction i
    w = zeros(_lenrx).ravel()
    w[i] = 1
    _linear_system.set_objective(w, _sense)
    sol = _opt.optimize()
    # _linear_system.add_rows_to_model(w, [sol.objective_value()* _gamma], [None], only_nonzero=True)
    # print(w, _sense, i, sol.x()[i], sol.objective_value())
    return i, sol.objective_value()
44 |
45 |
class FluxVariabilityAnalysis(object):
    """
    Parallel flux variability analysis following the fast FVA strategy cited
    in the module docstring: optimize an initial objective once, constrain it
    to a fraction (gamma) of its optimum, then minimize and maximize every
    reaction's flux across a pool of worker processes.
    """

    def __init__(self, linear_system, workers=None):
        """
        Parameters
        ----------
        linear_system: A built linear system exposing get_stoich_matrix_shape,
            set_objective, add_rows_to_model and remove_from_model.
        workers: Number of worker processes; defaults to the CPU count, capped
            at the number of reactions.
        """
        self.ls = linear_system
        self.n_jobs = min(cpu_count() if workers == None else workers, linear_system.get_stoich_matrix_shape()[1])
        self.result = None

    def run(self, initial_objective, minimize_initial, gamma=1 - 1e-6):
        """
        Parameters
        ----------
        initial_objective: Column index of the reaction optimized first.
        minimize_initial: Boolean; True minimizes the initial objective.
        gamma: Fraction of the initial optimum enforced as a constraint.

        Returns a FluxVariabilityAnalysisResult holding one [min, max] pair
        per reaction.
        """
        M, N = self.ls.get_stoich_matrix_shape()
        result = {i: [0, 0] for i in range(N)}
        opt = LinearSystemOptimizer(self.ls, build=False)
        c = zeros(N)
        c[initial_objective] = 1
        self.ls.set_objective(c, minimize_initial)

        v0 = opt.optimize()
        z0 = v0.objective_value()

        # Pin the initial objective to at least gamma of its optimum.
        self.ls.add_rows_to_model(c.reshape([1, N]), [z0 * gamma], [None], only_nonzero=True,
                                  names=['FASTFVAINITIALCONSTRAINT'])

        # sense=True fills the minima (slot 0), sense=False the maxima (slot 1).
        for sense in [True, False]:
            rx_per_job = N // self.n_jobs
            self.pool = _ProcessPool(
                processes=self.n_jobs,
                initializer=_fva_initializer,
                initargs=(self.ls, sense, gamma)
            )
            for i, value in self.pool.imap_unordered(_fva_iteration, range(N), chunksize=rx_per_job):
                result[i][int(not sense)] = value

            self.pool.close()
            self.pool.join()

        # Remove the temporary constraint row added above.
        self.ls.remove_from_model([M], 'const')
        return FluxVariabilityAnalysisResult([result[i] for i in range(N)])
82 |
class FluxVariabilityAnalysisResult(object):
    """
    Immutable holder for per-reaction [minimum, maximum] flux limits. For each
    entry of func_condition_dict, an instance exposes a generated
    find_<name>_reactions() method returning the matching reaction indices.
    """

    func_condition_dict = {
        'blocked': lambda x: (abs(x[0]) < 1e-9) and (abs(x[1]) < 1e-9),
        'forced_active': lambda x: (1e-9 < x[0] <= x[1]) or (-1e-9 > x[1] > x[0]),
        'forward_irreversible': lambda x: (x[0] >= 0) and (x[1] > 0),
        'backwards_irreversible': lambda x: (x[1] <= 0) and (x[0] < 0)
    }

    def __init__(self, limits):
        # Freeze the limits as nested tuples so results cannot be mutated.
        self.__limits = tuple(tuple(pair) for pair in limits)
        # Generate one convenience finder per named condition.
        for condition_name, condition in self.func_condition_dict.items():
            finder_name = '_'.join(['find', condition_name, 'reactions'])
            finder = partial(self.find_filter_matching_reactions, func_conditions=[condition])
            setattr(self, finder_name, finder)

    @property
    def limits(self):
        return self.__limits

    def find_filter_matching_reactions(self, func_conditions):
        """Return the indices whose limit pairs satisfy every supplied condition."""
        matching = set()
        for index, pair in enumerate(self.__limits):
            if sum(check(pair) for check in func_conditions) == len(func_conditions):
                matching.add(index)
        return matching
103 |
if __name__ == '__main__':
    # Benchmark: compare per-reaction flux_limits over a plain process pool
    # against the fast FVA implementation above, then verify both agree.
    from cobra.io.sbml3 import read_sbml_model
    from cobamp.wrappers import COBRAModelObjectReader
    import time

    model = read_sbml_model(
        '/home/skapur/MEOCloud/Projectos/cobamp/examples/iAF1260_resources/original_model/Ec_iAF1260_flux2.xml')
    mor = COBRAModelObjectReader(model)

    cbm_mp = mor.to_cobamp_cbm('CPLEX')
    cbm_fast = mor.to_cobamp_cbm('CPLEX')

    # Reaction index 1004 is optimized first (presumably biomass) — TODO confirm.
    init_sol = cbm_mp.optimize({1004: 1}, False)
    Z0 = (1 - 1e-6) * init_sol.objective_value()
    cbm_mp.set_reaction_bounds(1004, lb=Z0)

    c1_time = time.time()

    pp = _ProcessPool(cpu_count())

    # Baseline: one flux_limits call per reaction across the pool.
    limits_mp = list(pp.map(cbm_mp.flux_limits, range(len(cbm_mp.reaction_names))))
    pp.close()
    pp.join()
    c2_time = time.time()
    print('Multi-threaded:', c2_time - c1_time, 'seconds')

    fva = FluxVariabilityAnalysis(cbm_fast.model)
    limits_fast = fva.run(1004, False)
    c3_time = time.time()
    print('Multi-threaded fast FVA:', c3_time - c2_time, 'seconds')

    # Collect reactions whose limits differ beyond the tolerance. The `i`
    # inside the comprehension is local to it (Python 3) and does not clobber
    # the enumerate index.
    error = 1e-6
    error_rx = []
    for i, lsts in enumerate(zip(limits_mp, limits_fast.limits)):
        mpr, fr = lsts
        ld, ud = [mpr[i] - fr[i] for i in range(len(mpr))]
        if (abs(ld) > error) | (abs(ud) > error):
            error_rx.append([i, mpr, fr])

    print('Valid:', len(error_rx) == 0)
--------------------------------------------------------------------------------
/src/cobamp/core/optimization.py:
--------------------------------------------------------------------------------
1 | import random
2 | import string
3 | from collections import OrderedDict
4 | from time import time
5 |
6 | import pandas as pd
7 | from numpy import nan, array, abs, zeros, max
8 | from pathos.multiprocessing import cpu_count
9 | from pathos.pools import _ProcessPool
10 |
11 | from cobamp.core.linear_systems import LinearSystem
12 |
13 | MP_THREADS = cpu_count()
14 |
15 |
def random_string_generator(N):
    """

    Parameters

    ----------

    N : an integer

    Returns a random string of uppercase characters and digits of length N
    -------

    """
    alphabet = string.ascii_uppercase + string.digits
    # one random.choice call per character, joined into the final string
    return ''.join([random.choice(alphabet) for _ in range(N)])
30 |
31 |
class Solution(object):
    """
    Class representing a solution to a given linear optimization problem.
    Includes an internal dictionary for additional information to be included.
    """

    def __init__(self, value_map, status, **kwargs):
        """

        Parameters
        ----------

        value_map: A dictionary mapping variable indexes with their values as determined by the solver

        status: An object (preferrably str or int) containing the solution status

        kwargs: Any additional information to be included in the attribute_dict variable that can be accessed by
        the <attribute_value> function. Two keys are treated specially:
        'names' (ordered variable names) and 'objective_value'.
        """
        self.__var_names = None
        self.__value_map = value_map
        self.__status = status
        # 'names' is held apart from the generic attribute dictionary
        self.__attribute_dict = {k: v for k, v in kwargs.items() if k != 'names'}
        if 'names' in kwargs:
            self.__var_names = kwargs['names']

        self.__obj_value = kwargs['objective_value'] if 'objective_value' in kwargs else nan

    def __getitem__(self, item):
        # A sequence of keys yields a sub-dict; a single string key yields its value.
        if hasattr(item, '__iter__') and not isinstance(item, str):
            return {k: self.__value_map[k] for k in item}
        elif isinstance(item, str):
            return self.__value_map[item]
        else:
            raise TypeError('\'item\' is not a sequence or string.')

    def to_series(self):
        """Returns the variable values as a pandas Series, named when possible."""
        # identity comparison with None (PEP 8) instead of the previous `!= None`
        if self.__var_names is not None:
            # NOTE(review): this path indexes __value_map by integer position;
            # it assumes an integer-keyed value_map when 'names' is supplied —
            # confirm against callers.
            return pd.Series({self.__var_names[i]: self.__value_map[i] for i in range(len(self.__var_names))})
        else:
            return pd.Series(self.var_values())

    def set_attribute(self, key, value):
        """
        Sets the value of a given <key> as <value>.

        Parameters

        ----------

        key - A string

        value - Any object to be associated with the supplied key
        -------

        """
        self.__attribute_dict[key] = value

    def var_values(self):
        """

        Returns a dict mapping reaction indices with the variable values.
        -------

        """
        return self.__value_map

    def status(self):
        """

        Returns the status of this solution, as supplied by the solver.
        -------

        """
        return self.__status

    def attribute_value(self, attribute_name):
        """

        Parameters

        ----------

        attribute_name: A dictionary key (preferrably str)

        Returns the value associated with the supplied key
        -------

        """
        return self.__attribute_dict[attribute_name]

    def attribute_names(self):
        """

        Returns all keys present in the attribute dictionary.

        -------

        """
        return self.__attribute_dict.keys()

    def objective_value(self):
        """

        Returns the objective value for this solution

        """
        return self.__obj_value

    def x(self):
        '''

        Returns a ndarray with the solution values in order (from the variables)

        '''

        return array(list(self.__value_map.values()))

    def __repr__(self):
        return '<'+self.status().capitalize()+' Solution - objective: '+\
            str(self.objective_value())+'; at '+hex(id(self))+'>'
153 |
154 |
class LinearSystemOptimizer(object):
    """
    Class with methods to solve a <LinearSystem> as a linear optimization problem.
    """

    def __init__(self, linear_system, hard_fail=False, build=True):
        """

        Parameters

        ----------

        linear_system: A <LinearSystem> instance.
        hard_fail: A boolean flag indicating whether an Exception is raised when the optimization fails
        build: A boolean; when True, the linear system's problem is built on construction.
        """
        self.linear_system = linear_system
        if build:
            linear_system.build_problem()
        self.solver = linear_system.solver
        self.model = linear_system.get_model()
        self.hard_fail = hard_fail

    def optimize(self):
        """
        Solves the linear system's currently configured problem.

        Returns a <Solution> instance holding the primal values, solver status
        and objective value. When the solver raises and hard_fail is True, the
        captured exception is re-raised instead.
        -------
        """
        names = self.model._get_variables_names()

        # Defaults used if the solver fails before producing any values.
        value_map = OrderedDict([(v, nan) for v in names])
        status = None
        ov = nan

        # self.model.configuration.tolerances.feasibility = 1e-9 # TODO this is for a test, to delete later
        # self.model.configuration.tolerances.optimality = 1e-6 # TODO this is for a test, to delete later

        # tINIT test parameters
        # self.model.problem.Params.MIPGap = 1e-9
        # self.model.configuration.tolerances.feasibility = 1e-8
        # self.model.configuration.tolerances.optimality = 1e-8
        # self.model.configuration.verbosity = 3

        try:
            self.model.optimize()
            values = self.model._get_primal_values()
            value_map = OrderedDict([(k, v) for k, v in zip(names, values)])
            status = self.model.status
            ov = self.model.objective.value

        except Exception as e:
            # Kept so it can be re-raised below when hard_fail is set.
            frozen_exception = e

        if status or not self.hard_fail:
            return Solution(value_map, self.model.status, objective_value=ov)
        else:
            raise frozen_exception

    def populate(self, limit=None):
        """
        Enumerates multiple solutions from the solver's solution pool, up to
        <limit> (solver maximum when None). Only implemented for CPLEX and GUROBI.
        """
        intf_dict = {
            'CPLEX': self.__populate_cplex,
            'GUROBI': self.__populate_gurobi
        }
        if self.solver in ['CPLEX', 'GUROBI']:
            return intf_dict[self.solver](limit)
        else:
            raise ValueError('The provided solver does not have an implemented populate function. Choose from' +
                             ''.join(list(intf_dict.keys())))

    def __populate_cplex(self, limit):
        # Uses the CPLEX solution-pool API on the underlying problem object.
        instance = self.model.problem

        if not limit:
            instance.parameters.mip.pool.capacity = instance.parameters.mip.pool.capacity.max()
        else:
            instance.parameters.mip.pool.capacity = limit
        vnames = instance.variables.get_names()
        mnames = self.model._get_variables_names()
        solutions = []
        try:
            instance.populate_solution_pool()
            pool_intf = instance.solution.pool
            nsols = pool_intf.get_num()
            for s in range(nsols):
                # Reorder the pool's values to match the optlang variable order.
                vmap = {k: v for k, v in zip(vnames, pool_intf.get_values(s))}
                ord_vmap = OrderedDict([(k, vmap[k]) for k in mnames])
                sol = Solution(ord_vmap, 'optimal', objective_value=pool_intf.get_objective_value(s))
                # TODO: get status dict from optlang and use it accordingly
                solutions.append(sol)
        except Exception as e:
            print(e)

        return solutions

    def __populate_gurobi(self, limit):
        # Uses the Gurobi PoolSolutions/SolutionNumber API on the problem object.
        instance = self.model.problem

        solutions = []
        instance.params.PoolSolutions = limit
        instance.params.SolutionNumber = 0
        try:
            instance.optimize()
            mnames = self.model._get_variables_names()
            if instance.SolCount > 0:
                for n in range(instance.SolCount):
                    # Selecting a solution number exposes its values via Xn.
                    instance.params.SolutionNumber = n
                    ord_vmap = OrderedDict([(k, instance.getVarByName(k).Xn) for k in mnames])
                    sol = Solution(ord_vmap, 'optimal', objective_value=instance.PoolObjVal)
                    solutions.append(sol)
        except Exception as e:
            print(e)
        finally:
            # Always restore the default solution selector.
            instance.params.SolutionNumber = 0

        return solutions
281 |
282 |
class BendersSlaveOptimizer(LinearSystemOptimizer):
	"""
	LinearSystemOptimizer specialization for Benders slave problems: solutions are
	projected down to the slave system's y variables only.
	"""

	def __init__(self, slave_system, hard_fail, build):
		super().__init__(slave_system, hard_fail, build)

	def optimize(self):
		"""Optimizes the slave system and returns a Solution restricted to the y variables."""
		full_solution = super().optimize()
		system = self.linear_system
		y_values = full_solution.x()[system.y_var_mask]
		projected = OrderedDict(zip(system.y_var_names, y_values))
		return Solution(value_map=projected, status=full_solution.status())
291 |
292 |
class BendersDecompositionOptimizer(object):
	"""
	Coordinates a master/slave problem pair using combinatorial Benders decomposition.
	Master candidate solutions are validated against the slave problem; each processed
	candidate generates a combinatorial cut on the master problem.
	"""

	def __init__(self, master_system, slave_system, hard_fail=False, build=True):
		"""
		Parameters
		----------
		master_system: the master LinearSystem (combinatorial part)
		slave_system: the slave LinearSystem validated against master solutions
		hard_fail: if True, solver errors during optimization are re-raised
		build: whether to build the underlying solver models immediately
		"""
		self.master, self.slave = master_system, slave_system
		t0 = time()
		self.opt_master, self.opt_slave = [opti(system, hard_fail, build) for opti, system in
										   zip([LinearSystemOptimizer, BendersSlaveOptimizer],
											   [self.master, self.slave])]
		t1 = time()
		print(t1 - t0, 'spent building the linear problems.')
		self.__set_model_parameters()
		self.master.remove_cuts()
		self.previous_cut = 0

	def _benders_iteration(self, master_sol):
		"""
		Validates one master solution against the slave problem.

		Returns the slave Solution when it is optimal; otherwise returns None. A
		combinatorial Benders cut is added to the master problem in both cases so
		the candidate is not enumerated again. Raises when the master is infeasible.
		"""
		if master_sol.status() != 'infeasible':
			t2 = time()
			self.slave.parametrize(master_sol.x())
			t3 = time()
			print(t3 - t2, 'seconds spent applying parameters to the slave problem.')

			t4 = time()
			slave_sol = self.opt_slave.optimize()
			t5 = time()
			print(t5 - t4, 'seconds spent optimizing the slave problem.')
			# print('\tSlave has solution with status',slave_sol.status())
			print('Cutting at length = ', master_sol.x().astype(bool).sum())
			if slave_sol.status() == 'optimal':
				self.master.add_combinatorial_benders_cut(master_sol.x())
				return slave_sol
			else:
				# print('\tAdding cut with dimension =',master_sol.x().sum())
				self.master.add_combinatorial_benders_cut(master_sol.x())
				return None
		else:
			raise Exception('Master problem is infeasible. No further solutions.')

	def optimize(self, max_iterations=10000, slave_pool=20):
		"""
		Iterates master/slave optimization until a feasible slave solution is found
		or max_iterations is reached.

		Parameters
		----------
		max_iterations: maximum number of Benders iterations
		slave_pool: number of master candidates to enumerate per populate call

		Returns the slave Solution, or None if no solution was found in time.
		"""
		i = 0
		r = None
		sol_stack = []
		# identity comparison with None (was `r == None`); behaviour unchanged
		while (i < max_iterations) and r is None:
			# NOTE(review): candidates are only consumed while more than one is
			# stacked; a single remaining solution triggers another populate call
			# instead of being processed - confirm this threshold is intentional.
			if len(sol_stack) > 1:
				master_sol = sol_stack.pop(0)
				r = self._benders_iteration(master_sol)
			else:
				t0 = time()
				sol_stack.extend(self.opt_master.populate(slave_pool))
				t1 = time()
				print(t1 - t0, 'spent populating the solution stack. Current length =', len(sol_stack))
			i += 1
		return r

	def __set_model_parameters(self):
		"""
		Sets the optlang Model instance's parameters with appropriate values for k-shortest enumeration
		"""
		parset_func = {'CPLEX': self.__set_model_parameters_cplex,
					   'GUROBI': self.__set_model_parameters_gurobi}

		if self.master.solver in parset_func.keys():
			parset_func[self.master.solver]()

	def __set_model_parameters_cplex(self):
		"""
		Internal method to set model parameters for the CPLEX solver. This is based on the original MATLAB code by Von
		Kamp et al.

		-------
		"""
		instance = self.master.model.problem

		instance.parameters.mip.tolerances.integrality.set(1e-9)
		# instance.parameters.mip.tolerances.mipgap.set(1e-2)
		# instance.parameters.mip.strategy.probe.set(3)
		instance.parameters.clocktype.set(1)
		instance.parameters.advance.set(1)
		instance.parameters.mip.strategy.fpheur.set(1)
		instance.parameters.emphasis.mip.set(2)
		instance.parameters.mip.pool.intensity.set(4)
		instance.parameters.mip.pool.absgap.set(0)
		instance.parameters.mip.pool.replace.set(2)

	def __set_model_parameters_gurobi(self):
		"""
		Internal method to set model parameters for the GUROBI solver. This is based on the original MATLAB code by Von
		Kamp et al.

		"""
		instance = self.master.model.problem

		instance.params.PoolGap = 0
		instance.params.MIPFocus = 2
		instance.params.MIPAbsGap = 0
		instance.params.PoolSearchMode = 2
388 |
389 |
def optimization_pool(lsystem, bound_change_list, objective_coef_list, objective_sense_list, threads=MP_THREADS):
	"""
	Optimizes a linear system under several sets of variable bounds and objectives
	using a process pool, preserving the input ordering in the result list.

	Parameters
	----------
	lsystem: the shared LinearSystem instance
	bound_change_list: list of {var_index: (lb, ub)} dicts, one per task
	objective_coef_list: list of {var_index: coefficient} dicts, one per task
	objective_sense_list: list of minimization flags, one per task
	threads: maximum number of worker processes

	Returns a list of Solution objects aligned with bound_change_list
	-------
	"""
	res_map = [None for _ in range(len(bound_change_list))]
	true_threads = min((len(bound_change_list) // 2) + 1, threads)
	# imap_unordered requires chunksize >= 1; the plain floor division yielded 0
	# whenever there were fewer tasks than threads
	it_per_job = max(1, len(bound_change_list) // threads)
	pool = _ProcessPool(
		processes=true_threads,
		initializer=_pool_initializer,
		initargs=(lsystem, bound_change_list, objective_coef_list, objective_sense_list)
	)
	# results arrive out of order; each worker returns its index for placement
	for i, value in pool.imap_unordered(_optimize_function, list(range(len(bound_change_list))),
										chunksize=it_per_job):
		res_map[i] = value

	pool.close()
	pool.join()
	return res_map
406 |
407 |
def _pool_initializer(linear_system: LinearSystem, bound_change_list, objective_coef_list, objective_sense_list):
	"""
	Process-pool initializer: publishes the shared linear system, the per-task bound
	changes and objectives, and the original variable bounds as module globals so
	each worker task (_optimize_function) can mutate and then restore solver state.
	"""
	global _linear_system, _optimizer, _bound_change_list, _vars, _orig_lb, _orig_ub, _objective_coef_list, _objective_sense_list

	_linear_system, _bound_change_list, _objective_coef_list, _objective_sense_list = \
		linear_system, bound_change_list, objective_coef_list, objective_sense_list
	_vars = _linear_system.get_model().variables
	# snapshot original bounds so each task can revert its temporary changes
	_orig_lb, _orig_ub = list(zip(*[(var.lb, var.ub) for var in _vars]))
	# only build the solver model if the system was not built already
	_optimizer = LinearSystemOptimizer(_linear_system, build=not _linear_system.was_built())
416 |
417 |
def _optimize_function(change_index):
	"""
	Worker task: applies the bound changes and objective stored at <change_index>
	(see _pool_initializer globals), optimizes the shared linear system, restores
	the original bounds and returns (change_index, Solution).
	"""
	global _linear_system, _optimizer, _bound_change_list, _vars, _orig_lb, _orig_ub, _objective_coef_list, _objective_sense_list
	# unpack this task's temporary bounds and the variables they apply to
	var_ids, bounds = list(zip(*list(_bound_change_list[change_index].items())))
	chg_vars = _linear_system.get_stuff(index=var_ids, what='var')
	lb, ub = list(zip(*bounds))
	# original bounds for the same variables, used to revert after optimizing
	olb, oub = [[l[k] for k in var_ids] for l in [_orig_lb, _orig_ub]]

	obj_var_ids, obj_coefs = list(zip(*list(_objective_coef_list[change_index].items())))

	obj_sense = _objective_sense_list[change_index]
	obj_vars = _linear_system.get_stuff(index=obj_var_ids, what='var')

	_linear_system.set_objective(coefficients=obj_coefs, minimize=obj_sense, vars=obj_vars)
	_linear_system.set_variable_bounds(chg_vars, lb, ub)
	sol = _optimizer.optimize()
	# revert the bound changes so the shared system is clean for the next task
	_linear_system.set_variable_bounds(chg_vars, olb, oub)

	return change_index, sol
436 |
437 |
class BatchOptimizer(object):
	"""
	Convenience wrapper around optimization_pool for repeatedly optimizing a fixed
	linear system under varying bounds and objectives.
	"""

	def __init__(self, linear_system: LinearSystem, threads=MP_THREADS):
		# keep the system and thread count for later batch_optimize calls
		self.__system = linear_system
		self.__nthreads = threads

	def batch_optimize(self, bounds, objective_coefs, objective_senses):
		"""
		Runs one optimization per (bounds, objective_coefs, objective_senses) triple.
		All three lists must have the same length. Returns a list of Solution objects.
		"""
		n_problems = len(bounds)
		assert n_problems == len(objective_coefs) == len(objective_senses)
		return optimization_pool(self.__system, bounds, objective_coefs, objective_senses,
								 threads=self.__nthreads)
447 |
448 |
class KShortestSolution(Solution):
	"""
	A Solution subclass that also contains attributes suitable for elementary flux modes such as non-cancellation sums
	of split reactions and reaction activity.
	"""
	SIGNED_INDICATOR_SUM = 'signed_indicator_map'
	SIGNED_VALUE_MAP = 'signed_value_map'

	def __init__(self, value_map, status, indicator_map, dvar_mapping, dvars, **kwargs):
		"""

		Parameters

		----------

		value_map: A dictionary mapping variable names with values

		status: Solution status string, forwarded to the Solution superclass

		indicator_map: A dictionary mapping decision variables to their indicator variables

		dvar_mapping: A mapping between reaction indices and solver variables (Tuple[str] or str)

		dvars: Sequence of decision variable names indexed by dvar_mapping entries

		kwargs: Extra keyword arguments forwarded to the Solution superclass

		"""
		# net flux per reaction: forward minus reverse for split (tuple) reactions,
		# plain value otherwise
		signed_value_map = {
			i: value_map[dvars[varlist[0]]] - value_map[dvars[varlist[1]]] if isinstance(varlist, (tuple, list)) else
			value_map[dvars[varlist]] for
			i, varlist in dvar_mapping.items()}
		# same construction over the indicator variables, yielding signed activity
		signed_indicator_map = {
			i: value_map[indicator_map[dvars[varlist[0]]]] - value_map[indicator_map[dvars[varlist[1]]]] if isinstance(
				varlist,
				(tuple, list)) else
			value_map[indicator_map[dvars[varlist]]] for
			i, varlist in dvar_mapping.items()}
		super().__init__(value_map, status, **kwargs)
		self.set_attribute(self.SIGNED_VALUE_MAP, signed_value_map)
		self.set_attribute(self.SIGNED_INDICATOR_SUM, signed_indicator_map)

	def get_active_indicator_varids(self):
		"""

		Returns the indices of active indicator variables (maps with variables on the original stoichiometric matrix)

		-------

		"""
		# 1e-9 tolerance filters numerical noise around zero
		return [k for k, v in self.attribute_value(self.SIGNED_INDICATOR_SUM).items() if abs(v) > 1e-9]
498 |
--------------------------------------------------------------------------------
/src/cobamp/core/transformer.py:
--------------------------------------------------------------------------------
1 | import abc
2 | from copy import deepcopy
3 | from itertools import product
4 |
5 | from cobamp.core.models import ConstraintBasedModel
6 |
7 |
class ModelTransformer(object):
	"""
	Abstract base for transformations over constraint-based models or raw
	stoichiometric arrays. Subclasses implement transform_array.
	"""
	# NOTE(review): __metaclass__ is Python 2 syntax and has no effect on Python 3;
	# kept to avoid changing instantiation behaviour of existing subclasses.
	__metaclass__ = abc.ABCMeta

	def transform(self, args, properties):
		"""
		Applies this transformation to either a dict of arrays or a ConstraintBasedModel.

		Parameters
		----------
		args: a dict with 'S', 'lb', 'ub' keys, or a ConstraintBasedModel
		properties: transformation properties; for models, 'block'/'keep' entries
		are decoded from reaction identifiers into indices

		Returns the result of transform_array for dict input, or a
		(model, mapping, metabolites) triple for model input.
		-------
		"""
		# NOTE: this was previously decorated @staticmethod while taking `self` and
		# calling self.transform_array, which misbound arguments on instance calls;
		# it is a regular method now.

		# args must be:
		# - a dict with 'S', 'lb', 'ub' keys
		# - a ConstraintBasedModel
		if isinstance(args, dict):
			assert len(set(args.keys()) & {'S', 'lb', 'ub'}) == len(set(args.keys())), 'args must contain at least S' + \
																					   ', lb, and ub key-value pairs'

			S, lb, ub = [args[k] for k in ['S', 'lb', 'ub']]
			return self.transform_array(S, lb, ub, properties)

		elif isinstance(args, ConstraintBasedModel):
			S = args.get_stoichiometric_matrix()
			lb, ub = args.get_bounds_as_list()
			new_properties = deepcopy(properties)

			# translate reaction identifiers into indices for the array transform
			for k in ['block', 'keep']:
				if new_properties[k] is not None:
					new_properties[k] = [args.decode_index(r, 'reaction') for r in properties[k]]

			Sn, lbn, ubn, mapping, metabs = self.transform_array(S, lb, ub, new_properties)

			# compressed reactions are renamed by joining their member names
			reaction_names_new = [
				new_properties['reaction_id_sep'].join([args.reaction_names[i] for i in mapping.from_new(i)]) for i in
				range(len(lbn))]
			modeln = ConstraintBasedModel(
				S=Sn,
				thermodynamic_constraints=[list(k) for k in list(zip(lbn, ubn))],
				reaction_names=reaction_names_new,
				metabolite_names=[args.metabolite_names[k] for k in metabs]
			)

			return modeln, mapping, metabs

	@staticmethod
	@abc.abstractmethod
	def transform_array(S, lb, ub, properties):
		"""
		Transforms raw stoichiometric data; must be implemented by subclasses.

		Must return: new S matrix, new lower/upper bounds, a ReactionIndexMapping
		between original and transformed columns, and the kept metabolite indices.
		"""
		## TODO: implement

		# mapping = ReactionIndexMapping({}, {})
		# metabs = []
		# return S, lb, ub, mapping, metabs
		return
60 |
61 |
class ReactionIndexMapping(object):
	"""
	Bidirectional mapping between reaction indices of an original model and the
	indices of its transformed (e.g. compressed) counterpart.
	"""

	def __init__(self, otn, nto):
		# otn: original-to-new index map; nto: new-to-original index map
		self.otn, self.nto = otn, nto

	def from_original(self, idx):
		"""Returns the new index/indices associated with an original reaction index."""
		return self.otn[idx]

	def from_new(self, idx):
		"""Returns the original index/indices associated with a new reaction index."""
		return self.nto[idx]

	def multiply(self, new_ids):
		"""Cartesian product of the original indices mapped from the unique new indices."""
		expanded = [self.from_new(new_id) for new_id in set(new_ids)]
		return list(product(*expanded))
75 |
--------------------------------------------------------------------------------
/src/cobamp/gpr/__init__.py:
--------------------------------------------------------------------------------
1 | from . import core
2 |
--------------------------------------------------------------------------------
/src/cobamp/gpr/core.py:
--------------------------------------------------------------------------------
1 | import warnings
2 | import ast
3 | from _ast import Name
4 |
5 | from itertools import chain
6 | from numpy import zeros
7 | from boolean.boolean import BooleanAlgebra
8 | import re
9 |
# Matches candidate gene identifiers inside a GPR string while skipping the
# boolean operator keywords (and/or, case-insensitive variants listed explicitly).
GPR_GENE_RE = re.compile("\\b(?!and\\b|or\\b|AND\\b|OR\\b)[([._A-z0-9]*")

# Placeholder tokens used to turn gene identifiers into parser-safe symbols.
GPR_DOT = '__COBAMPGPRDOT__'
GPR_UNDERSCORE = '__COBAMPGPRUNDERSCORE__'


def symbol_encode(match_str):
	"""Encodes a gene identifier as a safe symbol: leading underscore, dots escaped."""
	if match_str == '':
		return ''
	return '_' + match_str.replace('.', GPR_DOT)


def symbol_decode(match_str):
	"""Inverts symbol_encode: drops the leading underscore and restores dots."""
	if match_str == '':
		return ''
	return match_str[1:].replace(GPR_DOT, '.')
17 |
18 |
def logical_or(x):
	"""Evaluates an OR clause: active when the activation values in x sum to one or more."""
	total = sum(x)
	return total >= 1
21 |
def logical_and(x):
	"""Evaluates an AND clause: active only when every activation value in x is active."""
	values = list(x)
	return sum(values) == len(values)
24 |
def aux_apply(fx, it):
	"""Applies fx to the non-None elements of it; returns None when nothing remains."""
	filtered = [value for value in it if value is not None]
	if not filtered:
		return None
	return fx(filtered)
28 |
def normalize_boolean_expression(rule, simplify=False):
	"""
	Normalizes a boolean rule string into disjunctive-style form using the
	`boolean` package, returning it with 'and'/'or' connectives. On any parsing
	or normalization failure the rule is returned unchanged (best-effort), with
	a warning.
	"""
	BOOLEAN_ALGEBRA = BooleanAlgebra()
	try:
		expr = BOOLEAN_ALGEBRA.parse(rule).literalize().simplify()
		# Simplify first otherwise _rdistributive() may take forever.
		operation_example = BOOLEAN_ALGEBRA.OR(BOOLEAN_ALGEBRA.TRUE, BOOLEAN_ALGEBRA.FALSE)
		expr = BOOLEAN_ALGEBRA._rdistributive(expr, operation_example)
		if simplify:
			expr = expr.simplify()
		#bool_expression = BOOLEAN_ALGEBRA.normalize(expression, BOOLEAN_ALGEBRA.OR)
		# translate the package's operators back into GPR-style connectives
		return str(expr).replace('&', ' and ').replace('|', ' or ')
	except Exception as e:
		# deliberate best-effort: a rule that cannot be normalized is kept as-is
		warnings.warn('Could not normalize this rule: ' + rule)
		return rule
43 |
def convert_gpr_to_list(gpr, apply_fx=str, or_char='or', and_char='and'):
	"""
	Converts a GPR string into a list of lists: the outer list holds OR-separated
	clauses (isoenzymes) and each inner list holds the AND-separated gene names.

	Parameters
	----------
	gpr: the gene-protein-reaction association string
	apply_fx: callable applied to the GPR string before splitting (default str)
	or_char: token separating OR clauses
	and_char: token separating AND'ed genes inside a clause

	Returns a list of lists of gene identifier strings
	-------
	"""
	clauses = []
	for clause in apply_fx(gpr).split(or_char):
		clauses.append(clause.strip().replace('(', '').replace(')', ''))
	return [[gene.strip() for gene in clause.split(and_char) if gene.strip() != '']
			for clause in clauses]
53 |
54 |
class GPRContainer(object):
	"""
	Stores and evaluates gene-protein-reaction (GPR) association rules.

	Rules are kept both as normalized strings and as lists of lists (outer list:
	OR-separated isoforms; inner lists: AND-separated genes), supporting boolean
	evaluation of reaction activity from gene states.
	"""

	def __init__(self, gpr_list, apply_fx=str, or_char='or', and_char='and', ttg_ratio=20):
		"""
		Parameters
		----------
		gpr_list: iterable of GPR strings, one per reaction
		apply_fx: callable applied to each rule prior to parsing
		or_char: token separating OR clauses
		and_char: token separating AND'ed genes
		ttg_ratio: token-to-gene ratio above which rules are not normalized
		"""
		self.apply_fx = apply_fx
		self.__or_char, self.__and_char = or_char, and_char
		self.ttg_ratio = ttg_ratio
		self.__initialize(gpr_list)

	def __initialize(self, gpr_list):
		# reset internal containers and (re)load every rule
		self.__gprs = []
		self.__gpr_list = []
		self.add_gprs(gpr_list)
		self.__update_genes()

	def add_gprs(self, gpr_list):
		"""Parses and appends a list of GPR strings to the container."""
		gprs = [self.__preprocess_gprs(gp, token_to_gene_ratio=self.ttg_ratio) if gp != '' else ''
				for gp in gpr_list]
		gpr_list = [convert_gpr_to_list(g, apply_fx=str, or_char=self.__or_char, and_char=self.__and_char) for g in
					gprs]

		self.__gprs.extend(gprs)
		self.__gpr_list.extend(gpr_list)
		self.__update_genes()

	def remove_gprs(self, indices):
		"""Removes the GPRs at the given indices and refreshes the gene index."""
		self.__gprs, self.__gpr_list = zip(*[(self.__gprs[i], self.__gpr_list[i]) for i in range(len(self.__gprs))
											 if i not in indices])
		self.__gprs, self.__gpr_list = [list(k) for k in [self.__gprs, self.__gpr_list]]
		self.__update_genes()

	def __update_genes(self):
		# rebuild the gene tuple, its index mapping and the encoded-symbol decoder
		self.__genes = tuple(set(list(chain(*chain(*self.__gpr_list)))))
		self.__gene_to_index_mapping = dict(zip(self.__genes, range(len(self.__genes))))
		self.__safe_gpr_decoder = {symbol_encode(g): g for g in self.get_genes()}

	def get_safe_gpr_decoder(self):
		"""Returns the mapping from encoded gene symbols back to original gene names."""
		return self.__safe_gpr_decoder

	def get_ast(self, index, decode=False):
		"""
		Returns the Python AST node for the GPR rule at <index>. Gene identifiers
		are encoded into parser-safe symbols before parsing; when decode is True,
		Name nodes are rewritten back to the original gene identifiers.
		"""
		gpr = self[index]
		encoded = GPR_GENE_RE.sub(lambda x: symbol_encode(x.group()), gpr)
		# parse before walking: the previous implementation referenced an undefined
		# `tree` (and a module-level `gpr_eval`) when decode=True, raising NameError
		tree = ast.parse(encoded).body[0]
		if decode:
			for node in filter(lambda x: isinstance(x, Name), ast.walk(tree)):
				node.id = self.get_safe_gpr_decoder()[node.id]
		return tree

	def __preprocess_gprs(self, gpr_str, token_to_gene_ratio=20):
		"""
		Encodes gene identifiers in a rule and normalizes its boolean structure,
		unless the average number of tokens per unique gene exceeds
		token_to_gene_ratio (normalization can be extremely slow in that case).
		"""
		def fix_name(gpr_string):
			# non-empty regex matches are candidate gene tokens
			matches = [k for k in GPR_GENE_RE.finditer(gpr_string) if k.span()[0] - k.span()[1] != 0]
			# use the matched token (m.group()); m.string is the whole input string,
			# which previously made every rule appear to contain a single unique gene
			unique_tokens = set([m.group() for m in matches])
			return GPR_GENE_RE.sub(lambda x: symbol_encode(x.group()), gpr_string), len(matches), len(unique_tokens), unique_tokens

		rule, num_matches, num_unique_tokens, unique_tokens = fix_name(gpr_str)
		if self.apply_fx:
			rule = self.apply_fx(rule)
		if (num_unique_tokens > 0) and (num_matches // num_unique_tokens) < token_to_gene_ratio:
			rule = normalize_boolean_expression(rule)
		else:
			warnings.warn(
				'Will not normalize rules with more than ' + str(token_to_gene_ratio) + ' average tokens per gene')

		return GPR_GENE_RE.sub(lambda x: symbol_decode(x.group()), rule)

	def __len__(self):
		return len(self.__gpr_list)

	def __getitem__(self, item):
		return self.__gprs[item]

	def gpr_has_string(self, index, string):
		"""True if the stored GPR string at <index> contains <string>."""
		return string in self.__gprs[index]

	def get_gpr_as_lists(self, index):
		"""Returns the GPR at <index> as a list of AND-lists (OR between them)."""
		return self.__gpr_list[index]

	def or_char(self):
		return self.__or_char

	def and_char(self):
		return self.__and_char

	def get_genes(self):
		"""Returns a tuple with every gene found across all stored GPRs."""
		return self.__genes

	def eval_gpr(self, index, state, or_fx=logical_or, and_fx=logical_and):
		"""
		Evaluates the GPR at <index> against <state>, a dict mapping gene names to
		activation values. Genes missing from <state> are ignored.
		"""
		return aux_apply(or_fx,
						 [aux_apply(and_fx, [state[x] for x in gs if x in state.keys()]) for gs in
						  self.__gpr_list[index]])

	def associated_genes(self, index):
		"""Returns the list of genes involved in the GPR at <index>."""
		return list(chain(*self.__gpr_list[index]))

	def associated_gene_matrix(self):
		"""Returns a genes-by-reactions binary matrix marking gene participation."""
		B = zeros([len(self.__genes), len(self.__gpr_list)])
		row_ind, col_ind = [], []
		for i in range(len(self)):
			gene_indexes = [self.__gene_to_index_mapping[k] for k in self.associated_genes(i)]
			row_ind += gene_indexes
			col_ind += [i] * len(gene_indexes)
		B[row_ind, col_ind] = 1
		return B
159 |
if __name__ == '__main__':
	# Demonstration: parse a set of GPR strings and print their list-of-lists
	# (OR of AND-clauses) representation, then build an AST for the first rule.
	gprs = [
		'11.1 and G2 or G2.7',
		'G3 or G4',
		'(G1 and G2) or (G3 and G4)',
		'G5',
		'G6'
	]
	# gprs = [model.reactions[i].gene_reaction_rule for i in range(len(model.reactions))]
	gpr_eval = GPRContainer(gprs)
	for i in range(len(gprs)):
		print(gprs[i], gpr_eval.get_gpr_as_lists(i))

	tree = gpr_eval.get_ast(0)
174 |
--------------------------------------------------------------------------------
/src/cobamp/gpr/integration.py:
--------------------------------------------------------------------------------
1 | from numpy import where, zeros, array, vstack, unique, eye, hstack, ones
2 | from cobamp.wrappers.method_wrappers import KShortestGenericMCSEnumeratorWrapper
3 | from cobamp.core.models import ConstraintBasedModel
4 | from functools import reduce
5 |
def filled_vector(dim, index, fill_value=1):
	"""Returns a zero vector of length <dim> with <fill_value> written at <index>."""
	vec = zeros(dim)
	vec[index] = fill_value
	return vec
10 |
11 |
class GeneMatrixBuilder(object):
	"""
	Builds gene/reaction association matrices (G and F) from a GPRContainer,
	reducing OR/AND rules into irreducible gene combinations via MCS enumeration
	on a dual GPR model.
	"""

	def __init__(self, gpr_evaluator):
		# gpr_evaluator: a GPRContainer holding the model's GPR rules
		self.__gpr_evaluator = gpr_evaluator
		self.__B, self.__b = self.get_association_matrix()

	def get_association_matrix(self):
		"""Returns the genes-by-reactions binary matrix B and its per-reaction column sums b."""
		B = self.__gpr_evaluator.associated_gene_matrix()
		b = B.sum(axis=0)
		return B, b

	@staticmethod
	def gpr_single_gene(gpr_list):
		# rule shape: [[g1]] - a single gene
		return len(gpr_list) == 1 and len(gpr_list[0]) == 1

	@staticmethod
	def gpr_only_and_rules(gpr_list):
		# rule shape: [[g1, g2, ...]] - a single AND clause
		return len(gpr_list) == 1 and len(gpr_list[0]) > 1

	@staticmethod
	def gpr_only_or_rules(gpr_list):
		# rule shape: [[g1], [g2], ...] - OR of single genes (no clause has >1 gene)
		return len(gpr_list) > 1 and not (False in ([len(g) <= 1 for g in gpr_list]))

	def optimize_dual_gpr_graph(self, F):
		"""
		Enumerates minimal gene combinations for the complex-by-gene matrix F by
		running MCS enumeration over a dual GPR model built from it. Returns a list
		of index arrays (one per enumerated solution).
		"""
		# S -> H -> subset of G
		cbm = self.get_gpr_model(F)
		dual_mat = zeros([F.shape[1],len(cbm.reaction_names)])
		dual_mat[:F.shape[1],:F.shape[1]] = eye(F.shape[1])
		wrp = KShortestGenericMCSEnumeratorWrapper(
			model=cbm, target_flux_space_dict={'GPRs':(1,None)}, target_yield_space_dict={},
			dual_matrix=dual_mat, dual_var_mapper={i:i for i in range(F.shape[1])}, stop_criteria=F.shape[1],
			algorithm_type='kse_populate'
		)
		enum = wrp.get_enumerator()
		# flatten the per-size solution batches and keep only the variable indices
		sols = list(map(lambda d: array(list(d.keys())),reduce(lambda x,y: x+y, enum)))
		return sols

	@staticmethod
	def get_gpr_model(F):
		"""
		Builds a ConstraintBasedModel encoding the gene (SG*), complex (SC*/OC*) and
		aggregate (GPRs) structure implied by the complex-by-gene matrix F.
		"""
		c,g = F.shape

		S = vstack([hstack(r) for r in [
			[eye(g), -F.T, zeros([g,c+1])],
			[zeros([c, g]), eye(c), -eye(c), zeros([c, 1])],
			[zeros([1, c+g]), ones([1,c]), -ones([1,1])]
		]])

		bounds = [[0, None] for _ in range(S.shape[1])]
		mn = ['MG'+str(i) for i in range(g)] + ['MC'+str(i) for i in range(c)] + ['GPR']
		rn = ['SG'+str(i) for i in range(g)] + ['SC'+str(i) for i in range(c)] + ['OC'+str(i) for i in range(c)] + \
			 ['GPRs']
		return ConstraintBasedModel(S, bounds, reaction_names=rn, metabolite_names=mn)

	def get_GF_matrices(self):
		"""
		Builds the reaction (G) and gene-combination (F) matrices from the stored
		GPRs, deduplicating gene sets and computing dependency/weight metadata.

		Returns [Gm, Fm] as int arrays plus irreducible_gene_map, F_deps and weights
		(five values in total).
		"""
		gpr_eval = self.__gpr_evaluator
		genes = dict(zip(gpr_eval.get_genes(), range(len(gpr_eval.get_genes()))))
		total_g, total_f = [], []
		# NOTE(review): the last GPR is skipped by this range(... - 1); presumably it
		# relies on a trailing sentinel entry in the container - confirm before reuse.
		for i in range(len(gpr_eval) - 1):
			gpr_as_list = gpr_eval.get_gpr_as_lists(i)
			# AND means new row
			# OR means all active indices in the same row
			if not (len(gpr_as_list) == 0 or (len(gpr_as_list) == 1 and len(gpr_as_list[0]) == 0)):
				# alt_case flags: [single gene, pure AND, pure OR]
				alt_case = [f(gpr_as_list) for f in
							[self.gpr_single_gene, self.gpr_only_and_rules, self.gpr_only_or_rules]]
				cur_g, cur_f = [], []
				if not (True in alt_case) and len(gpr_as_list) > 0:
					# mixed AND/OR rule: enumerate irreducible gene combinations
					# NOTE: `complex` shadows the builtin; kept as-is
					for complex in gpr_as_list:
						if len(complex) > 0:
							flist = filled_vector(len(genes), [genes[g] for g in complex], fill_value=1)
							glist = filled_vector(len(gpr_eval), [i], fill_value=1)
							cur_g.append(glist)
							cur_f.append(flist)

					F_sub_full = vstack(cur_f)
					# restrict to the genes actually used before the dual optimization
					F_genes = unique(F_sub_full.nonzero()[1])
					F_temp = F_sub_full[:, F_genes]
					row_inds = [F_genes[k] for k in self.optimize_dual_gpr_graph(F_temp)]
					cur_f = [filled_vector(len(genes), inds, fill_value=1) for inds in row_inds]
					cur_g = [filled_vector(len(gpr_eval), [i], fill_value=1) for _ in row_inds]
				else:
					if len(gpr_as_list) > 0 and len(gpr_as_list[0]) > 0:
						if alt_case[2]:
							# pure OR: one row with every alternative gene
							flist = [filled_vector(len(genes), [genes[c[0]] for c in gpr_as_list], fill_value=1)]
							glist = [filled_vector(len(gpr_eval), [i], fill_value=1)]
						elif alt_case[1]:
							# pure AND: one row per gene of the single clause
							flist = [filled_vector(len(genes), [genes[g]], fill_value=1) for g in gpr_as_list[0]]
							glist = [filled_vector(len(gpr_eval), [i], fill_value=1)]*len(gpr_as_list[0])
						elif alt_case[0]:
							# single gene
							flist = [filled_vector(len(genes), [genes[gpr_as_list[0][0]]], fill_value=1)]
							glist = [filled_vector(len(gpr_eval), [i], fill_value=1)]
						else:
							flist, glist = [], []
						cur_g.extend(glist)
						cur_f.extend(flist)
				total_f.extend(cur_f)
				total_g.extend(cur_g)

		G, F = [vstack(l).astype(bool) for l in [total_g, total_f]]

		# deduplicate gene combinations; merge reaction memberships of duplicates
		Fm, old_indices, reverse = unique(F, axis=0, return_index=True, return_inverse=True)
		Frev_dict = {k: where(reverse == k)[0].tolist() for k in unique(reverse)}
		Gm = vstack([G[idx, :].any(axis=0) for k, idx in Frev_dict.items()])
		rev_gmap = {v: k for k, v in genes.items()}
		# human-readable identifier for each irreducible gene combination
		irreducible_gene_map = {' and '.join(rev_gmap[k] for k in row.nonzero()[0]): i
								for i, row in enumerate(Fm)}

		gene_sets = [set(f.nonzero()[0]) for f in Fm]
		weights = [len(g) for g in gene_sets]
		# F_deps[i]: indices of gene sets fully contained in gene set i
		F_deps = {i: {j for j, h in enumerate(gene_sets) if len(h & g) == len(g) and i != j} for i, g in
				  enumerate(gene_sets)}


		return [x.astype(int) for x in [Gm, Fm]] + [irreducible_gene_map, F_deps, weights]
124 |
125 |
126 |
if __name__ == '__main__':
	# Demonstration: build G/F matrices for a small set of GPR rules.
	from cobamp.gpr.core import GPRContainer, logical_and, logical_or

	gprs = [
		'g1',
		'g2',
		'g2',
		'g3 and g4',
		'g2 and g5',
		'g3 or g6',
		'(g2 and (g5 or g6)) or g7',
		''
	]
	gpr_eval = GPRContainer(gprs)
	gmb = GeneMatrixBuilder(gpr_eval)
	# get_GF_matrices returns five values ([Gm, Fm] + [gene_map, F_deps, weights]);
	# the previous three-way unpacking raised ValueError
	G, F, gene_mapping, F_deps, weights = gmb.get_GF_matrices()
	ord_map = [gene_mapping['g'+str(i)] for i in range(1,8)]
	G[:,ord_map]
	F[:,ord_map]
	# revmap = {v:k for k,v in gene_mapping.items()}
	# [[revmap[k] for k in r.nonzero()[0]] for r in F]
148 |
149 |
150 |
151 |
--------------------------------------------------------------------------------
/src/cobamp/nullspace/__init__.py:
--------------------------------------------------------------------------------
1 | from . import nullspace, subset_reduction
2 |
--------------------------------------------------------------------------------
/src/cobamp/nullspace/nullspace.py:
--------------------------------------------------------------------------------
1 | from numpy import abs, where, compress, concatenate, ones, random, sign
2 | from numpy.linalg import svd
3 |
4 | EPSILON = 1e-10
5 | PRECISION = 1e-10
6 |
7 |
def compute_nullspace(A, eps=1e-9, left=True):
	"""
	Computes the nullspace of the matrix A via singular value decomposition.

	Parameters

	----------

	A: A 2D-ndarray

	eps: Tolerance below which a singular value is treated as zero

	left: A boolean value indicating whether the result is the left nullspace (right if False)

	Returns the nullspace of A as a 2D ndarray
	-------

	"""
	u, s, v = svd(A)
	# svd yields at most min(m, n) singular values, but the basis being masked has
	# A.shape[0] vectors (rows of u.T) for the left nullspace and A.shape[1] for the
	# right one. The original padding always used A.shape[1], truncating the left
	# nullspace whenever m > n; pad according to the requested side instead.
	basis_size = A.shape[0] if left else A.shape[1]
	padding = max(0, basis_size - s.shape[0])
	mask = concatenate(((s <= eps), ones((padding,), dtype=bool)), axis=0)
	return compress(mask, u.T, 0) if left else compress(mask, v.T, 1)
30 |
31 |
def nullspace_blocked_reactions(K, tolerance):
	"""

	Parameters

	----------

	K: A nullspace matrix as a 2D ndarray (rows correspond to reactions)

	tolerance: Tolerance value for 0

	-------

	Returns indices of the rows of K where all values are 0


	"""
	# `sum` is the Python builtin iterating over the rows of K.T, producing a
	# per-row (of K) count of entries below the tolerance.
	# NOTE(review): per the docstring, an all-zero row of K should have K.shape[1]
	# near-zero entries; comparing against K.shape[0] only matches for square K,
	# so for non-square kernels this always returns an empty result - confirm the
	# intended axis before relying on this.
	return where(sum(abs(K.T) < tolerance) == K.shape[0])[0]
50 |
51 |
if __name__ == '__main__':
	import profile

	# Benchmark: profiles the right-nullspace computation of a large random sign
	# matrix (5000 x 10000) - this requires substantial time and memory.
	#
	# A = array([[2, 3, 5], [-4, 2, 3], [0, 0, 0]])
	# nullspace = compute_nullspace(A, left=False)

	Ar = random.rand(5000, 10000) - 0.5
	Ar[(-0.4 < Ar) & (0.4 > Ar)] = 0
	A = sign(Ar)
	profile.run('nullspace = compute_nullspace(A, left=False)')
63 |
--------------------------------------------------------------------------------
/src/cobamp/nullspace/subset_reduction.py:
--------------------------------------------------------------------------------
1 | """
2 | Inspired by Metatool's code
3 | """
4 | from itertools import chain
5 |
6 | from numpy import sqrt, triu, logical_not, nonzero, mean, zeros, argmin, isin, sign, delete, unique, where, \
7 | dot, eye, setdiff1d, ndarray, array
8 | from numpy.linalg import norm
9 | from cobamp.core.transformer import ModelTransformer, ReactionIndexMapping
10 | from cobamp.utilities.property_management import PropertyDictionary
11 | from cobamp.nullspace.nullspace import compute_nullspace, nullspace_blocked_reactions
12 |
13 | EPSILON = 2 ** -52
14 | PRECISION = 1e-10
15 |
16 |
def subset_reduction(S, irrev, to_remove=[], to_keep_single=[]):
	"""
	Reduces a stoichiometric matrix using nullspace analysis by identifying linearly dependent (enzyme) subsets.
	These reactions are then compressed.

	Parameters
	----------
	S: Stoichiometric matrix as an ndarray.

	irrev: A boolean array with size equal to the number of columns in the S matrix.

	to_remove: A list of indices specifying columns of the S matrix to remove before the compression (usually blocked
	reactions)

	to_keep_single: A list of indices specifying columns of the S matrix not to compress.

	Returns rd, sub, irrev_reduced, rdind, irrv_subsets, kept_reactions, kernel, correlation_matrix

	rd : compressed stoichiometric matrix -> numpy.array

	sub : subset matrix, n-subsets by n-reactions -> numpy.array

	irrev_reduced : subset reversibilities -> numpy.array of type bool

	rdind : metabolite indices -> numpy.array of type int

	irrv_subsets : same as sub, but list if empty

	kept_reactions : indexes for reactions used in the network compression

	kernel : numpy.array with the right nullspace of S

	correlation_matrix : numpy.array with reaction correlation matrix
	-------

	"""
	m, n = S.shape

	keep_single = array([False] * n)
	keep_single[to_keep_single] = True

	kept_reactions = array([True] * n)
	kept_reactions[to_remove] = False
	kept_reactions = where(kept_reactions)[0]

	# NOTE(review): this sums the kept reaction *indices*, not their count -
	# presumably len(kept_reactions) was intended; confirm before changing.
	ktol = EPSILON * sum(kept_reactions)

	kernel = compute_nullspace(S[:, kept_reactions], ktol, False)
	kernel_blocked = nullspace_blocked_reactions(kernel, ktol)

	if kernel_blocked.shape[0] > 0:
		# kernel_blocked holds row positions *within kept_reactions* (kernel rows
		# follow the kept columns). The previous code subtracted them from the
		# reaction ids themselves and used the resulting ids as positions, which
		# could index out of bounds; drop the blocked positions instead.
		kept_reactions = delete(kept_reactions, kernel_blocked)
		kernel = compute_nullspace(S[:, kept_reactions], ktol, False)

	correlation_matrix = subset_candidates(kernel)
	S_scm = S[:, kept_reactions]
	irrev_scm = irrev[kept_reactions]
	# positions (within the kept set) of reactions that must stay uncompressed
	scm_kp_ids = where([keep_single[kept_reactions]])[1]

	sub, irrev_reduced, irrv_subsets = subset_correlation_matrix(S_scm, kernel, irrev_scm, correlation_matrix,
																 scm_kp_ids)
	# re-expand the subset matrices to the original column space when columns were removed
	if len(kept_reactions) < n:
		temp = zeros([sub.shape[0], n])
		temp[:, kept_reactions] = sub
		sub = temp
		if len(irrv_subsets) > 0:
			temp = zeros([len(irrv_subsets), n])
			temp[:, kept_reactions] = irrv_subsets
			irrv_subsets = temp
	rd, rdind, irrev_reduced_final, sub = reduce(S, sub, irrev_reduced)

	return rd, sub, irrev_reduced_final, rdind, irrv_subsets, kept_reactions, kernel, correlation_matrix
89 |
90 |
def subset_candidates(kernel, tol=None):
	"""
	Computes a matrix of subset candidates from the nullspace of the S matrix

	Parameters

	----------

	kernel: Nullspace of the S matrix (rows correspond to reactions)

	tol: Tolerance to 0. Defaults to kernel.shape[0] * EPSILON.

	Returns a 2D triangular ndarray of signs marking fully (anti-)correlated pairs
	-------

	"""
	if tol is None:
		tol = kernel.shape[0] * EPSILON
	correlation = dot(kernel, kernel.T)
	n_rows = kernel.shape[0]
	# normalize each upper-triangle entry into a correlation coefficient;
	# the diagonal is set to 1 row by row, after its off-diagonal entries are used
	for row in range(n_rows):
		for col in range(row + 1, n_rows):
			correlation[row, col] = correlation[row, col] / sqrt(correlation[row, row] * correlation[col, col])
		correlation[row, row] = 1
	correlation = triu(correlation)
	# zero-out any pair that is not perfectly correlated within the tolerance
	correlation[abs(abs(correlation) - 1) >= tol] = 0
	return sign(correlation)
116 |
117 |
def subset_correlation_matrix(S, kernel, irrev, cr, keepSingle=None):
	"""

	Parameters
	----------
	S: Stoichiometric matrix as ndarray

	kernel: The nullspace of S

	irrev: List of booleans representing irreversible reactions (when True)

	cr: The subset candidate matrix, computed using subset_candidates

	keepSingle: List of reaction indices that will not be compressed.

	Returns sub, irrev_sub, irrev_violating_subsets

	sub : subset matrix, n-subsets by n-reactions -> numpy.array

	irrev_sub : subset reversibilities -> numpy.array of type bool

	irrev_violating_subsets : same as sub, but list if empty. Contains subsets discarded due to irreversibility faults
	-------

	"""
	m, n = S.shape
	in_subset = array([False] * n)
	irrev_sub = array([False] * cr.shape[0])
	sub = zeros([cr.shape[0], n])
	irrev_violating_subsets = []
	sub_count = 0
	# reactions flagged as keep-single become one singleton subset each
	if (keepSingle is not None) and (len(keepSingle) > 0):
		# keepSingle = array([])
		irrev_violating_subsets = []
		sub[:len(keepSingle), keepSingle] = eye(len(keepSingle))
		irrev_sub[:len(keepSingle)] = irrev[keepSingle]
		in_subset[keepSingle] = True
		sub_count = len(keepSingle)
	# walk the candidate matrix from the last column backwards, grouping each
	# set of correlated reactions not already assigned into a new subset
	for i in range(cr.shape[0] - 1, -1, -1):
		reactions = where(cr[:, i] != 0)[0]
		in_subset_indexes = where(in_subset)[0]
		in_subset_reactions = isin(reactions, in_subset_indexes)
		reactions = reactions[logical_not(in_subset_reactions)]
		if len(reactions) > 0:
			in_subset[reactions] = True
			# a subset is irreversible if any member reaction is
			irrev_sub[sub_count] = (irrev[reactions]).any()

			if len(reactions) == 1:
				sub[sub_count, reactions] = 1
			else:
				# weight members by their kernel row norms, scaled by the member
				# whose norm is closest to the mean, signed by the correlation
				lengths = norm(kernel[reactions, :], axis=1)
				min_ind = argmin(abs(lengths - mean(lengths)))
				lengths /= lengths[min_ind]
				sub[sub_count, reactions] = lengths * cr[reactions, i]
			sub_count += 1

	# trim the preallocated rows down to the subsets actually created
	sub = sub[:sub_count, :]
	irrev_sub = irrev_sub[:sub_count]

	# flip subsets whose irreversible members carry negative coefficients
	ind = unique(where(sub[:, irrev] < 0)[0])
	if len(ind) > 0:
		sub[ind, :] = -sub[ind, :]
		# subsets still violating irreversibility after flipping are removed
		ind = unique(where(sub[:, irrev] < 0)[0])
		if len(ind) > 0:
			irrev_violating_subsets = sub[ind, :]
			sub = delete(sub, ind, 0)
			irrv_to_keep = delete(array(range(len(irrev_sub))), ind, 0)
			irrev_sub = irrev_sub[irrv_to_keep]

	return sub, irrev_sub, irrev_violating_subsets
188 |
189 |
def reduce(S, sub, irrev_reduced=None):
    """
    Reduces a stoichiometric matrix according to the subset information present in the sub matrix and irrev_reduced.

    NOTE: this module-level name shadows the builtin functools-style reduce;
    kept for backwards compatibility with existing callers.

    Parameters

    ----------

    S: Stoichiometric matrix

    sub: Subset matrix (n_subsets by n_reactions), as computed by subset_correlation_matrix

    irrev_reduced: Irreversibility vector regarding the subsets (optional).

    Returns reduced, reduced_indexes, irrev_reduced, sub

    -------

    """

    # Lump each subset into a single column of the reduced matrix.
    reduced = dot(S, sub.T)
    reduced[abs(reduced) < PRECISION] = 0  # chop numerical noise
    # Keep only metabolites that still participate in some lumped reaction.
    reduced_indexes = unique(nonzero(reduced)[0])
    reduced = reduced[reduced_indexes, :]

    rdm, rdn = reduced.shape
    if rdn == 0 or rdm == 0:
        # BUG FIX: zeros(1, rdn) passed `rdn` as numpy's dtype argument and
        # raised a TypeError; the shape must be supplied as a single tuple.
        reduced = zeros((1, rdn))

    if irrev_reduced is not None:
        # Drop subsets (columns) with no remaining metabolites, keeping the
        # irreversibility vector and the subset matrix aligned with them.
        ind = unique(nonzero(reduced)[1])
        reduced = reduced[:, ind]
        irrev_reduced = irrev_reduced[ind]
        sub = sub[ind, :]
    else:
        irrev_reduced = []

    return reduced, reduced_indexes, irrev_reduced, sub
228 |
229 |
class SubsetReducerProperties(PropertyDictionary):
    """
    Property container for SubsetReducer configuration.

    keep: reaction indices to keep as singleton subsets (not compressed)
    block: reaction indices to block/remove prior to reduction
    absolute_bounds: if True, derive finite bounds for lumped reactions
    reaction_id_sep: separator used when joining reaction identifiers
    """

    def __init__(self, keep=None, block=None, absolute_bounds=False, reaction_id_sep='_+_'):
        def is_list(x):
            return isinstance(x, (tuple, list, ndarray))

        # BUG FIX: the original validators used `type(x) == None`, which is
        # always False (a type object never equals None), so an explicit None
        # value would have been rejected. `x is None` expresses the intent.
        new_optional = {
            'keep': lambda x: is_list(x) or x is None,
            'block': lambda x: is_list(x) or x is None,
            'absolute_bounds': bool,
            'reaction_id_sep': str
        }

        super().__init__(optional_properties=new_optional)
        for name, value in zip(['keep', 'block', 'absolute_bounds', 'reaction_id_sep'],
                               [keep, block, absolute_bounds, reaction_id_sep]):
            self.add_if_not_none(name, value)
246 |
247 |
class SubsetReducer(ModelTransformer):
    """
    ModelTransformer that compresses a constraint-based model by lumping
    correlated reactions (enzyme subsets) into single reactions via
    subset_reduction.
    """
    # Keys identifying this reducer's options in external property dicts.
    TO_KEEP_SINGLE = 'SUBSET_REDUCER-TO_KEEP_SINGLE'
    TO_BLOCK = 'SUBSET_REDUCER-TO_BLOCK'
    ABSOLUTE_BOUNDS = 'SUBSET_REDUCER-ABSOLUTE_BOUNDS'

    @staticmethod
    def reduce(S, lb, ub, keep=(), block=(), absolute_bounds=False):
        """
        Computes the subset-reduced form of a stoichiometric matrix.

        Parameters
        ----------
        S: stoichiometric matrix (metabolites by reactions)
        lb, ub: lower/upper flux bounds, one per reaction
        keep: reaction indices to keep uncompressed
        block: reaction indices to remove before reduction
        absolute_bounds: when True, derive finite bounds for each lumped
        reaction from its members' bounds instead of leaving them open

        Returns (reduced S, new lower bounds, new upper bounds,
        ReactionIndexMapping, indexes of the metabolites kept).
        -------
        """
        lb, ub = list(map(array, [lb, ub]))
        to_keep, to_block = [], []
        # A reaction is irreversible when it cannot carry negative flux.
        irrev = (lb >= 0) | ((ub <= 0) & (lb <= 0))

        if block:
            to_block = array(block)

        if keep:
            to_keep = array(keep)

        rd, sub, irrev_reduced, rdind, irrv_subsets, kept_reactions, K, _ = subset_reduction(
            S, irrev, to_keep_single=to_keep, to_remove=to_block)

        mapping = SubsetReducer.get_transform_maps(sub)

        # Default bounds: irreversible subsets get lb = 0, everything else is
        # left unbounded (None).
        nlb = [0 if irrev_reduced[k] else None for k in range(rd.shape[1])]
        nub = [None] * rd.shape[1]

        if absolute_bounds:
            nlb = [0 if irrev_reduced[k] else -1000 for k in range(rd.shape[1])]
            nub = [float('inf')] * rd.shape[1]
            # Tightest bounds over each subset's members: max of the member
            # lower bounds and min of the member upper bounds.
            alb, aub = list(zip(*[[fx([x[k] for k in mapping.from_new(i)]) for x, fx in zip([lb, ub], [max, min])]
                                  for i in range(rd.shape[1])]))

            for func, pair in zip([max, min], [[nlb, alb], [nub, aub]]):
                new, absolute = pair
                for i, v in enumerate(absolute):
                    new[i] = func(new[i], absolute[i])

        return rd, nlb, nub, mapping, rdind

    @staticmethod
    def transform_array(S, lb, ub, properties):
        """
        ModelTransformer entry point: reads 'keep', 'block' and
        'absolute_bounds' from `properties` and delegates to reduce().
        """
        k, b, a = (properties[k] for k in ['keep', 'block', 'absolute_bounds'])

        Sn, lbn, ubn, mapping, metabs = SubsetReducer.reduce(S, lb, ub, k, b, a)

        return Sn, lbn, ubn, mapping, metabs

    @staticmethod
    def get_transform_maps(sub):
        """
        Builds a ReactionIndexMapping from the subset matrix: each lumped
        reaction maps to the original reactions with non-zero coefficients in
        its row, and each original reaction maps back to its subset.
        """
        new_to_orig = {i: list(nonzero(sub[i, :])[0]) for i in range(sub.shape[0])}
        orig_to_new = dict(chain(*[[(i, k) for i in v] for k, v in new_to_orig.items()]))

        return ReactionIndexMapping(orig_to_new, new_to_orig)
300 |
--------------------------------------------------------------------------------
/src/cobamp/utilities/__init__.py:
--------------------------------------------------------------------------------
1 | from . import file_io, property_management, set, test, postfix_expressions
2 |
--------------------------------------------------------------------------------
/src/cobamp/utilities/context.py:
--------------------------------------------------------------------------------
1 | from functools import partial
2 |
3 |
class CommandHistory(object):
    """Stores deferred calls and replays them in LIFO or FIFO order."""

    def __init__(self):
        # Pending commands; index 0 is the oldest entry, -1 the newest.
        self.commands = []

    def execute_last(self):
        """Pops and runs the most recently queued command."""
        command = self.commands.pop()
        command()

    def execute_first(self):
        """Pops and runs the oldest queued command."""
        command = self.commands.pop(0)
        command()

    def execute_all(self, forward=False):
        """Runs every queued command; oldest-first when `forward` is True."""
        runner = self.execute_first if forward else self.execute_last
        while self.commands:
            runner()

    def queue_command(self, func, args):
        """Queues `func` to be invoked later with `args` as keyword arguments."""
        self.commands.append(partial(func, **args))
--------------------------------------------------------------------------------
/src/cobamp/utilities/file_io.py:
--------------------------------------------------------------------------------
1 | import pickle
2 |
3 |
def pickle_object(obj, path):
    """
    Stores an object as a file.
    Parameters
    ----------
    obj: The object instance
    path: Full path as a str where the file will be stored.

    Returns
    -------

    """
    handle = open(path, "wb")
    try:
        pickle.dump(obj, handle)
    finally:
        handle.close()
18 |
19 |
def read_pickle(path):
    """
    Reads a file containing a pickled object and returns it
    Parameters
    ----------
    path: Full path as a str where the file is stored.

    Returns an object.
    -------

    """
    handle = open(path, "rb")
    try:
        return pickle.load(handle)
    finally:
        handle.close()
33 |
34 |
def open_file(path, mode):
    """Opens the file at `path` with `mode` and returns its full contents."""
    handle = open(path, mode)
    try:
        return handle.read()
    finally:
        handle.close()
38 |
--------------------------------------------------------------------------------
/src/cobamp/utilities/hash.py:
--------------------------------------------------------------------------------
1 | from typing import Iterable
2 |
def get_dict_hash(x: dict, hash_function=hash):
    """Returns a content hash of a dict, computed over its (key, value) pairs."""
    items_view = frozenset(x.items())
    return hash_function(items_view)
5 |
def get_unique_dicts(dict_iterable: Iterable[dict], hash_function=hash):
    """
    Deduplicates dictionaries by content hash.

    Returns (cache, orig_hashes): `cache` maps each distinct hash to the first
    dict observed with it, while `orig_hashes` lists the hash of every input
    dict in order, so duplicates can be traced back to their representative.
    """
    orig_hashes = []
    cache = {}

    for entry in dict_iterable:
        entry_hash = get_dict_hash(entry, hash_function)
        cache.setdefault(entry_hash, entry)
        orig_hashes.append(entry_hash)
    return cache, orig_hashes
--------------------------------------------------------------------------------
/src/cobamp/utilities/linear_system_diagnostics.py:
--------------------------------------------------------------------------------
def identify_conflicts(modelnorm):
    """
    Identify conflicting constraints in a constraint-based model instantiated with CPLEX
    :param modelnorm: A ConstraintBasedModel instance
    :return: A dict with reaction/metabolite names involved in an irreducible inconsistent subset and their bounds
    (if applicable); None when the conflict refiner fails (e.g. no conflict exists).
    """
    try:
        # Reach through the cobamp/solver wrappers to the raw CPLEX problem.
        cpxi = modelnorm.model.model.problem
        cpxi.conflict.refine(cpxi.conflict.all_constraints())
        # Keep only groups the refiner did not mark 'excluded'. Bound
        # conflicts map a reaction name to its bounds; other (linear /
        # stoichiometric) conflicts map a metabolite name to an empty tuple.
        conf = {k[1]: modelnorm.get_reaction_bounds(k[1]) if 'bound' in k[0] else () for k, v in
                dict(zip([(cpxi.conflict.constraint_type[k[1][0][0]], modelnorm.reaction_names[k[1][0][1]]
                if cpxi.conflict.constraint_type[k[1][0][0]] != 'linear' else modelnorm.metabolite_names[k[1][0][1]])
                          for k in cpxi.conflict.get_groups()],
                         [cpxi.conflict.group_status[i] for i in cpxi.conflict.get()])).items()
                if v != 'excluded'}

        return conf
    except Exception:
        # BUG FIX: the original bare `except:` also swallowed SystemExit and
        # KeyboardInterrupt; only genuine runtime errors should be treated as
        # "no conflict". CPLEX raises when the refiner finds no conflict.
        print('\tNo conflict')
    finally:
        # Always restore the model's original bounds, conflict or not.
        modelnorm.revert_to_original_bounds()
33 |
--------------------------------------------------------------------------------
/src/cobamp/utilities/parallel.py:
--------------------------------------------------------------------------------
1 | from itertools import product
2 | from multiprocessing import cpu_count
3 |
4 | from pathos.pools import _ProcessPool
5 |
6 | from cobamp.core.optimization import BatchOptimizer
7 |
8 | MP_THREADS = cpu_count()
9 |
10 |
def _batch_function(param_index):
    """
    Worker entry point: applies the globally shared function to the item at
    `param_index` of the shared iterable (set up by _pool_initializer).
    Returns (index, result) so the parent can reassemble results in input
    order from an unordered map.
    """
    global _params, _function, _iterable
    return param_index, _function(_iterable[param_index], _params)
14 |
15 |
def _pool_initializer(params):
    """
    Pool initializer: stores the parameter dict, the iterable of work items
    and the function to apply as module-level globals inside each worker
    process, for _batch_function to pick up.
    """
    global _iterable, _params, _function
    _params = params
    _iterable = params['iterable']
    _function = params['function']
21 |
22 |
def _batch_run(params, threads):
    """
    Dispatches `params['function']` over `params['iterable']` on a process
    pool and returns the results in input order.

    Parameters
    ----------
    params: dict with at least 'function' and 'iterable'; the whole dict is
    also forwarded to workers as the function's second argument.
    threads: upper limit on the number of worker processes.

    Returns a list with one result per input item, in the original order.
    -------
    """
    jobs = len(params['iterable'])
    res_map = [None for _ in range(jobs)]
    true_threads = min((jobs // 2) + 1, threads)
    # BUG FIX: with threads > jobs the floor division produced 0, which is an
    # invalid chunksize for imap_unordered; clamp to at least 1.
    it_per_job = max(1, jobs // threads)
    pool = _ProcessPool(
        processes=true_threads,
        initializer=_pool_initializer,
        initargs=([params])
    )
    # Results arrive out of order; the returned index restores input order.
    for i, value in pool.imap_unordered(_batch_function, list(range(jobs)),
                                        chunksize=it_per_job):
        res_map[i] = value

    pool.close()
    pool.join()

    return res_map
41 |
42 |
def batch_run(function, sequence, paramargs=None, threads=MP_THREADS):
    """
    Applies `function` to every item of `sequence` on a process pool.

    Parameters
    ----------
    function: callable taking (item, params_dict)
    sequence: indexable collection of work items
    paramargs: optional dict of extra parameters shared with all workers
    threads: maximum number of worker processes (defaults to the CPU count)

    Returns a list of results in the same order as `sequence`.
    -------
    """
    params = {'function': function, 'iterable': sequence}
    # `is not None` instead of `!= None`: identity is the correct test for
    # None and avoids invoking custom __eq__ implementations.
    if paramargs is not None:
        params.update(paramargs)
    return _batch_run(params, min(len(sequence), threads))
48 |
def batch_optimize_cobamp_model(cobamp_model, bounds, objectives, combine_inputs=False, max_threads=MP_THREADS):
    """
    Optimizes a cobamp model over several bound/objective configurations in batch.

    Parameters
    ----------
    cobamp_model: a cobamp model wrapper exposing `decode_index` and `model`
    bounds: bound configurations keyed by reaction identifier
    objectives: (objective coefficients, sense) pairs
    combine_inputs: when True (or when lengths differ), every bound
    configuration is paired with every objective via a cartesian product.
    max_threads: cap on the number of optimizer threads.

    Returns the result of BatchOptimizer.batch_optimize.
    -------
    """

    def check_inputs(bounds, objectives):
        all_eq = len(bounds) == len(objectives)
        if combine_inputs or not all_eq:
            # Pair every bound configuration with every objective.
            bounds, objectives = zip(*product(bounds, objectives))

        obj_coef, senses = zip(*objectives)

        # Translate reaction identifiers into model indices.
        # NOTE(review): `d` iterates over the two containers themselves, so
        # `.items()` requires them to be dicts — confirm the expected input
        # shapes against the callers of this function.
        bounds, obj_coef = [{cobamp_model.decode_index(k, 'reaction'): v for k, v in d.items()}
                            for d in (bounds, obj_coef)]

        return bounds, obj_coef, objectives

    cbounds, ccoefs, cobjs = check_inputs(bounds, objectives)
    # BUG FIX: min(cbounds, max_threads) compared a collection against an int
    # (a TypeError in Python 3); the intent is to cap the thread count at the
    # number of optimization tasks.
    bopt = BatchOptimizer(cobamp_model.model, threads=min(len(cbounds), max_threads))
    return bopt.batch_optimize(cbounds, ccoefs, cobjs)
66 |
--------------------------------------------------------------------------------
/src/cobamp/utilities/postfix_expressions.py:
--------------------------------------------------------------------------------
1 | from operator import add, sub, mul, truediv, pow
2 |
3 |
class Stack(list):
    """A LIFO stack backed by a plain list (top of the stack at the end)."""

    def push(self, x):
        """Places `x` on top of the stack."""
        self.append(x)

    def top(self):
        """Returns (without removing) the topmost element."""
        return self[len(self) - 1]
11 |
12 |
class Queue(list):
    """A FIFO queue backed by a list: newest at index 0, oldest at the end."""

    def push(self, x):
        """Enqueues `x` at the front of the underlying list."""
        self[:0] = [x]

    def top(self):
        """Returns (without removing) the oldest element (the list tail)."""
        return self[-1]
20 |
21 |
def parse_infix_expression(op, is_operand_fx, is_operator_fx, precedence_fx):
    """
    Converts a tokenized infix expression into postfix (reverse Polish)
    notation using the shunting-yard algorithm.

    Parameters
    ----------
    op: list of tokens in infix order
    is_operand_fx: predicate identifying operand tokens
    is_operator_fx: predicate identifying operator tokens
    precedence_fx: maps an operator token to its numeric precedence

    Returns the tokens as a list in postfix order.
    -------
    """
    tokens = Queue(op[::-1])
    output = Queue()
    opstk = Stack()
    while tokens:
        token = tokens.pop()
        if is_operand_fx(token):
            output.push(token)
        elif is_operator_fx(token):
            # Pop operators of higher precedence (or equal precedence, for
            # left-associative operators) before stacking this one.
            while opstk and ((opstk.top() != '(') and ((precedence_fx(token) < precedence_fx(opstk.top())) or (
                    left_operator_association(opstk.top()) and (precedence_fx(token) == precedence_fx(opstk.top()))))):
                output.push(opstk.pop())
            opstk.push(token)
        elif token == '(':
            opstk.push(token)
        elif token == ')':
            while opstk and (opstk.top() != '('):
                output.push(opstk.pop())
            # BUG FIX: the original called opstk.top() unconditionally here,
            # raising IndexError on an empty stack — i.e. exactly in the
            # mismatched-')' case this branch is meant to report.
            if not opstk:
                print('Mismatched parentheses found!')
            else:
                opstk.pop()  # discard the matching '('
    # Drain remaining operators; leftover parentheses indicate a mismatch.
    while opstk:
        op_remaining = opstk.pop()
        if op_remaining in ('(', ')'):
            print('Mismatched parentheses found')
        output.push(op_remaining)
    return list(output)[::-1]
50 |
51 |
def evaluate_postfix_expression(op, eval_fx, type_conv=int):
    """
    Evaluates a postfix token sequence with a stack machine.

    Parameters
    ----------
    op: list of tokens in postfix order
    eval_fx: callable (token, o1, o2) -> value. NOTE: o1 is the operand
    popped first (the RIGHT-hand operand) and o2 the one popped second (the
    LEFT-hand operand); eval_fx must account for this ordering.
    type_conv: conversion applied to both operands before evaluation

    Returns the value remaining on the stack after consuming all tokens.

    NOTE(review): operator/operand detection is hard-wired to the arithmetic
    predicates is_operator_token/is_number_token, so boolean tokens such as
    'and'/'or'/'not' are silently dropped here — confirm intended usage.
    -------
    """
    stk = Stack()
    for token in op:
        if is_operator_token(token):
            # o1 = right operand (top of stack), o2 = left operand.
            o1, o2 = type_conv(stk.pop()), type_conv(stk.pop())
            result = eval_fx(token, o1, o2)
            stk.push(result)
        elif is_number_token(token):
            stk.push(token)
        # tokens that are neither operators nor numbers are silently ignored
    return stk.pop()
62 |
63 |
def tokenize_infix_expression(inf_exp_str):
    """Splits an infix expression string into tokens, isolating parentheses."""
    spaced = inf_exp_str.replace('(', ' ( ').replace(')', ' ) ')
    return [tok for tok in spaced.split(' ') if tok != '']
66 |
67 |
68 | # def tokenize_boolean_expression(inf_exp_str, default_value='1'):
69 | # return [tok if tok in ('0', '1', 'and', 'not', 'or', ')', '(') else default_value for tok in
70 | # tokenize_infix_expression(inf_exp_str)]
71 |
def tokenize_boolean_expression(inf_exp_str):
    """
    Tokenizes a boolean (GPR-style) expression. Currently identical to
    tokenize_infix_expression; kept as a separate entry point so boolean
    tokenization rules can diverge later.
    """
    return tokenize_infix_expression(inf_exp_str)
74 |
75 |
def is_number_token(token):
    """
    Returns True when `token` looks numeric after removing at most one
    decimal point and one minus sign (e.g. '3.14' and '-2' both qualify).
    """
    stripped = token.replace('.', '', 1)
    stripped = stripped.replace('-', '', 1)
    return stripped.isnumeric()
78 |
79 |
def is_operator_token(token):
    """Returns True when `token` is one of the supported arithmetic operators."""
    return token in ('**', '/', '*', '+', '-')
82 |
83 |
def is_string_token(token):
    """
    Returns True for plain identifier tokens: strings that are neither
    boolean operators nor contain parentheses.
    """
    if not isinstance(token, str):
        return False
    if is_boolean_operator(token):
        return False
    return ('(' not in token) and (')' not in token)
86 |
87 |
def op_prec(op):
    """
    Returns the precedence level of an arithmetic operator token
    (higher binds tighter).
    """
    # BUG FIX: '-' previously had precedence 1 while '+' had 2, which made
    # `a - b + c` parse as `a - (b + c)`. Addition and subtraction must share
    # the same precedence level.
    precedence = {
        '**': 4,
        '/': 3,
        '*': 3,
        '+': 2,
        '-': 2}
    return precedence[op]
96 |
97 |
def left_operator_association(op):
    """Returns True for left-associative operators; only '**' associates right."""
    return op != '**'
100 |
101 |
# Binary operator dispatch table. evaluate_postfix_expression pops the RIGHT
# operand first (o1) and the LEFT operand second (o2), so non-commutative
# operators must apply their arguments in reverse — '**' already did this.
# BUG FIX: '-' and '/' previously applied (o1, o2) directly, so '5 3 -'
# evaluated to -2 instead of 2 and '10 2 /' to 0.2 instead of 5.
operators = {
    '+': add,
    '-': lambda a, b: sub(b, a),
    '*': mul,
    '/': lambda a, b: truediv(b, a),
    '**': lambda a, b: pow(b, a)
}


def eval_math_operator(operator, o1, o2):
    """
    Applies an arithmetic operator; o1 is the right-hand operand (popped
    first) and o2 the left-hand operand, matching the evaluator's stack order.
    """
    return operators[operator](o1, o2)
113 |
114 |
def is_boolean_value(token):
    """
    Returns True only for the literals '1' and '0'. Any other numeric token
    is reported as illegal (printed) before being rejected.
    """
    if token in ('1', '0'):
        return True
    if token.isnumeric():
        print('Illegal token', token, 'found.')
    return False
123 |
124 |
def is_boolean_operator(token):
    """Returns True when `token` is a boolean connective (case-insensitive)."""
    return token.upper() in ('AND', 'OR', 'NOT')
127 |
128 |
def boolean_precedence(token):
    """
    Returns the precedence of a boolean operator (not > and > or).
    The lookup is case-insensitive, for consistency with is_boolean_operator,
    which accepts any casing — previously 'AND' raised a KeyError here.
    """
    pdict = {'not': 3,
             'and': 2,
             'or': 1}
    return pdict[token.lower()]
134 |
135 |
# Truth tables for the boolean connectives. The binary operators are keyed by
# an (o1, o2) tuple; 'not' is unary and keyed by a single value.
truth_table = {
    'and': {
        (0, 0): 0,
        (1, 0): 0,
        (0, 1): 0,
        (1, 1): 1
    },
    'or': {
        (0, 0): 0,
        (1, 0): 1,
        (0, 1): 1,
        (1, 1): 1
    },
    'not': {
        0: 1,
        1: 0
    }
}


def eval_boolean_operator(operator, o1, o2):
    """
    Evaluates a boolean operator against the truth tables above.

    BUG FIX: 'not' is unary and its table is keyed by a single value, so the
    original unconditional tuple lookup truth_table['not'][(o1, o2)] always
    raised a KeyError. For 'not' only o1 (the operand popped first) is used;
    o2 is ignored.
    """
    if operator == 'not':
        return truth_table[operator][o1]
    return truth_table[operator][(o1, o2)]
158 |
159 |
# Manual smoke test: downloads RECON1 from BiGG (network access required) and
# needs the optional cobra dependency; exercises GPR tokenization/parsing.
if __name__ == '__main__':

    # def test_algebraic_expression():
    # 	op = tokenize_infix_expression('((15 / (7 - (1 + 1))) * 3) - (2 + (1 + 1)) ')
    # 	psfix = parse_infix_expression(op, is_number_token, is_operator_token, op_prec)
    # 	res = evaluate_postfix_expression(psfix, eval_math_operator)
    # 	print(res)


    # def test_boolean_expression():
    from urllib.request import urlretrieve
    from cobra.io import read_sbml_model
    from random import random

    BOOL_OPS = ('and', 'not', 'or', ')', '(')
    path, content = urlretrieve('http://bigg.ucsd.edu/static/models/RECON1.xml')
    model = read_sbml_model(path)

    # [(i,r.gene_name_reaction_rule) for i,r in enumerate(model.reactions)]
    ogpr = model.reactions[616].gene_name_reaction_rule
    gpr_tok = tokenize_infix_expression(ogpr)
    psfix = parse_infix_expression(gpr_tok, lambda x: x not in BOOL_OPS, is_boolean_operator, boolean_precedence)

    # gene_activation = {k: 1 for k in [g.id for g in model.genes]}
    # for test in range(20):
    # 	gpr = ogpr
    # 	for gene in gene_activation:
    # 		dec = random() > 0.6
    # 		gpr = gpr.replace(gene, '1' if dec else '0')
    # 	op = [tok if tok in ('0', '1', 'and', 'not', 'or', ')', '(') else '1' for tok in
    # 		  tokenize_infix_expression(gpr)]
    # 	psfix = parse_infix_expression(op, is_boolean_value, is_boolean_operator, boolean_precedence)
    # 	print(gpr, ''.join(psfix), evaluate_postfix_expression(psfix, eval_boolean_operator), sep=',')

    # test_boolean_expression()
--------------------------------------------------------------------------------
/src/cobamp/utilities/printing.py:
--------------------------------------------------------------------------------
def pretty_table_print(table, has_header=True, header_sep=2):
    """
    Prints an iterable of rows (each an iterable of strings) as a
    tab-aligned table on stdout.

    Parameters
    ----------
    table: iterable of rows; each row is an iterable of strings
    has_header: when True, the first row is printed as a header followed by
    `header_sep` newlines
    header_sep: number of newlines emitted after the header row
    -------
    """
    table = list([list(t) for t in table])

    prntstr = ''
    # Column width in tab stops, assuming 4-character tab rendering; sized to
    # the longest cell in the whole table.
    col_sep_max = ((max([max([len(k) for k in it]) for it in table])//4)+1)
    col_sep = '\t'*col_sep_max

    # check for inconsistent dimensions

    if has_header:
        header = table[0]
        data = table[1:]
        prntstr += col_sep.join(header)+('\n'*header_sep)
    else:
        data = list(table)
    ndata = []
    for line in data:
        tokens = []
        for stri in line:
            # Pad each cell with enough tabs to reach the next column stop.
            separator = '\t'*(col_sep_max - (len(stri) // 4) + 1)
            tokens.extend([stri, separator])
        tokens.pop()  # drop the trailing separator at end of row
        ndata.append(tokens)

    prntstr += '\n'.join(''.join(k) for k in ndata)
    print(prntstr)
27 |
28 |
# Manual smoke test exercising ragged cell widths and an empty trailing cell.
if __name__ == '__main__':
    pretty_table_print([['column1','column2'],['value19999','value2'],['item1item11999','item2'],['item3','']])
--------------------------------------------------------------------------------
/src/cobamp/utilities/property_management.py:
--------------------------------------------------------------------------------
1 | import types
2 |
3 |
class PropertyDictionary():
    """
    Implements a dict with additional control on which objects can be added to which keys and whether these are optional
    or mandatory.
    """

    def __init__(self, mandatory_properties=None, optional_properties=None):

        """
        The values for each of the required dicts can either be:
        - A type (such as str, int, etc...)
        - A function returning a boolean and accepting a single value as argument
        - A list of admissible values

        Parameters
        ----------
        mandatory_properties: A dict[str,function] mapping the keys of mandatory properties with one of three options
        for values, as described above
        optional_properties: A dict[str,function] mapping the keys of optional properties with one of three options
        for values, as described above
        """
        # BUG FIX: the defaults were mutable dicts ({}), shared across every
        # instance created without arguments; add_new_properties mutates them
        # in place, so properties registered on one instance leaked into all
        # others. A None sentinel gives each instance a fresh dict.
        self.__mandatory_properties = {} if mandatory_properties is None else mandatory_properties
        self.__optional_properties = {} if optional_properties is None else optional_properties
        self.__properties = {}

    def add_new_properties(self, mandatory_properties, optional_properties):
        """
        Adds new properties to the dictionary and/or updates existing ones, if present.
        Parameters
        ----------
        mandatory_properties: A dict[str, function]
        optional_properties: A dict[str, function]
        -------

        """
        self.__mandatory_properties.update(mandatory_properties)
        self.__optional_properties.update(optional_properties)

    def get_mandatory_properties(self):
        """

        Returns a dictionary containing the mapping between mandatory keys and function/type/list controlling values.
        -------

        """
        return self.__mandatory_properties

    def get_optional_properties(self):
        """

        Returns a dictionary containing the mapping between optional keys and function/type/list controlling values.
        -------

        """
        return self.__optional_properties

    def __getitem__(self, item):
        """
        Overloaded indexing to allow the square brace syntax for accessing values through keys. If the key was not
        registered, an exception will be raised. If the key was registered but no value exists, None will be returned.

        Parameters
        ----------
        item: Key for the value to be accessed

        Returns an object.
        -------

        """
        if item not in self.__mandatory_properties.keys() and item not in self.__optional_properties.keys():
            raise Exception(str(item) + " has not been registered as a mandatory or optional property.")
        elif item not in self.__properties.keys():
            return None
        else:
            return self.__properties[item]

    def __setitem__(self, key, value):
        """
        Sets the value for the supplied key
        Parameters
        ----------
        key - str representing the key, preferrably contained in the mandatory or optional properties.
        value - an object compliant with the functions set for the key.

        NOTE(review): unregistered keys are silently ignored here, unlike
        __getitem__, which raises — confirm whether this asymmetry is wanted.
        -------

        """
        if key in self.__mandatory_properties.keys() or key in self.__optional_properties.keys():
            expected_type = self.__mandatory_properties[key] if key in self.__mandatory_properties.keys() else \
                self.__optional_properties[key]
            if self.__check_key_value_pair(expected_type, value):
                self.__properties[key] = value
            else:
                raise Exception(str(key) + " does not accept the supplied `value` as a valid argument.")

    def has_required_properties(self):
        """

        Returns a boolean value if the mandatory properties all have an associated value.
        -------

        """
        return set(self.__properties.keys()) & set(self.__mandatory_properties.keys()) == set(
            self.__mandatory_properties.keys())

    def __check_key_value_pair(self, expected_type, value):
        """
        Checks whether a value is compliant with a function/type or is contained in a list of admissible values.
        Parameters
        ----------
        expected_type: A type to be compared with value, a function returning a boolean and accepting value as argument
        or a list of values where value should be contained.
        value

        Returns a boolean indicating whether the value can be added, assuming the conditions set by `expected_type`
        -------

        """
        if type(expected_type) is type:
            is_ok = expected_type == type(value)
            if not is_ok:
                raise TypeError(
                    "\'value\' has type " + str(type(value)) + " but \'key\' requires type " + str(expected_type))
        elif type(expected_type) == types.FunctionType:
            # NOTE(review): only plain functions/lambdas reach this branch;
            # builtin callables fall through to the membership test below.
            is_ok = expected_type(value)
            if not is_ok:
                raise AssertionError(
                    "Property checking function " + expected_type.__name__ + " does not allow the specified \'value\'")
        else:
            is_ok = value in expected_type
            if not is_ok:
                raise AssertionError("\'value\' is not contained in " + str(expected_type))

        return is_ok

    def add_if_not_none(self, key, value):
        # Convenience setter that skips None values entirely.
        if value is not None:
            self[key] = value

    def __repr__(self):
        """
        Returns a string representation of the internal dictionary where all keys/values are stored.
        -------

        """
        return '\n'.join([str(k) + " = " + str(v) for k, v in self.__properties.items()])
--------------------------------------------------------------------------------
/src/cobamp/utilities/set.py:
--------------------------------------------------------------------------------
def is_subset(which, of_what):
    """

    Parameters
    ----------
    which: A set or frozenset instance to be checked as a possible subset of `of_what`
    of_what: A set or frozenset instance

    Returns a boolean indicating if `which` is a non-empty subset of `of_what`
    -------

    """
    # BUG FIX: the original test `0 < len(which & of_what) <= len(of_what)`
    # only checked for a non-empty intersection (the second comparison holds
    # for any intersection), so partially overlapping sets were wrongly
    # reported as subsets. A subset requires the intersection to contain
    # every element of `which`; the non-empty requirement is preserved.
    return 0 < len(which & of_what) == len(which)
14 |
15 |
def is_identical(set1, set2):
    """

    Parameters
    ----------
    set1: A set of frozenset instance.
    set2: A set of frozenset instance.

    Returns a boolean indicating if both set1 and set2 are identical (contain exactly the same elements)
    -------

    """
    # Set equality already means "same elements", regardless of set/frozenset.
    return set1 == set2
29 |
30 |
def has_no_overlap(set1, set2):
    """

    Parameters
    ----------
    set1: A set of frozenset instance.
    set2: A set of frozenset instance.

    Returns a boolean indicating if the intersection of set1 and set2 is empty.
    -------

    """
    return set1.isdisjoint(set2)
44 |
--------------------------------------------------------------------------------
/src/cobamp/utilities/test.py:
--------------------------------------------------------------------------------
1 | import time
2 |
3 |
def timeit(method):
    '''
    Timer decorator for methods. Courtesy of Fahim Sakri from PythonHive.
    The elapsed wall-clock time in milliseconds is either stored in the dict
    passed via the `log_time` keyword argument (under `log_name`, defaulting
    to the upper-cased function name) or printed to stdout.

    NOTE: all keyword arguments, including `log_time`/`log_name`, are also
    forwarded to the wrapped callable, which must therefore tolerate them.

    Args:
        method: the callable to instrument

    Returns:
        the wrapping callable

    '''
    from functools import wraps

    # BUG FIX: without functools.wraps the decorated callable lost its
    # __name__/__doc__, which also broke the upper-cased default log key
    # when decorators were stacked.
    @wraps(method)
    def timed(*args, **kw):
        ts = time.time()
        result = method(*args, **kw)
        te = time.time()

        if 'log_time' in kw:
            name = kw.get('log_name', method.__name__.upper())
            kw['log_time'][name] = int((te - ts) * 1000)
        else:
            print('%r %2.2f ms' % (method.__name__, (te - ts) * 1000))
        return result

    return timed
27 |
--------------------------------------------------------------------------------
/src/cobamp/utilities/tree.py:
--------------------------------------------------------------------------------
1 | from collections import Counter
2 | from itertools import chain
3 |
4 |
class Tree(object):
    """
    A simple class representing an n-ary tree as a node with one or more children nodes.
    """

    def __init__(self, value, extra_info=None):
        """
        Initializes the tree node with no children.
        Parameters
        ----------
        value: A value associated with this node
        extra_info: Additional hidden information present in the node (OPTIONAL)
        """
        self.value = value
        self.extra_info = extra_info
        self.children = []

    def get_children(self):
        """
        Returns a shallow copy of this node's list of children.
        -------

        """
        return list(self.children)

    def add_child(self, node):
        """
        Appends a node to this node's children.
        Parameters
        ----------
        node: A Tree instance
        -------

        """
        self.children.append(node)

    def is_leaf(self):
        """
        Checks whether this node has no children (is a leaf).
        Returns a boolean.
        -------

        """
        return len(self.children) == 0

    def __eq__(self, other):
        """
        Overloaded equality: compares this node's value directly against
        `other` (which may be a raw value rather than another Tree).
        Parameters
        ----------
        other

        Returns a boolean
        -------

        """
        return self.value == other

    def __repr__(self):
        """
        Returns this node as a string containing its value and extra information.
        -------

        """
        return '{}({})'.format(self.value, self.extra_info)
72 |
73 |
def fill_tree(tree, sets):
    """
    Fills a Tree instance with data from the Iterable[set/frozenset] supplied as the argument `sets`.
    The resulting tree will be filled in a way that each set can be retrieved by traversing from the root node `tree`
    towards a leaf. The nodes required to travel down this path contain the values of a single set.
    The resulting tree will not contain circular references so this should not be treated as a graph. The filling method
    is recursive, so each child will be filled with sets contained in the parent node. Sets that have already been added
    are removed from the original pool.
    Elements to be added as nodes are chosen by the frequency at which they occur in the sets.
    Parameters
    ----------
    tree: A tree instance
    sets: A list of set/frozenset instances.
    -------

    """
    if len(sets) == 0:
        return
    counts = Counter(chain(*(sets)))
    if len(counts) == 0:
        return
    # Pick the most frequent element across all remaining sets.
    element, frequency = counts.most_common(1)[0]
    child = Tree(element, extra_info=frequency)
    with_element = [setf for setf in sets if element in setf]
    without_element = [setf for setf in sets if element not in setf]
    tree.add_child(child)

    if len(with_element) > 0:
        # Recurse under the new node with the chosen element stripped out.
        fill_tree(child, [[k for k in setf if k != element] for setf in with_element])
    if len(without_element) > 0:
        # Sets not containing the element stay attached to the current node.
        fill_tree(tree, without_element)
106 |
--------------------------------------------------------------------------------
/src/cobamp/wrappers/__init__.py:
--------------------------------------------------------------------------------
from importlib import import_module
from .core import MatFormatReader, ConstraintBasedModelSimulator
from .cobamp import CobampModelObjectReader

# Readers that are always available (no third-party dependency).
model_readers = {'numpy': MatFormatReader, 'cobamp.core.models': CobampModelObjectReader}

# Optional readers keyed by the external package that must be importable for
# the matching cobamp.wrappers submodule to load.
external_frameworks = {'cobra':'COBRAModelObjectReader', 'reframed':'FramedModelObjectReader'}
external_framework_readers = {}

# Maps a model object's __module__ to the name of the reader class able to
# parse it; pruned below to the readers that actually loaded.
available_readers_dict = {
    'numpy':'MatFormatReader',
    'cobamp.core.models':'CobampModelObjectReader',
    'cobra.core.model': 'COBRAModelObjectReader',
    'framed.model.cbmodel': 'FramedModelObjectReader'
}
for module_name, reader_name in external_frameworks.items():
    try:
        # Import the external framework first; failure skips the reader.
        module = import_module(module_name, '')
        # print(reader_name,'is available for',module_name)
        # NOTE(review): imports cobamp.wrappers.<module_name> (e.g. 'reframed')
        # — confirm a matching submodule exists for each external framework.
        cobamp_module = import_module('.'+module_name, package='cobamp.wrappers')
        reader_class = getattr(cobamp_module, reader_name)
        globals().update({module_name: cobamp_module, reader_name: reader_class})
        external_framework_readers[reader_name] = reader_class
    except Exception as e:
        # print(reader_name,'could not be loaded for',module_name)
        # NOTE(review): `reader` is assigned but never used — presumably meant
        # for logging/diagnostics.
        reader = f'{reader_name} could not be loaded for {module_name}'


# Keep only entries whose reader class was successfully bound above.
available_readers_dict = {k:v for k,v in available_readers_dict.items() if v in globals()}
30 |
def get_model_reader(model_obj, **kwargs):
    """
    Returns an object reader instance appropriate for `model_obj`, selected by
    the object's module path; raises TypeError for unsupported model types.
    """
    module_path = type(model_obj).__module__
    if module_path in available_readers_dict:
        reader_class = globals()[available_readers_dict[module_path]]
        return reader_class(model_obj, **kwargs)
    raise TypeError('model_obj has an unknown type that could not be read with cobamp:', module_path)
36 |
37 |
38 | from . import method_wrappers
39 | from .method_wrappers import KShortestEFMEnumeratorWrapper, KShortestEFPEnumeratorWrapper, KShortestMCSEnumeratorWrapper
40 | from .method_wrappers import KShortestGenericMCSEnumeratorWrapper, KShortestGeneticMCSEnumeratorWrapper
41 |
42 |
43 |
--------------------------------------------------------------------------------
/src/cobamp/wrappers/cobamp.py:
--------------------------------------------------------------------------------
1 | from cobamp.wrappers.core import AbstractObjectReader
2 | import numpy as np
3 |
def cobamp_simulate(cobamp_model, cobamp_func, bound_change, objective_coefficient, minimize, result_func=None,
                    func_args=None):
    """
    Runs `cobamp_func` on a temporary (context-managed) copy of the model,
    optionally applying reaction bound changes and an objective first, and
    passes the solution through `result_func` (identity when omitted).
    """
    func_args = {} if func_args is None else func_args
    result_func = (lambda x: x) if result_func is None else result_func

    with cobamp_model as context_model:
        if bound_change is not None:
            # bound_change maps reaction id -> (lower, upper) pair.
            for rx_id, limits in bound_change.items():
                context_model.set_reaction_bounds(rx_id, lb=limits[0], ub=limits[1])

        if None not in [objective_coefficient, minimize]:
            context_model.set_objective(objective_coefficient, minimize)

        sol = cobamp_func(model=context_model, **func_args)
        return result_func(sol)
21 |
def cobamp_simulation_result_function(sol):
    """Condense a cobamp solution into a (is_optimal, objective_value, flux_dict) triple."""
    is_optimal = sol.status() == 'optimal'
    flux_values = sol.to_series().to_dict()
    return is_optimal, sol.objective_value(), flux_values
24 |
def cobamp_fba(model, **func_args):
    """Run flux balance analysis by delegating to the model's optimizer; extra kwargs are ignored."""
    solution = model.optimize()
    return solution
27 |
class CobampModelObjectReader(AbstractObjectReader):
    """Reader for cobamp's native ConstraintBasedModel instances."""

    def get_stoichiometric_matrix(self):
        # the cobamp model already exposes its stoichiometric matrix directly
        return self.model.get_stoichiometric_matrix()

    def get_model_bounds(self, as_dict, separate_list=False):
        if as_dict:
            return dict(zip(self.r_ids, self.model.bounds))
        if separate_list:
            # transpose the (lb, ub) pairs into two sequences: lower bounds, upper bounds
            return [grouped for grouped in list(zip(*tuple(self.model.bounds)))]
        return tuple(self.model.bounds)

    def get_irreversibilities(self, as_index):
        flags = [not self.model.is_reversible_reaction(r_id) for r_id in self.r_ids]
        return list(np.where(flags)[0]) if as_index else flags

    def get_reaction_and_metabolite_ids(self):
        return self.model.reaction_names, self.model.metabolite_names

    def get_rx_instances(self):
        # cobamp models do not expose per-reaction objects
        return None

    def get_model_gpr_strings(self):
        return [self.model.gpr[idx] for idx in range(len(self.r_ids))]
--------------------------------------------------------------------------------
/src/cobamp/wrappers/cobra.py:
--------------------------------------------------------------------------------
1 | import warnings
2 | import numpy as np
3 |
4 | from cobamp.wrappers.core import AbstractObjectReader
5 | from cobra.exceptions import Infeasible, Unbounded, UndefinedSolution
6 |
def cobra_simulate(cobra_model, cobra_func, bound_change, objective_coefficient, minimize, result_func, func_args=None):
    """
    Run cobra_func on cobra_model inside a temporary model context, optionally overriding
    reaction bounds, objective coefficients and objective sense first.

    Solver failures (Infeasible, Unbounded, UndefinedSolution) yield sol=None so that
    result_func can report a failed simulation; any other exception propagates.

    Parameters
    ----------
    cobra_model: a cobrapy Model (context-manager reverts any changes on exit)
    cobra_func: callable invoked as cobra_func(model=..., **func_args)
    bound_change: dict mapping reaction id -> (lb, ub), or None
    objective_coefficient: dict mapping reaction id -> coefficient, or None
    minimize: True/False to set the objective direction, or None to leave it unchanged
    result_func: post-processing applied to the raw solution (or to None on solver failure)
    func_args: optional extra keyword arguments for cobra_func
    """
    if func_args is None:
        func_args = {}
    with cobra_model as context_model:
        if bound_change is not None:
            for k, v in bound_change.items():
                context_model.reactions.get_by_id(k).bounds = v

        if objective_coefficient is not None:
            context_model.objective = {context_model.reactions.get_by_id(k): v
                                       for k, v in objective_coefficient.items()}

        if minimize is not None:
            context_model.objective.direction = 'min' if minimize else 'max'

        # BUGFIX: the previous version re-raised unexpected exceptions inside a try/finally whose
        # `return` swallowed the raise and then hit a NameError on the undefined `sol`. Solver
        # failures are now handled explicitly and anything else propagates to the caller.
        try:
            sol = cobra_func(model=context_model, **func_args)
        except (Infeasible, Unbounded, UndefinedSolution):
            sol = None
        return result_func(sol)
28 |
def cobra_simulation_result_function(sol):
    """
    Condense a cobrapy solution into a (is_optimal, objective_value, flux_dict) triple.
    A None solution (failed solve) maps to (False, None, None).
    """
    if sol is None:
        return (False, None, None)
    return (sol.status == 'optimal', sol.objective_value, sol.fluxes.to_dict())
34 |
def cobra_fba(model, **func_args):
    """Run flux balance analysis via cobrapy's optimize; extra kwargs are ignored."""
    solution = model.optimize()
    return solution
37 |
class COBRAModelObjectReader(AbstractObjectReader):
    """Reader for cobrapy models; also accepts a path to a model file, read with cobrapy's parsers."""

    def __read_model(self, path, format, **kwargs):
        """
        Read a model file with the appropriate cobrapy parser.

        Parameters
        ----------
        path: path to the model file
        format: 'xml', 'sbml', 'mat', 'json', or None to infer the format from the file extension

        Raises
        ------
        ValueError: when the (inferred) format has no associated parser
        """
        from cobra.io import read_sbml_model, load_matlab_model, load_json_model
        parse_functions = {
            'xml': read_sbml_model,
            'mat': load_matlab_model,
            'json': load_json_model,
            'sbml': read_sbml_model
        }
        # infer the format from the file extension when not explicitly supplied
        # (identity comparison with None instead of the previous `format == None`)
        nformat = path.split('.')[-1] if format is None else format
        if nformat in parse_functions:
            return parse_functions[nformat](path, **kwargs)
        raise ValueError('Format '+str(nformat)+' is invalid or not yet available through the cobrapy readers. '+
                         'Choose one of the following: '+','.join(parse_functions.keys()))

    def __init__(self, model, gpr_gene_parse_function=str, format=None, **kwargs):
        """
        Parameters
        ----------
        model: a cobrapy Model instance, or a str path to a model file readable by cobrapy
        gpr_gene_parse_function: callable applied to each gene token when parsing GPR strings
        format: optional explicit file format, used only when model is a path
        """
        if isinstance(model, str):
            warnings.warn('Reading model with cobrapy from the provided path...')
            model = self.__read_model(model, format, **kwargs)
        super().__init__(model, gpr_gene_parse_function=gpr_gene_parse_function, **kwargs)

    def get_stoichiometric_matrix(self):
        # dense |metabolites| x |reactions| matrix built from each reaction's metabolite coefficients
        S = np.zeros((len(self.m_ids), len(self.r_ids)))
        for i, r_id in enumerate(self.r_ids):
            for metab, coef in self.model.reactions.get_by_id(r_id).metabolites.items():
                S[self.m_ids.index(metab.id), i] = coef

        return S

    def get_model_bounds(self, as_dict=False, separate_list=False):
        bounds = [r.bounds for r in self.rx_instances]
        if as_dict:
            return dict(zip(self.r_ids, bounds))
        else:
            if separate_list:
                # transpose the (lb, ub) pairs into [lower bounds, upper bounds]
                # (loop variable renamed so it no longer shadows `bounds`)
                return [list(bound_group) for bound_group in zip(*tuple(bounds))]
            else:
                return tuple(bounds)

    def get_irreversibilities(self, as_index):
        irrev = [not r.reversibility for r in self.rx_instances]
        if as_index:
            irrev = list(np.where(irrev)[0])
        return irrev

    def get_rx_instances(self):
        return [self.model.reactions.get_by_id(rx) for rx in self.r_ids]

    def get_reaction_and_metabolite_ids(self):
        return tuple([[x.id for x in lst] for lst in (self.model.reactions, self.model.metabolites)])

    def get_model_genes(self):
        return set(g.id for g in self.model.genes)

    def get_model_gpr_strings(self, apply_fx=None):
        # optionally transform each GPR string with apply_fx
        return [apply_fx(r.gene_reaction_rule) if apply_fx is not None
                else r.gene_reaction_rule for r in self.model.reactions]
100 |
--------------------------------------------------------------------------------
/src/cobamp/wrappers/core.py:
--------------------------------------------------------------------------------
1 | import abc
2 | import warnings
3 |
4 | import numpy as np
5 | from numpy import where
6 |
7 | from cobamp.core.models import ConstraintBasedModel
8 | from cobamp.gpr.core import GPRContainer
9 | from cobamp.utilities.parallel import batch_run, cpu_count
10 |
11 | MAX_PRECISION = 1e-10
12 |
13 |
14 | ## TODO: Add simulation and result functions on the wrappers module to automatically detect which one to use
class ConstraintBasedModelSimulator(object):
    """
    Simulates a constraint-based model through caller-supplied simulation and
    result-processing functions, with optional parallel batching.
    """
    def __init__(self, model, simulation_function, result_function):
        """
        Parameters
        ----------
        model: the model object, passed verbatim to simulation_function
        simulation_function: callable(model, func, bound_change, objective_coefficient,
            minimize, result_function, func_args) performing one simulation
        result_function: post-processing callable forwarded to simulation_function
        """
        self.__model = model
        self.__simulation_function = simulation_function
        self.__result_function = result_function

    def simulate(self, func, bound_change=None, objective_coefficient=None, minimize=None, func_args=None):
        """
        Run a single simulation and return its processed result.

        BUGFIX: the computed result was previously discarded (no return statement).
        """
        return self.__simulation_function(self.__model, func, bound_change, objective_coefficient, minimize,
                                          self.__result_function, func_args)

    def batch_simulate(self, func, bound_changes, objective_coefficients, minimize, func_args=None, mp_threads=None):
        """
        Run many simulations (one per item in the argument sequences), broadcasting any
        length-1 sequence against the longest one. Returns the list of processed results.

        Parameters
        ----------
        bound_changes / objective_coefficients / minimize: parallel sequences of per-run arguments
        mp_threads: number of worker threads; defaults to the CPU count
        """
        mp_params = {'model': self.__model, 'func_args': func_args}

        def check_multiple_inputs(bound_changes, objective_coefficients, minimize):
            # broadcast any length-1 argument sequence to max_len; reject other length mismatches
            arg_list = bound_changes, objective_coefficients, minimize
            corrected_args = []
            max_len = max(map(len, arg_list))
            for arg in arg_list:
                if len(arg) == 1:
                    corrected_args.append([arg[0]] * max_len)
                elif len(arg) == max_len:
                    corrected_args.append(arg)
                else:
                    raise Exception('One of the arguments contains more than 1 and less than '+str(max_len)+' items.')
            return corrected_args

        def batch_simulation_function(sequence, params):
            # worker: unpack one (bound_change, objective_coefficient, minimize) triple and simulate
            model_mp = params['model']
            func_args_mp = params['func_args']
            bc, oc, minimize_flag = sequence  # renamed from `min` to avoid shadowing the builtin
            return self.__simulation_function(model_mp, func, bc, oc, minimize_flag,
                                              self.__result_function, func_args_mp)

        sequence = list(zip(*check_multiple_inputs(bound_changes, objective_coefficients, minimize)))
        return batch_run(batch_simulation_function, sequence, mp_params,
                         threads=cpu_count() if mp_threads is None else mp_threads)
51 |
class AbstractObjectReader(object):
    """
    An abstract class for reading metabolic model objects from external frameworks, and extracting the data needed for
    pathway analysis methods. Also deals with name conversions.
    """
    __metaclass__ = abc.ABCMeta

    def __init__(self, model, gpr_and_char='and', gpr_or_char='or', gpr_gene_parse_function=str, ttg_ratio=20):
        """
        Parameters

        ----------

        model: A Model instance from the external framework to use. Must be registered in the dict stored as
        external_wrappers.model_readers along with its reader.

        gpr_and_char: token representing boolean AND in the model's GPR strings
        gpr_or_char: token representing boolean OR in the model's GPR strings
        gpr_gene_parse_function: callable applied to each gene token when parsing GPRs
        ttg_ratio: token-to-gene ratio forwarded to GPRContainer
        """
        self.model = model
        self.initialize(gpr_and_char, gpr_or_char, gpr_gene_parse_function, ttg_ratio)

    def initialize(self, gpr_and_char='and', gpr_or_char='or', gpr_gene_parse_function=str, ttg_ratio=20):
        """
        This method re-initializes the class attributes from the current state of self.model
        """
        # identifiers and per-framework reaction instances, as provided by the concrete subclass
        self.r_ids, self.m_ids = self.get_reaction_and_metabolite_ids()
        self.rx_instances = self.get_rx_instances()
        self.S = self.get_stoichiometric_matrix()
        # parallel tuples of lower and upper bounds, split from the (lb, ub) pairs
        self.lb, self.ub = tuple(zip(*self.get_model_bounds(False)))
        self.irrev_bool = self.get_irreversibilities(False)
        self.irrev_index = self.get_irreversibilities(True)
        self.bounds_dict = self.get_model_bounds(True)
        # this assignment triggers the property setter below, building a GPRContainer
        self.gene_protein_reaction_rules = gpr_and_char, gpr_or_char, gpr_gene_parse_function, ttg_ratio
        # raw GPR parsing parameters are kept so to_cobamp_cbm can rebuild an equivalent container
        self.__gpr_read_params = gpr_and_char, gpr_or_char, gpr_gene_parse_function, ttg_ratio
    @property
    def gene_protein_reaction_rules(self):
        # GPRContainer built by the setter below
        return self.__gene_protein_reaction_rules


    @gene_protein_reaction_rules.setter
    def gene_protein_reaction_rules(self, value):
        # value is a 4-tuple: (and_char, or_char, gene parse function, token-to-gene ratio)
        and_char, or_char, apply_fx, ttg_ratio = value
        self.__gene_protein_reaction_rules = GPRContainer(
            gpr_list=self.get_model_gpr_strings(),
            and_char=and_char, or_char=or_char, apply_fx=apply_fx, ttg_ratio=ttg_ratio
        )

    @abc.abstractmethod
    def get_stoichiometric_matrix(self):
        """
        Returns a 2D numpy array with the stoichiometric matrix whose metabolite and reaction indexes match the names
        defined in the class variables r_ids and m_ids
        """
        pass

    @abc.abstractmethod
    def get_model_bounds(self, as_dict, separate_list):
        """
        Returns the lower and upper bounds for all fluxes in the model. This either comes in the form of an ordered list
        with tuples of size 2 (lb,ub) or a dictionary with the same tuples mapped by strings with reaction identifiers.

        Parameters

        ----------

        as_dict: A boolean value that controls whether the result is a dictionary mapping str to tuple of size 2
        separate_list: A boolean value that controls whether the result is two separate sequences, one for lb
        and the other for ub
        """
        pass

    @abc.abstractmethod
    def get_irreversibilities(self, as_index):
        """
        Returns a vector representing irreversible reactions, either as a vector of booleans (each value is a flux,
        ordered in the same way as reaction identifiers) or as a vector of reaction indexes.

        Parameters

        ----------

        as_index: A boolean value that controls whether the result is a vector of indexes

        """
        pass

    @abc.abstractmethod
    def get_rx_instances(self):
        """
        Returns the reaction instances contained in the model. Varies depending on the framework.
        """
        pass

    @abc.abstractmethod
    def get_reaction_and_metabolite_ids(self):
        """
        Returns two ordered iterables containing the metabolite and reaction ids respectively.
        """
        pass

    # @abc.abstractmethod
    # def get_model_genes(self):
    # 	"""
    #
    # 	Returns the identifiers for the genes contained in the model
    #
    # 	"""

    @property
    def genes(self):
        # gene identifiers as extracted from the parsed GPR rules
        return self.gene_protein_reaction_rules.get_genes()


    def reaction_id_to_index(self, id):
        """
        Returns the numerical index of a reaction when given a string representing its identifier.

        Parameters

        ----------

        id: A reaction identifier as a string

        """
        return self.r_ids.index(id)

    def metabolite_id_to_index(self, id):
        """
        Returns the numerical index of a metabolite when given a string representing its identifier.

        Parameters

        ----------

        id: A metabolite identifier as a string

        """
        return self.m_ids.index(id)

    def get_gene_protein_reaction_rule(self, id):
        # id indexes into the GPRContainer (one entry per reaction)
        return self.gene_protein_reaction_rules[id]

    def convert_constraint_ids(self, tup, yield_constraint):
        # yield constraints reference two reactions (numerator/denominator); flux constraints one
        if yield_constraint:
            constraint = tuple(list(map(self.reaction_id_to_index, tup[:2])) + list(tup[2:]))
        else:
            constraint = tuple([self.reaction_id_to_index(tup[0])] + list(tup[1:]))
        return constraint


    def get_reaction_scores(self, expression, and_fx=min, or_fx=max, as_vector=False):
        """
        Map per-gene expression values onto reactions by evaluating each reaction's GPR rule,
        combining genes with and_fx/or_fx. Returns a dict keyed by reaction id, or an ordered
        list when as_vector is True.
        """
        exp_map = {rx: self.gene_protein_reaction_rules.eval_gpr(i, expression, or_fx=or_fx, and_fx=and_fx)
                   for i,rx in enumerate(self.r_ids)}

        if as_vector:
            return [exp_map[k] for k in self.r_ids]
        else:
            return exp_map

    #@warnings.warn('g2rx will be deprecated in a future release. Use the get_reaction_scores method instead',
    #			   DeprecationWarning)
    def g2rx(self, expression, and_fx=min, or_fx=max, as_vector=False, apply_fx=str):
        # deprecated alias of get_reaction_scores; note that apply_fx is accepted but unused
        warnings.warn('g2rx will be deprecated in a future release. Use the get_reaction_scores method instead',
                      DeprecationWarning)
        return self.get_reaction_scores(expression, and_fx, or_fx, as_vector)


    @abc.abstractmethod
    def get_model_gpr_strings(self):
        # returns one GPR string per reaction, ordered as r_ids
        pass


    def to_cobamp_cbm(self, solver=None):
        """
        Convert the wrapped model into a cobamp ConstraintBasedModel.

        Parameters

        ----------

        solver: None/False for no optimizer, True for the default solver, or a solver name string
        """
        and_char, or_char, gpr_gene_parse_function, ttg_ratio = self.__gpr_read_params
        ngprs = GPRContainer(
            gpr_list=self.get_model_gpr_strings(),
            and_char=and_char, or_char=or_char, apply_fx=gpr_gene_parse_function, ttg_ratio=ttg_ratio)

        return ConstraintBasedModel(
            S=self.get_stoichiometric_matrix(),
            thermodynamic_constraints=[tuple(float(k) for k in l) for l in self.get_model_bounds()],
            reaction_names=self.r_ids,
            metabolite_names=self.m_ids,
            # an optimizer is attached when solver is True or a solver name string is given
            optimizer= (solver == True) or (solver is not None and solver != False),
            # a concrete solver name is only forwarded when one was passed explicitly
            solver=solver if solver not in (True, False) else None,
            gprs=ngprs
        )
239 |
class MatFormatReader(AbstractObjectReader):
    """Reader for COBRA models stored as MATLAB struct arrays (e.g. loaded via scipy.io.loadmat)."""
    def get_stoichiometric_matrix(self):
        # 'S' is stored as a sparse matrix inside a 1x1 struct array; densify it
        return (self.model['S'][0][0]).toarray()

    def get_model_bounds(self, as_dict=False, separate_list=False):
        # 'lb'/'ub' are column vectors inside 1x1 struct arrays
        lb, ub = [(self.model[k][0][0]).ravel() for k in ('lb', 'ub')]
        tuples = [(r, (l, u)) for r, l, u in zip(self.r_ids, lb, ub)]
        if as_dict:
            return dict(tuples)
        else:
            if separate_list:
                return lb, ub
            else:
                return tuple([(l, u) for l, u in zip(lb, ub)])

    def get_irreversibilities(self, as_index):
        if 'rev' in self.model.dtype.names:
            # NOTE(review): in the conventional COBRA .mat format 'rev' marks REVERSIBLE reactions,
            # so returning it unnegated here would be the opposite of the fallback branch below,
            # which computes irreversibility from the bounds — confirm the intended 'rev' semantics
            bv = (self.model['rev'][0][0]).ravel().astype(bool)
        else:
            # a reaction is irreversible when its bounds do not straddle zero
            bv = np.array([(l >= 0 and u >= 0) or (l <= 0 and u <= 0) for l, u in zip(self.lb, self.ub)]).astype(bool)
        if as_index:
            return where(bv)[0]
        else:
            return bv

    def get_rx_instances(self):
        # .mat structs carry no per-reaction objects
        pass

    def get_reaction_and_metabolite_ids(self):
        # 'rxns'/'mets' are cell arrays of 1x1 strings
        return [[k[0][0] for k in self.model[t][0][0]] for t in ['rxns', 'mets']]

    def get_model_genes(self):
        return set([k[0][0] for k in self.model['genes'][0][0]])

    def get_model_gpr_strings(self):
        # empty cells map to the empty string
        return [k[0][0] if len(k[0]) > 0 else '' for k in self.model['grRules'][0][0]]
--------------------------------------------------------------------------------
/src/cobamp/wrappers/external_wrappers.py:
--------------------------------------------------------------------------------
1 | import warnings
2 | from .cobra import COBRAModelObjectReader
3 | from .framed import FramedModelObjectReader
4 | from .cobamp import CobampModelObjectReader
5 | from .core import MatFormatReader, AbstractObjectReader
6 |
7 | from ..wrappers import get_model_reader, model_readers
8 |
9 | warnings.warn(
10 | '''\nThe wrappers.external_wrappers module will be deprecated in a future release in favour of the wrappers module.
11 | Available ModelObjectReader classes can still be loaded using cobamp.wrappers.. An appropriate model
12 | reader can also be created using the get_model_reader function on cobamp.wrappers''')
13 |
--------------------------------------------------------------------------------
/src/cobamp/wrappers/framed.py:
--------------------------------------------------------------------------------
1 | from cobamp.wrappers.core import AbstractObjectReader
2 | import numpy as np
3 |
4 |
class FramedModelObjectReader(AbstractObjectReader):
    """Reader for metabolic models loaded with the framed package."""

    def get_stoichiometric_matrix(self):
        return np.array(self.model.stoichiometric_matrix())

    def get_model_bounds(self, as_dict=False, separate_list=False):
        pairs = [(reaction.lb, reaction.ub) for reaction in self.rx_instances]
        if as_dict:
            return dict(zip(self.r_ids, pairs))
        if separate_list:
            # transpose the (lb, ub) pairs into two sequences: lower bounds, upper bounds
            return [grouped for grouped in list(zip(*tuple(pairs)))]
        return tuple(pairs)

    def get_irreversibilities(self, as_index):
        flags = [not reaction.reversible for reaction in self.rx_instances]
        return list(np.where(flags)[0]) if as_index else flags

    def get_reaction_and_metabolite_ids(self):
        return tuple(self.model.reactions.keys()), tuple(self.model.metabolites.keys())

    def get_rx_instances(self):
        return [self.model.reactions[name] for name in self.r_ids]

    def get_model_gpr_strings(self):
        return [reaction.gene_reaction_rule for reaction in self.get_rx_instances()]
34 |
--------------------------------------------------------------------------------
/src/cobamp/wrappers/method_wrappers.py:
--------------------------------------------------------------------------------
1 | from cobamp.algorithms.kshortest import *
2 | from cobamp.core.linear_systems import IrreversibleLinearSystem, DualLinearSystem, IrreversibleLinearPatternSystem, \
3 | GenericDualLinearSystem
4 |
5 | from itertools import product
6 |
7 | from cobamp.wrappers import get_model_reader
8 |
9 |
class KShortestEnumeratorWrapper(object):
    """
    An abstract class for methods involving the K-shortest EFM enumeration algorithms
    """
    # NOTE: the docstring above was previously placed after __metaclass__, where it was a
    # no-op string statement rather than the class __doc__
    __metaclass__ = abc.ABCMeta

    ALGORITHM_TYPE_ITERATIVE = 'kse_iterative'
    ALGORITHM_TYPE_POPULATE = 'kse_populate'

    # maps each algorithm type to the property controlling its stopping criterion
    __alg_to_prop_name = {
        ALGORITHM_TYPE_ITERATIVE: K_SHORTEST_OPROPERTY_MAXSOLUTIONS,
        ALGORITHM_TYPE_POPULATE: K_SHORTEST_OPROPERTY_MAXSIZE
    }

    __alg_to_alg_name = {
        ALGORITHM_TYPE_ITERATIVE: K_SHORTEST_METHOD_ITERATE,
        ALGORITHM_TYPE_POPULATE: K_SHORTEST_METHOD_POPULATE
    }

    def __init__(self, model, algorithm_type=ALGORITHM_TYPE_POPULATE, stop_criteria=1, forced_solutions=None,
                 excluded_solutions=None, solver='CPLEX', force_bounds=None, n_threads=0, workmem=None, big_m=False,
                 max_populate_sols_override=None, time_limit=None, big_m_value=None, cut_function=None, extra_args=None,
                 pre_enum_function=None):
        """

        Parameters

        ----------

        model: A Model instance from the external framework to use. Must be registered in the dict stored as
        external_wrappers.model_readers along with its reader.

        algorithm_type: ALGORITHM_TYPE_ITERATIVE or ALGORITHM_TYPE_POPULATE constants stored as class attributes.
            ALGORITHM_TYPE_ITERATIVE is a slower method (regarding EFMs per unit of time) that enumerates EFMs one
            at a time.
            ALGORITHM_TYPE_POPULATE enumerates EFMs one size at a time. This is the preferred method as it's
            generally faster.

        stop_criteria: An integer that defines the stopping point for EFM enumeration. Either refers to the maximum
        number of EFMs or the maximum size they can reach before the enumeration stops.

        forced_solutions: A list of KShortestSolution or lists of reaction indexes that must show up in the
        enumeration process. (experimental feature)

        excluded_solutions: A list of KShortestSolution or lists of reaction indexes that cannot show up in the
        enumeration process. (experimental feature)

        force_bounds: A dict mapping reaction indexes (int for now) with tuples containing lower and upper bounds
        An experimental feature meant to force certain phenotypes on EFP/EFMs. Defaults to no forced bounds
        (None replaces the previous mutable {} default).

        n_threads: An integer value defining the amount of threads available to the solver

        workmem: An integer value defining the amount of memory in MegaBytes available to the solver
        """
        # avoid a shared mutable default argument
        if force_bounds is None:
            force_bounds = {}

        self.__model = get_model_reader(model)
        self.model_reader = self.__model

        self.__algo_properties = KShortestProperties()
        self.__algo_properties[K_SHORTEST_MPROPERTY_METHOD] = self.__alg_to_alg_name[algorithm_type]
        # is_efp is expected to be set by the concrete subclass before calling super().__init__
        self.__algo_properties[K_SHORTEST_MPROPERTY_TYPE_EFP] = self.is_efp
        self.__algo_properties[K_SHORTEST_OPROPERTY_N_THREADS] = n_threads
        self.__algo_properties[K_SHORTEST_OPROPERTY_WORKMEMORY] = workmem
        self.__algo_properties[K_SHORTEST_OPROPERTY_TIMELIMIT] = 0 if time_limit is None else time_limit
        self.__algo_properties[K_SHORTEST_OPROPERTY_BIG_M_CONSTRAINTS] = big_m
        self.__algo_properties[self.__alg_to_prop_name[algorithm_type]] = stop_criteria
        if big_m_value is not None:
            self.__algo_properties[K_SHORTEST_OPROPERTY_BIG_M_VALUE] = big_m_value
        if (max_populate_sols_override is not None) and algorithm_type == self.ALGORITHM_TYPE_POPULATE:
            self.__algo_properties[K_SHORTEST_OPROPERTY_MAXSOLUTIONS] = max_populate_sols_override


        def preprocess_cuts(cut_list):
            # normalize user-supplied cuts: reaction identifiers (str) become indexes;
            # KShortestSolution instances pass through untouched
            if cut_list is not None:
                new_cuts = []
                for cut in cut_list:
                    new_cut = cut
                    if not isinstance(cut, KShortestSolution):
                        new_cut = [self.model_reader.reaction_id_to_index(k) if isinstance(k, str) else k for k in cut]
                    new_cuts.append(new_cut)
                return new_cuts
            else:
                return []

        self.__forced_solutions = preprocess_cuts(forced_solutions)
        self.__excluded_solutions = preprocess_cuts(excluded_solutions)

        # force_bounds keys are reaction identifiers; convert them to indexes
        self.force_bounds = {self.model_reader.r_ids.index(k): v for k, v in force_bounds.items()}
        self.solver = solver
        self.pre_enum_function = pre_enum_function
        self.cut_function = cut_function
        self.extra_args = extra_args
        self.__setup_algorithm()
        self.enumerated_sols = []

    def __setup_algorithm(self):
        """
        Creates the algorithms instance

        Returns:

        """
        self.algo = KShortestEFMAlgorithm(self.__algo_properties, False)

    def __get_forced_solutions(self):
        """
        Returns: A list of KShortestSolution or lists of reaction indexes

        """
        return self.__forced_solutions

    def __get_excluded_solutions(self):
        """
        Returns: A list of KShortestSolution or lists of reaction indexes

        """
        return self.__excluded_solutions

    @abc.abstractmethod
    def get_linear_system(self):
        """

        Returns a KShortestCompatibleLinearSystem instance build from the model
        -------

        """
        return

    def get_enumerator(self):
        """
        Returns an iterator that yields a single EFM or a list of multiple EFMs of the same size. Call next(iterator) to
        obtain the next set of EFMs.
        """
        enumerator = self.algo.get_enumerator(
            linear_system=self.get_linear_system(),
            forced_sets=self.__get_forced_solutions(),
            excluded_sets=self.__get_excluded_solutions())

        # optional hook run once before enumeration starts (e.g. to set objective weights)
        if self.pre_enum_function is not None:
            self.pre_enum_function(self.algo.ksh, self.extra_args)
        for solarg in enumerator:
            self.enumerated_sols.append(solarg)
            yield self.decode_solution(solarg)
            # optional hook run after each yielded solution (e.g. to add exclusion cuts)
            if self.cut_function is not None:
                cut_arg = solarg if isinstance(solarg, (tuple, list)) else [solarg]
                self.cut_function(cut_arg, self.algo.ksh, self.extra_args)

    def decode_k_shortest_solution(self, sol):
        ## TODO: Make MAX_PRECISION a parameter for linear systems or the KShortestAlgorithm
        # map active indicator variable indexes back to reaction identifiers with signed values
        return {self.model_reader.r_ids[k]: sol.attribute_value(sol.SIGNED_VALUE_MAP)[k]
                for k in sol.get_active_indicator_varids()}

    def decode_solution(self, solarg):
        # populate mode yields lists of solutions; iterate mode yields single solutions
        if isinstance(solarg, (list, tuple)):
            return [self.decode_k_shortest_solution(sol) for sol in solarg]
        else:
            return self.decode_k_shortest_solution(solarg)
166 |
class KShortestEFMEnumeratorWrapper(KShortestEnumeratorWrapper):
    """
    Extension of the abstract class KShortestEnumeratorWrapper that takes a metabolic model as input and yields
    elementary flux modes.
    """

    def __init__(self, model, non_consumed, consumed, produced, non_produced, subset=None, **kwargs):
        """

        Parameters

        ----------

        model: A Model instance from the external framework to use. Must be registered in the dict stored as
        external_wrappers.model_readers along with its reader.

        non_consumed: An Iterable of identifiers of external metabolites that are not consumed in the model.

        consumed: An Iterable of identifiers of external metabolites guaranteed to be consumed.

        produced: An Iterable of identifiers of external metabolites guaranteed to be produced.

        non_produced: An Iterable of identifiers of external metabolites that are not produced in the model.

        subset: Optional Iterable of reaction identifiers; when given, enumeration is restricted to flux
        patterns over this subset (an IrreversibleLinearPatternSystem is built instead).

        (Docstring fix: consumed/produced descriptions were previously swapped, and the inputs are
        metabolite identifiers — they are converted to matrix indices internally.)
        """
        self.is_efp = False
        super().__init__(model, **kwargs)
        self.__consumed, self.__non_consumed, self.__produced, self.__non_produced, self.__subset = \
            consumed, non_consumed, produced, non_produced, subset

    def get_linear_system(self):
        """Build the linear system from the reader's stoichiometry, bounds and metabolite constraints."""
        to_convert = [self.__consumed, self.__non_consumed, self.__produced, self.__non_produced]
        # metabolite identifiers are converted to stoichiometric matrix row indices
        conv_cn, conv_nc, conv_pr, conv_np = [[self.model_reader.metabolite_id_to_index(k) for k in lst] for lst in to_convert]
        lb, ub = [array(k) for k in self.model_reader.get_model_bounds(as_dict=False, separate_list=True)]
        if self.__subset is None:
            return IrreversibleLinearSystem(
                S=self.model_reader.S,
                lb=lb, ub=ub,
                consumed=conv_cn,
                non_consumed=conv_nc,
                produced=conv_pr,
                non_produced=conv_np,
                solver=self.solver,
                force_bounds=self.force_bounds
            )
        else:
            return IrreversibleLinearPatternSystem(
                S=self.model_reader.S,
                lb=lb, ub=ub,
                consumed=conv_cn,
                non_consumed=conv_nc,
                produced=conv_pr,
                non_produced=conv_np,
                subset=[self.model_reader.reaction_id_to_index(s) for s in self.__subset],
                solver=self.solver,
                force_bounds=self.force_bounds
            )
225 |
class KShortestEFPEnumeratorWrapper(KShortestEnumeratorWrapper):
    """
    Extension of the abstract class KShortestEnumeratorWrapper that takes a metabolic model as input and yields
    elementary flux patterns.
    """

    def __init__(self, model, subset, non_consumed=(), consumed=(), produced=(), non_produced=(), **kwargs):
        """
        Parameters

        ----------

        model: A Model instance from the external framework to use.

        subset: Iterable of reaction identifiers defining the pattern subset.

        non_consumed / consumed / produced / non_produced: Iterables of external metabolite identifiers
        constraining the flux patterns. Defaults are empty tuples (immutable, replacing the previous
        mutable list defaults).
        """
        self.is_efp = True
        super().__init__(model, **kwargs)
        self.__subset = subset
        self.__consumed, self.__non_consumed, self.__produced, self.__non_produced = consumed, non_consumed, produced, non_produced

    def get_linear_system(self):
        ## TODO: change irrev to lb/ub structure
        to_convert = [self.__consumed, self.__non_consumed, self.__produced, self.__non_produced]
        lb, ub = [array(k) for k in self.model_reader.get_model_bounds(as_dict=False, separate_list=True)]
        # metabolite identifiers -> row indices; reaction identifiers -> column indices
        conv_cn, conv_nc, conv_pr, conv_np = [[self.model_reader.metabolite_id_to_index(k) for k in lst] for lst in to_convert]
        conv_subsets = [self.model_reader.reaction_id_to_index(s) for s in self.__subset]
        return IrreversibleLinearPatternSystem(
            S=self.model_reader.S,
            lb=lb, ub=ub,
            subset=conv_subsets,
            consumed=conv_cn,
            non_consumed=conv_nc,
            produced=conv_pr,
            non_produced=conv_np,
            solver=self.solver,
            force_bounds=self.force_bounds
        )
255 |
256 |
257 |
258 |
class KShortestMCSEnumeratorWrapper(KShortestEnumeratorWrapper):
    """
    Extension of the abstract class KShortestEnumeratorWrapper that takes a metabolic model as input and yields
    minimal cut sets.

    """

    def __init__(self, model, target_flux_space_dict, target_yield_space_dict, **kwargs):
        self.is_efp = False
        super().__init__(model, **kwargs)
        # convert the target flux/yield spaces into flat intervention-problem constraints
        constraint_groups = AbstractConstraint.convert_tuple_intervention_problem(
            target_flux_space_dict, target_yield_space_dict, self.model_reader)
        self.__ip_constraints = list(chain(*constraint_groups))

    def materialize_intv_problem(self):
        # build the target matrix (T, b) for the intervention problem
        problem = InterventionProblem(self.model_reader.S)
        return problem.generate_target_matrix(self.__ip_constraints)

    def get_linear_system(self):
        bound_lists = self.model_reader.get_model_bounds(as_dict=False, separate_list=True)
        lb, ub = [array(bounds) for bounds in bound_lists]
        T, b = self.materialize_intv_problem()
        return DualLinearSystem(self.model_reader.S, lb, ub, T, b, solver=self.solver)
279 |
280 |
class KShortestGenericMCSEnumeratorWrapper(KShortestEnumeratorWrapper):
    """
    MCS enumeration against a caller-supplied dual matrix; enumerated indicator variables are
    decoded back to identifiers through dual_var_mapper.
    """

    def __init__(self, model, target_flux_space_dict, target_yield_space_dict, dual_matrix, dual_var_mapper, **kwargs):
        # dual_matrix: matrix defining the dual system's variables
        # dual_var_mapper: dict mapping identifier -> dual variable index (inverted below to index -> identifier)
        self.is_efp = False
        super().__init__(model, **kwargs)
        self.__ip_constraints = list(chain(*AbstractConstraint.convert_tuple_intervention_problem(
            target_flux_space_dict, target_yield_space_dict, self.model_reader)))

        # invert the mapper so solutions can be decoded from variable index back to identifier
        self.dual_matrix, self.dual_var_mapper = dual_matrix, {v:k for k,v in dual_var_mapper.items()}

    def decode_k_shortest_solution(self, sol):
        ## TODO: Make MAX_PRECISION a parameter for linear systems or the KShortestAlgorithm
        # fall back to reaction identifiers when no mapper is available
        mapper = self.dual_var_mapper if self.dual_var_mapper is not None else self.model_reader.r_ids
        return {mapper[k]: sol.attribute_value(sol.SIGNED_VALUE_MAP)[k] for k in sol.get_active_indicator_varids()}

    def get_linear_system(self):
        # build the intervention target matrix and wrap it in a generic dual system
        T, b = InterventionProblem(self.model_reader.S).generate_target_matrix(self.__ip_constraints)
        return GenericDualLinearSystem(self.model_reader.S, self.dual_matrix, T, b, solver=self.solver)
299 |
300 |
class KShortestGeneticMCSEnumeratorWrapper(KShortestGenericMCSEnumeratorWrapper):
    """
    Gene-level minimal cut set enumeration built on the generic dual system; after each batch of
    solutions, gene sets implied by alternative gene identities are excluded from further enumeration.
    """
    @staticmethod
    def gene_cut_function(solx, ksh, extra_args):
        # post-solution hook: exclude solutions implied by alternative gene identities ('F')
        alternative_gene_identity = extra_args['F']

        for sol in solx:
            act_vars = sol.get_active_indicator_varids()
            if len(act_vars) > 1:
                # NOTE(review): product() is called with a single iterable argument here, which yields
                # 1-tuples rather than the cross-product of per-variable alternatives — confirm intent
                dependencies = list(chain(*product(filter(lambda x: len(x) > 0,
                    [list(alternative_gene_identity[av]) for av in act_vars]))))
            else:
                dependencies = [[k] for k in [alternative_gene_identity[av] for av in act_vars][0]]
            if len(dependencies) > 0:
                ksh.exclude_solutions(dependencies)

    @staticmethod
    def set_gene_weights(ksh, extra_args):
        # pre-enumeration hook: weight the objective with per-gene weights
        ksh.set_objective_expression(extra_args['gene_weights'])

    def __init__(self, model, target_flux_space_dict, target_yield_space_dict, G, gene_map, F, gene_weights, **kwargs):
        # G: dual matrix relating reactions to genes; gene_map: gene identifier -> variable index;
        # F: per-variable alternative gene identity sets; gene_weights: objective weights per gene
        super().__init__(model, target_flux_space_dict, target_yield_space_dict, G, gene_map,
                         cut_function=self.gene_cut_function, pre_enum_function=self.set_gene_weights,
                         extra_args={'gene_map': gene_map, 'F': F, 'gene_weights':gene_weights}, **kwargs)
--------------------------------------------------------------------------------
/tests/ecoli_compression_test.py:
--------------------------------------------------------------------------------
1 |
2 | from itertools import chain, product
3 |
4 | from cobamp.nullspace.subset_reduction import SubsetReducerProperties, SubsetReducer
5 | from cobamp.core.cb_analysis import FluxVariabilityAnalysis
6 | from cobamp.wrappers import COBRAModelObjectReader, KShortestMCSEnumeratorWrapper
7 | from cobamp.utilities.file_io import open_file
8 |
9 | import unittest
10 |
class MCSEnumeratorQuickTest(unittest.TestCase):
	"""End-to-end MCSEnumerator check on a compressed iAF1260 E. coli model.

	The network is compressed via subset reduction, size-1 minimal cut sets
	are enumerated, and the result is compared with a file of previously
	validated MCSs.
	"""

	# Expected (metabolites, reactions) shape of the compressed stoichiometric matrix.
	EXPECTED_COMP_SHAPE = 562,936

	def setUp(self):
		"""Load the SBML model, compress it and prepare exclusion/validation data."""
		# read model using cobra (alternatively, the model instance can be generated previously and passed as an instance)
		reader = COBRAModelObjectReader('resources/iaf1260/Ec_iAF1260_flux2.xml')

		# drains/transporters/pseudo-reactions to exclude from network compression
		# (exchange reaction ids get a trailing underscore to match the model's naming)
		self.singles = [k+'_' if k[:3] == 'EX_' else k for k in [s[2:].strip() for s in
						open_file('resources/iaf1260/iAF1260_comp_exclusions.txt','r').split('\n')]]

		# generate a ConstraintBasedModel instance with an optimizer
		self.cbm = reader.to_cobamp_cbm(True)

		# NOTE(review): the tests below call compress_model again on the same
		# inputs; this call exists only to obtain `mapping` for the exclusions.
		red_model, mapping, metabs = self.compress_model(self.cbm, self.singles)

		# map the single reactions from the original network to the new reduced model
		self.exclusion_indices = [[mapping.from_original(k)] for k in
								  [self.cbm.reaction_names.index(n) for n in self.singles] if k in mapping.otn]

		# load a file with validated MCSs in the iAF1260 model; the maintenance
		# and biomass pseudo-reactions are removed from the reference set
		self.validated = set((k[2:],) for k in open_file('resources/iaf1260/computedmcs.txt', 'r').strip().split('\t\n'))\
						 - {('ATPM',), ('Ec_biomass_iAF1260_core_59p81M',)}

	def sets_are_equal(self, set1, set2):
		# compare the amount of solutions in each set as well as their intersection
		# if all holds true, the enumerated MCSs up to size 1 are correctly identified by this implementation
		return (len(set1) == len(set2) == len(set1 & set2))

	def compress_model(self, cbm, singles):
		"""Return (reduced_model, mapping, metabolites) via subset reduction."""
		# subset reduction algorithm instance
		sr = SubsetReducer()

		# determine blocked reactions using flux variability analysis
		blk_idx = [cbm.reaction_names[i] for i in
				   FluxVariabilityAnalysis(cbm.model).run(0, False, 0).find_blocked_reactions()]

		# create the subset reduction properties instance
		properties = SubsetReducerProperties(keep=singles, block=blk_idx, absolute_bounds=True)

		# create a new reduced model using subset reduction
		return sr.transform(cbm, properties)

	def get_mcs_enumerator_inst(self, red_model, exclusion_indices):
		"""Build the MCSEnumerator wrapper for the reduced model."""
		# create the MCSEnumerator wrapper instance
		return KShortestMCSEnumeratorWrapper(
			model=red_model,
			target_flux_space_dict={  # a dictionary with flux constraints defining the target space
				'Ec_biomass_iAF1260_core_59p81M': (1e-4, None),
				'ATPM': (8.39, 8.39),
				'EX_glc_e_': (-20, None)
			},
			target_yield_space_dict={},  # additional yield constraints (useful in a growth-coupling strategy problem)
			stop_criteria=1,  # stop after the first iteration, i.e. only MCSs of size 1
			algorithm_type='kse_populate',  # each iteration yields all solutions of size n, up to stop_criteria
			excluded_solutions=exclusion_indices,  # exclude the single reactions from appearing as MCSs
		)

	def test_size_1_mcs(self):
		"""Enumerated size-1 MCSs must match the validated reference set."""
		red_model, mapping, metabs = self.compress_model(self.cbm, self.singles)
		mcs_enumerator = self.get_mcs_enumerator_inst(red_model, self.exclusion_indices)

		# iterate until stop_criteria and chain the solutions into a single list
		solutions = list(chain(*mcs_enumerator.get_enumerator()))

		# convert reduced model solutions into the original ones
		# (compressed reaction ids join original ids with '_+_')
		multiplied_sols = list(chain(*[list(product(*[k.split('_+_') for k in s.keys()])) for s in solutions]))

		# look at the solutions with 1 knockout only
		essentials = set([m for m in multiplied_sols if len(m) == 1])
		self.assertTrue(self.sets_are_equal(essentials, self.validated))


	def test_network_dimensions(self):
		"""The compressed matrix must have the expected shape."""
		red_model, mapping, metabs = self.compress_model(self.cbm, self.singles)
		self.assertTrue(self.EXPECTED_COMP_SHAPE == red_model.get_stoichiometric_matrix().shape)
--------------------------------------------------------------------------------
/tests/linear_system_optimizer_test.py:
--------------------------------------------------------------------------------
1 | from cobamp.core.optimization import LinearSystemOptimizer
2 | from cobamp.core.linear_systems import SteadyStateLinearSystem
3 | from cobamp.wrappers.external_wrappers import COBRAModelObjectReader
4 |
5 | from cobra.io import read_sbml_model
6 |
7 | from urllib.request import urlretrieve
8 | import pandas as pd
9 | import numpy as np
10 |
if __name__ == '__main__':

	# Fetch the iAF1260 genome-scale model from the BiGG database.
	path, content = urlretrieve('http://bigg.ucsd.edu/static/models/iAF1260.xml')

	model = read_sbml_model(path)
	cobamp_model = COBRAModelObjectReader(model)

	# Extract the pieces needed to build an equivalent cobamp linear system.
	S = cobamp_model.get_stoichiometric_matrix()
	lb, ub = cobamp_model.get_model_bounds(False, True)
	rx_names = cobamp_model.get_reaction_and_metabolite_ids()[0]

	lsystem = SteadyStateLinearSystem(S, lb, ub, rx_names)

	optimizer = LinearSystemOptimizer(lsystem)

	# Maximize biomass production, mirroring cobra's default objective.
	objective_id = rx_names.index('BIOMASS_Ec_iAF1260_core_59p81M')
	f = np.zeros(S.shape[1])
	f[objective_id] = 1

	lsystem.set_objective(f, False)

	cobamp_sol = optimizer.optimize()
	cobra_sol = model.optimize()

	cobra_fluxes = cobra_sol.fluxes
	cobamp_fluxes = pd.Series(cobamp_sol.var_values(), name='new_fluxes')

	# Side-by-side comparison of both flux distributions.
	sol_df = pd.DataFrame(cobra_fluxes).join(pd.DataFrame(cobamp_fluxes))
	# Bug fix: flag discrepancies by the ABSOLUTE deviation — the previous
	# signed comparison missed every case where the cobamp flux exceeded the
	# cobra flux (negative difference never crossed the threshold).
	sol_df['diff'] = (sol_df['fluxes'] - sol_df['new_fluxes']).abs() > 1e-10
--------------------------------------------------------------------------------
/tests/parallel_simulation_test.py:
--------------------------------------------------------------------------------
if __name__ == '__main__':
	from cobamp.utilities.file_io import read_pickle
	from cobamp.core.optimization import BatchOptimizer
	from numpy import array

	# Load a previously pickled model object reader and pull out its matrices.
	reader = read_pickle('resources/models/Recon2_v04_pruned.xml.objrdr')
	S = reader.get_stoichiometric_matrix()
	lb, ub = map(array, reader.get_model_bounds(separate_list=True))

	cb_model = reader.to_cobamp_cbm('CPLEX')
	biomass_idx = cb_model.reaction_names.index('biomass_reaction')

	# One batch entry per reaction: knock it out (bounds fixed to zero) while
	# maximizing biomass in every entry.
	num_reactions = len(cb_model.reaction_names)
	knockout_bounds = [{rx_idx: (0, 0)} for rx_idx in range(num_reactions)]
	objective_coefs = [{biomass_idx: 1} for _ in range(num_reactions)]
	objective_sense = [False for _ in range(num_reactions)]

	batch_optimizer = BatchOptimizer(cb_model.model, threads=12)
	results = batch_optimizer.batch_optimize(knockout_bounds, objective_coefs, objective_sense)
	print(results)
21 |
22 |
--------------------------------------------------------------------------------
/tests/property_dictionary_test.py:
--------------------------------------------------------------------------------
1 | import unittest
2 | from cobamp.utilities.property_management import PropertyDictionary
class PropertyDictionaryTest(unittest.TestCase):
	"""Unit tests for PropertyDictionary's mandatory/optional property validation.

	Mandatory and optional fields can each be validated by expected type, by a
	predicate function, or by a list of allowed values.
	"""

	def setUp(self):
		base_mandatory = {'name': str, 'age': int, 'height': lambda x: x > 0, 'gender': ['M', 'F', 'U']}
		base_optional = {'car_brand': str, 'region': ['rural', 'urban'], 'tyre_sizes': lambda x: len(x) == 2}

		class CustomPropertyDictionary(PropertyDictionary):
			def __init__(self):
				super().__init__(base_mandatory, base_optional)

		self.dict_class = CustomPropertyDictionary

	def test_add_all_mandatory_info(self):
		"""All mandatory fields set -> dictionary reports itself as valid."""
		propdict = self.dict_class()
		propdict['name'] = 'John'
		propdict['age'] = 29
		propdict['height'] = 2
		propdict['gender'] = 'M'
		propdict['car_brand'] = 'Skoda'

		keys_to_check = ['name', 'age', 'height', 'gender', 'car_brand']
		values_to_check = ('John', 29, 2, 'M', 'Skoda')
		propdict_is_valid = propdict.has_required_properties()
		propdict_has_added_keys = tuple(propdict[key] for key in keys_to_check)

		self.assertTrue(propdict_is_valid)
		self.assertTrue(values_to_check == propdict_has_added_keys)

	def test_add_some_mandatory_info(self):
		"""Missing 'height' -> dictionary must report itself as incomplete."""
		propdict = self.dict_class()
		propdict['name'] = 'John'
		propdict['age'] = 29
		propdict['gender'] = 'M'
		propdict['car_brand'] = 'Skoda'

		keys_to_check = ['name', 'age', 'gender', 'car_brand']
		values_to_check = ('John', 29, 'M', 'Skoda')

		propdict_is_valid = propdict.has_required_properties()
		propdict_has_added_keys = tuple(propdict[key] for key in keys_to_check)

		self.assertTrue(not propdict_is_valid)
		self.assertTrue(values_to_check == propdict_has_added_keys)

	def test_add_info_of_wrong_type(self):
		"""Assigning a non-int to 'age' must raise TypeError.

		Bug fix: the validity assertion used to sit inside the assertRaises
		block AFTER the raising statement, so it never executed; only the
		raising assignment stays inside the context manager now.
		"""
		propdict = self.dict_class()
		propdict['name'] = 'John'
		with self.assertRaises(TypeError):
			propdict['age'] = lambda x: x
		self.assertFalse(propdict.has_required_properties())

	def test_add_info_noncompliant_with_function(self):
		"""A value failing the predicate (height > 0) must raise AssertionError."""
		propdict = self.dict_class()
		propdict['name'] = 'John'
		with self.assertRaises(AssertionError):
			propdict['height'] = 0  # predicate requires height > 0
		self.assertFalse(propdict.has_required_properties())

	def test_add_info_not_in_list(self):
		"""A value outside the allowed list must raise AssertionError."""
		propdict = self.dict_class()
		propdict['name'] = 'John'
		with self.assertRaises(AssertionError):
			propdict['gender'] = 'X'  # not one of 'M', 'F', 'U'
		self.assertFalse(propdict.has_required_properties())
if __name__ == '__main__':
	# Run this module's tests directly with verbose per-test output.
	loader = unittest.TestLoader()
	suite = loader.loadTestsFromTestCase(PropertyDictionaryTest)
	unittest.TextTestRunner(verbosity=2).run(suite)
74 |
75 |
--------------------------------------------------------------------------------
/tests/resources/toy_efm_mcs.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BioSystemsUM/cobamp/0f2243ed643f67dfa383912d6cce6d044de04c8d/tests/resources/toy_efm_mcs.png
--------------------------------------------------------------------------------
/tests/resources/toy_network.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BioSystemsUM/cobamp/0f2243ed643f67dfa383912d6cce6d044de04c8d/tests/resources/toy_network.png
--------------------------------------------------------------------------------
/tests/test_gmcs_toy.py:
--------------------------------------------------------------------------------
1 | import unittest
2 | from scipy.sparse import csc_matrix
3 | from numpy import array
4 | from cobamp.core.models import ConstraintBasedModel
5 | from cobamp.wrappers import KShortestGeneticMCSEnumeratorWrapper
6 | from cobamp.gpr.core import GPRContainer
7 | from cobamp.gpr.integration import GeneMatrixBuilder
8 | from itertools import chain
9 |
class GMCSToyModelTest(unittest.TestCase):
	"""Genetic MCS enumeration on a small toy model with GPR associations."""

	def setUp(self) -> None:
		"""Build the toy model, derive its gene matrices and set up the enumerator."""
		# Sparse (row, column, value) triplets of the stoichiometric matrix.
		rows, cols, data = zip(*(
			(0, 0, 1),
			(0, 1, -1),
			(0, 2, -1),
			(1, 1, 1),
			(1, 3, -1),
			(2, 3, 1),
			(2, 5, -1),
			(3, 5, 1),
			(3, 7, -1),
			(4, 2, 1),
			(4, 4, -1),
			(5, 4, 1),
			(5, 6, -1),
			(3, 6, 1)
		))

		# Reference matrices kept for manual verification of the G/F builders:
		# G_test = array([
		# 	[1,0,0,0,0,0,0,0,0,0],
		# 	[0,1,0,1,0,1,0,1,0,0],
		# 	[0,0,1,0,0,0,0,0,0,0],
		# 	[0,0,1,0,0,0,0,0,0,0],
		# 	[0,0,0,1,0,0,0,0,0,0],
		# 	[0,0,1,0,0,0,1,0,1,0],
		# 	[0,1,0,1,1,1,0,1,0,0],
		# 	[0,0,0,1,1,0,0,0,0,0]
		# ])
		#
		# F_test = array([
		# 	[1,0,0,0,0,0,0],
		# 	[0,1,0,0,0,0,0],
		# 	[0,0,1,0,0,0,0],
		# 	[0,0,0,1,0,0,0],
		# 	[0,0,0,0,1,0,0],
		# 	[0,0,1,0,0,1,0],
		# 	[0,1,0,0,0,0,1],
		# 	[0,0,0,0,1,1,1]
		# ])
		#
		# G_test_irrev = G_test[:,[0,5,1,2,3,6,4,9,7,8]]

		S = array(csc_matrix((data, (rows, cols))).todense())
		lb = array([0] * S.shape[1]).astype(float)
		ub = array([1000] * S.shape[1]).astype(float)
		lb[[1, 5]] = -1000  # reactions r2 and r6 are reversible
		# The last reaction/metabolite are the biomass pseudo-entities.
		rx_names = ['r' + str(i + 1) for i in range(S.shape[1] - 1)] + ['rbio']
		met_names = ['m' + str(i + 1) for i in range(S.shape[0] - 1)] + ['mbio']
		# One GPR string per reaction; the biomass pseudo-reaction has none.
		gprs = ['g1', 'g2', 'g2', 'g3 and g4', 'g2 and g5', 'g3 or g6', '(g2 and (g5 or g6)) or g7', '']
		# gprs_irrev = gprs + [g for i, g in enumerate(gprs) if i in [1, 5]]


		cbm = ConstraintBasedModel(S, list(zip(lb, ub)), reaction_names=rx_names, metabolite_names=met_names, gprs=gprs)
		irrev_cbm, mapping = cbm.make_irreversible()

		# Build the gene (G) and dependency (F) matrices from the irreversible model's GPRs.
		gmat_builder = GeneMatrixBuilder(irrev_cbm.gpr)
		G_new, _, irreducible_gene_map, F_deps, weights = gmat_builder.get_GF_matrices()



		gmcs_enumerator = KShortestGeneticMCSEnumeratorWrapper(
			model=irrev_cbm,
			target_flux_space_dict={'rbio': (1, None)},  # biomass must carry flux >= 1
			target_yield_space_dict={},
			stop_criteria=len(irrev_cbm.reaction_names),
			algorithm_type='kse_populate',
			excluded_solutions=[],
			G=G_new, gene_map=irreducible_gene_map, F=F_deps, gene_weights=weights
		)

		self.gmcs_enumerator = gmcs_enumerator

	def test_enumerator_results_toy_model(self):
		"""The enumerated gene cut sets must match the known solution set."""
		iterator = self.gmcs_enumerator.get_enumerator()
		solutions = set([frozenset(s.keys()) for s in list(chain(*iterator))])
		expected = set([frozenset(k) for k in [{'g1'},{'g2'},{'g3','g5'},{'g4','g5'}]])
		self.assertEqual(solutions, expected)
88 |
# Allow running this test module directly.
if __name__ == '__main__':
	unittest.main()
91 |
--------------------------------------------------------------------------------
/tests/toy_model_test.py:
--------------------------------------------------------------------------------
1 | from cobamp.algorithms.kshortest import KShortestEnumerator
2 | from cobamp.core.linear_systems import DualLinearSystem, IrreversibleLinearSystem
3 | import numpy as np
4 | from itertools import chain
5 | import unittest
6 |
7 |
8 | class ToyMetabolicNetworkTests(unittest.TestCase):
9 | def setUp(self):
10 | self.S = np.array([[1, -1, 0, 0, -1, 0, -1, 0, 0],
11 | [0, 1, -1, 0, 0, 0, 0, 0, 0],
12 | [0, 1, 0, 1, -1, 0, 0, 0, 0],
13 | [0, 0, 0, 0, 0, 1, -1, 0, 0],
14 | [0, 0, 0, 0, 0, 0, 1, -1, 0],
15 | [0, 0, 0, 0, 1, 0, 0, 1, -1]])
16 | self.rx_names = ["R" + str(i) for i in range(1, 10)]
17 |
18 | self.lb, self.ub = [0]*len(self.rx_names), [1000]*len(self.rx_names)
19 | self.lb[3] = -1000
20 | self.T = np.array([0] * self.S.shape[1]).reshape(1, self.S.shape[1])
21 | self.T[0, 8] = -1
22 | self.b = np.array([-1]).reshape(1, )
23 |
24 | def enumerate_elementary_flux_modes(self):
25 | lsystem = IrreversibleLinearSystem(self.S, self.lb, self.ub, solver='CPLEX')
26 | ksh = KShortestEnumerator(lsystem)
27 | solution_iterator = ksh.population_iterator(9)
28 | efms = list(chain(*solution_iterator))
29 | return efms
30 |
31 | def enumerate_minimal_cut_sets(self):
32 | dsystem = DualLinearSystem(self.S, self.lb, self.ub, self.T, self.b, solver='CPLEX')
33 | ksh = KShortestEnumerator(dsystem)
34 | solution_iterator = ksh.population_iterator(4)
35 | mcss = list(chain(*solution_iterator))
36 | return mcss
37 |
38 | def test_elementary_flux_modes_support(self):
39 | basic_answer = {"R1, R2, R3, R4", "R1, R4, R5, R9", "R1, R2, R3, R5, R9", "R1, R6, R7, R8, R9"}
40 | test = {self.convert_solution_to_string(sol) for sol in self.enumerate_elementary_flux_modes()}
41 | self.assertEqual(basic_answer, test)
42 |
43 | def test_minimal_cut_sets(self):
44 | answer = {'R1', 'R2, R4, R6', 'R2, R4, R7', 'R2, R4, R8', 'R3, R4, R6', 'R3, R4, R7', 'R3, R4, R8', 'R5, R6',
45 | 'R5, R7', 'R5, R8', 'R9'}
46 | test = {self.convert_solution_to_string(sol) for sol in self.enumerate_minimal_cut_sets()}
47 | self.assertEqual(answer, test)
48 |
49 | def convert_solution_to_string(self, sol):
50 | return ', '.join([self.rx_names[i] for i in sol.get_active_indicator_varids()])
51 |
52 |
if __name__ == '__main__':
	# Run this module's tests directly with verbose per-test output.
	loader = unittest.TestLoader()
	unittest.TextTestRunner(verbosity=2).run(loader.loadTestsFromTestCase(ToyMetabolicNetworkTests))
56 |
--------------------------------------------------------------------------------
/tests/toy_model_test_with_api.py:
--------------------------------------------------------------------------------
1 | from cobamp.algorithms.kshortest import *
2 | from cobamp.core.linear_systems import DualLinearSystem, IrreversibleLinearSystem
3 | import numpy as np
4 | import unittest
5 |
6 |
def _make_ksh_config(method, size_property, size_value):
	"""Build a KShortestProperties preset.

	method: enumeration method constant (populate or iterate).
	size_property: the size-limiting property key for that method
		(max solution size for populate, max solution count for iterate).
	size_value: the limit to set on that property.
	"""
	cfg = KShortestProperties()
	cfg[K_SHORTEST_MPROPERTY_METHOD] = method
	cfg[size_property] = size_value
	return cfg

# "Correct" presets yield the full EFM/MCS answer sets; "wrong" presets are
# deliberately undersized so enumeration stops before completing the sets.
efm_populate_enumeration_config = _make_ksh_config(K_SHORTEST_METHOD_POPULATE, K_SHORTEST_OPROPERTY_MAXSIZE, 9)
mcs_populate_enumeration_config = _make_ksh_config(K_SHORTEST_METHOD_POPULATE, K_SHORTEST_OPROPERTY_MAXSIZE, 3)
efm_populate_enumeration_config_wrong = _make_ksh_config(K_SHORTEST_METHOD_POPULATE, K_SHORTEST_OPROPERTY_MAXSIZE, 4)
mcs_populate_enumeration_config_wrong = _make_ksh_config(K_SHORTEST_METHOD_POPULATE, K_SHORTEST_OPROPERTY_MAXSIZE, 2)

efm_iterate_enumeration_config = _make_ksh_config(K_SHORTEST_METHOD_ITERATE, K_SHORTEST_OPROPERTY_MAXSOLUTIONS, 4)
mcs_iterate_enumeration_config = _make_ksh_config(K_SHORTEST_METHOD_ITERATE, K_SHORTEST_OPROPERTY_MAXSOLUTIONS, 11)
efm_iterate_enumeration_config_wrong = _make_ksh_config(K_SHORTEST_METHOD_ITERATE, K_SHORTEST_OPROPERTY_MAXSOLUTIONS, 2)
mcs_iterate_enumeration_config_wrong = _make_ksh_config(K_SHORTEST_METHOD_ITERATE, K_SHORTEST_OPROPERTY_MAXSOLUTIONS, 9)

configs = [efm_populate_enumeration_config, mcs_populate_enumeration_config, efm_populate_enumeration_config_wrong, mcs_populate_enumeration_config_wrong,
		   efm_iterate_enumeration_config, mcs_iterate_enumeration_config, efm_iterate_enumeration_config_wrong, mcs_iterate_enumeration_config_wrong]

# Shared solver-tuning options for every preset.
for cfg in configs:
	cfg[K_SHORTEST_OPROPERTY_BIG_M_VALUE] = 3.4200101010 * 1e4
	cfg[K_SHORTEST_MPROPERTY_TYPE_EFP] = False
	cfg[K_SHORTEST_OPROPERTY_N_THREADS] = 1
	cfg[K_SHORTEST_OPROPERTY_WORKMEMORY] = None

TEST_SOLVER = 'CPLEX'
49 |
50 | class ToyMetabolicNetworkTests(unittest.TestCase):
51 | def setUp(self):
52 | self.S = np.array([[1, -1, 0, 0, -1, 0, -1, 0, 0],
53 | [0, 1, -1, 0, 0, 0, 0, 0, 0],
54 | [0, 1, 0, 1, -1, 0, 0, 0, 0],
55 | [0, 0, 0, 0, 0, 1, -1, 0, 0],
56 | [0, 0, 0, 0, 0, 0, 1, -1, 0],
57 | [0, 0, 0, 0, 1, 0, 0, 1, -1]])
58 | self.rx_names = ["R" + str(i) for i in range(1, 10)]
59 | self.lb, self.ub = [0]*len(self.rx_names), [1000]*len(self.rx_names)
60 | self.lb[3] = -1000
61 | self.T = np.array([0] * self.S.shape[1]).reshape(1, self.S.shape[1])
62 | self.T[0, 8] = -1
63 | self.b = np.array([-1]).reshape(1, )
64 | self.lsystem = IrreversibleLinearSystem(self.S, self.lb, self.ub, solver=TEST_SOLVER)
65 | self.dsystem = DualLinearSystem(self.S, self.lb, self.ub, self.T, self.b, solver=TEST_SOLVER)
66 |
67 |
68 |
69 | def enumerate_elementary_flux_modes(self):
70 | ks = KShortestEFMAlgorithm(efm_populate_enumeration_config)
71 | r = ks.enumerate(self.lsystem)
72 | #print('Thread_parameter',ks.ksh.model.model.problem.parameters.workmem.get())
73 | return r
74 |
75 | def enumerate_some_elementary_flux_modes(self):
76 | ks = KShortestEFMAlgorithm(efm_populate_enumeration_config_wrong)
77 | r = ks.enumerate(self.lsystem)
78 | #print('Thread_parameter',ks.ksh.model.model.problem.parameters.threads.get())
79 | return r
80 |
81 | def enumerate_minimal_cut_sets(self):
82 | ks = KShortestEFMAlgorithm(mcs_populate_enumeration_config)
83 | r = ks.enumerate(self.dsystem)
84 | #print('Thread_parameter', ks.ksh.model.model.problem.parameters.threads.get())
85 | return r
86 |
87 | def enumerate_some_minimal_cut_sets(self):
88 | ks = KShortestEFMAlgorithm(mcs_populate_enumeration_config_wrong)
89 | r = ks.enumerate(self.dsystem)
90 | #print('Thread_parameter', ks.ksh.model.model.problem.parameters.threads.get())
91 | return r
92 |
93 | def enumerate_elementary_flux_modes_iter(self):
94 | ks = KShortestEFMAlgorithm(efm_iterate_enumeration_config)
95 | r = ks.enumerate(self.lsystem)
96 | #print('Thread_parameter', ks.ksh.model.model.problem.parameters.threads.get())
97 | return r
98 |
99 | def enumerate_some_elementary_flux_modes_iter(self):
100 | ks = KShortestEFMAlgorithm(efm_iterate_enumeration_config_wrong)
101 | r = ks.enumerate(self.lsystem)
102 | #print('Thread_parameter', ks.ksh.model.model.problem.parameters.threads.get())
103 | return r
104 |
105 | def enumerate_minimal_cut_sets_iter(self):
106 | ks = KShortestEFMAlgorithm(mcs_iterate_enumeration_config)
107 | r = ks.enumerate(self.dsystem)
108 | #print('Thread_parameter', ks.ksh.model.model.problem.parameters.threads.get())
109 | return r
110 |
111 | def enumerate_some_minimal_cut_sets_iter(self):
112 | ks = KShortestEFMAlgorithm(mcs_iterate_enumeration_config_wrong)
113 | r = ks.enumerate(self.dsystem)
114 | #print('Thread_parameter', ks.ksh.model.model.problem.parameters.threads.get())
115 | return r
116 |
117 | def test_elementary_flux_modes_support(self):
118 | basic_answer = {"R1, R2, R3, R4", "R1, R4, R5, R9", "R1, R2, R3, R5, R9", "R1, R6, R7, R8, R9"}
119 | test = {self.convert_solution_to_string(sol) for sol in self.enumerate_elementary_flux_modes()}
120 | self.assertEqual(basic_answer, test)
121 |
122 | def test_elementary_flux_modes_support_wrong(self):
123 | basic_answer = {"R1, R2, R3, R4", "R1, R4, R5, R9", "R1, R2, R3, R5, R9", "R1, R6, R7, R8, R9"}
124 | test = {self.convert_solution_to_string(sol) for sol in self.enumerate_some_elementary_flux_modes()}
125 | self.assertNotEqual(basic_answer, test)
126 |
127 | def test_minimal_cut_sets(self):
128 | answer = {'R1', 'R2, R4, R6', 'R2, R4, R7', 'R2, R4, R8', 'R3, R4, R6', 'R3, R4, R7', 'R3, R4, R8', 'R5, R6',
129 | 'R5, R7', 'R5, R8', 'R9'}
130 | test = {self.convert_solution_to_string(sol) for sol in self.enumerate_minimal_cut_sets()}
131 | self.assertEqual(answer, test)
132 |
133 | def test_minimal_cut_sets_wrong(self):
134 | answer = {'R1', 'R2, R4, R6', 'R2, R4, R7', 'R2, R4, R8', 'R3, R4, R6', 'R3, R4, R7', 'R3, R4, R8', 'R5, R6',
135 | 'R5, R7', 'R5, R8', 'R9'}
136 | test = {self.convert_solution_to_string(sol) for sol in self.enumerate_some_minimal_cut_sets()}
137 | self.assertNotEqual(answer, test)
138 |
139 | def test_elementary_flux_modes_support_iter(self):
140 | basic_answer = {"R1, R2, R3, R4", "R1, R4, R5, R9", "R1, R2, R3, R5, R9", "R1, R6, R7, R8, R9"}
141 | test = {self.convert_solution_to_string(sol) for sol in self.enumerate_elementary_flux_modes_iter()}
142 | self.assertEqual(basic_answer, test)
143 |
144 | def test_elementary_flux_modes_support_wrong_iter(self):
145 | basic_answer = {"R1, R2, R3, R4", "R1, R4, R5, R9", "R1, R2, R3, R5, R9", "R1, R6, R7, R8, R9"}
146 | test = {self.convert_solution_to_string(sol) for sol in self.enumerate_some_elementary_flux_modes_iter()}
147 | self.assertNotEqual(basic_answer, test)
148 |
149 | def test_minimal_cut_sets_iter(self):
150 | answer = {'R1', 'R2, R4, R6', 'R2, R4, R7', 'R2, R4, R8', 'R3, R4, R6', 'R3, R4, R7', 'R3, R4, R8', 'R5, R6',
151 | 'R5, R7', 'R5, R8', 'R9'}
152 | test = {self.convert_solution_to_string(sol) for sol in self.enumerate_minimal_cut_sets_iter()}
153 | self.assertEqual(answer, test)
154 |
155 | def test_minimal_cut_sets_wrong_iter(self):
156 | answer = {'R1', 'R2, R4, R6', 'R2, R4, R7', 'R2, R4, R8', 'R3, R4, R6', 'R3, R4, R7', 'R3, R4, R8', 'R5, R6',
157 | 'R5, R7', 'R5, R8', 'R9'}
158 | test = {self.convert_solution_to_string(sol) for sol in self.enumerate_some_minimal_cut_sets_iter()}
159 | self.assertNotEqual(answer, test)
160 |
161 | def convert_solution_to_string(self, sol):
162 | return ', '.join([self.rx_names[i] for i in sol.get_active_indicator_varids()])
163 |
164 |
if __name__ == '__main__':
	# Run this module's tests directly with verbose per-test output.
	loader = unittest.TestLoader()
	unittest.TextTestRunner(verbosity=2).run(loader.loadTestsFromTestCase(ToyMetabolicNetworkTests))
168 |
--------------------------------------------------------------------------------