├── MANIFEST.in
├── docs
├── requirements.txt
├── _img
│ ├── gpm.png
│ ├── dataframe.png
│ └── basic-example-df.png
├── _api
│ ├── gpmap.rst
│ ├── main.rst
│ └── gpmap.simulate.rst
├── index.rst
├── _pages
│ ├── simulate.rst
│ ├── quick-start.rst
│ ├── helpful.rst
│ └── io.rst
├── make.bat
├── Makefile
└── conf.py
├── gpmap
├── __version__.py
├── __init__.py
├── simulate
│ ├── __init__.py
│ ├── hoc.py
│ ├── random.py
│ ├── mask.py
│ ├── base.py
│ ├── nk.py
│ ├── fuji.py
│ └── multipeak_fuji.py
├── errors.py
├── stats.py
├── utils.py
└── gpm.py
├── .travis.yml
├── .readthedocs.yaml
├── requirements.txt
├── tests
├── data
│ ├── test_data.csv
│ ├── test_data.json
│ └── test_data_five.csv
├── test_utils.py
├── conftest.py
└── test_gpm.py
├── Pipfile
├── .gitignore
├── .github
└── workflows
│ └── python-app.yml
├── README.md
├── setup.py
├── examples
└── Introduction to Genotype-Phenotype Map Module.ipynb
└── Pipfile.lock
/MANIFEST.in:
--------------------------------------------------------------------------------
1 | README.md
2 |
--------------------------------------------------------------------------------
/docs/requirements.txt:
--------------------------------------------------------------------------------
1 | ipython
2 |
--------------------------------------------------------------------------------
/docs/_img/gpm.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/harmslab/gpmap/HEAD/docs/_img/gpm.png
--------------------------------------------------------------------------------
/docs/_img/dataframe.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/harmslab/gpmap/HEAD/docs/_img/dataframe.png
--------------------------------------------------------------------------------
/gpmap/__version__.py:
--------------------------------------------------------------------------------
# Package version as a tuple of ints: (major, minor, patch).
VERSION = (0, 7, 0)

# Dotted version string derived from VERSION, e.g. "0.7.0".
__version__ = ".".join(str(part) for part in VERSION)
4 |
--------------------------------------------------------------------------------
/docs/_img/basic-example-df.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/harmslab/gpmap/HEAD/docs/_img/basic-example-df.png
--------------------------------------------------------------------------------
/gpmap/__init__.py:
--------------------------------------------------------------------------------
__doc__ = """A Python API for managing genotype-phenotype map data."""

# Import the main module in this package
from gpmap.gpm import GenotypePhenotypeMap

# Single source of truth for the version string.  (A stale
# `__version__ = '0.1'` literal previously assigned here was always
# overwritten by this import, so it has been removed.)
from .__version__ import __version__
8 |
--------------------------------------------------------------------------------
/.travis.yml:
--------------------------------------------------------------------------------
1 | # https://github.com/harmslab/gpmap
2 | language: python
3 | python:
4 | - 3.6
5 | - 3.5
6 | install:
7 | - pip install -r requirements.txt
8 | - pip install -e .
9 | script: pytest
10 |
--------------------------------------------------------------------------------
/gpmap/simulate/__init__.py:
--------------------------------------------------------------------------------
1 | from .random import RandomPhenotypesSimulation
2 | from .nk import NKSimulation
3 | from .hoc import HouseOfCardsSimulation
4 | from .fuji import MountFujiSimulation
5 | from .multipeak_fuji import MultiPeakMountFujiSimulation
6 |
--------------------------------------------------------------------------------
/.readthedocs.yaml:
--------------------------------------------------------------------------------
1 | version: 2
2 |
3 | build:
4 | os: ubuntu-22.04
5 | tools:
6 | python: "3.11"
7 |
8 | sphinx:
9 | configuration: docs/conf.py
10 |
11 | python:
12 | install:
13 | - requirements: docs/requirements.txt
14 |
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | -i https://pypi.org/simple
2 | matplotlib
3 | atomicwrites==1.2.1; python_version >= '3.7'
4 | attrs==18.2.0
5 | more-itertools==4.3.0
6 | numpy>=1.15.2
7 | pandas>=0.24.2
8 | py==1.6.0; python_version >= '2.7'
9 | pytest>=3.8.1
10 | scipy>=1.1.0
11 |
--------------------------------------------------------------------------------
/tests/data/test_data.csv:
--------------------------------------------------------------------------------
1 | ,genotypes,phenotypes,n_replicates,stdeviations,binary,n_mutations
2 | 0,AAA,0.62589116,1,,000,0
3 | 1,AAB,0.18940848,1,,001,1
4 | 2,ABA,0.36200286,1,,010,1
5 | 3,BAA,0.38687046,1,,100,1
6 | 4,ABB,0.92057246,1,,011,2
7 | 5,BAB,0.41535576,1,,101,2
8 | 6,BBA,0.58436333,1,,110,2
9 | 7,BBB,0.03187195,1,,111,3
10 |
--------------------------------------------------------------------------------
/Pipfile:
--------------------------------------------------------------------------------
1 | [[source]]
2 | url = "https://pypi.org/simple"
3 | verify_ssl = true
4 | name = "pypi"
5 |
6 | [packages]
7 | matplotlib = "*"
8 | numpy = "*"
9 | scipy = "*"
10 | pandas = "*"
11 | pytest = "*"
12 | gpmap = {editable = true, path = "."}
13 |
14 | [dev-packages]
15 | gpmap = {editable = true, path = "."}
16 |
17 | [requires]
18 | python_version = "3.7"
19 |
--------------------------------------------------------------------------------
/docs/_api/gpmap.rst:
--------------------------------------------------------------------------------
1 | gpmap\.errors module
2 | --------------------
3 |
4 | .. automodule:: gpmap.errors
5 | :members:
6 | :undoc-members:
7 | :show-inheritance:
8 |
9 | gpmap\.stats module
10 | -------------------
11 |
12 | .. automodule:: gpmap.stats
13 | :members:
14 | :undoc-members:
15 | :show-inheritance:
16 |
17 | gpmap\.utils module
18 | -------------------
19 |
20 | .. automodule:: gpmap.utils
21 | :members:
22 | :undoc-members:
23 | :show-inheritance:
24 |
--------------------------------------------------------------------------------
/docs/_api/main.rst:
--------------------------------------------------------------------------------
1 | API Documentation
2 | =================
3 |
4 | The ``GenotypePhenotypeMap`` is the main entry point to the gpmap package. Load
5 | in your data using the ``read`` methods attached to this object. The following
6 | subpackages include various objects to analyze this object.
7 |
8 | Subpackages
9 | -----------
10 |
11 | .. toctree::
12 | :maxdepth: 1
13 |
14 | gpmap
15 | gpmap.simulate
16 |
17 | GenotypePhenotypeMap
18 | --------------------
19 |
20 | .. automodule:: gpmap.gpm
21 | :members:
22 | :undoc-members:
23 | :show-inheritance:
24 |
--------------------------------------------------------------------------------
/tests/data/test_data.json:
--------------------------------------------------------------------------------
1 | {"wildtype": "AAA", "mutations": {"0": ["A", "B"], "1": ["A", "B"], "2": ["A", "B"]}, "data": {"genotypes": ["AAA", "AAB", "ABA", "BAA", "ABB", "BAB", "BBA", "BBB"], "phenotypes": [0.7078560200734831, 0.615441450030166, 0.2788995021375643, 0.9384420367098406, 0.306983575158586, 0.8928416173397741, 0.3759081125076016, 0.283411951807526], "n_replicates": [1, 1, 1, 1, 1, 1, 1, 1], "stdeviations": [null, null, null, null, null, null, null, null], "binary": ["000", "001", "010", "100", "011", "101", "110", "111"], "n_mutations": [0, 1, 1, 1, 2, 2, 2, 3]}, "log_transform": false}
--------------------------------------------------------------------------------
/gpmap/simulate/hoc.py:
--------------------------------------------------------------------------------
1 | from .nk import NKSimulation
2 | from .base import random_mutation_set
3 |
4 |
class HouseOfCardsSimulation(NKSimulation):
    """Construct a 'House of Cards' fitness landscape.

    This is the K = N limit of the NK model (the sub-sequence order is set
    to the full genotype length below), giving a landscape with maximum
    roughness.

    Parameters
    ----------
    wildtype : str
        reference genotype.
    mutations : dict
        site index -> list of characters available at that site.
    k_range : tuple of two floats
        (min, max) range for the random values drawn per sub-sequence.
    """

    def __init__(self, wildtype, mutations, k_range=(0, 1), *args, **kwargs):
        # NOTE(review): `super(NKSimulation, self)` deliberately skips
        # NKSimulation.__init__ and calls NKSimulation's *parent* __init__
        # instead (NKSimulation's signature is not visible here) —
        # confirm this bypass is intentional.
        super(NKSimulation, self).__init__(wildtype, mutations, *args,
                                           **kwargs)
        # Order equals the binary genotype length, i.e. K = N.
        self.set_order(len(self.binary[0]))
        # Draw random values in k_range, then fill in the phenotypes.
        self.set_random_values(k_range=k_range)
        self.build()
16 |
--------------------------------------------------------------------------------
/tests/data/test_data_five.csv:
--------------------------------------------------------------------------------
1 | ,genotypes,phenotypes,n_replicates,stdeviations,binary,n_mutations
2 | 0,AAAAA,1,1,,00000,0
3 | 1,AAAAB,0,1,,00001,1
4 | 2,AAABA,0,1,,00010,1
5 | 3,AAABB,0,1,,00011,2
6 | 4,AABAA,0,1,,00100,1
7 | 5,AABAB,1,1,,00101,2
8 | 6,AABBA,0,1,,00110,2
9 | 7,AABBB,0,1,,00111,3
10 | 8,ABAAA,0,1,,01000,1
11 | 9,ABAAB,0,1,,01001,2
12 | 10,ABABA,0,1,,01010,2
13 | 11,ABABB,1,1,,01011,3
14 | 12,ABBAA,0,1,,01100,2
15 | 13,ABBAB,0,1,,01101,3
16 | 14,ABBBA,1,1,,01110,3
17 | 15,ABBBB,0,1,,01111,4
18 | 16,BAAAA,0,1,,10000,1
19 | 17,BAAAB,0,1,,10001,2
20 | 18,BAABA,0,1,,10010,2
21 | 19,BAABB,0,1,,10011,3
22 | 20,BABAA,0,1,,10100,2
23 | 21,BABAB,1,1,,10101,3
24 | 22,BABBA,0,1,,10110,3
25 | 23,BABBB,0,1,,10111,4
26 | 24,BBAAA,0,1,,11000,2
27 | 25,BBAAB,0,1,,11001,3
28 | 26,BBABA,0,1,,11010,3
29 | 27,BBABB,0,1,,11011,4
30 | 28,BBBAA,1,1,,11100,3
31 | 29,BBBAB,0,1,,11101,4
32 | 30,BBBBA,1,1,,11110,4
33 | 31,BBBBB,1,1,,11111,5
34 |
--------------------------------------------------------------------------------
/gpmap/simulate/random.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | from .base import BaseSimulation
3 |
4 |
class RandomPhenotypesSimulation(BaseSimulation):
    """Simulated GenotypePhenotypeMap whose phenotypes are drawn uniformly
    at random from ``phenotype_range``.
    """

    def __init__(self, wildtype, mutations, phenotype_range=(0, 1),
                 *args, **kwargs):
        """Enumerate all genotypes, then fill in random phenotypes."""
        super(RandomPhenotypesSimulation, self).__init__(
            wildtype, mutations, *args, **kwargs)
        self.phenotype_range = phenotype_range
        self.build()

    def build(self):
        """Draw a uniform random phenotype for every genotype."""
        lo = self.phenotype_range[0]
        hi = self.phenotype_range[1]
        self.data['phenotypes'] = np.random.uniform(
            lo, hi, size=len(self.genotypes))
23 |
--------------------------------------------------------------------------------
/docs/_api/gpmap.simulate.rst:
--------------------------------------------------------------------------------
1 | gpmap\.simulate
2 | ===============
3 |
4 | gpmap\.simulate\.base module
5 | ----------------------------
6 |
7 | .. automodule:: gpmap.simulate.base
8 | :members:
9 | :undoc-members:
10 | :show-inheritance:
11 |
12 | gpmap\.simulate\.fuji module
13 | ----------------------------
14 |
15 | .. automodule:: gpmap.simulate.fuji
16 | :members:
17 | :undoc-members:
18 | :show-inheritance:
19 |
20 | gpmap\.simulate\.hoc module
21 | ---------------------------
22 |
23 | .. automodule:: gpmap.simulate.hoc
24 | :members:
25 | :undoc-members:
26 | :show-inheritance:
27 |
28 | gpmap\.simulate\.nk module
29 | --------------------------
30 |
31 | .. automodule:: gpmap.simulate.nk
32 | :members:
33 | :undoc-members:
34 | :show-inheritance:
35 |
36 |
37 | Module contents
38 | ---------------
39 |
40 | .. automodule:: gpmap.simulate
41 | :members:
42 | :undoc-members:
43 | :show-inheritance:
44 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | .DS_Store
11 | docs/.DS_Store
12 | gpmap/.DS_Store
13 | gpmap/simulate/.DS_Store
14 | .Python
15 | env/
16 | build/
17 | develop-eggs/
18 | dist/
19 | downloads/
20 | eggs/
21 | .eggs/
22 | lib/
23 | lib64/
24 | parts/
25 | sdist/
26 | var/
27 | *.egg-info/
28 | .installed.cfg
29 | *.egg
30 |
31 | # PyInstaller
32 | # Usually these files are written by a python script from a template
33 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
34 | *.manifest
35 | *.spec
36 |
37 | # Installer logs
38 | pip-log.txt
39 | pip-delete-this-directory.txt
40 |
41 | # Unit test / coverage reports
42 | htmlcov/
43 | .tox/
44 | .coverage
45 | .coverage.*
46 | .cache
47 | nosetests.xml
48 | coverage.xml
49 | *,cover
50 |
51 | # Translations
52 | *.mo
53 | *.pot
54 |
55 | # Django stuff:
56 | *.log
57 |
58 | # Sphinx documentation
59 | docs/_build/
60 |
61 | # PyBuilder
62 | target/
63 |
64 | # Jupyter notebooks
65 | *.ipynb_checkpoints
66 |
--------------------------------------------------------------------------------
/.github/workflows/python-app.yml:
--------------------------------------------------------------------------------
1 | # This workflow will install Python dependencies, run tests and lint with a single version of Python
2 | # For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions
3 |
4 | name: gpmap
5 |
6 | on:
7 | push:
8 | branches: [ main ]
9 | pull_request:
10 | branches: [ main ]
11 |
12 | jobs:
13 | build:
14 | runs-on: ${{ matrix.os }}
15 | strategy:
16 | fail-fast: false
17 | matrix:
18 | os: [ubuntu-latest, macos-latest, windows-latest]
19 | python-version: ['3.7', '3.8', '3.9' ]
20 | steps:
21 | - name: Checkout
22 | uses: actions/checkout@v2
23 | - name: Install Python ${{ matrix.python-version }}
24 | uses: actions/setup-python@v2
25 | with:
26 | python-version: ${{ matrix.python-version }}
27 | architecture: 'x64'
28 | - name: Install dependencies
29 | run: |
30 | python -m pip install --upgrade pip
31 | pip install flake8 pytest
32 | pip install -e .[test]
33 | - name: Lint with flake8
34 | run: |
35 | # stop the build if there are Python syntax errors or undefined names
36 | flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics
37 | # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
38 | flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics
39 | - name: Test with pytest
40 | run: |
41 | pytest
42 |
--------------------------------------------------------------------------------
/docs/index.rst:
--------------------------------------------------------------------------------
1 | .. gpmap documentation master file, created by
2 | sphinx-quickstart on Fri Jul 8 10:41:24 2016.
3 | You can adapt this file completely to your liking, but it should at least
4 | contain the root `toctree` directive.
5 |
6 | ``gpmap``
7 | =========
8 |
9 | The Pandas DataFrame for genotype-phenotype (GP) map data.
10 |
11 | .. image:: _img/gpm.png
12 | :align: center
13 |
14 |
15 | The ``GenotypePhenotypeMap`` is a core object for a suite of packages written
16 | in the `Harms Lab`_. It organizes and standardizes genotype-phenotype map data.
17 |
18 | .. _`Harms Lab`: https://github.com/harmslab
19 |
20 | Basic Example
21 | -------------
22 |
23 | .. code-block:: python
24 |
25 | # Import the GenotypePhenotypeMap
26 | from gpmap import GenotypePhenotypeMap
27 |
28 | # The data
29 | wildtype = 'AA'
30 | genotypes = ['AA', 'AT', 'TA', 'TT']
31 | phenotypes = [0.1, 0.5, 0.2, 0.8]
32 | stdeviations = [0.05, 0.05, 0.05, 0.05]
33 |
34 | # Initialize a GenotypePhenotype object
35 | gpm = GenotypePhenotypeMap(wildtype, genotypes, phenotypes,
36 | stdeviations=stdeviations)
37 |
38 | # Show the dataFrame
39 | gpm.data
40 |
41 | .. image:: _img/basic-example-df.png
42 | :width: 350px
43 |
44 |
45 | Documentation
46 | -------------
47 |
48 | .. toctree::
49 | :maxdepth: 2
50 |
51 | _pages/quick-start
52 | _pages/helpful
53 | _pages/simulate
54 | _pages/io
55 | _api/main.rst
56 |
57 |
58 | Indices and tables
59 | ==================
60 |
61 | * :ref:`genindex`
62 | * :ref:`modindex`
63 | * :ref:`search`
64 |
--------------------------------------------------------------------------------
/gpmap/simulate/mask.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import pandas as pd
3 | from .. import GenotypePhenotypeMap
4 |
5 |
def mask(gpm, mask_fraction):
    """Create a new GenotypePhenotypeMap object from a random subset of another
    GenotypePhenotypeMap.

    Parameters
    ----------
    gpm : GenotypePhenotypeMap
        map to subsample.
    mask_fraction : float
        fraction of genotypes to remove, between 0 and 1.

    Returns
    -------
    true_mask_fraction : float
        the actual fraction used, since the space is discrete and likely
        won't be the exact fraction given.
    GenotypePhenotypeMap :
        the new genotype-phenotype map.

    Raises
    ------
    ValueError
        if mask_fraction is outside [0, 1].
    TypeError
        if gpm.n_replicates is not an int, pandas Series, or numpy array.
    """
    if not 0 <= mask_fraction <= 1:
        raise ValueError("mask_fraction must be between 0 and 1.")

    # Number of genotypes to keep.
    number_to_choose = int((1 - mask_fraction) * gpm.n)

    # The true fraction removed (the space is discrete, so this can differ
    # from the requested fraction).
    true_mask_fraction = 1 - number_to_choose / gpm.n

    # Randomly choose genotypes without replacement.
    index = np.random.choice(gpm.index, number_to_choose, replace=False)

    # n_replicates may be a scalar shared by all genotypes, or per-genotype.
    if isinstance(gpm.n_replicates, int):
        n_replicates = gpm.n_replicates
    elif isinstance(gpm.n_replicates, (pd.Series, np.ndarray)):
        # NOTE(review): reset_index only exists on Series; an ndarray here
        # would fail just as it did in the original — confirm callers only
        # pass Series for the per-genotype case.
        n_replicates = gpm.n_replicates[index].reset_index(drop=True)
    else:
        raise TypeError("n_replicates are not a valid dtype.")

    # Return the subset as a brand-new map.
    return true_mask_fraction, GenotypePhenotypeMap(
        gpm.wildtype,
        gpm.genotypes[index].reset_index(drop=True),
        gpm.phenotypes[index].reset_index(drop=True),
        mutations=gpm.mutations,
        stdeviations=gpm.stdeviations[index].reset_index(drop=True),
        n_replicates=n_replicates)
47 |
--------------------------------------------------------------------------------
/gpmap/errors.py:
--------------------------------------------------------------------------------
1 | # Mapping object for holding upper and lower error bars
2 | #
3 | # Author: Zach Sailer
4 | #
5 | # ----------------------------------------------------------
6 | # Local imports
7 | # ----------------------------------------------------------
8 |
9 | import numpy as np
10 |
11 |
def upper_transform(mean, bound, logbase):
    """Scale an upper error bound into log space.

    For untransformed data, Yupper = Ymean + bound.  After log
    transformation the upper bar becomes log(Yupper) - log(Ymean),
    i.e. log(1 + bound/Ymean).

    Parameters
    ----------
    mean : number or array
        mean value (Ymean).
    bound : number or array
        untransformed upper error bar size.
    logbase : callable
        log function to apply (e.g. np.log, np.log10).
    """
    ratio = (mean + bound) / mean
    return abs(logbase(ratio))
31 |
32 |
def lower_transform(mean, bound, logbase):
    """Scale a lower error bound into log space.

    For untransformed data, Ylower = Ymean - bound.  After log
    transformation the lower bar becomes log(Ymean) - log(Ylower),
    i.e. |log(Ymean / (Ymean - bound))|.

    Parameters
    ----------
    mean : number or array
        mean value (Ymean).
    bound : number or array
        untransformed lower error bar size.
    logbase : callable
        log function to apply (e.g. np.log, np.log10).
    """
    ratio = mean / (mean - bound)
    return abs(logbase(ratio))
53 |
54 |
class BaseErrorMap(object):
    """Manage error bars (standard deviations and their transforms)
    attached to a genotype-phenotype map object.

    Subclasses implement ``wrapper`` to convert the map's stored
    standard deviations into the desired bound.
    """

    def __init__(self, Map):
        # Map must expose `stdeviations` (and whatever the subclass
        # wrapper reads, e.g. `n_replicates`).
        self._Map = Map

    def wrapper(self, bound, **kwargs):
        """Convert stdeviations to the desired bound.

        Raises
        ------
        NotImplementedError
            always; must be implemented in a subclass.
        """
        raise NotImplementedError("Must be implemented in a subclass")

    @property
    def upper(self):
        """Upper error bound, or None if no stdeviations are attached."""
        if self._Map.stdeviations is None:
            return None
        return self.wrapper(self._Map.stdeviations)

    @property
    def lower(self):
        """Lower error bound, or None if no stdeviations are attached.

        NOTE(review): currently identical to ``upper``; asymmetric
        bounds would need separate transforms.
        """
        if self._Map.stdeviations is None:
            return None
        return self.wrapper(self._Map.stdeviations)
85 |
86 |
class StandardDeviationMap(BaseErrorMap):
    """Error map whose bounds are the raw standard deviations."""

    def wrapper(self, bounds, **kwargs):
        """Return the standard deviations unchanged."""
        return bounds
92 |
93 |
class StandardErrorMap(BaseErrorMap):
    """Error map reporting standard errors of the mean."""

    def wrapper(self, bounds):
        """Scale stdeviations by 1 / sqrt(n_replicates)."""
        n = self._Map.n_replicates
        return bounds / np.sqrt(n)
99 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 |
2 | # GPMap
3 |
4 | [](https://badge.fury.io/py/gpmap)
5 | [](https://gitter.im/harmslab/gpmap?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
6 | [](http://gpmap.readthedocs.io/en/latest/?badge=latest)
7 | [](https://travis-ci.org/harmslab/gpmap)
8 | [](https://nbviewer.jupyter.org/github/harmslab/gpmap/blob/master/examples/Introduction%20to%20Genotype-Phenotype%20Map%20Module.ipynb#)
9 | [](https://mybinder.org/v2/gh/harmslab/gpmap/master?filepath=examples%2FIntroduction%20to%20Genotype-Phenotype%20Map%20Module.ipynb)
10 |
11 | *A Python API for managing genotype-phenotype map data*
12 |
13 | GPMap defines a flexible object for managing genotype-phenotype (GP) map data. At its core,
14 | it stores all data in Pandas DataFrames and thus, interacts seamlessly with the
15 | PyData ecosystem.
16 |
17 | To visualize genotype-phenotype objects created by GPMap, checkout [GPGraph](https://github.com/Zsailer/gpgraph).
18 |
19 |
20 |
21 | ## Basic example
22 |
23 | Import the package's base object.
24 | ```python
25 | from gpmap import GenotypePhenotypeMap
26 | ```
27 |
28 | Pass your data to the object.
29 | ```python
30 |
31 | # Data
32 | wildtype = "AAA"
33 | genotypes = ["AAA", "AAT", "ATA", "TAA", "ATT", "TAT", "TTA", "TTT"]
34 | phenotypes = [0.1, 0.2, 0.2, 0.6, 0.4, 0.6, 1.0, 1.1]
35 | stdeviations = [0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05]
36 |
37 | # Initialize the object
38 | gpm = GenotypePhenotypeMap(wildtype,
39 | genotypes,
40 | phenotypes,
41 | stdeviations=stdeviations)
42 |
43 | # Check out the data.
44 | gpm.data
45 | ```
46 |
47 |
48 |
49 | Or load a dataset from disk.
50 | ```python
51 | gpm = GenotypePhenotypeMap.read_json("data.json")
52 | ```
53 |
54 | ## Installation
55 |
56 | **Users**
57 | The simplest way to install this package is using pip:
58 | ```
59 | pip install gpmap
60 | ```
61 |
62 | **Developers**
63 | The recommended way to install this package for development is using `pipenv`.
64 |
65 | 1. Clone this repository:
66 | ```
67 | git clone https://github.com/harmslab/gpmap
68 | ```
69 | 2. Install the package using pipenv.
70 | ```
71 | cd gpmap
72 | pipenv install --dev -e .
73 | ```
74 | 3. Run tests using pytest
75 | ```
76 | pytest
77 | ```
78 |
79 | ## Dependencies
80 |
81 | The following modules are required. Also, the examples/tutorials are written in Jupyter notebooks and require IPython to be installed.
82 |
83 | * [Numpy](http://www.numpy.org/)
84 | * [Pandas](https://pandas.pydata.org/)
85 |
--------------------------------------------------------------------------------
/gpmap/simulate/base.py:
--------------------------------------------------------------------------------
1 | import random
2 | import numpy as np
3 | from gpmap import utils
4 | from gpmap.gpm import GenotypePhenotypeMap
5 |
6 |
def random_mutation_set(length, alphabet_size=2, type='AA'):
    """Generate a random mutations dictionary for simulations.

    Parameters
    ----------
    length : int
        length of genotypes (number of sites).

    alphabet_size : int or list
        alphabet size at each site. If a list is given, site i gets
        alphabet_size[i] characters.

    type : 'AA' or 'DNA'
        use the amino acid alphabet or the DNA alphabet.

    Returns
    -------
    dict
        maps site index -> list of characters available at that site.

    Raises
    ------
    ValueError
        if `type` is neither 'AA' nor 'DNA'.
    """
    if isinstance(alphabet_size, int):
        sizes = [alphabet_size] * length
    else:
        sizes = alphabet_size

    # Select the source alphabet once, outside the loop.
    if type == 'AA':
        source = utils.AMINO_ACIDS
    elif type == 'DNA':
        source = utils.DNA
    else:
        # Previously an unknown `type` crashed with NameError on the
        # first iteration; fail loudly and clearly instead.
        raise ValueError("type must be 'AA' or 'DNA'.")

    # Build mutations dictionary.  Copy the alphabet before shuffling:
    # the original shuffled utils.AMINO_ACIDS / utils.DNA *in place*,
    # mutating the shared module-level constants.
    mutations = {}
    for i in range(length):
        alphabet = list(source)
        random.shuffle(alphabet)
        mutations[i] = alphabet[:sizes[i]]
    return mutations
36 |
37 |
class BaseSimulation(GenotypePhenotypeMap):
    """Base class for simulated genotype-phenotype maps.

    Enumerates every genotype reachable from `mutations` and allocates a
    placeholder phenotype array; subclasses fill in phenotypes by
    implementing ``build``.
    """

    def __init__(self, wildtype, mutations, *args, **kwargs):
        """Enumerate genotypes from the mutations dict and initialize the map.

        Parameters
        ----------
        wildtype : str
            reference genotype.
        mutations : dict
            site index -> list of characters available at that site.
        """
        genotypes = utils.mutations_to_genotypes(mutations, wildtype=wildtype)
        # Placeholder (uninitialized) phenotypes; build() overwrites them.
        phenotypes = np.empty(len(genotypes), dtype=float)
        super(BaseSimulation, self).__init__(wildtype,
                                             genotypes,
                                             phenotypes,
                                             mutations=mutations,
                                             *args,
                                             **kwargs)

    @classmethod
    def from_length(cls, length, alphabet_size=2, *args, **kwargs):
        """Create a simulated genotype-phenotype map from a genotype length.

        Parameters
        ----------
        length : int
            length of genotypes
        alphabet_size : int (optional)
            alphabet size

        Returns
        -------
        self : GenotypePhenotypeSimulation
        """
        mutations = random_mutation_set(length, alphabet_size=alphabet_size)
        # Wildtype is the first character listed at each site.
        wildtype = "".join([m[0] for m in mutations.values()])
        self = cls(wildtype, mutations, *args, **kwargs)
        return self

    def set_stdeviations(self, sigma):
        """Add standard deviations to the simulated phenotypes, which can then
        be used for sampling error in the genotype-phenotype map.

        Parameters
        ----------
        sigma : float or array-like
            Adds standard deviations to the phenotypes. If float, all
            phenotypes are given the same stdeviations. Else, array must be
            same length as phenotypes and will be assigned to each phenotype.

        Returns
        -------
        self
        """
        stdeviations = np.ones(len(self.phenotypes)) * sigma
        self.data.stdeviations = stdeviations
        return self

    def build(self):
        """Fill in the phenotypes.  Must be implemented in a subclass."""
        raise NotImplementedError("must be implemented in subclass.")
90 |
--------------------------------------------------------------------------------
/tests/test_utils.py:
--------------------------------------------------------------------------------
1 | # Import utils model.
2 | from gpmap import utils
3 |
4 | WILDTYPE = "AAA"
5 |
6 | GENOTYPES = [
7 | "AAA",
8 | "AAB",
9 | "ABA",
10 | "BAA",
11 | "ABB",
12 | "BAB",
13 | "BBA",
14 | "BBB"
15 | ]
16 |
17 | BINARY = [
18 | '000',
19 | '001',
20 | '010',
21 | '100',
22 | '011',
23 | '101',
24 | '110',
25 | '111'
26 | ]
27 |
28 | MUTATIONS = {
29 | 0: ["A", "B"],
30 | 1: ["A", "B"],
31 | 2: ["A", "B"],
32 | }
33 |
34 |
def lists_are_same(list1, list2):
    """Return True if lists contain same values
    (order and multiplicity don't matter).
    """
    # A is a subset of B and a superset of B  <=>  set(A) == set(B).
    return set(list1) == set(list2)
47 |
48 |
def dicts_are_same(dict1, dict2):
    """Return True if the dictionaries have the same keys and values."""
    # The key sets must match first.
    if set(dict1.keys()) != set(dict2.keys()):
        return False

    # Every value in dict1 must equal the value under the same key in dict2.
    return all(dict2[key] == val for key, val in dict1.items())
63 |
64 |
def test_hamming_distance():
    """Test hamming distance function."""
    s1 = "THIS IS A TEST"
    s2 = "HWIS IT A TEBT"

    # The strings differ at positions 0, 1, 6, and 12.
    assert utils.hamming_distance(s1, s2) == 4


def test_find_differences():
    """Test find differences function."""
    s1 = "THIS IS A TEST"
    s2 = "HWIS IT A TEBT"

    assert utils.find_differences(s1, s2) == [0, 1, 6, 12]


def test_farthest_genotype():
    """Test farthest genotypes function."""
    # BBB differs from AAA at all three sites — the maximum possible.
    assert utils.farthest_genotype("AAA", GENOTYPES) == "BBB"


def test_list_binary():
    """test list binary function."""
    somebin = utils.list_binary(3)

    assert lists_are_same(somebin, BINARY)


def test_mutations_to_encoding():
    """Test mutations to encoding."""
    encoding = utils.mutations_to_encoding(WILDTYPE, MUTATIONS)

    # Wildtype characters encode to '0', mutant characters to '1'.
    assert encoding[0]['A'] == '0'
    assert encoding[1]['B'] == '1'
    assert encoding[2]['A'] == '0'


def test_mutations_to_genotypes():
    """Test mutations to genotypes function."""
    genotypes = utils.mutations_to_genotypes(MUTATIONS, wildtype=WILDTYPE)

    assert lists_are_same(genotypes, GENOTYPES)


def test_genotypes_to_mutations():
    """Test genotypes to mutations function."""
    mutations = utils.genotypes_to_mutations(GENOTYPES)

    assert dicts_are_same(mutations, MUTATIONS)


def test_genotypes_to_binary():
    """Test genotypes_to_binary function."""
    encoding_table = utils.get_encoding_table(WILDTYPE, MUTATIONS)
    binary = utils.genotypes_to_binary(GENOTYPES, encoding_table)

    assert lists_are_same(binary, BINARY)


def test_get_missing_genotypes():
    """Test get_missing_genotypes function."""
    # Hold out the last four genotypes and expect them to be reported missing.
    known_, missing_ = GENOTYPES[0:4], GENOTYPES[4:]

    missing = utils.get_missing_genotypes(known_, MUTATIONS)

    assert lists_are_same(missing, missing_)
131 |
--------------------------------------------------------------------------------
/docs/_pages/simulate.rst:
--------------------------------------------------------------------------------
1 | Simulating genotype-phenotype maps
2 | ==================================
3 |
4 | The GPMap package comes with a suite of objects to simulate genotype-phenotype
5 | maps following models in the literature. They are found in the ``gpmap.simulate``
6 | module.
7 |
8 | All Simulation objects inherit the ``GenotypePhenotypeMap`` object as their base
9 | class. Thus, anything you can do with a GenotypePhenotypeMap, you can do with the
10 | simulation objects.
11 |
12 | NK landscape
13 | ------------
14 |
15 | Construct a genotype-phenotype map using Kauffman's NK Model. [1]_
16 | The NK fitness landscape is created using a table with binary, length-K,
17 | sub-sequences mapped to random values. All genotypes are binary with length N.
18 | The fitness of a genotype is constructed by summing the values of all
19 | sub-sequences that make up the genotype using a sliding window across the full genotypes.
20 |
21 | For example, imagine an NK simulation with :math:`N=5` and :math:`K=2`. To construct the fitness
22 | for the 01011 genotype, select the following sub-sequences from an NK table:
23 | "01", "10", "01", "11", "10". Sum their values together.
24 |
25 | .. code-block:: python
26 |
27 | # import the NKSimulation class
28 | from gpmap.simulate import NKSimulation
29 |
30 | # Create an instance of the model. Using `from_length` makes this easy.
31 | gpm = NKSimulation.from_length(6, K=3)
32 |
33 | House of Cards landscape
34 | ------------------------
35 |
36 | Construct a 'House of Cards' fitness landscape. This is a limit of the NK model
37 | where :math:`K=N`. It represents a fitness landscape with maximum roughness.
38 |
39 |
40 | .. code-block:: python
41 |
42 | # import the HouseOfCardsSimulation class
43 | from gpmap.simulate import HouseOfCardsSimulation
44 |
45 | # Create an instance of the model. Using `from_length` makes this easy.
46 | gpm = HouseOfCardsSimulation.from_length(6)
47 |
48 |
49 | Mount Fuji landscape
50 | --------------------
51 |
52 | Construct a genotype-phenotype map from a Mount Fuji model. [2]_
53 |
54 | A Mount Fuji sets a "global" fitness peak (max) on a single genotype in the space.
55 | The fitness goes down as a function of hamming distance away from this genotype,
56 | called a "fitness field". The strength (or scale) of this field is linear and
57 | depends on the parameters `field_strength`.
58 |
59 | Roughness can be added to the Mount Fuji model using a random `roughness` parameter.
60 | This assigns a random roughness value to each genotype.
61 |
62 | .. math::
63 | f(g) = \nu (g) + c \cdot d(g_0, g)
64 |
65 | where :math:`\nu` is the roughness parameter, :math:`c` is the field strength, and :math:`d` is the
66 | hamming distance between genotype :math:`g` and the reference genotype.
67 |
68 | .. code-block:: python
69 |
    # import the MountFujiSimulation class
71 | from gpmap.simulate import MountFujiSimulation
72 |
73 | # Create an instance of the model. Using `from_length` makes this easy.
    gpm = MountFujiSimulation.from_length(6,
75 | roughness_width=0.5,
76 | roughness_dist='normal'
77 | )
78 |
79 |
80 | References
81 | ----------
82 |
83 | .. [1] Kauffman, Stuart A., and Edward D. Weinberger. "The NK model of rugged fitness landscapes and its application to maturation of the immune response." Journal of theoretical biology 141.2 (1989): 211-245.
84 | .. [2] Szendro, Ivan G., et al. "Quantitative analyses of empirical fitness landscapes." Journal of Statistical Mechanics: Theory and Experiment 2013.01 (2013): P01005.
85 |
--------------------------------------------------------------------------------
/docs/_pages/quick-start.rst:
--------------------------------------------------------------------------------
1 | Quick start
2 | ===========
3 |
GPMap is a small Python package that subsets the pandas DataFrame to handle genotype-phenotype map data. The package includes utilities to read/write data to/from disk, enumerate large sequence/genotype spaces efficiently, and
5 | compute various statistics from an arbitrary genotype-phenotype map.
6 |
7 | ``GenotypePhenotypeMap`` object
8 | -------------------------------
9 |
10 | The main object in ``gpmap`` is the ``GenotypePhenotypeMap`` object. The object stores data as a Pandas DataFrame, which can be accessed through the ``.data`` attribute. Your object will look something like this:
11 |
12 | .. code-block:: python
13 |
14 | from gpmap import GenotypePhenotypeMap
15 |
16 | # Data
17 | wildtype = "AAA"
18 | genotypes = ["AAA", "AAT", "ATA", "TAA", "ATT", "TAT", "TTA", "TTT"]
19 | phenotypes = [0.1, 0.2, 0.2, 0.6, 0.4, 0.6, 1.0, 1.1]
20 | stdeviations = [0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05]
21 | mutations = {
22 | 0: ["A", "T"],
23 | 1: ["A", "T"],
24 | 2: ["A", "T"]
25 | }
26 |
27 | # Initialize the object
28 | gpm = GenotypePhenotypeMap(
29 | wildtype,
30 | genotypes,
31 | phenotypes,
32 | mutations=mutations,
33 | stdeviations=stdeviations
34 | )
35 |
36 | # Check out the data.
37 | gpm.data
38 |
39 | .. image:: ../_img/basic-example-df.png
40 | :width: 350px
41 |
42 |
43 | The underlying DataFrame will have at least 5 columns: *genotypes*, *phenotypes*, *stdeviations*, *n_replicates*, and *binary*. The *binary* column is computed by the ``GenotypePhenotypeMap`` object
44 |
45 |
46 |
47 | ``mutations`` dictionary
48 | ------------------------
49 |
50 | The ``mutations`` dictionary tells GPMap what mutations, indels, etc. should be incorporated in the map. It is the most important data you pass to GPMap.
51 |
52 | It is a regular Python dictionary and looks something like:
53 |
54 | .. code-block:: python
55 |
56 | wildtype = "AA"
57 | mutations = {
58 | 0: ["A", "B"],
59 | 1: ["A", "B"]
60 | }
61 |
The key represents the position of the site, and the value represents the states possible at each site. In this example, the sequences have two sites and each site is either "A" or "B".
63 |
64 | **Non-changing sites**
65 |
66 | If a site doesn't mutate, give it a value of ``None``.
67 |
68 | .. code-block:: python
69 |
70 | wildtype = "AAA"
71 | mutations = {
72 | 0: ["A", "T"],
73 | 1: ["A", "T"],
74 | 2: None # This site does not change
75 | }
76 |
77 | Here, site 2 does not change. All sequences will only have an "A" at that site.
78 |
79 | **Indels**
80 |
81 | You can incorporate indels using the *gap* character:
82 |
83 | .. code-block:: python
84 |
85 | wildtype = "AAAA"
86 | mutations = {
87 | 0: ["A", "T"],
88 | 1: ["A", "T"],
89 | 2: None,
90 | 3: ["A", "-"] # Sometimes, this site doesn't exist.
91 | }
92 |
93 | Here, site 3 will toggle between an "A" and a missing residue "-" (deletion).
94 |
95 | Port to NetworkX
96 | ----------------
97 |
98 | In many cases, you might be interested in porting a ``GenotypePhenotypeMap`` to **NetworkX**. NetworkX provides powerful functions for analyzing and plotting complex graphs. We have written a separate package, named ``gpgraph``, to easily port ``GenotypePhenotypeMap`` to NetworkX.
99 |
100 |
101 | .. code-block:: python
102 |
103 |
104 |
--------------------------------------------------------------------------------
/tests/conftest.py:
--------------------------------------------------------------------------------
1 | __description__ = \
2 | """
3 | Configure test environment for gpmap.
4 | """
5 |
6 | import pytest
7 |
8 | import os
9 |
@pytest.fixture(scope="module")
def test_csv():
    """
    Example csv file with genotype-phenotype map
    """

    # Test files live in the "data" directory next to this module.
    this_dir = os.path.dirname(os.path.abspath(__file__))
    return os.path.join(this_dir, "data", "test_data.csv")
21 |
@pytest.fixture(scope="module")
def test_json():
    """
    Example json file with genotype-phenotype map
    """

    # Test files live in the "data" directory next to this module.
    this_dir = os.path.dirname(os.path.abspath(__file__))
    return os.path.join(this_dir, "data", "test_data.json")
33 |
@pytest.fixture(scope="module")
def test_csv_five():
    """
    Example csv file with 5 binary sites genotype-phenotype map
    """

    # Test files live in the "data" directory next to this module.
    this_dir = os.path.dirname(os.path.abspath(__file__))
    return os.path.join(this_dir, "data", "test_data_five.csv")
45 |
@pytest.fixture(scope="module")
def binary_test_data():
    """
    Simple binary map: three sites, two states each (A/B), eight genotypes.
    """

    return {
        "wildtype": "AAA",
        "genotypes": ["AAA", "AAB", "ABA", "ABB",
                      "BAA", "BAB", "BBA", "BBB"],
        "binary": ["000", "001", "010", "011",
                   "100", "101", "110", "111"],
        "mutations": {0: ["A", "B"],
                      1: ["A", "B"],
                      2: ["A", "B"]},
        "phenotypes": [0.2611278, 0.60470609, 0.13114308, 0.76428437,
                       0.5018751, 0.18654072, 0.88086482, 0.18263346],
        "errors": [0.05, 0.05, 0.05, 0.05,
                   0.05, 0.05, 0.05, 0.05],
        # NOTE(review): "length" is 8 here while each genotype is only
        # three sites long -- confirm whether this was meant to be the
        # genotype length (3) or the map size (8).
        "length": 8,
        "n": 8,
        "mutant": "BBB",
    }
79 |
@pytest.fixture(scope="module")
def mixed_test_data():
    """
    Map that is binary at two sites and trinary at a third site.
    """

    return {
        "wildtype": "AAA",
        "genotypes": ["AAA", "AAB", "AAC", "ABA",
                      "ABB", "ABC", "BAA", "BAB",
                      "BAC", "BBA", "BBB", "BBC"],
        # The trinary third site expands to two binary columns, so binary
        # genotypes are four characters long.
        "binary": ['0000', '0010', '0001', '0100',
                   '0110', '0101', '1000', '1010',
                   '1001', '1100', '1110', '1101'],
        "mutations": {0: ["A", "B"],
                      1: ["A", "B"],
                      2: ["A", "B", "C"]},
        "phenotypes": [0.60371285, 0.10893567, 0.49704416, 0.34674266,
                       0.26102007, 0.02631915, 0.44587924, 0.31596652,
                       0.87037953, 0.95649285, 0.39668621, 0.66987709],
        "errors": [0.05, 0.05, 0.05, 0.05,
                   0.05, 0.05, 0.05, 0.05,
                   0.05, 0.05, 0.05, 0.05],
    }
114 |
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python
# -*- coding: utf-8 -*-

# Note: To use the 'upload' functionality of this file, you must:
# $ pip install twine

import io
import os
import sys
from shutil import rmtree

from setuptools import find_packages, setup, Command

# Package meta-data.
NAME = 'gpmap'
DESCRIPTION = 'A Python API for managing genotype-phenotype map data'
URL = 'https://github.com/harmslab/gpmap'
EMAIL = 'zachsailer@gmail.com'
AUTHOR = 'Zachary R. Sailer'
REQUIRES_PYTHON = '>=3.7.0'
VERSION = None

# What packages are required for this module to be executed?
REQUIRED = [
    "numpy",
    "scipy",
    "pandas>=0.24.2"
]

# The rest you shouldn't have to touch too much :)
# ------------------------------------------------
# Except, perhaps the License and Trove Classifiers!
# If you do change the License, remember to change the Trove Classifier for that!

here = os.path.abspath(os.path.dirname(__file__))

# Import the README and use it as the long-description.
# Note: this will only work if 'README.md' is present in your MANIFEST.in file!
# On Python 3, io.open is just the builtin open; fall back to the short
# description when the README is missing (e.g. a stripped source tree).
try:
    with open(os.path.join(here, 'README.md'), encoding='utf-8') as f:
        long_description = '\n' + f.read()
except FileNotFoundError:
    long_description = DESCRIPTION

# Load the package's __version__.py module as a dictionary.
about = {}
if not VERSION:
    with open(os.path.join(here, NAME, '__version__.py')) as f:
        exec(f.read(), about)
else:
    about['__version__'] = VERSION
49 |
50 |
class UploadCommand(Command):
    """Support setup.py upload."""

    # Text shown by `setup.py --help-commands`.
    description = 'Build and publish the package.'
    user_options = []

    @staticmethod
    def status(s):
        """Prints things in bold."""
        # ANSI escape codes: \033[1m = bold, \033[0m = reset.
        print('\033[1m{0}\033[0m'.format(s))

    def initialize_options(self):
        # Required by the distutils Command interface; nothing to set up.
        pass

    def finalize_options(self):
        # Required by the distutils Command interface; nothing to finalize.
        pass

    def run(self):
        # Remove stale build artifacts so only fresh distributions are
        # uploaded; OSError means 'dist' did not exist yet.
        try:
            self.status('Removing previous builds…')
            rmtree(os.path.join(here, 'dist'))
        except OSError:
            pass

        # Build sdist + universal wheel with the same interpreter running
        # this script.
        self.status('Building Source and Wheel (universal) distribution…')
        os.system('{0} setup.py sdist bdist_wheel --universal'.format(sys.executable))

        self.status('Uploading the package to PyPi via Twine…')
        os.system('twine upload dist/*')

        # Tag the release with the package version and push the tag.
        self.status('Pushing git tags…')
        os.system('git tag v{0}'.format(about['__version__']))
        os.system('git push --tags')

        sys.exit()
86 |
87 |
# Where the magic happens:
setup(
    name=NAME,
    version=about['__version__'],
    description=DESCRIPTION,
    long_description=long_description,
    long_description_content_type='text/markdown',
    author=AUTHOR,
    author_email=EMAIL,
    python_requires=REQUIRES_PYTHON,
    url=URL,
    packages=find_packages(exclude=('tests',)),
    install_requires=REQUIRED,
    include_package_data=True,
    license='MIT',
    classifiers=[
        # Trove classifiers
        # Full list: https://pypi.python.org/pypi?%3Aaction=list_classifiers
        'License :: OSI Approved :: MIT License',
        'Programming Language :: Python',
        'Programming Language :: Python :: 3',
        # Advertise 3.7 (not 3.6) to stay consistent with
        # REQUIRES_PYTHON = '>=3.7.0' above.
        'Programming Language :: Python :: 3.7',
        'Programming Language :: Python :: Implementation :: CPython',
        'Programming Language :: Python :: Implementation :: PyPy'
    ],
    # $ setup.py publish support.
    cmdclass={
        'upload': UploadCommand,
    },
)
118 |
--------------------------------------------------------------------------------
/docs/_pages/helpful.rst:
--------------------------------------------------------------------------------
1 | Helpful functions
2 | =================
3 |
4 | GPMap comes with many helpful functions for enumerating genotype-phenotype maps.
5 | This page provides a simple list of those functions.
6 |
7 | - :ref:`get-all-genotypes-from-mutations`
8 | - :ref:`missing-genotypes`
9 | - :ref:`genotypes-to-mutations`
10 | - :ref:`genotypes-to-binary`
11 | - :ref:`get-encoding-table`
12 |
13 | .. _get-all-genotypes-from-mutations:
14 |
Get mutations from a list of genotypes
--------------------------------------
17 |
18 | .. code-block:: python
19 |
20 | from gpmap.utils import genotypes_to_mutations
21 |
22 | wildtype = "AAA"
23 | genotypes = [
24 | "AAA",
25 | "AAB",
26 | "ABA",
27 | "BAA",
28 | "ABB",
29 | "BAB",
30 | "BBA",
31 | "BBB"
32 | ]
33 |
34 | mutations = genotypes_to_mutations(genotypes)
35 |
36 | .. _`get-encoding-table`:
37 |
38 |
39 | Get mutation encoding table
40 | ---------------------------
41 |
42 | .. code-block:: python
43 |
44 | from gpmap.utils import get_encoding_table
45 |
46 | wildtype = "AA"
47 | mutations = {
48 | 0: ["A", "B"],
49 | 1: ["A", "B"]
50 | }
51 | get_encoding_table(wildtype, mutations)
52 |
53 | .. raw:: html
54 |
55 |
56 |
57 |
58 | |
59 | binary_index_start |
60 | binary_index_stop |
61 | binary_repr |
62 | genotype_index |
63 | mutation_index |
64 | mutation_letter |
65 | wildtype_letter |
66 |
67 |
68 |
69 |
70 | | 0 |
71 | 0 |
72 | 1 |
73 | 0 |
74 | 0 |
75 | NaN |
76 | A |
77 | A |
78 |
79 |
80 | | 1 |
81 | 0 |
82 | 1 |
83 | 1 |
84 | 0 |
85 | 1 |
86 | B |
87 | A |
88 |
89 |
90 | | 2 |
91 | 1 |
92 | 2 |
93 | 0 |
94 | 1 |
95 | NaN |
96 | A |
97 | A |
98 |
99 |
100 | | 3 |
101 | 1 |
102 | 2 |
103 | 1 |
104 | 1 |
105 | 2 |
106 | B |
107 | A |
108 |
109 |
110 |
111 |
112 | .. _`genotypes-to-mutations`:
113 |
Get all genotypes from a mutations dictionary
---------------------------------------------
116 |
117 | .. code-block:: python
118 |
119 | from gpmap.utils import mutations_to_genotypes
120 |
121 | mutations = {0: ['A', 'B'], 1: ['A', 'B'], 2: ['A', 'B']}
122 |
123 | mutations_to_genotypes(mutations)
124 | # ['AAA', 'AAB', 'ABA', 'ABB', 'BAA', 'BAB', 'BBA', 'BBB']
125 |
126 |
127 | .. _`genotypes-to-binary`:
128 |
129 | Get binary representation of genotypes
130 | --------------------------------------
131 |
132 | .. code-block:: python
133 |
134 | from gpmap.utils import genotypes_to_binary, get_encoding_table
135 |
136 | wildtype = 'AAA'
137 |
138 | genotypes = [
139 | "AAA",
140 | "AAB",
141 | "ABA",
142 | "BAA",
143 | "ABB",
144 | "BAB",
145 | "BBA",
146 | "BBB"
147 | ]
148 |
149 | mutations = {0: ['A', 'B'], 1: ['A', 'B'], 2: ['A', 'B']}
150 | table = get_encoding_table(wildtype, mutations)
151 | binary = genotypes_to_binary(genotypes, table)
152 | # ['000', '001', '010', '100', '011', '101', '110', '111']
153 |
154 | .. _`missing-genotypes`:
155 |
156 | Get a list of missing genotypes from a list of genotypes
157 | --------------------------------------------------------
158 |
159 | .. code-block:: python
160 |
161 | from gpmap.utils import get_missing_genotypes
162 |
163 | genotypes = ["AAA","BBB"]
164 |
165 | get_missing_genotypes(genotypes)
166 | # ['BBA', 'BAB', 'ABB', 'ABA', 'AAB', 'BAA']
167 |
--------------------------------------------------------------------------------
/gpmap/stats.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import math
3 |
4 | # -----------------------------------------------------------------------
5 | # Unbiased calculations of sample statistics to error statistics
6 | # -----------------------------------------------------------------------
7 |
def coverage(gpm):
    """Count how often each site is observed in its mutant (1) state.

    Parameters
    ----------
    gpm : GenotypePhenotypeMap
        map exposing a ``binary`` attribute: an iterable of equal-length
        0/1 genotype strings.

    Returns
    -------
    numpy.ndarray
        per-site observation counts (column sums of the 0/1 genotype
        matrix).

    Note
    ----
    The original implementation computed these counts but returned None
    (and built an unused ``shape`` tuple); the counts are now returned.
    """
    # Build an (n_genotypes, n_sites) 0/1 observation matrix from the
    # binary genotype strings.
    obs_matrix = np.array([list(b) for b in gpm.binary], dtype=int)

    # Total number of times each mutation (column) is observed.
    return obs_matrix.sum(axis=0)
20 |
21 |
22 |
def c4_correction(n_samples):
    """Return the c4 correction scalar for calculating an unbiased standard
    deviation of a normal distribution from a sample.

    Parameters
    ----------
    n_samples : int
        number of samples used to estimate the standard deviation.

    Returns
    -------
    float
        correction scalar c4(n). For n_samples >= 100 the correction is
        negligible and 1.0 is returned.
    """
    k1 = round(n_samples / 2.0, 4)
    k2 = round((n_samples - 1) / 2.0, 4)

    # If the number of samples is < 100, calculate a correction scalar.
    if n_samples < 100:

        if k1.is_integer():
            # Cast to int: math.factorial rejects floats on Python >= 3.10.
            k1 = int(k1)
            c4 = np.sqrt(2.0 / (math.pi * (2 * k1 - 1))) * ((2**(2 * k1 - 2) *
                math.factorial(k1 - 1)**2) / math.factorial(2 * k1 - 2))
        elif k2.is_integer():
            k2 = int(k2)
            c4 = np.sqrt(math.pi / k2) * (math.factorial(2 * k2 - 1) /
                (2**(2 * k2 - 1) * math.factorial(k2 - 1)**2))
        else:
            raise Exception("""Non-integer value for correction term, c4.""")

    # Else this scalar is 1
    else:
        c4 = 1.0

    return c4
45 |
46 |
def unbiased_var(x, axis=None):
    """Return the unbiased (ddof=1) sample variance of x.

    Parameters
    ----------
    x : array-like
        samples.
    axis : int, optional
        axis along which to compute the variance; None (default) uses the
        flattened array.

    Returns
    -------
    float or numpy.ndarray
        unbiased variance estimate.

    Note
    ----
    The original body returned np.std (not a variance), ignored the axis
    argument, and hard-coded axis=1 (which fails on 1-D input); this is
    the documented behavior instead.
    """
    # First make sure that the samples are in a numpy array
    return np.var(np.array(x), axis=axis, ddof=1)
51 |
52 |
def unbiased_std(x, axis=None):
    """Unbiased estimate of the standard deviation.

    Scales numpy's ddof=1 standard deviation by the c4 correction factor,
    which matters for sample sizes below 100.
    """
    samples = np.array(x)

    # Number of samples along the axis of interest.
    n_samples = samples.size if axis is None else samples.shape[axis]

    # A single sample gets numpy's plain standard deviation (no correction).
    if n_samples == 1:
        return np.std(samples, axis=axis)

    # Apply the c4 correction to the ddof=1 estimate.
    return c4_correction(n_samples) * np.std(samples, axis=axis, ddof=1)
76 |
77 |
def unbiased_sterror(x, axis=None):
    """Unbiased standard error."""
    samples = np.array(x)

    # Number of samples along the axis of interest.
    if axis is None:
        n_samples = samples.size
    else:
        n_samples = samples.shape[axis]

    # Biased standard deviation, scaled by a c4-derived factor.
    # NOTE(review): the sqrt(1 - c4) factor differs from the usual
    # sqrt(c4**-2 - 1) form seen elsewhere in this module's comments --
    # confirm the intended formula.
    biased_std = np.std(samples, axis=axis)
    return biased_std * np.sqrt(1 - c4_correction(n_samples))
95 |
96 |
97 | # -----------------------------------------------------------------------
98 | # Correction to sample statistics
99 | # -----------------------------------------------------------------------
100 |
def corrected_std(var, n_samples=2):
    """Calculate the unbiased standard deviation from a biased variance.

    Parameters are the biased variance(s) and the number of samples used
    to estimate them.
    """
    biased_std = np.sqrt(np.array(var))

    # For large sample sizes the c4 correction is negligible.
    correction = 1 if n_samples > 100 else c4_correction(n_samples)
    return biased_std / correction
112 |
113 |
def corrected_sterror(var, n_samples=2):
    """Calculate a standard error from a BIASED standard deviation.

    Parameters
    ----------
    var : array-like
        biased variance(s).
    n_samples : int
        number of samples used to estimate the variance.

    Returns
    -------
    float or numpy.ndarray
        standard error, sqrt(var) / sqrt(n_samples).

    Note
    ----
    Both branches of the original implementation computed exactly the same
    value (no c4 correction was actually applied despite the commented-out
    code), so the duplication was collapsed; behavior is unchanged.
    """
    _std = np.sqrt(np.array(var))
    return _std / np.sqrt(n_samples)
131 |
--------------------------------------------------------------------------------
/gpmap/simulate/nk.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import itertools as it
3 |
4 | from gpmap.gpm import GenotypePhenotypeMap
5 | from gpmap import utils
6 | from .base import random_mutation_set, BaseSimulation
7 |
8 |
class NKSimulation(BaseSimulation):
    """Generate genotype-phenotype map from NK fitness model. Creates a table
    with binary sub-sequences that determine the order of epistasis in the
    model.

    The NK fitness landscape is created using a table with binary, length-K,
    sub-sequences mapped to random values. All genotypes are binary with
    length N. The fitness of a genotype is constructed by summing the values
    of all sub-sequences that make up the genotype using a sliding window
    across the full genotype.

    For example, imagine an NK simulation with N=5 and K=2. To construct
    the fitness for the 01011 genotype, select the following sub-sequences
    from an NK table "01", "10", "01", "11", "10". Sum their values together.

    Parameters
    ----------
    wildtype : str
        reference genotype, forwarded to BaseSimulation.
    mutations : dict
        mutation alphabet per site, forwarded to BaseSimulation.
    K : int
        order of the NK model; length of the binary sub-sequences in the
        NK table.
    k_range : tuple of two floats
        (low, high) bounds of the uniform distribution the NK table values
        are drawn from.

    Attributes
    ----------
    nk_table : dict
        table with binary sub-sequences as keys which are used to construct
        phenotypes following an NK routine
    keys : array
        array of keys in NK table.
    values : array
        array of values in the NK table.
    """

    def __init__(self, wildtype, mutations, K, k_range=(0, 1),
                 *args, **kwargs):
        super(NKSimulation, self).__init__(wildtype, mutations,
                                           *args, **kwargs)
        # Set parameters
        self.set_order(K)
        self.set_random_values(k_range=k_range)
        # NOTE(review): set_random_values() already calls build(), so this
        # second call recomputes the same phenotypes.
        self.build()

    @property
    def nk_table(self):
        """NK table mapping binary sequence to value."""
        return self.map("keys", "values")

    @property
    def keys(self):
        """NK table keys.
        """
        return self._keys

    @property
    def values(self):
        """NK table values
        """
        return self._values

    def set_order(self, K):
        """Set the order (K) of the NK model.

        Enumerates all 2**K binary sub-sequences as table keys and clears
        the phenotypes column (it is rebuilt by build()).
        """
        self.K = K
        # point to order
        self.order = self.K
        self._keys = np.array(["".join(r) for r in
                               it.product('01', repeat=self.K)])
        # Reset phenotypes
        self.data['phenotypes'] = np.empty(self.n, dtype=float)

    def set_random_values(self, k_range=(0, 1)):
        """Set the values of the NK table by drawing from a uniform
        distribution between the given k_range.

        Rebuilds the phenotypes from the new table.
        """
        if hasattr(self, "keys") is False:
            raise Exception("Need to set K first. Try `set_order` method.")
        self._values = np.random.uniform(k_range[0], k_range[1],
                                         size=len(self.keys))
        self.build()

    def set_table_values(self, values):
        """Set the values of the NK table from a list/array of values.

        `values` must have one entry per NK-table key (2**K entries);
        phenotypes are rebuilt from the new table.
        """
        if len(values) != len(self.keys):
            raise Exception("Length of the values do not equal the length of "
                            "NK keys. "
                            "Length of keys is : %d" % (len(self.keys),))
        self._values = values
        self.build()

    def build(self):
        """Build phenotypes from NK table.

        For every genotype, slides a length-K window over its binary
        representation (wrapping around the sequence ends) and sums the
        NK-table values of the windows.
        """
        nk_table = self.nk_table
        # Check for even interaction
        # For even K the window is asymmetric: one fewer site before j
        # than after, so pre_neighbor + neighbor + 1 == K in both cases.
        neighbor = int(self.order / 2)
        if self.order % 2 == 0:
            pre_neighbor = neighbor - 1
        else:
            pre_neighbor = neighbor
        # Use NK table to build phenotypes
        phenotypes = np.zeros(self.n, dtype=float)
        for i in range(len(self.genotypes)):
            f_total = 0
            for j in range(self.length):
                if j - pre_neighbor < 0:
                    # Window extends past the left edge: borrow the last
                    # pre_neighbor characters as the wrapped prefix.
                    # NOTE(review): this takes the final pre_neighbor chars
                    # regardless of j, which may misalign the wrapped window
                    # for j > 0 -- confirm intended wrap semantics.
                    pre = self.binary[i][-pre_neighbor:]
                    post = self.binary[i][j:neighbor + j + 1]
                    f = "".join(pre) + "".join(post)
                elif j + neighbor > self.length - 1:
                    # Window extends past the right edge: wrap around to
                    # the first `neighbor` characters.
                    pre = self.binary[i][j - pre_neighbor:j + 1]
                    post = self.binary[i][0:neighbor]
                    f = "".join(pre) + "".join(post)
                else:
                    # Fully interior window of length K centered near j.
                    f = "".join(
                        self.binary[i][j - pre_neighbor:j + neighbor + 1])
                f_total += nk_table[f]
            phenotypes[i] = f_total
        self.data.phenotypes = phenotypes
--------------------------------------------------------------------------------
/tests/test_gpm.py:
--------------------------------------------------------------------------------
1 | import pytest
2 |
3 | from gpmap import utils
4 | from gpmap import GenotypePhenotypeMap
5 |
6 | import numpy as np
7 |
8 | import time, os
9 |
10 | def _compare_gpmap(gpm1,gpm2):
11 | """
12 | Compare key features of two genotype-phenotype maps.
13 | """
14 |
15 | array_features = ["genotypes","phenotypes","mutations","binary","index",
16 | "stdeviations"]
17 | for a in array_features:
18 | assert np.array_equal(gpm1.__getattribute__(a),
19 | gpm2.__getattribute__(a))
20 |
21 | general_features = ["wildtype","mutant","n","length"]
22 | for g in general_features:
23 | assert gpm1.__getattribute__(g) == gpm2.__getattribute__(g)
24 |
25 | def _compare_gmap_to_input_dict(gpm,input_dict):
26 | """
27 | Compare a bunch of features of a genotype-phenotype map to data from a
28 | pytest input dictionary used to create it.
29 | """
30 |
31 | array_features = ["genotypes","phenotypes","mutations","binary","stdeviations"]
32 | for a in array_features:
33 | gpm_value = gpm.__getattribute__(a)
34 |
35 | try:
36 | dict_value = input_dict[a]
37 | except KeyError:
38 | continue
39 |
40 | assert len(gpm_value) == len(dict_value)
41 | for i in range(len(gpm_value)):
42 | assert gpm_value[i] == dict_value[i]
43 |
44 | general_features = ["wildtype","mutant","n","length"]
45 | for g in general_features:
46 | gpm_value = gpm.__getattribute__(g)
47 | try:
48 | dict_value = input_dict[g]
49 | except KeyError:
50 | continue
51 |
52 | assert gpm_value == dict_value
53 |
54 |
def test_constructor(binary_test_data):
    """
    Test basic use of GenotypePhenotypeMap constructor.
    """

    gpm = GenotypePhenotypeMap(wildtype=binary_test_data["wildtype"],
                               genotypes=binary_test_data["genotypes"],
                               log_transform=False)

    assert isinstance(gpm, GenotypePhenotypeMap)

    # Only compare features the constructor can infer without phenotypes.
    expected = {key: binary_test_data[key]
                for key in ("wildtype", "genotypes", "binary",
                            "mutant", "mutations")}

    _compare_gmap_to_input_dict(gpm, expected)
74 |
75 |
def test_nonbinary_constructor(mixed_test_data):
    """
    Test GenotypePhenotypeMap constructor on non-binary input data.
    """

    # Constructor call with a trinary site in the input.
    gpm = GenotypePhenotypeMap(wildtype=mixed_test_data["wildtype"],
                               genotypes=mixed_test_data["genotypes"],
                               phenotypes=mixed_test_data["phenotypes"],
                               log_transform=False)

    assert isinstance(gpm, GenotypePhenotypeMap)
    _compare_gmap_to_input_dict(gpm, mixed_test_data)
90 |
91 |
def test_read_json(test_json):
    """
    Test basic reading from json.
    """
    gpm = GenotypePhenotypeMap.read_json(test_json)
    assert isinstance(gpm, GenotypePhenotypeMap)
98 |
99 |
def test_read_csv(test_csv):
    """
    Test basic reading from csv.
    """
    gpm = GenotypePhenotypeMap.read_csv(test_csv, wildtype='AAA')
    assert isinstance(gpm, GenotypePhenotypeMap)
106 |
107 |
def test_data_integrity_csv(binary_test_data, tmp_path):
    """
    Write to a csv and make sure it reads back in properly.
    """

    gpm = GenotypePhenotypeMap(wildtype=binary_test_data["wildtype"],
                               genotypes=binary_test_data["genotypes"],
                               phenotypes=binary_test_data["phenotypes"],
                               stdeviations=binary_test_data["errors"],
                               log_transform=False)

    # Write out a csv file
    csv_file = os.path.join(tmp_path, "tmp.csv")
    gpm.to_csv(csv_file)
    assert os.path.exists(csv_file)

    # Reading without a wildtype should fail.
    with pytest.raises(TypeError):
        GenotypePhenotypeMap.read_csv(csv_file)

    # Round-trip: the re-read map must match the original.
    round_trip = GenotypePhenotypeMap.read_csv(csv_file, wildtype="AAA")
    _compare_gpmap(gpm, round_trip)
132 |
133 |
134 | #def test_std(tmp_gpm_file):
135 | # """Test raw errors"""
136 | # np.testing.assert_array_equal(tmp_gpm_file.std.upper, tmp_gpm_file.stdeviations)
137 |
138 |
def test_missing_genotypes(binary_test_data):
    """
    Test that missing genotypes are identified.
    """

    gpm = GenotypePhenotypeMap(wildtype=binary_test_data["wildtype"],
                               genotypes=binary_test_data["genotypes"],
                               phenotypes=binary_test_data["phenotypes"],
                               log_transform=False)

    # Shuffle the genotype indices and keep half of the map.
    index = np.arange(0, 8)
    np.random.shuffle(index)
    kept_g = np.array(gpm.genotypes)[index[:4]]
    dropped_g = np.array(gpm.genotypes)[index[4:]]
    kept_p = np.array(gpm.phenotypes)[index[:4]]

    # Build a map from the kept half only.
    partial_gpm = GenotypePhenotypeMap(wildtype=gpm.wildtype,
                                       genotypes=kept_g,
                                       phenotypes=kept_p,
                                       log_transform=False,
                                       mutations=gpm.mutations)

    assert np.array_equal(partial_gpm.genotypes, kept_g)
    # The dropped half must be reported as missing.
    assert np.array_equal(np.sort(partial_gpm.get_missing_genotypes()),
                          np.sort(dropped_g))
169 |
--------------------------------------------------------------------------------
/gpmap/simulate/fuji.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | from gpmap.gpm import GenotypePhenotypeMap
3 | from gpmap import utils
4 | from .base import random_mutation_set, BaseSimulation
5 |
6 |
7 | class MountFujiSimulation(BaseSimulation):
8 | """Constructs a genotype-phenotype map from a Mount Fuji model. [1]_
9 |
10 | A Mount Fuji sets a "global" fitness peak (max) on a single genotype in
11 | the space. The fitness goes down as a function of hamming distance away
12 | from this genotype, called a "fitness field". The strength (or scale) of
13 | this field is linear and depends on the parameters `field_strength`.
14 | Roughness can be added to the Mount Fuji model using a random
15 | `roughness` parameter. This assigns a random
16 |
17 | .. math::
18 |
19 | f(g) = \\nu (g) + c \cdot d(g_0, g)
20 |
21 | where $\\nu$ is the roughness parameter, $c$ is the field strength,
22 | and $d$ is the hamming distance between genotype $g$ and the
23 | reference genotype.
24 |
25 | Parameters
26 | ----------
27 | wildtype : str
28 | reference genotype to put the
29 |
30 | mutations : dict
31 | mutations alphabet for each site
32 |
33 | field_strength : float
34 | field strength
35 |
36 | roughness_width : float
37 | Width of roughness distribution
38 |
39 | roughness_dist : str, 'normal'
40 | Distribution used to create noise around phenotypes.
41 |
42 |
43 | References
44 | ----------
45 |
46 | _ [1] Szendro, Ivan G., et al. "Quantitative analyses of empirical fitness
47 | landscapes." Journal of Statistical Mechanics: Theory and Experiment
48 | 2013.01 (2013): P01005.
49 | """
50 |
51 | def __init__(
52 | self,
53 | wildtype,
54 | mutations,
55 | field_strength=1,
56 | roughness_width=None,
57 | roughness_dist='normal',
58 | *args,
59 | **kwargs):
60 | # Call parent class.
61 | super(MountFujiSimulation, self).__init__(wildtype, mutations,
62 | *args, **kwargs)
63 | # Set the field strength and roughness
64 | self._field_strength = field_strength
65 | self._roughness_width = roughness_width
66 | self._roughness_dist = roughness_dist
67 | self._roughness = None
68 | self.build()
69 |
70 | @classmethod
71 | def from_length(
72 | cls,
73 | length,
74 | field_strength=1,
75 | roughness_width=None,
76 | roughness_dist='normal',
77 | *args,
78 | **kwargs):
79 | """Constructs a genotype-phenotype map from a Mount Fuji model. [1]_
80 |
81 | A Mount Fuji sets a "global" fitness peak (max) on a single genotype in
82 | the space. The fitness goes down as a function of hamming distance away
83 | from this genotype, called a "fitness field". The strength (or scale) of
84 | this field is linear and depends on the parameters `field_strength`.
85 | Roughness can be added to the Mount Fuji model using a random
86 | `roughness` parameter. This assigns a random
87 |
88 | .. math::
89 |
90 | f(g) = \\nu (g) + c \cdot d(g_0, g)
91 |
92 | where $\\nu$ is the roughness parameter, $c$ is the field strength,
93 | and $d$ is the hamming distance between genotype $g$ and the
94 | reference genotype.
95 |
96 | Parameters
97 | ----------
98 | length : int
99 | length of the genotypes.
100 |
101 | field_strength : float
102 | field strength
103 |
104 | roughness_width : float
105 | Width of roughness distribution
106 |
107 | roughness_dist : str, 'normal'
108 | Distribution used to create noise around phenotypes.
109 | """
110 | cls = super(MountFujiSimulation, cls).from_length(
111 | length,
112 | field_strength=field_strength,
113 | roughness_width=roughness_width,
114 | roughness_dist=roughness_dist,
115 | *args,
116 | **kwargs
117 | )
118 | return cls
119 |
120 | @property
121 | def hamming(self):
122 | """Hamming distance from reference"""
123 | try:
124 | return self._hamming
125 | # calculate the hamming distance if not done already
126 | except AttributeError:
127 | hd = np.empty(self.n, dtype=int)
128 | for i, g in enumerate(self.genotypes):
129 | hd[i] = utils.hamming_distance(self.wildtype, g)
130 | self._hamming = hd
131 | return self._hamming
132 |
133 | @property
134 | def roughness(self):
135 | """Array of roughness values for all genotypes"""
136 | if self._roughness is not None:
137 | return self._roughness
138 |
139 | elif self.roughness_width is None:
140 | return np.zeros(self.n)
141 |
142 | elif self.roughness_dist == 'normal':
143 | # Set roughness.
144 | self._roughness = np.random.normal(
145 | scale=self.roughness_width,
146 | size=self.n)
147 |
148 | return self._roughness
149 |
150 | elif self.roughness_dist == 'uniform':
151 | # Set roughness.
152 | self._roughness = np.random.uniform(
153 | high=self.roughness_width,
154 | low=-self.roughness_width,
155 | size=self.n)
156 |
157 | return self._roughness
158 |
159 | else:
160 | raise Exception("Roughness isn't set.")
161 |
162 | @property
163 | def roughness_dist(self):
164 | """Roughness distribution."""
165 | return self._roughness_dist
166 |
167 | @roughness_dist.setter
168 | def roughess_dist(self, roughness_dist):
169 | """Set the roughness distribution. Also sets the roughness array.s
170 | """
171 | # Make sure roughness dist is the right type.
172 | if not isinstance(roughness_dist, str):
173 | raise TypeError('roughness_dist must be a string.')
174 |
175 | # Get roughness distribution
176 | if roughness_dist not in ['normal', 'uniform']:
177 | raise AttributeError('roughness_dist must be '
178 | 'either normal or uniform')
179 |
180 | # Set roughness distribution and reset map
181 | self._roughness_dist = roughness_dist
182 | self._roughness = None
183 | self.build()
184 |
185 | @property
186 | def roughness_width(self):
187 | return self._roughness_width
188 |
189 | @roughness_width.setter
190 | def roughness_width(self, roughness_width):
191 | # Set roughness distribution and reset map
192 | self._roughness_width = roughness_width
193 | self._roughness = None
194 | self.build()
195 |
196 | @property
197 | def field_strength(self):
198 | return self._field_strength
199 |
200 | @field_strength.setter
201 | def field_strength(self, c):
202 | self._field_strength = c
203 | self.build()
204 |
205 | @property
206 | def scale(self):
207 | """Mt. Fuji phenotypes without noise."""
208 | return self.field_strength * self.hamming
209 |
210 | def build(self):
211 | """Construct phenotypes using a rough Mount Fuji model."""
212 | self.data.phenotypes = self.roughness + self.scale
213 |
--------------------------------------------------------------------------------
/docs/make.bat:
--------------------------------------------------------------------------------
1 | @ECHO OFF
2 |
3 | REM Command file for Sphinx documentation
4 |
5 | if "%SPHINXBUILD%" == "" (
6 | set SPHINXBUILD=sphinx-build
7 | )
8 | set BUILDDIR=_build
9 | set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% .
10 | set I18NSPHINXOPTS=%SPHINXOPTS% .
11 | if NOT "%PAPER%" == "" (
12 | set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS%
13 | set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS%
14 | )
15 |
16 | if "%1" == "" goto help
17 |
18 | if "%1" == "help" (
19 | :help
 20 | 	echo.Please use `make ^<target^>` where ^<target^> is one of
21 | echo. html to make standalone HTML files
22 | echo. dirhtml to make HTML files named index.html in directories
23 | echo. singlehtml to make a single large HTML file
24 | echo. pickle to make pickle files
25 | echo. json to make JSON files
26 | echo. htmlhelp to make HTML files and a HTML help project
27 | echo. qthelp to make HTML files and a qthelp project
28 | echo. devhelp to make HTML files and a Devhelp project
29 | echo. epub to make an epub
30 | echo. epub3 to make an epub3
31 | echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter
32 | echo. text to make text files
33 | echo. man to make manual pages
34 | echo. texinfo to make Texinfo files
35 | echo. gettext to make PO message catalogs
36 | echo. changes to make an overview over all changed/added/deprecated items
37 | echo. xml to make Docutils-native XML files
38 | echo. pseudoxml to make pseudoxml-XML files for display purposes
39 | echo. linkcheck to check all external links for integrity
40 | echo. doctest to run all doctests embedded in the documentation if enabled
41 | echo. coverage to run coverage check of the documentation if enabled
42 | echo. dummy to check syntax errors of document sources
43 | goto end
44 | )
45 |
46 | if "%1" == "clean" (
47 | for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i
48 | del /q /s %BUILDDIR%\*
49 | goto end
50 | )
51 |
52 |
53 | REM Check if sphinx-build is available and fallback to Python version if any
54 | %SPHINXBUILD% 1>NUL 2>NUL
55 | if errorlevel 9009 goto sphinx_python
56 | goto sphinx_ok
57 |
58 | :sphinx_python
59 |
60 | set SPHINXBUILD=python -m sphinx.__init__
61 | %SPHINXBUILD% 2> nul
62 | if errorlevel 9009 (
63 | echo.
64 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
65 | echo.installed, then set the SPHINXBUILD environment variable to point
66 | echo.to the full path of the 'sphinx-build' executable. Alternatively you
67 | echo.may add the Sphinx directory to PATH.
68 | echo.
69 | echo.If you don't have Sphinx installed, grab it from
70 | echo.http://sphinx-doc.org/
71 | exit /b 1
72 | )
73 |
74 | :sphinx_ok
75 |
76 |
77 | if "%1" == "html" (
78 | %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html
79 | if errorlevel 1 exit /b 1
80 | echo.
81 | echo.Build finished. The HTML pages are in %BUILDDIR%/html.
82 | goto end
83 | )
84 |
85 | if "%1" == "dirhtml" (
86 | %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml
87 | if errorlevel 1 exit /b 1
88 | echo.
89 | echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml.
90 | goto end
91 | )
92 |
93 | if "%1" == "singlehtml" (
94 | %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml
95 | if errorlevel 1 exit /b 1
96 | echo.
97 | echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml.
98 | goto end
99 | )
100 |
101 | if "%1" == "pickle" (
102 | %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle
103 | if errorlevel 1 exit /b 1
104 | echo.
105 | echo.Build finished; now you can process the pickle files.
106 | goto end
107 | )
108 |
109 | if "%1" == "json" (
110 | %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json
111 | if errorlevel 1 exit /b 1
112 | echo.
113 | echo.Build finished; now you can process the JSON files.
114 | goto end
115 | )
116 |
117 | if "%1" == "htmlhelp" (
118 | %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp
119 | if errorlevel 1 exit /b 1
120 | echo.
121 | echo.Build finished; now you can run HTML Help Workshop with the ^
122 | .hhp project file in %BUILDDIR%/htmlhelp.
123 | goto end
124 | )
125 |
126 | if "%1" == "qthelp" (
127 | %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp
128 | if errorlevel 1 exit /b 1
129 | echo.
130 | echo.Build finished; now you can run "qcollectiongenerator" with the ^
131 | .qhcp project file in %BUILDDIR%/qthelp, like this:
132 | echo.^> qcollectiongenerator %BUILDDIR%\qthelp\seqspace.qhcp
133 | echo.To view the help file:
134 | echo.^> assistant -collectionFile %BUILDDIR%\qthelp\seqspace.ghc
135 | goto end
136 | )
137 |
138 | if "%1" == "devhelp" (
139 | %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp
140 | if errorlevel 1 exit /b 1
141 | echo.
142 | echo.Build finished.
143 | goto end
144 | )
145 |
146 | if "%1" == "epub" (
147 | %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub
148 | if errorlevel 1 exit /b 1
149 | echo.
150 | echo.Build finished. The epub file is in %BUILDDIR%/epub.
151 | goto end
152 | )
153 |
154 | if "%1" == "epub3" (
155 | %SPHINXBUILD% -b epub3 %ALLSPHINXOPTS% %BUILDDIR%/epub3
156 | if errorlevel 1 exit /b 1
157 | echo.
158 | echo.Build finished. The epub3 file is in %BUILDDIR%/epub3.
159 | goto end
160 | )
161 |
162 | if "%1" == "latex" (
163 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
164 | if errorlevel 1 exit /b 1
165 | echo.
166 | echo.Build finished; the LaTeX files are in %BUILDDIR%/latex.
167 | goto end
168 | )
169 |
170 | if "%1" == "latexpdf" (
171 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
172 | cd %BUILDDIR%/latex
173 | make all-pdf
174 | cd %~dp0
175 | echo.
176 | echo.Build finished; the PDF files are in %BUILDDIR%/latex.
177 | goto end
178 | )
179 |
180 | if "%1" == "latexpdfja" (
181 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
182 | cd %BUILDDIR%/latex
183 | make all-pdf-ja
184 | cd %~dp0
185 | echo.
186 | echo.Build finished; the PDF files are in %BUILDDIR%/latex.
187 | goto end
188 | )
189 |
190 | if "%1" == "text" (
191 | %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text
192 | if errorlevel 1 exit /b 1
193 | echo.
194 | echo.Build finished. The text files are in %BUILDDIR%/text.
195 | goto end
196 | )
197 |
198 | if "%1" == "man" (
199 | %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man
200 | if errorlevel 1 exit /b 1
201 | echo.
202 | echo.Build finished. The manual pages are in %BUILDDIR%/man.
203 | goto end
204 | )
205 |
206 | if "%1" == "texinfo" (
207 | %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo
208 | if errorlevel 1 exit /b 1
209 | echo.
210 | echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo.
211 | goto end
212 | )
213 |
214 | if "%1" == "gettext" (
215 | %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale
216 | if errorlevel 1 exit /b 1
217 | echo.
218 | echo.Build finished. The message catalogs are in %BUILDDIR%/locale.
219 | goto end
220 | )
221 |
222 | if "%1" == "changes" (
223 | %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes
224 | if errorlevel 1 exit /b 1
225 | echo.
226 | echo.The overview file is in %BUILDDIR%/changes.
227 | goto end
228 | )
229 |
230 | if "%1" == "linkcheck" (
231 | %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck
232 | if errorlevel 1 exit /b 1
233 | echo.
234 | echo.Link check complete; look for any errors in the above output ^
235 | or in %BUILDDIR%/linkcheck/output.txt.
236 | goto end
237 | )
238 |
239 | if "%1" == "doctest" (
240 | %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest
241 | if errorlevel 1 exit /b 1
242 | echo.
243 | echo.Testing of doctests in the sources finished, look at the ^
244 | results in %BUILDDIR%/doctest/output.txt.
245 | goto end
246 | )
247 |
248 | if "%1" == "coverage" (
249 | %SPHINXBUILD% -b coverage %ALLSPHINXOPTS% %BUILDDIR%/coverage
250 | if errorlevel 1 exit /b 1
251 | echo.
252 | echo.Testing of coverage in the sources finished, look at the ^
253 | results in %BUILDDIR%/coverage/python.txt.
254 | goto end
255 | )
256 |
257 | if "%1" == "xml" (
258 | %SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml
259 | if errorlevel 1 exit /b 1
260 | echo.
261 | echo.Build finished. The XML files are in %BUILDDIR%/xml.
262 | goto end
263 | )
264 |
265 | if "%1" == "pseudoxml" (
266 | %SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml
267 | if errorlevel 1 exit /b 1
268 | echo.
269 | echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml.
270 | goto end
271 | )
272 |
273 | if "%1" == "dummy" (
274 | %SPHINXBUILD% -b dummy %ALLSPHINXOPTS% %BUILDDIR%/dummy
275 | if errorlevel 1 exit /b 1
276 | echo.
277 | echo.Build finished. Dummy builder generates no files.
278 | goto end
279 | )
280 |
281 | :end
282 |
--------------------------------------------------------------------------------
/docs/Makefile:
--------------------------------------------------------------------------------
1 | # Makefile for Sphinx documentation
2 | #
3 |
4 | # You can set these variables from the command line.
5 | SPHINXOPTS =
6 | SPHINXBUILD = sphinx-build
7 | PAPER =
8 | BUILDDIR = _build
9 |
10 | # User-friendly check for sphinx-build
11 | ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1)
12 | $(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don\'t have Sphinx installed, grab it from http://sphinx-doc.org/)
13 | endif
14 |
15 | # Internal variables.
16 | PAPEROPT_a4 = -D latex_paper_size=a4
17 | PAPEROPT_letter = -D latex_paper_size=letter
18 | ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
19 | # the i18n builder cannot share the environment and doctrees with the others
20 | I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
21 |
22 | .PHONY: help
23 | help:
 24 | 	@echo "Please use \`make <target>' where <target> is one of"
25 | @echo " html to make standalone HTML files"
26 | @echo " dirhtml to make HTML files named index.html in directories"
27 | @echo " singlehtml to make a single large HTML file"
28 | @echo " pickle to make pickle files"
29 | @echo " json to make JSON files"
30 | @echo " htmlhelp to make HTML files and a HTML help project"
31 | @echo " qthelp to make HTML files and a qthelp project"
32 | @echo " applehelp to make an Apple Help Book"
33 | @echo " devhelp to make HTML files and a Devhelp project"
34 | @echo " epub to make an epub"
35 | @echo " epub3 to make an epub3"
36 | @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
37 | @echo " latexpdf to make LaTeX files and run them through pdflatex"
38 | @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx"
39 | @echo " text to make text files"
40 | @echo " man to make manual pages"
41 | @echo " texinfo to make Texinfo files"
42 | @echo " info to make Texinfo files and run them through makeinfo"
43 | @echo " gettext to make PO message catalogs"
44 | @echo " changes to make an overview of all changed/added/deprecated items"
45 | @echo " xml to make Docutils-native XML files"
46 | @echo " pseudoxml to make pseudoxml-XML files for display purposes"
47 | @echo " linkcheck to check all external links for integrity"
48 | @echo " doctest to run all doctests embedded in the documentation (if enabled)"
49 | @echo " coverage to run coverage check of the documentation (if enabled)"
50 | @echo " dummy to check syntax errors of document sources"
51 |
52 | .PHONY: clean
53 | clean:
54 | rm -rf $(BUILDDIR)/*
55 |
56 | .PHONY: html
57 | html:
58 | $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
59 | @echo
60 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
61 |
62 | .PHONY: dirhtml
63 | dirhtml:
64 | $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
65 | @echo
66 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
67 |
68 | .PHONY: singlehtml
69 | singlehtml:
70 | $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
71 | @echo
72 | @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
73 |
74 | .PHONY: pickle
75 | pickle:
76 | $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
77 | @echo
78 | @echo "Build finished; now you can process the pickle files."
79 |
80 | .PHONY: json
81 | json:
82 | $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
83 | @echo
84 | @echo "Build finished; now you can process the JSON files."
85 |
86 | .PHONY: htmlhelp
87 | htmlhelp:
88 | $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
89 | @echo
90 | @echo "Build finished; now you can run HTML Help Workshop with the" \
91 | ".hhp project file in $(BUILDDIR)/htmlhelp."
92 |
93 | .PHONY: qthelp
94 | qthelp:
95 | $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
96 | @echo
97 | @echo "Build finished; now you can run "qcollectiongenerator" with the" \
98 | ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
99 | @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/seqspace.qhcp"
100 | @echo "To view the help file:"
101 | @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/seqspace.qhc"
102 |
103 | .PHONY: applehelp
104 | applehelp:
105 | $(SPHINXBUILD) -b applehelp $(ALLSPHINXOPTS) $(BUILDDIR)/applehelp
106 | @echo
107 | @echo "Build finished. The help book is in $(BUILDDIR)/applehelp."
108 | @echo "N.B. You won't be able to view it unless you put it in" \
109 | "~/Library/Documentation/Help or install it in your application" \
110 | "bundle."
111 |
112 | .PHONY: devhelp
113 | devhelp:
114 | $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
115 | @echo
116 | @echo "Build finished."
117 | @echo "To view the help file:"
118 | @echo "# mkdir -p $$HOME/.local/share/devhelp/seqspace"
119 | @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/seqspace"
120 | @echo "# devhelp"
121 |
122 | .PHONY: epub
123 | epub:
124 | $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
125 | @echo
126 | @echo "Build finished. The epub file is in $(BUILDDIR)/epub."
127 |
128 | .PHONY: epub3
129 | epub3:
130 | $(SPHINXBUILD) -b epub3 $(ALLSPHINXOPTS) $(BUILDDIR)/epub3
131 | @echo
132 | @echo "Build finished. The epub3 file is in $(BUILDDIR)/epub3."
133 |
134 | .PHONY: latex
135 | latex:
136 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
137 | @echo
138 | @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
139 | @echo "Run \`make' in that directory to run these through (pdf)latex" \
140 | "(use \`make latexpdf' here to do that automatically)."
141 |
142 | .PHONY: latexpdf
143 | latexpdf:
144 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
145 | @echo "Running LaTeX files through pdflatex..."
146 | $(MAKE) -C $(BUILDDIR)/latex all-pdf
147 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
148 |
149 | .PHONY: latexpdfja
150 | latexpdfja:
151 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
152 | @echo "Running LaTeX files through platex and dvipdfmx..."
153 | $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja
154 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
155 |
156 | .PHONY: text
157 | text:
158 | $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
159 | @echo
160 | @echo "Build finished. The text files are in $(BUILDDIR)/text."
161 |
162 | .PHONY: man
163 | man:
164 | $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
165 | @echo
166 | @echo "Build finished. The manual pages are in $(BUILDDIR)/man."
167 |
168 | .PHONY: texinfo
169 | texinfo:
170 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
171 | @echo
172 | @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
173 | @echo "Run \`make' in that directory to run these through makeinfo" \
174 | "(use \`make info' here to do that automatically)."
175 |
176 | .PHONY: info
177 | info:
178 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
179 | @echo "Running Texinfo files through makeinfo..."
180 | make -C $(BUILDDIR)/texinfo info
181 | @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
182 |
183 | .PHONY: gettext
184 | gettext:
185 | $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
186 | @echo
187 | @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
188 |
189 | .PHONY: changes
190 | changes:
191 | $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
192 | @echo
193 | @echo "The overview file is in $(BUILDDIR)/changes."
194 |
195 | .PHONY: linkcheck
196 | linkcheck:
197 | $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
198 | @echo
199 | @echo "Link check complete; look for any errors in the above output " \
200 | "or in $(BUILDDIR)/linkcheck/output.txt."
201 |
202 | .PHONY: doctest
203 | doctest:
204 | $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
205 | @echo "Testing of doctests in the sources finished, look at the " \
206 | "results in $(BUILDDIR)/doctest/output.txt."
207 |
208 | .PHONY: coverage
209 | coverage:
210 | $(SPHINXBUILD) -b coverage $(ALLSPHINXOPTS) $(BUILDDIR)/coverage
211 | @echo "Testing of coverage in the sources finished, look at the " \
212 | "results in $(BUILDDIR)/coverage/python.txt."
213 |
214 | .PHONY: xml
215 | xml:
216 | $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml
217 | @echo
218 | @echo "Build finished. The XML files are in $(BUILDDIR)/xml."
219 |
220 | .PHONY: pseudoxml
221 | pseudoxml:
222 | $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml
223 | @echo
224 | @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml."
225 |
226 | .PHONY: dummy
227 | dummy:
228 | $(SPHINXBUILD) -b dummy $(ALLSPHINXOPTS) $(BUILDDIR)/dummy
229 | @echo
230 | @echo "Build finished. Dummy builder generates no files."
231 |
--------------------------------------------------------------------------------
/docs/_pages/io.rst:
--------------------------------------------------------------------------------
1 | Reading/Writing
  2 | ===============
3 |
4 | The ``GenotypePhenotypeMap`` object is a Pandas DataFrame at its core. Most
5 | tabular formats (i.e. Excel files, csv, tsv, ...) can be read/written.
6 |
7 | Excel Spreadsheets
8 | ------------------
9 |
10 | Excel files are supported through the ``read_excel`` method. This method requires
11 | `genotypes` and `phenotypes` columns, and can include `n_replicates` and
12 | `stdeviations` as optional columns. All other columns are ignored.
13 |
14 |
15 | **Example**: Excel spreadsheet file ("data.xlsx")
16 |
17 | .. raw:: html
18 |
19 |
20 |
21 |
22 | |
23 | genotypes |
24 | phenotypes |
25 | stdeviations |
26 | n_replicates |
27 |
28 |
29 |
30 |
31 | | 0 |
32 | PTEE |
33 | 0.243937 |
34 | 0.013269 |
35 | 1 |
36 |
37 |
38 | | 1 |
39 | PTEY |
40 | 0.657831 |
41 | 0.055803 |
42 | 1 |
43 |
44 |
45 | | 2 |
46 | PTFE |
47 | 0.104741 |
48 | 0.013471 |
49 | 1 |
50 |
51 |
52 | | 3 |
53 | PTFY |
54 | 0.683304 |
55 | 0.081887 |
56 | 1 |
57 |
58 |
59 | | 4 |
60 | PIEE |
61 | 0.774680 |
62 | 0.069631 |
63 | 1 |
64 |
65 |
66 | | 5 |
67 | PIEY |
68 | 0.975995 |
69 | 0.059985 |
70 | 1 |
71 |
72 |
73 | | 6 |
74 | PIFE |
75 | 0.500215 |
76 | 0.098893 |
77 | 1 |
78 |
79 |
80 | | 7 |
81 | PIFY |
82 | 0.501697 |
83 | 0.025082 |
84 | 1 |
85 |
86 |
87 | | 8 |
88 | RTEE |
89 | 0.233230 |
90 | 0.052265 |
91 | 1 |
92 |
93 |
94 | | 9 |
95 | RTEY |
96 | 0.057961 |
97 | 0.036845 |
98 | 1 |
99 |
100 |
101 | | 10 |
102 | RTFE |
103 | 0.365238 |
104 | 0.050948 |
105 | 1 |
106 |
107 |
108 | | 11 |
109 | RTFY |
110 | 0.891505 |
111 | 0.033239 |
112 | 1 |
113 |
114 |
115 | | 12 |
116 | RIEE |
117 | 0.156193 |
118 | 0.085638 |
119 | 1 |
120 |
121 |
122 | | 13 |
123 | RIEY |
124 | 0.837269 |
125 | 0.070373 |
126 | 1 |
127 |
128 |
129 | | 14 |
130 | RIFE |
131 | 0.599639 |
132 | 0.050125 |
133 | 1 |
134 |
135 |
136 | | 15 |
137 | RIFY |
138 | 0.277137 |
139 | 0.072571 |
140 | 1 |
141 |
142 |
143 |
144 |
145 |
146 | Read the spreadsheet directly into the GenotypePhenotypeMap.
147 |
148 | .. code-block:: python
149 |
150 |
151 | from gpmap import GenotypePhenotypeMap
152 |
153 | gpm = GenotypePhenotypeMap.read_excel(wildtype="PTEE", filename="data.xlsx")
154 |
155 |
156 | CSV File
157 | --------
158 |
159 | CSV files are supported through the ``read_csv`` method. This method requires
160 | `genotypes` and `phenotypes` columns, and can include `n_replicates` and
161 | `stdeviations` as optional columns. All other columns are ignored.
162 |
163 | **Example**: CSV File
164 |
165 | .. raw:: html
166 |
167 |
168 |
169 |
170 | |
171 | genotypes |
172 | phenotypes |
173 | stdeviations |
174 | n_replicates |
175 |
176 |
177 |
178 |
179 | | 0 |
180 | PTEE |
181 | 0.243937 |
182 | 0.013269 |
183 | 1 |
184 |
185 |
186 | | 1 |
187 | PTEY |
188 | 0.657831 |
189 | 0.055803 |
190 | 1 |
191 |
192 |
193 | | 2 |
194 | PTFE |
195 | 0.104741 |
196 | 0.013471 |
197 | 1 |
198 |
199 |
200 | | 3 |
201 | PTFY |
202 | 0.683304 |
203 | 0.081887 |
204 | 1 |
205 |
206 |
207 | | 4 |
208 | PIEE |
209 | 0.774680 |
210 | 0.069631 |
211 | 1 |
212 |
213 |
214 | | 5 |
215 | PIEY |
216 | 0.975995 |
217 | 0.059985 |
218 | 1 |
219 |
220 |
221 | | 6 |
222 | PIFE |
223 | 0.500215 |
224 | 0.098893 |
225 | 1 |
226 |
227 |
228 | | 7 |
229 | PIFY |
230 | 0.501697 |
231 | 0.025082 |
232 | 1 |
233 |
234 |
235 | | 8 |
236 | RTEE |
237 | 0.233230 |
238 | 0.052265 |
239 | 1 |
240 |
241 |
242 | | 9 |
243 | RTEY |
244 | 0.057961 |
245 | 0.036845 |
246 | 1 |
247 |
248 |
249 | | 10 |
250 | RTFE |
251 | 0.365238 |
252 | 0.050948 |
253 | 1 |
254 |
255 |
256 | | 11 |
257 | RTFY |
258 | 0.891505 |
259 | 0.033239 |
260 | 1 |
261 |
262 |
263 | | 12 |
264 | RIEE |
265 | 0.156193 |
266 | 0.085638 |
267 | 1 |
268 |
269 |
270 | | 13 |
271 | RIEY |
272 | 0.837269 |
273 | 0.070373 |
274 | 1 |
275 |
276 |
277 | | 14 |
278 | RIFE |
279 | 0.599639 |
280 | 0.050125 |
281 | 1 |
282 |
283 |
284 | | 15 |
285 | RIFY |
286 | 0.277137 |
287 | 0.072571 |
288 | 1 |
289 |
290 |
291 |
292 |
293 |
294 |
295 | Read the csv directly into the GenotypePhenotypeMap.
296 |
297 | .. code-block:: python
298 |
299 | from gpmap import GenotypePhenotypeMap
300 |
301 | gpm = GenotypePhenotypeMap.read_csv(wildtype="PTEE", filename="data.csv")
302 |
303 |
304 |
305 | JSON Format
306 | -----------
307 |
308 | The only keys recognized by the json reader are:
309 |
310 | 1. `genotypes`
311 | 2. `phenotypes`
312 | 3. `stdeviations`
313 | 4. `mutations`
314 | 5. `n_replicates`
315 |
316 | All other keys are ignored in the epistasis models. You can keep other metadata
317 | stored in the JSON, but it won't be appended to the epistasis model object.
318 |
319 | .. code-block:: javascript
320 |
321 | {
322 | "genotypes" : [
323 | '000',
324 | '001',
325 | '010',
326 | '011',
327 | '100',
328 | '101',
329 | '110',
330 | '111'
331 | ],
332 | "phenotypes" : [
333 | 0.62344582,
334 | 0.87943151,
335 | -0.11075798,
336 | -0.59754471,
337 | 1.4314798,
338 | 1.12551439,
339 | 1.04859722,
340 | -0.27145593
341 | ],
342 | "stdeviations" : [
343 | 0.01,
344 | 0.01,
345 | 0.01,
346 | 0.01,
347 | 0.01,
348 | 0.01,
349 | 0.01,
350 | 0.01,
351 | ],
352 | "mutations" : {
353 | 0 : ["0", "1"],
354 | 1 : ["0", "1"],
355 | 2 : ["0", "1"],
356 |     },
357 | "n_replicates" : 12,
358 | "title" : "my data",
359 | "description" : "a really hard experiment"
360 | }
361 |
--------------------------------------------------------------------------------
/docs/conf.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # -*- coding: utf-8 -*-
3 | #
4 | # gpmap documentation build configuration file, created by
5 | # sphinx-quickstart on Fri Jul 8 10:41:24 2016.
6 | #
7 | # This file is execfile()d with the current directory set to its
8 | # containing dir.
9 | #
10 | # Note that not all possible configuration values are present in this
11 | # autogenerated file.
12 | #
13 | # All configuration values have a default; values that are commented out
14 | # serve to show the default.
15 |
16 | import sys
17 | import os
18 |
19 | # importing modules with weird dependencies
20 | try:
21 | from mock import Mock as MagicMock
22 | except ImportError:
23 | from unittest.mock import MagicMock
24 |
# Mock that answers every attribute access with another Mock, so Sphinx
# autodoc can import modules whose heavy dependencies (MOCK_MODULES) are
# absent in the docs-build environment.
class Mock(MagicMock):
    @classmethod
    def __getattr__(cls, name):
        # Return a fresh Mock for any attribute, recursively mockable.
        return Mock()
29 |
30 | MOCK_MODULES = []
31 | sys.modules.update((mod_name, Mock()) for mod_name in MOCK_MODULES)
32 |
33 | # If extensions (or modules to document with autodoc) are in another directory,
34 | # add these directories to sys.path here. If the directory is relative to the
35 | # documentation root, use os.path.abspath to make it absolute, like shown here.
36 | #sys.path.insert(0, os.path.abspath('.'))
37 |
38 | # -- General configuration ------------------------------------------------
39 |
40 | # If your documentation needs a minimal Sphinx version, state it here.
41 | #needs_sphinx = '1.0'
42 |
43 | # Add any Sphinx extension module names here, as strings. They can be
44 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
45 | # ones.
46 | extensions = [
47 | 'sphinx.ext.autodoc',
48 | 'sphinx.ext.mathjax',
49 | 'sphinx.ext.napoleon'
50 | ]
51 |
52 | # Add any paths that contain templates here, relative to this directory.
53 | templates_path = ['_templates']
54 |
55 | # The suffix(es) of source filenames.
56 | # You can specify multiple suffix as a list of string:
57 | # source_suffix = ['.rst', '.md']
58 | source_suffix = '.rst'
59 |
60 | # The encoding of source files.
61 | #source_encoding = 'utf-8-sig'
62 |
63 | # The master toctree document.
64 | master_doc = 'index'
65 |
66 | # General information about the project.
67 | project = 'gpmap'
68 | copyright = '2016, Zach Sailer'
69 | author = 'Zach Sailer'
70 |
71 | # The version info for the project you're documenting, acts as replacement for
72 | # |version| and |release|, also used in various other places throughout the
73 | # built documents.
74 | #
75 | # The short X.Y version.
76 | version = '0.1'
77 | # The full version, including alpha/beta/rc tags.
78 | release = '0.1'
79 |
80 | # The language for content autogenerated by Sphinx. Refer to documentation
81 | # for a list of supported languages.
82 | #
83 | # This is also used if you do content translation via gettext catalogs.
84 | # Usually you set "language" from the command line for these cases.
85 | language = None
86 |
87 | # There are two options for replacing |today|: either, you set today to some
88 | # non-false value, then it is used:
89 | #today = ''
90 | # Else, today_fmt is used as the format for a strftime call.
91 | #today_fmt = '%B %d, %Y'
92 |
93 | # List of patterns, relative to source directory, that match files and
94 | # directories to ignore when looking for source files.
95 | # This patterns also effect to html_static_path and html_extra_path
96 | exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
97 |
98 | # The reST default role (used for this markup: `text`) to use for all
99 | # documents.
100 | #default_role = None
101 |
102 | # If true, '()' will be appended to :func: etc. cross-reference text.
103 | #add_function_parentheses = True
104 |
105 | # If true, the current module name will be prepended to all description
106 | # unit titles (such as .. function::).
107 | #add_module_names = True
108 |
109 | # If true, sectionauthor and moduleauthor directives will be shown in the
110 | # output. They are ignored by default.
111 | #show_authors = False
112 |
113 | # The name of the Pygments (syntax highlighting) style to use.
114 | pygments_style = 'default'
115 |
116 | # A list of ignored prefixes for module index sorting.
117 | #modindex_common_prefix = []
118 |
119 | # If true, keep warnings as "system message" paragraphs in the built documents.
120 | #keep_warnings = False
121 |
122 | # If true, `todo` and `todoList` produce output, else they produce nothing.
123 | todo_include_todos = False
124 |
125 |
126 | # -- Options for HTML output ----------------------------------------------
127 |
128 | # The theme to use for HTML and HTML Help pages. See the documentation for
129 | # a list of builtin themes.
130 | html_theme = 'alabaster'
131 |
132 | # Theme options are theme-specific and customize the look and feel of a theme
133 | # further. For a list of options available for each theme, see the
134 | # documentation.
135 | #html_theme_options = {}
136 |
137 | # Add any paths that contain custom themes here, relative to this directory.
138 | #html_theme_path = []
139 |
140 | # The name for this set of Sphinx documents.
141 | # " v documentation" by default.
142 | #html_title = 'gpmap v0.1'
143 |
144 | # A shorter title for the navigation bar. Default is the same as html_title.
145 | #html_short_title = None
146 |
147 | # The name of an image file (relative to this directory) to place at the top
148 | # of the sidebar.
149 | #html_logo = None
150 |
151 | # The name of an image file (relative to this directory) to use as a favicon of
152 | # the docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
153 | # pixels large.
154 | #html_favicon = None
155 |
156 | # Add any paths that contain custom static files (such as style sheets) here,
157 | # relative to this directory. They are copied after the builtin static files,
158 | # so a file named "default.css" will overwrite the builtin "default.css".
159 | html_static_path = ['_static']
160 |
161 | # Add any extra paths that contain custom files (such as robots.txt or
162 | # .htaccess) here, relative to this directory. These files are copied
163 | # directly to the root of the documentation.
164 | #html_extra_path = []
165 |
166 | # If not None, a 'Last updated on:' timestamp is inserted at every page
167 | # bottom, using the given strftime format.
168 | # The empty string is equivalent to '%b %d, %Y'.
169 | #html_last_updated_fmt = None
170 |
171 | # If true, SmartyPants will be used to convert quotes and dashes to
172 | # typographically correct entities.
173 | #html_use_smartypants = True
174 |
175 | # Custom sidebar templates, maps document names to template names.
176 | #html_sidebars = {}
177 |
178 | # Additional templates that should be rendered to pages, maps page names to
179 | # template names.
180 | #html_additional_pages = {}
181 |
182 | # If false, no module index is generated.
183 | #html_domain_indices = True
184 |
185 | # If false, no index is generated.
186 | #html_use_index = True
187 |
188 | # If true, the index is split into individual pages for each letter.
189 | #html_split_index = False
190 |
191 | # If true, links to the reST sources are added to the pages.
192 | #html_show_sourcelink = True
193 |
194 | # If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
195 | #html_show_sphinx = True
196 |
197 | # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
198 | #html_show_copyright = True
199 |
200 | # If true, an OpenSearch description file will be output, and all pages will
201 | # contain a tag referring to it. The value of this option must be the
202 | # base URL from which the finished HTML is served.
203 | #html_use_opensearch = ''
204 |
205 | # This is the file name suffix for HTML files (e.g. ".xhtml").
206 | #html_file_suffix = None
207 |
208 | # Language to be used for generating the HTML full-text search index.
209 | # Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr', 'zh'
212 | #html_search_language = 'en'
213 |
214 | # A dictionary with options for the search language support, empty by default.
215 | # 'ja' uses this config value.
216 | # 'zh' user can custom change `jieba` dictionary path.
217 | #html_search_options = {'type': 'default'}
218 |
219 | # The name of a javascript file (relative to the configuration directory) that
220 | # implements a search results scorer. If empty, the default will be used.
221 | #html_search_scorer = 'scorer.js'
222 |
223 | # Output file base name for HTML help builder.
224 | htmlhelp_basename = 'gpmapdoc'
225 |
226 | # -- Options for LaTeX output ---------------------------------------------
227 |
228 | latex_elements = {
229 | # The paper size ('letterpaper' or 'a4paper').
230 | #'papersize': 'letterpaper',
231 |
232 | # The font size ('10pt', '11pt' or '12pt').
233 | #'pointsize': '10pt',
234 |
235 | # Additional stuff for the LaTeX preamble.
236 | #'preamble': '',
237 |
238 | # Latex figure (float) alignment
239 | #'figure_align': 'htbp',
240 | }
241 |
242 | # Grouping the document tree into LaTeX files. List of tuples
243 | # (source start file, target name, title,
244 | # author, documentclass [howto, manual, or own class]).
245 | latex_documents = [
246 | (master_doc, 'gpmap.tex', 'gpmap Documentation',
247 | 'Zach Sailer', 'manual'),
248 | ]
249 |
250 | # The name of an image file (relative to this directory) to place at the top of
251 | # the title page.
252 | #latex_logo = None
253 |
254 | # For "manual" documents, if this is true, then toplevel headings are parts,
255 | # not chapters.
256 | #latex_use_parts = False
257 |
258 | # If true, show page references after internal links.
259 | #latex_show_pagerefs = False
260 |
261 | # If true, show URL addresses after external links.
262 | #latex_show_urls = False
263 |
264 | # Documents to append as an appendix to all manuals.
265 | #latex_appendices = []
266 |
267 | # If false, no module index is generated.
268 | #latex_domain_indices = True
269 |
270 |
271 | # -- Options for manual page output ---------------------------------------
272 |
273 | # One entry per manual page. List of tuples
274 | # (source start file, name, description, authors, manual section).
275 | man_pages = [
276 | (master_doc, 'gpmap', 'gpmap Documentation',
277 | [author], 1)
278 | ]
279 |
280 | # If true, show URL addresses after external links.
281 | #man_show_urls = False
282 |
283 |
284 | # -- Options for Texinfo output -------------------------------------------
285 |
286 | # Grouping the document tree into Texinfo files. List of tuples
287 | # (source start file, target name, title, author,
288 | # dir menu entry, description, category)
289 | texinfo_documents = [
290 | (master_doc, 'gpmap', 'gpmap Documentation',
291 | author, 'gpmap', 'One line description of project.',
292 | 'Miscellaneous'),
293 | ]
294 |
295 | # Documents to append as an appendix to all manuals.
296 | #texinfo_appendices = []
297 |
298 | # If false, no module index is generated.
299 | #texinfo_domain_indices = True
300 |
301 | # How to display URL addresses: 'footnote', 'no', or 'inline'.
302 | #texinfo_show_urls = 'footnote'
303 |
304 | # If true, do not generate a @detailmenu in the "Top" node's menu.
305 | #texinfo_no_detailmenu = False
306 |
307 | # Napoleon settings
308 | napoleon_google_docstring = False
309 | napoleon_numpy_docstring = True
310 | napoleon_include_private_with_doc = False
311 | napoleon_include_special_with_doc = True
312 | napoleon_use_admonition_for_examples = True
313 | napoleon_use_admonition_for_notes = False
314 | napoleon_use_admonition_for_references = False
315 | napoleon_use_ivar = False
316 | napoleon_use_param = True
317 | napoleon_use_rtype = True
318 |
--------------------------------------------------------------------------------
/gpmap/simulate/multipeak_fuji.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import random
3 | from gpmap.gpm import GenotypePhenotypeMap
4 | from gpmap import utils
5 | from .base import random_mutation_set, BaseSimulation
6 |
7 |
8 |
class MultiPeakMountFujiSimulation(BaseSimulation):
    r"""Constructs a genotype-phenotype map from a multi-peak Mount Fuji
    model. [1]_

    A Mount Fuji model sets a "global" fitness peak (max) on a single
    genotype in the space. The fitness goes down as a function of hamming
    distance away from this genotype, called a "fitness field". The strength
    (or scale) of this field is linear and depends on the parameter
    `field_strength`. Roughness can be added to the Mount Fuji model using a
    random `roughness` parameter. This adds a random value (positive or
    negative) to each of the fitnesses. The multi-peak Mount Fuji model
    defines multiple fitness peaks across the map. Each of the peaks defines
    a fitness for every node as described above, but in the end, all but the
    highest fitness value will be discarded, i.e. the fitness of each node
    is defined by the nearest peak.

    .. math::

        f(g) = \nu (g) + c \cdot d(g_0, g)

    where :math:`\nu` is the roughness parameter, :math:`c` is the field
    strength, and :math:`d` is the hamming distance between genotype
    :math:`g` and the reference genotype.

    Parameters
    ----------
    wildtype : str
        Reference genotype; by default one of the peaks is placed here.

    mutations : dict
        Mutations alphabet for each site.

    field_strength : float
        Field strength.

    roughness_width : float
        Width of the roughness distribution.

    roughness_dist : str, 'normal'
        Distribution used to create noise around phenotypes
        ('normal' or 'uniform').

    peak_n : int
        The desired number of peaks in the map. (Is only guaranteed if
        roughness_width is 0, else ruggedness might lead to additional
        local fitness peaks.)

    min_dist : int (default = 1)
        Minimal hamming distance between any two peaks.

    max_dist : int, None (default = None)
        Maximal hamming distance between any two peaks. If None, the maximal
        distance is the largest possible distance, i.e. 5 in a 5-site map.

    a_state : str, None (default = None)
        Genotype that is guaranteed to become a peak. If None, a_state is
        the wildtype genotype. Together with b_state this gives two
        well-separated peaks that span the whole map. If unwanted, set any
        node as a_state and/or b_state.

    b_state : str, None (default = None)
        Genotype that is guaranteed to become a peak. If None, b_state is
        the genotype farthest from the wildtype. Together with a_state this
        gives two well-separated peaks that span the whole map. If unwanted,
        set any node as a_state and/or b_state.

    peaks : list, None (default = None)
        A list of nodes that will be defined as peaks. If None, peaks will
        be chosen randomly using the other arguments as criteria.

    References
    ----------
    .. [1] Szendro, Ivan G., et al. "Quantitative analyses of empirical
       fitness landscapes." Journal of Statistical Mechanics: Theory and
       Experiment 2013.01 (2013): P01005.
    """

    def __init__(
            self,
            wildtype,
            mutations,
            field_strength=1,
            roughness_width=None,
            roughness_dist='normal',
            peak_n=2,
            min_dist=1,
            max_dist=None,
            a_state=None,
            b_state=None,
            peaks=None,
            *args,
            **kwargs):
        # Build the underlying genotype-phenotype map first.
        super(MultiPeakMountFujiSimulation, self).__init__(wildtype, mutations,
                                                           *args, **kwargs)
        # Field and roughness parameters; roughness values are drawn lazily
        # by the `roughness` property.
        self._field_strength = field_strength
        self._roughness_width = roughness_width
        self._roughness_dist = roughness_dist
        self._roughness = None
        # Peak configuration; peaks are selected lazily by `peaks`.
        self._peak_n = peak_n
        self._peaks = peaks
        self._min_dist = min_dist
        self._max_dist = max_dist
        self._a_state = a_state
        self._b_state = b_state
        self.build()

    @classmethod
    def from_length(
            cls,
            length,
            field_strength=1,
            roughness_width=None,
            roughness_dist='normal',
            *args,
            **kwargs):
        r"""Constructs a genotype-phenotype map from a Mount Fuji model,
        given only the genotype length. See the class docstring for a
        description of the model.

        Parameters
        ----------
        length : int
            Length of the genotypes.

        field_strength : float
            Field strength.

        roughness_width : float
            Width of the roughness distribution.

        roughness_dist : str, 'normal'
            Distribution used to create noise around phenotypes.
        """
        # Delegate to the parent constructor; avoid rebinding `cls`.
        self = super(MultiPeakMountFujiSimulation, cls).from_length(
            length,
            field_strength=field_strength,
            roughness_width=roughness_width,
            roughness_dist=roughness_dist,
            *args,
            **kwargs
        )
        return self

    @property
    def a_state(self):
        """First guaranteed peak; defaults to the wildtype genotype."""
        if self._a_state is None:
            self._a_state = self.wildtype
        return self._a_state

    @property
    def b_state(self):
        """Second guaranteed peak; defaults to the genotype farthest from
        `a_state`."""
        if self._b_state is None:
            self._b_state = utils.farthest_genotype(self.a_state,
                                                    self.genotypes)
        return self._b_state

    @property
    def peaks(self):
        """List of peak genotypes, chosen lazily on first access.

        Starts from the two guaranteed peaks (`b_state`, `a_state`) and
        randomly proposes genotypes until `peak_n` peaks satisfy the
        min_dist/max_dist requirements.

        .. note:: If the distance constraints cannot be satisfied for the
           requested number of peaks, this search never terminates.
        """
        if self._peaks:
            return self._peaks
        # Seed with the two guaranteed, well-separated peaks.
        self._peaks = [self.b_state, self.a_state]
        while len(self._peaks) < self.peak_n:
            proposed = random.choice(self.genotypes)  # Propose a new peak.
            # Accept only if the distance to EVERY current peak is in range.
            acceptable = all(
                self.min_dist <= utils.hamming_distance(peak, proposed) <= self.max_dist
                for peak in self._peaks
            )
            if acceptable:
                self._peaks.append(proposed)
        return self._peaks

    @property
    def hamming(self):
        """Matrix of hamming distances; one row per peak, one column per
        genotype. Computed once and cached."""
        try:
            return self._hamming
        # Calculate the hamming distances if not done already.
        except AttributeError:
            hd = np.empty([len(self.peaks), len(self.genotypes)], dtype=int)
            for i, peak in enumerate(self.peaks):
                for j, g in enumerate(self.genotypes):
                    hd[i][j] = utils.hamming_distance(peak, g)
            self._hamming = hd
            return self._hamming

    @property
    def peak_n(self):
        """Number of peaks."""
        return self._peak_n

    @property
    def max_dist(self):
        """Maximum hamming distance allowed between two peaks."""
        if self._max_dist is None:
            # Default to the genotype length, the largest possible distance.
            self._max_dist = len(self.genotypes[0])
        return self._max_dist

    @property
    def min_dist(self):
        """Minimum hamming distance allowed between two peaks."""
        return self._min_dist

    @property
    def roughness(self):
        """Array of roughness values for all genotypes, drawn lazily from
        the configured distribution."""
        if self._roughness is not None:
            return self._roughness

        if self.roughness_width is None:
            # No roughness requested; contribute nothing to the phenotypes.
            return np.zeros(self.n)

        if self.roughness_dist == 'normal':
            self._roughness = np.random.normal(
                scale=self.roughness_width,
                size=self.n)
            return self._roughness

        if self.roughness_dist == 'uniform':
            self._roughness = np.random.uniform(
                high=self.roughness_width,
                low=-self.roughness_width,
                size=self.n)
            return self._roughness

        # roughness_width was given but the distribution name is unknown.
        raise Exception("roughness_dist must be either 'normal' or 'uniform'.")

    @property
    def roughness_dist(self):
        """Roughness distribution ('normal' or 'uniform')."""
        return self._roughness_dist

    @roughness_dist.setter
    def roughness_dist(self, roughness_dist):
        """Set the roughness distribution; resets roughness and rebuilds
        the map.

        NOTE: this setter was previously bound to the misspelled name
        `roughess_dist`, which made `obj.roughness_dist = ...` raise
        AttributeError.
        """
        # Make sure roughness dist is the right type.
        if not isinstance(roughness_dist, str):
            raise TypeError('roughness_dist must be a string.')

        # Only the two supported distributions are allowed.
        if roughness_dist not in ['normal', 'uniform']:
            raise AttributeError('roughness_dist must be '
                                 'either normal or uniform')

        # Set roughness distribution and reset map.
        self._roughness_dist = roughness_dist
        self._roughness = None
        self.build()

    @property
    def roughness_width(self):
        """Width of the roughness distribution."""
        return self._roughness_width

    @roughness_width.setter
    def roughness_width(self, roughness_width):
        """Set the roughness width; resets roughness and rebuilds the map."""
        self._roughness_width = roughness_width
        self._roughness = None
        self.build()

    @property
    def field_strength(self):
        """Linear scaling applied to hamming distances."""
        return self._field_strength

    @field_strength.setter
    def field_strength(self, c):
        """Set the field strength and rebuild the map."""
        self._field_strength = c
        self.build()

    @property
    def scale(self):
        """Multi-peak Mt. Fuji phenotypes without noise.

        Each genotype's distance to its *nearest* peak is scaled by the
        field strength, normalized by the largest such distance, and
        inverted, so genotypes far from every peak get low phenotypes.
        """
        hd = np.empty([len(self.peaks), len(self.genotypes)])
        for i, peak in enumerate(self.peaks):
            # Scale the distances from this peak by the field strength.
            hd[i] = self.hamming[i] * self.field_strength

        min_hd = hd.min(0)  # Column-wise minimum: distance to nearest peak.
        max_min = np.amax(min_hd)  # Maximum of those minima, for normalization.
        # Invert: larger distance from the nearest peak = lower phenotype.
        self._scale = 1 - (min_hd / max_min)
        return self._scale

    def build(self):
        """Construct phenotypes using the rough multi-peak Mount Fuji
        model."""
        self.data.phenotypes = self.roughness + self.scale
325 |
--------------------------------------------------------------------------------
/gpmap/utils.py:
--------------------------------------------------------------------------------
1 | __doc__ = """Utility functions for managing genotype-phenotype map data
2 | and conversions.
3 |
4 | Glossary:
5 | --------
mutations : dict
keys are site numbers in the genotypes. Values are the alphabet of mutations
at those sites.
9 |
10 | encoding : dict
11 | keys are site numbers in genotype. Values are dictionaries mapping each
12 | mutation to its binary representation.
13 |
14 |
15 |
16 | """
17 |
18 | # -------------------------------------------------------
19 | # Miscellaneous Python functions for random task
20 | # -------------------------------------------------------
21 |
22 | import itertools as it
23 | import numpy as np
24 | from scipy.special import comb
25 | from collections import OrderedDict
26 | import warnings
27 | import pandas as pd
28 |
29 | # -------------------------------------------------------
30 | # Mutation alphabets
31 | # -------------------------------------------------------
32 |
33 | DNA = ["A", "C", "G", "T"]
34 |
35 | AMINO_ACIDS = ["D", "T", "S", "E", "P", "G", "A", "C", "V", "M", "I",
36 | "L", "Y", "F", "H", "K", "R", "W", "Q", "N"]
37 |
38 |
39 | # -------------------------------------------------------
40 | # Wrappers for methods that use optional imports
41 | # -------------------------------------------------------
42 |
43 |
def ipywidgets_missing(function):
    """Decorator that checks ipywidgets is installed before trying to
    render widgets.

    If `ipywidgets` is importable, the wrapped function is called normally;
    otherwise an ImportWarning is issued (once) and the call returns None
    instead of raising ImportError.
    """

    def wrapper(*args, **kwargs):
        try:
            import ipywidgets  # noqa: F401 -- only probing availability.
            return function(*args, **kwargs)

        except ImportError:
            warnings.filterwarnings("once")
            # Previously a triple-quoted string with literal stray quote
            # characters; now a clean implicit concatenation.
            warnings.warn(
                "Looks like `ipywidgets` is not installed, so widgets can't "
                "be constructed. Install before using this method.",
                ImportWarning)

    return wrapper
63 |
64 |
65 | # -------------------------------------------------------
66 | # Useful methods for genotype-phenotype spaces
67 | # -------------------------------------------------------
68 |
69 |
def get_base(logbase):
    """Recover the base of a logarithm function.

    Parameters
    ----------
    logbase : callable
        logarithm function

    Returns
    -------
    base : float
        returns base of logarithm.
    """
    # Evaluate at an arbitrary probe value: base = exp(ln(x) / log_b(x)).
    probe = 10
    return np.exp(np.log(probe) / logbase(probe))
83 |
84 |
def hamming_distance(s1, s2):
    """Return the Hamming distance between equal-length sequences."""
    total = 0
    for a, b in zip(s1, s2):
        if a != b:
            total += 1
    return total
88 |
89 |
def sample_phenotypes(phenotypes, errors, n=1):
    """Generate `n` phenotype samples from normal distributions."""
    # One standard-normal draw per phenotype per sample.
    noise = np.random.randn(len(phenotypes), n)
    # Broadcast the scale (errors) and location (phenotypes) over columns.
    scale = np.reshape(errors, (-1, 1))
    loc = np.reshape(phenotypes, (-1, 1))
    return noise * scale + loc
97 |
98 |
99 | # -------------------------------------------------------
100 | # Utilities for searching sequence space
101 | # -------------------------------------------------------
102 |
103 |
def find_differences(s1, s2):
    """Return the index of differences between two sequences."""
    # Index by position over s1 (matches the original's IndexError behavior
    # when s2 is shorter than s1).
    return [site for site in range(len(s1)) if s1[site] != s2[site]]
111 |
112 |
def farthest_genotype(reference, genotypes):
    """Find the genotype in the system that differs at the most sites.

    Parameters
    ----------
    reference : str
        Genotype to measure distances from.
    genotypes : iterable
        Genotypes to search; must be non-empty.

    Returns
    -------
    str
        The first genotype achieving the maximum hamming distance from
        `reference`. Unlike the previous implementation, this no longer
        raises UnboundLocalError when every genotype equals the reference;
        the first genotype is returned instead.
    """
    # max() keeps the first element achieving the maximum, matching the
    # original strict-greater update rule on ties.
    return str(max(genotypes,
                   key=lambda genotype: hamming_distance(genotype, reference)))
122 |
123 |
124 | # -------------------------------------------------------
125 | # Space enumerations
126 | # -------------------------------------------------------
127 |
128 |
def list_binary(length):
    """List all binary strings with given length.
    """
    combos = it.product("01", repeat=length)
    return np.array(["".join(bits) for bits in combos])
133 |
134 |
def get_encoding_table(wildtype, mutations, site_labels=None):
    """This function constructs a lookup table (pandas.DataFrame) for mutations
    in a given mutations dictionary. This table encodes mutations with a binary representation.

    Parameters
    ----------
    wildtype : str
        Reference genotype; one letter per site.
    mutations : dict
        Maps each site index to its mutation alphabet, or to None for a
        site that never mutates.
    site_labels : array-like, optional
        Labels for each site. Defaults to "0", "1", ...; must have the same
        length as `wildtype`.

    Returns
    -------
    pandas.DataFrame
        One row per (site, letter) pair; `binary_index_start` /
        `binary_index_stop` give the slice of the full binary string owned
        by that site.
    """

    # Either grab or create site_labels. Force them to be strings.
    if site_labels is None:
        site_labels = ["{}".format(i) for i in range(len(wildtype))]
    else:
        if len(site_labels) != len(wildtype):
            err = "site_labels must be the same length as the number of sites per genotype\n"
            raise ValueError(err)
        site_labels = ["{}".format(x) for x in site_labels]

    # Initialize table
    table = []
    mutation_index_counter = 0
    binary_index_counter = 0
    for genotype_index, alphabet in mutations.items():
        # Type check genotype_index (keys may arrive as strings).
        genotype_index = int(genotype_index)

        # Handle sites that don't mutate.
        if alphabet is None:

            # Create a row for the encoding lookup table.
            # Non-mutating sites contribute an empty binary chunk
            # (start == stop) and have no mutation index.
            table.append(dict(
                genotype_index=genotype_index,
                wildtype_letter=wildtype[genotype_index],
                mutation_letter=None,
                binary_repr="",
                binary_index_start=binary_index_counter,
                binary_index_stop=binary_index_counter,
                mutation_index=None,
                site_label=site_labels[genotype_index]
            ))

        # Determine mapping for all other sites.
        else:
            # copy alphabet to avoid removing items in main object.
            alphabet_cp = alphabet[:]
            n = len(alphabet_cp) - 1  # number of mutation neighbors

            # Set wildtype state at a given genotype_index.
            # The wildtype letter is encoded as the all-zeros chunk.
            wt_site = wildtype[genotype_index]
            table.append(dict(
                genotype_index=genotype_index,
                wildtype_letter=wt_site,
                mutation_letter=wt_site,
                binary_repr="0" * n,
                binary_index_start=binary_index_counter,
                binary_index_stop=binary_index_counter + n,
                mutation_index=None,
                site_label=site_labels[genotype_index]
            ))

            # Copy alphabet again to prevent indexing error.
            # Guarded removal: if the wildtype letter is not in the
            # alphabet, the last letter is silently skipped by range(n).
            alphabet_ = alphabet_cp[:]
            if wt_site in alphabet_:
                alphabet_.remove(wt_site)

            # Add all possible mutations at given site.
            # Each mutation gets a one-hot chunk of width n; mutation_index
            # is 1-based (counter starts at 0, rows store counter + 1).
            for j in range(n):
                binary_repr = list("0" * n)
                binary_repr[j] = "1"
                binary_repr = "".join(binary_repr)
                table.append(dict(
                    genotype_index=genotype_index,
                    wildtype_letter=wt_site,
                    mutation_letter=alphabet_[j],
                    binary_repr=binary_repr,
                    binary_index_start=binary_index_counter,
                    binary_index_stop=binary_index_counter + n,
                    mutation_index=mutation_index_counter + 1,
                    site_label=site_labels[genotype_index]
                ))
                mutation_index_counter += 1
            binary_index_counter += n

    # Turn table into DataFrame.
    # Nullable 'Int64' keeps integer dtype despite the None entries above.
    df = pd.DataFrame(table)
    df.genotype_index = df.genotype_index.astype('Int64')
    df.mutation_index = df.mutation_index.astype('Int64')
    df.binary_index_start = df.binary_index_start.astype('Int64')
    df.binary_index_stop = df.binary_index_stop.astype('Int64')
    return df
221 |
222 |
def genotypes_to_binary(genotypes, encoding_table):
    """Using an encoding table (see `get_encoding_table`
    function), build a set of binary genotypes.

    Parameters
    ----------
    genotypes :
        List of the genotypes to encode. All genotypes must share one
        length; an empty list returns [].
    encoding_table :
        DataFrame that encodes the binary representation of
        each mutation in the list of genotypes. (See the
        `get_encoding_table`). Must have `genotype_index`,
        `mutation_letter` and `binary_repr` columns.

    Returns
    -------
    list
        Binary string for each genotype, in input order.

    Raises
    ------
    Exception
        If the genotypes are not all the same length.
    """
    # ---------- Sanity Checks ---------------
    # 1. Check genotypes are all same length. (An empty input is trivially
    #    consistent; the previous version crashed with IndexError.)
    if len({len(g) for g in genotypes}) > 1:
        raise Exception("Genotypes are not all the same length.")

    # Map (site index, letter) -> binary chunk for that letter.
    t = encoding_table
    mapper = dict(zip(zip(t.genotype_index, t.mutation_letter), t.binary_repr))

    # Concatenate each site's chunk to form the full binary genotype.
    return [
        "".join(mapper[(site, letter)] for site, letter in enumerate(g))
        for g in genotypes
    ]
255 |
256 |
def mutations_to_encoding(wildtype, mutations):
    """Encoding map for genotype-to-binary.

    Parameters
    ----------
    wildtype : str
        Wildtype sequence.
    mutations : dict
        Mapping of each site's mutation alphabet.
        {site-number: [alphabet]}. A value of None marks a site that never
        mutates.

    Returns
    -------
    encoding : OrderedDict of OrderedDicts
        Encoding dictionary that maps site number to mutation-binary map
        (or to the single wildtype letter for non-mutating sites).

    Examples
    --------
    ``{ <site-number> : { <mutation> : <binary> } }``
    """
    encoding = OrderedDict()

    for site_number, alphabet in mutations.items():
        site_number = int(site_number)

        # Handle sites that don't mutate.
        if alphabet is None:
            encoding[site_number] = wildtype[site_number]
            continue

        # All sites that mutate get a mapping dictionary.
        n = len(alphabet) - 1  # number of mutation neighbors
        wt_site = wildtype[site_number]  # wildtype letter

        # The wildtype letter is encoded as the all-zeros string.
        indiv_encode = OrderedDict({wt_site: "0" * n})

        # Copy so the caller's alphabet is never mutated. The removal is
        # guarded so a wildtype letter absent from the alphabet no longer
        # raises ValueError (consistent with get_encoding_table).
        alphabet_ = list(alphabet)
        if wt_site in alphabet_:
            alphabet_.remove(wt_site)

        # Every remaining letter gets a one-hot code of width n.
        for i in range(n):
            binary = list("0" * n)
            binary[i] = "1"
            indiv_encode[alphabet_[i]] = "".join(binary)
        encoding[site_number] = indiv_encode

    return encoding
305 |
306 |
def mutations_to_genotypes(mutations, wildtype=None):
    """Use a mutations dictionary to construct an array of genotypes composed
    of those mutations.

    Parameters
    ----------
    mutations : dict
        A mapping dict with site numbers as keys and lists of mutations as
        values (None marks a fixed site).

    wildtype : str
        wildtype genotype (as string); supplies letters for fixed sites.

    Returns
    -------
    genotypes : list
        list of genotypes comprised of mutations in given dictionary.
    """
    # Per-site alphabets; fixed sites contribute only the wildtype letter.
    site_alphabets = [
        wildtype[i] if alphabet is None else alphabet
        for i, alphabet in enumerate(mutations.values())
    ]
    # Cartesian product over the sites enumerates the full space.
    return ["".join(combo) for combo in it.product(*site_alphabets)]
335 |
336 |
def genotypes_to_mutations(genotypes):
    """Create mutations dictionary from a list of genotypes.
    """
    # One row per genotype, one column per site.
    arr = np.array([list(g) for g in genotypes])

    # Number of sites; raises IndexError for an empty genotype list,
    # matching the original behavior.
    n_sites = arr.shape[1]

    # Unique residues at each site, site index -> sorted letter list.
    return {site: list(np.unique(arr.T[site])) for site in range(n_sites)}
351 |
352 |
def get_missing_genotypes(genotypes, mutations=None):
    """Get a list of genotypes not found in the given genotypes list.

    Parameters
    ----------
    genotypes : list
        List of genotypes.

    mutations : dict (optional)
        Mutation dictionary; inferred from `genotypes` when omitted.

    Return
    ------
    missing_genotypes : list
        List of genotypes not found in genotypes list.
    """
    if mutations is None:
        mutations = genotypes_to_mutations(genotypes)

    # Any wildtype works here; take the first letter of each alphabet.
    wildtype = "".join(alphabet[0] for alphabet in mutations.values())

    # Enumerate the complete space, then subtract what we were given.
    complete_space = mutations_to_genotypes(mutations, wildtype)
    return list(set(complete_space) - set(genotypes))
381 |
382 |
def length_to_mutations(length, alphabet=None):
    """Build a mutations dictionary for a given alphabet.

    Parameters
    ----------
    length : int
        length of the genotypes

    alphabet : list, optional
        List of mutations at each site. Defaults to ["0", "1"].

    Returns
    -------
    dict
        Maps each site index to its own copy of the alphabet. (Previously a
        mutable default argument was used and every site shared ONE list
        object, so mutating one site's alphabet silently changed them all.)
    """
    if alphabet is None:
        alphabet = ["0", "1"]
    # Independent copy per site so callers can safely mutate one entry.
    return {i: list(alphabet) for i in range(length)}
395 |
--------------------------------------------------------------------------------
/gpmap/gpm.py:
--------------------------------------------------------------------------------
1 | #
2 | # Author: Zach Sailer
3 | #
4 | # ----------------------------------------------------------
5 | # Outside imports
6 | # ----------------------------------------------------------
7 |
8 | import json
9 | import pickle
10 | import numpy as np
11 | import pandas as pd
12 |
13 | # ----------------------------------------------------------
14 | # Local imports
15 | # ----------------------------------------------------------
16 |
17 | # import different maps into this module
18 | import gpmap.utils as utils
19 | import gpmap.errors as errors
20 |
21 |
22 | class GenotypePhenotypeMap(object):
23 | """Object for containing genotype-phenotype map data.
24 |
25 | Parameters
26 | ----------
27 | wildtype : string
28 | wildtype sequence.
29 |
30 | genotypes : array-like
31 | list of all genotypes
32 |
33 | phenotypes : array-like
34 | List of phenotypes in the same order as genotypes. If None,
35 | all genotypes are assigned a phenotype = np.nan.
36 |
37 | mutations : dict
        Dictionary that maps each site index to its possible substitution
        alphabet.
40 |
41 | site_labels : array-like
42 | list of labels to apply to sites. If this is not specified, the
43 | first site is assigned a label 0, the next 1, etc. If specified, sites
44 | are assigned labels in the order given. For example, if the genotypes
45 | specify mutations at positions 12 and 75, this would be a list [12,75].
46 |
47 | n_replicates : int
48 | number of replicate measurements comprising the mean phenotypes
49 |
50 | include_binary : bool (default=True)
51 | Construct a binary representation of the space.
52 |
53 | Attributes
54 | ----------
55 | data : pandas.DataFrame
56 | The core data object. Columns are 'genotypes', 'phenotypes',
57 | 'n_replicates', 'stdeviations', and (option) 'binary'.
58 |
59 | complete_data : pandas.DataFrame (optional, created by BinaryMap)
60 | A dataframe mapping the complete set of genotypes possible, given
61 | the mutations dictionary. Contains all columns in `data`. Any missing
62 | data is reported as NaN.
63 |
64 | missing_data : pandas.DataFrame (optional, created by BinaryMap)
        A dataframe containing the set of missing genotypes; complete_data -
66 | data. Two columns: 'genotypes' and 'binary'.
67 |
68 | binary : BinaryMap
69 | object that gives you (the user) access to the binary representation
70 | of the map.
71 |
72 | encoding_table:
73 | Pandas DataFrame showing how mutations map to binary representation.
74 | """
75 | def __init__(self, wildtype,
76 | genotypes,
77 | phenotypes=None,
78 | stdeviations=None,
79 | mutations=None,
80 | site_labels=None,
81 | n_replicates=1,
82 | **kwargs):
83 |
84 | # Assign dummy phenotypes
85 | if phenotypes is None:
86 | phenotypes = np.zeros(len(genotypes),dtype=np.float64)
87 | phenotypes[:] = np.nan
88 |
89 | # Set mutations; if not given, assume binary space.
90 | if mutations is not None:
91 | # Make sure the keys in the mutations dict are integers, not
92 | # strings.
93 | self._mutations = dict([(int(key), val)
94 | for key, val in mutations.items()])
95 | else:
96 | # Get mutations dict from genotypes.
97 | mutations = utils.genotypes_to_mutations(genotypes)
98 | self._mutations = mutations
99 |
100 | # Leftover kwargs become metadata that is ignored.
101 | self.metadata = kwargs
102 |
103 | # Set wildtype.
104 | self._wildtype = wildtype
105 |
106 | # Store data in DataFrame
107 | data = dict(
108 | genotypes=genotypes,
109 | phenotypes=phenotypes,
110 | n_replicates=n_replicates,
111 | stdeviations=stdeviations
112 | )
113 | self.data = pd.DataFrame(data)
114 |
115 | # Construct a lookup table for all mutations.
116 | self.encoding_table = utils.get_encoding_table(
117 | self.wildtype,
118 | self.mutations,
119 | site_labels
120 | )
121 |
122 | # Add binary representation
123 | self.add_binary()
124 |
125 | # Add number of mutations
126 | self.add_n_mutations()
127 |
128 | # Construct the error maps
129 | self._add_error()
130 |
131 | def _repr_html_(self):
132 | """Represent the GenotypePhenotypeMap as an html table."""
133 | return self.data.to_html()
134 |
135 | def map(self, attr1, attr2):
136 | """Dictionary that maps attr1 to attr2."""
137 | return dict(zip(getattr(self, attr1), getattr(self, attr2)))
138 |
139 | @classmethod
140 | def read_dataframe(cls, dataframe, wildtype, **kwargs):
141 | """Construct a GenotypePhenotypeMap from a dataframe."""
142 | # Required arguments
143 | df = dataframe
144 | self = cls(wildtype,
145 | df.genotypes,
146 | df.phenotypes,
147 | stdeviations=df.stdeviations,
148 | n_replicates=df.n_replicates,
149 | **kwargs)
150 | return self
151 |
152 | @classmethod
153 | def read_pickle(cls, filename, **kwargs):
154 | """Read GenotypePhenotypeMap from pickle"""
155 | with open(filename, 'rb') as f:
156 | self = pickle.load(f)
157 |
158 | # Check input
159 | if not isinstance(self, GenotypePhenotypeMap):
160 | raise Exception("Pickle file does not contain a GenotypePhenotypeMap.")
161 |
162 | return self
163 |
164 | @classmethod
165 | def read_csv(cls, fname, wildtype, **kwargs):
166 | """"""
167 | dtypes = dict(
168 | genotypes=str,
169 | phenotypes=float,
170 | stdeviations=float,
171 | n_replicates=int
172 | )
173 | df = pd.read_csv(fname, dtype=dtypes)
174 | self = cls.read_dataframe(df, wildtype, **kwargs)
175 | return self
176 |
177 | @classmethod
178 | def read_excel(cls, fname, wildtype, **kwargs):
179 | """"""
180 | dtypes = dict(
181 | genotypes=str,
182 | phenotypes=float,
183 | stdeviations=float,
184 | n_replicates=int
185 | )
186 | df = pd.read_excel(fname, dtype=dtypes)
187 | self = cls.read_dataframe(df, wildtype, **kwargs)
188 | return self
189 |
190 | @classmethod
191 | def read_json(cls, filename, **kwargs):
192 | """Load a genotype-phenotype map directly from a json file.
193 | The JSON metadata must include the following attributes
194 |
195 | Note
196 | ----
197 | Keyword arguments override input that is loaded from the JSON file.
198 | """
199 | # Open, json load, and close a json file
200 | with open(filename, "r") as f:
201 | metadata = json.load(f)
202 |
203 | return cls.from_dict(metadata)
204 |
205 |
206 | @classmethod
207 | def from_dict(cls, metadata):
208 | """"""
209 | try:
210 | data = metadata["data"]
211 | except KeyError:
212 | data = metadata
213 |
214 | if "wildtype" in metadata:
215 | wildtype = metadata["wildtype"]
216 | metadata.pop("wildtype")
217 |
218 | # Check keys in dictionary.
219 | if not all(key in data for key in ["genotypes", "phenotypes", "stdeviations", "n_replicates"]):
220 | raise Exception('The "data" field must have the following keys: '
221 | 'genotypes", "phenotypes", "stdeviations", "n_replicates"')
222 |
223 | # Create an instance
224 | gpm = cls(
225 | wildtype,
226 | data["genotypes"],
227 | data["phenotypes"],
228 | stdeviations=data["stdeviations"],
229 | n_replicates=data["n_replicates"]
230 | )
231 | return gpm
232 |
233 | @classmethod
234 | def from_json(cls, json_str):
235 | """Load a genotype-phenotype map directly from a json.
236 | The JSON metadata must include the following attributes
237 |
238 | Note
239 | ----
240 | Keyword arguments override input that is loaded from the JSON file.
241 | """
242 | metadata = json.loads(json_str)
243 | return cls.from_dict(metadata)
244 |
245 | # ----------------------------------------------------------
246 | # Writing methods
247 | # ----------------------------------------------------------
248 |
249 | def to_pickle(self, filename, **kwargs):
250 | """Write GenotypePhenotypeMap object to a pickle file.
251 | """
252 | with open(filename, 'wb') as f:
253 | pickle.dump(self, f)
254 |
255 | def to_excel(self, filename=None, **kwargs):
256 | """Write genotype-phenotype map to excel spreadsheet.
257 |
258 | Keyword arguments are passed directly to Pandas dataframe to_excel
259 | method.
260 |
261 | Parameters
262 | ----------
263 | filename : str
264 | Name of file to write out.
265 | """
266 | self.data.to_excel(filename, **kwargs)
267 |
268 | def to_csv(self, filename=None, **kwargs):
269 | """Write genotype-phenotype map to csv spreadsheet.
270 |
271 | Keyword arguments are passed directly to Pandas dataframe to_csv
272 | method.
273 |
274 | Parameters
275 | ----------
276 | filename : str
277 | Name of file to write out.
278 | """
279 | self.data.to_csv(filename, **kwargs)
280 |
281 | def to_dict(self, complete=False):
282 | """Write genotype-phenotype map to dict."""
283 | if complete:
284 | data = self.complete_data.to_dict('list')
285 | else:
286 | data = self.data.to_dict('list')
287 |
288 | metadata = {
289 | "wildtype": self.wildtype,
290 | "mutations": self.mutations,
291 | "data": data
292 | }
293 | metadata.update(**self.metadata)
294 | return metadata
295 |
296 | def to_json(self, filename=None, complete=False):
297 | """Write genotype-phenotype map to json file. If no filename is given
298 | returns
299 | """
300 | # Get metadata.
301 | data = self.to_dict(complete=complete)
302 |
303 | # Write to json file.
304 | if filename is None:
305 | return json.dumps(data)
306 | else:
307 | with open(filename, "w") as f:
308 | json.dump(data, f)
309 |
310 | @property
311 | def length(self):
312 | """Get length of the genotypes. """
313 | return len(self.wildtype)
314 |
315 | @property
316 | def n(self):
317 | """Get number of genotypes, i.e. size of the genotype-phenotype map."""
318 | return len(self.genotypes)
319 |
320 | @property
321 | def wildtype(self):
322 | """Get reference genotypes for interactions. """
323 | return self._wildtype
324 |
325 | @wildtype.setter
326 | def wildtype(self, wildtype):
327 | """If a wildtype is given after init, rebuild binary genotypes."""
328 | self._wildtype = wildtype
329 | self.add_binary()
330 |
331 | @property
332 | def mutant(self):
333 | """Get the farthest mutant in genotype-phenotype map."""
334 | _mutant = []
335 | _wt = self.wildtype
336 | for i in range(0, len(self.mutations)):
337 | site = _wt[i]
338 | options = self.mutations[i]
339 | if options is None:
340 | _mutant.append(_wt[i])
341 | else:
342 | for o in options:
343 | if o != site:
344 | _mutant.append(o)
345 | return "".join(_mutant)
346 |
347 | @property
348 | def mutations(self):
349 | """Get the furthest genotype from the wildtype genotype."""
350 | return self._mutations
351 |
352 | @property
353 | def genotypes(self):
354 | """Get the genotypes of the system."""
355 | return self.data.genotypes.values
356 |
357 | @property
358 | def binary(self):
359 | """Binary representation of genotypes."""
360 | return self.data.binary.values
361 |
362 | @property
363 | def phenotypes(self):
364 | """Get the phenotypes of the system. """
365 | return self.data.phenotypes.values
366 |
367 | @property
368 | def stdeviations(self):
369 | """Get stdeviations"""
370 | return self.data.stdeviations.values
371 |
372 | @property
373 | def n_replicates(self):
374 | """Return the number of replicate measurements made of the phenotype"""
375 | return self.data.n_replicates.values
376 |
377 | @property
378 | def index(self):
379 | """Return numpy array of genotypes position. """
380 | return self.data.index.values
381 |
382 | def _add_error(self):
383 | """Store error maps"""
384 | self.std = errors.StandardDeviationMap(self)
385 | self.err = errors.StandardErrorMap(self)
386 |
387 | def add_binary(self):
388 | """Build a binary representation of set of genotypes.
389 |
390 | Add as a column to the main DataFrame.
391 | """
392 | binary = utils.genotypes_to_binary(self.genotypes, self.encoding_table)
393 |
394 | # Add this as a column to the map.
395 | self.data['binary'] = binary
396 |
397 | def add_n_mutations(self):
398 | """Build a column with the number of mutations in each genotype.
399 |
400 | Add as a column to the main DataFrame.
401 | """
402 | n_mutations = [binary.count('1') for binary in self.binary]
403 | self.data['n_mutations'] = n_mutations
404 |
405 |
406 | def get_missing_genotypes(self):
407 | """Get all genotypes missing from the complete genotype-phenotype map."""
408 | return utils.get_missing_genotypes(
409 | self.genotypes,
410 | mutations=self.mutations
411 | )
412 |
413 | def get_all_possible_genotypes(self):
414 | """Get the complete set of genotypes possible. There is no particular order
415 | to the genotypes. Consider sorting.
416 | """
417 | # Get all genotypes.
418 | return utils.mutations_to_genotypes(self.mutations, wildtype=self.wildtype)
419 |
420 | def get_missing_binary(self):
421 | """Get all binary representations of genotypes missing from the complete
422 | genotype-phenotype map.
423 | """
424 |
425 | geno = self.get_missing_genotypes()
426 | binary = utils.genotypes_to_binary(self.wildtype,
427 | geno,
428 | self.mutations
429 | )
430 |
431 | return binary
432 |
433 | def get_all_possible_binary(self):
434 | """Get the complete set of binary genotypes possible. There is no
435 | particular order to the genotypes. Consider sorting.
436 | """
437 |
438 | # Get all genotypes.
439 | geno = self.get_all_possible_genotypes()
440 | binary = utils.genotypes_to_binary(self.wildtype,
441 | geno,
442 | self.mutations
443 | )
444 |
445 | return binary
446 |
--------------------------------------------------------------------------------
/examples/Introduction to Genotype-Phenotype Map Module.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "# Introduction to Genotype-Phenotype Map Module"
8 | ]
9 | },
10 | {
11 | "cell_type": "markdown",
12 | "metadata": {},
13 | "source": [
  14 |     "This notebook is a brief introduction to how the genotype-phenotype map module works. "
15 | ]
16 | },
17 | {
18 | "cell_type": "markdown",
19 | "metadata": {},
20 | "source": [
21 | "External imports for plotting and other utilities "
22 | ]
23 | },
24 | {
25 | "cell_type": "code",
26 | "execution_count": 1,
27 | "metadata": {},
28 | "outputs": [],
29 | "source": [
30 | "%matplotlib inline\n",
31 | "import matplotlib.pyplot as plt\n",
32 | "import numpy as np"
33 | ]
34 | },
35 | {
36 | "cell_type": "markdown",
37 | "metadata": {},
38 | "source": [
39 | "Imports from `gpm` module."
40 | ]
41 | },
42 | {
43 | "cell_type": "code",
44 | "execution_count": 2,
45 | "metadata": {},
46 | "outputs": [],
47 | "source": [
48 | "from gpmap.gpm import GenotypePhenotypeMap"
49 | ]
50 | },
51 | {
52 | "cell_type": "markdown",
53 | "metadata": {},
54 | "source": [
  55 |     "Let's define an arbitrary space. Everything in the cell below will typically be given to you by the experimental data."
56 | ]
57 | },
58 | {
59 | "cell_type": "code",
60 | "execution_count": 3,
61 | "metadata": {},
62 | "outputs": [],
63 | "source": [
64 | "from gpmap import utils\n",
65 | "\n",
66 | "# Wildtype sequence\n",
67 | "wt = \"AAA\"\n",
68 | "\n",
69 | "# Micro-managing here, stating explicitly what substitutions are possible at each site.\n",
70 | "# See documentation for more detail.\n",
71 | "mutations = {\n",
72 | " 0:utils.DNA,\n",
73 | " 1:utils.DNA,\n",
74 | " 2:[\"A\",\"T\"]\n",
75 | "}\n",
76 | "encoding_table = utils.get_encoding_table(wt, mutations)\n",
77 | "\n",
78 | "genotypes = utils.mutations_to_genotypes(mutations, wildtype=wt)\n",
79 | "binary = utils.genotypes_to_binary(genotypes, encoding_table)\n",
80 | "\n",
81 | "# Generate random phenotype values\n",
82 | "phenotypes = np.random.rand(len(genotypes))"
83 | ]
84 | },
85 | {
86 | "cell_type": "markdown",
87 | "metadata": {},
88 | "source": [
89 | "## Creating a Genotype-phenotype map instance"
90 | ]
91 | },
92 | {
93 | "cell_type": "markdown",
94 | "metadata": {},
95 | "source": [
96 | "Create an instance of the `GenotypePhenotypeMap` object, passing in the wildtype sequence, genotypes and their phenotypes, and the substitution map. "
97 | ]
98 | },
99 | {
100 | "cell_type": "code",
101 | "execution_count": 4,
102 | "metadata": {},
103 | "outputs": [],
104 | "source": [
105 | "from gpmap.gpm import GenotypePhenotypeMap"
106 | ]
107 | },
108 | {
109 | "cell_type": "code",
110 | "execution_count": 5,
111 | "metadata": {},
112 | "outputs": [],
113 | "source": [
114 | "gpm = GenotypePhenotypeMap(wt, # wildtype sequence\n",
115 | " genotypes, # genotypes\n",
116 | " phenotypes, # phenotypes\n",
117 | " stdeviations=None, # errors in measured phenotypes\n",
118 | " log_transform=False, # Should the map log_transform the space?\n",
119 | " mutations=mutations # Substitution map to alphabet \n",
120 | ")"
121 | ]
122 | },
123 | {
124 | "cell_type": "code",
125 | "execution_count": 6,
126 | "metadata": {},
127 | "outputs": [
128 | {
129 | "data": {
130 | "text/html": [
131 | "\n",
132 | "\n",
145 | "
\n",
146 | " \n",
147 | " \n",
148 | " | \n",
149 | " genotypes | \n",
150 | " phenotypes | \n",
151 | " n_replicates | \n",
152 | " stdeviations | \n",
153 | " binary | \n",
154 | " n_mutations | \n",
155 | "
\n",
156 | " \n",
157 | " \n",
158 | " \n",
159 | " | 0 | \n",
160 | " AAA | \n",
161 | " 0.959714 | \n",
162 | " 1 | \n",
163 | " None | \n",
164 | " 0000000 | \n",
165 | " 0 | \n",
166 | "
\n",
167 | " \n",
168 | " | 1 | \n",
169 | " AAT | \n",
170 | " 0.040474 | \n",
171 | " 1 | \n",
172 | " None | \n",
173 | " 0000001 | \n",
174 | " 1 | \n",
175 | "
\n",
176 | " \n",
177 | " | 2 | \n",
178 | " ACA | \n",
179 | " 0.191467 | \n",
180 | " 1 | \n",
181 | " None | \n",
182 | " 0001000 | \n",
183 | " 1 | \n",
184 | "
\n",
185 | " \n",
186 | " | 3 | \n",
187 | " ACT | \n",
188 | " 0.659434 | \n",
189 | " 1 | \n",
190 | " None | \n",
191 | " 0001001 | \n",
192 | " 2 | \n",
193 | "
\n",
194 | " \n",
195 | " | 4 | \n",
196 | " AGA | \n",
197 | " 0.303127 | \n",
198 | " 1 | \n",
199 | " None | \n",
200 | " 0000100 | \n",
201 | " 1 | \n",
202 | "
\n",
203 | " \n",
204 | " | 5 | \n",
205 | " AGT | \n",
206 | " 0.617443 | \n",
207 | " 1 | \n",
208 | " None | \n",
209 | " 0000101 | \n",
210 | " 2 | \n",
211 | "
\n",
212 | " \n",
213 | " | 6 | \n",
214 | " ATA | \n",
215 | " 0.498385 | \n",
216 | " 1 | \n",
217 | " None | \n",
218 | " 0000010 | \n",
219 | " 1 | \n",
220 | "
\n",
221 | " \n",
222 | " | 7 | \n",
223 | " ATT | \n",
224 | " 0.120876 | \n",
225 | " 1 | \n",
226 | " None | \n",
227 | " 0000011 | \n",
228 | " 2 | \n",
229 | "
\n",
230 | " \n",
231 | " | 8 | \n",
232 | " CAA | \n",
233 | " 0.157659 | \n",
234 | " 1 | \n",
235 | " None | \n",
236 | " 1000000 | \n",
237 | " 1 | \n",
238 | "
\n",
239 | " \n",
240 | " | 9 | \n",
241 | " CAT | \n",
242 | " 0.719523 | \n",
243 | " 1 | \n",
244 | " None | \n",
245 | " 1000001 | \n",
246 | " 2 | \n",
247 | "
\n",
248 | " \n",
249 | " | 10 | \n",
250 | " CCA | \n",
251 | " 0.181312 | \n",
252 | " 1 | \n",
253 | " None | \n",
254 | " 1001000 | \n",
255 | " 2 | \n",
256 | "
\n",
257 | " \n",
258 | " | 11 | \n",
259 | " CCT | \n",
260 | " 0.590966 | \n",
261 | " 1 | \n",
262 | " None | \n",
263 | " 1001001 | \n",
264 | " 3 | \n",
265 | "
\n",
266 | " \n",
267 | " | 12 | \n",
268 | " CGA | \n",
269 | " 0.728670 | \n",
270 | " 1 | \n",
271 | " None | \n",
272 | " 1000100 | \n",
273 | " 2 | \n",
274 | "
\n",
275 | " \n",
276 | " | 13 | \n",
277 | " CGT | \n",
278 | " 0.825209 | \n",
279 | " 1 | \n",
280 | " None | \n",
281 | " 1000101 | \n",
282 | " 3 | \n",
283 | "
\n",
284 | " \n",
285 | " | 14 | \n",
286 | " CTA | \n",
287 | " 0.832555 | \n",
288 | " 1 | \n",
289 | " None | \n",
290 | " 1000010 | \n",
291 | " 2 | \n",
292 | "
\n",
293 | " \n",
294 | " | 15 | \n",
295 | " CTT | \n",
296 | " 0.971384 | \n",
297 | " 1 | \n",
298 | " None | \n",
299 | " 1000011 | \n",
300 | " 3 | \n",
301 | "
\n",
302 | " \n",
303 | " | 16 | \n",
304 | " GAA | \n",
305 | " 0.041217 | \n",
306 | " 1 | \n",
307 | " None | \n",
308 | " 0100000 | \n",
309 | " 1 | \n",
310 | "
\n",
311 | " \n",
312 | " | 17 | \n",
313 | " GAT | \n",
314 | " 0.035292 | \n",
315 | " 1 | \n",
316 | " None | \n",
317 | " 0100001 | \n",
318 | " 2 | \n",
319 | "
\n",
320 | " \n",
321 | " | 18 | \n",
322 | " GCA | \n",
323 | " 0.876725 | \n",
324 | " 1 | \n",
325 | " None | \n",
326 | " 0101000 | \n",
327 | " 2 | \n",
328 | "
\n",
329 | " \n",
330 | " | 19 | \n",
331 | " GCT | \n",
332 | " 0.291822 | \n",
333 | " 1 | \n",
334 | " None | \n",
335 | " 0101001 | \n",
336 | " 3 | \n",
337 | "
\n",
338 | " \n",
339 | " | 20 | \n",
340 | " GGA | \n",
341 | " 0.527104 | \n",
342 | " 1 | \n",
343 | " None | \n",
344 | " 0100100 | \n",
345 | " 2 | \n",
346 | "
\n",
347 | " \n",
348 | " | 21 | \n",
349 | " GGT | \n",
350 | " 0.492613 | \n",
351 | " 1 | \n",
352 | " None | \n",
353 | " 0100101 | \n",
354 | " 3 | \n",
355 | "
\n",
356 | " \n",
357 | " | 22 | \n",
358 | " GTA | \n",
359 | " 0.749004 | \n",
360 | " 1 | \n",
361 | " None | \n",
362 | " 0100010 | \n",
363 | " 2 | \n",
364 | "
\n",
365 | " \n",
366 | " | 23 | \n",
367 | " GTT | \n",
368 | " 0.121560 | \n",
369 | " 1 | \n",
370 | " None | \n",
371 | " 0100011 | \n",
372 | " 3 | \n",
373 | "
\n",
374 | " \n",
375 | " | 24 | \n",
376 | " TAA | \n",
377 | " 0.088041 | \n",
378 | " 1 | \n",
379 | " None | \n",
380 | " 0010000 | \n",
381 | " 1 | \n",
382 | "
\n",
383 | " \n",
384 | " | 25 | \n",
385 | " TAT | \n",
386 | " 0.099197 | \n",
387 | " 1 | \n",
388 | " None | \n",
389 | " 0010001 | \n",
390 | " 2 | \n",
391 | "
\n",
392 | " \n",
393 | " | 26 | \n",
394 | " TCA | \n",
395 | " 0.891447 | \n",
396 | " 1 | \n",
397 | " None | \n",
398 | " 0011000 | \n",
399 | " 2 | \n",
400 | "
\n",
401 | " \n",
402 | " | 27 | \n",
403 | " TCT | \n",
404 | " 0.289377 | \n",
405 | " 1 | \n",
406 | " None | \n",
407 | " 0011001 | \n",
408 | " 3 | \n",
409 | "
\n",
410 | " \n",
411 | " | 28 | \n",
412 | " TGA | \n",
413 | " 0.026427 | \n",
414 | " 1 | \n",
415 | " None | \n",
416 | " 0010100 | \n",
417 | " 2 | \n",
418 | "
\n",
419 | " \n",
420 | " | 29 | \n",
421 | " TGT | \n",
422 | " 0.138790 | \n",
423 | " 1 | \n",
424 | " None | \n",
425 | " 0010101 | \n",
426 | " 3 | \n",
427 | "
\n",
428 | " \n",
429 | " | 30 | \n",
430 | " TTA | \n",
431 | " 0.077798 | \n",
432 | " 1 | \n",
433 | " None | \n",
434 | " 0010010 | \n",
435 | " 2 | \n",
436 | "
\n",
437 | " \n",
438 | " | 31 | \n",
439 | " TTT | \n",
440 | " 0.534395 | \n",
441 | " 1 | \n",
442 | " None | \n",
443 | " 0010011 | \n",
444 | " 3 | \n",
445 | "
\n",
446 | " \n",
447 | "
\n",
448 | "
"
449 | ],
450 | "text/plain": [
451 | " genotypes phenotypes n_replicates stdeviations binary n_mutations\n",
452 | "0 AAA 0.959714 1 None 0000000 0\n",
453 | "1 AAT 0.040474 1 None 0000001 1\n",
454 | "2 ACA 0.191467 1 None 0001000 1\n",
455 | "3 ACT 0.659434 1 None 0001001 2\n",
456 | "4 AGA 0.303127 1 None 0000100 1\n",
457 | "5 AGT 0.617443 1 None 0000101 2\n",
458 | "6 ATA 0.498385 1 None 0000010 1\n",
459 | "7 ATT 0.120876 1 None 0000011 2\n",
460 | "8 CAA 0.157659 1 None 1000000 1\n",
461 | "9 CAT 0.719523 1 None 1000001 2\n",
462 | "10 CCA 0.181312 1 None 1001000 2\n",
463 | "11 CCT 0.590966 1 None 1001001 3\n",
464 | "12 CGA 0.728670 1 None 1000100 2\n",
465 | "13 CGT 0.825209 1 None 1000101 3\n",
466 | "14 CTA 0.832555 1 None 1000010 2\n",
467 | "15 CTT 0.971384 1 None 1000011 3\n",
468 | "16 GAA 0.041217 1 None 0100000 1\n",
469 | "17 GAT 0.035292 1 None 0100001 2\n",
470 | "18 GCA 0.876725 1 None 0101000 2\n",
471 | "19 GCT 0.291822 1 None 0101001 3\n",
472 | "20 GGA 0.527104 1 None 0100100 2\n",
473 | "21 GGT 0.492613 1 None 0100101 3\n",
474 | "22 GTA 0.749004 1 None 0100010 2\n",
475 | "23 GTT 0.121560 1 None 0100011 3\n",
476 | "24 TAA 0.088041 1 None 0010000 1\n",
477 | "25 TAT 0.099197 1 None 0010001 2\n",
478 | "26 TCA 0.891447 1 None 0011000 2\n",
479 | "27 TCT 0.289377 1 None 0011001 3\n",
480 | "28 TGA 0.026427 1 None 0010100 2\n",
481 | "29 TGT 0.138790 1 None 0010101 3\n",
482 | "30 TTA 0.077798 1 None 0010010 2\n",
483 | "31 TTT 0.534395 1 None 0010011 3"
484 | ]
485 | },
486 | "execution_count": 6,
487 | "metadata": {},
488 | "output_type": "execute_result"
489 | }
490 | ],
491 | "source": [
492 | "gpm.data"
493 | ]
494 | },
495 | {
496 | "cell_type": "code",
497 | "execution_count": null,
498 | "metadata": {},
499 | "outputs": [],
500 | "source": []
501 | }
502 | ],
503 | "metadata": {
504 | "kernelspec": {
505 | "display_name": "gptools_dev",
506 | "language": "python",
507 | "name": "gptools_dev"
508 | },
509 | "language_info": {
510 | "codemirror_mode": {
511 | "name": "ipython",
512 | "version": 3
513 | },
514 | "file_extension": ".py",
515 | "mimetype": "text/x-python",
516 | "name": "python",
517 | "nbconvert_exporter": "python",
518 | "pygments_lexer": "ipython3",
519 | "version": "3.6.10"
520 | }
521 | },
522 | "nbformat": 4,
523 | "nbformat_minor": 4
524 | }
525 |
--------------------------------------------------------------------------------
/Pipfile.lock:
--------------------------------------------------------------------------------
1 | {
2 | "_meta": {
3 | "hash": {
4 | "sha256": "851f88c424ecec41beab69bf42ce92280bc3ec8e5cb336fb9228ba52b96cb67e"
5 | },
6 | "pipfile-spec": 6,
7 | "requires": {
8 | "python_version": "3.7"
9 | },
10 | "sources": [
11 | {
12 | "name": "pypi",
13 | "url": "https://pypi.org/simple",
14 | "verify_ssl": true
15 | }
16 | ]
17 | },
18 | "default": {
19 | "atomicwrites": {
20 | "hashes": [
21 | "sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197",
22 | "sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a"
23 | ],
24 | "markers": "sys_platform == 'win32'",
25 | "version": "==1.4.0"
26 | },
27 | "attrs": {
28 | "hashes": [
29 | "sha256:08a96c641c3a74e44eb59afb61a24f2cb9f4d7188748e76ba4bb5edfa3cb7d1c",
30 | "sha256:f7b7ce16570fe9965acd6d30101a28f62fb4a7f9e926b3bbc9b61f8b04247e72"
31 | ],
32 | "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
33 | "version": "==19.3.0"
34 | },
35 | "certifi": {
36 | "hashes": [
37 | "sha256:5930595817496dd21bb8dc35dad090f1c2cd0adfaf21204bf6732ca5d8ee34d3",
38 | "sha256:8fc0819f1f30ba15bdb34cceffb9ef04d99f420f68eb75d901e9560b8749fc41"
39 | ],
40 | "version": "==2020.6.20"
41 | },
42 | "colorama": {
43 | "hashes": [
44 | "sha256:7d73d2a99753107a36ac6b455ee49046802e59d9d076ef8e47b61499fa29afff",
45 | "sha256:e96da0d330793e2cb9485e9ddfd918d456036c7149416295932478192f4436a1"
46 | ],
47 | "markers": "sys_platform == 'win32'",
48 | "version": "==0.4.3"
49 | },
50 | "cycler": {
51 | "hashes": [
52 | "sha256:1d8a5ae1ff6c5cf9b93e8811e581232ad8920aeec647c37316ceac982b08cb2d",
53 | "sha256:cd7b2d1018258d7247a71425e9f26463dfb444d411c39569972f4ce586b0c9d8"
54 | ],
55 | "version": "==0.10.0"
56 | },
57 | "gpmap": {
58 | "editable": true,
59 | "path": "."
60 | },
61 | "importlib-metadata": {
62 | "hashes": [
63 | "sha256:90bb658cdbbf6d1735b6341ce708fc7024a3e14e99ffdc5783edea9f9b077f83",
64 | "sha256:dc15b2969b4ce36305c51eebe62d418ac7791e9a157911d58bfb1f9ccd8e2070"
65 | ],
66 | "markers": "python_version < '3.8'",
67 | "version": "==1.7.0"
68 | },
69 | "iniconfig": {
70 | "hashes": [
71 | "sha256:80cf40c597eb564e86346103f609d74efce0f6b4d4f30ec8ce9e2c26411ba437",
72 | "sha256:e5f92f89355a67de0595932a6c6c02ab4afddc6fcdc0bfc5becd0d60884d3f69"
73 | ],
74 | "version": "==1.0.1"
75 | },
76 | "kiwisolver": {
77 | "hashes": [
78 | "sha256:03662cbd3e6729f341a97dd2690b271e51a67a68322affab12a5b011344b973c",
79 | "sha256:18d749f3e56c0480dccd1714230da0f328e6e4accf188dd4e6884bdd06bf02dd",
80 | "sha256:247800260cd38160c362d211dcaf4ed0f7816afb5efe56544748b21d6ad6d17f",
81 | "sha256:38d05c9ecb24eee1246391820ed7137ac42a50209c203c908154782fced90e44",
82 | "sha256:443c2320520eda0a5b930b2725b26f6175ca4453c61f739fef7a5847bd262f74",
83 | "sha256:4eadb361baf3069f278b055e3bb53fa189cea2fd02cb2c353b7a99ebb4477ef1",
84 | "sha256:556da0a5f60f6486ec4969abbc1dd83cf9b5c2deadc8288508e55c0f5f87d29c",
85 | "sha256:603162139684ee56bcd57acc74035fceed7dd8d732f38c0959c8bd157f913fec",
86 | "sha256:60a78858580761fe611d22127868f3dc9f98871e6fdf0a15cc4203ed9ba6179b",
87 | "sha256:63f55f490b958b6299e4e5bdac66ac988c3d11b7fafa522800359075d4fa56d1",
88 | "sha256:7cc095a4661bdd8a5742aaf7c10ea9fac142d76ff1770a0f84394038126d8fc7",
89 | "sha256:be046da49fbc3aa9491cc7296db7e8d27bcf0c3d5d1a40259c10471b014e4e0c",
90 | "sha256:c31bc3c8e903d60a1ea31a754c72559398d91b5929fcb329b1c3a3d3f6e72113",
91 | "sha256:c955791d80e464da3b471ab41eb65cf5a40c15ce9b001fdc5bbc241170de58ec",
92 | "sha256:d069ef4b20b1e6b19f790d00097a5d5d2c50871b66d10075dab78938dc2ee2cf",
93 | "sha256:d52b989dc23cdaa92582ceb4af8d5bcc94d74b2c3e64cd6785558ec6a879793e",
94 | "sha256:e586b28354d7b6584d8973656a7954b1c69c93f708c0c07b77884f91640b7657",
95 | "sha256:efcf3397ae1e3c3a4a0a0636542bcad5adad3b1dd3e8e629d0b6e201347176c8",
96 | "sha256:fccefc0d36a38c57b7bd233a9b485e2f1eb71903ca7ad7adacad6c28a56d62d2"
97 | ],
98 | "markers": "python_version >= '3.6'",
99 | "version": "==1.2.0"
100 | },
101 | "matplotlib": {
102 | "hashes": [
103 | "sha256:0dc15e1ad84ec06bf0c315e6c4c2cced13a21ce4c2b4955bb75097064a4b1e92",
104 | "sha256:1507c2a8e4662f6fa1d3ecc760782b158df8a3244ecc21c1d8dbb1cd0b3f872e",
105 | "sha256:1f9cf2b8500b833714a193cb24281153f5072d55b2e486009f1e81f0b7da3410",
106 | "sha256:282f8a077a1217f9f2ac178596f27c1ae94abbc6e7b785e1b8f25e83918e9199",
107 | "sha256:2c3619ec2a5ead430a4536ebf8c77ea55d8ce36418919f831d35bc657ed5f27e",
108 | "sha256:636c6330a7dcb18bac114dbeaff314fbbb0c11682f9a9601de69a50e331d18d7",
109 | "sha256:6739b6cd9278d5cb337df0bd4400ad37bbd04c6dc7aa2c65e1e83a02bc4cc6fd",
110 | "sha256:73a493e340064e8fe03207d9333b68baca30d9f0da543ae4af6b6b4f13f0fe05",
111 | "sha256:79f0c4730ad422ecb6bda814c9a9b375df36d6bd5a49eaa14e92e5f5e3e95ac3",
112 | "sha256:83ae7261f4d5ab387be2caee29c4f499b1566f31c8ac97a0b8ab61afd9e3da92",
113 | "sha256:87f53bcce90772f942c2db56736788b39332d552461a5cb13f05ff45c1680f0e",
114 | "sha256:88c6ab4a32a7447dad236b8371612aaba5c967d632ff11999e0478dd687f2c58",
115 | "sha256:96a5e667308dbf45670370d9dffb974e73b15bac0df0b5f3fb0b0ac7a572290e",
116 | "sha256:9703bc00a94a94c4e94b2ea0fbfbc9d2bb21159733134639fd931b6606c5c47e",
117 | "sha256:bd8fceaa3494b531d43b6206966ba15705638137fc2dc5da5ee560cf9476867b",
118 | "sha256:cc2d6b47c8fee89da982a312b54949ec0cd6a7976a8cafb5b62dea6c9883a14d",
119 | "sha256:e4d6d3afc454b4afc0d9d0ed52a8fa40a1b0d8f33c8e143e49a5833a7e32266b",
120 | "sha256:ffbae66e2db70dc330cb3299525f97e1c0efdfc763e04e1a4e08f968c7ad21f0"
121 | ],
122 | "index": "pypi",
123 | "version": "==3.3.1"
124 | },
125 | "more-itertools": {
126 | "hashes": [
127 | "sha256:68c70cc7167bdf5c7c9d8f6954a7837089c6a36bf565383919bb595efb8a17e5",
128 | "sha256:b78134b2063dd214000685165d81c154522c3ee0a1c0d4d113c80361c234c5a2"
129 | ],
130 | "markers": "python_version >= '3.5'",
131 | "version": "==8.4.0"
132 | },
133 | "numpy": {
134 | "hashes": [
135 | "sha256:082f8d4dd69b6b688f64f509b91d482362124986d98dc7dc5f5e9f9b9c3bb983",
136 | "sha256:1bc0145999e8cb8aed9d4e65dd8b139adf1919e521177f198529687dbf613065",
137 | "sha256:309cbcfaa103fc9a33ec16d2d62569d541b79f828c382556ff072442226d1968",
138 | "sha256:3673c8b2b29077f1b7b3a848794f8e11f401ba0b71c49fbd26fb40b71788b132",
139 | "sha256:480fdd4dbda4dd6b638d3863da3be82873bba6d32d1fc12ea1b8486ac7b8d129",
140 | "sha256:56ef7f56470c24bb67fb43dae442e946a6ce172f97c69f8d067ff8550cf782ff",
141 | "sha256:5a936fd51049541d86ccdeef2833cc89a18e4d3808fe58a8abeb802665c5af93",
142 | "sha256:5b6885c12784a27e957294b60f97e8b5b4174c7504665333c5e94fbf41ae5d6a",
143 | "sha256:667c07063940e934287993366ad5f56766bc009017b4a0fe91dbd07960d0aba7",
144 | "sha256:7ed448ff4eaffeb01094959b19cbaf998ecdee9ef9932381420d514e446601cd",
145 | "sha256:8343bf67c72e09cfabfab55ad4a43ce3f6bf6e6ced7acf70f45ded9ebb425055",
146 | "sha256:92feb989b47f83ebef246adabc7ff3b9a59ac30601c3f6819f8913458610bdcc",
147 | "sha256:935c27ae2760c21cd7354402546f6be21d3d0c806fffe967f745d5f2de5005a7",
148 | "sha256:aaf42a04b472d12515debc621c31cf16c215e332242e7a9f56403d814c744624",
149 | "sha256:b12e639378c741add21fbffd16ba5ad25c0a1a17cf2b6fe4288feeb65144f35b",
150 | "sha256:b1cca51512299841bf69add3b75361779962f9cee7d9ee3bb446d5982e925b69",
151 | "sha256:b8456987b637232602ceb4d663cb34106f7eb780e247d51a260b84760fd8f491",
152 | "sha256:b9792b0ac0130b277536ab8944e7b754c69560dac0415dd4b2dbd16b902c8954",
153 | "sha256:c9591886fc9cbe5532d5df85cb8e0cc3b44ba8ce4367bd4cf1b93dc19713da72",
154 | "sha256:cf1347450c0b7644ea142712619533553f02ef23f92f781312f6a3553d031fc7",
155 | "sha256:de8b4a9b56255797cbddb93281ed92acbc510fb7b15df3f01bd28f46ebc4edae",
156 | "sha256:e1b1dc0372f530f26a03578ac75d5e51b3868b9b76cd2facba4c9ee0eb252ab1",
157 | "sha256:e45f8e981a0ab47103181773cc0a54e650b2aef8c7b6cd07405d0fa8d869444a",
158 | "sha256:e4f6d3c53911a9d103d8ec9518190e52a8b945bab021745af4939cfc7c0d4a9e",
159 | "sha256:ed8a311493cf5480a2ebc597d1e177231984c818a86875126cfd004241a73c3e",
160 | "sha256:ef71a1d4fd4858596ae80ad1ec76404ad29701f8ca7cdcebc50300178db14dfc"
161 | ],
162 | "index": "pypi",
163 | "version": "==1.19.1"
164 | },
165 | "packaging": {
166 | "hashes": [
167 | "sha256:4357f74f47b9c12db93624a82154e9b120fa8293699949152b22065d556079f8",
168 | "sha256:998416ba6962ae7fbd6596850b80e17859a5753ba17c32284f67bfff33784181"
169 | ],
170 | "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
171 | "version": "==20.4"
172 | },
173 | "pandas": {
174 | "hashes": [
175 | "sha256:0210f8fe19c2667a3817adb6de2c4fd92b1b78e1975ca60c0efa908e0985cbdb",
176 | "sha256:0227e3a6e3a22c0e283a5041f1e3064d78fbde811217668bb966ed05386d8a7e",
177 | "sha256:0bc440493cf9dc5b36d5d46bbd5508f6547ba68b02a28234cd8e81fdce42744d",
178 | "sha256:16504f915f1ae424052f1e9b7cd2d01786f098fbb00fa4e0f69d42b22952d798",
179 | "sha256:182a5aeae319df391c3df4740bb17d5300dcd78034b17732c12e62e6dd79e4a4",
180 | "sha256:35db623487f00d9392d8af44a24516d6cb9f274afaf73cfcfe180b9c54e007d2",
181 | "sha256:40ec0a7f611a3d00d3c666c4cceb9aa3f5bf9fbd81392948a93663064f527203",
182 | "sha256:47a03bfef80d6812c91ed6fae43f04f2fa80a4e1b82b35aa4d9002e39529e0b8",
183 | "sha256:4b21d46728f8a6be537716035b445e7ef3a75dbd30bd31aa1b251323219d853e",
184 | "sha256:4d1a806252001c5db7caecbe1a26e49a6c23421d85a700960f6ba093112f54a1",
185 | "sha256:60e20a4ab4d4fec253557d0fc9a4e4095c37b664f78c72af24860c8adcd07088",
186 | "sha256:9f61cca5262840ff46ef857d4f5f65679b82188709d0e5e086a9123791f721c8",
187 | "sha256:a15835c8409d5edc50b4af93be3377b5dd3eb53517e7f785060df1f06f6da0e2",
188 | "sha256:b39508562ad0bb3f384b0db24da7d68a2608b9ddc85b1d931ccaaa92d5e45273",
189 | "sha256:ed60848caadeacecefd0b1de81b91beff23960032cded0ac1449242b506a3b3f",
190 | "sha256:fc714895b6de6803ac9f661abb316853d0cd657f5d23985222255ad76ccedc25"
191 | ],
192 | "index": "pypi",
193 | "version": "==1.1.0"
194 | },
195 | "pillow": {
196 | "hashes": [
197 | "sha256:0295442429645fa16d05bd567ef5cff178482439c9aad0411d3f0ce9b88b3a6f",
198 | "sha256:06aba4169e78c439d528fdeb34762c3b61a70813527a2c57f0540541e9f433a8",
199 | "sha256:09d7f9e64289cb40c2c8d7ad674b2ed6105f55dc3b09aa8e4918e20a0311e7ad",
200 | "sha256:0a80dd307a5d8440b0a08bd7b81617e04d870e40a3e46a32d9c246e54705e86f",
201 | "sha256:1ca594126d3c4def54babee699c055a913efb01e106c309fa6b04405d474d5ae",
202 | "sha256:25930fadde8019f374400f7986e8404c8b781ce519da27792cbe46eabec00c4d",
203 | "sha256:431b15cffbf949e89df2f7b48528be18b78bfa5177cb3036284a5508159492b5",
204 | "sha256:52125833b070791fcb5710fabc640fc1df07d087fc0c0f02d3661f76c23c5b8b",
205 | "sha256:5e51ee2b8114def244384eda1c82b10e307ad9778dac5c83fb0943775a653cd8",
206 | "sha256:612cfda94e9c8346f239bf1a4b082fdd5c8143cf82d685ba2dba76e7adeeb233",
207 | "sha256:6d7741e65835716ceea0fd13a7d0192961212fd59e741a46bbed7a473c634ed6",
208 | "sha256:6edb5446f44d901e8683ffb25ebdfc26988ee813da3bf91e12252b57ac163727",
209 | "sha256:725aa6cfc66ce2857d585f06e9519a1cc0ef6d13f186ff3447ab6dff0a09bc7f",
210 | "sha256:8dad18b69f710bf3a001d2bf3afab7c432785d94fcf819c16b5207b1cfd17d38",
211 | "sha256:94cf49723928eb6070a892cb39d6c156f7b5a2db4e8971cb958f7b6b104fb4c4",
212 | "sha256:97f9e7953a77d5a70f49b9a48da7776dc51e9b738151b22dacf101641594a626",
213 | "sha256:9ad7f865eebde135d526bb3163d0b23ffff365cf87e767c649550964ad72785d",
214 | "sha256:9c87ef410a58dd54b92424ffd7e28fd2ec65d2f7fc02b76f5e9b2067e355ebf6",
215 | "sha256:a060cf8aa332052df2158e5a119303965be92c3da6f2d93b6878f0ebca80b2f6",
216 | "sha256:c79f9c5fb846285f943aafeafda3358992d64f0ef58566e23484132ecd8d7d63",
217 | "sha256:c92302a33138409e8f1ad16731568c55c9053eee71bb05b6b744067e1b62380f",
218 | "sha256:d08b23fdb388c0715990cbc06866db554e1822c4bdcf6d4166cf30ac82df8c41",
219 | "sha256:d350f0f2c2421e65fbc62690f26b59b0bcda1b614beb318c81e38647e0f673a1",
220 | "sha256:e901964262a56d9ea3c2693df68bc9860b8bdda2b04768821e4c44ae797de117",
221 | "sha256:ec29604081f10f16a7aea809ad42e27764188fc258b02259a03a8ff7ded3808d",
222 | "sha256:edf31f1150778abd4322444c393ab9c7bd2af271dd4dafb4208fb613b1f3cdc9",
223 | "sha256:f7e30c27477dffc3e85c2463b3e649f751789e0f6c8456099eea7ddd53be4a8a",
224 | "sha256:ffe538682dc19cc542ae7c3e504fdf54ca7f86fb8a135e59dd6bc8627eae6cce"
225 | ],
226 | "markers": "python_version >= '3.5'",
227 | "version": "==7.2.0"
228 | },
229 | "pluggy": {
230 | "hashes": [
231 | "sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0",
232 | "sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d"
233 | ],
234 | "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
235 | "version": "==0.13.1"
236 | },
237 | "py": {
238 | "hashes": [
239 | "sha256:366389d1db726cd2fcfc79732e75410e5fe4d31db13692115529d34069a043c2",
240 | "sha256:9ca6883ce56b4e8da7e79ac18787889fa5206c79dcc67fb065376cd2fe03f342"
241 | ],
242 | "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
243 | "version": "==1.9.0"
244 | },
245 | "pyparsing": {
246 | "hashes": [
247 | "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1",
248 | "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b"
249 | ],
250 | "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'",
251 | "version": "==2.4.7"
252 | },
253 | "pytest": {
254 | "hashes": [
255 | "sha256:85228d75db9f45e06e57ef9bf4429267f81ac7c0d742cc9ed63d09886a9fe6f4",
256 | "sha256:8b6007800c53fdacd5a5c192203f4e531eb2a1540ad9c752e052ec0f7143dbad"
257 | ],
258 | "index": "pypi",
259 | "version": "==6.0.1"
260 | },
261 | "python-dateutil": {
262 | "hashes": [
263 | "sha256:73ebfe9dbf22e832286dafa60473e4cd239f8592f699aa5adaf10050e6e1823c",
264 | "sha256:75bb3f31ea686f1197762692a9ee6a7550b59fc6ca3a1f4b5d7e32fb98e2da2a"
265 | ],
266 | "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
267 | "version": "==2.8.1"
268 | },
269 | "pytz": {
270 | "hashes": [
271 | "sha256:a494d53b6d39c3c6e44c3bec237336e14305e4f29bbf800b599253057fbb79ed",
272 | "sha256:c35965d010ce31b23eeb663ed3cc8c906275d6be1a34393a1d73a41febf4a048"
273 | ],
274 | "version": "==2020.1"
275 | },
276 | "scipy": {
277 | "hashes": [
278 | "sha256:066c513d90eb3fd7567a9e150828d39111ebd88d3e924cdfc9f8ce19ab6f90c9",
279 | "sha256:07e52b316b40a4f001667d1ad4eb5f2318738de34597bd91537851365b6c61f1",
280 | "sha256:0a0e9a4e58a4734c2eba917f834b25b7e3b6dc333901ce7784fd31aefbd37b2f",
281 | "sha256:1c7564a4810c1cd77fcdee7fa726d7d39d4e2695ad252d7c86c3ea9d85b7fb8f",
282 | "sha256:315aa2165aca31375f4e26c230188db192ed901761390be908c9b21d8b07df62",
283 | "sha256:6e86c873fe1335d88b7a4bfa09d021f27a9e753758fd75f3f92d714aa4093768",
284 | "sha256:8e28e74b97fc8d6aa0454989db3b5d36fc27e69cef39a7ee5eaf8174ca1123cb",
285 | "sha256:92eb04041d371fea828858e4fff182453c25ae3eaa8782d9b6c32b25857d23bc",
286 | "sha256:a0afbb967fd2c98efad5f4c24439a640d39463282040a88e8e928db647d8ac3d",
287 | "sha256:a785409c0fa51764766840185a34f96a0a93527a0ff0230484d33a8ed085c8f8",
288 | "sha256:cca9fce15109a36a0a9f9cfc64f870f1c140cb235ddf27fe0328e6afb44dfed0",
289 | "sha256:d56b10d8ed72ec1be76bf10508446df60954f08a41c2d40778bc29a3a9ad9bce",
290 | "sha256:dac09281a0eacd59974e24525a3bc90fa39b4e95177e638a31b14db60d3fa806",
291 | "sha256:ec5fe57e46828d034775b00cd625c4a7b5c7d2e354c3b258d820c6c72212a6ec",
292 | "sha256:eecf40fa87eeda53e8e11d265ff2254729d04000cd40bae648e76ff268885d66",
293 | "sha256:fc98f3eac993b9bfdd392e675dfe19850cc8c7246a8fd2b42443e506344be7d9"
294 | ],
295 | "index": "pypi",
296 | "version": "==1.5.2"
297 | },
298 | "six": {
299 | "hashes": [
300 | "sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259",
301 | "sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced"
302 | ],
303 | "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
304 | "version": "==1.15.0"
305 | },
306 | "toml": {
307 | "hashes": [
308 | "sha256:926b612be1e5ce0634a2ca03470f95169cf16f939018233a670519cb4ac58b0f",
309 | "sha256:bda89d5935c2eac546d648028b9901107a595863cb36bae0c73ac804a9b4ce88"
310 | ],
311 | "version": "==0.10.1"
312 | },
313 | "zipp": {
314 | "hashes": [
315 | "sha256:aa36550ff0c0b7ef7fa639055d797116ee891440eac1a56f378e2d3179e0320b",
316 | "sha256:c599e4d75c98f6798c509911d08a22e6c021d074469042177c8c86fb92eefd96"
317 | ],
318 | "markers": "python_version >= '3.6'",
319 | "version": "==3.1.0"
320 | }
321 | },
322 | "develop": {
323 | "gpmap": {
324 | "editable": true,
325 | "path": "."
326 | },
327 | "numpy": {
328 | "hashes": [
329 | "sha256:082f8d4dd69b6b688f64f509b91d482362124986d98dc7dc5f5e9f9b9c3bb983",
330 | "sha256:1bc0145999e8cb8aed9d4e65dd8b139adf1919e521177f198529687dbf613065",
331 | "sha256:309cbcfaa103fc9a33ec16d2d62569d541b79f828c382556ff072442226d1968",
332 | "sha256:3673c8b2b29077f1b7b3a848794f8e11f401ba0b71c49fbd26fb40b71788b132",
333 | "sha256:480fdd4dbda4dd6b638d3863da3be82873bba6d32d1fc12ea1b8486ac7b8d129",
334 | "sha256:56ef7f56470c24bb67fb43dae442e946a6ce172f97c69f8d067ff8550cf782ff",
335 | "sha256:5a936fd51049541d86ccdeef2833cc89a18e4d3808fe58a8abeb802665c5af93",
336 | "sha256:5b6885c12784a27e957294b60f97e8b5b4174c7504665333c5e94fbf41ae5d6a",
337 | "sha256:667c07063940e934287993366ad5f56766bc009017b4a0fe91dbd07960d0aba7",
338 | "sha256:7ed448ff4eaffeb01094959b19cbaf998ecdee9ef9932381420d514e446601cd",
339 | "sha256:8343bf67c72e09cfabfab55ad4a43ce3f6bf6e6ced7acf70f45ded9ebb425055",
340 | "sha256:92feb989b47f83ebef246adabc7ff3b9a59ac30601c3f6819f8913458610bdcc",
341 | "sha256:935c27ae2760c21cd7354402546f6be21d3d0c806fffe967f745d5f2de5005a7",
342 | "sha256:aaf42a04b472d12515debc621c31cf16c215e332242e7a9f56403d814c744624",
343 | "sha256:b12e639378c741add21fbffd16ba5ad25c0a1a17cf2b6fe4288feeb65144f35b",
344 | "sha256:b1cca51512299841bf69add3b75361779962f9cee7d9ee3bb446d5982e925b69",
345 | "sha256:b8456987b637232602ceb4d663cb34106f7eb780e247d51a260b84760fd8f491",
346 | "sha256:b9792b0ac0130b277536ab8944e7b754c69560dac0415dd4b2dbd16b902c8954",
347 | "sha256:c9591886fc9cbe5532d5df85cb8e0cc3b44ba8ce4367bd4cf1b93dc19713da72",
348 | "sha256:cf1347450c0b7644ea142712619533553f02ef23f92f781312f6a3553d031fc7",
349 | "sha256:de8b4a9b56255797cbddb93281ed92acbc510fb7b15df3f01bd28f46ebc4edae",
350 | "sha256:e1b1dc0372f530f26a03578ac75d5e51b3868b9b76cd2facba4c9ee0eb252ab1",
351 | "sha256:e45f8e981a0ab47103181773cc0a54e650b2aef8c7b6cd07405d0fa8d869444a",
352 | "sha256:e4f6d3c53911a9d103d8ec9518190e52a8b945bab021745af4939cfc7c0d4a9e",
353 | "sha256:ed8a311493cf5480a2ebc597d1e177231984c818a86875126cfd004241a73c3e",
354 | "sha256:ef71a1d4fd4858596ae80ad1ec76404ad29701f8ca7cdcebc50300178db14dfc"
355 | ],
356 | "index": "pypi",
357 | "version": "==1.19.1"
358 | },
359 | "pandas": {
360 | "hashes": [
361 | "sha256:0210f8fe19c2667a3817adb6de2c4fd92b1b78e1975ca60c0efa908e0985cbdb",
362 | "sha256:0227e3a6e3a22c0e283a5041f1e3064d78fbde811217668bb966ed05386d8a7e",
363 | "sha256:0bc440493cf9dc5b36d5d46bbd5508f6547ba68b02a28234cd8e81fdce42744d",
364 | "sha256:16504f915f1ae424052f1e9b7cd2d01786f098fbb00fa4e0f69d42b22952d798",
365 | "sha256:182a5aeae319df391c3df4740bb17d5300dcd78034b17732c12e62e6dd79e4a4",
366 | "sha256:35db623487f00d9392d8af44a24516d6cb9f274afaf73cfcfe180b9c54e007d2",
367 | "sha256:40ec0a7f611a3d00d3c666c4cceb9aa3f5bf9fbd81392948a93663064f527203",
368 | "sha256:47a03bfef80d6812c91ed6fae43f04f2fa80a4e1b82b35aa4d9002e39529e0b8",
369 | "sha256:4b21d46728f8a6be537716035b445e7ef3a75dbd30bd31aa1b251323219d853e",
370 | "sha256:4d1a806252001c5db7caecbe1a26e49a6c23421d85a700960f6ba093112f54a1",
371 | "sha256:60e20a4ab4d4fec253557d0fc9a4e4095c37b664f78c72af24860c8adcd07088",
372 | "sha256:9f61cca5262840ff46ef857d4f5f65679b82188709d0e5e086a9123791f721c8",
373 | "sha256:a15835c8409d5edc50b4af93be3377b5dd3eb53517e7f785060df1f06f6da0e2",
374 | "sha256:b39508562ad0bb3f384b0db24da7d68a2608b9ddc85b1d931ccaaa92d5e45273",
375 | "sha256:ed60848caadeacecefd0b1de81b91beff23960032cded0ac1449242b506a3b3f",
376 | "sha256:fc714895b6de6803ac9f661abb316853d0cd657f5d23985222255ad76ccedc25"
377 | ],
378 | "index": "pypi",
379 | "version": "==1.1.0"
380 | },
381 | "python-dateutil": {
382 | "hashes": [
383 | "sha256:73ebfe9dbf22e832286dafa60473e4cd239f8592f699aa5adaf10050e6e1823c",
384 | "sha256:75bb3f31ea686f1197762692a9ee6a7550b59fc6ca3a1f4b5d7e32fb98e2da2a"
385 | ],
386 | "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
387 | "version": "==2.8.1"
388 | },
389 | "pytz": {
390 | "hashes": [
391 | "sha256:a494d53b6d39c3c6e44c3bec237336e14305e4f29bbf800b599253057fbb79ed",
392 | "sha256:c35965d010ce31b23eeb663ed3cc8c906275d6be1a34393a1d73a41febf4a048"
393 | ],
394 | "version": "==2020.1"
395 | },
396 | "scipy": {
397 | "hashes": [
398 | "sha256:066c513d90eb3fd7567a9e150828d39111ebd88d3e924cdfc9f8ce19ab6f90c9",
399 | "sha256:07e52b316b40a4f001667d1ad4eb5f2318738de34597bd91537851365b6c61f1",
400 | "sha256:0a0e9a4e58a4734c2eba917f834b25b7e3b6dc333901ce7784fd31aefbd37b2f",
401 | "sha256:1c7564a4810c1cd77fcdee7fa726d7d39d4e2695ad252d7c86c3ea9d85b7fb8f",
402 | "sha256:315aa2165aca31375f4e26c230188db192ed901761390be908c9b21d8b07df62",
403 | "sha256:6e86c873fe1335d88b7a4bfa09d021f27a9e753758fd75f3f92d714aa4093768",
404 | "sha256:8e28e74b97fc8d6aa0454989db3b5d36fc27e69cef39a7ee5eaf8174ca1123cb",
405 | "sha256:92eb04041d371fea828858e4fff182453c25ae3eaa8782d9b6c32b25857d23bc",
406 | "sha256:a0afbb967fd2c98efad5f4c24439a640d39463282040a88e8e928db647d8ac3d",
407 | "sha256:a785409c0fa51764766840185a34f96a0a93527a0ff0230484d33a8ed085c8f8",
408 | "sha256:cca9fce15109a36a0a9f9cfc64f870f1c140cb235ddf27fe0328e6afb44dfed0",
409 | "sha256:d56b10d8ed72ec1be76bf10508446df60954f08a41c2d40778bc29a3a9ad9bce",
410 | "sha256:dac09281a0eacd59974e24525a3bc90fa39b4e95177e638a31b14db60d3fa806",
411 | "sha256:ec5fe57e46828d034775b00cd625c4a7b5c7d2e354c3b258d820c6c72212a6ec",
412 | "sha256:eecf40fa87eeda53e8e11d265ff2254729d04000cd40bae648e76ff268885d66",
413 | "sha256:fc98f3eac993b9bfdd392e675dfe19850cc8c7246a8fd2b42443e506344be7d9"
414 | ],
415 | "index": "pypi",
416 | "version": "==1.5.2"
417 | },
418 | "six": {
419 | "hashes": [
420 | "sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259",
421 | "sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced"
422 | ],
423 | "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
424 | "version": "==1.15.0"
425 | }
426 | }
427 | }
428 |
--------------------------------------------------------------------------------