├── utils ├── __init__.py ├── libpgmexceptions.py └── bntextutils.py ├── .gitignore ├── docs ├── _build │ ├── html │ │ ├── _sources │ │ │ ├── test.txt │ │ │ ├── nodedata.txt │ │ │ ├── dictionary.txt │ │ │ ├── pgmlearner.txt │ │ │ ├── graphskeleton.txt │ │ │ ├── tablecpdfactor.txt │ │ │ ├── orderedskeleton.txt │ │ │ ├── hybayesiannetwork.txt │ │ │ ├── lgbayesiannetwork.txt │ │ │ ├── sampleaggregator.txt │ │ │ ├── tablecpdfactorization.txt │ │ │ ├── discretebayesiannetwork.txt │ │ │ ├── dyndiscbayesiannetwork.txt │ │ │ ├── CPDtypes.txt │ │ │ ├── unittestlgdict.txt │ │ │ ├── unittestdict.txt │ │ │ ├── unittesthdict.txt │ │ │ ├── index.txt │ │ │ └── unittestdyndict.txt │ │ ├── objects.inv │ │ ├── _static │ │ │ ├── up.png │ │ │ ├── down.png │ │ │ ├── file.png │ │ │ ├── minus.png │ │ │ ├── plus.png │ │ │ ├── comment.png │ │ │ ├── ajax-loader.gif │ │ │ ├── up-pressed.png │ │ │ ├── comment-close.png │ │ │ ├── down-pressed.png │ │ │ ├── comment-bright.png │ │ │ ├── pygments.css │ │ │ ├── default.css │ │ │ ├── sidebar.js │ │ │ ├── doctools.js │ │ │ └── underscore.js │ │ ├── .buildinfo │ │ ├── search.html │ │ ├── _modules │ │ │ └── index.html │ │ ├── pgmlibrary.html │ │ ├── orderedskeleton.html │ │ ├── dictionary.html │ │ └── py-modindex.html │ └── doctrees │ │ ├── index.doctree │ │ ├── test.doctree │ │ ├── CPDtypes.doctree │ │ ├── nodedata.doctree │ │ ├── dictionary.doctree │ │ ├── environment.pickle │ │ ├── pgmlearner.doctree │ │ ├── pgmlibrary.doctree │ │ ├── graphskeleton.doctree │ │ ├── unittestdict.doctree │ │ ├── unittesthdict.doctree │ │ ├── orderedskeleton.doctree │ │ ├── sampleaggregator.doctree │ │ ├── tablecpdfactor.doctree │ │ ├── unittestdyndict.doctree │ │ ├── unittestlgdict.doctree │ │ ├── hybayesiannetwork.doctree │ │ ├── lgbayesiannetwork.doctree │ │ ├── tablecpdfactorization.doctree │ │ ├── discretebayesiannetwork.doctree │ │ └── dyndiscbayesiannetwork.doctree ├── nodedata.rst ├── dictionary.rst ├── pgmlearner.rst ├── graphskeleton.rst ├── tablecpdfactor.rst ├── orderedskeleton.rst ├── sampleaggregator.rst ├── hybayesiannetwork.rst ├── lgbayesiannetwork.rst ├── tablecpdfactorization.rst ├── dyndiscbayesiannetwork.rst ├── discretebayesiannetwork.rst ├── CPDtypes.rst ├── unittestlgdict.rst ├── unittestdict.rst ├── unittesthdict.rst ├── index.rst ├── Makefile └── unittestdyndict.rst ├── libpgm ├── CPDtypes │ ├── __init__.py │ ├── crazy.py │ ├── lg.py │ ├── discrete.py │ └── lgandd.py ├── __init__.py ├── orderedskeleton.py ├── dictionary.py ├── sampleaggregator.py ├── graphskeleton.py ├── nodedata.py └── hybayesiannetwork.py ├── MANIFEST.in ├── examples ├── exampleevidence.txt ├── examplequery.txt └── exampledata.txt ├── runtime-tests ├── results │ ├── PGMlibrary2.0_Runtime_Report.pdf │ ├── dbn_learningparams_rtFnumoutcomes.csv │ ├── dbn_querying_rtFindegree.csv │ ├── dbn_sampling_memFindegree.csv │ ├── dbn_paramlearning_rtFnumvertices.csv │ ├── dbn_querying_rtFnumoutcomes.csv │ ├── dbn_learning_rtFnumoutcomes.csv │ ├── dbn_paramlearning_rtFnumdatapoints.csv │ ├── dbn_learningparams_rtFindegree.csv │ ├── dbn_learning_rtFindegree.csv │ ├── dbn_learning_rtFnumvertices.csv │ ├── dbn_learning_rtFnumdatapoints.csv │ ├── dbn_sampling_memFnumoutcomes.csv │ ├── dbn_sampling_rtFnumvertices.csv │ ├── dbn_querying_rtFnumvertices.csv │ └── dbn_querying_rtFnumvertices2.csv ├── output.csv ├── processor.py ├── archives │ ├── rtFnumvertices_processor.py │ └── rtFnumvertices_timer.py ├── timer.py └── bn_generator.py ├── libpgm_examples ├── discrete_bayesian_network.py └── discrete_bayesian_network.py~ 
├── setup.py ├── tests ├── unittestdict.txt ├── unittestlgdict.txt ├── unittesthdict.txt ├── unittesthdict_lgandd_no_lg_parents.txt └── unittestdyndict.txt ├── LICENSE.txt └── README.md /utils/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.pyc 2 | dist/ 3 | MANIFEST 4 | -------------------------------------------------------------------------------- /docs/_build/html/_sources/test.txt: -------------------------------------------------------------------------------- 1 | test!! 2 | -------------------------------------------------------------------------------- /libpgm/CPDtypes/__init__.py: -------------------------------------------------------------------------------- 1 | __all__ = ['discrete', 'lg', 'lgandd', 'crazy'] 2 | -------------------------------------------------------------------------------- /docs/nodedata.rst: -------------------------------------------------------------------------------- 1 | nodedata 2 | ******** 3 | 4 | .. automodule:: libpgm.nodedata 5 | :members: 6 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include *.txt 2 | recursive-include examples *.txt *.py 3 | recursive-include tests *.txt *.py 4 | -------------------------------------------------------------------------------- /docs/_build/html/objects.inv: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CyberPoint/libpgm/HEAD/docs/_build/html/objects.inv -------------------------------------------------------------------------------- /docs/dictionary.rst: -------------------------------------------------------------------------------- 1 | dictionary 2 | ********** 3 | 4 | .. automodule:: libpgm.dictionary 5 | :members: 6 | -------------------------------------------------------------------------------- /docs/pgmlearner.rst: -------------------------------------------------------------------------------- 1 | pgmlearner 2 | ********** 3 | 4 | .. automodule:: libpgm.pgmlearner 5 | :members: 6 | -------------------------------------------------------------------------------- /docs/_build/html/_static/up.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CyberPoint/libpgm/HEAD/docs/_build/html/_static/up.png -------------------------------------------------------------------------------- /docs/_build/doctrees/index.doctree: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CyberPoint/libpgm/HEAD/docs/_build/doctrees/index.doctree -------------------------------------------------------------------------------- /docs/_build/doctrees/test.doctree: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CyberPoint/libpgm/HEAD/docs/_build/doctrees/test.doctree -------------------------------------------------------------------------------- /docs/_build/html/_sources/nodedata.txt: -------------------------------------------------------------------------------- 1 | nodedata 2 | ******** 3 | 4 | .. 
automodule:: libpgm.nodedata 5 | :members: 6 | -------------------------------------------------------------------------------- /docs/_build/html/_static/down.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CyberPoint/libpgm/HEAD/docs/_build/html/_static/down.png -------------------------------------------------------------------------------- /docs/_build/html/_static/file.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CyberPoint/libpgm/HEAD/docs/_build/html/_static/file.png -------------------------------------------------------------------------------- /docs/_build/html/_static/minus.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CyberPoint/libpgm/HEAD/docs/_build/html/_static/minus.png -------------------------------------------------------------------------------- /docs/_build/html/_static/plus.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CyberPoint/libpgm/HEAD/docs/_build/html/_static/plus.png -------------------------------------------------------------------------------- /docs/graphskeleton.rst: -------------------------------------------------------------------------------- 1 | graphskeleton 2 | ************* 3 | 4 | .. automodule:: libpgm.graphskeleton 5 | :members: 6 | -------------------------------------------------------------------------------- /docs/_build/doctrees/CPDtypes.doctree: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CyberPoint/libpgm/HEAD/docs/_build/doctrees/CPDtypes.doctree -------------------------------------------------------------------------------- /docs/_build/doctrees/nodedata.doctree: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CyberPoint/libpgm/HEAD/docs/_build/doctrees/nodedata.doctree -------------------------------------------------------------------------------- /docs/_build/html/_static/comment.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CyberPoint/libpgm/HEAD/docs/_build/html/_static/comment.png -------------------------------------------------------------------------------- /docs/tablecpdfactor.rst: -------------------------------------------------------------------------------- 1 | tablecpdfactor 2 | ************** 3 | 4 | .. 
automodule:: libpgm.tablecpdfactor 5 | :members: 6 | -------------------------------------------------------------------------------- /docs/_build/doctrees/dictionary.doctree: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CyberPoint/libpgm/HEAD/docs/_build/doctrees/dictionary.doctree -------------------------------------------------------------------------------- /docs/_build/doctrees/environment.pickle: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CyberPoint/libpgm/HEAD/docs/_build/doctrees/environment.pickle -------------------------------------------------------------------------------- /docs/_build/doctrees/pgmlearner.doctree: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CyberPoint/libpgm/HEAD/docs/_build/doctrees/pgmlearner.doctree -------------------------------------------------------------------------------- /docs/_build/doctrees/pgmlibrary.doctree: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CyberPoint/libpgm/HEAD/docs/_build/doctrees/pgmlibrary.doctree -------------------------------------------------------------------------------- /docs/_build/html/_sources/dictionary.txt: -------------------------------------------------------------------------------- 1 | dictionary 2 | ********** 3 | 4 | .. automodule:: libpgm.dictionary 5 | :members: 6 | -------------------------------------------------------------------------------- /docs/_build/html/_sources/pgmlearner.txt: -------------------------------------------------------------------------------- 1 | pgmlearner 2 | ********** 3 | 4 | .. automodule:: libpgm.pgmlearner 5 | :members: 6 | -------------------------------------------------------------------------------- /docs/_build/html/_static/ajax-loader.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CyberPoint/libpgm/HEAD/docs/_build/html/_static/ajax-loader.gif -------------------------------------------------------------------------------- /docs/_build/html/_static/up-pressed.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CyberPoint/libpgm/HEAD/docs/_build/html/_static/up-pressed.png -------------------------------------------------------------------------------- /docs/orderedskeleton.rst: -------------------------------------------------------------------------------- 1 | orderedskeleton 2 | *************** 3 | 4 | .. automodule:: libpgm.orderedskeleton 5 | :members: 6 | -------------------------------------------------------------------------------- /docs/sampleaggregator.rst: -------------------------------------------------------------------------------- 1 | sampleaggregator 2 | **************** 3 | 4 | .. 
automodule:: libpgm.sampleaggregator 5 | :members: 6 | -------------------------------------------------------------------------------- /docs/_build/doctrees/graphskeleton.doctree: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CyberPoint/libpgm/HEAD/docs/_build/doctrees/graphskeleton.doctree -------------------------------------------------------------------------------- /docs/_build/doctrees/unittestdict.doctree: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CyberPoint/libpgm/HEAD/docs/_build/doctrees/unittestdict.doctree -------------------------------------------------------------------------------- /docs/_build/doctrees/unittesthdict.doctree: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CyberPoint/libpgm/HEAD/docs/_build/doctrees/unittesthdict.doctree -------------------------------------------------------------------------------- /docs/_build/html/_static/comment-close.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CyberPoint/libpgm/HEAD/docs/_build/html/_static/comment-close.png -------------------------------------------------------------------------------- /docs/_build/html/_static/down-pressed.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CyberPoint/libpgm/HEAD/docs/_build/html/_static/down-pressed.png -------------------------------------------------------------------------------- /docs/hybayesiannetwork.rst: -------------------------------------------------------------------------------- 1 | hybayesiannetwork 2 | ***************** 3 | 4 | .. automodule:: libpgm.hybayesiannetwork 5 | :members: 6 | -------------------------------------------------------------------------------- /docs/lgbayesiannetwork.rst: -------------------------------------------------------------------------------- 1 | lgbayesiannetwork 2 | ***************** 3 | 4 | .. 
automodule:: libpgm.lgbayesiannetwork 5 | :members: 6 | -------------------------------------------------------------------------------- /docs/_build/doctrees/orderedskeleton.doctree: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CyberPoint/libpgm/HEAD/docs/_build/doctrees/orderedskeleton.doctree -------------------------------------------------------------------------------- /docs/_build/doctrees/sampleaggregator.doctree: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CyberPoint/libpgm/HEAD/docs/_build/doctrees/sampleaggregator.doctree -------------------------------------------------------------------------------- /docs/_build/doctrees/tablecpdfactor.doctree: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CyberPoint/libpgm/HEAD/docs/_build/doctrees/tablecpdfactor.doctree -------------------------------------------------------------------------------- /docs/_build/doctrees/unittestdyndict.doctree: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CyberPoint/libpgm/HEAD/docs/_build/doctrees/unittestdyndict.doctree -------------------------------------------------------------------------------- /docs/_build/doctrees/unittestlgdict.doctree: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CyberPoint/libpgm/HEAD/docs/_build/doctrees/unittestlgdict.doctree -------------------------------------------------------------------------------- /docs/_build/html/_sources/graphskeleton.txt: -------------------------------------------------------------------------------- 1 | graphskeleton 2 | ************* 3 | 4 | .. automodule:: libpgm.graphskeleton 5 | :members: 6 | -------------------------------------------------------------------------------- /docs/_build/html/_static/comment-bright.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CyberPoint/libpgm/HEAD/docs/_build/html/_static/comment-bright.png -------------------------------------------------------------------------------- /docs/_build/doctrees/hybayesiannetwork.doctree: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CyberPoint/libpgm/HEAD/docs/_build/doctrees/hybayesiannetwork.doctree -------------------------------------------------------------------------------- /docs/_build/doctrees/lgbayesiannetwork.doctree: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CyberPoint/libpgm/HEAD/docs/_build/doctrees/lgbayesiannetwork.doctree -------------------------------------------------------------------------------- /docs/_build/html/_sources/tablecpdfactor.txt: -------------------------------------------------------------------------------- 1 | tablecpdfactor 2 | ************** 3 | 4 | .. 
automodule:: libpgm.tablecpdfactor 5 | :members: 6 | -------------------------------------------------------------------------------- /docs/_build/doctrees/tablecpdfactorization.doctree: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CyberPoint/libpgm/HEAD/docs/_build/doctrees/tablecpdfactorization.doctree -------------------------------------------------------------------------------- /docs/_build/html/_sources/orderedskeleton.txt: -------------------------------------------------------------------------------- 1 | orderedskeleton 2 | *************** 3 | 4 | .. automodule:: libpgm.orderedskeleton 5 | :members: 6 | -------------------------------------------------------------------------------- /docs/tablecpdfactorization.rst: -------------------------------------------------------------------------------- 1 | tablecpdfactorization 2 | ********************* 3 | 4 | .. automodule:: libpgm.tablecpdfactorization 5 | :members: 6 | -------------------------------------------------------------------------------- /docs/_build/doctrees/discretebayesiannetwork.doctree: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CyberPoint/libpgm/HEAD/docs/_build/doctrees/discretebayesiannetwork.doctree -------------------------------------------------------------------------------- /docs/_build/doctrees/dyndiscbayesiannetwork.doctree: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CyberPoint/libpgm/HEAD/docs/_build/doctrees/dyndiscbayesiannetwork.doctree -------------------------------------------------------------------------------- /docs/_build/html/_sources/hybayesiannetwork.txt: -------------------------------------------------------------------------------- 1 | hybayesiannetwork 2 | ***************** 3 | 4 | .. automodule:: libpgm.hybayesiannetwork 5 | :members: 6 | -------------------------------------------------------------------------------- /docs/_build/html/_sources/lgbayesiannetwork.txt: -------------------------------------------------------------------------------- 1 | lgbayesiannetwork 2 | ***************** 3 | 4 | .. automodule:: libpgm.lgbayesiannetwork 5 | :members: 6 | -------------------------------------------------------------------------------- /docs/_build/html/_sources/sampleaggregator.txt: -------------------------------------------------------------------------------- 1 | sampleaggregator 2 | **************** 3 | 4 | .. automodule:: libpgm.sampleaggregator 5 | :members: 6 | -------------------------------------------------------------------------------- /docs/dyndiscbayesiannetwork.rst: -------------------------------------------------------------------------------- 1 | dyndiscbayesiannetwork 2 | *********************** 3 | 4 | .. automodule:: libpgm.dyndiscbayesiannetwork 5 | :members: 6 | -------------------------------------------------------------------------------- /docs/discretebayesiannetwork.rst: -------------------------------------------------------------------------------- 1 | discretebayesiannetwork 2 | *********************** 3 | 4 | .. automodule:: libpgm.discretebayesiannetwork 5 | :members: 6 | -------------------------------------------------------------------------------- /examples/exampleevidence.txt: -------------------------------------------------------------------------------- 1 | # a sample dict holding evidence. 
this dict says that we know the Difficulty 2 | # is easy 3 | { 4 | 'Difficulty': 'easy' 5 | } 6 | -------------------------------------------------------------------------------- /runtime-tests/results/PGMlibrary2.0_Runtime_Report.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CyberPoint/libpgm/HEAD/runtime-tests/results/PGMlibrary2.0_Runtime_Report.pdf -------------------------------------------------------------------------------- /docs/_build/html/_sources/tablecpdfactorization.txt: -------------------------------------------------------------------------------- 1 | tablecpdfactorization 2 | ********************* 3 | 4 | .. automodule:: libpgm.tablecpdfactorization 5 | :members: 6 | -------------------------------------------------------------------------------- /docs/_build/html/_sources/discretebayesiannetwork.txt: -------------------------------------------------------------------------------- 1 | discretebayesiannetwork 2 | *********************** 3 | 4 | .. automodule:: libpgm.discretebayesiannetwork 5 | :members: 6 | -------------------------------------------------------------------------------- /docs/_build/html/_sources/dyndiscbayesiannetwork.txt: -------------------------------------------------------------------------------- 1 | dyndiscbayesiannetwork 2 | *********************** 3 | 4 | .. automodule:: libpgm.dyndiscbayesiannetwork 5 | :members: 6 | -------------------------------------------------------------------------------- /docs/_build/html/.buildinfo: -------------------------------------------------------------------------------- 1 | # Sphinx build info version 1 2 | # This file hashes the configuration used when building these files. When it is not found, a full rebuild will be done. 3 | config: 4 | tags: 5 | -------------------------------------------------------------------------------- /libpgm/__init__.py: -------------------------------------------------------------------------------- 1 | __all__ = ['CPDtypes', 'dictionary', 'discretebayesiannetwork', 'graphskeleton', 'hybayesiannetwork', 'lgbayesiannetwork', 'nodedata', 'orderedskeleton', 'pgmlearner', 'sampleaggregator', 'tablecpdfactor', 'tablecpdfactorization'] 2 | -------------------------------------------------------------------------------- /runtime-tests/results/dbn_learningparams_rtFnumoutcomes.csv: -------------------------------------------------------------------------------- 1 | Discrete bn parameter learning. Runtime as a function of numoutcomes. numvertices=300. datapoints=1500. indegree=2. max witness size=1. 2 | 1, 1.060000 3 | 2, 1.130000 4 | 3, 1.210000 5 | 4, 1.270000 6 | 5, 1.480000 7 | -------------------------------------------------------------------------------- /examples/examplequery.txt: -------------------------------------------------------------------------------- 1 | # a sample query dictionary 2 | # 3 | # example: 4 | # 5 | # { 6 | # 'Grade': ['A', 'B'] 7 | # } 8 | # 9 | # means that you want to know the probability that Grade has outcome A or B. 10 | # 11 | { 12 | 'Grade': ['A', 'B'] 13 | } 14 | -------------------------------------------------------------------------------- /runtime-tests/results/dbn_querying_rtFindegree.csv: -------------------------------------------------------------------------------- 1 | Probability querying in a discrete BN. Runtime as a function of indegree. Numvertices=300. Numoutcomes=3. Evidence=None. Queried node=root of tree. 
2 | 1, 0.060000 3 | 2, 0.080000 4 | 3, 0.150000 5 | 4, 0.340000 6 | 5, 0.920000 7 | 6, 2.770000 8 | 7, 8.010000 9 | -------------------------------------------------------------------------------- /runtime-tests/output.csv: -------------------------------------------------------------------------------- 1 | Discrete bn parameter learning. Runtime as a function of datapoints. numvertices=300. numoutcomes=2. indegree=2. max witness size=1. 2 | 500, 0.400000 3 | 1000, 0.800000 4 | 1500, 1.180000 5 | 2000, 1.580000 6 | 2500, 1.970000 7 | 3000, 2.360000 8 | 3500, 2.770000 9 | 4000, 3.230000 10 | 4500, 3.480000 11 | -------------------------------------------------------------------------------- /runtime-tests/results/dbn_sampling_memFindegree.csv: -------------------------------------------------------------------------------- 1 | Memory usage (nodedata and then skeleton) as a function of indegree. Numoutcomes=2. Numvertices=300.,, 1,621584,93848 2,821696,165152 3,1420312,237056 4,2196888,307816 5,4443488,378944 6,7792624,448712 7,17281032,520576 8,31969704,590600 9,72569936,660704 10,136750336,727904 11,309613872,797880 -------------------------------------------------------------------------------- /runtime-tests/results/dbn_paramlearning_rtFnumvertices.csv: -------------------------------------------------------------------------------- 1 | Discrete bn parameter learning. Runtime as a function of numvertices. numoutcomes=2. datapoints=1500. indegree=2. max witness size=1. 2 | 100, 0.360000 3 | 200, 0.740000 4 | 300, 1.140000 5 | 400, 1.520000 6 | 500, 1.930000 7 | 600, 2.390000 8 | 700, 2.860000 9 | 800, 3.160000 10 | -------------------------------------------------------------------------------- /runtime-tests/results/dbn_querying_rtFnumoutcomes.csv: -------------------------------------------------------------------------------- 1 | Probability querying in a discrete BN. Runtime as a function of numoutcomes. Numvertices=300. Indegree=3. Evidence=None. Queried node=root of tree. 2 | 2, 0.060000 3 | 3, 0.150000 4 | 4, 0.420000 5 | 5, 1.080000 6 | 6, 2.380000 7 | 7, 4.970000 8 | 8, 9.290000 9 | 9, 16.890000 10 | 10, 27.960000 11 | -------------------------------------------------------------------------------- /runtime-tests/results/dbn_learning_rtFnumoutcomes.csv: -------------------------------------------------------------------------------- 1 | Discrete bn learning. Runtime as a function of numoutcomes. Numvertices=30. datapoints=150. indegree=2. max witness size=1. 2 | 1, 0.390000 3 | 2, 1.170000 4 | 3, 1.500000 5 | 4, 2.000000 6 | 5, 2.800000 7 | 6, 4.220000 8 | 7, 6.310000 9 | 8, 9.010000 10 | 9, 12.470000 11 | 10, 16.860000 12 | 11, 22.110000 13 | -------------------------------------------------------------------------------- /runtime-tests/results/dbn_paramlearning_rtFnumdatapoints.csv: -------------------------------------------------------------------------------- 1 | Discrete bn parameter learning. Runtime as a function of datapoints. numvertices=300. numoutcomes=2. indegree=2. max witness size=1. 2 | 500, 0.400000 3 | 1000, 0.800000 4 | 1500, 1.180000 5 | 2000, 1.580000 6 | 2500, 1.970000 7 | 3000, 2.360000 8 | 3500, 2.770000 9 | 4000, 3.230000 10 | 4500, 3.480000 11 | -------------------------------------------------------------------------------- /runtime-tests/results/dbn_learningparams_rtFindegree.csv: -------------------------------------------------------------------------------- 1 | Discrete bn parameter learning. Runtime as a function of indegree. numvertices=300. 
numoutcomes=2. datapoints=1500. indegree=2. max witness size=1. 2 | 1, 0.980000 3 | 2, 1.170000 4 | 3, 1.350000 5 | 4, 1.490000 6 | 5, 1.810000 7 | 6, 2.060000 8 | 7, 2.410000 9 | 8, 3.050000 10 | 9, 4.060000 11 | 10, 5.950000 12 | -------------------------------------------------------------------------------- /utils/libpgmexceptions.py: -------------------------------------------------------------------------------- 1 | ''' 2 | Copyright 2013 CyberPoint International, LLC. 3 | All rights reserved. Use and disclosure prohibited 4 | except as permitted in writing by CyberPoint. 5 | 6 | libpgm exception handler 7 | 8 | Charlie Cabot 9 | Sept 27 2013 10 | 11 | ''' 12 | class libpgmError(Exception): 13 | pass 14 | 15 | class bntextError(libpgmError): 16 | pass 17 | -------------------------------------------------------------------------------- /runtime-tests/results/dbn_learning_rtFindegree.csv: -------------------------------------------------------------------------------- 1 | Discrete bn learning. Runtime as a function of indegree. Numvertices=30. datapoints=150. numoutcomes=2. max witness size=1. 2 | 1, 1.130000 3 | 2, 1.130000 4 | 3, 1.120000 5 | 4, 1.150000 6 | 5, 1.140000 7 | 6, 1.170000 8 | 7, 1.150000 9 | 8, 1.160000 10 | 9, 1.160000 11 | 10, 1.170000 12 | 11, 1.170000 13 | 12, 1.170000 14 | 13, 1.140000 15 | 14, 1.160000 16 | 15, 1.200000 17 | -------------------------------------------------------------------------------- /runtime-tests/results/dbn_learning_rtFnumvertices.csv: -------------------------------------------------------------------------------- 1 | Discrete BN learning. Runtime as a function of numvertices. Numoutcomes=2 Indegree=2. Data points=150. Max witness size=1. 2 | 5, 0.010000 3 | 10, 0.070000 4 | 15, 0.200000 5 | 20, 0.410000 6 | 25, 0.710000 7 | 30, 1.140000 8 | 35, 1.710000 9 | 40, 2.440000 10 | 45, 3.400000 11 | 50, 4.560000 12 | 55, 5.910000 13 | 60, 7.520000 14 | 65, 9.670000 15 | 70, 11.590000 16 | 75, 14.000000 17 | -------------------------------------------------------------------------------- /libpgm_examples/discrete_bayesian_network.py: -------------------------------------------------------------------------------- 1 | from libpgm.nodedata import NodeData 2 | from libpgm.graphskeleton import GraphSkeleton 3 | from libpgm.discretebayesiannetwork import DiscreteBayesianNetwork 4 | from libpgm.lgbayesiannetwork import LGBayesianNetwork 5 | from libpgm.hybayesiannetwork import HyBayesianNetwork 6 | from libpgm.dyndiscbayesiannetwork import DynDiscBayesianNetwork 7 | from libpgm.tablecpdfactorization import TableCPDFactorization 8 | from libpgm.sampleaggregator import SampleAggregator 9 | from libpgm.pgmlearner import PGMLearner 10 | -------------------------------------------------------------------------------- /runtime-tests/results/dbn_learning_rtFnumdatapoints.csv: -------------------------------------------------------------------------------- 1 | Discrete bn learning. Runtime as a function of data points. Numvertices=30. Indegree=2. numoutcomes=2. max witness size=1. 
2 | 50, 0.540000 3 | 100, 0.820000 4 | 150, 1.130000 5 | 200, 1.440000 6 | 250, 1.750000 7 | 300, 2.060000 8 | 350, 2.360000 9 | 400, 2.690000 10 | 450, 3.130000 11 | 500, 3.300000 12 | 550, 3.630000 13 | 600, 4.070000 14 | 650, 4.260000 15 | 700, 4.590000 16 | 750, 4.890000 17 | 800, 5.370000 18 | 850, 5.490000 19 | 900, 5.930000 20 | 950, 6.180000 21 | 1000, 6.480000 22 | 1050, 6.740000 23 | 1100, 7.340000 24 | -------------------------------------------------------------------------------- /runtime-tests/results/dbn_sampling_memFnumoutcomes.csv: -------------------------------------------------------------------------------- 1 | memory space in megabytes used by (nodedata, graphskeleton) as a function of number of outcomes per node. indegree=2. numvertices=300. 2 | 1, 575760, 165152 3 | 2, 821696, 165152 4 | 3, 1501568, 165152 5 | 4, 2200560, 165152 6 | 5, 4125232, 165152 7 | 6, 5554624, 165152 8 | 7, 7379040, 165152 9 | 8, 9642160, 165152 10 | 9, 13951568, 165152 11 | 10, 20329632, 165152 12 | 11, 24571704, 165152 13 | 12, 29467040, 165152 14 | 13, 35058552, 165152 15 | 14, 41389152, 165152 16 | 15, 48501752, 165152 17 | 16, 56439264, 165152 18 | 17, 71468280, 165152 19 | 18, 81935384, 165152 20 | 19, 104384520, 165152 21 | -------------------------------------------------------------------------------- /runtime-tests/results/dbn_sampling_rtFnumvertices.csv: -------------------------------------------------------------------------------- 1 | Forward Sampling in a discrete BN. Runtime as a function of number of vertices. Indegree=3. Numoutcomes=3. Samples=100. 2 | 100, 0.050000 3 | 200, 0.100000 4 | 300, 0.150000 5 | 400, 0.210000 6 | 500, 0.260000 7 | 600, 0.310000 8 | 700, 0.380000 9 | 800, 0.440000 10 | 900, 0.500000 11 | 1000, 0.550000 12 | 1100, 0.620000 13 | 1200, 0.700000 14 | 1300, 0.760000 15 | 1400, 0.860000 16 | 1500, 0.910000 17 | 1600, 0.990000 18 | 1700, 1.060000 19 | 1800, 1.130000 20 | 1900, 1.220000 21 | 2000, 1.290000 22 | 2100, 1.350000 23 | 2200, 1.420000 24 | 2300, 1.490000 25 | 2400, 1.570000 26 | 2500, 1.640000 27 | -------------------------------------------------------------------------------- /runtime-tests/processor.py: -------------------------------------------------------------------------------- 1 | ''' 2 | Processes trials and outputs to a file. 3 | 4 | Charlie Cabot 5 | August 1, 2012 6 | 7 | ''' 8 | 9 | from bn_generator import disc_bn_generator 10 | from timer import timer 11 | 12 | r = range(500, 5000)[::500] 13 | 14 | op = open("output.csv", 'w') 15 | print >>op, "Discrete bn parameter learning. Runtime as a function of datapoints. numvertices=300. numoutcomes=2. indegree=2. max witness size=1." 16 | 17 | for dl in r: 18 | disc_bn_generator(300, 2, 2, "disc_bn_x.txt") 19 | runtime = timer("disc_bn_x.txt", 1, dl) 20 | line = "%d, %f" % (dl, runtime) 21 | print line 22 | print >>op, line 23 | 24 | 25 | 26 | -------------------------------------------------------------------------------- /runtime-tests/results/dbn_querying_rtFnumvertices.csv: -------------------------------------------------------------------------------- 1 | Probability querying in a discrete BN. Runtime as a function of number of vertices. Indegree=3. Numoutcomes=3. Evidence=None. Queried node=root of tree. 
2 | 100, 0.050000 3 | 200, 0.120000 4 | 300, 0.190000 5 | 400, 0.280000 6 | 500, 0.380000 7 | 600, 0.480000 8 | 700, 0.600000 9 | 800, 0.720000 10 | 900, 0.850000 11 | 1000, 1.040000 12 | 1100, 1.160000 13 | 1200, 1.330000 14 | 1300, 1.500000 15 | 1400, 1.740000 16 | 1500, 1.970000 17 | 1600, 2.180000 18 | 1700, 2.400000 19 | 1800, 2.740000 20 | 1900, 2.830000 21 | 2000, 3.250000 22 | 2100, 3.320000 23 | 2200, 3.650000 24 | 2300, 3.970000 25 | 2400, 4.290000 26 | 2500, 4.580000 27 | -------------------------------------------------------------------------------- /runtime-tests/results/dbn_querying_rtFnumvertices2.csv: -------------------------------------------------------------------------------- 1 | Probability querying in a discrete BN. Runtime as a function of number of vertices. Indegree=3. Numoutcomes=3. Evidence=None. Queried node=leaf of tree. 2 | 100, 0.020000 3 | 200, 0.060000 4 | 300, 0.110000 5 | 400, 0.150000 6 | 500, 0.220000 7 | 600, 0.300000 8 | 700, 0.380000 9 | 800, 0.480000 10 | 900, 0.590000 11 | 1000, 0.720000 12 | 1100, 0.830000 13 | 1200, 0.970000 14 | 1300, 1.130000 15 | 1400, 1.280000 16 | 1500, 1.460000 17 | 1600, 1.650000 18 | 1700, 1.840000 19 | 1800, 2.060000 20 | 1900, 2.250000 21 | 2000, 2.580000 22 | 2100, 2.730000 23 | 2200, 2.990000 24 | 2300, 3.250000 25 | 2400, 3.560000 26 | 2500, 3.880000 27 | -------------------------------------------------------------------------------- /runtime-tests/archives/rtFnumvertices_processor.py: -------------------------------------------------------------------------------- 1 | ''' 2 | Processes trials and outputs to a file. 3 | 4 | Charlie Cabot 5 | August 1, 2012 6 | 7 | ''' 8 | 9 | from bn_generator import disc_bn_generator 10 | from timer import timer 11 | 12 | graphsizes = range(100, 10000)[::100] 13 | 14 | op = open("output.csv", 'w') 15 | print >>op, "Forward Sampling in a discrete BN. Runtime as a function of number of vertices. Indegree=3. Numoutcomes=3. Samples=100." 16 | 17 | for size in graphsizes: 18 | disc_bn_generator(size, 3, 3, "disc_bn_x.txt") 19 | runtime = timer("disc_bn_x.txt", 1) 20 | line = "%d, %f" % (size, runtime) 21 | print line 22 | print >>op, line 23 | 24 | 25 | 26 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | # run python setup.py install to install 2 | # run python setup.py sdist to generate a tarball 3 | 4 | from distutils.core import setup 5 | 6 | setup( 7 | 8 | name = 'libpgm', 9 | version = '1.3', 10 | author = 'CyberPoint International, LLC', 11 | author_email = 'mraugas@cyberpointllc.com', 12 | url = 'http://www.cyberpointllc.com', 13 | description = 'A library for creating and using probabilistic graphical models', 14 | long_description = 'This library provides tools for modeling large systems with Bayesian networks. Using these tools allows for efficient statistical analysis on large data sets.', 15 | packages = ['libpgm', 'libpgm.CPDtypes', 'utils'] 16 | ) 17 | -------------------------------------------------------------------------------- /docs/CPDtypes.rst: -------------------------------------------------------------------------------- 1 | CPDtypes 2 | ******** 3 | 4 | There are currently three real types of CPD nodes in this directory, but there could be infinitely many more. The *crazy* type listed last is meant to show that classes can exist for any computational way to sample a node based on its parent values. 
The flexibility provided allows for random sampling to exist in hybrid networks. 5 | 6 | 7 | discrete 8 | -------- 9 | 10 | .. automodule:: libpgm.CPDtypes.discrete 11 | :members: 12 | 13 | linear gaussian 14 | --------------- 15 | 16 | .. automodule:: libpgm.CPDtypes.lg 17 | :members: 18 | 19 | linear gaussian + discrete 20 | -------------------------- 21 | 22 | .. automodule:: libpgm.CPDtypes.lgandd 23 | :members: 24 | 25 | crazy (test type) 26 | ----------------- 27 | 28 | .. automodule:: libpgm.CPDtypes.crazy 29 | :members: 30 | -------------------------------------------------------------------------------- /docs/_build/html/_sources/CPDtypes.txt: -------------------------------------------------------------------------------- 1 | CPDtypes 2 | ******** 3 | 4 | There are currently three real types of CPD nodes in this directory, but there could be infinitely many more. The *crazy* type listed last is meant to show that classes can exist for any computational way to sample a node based on its parent values. The flexibility provided allows for random sampling to exist in hybrid networks. 5 | 6 | 7 | discrete 8 | -------- 9 | 10 | .. automodule:: libpgm.CPDtypes.discrete 11 | :members: 12 | 13 | linear gaussian 14 | --------------- 15 | 16 | .. automodule:: libpgm.CPDtypes.lg 17 | :members: 18 | 19 | linear gaussian + discrete 20 | -------------------------- 21 | 22 | .. automodule:: libpgm.CPDtypes.lgandd 23 | :members: 24 | 25 | crazy (test type) 26 | ----------------- 27 | 28 | .. automodule:: libpgm.CPDtypes.crazy 29 | :members: 30 | -------------------------------------------------------------------------------- /runtime-tests/archives/rtFnumvertices_timer.py: -------------------------------------------------------------------------------- 1 | ''' 2 | Tests the runtime of functions. 
3 | 4 | Charlie Cabot 5 | Aug 1 2012 6 | 7 | ''' 8 | 9 | import time 10 | import sys 11 | 12 | sys.path.append("/home/ccabot/Documents/bayesian networks project/bayesian/v3_bayesian/PGMlibrary") 13 | 14 | from nodedata import NodeData 15 | from graphskeleton import GraphSkeleton 16 | from discretebayesiannetwork import DiscreteBayesianNetwork 17 | 18 | def timer(inputfile, trials): 19 | 20 | # load nodedata and graphskeleton 21 | nd = NodeData() 22 | skel = GraphSkeleton() 23 | nd.load(inputfile) 24 | skel.load(inputfile) 25 | 26 | # topologically order graphskeleton 27 | skel.toporder() 28 | 29 | # load bayesian network 30 | bn = DiscreteBayesianNetwork(skel, nd) 31 | 32 | # TIME 33 | totaltime = 0 34 | for _ in range(trials): 35 | start = time.clock() 36 | ret = bn.randomsample(100) 37 | elapsed = time.clock() - start 38 | totaltime += elapsed 39 | totaltime /= trials 40 | 41 | return totaltime 42 | 43 | #timer("/home/ccabot/Documents/bayesian networks project/bayesian/v3_bayesian/PGMlibrary/unittestdict.txt", 10) 44 | 45 | -------------------------------------------------------------------------------- /tests/unittestdict.txt: -------------------------------------------------------------------------------- 1 | { 2 | "V": ["Letter", "Grade", "Intelligence", "SAT", "Difficulty"], 3 | "E": [["Intelligence", "Grade"], 4 | ["Difficulty", "Grade"], 5 | ["Intelligence", "SAT"], 6 | ["Grade", "Letter"]], 7 | "Vdata": { 8 | "Letter": { 9 | "ord": 4, 10 | "numoutcomes": 2, 11 | "vals": ["weak", "strong"], 12 | "parents": ["Grade"], 13 | "children": None, 14 | "cprob": { 15 | "['A']": [.1, .9], 16 | "['B']": [.4, .6], 17 | "['C']": [.99, .01] 18 | } 19 | }, 20 | 21 | "SAT": { 22 | "ord": 3, 23 | "numoutcomes": 2, 24 | "vals": ["lowscore", "highscore"], 25 | "parents": ["Intelligence"], 26 | "children": None, 27 | "cprob": { 28 | "['low']": [.95, .05], 29 | "['high']": [.2, .8] 30 | } 31 | }, 32 | 33 | "Grade": { 34 | "ord": 2, 35 | "numoutcomes": 3, 36 | "vals": ["A", "B", "C"], 37 | "parents": ["Difficulty", "Intelligence"], 38 | "children": ["Letter"], 39 | "cprob": { 40 | "['easy', 'low']": [.3, .4, .3], 41 | "['easy', 'high']": [.9, .08, .02], 42 | "['hard', 'low']": [.05, .25, .7], 43 | "['hard', 'high']": [.5, .3, .2] 44 | } 45 | }, 46 | 47 | "Intelligence": { 48 | "ord": 1, 49 | "numoutcomes": 2, 50 | "vals": ["low", "high"], 51 | "parents": None, 52 | "children": ["SAT", "Grade"], 53 | "cprob": [.7, .3] 54 | }, 55 | 56 | "Difficulty": { 57 | "ord": 0, 58 | "numoutcomes": 2, 59 | "vals": ["easy", "hard"], 60 | "parents": None, 61 | "children": ["Grade"], 62 | "cprob": [.6, .4] 63 | } 64 | } 65 | } -------------------------------------------------------------------------------- /LICENSE.txt: -------------------------------------------------------------------------------- 1 | Copyright (c) 2012, CyberPoint International, LLC 2 | All rights reserved. 3 | 4 | Redistribution and use in source and binary forms, with or without 5 | modification, are permitted provided that the following conditions are met: 6 | * Redistributions of source code must retain the above copyright 7 | notice, this list of conditions and the following disclaimer. 8 | * Redistributions in binary form must reproduce the above copyright 9 | notice, this list of conditions and the following disclaimer in the 10 | documentation and/or other materials provided with the distribution. 
11 | * Neither the name of the CyberPoint International, LLC nor the 12 | names of its contributors may be used to endorse or promote products 13 | derived from this software without specific prior written permission. 14 | 15 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND 16 | ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED 17 | WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 18 | DISCLAIMED. IN NO EVENT SHALL CYBERPOINT INTERNATIONAL, LLC BE LIABLE FOR ANY 19 | DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES 20 | (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; 21 | LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND 22 | ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 23 | (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS 24 | SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 25 | 26 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | libpgm 2 | ====== 3 | 4 | A library for creating and using probabilistic graphical models 5 | Developed by James Ulrich and Charlie Cabot at 6 | Cyberpoint LLC (www.cyberpointllc.com). 7 | 8 | Copyright 2013 CyberPoint International LLC. 9 | 10 | Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: (1) Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. (2) Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. (3) Neither the name of the CyberPoint International, LLC nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. 11 | 12 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL CYBERPOINT INTERNATIONAL, LLC BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
13 | -------------------------------------------------------------------------------- /examples/exampledata.txt: -------------------------------------------------------------------------------- 1 | # an example set of data to be used by the learning functions in the PGMlibrary 2 | # it is an array of dicts 3 | [ 4 | { 5 | "Grade": "B", 6 | "Difficulty": "easy", 7 | "SAT": "lowscore", 8 | "Letter": "strong", 9 | "Intelligence": "high" 10 | }, 11 | { 12 | "Grade": "B", 13 | "Difficulty": "hard", 14 | "SAT": "lowscore", 15 | "Letter": "weak", 16 | "Intelligence": "low" 17 | }, 18 | { 19 | "Grade": "C", 20 | "Difficulty": "easy", 21 | "SAT": "lowscore", 22 | "Letter": "weak", 23 | "Intelligence": "low" 24 | }, 25 | { 26 | "Grade": "A", 27 | "Difficulty": "easy", 28 | "SAT": "highscore", 29 | "Letter": "strong", 30 | "Intelligence": "low" 31 | }, 32 | { 33 | "Grade": "B", 34 | "Difficulty": "easy", 35 | "SAT": "lowscore", 36 | "Letter": "weak", 37 | "Intelligence": "low" 38 | }, 39 | { 40 | "Grade": "A", 41 | "Difficulty": "easy", 42 | "SAT": "highscore", 43 | "Letter": "strong", 44 | "Intelligence": "high" 45 | }, 46 | { 47 | "Grade": "A", 48 | "Difficulty": "easy", 49 | "SAT": "highscore", 50 | "Letter": "strong", 51 | "Intelligence": "high" 52 | }, 53 | { 54 | "Grade": "C", 55 | "Difficulty": "easy", 56 | "SAT": "lowscore", 57 | "Letter": "weak", 58 | "Intelligence": "low" 59 | }, 60 | { 61 | "Grade": "C", 62 | "Difficulty": "hard", 63 | "SAT": "lowscore", 64 | "Letter": "weak", 65 | "Intelligence": "high" 66 | }, 67 | { 68 | "Grade": "A", 69 | "Difficulty": "easy", 70 | "SAT": "highscore", 71 | "Letter": "weak", 72 | "Intelligence": "high" 73 | } 74 | ] 75 | -------------------------------------------------------------------------------- /tests/unittestlgdict.txt: -------------------------------------------------------------------------------- 1 | { 2 | "Vdata": { 3 | "Grade": { 4 | "mean_base": 80, 5 | "mean_scal": [ 6 | -0.25, 7 | 0.25 8 | ], 9 | "parents": [ 10 | "Difficulty", 11 | "Intelligence" 12 | ], 13 | "variance": 5, 14 | "type": "lg", 15 | "children": [ 16 | "Letter" 17 | ] 18 | }, 19 | "Intelligence": { 20 | "mean_base": 50, 21 | "mean_scal": [], 22 | "parents": null, 23 | "variance": 18, 24 | "type": "lg", 25 | "children": [ 26 | "SAT", 27 | "Grade" 28 | ] 29 | }, 30 | "Difficulty": { 31 | "mean_base": 50, 32 | "mean_scal": [], 33 | "parents": null, 34 | "variance": 18, 35 | "type": "lg", 36 | "children": [ 37 | "Grade" 38 | ] 39 | }, 40 | "Letter": { 41 | "mean_base": -110, 42 | "mean_scal": [ 43 | 2 44 | ], 45 | "parents": [ 46 | "Grade" 47 | ], 48 | "variance": 10, 49 | "type": "lg", 50 | "children": null 51 | }, 52 | "SAT": { 53 | "mean_base": 10, 54 | "mean_scal": [ 55 | 1 56 | ], 57 | "parents": [ 58 | "Intelligence" 59 | ], 60 | "variance": 10, 61 | "type": "lg", 62 | "children": null 63 | } 64 | } 65 | } -------------------------------------------------------------------------------- /runtime-tests/timer.py: -------------------------------------------------------------------------------- 1 | ''' 2 | Tests the runtime of functions. 
3 | 4 | Charlie Cabot 5 | Aug 1 2012 6 | 7 | ''' 8 | 9 | import time 10 | import json 11 | import sys 12 | 13 | sys.path.append("../libpgm/") # make sure this is right 14 | 15 | #from pympler.asizeof import asizeof 16 | #from pympler import muppy, summary 17 | 18 | from nodedata import NodeData 19 | from graphskeleton import GraphSkeleton 20 | from discretebayesiannetwork import DiscreteBayesianNetwork 21 | from tablecpdfactorization import TableCPDFactorization 22 | from pgmlearner import PGMLearner 23 | 24 | op = open("output.csv", 'w') 25 | 26 | 27 | 28 | def timer(inputfile, trials, datalength): 29 | 30 | # load nodedata and graphskeleton 31 | nd = NodeData() 32 | skel = GraphSkeleton() 33 | #print "bp1" 34 | nd.load(inputfile) 35 | #print "bp2" 36 | skel.load(inputfile) 37 | #print "bp3" 38 | 39 | # msg = "%d, %d" % (asizeof(nd), asizeof(skel)) 40 | # print >>op, msg 41 | 42 | # topologically order graphskeleton 43 | skel.toporder() 44 | 45 | # load bayesian network 46 | bn = DiscreteBayesianNetwork(skel, nd) 47 | 48 | # instantiate pgm learner 49 | l = PGMLearner() 50 | 51 | # free unused memory 52 | del nd 53 | 54 | #sum1 = summary.summarize(muppy.get_objects()) 55 | #summary.print_(sum1) 56 | 57 | # TIME 58 | totaltime = 0 59 | for _ in range(trials): 60 | data = bn.randomsample(datalength) 61 | start = time.clock() 62 | ret = l.discrete_mle_estimateparams(skel, data) 63 | elapsed = time.clock() - start 64 | totaltime += elapsed 65 | totaltime /= trials 66 | 67 | 68 | print json.dumps(ret.Vdata, indent=1) 69 | return totaltime 70 | #timer("/home/ccabot/Documents/bayesian networks project/bayesian/v3_bayesian/PGMlibrary/unittestdict.txt", 10) 71 | 72 | -------------------------------------------------------------------------------- /libpgm_examples/discrete_bayesian_network.py~: -------------------------------------------------------------------------------- 1 | { 2 | "V": ["Letter", "Grade", "Intelligence", "SAT", "Difficulty"], 3 | "E": [["Intelligence", "Grade"], 4 | ["Difficulty", "Grade"], 5 | ["Intelligence", "SAT"], 6 | ["Grade", "Letter"]], 7 | "Vdata": { 8 | "Letter": { 9 | "ord": 4, 10 | "numoutcomes": 2, 11 | "vals": ["weak", "strong"], 12 | "parents": ["Grade"], 13 | "children": None, 14 | "cprob": { 15 | "['A']": [.1, .9], 16 | "['B']": [.4, .6], 17 | "['C']": [.99, .01] 18 | } 19 | }, 20 | 21 | "SAT": { 22 | "ord": 3, 23 | "numoutcomes": 2, 24 | "vals": ["lowscore", "highscore"], 25 | "parents": ["Intelligence"], 26 | "children": None, 27 | "cprob": { 28 | "['low']": [.95, .05], 29 | "['high']": [.2, .8] 30 | } 31 | }, 32 | 33 | "Grade": { 34 | "ord": 2, 35 | "numoutcomes": 3, 36 | "vals": ["A", "B", "C"], 37 | "parents": ["Difficulty", "Intelligence"], 38 | "children": ["Letter"], 39 | "cprob": { 40 | "['easy', 'low']": [.3, .4, .3], 41 | "['easy', 'high']": [.9, .08, .02], 42 | "['hard', 'low']": [.05, .25, .7], 43 | "['hard', 'high']": [.5, .3, .2] 44 | } 45 | }, 46 | 47 | "Intelligence": { 48 | "ord": 1, 49 | "numoutcomes": 2, 50 | "vals": ["low", "high"], 51 | "parents": None, 52 | "children": ["SAT", "Grade"], 53 | "cprob": [.7, .3] 54 | }, 55 | 56 | "Difficulty": { 57 | "ord": 0, 58 | "numoutcomes": 2, 59 | "vals": ["easy", "hard"], 60 | "parents": None, 61 | "children": ["Grade"], 62 | "cprob": [.6, .4] 63 | } 64 | } 65 | } 66 | -------------------------------------------------------------------------------- /tests/unittesthdict.txt: -------------------------------------------------------------------------------- 1 | { 2 | "Vdata": { 3 | "Grade": { 4 | 
"parents": [ 5 | "Difficulty", 6 | "Intelligence" 7 | ], 8 | "type": "lgandd", 9 | "children": [ 10 | "Letter" 11 | ], 12 | "hybcprob": { 13 | "['high']": { 14 | "variance": 10, 15 | "mean_base": 20, 16 | "mean_scal": [ 17 | 1 18 | ] 19 | }, 20 | "['low']": { 21 | "variance": 10, 22 | "mean_base": 10, 23 | "mean_scal": [ 24 | 1 25 | ] 26 | } 27 | } 28 | }, 29 | "Intelligence": { 30 | "numoutcomes": 2, 31 | "cprob": [ 32 | 0.9, 33 | 0.1 34 | ], 35 | "parents": null, 36 | "vals": [ 37 | "low", 38 | "high" 39 | ], 40 | "type": "discrete", 41 | "children": [ 42 | "SAT", 43 | "Grade" 44 | ] 45 | }, 46 | "Difficulty": { 47 | "mean_base": 50, 48 | "mean_scal": [], 49 | "parents": null, 50 | "variance": 18, 51 | "type": "lg", 52 | "children": [ 53 | "Grade" 54 | ] 55 | }, 56 | "Letter": { 57 | "mean_base": -110, 58 | "mean_scal": [ 59 | 2 60 | ], 61 | "parents": [ 62 | "Grade" 63 | ], 64 | "variance": 10, 65 | "type": "lg", 66 | "children": null 67 | }, 68 | "SAT": { 69 | "parents": [ 70 | "Intelligence" 71 | ], 72 | "crazyinput": 7, 73 | "type": "crazy" 74 | } 75 | } 76 | } 77 | -------------------------------------------------------------------------------- /tests/unittesthdict_lgandd_no_lg_parents.txt: -------------------------------------------------------------------------------- 1 | { 2 | "V": ["Letter", "Grade", "Intelligence", "SAT", "Difficulty"], 3 | "E": [["Intelligence", "Grade"], 4 | ["Difficulty", "Grade"], 5 | ["Intelligence", "SAT"], 6 | ["Grade", "Letter"]], 7 | "Vdata": { 8 | "Grade": { 9 | "parents": [ 10 | "Intelligence" 11 | ], 12 | "type": "lgandd", 13 | "children": [ 14 | "Letter" 15 | ], 16 | "hybcprob": { 17 | "['high']": { 18 | "variance": 10, 19 | "mean_base": 20, 20 | "mean_scal": [ 21 | 1 22 | ] 23 | }, 24 | "['low']": { 25 | "variance": 10, 26 | "mean_base": 10, 27 | "mean_scal": [ 28 | 1 29 | ] 30 | } 31 | } 32 | }, 33 | "Intelligence": { 34 | "numoutcomes": 2, 35 | "cprob": [ 36 | 0.9, 37 | 0.1 38 | ], 39 | "parents": null, 40 | "vals": [ 41 | "low", 42 | "high" 43 | ], 44 | "type": "discrete", 45 | "children": [ 46 | "SAT", 47 | "Grade" 48 | ] 49 | }, 50 | "Difficulty": { 51 | "mean_base": 50, 52 | "mean_scal": [], 53 | "parents": null, 54 | "variance": 18, 55 | "type": "lg", 56 | "children": [ 57 | "Grade" 58 | ] 59 | }, 60 | "Letter": { 61 | "mean_base": -110, 62 | "mean_scal": [ 63 | 2 64 | ], 65 | "parents": [ 66 | "Grade" 67 | ], 68 | "variance": 10, 69 | "type": "lg", 70 | "children": null 71 | }, 72 | "SAT": { 73 | "parents": [ 74 | "Intelligence" 75 | ], 76 | "crazyinput": 7, 77 | "type": "crazy" 78 | } 79 | } 80 | } 81 | -------------------------------------------------------------------------------- /docs/unittestlgdict.rst: -------------------------------------------------------------------------------- 1 | linear gaussian bayesian network 2 | ================================ 3 | 4 | This is an example input file for a Bayesian network with linear Gaussian conditional probability distributions. 
It provides linear Gaussian CPD data for the same graph skeleton as in the :doc:`discrete case `:: 5 | 6 | { 7 | "Vdata": { 8 | "Grade": { 9 | "mean_base": 80, 10 | "mean_scal": [ 11 | -0.25, 12 | 0.25 13 | ], 14 | "parents": [ 15 | "Difficulty", 16 | "Intelligence" 17 | ], 18 | "variance": 5, 19 | "type": "lg", 20 | "children": [ 21 | "Letter" 22 | ] 23 | }, 24 | "Intelligence": { 25 | "mean_base": 50, 26 | "mean_scal": [], 27 | "parents": null, 28 | "variance": 18, 29 | "type": "lg", 30 | "children": [ 31 | "SAT", 32 | "Grade" 33 | ] 34 | }, 35 | "Difficulty": { 36 | "mean_base": 50, 37 | "mean_scal": [], 38 | "parents": null, 39 | "variance": 18, 40 | "type": "lg", 41 | "children": [ 42 | "Grade" 43 | ] 44 | }, 45 | "Letter": { 46 | "mean_base": -110, 47 | "mean_scal": [ 48 | 2 49 | ], 50 | "parents": [ 51 | "Grade" 52 | ], 53 | "variance": 10, 54 | "type": "lg", 55 | "children": null 56 | }, 57 | "SAT": { 58 | "mean_base": 10, 59 | "mean_scal": [ 60 | 1 61 | ], 62 | "parents": [ 63 | "Intelligence" 64 | ], 65 | "variance": 10, 66 | "type": "lg", 67 | "children": null 68 | } 69 | } 70 | } 71 | -------------------------------------------------------------------------------- /docs/_build/html/_sources/unittestlgdict.txt: -------------------------------------------------------------------------------- 1 | linear gaussian bayesian network 2 | ================================ 3 | 4 | This is an example input file for a Bayesian network with linear Gaussian conditional probability distributions. It provides linear Gaussian CPD data for the same graph skeleton as in the :doc:`discrete case `:: 5 | 6 | { 7 | "Vdata": { 8 | "Grade": { 9 | "mean_base": 80, 10 | "mean_scal": [ 11 | -0.25, 12 | 0.25 13 | ], 14 | "parents": [ 15 | "Difficulty", 16 | "Intelligence" 17 | ], 18 | "variance": 5, 19 | "type": "lg", 20 | "children": [ 21 | "Letter" 22 | ] 23 | }, 24 | "Intelligence": { 25 | "mean_base": 50, 26 | "mean_scal": [], 27 | "parents": null, 28 | "variance": 18, 29 | "type": "lg", 30 | "children": [ 31 | "SAT", 32 | "Grade" 33 | ] 34 | }, 35 | "Difficulty": { 36 | "mean_base": 50, 37 | "mean_scal": [], 38 | "parents": null, 39 | "variance": 18, 40 | "type": "lg", 41 | "children": [ 42 | "Grade" 43 | ] 44 | }, 45 | "Letter": { 46 | "mean_base": -110, 47 | "mean_scal": [ 48 | 2 49 | ], 50 | "parents": [ 51 | "Grade" 52 | ], 53 | "variance": 10, 54 | "type": "lg", 55 | "children": null 56 | }, 57 | "SAT": { 58 | "mean_base": 10, 59 | "mean_scal": [ 60 | 1 61 | ], 62 | "parents": [ 63 | "Intelligence" 64 | ], 65 | "variance": 10, 66 | "type": "lg", 67 | "children": null 68 | } 69 | } 70 | } 71 | -------------------------------------------------------------------------------- /docs/unittestdict.rst: -------------------------------------------------------------------------------- 1 | discrete bayesian network 2 | ========================= 3 | 4 | This is an example input file for a Bayesian network with discrete conditional probability distributions. The example is a small (5 node) graph modeling a student's performance. The graph skeleton data is also included, in the vertex set ("V") and the edge set ("E"). The graph itself is from Koller et al. 
53.:: 5 | 6 | { 7 | "V": ["Letter", "Grade", "Intelligence", "SAT", "Difficulty"], 8 | "E": [["Intelligence", "Grade"], 9 | ["Difficulty", "Grade"], 10 | ["Intelligence", "SAT"], 11 | ["Grade", "Letter"]], 12 | "Vdata": { 13 | "Letter": { 14 | "ord": 4, 15 | "numoutcomes": 2, 16 | "vals": ["weak", "strong"], 17 | "parents": ["Grade"], 18 | "children": None, 19 | "cprob": { 20 | "['A']": [.1, .9], 21 | "['B']": [.4, .6], 22 | "['C']": [.99, .01] 23 | } 24 | }, 25 | 26 | "SAT": { 27 | "ord": 3, 28 | "numoutcomes": 2, 29 | "vals": ["lowscore", "highscore"], 30 | "parents": ["Intelligence"], 31 | "children": None, 32 | "cprob": { 33 | "['low']": [.95, .05], 34 | "['high']": [.2, .8] 35 | } 36 | }, 37 | 38 | "Grade": { 39 | "ord": 2, 40 | "numoutcomes": 3, 41 | "vals": ["A", "B", "C"], 42 | "parents": ["Difficulty", "Intelligence"], 43 | "children": ["Letter"], 44 | "cprob": { 45 | "['easy', 'low']": [.3, .4, .3], 46 | "['easy', 'high']": [.9, .08, .02], 47 | "['hard', 'low']": [.05, .25, .7], 48 | "['hard', 'high']": [.5, .3, .2] 49 | } 50 | }, 51 | 52 | "Intelligence": { 53 | "ord": 1, 54 | "numoutcomes": 2, 55 | "vals": ["low", "high"], 56 | "parents": None, 57 | "children": ["SAT", "Grade"], 58 | "cprob": [.7, .3] 59 | }, 60 | 61 | "Difficulty": { 62 | "ord": 0, 63 | "numoutcomes": 2, 64 | "vals": ["easy", "hard"], 65 | "parents": None, 66 | "children": ["Grade"], 67 | "cprob": [.6, .4] 68 | } 69 | } 70 | } 71 | -------------------------------------------------------------------------------- /docs/unittesthdict.rst: -------------------------------------------------------------------------------- 1 | hybrid bayesian network 2 | ======================= 3 | 4 | This is an example input file for a "hybrid" Bayesian network, i.e., one with varying types of conditional probability distributions. It provides hybrid CPD data for the same graph skeleton as in the :doc:`discrete case `:: 5 | 6 | { 7 | "Vdata": { 8 | "Grade": { 9 | "parents": [ 10 | "Difficulty", 11 | "Intelligence" 12 | ], 13 | "type": "lgandd", 14 | "children": [ 15 | "Letter" 16 | ], 17 | "hybcprob": { 18 | "['high']": { 19 | "variance": 10, 20 | "mean_base": 20, 21 | "mean_scal": [ 22 | 1 23 | ] 24 | }, 25 | "['low']": { 26 | "variance": 10, 27 | "mean_base": 10, 28 | "mean_scal": [ 29 | 1 30 | ] 31 | } 32 | } 33 | }, 34 | "Intelligence": { 35 | "numoutcomes": 2, 36 | "cprob": [ 37 | 0.9, 38 | 0.1 39 | ], 40 | "parents": null, 41 | "vals": [ 42 | "low", 43 | "high" 44 | ], 45 | "type": "discrete", 46 | "children": [ 47 | "SAT", 48 | "Grade" 49 | ] 50 | }, 51 | "Difficulty": { 52 | "mean_base": 50, 53 | "mean_scal": [], 54 | "parents": null, 55 | "variance": 18, 56 | "type": "lg", 57 | "children": [ 58 | "Grade" 59 | ] 60 | }, 61 | "Letter": { 62 | "mean_base": -110, 63 | "mean_scal": [ 64 | 2 65 | ], 66 | "parents": [ 67 | "Grade" 68 | ], 69 | "variance": 10, 70 | "type": "lg", 71 | "children": null 72 | }, 73 | "SAT": { 74 | "parents": [ 75 | "Intelligence" 76 | ], 77 | "crazyinput": 7, 78 | "type": "crazy" 79 | } 80 | } 81 | } 82 | -------------------------------------------------------------------------------- /docs/_build/html/_sources/unittestdict.txt: -------------------------------------------------------------------------------- 1 | discrete bayesian network 2 | ========================= 3 | 4 | This is an example input file for a Bayesian network with discrete conditional probability distributions. The example is a small (5 node) graph modeling a student's performance. 
The graph skeleton data is also included, in the vertex set ("V") and the edge set ("E"). The graph itself is from Koller et al. 53.:: 5 | 6 | { 7 | "V": ["Letter", "Grade", "Intelligence", "SAT", "Difficulty"], 8 | "E": [["Intelligence", "Grade"], 9 | ["Difficulty", "Grade"], 10 | ["Intelligence", "SAT"], 11 | ["Grade", "Letter"]], 12 | "Vdata": { 13 | "Letter": { 14 | "ord": 4, 15 | "numoutcomes": 2, 16 | "vals": ["weak", "strong"], 17 | "parents": ["Grade"], 18 | "children": None, 19 | "cprob": { 20 | "['A']": [.1, .9], 21 | "['B']": [.4, .6], 22 | "['C']": [.99, .01] 23 | } 24 | }, 25 | 26 | "SAT": { 27 | "ord": 3, 28 | "numoutcomes": 2, 29 | "vals": ["lowscore", "highscore"], 30 | "parents": ["Intelligence"], 31 | "children": None, 32 | "cprob": { 33 | "['low']": [.95, .05], 34 | "['high']": [.2, .8] 35 | } 36 | }, 37 | 38 | "Grade": { 39 | "ord": 2, 40 | "numoutcomes": 3, 41 | "vals": ["A", "B", "C"], 42 | "parents": ["Difficulty", "Intelligence"], 43 | "children": ["Letter"], 44 | "cprob": { 45 | "['easy', 'low']": [.3, .4, .3], 46 | "['easy', 'high']": [.9, .08, .02], 47 | "['hard', 'low']": [.05, .25, .7], 48 | "['hard', 'high']": [.5, .3, .2] 49 | } 50 | }, 51 | 52 | "Intelligence": { 53 | "ord": 1, 54 | "numoutcomes": 2, 55 | "vals": ["low", "high"], 56 | "parents": None, 57 | "children": ["SAT", "Grade"], 58 | "cprob": [.7, .3] 59 | }, 60 | 61 | "Difficulty": { 62 | "ord": 0, 63 | "numoutcomes": 2, 64 | "vals": ["easy", "hard"], 65 | "parents": None, 66 | "children": ["Grade"], 67 | "cprob": [.6, .4] 68 | } 69 | } 70 | } 71 | -------------------------------------------------------------------------------- /docs/_build/html/_sources/unittesthdict.txt: -------------------------------------------------------------------------------- 1 | hybrid bayesian network 2 | ======================= 3 | 4 | This is an example input file for a "hybrid" Bayesian network, i.e., one with varying types of conditional probability distributions. It provides hybrid CPD data for the same graph skeleton as in the :doc:`discrete case `:: 5 | 6 | { 7 | "Vdata": { 8 | "Grade": { 9 | "parents": [ 10 | "Difficulty", 11 | "Intelligence" 12 | ], 13 | "type": "lgandd", 14 | "children": [ 15 | "Letter" 16 | ], 17 | "hybcprob": { 18 | "['high']": { 19 | "variance": 10, 20 | "mean_base": 20, 21 | "mean_scal": [ 22 | 1 23 | ] 24 | }, 25 | "['low']": { 26 | "variance": 10, 27 | "mean_base": 10, 28 | "mean_scal": [ 29 | 1 30 | ] 31 | } 32 | } 33 | }, 34 | "Intelligence": { 35 | "numoutcomes": 2, 36 | "cprob": [ 37 | 0.9, 38 | 0.1 39 | ], 40 | "parents": null, 41 | "vals": [ 42 | "low", 43 | "high" 44 | ], 45 | "type": "discrete", 46 | "children": [ 47 | "SAT", 48 | "Grade" 49 | ] 50 | }, 51 | "Difficulty": { 52 | "mean_base": 50, 53 | "mean_scal": [], 54 | "parents": null, 55 | "variance": 18, 56 | "type": "lg", 57 | "children": [ 58 | "Grade" 59 | ] 60 | }, 61 | "Letter": { 62 | "mean_base": -110, 63 | "mean_scal": [ 64 | 2 65 | ], 66 | "parents": [ 67 | "Grade" 68 | ], 69 | "variance": 10, 70 | "type": "lg", 71 | "children": null 72 | }, 73 | "SAT": { 74 | "parents": [ 75 | "Intelligence" 76 | ], 77 | "crazyinput": 7, 78 | "type": "crazy" 79 | } 80 | } 81 | } 82 | -------------------------------------------------------------------------------- /libpgm/orderedskeleton.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2012, CyberPoint International, LLC 2 | # All rights reserved. 
3 | # 4 | # Redistribution and use in source and binary forms, with or without 5 | # modification, are permitted provided that the following conditions are met: 6 | # * Redistributions of source code must retain the above copyright 7 | # notice, this list of conditions and the following disclaimer. 8 | # * Redistributions in binary form must reproduce the above copyright 9 | # notice, this list of conditions and the following disclaimer in the 10 | # documentation and/or other materials provided with the distribution. 11 | # * Neither the name of the CyberPoint International, LLC nor the 12 | # names of its contributors may be used to endorse or promote products 13 | # derived from this software without specific prior written permission. 14 | # 15 | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND 16 | # ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED 17 | # WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 18 | # DISCLAIMED. IN NO EVENT SHALL CYBERPOINT INTERNATIONAL, LLC BE LIABLE FOR ANY 19 | # DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES 20 | # (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; 21 | # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND 22 | # ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 23 | # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS 24 | # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 25 | ''' 26 | This module facilitates the process of creating ordered graph skeletons by topologically ordering them automatically. 27 | 28 | ''' 29 | 30 | from .graphskeleton import GraphSkeleton 31 | 32 | class OrderedSkeleton(GraphSkeleton): 33 | ''' 34 | This class represents a graph skeleton (see :doc:`graphskeleton`) that is always topologically ordered. 35 | 36 | ''' 37 | 38 | def __init__(self, graphskeleton=None): 39 | self.V = None 40 | '''A list of names of vertices''' 41 | self.E = None 42 | '''A list of [origin, destination] pairs of verties that constitute edges.''' 43 | 44 | def load(self, path): 45 | '''Loads a dictionary from a file located at *path* in the same manner as :doc:`graphskeleton`, but includes a step where it topologically orders the nodes.''' 46 | 47 | self.dictload(path) 48 | self.V = self.alldata["V"] 49 | self.E = self.alldata["E"] 50 | 51 | # topologically order 52 | self.toporder() 53 | 54 | # free unused memory 55 | del self.alldata 56 | -------------------------------------------------------------------------------- /runtime-tests/bn_generator.py: -------------------------------------------------------------------------------- 1 | ''' 2 | For creating Bayesian network input files 3 | 4 | Charlie Cabot 5 | August 1, 2012 6 | 7 | ''' 8 | import json 9 | 10 | def disc_bn_generator(numvertices, numoutcomes, indegree, outputpath): 11 | ''' 12 | Creates a graph with a specified number of vertices, where all vertices 13 | except the roots have a specified number of parents and a 14 | specified number of children. 15 | 16 | Arguments: 17 | numvertices -- Number of desired vertices 18 | indegree -- Number of parents for all vertices except the roots 19 | outputpath -- Path to created .txt file 20 | 21 | Format is as corresponds to the PGMlibrary discrete-CPD Vdata format. 
22 | See PGMlibrary/discretebayesiannetwork.py 23 | 24 | ''' 25 | import random 26 | 27 | op = open(outputpath, 'w') 28 | 29 | # lay out result 30 | result = dict() 31 | result["V"] = [] 32 | result["E"] = [] 33 | result["Vdata"] = dict() 34 | 35 | # make vertices 36 | for x in range(numvertices): 37 | result["V"].append(str(x)) 38 | result["Vdata"][str(x)] = dict() 39 | result["Vdata"][str(x)]["vals"] = [] 40 | result["Vdata"][str(x)]["parents"] = [] 41 | result["Vdata"][str(x)]["children"] = [] 42 | result["Vdata"][str(x)]["cprob"] = dict() 43 | 44 | for y in range(numoutcomes): 45 | result["Vdata"][str(x)]["vals"].append(str(y)) 46 | result["Vdata"][str(x)]["numoutcomes"] = len(result["Vdata"][str(x)]["vals"]) 47 | 48 | # make edges 49 | for x in range(numvertices): 50 | for j in range(indegree): 51 | if x + j + 1 < numvertices: 52 | result["E"].append([str(x), str(x + j + 1)]) 53 | result["Vdata"][str(x)]["children"].append(str(x + j + 1)) 54 | result["Vdata"][str(x + j + 1)]["parents"].append(str(x)) 55 | 56 | # make cprob recursively 57 | 58 | # define helper procedures 59 | def createinterval(n): 60 | '''divide [0, 1] into n slices"''' 61 | ret = [] 62 | nret = [] 63 | for i in range(n): 64 | nret.append(random.random()) 65 | s = sum(nret) 66 | ret = [x/float(s) for x in nret] 67 | return ret 68 | 69 | def explore(x, _dict, key, depth, totaldepth): 70 | '''recursively fill a cprob table''' 71 | if depth < totaldepth: 72 | for val in result["Vdata"][result["Vdata"][x]["parents"][depth]]["vals"]: 73 | ckey = key[:] 74 | ckey.append(val) 75 | explore(x, _dict, ckey, depth + 1, totaldepth) 76 | else: 77 | _dict[str(key)] = createinterval(result["Vdata"][x]["numoutcomes"]) 78 | 79 | for x in range(numvertices): 80 | if result["Vdata"][str(x)]["parents"]: 81 | explore(str(x), result["Vdata"][str(x)]["cprob"], [], 0, len(result["Vdata"][str(x)]["parents"])) 82 | else: 83 | result["Vdata"][str(x)]["cprob"] = createinterval(result["Vdata"][str(x)]["numoutcomes"]) 84 | 85 | print >>op, json.dumps(result) 86 | -------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | .. libpgm documentation master file, created by 2 | sphinx-quickstart on Tue Aug 7 11:49:49 2012. 3 | You can adapt this file completely to your liking, but it should at least 4 | contain the root `toctree` directive. 5 | 6 | Welcome to libpgm! 7 | ================== 8 | 9 | libpgm is an endeavor to make Bayesian probability graphs easy to use. The effort originates from Daphne Koller and Nir Friedman's *Probabilistic Graphical Models* (2009), which provides an in-depth study of probabilistic graphical models and their applications. 10 | 11 | Install from pypi at `http://pypi.python.org/pypi/libpgm `_ or download a tarball `here `_. 12 | 13 | Documentation 14 | ------------- 15 | 16 | The library consists of a series of importable modules, which either represent types of Bayesian graphs, contain methods to operate on them, or both. The methods' individual documentation pages are found below: 17 | 18 | .. toctree:: 19 | 20 | dictionary 21 | graphskeleton 22 | orderedskeleton 23 | nodedata 24 | discretebayesiannetwork 25 | hybayesiannetwork 26 | lgbayesiannetwork 27 | dyndiscbayesiannetwork 28 | tablecpdfactorization 29 | tablecpdfactor 30 | sampleaggregator 31 | pgmlearner 32 | CPDtypes 33 | 34 | 35 | Note that `numpy `_, `scipy `_, and Python 2.7 are required for this library. 
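As a rough sketch of how these modules fit together (adapted from the usage example in :doc:`sampleaggregator`; the input path is only illustrative), a discrete-CPD network can be loaded from a text file and forward-sampled like this::

    import json

    from libpgm.nodedata import NodeData
    from libpgm.graphskeleton import GraphSkeleton
    from libpgm.discretebayesiannetwork import DiscreteBayesianNetwork

    # load node data and graph skeleton from the same input file
    nd = NodeData()
    skel = GraphSkeleton()
    nd.load("../tests/unittestdict.txt")
    skel.load("../tests/unittestdict.txt")

    # topologically order the skeleton and build the network
    skel.toporder()
    bn = DiscreteBayesianNetwork(skel, nd)

    # draw 10 forward samples and print them
    print json.dumps(bn.randomsample(10), indent=2)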
36 | 37 | Capabilities 38 | ------------ 39 | 40 | Briefly, the capabilities of this library are: 41 | 42 | - Sampling 43 | - Forward sampling in a discrete-CPD Bayesian network 44 | - Forward sampling in a linear Gaussian-CPD Bayesian network 45 | - Forward sampling in a hybrid (any CPD type) Bayesian network 46 | - Forward sampling in a dynamic 2-TBN Bayesian network 47 | - Gibbs sampling in a discrete-CPD Bayesian network (given evidence) 48 | - Deterministic Inference 49 | - Compute the probability distribution over a specific node or nodes in a discrete-CPD Bayesian network (given evidence, if present) 50 | - Compute the exact probability of an outcome in a discrete-CPD Bayesian network (given evidence, if present) 51 | - Approximative Inference 52 | - Compute the approximate probability distribution by generating samples 53 | - Learning 54 | - Learn the CPDs of a discrete-CPD Bayesian network, given data and a structure 55 | - Learn the structure of a discrete Bayesian network, given only data 56 | - Learn the CPDs of a linear Gaussian Bayesian network, given data and a structure 57 | - Learn the strcutre of a linear Gaussian Bayesian network, given only data 58 | - Learn entire Bayesian networks (structures and parameters) from data 59 | 60 | Input files 61 | ----------- 62 | 63 | Because Bayesian probability graphs are large and contain a lot of data, the library works with .txt files as inputs. The formatting used is JavaScript Object Notation (JSON), with some flexibility (the :doc:`dictionary` module has the capacity to transform python-style dicts to JSON, for instance). Internally, the library stores these files as *json* objects from python's `json `_ library. For examples of the formatting, and of the particular data required for each different Bayesian network type, see the example input files below: 64 | 65 | .. toctree:: 66 | 67 | unittestdict 68 | unittestlgdict 69 | unittesthdict 70 | unittestdyndict 71 | 72 | 73 | Indices and tables 74 | ================== 75 | 76 | * :ref:`genindex` 77 | * :ref:`modindex` 78 | * :ref:`search` 79 | 80 | -------------------------------------------------------------------------------- /docs/_build/html/_sources/index.txt: -------------------------------------------------------------------------------- 1 | .. libpgm documentation master file, created by 2 | sphinx-quickstart on Tue Aug 7 11:49:49 2012. 3 | You can adapt this file completely to your liking, but it should at least 4 | contain the root `toctree` directive. 5 | 6 | Welcome to libpgm! 7 | ================== 8 | 9 | libpgm is an endeavor to make Bayesian probability graphs easy to use. The effort originates from Daphne Koller and Nir Friedman's *Probabilistic Graphical Models* (2009), which provides an in-depth study of probabilistic graphical models and their applications. 10 | 11 | Install from pypi at `http://pypi.python.org/pypi/libpgm `_ or download a tarball `here `_. 12 | 13 | Documentation 14 | ------------- 15 | 16 | The library consists of a series of importable modules, which either represent types of Bayesian graphs, contain methods to operate on them, or both. The methods' individual documentation pages are found below: 17 | 18 | .. 
toctree:: 19 | 20 | dictionary 21 | graphskeleton 22 | orderedskeleton 23 | nodedata 24 | discretebayesiannetwork 25 | hybayesiannetwork 26 | lgbayesiannetwork 27 | dyndiscbayesiannetwork 28 | tablecpdfactorization 29 | tablecpdfactor 30 | sampleaggregator 31 | pgmlearner 32 | CPDtypes 33 | 34 | 35 | Note that `numpy `_, `scipy `_, and Python 2.7 are required for this library. 36 | 37 | Capabilities 38 | ------------ 39 | 40 | Briefly, the capabilities of this library are: 41 | 42 | - Sampling 43 | - Forward sampling in a discrete-CPD Bayesian network 44 | - Forward sampling in a linear Gaussian-CPD Bayesian network 45 | - Forward sampling in a hybrid (any CPD type) Bayesian network 46 | - Forward sampling in a dynamic 2-TBN Bayesian network 47 | - Gibbs sampling in a discrete-CPD Bayesian network (given evidence) 48 | - Deterministic Inference 49 | - Compute the probability distribution over a specific node or nodes in a discrete-CPD Bayesian network (given evidence, if present) 50 | - Compute the exact probability of an outcome in a discrete-CPD Bayesian network (given evidence, if present) 51 | - Approximative Inference 52 | - Compute the approximate probability distribution by generating samples 53 | - Learning 54 | - Learn the CPDs of a discrete-CPD Bayesian network, given data and a structure 55 | - Learn the structure of a discrete Bayesian network, given only data 56 | - Learn the CPDs of a linear Gaussian Bayesian network, given data and a structure 57 | - Learn the strcutre of a linear Gaussian Bayesian network, given only data 58 | - Learn entire Bayesian networks (structures and parameters) from data 59 | 60 | Input files 61 | ----------- 62 | 63 | Because Bayesian probability graphs are large and contain a lot of data, the library works with .txt files as inputs. The formatting used is JavaScript Object Notation (JSON), with some flexibility (the :doc:`dictionary` module has the capacity to transform python-style dicts to JSON, for instance). Internally, the library stores these files as *json* objects from python's `json `_ library. For examples of the formatting, and of the particular data required for each different Bayesian network type, see the example input files below: 64 | 65 | .. toctree:: 66 | 67 | unittestdict 68 | unittestlgdict 69 | unittesthdict 70 | unittestdyndict 71 | 72 | 73 | Indices and tables 74 | ================== 75 | 76 | * :ref:`genindex` 77 | * :ref:`modindex` 78 | * :ref:`search` 79 | 80 | -------------------------------------------------------------------------------- /docs/_build/html/search.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | Search — libpgm 1.1 documentation 12 | 13 | 14 | 15 | 16 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 34 | 35 | 36 | 37 | 38 | 50 | 51 |
89 | 101 | 105 | 106 | -------------------------------------------------------------------------------- /libpgm/CPDtypes/crazy.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2012, CyberPoint International, LLC 2 | # All rights reserved. 3 | # 4 | # Redistribution and use in source and binary forms, with or without 5 | # modification, are permitted provided that the following conditions are met: 6 | # * Redistributions of source code must retain the above copyright 7 | # notice, this list of conditions and the following disclaimer. 8 | # * Redistributions in binary form must reproduce the above copyright 9 | # notice, this list of conditions and the following disclaimer in the 10 | # documentation and/or other materials provided with the distribution. 11 | # * Neither the name of the CyberPoint International, LLC nor the 12 | # names of its contributors may be used to endorse or promote products 13 | # derived from this software without specific prior written permission. 14 | # 15 | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND 16 | # ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED 17 | # WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 18 | # DISCLAIMED. IN NO EVENT SHALL CYBERPOINT INTERNATIONAL, LLC BE LIABLE FOR ANY 19 | # DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES 20 | # (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; 21 | # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND 22 | # ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 23 | # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS 24 | # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 25 | ''' 26 | This module contains tools for representing "crazy" nodes -- nodes where the method for sampling is to multiply the crazyinput by -10 or 10 and add :math:`\pi` -- as class instances with their own *choose* method to choose an outcome for themselves based on parent outcomes. 27 | 28 | The existence of this 'crazy' type is meant to indicate the true universality of 29 | the universal sampling method found in :doc:`hybayesiannetwork`. While no CPD would 30 | actually be this crazy, the libary has the setup to support any type of CPD. 31 | 32 | 33 | ''' 34 | import math 35 | import random 36 | 37 | class Crazy(): 38 | ''' 39 | This class represents a crazy node, as described above. It contains the *Vdataentry* attribute and the *choose* method. 40 | 41 | ''' 42 | def __init__(self, Vdataentry): 43 | ''' 44 | This class is constructed with the argument *Vdataentry* which must be a dict containing a dictionary entry for this particualr node. The dict must contain an entry of the following form:: 45 | 46 | "crazyinput": 47 | 48 | This ``"crazyinput"`` entry contains the number that will be used in the crazy sampling function. The *Vdataentry* attribute is set equal to this *Vdataentry* input upon instantiation. 49 | ''' 50 | self.Vdataentry = Vdataentry 51 | '''A dict containing CPD data for the node.''' 52 | 53 | def choose(self, pvalues): 54 | ''' 55 | Randomly choose state of node from probability distribution conditioned on *pvalues*. 56 | 57 | This method has two parts: (1) determining the proper probability 58 | distribution, and (2) using that probability distribution to determine 59 | an outcome. 60 | 61 | Arguments: 62 | 1. 
*pvalues* -- An array containing the assigned states of the node's parents. This must be in the same order as the parents appear in self.Vdataentry['parents']. 63 | 64 | The function takes the crazyinput, multiplies it by either 10 or -10 randomly, adds :math:`\\pi`, converts it to a string, and appends the word "bluberries!". It returns this value. 65 | 66 | ''' 67 | crazyinput = self.Vdataentry["crazyinput"] 68 | answer = "%.2f blueberries!" % (random.choice([10, -10]) * crazyinput + math.pi) 69 | return answer 70 | -------------------------------------------------------------------------------- /tests/unittestdyndict.txt: -------------------------------------------------------------------------------- 1 | { 2 | "V": ["Letter", "Grade", "Intelligence", "SAT", "Difficulty"], 3 | "E": [["Intelligence", "Grade"], 4 | ["Difficulty", "Grade"], 5 | ["Intelligence", "SAT"], 6 | ["Grade", "Letter"]], 7 | "initial_Vdata": { 8 | "Letter": { 9 | "ord": 4, 10 | "numoutcomes": 2, 11 | "vals": ["weak", "strong"], 12 | "parents": ["Grade"], 13 | "children": None, 14 | "cprob": { 15 | "['A']": [.1, .9], 16 | "['B']": [.4, .6], 17 | "['C']": [.99, .01] 18 | } 19 | }, 20 | 21 | "SAT": { 22 | "ord": 3, 23 | "numoutcomes": 2, 24 | "vals": ["lowscore", "highscore"], 25 | "parents": ["Intelligence"], 26 | "children": None, 27 | "cprob": { 28 | "['low']": [.95, .05], 29 | "['high']": [.2, .8] 30 | } 31 | }, 32 | 33 | "Grade": { 34 | "ord": 2, 35 | "numoutcomes": 3, 36 | "vals": ["A", "B", "C"], 37 | "parents": ["Difficulty", "Intelligence"], 38 | "children": ["Letter"], 39 | "cprob": { 40 | "['easy', 'low']": [.3, .4, .3], 41 | "['easy', 'high']": [.9, .08, .02], 42 | "['hard', 'low']": [.05, .25, .7], 43 | "['hard', 'high']": [.5, .3, .2] 44 | } 45 | }, 46 | 47 | "Intelligence": { 48 | "ord": 1, 49 | "numoutcomes": 2, 50 | "vals": ["low", "high"], 51 | "parents": None, 52 | "children": ["SAT", "Grade"], 53 | "cprob": [.7, .3] 54 | }, 55 | 56 | "Difficulty": { 57 | "ord": 0, 58 | "numoutcomes": 2, 59 | "vals": ["easy", "hard"], 60 | "parents": None, 61 | "children": ["Grade"], 62 | "cprob": [.6, .4] 63 | } 64 | }, 65 | "twotbn_Vdata": { 66 | "Letter": { 67 | "ord": 4, 68 | "numoutcomes": 2, 69 | "vals": ["weak", "strong"], 70 | "parents": ["past_Grade", "past_Letter", "Grade"], 71 | "children": None, 72 | "cprob": { 73 | "['A', 'weak', 'A']": [.1, .9], 74 | "['A', 'weak', 'B']": [.15, .85], 75 | "['A', 'weak', 'C']": [.05, .95], 76 | "['A', 'strong', 'A']": [.1, .9], 77 | "['A', 'strong', 'B']": [.1, .9], 78 | "['A', 'strong', 'C']": [.1, .9], 79 | "['B', 'weak', 'A']": [.47, .53], 80 | "['B', 'weak', 'B']": [.4, .6], 81 | "['B', 'weak', 'C']": [.4, .6], 82 | "['B', 'strong', 'A']": [.4, .6], 83 | "['B', 'strong', 'B']": [.41, .59], 84 | "['B', 'strong', 'C']": [.42, .58], 85 | "['C', 'weak', 'A']": [.99, .01], 86 | "['C', 'weak', 'B']": [.99, .01], 87 | "['C', 'weak', 'C']": [.99, .01], 88 | "['C', 'strong', 'A']": [.99, .01], 89 | "['C', 'strong', 'B']": [.99, .01], 90 | "['C', 'strong', 'C']": [.99, .01] 91 | } 92 | }, 93 | 94 | "SAT": { 95 | "ord": 3, 96 | "numoutcomes": 2, 97 | "vals": ["lowscore", "highscore"], 98 | "parents": ["Intelligence"], 99 | "children": None, 100 | "cprob": { 101 | "['low']": [.95, .05], 102 | "['high']": [.2, .8] 103 | } 104 | }, 105 | 106 | "Grade": { 107 | "ord": 2, 108 | "numoutcomes": 3, 109 | "vals": ["A", "B", "C"], 110 | "parents": ["Difficulty", "Intelligence"], 111 | "children": ["Letter"], 112 | "cprob": { 113 | "['easy', 'low']": [.3, .4, .3], 114 | "['easy', 
'high']": [.9, .08, .02], 115 | "['hard', 'low']": [.05, .25, .7], 116 | "['hard', 'high']": [.5, .3, .2] 117 | } 118 | }, 119 | 120 | "Intelligence": { 121 | "ord": 1, 122 | "numoutcomes": 2, 123 | "vals": ["low", "high"], 124 | "parents": ["past_Intelligence"], 125 | "children": ["SAT", "Grade"], 126 | "cprob": { 127 | "['high']": [.7, .3], 128 | "['low']": [.7, .3] 129 | } 130 | }, 131 | 132 | "Difficulty": { 133 | "ord": 0, 134 | "numoutcomes": 2, 135 | "vals": ["easy", "hard"], 136 | "parents": ["past_Difficulty"], 137 | "children": ["Grade"], 138 | "cprob": { 139 | "['easy']": [1, 0], 140 | "['hard']": [0, 1] 141 | } 142 | } 143 | } 144 | } 145 | -------------------------------------------------------------------------------- /docs/_build/html/_static/pygments.css: -------------------------------------------------------------------------------- 1 | .highlight .hll { background-color: #ffffcc } 2 | .highlight { background: #eeffcc; } 3 | .highlight .c { color: #408090; font-style: italic } /* Comment */ 4 | .highlight .err { border: 1px solid #FF0000 } /* Error */ 5 | .highlight .k { color: #007020; font-weight: bold } /* Keyword */ 6 | .highlight .o { color: #666666 } /* Operator */ 7 | .highlight .cm { color: #408090; font-style: italic } /* Comment.Multiline */ 8 | .highlight .cp { color: #007020 } /* Comment.Preproc */ 9 | .highlight .c1 { color: #408090; font-style: italic } /* Comment.Single */ 10 | .highlight .cs { color: #408090; background-color: #fff0f0 } /* Comment.Special */ 11 | .highlight .gd { color: #A00000 } /* Generic.Deleted */ 12 | .highlight .ge { font-style: italic } /* Generic.Emph */ 13 | .highlight .gr { color: #FF0000 } /* Generic.Error */ 14 | .highlight .gh { color: #000080; font-weight: bold } /* Generic.Heading */ 15 | .highlight .gi { color: #00A000 } /* Generic.Inserted */ 16 | .highlight .go { color: #303030 } /* Generic.Output */ 17 | .highlight .gp { color: #c65d09; font-weight: bold } /* Generic.Prompt */ 18 | .highlight .gs { font-weight: bold } /* Generic.Strong */ 19 | .highlight .gu { color: #800080; font-weight: bold } /* Generic.Subheading */ 20 | .highlight .gt { color: #0040D0 } /* Generic.Traceback */ 21 | .highlight .kc { color: #007020; font-weight: bold } /* Keyword.Constant */ 22 | .highlight .kd { color: #007020; font-weight: bold } /* Keyword.Declaration */ 23 | .highlight .kn { color: #007020; font-weight: bold } /* Keyword.Namespace */ 24 | .highlight .kp { color: #007020 } /* Keyword.Pseudo */ 25 | .highlight .kr { color: #007020; font-weight: bold } /* Keyword.Reserved */ 26 | .highlight .kt { color: #902000 } /* Keyword.Type */ 27 | .highlight .m { color: #208050 } /* Literal.Number */ 28 | .highlight .s { color: #4070a0 } /* Literal.String */ 29 | .highlight .na { color: #4070a0 } /* Name.Attribute */ 30 | .highlight .nb { color: #007020 } /* Name.Builtin */ 31 | .highlight .nc { color: #0e84b5; font-weight: bold } /* Name.Class */ 32 | .highlight .no { color: #60add5 } /* Name.Constant */ 33 | .highlight .nd { color: #555555; font-weight: bold } /* Name.Decorator */ 34 | .highlight .ni { color: #d55537; font-weight: bold } /* Name.Entity */ 35 | .highlight .ne { color: #007020 } /* Name.Exception */ 36 | .highlight .nf { color: #06287e } /* Name.Function */ 37 | .highlight .nl { color: #002070; font-weight: bold } /* Name.Label */ 38 | .highlight .nn { color: #0e84b5; font-weight: bold } /* Name.Namespace */ 39 | .highlight .nt { color: #062873; font-weight: bold } /* Name.Tag */ 40 | .highlight .nv { color: #bb60d5 } /* 
Name.Variable */ 41 | .highlight .ow { color: #007020; font-weight: bold } /* Operator.Word */ 42 | .highlight .w { color: #bbbbbb } /* Text.Whitespace */ 43 | .highlight .mf { color: #208050 } /* Literal.Number.Float */ 44 | .highlight .mh { color: #208050 } /* Literal.Number.Hex */ 45 | .highlight .mi { color: #208050 } /* Literal.Number.Integer */ 46 | .highlight .mo { color: #208050 } /* Literal.Number.Oct */ 47 | .highlight .sb { color: #4070a0 } /* Literal.String.Backtick */ 48 | .highlight .sc { color: #4070a0 } /* Literal.String.Char */ 49 | .highlight .sd { color: #4070a0; font-style: italic } /* Literal.String.Doc */ 50 | .highlight .s2 { color: #4070a0 } /* Literal.String.Double */ 51 | .highlight .se { color: #4070a0; font-weight: bold } /* Literal.String.Escape */ 52 | .highlight .sh { color: #4070a0 } /* Literal.String.Heredoc */ 53 | .highlight .si { color: #70a0d0; font-style: italic } /* Literal.String.Interpol */ 54 | .highlight .sx { color: #c65d09 } /* Literal.String.Other */ 55 | .highlight .sr { color: #235388 } /* Literal.String.Regex */ 56 | .highlight .s1 { color: #4070a0 } /* Literal.String.Single */ 57 | .highlight .ss { color: #517918 } /* Literal.String.Symbol */ 58 | .highlight .bp { color: #007020 } /* Name.Builtin.Pseudo */ 59 | .highlight .vc { color: #bb60d5 } /* Name.Variable.Class */ 60 | .highlight .vg { color: #bb60d5 } /* Name.Variable.Global */ 61 | .highlight .vi { color: #bb60d5 } /* Name.Variable.Instance */ 62 | .highlight .il { color: #208050 } /* Literal.Number.Integer.Long */ -------------------------------------------------------------------------------- /libpgm/dictionary.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2012, CyberPoint International, LLC 2 | # All rights reserved. 3 | # 4 | # Redistribution and use in source and binary forms, with or without 5 | # modification, are permitted provided that the following conditions are met: 6 | # * Redistributions of source code must retain the above copyright 7 | # notice, this list of conditions and the following disclaimer. 8 | # * Redistributions in binary form must reproduce the above copyright 9 | # notice, this list of conditions and the following disclaimer in the 10 | # documentation and/or other materials provided with the distribution. 11 | # * Neither the name of the CyberPoint International, LLC nor the 12 | # names of its contributors may be used to endorse or promote products 13 | # derived from this software without specific prior written permission. 14 | # 15 | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND 16 | # ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED 17 | # WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 18 | # DISCLAIMED. IN NO EVENT SHALL CYBERPOINT INTERNATIONAL, LLC BE LIABLE FOR ANY 19 | # DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES 20 | # (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; 21 | # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND 22 | # ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 23 | # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS 24 | # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 25 | ''' 26 | Nearly all of the functions of this library require key indexing, which means it deals with dictionaries internally. 
This module deals with loading dictionaries and handles automatically converting from python-style dictionaries to condensed (no excess white space) JSON-style dictionaries. 27 | 28 | ''' 29 | import sys 30 | import json 31 | import string 32 | 33 | class Dictionary(object): 34 | ''' 35 | This class represents a JSON-style, key-indexable dictionary of data. It contains the attribute *alldata* and the method *dictload*. 36 | ''' 37 | 38 | def __init__(self): 39 | self.alldata = None 40 | '''An internal representation of a key-indexable dictionary.''' 41 | 42 | def dictload(self, path): 43 | ''' 44 | Load a dictionary from a JSON-like text in a text file located at *path* into the attribute *alldata*. 45 | 46 | In order for this function to execute successfully, the text file must have the proper formatting, particularly with regard to quotation marks. See :doc:`unittestdict` for an example. Specifically, the function can get rid of excess whitespace, convert ``.x`` to ``0.x`` in decimals, and convert ``None`` to ``null``, but nothing else. 47 | 48 | Arguments: 49 | 50 | 1. *path* -- Path to the text file (e.g. "mydictionary.txt") 51 | 52 | Attributes modified: 53 | 54 | 1. *alldata* -- The entire loaded dictionary. 55 | 56 | The function also returns an error if nothing was loaded into *alldata*. 57 | 58 | ''' 59 | f = open(path, 'r') 60 | ftext = f.read() 61 | assert (ftext and isinstance(ftext, str)), "Input file is empty or could not be read." 62 | 63 | 64 | # alter for json input, if necessary 65 | loaded = False 66 | try: 67 | self.alldata = json.loads(ftext) 68 | loaded = True 69 | except ValueError: 70 | pass 71 | 72 | if not loaded: 73 | try: 74 | ftext = ftext.translate('\t\n ') 75 | ftext = ftext.replace(':', ': ') 76 | ftext = ftext.replace(',', ', ') 77 | ftext = ftext.replace('None', 'null') 78 | ftext = ftext.replace('.', '0.') 79 | self.alldata = json.loads(ftext) 80 | except ValueError: 81 | raise ValueError("Convert to JSON from input file failed. Check formatting.") 82 | f.close() 83 | 84 | assert isinstance(self.alldata, dict), "In method dictload, path did not direct to a proper text file." 85 | 86 | 87 | 88 | -------------------------------------------------------------------------------- /libpgm/CPDtypes/lg.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2012, CyberPoint International, LLC 2 | # All rights reserved. 3 | # 4 | # Redistribution and use in source and binary forms, with or without 5 | # modification, are permitted provided that the following conditions are met: 6 | # * Redistributions of source code must retain the above copyright 7 | # notice, this list of conditions and the following disclaimer. 8 | # * Redistributions in binary form must reproduce the above copyright 9 | # notice, this list of conditions and the following disclaimer in the 10 | # documentation and/or other materials provided with the distribution. 11 | # * Neither the name of the CyberPoint International, LLC nor the 12 | # names of its contributors may be used to endorse or promote products 13 | # derived from this software without specific prior written permission. 14 | # 15 | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND 16 | # ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED 17 | # WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 18 | # DISCLAIMED. 
IN NO EVENT SHALL CYBERPOINT INTERNATIONAL, LLC BE LIABLE FOR ANY 19 | # DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES 20 | # (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; 21 | # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND 22 | # ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 23 | # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS 24 | # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 25 | ''' 26 | This module contains tools for representing linear Gaussian nodes -- those with a continuous linear Gaussian distribution of outcomes and a finite number of linear Gaussian parents -- as class instances with their own *choose* method to choose an outcome for themselves based on parent outcomes. 27 | 28 | ''' 29 | import random 30 | import math 31 | 32 | class Lg(): 33 | ''' 34 | This class represents a linear Gaussian node, as described above. It contains the *Vdataentry* attribute and the *choose* method. 35 | 36 | ''' 37 | def __init__(self, Vdataentry): 38 | ''' 39 | This class is constructed with the argument *Vdataentry* which must be a dict containing a dictionary entry for this particular node. The dict must contain entries of the following form:: 40 | 41 | "mean_base": , 43 | "mean_scal": , 46 | "variance": 47 | 48 | See :doc:`lgbayesiannetwork` for an explanation of linear Gaussian sampling. 49 | 50 | The *Vdataentry* attribute is set equal to this *Vdataentry* input upon instantiation. 51 | 52 | ''' 53 | self.Vdataentry = Vdataentry 54 | '''A dict containing CPD data for the node.''' 55 | 56 | def choose(self, pvalues): 57 | ''' 58 | Randomly choose state of node from probability distribution conditioned on *pvalues*. 59 | 60 | This method has two parts: (1) determining the proper probability 61 | distribution, and (2) using that probability distribution to determine 62 | an outcome. 63 | 64 | Arguments: 65 | 1. *pvalues* -- An array containing the assigned states of the node's parents. This must be in the same order as the parents appear in ``self.Vdataentry['parents']``. 66 | 67 | The function creates a Gaussian distribution in the manner described in :doc:`lgbayesiannetwork`, and samples from that distribution, returning its outcome. 68 | 69 | ''' 70 | 71 | 72 | # calculate Bayesian parameters (mean and variance) 73 | mean = self.Vdataentry["mean_base"] 74 | if (self.Vdataentry["parents"] != None): 75 | for x in range(len(self.Vdataentry["parents"])): 76 | if (pvalues[x] != "default"): 77 | mean += pvalues[x] * self.Vdataentry["mean_scal"][x] 78 | else: 79 | print("Attempted to sample node with unassigned parents.") 80 | 81 | variance = self.Vdataentry["variance"] 82 | 83 | # draw random outcome from Gaussian 84 | # note that this built in function takes the standard deviation, not the 85 | # variance, thus requiring a square root 86 | return random.gauss(mean, math.sqrt(variance)) 87 | -------------------------------------------------------------------------------- /docs/_build/html/_modules/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | Overview: module code — libpgm 1.1 documentation 12 | 13 | 14 | 15 | 16 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 44 | 45 |
91 | 103 | 107 | 108 | -------------------------------------------------------------------------------- /libpgm/CPDtypes/discrete.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2012, CyberPoint International, LLC 2 | # All rights reserved. 3 | # 4 | # Redistribution and use in source and binary forms, with or without 5 | # modification, are permitted provided that the following conditions are met: 6 | # * Redistributions of source code must retain the above copyright 7 | # notice, this list of conditions and the following disclaimer. 8 | # * Redistributions in binary form must reproduce the above copyright 9 | # notice, this list of conditions and the following disclaimer in the 10 | # documentation and/or other materials provided with the distribution. 11 | # * Neither the name of the CyberPoint International, LLC nor the 12 | # names of its contributors may be used to endorse or promote products 13 | # derived from this software without specific prior written permission. 14 | # 15 | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND 16 | # ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED 17 | # WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 18 | # DISCLAIMED. IN NO EVENT SHALL CYBERPOINT INTERNATIONAL, LLC BE LIABLE FOR ANY 19 | # DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES 20 | # (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; 21 | # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND 22 | # ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 23 | # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS 24 | # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 25 | ''' 26 | This module contains tools for representing discrete nodes -- those with a finite number of outcomes and a finite number of possible parent values -- as class instances with their own *choose* method to choose an outcome for themselves based on parent outcomes. 27 | 28 | ''' 29 | import random 30 | 31 | class Discrete(): 32 | ''' 33 | This class represents a discrete node, as described above. It contains the *Vdataentry* attribute and the *choose* method. 34 | 35 | ''' 36 | def __init__(self, Vdataentry): 37 | ''' 38 | This class is constructed with the argument *Vdataentry* which must be a dict containing a dictionary entry for this particular node. The dict must contain an entry of the following form:: 39 | 40 | "cprob": { 41 | "['',...,'']": [, ... , ], 42 | ... 43 | "['',...,'']": [, ... , ], 44 | } 45 | 46 | Where the keys are each possible combination of parent values and the values are the probability of each of the *n* possible node outcomes, given those parent outcomes. The *Vdataentry* attribute is set equal to this *Vdataentry* input upon instantiation. 47 | 48 | ''' 49 | self.Vdataentry = Vdataentry 50 | '''A dict containing CPD data for the node.''' 51 | 52 | def choose(self, pvalues): 53 | ''' 54 | Randomly choose state of node from a probability distribution conditioned on parent values *pvalues*. 55 | 56 | This method has two parts: (1) determining the proper probability 57 | distribution, and (2) using that probability distribution to determine 58 | an outcome. 59 | 60 | Arguments: 61 | 1. *pvalues* -- An array containing the assigned states of the node's parents. 
This must be in the same order as the parents appear in ``self.Vdataentry["parents"]``. 62 | The function goes to the proper entry in *Vdataentry*, as specified by *pvalues*, and samples the node based on the distribution found there. 63 | 64 | ''' 65 | 66 | 67 | p = self.Vdataentry["parents"] 68 | if (not p): 69 | distribution = self.Vdataentry["cprob"] 70 | else: 71 | pvalues = [str(outcome[t]) for t in self.Vdataentry["parents"]] # ideally can we pull this from the skeleton so as not to store parent data at all? 72 | for pvalue in pvalues: 73 | assert pvalue != 'default', "Graph skeleton was not topologically ordered." 74 | 75 | distribution = self.Vdataentry["cprob"][str(pvalues)] 76 | 77 | # choose 78 | rand = random.random() 79 | lbound = 0 80 | ubound = 0 81 | for interval in range(int(self.Vdataentry["numoutcomes"])): 82 | ubound += distribution[interval] 83 | if (lbound <= rand and rand < ubound): 84 | rindex = interval 85 | break 86 | else: 87 | lbound = ubound 88 | 89 | return str(self.Vdataentry["vals"][rindex]) 90 | -------------------------------------------------------------------------------- /docs/_build/html/pgmlibrary.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | <no title> — pgmpy 1.1 documentation 12 | 13 | 14 | 15 | 16 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 43 | 44 |
(Sphinx-rendered API page; page navigation and markup omitted.)

A module for loading dictionaries from text files. Exported classes: Dictionary – Loads a text file into an internal python dict. Created on Jun 20, 2012, @author: ccabot.

class pgmlibrary.dictionary.Dictionary – Load a text file into an internal python dict. Public functions: dictload – Load a python dict from JSON-like text in a text file.

dictload(path) – Load a python dict from a JSON-like text in a text file. Arguments: path – Path to the text file (e.g. "mydictionary.txt"). Attributes instantiated: self.alldata – The entire loaded dictionary. Other actions: Checks that the dictionary was properly loaded.
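For reference, a minimal sketch of the documented ``dictload`` call, using the current ``libpgm.dictionary`` module path (the input file path is only illustrative)::

    from libpgm.dictionary import Dictionary

    # load JSON-like text from a file into a key-indexable dict
    d = Dictionary()
    d.dictload("../tests/unittestdict.txt")

    # the entire loaded dictionary is stored in the alldata attribute
    print d.alldata["V"]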
103 | 115 | 119 | 120 | -------------------------------------------------------------------------------- /libpgm/sampleaggregator.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2012, CyberPoint International, LLC 2 | # All rights reserved. 3 | # 4 | # Redistribution and use in source and binary forms, with or without 5 | # modification, are permitted provided that the following conditions are met: 6 | # * Redistributions of source code must retain the above copyright 7 | # notice, this list of conditions and the following disclaimer. 8 | # * Redistributions in binary form must reproduce the above copyright 9 | # notice, this list of conditions and the following disclaimer in the 10 | # documentation and/or other materials provided with the distribution. 11 | # * Neither the name of the CyberPoint International, LLC nor the 12 | # names of its contributors may be used to endorse or promote products 13 | # derived from this software without specific prior written permission. 14 | # 15 | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND 16 | # ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED 17 | # WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 18 | # DISCLAIMED. IN NO EVENT SHALL CYBERPOINT INTERNATIONAL, LLC BE LIABLE FOR ANY 19 | # DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES 20 | # (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; 21 | # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND 22 | # ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 23 | # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS 24 | # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 25 | ''' 26 | This module provides tools for collecting and managing sets of samples generated by the library's sampling functions. By averaging a series of samples, the progam can approximate a joint probability distribution without having to do the exact calculations, which may be useful in large networks. 27 | 28 | ''' 29 | 30 | 31 | class SampleAggregator(object): 32 | ''' 33 | This class is a machine for aggregating data from sample sequences. It contains the method *aggregate*. 34 | 35 | ''' 36 | def __init__(self): 37 | self.seq = None 38 | '''The sequence inputted.''' 39 | self.avg = None 40 | '''The average of all the entries in *seq*, represented as a dict where each vertex has an entry whose value is a dict of {key, value} pairs, where each key is a possible outcome of that vertex and its value is the approximate frequency.''' 41 | 42 | 43 | def aggregate(self, samplerstatement): 44 | ''' 45 | Generate a sequence of samples using *samplerstatement* and return the average of its results. 46 | 47 | Arguments: 48 | 1. *samplerstatement* -- The statement of a function (with inputs) that would output a sequence of samples. For example: ``bn.randomsample(50)`` where ``bn`` is an instance of the :doc:`DiscreteBayesianNetwork ` class. 49 | 50 | This function stores the output of *samplerstatement* in the attribute *seq*, and then averages *seq* and stores the approximate distribution found in the attribute *avg*. It then returns *avg*. 
51 | 52 | Usage example: this would print the average of 10 data points:: 53 | 54 | import json 55 | 56 | from libpgm.nodedata import NodeData 57 | from libpgm.graphskeleton import GraphSkeleton 58 | from libpgm.discretebayesiannetwork import DiscreteBayesianNetwork 59 | from libpgm.sampleaggregator import SampleAggregator 60 | 61 | # load nodedata and graphskeleton 62 | nd = NodeData() 63 | skel = GraphSkeleton() 64 | nd.load("../tests/unittestdict.txt") 65 | skel.load("../tests/unittestdict.txt") 66 | 67 | # topologically order graphskeleton 68 | skel.toporder() 69 | 70 | # load bayesian network 71 | bn = DiscreteBayesianNetwork(skel, nd) 72 | 73 | # build aggregator 74 | agg = SampleAggregator() 75 | 76 | # average samples 77 | result = agg.aggregate(bn.randomsample(10)) 78 | 79 | # output 80 | print json.dumps(result, indent=2) 81 | 82 | ''' 83 | 84 | # get sequence 85 | seq = samplerstatement 86 | 87 | # denominator 88 | denom = len(seq) 89 | 90 | output = dict() 91 | for key in seq[0].keys(): 92 | output[key] = dict() 93 | for trial in seq: 94 | keyss = list(output[key].keys()) 95 | vall = trial[key] 96 | if (keyss.count(vall) > 0): 97 | output[key][trial[key]] += 1 98 | else: 99 | output[key][trial[key]] = 1 100 | 101 | # normalize 102 | for entry in output[key].keys(): 103 | output[key][entry] = output[key][entry] / float(denom) 104 | 105 | self.seq = seq 106 | self.avg = output 107 | 108 | return output 109 | -------------------------------------------------------------------------------- /docs/_build/html/_static/default.css: -------------------------------------------------------------------------------- 1 | /* 2 | * default.css_t 3 | * ~~~~~~~~~~~~~ 4 | * 5 | * Sphinx stylesheet -- default theme. 6 | * 7 | * :copyright: Copyright 2007-2011 by the Sphinx team, see AUTHORS. 8 | * :license: BSD, see LICENSE for details. 
9 | * 10 | */ 11 | 12 | @import url("basic.css"); 13 | 14 | /* -- page layout ----------------------------------------------------------- */ 15 | 16 | body { 17 | font-family: sans-serif; 18 | font-size: 100%; 19 | background-color: #11303d; 20 | color: #000; 21 | margin: 0; 22 | padding: 0; 23 | } 24 | 25 | div.document { 26 | background-color: #1c4e63; 27 | } 28 | 29 | div.documentwrapper { 30 | float: left; 31 | width: 100%; 32 | } 33 | 34 | div.bodywrapper { 35 | margin: 0 0 0 230px; 36 | } 37 | 38 | div.body { 39 | background-color: #ffffff; 40 | color: #000000; 41 | padding: 0 20px 30px 20px; 42 | } 43 | 44 | div.footer { 45 | color: #ffffff; 46 | width: 100%; 47 | padding: 9px 0 9px 0; 48 | text-align: center; 49 | font-size: 75%; 50 | } 51 | 52 | div.footer a { 53 | color: #ffffff; 54 | text-decoration: underline; 55 | } 56 | 57 | div.related { 58 | background-color: #133f52; 59 | line-height: 30px; 60 | color: #ffffff; 61 | } 62 | 63 | div.related a { 64 | color: #ffffff; 65 | } 66 | 67 | div.sphinxsidebar { 68 | } 69 | 70 | div.sphinxsidebar h3 { 71 | font-family: 'Trebuchet MS', sans-serif; 72 | color: #ffffff; 73 | font-size: 1.4em; 74 | font-weight: normal; 75 | margin: 0; 76 | padding: 0; 77 | } 78 | 79 | div.sphinxsidebar h3 a { 80 | color: #ffffff; 81 | } 82 | 83 | div.sphinxsidebar h4 { 84 | font-family: 'Trebuchet MS', sans-serif; 85 | color: #ffffff; 86 | font-size: 1.3em; 87 | font-weight: normal; 88 | margin: 5px 0 0 0; 89 | padding: 0; 90 | } 91 | 92 | div.sphinxsidebar p { 93 | color: #ffffff; 94 | } 95 | 96 | div.sphinxsidebar p.topless { 97 | margin: 5px 10px 10px 10px; 98 | } 99 | 100 | div.sphinxsidebar ul { 101 | margin: 10px; 102 | padding: 0; 103 | color: #ffffff; 104 | } 105 | 106 | div.sphinxsidebar a { 107 | color: #98dbcc; 108 | } 109 | 110 | div.sphinxsidebar input { 111 | border: 1px solid #98dbcc; 112 | font-family: sans-serif; 113 | font-size: 1em; 114 | } 115 | 116 | 117 | 118 | /* -- hyperlink styles ------------------------------------------------------ */ 119 | 120 | a { 121 | color: #355f7c; 122 | text-decoration: none; 123 | } 124 | 125 | a:visited { 126 | color: #355f7c; 127 | text-decoration: none; 128 | } 129 | 130 | a:hover { 131 | text-decoration: underline; 132 | } 133 | 134 | 135 | 136 | /* -- body styles ----------------------------------------------------------- */ 137 | 138 | div.body h1, 139 | div.body h2, 140 | div.body h3, 141 | div.body h4, 142 | div.body h5, 143 | div.body h6 { 144 | font-family: 'Trebuchet MS', sans-serif; 145 | background-color: #f2f2f2; 146 | font-weight: normal; 147 | color: #20435c; 148 | border-bottom: 1px solid #ccc; 149 | margin: 20px -20px 10px -20px; 150 | padding: 3px 0 3px 10px; 151 | } 152 | 153 | div.body h1 { margin-top: 0; font-size: 200%; } 154 | div.body h2 { font-size: 160%; } 155 | div.body h3 { font-size: 140%; } 156 | div.body h4 { font-size: 120%; } 157 | div.body h5 { font-size: 110%; } 158 | div.body h6 { font-size: 100%; } 159 | 160 | a.headerlink { 161 | color: #c60f0f; 162 | font-size: 0.8em; 163 | padding: 0 4px 0 4px; 164 | text-decoration: none; 165 | } 166 | 167 | a.headerlink:hover { 168 | background-color: #c60f0f; 169 | color: white; 170 | } 171 | 172 | div.body p, div.body dd, div.body li { 173 | text-align: justify; 174 | line-height: 130%; 175 | } 176 | 177 | div.admonition p.admonition-title + p { 178 | display: inline; 179 | } 180 | 181 | div.admonition p { 182 | margin-bottom: 5px; 183 | } 184 | 185 | div.admonition pre { 186 | margin-bottom: 5px; 187 | } 188 | 189 | 
div.admonition ul, div.admonition ol { 190 | margin-bottom: 5px; 191 | } 192 | 193 | div.note { 194 | background-color: #eee; 195 | border: 1px solid #ccc; 196 | } 197 | 198 | div.seealso { 199 | background-color: #ffc; 200 | border: 1px solid #ff6; 201 | } 202 | 203 | div.topic { 204 | background-color: #eee; 205 | } 206 | 207 | div.warning { 208 | background-color: #ffe4e4; 209 | border: 1px solid #f66; 210 | } 211 | 212 | p.admonition-title { 213 | display: inline; 214 | } 215 | 216 | p.admonition-title:after { 217 | content: ":"; 218 | } 219 | 220 | pre { 221 | padding: 5px; 222 | background-color: #eeffcc; 223 | color: #333333; 224 | line-height: 120%; 225 | border: 1px solid #ac9; 226 | border-left: none; 227 | border-right: none; 228 | } 229 | 230 | tt { 231 | background-color: #ecf0f3; 232 | padding: 0 1px 0 1px; 233 | font-size: 0.95em; 234 | } 235 | 236 | th { 237 | background-color: #ede; 238 | } 239 | 240 | .warning tt { 241 | background: #efc2c2; 242 | } 243 | 244 | .note tt { 245 | background: #d6d6d6; 246 | } 247 | 248 | .viewcode-back { 249 | font-family: sans-serif; 250 | } 251 | 252 | div.viewcode-block:target { 253 | background-color: #f4debf; 254 | border-top: 1px solid #ac9; 255 | border-bottom: 1px solid #ac9; 256 | } -------------------------------------------------------------------------------- /docs/_build/html/_static/sidebar.js: -------------------------------------------------------------------------------- 1 | /* 2 | * sidebar.js 3 | * ~~~~~~~~~~ 4 | * 5 | * This script makes the Sphinx sidebar collapsible. 6 | * 7 | * .sphinxsidebar contains .sphinxsidebarwrapper. This script adds 8 | * in .sphixsidebar, after .sphinxsidebarwrapper, the #sidebarbutton 9 | * used to collapse and expand the sidebar. 10 | * 11 | * When the sidebar is collapsed the .sphinxsidebarwrapper is hidden 12 | * and the width of the sidebar and the margin-left of the document 13 | * are decreased. When the sidebar is expanded the opposite happens. 14 | * This script saves a per-browser/per-session cookie used to 15 | * remember the position of the sidebar among the pages. 16 | * Once the browser is closed the cookie is deleted and the position 17 | * reset to the default (expanded). 18 | * 19 | * :copyright: Copyright 2007-2011 by the Sphinx team, see AUTHORS. 20 | * :license: BSD, see LICENSE for details. 21 | * 22 | */ 23 | 24 | $(function() { 25 | // global elements used by the functions. 
26 | // the 'sidebarbutton' element is defined as global after its 27 | // creation, in the add_sidebar_button function 28 | var bodywrapper = $('.bodywrapper'); 29 | var sidebar = $('.sphinxsidebar'); 30 | var sidebarwrapper = $('.sphinxsidebarwrapper'); 31 | 32 | // for some reason, the document has no sidebar; do not run into errors 33 | if (!sidebar.length) return; 34 | 35 | // original margin-left of the bodywrapper and width of the sidebar 36 | // with the sidebar expanded 37 | var bw_margin_expanded = bodywrapper.css('margin-left'); 38 | var ssb_width_expanded = sidebar.width(); 39 | 40 | // margin-left of the bodywrapper and width of the sidebar 41 | // with the sidebar collapsed 42 | var bw_margin_collapsed = '.8em'; 43 | var ssb_width_collapsed = '.8em'; 44 | 45 | // colors used by the current theme 46 | var dark_color = $('.related').css('background-color'); 47 | var light_color = $('.document').css('background-color'); 48 | 49 | function sidebar_is_collapsed() { 50 | return sidebarwrapper.is(':not(:visible)'); 51 | } 52 | 53 | function toggle_sidebar() { 54 | if (sidebar_is_collapsed()) 55 | expand_sidebar(); 56 | else 57 | collapse_sidebar(); 58 | } 59 | 60 | function collapse_sidebar() { 61 | sidebarwrapper.hide(); 62 | sidebar.css('width', ssb_width_collapsed); 63 | bodywrapper.css('margin-left', bw_margin_collapsed); 64 | sidebarbutton.css({ 65 | 'margin-left': '0', 66 | 'height': bodywrapper.height() 67 | }); 68 | sidebarbutton.find('span').text('»'); 69 | sidebarbutton.attr('title', _('Expand sidebar')); 70 | document.cookie = 'sidebar=collapsed'; 71 | } 72 | 73 | function expand_sidebar() { 74 | bodywrapper.css('margin-left', bw_margin_expanded); 75 | sidebar.css('width', ssb_width_expanded); 76 | sidebarwrapper.show(); 77 | sidebarbutton.css({ 78 | 'margin-left': ssb_width_expanded-12, 79 | 'height': bodywrapper.height() 80 | }); 81 | sidebarbutton.find('span').text('«'); 82 | sidebarbutton.attr('title', _('Collapse sidebar')); 83 | document.cookie = 'sidebar=expanded'; 84 | } 85 | 86 | function add_sidebar_button() { 87 | sidebarwrapper.css({ 88 | 'float': 'left', 89 | 'margin-right': '0', 90 | 'width': ssb_width_expanded - 28 91 | }); 92 | // create the button 93 | sidebar.append( 94 | '
«
' 95 | ); 96 | var sidebarbutton = $('#sidebarbutton'); 97 | light_color = sidebarbutton.css('background-color'); 98 | // find the height of the viewport to center the '<<' in the page 99 | var viewport_height; 100 | if (window.innerHeight) 101 | viewport_height = window.innerHeight; 102 | else 103 | viewport_height = $(window).height(); 104 | sidebarbutton.find('span').css({ 105 | 'display': 'block', 106 | 'margin-top': (viewport_height - sidebar.position().top - 20) / 2 107 | }); 108 | 109 | sidebarbutton.click(toggle_sidebar); 110 | sidebarbutton.attr('title', _('Collapse sidebar')); 111 | sidebarbutton.css({ 112 | 'color': '#FFFFFF', 113 | 'border-left': '1px solid ' + dark_color, 114 | 'font-size': '1.2em', 115 | 'cursor': 'pointer', 116 | 'height': bodywrapper.height(), 117 | 'padding-top': '1px', 118 | 'margin-left': ssb_width_expanded - 12 119 | }); 120 | 121 | sidebarbutton.hover( 122 | function () { 123 | $(this).css('background-color', dark_color); 124 | }, 125 | function () { 126 | $(this).css('background-color', light_color); 127 | } 128 | ); 129 | } 130 | 131 | function set_position_from_cookie() { 132 | if (!document.cookie) 133 | return; 134 | var items = document.cookie.split(';'); 135 | for(var k=0; k',...,'']": { 43 | "mean_base": , 45 | "mean_scal": , 48 | "variance": 49 | } 50 | ... 51 | "['',...,'']": { 52 | "mean_base": , 54 | "mean_scal": , 57 | "variance": 58 | } 59 | } 60 | 61 | This ``"cprob"`` entry contains a linear Gaussian distribution (conditioned on the Gaussian parents) for each combination of discrete parents. The *Vdataentry* attribute is set equal to this *Vdataentry* input upon instantiation. 62 | 63 | ''' 64 | self.Vdataentry = Vdataentry 65 | '''A dict containing CPD data for the node.''' 66 | 67 | def choose(self, pvalues): 68 | ''' 69 | Randomly choose state of node from probability distribution conditioned on *pvalues*. 70 | 71 | This method has two parts: (1) determining the proper probability 72 | distribution, and (2) using that probability distribution to determine 73 | an outcome. 74 | 75 | Arguments: 76 | 1. *pvalues* -- An array containing the assigned states of the node's parents. This must be in the same order as the parents appear in ``self.Vdataentry['parents']``. 77 | 78 | The function goes to the entry of ``"cprob"`` that matches the outcomes of its discrete parents. Then, it constructs a Gaussian distribution based on its Gaussian parents and the parameters found at that entry. Last, it samples from that distribution and returns its outcome. 79 | 80 | ''' 81 | 82 | 83 | # split parents by type 84 | dispvals = [] 85 | lgpvals = [] 86 | for pval in pvalues: 87 | if (isinstance(pval, str)): 88 | dispvals.append(pval) 89 | else: 90 | lgpvals.append(pval) 91 | 92 | 93 | # Check that we have at least one discrete parent. 94 | if not dispvals: 95 | print("Did not find any discrete parent. 
Consider using an Lg node.") 96 | 97 | # find correct Gaussian 98 | lgdistribution = self.Vdataentry["hybcprob"][str(dispvals)] 99 | 100 | # calculate Bayesian parameters (mean and variance) 101 | mean = lgdistribution["mean_base"] 102 | if (self.Vdataentry["parents"] != None): 103 | for x in range(len(lgpvals)): 104 | if (lgpvals[x] != "default"): 105 | mean += lgpvals[x] * lgdistribution["mean_scal"][x] 106 | else: 107 | 108 | # temporary error check 109 | print("Attempted to sample node with unassigned parents.") 110 | 111 | variance = lgdistribution["variance"] 112 | 113 | # draw random outcome from Gaussian (I love python) 114 | return random.gauss(mean, math.sqrt(variance)) 115 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 5 | SPHINXOPTS = 6 | SPHINXBUILD = sphinx-build 7 | PAPER = 8 | BUILDDIR = _build 9 | 10 | # Internal variables. 11 | PAPEROPT_a4 = -D latex_paper_size=a4 12 | PAPEROPT_letter = -D latex_paper_size=letter 13 | ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 14 | # the i18n builder cannot share the environment and doctrees with the others 15 | I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 16 | 17 | .PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext 18 | 19 | help: 20 | @echo "Please use \`make ' where is one of" 21 | @echo " html to make standalone HTML files" 22 | @echo " dirhtml to make HTML files named index.html in directories" 23 | @echo " singlehtml to make a single large HTML file" 24 | @echo " pickle to make pickle files" 25 | @echo " json to make JSON files" 26 | @echo " htmlhelp to make HTML files and a HTML help project" 27 | @echo " qthelp to make HTML files and a qthelp project" 28 | @echo " devhelp to make HTML files and a Devhelp project" 29 | @echo " epub to make an epub" 30 | @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" 31 | @echo " latexpdf to make LaTeX files and run them through pdflatex" 32 | @echo " text to make text files" 33 | @echo " man to make manual pages" 34 | @echo " texinfo to make Texinfo files" 35 | @echo " info to make Texinfo files and run them through makeinfo" 36 | @echo " gettext to make PO message catalogs" 37 | @echo " changes to make an overview of all changed/added/deprecated items" 38 | @echo " linkcheck to check all external links for integrity" 39 | @echo " doctest to run all doctests embedded in the documentation (if enabled)" 40 | 41 | clean: 42 | -rm -rf $(BUILDDIR)/* 43 | 44 | html: 45 | $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html 46 | @echo 47 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." 48 | 49 | dirhtml: 50 | $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml 51 | @echo 52 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." 53 | 54 | singlehtml: 55 | $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml 56 | @echo 57 | @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." 58 | 59 | pickle: 60 | $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle 61 | @echo 62 | @echo "Build finished; now you can process the pickle files." 
63 | 64 | json: 65 | $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json 66 | @echo 67 | @echo "Build finished; now you can process the JSON files." 68 | 69 | htmlhelp: 70 | $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp 71 | @echo 72 | @echo "Build finished; now you can run HTML Help Workshop with the" \ 73 | ".hhp project file in $(BUILDDIR)/htmlhelp." 74 | 75 | qthelp: 76 | $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp 77 | @echo 78 | @echo "Build finished; now you can run "qcollectiongenerator" with the" \ 79 | ".qhcp project file in $(BUILDDIR)/qthelp, like this:" 80 | @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/pgmpy.qhcp" 81 | @echo "To view the help file:" 82 | @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/pgmpy.qhc" 83 | 84 | devhelp: 85 | $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp 86 | @echo 87 | @echo "Build finished." 88 | @echo "To view the help file:" 89 | @echo "# mkdir -p $$HOME/.local/share/devhelp/pgmpy" 90 | @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/pgmpy" 91 | @echo "# devhelp" 92 | 93 | epub: 94 | $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub 95 | @echo 96 | @echo "Build finished. The epub file is in $(BUILDDIR)/epub." 97 | 98 | latex: 99 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 100 | @echo 101 | @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." 102 | @echo "Run \`make' in that directory to run these through (pdf)latex" \ 103 | "(use \`make latexpdf' here to do that automatically)." 104 | 105 | latexpdf: 106 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 107 | @echo "Running LaTeX files through pdflatex..." 108 | $(MAKE) -C $(BUILDDIR)/latex all-pdf 109 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 110 | 111 | text: 112 | $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text 113 | @echo 114 | @echo "Build finished. The text files are in $(BUILDDIR)/text." 115 | 116 | man: 117 | $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man 118 | @echo 119 | @echo "Build finished. The manual pages are in $(BUILDDIR)/man." 120 | 121 | texinfo: 122 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 123 | @echo 124 | @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." 125 | @echo "Run \`make' in that directory to run these through makeinfo" \ 126 | "(use \`make info' here to do that automatically)." 127 | 128 | info: 129 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 130 | @echo "Running Texinfo files through makeinfo..." 131 | make -C $(BUILDDIR)/texinfo info 132 | @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." 133 | 134 | gettext: 135 | $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale 136 | @echo 137 | @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." 138 | 139 | changes: 140 | $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes 141 | @echo 142 | @echo "The overview file is in $(BUILDDIR)/changes." 143 | 144 | linkcheck: 145 | $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck 146 | @echo 147 | @echo "Link check complete; look for any errors in the above output " \ 148 | "or in $(BUILDDIR)/linkcheck/output.txt." 149 | 150 | doctest: 151 | $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest 152 | @echo "Testing of doctests in the sources finished, look at the " \ 153 | "results in $(BUILDDIR)/doctest/output.txt." 
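# A quick usage sketch of the targets described by `make help` above (run from the docs/ directory where this Makefile lives):
#   make html      # build the HTML docs into $(BUILDDIR)/html
#   make doctest   # run the doctests embedded in the docs; results land in $(BUILDDIR)/doctest/output.txt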
154 | -------------------------------------------------------------------------------- /utils/bntextutils.py: -------------------------------------------------------------------------------- 1 | 2 | ''' 3 | Copyright CyberPoint International LLC 4 | All rights reserved 5 | 6 | C. Cabot 7 | 09-19-13 8 | 9 | Functions to construct/modify a json-style txt file to be 10 | used as a discrete Bayesian network in libpgm. 11 | 12 | ''' 13 | 14 | import json 15 | import sys 16 | 17 | def list_edges(path): 18 | _validate(path) 19 | with open(path, "r") as f: 20 | j = json.load(f) 21 | for e in j["E"]: 22 | print("{} --> {}".format(e[0], e[1])) 23 | 24 | def list_nodes(path): 25 | _validate(path) 26 | with open(path, "r") as f: 27 | j = json.load(f) 28 | for v in j["V"]: 29 | print("{}".format(v)) 30 | 31 | def list_nodedata(path): 32 | _validate(path) 33 | with open(path, "r") as f: 34 | j = json.load(f) 35 | for v in j["Vdata"].keys(): 36 | print("{}".format(v)) 37 | 38 | def add_edge(path, edge): 39 | _validate(path) 40 | with open(path, "r") as f: 41 | j = json.load(f) 42 | assert edge[0] in j["V"] and edge[1] in j["V"], "bad edge" 43 | if edge not in j["E"]: 44 | j["E"].append(edge) 45 | with open(path, "w") as f: 46 | json.dump(j, f, indent=2) 47 | 48 | def remove_edge(path, edge): 49 | _validate(path) 50 | with open(path, "r") as f: 51 | j = json.load(f) 52 | assert edge in j["E"], "edge not present" 53 | j["E"].remove(edge) 54 | with open(path, "w") as f: 55 | json.dump(j, f, indent=2) 56 | 57 | def add_node(path, node): 58 | _validate(path) 59 | with open(path, "r") as f: 60 | j = json.load(f) 61 | if node not in j["V"]: 62 | j["V"].append(node) 63 | with open(path, "w") as f: 64 | json.dump(j, f, indent=2) 65 | 66 | def remove_node(path, node): 67 | _validate(path) 68 | with open(path, "r") as f: 69 | j = json.load(f) 70 | assert node in j["V"], "node not present" 71 | j["V"].remove(node) 72 | # delete associated Vdata and edges (iterate over a copy so removal is safe) 73 | if node in j["Vdata"]: 74 | del j["Vdata"][node] 75 | for edge in list(j["E"]): 76 | if edge[0] == node or edge[1] == node: 77 | j["E"].remove(edge) 78 | with open(path, "w") as f: 79 | json.dump(j, f, indent=2) 80 | 81 | def alter_vdata(path, node): 82 | _validate(path) 83 | with open(path, "r") as f: 84 | j = json.load(f) 85 | assert node in j["V"], "node not present" 86 | print("Current node data: ") 87 | print("------------------ ") 88 | try: 89 | print(json.dumps(j["Vdata"][node], indent=2)) 90 | except KeyError: 91 | print("[uninitialized!
you may create this node data]") 92 | print("enter new node data: ") 93 | while (1): 94 | try: 95 | minij = json.load(sys.stdin) 96 | break 97 | except: 98 | print("malformatted json, try again:") 99 | j["Vdata"][node] = minij 100 | with open(path, "w") as f: 101 | json.dump(j, f, indent=2) 102 | 103 | def refresh(path): 104 | """updates ord, numoutcomes, parents, and children in vdata""" 105 | with open(path, "r") as f: 106 | d = json.load(f) 107 | 108 | # topologically order vertices 109 | Ecopy = [x[:] for x in d["E"]] 110 | roots = [] 111 | toporder = [] 112 | 113 | for vertex in d["V"]: 114 | # find roots 115 | roots = d["V"][:] 116 | for e in Ecopy: 117 | try: 118 | roots.remove(e[1]) 119 | except: 120 | pass 121 | 122 | while roots != []: 123 | n = roots.pop() 124 | toporder.append(n) 125 | for edge in reversed(Ecopy): 126 | if edge[0] == n: 127 | m = edge[1] 128 | Ecopy.remove(edge) 129 | yesparent = False 130 | for e in Ecopy: 131 | if e[1] == m: 132 | yesparent = True 133 | break 134 | if yesparent == False: 135 | roots.append(m) 136 | assert (not Ecopy), ("Graph contains a cycle", Ecopy) 137 | d["V"] = toporder 138 | 139 | # clear attributes 140 | for entry in d["Vdata"]: 141 | d["Vdata"][entry]["parents"] = None 142 | d["Vdata"][entry]["children"] = None 143 | d["Vdata"][entry]["numoutcomes"] = len(d["Vdata"][entry]["vals"]) 144 | d["Vdata"][entry]["ord"] = d["V"].index(entry) 145 | 146 | # make parents and children 147 | for edge in d["E"]: 148 | parent = edge[0] 149 | child = edge[1] 150 | 151 | if d["Vdata"][child]["parents"] == None: 152 | d["Vdata"][child]["parents"] = [] 153 | if parent not in d["Vdata"][child]["parents"]: 154 | d["Vdata"][child]["parents"].append(parent) 155 | 156 | if d["Vdata"][parent]["children"] == None: 157 | d["Vdata"][parent]["children"] = [] 158 | if child not in d["Vdata"][parent]["children"]: 159 | d["Vdata"][parent]["children"].append(child) 160 | 161 | with open(path, "w") as f: 162 | json.dump(d, f, indent=2) 163 | 164 | def _validate(path): 165 | # is json correctly formatted? 166 | try: 167 | with open(path) as f: 168 | json.load(f) 169 | except: 170 | raise Exception("The network file you are trying to modify is invalid") 171 | 172 | # do the nodes match the node data? 173 | with open(path) as f: 174 | j = json.load(f) 175 | if not (sorted(j["V"]) == sorted(j["Vdata"].keys())): 176 | print("warning: nodes and node data do not match") 177 | 178 | # are the edges valid? 179 | for e in j["E"]: 180 | if (e[0] not in j["V"]) or (e[1] not in j["V"]): 181 | print("warning: nodes not found for this edge:") 182 | print(e) 183 | 184 | -------------------------------------------------------------------------------- /docs/unittestdyndict.rst: -------------------------------------------------------------------------------- 1 | dynamic discrete bayesian network 2 | ================================= 3 | 4 | This is an example input file for a dynamic Bayesian network with discete CPDs, i.e., a Bayesian network that changes over time wherein the Bayesian network at each time interval is influenced by the outcomes of the Bayesian network in the previous time interval. It is represented by a set of initial CPD data, ``initial_Vdata``, and dynamic CPD data, ``twotbn_Vdata``. See Koller et al. 204 for more on 2-TBN dynamic Bayesian networks. 
This example provides dynamic CPD data for the same graph skeleton as in the :doc:`discrete case `:: 5 | 6 | { 7 | "V": ["Letter", "Grade", "Intelligence", "SAT", "Difficulty"], 8 | "E": [["Intelligence", "Grade"], 9 | ["Difficulty", "Grade"], 10 | ["Intelligence", "SAT"], 11 | ["Grade", "Letter"]], 12 | "initial_Vdata": { 13 | "Letter": { 14 | "ord": 4, 15 | "numoutcomes": 2, 16 | "vals": ["weak", "strong"], 17 | "parents": ["Grade"], 18 | "children": None, 19 | "cprob": { 20 | "['A']": [.1, .9], 21 | "['B']": [.4, .6], 22 | "['C']": [.99, .01] 23 | } 24 | }, 25 | 26 | "SAT": { 27 | "ord": 3, 28 | "numoutcomes": 2, 29 | "vals": ["lowscore", "highscore"], 30 | "parents": ["Intelligence"], 31 | "children": None, 32 | "cprob": { 33 | "['low']": [.95, .05], 34 | "['high']": [.2, .8] 35 | } 36 | }, 37 | 38 | "Grade": { 39 | "ord": 2, 40 | "numoutcomes": 3, 41 | "vals": ["A", "B", "C"], 42 | "parents": ["Difficulty", "Intelligence"], 43 | "children": ["Letter"], 44 | "cprob": { 45 | "['easy', 'low']": [.3, .4, .3], 46 | "['easy', 'high']": [.9, .08, .02], 47 | "['hard', 'low']": [.05, .25, .7], 48 | "['hard', 'high']": [.5, .3, .2] 49 | } 50 | }, 51 | 52 | "Intelligence": { 53 | "ord": 1, 54 | "numoutcomes": 2, 55 | "vals": ["low", "high"], 56 | "parents": None, 57 | "children": ["SAT", "Grade"], 58 | "cprob": [.7, .3] 59 | }, 60 | 61 | "Difficulty": { 62 | "ord": 0, 63 | "numoutcomes": 2, 64 | "vals": ["easy", "hard"], 65 | "parents": None, 66 | "children": ["Grade"], 67 | "cprob": [.6, .4] 68 | } 69 | }, 70 | "twotbn_Vdata": { 71 | "Letter": { 72 | "ord": 4, 73 | "numoutcomes": 2, 74 | "vals": ["weak", "strong"], 75 | "parents": ["past_Grade", "past_Letter", "Grade"], 76 | "children": None, 77 | "cprob": { 78 | "['A', 'weak', 'A']": [.1, .9], 79 | "['A', 'weak', 'B']": [.15, .85], 80 | "['A', 'weak', 'C']": [.05, .95], 81 | "['A', 'strong', 'A']": [.1, .9], 82 | "['A', 'strong', 'B']": [.1, .9], 83 | "['A', 'strong', 'C']": [.1, .9], 84 | "['B', 'weak', 'A']": [.47, .53], 85 | "['B', 'weak', 'B']": [.4, .6], 86 | "['B', 'weak', 'C']": [.4, .6], 87 | "['B', 'strong', 'A']": [.4, .6], 88 | "['B', 'strong', 'B']": [.41, .59], 89 | "['B', 'strong', 'C']": [.42, .58], 90 | "['C', 'weak', 'A']": [.99, .01], 91 | "['C', 'weak', 'B']": [.99, .01], 92 | "['C', 'weak', 'C']": [.99, .01], 93 | "['C', 'strong', 'A']": [.99, .01], 94 | "['C', 'strong', 'B']": [.99, .01], 95 | "['C', 'strong', 'C']": [.99, .01] 96 | } 97 | }, 98 | 99 | "SAT": { 100 | "ord": 3, 101 | "numoutcomes": 2, 102 | "vals": ["lowscore", "highscore"], 103 | "parents": ["Intelligence"], 104 | "children": None, 105 | "cprob": { 106 | "['low']": [.95, .05], 107 | "['high']": [.2, .8] 108 | } 109 | }, 110 | 111 | "Grade": { 112 | "ord": 2, 113 | "numoutcomes": 3, 114 | "vals": ["A", "B", "C"], 115 | "parents": ["Difficulty", "Intelligence"], 116 | "children": ["Letter"], 117 | "cprob": { 118 | "['easy', 'low']": [.3, .4, .3], 119 | "['easy', 'high']": [.9, .08, .02], 120 | "['hard', 'low']": [.05, .25, .7], 121 | "['hard', 'high']": [.5, .3, .2] 122 | } 123 | }, 124 | 125 | "Intelligence": { 126 | "ord": 1, 127 | "numoutcomes": 2, 128 | "vals": ["low", "high"], 129 | "parents": ["past_Intelligence"], 130 | "children": ["SAT", "Grade"], 131 | "cprob": { 132 | "['high']": [.7, .3], 133 | "['low']": [.7, .3] 134 | } 135 | }, 136 | 137 | "Difficulty": { 138 | "ord": 0, 139 | "numoutcomes": 2, 140 | "vals": ["easy", "hard"], 141 | "parents": ["past_Difficulty"], 142 | "children": ["Grade"], 143 | "cprob": { 144 | "['easy']": [.9, .1], 145 | 
"['hard']": [.1, .9] 146 | } 147 | } 148 | } 149 | } 150 | -------------------------------------------------------------------------------- /docs/_build/html/_sources/unittestdyndict.txt: -------------------------------------------------------------------------------- 1 | dynamic discrete bayesian network 2 | ================================= 3 | 4 | This is an example input file for a dynamic Bayesian network with discete CPDs, i.e., a Bayesian network that changes over time wherein the Bayesian network at each time interval is influenced by the outcomes of the Bayesian network in the previous time interval. It is represented by a set of initial CPD data, ``initial_Vdata``, and dynamic CPD data, ``twotbn_Vdata``. See Koller et al. 204 for more on 2-TBN dynamic Bayesian networks. This example provides dynamic CPD data for the same graph skeleton as in the :doc:`discrete case `:: 5 | 6 | { 7 | "V": ["Letter", "Grade", "Intelligence", "SAT", "Difficulty"], 8 | "E": [["Intelligence", "Grade"], 9 | ["Difficulty", "Grade"], 10 | ["Intelligence", "SAT"], 11 | ["Grade", "Letter"]], 12 | "initial_Vdata": { 13 | "Letter": { 14 | "ord": 4, 15 | "numoutcomes": 2, 16 | "vals": ["weak", "strong"], 17 | "parents": ["Grade"], 18 | "children": None, 19 | "cprob": { 20 | "['A']": [.1, .9], 21 | "['B']": [.4, .6], 22 | "['C']": [.99, .01] 23 | } 24 | }, 25 | 26 | "SAT": { 27 | "ord": 3, 28 | "numoutcomes": 2, 29 | "vals": ["lowscore", "highscore"], 30 | "parents": ["Intelligence"], 31 | "children": None, 32 | "cprob": { 33 | "['low']": [.95, .05], 34 | "['high']": [.2, .8] 35 | } 36 | }, 37 | 38 | "Grade": { 39 | "ord": 2, 40 | "numoutcomes": 3, 41 | "vals": ["A", "B", "C"], 42 | "parents": ["Difficulty", "Intelligence"], 43 | "children": ["Letter"], 44 | "cprob": { 45 | "['easy', 'low']": [.3, .4, .3], 46 | "['easy', 'high']": [.9, .08, .02], 47 | "['hard', 'low']": [.05, .25, .7], 48 | "['hard', 'high']": [.5, .3, .2] 49 | } 50 | }, 51 | 52 | "Intelligence": { 53 | "ord": 1, 54 | "numoutcomes": 2, 55 | "vals": ["low", "high"], 56 | "parents": None, 57 | "children": ["SAT", "Grade"], 58 | "cprob": [.7, .3] 59 | }, 60 | 61 | "Difficulty": { 62 | "ord": 0, 63 | "numoutcomes": 2, 64 | "vals": ["easy", "hard"], 65 | "parents": None, 66 | "children": ["Grade"], 67 | "cprob": [.6, .4] 68 | } 69 | }, 70 | "twotbn_Vdata": { 71 | "Letter": { 72 | "ord": 4, 73 | "numoutcomes": 2, 74 | "vals": ["weak", "strong"], 75 | "parents": ["past_Grade", "past_Letter", "Grade"], 76 | "children": None, 77 | "cprob": { 78 | "['A', 'weak', 'A']": [.1, .9], 79 | "['A', 'weak', 'B']": [.15, .85], 80 | "['A', 'weak', 'C']": [.05, .95], 81 | "['A', 'strong', 'A']": [.1, .9], 82 | "['A', 'strong', 'B']": [.1, .9], 83 | "['A', 'strong', 'C']": [.1, .9], 84 | "['B', 'weak', 'A']": [.47, .53], 85 | "['B', 'weak', 'B']": [.4, .6], 86 | "['B', 'weak', 'C']": [.4, .6], 87 | "['B', 'strong', 'A']": [.4, .6], 88 | "['B', 'strong', 'B']": [.41, .59], 89 | "['B', 'strong', 'C']": [.42, .58], 90 | "['C', 'weak', 'A']": [.99, .01], 91 | "['C', 'weak', 'B']": [.99, .01], 92 | "['C', 'weak', 'C']": [.99, .01], 93 | "['C', 'strong', 'A']": [.99, .01], 94 | "['C', 'strong', 'B']": [.99, .01], 95 | "['C', 'strong', 'C']": [.99, .01] 96 | } 97 | }, 98 | 99 | "SAT": { 100 | "ord": 3, 101 | "numoutcomes": 2, 102 | "vals": ["lowscore", "highscore"], 103 | "parents": ["Intelligence"], 104 | "children": None, 105 | "cprob": { 106 | "['low']": [.95, .05], 107 | "['high']": [.2, .8] 108 | } 109 | }, 110 | 111 | "Grade": { 112 | "ord": 2, 113 | "numoutcomes": 
3, 114 | "vals": ["A", "B", "C"], 115 | "parents": ["Difficulty", "Intelligence"], 116 | "children": ["Letter"], 117 | "cprob": { 118 | "['easy', 'low']": [.3, .4, .3], 119 | "['easy', 'high']": [.9, .08, .02], 120 | "['hard', 'low']": [.05, .25, .7], 121 | "['hard', 'high']": [.5, .3, .2] 122 | } 123 | }, 124 | 125 | "Intelligence": { 126 | "ord": 1, 127 | "numoutcomes": 2, 128 | "vals": ["low", "high"], 129 | "parents": ["past_Intelligence"], 130 | "children": ["SAT", "Grade"], 131 | "cprob": { 132 | "['high']": [.7, .3], 133 | "['low']": [.7, .3] 134 | } 135 | }, 136 | 137 | "Difficulty": { 138 | "ord": 0, 139 | "numoutcomes": 2, 140 | "vals": ["easy", "hard"], 141 | "parents": ["past_Difficulty"], 142 | "children": ["Grade"], 143 | "cprob": { 144 | "['easy']": [.9, .1], 145 | "['hard']": [.1, .9] 146 | } 147 | } 148 | } 149 | } 150 | -------------------------------------------------------------------------------- /libpgm/graphskeleton.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2012, CyberPoint International, LLC 2 | # All rights reserved. 3 | # 4 | # Redistribution and use in source and binary forms, with or without 5 | # modification, are permitted provided that the following conditions are met: 6 | # * Redistributions of source code must retain the above copyright 7 | # notice, this list of conditions and the following disclaimer. 8 | # * Redistributions in binary form must reproduce the above copyright 9 | # notice, this list of conditions and the following disclaimer in the 10 | # documentation and/or other materials provided with the distribution. 11 | # * Neither the name of the CyberPoint International, LLC nor the 12 | # names of its contributors may be used to endorse or promote products 13 | # derived from this software without specific prior written permission. 14 | # 15 | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND 16 | # ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED 17 | # WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 18 | # DISCLAIMED. IN NO EVENT SHALL CYBERPOINT INTERNATIONAL, LLC BE LIABLE FOR ANY 19 | # DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES 20 | # (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; 21 | # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND 22 | # ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 23 | # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS 24 | # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 25 | ''' 26 | This module provides tools for creating and using graph skeletons for Bayesian networks. A graph skeleton in this case is a vertex set and a directed edge set, with no further information about the specific nodes. 27 | 28 | ''' 29 | from .dictionary import Dictionary 30 | 31 | import sys 32 | 33 | class GraphSkeleton(Dictionary): 34 | ''' 35 | This class represents a graph skeleton, meaning a vertex set and a directed edge set. It contains the attributes *V* and *E*, and the methods *load*, *getparents*, *getchildren*, and *toporder*. 
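A minimal usage sketch (the input path is illustrative and assumes a file like the one in :doc:`unittestdict`)::

        from libpgm.graphskeleton import GraphSkeleton

        skel = GraphSkeleton()
        skel.load("../tests/unittestdict.txt")   # a JSON-style text file with "V" and "E" keys
        skel.toporder()                          # topologically order the vertices
        print(skel.getparents("Grade"))          # parents of "Grade" in the example graph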
36 | 37 | ''' 38 | 39 | def __init__(self): 40 | self.V = None 41 | '''A list of names of vertices.''' 42 | self.E = None 43 | '''A list of [origin, destination] pairs of vertices that constitute edges.''' 44 | self.alldata = None 45 | '''(Inherited from dictionary) A variable that stores a key-indexable dictionary once it is loaded from a file.''' 46 | 47 | def load(self, path): 48 | ''' 49 | Load the graph skeleton from a text file located at *path*. 50 | 51 | Text file must be a plaintext .txt file with a JSON-style representation of a dict. Dict must contain the top-level keys "V" and "E" with the following formats:: 52 | 53 | { 54 | 'V': ['', ... , ' 5 | 6 | 7 | 8 | 9 | 10 | 11 | orderedskeleton — libpgm 1.1 documentation 12 | 13 | 14 | 15 | 16 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 52 | 53 |
[Sphinx-rendered HTML for the orderedskeleton documentation page; the extracted markup and navigation residue are omitted here. The page documents libpgm.orderedskeleton.OrderedSkeleton (attributes V and E, method load(path)) and links to the graphskeleton and nodedata pages.]
121 | 139 | 143 | 144 | -------------------------------------------------------------------------------- /libpgm/nodedata.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2012, CyberPoint International, LLC 2 | # All rights reserved. 3 | # 4 | # Redistribution and use in source and binary forms, with or without 5 | # modification, are permitted provided that the following conditions are met: 6 | # * Redistributions of source code must retain the above copyright 7 | # notice, this list of conditions and the following disclaimer. 8 | # * Redistributions in binary form must reproduce the above copyright 9 | # notice, this list of conditions and the following disclaimer in the 10 | # documentation and/or other materials provided with the distribution. 11 | # * Neither the name of the CyberPoint International, LLC nor the 12 | # names of its contributors may be used to endorse or promote products 13 | # derived from this software without specific prior written permission. 14 | # 15 | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND 16 | # ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED 17 | # WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 18 | # DISCLAIMED. IN NO EVENT SHALL CYBERPOINT INTERNATIONAL, LLC BE LIABLE FOR ANY 19 | # DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES 20 | # (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; 21 | # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND 22 | # ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 23 | # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS 24 | # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 25 | ''' 26 | A module for creating and managing node data. Node data in this library can have many types, dependent on whether the conditional probability distributions are discrete, linear Gaussian, or hybrid, and on whether the Bayesian network is static or dynamic. For example input files, see :doc:`unittestdict`, :doc:`unittesthdict`, :doc:`unittestlgdict`, and :doc:`unittestdyndict`. 27 | 28 | ''' 29 | 30 | from .dictionary import Dictionary 31 | 32 | class NodeData(Dictionary): 33 | ''' 34 | This class represents the node data for each node in a graph. If the Bayesian network is static, it contains the attribute *Vdata*. If the Bayesian network is dynamic, it contains two attributes, *initial_Vdata* and *twotbn_Vdata*. If the Bayesian network has hybrid CPDs, it contains the additional attribute *nodes*. 35 | 36 | ''' 37 | def __init__(self): 38 | self.Vdata = None 39 | '''A dictionary of node data.''' 40 | self.initial_Vdata = None 41 | '''In dynamic graphs, a dictionary containing node data for the initial time interval.''' 42 | self.twotbn_Vdata = None 43 | '''In dynamic graphs, a dictionary containing node data for every time step interval after the first one.''' 44 | self.nodes = None 45 | '''In hybrid graphs, a dictionary of {key:value} pairs linking the name of each node (the key) to a clas instance (the value) which represents the node, its data, and its sampling function.''' 46 | 47 | 48 | def load(self, path): 49 | ''' 50 | Load node data from an input file located at *path*. Input file must be a plaintext .txt file with a JSON-style representation of a dict. 
The dict must have the top-level key ``Vdata`` or two top-level keys, ``initial_Vdata`` and ``twotbn_Vdata``. For example:: 51 | 52 | { 53 | "Vdata": { 54 | "": , 55 | ... 56 | "": 57 | } 58 | } 59 | 60 | or:: 61 | 62 | { 63 | "initial_Vdata": { 64 | "": , 65 | ... 66 | "": 67 | } 68 | "twotbn_Vdata": { 69 | "": , 70 | ... 71 | "": 72 | } 73 | } 74 | 75 | The function takes the following arguments: 76 | 1. *path* -- The path to the text file that contains input data (e.g., "mydictionary.txt") 77 | 78 | In the static case, it modifies *Vdata* to hold the dictionary found at path. In the dynamic case, it modifies the *initial_Vdata* and *twotbn_Vdata* attributes to hold the dictionaries found at path. 79 | 80 | ''' 81 | self.dictload(path) 82 | 83 | # try to load both for normal and dynamic cases 84 | try: 85 | self.Vdata = self.alldata["Vdata"] 86 | except KeyError: 87 | try: 88 | self.initial_Vdata = self.alldata["initial_Vdata"] 89 | self.twotbn_Vdata = self.alldata["twotbn_Vdata"] 90 | except KeyError: 91 | print("Error: NodeData did not recognize input file format.") 92 | 93 | 94 | # free unused memory 95 | del self.alldata 96 | 97 | def entriestoinstances(self): 98 | ''' 99 | For each node, convert dictionary entry to class instance. 100 | 101 | This method is used only when dealing with Hybrid Bayesian networks as found in the :doc:`hybayesiannetwork` module. 102 | 103 | The type of the node must be located in the 'type' attribute of the node's dictionary entry. To see an example of such a dictionary, see :doc:`unittesthdict`. This type is used to instantiate a corresponding class from libpgm/CPDtypes/, and store the node's dictionary info in that class. Thus we lose none of the dictionary data, yet we gain the ability to use the instantiated class's built-in function to choose its own outcome based on the outcomes of its parents. 104 | 105 | In order for this method to be called, the self.Vdata attribute must have dictionary entries of the following form:: 106 | 107 | : { 108 | 'type': , 109 | 'parents': , 110 | 'children': , 111 | 112 | } 113 | 114 | For instance, type "discrete" requires a "cprob" entry, while type "lg" 115 | requires "mean_base", "mean_scal", and "variance" entries. 116 | 117 | The function draws on the data in the *Vdata* attribute, and instantiates the attribute *nodes*, which is a dictionary of {name: instance} pairs where 'name' is the name of the node and 'instance' is a class instance containing the node data and the proper sampling function. 118 | 119 | ''' 120 | # declare result dict 121 | rarray = dict() 122 | 123 | # transform into class instances 124 | for entry in self.Vdata.keys(): 125 | 126 | # import module containing class 127 | path = str(self.Vdata[entry]["type"]) 128 | exec("from libpgm.CPDtypes import " + path) 129 | 130 | # instantiate class 131 | exec("tmpnode = " + path + "." + str.capitalize(path) + "(self.Vdata[entry])") 132 | 133 | # append to array 134 | exec("rarray['" + str(entry) + "'] = tmpnode") 135 | 136 | self.nodes = rarray 137 | 138 | -------------------------------------------------------------------------------- /docs/_build/html/dictionary.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | dictionary — libpgm 1.1 documentation 12 | 13 | 14 | 15 | 16 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 52 | 53 |
[Sphinx-rendered HTML for the dictionary documentation page; the extracted markup and navigation residue are omitted here. The page documents libpgm.dictionary.Dictionary (attribute alldata, method dictload(path)) and links to the welcome page and the graphskeleton page.]
129 | 147 | 151 | 152 | -------------------------------------------------------------------------------- /docs/_build/html/_static/doctools.js: -------------------------------------------------------------------------------- 1 | /* 2 | * doctools.js 3 | * ~~~~~~~~~~~ 4 | * 5 | * Sphinx JavaScript utilities for all documentation. 6 | * 7 | * :copyright: Copyright 2007-2011 by the Sphinx team, see AUTHORS. 8 | * :license: BSD, see LICENSE for details. 9 | * 10 | */ 11 | 12 | /** 13 | * select a different prefix for underscore 14 | */ 15 | $u = _.noConflict(); 16 | 17 | /** 18 | * make the code below compatible with browsers without 19 | * an installed firebug like debugger 20 | if (!window.console || !console.firebug) { 21 | var names = ["log", "debug", "info", "warn", "error", "assert", "dir", 22 | "dirxml", "group", "groupEnd", "time", "timeEnd", "count", "trace", 23 | "profile", "profileEnd"]; 24 | window.console = {}; 25 | for (var i = 0; i < names.length; ++i) 26 | window.console[names[i]] = function() {}; 27 | } 28 | */ 29 | 30 | /** 31 | * small helper function to urldecode strings 32 | */ 33 | jQuery.urldecode = function(x) { 34 | return decodeURIComponent(x).replace(/\+/g, ' '); 35 | } 36 | 37 | /** 38 | * small helper function to urlencode strings 39 | */ 40 | jQuery.urlencode = encodeURIComponent; 41 | 42 | /** 43 | * This function returns the parsed url parameters of the 44 | * current request. Multiple values per key are supported, 45 | * it will always return arrays of strings for the value parts. 46 | */ 47 | jQuery.getQueryParameters = function(s) { 48 | if (typeof s == 'undefined') 49 | s = document.location.search; 50 | var parts = s.substr(s.indexOf('?') + 1).split('&'); 51 | var result = {}; 52 | for (var i = 0; i < parts.length; i++) { 53 | var tmp = parts[i].split('=', 2); 54 | var key = jQuery.urldecode(tmp[0]); 55 | var value = jQuery.urldecode(tmp[1]); 56 | if (key in result) 57 | result[key].push(value); 58 | else 59 | result[key] = [value]; 60 | } 61 | return result; 62 | }; 63 | 64 | /** 65 | * small function to check if an array contains 66 | * a given item. 67 | */ 68 | jQuery.contains = function(arr, item) { 69 | for (var i = 0; i < arr.length; i++) { 70 | if (arr[i] == item) 71 | return true; 72 | } 73 | return false; 74 | }; 75 | 76 | /** 77 | * highlight a given string on a jquery object by wrapping it in 78 | * span elements with the given class name. 79 | */ 80 | jQuery.fn.highlightText = function(text, className) { 81 | function highlight(node) { 82 | if (node.nodeType == 3) { 83 | var val = node.nodeValue; 84 | var pos = val.toLowerCase().indexOf(text); 85 | if (pos >= 0 && !jQuery(node.parentNode).hasClass(className)) { 86 | var span = document.createElement("span"); 87 | span.className = className; 88 | span.appendChild(document.createTextNode(val.substr(pos, text.length))); 89 | node.parentNode.insertBefore(span, node.parentNode.insertBefore( 90 | document.createTextNode(val.substr(pos + text.length)), 91 | node.nextSibling)); 92 | node.nodeValue = val.substr(0, pos); 93 | } 94 | } 95 | else if (!jQuery(node).is("button, select, textarea")) { 96 | jQuery.each(node.childNodes, function() { 97 | highlight(this); 98 | }); 99 | } 100 | } 101 | return this.each(function() { 102 | highlight(this); 103 | }); 104 | }; 105 | 106 | /** 107 | * Small JavaScript module for the documentation. 
108 | */ 109 | var Documentation = { 110 | 111 | init : function() { 112 | this.fixFirefoxAnchorBug(); 113 | this.highlightSearchWords(); 114 | this.initIndexTable(); 115 | }, 116 | 117 | /** 118 | * i18n support 119 | */ 120 | TRANSLATIONS : {}, 121 | PLURAL_EXPR : function(n) { return n == 1 ? 0 : 1; }, 122 | LOCALE : 'unknown', 123 | 124 | // gettext and ngettext don't access this so that the functions 125 | // can safely bound to a different name (_ = Documentation.gettext) 126 | gettext : function(string) { 127 | var translated = Documentation.TRANSLATIONS[string]; 128 | if (typeof translated == 'undefined') 129 | return string; 130 | return (typeof translated == 'string') ? translated : translated[0]; 131 | }, 132 | 133 | ngettext : function(singular, plural, n) { 134 | var translated = Documentation.TRANSLATIONS[singular]; 135 | if (typeof translated == 'undefined') 136 | return (n == 1) ? singular : plural; 137 | return translated[Documentation.PLURALEXPR(n)]; 138 | }, 139 | 140 | addTranslations : function(catalog) { 141 | for (var key in catalog.messages) 142 | this.TRANSLATIONS[key] = catalog.messages[key]; 143 | this.PLURAL_EXPR = new Function('n', 'return +(' + catalog.plural_expr + ')'); 144 | this.LOCALE = catalog.locale; 145 | }, 146 | 147 | /** 148 | * add context elements like header anchor links 149 | */ 150 | addContextElements : function() { 151 | $('div[id] > :header:first').each(function() { 152 | $('\u00B6'). 153 | attr('href', '#' + this.id). 154 | attr('title', _('Permalink to this headline')). 155 | appendTo(this); 156 | }); 157 | $('dt[id]').each(function() { 158 | $('\u00B6'). 159 | attr('href', '#' + this.id). 160 | attr('title', _('Permalink to this definition')). 161 | appendTo(this); 162 | }); 163 | }, 164 | 165 | /** 166 | * workaround a firefox stupidity 167 | */ 168 | fixFirefoxAnchorBug : function() { 169 | if (document.location.hash && $.browser.mozilla) 170 | window.setTimeout(function() { 171 | document.location.href += ''; 172 | }, 10); 173 | }, 174 | 175 | /** 176 | * highlight the search words provided in the url in the text 177 | */ 178 | highlightSearchWords : function() { 179 | var params = $.getQueryParameters(); 180 | var terms = (params.highlight) ? 
params.highlight[0].split(/\s+/) : []; 181 | if (terms.length) { 182 | var body = $('div.body'); 183 | window.setTimeout(function() { 184 | $.each(terms, function() { 185 | body.highlightText(this.toLowerCase(), 'highlighted'); 186 | }); 187 | }, 10); 188 | $('') 190 | .appendTo($('#searchbox')); 191 | } 192 | }, 193 | 194 | /** 195 | * init the domain index toggle buttons 196 | */ 197 | initIndexTable : function() { 198 | var togglers = $('img.toggler').click(function() { 199 | var src = $(this).attr('src'); 200 | var idnum = $(this).attr('id').substr(7); 201 | $('tr.cg-' + idnum).toggle(); 202 | if (src.substr(-9) == 'minus.png') 203 | $(this).attr('src', src.substr(0, src.length-9) + 'plus.png'); 204 | else 205 | $(this).attr('src', src.substr(0, src.length-8) + 'minus.png'); 206 | }).css('display', ''); 207 | if (DOCUMENTATION_OPTIONS.COLLAPSE_INDEX) { 208 | togglers.click(); 209 | } 210 | }, 211 | 212 | /** 213 | * helper function to hide the search marks again 214 | */ 215 | hideSearchWords : function() { 216 | $('#searchbox .highlight-link').fadeOut(300); 217 | $('span.highlighted').removeClass('highlighted'); 218 | }, 219 | 220 | /** 221 | * make the url absolute 222 | */ 223 | makeURL : function(relativeURL) { 224 | return DOCUMENTATION_OPTIONS.URL_ROOT + '/' + relativeURL; 225 | }, 226 | 227 | /** 228 | * get the current relative url 229 | */ 230 | getCurrentURL : function() { 231 | var path = document.location.pathname; 232 | var parts = path.split(/\//); 233 | $.each(DOCUMENTATION_OPTIONS.URL_ROOT.split(/\//), function() { 234 | if (this == '..') 235 | parts.pop(); 236 | }); 237 | var url = parts.join('/'); 238 | return path.substring(url.lastIndexOf('/') + 1, path.length - 1); 239 | } 240 | }; 241 | 242 | // quick alias for translations 243 | _ = Documentation.gettext; 244 | 245 | $(document).ready(function() { 246 | Documentation.init(); 247 | }); 248 | -------------------------------------------------------------------------------- /docs/_build/html/py-modindex.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | Python Module Index — libpgm 1.1 documentation 12 | 13 | 14 | 15 | 16 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 47 | 48 |
[Sphinx-rendered Python Module Index page; the extracted markup is omitted here. It indexes the libpgm package and its modules: libpgm.CPDtypes.crazy, libpgm.CPDtypes.discrete, libpgm.CPDtypes.lg, libpgm.CPDtypes.lgandd, libpgm.dictionary, libpgm.discretebayesiannetwork, libpgm.dyndiscbayesiannetwork, libpgm.graphskeleton, libpgm.hybayesiannetwork, libpgm.lgbayesiannetwork, libpgm.nodedata, libpgm.orderedskeleton, libpgm.pgmlearner, libpgm.sampleaggregator, libpgm.tablecpdfactor, libpgm.tablecpdfactorization.]
175 | 187 | 191 | 192 | -------------------------------------------------------------------------------- /docs/_build/html/_static/underscore.js: -------------------------------------------------------------------------------- 1 | // Underscore.js 0.5.5 2 | // (c) 2009 Jeremy Ashkenas, DocumentCloud Inc. 3 | // Underscore is freely distributable under the terms of the MIT license. 4 | // Portions of Underscore are inspired by or borrowed from Prototype.js, 5 | // Oliver Steele's Functional, and John Resig's Micro-Templating. 6 | // For all details and documentation: 7 | // http://documentcloud.github.com/underscore/ 8 | (function(){var j=this,n=j._,i=function(a){this._wrapped=a},m=typeof StopIteration!=="undefined"?StopIteration:"__break__",b=j._=function(a){return new i(a)};if(typeof exports!=="undefined")exports._=b;var k=Array.prototype.slice,o=Array.prototype.unshift,p=Object.prototype.toString,q=Object.prototype.hasOwnProperty,r=Object.prototype.propertyIsEnumerable;b.VERSION="0.5.5";b.each=function(a,c,d){try{if(a.forEach)a.forEach(c,d);else if(b.isArray(a)||b.isArguments(a))for(var e=0,f=a.length;e=e.computed&&(e={value:f,computed:g})});return e.value};b.min=function(a,c,d){if(!c&&b.isArray(a))return Math.min.apply(Math,a);var e={computed:Infinity};b.each(a,function(f,g,h){g=c?c.call(d,f,g,h):f;gf?1:0}),"value")};b.sortedIndex=function(a,c,d){d=d||b.identity;for(var e=0,f=a.length;e>1;d(a[g])=0})})};b.zip=function(){for(var a=b.toArray(arguments),c=b.max(b.pluck(a,"length")),d=new Array(c),e=0;e0?f-c:c-f)>=0)return e;e[g++]=f}};b.bind=function(a,c){var d=b.rest(arguments,2);return function(){return a.apply(c||j,d.concat(b.toArray(arguments)))}};b.bindAll=function(a){var c=b.rest(arguments);if(c.length==0)c=b.functions(a);b.each(c,function(d){a[d]=b.bind(a[d],a)}); 17 | return a};b.delay=function(a,c){var d=b.rest(arguments,2);return setTimeout(function(){return a.apply(a,d)},c)};b.defer=function(a){return b.delay.apply(b,[a,1].concat(b.rest(arguments)))};b.wrap=function(a,c){return function(){var d=[a].concat(b.toArray(arguments));return c.apply(c,d)}};b.compose=function(){var a=b.toArray(arguments);return function(){for(var c=b.toArray(arguments),d=a.length-1;d>=0;d--)c=[a[d].apply(this,c)];return c[0]}};b.keys=function(a){if(b.isArray(a))return b.range(0,a.length); 18 | var c=[];for(var d in a)q.call(a,d)&&c.push(d);return c};b.values=function(a){return b.map(a,b.identity)};b.functions=function(a){return b.select(b.keys(a),function(c){return b.isFunction(a[c])}).sort()};b.extend=function(a,c){for(var d in c)a[d]=c[d];return a};b.clone=function(a){if(b.isArray(a))return a.slice(0);return b.extend({},a)};b.tap=function(a,c){c(a);return a};b.isEqual=function(a,c){if(a===c)return true;var d=typeof a;if(d!=typeof c)return false;if(a==c)return true;if(!a&&c||a&&!c)return false; 19 | if(a.isEqual)return a.isEqual(c);if(b.isDate(a)&&b.isDate(c))return a.getTime()===c.getTime();if(b.isNaN(a)&&b.isNaN(c))return true;if(b.isRegExp(a)&&b.isRegExp(c))return a.source===c.source&&a.global===c.global&&a.ignoreCase===c.ignoreCase&&a.multiline===c.multiline;if(d!=="object")return false;if(a.length&&a.length!==c.length)return false;d=b.keys(a);var e=b.keys(c);if(d.length!=e.length)return false;for(var f in a)if(!b.isEqual(a[f],c[f]))return false;return true};b.isEmpty=function(a){return b.keys(a).length== 20 | 0};b.isElement=function(a){return!!(a&&a.nodeType==1)};b.isArray=function(a){return!!(a&&a.concat&&a.unshift)};b.isArguments=function(a){return 
a&&b.isNumber(a.length)&&!b.isArray(a)&&!r.call(a,"length")};b.isFunction=function(a){return!!(a&&a.constructor&&a.call&&a.apply)};b.isString=function(a){return!!(a===""||a&&a.charCodeAt&&a.substr)};b.isNumber=function(a){return p.call(a)==="[object Number]"};b.isDate=function(a){return!!(a&&a.getTimezoneOffset&&a.setUTCFullYear)};b.isRegExp=function(a){return!!(a&& 21 | a.test&&a.exec&&(a.ignoreCase||a.ignoreCase===false))};b.isNaN=function(a){return b.isNumber(a)&&isNaN(a)};b.isNull=function(a){return a===null};b.isUndefined=function(a){return typeof a=="undefined"};b.noConflict=function(){j._=n;return this};b.identity=function(a){return a};b.breakLoop=function(){throw m;};var s=0;b.uniqueId=function(a){var c=s++;return a?a+c:c};b.template=function(a,c){a=new Function("obj","var p=[],print=function(){p.push.apply(p,arguments);};with(obj){p.push('"+a.replace(/[\r\t\n]/g, 22 | " ").replace(/'(?=[^%]*%>)/g,"\t").split("'").join("\\'").split("\t").join("'").replace(/<%=(.+?)%>/g,"',$1,'").split("<%").join("');").split("%>").join("p.push('")+"');}return p.join('');");return c?a(c):a};b.forEach=b.each;b.foldl=b.inject=b.reduce;b.foldr=b.reduceRight;b.filter=b.select;b.every=b.all;b.some=b.any;b.head=b.first;b.tail=b.rest;b.methods=b.functions;var l=function(a,c){return c?b(a).chain():a};b.each(b.functions(b),function(a){var c=b[a];i.prototype[a]=function(){var d=b.toArray(arguments); 23 | o.call(d,this._wrapped);return l(c.apply(b,d),this._chain)}});b.each(["pop","push","reverse","shift","sort","splice","unshift"],function(a){var c=Array.prototype[a];i.prototype[a]=function(){c.apply(this._wrapped,arguments);return l(this._wrapped,this._chain)}});b.each(["concat","join","slice"],function(a){var c=Array.prototype[a];i.prototype[a]=function(){return l(c.apply(this._wrapped,arguments),this._chain)}});i.prototype.chain=function(){this._chain=true;return this};i.prototype.value=function(){return this._wrapped}})(); 24 | -------------------------------------------------------------------------------- /libpgm/hybayesiannetwork.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2012, CyberPoint International, LLC 2 | # All rights reserved. 3 | # 4 | # Redistribution and use in source and binary forms, with or without 5 | # modification, are permitted provided that the following conditions are met: 6 | # * Redistributions of source code must retain the above copyright 7 | # notice, this list of conditions and the following disclaimer. 8 | # * Redistributions in binary form must reproduce the above copyright 9 | # notice, this list of conditions and the following disclaimer in the 10 | # documentation and/or other materials provided with the distribution. 11 | # * Neither the name of the CyberPoint International, LLC nor the 12 | # names of its contributors may be used to endorse or promote products 13 | # derived from this software without specific prior written permission. 14 | # 15 | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND 16 | # ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED 17 | # WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 18 | # DISCLAIMED. 
IN NO EVENT SHALL CYBERPOINT INTERNATIONAL, LLC BE LIABLE FOR ANY 19 | # DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES 20 | # (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; 21 | # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND 22 | # ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 23 | # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS 24 | # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 25 | ''' 26 | This module provides tools to represent and handle Bayesian networks with conditional probability distributions that can be specified node-by-node. 27 | 28 | This method allows for the construction of a Bayesian network with every combination of every type of CPD, provided that the user provides a method for sampling each type of node and stores this method in the proper place, namely as the ``choose()`` method of a class in ``libpgm.CPDtypes/``. 29 | 30 | ''' 31 | 32 | import random 33 | import sys 34 | 35 | from .orderedskeleton import OrderedSkeleton 36 | 37 | class HyBayesianNetwork(OrderedSkeleton): 38 | ''' 39 | This class represents a Bayesian network with CPDs of any type. The nodes of the Bayesian network are stored first in a dictionary, specifying their "type", which should be descriptive ('discrete', 'lg', etc.). Furthermore, the types of each node associate them with a class found in ``libpgm/CPDtypes/``. The nodes are then stored also as instances of classes found in this directory. The purpose of this is that each node has its own method for being sampled given the outcomes of its parents. 40 | 41 | ''' 42 | 43 | def __init__(self, orderedskeleton=None, nodedata=None): 44 | ''' 45 | This class can be called either with or without arguments. If it is called without arguments, none of its attributes are instantiated and it is left to the user to instantiate them manually. If it is called with arguments, the attributes will be loaded directly from the inputs. The arguments must be (in order): 46 | 47 | 1. *orderedskeleton* -- An instance of the :doc:`OrderedSkeleton ` or :doc:`GraphSkeleton ` (as long as it's ordered) class. 48 | 2. *nodedata* -- An instance of the :doc:`NodeData ` class. 49 | 50 | It is required that the *nodedata* class instance inputted has its *nodes* attribute instantiated. In order for this to be the case, the instance must have run its *entriestoinstances* method. 51 | 52 | If the arguments above are present, all attributes of the class (*V*, *E*, *Vdata*, and *nodes*) will be automatically copied from the graph skeleton and node data inputs. 53 | 54 | Upon loading, the class will also check that the keys of *Vdata* correspond to the vertices in *V*. 

        '''
        if (orderedskeleton != None and nodedata != None):
            try:
                self.V = orderedskeleton.V
                '''A list of the names of the vertices.'''
                self.E = orderedskeleton.E
                '''A list of [origin, destination] pairs of vertices that make edges.'''
                self.Vdata = nodedata.Vdata
                '''A dictionary containing CPD data for the nodes.'''

                # specific to hybrid Bayesian network
                self.nodes = nodedata.nodes
                '''A dictionary of {key: value} pairs linking the node name (the key) to a class instance (the value) representing the node, its node data, and its sampling function.'''
            except:
                raise Exception("Inputs were malformed; first arg must contain V and E attributes and second arg must contain Vdata and nodes attributes.")

            # check that inputs match up
            assert sorted(self.V) == sorted(self.Vdata.keys()), "Node data did not match graph skeleton nodes."

    def randomsample(self, n, evidence=None):
        '''
        Produce *n* random samples from the Bayesian network, subject to *evidence*, and return them in a list. This function requires the *nodes* attribute to be instantiated.

        This function takes the following arguments:

            1. *n* -- The number of random samples to produce.
            2. *evidence* -- (Optional) A dict containing (vertex: value) pairs that describe the evidence. Use with care: it manually overrides the evidence nodes with the given values rather than conditioning the joint probability distribution of the entire graph.

        And returns:
            A list of *n* independent random samples, each element of which is a dict containing (vertex: value) pairs.

        Usage example: this would generate a sequence of 10 random samples::

            import json

            from libpgm.nodedata import NodeData
            from libpgm.graphskeleton import GraphSkeleton
            from libpgm.hybayesiannetwork import HyBayesianNetwork

            # load nodedata and graphskeleton
            nd = NodeData()
            skel = GraphSkeleton()
            nd.load("../tests/unittesthdict.txt")    # an input file
            skel.load("../tests/unittestdict.txt")

            # topologically order graphskeleton
            skel.toporder()

            # convert nodes to class instances
            nd.entriestoinstances()

            # load bayesian network
            hybn = HyBayesianNetwork(skel, nd)

            # sample
            result = hybn.randomsample(10)

            # output
            print(json.dumps(result, indent=2))

        '''
        assert (isinstance(n, int) and n > 0), "Argument must be a positive integer."

        seq = []
        for _ in range(n):
            outcome = dict()
            for vertex in self.V:
                outcome[vertex] = "default"

            def assignnode(name, node):

                # check if node is already observed
                if (evidence != None):
                    if name in evidence.keys():
                        return evidence[name]

                # get parent values
                p = self.getparents(name)
                if (p == []):
                    pvalues = []
                else:
                    pvalues = [outcome[t] for t in self.Vdata[name]["parents"]]  # ideally we could pull this from the skeleton so as not to store parent data at all
                    for pvalue in pvalues:
                        assert pvalue != 'default', "Graph skeleton was not topologically ordered."
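
                # Note: node.choose() below is supplied by the CPD-type class stored
                # in self.nodes[name]. As described in the module docstring, each
                # class under libpgm/CPDtypes/ is expected to expose a choose()
                # method that draws one outcome for the node given its parents'
                # values. A hypothetical minimal sketch of such a class (the names
                # here are illustrative, not an actual libpgm class) might be:
                #
                #     class MyCPDType():
                #         def __init__(self, Vdataentry):
                #             self.Vdataentry = Vdataentry  # this node's Vdata entry
                #
                #         def choose(self, pvalues):
                #             # return one sampled value for this node, conditioned
                #             # on the parent outcomes in pvalues
                #             ...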

                # use the node's sampling method to determine the outcome
                return node.choose(pvalues)

            for s in self.V:
                if (outcome[s] == "default"):
                    outcome[s] = assignnode(s, self.nodes[s])

            seq.append(outcome)
        return seq

--------------------------------------------------------------------------------