├── pyBreakDown
│   ├── __init__.py
│   ├── explainer.py
│   └── explanation.py
├── misc
│   ├── output_22_0.png
│   ├── output_23_0.png
│   └── output_24_0.png
├── docs
│   ├── source
│   │   ├── .conf.py.swp
│   │   ├── images
│   │   │   ├── output_22_0.png
│   │   │   ├── output_23_0.png
│   │   │   └── output_24_0.png
│   │   ├── pybreakdown.rst
│   │   ├── index.rst
│   │   ├── conf.py
│   │   ├── usermanual.rst
│   │   └── comparison.rst
│   └── Makefile
├── requirements.txt
├── setup.py
├── .gitignore
├── README.md
└── LICENSE
/pyBreakDown/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/misc/output_22_0.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/MI2DataLab/pyBreakDown/HEAD/misc/output_22_0.png
--------------------------------------------------------------------------------
/misc/output_23_0.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/MI2DataLab/pyBreakDown/HEAD/misc/output_23_0.png
--------------------------------------------------------------------------------
/misc/output_24_0.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/MI2DataLab/pyBreakDown/HEAD/misc/output_24_0.png
--------------------------------------------------------------------------------
/docs/source/.conf.py.swp:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/MI2DataLab/pyBreakDown/HEAD/docs/source/.conf.py.swp
--------------------------------------------------------------------------------
/docs/source/images/output_22_0.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/MI2DataLab/pyBreakDown/HEAD/docs/source/images/output_22_0.png
--------------------------------------------------------------------------------
/docs/source/images/output_23_0.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/MI2DataLab/pyBreakDown/HEAD/docs/source/images/output_23_0.png
--------------------------------------------------------------------------------
/docs/source/images/output_24_0.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/MI2DataLab/pyBreakDown/HEAD/docs/source/images/output_24_0.png
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | numpy==1.14.2
2 | scikit-learn==0.19.1
3 | scipy==1.0.0
4 | blist==1.3.6
5 | sphinx-bootstrap-theme==0.6.5
6 | matplotlib==2.1.2
7 | recordclass==0.5
8 |
--------------------------------------------------------------------------------
/docs/source/pybreakdown.rst:
--------------------------------------------------------------------------------
1 | ====
2 | Code
3 | ====
4 | ----------
5 | Submodules
6 | ----------
7 | pyBreakDown.explanation module
8 | ==============================
9 |
10 | .. automodule:: pyBreakDown.explanation
11 | :members:
12 |
13 | pyBreakDown.explainer module
14 | ============================
15 |
16 | .. automodule:: pyBreakDown.explainer
17 | :members:
18 |
--------------------------------------------------------------------------------
/docs/source/index.rst:
--------------------------------------------------------------------------------
1 | .. pyBreakDown documentation master file, created by
2 | sphinx-quickstart on Tue Apr 24 20:26:05 2018.
3 | You can adapt this file completely to your liking, but it should at least
4 | contain the root `toctree` directive.
5 |
6 | pyBreakDown
7 | =======================================
8 |
9 | .. toctree::
10 | :maxdepth: 2
11 |
12 | usermanual
13 | pybreakdown
14 | comparison
15 |
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | from setuptools import setup, find_packages
2 |
3 | setup(name='pyBreakDown',
4 | version='0.0.1',
5 | description='breakDown python implementation',
6 | url='http://github.com/bondyra/pyBreakDown',
7 | author='Jakub Bondyra',
8 | author_email='jb10193@gmail.com',
9 | license='GPL-2',
10 | packages= find_packages(exclude=['tests']),
11 | install_requires=[
12 | 'numpy==1.14.2',
13 | 'scikit-learn==0.19.1',
14 | 'scipy==1.0.0',
15 | 'blist==1.3.6',
16 | 'sphinx-bootstrap-theme==0.6.5',
17 | 'matplotlib==2.1.2',
18 | 'recordclass==0.5'])
19 |
20 |
--------------------------------------------------------------------------------
/docs/Makefile:
--------------------------------------------------------------------------------
1 | # Minimal makefile for Sphinx documentation
2 | #
3 |
4 | # You can set these variables from the command line.
5 | SPHINXOPTS =
6 | SPHINXBUILD = sphinx-build
7 | SPHINXPROJ = pyBreakDown
8 | SOURCEDIR = source
9 | BUILDDIR = build
10 |
11 | # Put it first so that "make" without argument is like "make help".
12 | help:
13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
14 |
15 | .PHONY: help Makefile
16 |
17 | # Catch-all target: route all unknown targets to Sphinx using the new
18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
19 | %: Makefile
20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | .Python
11 | env/
12 | build/
13 | develop-eggs/
14 | dist/
15 | downloads/
16 | eggs/
17 | .eggs/
18 | lib/
19 | lib64/
20 | parts/
21 | sdist/
22 | var/
23 | wheels/
24 | *.egg-info/
25 | .installed.cfg
26 | *.egg
27 |
28 | # PyInstaller
29 | # Usually these files are written by a python script from a template
30 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
31 | *.manifest
32 | *.spec
33 |
34 | # Installer logs
35 | pip-log.txt
36 | pip-delete-this-directory.txt
37 |
38 | # Unit test / coverage reports
39 | htmlcov/
40 | .tox/
41 | .coverage
42 | .coverage.*
43 | .cache
44 | nosetests.xml
45 | coverage.xml
46 | *.cover
47 | .hypothesis/
48 |
49 | # Translations
50 | *.mo
51 | *.pot
52 |
53 | # Django stuff:
54 | *.log
55 | local_settings.py
56 |
57 | # Flask stuff:
58 | instance/
59 | .webassets-cache
60 |
61 | # Scrapy stuff:
62 | .scrapy
63 |
64 | # Sphinx documentation
65 | docs/_build/
66 |
67 | # PyBuilder
68 | target/
69 |
70 | # Jupyter Notebook
71 | .ipynb_checkpoints
72 |
73 | # pyenv
74 | .python-version
75 |
76 | # celery beat schedule file
77 | celerybeat-schedule
78 |
79 | # SageMath parsed files
80 | *.sage.py
81 |
82 | # dotenv
83 | .env
84 |
85 | # virtualenv
86 | .venv
87 | venv/
88 | ENV/
89 |
90 | # Spyder project settings
91 | .spyderproject
92 | .spyproject
93 |
94 | # Rope project settings
95 | .ropeproject
96 |
97 | # mkdocs documentation
98 | /site
99 |
100 | # mypy
101 | .mypy_cache/
102 |
103 | .vscode/
104 |
--------------------------------------------------------------------------------
/docs/source/conf.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | #
3 | # Configuration file for the Sphinx documentation builder.
4 | #
5 | # This file does only contain a selection of the most common options. For a
6 | # full list see the documentation:
7 | # http://www.sphinx-doc.org/en/master/config
8 |
9 | # -- Path setup --------------------------------------------------------------
10 |
11 | # If extensions (or modules to document with autodoc) are in another directory,
12 | # add these directories to sys.path here. If the directory is relative to the
13 | # documentation root, use os.path.abspath to make it absolute, like shown here.
14 | #
15 | import os
16 | import sys
17 | import sphinx_bootstrap_theme
18 | sys.path.insert(0, os.path.abspath('../..'))
19 |
20 |
21 | # -- Project information -----------------------------------------------------
22 |
23 | project = 'pyBreakDown'
24 | copyright = '2018, Jakub Bondyra'
25 | author = 'Jakub Bondyra'
26 |
27 | # The short X.Y version
28 | version = ''
29 | # The full version, including alpha/beta/rc tags
30 | release = '0.1'
31 |
32 |
33 | # -- General configuration ---------------------------------------------------
34 |
35 | # If your documentation needs a minimal Sphinx version, state it here.
36 | #
37 | # needs_sphinx = '1.0'
38 |
39 | # Add any Sphinx extension module names here, as strings. They can be
40 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
41 | # ones.
42 | extensions = [
43 | 'sphinx.ext.viewcode','sphinx.ext.autodoc'
44 | ]
45 |
46 | # Add any paths that contain templates here, relative to this directory.
47 | templates_path = ['_templates']
48 |
49 | # The suffix(es) of source filenames.
50 | # You can specify multiple suffix as a list of string:
51 | #
52 | # source_suffix = ['.rst', '.md']
53 | source_suffix = '.rst'
54 |
55 | # The master toctree document.
56 | master_doc = 'index'
57 |
58 | # The language for content autogenerated by Sphinx. Refer to documentation
59 | # for a list of supported languages.
60 | #
61 | # This is also used if you do content translation via gettext catalogs.
62 | # Usually you set "language" from the command line for these cases.
63 | language = None
64 |
65 | # List of patterns, relative to source directory, that match files and
66 | # directories to ignore when looking for source files.
67 | # This pattern also affects html_static_path and html_extra_path .
68 | exclude_patterns = []
69 |
70 | # The name of the Pygments (syntax highlighting) style to use.
71 | pygments_style = 'sphinx'
72 |
73 |
74 |
75 | html_static_path = ['_static']
76 | # Activate the theme.
77 | html_theme = 'bootstrap'
78 | html_theme_path = sphinx_bootstrap_theme.get_html_theme_path()
79 |
80 | # Custom sidebar templates, must be a dictionary that maps document names
81 | # to template names.
82 | #
83 | # The default sidebars (for documents that don't match any pattern) are
84 | # defined by theme itself. Builtin themes are using these templates by
85 | # default: ``['localtoc.html', 'relations.html', 'sourcelink.html',
86 | # 'searchbox.html']``.
87 | #
88 | # html_sidebars = {}
89 |
90 |
91 | # -- Options for HTMLHelp output ---------------------------------------------
92 |
93 | # Output file base name for HTML help builder.
94 | htmlhelp_basename = 'pyBreakDowndoc'
95 |
96 |
97 | # -- Options for LaTeX output ------------------------------------------------
98 |
99 | latex_elements = {
100 | # The paper size ('letterpaper' or 'a4paper').
101 | #
102 | # 'papersize': 'letterpaper',
103 |
104 | # The font size ('10pt', '11pt' or '12pt').
105 | #
106 | # 'pointsize': '10pt',
107 |
108 | # Additional stuff for the LaTeX preamble.
109 | #
110 | # 'preamble': '',
111 |
112 | # Latex figure (float) alignment
113 | #
114 | # 'figure_align': 'htbp',
115 | }
116 |
117 | # Grouping the document tree into LaTeX files. List of tuples
118 | # (source start file, target name, title,
119 | # author, documentclass [howto, manual, or own class]).
120 | latex_documents = [
121 | (master_doc, 'pyBreakDown.tex', 'pyBreakDown Documentation',
122 | 'Jakub Bondyra', 'manual'),
123 | ]
124 |
125 |
126 | # -- Options for manual page output ------------------------------------------
127 |
128 | # One entry per manual page. List of tuples
129 | # (source start file, name, description, authors, manual section).
130 | man_pages = [
131 | (master_doc, 'pybreakdown', 'pyBreakDown Documentation',
132 | [author], 1)
133 | ]
134 |
135 |
136 | # -- Options for Texinfo output ----------------------------------------------
137 |
138 | # Grouping the document tree into Texinfo files. List of tuples
139 | # (source start file, target name, title, author,
140 | # dir menu entry, description, category)
141 | texinfo_documents = [
142 | (master_doc, 'pyBreakDown', 'pyBreakDown Documentation',
143 | author, 'pyBreakDown', 'One line description of project.',
144 | 'Miscellaneous'),
145 | ]
146 |
147 |
148 | # -- Extension configuration -------------------------------------------------
149 |
--------------------------------------------------------------------------------
/pyBreakDown/explainer.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | from collections import deque
3 | from blist import blist
4 | from pyBreakDown import explanation as e
5 |
6 | class Explainer:
7 | """
8 | Explainer object.
9 |
10 | Parameters
11 | ----------
12 |     clf : sklearn estimator
13 |         Fitted scikit-learn prediction model (regression or classification).
14 |     data : np.array
15 |         Baseline dataset for the algorithm.
16 | colnames : np.array
17 | Dataset feature names.
18 | """
19 | def __init__(self, clf, data, colnames):
20 |         assert len(colnames) == data.shape[1]  # otherwise it wouldn't make any sense
21 | self.clf = clf
22 | self.data = data
23 | self.colnames = colnames
24 |
25 | def _transform_observation (self, observation):
26 | if observation.ndim < 2:
27 | observation = np.expand_dims(observation, axis=0)
28 | return observation
29 |
30 | def _get_initial_dataset(self, observation, data):
31 | assert observation.ndim == 2 and observation.shape[0] == 1
32 | return np.repeat(observation,repeats=data.shape[0], axis=0)
33 |
34 | def explain (self, observation, direction, useIntercept = False, baseline=0):
35 | """
36 | Make explanation for given observation and dataset.
37 |
38 |         The method works with any scikit-learn prediction model.
39 |
40 | Parameters
41 | ----------
42 | observation : np.array
43 | Observation to explain.
44 |         direction : str
45 |             Either "up" or "down"; decides the direction of the algorithm.
46 |         useIntercept : bool
47 |             If set, the baseline argument is ignored and the baseline is set to the mean prediction over the data (the intercept).
48 |         baseline : float
49 |             Baseline of the explanation.
50 |
51 | Returns
52 | -------
53 | Explanation
54 | Object that contains influences and descriptions of each relevant attribute.
55 |
56 | """
57 | data = np.copy(self.data)
58 | assert direction in ["up","down"]
59 | observation = self._transform_observation(observation) #expand dims from 1D to 2D if necessary
60 | assert len(self.colnames) == observation.shape[1]
61 |
62 | if direction=="up":
63 | exp = self._explain_up(observation, baseline, data)
64 | if direction=="down":
65 | exp = self._explain_down(observation, baseline, data)
66 |
67 | mean_prediction = np.mean(self.clf.predict(data))
68 |
69 | if useIntercept:
70 | baseline = mean_prediction
71 | bcont = 0
72 | else:
73 | bcont = mean_prediction - baseline
74 |
75 | exp.add_intercept(bcont)
76 | exp.add_baseline(baseline)
77 | exp.make_final_prediction()
78 | return exp
79 |
80 |
81 | def _explain_up (self, observation, baseline, data):
82 | new_data = self._get_initial_dataset(observation, data)
83 |
84 | baseline_yhat = np.mean(self.clf.predict(data))
85 |
86 | open_variables = blist(range(0,data.shape[1]))
87 | important_variables = deque()
88 | important_yhats = {}
89 |
90 | for i in range(0, data.shape[1]):
91 | yhats = {}
92 | yhats_diff = np.repeat(-float('inf'), data.shape[1])
93 |
94 | for variable in open_variables:
95 | tmp_data = np.copy(data)
96 | tmp_data[:,variable] = new_data[:,variable]
97 | yhats[variable] = self.clf.predict(tmp_data)
98 | yhats_diff[variable] = abs(baseline_yhat - np.mean(yhats[variable]))
99 |
100 | amax = np.argmax(yhats_diff)
101 | important_variables.append(amax)
102 | important_yhats[i] = yhats[amax]
103 | data[:,amax] = new_data[:,amax]
104 | open_variables.remove(amax)
105 |
106 | var_names = np.array(self.colnames)[important_variables]
107 | var_values = observation[0,important_variables]
108 | means = self._get_means_from_yhats(important_yhats)
109 | means.appendleft(baseline_yhat)
110 | contributions = np.diff(means)
111 | return e.Explanation(var_names, var_values, contributions, e.ExplainerDirection.Up)
112 |
113 | def _explain_down (self, observation, baseline, data):
114 | new_data = self._get_initial_dataset(observation, data)
115 |
116 | target_yhat = self.clf.predict(observation)
117 |
118 | open_variables = blist(range(0,data.shape[1]))
119 | important_variables = deque()
120 | important_yhats = {}
121 |
122 | for i in range(0, data.shape[1]):
123 | yhats = {}
124 | yhats_diff = np.repeat(float('inf'), data.shape[1])
125 |
126 | for variable in open_variables:
127 | tmp_data = np.copy(new_data)
128 | tmp_data[:,variable] = data[:,variable]
129 | yhats[variable] = self.clf.predict(tmp_data)
130 | yhats_diff[variable] = abs(target_yhat - np.mean(yhats[variable]))
131 |
132 | amin = np.argmin(yhats_diff)
133 | important_variables.append(amin)
134 | important_yhats[i] = yhats[amin]
135 | new_data[:,amin] = data[:,amin]
136 | open_variables.remove(amin)
137 |
138 | important_variables.reverse()
139 | var_names = np.array(self.colnames)[important_variables]
140 | var_values = observation[0,important_variables]
141 | means = self._get_means_from_yhats(important_yhats)
142 | means.appendleft(target_yhat[0])
143 | means.reverse()
144 | contributions = np.diff(means)
145 |
146 | return e.Explanation(var_names, var_values, contributions, e.ExplainerDirection.Down)
147 |
148 | def _get_means_from_yhats (self, important_yhats):
149 | return deque([np.array(v).mean() for k,v in important_yhats.items()])
150 |
--------------------------------------------------------------------------------
/docs/source/usermanual.rst:
--------------------------------------------------------------------------------
1 | Usage
2 | =====
3 |
4 | Requirements
5 | ------------
6 |
7 | Nothing fancy, just Python 3.5.2+ and pip.
8 |
9 | Installation
10 | ------------
11 |
12 | Install directly from GitHub:
13 |
14 | ::
15 |
16 | git clone https://github.com/bondyra/pyBreakDown
17 | cd ./pyBreakDown
18 | python3 setup.py install # (or use pip install . instead)
19 |
20 | Basic usage
21 | -----------
22 |
23 | Load dataset
24 | ~~~~~~~~~~~~
25 |
26 | .. code:: python
27 |
28 | from sklearn import datasets
29 |
30 | .. code:: python
31 |
32 | x = datasets.load_boston()
33 |
34 | .. code:: python
35 |
36 | data = x.data
37 |
38 | .. code:: python
39 |
40 | feature_names = x.feature_names
41 |
42 | .. code:: python
43 |
44 | y = x.target
45 |
46 | Prepare model
47 | ~~~~~~~~~~~~~
48 |
49 | .. code:: python
50 |
51 | import numpy as np
52 |
53 | .. code:: python
54 |
55 | from sklearn import tree
56 |
57 | .. code:: python
58 |
59 | model = tree.DecisionTreeRegressor()
60 |
61 | Train model
62 | ~~~~~~~~~~~
63 |
64 | .. code:: python
65 |
66 | train_data = data[1:300,:]
67 | train_labels=y[1:300]
68 |
69 | .. code:: python
70 |
71 | model = model.fit(train_data,y=train_labels)
72 |
73 | Explain predictions on test data
74 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
75 |
76 | .. code:: python
77 |
78 | #necessary imports
79 | from pyBreakDown.explainer import Explainer
80 | from pyBreakDown.explanation import Explanation
81 |
82 | .. code:: python
83 |
84 | #make explainer object
85 | exp = Explainer(clf=model, data=train_data, colnames=feature_names)
86 |
87 | .. code:: python
88 |
89 | #make explanation object that contains all information
90 | explanation = exp.explain(observation=data[302,:],direction="up")
91 |
92 | Text form of explanations
93 | ~~~~~~~~~~~~~~~~~~~~~~~~~
94 |
95 | .. code:: python
96 |
97 | #get information in text form
98 | explanation.text()
99 |
100 | ::
101 |
102 | Feature Contribution Cumulative
103 | Intercept = 1 29.1 29.1
104 | RM = 6.495 -1.98 27.12
105 | TAX = 329.0 -0.2 26.92
106 | B = 383.61 -0.12 26.79
107 | CHAS = 0.0 -0.07 26.72
108 | NOX = 0.433 -0.02 26.7
109 | RAD = 7.0 0.0 26.7
110 | INDUS = 6.09 0.01 26.71
111 | DIS = 5.4917 -0.04 26.66
112 | ZN = 34.0 0.01 26.67
113 | PTRATIO = 16.1 0.04 26.71
114 | AGE = 18.4 0.06 26.77
115 | CRIM = 0.09266 1.33 28.11
116 | LSTAT = 8.67 4.6 32.71
117 | Final prediction 32.71
118 | Baseline = 0
119 |
120 | .. code:: python
121 |
122 | #customized text form
123 | explanation.text(fwidth=40, contwidth=40, cumulwidth = 40, digits=4)
124 |
125 | ::
126 |
127 | Feature Contribution Cumulative
128 | Intercept = 1 29.1 29.1
129 | RM = 6.495 -1.9826 27.1174
130 | TAX = 329.0 -0.2 26.9174
131 | B = 383.61 -0.1241 26.7933
132 | CHAS = 0.0 -0.0686 26.7247
133 | NOX = 0.433 -0.0241 26.7007
134 | RAD = 7.0 0.0 26.7007
135 | INDUS = 6.09 0.0074 26.708
136 | DIS = 5.4917 -0.0438 26.6642
137 | ZN = 34.0 0.0077 26.6719
138 | PTRATIO = 16.1 0.0385 26.7104
139 | AGE = 18.4 0.0619 26.7722
140 | CRIM = 0.09266 1.3344 28.1067
141 | LSTAT = 8.67 4.6037 32.7104
142 | Final prediction 32.7104
143 | Baseline = 0
144 |
145 | Visual form of explanations
146 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~
147 |
148 | .. code:: python
149 |
150 | explanation.visualize()
151 |
152 | .. figure:: images/output_22_0.png
153 | :alt: png
154 |
155 | png
156 |
157 | .. code:: python
158 |
159 | #customize height, width and dpi of plot
160 | explanation.visualize(figsize=(8,5),dpi=100)
161 |
162 | .. figure:: images/output_23_0.png
163 | :alt: png
164 |
165 | png
166 |
167 | .. code:: python
168 |
169 |     #for a baseline other than zero
170 | explanation = exp.explain(observation=data[302,:],direction="up",useIntercept=True) # baseline==intercept
171 | explanation.visualize(figsize=(8,5),dpi=100)
172 |
173 | .. figure:: images/output_24_0.png
174 | :alt: png
175 |
176 | png
177 |
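178 | Saving explanations to a file
179 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
180 | 
181 | The ``visualize`` method also accepts a ``filename`` argument; when it is given,
182 | the plot is saved to disk (at the requested ``dpi``) instead of being shown.
183 | A minimal sketch; the output file name here is arbitrary:
184 | 
185 | .. code:: python
186 | 
187 |     #save the visualization to a file instead of showing it
188 |     explanation.visualize(figsize=(8,5), dpi=150, filename="explanation_302.png")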
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 |
2 |
3 | # pyBreakDown
4 |
5 | **Please note that the Break Down method has been moved to the [dalex](http://dalex.drwhy.ai/) Python package, which is actively maintained. If you experience any problems with pyBreakDown, please consider the [dalex](http://dalex.drwhy.ai/) implementation at https://dalex.drwhy.ai/python/api/.**
6 |
7 |
8 | Python implementation of the breakDown R package (https://github.com/pbiecek/breakDown).
9 |
10 | Docs: https://pybreakdown.readthedocs.io.
11 |
12 | ## Requirements
13 |
14 | Nothing fancy, just Python 3.5.2+ and pip.
15 |
16 | ## Installation
17 |
18 | Install directly from GitHub:
19 | ```
20 | git clone https://github.com/bondyra/pyBreakDown
21 | cd ./pyBreakDown
22 | python3 setup.py install # (or use pip install . instead)
23 | ```
24 |
25 | ## Basic usage
26 |
27 | ### Load dataset
28 |
29 |
30 | ```python
31 | from sklearn import datasets
32 | ```
33 |
34 |
35 | ```python
36 | x = datasets.load_boston()
37 | ```
38 |
39 |
40 | ```python
41 | data = x.data
42 | ```
43 |
44 |
45 | ```python
46 | feature_names = x.feature_names
47 | ```
48 |
49 |
50 | ```python
51 | y = x.target
52 | ```
53 |
54 | ### Prepare model
55 |
56 |
57 | ```python
58 | import numpy as np
59 | ```
60 |
61 |
62 | ```python
63 | from sklearn import tree
64 | ```
65 |
66 |
67 | ```python
68 | model = tree.DecisionTreeRegressor()
69 | ```
70 |
71 | ### Train model
72 |
73 |
74 | ```python
75 | train_data = data[1:300,:]
76 | train_labels=y[1:300]
77 | ```
78 |
79 |
80 | ```python
81 | model = model.fit(train_data,y=train_labels)
82 | ```
83 |
84 | ### Explain predictions on test data
85 |
86 |
87 | ```python
88 | #necessary imports
89 | from pyBreakDown.explainer import Explainer
90 | from pyBreakDown.explanation import Explanation
91 | ```
92 |
93 |
94 | ```python
95 | #make explainer object
96 | exp = Explainer(clf=model, data=train_data, colnames=feature_names)
97 | ```
98 |
99 |
100 | ```python
101 | #make explanation object that contains all information
102 | explanation = exp.explain(observation=data[302,:],direction="up")
103 | ```
104 |
105 | ### Text form of explanations
106 |
107 |
108 | ```python
109 | #get information in text form
110 | explanation.text()
111 | ```
112 |
113 | Feature Contribution Cumulative
114 | Intercept = 1 29.1 29.1
115 | RM = 6.495 -1.98 27.12
116 | TAX = 329.0 -0.2 26.92
117 | B = 383.61 -0.12 26.79
118 | CHAS = 0.0 -0.07 26.72
119 | NOX = 0.433 -0.02 26.7
120 | RAD = 7.0 0.0 26.7
121 | INDUS = 6.09 0.01 26.71
122 | DIS = 5.4917 -0.04 26.66
123 | ZN = 34.0 0.01 26.67
124 | PTRATIO = 16.1 0.04 26.71
125 | AGE = 18.4 0.06 26.77
126 | CRIM = 0.09266 1.33 28.11
127 | LSTAT = 8.67 4.6 32.71
128 | Final prediction 32.71
129 | Baseline = 0
130 |
131 |
132 |
133 | ```python
134 | #customized text form
135 | explanation.text(fwidth=40, contwidth=40, cumulwidth = 40, digits=4)
136 | ```
137 |
138 | Feature Contribution Cumulative
139 | Intercept = 1 29.1 29.1
140 | RM = 6.495 -1.9826 27.1174
141 | TAX = 329.0 -0.2 26.9174
142 | B = 383.61 -0.1241 26.7933
143 | CHAS = 0.0 -0.0686 26.7247
144 | NOX = 0.433 -0.0241 26.7007
145 | RAD = 7.0 0.0 26.7007
146 | INDUS = 6.09 0.0074 26.708
147 | DIS = 5.4917 -0.0438 26.6642
148 | ZN = 34.0 0.0077 26.6719
149 | PTRATIO = 16.1 0.0385 26.7104
150 | AGE = 18.4 0.0619 26.7722
151 | CRIM = 0.09266 1.3344 28.1067
152 | LSTAT = 8.67 4.6037 32.7104
153 | Final prediction 32.7104
154 | Baseline = 0
155 |
156 |
157 | ### Visual form of explanations
158 |
159 |
160 | ```python
161 | explanation.visualize()
162 | ```
163 |
164 |
165 | 
166 |
167 |
168 |
169 | ```python
170 | #customize height, width and dpi of plot
171 | explanation.visualize(figsize=(8,5),dpi=100)
172 | ```
173 |
174 |
175 | 
176 |
177 |
178 |
179 | ```python
180 | #for a baseline other than zero
181 | explanation = exp.explain(observation=data[302,:],direction="up",useIntercept=True) # baseline==intercept
182 | explanation.visualize(figsize=(8,5),dpi=100)
183 | ```
184 |
185 |
186 | 
187 |
188 |
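189 | ### Explaining with the "down" approach
190 | 
191 | The `direction="down"` variant is also supported (both directions are documented in `Explainer.explain`). A minimal sketch for the same observation; the exact contributions may differ from the "up" runs above:
192 | 
193 | ```python
194 | #same observation, explained with the "down" approach
195 | explanation_down = exp.explain(observation=data[302,:], direction="down")
196 | explanation_down.visualize(figsize=(8,5), dpi=100)
197 | ```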
--------------------------------------------------------------------------------
/docs/source/comparison.rst:
--------------------------------------------------------------------------------
1 | Performance
2 | ===========
3 |
4 | This document summarizes the performance of the R package breakDown and the Python package pyBreakDown on the same data.
5 |
6 | The model-agnostic method (both the up and down approaches) was run on the Boston housing regression data, using standard benchmarking libraries from each language.
7 |
8 | R breakDown test code
9 | ~~~~~~~~~~~~~~~~~~~~~
10 |
11 | The following commands were executed:
12 |
13 | .. code:: r
14 |
15 | library(breakDown)
16 | library(rpart)
17 | library(MASS)
18 | library(microbenchmark)
19 | train_data = MASS::Boston[1:300,]
20 | model = rpart(medv~.,data=train_data)
21 |
22 | **Results for the following command:**
23 |
24 | .. code:: r
25 |
26 | microbenchmark(breakDown::broken(model,MASS::Boston[400,],train_data,direction="up",baseline="Intercept"),times=100)
27 |
28 | .. code:: python
29 |
30 | r_results_up = [
31 | 151496945, 179019040, 174285649, 171657389, 186736577, 176873811, 149477996, 156078363, 153822111, 157110590, 164812549, 149597564, 161371930, 150536604, 159660424,
32 | 151099397, 164005548, 160877359, 155673724, 158594509, 156552341, 200853530, 160604062, 151016360, 172159991, 157366522, 147924708, 162123947, 147203023, 162070644,
33 | 151646715, 153851925, 156636659, 153249238, 156439382, 156615324, 157338560, 161933728, 149347225, 178186792, 151804799, 212896701, 153531788, 150779583, 154386491,
34 | 156761934, 188869215, 159124536, 152582601, 160098227, 155172859, 224314278, 210030058, 158369641, 155658547, 157556676, 159644216, 163577044, 160073830, 167877435,
35 | 162871403, 210618121, 152126449, 151551847, 154210243, 162995112, 158628385, 158397297, 156230989, 155259541, 210152892, 167261196, 157579803, 163454795, 210172135,
36 | 207497217, 217937407, 268007391, 238880808, 200768319, 298817194, 187716953, 332831036, 288971446, 310214308, 279432796, 314266951, 240378471, 275743596, 269309196,
37 | 222933544, 147644030, 236694466, 237237474, 166425943, 176675193, 162013906, 176361342, 171035583, 156164433]
38 |
39 | .. code:: python
40 |
41 | r_results_up_s = list(map((lambda x:x/1000000000.0), r_results_up)) #nanoseconds to seconds (float)
42 |
43 | **Results for the following command:**
44 |
45 | .. code:: r
46 |
47 | microbenchmark(breakDown::broken(model,MASS::Boston[400,],train_data,direction="down",baseline="Intercept"),times=100)
48 |
49 | .. code:: python
50 |
51 | r_results_down = [
52 | 157636586, 176977826, 152566930, 176370770, 180238813, 173332343, 155944278, 169256885, 154035595, 182050786, 163405722, 161102422, 150142820, 153812325, 167083885,
53 | 168320711, 172771259, 149702551, 150772637, 158442582, 164156678, 153659233, 171969098, 171280606, 170234515, 161486393, 165544387, 341738681, 166618779, 153261220,
54 | 267645521, 269206271, 364408941, 160329436, 161647353, 271923368, 283175933, 288241510, 210309194, 157228111, 206084831, 232462591, 151892180, 236498265, 256872159,
55 | 183687155, 156095554, 149535499, 270376952, 291976544, 285317108, 231151520, 151745569, 151015639, 193988674, 249337290, 230854189, 159672939, 165214270, 152861032,
56 | 186642382, 184334376, 168095304, 168336995, 169595482, 195182804, 168409596, 340181327, 252239990, 160054458, 222313336, 215507379, 256581788, 250826165, 185914535,
57 | 260640237, 157249833, 179897500, 183111127, 151472923, 167080830, 155694615, 141172815, 165356275, 154660090, 164839180, 150481817, 174504689, 158367491, 244537238,
58 | 200776265, 143306303, 174211859, 143917859, 152706348, 160942923, 169771885, 148698114, 148858718, 189644579
59 | ]
60 |
61 | .. code:: python
62 |
63 | r_results_down_s = list(map((lambda x:x/1000000000.0), r_results_down)) #nanoseconds to seconds (float)
64 |
65 | Python pyBreakDown test code
66 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
67 |
68 | The following code was executed:
69 |
70 | .. code:: python
71 |
72 | import timeit
73 | setup="""from pyBreakDown import explainer
74 | from pyBreakDown import explanation
75 | from sklearn import datasets, tree
76 | model = tree.DecisionTreeRegressor()
77 | boston = datasets.load_boston()
78 | train_data = boston.data[0:300,:]
79 | model = model.fit(X=train_data,y=boston.target[0:300])
80 | exp = explainer.Explainer(clf=model, data=train_data, colnames=boston.feature_names)"""
81 |
82 | **Similar commands were used for the up and down methods:**
83 |
84 | .. code:: python
85 |
86 |     t = timeit.Timer(stmt='exp.explain(observation=boston.data[399,:],direction="up", useIntercept=True)', setup=setup)
87 | p_results_up = t.repeat(number=1,repeat=100)
88 |
89 | .. code:: python
90 |
91 |     t = timeit.Timer(stmt='exp.explain(observation=boston.data[399,:],direction="down", useIntercept=True)', setup=setup)
92 | p_results_down = t.repeat(number=1,repeat=100)
93 |
94 | .. code:: python
95 |
96 | import numpy as np
97 | def describe (arr):
98 | print ("Min".ljust(10)+str(np.min(arr)))
99 | print ("1Q".ljust(10)+str(np.percentile(arr,q=25)))
100 | print ("Median".ljust(10)+str(np.median(arr)))
101 | print ("Mean".ljust(10)+str(np.mean(arr)))
102 | print ("3Q".ljust(10)+str(np.percentile(arr,q=75)))
103 | print ("Max".ljust(10)+str(np.max(arr)))
104 |
105 | Basic statistics for breakDown
106 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
107 |
108 | .. code:: python
109 |
110 | describe(r_results_down_s)
111 |
112 |
113 | .. parsed-literal::
114 |
115 | Min 0.141172815
116 | 1Q 0.15694497175
117 | Median 0.16883324049999998
118 | Mean 0.19000790343999996
119 | 3Q 0.20714092175
120 | Max 0.364408941
121 |
122 |
123 | .. code:: python
124 |
125 | describe(r_results_up_s)
126 |
127 |
128 | .. parsed-literal::
129 |
130 | Min 0.147203023
131 | 1Q 0.15566992975000002
132 | Median 0.1611246445
133 | Mean 0.18094488425
134 | 3Q 0.18800501849999998
135 | Max 0.332831036
136 |
137 |
138 | Basic statistics for pyBreakDown
139 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
140 |
141 | .. code:: python
142 |
143 | describe(p_results_down)
144 |
145 |
146 | .. parsed-literal::
147 |
148 | Min 0.007466723000106867
149 | 1Q 0.007695871750911465
150 | Median 0.007944501499878243
151 | Mean 0.008533558790659299
152 | 3Q 0.008452101750663132
153 | Max 0.015690394000557717
154 |
155 |
156 | .. code:: python
157 |
158 | describe(p_results_up)
159 |
160 |
161 | .. parsed-literal::
162 |
163 | Min 0.007126873002562206
164 | 1Q 0.007325664251766284
165 | Median 0.007430911500705406
166 | Mean 0.007852593970528687
167 | 3Q 0.007539984750110307
168 | Max 0.015298425998480525
169 |
170 |
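171 | Side-by-side summary
172 | ~~~~~~~~~~~~~~~~~~~~~
173 | 
174 | For a rough side-by-side view, the mean timings collected above can be compared
175 | directly. A small helper for this, not part of the original benchmark:
176 | 
177 | .. code:: python
178 | 
179 |     #rough speed-up factor of pyBreakDown over R breakDown, from mean timings
180 |     speedup_up = np.mean(r_results_up_s) / np.mean(p_results_up)
181 |     speedup_down = np.mean(r_results_down_s) / np.mean(p_results_down)
182 |     print("up:", round(speedup_up, 1), "down:", round(speedup_down, 1))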
--------------------------------------------------------------------------------
/pyBreakDown/explanation.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | from collections import deque
3 | from enum import Enum
4 | from recordclass import recordclass
5 | import matplotlib.patches as patches
6 | from matplotlib import pyplot as plt
7 |
8 | AttrInfo = recordclass("AttrInfo",["name","value","contribution","cumulative"])
9 |
10 | class ExplainerDirection (Enum):
11 | Up=1
12 | Down=2
13 |
14 | class Explanation:
15 | """
16 |     Contains algorithm results, including the contribution of each individual feature.
17 | """
18 | _INTERCEPT_NAME = "Intercept"
19 | _INTERCEPT_VALUE = 1
20 |
21 | def __init__ (self, variable_names, variable_values, contributions, direction):
22 | self._direction = direction
23 | self._attributes = deque()
24 | csum = 0
25 | for (name, value, contribution) in zip(variable_names, variable_values, contributions):
26 | csum+=contribution
27 | self._attributes.append(
28 | AttrInfo(name=name, value=value, contribution=contribution, cumulative=csum)
29 | )
30 | self._has_intercept=False
31 | self._has_final_prognosis=False
32 |
33 | def text (self, fwidth=25, contwidth=20, cumulwidth = 20, digits=2):
34 | """
35 |         Get a user-friendly text form of the explanation.
36 |
37 | Parameters
38 | ----------
39 |         fwidth : int
40 |             Width of the feature name column, in characters.
41 |         contwidth : int
42 |             Width of the contribution column, in characters.
43 |         cumulwidth : int
44 |             Width of the cumulative value column, in characters.
45 | digits : int
46 | Number of decimal places for values.
47 | """
48 | if not self._has_intercept or not self._has_final_prognosis:
49 | return
50 |
51 | lines = [''.join(
52 | [
53 | ' = '.join([attr.name, str(attr.value)]).ljust(fwidth),
54 | str(round(attr.contribution,digits)).ljust(contwidth),
55 | str(round(attr.cumulative, digits)).ljust(cumulwidth)
56 | ]
57 | ) for attr in self._attributes]
58 |
59 | print (''.join(
60 | ["Feature".ljust(fwidth),
61 | "Contribution".ljust(contwidth),
62 | "Cumulative".ljust(cumulwidth)]))
63 | print('\n'.join(lines))
64 | print(''.join(
65 | ['Final prediction'.ljust(fwidth+contwidth),
66 | str(round(self._final_prediction, digits)).ljust(cumulwidth)]))
67 | print(' = '.join(["Baseline", str(round(self._baseline, digits))]))
68 |
69 | def visualize(self, figsize=(7,6), filename=None, dpi=90,fontsize=14):
70 | """
71 |         Get a user-friendly visualization of the explanation.
72 |
73 | Parameters
74 | ----------
75 |         figsize : tuple of int
76 |             Pyplot figure size.
77 |         filename : string
78 |             Name of the file to save the visualization to.
79 |             If not specified, standard pyplot.show() will be performed.
80 |         dpi : int
81 |             Dots per inch when saving to file.
82 | """
83 |
84 | if not self._has_intercept or not self._has_final_prognosis:
85 | return
86 |
87 | fig = plt.figure(figsize=figsize)
88 | ax = plt.axes()
89 | positions = list(range(len(self._attributes)+2))
90 | previous_value = self._baseline
91 | for (attr_info, position) in zip(self._attributes, positions[1:]):
92 | cumulative = attr_info.cumulative+self._baseline
93 | height=1
94 | left = previous_value if attr_info.contribution > 0 else cumulative
95 | width = abs(attr_info.contribution)
96 | color = "blue" if attr_info.contribution > 0 else "orange"
97 | rect = patches.Rectangle(
98 | xy=(left, position-0.5),width=width,height=height,alpha=0.8,color=color)
99 | ax.add_patch(rect)
100 | plt.errorbar(x=left, y=position, yerr=0.5, color="black")
101 | plt.errorbar(x=left+width, y=position, yerr=0.5, color="black")
102 | plt.text(left+width+0.15, y=position-0.2, size=fontsize,
103 | s = self._get_prefix(attr_info.contribution) + str(round(attr_info.contribution,2)))
104 | previous_value = cumulative
105 |
106 | #add final prediction bar
107 | rectf = patches.Rectangle(
108 | xy=(self._baseline,positions[len(positions)-1]-0.5),
109 | width=self._final_prediction,
110 | height=1, color="grey", alpha=0.8
111 | )
112 | ax.add_patch(rectf)
113 | ax.axvline(x=self._baseline,mew=3,color="black",alpha=1)
114 | plt.errorbar(x=self._baseline, y=len(positions)-1, yerr=0.5, color="black")
115 | plt.errorbar(x=self._baseline+self._final_prediction, y=len(positions)-1, yerr=0.5, color="black")
116 | plt.text(
117 | x=self._baseline+self._final_prediction+0.15,
118 | y=positions[len(positions)-1]-0.2,
119 | s=str(round(self._final_prediction+self._baseline,2)),size=fontsize,weight="bold")
120 |
121 | ax.set_yticks(positions[1:])
122 | ax.grid(color="gray",alpha=0.5)
123 | sign = "+" if self._direction==ExplainerDirection.Up else "-"
124 | labels=[sign + "=".join([attr.name,str(attr.value)]) for attr in self._attributes]+["Final Prognosis"]
125 | ax.set_yticklabels(labels,size=fontsize)
126 |
127 | all_cumulative = [attr.cumulative for attr in self._attributes]
128 | leftbound = min([min(all_cumulative), 0]) + self._baseline
129 | rightbound= max(max(all_cumulative)+self._baseline,self._baseline)
130 | plt.text(x=self._baseline+0.15, y=positions[0]-0.2, s="Baseline = "+str(round(self._baseline,2)),
131 | size=fontsize,color="red")
132 |
133 | ax.set_xlim(leftbound-1, rightbound+1)
134 | ax.set_ylim(-1,len(self._attributes)+2)
135 | ax.spines['right'].set_visible(False)
136 | ax.spines['top'].set_visible(False)
137 | approach = "\"up\"" if self._direction==ExplainerDirection.Up else "\"down\""
138 | plt.title("Prediction explanation for "+approach+" approach")
139 |
140 | #fig.tight_layout(pad=0, w_pad=0, h_pad=0.0)
141 | #fig.subplots_adjust(hspace=0, wspace=0.1)
142 | if filename is None:
143 | plt.show()
144 | else:
145 | fig.savefig(filename,dpi=dpi)
146 |
147 | def add_intercept (self, intercept_contribution):
148 | self._attributes.appendleft(AttrInfo(
149 | name=self._INTERCEPT_NAME,
150 | value=self._INTERCEPT_VALUE,
151 | contribution=intercept_contribution,
152 | cumulative=0)
153 | )
154 | self._correct_cumulatives()
155 | self._has_intercept = True
156 |
157 | def make_final_prediction (self):
158 | self._final_prediction = sum(attr.contribution for attr in self._attributes)
159 | self._has_final_prognosis = True
160 |
161 | def add_baseline (self, baseline):
162 | self._baseline = baseline
163 |
164 | def _correct_cumulatives(self):
165 | csum = 0
166 | for attribute in self._attributes:
167 | csum+=attribute.contribution
168 | attribute.cumulative = csum
169 |
170 | def _get_prefix(self, val):
171 | return "+" if val>=0 else ""
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Copyright 2012 The Obvious Corporation and contributors.
2 |
3 |
4 | Licensed under the Apache License, Version 2.0 (the "License");
5 | you may not use this file except in compliance with the License.
6 | You may obtain a copy of the License at
7 |
8 |
9 |
10 | Unless required by applicable law or agreed to in writing, software
11 | distributed under the License is distributed on an "AS IS" BASIS,
12 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | See the License for the specific language governing permissions and
14 | limitations under the License.
15 |
16 | ```
17 | -------------------------------------------------------------------------
18 | Apache License
19 | Version 2.0, January 2004
20 | http://www.apache.org/licenses/
21 |
22 |
23 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
24 |
25 | 1. Definitions.
26 |
27 | "License" shall mean the terms and conditions for use, reproduction,
28 | and distribution as defined by Sections 1 through 9 of this document.
29 |
30 | "Licensor" shall mean the copyright owner or entity authorized by
31 | the copyright owner that is granting the License.
32 |
33 | "Legal Entity" shall mean the union of the acting entity and all
34 | other entities that control, are controlled by, or are under common
35 | control with that entity. For the purposes of this definition,
36 | "control" means (i) the power, direct or indirect, to cause the
37 | direction or management of such entity, whether by contract or
38 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
39 | outstanding shares, or (iii) beneficial ownership of such entity.
40 |
41 | "You" (or "Your") shall mean an individual or Legal Entity
42 | exercising permissions granted by this License.
43 |
44 | "Source" form shall mean the preferred form for making modifications,
45 | including but not limited to software source code, documentation
46 | source, and configuration files.
47 |
48 | "Object" form shall mean any form resulting from mechanical
49 | transformation or translation of a Source form, including but
50 | not limited to compiled object code, generated documentation,
51 | and conversions to other media types.
52 |
53 | "Work" shall mean the work of authorship, whether in Source or
54 | Object form, made available under the License, as indicated by a
55 | copyright notice that is included in or attached to the work
56 | (an example is provided in the Appendix below).
57 |
58 | "Derivative Works" shall mean any work, whether in Source or Object
59 | form, that is based on (or derived from) the Work and for which the
60 | editorial revisions, annotations, elaborations, or other modifications
61 | represent, as a whole, an original work of authorship. For the purposes
62 | of this License, Derivative Works shall not include works that remain
63 | separable from, or merely link (or bind by name) to the interfaces of,
64 | the Work and Derivative Works thereof.
65 |
66 | "Contribution" shall mean any work of authorship, including
67 | the original version of the Work and any modifications or additions
68 | to that Work or Derivative Works thereof, that is intentionally
69 | submitted to Licensor for inclusion in the Work by the copyright owner
70 | or by an individual or Legal Entity authorized to submit on behalf of
71 | the copyright owner. For the purposes of this definition, "submitted"
72 | means any form of electronic, verbal, or written communication sent
73 | to the Licensor or its representatives, including but not limited to
74 | communication on electronic mailing lists, source code control systems,
75 | and issue tracking systems that are managed by, or on behalf of, the
76 | Licensor for the purpose of discussing and improving the Work, but
77 | excluding communication that is conspicuously marked or otherwise
78 | designated in writing by the copyright owner as "Not a Contribution."
79 |
80 | "Contributor" shall mean Licensor and any individual or Legal Entity
81 | on behalf of whom a Contribution has been received by Licensor and
82 | subsequently incorporated within the Work.
83 |
84 | 2. Grant of Copyright License. Subject to the terms and conditions of
85 | this License, each Contributor hereby grants to You a perpetual,
86 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
87 | copyright license to reproduce, prepare Derivative Works of,
88 | publicly display, publicly perform, sublicense, and distribute the
89 | Work and such Derivative Works in Source or Object form.
90 |
91 | 3. Grant of Patent License. Subject to the terms and conditions of
92 | this License, each Contributor hereby grants to You a perpetual,
93 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
94 | (except as stated in this section) patent license to make, have made,
95 | use, offer to sell, sell, import, and otherwise transfer the Work,
96 | where such license applies only to those patent claims licensable
97 | by such Contributor that are necessarily infringed by their
98 | Contribution(s) alone or by combination of their Contribution(s)
99 | with the Work to which such Contribution(s) was submitted. If You
100 | institute patent litigation against any entity (including a
101 | cross-claim or counterclaim in a lawsuit) alleging that the Work
102 | or a Contribution incorporated within the Work constitutes direct
103 | or contributory patent infringement, then any patent licenses
104 | granted to You under this License for that Work shall terminate
105 | as of the date such litigation is filed.
106 |
107 | 4. Redistribution. You may reproduce and distribute copies of the
108 | Work or Derivative Works thereof in any medium, with or without
109 | modifications, and in Source or Object form, provided that You
110 | meet the following conditions:
111 |
112 | (a) You must give any other recipients of the Work or
113 | Derivative Works a copy of this License; and
114 |
115 | (b) You must cause any modified files to carry prominent notices
116 | stating that You changed the files; and
117 |
118 | (c) You must retain, in the Source form of any Derivative Works
119 | that You distribute, all copyright, patent, trademark, and
120 | attribution notices from the Source form of the Work,
121 | excluding those notices that do not pertain to any part of
122 | the Derivative Works; and
123 |
124 | (d) If the Work includes a "NOTICE" text file as part of its
125 | distribution, then any Derivative Works that You distribute must
126 | include a readable copy of the attribution notices contained
127 | within such NOTICE file, excluding those notices that do not
128 | pertain to any part of the Derivative Works, in at least one
129 | of the following places: within a NOTICE text file distributed
130 | as part of the Derivative Works; within the Source form or
131 | documentation, if provided along with the Derivative Works; or,
132 | within a display generated by the Derivative Works, if and
133 | wherever such third-party notices normally appear. The contents
134 | of the NOTICE file are for informational purposes only and
135 | do not modify the License. You may add Your own attribution
136 | notices within Derivative Works that You distribute, alongside
137 | or as an addendum to the NOTICE text from the Work, provided
138 | that such additional attribution notices cannot be construed
139 | as modifying the License.
140 |
141 | You may add Your own copyright statement to Your modifications and
142 | may provide additional or different license terms and conditions
143 | for use, reproduction, or distribution of Your modifications, or
144 | for any such Derivative Works as a whole, provided Your use,
145 | reproduction, and distribution of the Work otherwise complies with
146 | the conditions stated in this License.
147 |
148 | 5. Submission of Contributions. Unless You explicitly state otherwise,
149 | any Contribution intentionally submitted for inclusion in the Work
150 | by You to the Licensor shall be under the terms and conditions of
151 | this License, without any additional terms or conditions.
152 | Notwithstanding the above, nothing herein shall supersede or modify
153 | the terms of any separate license agreement you may have executed
154 | with Licensor regarding such Contributions.
155 |
156 | 6. Trademarks. This License does not grant permission to use the trade
157 | names, trademarks, service marks, or product names of the Licensor,
158 | except as required for reasonable and customary use in describing the
159 | origin of the Work and reproducing the content of the NOTICE file.
160 |
161 | 7. Disclaimer of Warranty. Unless required by applicable law or
162 | agreed to in writing, Licensor provides the Work (and each
163 | Contributor provides its Contributions) on an "AS IS" BASIS,
164 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
165 | implied, including, without limitation, any warranties or conditions
166 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
167 | PARTICULAR PURPOSE. You are solely responsible for determining the
168 | appropriateness of using or redistributing the Work and assume any
169 | risks associated with Your exercise of permissions under this License.
170 |
171 | 8. Limitation of Liability. In no event and under no legal theory,
172 | whether in tort (including negligence), contract, or otherwise,
173 | unless required by applicable law (such as deliberate and grossly
174 | negligent acts) or agreed to in writing, shall any Contributor be
175 | liable to You for damages, including any direct, indirect, special,
176 | incidental, or consequential damages of any character arising as a
177 | result of this License or out of the use or inability to use the
178 | Work (including but not limited to damages for loss of goodwill,
179 | work stoppage, computer failure or malfunction, or any and all
180 | other commercial damages or losses), even if such Contributor
181 | has been advised of the possibility of such damages.
182 |
183 | 9. Accepting Warranty or Additional Liability. While redistributing
184 | the Work or Derivative Works thereof, You may choose to offer,
185 | and charge a fee for, acceptance of support, warranty, indemnity,
186 | or other liability obligations and/or rights consistent with this
187 | License. However, in accepting such obligations, You may act only
188 | on Your own behalf and on Your sole responsibility, not on behalf
189 | of any other Contributor, and only if You agree to indemnify,
190 | defend, and hold each Contributor harmless for any liability
191 | incurred by, or claims asserted against, such Contributor by reason
192 | of your accepting any such warranty or additional liability.
193 |
194 | END OF TERMS AND CONDITIONS
195 | ```
196 |
--------------------------------------------------------------------------------