├── .github
│   ├── dependabot.yml
│   ├── release.yml
│   └── workflows
│       ├── build_docs.yml
│       ├── release_and_publish.yml
│       └── unittests.yml
├── .gitignore
├── .pre-commit-config.yaml
├── LICENSE.txt
├── README.md
├── ci
│   ├── 310-latest.yaml
│   ├── 310-min.yaml
│   ├── 311-latest.yaml
│   ├── 312-dev.yaml
│   └── 312-latest.yaml
├── codecov.yml
├── docs
│   ├── Makefile
│   ├── _static
│   │   ├── images
│   │   │   └── pysal_favicon.ico
│   │   ├── pysal-styles.css
│   │   └── references.bib
│   ├── api.rst
│   ├── conf.py
│   ├── index.rst
│   ├── installation.rst
│   ├── references.rst
│   └── tutorials.rst
├── environment.yml
├── notebooks
│   ├── 10_specification_tests_properties.ipynb
│   ├── 11_distance_decay.ipynb
│   ├── 12_estimating_slx.ipynb
│   ├── 13_ML_estimation_spatial_lag.ipynb
│   ├── 14_IV_estimation_spatial_lag.ipynb
│   ├── 15_ML_estimation_spatial_error.ipynb
│   ├── 16_GMM_estimation_spatial_error.ipynb
│   ├── 17_GMM_higher_order.ipynb
│   ├── 1_sample_data.ipynb
│   ├── 2_data_input_output.ipynb
│   ├── 3_basic_mapping.ipynb
│   ├── 4_spatial_weights.ipynb
│   ├── 5_OLS.ipynb
│   ├── 6_TWOSLS.ipynb
│   ├── 7_spatial_models.ipynb
│   ├── 8_spatial_multipliers.ipynb
│   ├── 9_specification_tests.ipynb
│   ├── GM_Lag_example.ipynb
│   ├── LICENSE
│   ├── Panel_FE_example.ipynb
│   ├── README.md
│   └── skater_reg.ipynb
├── pyproject.toml
├── spreg
│   ├── __init__.py
│   ├── dgp.py
│   ├── diagnostics.py
│   ├── diagnostics_panel.py
│   ├── diagnostics_probit.py
│   ├── diagnostics_sp.py
│   ├── diagnostics_sur.py
│   ├── diagnostics_tsls.py
│   ├── error_sp.py
│   ├── error_sp_het.py
│   ├── error_sp_het_regimes.py
│   ├── error_sp_hom.py
│   ├── error_sp_hom_regimes.py
│   ├── error_sp_regimes.py
│   ├── ml_error.py
│   ├── ml_error_regimes.py
│   ├── ml_lag.py
│   ├── ml_lag_regimes.py
│   ├── nslx.py
│   ├── ols.py
│   ├── ols_regimes.py
│   ├── opt.py
│   ├── optional_imports.ipynb
│   ├── output.py
│   ├── panel_fe.py
│   ├── panel_re.py
│   ├── panel_utils.py
│   ├── probit.py
│   ├── regimes.py
│   ├── robust.py
│   ├── skater_reg.py
│   ├── sp_panels.py
│   ├── spsearch.py
│   ├── sputils.py
│   ├── summary_output.py
│   ├── summary_output.py.orig
│   ├── sur.py
│   ├── sur_error.py
│   ├── sur_lag.py
│   ├── sur_utils.py
│   ├── tests
│   │   ├── __init__.py
│   │   ├── skip.py
│   │   ├── test_diagnostics.py
│   │   ├── test_diagnostics_panel.py
│   │   ├── test_diagnostics_sp.py
│   │   ├── test_diagnostics_tsls.py
│   │   ├── test_error_sp.py
│   │   ├── test_error_sp_het.py
│   │   ├── test_error_sp_het_regimes.py
│   │   ├── test_error_sp_het_sparse.py
│   │   ├── test_error_sp_hom.py
│   │   ├── test_error_sp_hom_regimes.py
│   │   ├── test_error_sp_hom_sparse.py
│   │   ├── test_error_sp_regimes.py
│   │   ├── test_error_sp_sparse.py
│   │   ├── test_ml_error.py
│   │   ├── test_ml_error_regimes.py
│   │   ├── test_ml_lag.py
│   │   ├── test_ml_lag_regimes.py
│   │   ├── test_nslx.py
│   │   ├── test_ols.py
│   │   ├── test_ols_regimes.py
│   │   ├── test_ols_sparse.py
│   │   ├── test_panel_fe.py
│   │   ├── test_panel_re.py
│   │   ├── test_probit.py
│   │   ├── test_sp_panels.py
│   │   ├── test_sputils.py
│   │   ├── test_sur.py
│   │   ├── test_sur_error.py
│   │   ├── test_sur_lag.py
│   │   ├── test_twosls.py
│   │   ├── test_twosls_regimes.py
│   │   ├── test_twosls_sp.py
│   │   ├── test_twosls_sp_regimes.py
│   │   ├── test_twosls_sp_sparse.py
│   │   └── test_twosls_sparse.py
│   ├── twosls.py
│   ├── twosls_regimes.py
│   ├── twosls_sp.py
│   ├── twosls_sp_regimes.py
│   ├── user_output.py
│   ├── utils.py
│   └── w_utils.py
└── tools
    ├── changelog_1.0.4.md
    ├── changelog_1.1.1.md
    ├── changelog_1.1.2.md
    ├── changelog_1.1.2.post1.md
    ├── changelog_1.2.0.md
    ├── changelog_1.2.0.post1.md
    ├── changelog_1.2.1.md
    └── gitcount.ipynb
/.github/dependabot.yml:
--------------------------------------------------------------------------------
1 | # To get started with Dependabot version updates, you'll need to specify which
2 | # package ecosystems to update and where the package manifests are located.
3 | # Please see the documentation for all configuration options:
4 | # https://help.github.com/github/administering-a-repository/configuration-options-for-dependency-updates
5 |
6 | version: 2
7 | updates:
8 | - package-ecosystem: "github-actions"
9 | directory: "/"
10 | schedule:
11 | interval: "daily"
12 | reviewers:
13 | - "pedrovma"
14 |
15 | - package-ecosystem: "pip"
16 | directory: "/"
17 | schedule:
18 | interval: "daily"
19 | reviewers:
20 | - "pedrovma"
21 |
--------------------------------------------------------------------------------
/.github/release.yml:
--------------------------------------------------------------------------------
1 | changelog:
2 | exclude:
3 | labels:
4 | - ignore-for-release
5 | authors:
6 | - dependabot
7 | categories:
8 | - title: Bug Fixes
9 | labels:
10 | - bug
11 | - title: Enhancements
12 | labels:
13 | - enhancement
14 | - title: Other Changes
15 | labels:
16 | - "*"
17 |
--------------------------------------------------------------------------------
/.github/workflows/build_docs.yml:
--------------------------------------------------------------------------------
1 | name: Build Docs
2 | on:
3 | push:
4 | branches:
5 | - main
6 | workflow_dispatch:
7 | inputs:
8 | version:
9 | description: Manual Doc Build Reason
10 | default: test
11 | required: false
12 |
13 | jobs:
14 | docs:
15 | name: Build & Push Docs
16 | runs-on: ${{ matrix.os }}
17 | timeout-minutes: 90
18 | strategy:
19 | matrix:
20 | os: ['ubuntu-latest']
21 | environment-file: [ci/312-latest.yaml]
22 | experimental: [false]
23 | defaults:
24 | run:
25 | shell: bash -l {0}
26 |
27 | steps:
28 | - name: Checkout repo
29 | uses: actions/checkout@v4
30 | with:
31 | fetch-depth: 0 # Fetch all history for all branches and tags.
32 |
33 | - name: Setup micromamba
34 | uses: mamba-org/setup-micromamba@v1
35 | with:
36 | environment-file: ${{ matrix.environment-file }}
37 | micromamba-version: 'latest'
38 |
39 | - name: Install
40 | run: pip install -e . --no-deps --force-reinstall
41 |
42 | - name: Make Docs
43 | run: cd docs; make html
44 |
45 | - name: Commit Docs
46 | run: |
47 | git clone https://github.com/ammaraskar/sphinx-action-test.git --branch gh-pages --single-branch gh-pages
48 | cp -r docs/_build/html/* gh-pages/
49 | cd gh-pages
50 | git config --local user.email "action@github.com"
51 | git config --local user.name "GitHub Action"
52 | git add .
53 | git commit -m "Update documentation" -a || true
54 | # The above command will fail if no changes were present,
55 | # so we ignore the return code.
56 |
57 | - name: Push to gh-pages
58 | uses: ad-m/github-push-action@master
59 | with:
60 | branch: gh-pages
61 | directory: gh-pages
62 | github_token: ${{ secrets.GITHUB_TOKEN }}
63 | force: true
--------------------------------------------------------------------------------
/.github/workflows/release_and_publish.yml:
--------------------------------------------------------------------------------
1 | # Release package on GitHub and publish to PyPI
2 |
3 | # Important: In order to trigger this workflow for the organization
4 | # repo (organzation-name/repo-name vs. user-name/repo-name), a tagged
5 | # commit must be made to *organzation-name/repo-name*. If the tagged
6 | # commit is made to *user-name/repo-name*, a release will be published
7 | # under the user's name, not the organzation.
8 |
9 | #--------------------------------------------------
10 | name: Release & Publish
11 |
12 | on:
13 | push:
14 | # Sequence of patterns matched against refs/tags
15 | tags:
16 | - "v*" # Push events to matching v*, i.e. v1.0, v20.15.10
17 | workflow_dispatch:
18 | inputs:
19 | version:
20 | description: Manual Release
21 | default: test
22 | required: false
23 |
24 | jobs:
25 | build:
26 | name: Create release & publish to PyPI
27 | runs-on: ubuntu-latest
28 | steps:
29 | - name: Checkout repo
30 | uses: actions/checkout@v4
31 |
32 | - name: Set up python
33 | uses: actions/setup-python@v5
34 | with:
35 | python-version: "3.x"
36 |
37 | - name: Install Dependencies
38 | run: |
39 | python -m pip install --upgrade pip
40 | python -m pip install --upgrade build twine
41 | python -m build
42 | twine check --strict dist/*
43 |
44 | - name: Create Release Notes
45 | uses: actions/github-script@v7
46 | with:
47 | github-token: ${{secrets.GITHUB_TOKEN}}
48 | script: |
49 | await github.request(`POST /repos/${{ github.repository }}/releases`, {
50 | tag_name: "${{ github.ref }}",
51 | generate_release_notes: true
52 | });
53 |
54 | - name: Publish distribution 📦 to PyPI
55 | uses: pypa/gh-action-pypi-publish@release/v1
56 | with:
57 | user: __token__
58 | password: ${{ secrets.PYPI_PASSWORD }}
--------------------------------------------------------------------------------
/.github/workflows/unittests.yml:
--------------------------------------------------------------------------------
1 | name: Continuous Integration
2 |
3 | on:
4 | push:
5 | branches:
6 | - '*'
7 | pull_request:
8 | branches:
9 | - '*'
10 | #schedule:
11 | # - cron: '59 23 * * *'
12 | workflow_dispatch:
13 | inputs:
14 | version:
15 | description: Manual Unittest Run
16 | default: test
17 | required: false
18 |
19 | jobs:
20 | unittests:
21 | env:
22 | RUN_TEST: pytest -v spreg -n auto --cov=spreg --cov-report=xml --color yes --cov-append --cov-report term-missing
23 | ############################################## replace above with this chunk when docstring testing gets worked out
24 | #RUN_TEST: pytest -v spreg -n auto --cov=spreg --doctest-modules --cov-report=xml --color yes --cov-append --cov-report term-missing
25 | ##############################################
26 | name: ${{ matrix.os }}, ${{ matrix.environment-file }}
27 | runs-on: ${{ matrix.os }}
28 | timeout-minutes: 30
29 | strategy:
30 | matrix:
31 | os: [ubuntu-latest]
32 | environment-file:
33 | - ci/310-min.yaml
34 | - ci/310-latest.yaml
35 | - ci/311-latest.yaml
36 | - ci/312-latest.yaml
37 | - ci/312-dev.yaml
38 | include:
39 | - environment-file: ci/312-latest.yaml
40 | os: macos-13 # Intel
41 | - environment-file: ci/312-latest.yaml
42 | os: macos-14 # Apple Silicon
43 | - environment-file: ci/312-latest.yaml
44 | os: windows-latest
45 | fail-fast: false
46 |
47 | defaults:
48 | run:
49 | shell: bash -l {0}
50 |
51 | steps:
52 |
53 | - name: checkout repo
54 | uses: actions/checkout@v4
55 | with:
56 | fetch-depth: 0 # Fetch all history for all branches and tags.
57 |
58 | - name: setup micromamba
59 | uses: mamba-org/setup-micromamba@v2
60 | with:
61 | environment-file: ${{ matrix.environment-file }}
62 | micromamba-version: 'latest'
63 |
64 | - name: run tests
65 | run: ${{ env.RUN_TEST }}
66 |
67 | - name: codecov
68 | uses: codecov/codecov-action@v4
69 | with:
70 | token: ${{ secrets.CODECOV_TOKEN }}
71 | file: ./coverage.xml
72 | name: spreg-codecov
73 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | .Python
11 | env/
12 | build/
13 | develop-eggs/
14 | dist/
15 | downloads/
16 | eggs/
17 | .eggs/
18 | lib/
19 | lib64/
20 | parts/
21 | sdist/
22 | var/
23 | wheels/
24 | *.egg-info/
25 | .installed.cfg
26 | *.egg
27 |
28 | # PyInstaller
29 | # Usually these files are written by a python script from a template
30 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
31 | *.manifest
32 | *.spec
33 |
34 | # Installer logs
35 | pip-log.txt
36 | pip-delete-this-directory.txt
37 |
38 | # Unit test / coverage reports
39 | htmlcov/
40 | .tox/
41 | .coverage
42 | .coverage.*
43 | .cache
44 | nosetests.xml
45 | coverage.xml
46 | *.cover
47 | .hypothesis/
48 |
49 | # Translations
50 | *.mo
51 | *.pot
52 |
53 | # Django stuff:
54 | *.log
55 | local_settings.py
56 |
57 | # Flask stuff:
58 | instance/
59 | .webassets-cache
60 |
61 | # Scrapy stuff:
62 | .scrapy
63 |
64 | # Sphinx documentation
65 | docs/_build/
66 |
67 | # PyBuilder
68 | target/
69 |
70 | # Jupyter Notebook
71 | .ipynb_checkpoints
72 |
73 | # pyenv
74 | .python-version
75 |
76 | # celery beat schedule file
77 | celerybeat-schedule
78 |
79 | # dotenv
80 | .env
81 |
82 | # virtualenv
83 | .venv
84 | venv/
85 | ENV/
86 |
87 | # Spyder project settings
88 | .spyderproject
89 |
90 | # Rope project settings
91 | .ropeproject
92 |
93 | # Pycharm
94 | .idea
95 | .vscode
--------------------------------------------------------------------------------
/.pre-commit-config.yaml:
--------------------------------------------------------------------------------
1 | files: "spreg\/"
2 | repos:
3 | - repo: https://github.com/astral-sh/ruff-pre-commit
4 | rev: "v0.6.3"
5 | hooks:
6 | - id: ruff-format
7 |
8 | ci:
9 | autofix_prs: false
10 | autoupdate_schedule: quarterly
11 |
--------------------------------------------------------------------------------
/LICENSE.txt:
--------------------------------------------------------------------------------
1 | Copyright (c) 2007-2018, PySAL Developers
2 | All rights reserved.
3 |
4 | Redistribution and use in source and binary forms, with or without
5 | modification, are permitted provided that the following conditions are met:
6 |
7 | * Redistributions of source code must retain the above copyright notice, this
8 | list of conditions and the following disclaimer.
9 |
10 | * Redistributions in binary form must reproduce the above copyright
11 | notice, this list of conditions and the following disclaimer in the
12 | documentation and/or other materials provided with the distribution.
13 |
14 | * Neither the name of the GeoDa Center for Geospatial Analysis and Computation
15 | nor the names of its contributors may be used to endorse or promote products
16 | derived from this software without specific prior written permission.
17 |
18 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
19 | CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
20 | INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
21 | MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
22 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
23 | CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
24 | SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
25 | LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
26 | USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
27 | ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
28 | LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
29 | ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
30 | POSSIBILITY OF SUCH DAMAGE.
31 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # spreg
2 |
3 | [](https://github.com/pysal/spreg/actions/workflows/unittests.yml)
4 | [](https://codecov.io/gh/pysal/spreg)
5 | 
6 | 
7 | [](https://anaconda.org/conda-forge/spreg)
8 | 
9 | [](https://zenodo.org/record/4421373)
10 |
11 | ## PySAL Spatial Econometrics Package
12 |
13 | `spreg`, short for “spatial regression,” is a Python package to estimate simultaneous autoregressive spatial regression models. These models are useful when modeling processes where observations interact with one another.
14 | This package is part of a [refactoring of PySAL](https://github.com/pysal/pysal/wiki/PEP-13:-Refactor-PySAL-Using-Submodules).
15 |
16 | ## License information
17 |
18 | See the file "LICENSE.txt" for information on the history of this
19 | software, terms & conditions for usage, and a DISCLAIMER OF ALL
20 | WARRANTIES.
21 |
22 |
--------------------------------------------------------------------------------
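To make the README's description concrete, here is a minimal sketch of fitting one such model, assuming libpysal is installed and using its bundled "columbus" sample data; the variable choices (HOVAL, INC, CRIME) are illustrative, not prescribed by the README:

```python
# Minimal sketch: a spatial lag model estimated by maximum likelihood.
# Assumes libpysal's bundled "columbus" example data; variables are illustrative.
import numpy as np
import libpysal
from spreg import ML_Lag

db = libpysal.io.open(libpysal.examples.get_path("columbus.dbf"), "r")
y = np.array(db.by_col("HOVAL")).reshape(-1, 1)          # dependent variable
x = np.array([db.by_col("INC"), db.by_col("CRIME")]).T   # exogenous variables

# Queen contiguity weights, row-standardized
w = libpysal.weights.Queen.from_shapefile(
    libpysal.examples.get_path("columbus.shp")
)
w.transform = "r"

model = ML_Lag(y, x, w, name_y="HOVAL", name_x=["INC", "CRIME"])
print(model.summary)
```

`GM_Lag` offers an instrumental-variables counterpart of the same specification.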
/ci/310-latest.yaml:
--------------------------------------------------------------------------------
1 | name: test
2 | channels:
3 | - conda-forge
4 | dependencies:
5 | - python=3.10
6 | # required
7 | - libpysal>=4.12.1
8 | - numpy>=1.3
9 | - pandas>=1.0
10 | - scipy>=0.11
11 | # testing/formatting
12 | - codecov
13 | - pytest
14 | - pytest-cov
15 | - pytest-xdist
16 | # optional
17 | - bokeh>=0.11.1
18 | - folium>=0.2.1
19 | - geopandas>=0.2
20 | - geojson>=1.3.2
21 | - matplotlib>=1.5.1
22 | - mplleaflet>=0.0.5
23 | - numba
24 | - numexpr
25 | - scikit-learn>=0.17.1
26 | - seaborn>=0.7.0
27 | - statsmodels>=0.6.1
28 | # for docs build action (this env only)
29 | - nbsphinx
30 | - numpydoc
31 | - sphinx
32 | - sphinxcontrib-bibtex
33 | - sphinx_bootstrap_theme
34 |
--------------------------------------------------------------------------------
/ci/310-min.yaml:
--------------------------------------------------------------------------------
1 | name: test
2 | channels:
3 | - conda-forge
4 | dependencies:
5 | - python=3.10
6 | # required
7 | - libpysal>=4.12.1
8 | - numpy>=1.3
9 | - pandas>=1.0
10 | - scipy>=0.11
11 | # testing/formatting
12 | - codecov
13 | - pytest
14 | - pytest-cov
15 | - pytest-xdist
16 |
--------------------------------------------------------------------------------
/ci/311-latest.yaml:
--------------------------------------------------------------------------------
1 | name: test
2 | channels:
3 | - conda-forge
4 | dependencies:
5 | - python=3.11
6 | # required
7 | - libpysal>=4.12.1
8 | - numpy>=1.3
9 | - pandas>=1.0
10 | - scipy>=0.11
11 | # testing/formatting
12 | - codecov
13 | - pytest
14 | - pytest-cov
15 | - pytest-xdist
16 | # optional
17 | - bokeh>=0.11.1
18 | - folium>=0.2.1
19 | - geopandas>=0.2
20 | - geojson>=1.3.2
21 | - matplotlib>=1.5.1
22 | - mplleaflet>=0.0.5
23 | - numba
24 | - numexpr
25 | - scikit-learn>=0.17.1
26 | - seaborn>=0.7.0
27 | - statsmodels>=0.6.1
28 |
--------------------------------------------------------------------------------
/ci/312-dev.yaml:
--------------------------------------------------------------------------------
1 | name: test
2 | channels:
3 | - conda-forge
4 | dependencies:
5 | - python=3.12
6 | # required
7 | - numpy>=1.3
8 | - pandas>=1.0
9 | - pip
10 | - scipy>=0.11
11 | # testing/formatting
12 | - codecov
13 | - pytest
14 | - pytest-cov
15 | - pytest-xdist
16 | # optional
17 | - bokeh>=0.11.1
18 | - folium>=0.2.1
19 | - geopandas>=0.2
20 | - geojson>=1.3.2
21 | - matplotlib>=1.5.1
22 | - mplleaflet>=0.0.5
23 | - numba
24 | - numexpr
25 | - scikit-learn>=0.17.1
26 | - seaborn>=0.7.0
27 | - statsmodels>=0.6.1
28 | # with pip
29 | - pip:
30 | # dev versions of packages
31 | - git+https://github.com/pysal/libpysal.git
32 |
--------------------------------------------------------------------------------
/ci/312-latest.yaml:
--------------------------------------------------------------------------------
1 | name: test
2 | channels:
3 | - conda-forge
4 | dependencies:
5 | - python=3.12
6 | # required
7 | - libpysal>=4.12.1
8 | - numpy>=1.3
9 | - pandas>=1.0
10 | - scipy>=0.11
11 | # testing/formatting
12 | - codecov
13 | - pytest
14 | - pytest-cov
15 | - pytest-xdist
16 | # optional
17 | - bokeh>=0.11.1
18 | - folium>=0.2.1
19 | - geopandas>=0.2
20 | - geojson>=1.3.2
21 | - matplotlib>=1.5.1
22 | - mplleaflet>=0.0.5
23 | - numba
24 | - numexpr
25 | - scikit-learn>=0.17.1
26 | - seaborn>=0.7.0
27 | - statsmodels>=0.6.1
28 | # for docs build action (this env only)
29 | - nbsphinx
30 | - numpydoc
31 | - sphinx>=1.4.3
32 | - sphinxcontrib-bibtex
33 | - sphinx_bootstrap_theme
34 |
--------------------------------------------------------------------------------
/codecov.yml:
--------------------------------------------------------------------------------
1 | codecov:
2 | notify:
3 | after_n_builds: 7
4 | coverage:
5 | range: 50..95
6 | round: nearest
7 | precision: 1
8 | status:
9 | project:
10 | default:
11 | threshold: 2%
12 | patch:
13 | default:
14 | threshold: 2%
15 | target: 80%
16 | ignore:
17 | - "tests/*"
18 | comment:
19 | layout: "reach, diff, files"
20 | behavior: once
21 | after_n_builds: 7
22 | require_changes: true
23 |
--------------------------------------------------------------------------------
/docs/Makefile:
--------------------------------------------------------------------------------
1 | # Minimal makefile for Sphinx documentation
2 | #
3 |
4 | # You can set these variables from the command line.
5 | SPHINXOPTS =
6 | SPHINXBUILD = sphinx-build
7 | SPHINXPROJ = spreg
8 | SOURCEDIR = .
9 | BUILDDIR = _build
10 |
11 | # Put it first so that "make" without argument is like "make help".
12 | help:
13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
14 |
15 | .PHONY: help Makefile
16 |
17 | # Catch-all target: route all unknown targets to Sphinx using the new
18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
19 | %: Makefile
20 | @rsync -r --exclude '.ipynb_checkpoints/' ../notebooks/ ./notebooks/
21 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
22 |
23 | github:
24 | @make html
25 |
26 | sync:
27 | @rsync -avh _build/html/ ../docs/ --delete
28 | @make clean
29 | touch ../docs/.nojekyll
30 |
31 | clean:
32 | rm -rf $(BUILDDIR)/*
33 | rm -rf auto_examples/
34 | rm -rf generated/
35 |
--------------------------------------------------------------------------------
/docs/_static/images/pysal_favicon.ico:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pysal/spreg/7d415f2a38d7e10db23c1f1821e136c2dcd0b19a/docs/_static/images/pysal_favicon.ico
--------------------------------------------------------------------------------
/docs/_static/pysal-styles.css:
--------------------------------------------------------------------------------
1 | /* Make thumbnails with equal heights */
2 | @media only screen and (min-width : 481px) {
3 | .row.equal-height {
4 | display: flex;
5 | flex-wrap: wrap;
6 | }
7 | .row.equal-height > [class*='col-'] {
8 | display: flex;
9 | flex-direction: column;
10 | }
11 | .row.equal-height.row:after,
12 | .row.equal-height.row:before {
13 | display: flex;
14 | }
15 |
16 | .row.equal-height > [class*='col-'] > .thumbnail,
17 | .row.equal-height > [class*='col-'] > .thumbnail > .caption {
18 | display: flex;
19 | flex: 1 0 auto;
20 | flex-direction: column;
21 | }
22 | .row.equal-height > [class*='col-'] > .thumbnail > .caption > .flex-text {
23 | flex-grow: 1;
24 | }
25 | .row.equal-height > [class*='col-'] > .thumbnail > img {
26 | width: 100%;
27 | height: 200px; /* force image's height */
28 |
29 | /* force image fit inside it's "box" */
30 | -webkit-object-fit: cover;
31 | -moz-object-fit: cover;
32 | -ms-object-fit: cover;
33 | -o-object-fit: cover;
34 | object-fit: cover;
35 | }
36 | }
37 |
38 | .row.extra-bottom-padding{
39 | margin-bottom: 20px;
40 | }
41 |
42 |
43 | .topnavicons {
44 | margin-left: 10% !important;
45 | }
46 |
47 | .topnavicons li {
48 | margin-left: 0px !important;
49 | min-width: 100px;
50 | text-align: center;
51 | }
52 |
53 | .topnavicons .thumbnail {
54 | margin-right: 10px;
55 | border: none;
56 | box-shadow: none;
57 | text-align: center;
58 | font-size: 85%;
59 | font-weight: bold;
60 | line-height: 10px;
61 | height: 100px;
62 | }
63 |
64 | .topnavicons .thumbnail img {
65 | display: block;
66 | margin-left: auto;
67 | margin-right: auto;
68 | }
69 |
70 |
71 | /* Table with a scrollbar */
72 | .bodycontainer { max-height: 600px; width: 100%; margin: 0; overflow-y: auto; }
73 | .table-scrollable { margin: 0; padding: 0; }
74 |
75 | .label {
76 | color: #ff0000;
77 | /*font-size: 100%;*/
78 | }
79 |
80 | div.body {
81 | max-width: 1080px;
82 | }
--------------------------------------------------------------------------------
/docs/api.rst:
--------------------------------------------------------------------------------
1 | .. _api_ref:
2 |
3 | .. currentmodule:: spreg
4 |
5 | API reference
6 | =============
7 |
8 | .. _models_api:
9 |
10 | Classic Models
11 | --------------
12 |
13 | .. autosummary::
14 | :toctree: generated/
15 |
16 | spreg.OLS
17 | spreg.TSLS
18 |
19 | Spatial Regression Models
20 | -------------------------
21 |
22 | These are the standard spatial regression models supported by the `spreg` package. Each of them contains a significant amount of detail in its docstring discussing how it is used, how it is fit, and how to interpret the results.
23 |
24 | .. autosummary::
25 | :toctree: generated/
26 |
27 | spreg.GM_Lag
28 | spreg.ML_Lag
29 | spreg.ML_Error
30 | spreg.GMM_Error
31 | spreg.GM_Error
32 | spreg.GM_Error_Het
33 | spreg.GM_Error_Hom
34 | spreg.GM_Combo
35 | spreg.GM_Combo_Het
36 | spreg.GM_Combo_Hom
37 | spreg.GM_Endog_Error
38 | spreg.GM_Endog_Error_Het
39 | spreg.GM_Endog_Error_Hom
40 | spreg.NSLX
41 |
42 | Discrete Choice Models
43 | ----------------------
44 |
45 | .. autosummary::
46 | :toctree: generated/
47 |
48 | spreg.Probit
49 |
50 | Regimes Models
51 | ---------------
52 |
53 | Regimes models are variants of spatial regression models that allow for structural instability in parameters; that is, coefficients may take different values in distinct subsets of the data.
54 |
55 | .. autosummary::
56 | :toctree: generated/
57 |
58 | spreg.OLS_Regimes
59 | spreg.TSLS_Regimes
60 | spreg.ML_Lag_Regimes
61 | spreg.ML_Error_Regimes
62 | spreg.GM_Lag_Regimes
63 | spreg.GM_Error_Regimes
64 | spreg.GM_Error_Het_Regimes
65 | spreg.GM_Error_Hom_Regimes
66 | spreg.GM_Combo_Regimes
67 | spreg.GM_Combo_Hom_Regimes
68 | spreg.GM_Combo_Het_Regimes
69 | spreg.GM_Endog_Error_Regimes
70 | spreg.GM_Endog_Error_Hom_Regimes
71 | spreg.GM_Endog_Error_Het_Regimes
72 | spreg.OLS_Endog_Regimes
73 | spreg.GM_Lag_Endog_Regimes
74 | spreg.Skater_reg
75 |
76 | Seemingly-Unrelated Regressions
77 | --------------------------------
78 |
79 | Seemingly-unrelated regression models are a generalization of linear regression. These models (and their spatial generalizations) allow for correlation in the residual terms between groups that use the same model. In spatial Seemingly-Unrelated Regressions, the error terms across groups are allowed to exhibit a structured type of correlation: spatial correlation.
80 |
81 | .. autosummary::
82 | :toctree: generated/
83 |
84 | spreg.SUR
85 | spreg.SURerrorGM
86 | spreg.SURerrorML
87 | spreg.SURlagIV
88 | spreg.ThreeSLS
89 |
90 | Spatial Panel Models
91 | --------------------
92 |
93 | Spatial panel models allow for evaluating correlation in both spatial and time dimensions.
94 |
95 | .. autosummary::
96 | :toctree: generated/
97 |
98 | spreg.Panel_FE_Lag
99 | spreg.Panel_FE_Error
100 | spreg.Panel_RE_Lag
101 | spreg.Panel_RE_Error
102 | spreg.GM_KKP
103 |
104 | Diagnostics
105 | -----------
106 |
107 | Diagnostic tests are useful for assessing model fit, sufficiency, and specification correctness.
108 |
109 | .. autosummary::
110 | :toctree: generated/
111 |
112 | spreg.f_stat
113 | spreg.t_stat
114 | spreg.r2
115 | spreg.ar2
116 | spreg.se_betas
117 | spreg.log_likelihood
118 | spreg.akaike
119 | spreg.schwarz
120 | spreg.condition_index
121 | spreg.dwh
122 | spreg.jarque_bera
123 | spreg.breusch_pagan
124 | spreg.white
125 | spreg.koenker_bassett
126 | spreg.vif
127 | spreg.likratiotest
128 | spreg.LMtests
129 | spreg.MoranRes
130 | spreg.AKtest
131 | spreg.sur_setp
132 | spreg.sur_lrtest
133 | spreg.sur_lmtest
134 | spreg.lam_setp
135 | spreg.surLMe
136 | spreg.surLMlag
137 | spreg.constant_check
138 | spreg.panel_LMlag
139 | spreg.panel_LMerror
140 | spreg.panel_rLMlag
141 | spreg.panel_rLMerror
142 | spreg.panel_Hausman
143 | spreg.sputils.spmultiplier
144 | spreg.diagnostics_probit.sp_tests
145 |
146 |
147 | Spatial Specification Search
148 | --------------------------------
149 |
150 | The `spsearch` module contains tools for conducting incremental specification searches for spatial econometric models following the approach of :cite:p:`anselin2024SpatialEconometric`.
151 |
152 | .. autosummary::
153 | :toctree: generated/
154 |
155 | spreg.spsearch.stge_classic
156 | spreg.spsearch.stge_kb
157 | spreg.spsearch.stge_pre
158 | spreg.spsearch.gets_gns
159 | spreg.spsearch.gets_sdm
160 |
161 |
162 | DGP
163 | -----------
164 |
165 | Tools for simulating synthetic data according to the data-generating processes implied by different spatial model specifications.
166 |
167 | .. autosummary::
168 | :toctree: generated/
169 |
170 | spreg.dgp.make_error
171 | spreg.dgp.make_x
172 | spreg.dgp.make_wx
173 | spreg.dgp.make_xb
174 | spreg.dgp.make_wxg
175 | spreg.dgp.dgp_errproc
176 | spreg.dgp.dgp_ols
177 | spreg.dgp.dgp_slx
178 | spreg.dgp.dgp_sperror
179 | spreg.dgp.dgp_slxerror
180 | spreg.dgp.dgp_lag
181 | spreg.dgp.dgp_spdurbin
182 | spreg.dgp.dgp_lagerr
183 | spreg.dgp.dgp_gns
184 | spreg.dgp.dgp_mess
185 | spreg.dgp.dgp_probit
186 | spreg.dgp.make_bin
187 | spreg.dgp.make_heterror
188 | spreg.dgp.make_vmult
189 |
--------------------------------------------------------------------------------
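The "Regimes Models" description above (coefficients that vary across subsets of the data) lends itself to a short sketch. This one, again using libpysal's "columbus" sample data, treats the "NSA" column as an illustrative regime indicator and turns on `spat_diag` so the LM tests listed under "Diagnostics" appear in the summary; it is a sketch under those assumptions, not canonical usage:

```python
# Hedged sketch: OLS with regimes, plus spatial diagnostics.
# Assumes libpysal's "columbus" data; "NSA" is an illustrative regime label.
import numpy as np
import libpysal
from spreg import OLS_Regimes

db = libpysal.io.open(libpysal.examples.get_path("columbus.dbf"), "r")
y = np.array(db.by_col("HOVAL")).reshape(-1, 1)
x = np.array([db.by_col("INC"), db.by_col("CRIME")]).T
regimes = db.by_col("NSA")  # one regime label per observation

w = libpysal.weights.Queen.from_shapefile(
    libpysal.examples.get_path("columbus.shp")
)
w.transform = "r"

olsr = OLS_Regimes(
    y, x, regimes,
    w=w, spat_diag=True,                     # appends LM specification tests
    name_y="HOVAL", name_x=["INC", "CRIME"],
    name_regimes="NSA",
)
print(olsr.summary)  # separate coefficient blocks per regime
```

The SUR, panel, and DGP entries above follow the same pattern: one estimation (or simulation) class or function per specification, each documented in its docstring.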
/docs/conf.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | #
3 | # spreg documentation build configuration file, created by
4 | # sphinx-quickstart on Wed Jun 6 15:54:22 2018.
5 | #
6 | # This file is execfile()d with the current directory set to its
7 | # containing dir.
8 | #
9 | # Note that not all possible configuration values are present in this
10 | # autogenerated file.
11 | #
12 | # All configuration values have a default; values that are commented out
13 | # serve to show the default.
14 |
15 | # If extensions (or modules to document with autodoc) are in another directory,
16 | # add these directories to sys.path here. If the directory is relative to the
17 | # documentation root, use os.path.abspath to make it absolute, like shown here.
18 | #
19 | import sphinx_bootstrap_theme
20 |
21 | # import your package to obtain the version info to display on the docs website
22 | import spreg
23 |
24 |
25 | # -- General configuration ------------------------------------------------
26 |
27 | # If your documentation needs a minimal Sphinx version, state it here.
28 | #
29 | # needs_sphinx = '1.0'
30 | # Add any Sphinx extension module names here, as strings. They can be
31 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
32 | # ones.
33 | extensions = [ #'sphinx_gallery.gen_gallery',
34 | "sphinx.ext.autodoc",
35 | "sphinx.ext.autosummary",
36 | "sphinx.ext.viewcode",
37 | "sphinxcontrib.bibtex",
38 | "sphinx.ext.mathjax",
39 | "sphinx.ext.doctest",
40 | "sphinx.ext.intersphinx",
41 | "matplotlib.sphinxext.plot_directive",
42 | "nbsphinx",
43 | "numpydoc",
44 | ]
45 | bibtex_bibfiles = ["_static/references.bib"]
46 |
47 |
48 | # Add any paths that contain templates here, relative to this directory.
49 | templates_path = ["_templates"]
50 |
51 | # The suffix(es) of source filenames.
52 | # You can specify multiple suffix as a list of string:
53 | #
54 | # source_suffix = ['.rst', '.md']
55 | source_suffix = ".rst"
56 |
57 | # The master toctree document.
58 | master_doc = "index"
59 |
60 | # General information about the project.
61 | project = "spreg"
62 | copyright = "2018-, pysal developers"
63 | author = "pysal developers"
64 |
65 | # The version info for the project you're documenting, acts as replacement for
66 | # |version| and |release|, also used in various other places throughout the
67 | # built documents.
68 | #
69 | # The full version.
70 | version = spreg.__version__
71 | release = spreg.__version__
72 |
73 | # The language for content autogenerated by Sphinx. Refer to documentation
74 | # for a list of supported languages.
75 | #
76 | # This is also used if you do content translation via gettext catalogs.
77 | # Usually you set "language" from the command line for these cases.
78 | language = None
79 |
80 | # List of patterns, relative to source directory, that match files and
81 | # directories to ignore when looking for source files.
82 | # This patterns also effect to html_static_path and html_extra_path
83 | exclude_patterns = ["_build", "Thumbs.db", ".DS_Store", "tests/*"]
84 |
85 | # The name of the Pygments (syntax highlighting) style to use.
86 | pygments_style = "sphinx"
87 |
88 | # If true, `todo` and `todoList` produce output, else they produce nothing.
89 | todo_include_todos = False
90 |
91 | # -- Options for HTML output ----------------------------------------------
92 |
93 | # The theme to use for HTML and HTML Help pages. See the documentation for
94 | # a list of builtin themes.
95 | #
96 | # html_theme = 'alabaster'
97 | html_theme = "bootstrap"
98 | html_theme_path = sphinx_bootstrap_theme.get_html_theme_path()
99 | html_title = "%s v%s Manual" % (project, version)
100 |
101 | # (Optional) Logo of your package. Should be small enough to fit the navbar (ideally 24x24).
102 | # Path should be relative to the ``_static`` files directory.
103 | # html_logo = "_static/images/package_logo.jpg"
104 |
105 | # (Optional) PySAL favicon
106 | html_favicon = "_static/images/pysal_favicon.ico"
107 |
108 |
109 | # Theme options are theme-specific and customize the look and feel of a theme
110 | # further. For a list of options available for each theme, see the
111 | # documentation.
112 | #
113 | html_theme_options = {
114 | # Navigation bar title. (Default: ``project`` value)
115 | "navbar_title": "spreg", # string of your project name, for example, 'giddy'
116 | # Render the next and previous page links in navbar. (Default: true)
117 | "navbar_sidebarrel": False,
118 | # Render the current pages TOC in the navbar. (Default: true)
119 | #'navbar_pagenav': True,
120 | #'navbar_pagenav': False,
121 | # No sidebar
122 | "nosidebar": True,
123 | # Tab name for the current pages TOC. (Default: "Page")
124 | #'navbar_pagenav_name': "Page",
125 | # Global TOC depth for "site" navbar tab. (Default: 1)
126 | # Switching to -1 shows all levels.
127 | "globaltoc_depth": 2,
128 | # Include hidden TOCs in Site navbar?
129 | #
130 | # Note: If this is "false", you cannot have mixed ``:hidden:`` and
131 | # non-hidden ``toctree`` directives in the same page, or else the build
132 | # will break.
133 | #
134 | # Values: "true" (default) or "false"
135 | "globaltoc_includehidden": "true",
136 |     # HTML navbar class (Default: "navbar") to attach to <div> element.
137 | # For black navbar, do "navbar navbar-inverse"
138 | #'navbar_class': "navbar navbar-inverse",
139 | # Fix navigation bar to top of page?
140 | # Values: "true" (default) or "false"
141 | "navbar_fixed_top": "true",
142 | # Location of link to source.
143 | # Options are "nav" (default), "footer" or anything else to exclude.
144 | "source_link_position": "footer",
145 | # Bootswatch (http://bootswatch.com/) theme.
146 | #
147 | # Options are nothing (default) or the name of a valid theme
148 | # such as "amelia" or "cosmo", "yeti", "flatly".
149 | "bootswatch_theme": "yeti",
150 | # Choose Bootstrap version.
151 | # Values: "3" (default) or "2" (in quotes)
152 | "bootstrap_version": "3",
153 | # Navigation bar menu
154 | "navbar_links": [
155 | ("Installation", "installation"),
156 | ("Tutorials", "tutorials"),
157 | ("API", "api"),
158 | ("References", "references"),
159 | ],
160 | }
161 |
162 | # Add any paths that contain custom static files (such as style sheets) here,
163 | # relative to this directory. They are copied after the builtin static files,
164 | # so a file named "default.css" will overwrite the builtin "default.css".
165 | html_static_path = ["_static"]
166 |
167 | # Custom sidebar templates, maps document names to template names.
168 | # html_sidebars = {}
169 | # html_sidebars = {'sidebar': ['localtoc.html', 'sourcelink.html', 'searchbox.html']}
170 |
171 | # -- Options for HTMLHelp output ------------------------------------------
172 |
173 | # Output file base name for HTML help builder.
174 | htmlhelp_basename = project + "doc"
175 |
176 |
177 | # -- Options for LaTeX output ---------------------------------------------
178 |
179 | latex_elements = {
180 | # The paper size ('letterpaper' or 'a4paper').
181 | #
182 | # 'papersize': 'letterpaper',
183 | # The font size ('10pt', '11pt' or '12pt').
184 | #
185 | # 'pointsize': '10pt',
186 | # Additional stuff for the LaTeX preamble.
187 | #
188 | # 'preamble': '',
189 | # Latex figure (float) alignment
190 | #
191 | # 'figure_align': 'htbp',
192 | }
193 |
194 | # Grouping the document tree into LaTeX files. List of tuples
195 | # (source start file, target name, title,
196 | # author, documentclass [howto, manual, or own class]).
197 | latex_documents = [
198 | (master_doc, "spreg.tex", "spreg Documentation", "pysal developers", "manual"),
199 | ]
200 |
201 |
202 | # -- Options for manual page output ---------------------------------------
203 |
204 | # One entry per manual page. List of tuples
205 | # (source start file, name, description, authors, manual section).
206 | man_pages = [(master_doc, project, "%s Documentation" % project, [author], 1)]
207 |
208 |
209 | # -- Options for Texinfo output -------------------------------------------
210 |
211 | # Grouping the document tree into Texinfo files. List of tuples
212 | # (source start file, target name, title, author,
213 | # dir menu entry, description, category)
214 | texinfo_documents = [
215 | (
216 | master_doc,
217 | project,
218 | "%s Documentation" % project,
219 | author,
220 | project,
221 | "The Python Spatial Econometrics Package.",
222 | "Miscellaneous",
223 | ),
224 | ]
225 |
226 |
227 | # -----------------------------------------------------------------------------
228 | # Autosummary
229 | # -----------------------------------------------------------------------------
230 |
231 | # Generate the API documentation when building
232 | autosummary_generate = True
233 |
234 | # avoid showing members twice
235 | numpydoc_show_class_members = False
236 | numpydoc_use_plots = True
237 | class_members_toctree = True
238 | numpydoc_show_inherited_class_members = True
239 | numpydoc_xref_param_type = True
240 |
241 | # automatically document class members
242 | autodoc_default_options = {
243 | "members": True,
244 | "undoc-members": True,
245 | "inherited-members": True,
246 | }
247 |
248 | # display the source code for Plot directive
249 | plot_include_source = True
250 |
251 |
252 | def setup(app):
253 | app.add_css_file("pysal-styles.css")
254 |
255 |
256 | # Configuration for intersphinx
257 | intersphinx_mapping = {
258 | "libpysal": ("https://pysal.org/libpysal/", None),
259 | "matplotlib": ("https://matplotlib.org/", None),
260 | "numpy": ("https://docs.scipy.org/doc/numpy", None),
261 | "pandas": ("https://pandas.pydata.org/pandas-docs/stable/", None),
262 | "python": ("https://docs.python.org/3", None),
263 | "scipy": ("https://docs.scipy.org/doc/scipy/reference/", None),
264 | }
265 |
266 |
267 | # This is processed by Jinja2 and inserted before each notebook
268 | nbsphinx_prolog = r"""
269 | {% set docname = env.doc2path(env.docname, base='') %}
270 |
271 | .. only:: html
272 |
273 | .. role:: raw-html(raw)
274 | :format: html
275 |
276 | .. nbinfo::
277 |
278 | This page was generated from `{{ docname }}`__.
279 | Interactive online version:
280 |         :raw-html:`<a href="https://mybinder.org/v2/gh/pysal/spreg/master?filepath={{ docname }}"><img alt="Binder badge" src="https://mybinder.org/badge_logo.svg" style="vertical-align:text-bottom"></a>`
281 |
282 | __ https://github.com/pysal/spreg/blob/master/{{ docname }}
283 |
284 | .. raw:: latex
285 |
286 | \nbsphinxstartnotebook{\scriptsize\noindent\strut
287 | \textcolor{gray}{The following section was generated from
288 | \sphinxcode{\sphinxupquote{\strut {{ docname | escape_latex }}}} \dotfill}}
289 | """
290 |
291 | # This is processed by Jinja2 and inserted after each notebook
292 | nbsphinx_epilog = r"""
293 | .. raw:: latex
294 |
295 | \nbsphinxstopnotebook{\scriptsize\noindent\strut
296 | \textcolor{gray}{\dotfill\ \sphinxcode{\sphinxupquote{\strut
297 | {{ env.doc2path(env.docname, base='doc') | escape_latex }}}} ends here.}}
298 | """
299 |
300 | # List of arguments to be passed to the kernel that executes the notebooks:
301 | nbsphinx_execute_arguments = [
302 | "--InlineBackend.figure_formats={'svg', 'pdf'}",
303 | "--InlineBackend.rc={'figure.dpi': 96}",
304 | ]
305 |
306 | mathjax_config = {
307 | "TeX": {"equationNumbers": {"autoNumber": "AMS", "useLabelIds": True}},
308 | }
309 |
--------------------------------------------------------------------------------
/docs/index.rst:
--------------------------------------------------------------------------------
1 | .. documentation master file
2 |
3 | Spatial Regression Models (spreg)
4 | =================================
5 |
6 | `spreg`, short for "spatial regression," is a Python package to estimate simultaneous autoregressive spatial regression models. These models are useful when modeling processes where observations interact with one another. For more information on these models, consult the Spatial Regression short course by Luc Anselin (Spring 2017), with the Center for Spatial Data Science at the University of Chicago:
7 |
8 | ----
9 |
10 | .. raw:: html
11 |
12 |
13 |
14 |
15 |
16 | .. toctree::
17 | :hidden:
18 | :maxdepth: 3
19 | :caption: Contents:
20 |
21 | Installation
22 | Tutorials
23 | API
24 | References
25 |
26 |
27 | .. _PySAL: https://github.com/pysal/pysal
28 |
--------------------------------------------------------------------------------
/docs/installation.rst:
--------------------------------------------------------------------------------
1 | .. Installation
2 |
3 | Installation
4 | ============
5 |
6 | spreg is installable using the Python package manager `pip`. To install::
7 |
8 | pip install spreg
9 |
10 | Further, all of the stable functionality is *also* available in PySAL, the
11 | Python Spatial Analysis Library. PySAL can be installed using `pip` or `conda`::
12 |
13 | pip install pysal #or
14 | conda install pysal
15 |
--------------------------------------------------------------------------------
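A quick way to confirm either route worked is to import the package and print the version it resolved (spreg exposes `__version__` from package metadata, per `spreg/__init__.py`); a minimal sketch:

```python
# Minimal post-install check.
import spreg

print(spreg.__version__)  # set from package metadata in spreg/__init__.py
```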
/docs/references.rst:
--------------------------------------------------------------------------------
1 | .. reference for the docs
2 |
3 | References
4 | ==========
5 |
6 | .. bibliography:: _static/references.bib
7 | :all:
8 |
--------------------------------------------------------------------------------
/docs/tutorials.rst:
--------------------------------------------------------------------------------
1 | .. tutorials
2 |
3 |
4 | Tutorials
5 | =========
6 |
7 | .. toctree::
8 | :maxdepth: 1
9 | :caption: Sample Data Sets
10 |
11 | notebooks/1_sample_data.ipynb
12 |
13 | .. toctree::
14 | :maxdepth: 1
15 | :caption: Data Input/Output
16 |
17 | notebooks/2_data_input_output.ipynb
18 |
19 | .. toctree::
20 | :maxdepth: 1
21 | :caption: Basic Mapping
22 |
23 | notebooks/3_basic_mapping.ipynb
24 |
25 | .. toctree::
26 | :maxdepth: 1
27 | :caption: Spatial Weights
28 |
29 | notebooks/4_spatial_weights.ipynb
30 |
31 | .. toctree::
32 | :maxdepth: 1
33 | :caption: Basic Ordinary Least Squares Regression (OLS)
34 |
35 | notebooks/5_OLS.ipynb
36 |
37 | .. toctree::
38 | :maxdepth: 1
39 | :caption: Two Stage Least Squares Regression (2SLS)
40 |
41 | notebooks/6_TWOSLS.ipynb
42 |
43 | .. toctree::
44 | :maxdepth: 1
45 | :caption: Spatial Model Specifications
46 |
47 | notebooks/7_spatial_models.ipynb
48 |
49 | .. toctree::
50 | :maxdepth: 1
51 | :caption: Spatial Multipliers
52 |
53 | notebooks/8_spatial_multipliers.ipynb
54 |
55 | .. toctree::
56 | :maxdepth: 1
57 | :caption: Specification Tests
58 |
59 | notebooks/9_specification_tests.ipynb
60 |
61 | .. toctree::
62 | :maxdepth: 1
63 | :caption: Specification Tests - Properties
64 |
65 | notebooks/10_specification_tests_properties.ipynb
66 |
67 | .. toctree::
68 | :maxdepth: 1
69 | :caption: Distance Decay
70 |
71 | notebooks/11_distance_decay.ipynb
72 |
73 | .. toctree::
74 | :maxdepth: 1
75 | :caption: Estimating SLX Models
76 |
77 | notebooks/12_estimating_slx.ipynb
78 |
79 | .. toctree::
80 | :maxdepth: 1
81 | :caption: Maximum Likelihood Estimation - Spatial Lag Model
82 |
83 | notebooks/13_ML_estimation_spatial_lag.ipynb
84 |
85 | .. toctree::
86 | :maxdepth: 1
87 | :caption: Instrumental Variables Estimation - Spatial Lag Model
88 |
89 | notebooks/14_IV_estimation_spatial_lag.ipynb
90 |
91 | .. toctree::
92 | :maxdepth: 1
93 | :caption: Maximum Likelihood Estimation - Spatial Error Model
94 |
95 | notebooks/15_ML_estimation_spatial_error.ipynb
96 |
97 | .. toctree::
98 | :maxdepth: 1
99 | :caption: GMM Estimation - Spatial Error Model
100 |
101 | notebooks/16_GMM_estimation_spatial_error.ipynb
102 |
103 | .. toctree::
104 | :maxdepth: 1
105 | :caption: GMM Estimation - Higher Order Models
106 |
107 | notebooks/17_GMM_higher_order.ipynb
108 |
109 | .. toctree::
110 | :maxdepth: 1
111 | :caption: Spatial Panel Models with Fixed Effects
112 |
113 | notebooks/Panel_FE_example.ipynb
114 |
115 | .. toctree::
116 | :maxdepth: 1
117 | :caption: Skater_reg: Endogenous Spatial Regimes
118 |
119 | notebooks/skater_reg.ipynb
120 |
--------------------------------------------------------------------------------
/environment.yml:
--------------------------------------------------------------------------------
1 | name: spreg
2 | channels:
3 | - conda-forge
4 | dependencies:
5 | - python>=3.10
6 | - bokeh
7 | - folium
8 | - geojson
9 | - geopandas
10 | - libpysal
11 | - matplotlib
12 | - mplleaflet
13 | - numba
14 | - numexpr
15 | - numpy
16 | - pandas
17 | - scikit-learn
18 | - scipy
19 | - seaborn
20 | - statsmodels
21 | - watermark
22 | - pip
23 | - pip:
24 | - git+https://github.com/pysal/spreg.git@main
25 |
--------------------------------------------------------------------------------
/notebooks/LICENSE:
--------------------------------------------------------------------------------
1 | Creative Commons Legal Code
2 |
3 | CC0 1.0 Universal
4 |
5 | CREATIVE COMMONS CORPORATION IS NOT A LAW FIRM AND DOES NOT PROVIDE
6 | LEGAL SERVICES. DISTRIBUTION OF THIS DOCUMENT DOES NOT CREATE AN
7 | ATTORNEY-CLIENT RELATIONSHIP. CREATIVE COMMONS PROVIDES THIS
8 | INFORMATION ON AN "AS-IS" BASIS. CREATIVE COMMONS MAKES NO WARRANTIES
9 | REGARDING THE USE OF THIS DOCUMENT OR THE INFORMATION OR WORKS
10 | PROVIDED HEREUNDER, AND DISCLAIMS LIABILITY FOR DAMAGES RESULTING FROM
11 | THE USE OF THIS DOCUMENT OR THE INFORMATION OR WORKS PROVIDED
12 | HEREUNDER.
13 |
14 | Statement of Purpose
15 |
16 | The laws of most jurisdictions throughout the world automatically confer
17 | exclusive Copyright and Related Rights (defined below) upon the creator
18 | and subsequent owner(s) (each and all, an "owner") of an original work of
19 | authorship and/or a database (each, a "Work").
20 |
21 | Certain owners wish to permanently relinquish those rights to a Work for
22 | the purpose of contributing to a commons of creative, cultural and
23 | scientific works ("Commons") that the public can reliably and without fear
24 | of later claims of infringement build upon, modify, incorporate in other
25 | works, reuse and redistribute as freely as possible in any form whatsoever
26 | and for any purposes, including without limitation commercial purposes.
27 | These owners may contribute to the Commons to promote the ideal of a free
28 | culture and the further production of creative, cultural and scientific
29 | works, or to gain reputation or greater distribution for their Work in
30 | part through the use and efforts of others.
31 |
32 | For these and/or other purposes and motivations, and without any
33 | expectation of additional consideration or compensation, the person
34 | associating CC0 with a Work (the "Affirmer"), to the extent that he or she
35 | is an owner of Copyright and Related Rights in the Work, voluntarily
36 | elects to apply CC0 to the Work and publicly distribute the Work under its
37 | terms, with knowledge of his or her Copyright and Related Rights in the
38 | Work and the meaning and intended legal effect of CC0 on those rights.
39 |
40 | 1. Copyright and Related Rights. A Work made available under CC0 may be
41 | protected by copyright and related or neighboring rights ("Copyright and
42 | Related Rights"). Copyright and Related Rights include, but are not
43 | limited to, the following:
44 |
45 | i. the right to reproduce, adapt, distribute, perform, display,
46 | communicate, and translate a Work;
47 | ii. moral rights retained by the original author(s) and/or performer(s);
48 | iii. publicity and privacy rights pertaining to a person's image or
49 | likeness depicted in a Work;
50 | iv. rights protecting against unfair competition in regards to a Work,
51 | subject to the limitations in paragraph 4(a), below;
52 | v. rights protecting the extraction, dissemination, use and reuse of data
53 | in a Work;
54 | vi. database rights (such as those arising under Directive 96/9/EC of the
55 | European Parliament and of the Council of 11 March 1996 on the legal
56 | protection of databases, and under any national implementation
57 | thereof, including any amended or successor version of such
58 | directive); and
59 | vii. other similar, equivalent or corresponding rights throughout the
60 | world based on applicable law or treaty, and any national
61 | implementations thereof.
62 |
63 | 2. Waiver. To the greatest extent permitted by, but not in contravention
64 | of, applicable law, Affirmer hereby overtly, fully, permanently,
65 | irrevocably and unconditionally waives, abandons, and surrenders all of
66 | Affirmer's Copyright and Related Rights and associated claims and causes
67 | of action, whether now known or unknown (including existing as well as
68 | future claims and causes of action), in the Work (i) in all territories
69 | worldwide, (ii) for the maximum duration provided by applicable law or
70 | treaty (including future time extensions), (iii) in any current or future
71 | medium and for any number of copies, and (iv) for any purpose whatsoever,
72 | including without limitation commercial, advertising or promotional
73 | purposes (the "Waiver"). Affirmer makes the Waiver for the benefit of each
74 | member of the public at large and to the detriment of Affirmer's heirs and
75 | successors, fully intending that such Waiver shall not be subject to
76 | revocation, rescission, cancellation, termination, or any other legal or
77 | equitable action to disrupt the quiet enjoyment of the Work by the public
78 | as contemplated by Affirmer's express Statement of Purpose.
79 |
80 | 3. Public License Fallback. Should any part of the Waiver for any reason
81 | be judged legally invalid or ineffective under applicable law, then the
82 | Waiver shall be preserved to the maximum extent permitted taking into
83 | account Affirmer's express Statement of Purpose. In addition, to the
84 | extent the Waiver is so judged Affirmer hereby grants to each affected
85 | person a royalty-free, non transferable, non sublicensable, non exclusive,
86 | irrevocable and unconditional license to exercise Affirmer's Copyright and
87 | Related Rights in the Work (i) in all territories worldwide, (ii) for the
88 | maximum duration provided by applicable law or treaty (including future
89 | time extensions), (iii) in any current or future medium and for any number
90 | of copies, and (iv) for any purpose whatsoever, including without
91 | limitation commercial, advertising or promotional purposes (the
92 | "License"). The License shall be deemed effective as of the date CC0 was
93 | applied by Affirmer to the Work. Should any part of the License for any
94 | reason be judged legally invalid or ineffective under applicable law, such
95 | partial invalidity or ineffectiveness shall not invalidate the remainder
96 | of the License, and in such case Affirmer hereby affirms that he or she
97 | will not (i) exercise any of his or her remaining Copyright and Related
98 | Rights in the Work or (ii) assert any associated claims and causes of
99 | action with respect to the Work, in either case contrary to Affirmer's
100 | express Statement of Purpose.
101 |
102 | 4. Limitations and Disclaimers.
103 |
104 | a. No trademark or patent rights held by Affirmer are waived, abandoned,
105 | surrendered, licensed or otherwise affected by this document.
106 | b. Affirmer offers the Work as-is and makes no representations or
107 | warranties of any kind concerning the Work, express, implied,
108 | statutory or otherwise, including without limitation warranties of
109 | title, merchantability, fitness for a particular purpose, non
110 | infringement, or the absence of latent or other defects, accuracy, or
111 | the present or absence of errors, whether or not discoverable, all to
112 | the greatest extent permissible under applicable law.
113 | c. Affirmer disclaims responsibility for clearing rights of other persons
114 | that may apply to the Work or any use thereof, including without
115 | limitation any person's Copyright and Related Rights in the Work.
116 | Further, Affirmer disclaims responsibility for obtaining any necessary
117 | consents, permissions or other rights required for any use of the
118 | Work.
119 | d. Affirmer understands and acknowledges that Creative Commons is not a
120 | party to this document and has no duty or obligation with respect to
121 | this CC0 or use of the Work.
122 |
--------------------------------------------------------------------------------
/notebooks/README.md:
--------------------------------------------------------------------------------
1 | # spatial_regression_notebooks
2 | Jupyter notebooks to illustrate spatial regression functionality
3 |
4 | ## Recommended Installation
5 | The safest installation is in a separate *environment* with the following packages installed. The versions listed are the ones used for the notebooks:
6 |
7 | - numpy 2.1.1
8 | - scipy 1.14.1
9 | - scikit-learn 1.5.1
10 | - pandas 2.2.2
11 | - matplotlib 3.9.2
12 | - seaborn 0.13.2
13 | - pysal 24.07 (this also installs geopandas; without pysal, a separate installation of geopandas is required)
14 | - jupyter 1.1.1
15 |
16 | Make sure the version of spreg is at least 1.7; pysal 24.07 includes spreg 1.6.1. To obtain the latest version, use `pip install --upgrade spreg`.
17 |
--------------------------------------------------------------------------------
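Since the notebooks expect spreg 1.7 while pysal 24.07 ships spreg 1.6.1, a small guard at the top of a notebook can catch a stale install early; a hedged sketch, assuming the `packaging` helper (bundled with pip) is available:

```python
# Hedged sketch: fail fast if the installed spreg predates 1.7.
from packaging.version import Version

import spreg

if Version(spreg.__version__) < Version("1.7"):
    raise RuntimeError(
        "These notebooks need spreg >= 1.7; run `pip install --upgrade spreg`."
    )
```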
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [build-system]
2 | requires = ["setuptools>=61.0", "setuptools_scm[toml]>=6.2"]
3 | build-backend = "setuptools.build_meta"
4 |
5 | [tool.setuptools_scm]
6 |
7 | [project]
8 | name = "spreg"
9 | dynamic = ["version"]
10 | authors = [
11 | { name = "Luc Anselin", email = "anselin@uchicago.edu" },
12 | { name = "Serge Rey", email = "sjsrey@gmail.com" },
13 |     { name = "Pedro Amaral", email = "pedrovma@gmail.com" },
14 | ]
15 | maintainers = [{ name = "pysal contributors" }]
16 | license = { text = "BSD 3-Clause" }
17 | description = "PySAL Spatial Econometric Regression in Python"
18 | keywords = [
19 | "spatial econometrics, regression, statistics, spatial modeling"
20 | ]
21 | readme = { text = """\
22 | Spatial Econometric Regression in Python
23 |
24 | """, content-type = "text/x-rst" }
25 | classifiers = [
26 | "Programming Language :: Python :: 3",
27 | "License :: OSI Approved :: BSD License",
28 | "Operating System :: OS Independent",
29 | "Intended Audience :: Science/Research",
30 | "Topic :: Scientific/Engineering :: GIS",
31 | ]
32 | requires-python = ">=3.9"
33 | dependencies = [
34 | "scipy>=0.11",
35 | "numpy>=1.23",
36 | "pandas",
37 | "libpysal>=4.0.0",
38 | "scikit-learn>=0.22",
39 | ]
40 |
41 | [project.urls]
42 | Home = "https://github.com/pysal/spreg/"
43 | Repository = "https://github.com/pysal/spreg"
44 |
45 | [project.optional-dependencies]
46 | dev = ["pre-commit"]
47 | docs = [
48 | "nbsphinx",
49 | "numpydoc",
50 | "pandoc",
51 | "sphinx",
52 | "sphinxcontrib-bibtex",
53 | "sphinx_bootstrap_theme",
54 | "mkdocs-jupyter",
55 | "myst-parser"
56 | ]
57 | tests = [
58 | "codecov",
59 | "coverage",
60 | "pytest",
61 | "pytest-mpl",
62 | "pytest-cov",
63 | "watermark",
64 |
65 | ]
66 |
67 | [tool.setuptools.packages.find]
68 | include = ["spreg", "spreg.*"]
69 |
70 | [tool.ruff]
71 | line-length = 88
72 | lint.select = ["E", "F", "W", "I", "UP", "N", "B", "A", "C4", "SIM", "ARG"]
73 | lint.ignore = [
74 | "B006",
75 | "B008",
76 | "B009",
77 | "B010",
78 | "C408",
79 | "E731",
80 | "F401",
81 | "F403",
82 | "N803",
83 | "N806",
84 | "N999",
85 | "UP007"
86 | ]
87 | exclude = ["spreg/tests/*", "docs/*"]
88 |
89 | [tool.coverage.run]
90 | source = ["./spreg"]
91 |
92 | [tool.coverage.report]
93 | exclude_lines = [
94 | "if self.debug:",
95 | "pragma: no cover",
96 | "raise NotImplementedError",
97 | "except ModuleNotFoundError:",
98 | "except ImportError",
99 | ]
100 | ignore_errors = true
101 | omit = ["spreg/tests/*", "docs/conf.py"]
102 |
--------------------------------------------------------------------------------
/spreg/__init__.py:
--------------------------------------------------------------------------------
1 |
2 | import contextlib
3 | from importlib.metadata import PackageNotFoundError, version
4 |
5 | from .dgp import *
6 | from .diagnostics import *
7 | from .diagnostics_panel import *
8 | from .diagnostics_sp import *
9 | from .diagnostics_sur import *
10 | from .diagnostics_tsls import *
11 | from .diagnostics_probit import *
12 | from .error_sp import *
13 | from .error_sp_het import *
14 | from .error_sp_het_regimes import *
15 | from .error_sp_hom import *
16 | from .error_sp_hom_regimes import *
17 | from .error_sp_regimes import *
18 | from .ml_error import *
19 | from .ml_error_regimes import *
20 | from .ml_lag import *
21 | from .ml_lag_regimes import *
22 | from .nslx import *
23 | from .ols import *
24 | from .ols_regimes import *
25 | from .panel_fe import *
26 | from .panel_re import *
27 | from .probit import *
28 | from .regimes import *
29 | from .skater_reg import *
30 | from .spsearch import *
31 | from .sp_panels import *
32 | from .sputils import *
33 | from .sur import *
34 | from .sur_error import *
35 | from .sur_lag import *
36 | from .sur_utils import *
37 | from .twosls import *
38 | from .twosls_regimes import *
39 | from .twosls_sp import *
40 | from .twosls_sp_regimes import *
41 | from .user_output import *
42 | from .utils import *
43 |
44 | with contextlib.suppress(PackageNotFoundError):
45 | __version__ = version("spreg")
46 |
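The star imports above flatten the package namespace: every name listed in a submodule's `__all__` is re-exported at the top level. A small illustration, using names defined in this repository:

    # OLS comes from spreg.ols, Probit from spreg.probit,
    # panel_LMlag from spreg.diagnostics_panel
    from spreg import OLS, Probit, panel_LMlag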
--------------------------------------------------------------------------------
/spreg/diagnostics_panel.py:
--------------------------------------------------------------------------------
1 | """
2 | Diagnostics for panel data estimation
3 | """
4 |
5 | __author__ = "Wei Kang weikang9009@gmail.com, \
6 | Pedro Amaral pedroamaral@cedeplar.ufmg.br, \
7 | Pablo Estrada pabloestradace@gmail.com"
8 |
9 | import numpy as np
10 | import numpy.linalg as la
11 | from scipy import sparse as sp
12 | from . import user_output as USER
13 | from .ols import OLS
14 | from .utils import spdot
15 | from scipy import stats
16 | from .panel_utils import check_panel
17 |
18 | chisqprob = lambda chisq, df: stats.chi2.sf(chisq, df)
19 |
20 | __all__ = [
21 | "panel_LMlag",
22 | "panel_LMerror",
23 | "panel_rLMlag",
24 | "panel_rLMerror",
25 | "panel_Hausman",
26 | ]
27 |
28 |
29 | def panel_LMlag(y, x, w):
30 | """
31 | Lagrange Multiplier test on lag spatial autocorrelation in panel data.
32 | :cite:`Anselin2008`.
33 |
34 | Parameters
35 | ----------
36 | y : array
37 | nxt or (nxt)x1 array for dependent variable
38 | x : array
39 | nx(txk) or (nxt)xk array for independent (exogenous)
40 | variables, excluding the constant
41 | w : pysal W object
42 | Spatial weights object
43 |
44 | Returns
45 | -------
46 | lme : tuple
47 | Pair of statistic and p-value for the LM lag test.
48 | """
49 | y, x, name_y, name_x, warn = check_panel(y, x, w, None, None)
50 | x, name_x, warn = USER.check_constant(x, name_x)
51 | ols = OLS(y, x)
52 | n = w.n
53 | t = y.shape[0] // n
54 | W = w.full()[0]
55 | Wsp_nt = sp.kron(sp.identity(t), w.sparse, format="csr")
56 | wxb = spdot(Wsp_nt, ols.predy)
57 | ww = spdot(W, W)
58 | wTw = spdot(W.T, W)
59 | trw = ww.diagonal().sum() + wTw.diagonal().sum()
60 | num1 = np.asarray(sp.identity(t * n) - spdot(x, spdot(ols.xtxi, x.T)))
61 | num2 = spdot(wxb.T, spdot(num1, wxb))
62 | num = num2 + (trw * trw * ols.sig2)
63 | J = num / ols.sig2
64 | utwy = spdot(ols.u.T, spdot(Wsp_nt, y))
65 | lm = utwy**2 / (ols.sig2**2 * J)
66 | pval = chisqprob(lm, 1)
67 | return (lm[0][0], pval[0][0])
68 |
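A hedged usage sketch with synthetic data: a balanced panel in long format, with `w.n` spatial units observed over `t` periods (the lattice weights and random data are purely illustrative):

    import numpy as np
    import libpysal
    from spreg.diagnostics_panel import panel_LMlag

    w = libpysal.weights.lat2W(5, 5)       # 25 spatial units
    t = 3                                  # 3 time periods
    rng = np.random.default_rng(0)
    y = rng.normal(size=(w.n * t, 1))      # (n*t)x1, long format
    x = rng.normal(size=(w.n * t, 2))      # (n*t)xk, no constant
    stat, pval = panel_LMlag(y, x, w)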
69 |
70 | def panel_LMerror(y, x, w):
71 | """
72 | Lagrange Multiplier test on error spatial autocorrelation in panel data.
73 | :cite:`Anselin2008`.
74 |
75 | Parameters
76 | ----------
77 | y : array
78 | nxt or (nxt)x1 array for dependent variable
79 | x : array
80 | nx(txk) or (nxt)xk array for independent (exogenous)
81 | variables, excluding the constant
82 | w : pysal W object
83 | Spatial weights object
84 |
85 | Returns
86 | -------
87 | lme : tuple
88 | Pair of statistic and p-value for the LM error test.
89 | """
90 | y, x, name_y, name_x, warn = check_panel(y, x, w, None, None)
91 | x, name_x, warn = USER.check_constant(x, name_x)
92 | ols = OLS(y, x)
93 | n = w.n
94 | t = y.shape[0] // n
95 | W = w.full()[0]
96 | Wsp_nt = sp.kron(sp.identity(t), w.sparse, format="csr")
97 | ww = spdot(W, W)
98 | wTw = spdot(W.T, W)
99 | trw = ww.diagonal().sum() + wTw.diagonal().sum()
100 | utwu = spdot(ols.u.T, spdot(Wsp_nt, ols.u))
101 | lm = utwu**2 / (ols.sig2**2 * t * trw)
102 | pval = chisqprob(lm, 1)
103 | return (lm[0][0], pval[0][0])
104 |
105 |
106 | def panel_rLMlag(y, x, w):
107 | """
108 | Robust Lagrange Multiplier test on lag spatial autocorrelation in
109 | panel data. :cite:`Elhorst2014`.
110 |
111 | Parameters
112 | ----------
113 | y : array
114 | nxt or (nxt)x1 array for dependent variable
115 | x : array
116 | nx(txk) or (nxt)xk array for independent (exogenous)
117 | variables, excluding the constant
118 | w : pysal W object
119 | Spatial weights object
120 |
121 | Returns
122 | -------
123 | lme : tuple
124 | Pair of statistic and p-value for the Robust LM lag test.
125 | """
126 | y, x, name_y, name_x, warn = check_panel(y, x, w, None, None)
127 | x, name_x, warn = USER.check_constant(x, name_x)
128 | ols = OLS(y, x)
129 | n = w.n
130 | t = y.shape[0] // n
131 | W = w.full()[0]
132 | Wsp_nt = sp.kron(sp.identity(t), w.sparse, format="csr")
133 | wxb = spdot(Wsp_nt, ols.predy)
134 | ww = spdot(W, W)
135 | wTw = spdot(W.T, W)
136 | trw = ww.diagonal().sum() + wTw.diagonal().sum()
137 | utwu = spdot(ols.u.T, spdot(Wsp_nt, ols.u))
138 | num1 = np.asarray(sp.identity(t * n) - spdot(x, spdot(ols.xtxi, x.T)))
139 | num2 = spdot(wxb.T, spdot(num1, wxb))
140 | num = num2 + (t * trw * ols.sig2)
141 | J = num / ols.sig2
142 | utwy = spdot(ols.u.T, spdot(Wsp_nt, y))
143 | lm = (utwy / ols.sig2 - utwu / ols.sig2) ** 2 / (J - t * trw)
144 | pval = chisqprob(lm, 1)
145 | return (lm[0][0], pval[0][0])
146 |
147 |
148 | def panel_rLMerror(y, x, w):
149 | """
150 | Robust Lagrange Multiplier test on error spatial autocorrelation in
151 | panel data. :cite:`Elhorst2014`.
152 |
153 | Parameters
154 | ----------
155 | y : array
156 | nxt or (nxt)x1 array for dependent variable
157 | x : array
158 | nx(txk) or (nxt)xk array for independent (exogenous)
159 | variables, excluding the constant
160 | w : pysal W object
161 | Spatial weights object
162 |
163 | Returns
164 | -------
165 | lme : tuple
166 | Pair of statistic and p-value for the Robust LM error test.
167 | """
168 | y, x, name_y, name_x, warn = check_panel(y, x, w, None, None)
169 | x, name_x, warn = USER.check_constant(x, name_x)
170 | ols = OLS(y, x)
171 | n = w.n
172 | t = y.shape[0] // n
173 | W = w.full()[0]
174 | Wsp_nt = sp.kron(sp.identity(t), w.sparse, format="csr")
175 | wxb = spdot(Wsp_nt, ols.predy)
176 | ww = spdot(W, W)
177 | wTw = spdot(W.T, W)
178 | trw = ww.diagonal().sum() + wTw.diagonal().sum()
179 | utwu = spdot(ols.u.T, spdot(Wsp_nt, ols.u))
180 | num1 = np.asarray(sp.identity(t * n) - spdot(x, spdot(ols.xtxi, x.T)))
181 | num2 = spdot(wxb.T, spdot(num1, wxb))
182 | num = num2 + (t * trw * ols.sig2)
183 | J = num / ols.sig2
184 | utwy = spdot(ols.u.T, spdot(Wsp_nt, y))
185 | lm = (utwu / ols.sig2 - t * trw / J * utwy / ols.sig2) ** 2 / (
186 | t * trw * (1 - t * trw / J)
187 | )
188 | pval = chisqprob(lm, 1)
189 | return (lm[0][0], pval[0][0])
190 |
191 |
192 | def panel_Hausman(panel_fe, panel_re, sp_lag=True):
193 | """
194 | Hausman test on panel data with spatial interactions. :cite:`Elhorst2014`.
195 |
196 | Parameters
197 | ----------
198 | panel_fe : panel_fe
199 | Instance from a fixed effects panel spatial regression
200 | panel_re : panel_re
201 | Instance from a random effects panel spatial regression
202 | sp_lag : boolean
203 | if True, calculate Hausman test for spatial lag model.
204 |
205 | Returns
206 | -------
207 | h : tuple
208 | Pair of statistic and p-value for the Hausman test.
209 | """
210 |     if hasattr(panel_fe, "rho") and hasattr(panel_re, "rho"):
211 |         d = panel_fe.betas - panel_re.betas[1:-1]
212 |     elif hasattr(panel_fe, "lam") and hasattr(panel_re, "lam"):
213 |         d = panel_fe.betas[0:-1] - panel_re.betas[1:-2]
214 |     else:
215 |         raise Exception("Both models must use the same spatial interaction (lag or error).")
216 |
217 | vard = panel_re.varb[1:, 1:] - panel_fe.varb
218 | vardi = la.inv(vard)
219 | h = spdot(d.T, spdot(vardi, d))
220 | pval = chisqprob(h, panel_re.k)
221 | return (h[0][0], pval[0][0])
222 |
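A sketch mirroring this repository's own tests (see spreg/tests/test_diagnostics_panel.py): fit fixed and random effects lag models on the same data, then compare them. Here `y`, `x`, and `w` are assumed to be the panel inputs from the example above:

    from spreg import Panel_FE_Lag, Panel_RE_Lag
    from spreg.diagnostics_panel import panel_Hausman

    fe_lag = Panel_FE_Lag(y, x, w)
    re_lag = Panel_RE_Lag(y, x, w)
    stat, pval = panel_Hausman(fe_lag, re_lag, sp_lag=True)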
--------------------------------------------------------------------------------
/spreg/diagnostics_probit.py:
--------------------------------------------------------------------------------
1 | """
2 | Diagnostics in probit regression.
3 |
4 | """
5 | __author__ = (
6 | "Luc Anselin lanselin@gmail.com, Pedro Amaral pedrovma@gmail.com "
7 | )
8 |
9 | from math import sqrt, pi
10 |
11 | from libpysal.common import MISSINGVALUE
12 | import numpy as np
13 | import numpy.linalg as la
14 | import scipy.sparse as SP
15 | from scipy import stats
16 | from scipy.stats import norm
17 |
18 | __all__ = [
19 | "pred_table",
20 | "probit_fit",
21 | "probit_lrtest",
22 | "mcfad_rho",
23 | "probit_ape",
24 | "sp_tests",
25 | "moran_KP",
26 | ]
27 |
28 |
29 | def pred_table(reg):
30 | """
31 | Calculates a table comparing predicted to actual outcomes for a
32 | discrete choice model
33 |
34 | Parameters
35 | ----------
36 | reg : regression object
37 | output instance from a probit regression model
38 |
39 | Returns
40 | ----------
41 | predtab_vals : dictionary
42 | includes margins and cells of actual and predicted
43 | values for discrete choice model
44 | actpos : observed positives (=1)
45 | actneg : observed negatives (=0)
46 | predpos : predicted positives
47 | predneg : predicted negatives
48 | truepos : predicted 1 when actual = 1
49 | falsepos : predicted 1 when actual = 0
50 | trueneg : predicted 0 when actual = 0
51 | falseneg : predicted 0 when actual = 1
52 |
53 | """
54 | predtab_vals = {}
55 | pos = reg.y.sum()
56 | predtab_vals["actpos"] = int(pos)
57 | neg = reg.n - pos
58 | predtab_vals["actneg"] = int(neg)
59 | act1 = (reg.y == 1) * 1
60 | act0 = (reg.y == 0) * 1
61 | ppos = reg.predybin.sum()
62 | predtab_vals["predpos"] = ppos
63 | pneg = reg.n - ppos
64 | predtab_vals["predneg"] = pneg
65 | pred1 = (reg.predybin == 1) * 1
66 | pred0 = (reg.predybin == 0) * 1
67 | truep = (pred1 * act1) * 1
68 | predtab_vals["truepos"] = truep.sum()
69 | truen = (pred0 * act0) * 1
70 | predtab_vals["trueneg"] = truen.sum()
71 | fpos = (pred1 * act0) * 1
72 | predtab_vals["falsepos"] = fpos.sum()
73 | fneg = (pred0 * act1) * 1
74 | predtab_vals["falseneg"] = fneg.sum()
75 |
76 | return predtab_vals
77 |
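A hedged usage sketch: `pred_table` only needs `y`, `n`, and `predybin` attributes, which the Probit estimator in this package exposes. The Columbus data follows the example used later in this module:

    import numpy as np
    import geopandas as gpd
    import libpysal
    from spreg import Probit

    columb = libpysal.examples.load_example("Columbus")
    dfs = gpd.read_file(columb.get_path("columbus.shp"))
    y = (dfs["CRIME"] > 40).astype(float).to_numpy().reshape(-1, 1)
    x = dfs[["INC", "HOVAL"]].to_numpy()
    model = Probit(y, x)
    pred_table(model)  # dict with actpos, truepos, falsepos, ...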
78 |
79 | def probit_fit(reg):
80 | """
81 | Various measures of fit for discrete choice models, derived from the
82 | prediction table (pred_table)
83 |
84 | Parameters
85 | ----------
86 | reg : regression object
87 | output instance from a probit regression model
88 | must contain predtable attribute
89 |
90 | Returns
91 | ----------
92 | prob_fit : a dictionary containing various measures of fit
93 | TPR : true positive rate (sensitivity, recall, hit rate)
94 | TNR : true negative rate (specificity, selectivity)
95 | PREDPC : accuracy, percent correctly predicted
96 | BA : balanced accuracy
97 |
98 | """
99 |
100 | prob_fit = {}
101 | prob_fit["TPR"] = 100.0 * reg.predtable["truepos"] / reg.predtable["actpos"]
102 | prob_fit["TNR"] = 100.0 * reg.predtable["trueneg"] / reg.predtable["actneg"]
103 | prob_fit["BA"] = (prob_fit["TPR"] + prob_fit["TNR"])/2.0
104 | prob_fit["PREDPC"] = 100.0 * (reg.predtable["truepos"] + reg.predtable["trueneg"]) / reg.n
105 |
106 | return prob_fit
107 |
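A toy numeric check, assuming a minimal stand-in object with just the attributes `probit_fit` reads (`predtable` and `n`):

    class _Reg:
        pass

    r = _Reg()
    r.predtable = {"truepos": 30, "actpos": 40, "trueneg": 50, "actneg": 60}
    r.n = 100
    probit_fit(r)  # TPR 75.0, TNR ~83.3, BA ~79.2, PREDPC 80.0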
108 | def probit_lrtest(regprob):
109 | """
110 | Likelihood ratio test statistic for probit model
111 |
112 | Parameters
113 | ----------
114 | regprob : probit regression object
115 |
116 | Returns
117 | -------
118 |
119 | likratio : dictionary
120 | contains the statistic for the null model (L0), the LR test(likr),
121 | the degrees of freedom (df) and the p-value (pvalue)
122 | L0 : float
123 | log likelihood of null model
124 | likr : float
125 | likelihood ratio statistic
126 | df : integer
127 | degrees of freedom
128 | p-value : float
129 | p-value
130 | """
131 |
132 | likratio = {}
133 | P = np.mean(regprob.y)
134 | L0 = regprob.n * (P * np.log(P) + (1 - P) * np.log(1 - P))
135 | likratio["L0"] = L0
136 | LR = -2.0 * (L0 - regprob.logl)
137 | likratio["likr"] = LR
138 | likratio["df"] = regprob.k
139 |     pval = stats.chi2.sf(LR, regprob.k)  # stats.chisqprob was removed from scipy
140 | likratio["p-value"] = pval
141 |
142 | return likratio
143 |
144 | def mcfad_rho(regprob):
145 | """
146 | McFadden's rho measure of fit
147 |
148 | Parameters
149 |     ----------
150 | regprob : probit regression object
151 |
152 | Returns
153 | -------
154 | rho : McFadden's rho (1 - L/L0)
155 |
156 | """
157 |
158 | rho = 1.0 - (regprob.logl / regprob.L0)
159 | return rho
160 |
161 | def probit_ape(regprob):
162 | """
163 | Average partial effects
164 |
165 | Parameters
166 | ----------
167 | regprob : probit regression object
168 |
169 | Returns
170 | -------
171 | tuple with:
172 | scale : the scale of the marginal effects, determined by regprob.scalem
173 | Default: 'phimean' (Mean of individual marginal effects)
174 |              Alternative: 'xmean' (Marginal effects at the means of the variables)
175 | slopes : marginal effects or average partial effects (not for constant)
176 | slopes_vm : estimates of variance of marginal effects (not for constant)
177 | slopes_std_err : estimates of standard errors of marginal effects
178 | slopes_z_stat : tuple with z-statistics and p-values for marginal effects
179 |
180 | """
181 |
182 |
183 | if regprob.scalem == "xmean":
184 | xmb = regprob.xmean.T @ regprob.betas
185 | scale = stats.norm.pdf(xmb)
186 |
187 | elif regprob.scalem == "phimean":
188 | scale = np.mean(regprob.phiy,axis=0)
189 |
190 | # average partial effects (no constant)
191 | slopes = (regprob.betas[1:,0] * scale).reshape(-1,1)
192 |
193 | # variance of partial effects
194 | xmb = regprob.xmean.T @ regprob.betas
195 | bxt = regprob.betas @ regprob.xmean.T
196 | dfdb = np.eye(regprob.k) - xmb * bxt
197 | slopes_vm = (scale ** 2) * ((dfdb @ regprob.vm) @ dfdb.T)
198 |
199 | # standard errors
200 | slopes_std_err = np.sqrt(slopes_vm[1:,1:].diagonal()).reshape(-1,1)
201 |
202 | # z-stats and p-values
203 | sl_zStat = slopes / slopes_std_err
204 | slopes_z_stat = [(sl_zStat[i,0],stats.norm.sf(abs(sl_zStat[i,0])) * 2) for i in range(len(slopes))]
205 |
206 |
207 | return (scale, slopes,slopes_vm[1:,1:],slopes_std_err,slopes_z_stat)
208 |
209 |
210 | def sp_tests(regprob=None, obj_list=None):
211 | """
212 | Calculates tests for spatial dependence in Probit models
213 |
214 | Parameters
215 | ----------
216 | regprob : regression object from spreg
217 | output instance from a probit model
218 | obj_list : list
219 | list of regression elements from both libpysal and statsmodels' ProbitResults
220 | The list should be such as:
221 | [libpysal.weights, ProbitResults.fittedvalues, ProbitResults.resid_response, ProbitResults.resid_generalized]
222 |
223 | Returns
224 | -------
225 | tuple with LM_Err, moran, ps as 2x1 arrays with statistic and p-value
226 | LM_Err: Pinkse
227 | moran : Kelejian-Prucha generalized Moran
228 | ps : Pinkse-Slade
229 |
230 | Examples
231 | --------
232 | The results of this function will be automatically added to the output of the probit model if using spreg.
233 | If using the Probit estimator from statsmodels, the user can call the function with the obj_list argument.
234 | The argument obj_list should be a list with the following elements, in this order:
235 | [libpysal.weights, ProbitResults.fittedvalues, ProbitResults.resid_response, ProbitResults.resid_generalized]
236 | The function will then return and print the results of the spatial diagnostics.
237 |
238 | >>> import libpysal
239 | >>> import statsmodels.api as sm
240 | >>> import geopandas as gpd
241 | >>> from spreg.diagnostics_probit import sp_tests
242 |
243 | >>> columb = libpysal.examples.load_example('Columbus')
244 | >>> dfs = gpd.read_file(columb.get_path("columbus.shp"))
245 | >>> w = libpysal.weights.Queen.from_dataframe(dfs)
246 | >>> w.transform='r'
247 |
248 | >>> y = (dfs["CRIME"] > 40).astype(float)
249 | >>> X = dfs[["INC","HOVAL"]]
250 | >>> X = sm.add_constant(X)
251 |
252 | >>> probit_mod = sm.Probit(y, X)
253 | >>> probit_res = probit_mod.fit(disp=False)
254 | >>> LM_err, moran, ps = sp_tests(obj_list=[w, probit_res.fittedvalues, probit_res.resid_response, probit_res.resid_generalized])
255 | PROBIT MODEL DIAGNOSTICS FOR SPATIAL DEPENDENCE
256 | TEST DF VALUE PROB
257 | Kelejian-Prucha (error) 1 1.721 0.0852
258 | Pinkse (error) 1 3.132 0.0768
259 | Pinkse-Slade (error) 1 2.558 0.1097
260 |
261 | """
262 | if regprob:
263 | w, Phi, phi, u_naive, u_gen, n = regprob.w, regprob.predy, regprob.phiy, regprob.u_naive, regprob.u_gen, regprob.n
264 | elif obj_list:
265 | w, fittedvalues, u_naive, u_gen = obj_list
266 | Phi = norm.cdf(fittedvalues)
267 | phi = norm.pdf(fittedvalues)
268 | n = w.n
269 |
270 | try:
271 | w = w.sparse
272 |     except AttributeError:
273 |         pass
274 |
275 | # Pinkse_error:
276 | Phi_prod = Phi * (1 - Phi)
277 | sig2 = np.sum((phi * phi) / Phi_prod) / n
278 | LM_err_num = np.dot(u_gen.T, (w @ u_gen)) ** 2
279 | trWW = np.sum((w @ w).diagonal())
280 | trWWWWp = trWW + np.sum((w @ w.T).diagonal())
281 | LM_err = float(1.0 * LM_err_num / (sig2 ** 2 * trWWWWp))
282 |     LM_err = np.array([LM_err, stats.chi2.sf(LM_err, 1)])
283 | # KP_error:
284 | moran = moran_KP(w, u_naive, Phi_prod)
285 | # Pinkse-Slade_error:
286 | u_std = u_naive / np.sqrt(Phi_prod)
287 | ps_num = np.dot(u_std.T, (w @ u_std)) ** 2
288 | trWpW = np.sum((w.T @ w).diagonal())
289 | ps = float(ps_num / (trWW + trWpW))
290 | # chi-square instead of bootstrap.
291 |     ps = np.array([ps, stats.chi2.sf(ps, 1)])
292 |
293 | if obj_list:
294 | from .output import _probit_out
295 | reg_simile = type('reg_simile', (object,), {})()
296 | reg_simile.Pinkse_error = LM_err
297 | reg_simile.KP_error = moran
298 | reg_simile.PS_error = ps
299 | print("PROBIT MODEL "+_probit_out(reg_simile, spat_diag=True, sptests_only=True)[1:])
300 |
301 | return LM_err, moran, ps
302 |
303 | def moran_KP(w, u, sig2i):
304 | """
305 | Calculates Kelejian-Prucha Moran-flavoured tests
306 |
307 | Parameters
308 | ----------
309 |
310 | w : W
311 | PySAL weights instance aligned with y
312 | u : array
313 | nx1 array of naive residuals
314 | sig2i : array
315 | nx1 array of individual variance
316 |
317 | Returns
318 | -------
319 | moran : array, Kelejian-Prucha Moran's I with p-value
320 | """
321 | try:
322 | w = w.sparse
323 |     except AttributeError:
324 | pass
325 | moran_num = np.dot(u.T, (w @ u))
326 | E = SP.lil_matrix(w.shape)
327 | E.setdiag(sig2i.flat)
328 | E = E.asformat("csr")
329 | WE = w @ E
330 | moran_den = np.sqrt(np.sum((WE @ WE + (w.T @ E) @ WE).diagonal()))
331 | moran = float(1.0 * moran_num / moran_den)
332 | moran = np.array([moran, stats.norm.sf(abs(moran)) * 2.0])
333 | return moran
334 |
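A hedged sketch of calling `moran_KP` directly, with illustrative lattice weights, random naive residuals, and a constant individual variance:

    import numpy as np
    import libpysal
    from spreg.diagnostics_probit import moran_KP

    w = libpysal.weights.lat2W(4, 4)
    rng = np.random.default_rng(1)
    u = rng.normal(size=(w.n, 1))          # naive residuals
    sig2i = np.full((w.n, 1), 0.25)        # individual variances
    stat, pval = moran_KP(w, u, sig2i)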
335 |
336 | def _test():
337 | import doctest
338 |
339 | doctest.testmod()
340 |
341 |
342 | if __name__ == "__main__":
343 | _test()
344 |
--------------------------------------------------------------------------------
/spreg/diagnostics_sur.py:
--------------------------------------------------------------------------------
1 | """
2 | Diagnostics for SUR and 3SLS estimation
3 | """
4 |
5 | __author__ = "Luc Anselin lanselin@gmail.com, \
6 | Pedro V. Amaral pedrovma@gmail.com \
7 | Tony Aburaad taburaad@uchicago.edu"
8 |
9 |
10 | import numpy as np
11 | import scipy.stats as stats
12 | import numpy.linalg as la
13 | from .sur_utils import sur_dict2mat, sur_mat2dict, sur_corr, spdot
14 | from .regimes import buildR1var, wald_test
15 |
16 |
17 | __all__ = ["sur_setp", "sur_lrtest", "sur_lmtest", "lam_setp", "surLMe", "surLMlag"]
18 |
19 |
20 | def sur_setp(bigB, varb):
21 | """
22 | Utility to compute standard error, t and p-value
23 |
24 | Parameters
25 | ----------
26 | bigB : dictionary
27 | of regression coefficient estimates,
28 | one vector by equation
29 | varb : array
30 | variance-covariance matrix of coefficients
31 |
32 | Returns
33 | -------
34 | surinfdict : dictionary
35 | with standard error, t-value, and
36 | p-value array, one for each equation
37 |
38 | """
39 | vvb = varb.diagonal()
40 | n_eq = len(bigB.keys())
41 | bigK = np.zeros((n_eq, 1), dtype=np.int_)
42 | for r in range(n_eq):
43 | bigK[r] = bigB[r].shape[0]
44 | b = sur_dict2mat(bigB)
45 | se = np.sqrt(vvb)
46 | se.resize(len(se), 1)
47 | t = np.divide(b, se)
48 | tp = stats.norm.sf(abs(t)) * 2
49 | surinf = np.hstack((se, t, tp))
50 | surinfdict = sur_mat2dict(surinf, bigK)
51 | return surinfdict
52 |
53 |
54 | def lam_setp(lam, vm):
55 | """
56 | Standard errors, t-test and p-value for lambda in SUR Error ML
57 |
58 | Parameters
59 | ----------
60 | lam : array
61 | n_eq x 1 array with ML estimates for spatial error
62 | autoregressive coefficient
63 | vm : array
64 | n_eq x n_eq subset of variance-covariance matrix for
65 | lambda and Sigma in SUR Error ML
66 | (needs to be subset from full vm)
67 |
68 | Returns
69 | -------
70 | : tuple
71 | with arrays for standard error, t-value and p-value
72 | (each element in the tuple is an n_eq x 1 array)
73 |
74 | """
75 | vvb = vm.diagonal()
76 | se = np.sqrt(vvb)
77 | se.resize(len(se), 1)
78 | t = np.divide(lam, se)
79 | tp = stats.norm.sf(abs(t)) * 2
80 | return (se, t, tp)
81 |
82 |
83 | def sur_lrtest(n, n_eq, ldetS0, ldetS1):
84 | """
85 | Likelihood Ratio test on off-diagonal elements of Sigma
86 |
87 | Parameters
88 | ----------
89 | n : int
90 | cross-sectional dimension (number of observations for an equation)
91 | n_eq : int
92 | number of equations
93 | ldetS0 : float
94 | log determinant of Sigma for OLS case
95 | ldetS1 : float
96 | log determinant of Sigma for SUR case (should be iterated)
97 |
98 | Returns
99 | -------
100 | (lrtest,M,pvalue) : tuple
101 | with value of test statistic (lrtest),
102 | degrees of freedom (M, as an integer)
103 | p-value
104 |
105 | """
106 | M = n_eq * (n_eq - 1) / 2.0
107 | lrtest = n * (ldetS0 - ldetS1)
108 | pvalue = stats.chi2.sf(lrtest, M)
109 | return (lrtest, int(M), pvalue)
110 |
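A toy call, assuming log-determinants from a prior OLS fit (`ldetS0`) and an iterated SUR fit (`ldetS1`); with three equations the degrees of freedom are 3*(3-1)/2 = 3:

    from spreg.diagnostics_sur import sur_lrtest

    lr, df, p = sur_lrtest(n=100, n_eq=3, ldetS0=-1.20, ldetS1=-1.35)
    # lr = 100 * (-1.20 - (-1.35)) = 15.0, df = 3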
111 |
112 | def sur_lmtest(n, n_eq, sig):
113 | """
114 | Lagrange Multiplier test on off-diagonal elements of Sigma
115 |
116 | Parameters
117 | ----------
118 | n : int
119 | cross-sectional dimension (number of observations for an equation)
120 | n_eq : int
121 | number of equations
122 | sig : array
123 | inter-equation covariance matrix for null model (OLS)
124 |
125 | Returns
126 | -------
127 | (lmtest,M,pvalue) : tuple
128 | with value of test statistic (lmtest),
129 | degrees of freedom (M, as an integer)
130 | p-value
131 | """
132 | R = sur_corr(sig)
133 | tr = np.trace(np.dot(R.T, R))
134 | M = n_eq * (n_eq - 1) / 2.0
135 | lmtest = (n / 2.0) * (tr - n_eq)
136 | pvalue = stats.chi2.sf(lmtest, M)
137 | return (lmtest, int(M), pvalue)
138 |
139 |
140 | def surLMe(n_eq, WS, bigE, sig):
141 | """
142 | Lagrange Multiplier test on error spatial autocorrelation in SUR
143 |
144 | Parameters
145 | ----------
146 | n_eq : int
147 | number of equations
148 | WS : array
149 | spatial weights matrix in sparse form
150 | bigE : array
151 | n x n_eq matrix of residuals by equation
152 | sig : array
153 | cross-equation error covariance matrix
154 |
155 | Returns
156 | -------
157 | (LMe,n_eq,pvalue) : tuple
158 | with value of statistic (LMe), degrees
159 | of freedom (n_eq) and p-value
160 |
161 | """
162 | # spatially lagged residuals
163 | WbigE = WS @ bigE
164 | # score
165 | EWE = np.dot(bigE.T, WbigE)
166 | sigi = la.inv(sig)
167 | SEWE = sigi * EWE
168 | # score = SEWE.sum(axis=1)
169 | # score.resize(n_eq,1)
170 | # note score is column sum of Sig_i * E'WE, a 1 by n_eq row vector
171 | # previously stored as column
172 | score = SEWE.sum(axis=0)
173 | score.resize(1, n_eq)
174 |
175 | # trace terms
176 | WW = WS @ WS
177 | trWW = np.sum(WW.diagonal())
178 | WTW = WS.T @ WS
179 | trWtW = np.sum(WTW.diagonal())
180 | # denominator
181 | SiS = sigi * sig
182 | Tii = trWW * np.identity(n_eq)
183 | tSiS = trWtW * SiS
184 | denom = Tii + tSiS
185 | idenom = la.inv(denom)
186 | # test statistic
187 | # LMe = np.dot(np.dot(score.T,idenom),score)[0][0]
188 | # score is now row vector
189 | LMe = np.dot(np.dot(score, idenom), score.T)[0][0]
190 | pvalue = stats.chi2.sf(LMe, n_eq)
191 | return (LMe, n_eq, pvalue)
192 |
193 |
194 | def surLMlag(n_eq, WS, bigy, bigX, bigE, bigYP, sig, varb):
195 | """
196 | Lagrange Multiplier test on lag spatial autocorrelation in SUR
197 |
198 | Parameters
199 | ----------
200 | n_eq : int
201 | number of equations
202 | WS : spatial weights matrix in sparse form
203 | bigy : dictionary
204 | with y values
205 | bigX : dictionary
206 | with X values
207 | bigE : array
208 | n x n_eq matrix of residuals by equation
209 | bigYP : array
210 | n x n_eq matrix of predicted values by equation
211 | sig : array
212 | cross-equation error covariance matrix
213 | varb : array
214 | variance-covariance matrix for b coefficients (inverse of Ibb)
215 |
216 | Returns
217 | -------
218 | (LMlag,n_eq,pvalue) : tuple
219 | with value of statistic (LMlag), degrees
220 | of freedom (n_eq) and p-value
221 |
222 | """
223 | # Score
224 | Y = np.hstack([bigy[r] for r in range(n_eq)])
225 | WY = WS @ Y
226 | EWY = np.dot(bigE.T, WY)
227 | sigi = la.inv(sig)
228 | SEWE = sigi @ EWY
229 | score = SEWE.sum(axis=0) # column sums
230 | score.resize(1, n_eq) # score as a row vector
231 |
232 | # I(rho,rho) as partitioned inverse, eq 72
233 | # trace terms
234 | WW = WS @ WS
235 | trWW = np.sum(WW.diagonal()) # T1
236 | WTW = WS.T @ WS
237 | trWtW = np.sum(WTW.diagonal()) # T2
238 |
239 | # I(rho,rho)
240 | SiS = sigi * sig
241 | Tii = trWW * np.identity(n_eq) # T1It
242 | tSiS = trWtW * SiS
243 | firstHalf = Tii + tSiS
244 | WbigYP = WS @ bigYP
245 | inner = np.dot(WbigYP.T, WbigYP)
246 | secondHalf = sigi * inner
247 | Ipp = firstHalf + secondHalf # eq. 75
248 |
249 | # I(b,b) inverse is varb
250 |
251 | # I(b,rho)
252 | bp = sigi[0,] * spdot(bigX[0].T, WbigYP) # initialize
253 | for r in range(1, n_eq):
254 | bpwork = sigi[r,] * spdot(bigX[r].T, WbigYP)
255 | bp = np.vstack((bp, bpwork))
256 | # partitioned part
257 | i_inner = Ipp - np.dot(np.dot(bp.T, varb), bp)
258 | # partitioned inverse of information matrix
259 | Ippi = la.inv(i_inner)
260 |
261 | # test statistic
262 | LMlag = np.dot(np.dot(score, Ippi), score.T)[0][0]
263 | # p-value
264 | pvalue = stats.chi2.sf(LMlag, n_eq)
265 | return (LMlag, n_eq, pvalue)
266 |
267 |
268 | def sur_chow(n_eq, bigK, bSUR, varb):
269 | """
270 | test on constancy of regression coefficients across equations in
271 | a SUR specification
272 |
273 | Note: requires a previous check on constancy of number of coefficients
274 | across equations; no other checks are carried out, so it is possible
275 | that the results are meaningless if the variables are not listed in
276 | the same order in each equation.
277 |
278 | Parameters
279 | ----------
280 | n_eq : int
281 | number of equations
282 | bigK : array
283 | with the number of variables by equation (includes constant)
284 | bSUR : dictionary
285 | with the SUR regression coefficients by equation
286 | varb : array
287 | the variance-covariance matrix for the SUR regression
288 | coefficients
289 |
290 | Returns
291 | -------
292 | test : array
293 | a list with for each coefficient (in order) a tuple with the
294 | value of the test statistic, the degrees of freedom, and the
295 | p-value
296 |
297 | """
298 | kr = bigK[0][0]
299 | test = []
300 | bb = sur_dict2mat(bSUR)
301 | kf = 0
302 | nr = n_eq
303 | df = n_eq - 1
304 | for i in range(kr):
305 | Ri = buildR1var(i, kr, kf, 0, nr)
306 | tt, p = wald_test(bb, Ri, np.zeros((df, 1)), varb)
307 | test.append((tt, df, p))
308 | return test
309 |
310 |
311 | def sur_joinrho(n_eq, bigK, bSUR, varb):
312 | """
313 | Test on joint significance of spatial autoregressive coefficient in SUR
314 |
315 | Parameters
316 | ----------
317 | n_eq : int
318 | number of equations
319 | bigK : array
320 | n_eq x 1 array with number of variables by equation
321 | (includes constant term, exogenous and endogeneous and
322 | spatial lag)
323 | bSUR : dictionary
324 | with regression coefficients by equation, with
325 | the spatial autoregressive term as last
326 | varb : array
327 | variance-covariance matrix for regression coefficients
328 |
329 | Returns
330 | -------
331 | : tuple
332 | with test statistic, degrees of freedom, p-value
333 |
334 | """
335 | bb = sur_dict2mat(bSUR)
336 | R = np.zeros((n_eq, varb.shape[0]))
337 | q = np.zeros((n_eq, 1))
338 | kc = -1
339 | for i in range(n_eq):
340 | kc = kc + bigK[i]
341 | R[i, kc] = 1
342 | w, p = wald_test(bb, R, q, varb)
343 | return (w, n_eq, p)
344 |
--------------------------------------------------------------------------------
/spreg/opt.py:
--------------------------------------------------------------------------------
1 | import copy
2 |
3 |
4 | def simport(modname):
5 | """
6 | Safely import a module without raising an error.
7 |
8 | Parameters
9 | -----------
10 | modname : str
11 | module name needed to import
12 |
13 | Returns
14 | --------
15 | tuple of (True, Module) or (False, None) depending on whether the import
16 | succeeded.
17 |
18 | Notes
19 | ------
20 | Wrapping this function around an iterative context or a with context would
21 | allow the module to be used without necessarily attaching it permanently in
22 | the global namespace:
23 |
24 | # >>> for t,mod in simport('pandas'):
25 | # if t:
26 | # mod.DataFrame()
27 | # else:
28 | # #do alternative behavior here
29 | # del mod #or don't del, your call
30 | #
31 | # instead of:
32 | #
33 | # >>> t, mod = simport('pandas')
34 | # >>> if t:
35 | # mod.DataFrame()
36 | # else:
37 | # #do alternative behavior here
38 |
39 |     The first idiom makes it work kind of like a with statement.
40 | """
41 | try:
42 | exec("import {}".format(modname))
43 | return True, eval(modname)
44 |     except ImportError:
45 | return False, None
46 |
47 |
48 | def requires(*args, **kwargs):
49 | """
50 | Decorator to wrap functions with extra dependencies:
51 |
52 | Arguments
53 | ---------
54 | args : list
55 | list of strings containing module to import
56 | verbose : bool
57 | boolean describing whether to print a warning message on import
58 | failure
59 | Returns
60 | -------
61 |     The original function if all modules in args are importable, otherwise a
62 |     function that does nothing (printing a warning when verbose).
63 | """
64 | v = kwargs.pop("verbose", True)
65 | wanted = copy.deepcopy(args)
66 |
67 | def inner(function):
68 | available = [simport(arg)[0] for arg in args]
69 | if all(available):
70 | return function
71 | else:
72 |
73 | def passer(*args, **kwargs):
74 | if v:
75 | missing = [arg for i, arg in enumerate(wanted) if not available[i]]
76 | print("missing dependencies: {d}".format(d=missing))
77 | print("not running {}".format(function.__name__))
78 | else:
79 | pass
80 |
81 | return passer
82 |
83 | return inner
84 |
85 |
86 | if __name__ == "__main__":
87 |
88 | @requires("pandas")
89 | def test():
90 | import pandas
91 |
92 | print("ASDF")
93 |
94 | @requires("thisisnotarealmodule")
95 | def test2():
96 | print("you shouldnt see this")
97 |
98 | test()
99 | test2()
100 |
--------------------------------------------------------------------------------
/spreg/optional_imports.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "This details how the `requires` decorator can be used."
8 | ]
9 | },
10 | {
11 | "cell_type": "code",
12 | "execution_count": 1,
13 | "metadata": {
14 | "collapsed": true
15 | },
16 | "outputs": [],
17 | "source": [
18 | "from opt import requires"
19 | ]
20 | },
21 | {
22 | "cell_type": "markdown",
23 | "metadata": {},
24 | "source": [
25 | "The function takes an arbitrary number of strings that describe the dependencies introduced by the class or function. "
26 | ]
27 | },
28 | {
29 | "cell_type": "code",
30 | "execution_count": 2,
31 | "metadata": {
32 | "collapsed": false
33 | },
34 | "outputs": [],
35 | "source": [
36 | "@requires('pandas')\n",
37 | "def test():\n",
38 | " import pandas\n",
39 | " print('yay pandas version {}'.format(pandas.__version__))"
40 | ]
41 | },
42 | {
43 | "cell_type": "code",
44 | "execution_count": 3,
45 | "metadata": {
46 | "collapsed": false
47 | },
48 | "outputs": [
49 | {
50 | "name": "stdout",
51 | "output_type": "stream",
52 | "text": [
53 | "yay pandas version 0.16.2\n"
54 | ]
55 | }
56 | ],
57 | "source": [
58 | "test()"
59 | ]
60 | },
61 | {
62 | "cell_type": "markdown",
63 | "metadata": {},
64 | "source": [
65 | "So, neat. What if we didn't have the module needed?"
66 | ]
67 | },
68 | {
69 | "cell_type": "code",
70 | "execution_count": 4,
71 | "metadata": {
72 | "collapsed": true
73 | },
74 | "outputs": [],
75 | "source": [
76 | "@requires(\"notarealmodule\")\n",
77 | "def test2():\n",
78 | " print(\"you shouldn't see this\")"
79 | ]
80 | },
81 | {
82 | "cell_type": "code",
83 | "execution_count": 5,
84 | "metadata": {
85 | "collapsed": false
86 | },
87 | "outputs": [
88 | {
89 | "name": "stdout",
90 | "output_type": "stream",
91 | "text": [
92 | "missing dependencies: ['notarealmodule']\n",
93 | "not running test2\n"
94 | ]
95 | }
96 | ],
97 | "source": [
98 | "test2()"
99 | ]
100 | },
101 | {
102 | "cell_type": "markdown",
103 | "metadata": {},
104 | "source": [
105 | "So, if the requirements aren't met, your original function is replaced with a new function.\n",
106 | "\n",
107 | "it looks kind of like this:"
108 | ]
109 | },
110 | {
111 | "cell_type": "code",
112 | "execution_count": 6,
113 | "metadata": {
114 | "collapsed": false
115 | },
116 | "outputs": [],
117 | "source": [
118 | "def passer():\n",
119 | " if verbose:\n",
120 | " missing = [arg for i,arg in enumerate(args) if not available[i]]\n",
121 | " print(\"missing dependencies: {d}\".format(d=missing))\n",
122 | " print(\"not running {}\".format(function.__name__))\n",
123 | " else:\n",
124 | " pass"
125 | ]
126 | },
127 | {
128 | "cell_type": "markdown",
129 | "metadata": {},
130 | "source": [
131 | "By deafult, the function is replaced with a verbose version. If you pass `verbose=False`, then the function gets replaced with a nonprinting version. "
132 | ]
133 | },
134 | {
135 | "cell_type": "code",
136 | "execution_count": 7,
137 | "metadata": {
138 | "collapsed": true
139 | },
140 | "outputs": [],
141 | "source": [
142 | "@requires(\"notarealmodule\", verbose=False)\n",
143 | "def test3():\n",
144 | " print(\"you shouldn't see this, either\")"
145 | ]
146 | },
147 | {
148 | "cell_type": "code",
149 | "execution_count": 8,
150 | "metadata": {
151 | "collapsed": true
152 | },
153 | "outputs": [],
154 | "source": [
155 | "test3()"
156 | ]
157 | },
158 | {
159 | "cell_type": "markdown",
160 | "metadata": {},
161 | "source": [
162 | "The cool thing is, this works on class definitions as well:"
163 | ]
164 | },
165 | {
166 | "cell_type": "code",
167 | "execution_count": 9,
168 | "metadata": {
169 | "collapsed": true
170 | },
171 | "outputs": [],
172 | "source": [
173 | "@requires(\"notarealmodule\")\n",
174 | "class OLS_mock(object):\n",
175 | " def __init__(self, *args, **kwargs):\n",
176 | " for arg in args:\n",
177 | " print(arg)"
178 | ]
179 | },
180 | {
181 | "cell_type": "code",
182 | "execution_count": 10,
183 | "metadata": {
184 | "collapsed": false
185 | },
186 | "outputs": [
187 | {
188 | "name": "stdout",
189 | "output_type": "stream",
190 | "text": [
191 | "missing dependencies: ['notarealmodule']\n",
192 | "not running OLS_mock\n"
193 | ]
194 | }
195 | ],
196 | "source": [
197 | "OLS_mock(1,2,3,4,5, w='Tom')"
198 | ]
199 | },
200 | {
201 | "cell_type": "code",
202 | "execution_count": 11,
203 | "metadata": {
204 | "collapsed": true
205 | },
206 | "outputs": [],
207 | "source": [
208 | "@requires(\"pymc3\")\n",
209 | "class BayesianHLM(object):\n",
210 | " def __init__(self, *args, **kwargs):\n",
211 | " print(arg)for arg in args[0:]:\n"
212 | ]
213 | },
214 | {
215 | "cell_type": "code",
216 | "execution_count": 12,
217 | "metadata": {
218 | "collapsed": false
219 | },
220 | "outputs": [
221 | {
222 | "name": "stdout",
223 | "output_type": "stream",
224 | "text": [
225 | "1\n",
226 | "2\n",
227 | "3\n",
228 | "4\n",
229 | "5\n"
230 | ]
231 | },
232 | {
233 | "data": {
234 | "text/plain": [
235 | "<__main__.BayesianHLM at 0x7fe3a32f9e90>"
236 | ]
237 | },
238 | "execution_count": 12,
239 | "metadata": {},
240 | "output_type": "execute_result"
241 | }
242 | ],
243 | "source": [
244 | "BayesianHLM(1,2,3,4,5, w='Tom')"
245 | ]
246 | },
247 | {
248 | "cell_type": "markdown",
249 | "metadata": {},
250 | "source": [
251 | "I think this could make more sense for `spreg` dependencies, where the extra stuff might be reasonable at a function or class level, rather than at the module level. \n",
252 | "\n",
253 | "In addition, it could be useful for any plans for a viz setup, where maybe a projected map would require `cartopy`, but an unprojected map could just use `matplotlib`. This would let us do this function-by-function and print or raise reasonable fail states. "
254 | ]
255 | }
256 | ],
257 | "metadata": {
258 | "kernelspec": {
259 | "display_name": "Python 2",
260 | "language": "python",
261 | "name": "python2"
262 | },
263 | "language_info": {
264 | "codemirror_mode": {
265 | "name": "ipython",
266 | "version": 2
267 | },
268 | "file_extension": ".py",
269 | "mimetype": "text/x-python",
270 | "name": "python",
271 | "nbconvert_exporter": "python",
272 | "pygments_lexer": "ipython2",
273 | "version": "2.7.10"
274 | }
275 | },
276 | "nbformat": 4,
277 | "nbformat_minor": 0
278 | }
279 |
--------------------------------------------------------------------------------
/spreg/panel_utils.py:
--------------------------------------------------------------------------------
1 | """
2 | Utilities for panel data estimation
3 | """
4 |
5 | __author__ = "Wei Kang weikang9009@gmail.com, \
6 | Pedro Amaral pedroamaral@cedeplar.ufmg.br, \
7 | Pablo Estrada pabloestradace@gmail.com"
8 |
9 | import numpy as np
10 | import pandas as pd
11 | from scipy import sparse as sp
12 | from .sputils import spdot
13 |
14 | __all__ = ["check_panel", "demean_panel"]
15 |
16 |
17 | def check_panel(y, x, w, name_y, name_x):
18 | """
19 | Check the data structure and converts from wide to long if needed.
20 |
21 | Parameters
22 | ----------
23 | y : array
24 | n*tx1 or nxt array for dependent variable
25 | x : array
26 | Two dimensional array with n*t rows and k columns for
27 | independent (exogenous) variable or n rows and k*t columns
28 | (note, must not include a constant term)
29 | name_y : string or list of strings
30 | Name of dependent variable for use in output
31 | name_x : list of strings
32 | Names of independent variables for use in output
33 | """
34 |
35 | if isinstance(y, (pd.Series, pd.DataFrame)):
36 | if name_y is None:
37 | try:
38 | name_y = y.columns.to_list()
39 | except AttributeError:
40 | name_y = y.name
41 | y = y.to_numpy()
42 |
43 | if isinstance(x, (pd.Series, pd.DataFrame)):
44 | if name_x is None:
45 | try:
46 | name_x = x.columns.to_list()
47 | except AttributeError:
48 | name_x = x.name
49 | x = x.to_numpy()
50 |
51 | # Check if 'y' is a balanced panel with respect to 'W'
52 | if y.shape[0] / w.n != y.shape[0] // w.n:
53 | raise Exception("y must be ntx1 or nxt, and w must be an nxn PySAL W" "object.")
54 | # Wide format
55 | if y.shape[1] > 1:
56 | warn = (
57 | "Assuming panel is in wide format.\n"
58 | "y[:, 0] refers to T0, y[:, 1] refers to T1, etc.\n"
59 | "x[:, 0:T] refers to T periods of k1, x[:, T+1:2T] refers "
60 | "to k2, etc."
61 | )
62 | N, T = y.shape[0], y.shape[1]
63 | k = x.shape[1] // T
64 | bigy = y.reshape((y.size, 1), order="F")
65 | bigx = x[:, 0:T].reshape((N * T, 1), order="F")
66 | for i in range(1, k):
67 | bigx = np.hstack(
68 | (bigx, x[:, T * i : T * (i + 1)].reshape((N * T, 1), order="F"))
69 | )
70 | # Long format
71 | else:
72 | warn = (
73 | "Assuming panel is in long format.\n"
74 | "y[0:N] refers to T0, y[N+1:2N] refers to T1, etc.\n"
75 | "x[0:N] refers to T0, x[N+1:2N] refers to T1, etc."
76 | )
77 | T = y.shape[0] // w.n
78 | N = w.n
79 | k = x.shape[1]
80 | bigy, bigx = y, x
81 | # Fix name_y and name_x
82 | if name_y:
83 | if not isinstance(name_y, str) and not isinstance(name_y, list):
84 | raise Exception("name_y must either be strings or a list of" "strings.")
85 | if len(name_y) > 1 and isinstance(name_y, list):
86 | name_y = "".join([i for i in name_y[0] if not i.isdigit()])
87 | if len(name_y) == 1 and isinstance(name_y, list):
88 | name_y = name_y[0]
89 | if name_x:
90 | if len(name_x) != k * T and len(name_x) != k:
91 |             raise Exception(
92 |                 "Names of columns in X must have exactly either k or k*t elements."
93 |             )
94 | if len(name_x) > k:
95 | name_bigx = []
96 | for i in range(k):
97 | namek = "".join([j for j in name_x[i * T] if not j.isdigit()])
98 | name_bigx.append(namek)
99 | name_x = name_bigx
100 |
101 | return bigy, bigx, name_y, name_x, warn
102 |
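A hedged sketch of the wide-to-long conversion: n rows and t columns of y become an (n*t)x1 stack, and each t-column block of x becomes one column per regressor (the lattice weights are illustrative):

    import numpy as np
    import libpysal
    from spreg.panel_utils import check_panel

    w = libpysal.weights.lat2W(2, 2)          # n = 4 units
    y_wide = np.arange(12.0).reshape(4, 3)    # t = 3 periods
    x_wide = np.arange(24.0).reshape(4, 6)    # k = 2 regressors, t columns each
    bigy, bigx, _, _, warn = check_panel(y_wide, x_wide, w, None, None)
    bigy.shape, bigx.shape                    # ((12, 1), (12, 2))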
103 |
104 | def demean_panel(arr, n, t, phi=0):
105 | """
106 | Returns demeaned variable.
107 |
108 | Parameters
109 | ----------
110 | arr : array
111 | n*tx1 array
112 | n : integer
113 | Number of observations
114 | t : integer
115 | Number of time periods
116 | phi : float
117 | Weight from 0 to 1 attached to the cross-sectional component
118 | of the data. If phi=0, then it is the demeaning procedure.
119 | If phi=1, then the data doesn't change at all.
120 |
121 | Returns
122 | -------
123 | arr_dm : array
124 | Demeaned variable
125 | """
126 |
127 | one = np.ones((t, 1))
128 | J = sp.identity(t) - (1 - phi) * (1 / t) * spdot(one, one.T)
129 | Q = sp.kron(J, sp.identity(n), format="csr")
130 | arr_dm = spdot(Q, arr)
131 |
132 | return arr_dm
133 |
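A quick numeric check with phi=0: observations are stacked by period (the n units of period 0 first), and each unit's time mean is removed:

    import numpy as np
    from spreg.panel_utils import demean_panel

    arr = np.arange(6.0).reshape(-1, 1)   # n=2 units, t=3 periods
    demean_panel(arr, n=2, t=3, phi=0)
    # unit 0 holds rows 0, 2, 4 (values 0, 2, 4; mean 2) -> -2, 0, 2
    # unit 1 holds rows 1, 3, 5 (values 1, 3, 5; mean 3) -> -2, 0, 2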
--------------------------------------------------------------------------------
/spreg/robust.py:
--------------------------------------------------------------------------------
1 | __author__ = "Luc Anselin lanselin@gmail.com, \
2 | Pedro V. Amaral pedrovma@gmail.com, \
3 | David C. Folch david.folch@asu.edu"
4 |
5 | import numpy as np
6 | import numpy.linalg as la
7 | from libpysal.weights.spatial_lag import lag_spatial
8 | from .utils import spdot, spbroadcast
9 | from .user_output import check_constant
10 |
11 |
12 | def robust_vm(reg, gwk=None, sig2n_k=False):
13 | """
14 | Robust estimation of the variance-covariance matrix. Estimated by White (default) or HAC (if wk is provided).
15 |
16 | Parameters
17 | ----------
18 |
19 | reg : Regression object (OLS or TSLS)
20 | output instance from a regression model
21 |
22 | gwk : PySAL weights object
23 | Optional. Spatial weights based on kernel functions
24 | If provided, returns the HAC variance estimation
25 | sig2n_k : boolean
26 | If True, then use n-k to rescale the vc matrix.
27 | If False, use n. (White only)
28 |
29 | Returns
30 | --------
31 |
32 | psi : kxk array
33 | Robust estimation of the variance-covariance
34 |
35 | Examples
36 | --------
37 |
38 | >>> import numpy as np
39 | >>> import libpysal
40 | >>> from spreg import OLS
41 | >>> from spreg import TSLS
42 | >>> db=libpysal.io.open(libpysal.examples.get_path("NAT.dbf"),"r")
43 | >>> y = np.array(db.by_col("HR90"))
44 | >>> y = np.reshape(y, (y.shape[0],1))
45 | >>> X = []
46 | >>> X.append(db.by_col("RD90"))
47 | >>> X.append(db.by_col("DV90"))
48 | >>> X = np.array(X).T
49 |
50 | Example with OLS with unadjusted standard errors
51 |
52 | >>> ols = OLS(y,X)
53 | >>> ols.vm
54 | array([[ 0.17004545, 0.00226532, -0.02243898],
55 | [ 0.00226532, 0.00941319, -0.00031638],
56 | [-0.02243898, -0.00031638, 0.00313386]])
57 |
58 | Example with OLS and White
59 |
60 | >>> ols = OLS(y,X, robust='white')
61 | >>> ols.vm
62 | array([[ 0.24515481, 0.01093322, -0.03441966],
63 | [ 0.01093322, 0.01798616, -0.00071414],
64 | [-0.03441966, -0.00071414, 0.0050153 ]])
65 |
66 | Example with OLS and HAC
67 |
68 | >>> wk = libpysal.weights.Kernel.from_shapefile(libpysal.examples.get_path('NAT.shp'),k=15,function='triangular', fixed=False)
69 | >>> wk.transform = 'o'
70 | >>> ols = OLS(y,X, robust='hac', gwk=wk)
71 | >>> ols.vm
72 | array([[ 0.29213532, 0.01670361, -0.03948199],
73 | [ 0.01655557, 0.02295829, -0.00116874],
74 | [-0.03941483, -0.00119077, 0.00568314]])
75 |
76 | Example with 2SLS and White
77 |
78 | >>> yd = []
79 | >>> yd.append(db.by_col("UE90"))
80 | >>> yd = np.array(yd).T
81 | >>> q = []
82 | >>> q.append(db.by_col("UE80"))
83 | >>> q = np.array(q).T
84 | >>> tsls = TSLS(y, X, yd, q=q, robust='white')
85 | >>> tsls.vm
86 | array([[ 0.29569954, 0.04119843, -0.02496858, -0.01640185],
87 | [ 0.04119843, 0.03647762, 0.004702 , -0.00987345],
88 | [-0.02496858, 0.004702 , 0.00648262, -0.00292891],
89 | [-0.01640185, -0.00987345, -0.00292891, 0.0053322 ]])
90 |
91 | Example with 2SLS and HAC
92 |
93 | >>> tsls = TSLS(y, X, yd, q=q, robust='hac', gwk=wk)
94 | >>> tsls.vm
95 | array([[ 0.41985329, 0.06823119, -0.02883889, -0.02788116],
96 | [ 0.06867042, 0.04887508, 0.00497443, -0.01367746],
97 | [-0.02856454, 0.00501402, 0.0072195 , -0.00321604],
98 | [-0.02810131, -0.01364908, -0.00318197, 0.00713251]])
99 |
100 | """
101 | if hasattr(reg, "h"): # If reg has H, do 2SLS estimator. OLS otherwise.
102 | tsls = True
103 | xu = spbroadcast(reg.h, reg.u)
104 | else:
105 | tsls = False
106 | xu = spbroadcast(reg.x, reg.u)
107 |
108 | if gwk: # If gwk do HAC. White otherwise.
109 | gwkxu = lag_spatial(gwk, xu)
110 | psi0 = spdot(xu.T, gwkxu)
111 | else:
112 | psi0 = spdot(xu.T, xu)
113 | if sig2n_k:
114 | psi0 = psi0 * (1.0 * reg.n / (reg.n - reg.k))
115 | if tsls:
116 | psi1 = spdot(reg.varb, reg.zthhthi)
117 | psi = spdot(psi1, np.dot(psi0, psi1.T))
118 | else:
119 | psi = spdot(reg.xtxi, np.dot(psi0, reg.xtxi))
120 |
121 | return psi
122 |
123 |
124 | def hac_multi(reg, gwk, constant=False):
125 | """
126 | HAC robust estimation of the variance-covariance matrix for multi-regression object
127 |
128 | Parameters
129 | ----------
130 |
131 | reg : Regression object (OLS or TSLS)
132 | output instance from a regression model
133 |
134 | gwk : PySAL weights object
135 | Spatial weights based on kernel functions
136 |
137 | Returns
138 | --------
139 |
140 | psi : kxk array
141 | Robust estimation of the variance-covariance
142 |
143 | """
144 | if not constant:
145 | reg.hac_var = check_constant(reg.hac_var)
146 | xu = spbroadcast(reg.hac_var, reg.u)
147 | gwkxu = lag_spatial(gwk, xu)
148 | psi0 = spdot(xu.T, gwkxu)
149 | counter = 0
150 | for m in reg.multi:
151 | reg.multi[m].robust = "hac"
152 | reg.multi[m].name_gwk = reg.name_gwk
153 | try:
154 | psi1 = spdot(reg.multi[m].varb, reg.multi[m].zthhthi)
155 | reg.multi[m].vm = spdot(psi1, np.dot(psi0, psi1.T))
156 |         except AttributeError:
157 | reg.multi[m].vm = spdot(reg.multi[m].xtxi, np.dot(psi0, reg.multi[m].xtxi))
158 | reg.vm[
159 | (counter * reg.kr) : ((counter + 1) * reg.kr),
160 | (counter * reg.kr) : ((counter + 1) * reg.kr),
161 | ] = reg.multi[m].vm
162 | counter += 1
163 |
164 |
165 | def _test():
166 | import doctest
167 |
168 | doctest.testmod()
169 |
170 |
171 | if __name__ == "__main__":
172 | _test()
173 |
--------------------------------------------------------------------------------
/spreg/tests/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pysal/spreg/7d415f2a38d7e10db23c1f1821e136c2dcd0b19a/spreg/tests/__init__.py
--------------------------------------------------------------------------------
/spreg/tests/skip.py:
--------------------------------------------------------------------------------
1 | """
2 | Set SKIP to True to skip any tests in this directory with the skip decorator
3 |
4 | To run all tests set SKIP=False
5 | """
6 | SKIP = True
7 |
--------------------------------------------------------------------------------
/spreg/tests/test_diagnostics.py:
--------------------------------------------------------------------------------
1 | import unittest
2 | import numpy as np
3 | import libpysal
4 | from spreg import diagnostics
5 | from spreg.ols import OLS
6 | from libpysal.common import RTOL
7 |
8 | # create regression object used by all the tests below
9 | db = libpysal.io.open(libpysal.examples.get_path("columbus.dbf"))
10 | y = np.array(db.by_col("CRIME"))
11 | y = np.reshape(y, (49, 1))
12 | X = []
13 | X.append(db.by_col("INC"))
14 | X.append(db.by_col("HOVAL"))
15 | X = np.array(X).T
16 | reg = OLS(y, X)
17 |
18 |
19 | class TestFStat(unittest.TestCase):
20 | def test_f_stat(self):
21 | obs = diagnostics.f_stat(reg)
22 | exp = (28.385629224695, 0.000000009340747)
23 | for i in range(2):
24 | np.testing.assert_allclose(obs[i], exp[i], RTOL)
25 |
26 |
27 | class TestTStat(unittest.TestCase):
28 | def test_t_stat(self):
29 | obs = diagnostics.t_stat(reg)
30 | exp = [
31 | (14.490373143689094, 9.2108899889173982e-19),
32 | (-4.7804961912965762, 1.8289595070843232e-05),
33 | (-2.6544086427176916, 0.010874504909754612),
34 | ]
35 | for i in range(3):
36 | for j in range(2):
37 | np.testing.assert_allclose(obs[i][j], exp[i][j], RTOL)
38 |
39 |
40 | class TestR2(unittest.TestCase):
41 | def test_r2(self):
42 | obs = diagnostics.r2(reg)
43 | exp = 0.55240404083742334
44 | np.testing.assert_allclose(obs, exp, RTOL)
45 |
46 |
47 | class TestAr2(unittest.TestCase):
48 | def test_ar2(self):
49 | obs = diagnostics.ar2(reg)
50 | exp = 0.5329433469607896
51 | np.testing.assert_allclose(obs, exp, RTOL)
52 |
53 |
54 | class TestSeBetas(unittest.TestCase):
55 | def test_se_betas(self):
56 | obs = diagnostics.se_betas(reg)
57 | exp = np.array([4.73548613, 0.33413076, 0.10319868])
58 | np.testing.assert_allclose(obs, exp, RTOL)
59 |
60 |
61 | class TestLogLikelihood(unittest.TestCase):
62 | def test_log_likelihood(self):
63 | obs = diagnostics.log_likelihood(reg)
64 | exp = -187.3772388121491
65 | np.testing.assert_allclose(obs, exp, RTOL)
66 |
67 |
68 | class TestAkaike(unittest.TestCase):
69 | def test_akaike(self):
70 | obs = diagnostics.akaike(reg)
71 | exp = 380.7544776242982
72 | np.testing.assert_allclose(obs, exp, RTOL)
73 |
74 |
75 | class TestSchwarz(unittest.TestCase):
76 | def test_schwarz(self):
77 | obs = diagnostics.schwarz(reg)
78 | exp = 386.42993851863008
79 | np.testing.assert_allclose(obs, exp, RTOL)
80 |
81 |
82 | class TestConditionIndex(unittest.TestCase):
83 | def test_condition_index(self):
84 | obs = diagnostics.condition_index(reg)
85 | exp = 6.541827751444
86 | np.testing.assert_allclose(obs, exp, RTOL)
87 |
88 |
89 | class TestJarqueBera(unittest.TestCase):
90 | def test_jarque_bera(self):
91 | obs = diagnostics.jarque_bera(reg)
92 | exp = {"df": 2, "jb": 1.835752520076, "pvalue": 0.399366291249}
93 | np.testing.assert_equal(obs["df"], exp["df"])
94 | np.testing.assert_allclose(obs["jb"], exp["jb"], RTOL)
95 | np.testing.assert_allclose(obs["pvalue"], exp["pvalue"], RTOL)
96 |
97 |
98 | class TestBreuschPagan(unittest.TestCase):
99 | def test_breusch_pagan(self):
100 | obs = diagnostics.breusch_pagan(reg)
101 | exp = {"df": 2, "bp": 7.900441675960, "pvalue": 0.019250450075}
102 | np.testing.assert_equal(obs["df"], exp["df"])
103 | np.testing.assert_allclose(obs["bp"], exp["bp"])
104 | np.testing.assert_allclose(obs["pvalue"], exp["pvalue"])
105 |
106 |
107 | class TestWhite(unittest.TestCase):
108 | def test_white(self):
109 | obs = diagnostics.white(reg)
110 | exp = {"df": 5, "wh": 19.946008239903, "pvalue": 0.001279222817}
111 | np.testing.assert_equal(obs["df"], exp["df"])
112 | np.testing.assert_allclose(obs["wh"], exp["wh"], RTOL)
113 | np.testing.assert_allclose(obs["pvalue"], exp["pvalue"], RTOL)
114 |
115 |
116 | class TestKoenkerBassett(unittest.TestCase):
117 | def test_koenker_bassett(self):
118 | obs = diagnostics.koenker_bassett(reg)
119 | exp = {"df": 2, "kb": 5.694087931707, "pvalue": 0.058015563638}
120 | np.testing.assert_equal(obs["df"], exp["df"])
121 | np.testing.assert_allclose(obs["kb"], exp["kb"], RTOL)
122 | np.testing.assert_allclose(obs["pvalue"], exp["pvalue"], RTOL)
123 |
124 |
125 | class TestVif(unittest.TestCase):
126 | def test_vif(self):
127 | obs = diagnostics.vif(reg)
128 | exp = [
129 |             (0.0, 0.0),  # note: [0][1] should actually be infinity...
130 | (1.3331174971891975, 0.75012142748740696),
131 | (1.3331174971891973, 0.75012142748740707),
132 | ]
133 | for i in range(1, 3):
134 | for j in range(2):
135 | np.testing.assert_allclose(obs[i][j], exp[i][j], RTOL)
136 |
137 |
138 | class TestConstantCheck(unittest.TestCase):
139 | def test_constant_check(self):
140 | obs = diagnostics.constant_check(reg.x)
141 | exp = True
142 | np.testing.assert_equal(obs, exp)
143 |
144 |
145 | if __name__ == "__main__":
146 | unittest.main()
147 |
--------------------------------------------------------------------------------
/spreg/tests/test_diagnostics_panel.py:
--------------------------------------------------------------------------------
1 | import unittest
2 | import libpysal
3 | import numpy as np
4 | import pandas as pd
5 | from spreg.diagnostics_panel import panel_LMlag, panel_LMerror, panel_rLMlag
6 | from spreg.diagnostics_panel import panel_rLMerror, panel_Hausman
7 | from spreg.panel_fe import Panel_FE_Lag, Panel_FE_Error
8 | from spreg.panel_re import Panel_RE_Lag, Panel_RE_Error
9 | from libpysal.common import RTOL
10 | from libpysal.weights import w_subset
11 |
12 |
13 | class Test_Panel_Diagnostics(unittest.TestCase):
14 | def setUp(self):
15 | self.ds_name = "NCOVR"
16 | nat = libpysal.examples.load_example(self.ds_name)
17 | self.db = libpysal.io.open(nat.get_path("NAT.dbf"), "r")
18 | nat_shp = libpysal.examples.get_path("NAT.shp")
19 | w_full = libpysal.weights.Queen.from_shapefile(nat_shp)
20 | self.y_name = ["HR70", "HR80", "HR90"]
21 | self.x_names = ["RD70", "RD80", "RD90", "PS70", "PS80", "PS90"]
22 | c_names = ["STATE_NAME", "FIPSNO"]
23 | y_full = [self.db.by_col(name) for name in self.y_name]
24 | y_full = np.array(y_full).T
25 | x_full = [self.db.by_col(name) for name in self.x_names]
26 | x_full = np.array(x_full).T
27 | c_full = [self.db.by_col(name) for name in c_names]
28 | c_full = pd.DataFrame(c_full, index=c_names).T
29 | filter_states = ["Kansas", "Missouri", "Oklahoma", "Arkansas"]
30 | filter_counties = c_full[c_full["STATE_NAME"].isin(filter_states)]
31 | filter_counties = filter_counties["FIPSNO"].values
32 | counties = np.array(self.db.by_col("FIPSNO"))
33 | subid = np.where(np.isin(counties, filter_counties))[0]
34 | self.w = w_subset(w_full, subid)
35 | self.w.transform = "r"
36 | self.y = y_full[
37 | subid,
38 | ]
39 | self.x = x_full[
40 | subid,
41 | ]
42 |
43 | def test_LM(self):
44 | lmlag = panel_LMlag(self.y, self.x, self.w)
45 | exp = np.array([1.472807526666869, 0.22490325114767176])
46 | np.testing.assert_allclose(lmlag, exp, RTOL)
47 | lmerror = panel_LMerror(self.y, self.x, self.w)
48 | exp = np.array([81.69630396101608, 1.5868998506678388e-19])
49 | np.testing.assert_allclose(lmerror, exp, RTOL)
50 | rlmlag = panel_rLMlag(self.y, self.x, self.w)
51 | exp = np.array([2.5125780962741793, 0.11294102977710921])
52 | np.testing.assert_allclose(rlmlag, exp, RTOL)
53 | rlmerror = panel_rLMerror(self.y, self.x, self.w)
54 | exp = np.array([32.14155241279442, 1.4333858484607395e-08])
55 | np.testing.assert_allclose(rlmerror, exp, RTOL)
56 |
57 | def test_Hausman(self):
58 | fe_lag = Panel_FE_Lag(self.y, self.x, self.w)
59 | # fe_error = Panel_FE_Error(self.y, self.x, self.w)
60 | re_lag = Panel_RE_Lag(self.y, self.x, self.w)
61 | # re_error = Panel_RE_Error(self.y, self.x, self.w)
62 | Hlag = panel_Hausman(fe_lag, re_lag)
63 | exp = np.array([-67.26822586935438, 1.0])
64 | np.testing.assert_allclose(Hlag, exp, RTOL)
65 | # Herror = panel_Hausman(fe_error, re_error)
66 | # exp = np.array([-84.38351088621853, 1.0])
67 | # np.testing.assert_allclose(Herror, exp, RTOL)
68 |
69 |
70 | if __name__ == "__main__":
71 | unittest.main()
72 |
--------------------------------------------------------------------------------
/spreg/tests/test_diagnostics_sp.py:
--------------------------------------------------------------------------------
1 | import unittest
2 | import numpy as np
3 | import libpysal
4 | from spreg import diagnostics
5 | from spreg.ols import OLS as OLS
6 | from spreg.twosls import TSLS as TSLS
7 | from spreg.twosls_sp import GM_Lag
8 | from spreg.diagnostics_sp import LMtests, MoranRes, spDcache, AKtest
9 | from libpysal.common import RTOL
10 |
11 |
12 | class TestLMtests(unittest.TestCase):
13 | def setUp(self):
14 | db = libpysal.io.open(libpysal.examples.get_path("columbus.dbf"), "r")
15 | y = np.array(db.by_col("HOVAL"))
16 | y = np.reshape(y, (49, 1))
17 | X = []
18 | X.append(db.by_col("INC"))
19 | X.append(db.by_col("CRIME"))
20 | X = np.array(X).T
21 | self.y = y
22 | self.X = X
23 | ols = OLS(self.y, self.X)
24 | self.ols = ols
25 | w = libpysal.io.open(libpysal.examples.get_path("columbus.gal"), "r").read()
26 | w.transform = "r"
27 | self.w = w
28 |
29 | def test_lm_err(self):
30 | lms = LMtests(self.ols, self.w)
31 | lme = np.array([3.097094, 0.078432])
32 | np.testing.assert_allclose(lms.lme, lme, RTOL)
33 |
34 | def test_lm_lag(self):
35 | lms = LMtests(self.ols, self.w)
36 | lml = np.array([0.981552, 0.321816])
37 | np.testing.assert_allclose(lms.lml, lml, RTOL)
38 |
39 | def test_rlm_err(self):
40 | lms = LMtests(self.ols, self.w)
41 | rlme = np.array([3.209187, 0.073226])
42 | np.testing.assert_allclose(lms.rlme, rlme, RTOL)
43 |
44 | def test_rlm_lag(self):
45 | lms = LMtests(self.ols, self.w)
46 | rlml = np.array([1.093645, 0.295665])
47 | np.testing.assert_allclose(lms.rlml, rlml, RTOL)
48 |
49 | def test_lm_sarma(self):
50 | lms = LMtests(self.ols, self.w)
51 | sarma = np.array([4.190739, 0.123025])
52 | np.testing.assert_allclose(lms.sarma, sarma, RTOL)
53 |
54 |
55 | class TestMoranRes(unittest.TestCase):
56 | def setUp(self):
57 | db = libpysal.io.open(libpysal.examples.get_path("columbus.dbf"), "r")
58 | y = np.array(db.by_col("HOVAL"))
59 | y = np.reshape(y, (49, 1))
60 | X = []
61 | X.append(db.by_col("INC"))
62 | X.append(db.by_col("CRIME"))
63 | X = np.array(X).T
64 | self.y = y
65 | self.X = X
66 | ols = OLS(self.y, self.X)
67 | self.ols = ols
68 | w = libpysal.io.open(libpysal.examples.get_path("columbus.gal"), "r").read()
69 | w.transform = "r"
70 | self.w = w
71 |
72 | def test_get_m_i(self):
73 | m = MoranRes(self.ols, self.w, z=True)
74 | np.testing.assert_allclose(m.I, 0.17130999999999999, RTOL)
75 |
76 | def test_get_v_i(self):
77 | m = MoranRes(self.ols, self.w, z=True)
78 | np.testing.assert_allclose(m.vI, 0.0081304900000000001, RTOL)
79 |
80 | def test_get_e_i(self):
81 | m = MoranRes(self.ols, self.w, z=True)
82 | np.testing.assert_allclose(m.eI, -0.034522999999999998, RTOL)
83 |
84 | def test_get_z_i(self):
85 | m = MoranRes(self.ols, self.w, z=True)
86 | np.testing.assert_allclose(m.zI, 2.2827389999999999, RTOL)
87 |
88 |
89 | class TestAKTest(unittest.TestCase):
90 | def setUp(self):
91 | db = libpysal.io.open(libpysal.examples.get_path("columbus.dbf"), "r")
92 | y = np.array(db.by_col("CRIME"))
93 | y = np.reshape(y, (49, 1))
94 | self.y = y
95 | X = []
96 | X.append(db.by_col("INC"))
97 | X = np.array(X).T
98 | self.X = X
99 | yd = []
100 | yd.append(db.by_col("HOVAL"))
101 | yd = np.array(yd).T
102 | self.yd = yd
103 | q = []
104 | q.append(db.by_col("DISCBD"))
105 | q = np.array(q).T
106 | self.q = q
107 | reg = TSLS(y, X, yd, q=q)
108 | self.reg = reg
109 | w = libpysal.weights.Rook.from_shapefile(
110 | libpysal.examples.get_path("columbus.shp")
111 | )
112 | w.transform = "r"
113 | self.w = w
114 |
115 | def test_gen_mi(self):
116 | ak = AKtest(self.reg, self.w)
117 | np.testing.assert_allclose(ak.mi, 0.2232672865437263, RTOL)
118 |
119 | def test_gen_ak(self):
120 | ak = AKtest(self.reg, self.w)
121 | np.testing.assert_allclose(ak.ak, 4.6428948758930852, RTOL)
122 |
123 | def test_gen_p(self):
124 | ak = AKtest(self.reg, self.w)
125 | np.testing.assert_allclose(ak.p, 0.031182360054340875, RTOL)
126 |
127 | def test_sp_mi(self):
128 | ak = AKtest(self.reg, self.w, case="gen")
129 | np.testing.assert_allclose(ak.mi, 0.2232672865437263, RTOL)
130 |
131 | def test_sp_ak(self):
132 | ak = AKtest(self.reg, self.w, case="gen")
133 | np.testing.assert_allclose(ak.ak, 1.1575928784397795, RTOL)
134 |
135 | def test_sp_p(self):
136 | ak = AKtest(self.reg, self.w, case="gen")
137 | np.testing.assert_allclose(ak.p, 0.28196531619791054, RTOL)
138 |
139 |
140 | class TestSpDcache(unittest.TestCase):
141 | def setUp(self):
142 | db = libpysal.io.open(libpysal.examples.get_path("columbus.dbf"), "r")
143 | y = np.array(db.by_col("HOVAL"))
144 | y = np.reshape(y, (49, 1))
145 | X = []
146 | X.append(db.by_col("INC"))
147 | X.append(db.by_col("CRIME"))
148 | X = np.array(X).T
149 | self.y = y
150 | self.X = X
151 | ols = OLS(self.y, self.X)
152 | self.ols = ols
153 | w = libpysal.io.open(libpysal.examples.get_path("columbus.gal"), "r").read()
154 | w.transform = "r"
155 | self.w = w
156 |
157 | def test_j(self):
158 | cache = spDcache(self.ols, self.w)
159 | np.testing.assert_allclose(cache.j[0][0], 0.62330311259039439, RTOL)
160 |
161 | def test_t(self):
162 | cache = spDcache(self.ols, self.w)
163 | np.testing.assert_allclose(cache.t, 22.751186696900984, RTOL)
164 |
165 | def test_trA(self):
166 | cache = spDcache(self.ols, self.w)
167 | np.testing.assert_allclose(cache.trA, 1.5880426389276328, RTOL)
168 |
169 | def test_utwuDs(self):
170 | cache = spDcache(self.ols, self.w)
171 | np.testing.assert_allclose(cache.utwuDs[0][0], 8.3941977502916068, RTOL)
172 |
173 | def test_utwyDs(self):
174 | cache = spDcache(self.ols, self.w)
175 | np.testing.assert_allclose(cache.utwyDs[0][0], 5.475255215067957, RTOL)
176 |
177 | def test_wu(self):
178 | cache = spDcache(self.ols, self.w)
179 | np.testing.assert_allclose(cache.wu[0][0], -10.681344941514411, RTOL)
180 |
181 |
182 | if __name__ == "__main__":
183 | unittest.main()
184 |
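The classes above check each `LMtests` attribute one assertion at a time; a standalone sketch, reusing only calls that already appear in this file, gathers all five statistics from a single pass over the same columbus fit.

```python
import libpysal
import numpy as np
from spreg.ols import OLS
from spreg.diagnostics_sp import LMtests

# Same columbus setup the fixtures above build.
db = libpysal.io.open(libpysal.examples.get_path("columbus.dbf"), "r")
y = np.array(db.by_col("HOVAL")).reshape(49, 1)
X = np.array([db.by_col("INC"), db.by_col("CRIME")]).T
w = libpysal.io.open(libpysal.examples.get_path("columbus.gal"), "r").read()
w.transform = "r"

lms = LMtests(OLS(y, X), w)
for name in ("lme", "lml", "rlme", "rlml", "sarma"):
    stat, pval = getattr(lms, name)  # each attribute is a (stat, p-value) pair
    print(f"{name:>5}: statistic={stat:.6f}, p={pval:.6f}")
```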
--------------------------------------------------------------------------------
/spreg/tests/test_diagnostics_tsls.py:
--------------------------------------------------------------------------------
1 | import unittest
2 | import numpy as np
3 | import libpysal
4 | import spreg.diagnostics_tsls as diagnostics_tsls
5 | import spreg.diagnostics as diagnostics
6 | from spreg.ols import OLS as OLS
7 | from spreg.twosls import TSLS as TSLS
8 | from spreg.twosls_sp import GM_Lag
9 | from scipy.stats import pearsonr
10 | from libpysal.common import RTOL
11 |
12 | # create regression object used by the aspatial tests
13 | db = libpysal.io.open(libpysal.examples.get_path("columbus.dbf"), "r")
14 | y = np.array(db.by_col("CRIME"))
15 | y = np.reshape(y, (49, 1))
16 | X = []
17 | X.append(db.by_col("INC"))
18 | X = np.array(X).T
19 | yd = []
20 | yd.append(db.by_col("HOVAL"))
21 | yd = np.array(yd).T
22 | q = []
23 | q.append(db.by_col("DISCBD"))
24 | q = np.array(q).T
25 | reg = TSLS(y, X, yd, q)
26 |
27 | # create regression object for spatial test
28 | db = libpysal.io.open(libpysal.examples.get_path("columbus.dbf"), "r")
29 | y = np.array(db.by_col("HOVAL"))
30 | y = np.reshape(y, (49, 1))
31 | X = np.array(db.by_col("INC"))
32 | X = np.reshape(X, (49, 1))
33 | yd = np.array(db.by_col("CRIME"))
34 | yd = np.reshape(yd, (49, 1))
35 | q = np.array(db.by_col("DISCBD"))
36 | q = np.reshape(q, (49, 1))
37 | w = libpysal.weights.Rook.from_shapefile(libpysal.examples.get_path("columbus.shp"))
38 | w.transform = "r"
39 | regsp = GM_Lag(y, X, w=w, yend=yd, q=q, w_lags=2)
40 |
41 |
42 | class TestTStat(unittest.TestCase):
43 | def test_t_stat(self):
44 | obs = diagnostics_tsls.t_stat(reg)
45 | exp = [
46 | (5.8452644704588588, 4.9369075950019865e-07),
47 | (0.36760156683572748, 0.71485634049075841),
48 | (-1.9946891307832111, 0.052021795864651159),
49 | ]
50 | np.testing.assert_allclose(obs, exp, RTOL)
51 |
52 |
53 | class TestPr2Aspatial(unittest.TestCase):
54 | def test_pr2_aspatial(self):
55 | obs = diagnostics_tsls.pr2_aspatial(reg)
56 | exp = 0.2793613712817381
57 | np.testing.assert_allclose(obs, exp, RTOL)
58 |
59 |
60 | class TestPr2Spatial(unittest.TestCase):
61 | def test_pr2_spatial(self):
62 | obs = diagnostics_tsls.pr2_spatial(regsp)
63 | exp = 0.29964855438065163
64 | np.testing.assert_allclose(obs, exp, RTOL)
65 |
66 |
67 | if __name__ == "__main__":
68 | unittest.main()
69 |
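The expected `pr2_aspatial` value above can be reproduced by hand if, as its name and the `pearsonr` import at the top of this file suggest, it is the squared Pearson correlation between observed and predicted values. The following is a sketch under that assumption, not a restatement of the spreg source.

```python
from scipy.stats import pearsonr

def pr2_by_hand(fitted):
    # assumes fitted.y and fitted.predy are (n, 1) arrays, as in spreg's TSLS
    r, _ = pearsonr(fitted.y.flatten(), fitted.predy.flatten())
    return r ** 2

# Under the stated assumption, pr2_by_hand(reg) matches the
# diagnostics_tsls.pr2_aspatial(reg) value asserted above (~0.2794).
```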
--------------------------------------------------------------------------------
/spreg/tests/test_ml_error.py:
--------------------------------------------------------------------------------
1 | import unittest
2 | import libpysal
3 | import numpy as np
4 | from scipy import sparse
5 | from spreg.ml_error import ML_Error
6 | from libpysal.common import RTOL, ATOL
7 | from warnings import filterwarnings
8 |
9 | filterwarnings("ignore", category=sparse.SparseEfficiencyWarning)
10 | filterwarnings("ignore", message="^Method 'bounded' does not support")
11 |
12 |
13 | class TestMLError(unittest.TestCase):
14 | def setUp(self):
15 | db = libpysal.io.open(libpysal.examples.get_path("baltim.dbf"), "r")
16 | self.ds_name = "baltim.dbf"
17 | self.y_name = "PRICE"
18 | self.y = np.array(db.by_col(self.y_name)).T
19 | self.y.shape = (len(self.y), 1)
20 | self.x_names = ["NROOM", "AGE", "SQFT"]
21 | self.x = np.array([db.by_col(var) for var in self.x_names]).T
22 | ww = libpysal.io.open(libpysal.examples.get_path("baltim_q.gal"))
23 | self.w = ww.read()
24 | ww.close()
25 | self.w_name = "baltim_q.gal"
26 | self.w.transform = "r"
27 |
28 | def _estimate_and_compare(self, method="FULL", RTOL=RTOL):
29 | reg = ML_Error(
30 | self.y,
31 | self.x,
32 | w=self.w,
33 | name_y=self.y_name,
34 | name_x=self.x_names,
35 | name_w=self.w_name,
36 | method=method,
37 | )
38 | betas = np.array([[19.45930348],
39 | [ 3.98928064],
40 | [-0.16714232],
41 | [ 0.57336871],
42 | [ 0.71757002]])
43 | np.testing.assert_allclose(reg.betas, betas, RTOL + 0.0001)
44 | u = np.array([29.870239])
45 | np.testing.assert_allclose(reg.u[0], u, RTOL)
46 | predy = np.array([17.129761])
47 | np.testing.assert_allclose(reg.predy[0], predy, RTOL)
48 | n = 211
49 | np.testing.assert_allclose(reg.n, n, RTOL)
50 | k = 4
51 | np.testing.assert_allclose(reg.k, k, RTOL)
52 | y = np.array([47.])
53 | np.testing.assert_allclose(reg.y[0], y, RTOL)
54 | x = np.array([ 1. , 4. , 148. , 11.25])
55 | np.testing.assert_allclose(reg.x[0], x, RTOL)
56 | e = np.array([44.392043])
57 | np.testing.assert_allclose(reg.e_filtered[0], e, RTOL)
58 | my = 44.30718
59 | np.testing.assert_allclose(reg.mean_y, my)
60 | sy = 23.606077
61 | np.testing.assert_allclose(reg.std_y, sy)
62 | vm = np.array(
63 | [3.775969e+01, 1.337534e+00, 4.440495e-03, 2.890193e-02,
64 | 3.496050e-03]
65 | )
66 | np.testing.assert_allclose(reg.vm.diagonal(), vm, RTOL)
67 | sig2 = np.array([[219.239799]])
68 | np.testing.assert_allclose(reg.sig2, sig2, RTOL)
69 | pr2 = 0.341471
70 | np.testing.assert_allclose(reg.pr2, pr2, RTOL)
71 | std_err = np.array(
72 | [6.144892, 1.156518, 0.066637, 0.170006, 0.059127]
73 | )
74 | np.testing.assert_allclose(reg.std_err, std_err, RTOL)
75 | z_stat = [(3.166744811610107, 0.0015415552994677963),
76 | (3.4493895324306485, 0.0005618555635414317),
77 | (-2.5082495410045618, 0.012133094835810014),
78 | (3.3726442232925864, 0.0007445008419860677),
79 | (12.13599679437352, 6.807593113579489e-34)]
80 | np.testing.assert_allclose(reg.z_stat, z_stat, RTOL, atol=ATOL)
81 | logll = -881.269405
82 | np.testing.assert_allclose(reg.logll, logll, RTOL)
83 | aic = 1770.538809
84 | np.testing.assert_allclose(reg.aic, aic, RTOL)
85 | schwarz = 1783.946242
86 | np.testing.assert_allclose(reg.schwarz, schwarz, RTOL)
87 | def test_dense(self):
88 | self._estimate_and_compare(method="FULL")
89 |
90 | def test_LU(self):
91 | self._estimate_and_compare(method="LU", RTOL=RTOL * 10)
92 |
93 | def test_ord(self):
94 | reg = ML_Error(
95 | self.y,
96 | self.x,
97 | w=self.w,
98 | name_y=self.y_name,
99 | name_x=self.x_names,
100 | name_w=self.w_name,
101 | method="ORD",
102 | )
103 | betas = np.array([[19.45930348],
104 | [ 3.98928064],
105 | [-0.16714232],
106 | [ 0.57336871],
107 | [ 0.71757002]])
108 | np.testing.assert_allclose(reg.betas, betas, RTOL)
109 | u = np.array([29.870239])
110 | np.testing.assert_allclose(reg.u[0], u, RTOL)
111 | predy = np.array([17.129761])
112 | np.testing.assert_allclose(reg.predy[0], predy, RTOL)
113 | n = 211
114 | np.testing.assert_allclose(reg.n, n, RTOL)
115 | k = 4
116 | np.testing.assert_allclose(reg.k, k, RTOL)
117 | y = np.array([47.])
118 | np.testing.assert_allclose(reg.y[0], y, RTOL)
119 | x = np.array([ 1. , 4. , 148. , 11.25])
120 | np.testing.assert_allclose(reg.x[0], x, RTOL)
121 | e = np.array([44.392043])
122 | np.testing.assert_allclose(reg.e_filtered[0], e, RTOL)
123 | my = 44.30718
124 | np.testing.assert_allclose(reg.mean_y, my)
125 | sy = 23.606077
126 | np.testing.assert_allclose(reg.std_y, sy)
127 | vm = np.array(
128 | [3.775969e+01, 1.337534e+00, 4.440495e-03, 2.890193e-02,
129 | 3.586781e-03]
130 | )
131 | np.testing.assert_allclose(reg.vm.diagonal(), vm, RTOL * 10)
132 | sig2 = np.array([[219.239799]])
133 | np.testing.assert_allclose(reg.sig2, sig2, RTOL)
134 | pr2 = 0.34147059826596426
135 | np.testing.assert_allclose(reg.pr2, pr2)
136 | std_err = np.array(
137 | [6.144892, 1.156518, 0.066637, 0.170006, 0.05989 ]
138 | )
139 | np.testing.assert_allclose(reg.std_err, std_err, RTOL * 10)
140 | z_stat = [(3.166744811610107, 0.0015415552994677963),
141 | (3.4493895324306485, 0.0005618555635414317),
142 | (-2.5082495410045618, 0.012133094835810014),
143 | (3.3726442232925864, 0.0007445008419860677),
144 | (11.981517603949666, 4.441183328428627e-33)]
145 | np.testing.assert_allclose(reg.z_stat, z_stat, rtol=RTOL, atol=ATOL)
146 | logll = -881.269405
147 | np.testing.assert_allclose(reg.logll, logll, RTOL)
148 | aic = 1770.538809
149 | np.testing.assert_allclose(reg.aic, aic, RTOL)
150 | schwarz = 1783.946242
151 | np.testing.assert_allclose(reg.schwarz, schwarz, RTOL)
152 |
153 |
154 | if __name__ == "__main__":
155 | unittest.main()
156 |
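`_estimate_and_compare` and `test_ord` pin three log-determinant strategies to shared expected values, with LU and ORD given looser tolerances. A standalone sketch of that three-way comparison on the same baltim data:

```python
import numpy as np
import libpysal
from spreg.ml_error import ML_Error

db = libpysal.io.open(libpysal.examples.get_path("baltim.dbf"), "r")
y = np.array(db.by_col("PRICE")).reshape(-1, 1)
x = np.array([db.by_col(v) for v in ("NROOM", "AGE", "SQFT")]).T
w = libpysal.io.open(libpysal.examples.get_path("baltim_q.gal")).read()
w.transform = "r"

for method in ("FULL", "LU", "ORD"):  # dense brute force, sparse LU, Ord eigenvalues
    reg = ML_Error(y, x, w=w, method=method)
    # the last coefficient is the spatial error parameter lambda (~0.7176 above)
    print(f"{method:>4}: logll={reg.logll:.6f}, lambda={reg.betas[-1, 0]:.6f}")
```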
--------------------------------------------------------------------------------
/spreg/tests/test_ml_error_regimes.py:
--------------------------------------------------------------------------------
1 | import unittest
2 | import libpysal
3 | import numpy as np
4 | from scipy import sparse
5 | from spreg.ml_error_regimes import ML_Error_Regimes
6 | from spreg.ml_error import ML_Error
7 | from libpysal.common import RTOL
8 | from warnings import filterwarnings
9 |
10 | filterwarnings("ignore", category=sparse.SparseEfficiencyWarning)
11 |
12 |
13 | class TestMLError(unittest.TestCase):
14 | def setUp(self):
15 | db = libpysal.io.open(libpysal.examples.get_path("baltim.dbf"), "r")
16 | self.ds_name = "baltim.dbf"
17 | self.y_name = "PRICE"
18 | self.y = np.array(db.by_col(self.y_name)).T
19 | self.y.shape = (len(self.y), 1)
20 | self.x_names = ["NROOM", "AGE", "SQFT"]
21 | self.x = np.array([db.by_col(var) for var in self.x_names]).T
22 | ww = libpysal.io.open(libpysal.examples.get_path("baltim_q.gal"))
23 | self.w = ww.read()
24 | ww.close()
25 | self.w_name = "baltim_q.gal"
26 | self.w.transform = "r"
27 | self.regimes = db.by_col("CITCOU")
28 | # Artificial:
29 | n = 256
30 | self.n2 = int(n / 2)
31 | self.x_a1 = np.random.uniform(-10, 10, (n, 1))
32 | self.x_a2 = np.random.uniform(1, 5, (n, 1))
33 | self.q_a = self.x_a2 + np.random.normal(0, 1, (n, 1))
34 | self.x_a = np.hstack((self.x_a1, self.x_a2))
35 | self.y_a = np.dot(
36 | np.hstack((np.ones((n, 1)), self.x_a)), np.array([[1], [0.5], [2]])
37 | ) + np.random.normal(0, 1, (n, 1))
38 | latt = int(np.sqrt(n))
39 | self.w_a = libpysal.weights.util.lat2W(latt, latt)
40 | self.w_a.transform = "r"
41 | self.regi_a = [0] * (n // 2) + [1] * (n // 2)  # integer division: the two regimes must exactly partition all n observations
42 | self.w_a1 = libpysal.weights.util.lat2W(latt // 2, latt)
43 | self.w_a1.transform = "r"
44 |
45 | def test_model1(self):
46 | reg = ML_Error_Regimes(
47 | self.y,
48 | self.x,
49 | self.regimes,
50 | w=self.w,
51 | name_y=self.y_name,
52 | name_x=self.x_names,
53 | name_w=self.w_name,
54 | name_ds=self.ds_name,
55 | name_regimes="CITCOU",
56 | regime_err_sep=False,
57 | )
58 | betas = np.array(
59 | [
60 | [-2.39491278],
61 | [4.873757],
62 | [-0.02911854],
63 | [0.33275008],
64 | [31.79618475],
65 | [2.98102401],
66 | [-0.23710892],
67 | [0.80581127],
68 | [0.61770744],
69 | ]
70 | )
71 | np.testing.assert_allclose(reg.betas, betas, RTOL)
72 | u = np.array([30.46599009])
73 | np.testing.assert_allclose(reg.u[0], u, RTOL)
74 | predy = np.array([16.53400991])
75 | np.testing.assert_allclose(reg.predy[0], predy, RTOL)
76 | n = 211
77 | np.testing.assert_allclose(reg.n, n, RTOL)
78 | k = 8
79 | np.testing.assert_allclose(reg.k, k, RTOL)
80 | y = np.array([47.0])
81 | np.testing.assert_allclose(reg.y[0], y, RTOL)
82 | x = np.array([1.0, 4.0, 148.0, 11.25, 0.0, 0.0, 0.0, 0.0])
83 | np.testing.assert_allclose(reg.x[0], x, RTOL)
84 | e = np.array([34.69181334])
85 | np.testing.assert_allclose(reg.e_filtered[0], e, RTOL)
86 | my = 44.307180094786695
87 | np.testing.assert_allclose(reg.mean_y, my, RTOL)
88 | sy = 23.606076835380495
89 | np.testing.assert_allclose(reg.std_y, sy, RTOL)
90 | vm = np.array(
91 | [
92 | 58.50551173,
93 | 2.42952002,
94 | 0.00721525,
95 | 0.06391736,
96 | 80.59249161,
97 | 3.1610047,
98 | 0.0119782,
99 | 0.0499432,
100 | 0.00502785,
101 | ]
102 | )
103 | np.testing.assert_allclose(reg.vm.diagonal(), vm, RTOL)
104 | sig2 = np.array([[209.60639741]])
105 | np.testing.assert_allclose(reg.sig2, sig2, RTOL)
106 | pr2 = 0.43600837301477025
107 | np.testing.assert_allclose(reg.pr2, pr2, RTOL)
108 | std_err = np.array(
109 | [
110 | 7.64888957,
111 | 1.55869177,
112 | 0.08494262,
113 | 0.25281882,
114 | 8.9773321,
115 | 1.77792146,
116 | 0.10944497,
117 | 0.22347975,
118 | 0.07090735,
119 | ]
120 | )
121 | np.testing.assert_allclose(reg.std_err, std_err, RTOL)
122 | logll = -870.3331059537576
123 | np.testing.assert_allclose(reg.logll, logll, RTOL)
124 | aic = 1756.6662119075154
125 | np.testing.assert_allclose(reg.aic, aic, RTOL)
126 | schwarz = 1783.481076975324
127 | np.testing.assert_allclose(reg.schwarz, schwarz, RTOL)
128 | chow_r = np.array(
129 | [
130 | [8.40437046, 0.0037432],
131 | [0.64080535, 0.42341932],
132 | [2.25389396, 0.13327865],
133 | [1.96544702, 0.16093197],
134 | ]
135 | )
136 | np.testing.assert_allclose(reg.chow.regi, chow_r, RTOL)
137 | chow_j = 25.367913028011799
138 | np.testing.assert_allclose(reg.chow.joint[0], chow_j, RTOL)
139 |
140 | def test_model2(self):
141 | reg = ML_Error_Regimes(
142 | self.y,
143 | self.x,
144 | self.regimes,
145 | w=self.w,
146 | name_y=self.y_name,
147 | name_x=self.x_names,
148 | name_w=self.w_name,
149 | name_ds=self.ds_name,
150 | name_regimes="CITCOU",
151 | regime_err_sep=True,
152 | )
153 | betas = np.array(
154 | [
155 | [3.66158216],
156 | [4.55700255],
157 | [-0.08045502],
158 | [0.44800318],
159 | [0.17774677],
160 | [33.3086368],
161 | [2.44709405],
162 | [-0.18803509],
163 | [0.68956598],
164 | [0.75599089],
165 | ]
166 | )
167 | np.testing.assert_allclose(reg.betas, betas, RTOL)
168 | vm = np.array(
169 | [
170 | 40.60994599,
171 | -7.25413138,
172 | -0.16605501,
173 | 0.48961884,
174 | 0.0,
175 | 0.0,
176 | 0.0,
177 | 0.0,
178 | 0.0,
179 | 0.0,
180 | ]
181 | )
182 | np.testing.assert_allclose(reg.vm[0], vm, RTOL)
183 | u = np.array([31.97771505])
184 | np.testing.assert_allclose(reg.u[0], u, RTOL)
185 | predy = np.array([15.02228495])
186 | np.testing.assert_allclose(reg.predy[0], predy, RTOL)
187 | e = np.array([33.83065421])
188 | np.testing.assert_allclose(reg.e_filtered[0], e, RTOL)
189 | chow_r = np.array(
190 | [
191 | [6.88023639, 0.0087154],
192 | [0.90512612, 0.34141092],
193 | [0.75996258, 0.38334023],
194 | [0.56882946, 0.45072443],
195 | [12.18358581, 0.00048212],
196 | ]
197 | )
198 | np.testing.assert_allclose(reg.chow.regi, chow_r, RTOL)
199 | chow_j = 26.673798071789673
200 | np.testing.assert_allclose(reg.chow.joint[0], chow_j, RTOL)
201 | # Artificial:
202 | model = ML_Error_Regimes(
203 | self.y_a, self.x_a, self.regi_a, w=self.w_a, regime_err_sep=True
204 | )
205 | model1 = ML_Error(
206 | self.y_a[0 : (self.n2)].reshape((self.n2, 1)),
207 | self.x_a[0 : (self.n2)],
208 | w=self.w_a1,
209 | )
210 | model2 = ML_Error(
211 | self.y_a[(self.n2) :].reshape((self.n2, 1)),
212 | self.x_a[(self.n2) :],
213 | w=self.w_a1,
214 | )
215 | tbetas = np.vstack((model1.betas, model2.betas))
216 | np.testing.assert_allclose(model.betas, tbetas)
217 | vm = np.hstack((model1.vm.diagonal(), model2.vm.diagonal()))
218 | np.testing.assert_allclose(model.vm.diagonal(), vm, rtol=4)  # bare 4 is rtol, not decimal places; kept loose since the data are unseeded
219 |
220 |
221 | if __name__ == "__main__":
222 | unittest.main()
223 |
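The artificial-data block in `test_model2` draws unseeded random data, which is why its tolerances must stay loose. A sketch of the same equivalence it checks, regime_err_sep=True versus two separate per-regime fits, with a seeded generator as the only substantive change:

```python
import numpy as np
import libpysal
from spreg.ml_error import ML_Error
from spreg.ml_error_regimes import ML_Error_Regimes

rng = np.random.default_rng(0)
n, half = 256, 128
x = rng.uniform(-10, 10, (n, 2))
y = 1 + x @ np.array([[0.5], [2.0]]) + rng.normal(0, 1, (n, 1))
latt = int(np.sqrt(n))
w = libpysal.weights.util.lat2W(latt, latt)        # full lattice
w.transform = "r"
w_half = libpysal.weights.util.lat2W(latt // 2, latt)  # one regime's half
w_half.transform = "r"
regimes = [0] * half + [1] * half

both = ML_Error_Regimes(y, x, regimes, w=w, regime_err_sep=True)
top = ML_Error(y[:half], x[:half], w=w_half)
bottom = ML_Error(y[half:], x[half:], w=w_half)
np.testing.assert_allclose(both.betas, np.vstack((top.betas, bottom.betas)))
```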
--------------------------------------------------------------------------------
/spreg/tests/test_ml_lag.py:
--------------------------------------------------------------------------------
1 | import unittest
2 | import libpysal
3 | from scipy import sparse
4 | import numpy as np
5 | from spreg.ml_lag import ML_Lag
6 | from libpysal.common import RTOL
7 | from warnings import filterwarnings
8 |
9 | filterwarnings("ignore", category=sparse.SparseEfficiencyWarning)
10 |
11 |
12 | class TestMLError(unittest.TestCase):
13 | def setUp(self):
14 | db = libpysal.io.open(libpysal.examples.get_path("baltim.dbf"), "r")
15 | self.ds_name = "baltim.dbf"
16 | self.y_name = "PRICE"
17 | self.y = np.array(db.by_col(self.y_name)).T
18 | self.y.shape = (len(self.y), 1)
19 | self.x_names = ["NROOM", "AGE", "SQFT"]
20 | self.x = np.array([db.by_col(var) for var in self.x_names]).T
21 | ww = libpysal.io.open(libpysal.examples.get_path("baltim_q.gal"))
22 | self.w = ww.read()
23 | ww.close()
24 | self.w_name = "baltim_q.gal"
25 | self.w.transform = "r"
26 |
27 | def _estimate_and_compare(self, **kwargs):
28 | reg = ML_Lag(
29 | self.y,
30 | self.x,
31 | w=self.w,
32 | name_y=self.y_name,
33 | name_x=self.x_names,
34 | name_w=self.w_name,
35 | name_ds=self.ds_name,
36 | **kwargs
37 | )
38 | betas = np.array(
39 | [[-6.04040164], [3.48995114], [-0.20103955], [0.65462382], [0.62351143]]
40 | )
41 | np.testing.assert_allclose(reg.betas, betas, RTOL)
42 | u = np.array([47.51218398])
43 | np.testing.assert_allclose(reg.u[0], u, RTOL)
44 | predy = np.array([-0.51218398])
45 | np.testing.assert_allclose(reg.predy[0], predy, RTOL)
46 | n = 211
47 | np.testing.assert_allclose(reg.n, n, RTOL)
48 | k = 5
49 | np.testing.assert_allclose(reg.k, k, RTOL)
50 | y = np.array([47.0])
51 | np.testing.assert_allclose(reg.y[0], y, RTOL)
52 | x = np.array([1.0, 4.0, 148.0, 11.25])
53 | np.testing.assert_allclose(reg.x[0], x, RTOL)
54 | e = np.array([41.99251608])
55 | np.testing.assert_allclose(reg.e_pred[0], e, RTOL)
56 | my = 44.307180094786695
57 | np.testing.assert_allclose(reg.mean_y, my)
58 | sy = 23.606076835380495
59 | np.testing.assert_allclose(reg.std_y, sy)
60 | vm = np.array([28.57288755, 1.42341656, 0.00288068, 0.02956392, 0.00332139])
61 | np.testing.assert_allclose(reg.vm.diagonal(), vm, RTOL)
62 | sig2 = 216.27525647243797
63 | np.testing.assert_allclose(reg.sig2, sig2, RTOL)
64 | pr2 = 0.6133020721559487
65 | np.testing.assert_allclose(reg.pr2, pr2)
66 | std_err = np.array([5.34536131, 1.19307022, 0.05367198, 0.17194162, 0.05763147])
67 | np.testing.assert_allclose(reg.std_err, std_err, RTOL)
68 | logll = -875.92771143484833
69 | np.testing.assert_allclose(reg.logll, logll, RTOL)
70 | aic = 1761.8554228696967
71 | np.testing.assert_allclose(reg.aic, aic, RTOL)
72 | schwarz = 1778.614713537077
73 | np.testing.assert_allclose(reg.schwarz, schwarz, RTOL)
74 |
75 | def test_dense(self):
76 | self._estimate_and_compare(method="FULL")
77 |
78 | def test_ord(self):
79 | reg = ML_Lag(
80 | self.y,
81 | self.x,
82 | w=self.w,
83 | name_y=self.y_name,
84 | name_x=self.x_names,
85 | name_w=self.w_name,
86 | name_ds=self.ds_name,
87 | method="ORD",
88 | )
89 | betas = np.array(
90 | [[-6.04040164], [3.48995114], [-0.20103955], [0.65462382], [0.62351143]]
91 | )
92 | np.testing.assert_allclose(reg.betas, betas, RTOL)
93 | u = np.array([47.51218398])
94 | np.testing.assert_allclose(reg.u[0], u, RTOL)
95 | predy = np.array([-0.51218398])
96 | np.testing.assert_allclose(reg.predy[0], predy, RTOL)
97 | n = 211
98 | np.testing.assert_allclose(reg.n, n, RTOL)
99 | k = 5
100 | np.testing.assert_allclose(reg.k, k, RTOL)
101 | y = np.array([47.0])
102 | np.testing.assert_allclose(reg.y[0], y, RTOL)
103 | x = np.array([1.0, 4.0, 148.0, 11.25])
104 | np.testing.assert_allclose(reg.x[0], x, RTOL)
105 | e = np.array([41.99251608])
106 | np.testing.assert_allclose(reg.e_pred[0], e, RTOL)
107 | my = 44.307180094786695
108 | np.testing.assert_allclose(reg.mean_y, my)
109 | sy = 23.606076835380495
110 | np.testing.assert_allclose(reg.std_y, sy)
111 | vm = np.array([28.63404, 1.423698, 0.002884738, 0.02957845, 0.003379166])
112 | np.testing.assert_allclose(reg.vm.diagonal(), vm, RTOL)
113 | sig2 = 216.27525647243797
114 | np.testing.assert_allclose(reg.sig2, sig2, RTOL)
115 | pr2 = 0.6133020721559487
116 | np.testing.assert_allclose(reg.pr2, pr2)
117 | std_err = np.array([5.351078, 1.193188, 0.05371, 0.171984, 0.058131])
118 | np.testing.assert_allclose(reg.std_err, std_err, RTOL)
119 | logll = -875.92771143484833
120 | np.testing.assert_allclose(reg.logll, logll, RTOL)
121 | aic = 1761.8554228696967
122 | np.testing.assert_allclose(reg.aic, aic, RTOL)
123 | schwarz = 1778.614713537077
124 | np.testing.assert_allclose(reg.schwarz, schwarz, RTOL)
125 |
126 | def test_LU(self):
127 | self._estimate_and_compare(method="LU")
128 |
129 |
130 | if __name__ == "__main__":
131 | unittest.main()
132 |
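A short sketch of reading the fitted object `_estimate_and_compare` builds: the last entry of `betas` is the spatial autoregressive coefficient rho, and 1/(1 - rho) is the standard global multiplier implied by the lag model, assuming |rho| < 1.

```python
# `reg` is the fitted ML_Lag from _estimate_and_compare above.
rho = reg.betas[-1, 0]          # ~0.6235 per the expected values above
multiplier = 1.0 / (1.0 - rho)  # total-effect scaling implied by the lag model
print(f"rho={rho:.4f}, global multiplier={multiplier:.4f}")
```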
--------------------------------------------------------------------------------
/spreg/tests/test_ml_lag_regimes.py:
--------------------------------------------------------------------------------
1 | import unittest
2 | import libpysal
3 | from scipy import sparse
4 | import numpy as np
5 | from spreg.ml_lag_regimes import ML_Lag_Regimes
6 | from libpysal.common import RTOL
7 |
8 | from warnings import filterwarnings
9 |
10 | filterwarnings("ignore", category=sparse.SparseEfficiencyWarning)
11 |
12 |
13 | class TestMLError(unittest.TestCase):
14 | def setUp(self):
15 | db = libpysal.io.open(libpysal.examples.get_path("baltim.dbf"), "r")
16 | self.ds_name = "baltim.dbf"
17 | self.y_name = "PRICE"
18 | self.y = np.array(db.by_col(self.y_name)).T
19 | self.y.shape = (len(self.y), 1)
20 | self.x_names = ["NROOM", "AGE", "SQFT"]
21 | self.x = np.array([db.by_col(var) for var in self.x_names]).T
22 | ww = libpysal.io.open(libpysal.examples.get_path("baltim_q.gal"))
23 | self.w = ww.read()
24 | ww.close()
25 | self.w_name = "baltim_q.gal"
26 | self.w.transform = "r"
27 | self.regimes = db.by_col("CITCOU")
28 |
29 | def test_model1(self):
30 | reg = ML_Lag_Regimes(
31 | self.y,
32 | self.x,
33 | self.regimes,
34 | w=self.w,
35 | name_y=self.y_name,
36 | name_x=self.x_names,
37 | name_w=self.w_name,
38 | name_ds=self.ds_name,
39 | name_regimes="CITCOU",
40 | regime_lag_sep=False,
41 | )
42 | betas = np.array(
43 | [
44 | [-15.00586577],
45 | [4.49600801],
46 | [-0.03180518],
47 | [0.34995882],
48 | [-4.54040395],
49 | [3.92187578],
50 | [-0.17021393],
51 | [0.81941371],
52 | [0.53850323],
53 | ]
54 | )
55 | np.testing.assert_allclose(reg.betas, betas, RTOL)
56 | u = np.array([32.73718478])
57 | np.testing.assert_allclose(reg.u[0], u, RTOL)
58 | predy = np.array([14.26281522])
59 | np.testing.assert_allclose(reg.predy[0], predy, RTOL)
60 | n = 211
61 | np.testing.assert_allclose(reg.n, n, RTOL)
62 | k = 9
63 | np.testing.assert_allclose(reg.k, k, RTOL)
64 | y = np.array([47.0])
65 | np.testing.assert_allclose(reg.y[0], y, RTOL)
66 | x = np.array([[1.0, 4.0, 148.0, 11.25, 0.0, 0.0, 0.0, 0.0]])
67 | np.testing.assert_allclose(reg.x[0].toarray(), x, RTOL)
68 | e = np.array([29.45407124])
69 | np.testing.assert_allclose(reg.e_pred[0], e, RTOL)
70 | my = 44.307180094786695
71 | np.testing.assert_allclose(reg.mean_y, my)
72 | sy = 23.606076835380495
73 | np.testing.assert_allclose(reg.std_y, sy)
74 | vm = np.array(
75 | [
76 | 47.42000914,
77 | 2.39526578,
78 | 0.00506895,
79 | 0.06480022,
80 | 69.67653371,
81 | 3.20661492,
82 | 0.01156766,
83 | 0.04862014,
84 | 0.00400775,
85 | ]
86 | )
87 | np.testing.assert_allclose(reg.vm.diagonal(), vm, RTOL)
88 | sig2 = 200.04433357145007
89 | np.testing.assert_allclose(reg.sig2, sig2, RTOL)
90 | pr2 = 0.6404460298085746
91 | np.testing.assert_allclose(reg.pr2, pr2)
92 | std_err = np.array(
93 | [
94 | 6.88621878,
95 | 1.54766462,
96 | 0.07119654,
97 | 0.25455888,
98 | 8.34724707,
99 | 1.79070235,
100 | 0.10755305,
101 | 0.22049975,
102 | 0.0633068,
103 | ]
104 | )
105 | np.testing.assert_allclose(reg.std_err, std_err, RTOL)
106 | logll = -864.98505596489736
107 | np.testing.assert_allclose(reg.logll, logll, RTOL)
108 | aic = 1747.9701119297947
109 | np.testing.assert_allclose(reg.aic, aic, RTOL)
110 | schwarz = 1778.1368351310794
111 | np.testing.assert_allclose(reg.schwarz, schwarz, RTOL)
112 | chow_r = np.array(
113 | [
114 | [1.00180776, 0.31687348],
115 | [0.05904944, 0.8080047],
116 | [1.16987812, 0.27942629],
117 | [1.95931177, 0.16158694],
118 | ]
119 | )
120 | np.testing.assert_allclose(reg.chow.regi, chow_r, RTOL)
121 | chow_j = 21.648337464039283
122 | np.testing.assert_allclose(reg.chow.joint[0], chow_j, RTOL)
123 |
124 | def test_model2(self):
125 | reg = ML_Lag_Regimes(
126 | self.y,
127 | self.x,
128 | self.regimes,
129 | w=self.w,
130 | name_y=self.y_name,
131 | name_x=self.x_names,
132 | name_w=self.w_name,
133 | name_ds=self.ds_name,
134 | name_regimes="CITCOU",
135 | regime_lag_sep=True,
136 | )
137 | betas = np.array(
138 | [
139 | [-0.71589799],
140 | [4.40910538],
141 | [-0.08652467],
142 | [0.46266265],
143 | [0.1627765],
144 | [-5.00594358],
145 | [2.91060349],
146 | [-0.18207394],
147 | [0.71129227],
148 | [0.66753263],
149 | ]
150 | )
151 | np.testing.assert_allclose(reg.betas, betas, RTOL)
152 | vm = np.array(
153 | [
154 | 55.3593679,
155 | -7.22927797,
156 | -0.19487326,
157 | 0.6030953,
158 | -0.52249569,
159 | 0.0,
160 | 0.0,
161 | 0.0,
162 | 0.0,
163 | 0.0,
164 | ]
165 | )
166 | np.testing.assert_allclose(reg.vm[0], vm, RTOL)
167 | u = np.array([34.03630518])
168 | np.testing.assert_allclose(reg.u[0], u, RTOL)
169 | predy = np.array([12.96369482])
170 | np.testing.assert_allclose(reg.predy[0], predy, RTOL)
171 | e = np.array([32.46466912])
172 | np.testing.assert_allclose(reg.e_pred[0], e, RTOL)
173 | chow_r = np.array(
174 | [
175 | [0.15654726, 0.69235548],
176 | [0.43533847, 0.509381],
177 | [0.60552514, 0.43647766],
178 | [0.59214981, 0.441589],
179 | [11.69437282, 0.00062689],
180 | ]
181 | )
182 | np.testing.assert_allclose(reg.chow.regi, chow_r, RTOL)
183 | chow_j = 21.978012275873063
184 | np.testing.assert_allclose(reg.chow.joint[0], chow_j, RTOL)
185 |
186 |
187 | if __name__ == "__main__":
188 | unittest.main()
189 |
--------------------------------------------------------------------------------
/spreg/tests/test_nslx.py:
--------------------------------------------------------------------------------
1 | import unittest
2 | import numpy as np
3 | import libpysal
4 | import spreg
5 | import geopandas as gpd
6 | RTOL = 1e-04
7 |
8 | class TestNSLX(unittest.TestCase):
9 | def setUp(self):
10 | csdoh = libpysal.examples.load_example('chicagoSDOH')
11 | dfs = gpd.read_file(csdoh.get_path('Chi-SDOH.shp'))
12 | self.y = dfs[['HIS_ct']]
13 | self.x = dfs[['Blk14P','Hisp14P','EP_NOHSDP']]
14 | self.coords = dfs[["COORD_X","COORD_Y"]]
15 |
16 | def test_nslx_slxvars(self):
17 | reg = spreg.NSLX(self.y, self.x, self.coords, var_flag=1,
18 | slx_vars=[False,False,True], params=[(6,np.inf,"exponential")])
19 | np.testing.assert_allclose(reg.betas,
20 | np.array([17.878828, 0.180593, 0.056209, 0.647127, 6.969201]), rtol=RTOL)
21 | vm = np.array([[ 1.91361545e-01, -2.09518978e-03, -2.89344531e-03, 1.50324352e-04,
22 | 0.00000000e+00],
23 | [-2.09518978e-03, 6.58549881e-05, 9.80509736e-05, -1.50773218e-04,
24 | 0.00000000e+00],
25 | [-2.89344531e-03, 9.80509736e-05, 2.35720689e-04, -3.57313408e-04,
26 | 0.00000000e+00],
27 | [ 1.50324352e-04, -1.50773218e-04, -3.57313408e-04, 7.66414008e-04,
28 | 0.00000000e+00],
29 | [ 0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
30 | 3.41278119e-02]])
31 | np.testing.assert_allclose(reg.vm, vm, RTOL)
32 |
33 | if __name__ == '__main__':
34 | unittest.main()
35 |
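One plausible reading of the expected values above, inferred from the test rather than from the NSLX docstring: `reg.betas` carries five entries, and the block structure of `vm` marks the final one (~6.97) as the fitted parameter of the exponential distance transform requested via `params=[(6, np.inf, "exponential")]`.

```python
import numpy as np

kernel_param = float(reg.betas[-1])         # ~6.9692 per the test above
kernel_se = float(np.sqrt(reg.vm[-1, -1]))  # its variance sits alone in vm's last block
print(f"exponential transform parameter: {kernel_param:.4f} (s.e. {kernel_se:.4f})")
```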
--------------------------------------------------------------------------------
/spreg/tests/test_ols.py:
--------------------------------------------------------------------------------
1 | import unittest
2 | import numpy as np
3 | import libpysal
4 | import spreg as EC
5 | from spreg import utils
6 | from libpysal.common import RTOL
7 |
8 | PEGP = libpysal.examples.get_path
9 |
10 |
11 | class TestBaseOLS(unittest.TestCase):
12 | def setUp(self):
13 | db = libpysal.io.open(PEGP("columbus.dbf"), "r")
14 | y = np.array(db.by_col("HOVAL"))
15 | self.y = np.reshape(y, (49, 1))
16 | X = []
17 | X.append(db.by_col("INC"))
18 | X.append(db.by_col("CRIME"))
19 | self.X = np.array(X).T
20 | self.w = libpysal.weights.Rook.from_shapefile(PEGP("columbus.shp"))
21 |
22 | def test_ols(self):
23 | self.X = np.hstack((np.ones(self.y.shape), self.X))
24 | ols = EC.ols.BaseOLS(self.y, self.X)
25 | np.testing.assert_allclose(
26 | ols.betas, np.array([[46.42818268], [0.62898397], [-0.48488854]])
27 | )
28 | vm = np.array(
29 | [
30 | [1.74022453e02, -6.52060364e00, -2.15109867e00],
31 | [-6.52060364e00, 2.87200008e-01, 6.80956787e-02],
32 | [-2.15109867e00, 6.80956787e-02, 3.33693910e-02],
33 | ]
34 | )
35 | np.testing.assert_allclose(ols.vm, vm, RTOL)
36 |
37 | def test_ols_white1(self):
38 | self.X = np.hstack((np.ones(self.y.shape), self.X))
39 | ols = EC.ols.BaseOLS(self.y, self.X, robust="white", sig2n_k=True)
40 | np.testing.assert_allclose(
41 | ols.betas, np.array([[46.42818268], [0.62898397], [-0.48488854]])
42 | )
43 | vm = np.array(
44 | [
45 | [2.05819450e02, -6.83139266e00, -2.64825846e00],
46 | [-6.83139266e00, 2.58480813e-01, 8.07733167e-02],
47 | [-2.64825846e00, 8.07733167e-02, 3.75817181e-02],
48 | ]
49 | )
50 | np.testing.assert_allclose(ols.vm, vm, RTOL)
51 |
52 | def test_ols_white2(self):
53 | self.X = np.hstack((np.ones(self.y.shape), self.X))
54 | ols = EC.ols.BaseOLS(self.y, self.X, robust="white", sig2n_k=False)
55 | np.testing.assert_allclose(
56 | ols.betas, np.array([[46.42818268], [0.62898397], [-0.48488854]])
57 | )
58 | vm = np.array(
59 | [
60 | [1.93218259e02, -6.41314413e00, -2.48612018e00],
61 | [-6.41314413e00, 2.42655457e-01, 7.58280116e-02],
62 | [-2.48612018e00, 7.58280116e-02, 3.52807966e-02],
63 | ]
64 | )
65 | np.testing.assert_allclose(ols.vm, vm, RTOL)
66 |
67 | def test_OLS(self):
68 | ols = EC.OLS(
69 | self.y,
70 | self.X,
71 | self.w,
72 | spat_diag=True,
73 | moran=True,
74 | white_test=True,
75 | name_y="home value",
76 | name_x=["income", "crime"],
77 | name_ds="columbus",
78 | )
79 |
80 | np.testing.assert_allclose(ols.aic, 408.73548964604873, RTOL)
81 | np.testing.assert_allclose(ols.ar2, 0.32123239427957662, RTOL)
82 | np.testing.assert_allclose(
83 | ols.betas, np.array([[46.42818268], [0.62898397], [-0.48488854]]), RTOL
84 | )
85 | bp = np.array([2, 5.7667905131212587, 0.05594449410070558])
86 | ols_bp = np.array(
87 | [
88 | ols.breusch_pagan["df"],
89 | ols.breusch_pagan["bp"],
90 | ols.breusch_pagan["pvalue"],
91 | ]
92 | )
93 | np.testing.assert_allclose(bp, ols_bp, RTOL)
94 | np.testing.assert_allclose(
95 | ols.f_stat, (12.358198885356581, 5.0636903313953024e-05), RTOL
96 | )
97 | jb = np.array([2, 39.706155069114878, 2.387360356860208e-09])
98 | ols_jb = np.array(
99 | [ols.jarque_bera["df"], ols.jarque_bera["jb"], ols.jarque_bera["pvalue"]]
100 | )
101 | np.testing.assert_allclose(ols_jb, jb, RTOL)
102 | white = np.array([5, 2.90606708, 0.71446484])
103 | ols_white = np.array([ols.white["df"], ols.white["wh"], ols.white["pvalue"]])
104 | np.testing.assert_allclose(ols_white, white, RTOL)
105 | np.testing.assert_equal(ols.k, 3)
106 | kb = {"df": 2, "kb": 2.2700383871478675, "pvalue": 0.32141595215434604}
107 | for key in kb:
108 | np.testing.assert_allclose(ols.koenker_bassett[key], kb[key], RTOL)
109 | np.testing.assert_allclose(
110 | ols.lm_error, (4.1508117035117893, 0.041614570655392716), RTOL
111 | )
112 | np.testing.assert_allclose(
113 | ols.lm_lag, (0.98279980617162233, 0.32150855529063727), RTOL
114 | )
115 | np.testing.assert_allclose(
116 | ols.lm_sarma, (4.3222725729143736, 0.11519415308749938), RTOL
117 | )
118 | np.testing.assert_allclose(ols.logll, -201.3677448230244, RTOL)
119 | np.testing.assert_allclose(ols.mean_y, 38.436224469387746, RTOL)
120 | np.testing.assert_allclose(ols.moran_res[0], 0.20373540938, RTOL)
121 | np.testing.assert_allclose(ols.moran_res[1], 2.59180452208, RTOL)
122 | np.testing.assert_allclose(ols.moran_res[2], 0.00954740031251, RTOL)
123 | np.testing.assert_allclose(ols.mulColli, 12.537554873824675, RTOL)
124 | np.testing.assert_allclose(ols.n, 49, RTOL)
125 | np.testing.assert_string_equal(ols.name_ds, "columbus")
126 | np.testing.assert_equal(ols.name_gwk, None)
127 | np.testing.assert_string_equal(ols.name_w, "unknown")
128 | np.testing.assert_equal(ols.name_x, ["CONSTANT", "income", "crime"])
129 | np.testing.assert_string_equal(ols.name_y, "home value")
130 | np.testing.assert_allclose(ols.predy[3], np.array([33.53969014]), RTOL)
131 | np.testing.assert_allclose(ols.r2, 0.34951437785126105, RTOL)
132 | np.testing.assert_allclose(
133 | ols.rlm_error, (3.3394727667427513, 0.067636278225568919), RTOL
134 | )
135 | np.testing.assert_allclose(
136 | ols.rlm_lag, (0.17146086940258459, 0.67881673703455414), RTOL
137 | )
138 | np.testing.assert_equal(ols.robust, "unadjusted")
139 | np.testing.assert_allclose(ols.schwarz, 414.41095054038061, RTOL)
140 | np.testing.assert_allclose(ols.sig2, 231.4568494392652, RTOL)
141 | np.testing.assert_allclose(ols.sig2ML, 217.28602192257551, RTOL)
142 | np.testing.assert_allclose(ols.sig2n, 217.28602192257551, RTOL)
143 |
144 | np.testing.assert_allclose(ols.t_stat[2][0], -2.65440864272, RTOL)
145 | np.testing.assert_allclose(ols.t_stat[2][1], 0.0108745049098, RTOL)
146 |
147 |
148 | if __name__ == "__main__":
149 | unittest.main()
150 |
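Written as if inside `test_OLS` above, a sketch of the diagnostic read-out those assertions encode: with `spat_diag=True` and `moran=True`, the fitted OLS exposes Moran's I on the residuals as an (I, z, p) triple and the LM family as (statistic, p-value) pairs.

```python
I, z, p = ols.moran_res
print(f"Moran's I on residuals: I={I:.4f}, z={z:.4f}, p={p:.4f}")
for name in ("lm_error", "lm_lag", "rlm_error", "rlm_lag", "lm_sarma"):
    stat, pval = getattr(ols, name)
    print(f"{name:>9}: {stat:.4f} (p={pval:.4f})")
```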
--------------------------------------------------------------------------------
/spreg/tests/test_ols_regimes.py:
--------------------------------------------------------------------------------
1 | import unittest
2 | import numpy as np
3 | import libpysal
4 | from spreg.ols import OLS
5 | from spreg.ols_regimes import OLS_Regimes
6 | from libpysal.common import RTOL
7 |
8 | PEGP = libpysal.examples.get_path
9 |
10 |
11 | class TestOLS_regimes(unittest.TestCase):
12 | def setUp(self):
13 | db = libpysal.io.open(libpysal.examples.get_path("columbus.dbf"), "r")
14 | self.y_var = "CRIME"
15 | self.y = np.array([db.by_col(self.y_var)]).reshape(49, 1)
16 | self.x_var = ["INC", "HOVAL"]
17 | self.x = np.array([db.by_col(name) for name in self.x_var]).T
18 | self.r_var = "NSA"
19 | self.regimes = db.by_col(self.r_var)
20 | self.w = libpysal.weights.Rook.from_shapefile(
21 | libpysal.examples.get_path("columbus.shp")
22 | )
23 | self.w.transform = "r"
24 |
25 | def test_OLS(self):
26 | start_suppress = np.get_printoptions()["suppress"]
27 | np.set_printoptions(suppress=True)
28 | ols = OLS_Regimes(
29 | self.y,
30 | self.x,
31 | self.regimes,
32 | w=self.w,
33 | regime_err_sep=False,
34 | constant_regi="many",
35 | nonspat_diag=False,
36 | spat_diag=True,
37 | name_y=self.y_var,
38 | name_x=self.x_var,
39 | name_ds="columbus",
40 | name_regimes=self.r_var,
41 | name_w="columbus.gal",
42 | )
43 | # np.testing.assert_allclose(ols.aic, 408.73548964604873 ,RTOL)
44 | np.testing.assert_allclose(ols.ar2, 0.50761700679873101, RTOL)
45 | np.testing.assert_allclose(
46 | ols.betas,
47 | np.array(
48 | [
49 | [68.78670869],
50 | [-1.9864167],
51 | [-0.10887962],
52 | [67.73579559],
53 | [-1.36937552],
54 | [-0.31792362],
55 | ]
56 | ),
57 | )
58 | vm = np.array([48.81339213, -2.14959579, -0.19968157, 0.0, 0.0, 0.0])
59 | np.testing.assert_allclose(ols.vm[0], vm, RTOL)
60 | np.testing.assert_allclose(ols.lm_error, (5.92970357, 0.01488775), RTOL)
61 | np.testing.assert_allclose(ols.lm_lag, (8.78315751, 0.00304024), RTOL)
62 | np.testing.assert_allclose(ols.lm_sarma, (8.89955982, 0.01168114), RTOL)
63 | np.testing.assert_allclose(ols.mean_y, 35.1288238979591, RTOL)
64 | np.testing.assert_equal(ols.k, 6)
65 | np.testing.assert_equal(ols.kf, 0)
66 | np.testing.assert_equal(ols.kr, 3)
67 | np.testing.assert_equal(ols.n, 49)
68 | np.testing.assert_equal(ols.nr, 2)
69 | np.testing.assert_equal(ols.name_ds, "columbus")
70 | np.testing.assert_equal(ols.name_gwk, None)
71 | np.testing.assert_equal(ols.name_w, "columbus.gal")
72 | np.testing.assert_equal(
73 | ols.name_x,
74 | ["0_CONSTANT", "0_INC", "0_HOVAL", "1_CONSTANT", "1_INC", "1_HOVAL"],
75 | )
76 | np.testing.assert_equal(ols.name_y, "CRIME")
77 | np.testing.assert_allclose(ols.predy[3], np.array([51.05003696]), RTOL)
78 | np.testing.assert_allclose(ols.r2, 0.55890690192386316, RTOL)
79 | np.testing.assert_allclose(ols.rlm_error, (0.11640231, 0.73296972), RTOL)
80 | np.testing.assert_allclose(ols.rlm_lag, (2.96985625, 0.08482939), RTOL)
81 | np.testing.assert_equal(ols.robust, "unadjusted")
82 | np.testing.assert_allclose(ols.sig2, 137.84897351821013, RTOL)
83 | np.testing.assert_allclose(ols.sig2n, 120.96950737312316, RTOL)
84 | np.testing.assert_allclose(ols.t_stat[2][0], -0.43342216706091791, RTOL)
85 | np.testing.assert_allclose(ols.t_stat[2][1], 0.66687472578594531, RTOL)
86 | np.set_printoptions(suppress=start_suppress)
87 |
88 | """
89 | def test_OLS_regi(self):
90 | # Artificial:
91 | n = 256
92 | x1 = np.random.uniform(-10,10,(n,1))
93 | y = np.dot(np.hstack((np.ones((n,1)),x1)),np.array([[1],[0.5]])) + np.random.normal(0,1,(n,1))
94 | latt = int(np.sqrt(n))
95 | regi = [0]*(n//2) + [1]*(n//2)  # integer division; n/2 would break under Python 3
96 | model = OLS_Regimes(y, x1, regimes=regi, regime_err_sep=True, sig2n_k=False)
97 | model1 = OLS(y[0:(n//2)].reshape((n//2, 1)), x1[0:(n//2)], sig2n_k=False)
98 | model2 = OLS(y[(n//2):n].reshape((n//2, 1)), x1[(n//2):n], sig2n_k=False)
99 | tbetas = np.vstack((model1.betas, model2.betas))
100 | np.testing.assert_allclose(model.betas,tbetas,RTOL)
101 | vm = np.hstack((model1.vm.diagonal(),model2.vm.diagonal()))
102 | np.testing.assert_allclose(model.vm.diagonal(), vm,RTOL)
103 | #Columbus:
104 | reg = OLS_Regimes(self.y, self.x, self.regimes, w=self.w, constant_regi='many', nonspat_diag=True, spat_diag=True, name_y=self.y_var, name_x=self.x_var, name_ds='columbus', name_regimes=self.r_var, name_w='columbus.gal', regime_err_sep=True)
105 | np.testing.assert_allclose(reg.multi[0].aic, 192.96044303402897 ,RTOL)
106 | tbetas = np.array([[ 68.78670869],
107 | [ -1.9864167 ],
108 | [ -0.10887962],
109 | [ 67.73579559],
110 | [ -1.36937552],
111 | [ -0.31792362]])
112 | np.testing.assert_allclose(tbetas, reg.betas,RTOL)
113 | vm = np.array([ 41.68828023, -1.83582717, -0.17053478, 0. ,
114 | 0. , 0. ])
115 | np.testing.assert_allclose(reg.vm[0], vm,RTOL)
116 | u_3 = np.array([[ 0.31781838],
117 | [-5.6905584 ],
118 | [-6.8819715 ]])
119 | np.testing.assert_allclose(reg.u[0:3], u_3,RTOL)
120 | predy_3 = np.array([[ 15.40816162],
121 | [ 24.4923124 ],
122 | [ 37.5087525 ]])
123 | np.testing.assert_allclose(reg.predy[0:3], predy_3,RTOL)
124 | chow_regi = np.array([[ 0.01002733, 0.92023592],
125 | [ 0.46017009, 0.49754449],
126 | [ 0.60732697, 0.43579603]])
127 | np.testing.assert_allclose(reg.chow.regi, chow_regi,RTOL)
128 | np.testing.assert_allclose(reg.chow.joint[0], 0.67787986791767096,RTOL)
129 | """
130 |
131 | def test_OLS_fixed(self):
132 | start_suppress = np.get_printoptions()["suppress"]
133 | np.set_printoptions(suppress=True)
134 | ols = OLS_Regimes(
135 | self.y,
136 | self.x,
137 | self.regimes,
138 | w=self.w,
139 | cols2regi=[False, True],
140 | regime_err_sep=True,
141 | constant_regi="one",
142 | nonspat_diag=False,
143 | spat_diag=True,
144 | name_y=self.y_var,
145 | name_x=self.x_var,
146 | name_ds="columbus",
147 | name_regimes=self.r_var,
148 | name_w="columbus.gal",
149 | )
150 | np.testing.assert_allclose(
151 | ols.betas,
152 | np.array([[-0.24385565], [-0.26335026], [68.89701137], [-1.67389685]]),
153 | RTOL,
154 | )
155 | vm = np.array([0.02354271, 0.01246677, 0.00424658, -0.04921356])
156 | np.testing.assert_allclose(ols.vm[0], vm, RTOL)
157 | np.testing.assert_allclose(ols.lm_error, (5.62668744, 0.01768903), RTOL)
158 | np.testing.assert_allclose(ols.lm_lag, (9.43264957, 0.00213156), RTOL)
159 | np.testing.assert_allclose(ols.mean_y, 35.12882389795919, RTOL)
160 | np.testing.assert_equal(ols.kf, 2)
161 | np.testing.assert_equal(ols.kr, 1)
162 | np.testing.assert_equal(ols.n, 49)
163 | np.testing.assert_equal(ols.nr, 2)
164 | np.testing.assert_equal(ols.name_ds, "columbus")
165 | np.testing.assert_equal(ols.name_gwk, None)
166 | np.testing.assert_equal(ols.name_w, "columbus.gal")
167 | np.testing.assert_equal(
168 | ols.name_x, ["0_HOVAL", "1_HOVAL", "_Global_CONSTANT", "_Global_INC"]
169 | )
170 | np.testing.assert_equal(ols.name_y, "CRIME")
171 | np.testing.assert_allclose(ols.predy[3], np.array([52.65974636]), RTOL)
172 | np.testing.assert_allclose(ols.r2, 0.5525561183786056, RTOL)
173 | np.testing.assert_equal(ols.robust, "unadjusted")
174 | np.testing.assert_allclose(ols.t_stat[2][0], 13.848705206463748, RTOL)
175 | np.testing.assert_allclose(ols.t_stat[2][1], 7.776650625274256e-18, RTOL)
176 | np.set_printoptions(suppress=start_suppress)
177 |
178 |
179 | if __name__ == "__main__":
180 | unittest.main()
181 |
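The `name_x` assertion in `test_OLS_fixed` documents the coefficient layout that `cols2regi=[False, True]` with `constant_regi="one"` produces: HOVAL varies by regime while the constant and INC stay global. A sketch, written as if inside that test, pairing names with estimates:

```python
for name, b in zip(ols.name_x, ols.betas.flatten()):
    print(f"{name:>18}: {b:.4f}")
# 0_HOVAL / 1_HOVAL are regime-specific; _Global_CONSTANT and _Global_INC
# are shared across regimes, matching the betas asserted above.
```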
--------------------------------------------------------------------------------
/spreg/tests/test_ols_sparse.py:
--------------------------------------------------------------------------------
1 | import unittest
2 | import numpy as np
3 | import libpysal
4 | import spreg as EC
5 | from scipy import sparse
6 | from libpysal.common import RTOL
7 |
8 | PEGP = libpysal.examples.get_path
9 |
10 |
11 | class TestBaseOLS(unittest.TestCase):
12 | def setUp(self):
13 | db = libpysal.io.open(PEGP("columbus.dbf"), "r")
14 | y = np.array(db.by_col("HOVAL"))
15 | self.y = np.reshape(y, (49, 1))
16 | X = []
17 | X.append(db.by_col("INC"))
18 | X.append(db.by_col("CRIME"))
19 | self.X = np.array(X).T
20 | self.w = libpysal.weights.Rook.from_shapefile(PEGP("columbus.shp"))
21 |
22 | def test_ols(self):
23 | self.X = np.hstack((np.ones(self.y.shape), self.X))
24 | self.X = sparse.csr_matrix(self.X)
25 | ols = EC.ols.BaseOLS(self.y, self.X)
26 | np.testing.assert_allclose(
27 | ols.betas, np.array([[46.42818268], [0.62898397], [-0.48488854]])
28 | )
29 | vm = np.array(
30 | [
31 | [1.74022453e02, -6.52060364e00, -2.15109867e00],
32 | [-6.52060364e00, 2.87200008e-01, 6.80956787e-02],
33 | [-2.15109867e00, 6.80956787e-02, 3.33693910e-02],
34 | ]
35 | )
36 | np.testing.assert_allclose(ols.vm, vm, RTOL)
37 |
38 | def test_OLS(self):
39 | self.X = sparse.csr_matrix(self.X)
40 | ols = EC.OLS(
41 | self.y,
42 | self.X,
43 | self.w,
44 | spat_diag=True,
45 | moran=True,
46 | name_y="home value",
47 | name_x=["income", "crime"],
48 | name_ds="columbus",
49 | nonspat_diag=True,
50 | white_test=True,
51 | )
52 |
53 | np.testing.assert_allclose(ols.aic, 408.73548964604873, RTOL)
54 | np.testing.assert_allclose(ols.ar2, 0.32123239427957662, RTOL)
55 | np.testing.assert_allclose(
56 | ols.betas, np.array([[46.42818268], [0.62898397], [-0.48488854]]), RTOL
57 | )
58 | bp = np.array([2, 5.7667905131212587, 0.05594449410070558])
59 | ols_bp = np.array(
60 | [
61 | ols.breusch_pagan["df"],
62 | ols.breusch_pagan["bp"],
63 | ols.breusch_pagan["pvalue"],
64 | ]
65 | )
66 | np.testing.assert_allclose(bp, ols_bp, RTOL)
67 | np.testing.assert_allclose(
68 | ols.f_stat, (12.358198885356581, 5.0636903313953024e-05), RTOL
69 | )
70 | jb = np.array([2, 39.706155069114878, 2.387360356860208e-09])
71 | ols_jb = np.array(
72 | [ols.jarque_bera["df"], ols.jarque_bera["jb"], ols.jarque_bera["pvalue"]]
73 | )
74 | np.testing.assert_allclose(ols_jb, jb, RTOL)
75 | white = np.array([5, 2.90606708, 0.71446484])
76 | ols_white = np.array([ols.white["df"], ols.white["wh"], ols.white["pvalue"]])
77 | np.testing.assert_allclose(ols_white, white, RTOL)
78 | np.testing.assert_equal(ols.k, 3)
79 | kb = {"df": 2, "kb": 2.2700383871478675, "pvalue": 0.32141595215434604}
80 | for key in kb:
81 | np.testing.assert_allclose(ols.koenker_bassett[key], kb[key], RTOL)
82 | np.testing.assert_allclose(
83 | ols.lm_error, (4.1508117035117893, 0.041614570655392716), RTOL
84 | )
85 | np.testing.assert_allclose(
86 | ols.lm_lag, (0.98279980617162233, 0.32150855529063727), RTOL
87 | )
88 | np.testing.assert_allclose(
89 | ols.lm_sarma, (4.3222725729143736, 0.11519415308749938), RTOL
90 | )
91 | np.testing.assert_allclose(ols.logll, -201.3677448230244, RTOL)
92 | np.testing.assert_allclose(ols.mean_y, 38.436224469387746, RTOL)
93 | np.testing.assert_allclose(ols.moran_res[0], 0.20373540938, RTOL)
94 | np.testing.assert_allclose(ols.moran_res[1], 2.59180452208, RTOL)
95 | np.testing.assert_allclose(ols.moran_res[2], 0.00954740031251, RTOL)
96 | np.testing.assert_allclose(ols.mulColli, 12.537554873824675, RTOL)
97 | np.testing.assert_equal(ols.n, 49)
98 | np.testing.assert_equal(ols.name_ds, "columbus")
99 | np.testing.assert_equal(ols.name_gwk, None)
100 | np.testing.assert_equal(ols.name_w, "unknown")
101 | np.testing.assert_equal(ols.name_x, ["CONSTANT", "income", "crime"])
102 | np.testing.assert_equal(ols.name_y, "home value")
103 | np.testing.assert_allclose(ols.predy[3], np.array([33.53969014]), RTOL)
104 | np.testing.assert_allclose(ols.r2, 0.34951437785126105, RTOL)
105 | np.testing.assert_allclose(
106 | ols.rlm_error, (3.3394727667427513, 0.067636278225568919), RTOL
107 | )
108 | np.testing.assert_allclose(
109 | ols.rlm_lag, (0.17146086940258459, 0.67881673703455414), RTOL
110 | )
111 | np.testing.assert_equal(ols.robust, "unadjusted")
112 | np.testing.assert_allclose(ols.schwarz, 414.41095054038061, RTOL)
113 | np.testing.assert_allclose(ols.sig2, 231.4568494392652, RTOL)
114 | np.testing.assert_allclose(ols.sig2ML, 217.28602192257551, RTOL)
115 | np.testing.assert_allclose(ols.sig2n, 217.28602192257551, RTOL)
116 |
117 | np.testing.assert_allclose(ols.t_stat[2][0], -2.65440864272, RTOL)
118 | np.testing.assert_allclose(ols.t_stat[2][1], 0.0108745049098, RTOL)
119 |
120 |
121 | if __name__ == "__main__":
122 | unittest.main()
123 |
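The point of this module is that the OLS path accepts a scipy.sparse design matrix and reproduces the dense results, which is why its expected values duplicate test_ols.py. A condensed standalone parity check along those lines:

```python
import numpy as np
import libpysal
import spreg as EC
from scipy import sparse

db = libpysal.io.open(libpysal.examples.get_path("columbus.dbf"), "r")
y = np.array(db.by_col("HOVAL")).reshape(49, 1)
X = np.hstack((np.ones(y.shape), np.array([db.by_col("INC"), db.by_col("CRIME")]).T))

dense = EC.ols.BaseOLS(y, X)
sparse_fit = EC.ols.BaseOLS(y, sparse.csr_matrix(X))
np.testing.assert_allclose(dense.betas, sparse_fit.betas)  # same estimates either way
```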
--------------------------------------------------------------------------------
/spreg/tests/test_panel_fe.py:
--------------------------------------------------------------------------------
1 | import unittest
2 | import libpysal
3 | import numpy as np
4 | from spreg.panel_fe import Panel_FE_Lag, Panel_FE_Error
5 | from libpysal.common import RTOL
6 |
7 |
8 | class Test_Panel_FE_Lag(unittest.TestCase):
9 | def setUp(self):
10 | self.ds_name = "NCOVR"
11 | nat = libpysal.examples.load_example(self.ds_name)
12 | self.db = libpysal.io.open(nat.get_path("NAT.dbf"), "r")
13 | nat_shp = libpysal.examples.get_path("NAT.shp")
14 | self.w = libpysal.weights.Queen.from_shapefile(nat_shp)
15 | self.w.transform = "r"
16 | self.y_name = ["HR70", "HR80", "HR90"]
17 | self.x_names = ["RD70", "RD80", "RD90", "PS70", "PS80", "PS90"]
18 | self.y = np.array([self.db.by_col(name) for name in self.y_name]).T
19 | self.x = np.array([self.db.by_col(name) for name in self.x_names]).T
20 |
21 | def test_Panel(self):
22 | reg = Panel_FE_Lag(
23 | self.y,
24 | self.x,
25 | w=self.w,
26 | name_y=self.y_name,
27 | name_x=self.x_names,
28 | name_ds=self.ds_name,
29 | )
30 | betas = np.array([[0.80058859], [-2.60035236], [0.19030424]])
31 | np.testing.assert_allclose(reg.betas, betas, RTOL)
32 | u = np.array([-2.70317346])
33 | np.testing.assert_allclose(reg.u[0], u, RTOL)
34 | predy = np.array([-0.24876891])
35 | np.testing.assert_allclose(reg.predy[0], predy, RTOL)
36 | vm = np.array([0.02606527, 0.24359025, 0.00025597])
37 | np.testing.assert_allclose(reg.vm.diagonal(), vm, RTOL)
38 | sig2 = np.array([[14.93535335]])
39 | np.testing.assert_allclose(reg.sig2, sig2, RTOL)
40 | pr2 = 0.03191868031797557
41 | np.testing.assert_allclose(reg.pr2, pr2)
42 | std_err = np.array([0.16144743, 0.49354863, 0.01599908])
43 | np.testing.assert_allclose(reg.std_err, std_err, RTOL)
44 | logll = -67936.53303509377
45 | np.testing.assert_allclose(reg.logll, logll, RTOL)
46 | aic = 135879.06607018755
47 | np.testing.assert_allclose(reg.aic, aic, RTOL)
48 | schwarz = 135900.46482786257
49 | np.testing.assert_allclose(reg.schwarz, schwarz, RTOL)
50 |
51 |
52 | class Test_Panel_FE_Error(unittest.TestCase):
53 | def setUp(self):
54 | self.ds_name = "NCOVR"
55 | nat = libpysal.examples.load_example(self.ds_name)
56 | self.db = libpysal.io.open(nat.get_path("NAT.dbf"), "r")
57 | nat_shp = libpysal.examples.get_path("NAT.shp")
58 | self.w = libpysal.weights.Queen.from_shapefile(nat_shp)
59 | self.w.transform = "r"
60 | self.y_name = ["HR70", "HR80", "HR90"]
61 | self.x_names = ["RD70", "RD80", "RD90", "PS70", "PS80", "PS90"]
62 | self.y = np.array([self.db.by_col(name) for name in self.y_name]).T
63 | self.x = np.array([self.db.by_col(name) for name in self.x_names]).T
64 |
65 | def test_Panel(self):
66 | reg = Panel_FE_Error(
67 | self.y,
68 | self.x,
69 | w=self.w,
70 | name_y=self.y_name,
71 | name_x=self.x_names,
72 | name_ds=self.ds_name,
73 | )
74 | betas = np.array([[0.86979232], [-2.96606744], [0.19434604]])
75 | np.testing.assert_allclose(reg.betas, betas, RTOL)
76 | u = np.array([-3.02217669])
77 | np.testing.assert_allclose(reg.u[0], u, RTOL)
78 | predy = np.array([0.07023431])
79 | np.testing.assert_allclose(reg.predy[0], predy, RTOL)
80 | vm = np.array([0.02951625, 0.29645666, 0.00025681])
81 | np.testing.assert_allclose(reg.vm.diagonal(), vm, RTOL)
82 | sig2 = np.array([[14.92289858]])
83 | np.testing.assert_allclose(reg.sig2, sig2, RTOL)
84 | pr2 = 0.008359611052787335
85 | np.testing.assert_allclose(reg.pr2, pr2)
86 | std_err = np.array([0.17180294, 0.54447834, 0.01602534])
87 | np.testing.assert_allclose(reg.std_err, std_err, RTOL)
88 | logll = -67934.00512805565
89 | np.testing.assert_allclose(reg.logll, logll, RTOL)
90 | aic = 135872.0102561113
91 | np.testing.assert_allclose(reg.aic, aic, RTOL)
92 | schwarz = 135886.27609456133
93 | np.testing.assert_allclose(reg.schwarz, schwarz, RTOL)
94 |
95 |
96 | if __name__ == "__main__":
97 | unittest.main()
98 |
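Both fixtures above encode the wide layout the panel classes consume: `y` is n x T (one column per period, HR70/HR80/HR90) and `x` stacks the T columns of each of the k regressors side by side, giving n x (T*k). A seeded toy sketch of that shape contract:

```python
import numpy as np

n, T, k = 4, 3, 2
rng = np.random.default_rng(0)
y = rng.normal(size=(n, T))      # one column per period, e.g. HR70, HR80, HR90
x = rng.normal(size=(n, T * k))  # RD70, RD80, RD90, then PS70, PS80, PS90
assert y.shape == (n, T) and x.shape == (n, T * k)
```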
--------------------------------------------------------------------------------
/spreg/tests/test_panel_re.py:
--------------------------------------------------------------------------------
1 | import unittest
2 | import libpysal
3 | import numpy as np
4 | import pandas as pd
5 | from spreg.panel_re import Panel_RE_Lag, Panel_RE_Error
6 | from libpysal.common import RTOL
7 | from libpysal.weights import w_subset
8 |
9 |
10 | class Test_Panel_RE_Lag(unittest.TestCase):
11 | def setUp(self):
12 | self.ds_name = "NCOVR"
13 | nat = libpysal.examples.load_example(self.ds_name)
14 | self.db = libpysal.io.open(nat.get_path("NAT.dbf"), "r")
15 | nat_shp = libpysal.examples.get_path("NAT.shp")
16 | w_full = libpysal.weights.Queen.from_shapefile(nat_shp)
17 | self.y_name = ["HR70", "HR80", "HR90"]
18 | self.x_names = ["RD70", "RD80", "RD90", "PS70", "PS80", "PS90"]
19 | c_names = ["STATE_NAME", "FIPSNO"]
20 | y_full = [self.db.by_col(name) for name in self.y_name]
21 | y_full = np.array(y_full).T
22 | x_full = [self.db.by_col(name) for name in self.x_names]
23 | x_full = np.array(x_full).T
24 | c_full = [self.db.by_col(name) for name in c_names]
25 | c_full = pd.DataFrame(c_full, index=c_names).T
26 | filter_states = ["Kansas", "Missouri", "Oklahoma", "Arkansas"]
27 | filter_counties = c_full[c_full["STATE_NAME"].isin(filter_states)]
28 | filter_counties = filter_counties["FIPSNO"].values
29 | counties = np.array(self.db.by_col("FIPSNO"))
30 | subid = np.where(np.isin(counties, filter_counties))[0]
31 | self.w = w_subset(w_full, subid)
32 | self.w.transform = "r"
33 | self.y = y_full[
34 | subid,
35 | ]
36 | self.x = x_full[
37 | subid,
38 | ]
39 |
40 | def test_Panel(self):
41 | reg = Panel_RE_Lag(
42 | self.y,
43 | self.x,
44 | w=self.w,
45 | name_y=self.y_name,
46 | name_x=self.x_names,
47 | name_ds=self.ds_name,
48 | )
49 | betas = np.array(
50 | [[4.44421994], [2.52821717], [2.24768846], [0.25846846], [0.68426639]]
51 | )
52 | np.testing.assert_allclose(reg.betas, betas, RTOL)
53 | u = np.array([1.17169293])
54 | np.testing.assert_allclose(reg.u[0], u, RTOL)
55 | predy = np.array([2.43910394])
56 | np.testing.assert_allclose(reg.predy[0], predy, RTOL)
57 | vm = np.array([0.08734092, 0.05232857, 0.05814063, 0.00164801, 0.00086908])
58 | np.testing.assert_allclose(reg.vm.diagonal(), vm, RTOL)
59 | sig2 = np.array([[15.71234238]])
60 | np.testing.assert_allclose(reg.sig2, sig2, RTOL)
61 | pr2 = 0.2634518198611293
62 | np.testing.assert_allclose(reg.pr2, pr2)
63 | std_err = np.array([0.29553498, 0.22875438, 0.24112368, 0.04059565, 0.02948021])
64 | np.testing.assert_allclose(reg.std_err, std_err, RTOL)
65 | logll = -3127.652757262218
66 | np.testing.assert_allclose(reg.logll, logll, RTOL)
67 | aic = 6263.305514524436
68 | np.testing.assert_allclose(reg.aic, aic, RTOL)
69 | schwarz = 6283.3755390962015
70 | np.testing.assert_allclose(reg.schwarz, schwarz, RTOL)
71 |
72 |
73 | class Test_Panel_RE_Error(unittest.TestCase):
74 | def setUp(self):
75 | self.ds_name = "NCOVR"
76 | nat = libpysal.examples.load_example(self.ds_name)
77 | self.db = libpysal.io.open(nat.get_path("NAT.dbf"), "r")
78 | nat_shp = libpysal.examples.get_path("NAT.shp")
79 | w_full = libpysal.weights.Queen.from_shapefile(nat_shp)
80 | self.y_name = ["HR70", "HR80", "HR90"]
81 | self.x_names = ["RD70", "RD80", "RD90", "PS70", "PS80", "PS90"]
82 | c_names = ["STATE_NAME", "FIPSNO"]
83 | y_full = [self.db.by_col(name) for name in self.y_name]
84 | y_full = np.array(y_full).T
85 | x_full = [self.db.by_col(name) for name in self.x_names]
86 | x_full = np.array(x_full).T
87 | c_full = [self.db.by_col(name) for name in c_names]
88 | c_full = pd.DataFrame(c_full, index=c_names).T
89 | filter_states = ["Kansas", "Missouri", "Oklahoma", "Arkansas"]
90 | filter_counties = c_full[c_full["STATE_NAME"].isin(filter_states)]
91 | filter_counties = filter_counties["FIPSNO"].values
92 | counties = np.array(self.db.by_col("FIPSNO"))
93 | subid = np.where(np.isin(counties, filter_counties))[0]
94 | self.w = w_subset(w_full, subid)
95 | self.w.transform = "r"
96 | self.y = y_full[
97 | subid,
98 | ]
99 | self.x = x_full[
100 | subid,
101 | ]
102 |
103 | # def test_Panel(self):
104 | # reg = Panel_RE_Error(
105 | # self.y,
106 | # self.x,
107 | # w=self.w,
108 | # name_y=self.y_name,
109 | # name_x=self.x_names,
110 | # name_ds=self.ds_name,
111 | # )
112 | # betas = np.array(
113 | # [[5.87893756], [3.23269025], [2.62996804], [0.34042682], [4.9782446]]
114 | # )
115 | # np.testing.assert_allclose(reg.betas, betas, RTOL)
116 | # u = np.array([-0.2372652])
117 | # np.testing.assert_allclose(reg.u[0], u, RTOL)
118 | # predy = np.array([4.27277771])
119 | # np.testing.assert_allclose(reg.predy[0], predy, RTOL)
120 | # vm = np.array([0.05163595, 0.05453637, 0.06134783, 0.00025012, 0.0030366])
121 | # np.testing.assert_allclose(reg.vm.diagonal(), vm, RTOL)
122 | # sig2 = np.array([[16.10231419]])
123 | # np.testing.assert_allclose(reg.sig2, sig2, RTOL)
124 | # pr2 = 0.3256008995950422
125 | # np.testing.assert_allclose(reg.pr2, pr2, RTOL)
126 | # std_err = np.array([0.22723545, 0.23353024, 0.24768493, 0.01581518, 0.05510535])
127 | # np.testing.assert_allclose(reg.std_err, std_err, RTOL)
128 | # logll = -7183.836220934392
129 | # np.testing.assert_allclose(reg.logll, logll, RTOL)
130 | # aic = 14373.672441868785
131 | # np.testing.assert_allclose(reg.aic, aic, RTOL)
132 | # schwarz = 14388.724960297608
133 | # np.testing.assert_allclose(reg.schwarz, schwarz, RTOL)
134 |
135 |
136 | if __name__ == "__main__":
137 | unittest.main()
138 |
--------------------------------------------------------------------------------
/spreg/tests/test_probit.py:
--------------------------------------------------------------------------------
1 | import unittest
2 | import libpysal
3 | import numpy as np
4 | from spreg import probit as PB
5 | from libpysal.common import RTOL
6 |
7 | class TestBaseProbit(unittest.TestCase):
8 |     def setUp(self):
9 |         db = libpysal.io.open(libpysal.examples.get_path("columbus.dbf"), "r")
10 |         y = np.array(db.by_col("CRIME"))
11 |         y = np.reshape(y, (49, 1))
12 |         self.y = (y > 40).astype(float)
13 |         X = []
14 |         X.append(db.by_col("INC"))
15 |         X.append(db.by_col("HOVAL"))
16 |         self.X = np.array(X).T
17 |         self.X = np.hstack((np.ones(self.y.shape), self.X))
18 |         self.w = libpysal.weights.Rook.from_shapefile(libpysal.examples.get_path("columbus.shp"))
19 |         self.w.transform = "r"
20 | 
21 |     def test_model(self):
22 |         reg = PB.BaseProbit(self.y, self.X)
23 |         betas = np.array([[3.35381078], [-0.1996531], [-0.02951371]])
24 |         np.testing.assert_allclose(reg.betas, betas, RTOL)
25 |         predy = np.array([0.00174739])
26 |         np.testing.assert_allclose(reg.predy[0], predy, RTOL)
27 |         n = 49
28 |         np.testing.assert_allclose(reg.n, n, RTOL)
29 |         k = 3
30 |         np.testing.assert_allclose(reg.k, k, RTOL)
31 |         y = np.array([0.0])
32 |         np.testing.assert_allclose(reg.y[0], y, RTOL)
33 |         x = np.array([1.0, 19.531, 80.467003])
34 |         np.testing.assert_allclose(reg.x[0], x, RTOL)
35 |         vm = np.array([[8.52813879e-01, -4.36272459e-02, -8.05171472e-03], [-4.36272459e-02, 4.11381444e-03, -1.92834842e-04], [-8.05171472e-03, -1.92834842e-04, 3.09660240e-04]])
36 |         np.testing.assert_allclose(reg.vm, vm, RTOL)
37 |         xmean = np.array([[1.0], [14.37493876], [38.43622447]])
38 |         np.testing.assert_allclose(reg.xmean, xmean, RTOL)
39 |         logl = -20.06009093055782
40 |         np.testing.assert_allclose(reg.logl, logl, RTOL)
41 | 
42 | 
43 | class TestProbit(unittest.TestCase):
44 |     def setUp(self):
45 |         db = libpysal.io.open(libpysal.examples.get_path("columbus.dbf"), "r")
46 |         y = np.array(db.by_col("CRIME"))
47 |         y = np.reshape(y, (49, 1))
48 |         self.y = (y > 40).astype(float)
49 |         X = []
50 |         X.append(db.by_col("INC"))
51 |         X.append(db.by_col("HOVAL"))
52 |         self.X = np.array(X).T
53 |         self.w = libpysal.weights.Rook.from_shapefile(libpysal.examples.get_path("columbus.shp"))
54 |         self.w.transform = "r"
55 | 
56 |     def test_model(self):
57 |         reg = PB.Probit(self.y, self.X, w=self.w, spat_diag=True)
58 |         betas = np.array([[3.35381078], [-0.1996531], [-0.02951371]])
59 |         np.testing.assert_allclose(reg.betas, betas, RTOL)
60 |         predy = np.array([0.00174739])
61 |         np.testing.assert_allclose(reg.predy[0], predy, RTOL)
62 |         n = 49
63 |         np.testing.assert_allclose(reg.n, n, RTOL)
64 |         k = 3
65 |         np.testing.assert_allclose(reg.k, k, RTOL)
66 |         y = np.array([0.0])
67 |         np.testing.assert_allclose(reg.y[0], y, RTOL)
68 |         x = np.array([1.0, 19.531, 80.467003])
69 |         np.testing.assert_allclose(reg.x[0], x, RTOL)
70 |         vm = np.array([[8.52813879e-01, -4.36272459e-02, -8.05171472e-03], [-4.36272459e-02, 4.11381444e-03, -1.92834842e-04], [-8.05171472e-03, -1.92834842e-04, 3.09660240e-04]])
71 |         np.testing.assert_allclose(reg.vm, vm, RTOL)
72 |         xmean = np.array([[1.0], [14.37493876], [38.43622447]])
73 |         np.testing.assert_allclose(reg.xmean, xmean, RTOL)
74 |         predpc = 85.714285714285708
75 |         np.testing.assert_allclose(reg.predpc, predpc, RTOL)
76 |         logl = -20.06009093055782
77 |         np.testing.assert_allclose(reg.logl, logl, RTOL)
78 |         scale = 0.23309310130643665
79 |         np.testing.assert_allclose(reg.scale, scale, RTOL)
80 |         slopes = np.array([[-0.04653776], [-0.00687944]])
81 |         np.testing.assert_allclose(reg.slopes, slopes, RTOL)
82 |         slopes_vm = np.array([[1.77101993e-04, -1.65021168e-05], [-1.65021168e-05, 1.60575016e-05]])
83 |         np.testing.assert_allclose(reg.slopes_vm, slopes_vm, RTOL)
84 |         LR = 25.317683245671716
85 |         np.testing.assert_allclose(reg.LR[0], LR, RTOL)
86 |         Pinkse_error = 2.9632385352516728
87 |         np.testing.assert_allclose(reg.Pinkse_error[0], Pinkse_error, RTOL)
88 |         KP_error = 1.6509224700582124
89 |         np.testing.assert_allclose(reg.KP_error[0], KP_error, RTOL)
90 |         PS_error = 2.3732463777623511
91 |         np.testing.assert_allclose(reg.PS_error[0], PS_error, RTOL)
92 | 
93 | if __name__ == "__main__":
94 |     unittest.main()
95 |
--------------------------------------------------------------------------------
/spreg/tests/test_sp_panels.py:
--------------------------------------------------------------------------------
1 | import unittest
2 | import numpy as np
3 | import libpysal
4 | from libpysal.common import RTOL
5 | from spreg.sp_panels import *
6 | ATOL = 1e-12
7 |
8 |
9 | class Test_GM_KKP(unittest.TestCase):
10 | def setUp(self):
11 |         nat = libpysal.examples.load_example('NCOVR')
12 |         self.db = libpysal.io.open(nat.get_path("NAT.dbf"), 'r')
13 |         self.w = libpysal.weights.Queen.from_shapefile(libpysal.examples.get_path("NAT.shp"))
14 |         self.w.transform = 'r'
15 |         self.y_var0 = ['HR70', 'HR80', 'HR90']
16 |         self.x_var0 = ['RD70', 'RD80', 'RD90', 'PS70', 'PS80', 'PS90']
17 |         self.y = np.array([self.db.by_col(name) for name in self.y_var0]).T
18 |         self.x = np.array([self.db.by_col(name) for name in self.x_var0]).T
19 |
20 |
21 | def test_wide_ident(self):
22 |         reg = GM_KKP(self.y, self.x, self.w, full_weights=False, name_y=self.y_var0, name_x=self.x_var0)
23 | np.testing.assert_allclose(reg.betas,np.array([[ 6.49221562],
24 | [ 3.62445753],
25 | [ 1.31187779],
26 | [ 0.41777589],
27 | [22.81908224],
28 | [39.90993228]]),RTOL)
29 | np.testing.assert_allclose(reg.vm,np.array([[ 1.26948117e-02, -1.98160325e-06, 7.38157674e-05],
30 | [-1.98160325e-06, 7.69961725e-03, 1.13099329e-03],
31 | [ 7.38157674e-05, 1.13099329e-03, 7.26783636e-03]]),RTOL*10)
32 | np.testing.assert_equal(reg.name_x, ['CONSTANT', 'RD', 'PS', 'lambda', ' sigma2_v', 'sigma2_1'])
33 | np.testing.assert_equal(reg.name_y, 'HR')
34 |
35 | def test_wide_full(self):
36 |         reg = GM_KKP(self.y, self.x, self.w, full_weights=True)
37 |
38 | np.testing.assert_allclose(reg.betas,np.array([[ 6.49193589],
39 | [ 3.55740165],
40 | [ 1.29462748],
41 | [ 0.4263399 ],
42 | [22.47241979],
43 | [45.82593532]]),RTOL)
44 | np.testing.assert_allclose(reg.vm,np.array([[ 1.45113773e-02, -2.14882672e-06, 8.54997693e-05],
45 | [-2.14882672e-06, 8.41929187e-03, 1.24553497e-03],
46 | [ 8.54997693e-05, 1.24553497e-03, 8.12448812e-03]]),RTOL)
47 |
48 | def test_long_ident(self):
49 |         bigy = self.y.reshape((self.y.size, 1), order="F")
50 |         bigx = self.x[:, 0:3].reshape((self.x.shape[0] * 3, 1), order="F")
51 |         bigx = np.hstack((bigx, self.x[:, 3:6].reshape((self.x.shape[0] * 3, 1), order="F")))
52 |         reg = GM_KKP(bigy, bigx, self.w, full_weights=False, name_y=['HR'], name_x=['RD', 'PS'])
53 |
54 | np.testing.assert_allclose(reg.betas,np.array([[ 6.49221562],
55 | [ 3.62445753],
56 | [ 1.31187779],
57 | [ 0.41777589],
58 | [22.81908224],
59 | [39.90993228]]),RTOL)
60 | np.testing.assert_allclose(reg.vm,np.array([[ 1.26948117e-02, -1.98160325e-06, 7.38157674e-05],
61 | [-1.98160325e-06, 7.69961725e-03, 1.13099329e-03],
62 | [ 7.38157674e-05, 1.13099329e-03, 7.26783636e-03]]),RTOL*10)
63 | np.testing.assert_equal(reg.name_x, ['CONSTANT', 'RD', 'PS', 'lambda', ' sigma2_v', 'sigma2_1'])
64 | np.testing.assert_equal(reg.name_y, 'HR')
65 |
66 | def test_regimes(self):
67 |         regimes = self.db.by_col("SOUTH")
68 |         reg = GM_KKP(self.y, self.x, self.w, full_weights=False, regimes=regimes,
69 |                      name_y=self.y_var0, name_x=self.x_var0)
70 | np.testing.assert_allclose(reg.betas,np.array([[ 5.25856482],
71 | [ 3.19249165],
72 | [ 1.0056967 ],
73 | [ 7.94560642],
74 | [ 3.13931041],
75 | [ 1.53700634],
76 | [ 0.35979407],
77 | [22.5650005 ],
78 | [39.71516708]]),RTOL)
79 |         np.testing.assert_allclose(np.sqrt(reg.vm.diagonal()), np.array([0.158986, 0.157543, 0.104128, 0.165254, 0.117737, 0.136666]), RTOL)
80 | np.testing.assert_equal(reg.name_x, ['0_CONSTANT', '0_RD', '0_PS', '1_CONSTANT', '1_RD', '1_PS', 'lambda', ' sigma2_v', 'sigma2_1'])
81 | np.testing.assert_equal(reg.name_y, 'HR')
82 | np.testing.assert_allclose(reg.chow.regi,np.array([[1.420430e+02, 9.516507e-33],
83 | [7.311490e-02, 7.868543e-01],
84 | [9.652492e+00, 1.890949e-03]]),RTOL*10)
85 |         np.testing.assert_allclose(reg.chow.joint[0], 158.7225, RTOL)
86 |
87 | if __name__ == '__main__':
88 | unittest.main()
89 |
90 |
--------------------------------------------------------------------------------
/spreg/tests/test_sputils.py:
--------------------------------------------------------------------------------
1 | import spreg as EC
2 | from spreg import sputils as spu
3 | from warnings import warn as Warn, filterwarnings
4 | import unittest as ut
5 | import numpy as np
6 | import scipy.sparse as spar
7 |
8 | filterwarnings("ignore", category=spar.SparseEfficiencyWarning)
9 |
10 | ALL_FUNCS = [
11 | f for f, v in list(spu.__dict__.items()) if (callable(v) and not f.startswith("_"))
12 | ]
13 | COVERAGE = [
14 | "spinv",
15 | "splogdet",
16 | "spisfinite",
17 | "spmin",
18 | "spfill_diagonal",
19 | "spmax",
20 | "spbroadcast",
21 | "sphstack",
22 | "spmultiply",
23 | "spdot",
24 | ]
25 |
26 | NOT_COVERED = set(ALL_FUNCS).difference(COVERAGE)
27 |
28 | if len(NOT_COVERED) > 0:
29 | Warn(
30 | "The following functions in {} are not covered:\n"
31 | "{}".format(spu.__file__, NOT_COVERED)
32 | )
33 |
34 |
35 | class Test_Sparse_Utils(ut.TestCase):
36 | def setUp(self):
37 | np.random.seed(8879)
38 |
39 | self.n = 20
40 | self.dense0 = np.random.randint(2, size=(self.n, self.n))
41 | self.d0td0 = self.dense0.T.dot(self.dense0)
42 | self.dense1 = np.eye(self.n)
43 | self.sparse0 = spar.csc_matrix(self.dense0)
44 | self.s0ts0 = self.sparse0.T.dot(self.sparse0)
45 | self.sparse1 = spar.csc_matrix(spar.identity(self.n))
46 |
47 | def test_inv(self):
48 | r = spu.spinv(self.d0td0)
49 | rs = spu.spinv(self.s0ts0)
50 | rs2d = rs.toarray()
51 |
52 | self.assertIsInstance(r, np.ndarray)
53 | self.assertTrue(spar.issparse(rs))
54 | self.assertIsInstance(rs2d, np.ndarray)
55 |
56 | np.testing.assert_allclose(r, rs2d)
57 |
58 | def test_spdot(self):
59 | dd = spu.spdot(self.dense0, self.dense1)
60 | ds = spu.spdot(self.dense0, self.sparse1)
61 | sd = spu.spdot(self.sparse0, self.dense1)
62 | ss = spu.spdot(self.sparse0, self.sparse1, array_out=False)
63 |
64 | # typing tests
65 | self.assertIsInstance(dd, np.ndarray)
66 | self.assertIsInstance(ds, np.ndarray)
67 | self.assertIsInstance(sd, np.ndarray)
68 | self.assertIsInstance(ss, spar.csc_matrix)
69 |
70 | # product test
71 | np.testing.assert_array_equal(dd, ds)
72 | np.testing.assert_array_equal(dd, sd)
73 | np.testing.assert_array_equal(dd, ss.toarray())
74 |
75 | def test_logdet(self):
76 | dld = spu.splogdet(self.d0td0)
77 | sld = spu.splogdet(self.s0ts0)
78 |
79 | np.testing.assert_allclose(dld, sld)
80 |
81 | def test_isfinite(self):
82 | self.assertTrue(spu.spisfinite(self.dense0))
83 | self.assertTrue(spu.spisfinite(self.sparse0))
84 |
85 |         dense_nan = self.dense0.copy().astype(np.float64)
86 |         dense_nan[0, 0] = np.nan  # a NaN is non-finite, like +/-inf
87 |         sparse_nan = spar.csc_matrix(dense_nan)
88 | 
89 |         self.assertFalse(spu.spisfinite(dense_nan))
90 |         self.assertFalse(spu.spisfinite(sparse_nan))
91 |
92 | def test_min(self):
93 | self.assertEqual(spu.spmin(self.dense0), 0)
94 | self.assertEqual(spu.spmin(self.sparse0), 0)
95 |
96 | def test_max(self):
97 | self.assertEqual(spu.spmax(self.dense1), 1)
98 | self.assertEqual(spu.spmax(self.sparse1), 1)
99 |
100 | def test_fill_diagonal(self):
101 | current_dsum = self.dense0.trace()
102 | current_ssum = self.sparse0.diagonal().sum()
103 | self.assertEqual(current_dsum, 7)
104 | self.assertEqual(current_ssum, 7)
105 |
106 | tmpd = self.dense0.copy()
107 | tmps = self.sparse0.copy()
108 | d_4diag = spu.spfill_diagonal(tmpd, 4)
109 | s_4diag = spu.spfill_diagonal(tmps, 4)
110 |
111 | known = 4 * self.n
112 |
113 | self.assertEqual(known, d_4diag.trace())
114 | self.assertEqual(known, s_4diag.diagonal().sum())
115 |
116 | def test_broadcast(self):
117 | test_vec = np.ones((self.n, 1)) * 0.2
118 |
119 | tmpd = spu.spbroadcast(self.dense0, test_vec)
120 | tmps = spu.spbroadcast(self.sparse0.tocsr(), test_vec)
121 |
122 | self.assertIsInstance(tmpd, np.ndarray)
123 | self.assertIsInstance(tmps, spar.csr_matrix)
124 |
125 | np.testing.assert_allclose(tmpd, tmps.toarray())
126 |
127 | def test_hstack(self):
128 | tmpd = spu.sphstack(self.dense0, self.dense1)
129 | tmps = spu.sphstack(self.sparse0.tocsr(), self.sparse1.tocsr())
130 |
131 | self.assertIsInstance(tmpd, np.ndarray)
132 | self.assertIsInstance(tmps, spar.csr_matrix)
133 |
134 | np.testing.assert_allclose(tmpd, tmps.toarray())
135 |
136 | def test_multiply(self):
137 | dd = spu.spmultiply(self.dense0, self.dense1)
138 | ss = spu.spmultiply(self.sparse0, self.sparse1, array_out=False)
139 |
140 | # typing
141 | self.assertIsInstance(dd, np.ndarray)
142 | self.assertIsInstance(ss, spar.csc_matrix)
143 |
144 | # equality
145 | np.testing.assert_array_equal(dd, ss.toarray())
146 |
147 |
148 | if __name__ == "__main__":
149 | ut.main()
150 |
--------------------------------------------------------------------------------
/spreg/tests/test_sur_lag.py:
--------------------------------------------------------------------------------
1 | import unittest
2 | import numpy as np
3 | from spreg.sur_utils import sur_dictxy, sur_dictZ
4 | from spreg.sur_lag import SURlagIV
5 | import libpysal
6 | from libpysal.common import RTOL
7 | from libpysal.examples import load_example
8 | import geopandas as gpd
9 |
10 | ATOL = 0.0001
11 |
12 | def dict_compare(actual, desired, rtol, atol=1e-7):
13 | for i in actual.keys():
14 | np.testing.assert_allclose(actual[i], desired[i], rtol, atol=atol)
15 |
16 | class Test_SURlagIV(unittest.TestCase):
17 | def setUp(self):
18 | nat = load_example('NCOVR')
19 | self.db = libpysal.io.open(nat.get_path('NAT.dbf'), 'r')
20 | self.w = libpysal.weights.Queen.from_shapefile(nat.get_path("NAT.shp"))
21 | self.w.transform = 'r'
22 |
23 | self.dbs = gpd.read_file(nat.get_path("NAT.shp"))
24 |
25 |     def test_3SLS(self):  # 2 equations, same K in each
26 |         y_var0 = ['HR80', 'HR90']
27 |         x_var0 = [['PS80', 'UE80'], ['PS90', 'UE90']]
28 |         reg = SURlagIV(y_var0, x_var0, w=self.w, df=self.dbs,
29 |                        name_ds="NAT", name_w="nat_queen")
30 |
31 | dict_compare(reg.b3SLS,{0: np.array([[ 4.79766641],[ 0.66900706],[ 0.45430715],\
32 | [-0.13665465]]), 1: np.array([[ 2.27972563],[ 0.99252289],[ 0.52280565],[ 0.06909469]])},RTOL)
33 | dict_compare(reg.tsls_inf,{0: np.array([[ 4.55824001e+00, 1.05252606e+00, 2.92558259e-01],\
34 | [ 3.54744447e-01, 1.88588453e+00, 5.93105171e-02],\
35 | [ 7.79071951e-02, 5.83138887e+00, 5.49679157e-09],\
36 | [ 6.74318852e-01, -2.02655838e-01, 8.39404043e-01]]),\
37 | 1: np.array([[ 3.90351092e-01, 5.84019280e+00, 5.21404469e-09],\
38 | [ 1.21674079e-01, 8.15722547e+00, 3.42808098e-16],\
39 | [ 4.47686969e-02, 1.16779288e+01, 1.65273681e-31],\
40 | [ 7.99640809e-02, 8.64071585e-01, 3.87548567e-01]])},RTOL)
41 | np.testing.assert_allclose(reg.corr,np.array([[ 1. , 0.525751],
42 | [ 0.525751, 1. ]]),RTOL)
43 | np.testing.assert_allclose(reg.surchow,[(0.3178787640240518, 1, 0.57288522734425285),\
44 | (1.0261877219299562, 1, 0.31105574708021311),\
45 | (0.76852435750330428, 1, 0.38067394159083323),\
46 | (0.099802260814129934, 1, 0.75206705793155604)],RTOL)
47 |
48 |     def test_3SLS_3eq(self):  # Three equations, no endogenous
49 |         y_var1 = ['HR60', 'HR70', 'HR80']
50 |         x_var1 = [['RD60', 'PS60'], ['RD70', 'PS70', 'UE70'], ['RD80', 'PS80']]
51 |         bigy1, bigX1, bigyvars1, bigXvars1 = sur_dictxy(self.db, y_var1, x_var1)
52 |         reg = SURlagIV(bigy1, bigX1, w=self.w, name_bigy=bigyvars1, name_bigX=bigXvars1,
53 |                        name_ds="NAT", name_w="nat_queen")
54 |
55 | dict_compare(reg.b2SLS,{0: np.array([[ 2.42754085],[ 1.48928052],[ 0.33812558],\
56 | [ 0.45567848]]), 1: np.array([[ 4.83887747],[ 2.86272903],[ 0.96950417],\
57 | [-0.12928124],[ 0.33328525]]), 2: np.array([[ 6.69407561],[ 3.81449588],\
58 | [ 1.44603996],[ 0.03355501]])},RTOL)
59 | dict_compare(reg.b3SLS,{0: np.array([[ 2.1646724 ],[ 1.31916307],[ 0.3398716 ],
60 | [ 0.51336281]]), 1: np.array([[ 4.87587006],[ 2.68927603],
61 | [ 0.94945336],[-0.145607 ],[ 0.33901794]]), 2: np.array([[ 6.48848271],
62 | [ 3.53936913],[ 1.34731149],[ 0.06309451]])},RTOL)
63 | dict_compare(reg.tsls_inf,{0: np.array([[ 3.51568531e-01, 6.15718476e+00, 7.40494437e-10],\
64 | [ 1.86875349e-01, 7.05905340e+00, 1.67640650e-12],\
65 | [ 9.04557549e-02, 3.75732426e+00, 1.71739894e-04],\
66 | [ 7.48661202e-02, 6.85707782e+00, 7.02833502e-12]]),\
67 | 1: np.array([[ 4.72342840e-01, 1.03227352e+01, 5.56158073e-25],\
68 | [ 2.12539934e-01, 1.26530388e+01, 1.07629786e-36],\
69 | [ 1.21325632e-01, 7.82566179e+00, 5.04993280e-15],\
70 | [ 4.61662438e-02, -3.15397123e+00, 1.61064963e-03],\
71 | [ 5.41804741e-02, 6.25719766e+00, 3.91956530e-10]]),\
72 | 2: np.array([[ 3.36526688e-001, 1.92807374e+001, 7.79572152e-083],\
73 | [ 1.59012676e-001, 2.22584087e+001, 9.35079396e-110],\
74 | [ 1.08370073e-001, 1.24325052e+001, 1.74091603e-035],\
75 | [ 4.61776859e-002, 1.36634202e+000, 1.71831639e-001]])},RTOL)
76 |
77 |         reg = SURlagIV(bigy1, bigX1, w=self.w, w_lags=2, name_bigy=bigyvars1, name_bigX=bigXvars1,
78 |                        name_ds="NAT", name_w="nat_queen")
79 |
80 | dict_compare(reg.b3SLS,{0: np.array([[ 1.77468937],[ 1.14510457],[ 0.30768813],\
81 | [ 0.5989414 ]]), 1: np.array([[ 4.26823484],[ 2.43651351],[ 0.8683601 ],[-0.12672555],\
82 | [ 0.4208373 ]]), 2: np.array([[ 6.02334209],[ 3.38056146],[ 1.30003556],[ 0.12992573]])},RTOL)
83 | dict_compare(reg.tsls_inf,{0: np.array([[ 3.27608281e-01, 5.41710779e+00, 6.05708284e-08],\
84 | [ 1.76245578e-01, 6.49721025e+00, 8.18230736e-11],\
85 | [ 8.95068772e-02, 3.43759205e+00, 5.86911195e-04],\
86 | [ 6.94610221e-02, 8.62269771e+00, 6.53949186e-18]]),\
87 | 1: np.array([[ 4.52225005e-01, 9.43829906e+00, 3.78879655e-21],\
88 | [ 2.03807701e-01, 1.19549629e+01, 6.11608551e-33],\
89 | [ 1.19004906e-01, 7.29684281e+00, 2.94598624e-13],\
90 | [ 4.57552474e-02, -2.76963964e+00, 5.61183429e-03],\
91 | [ 5.13101239e-02, 8.20183745e+00, 2.36740266e-16]]),\
92 | 2: np.array([[ 3.27580342e-001, 1.83873735e+001, 1.65820984e-075],\
93 | [ 1.55771577e-001, 2.17020429e+001, 1.96247435e-104],\
94 | [ 1.06817752e-001, 1.21705946e+001, 4.45822889e-034],\
95 | [ 4.48871540e-002, 2.89449691e+000, 3.79766647e-003]])},RTOL)
96 |
97 |     def test_3SLS_3eq_end(self):  # Three equations, two endogenous, three instruments
98 |         y_var2 = ['HR60', 'HR70', 'HR80']
99 |         x_var2 = [['RD60', 'PS60'], ['RD70', 'PS70', 'MA70'], ['RD80', 'PS80']]
100 |         yend_var2 = [['UE60', 'DV60'], ['UE70', 'DV70'], ['UE80', 'DV80']]
101 |         q_var2 = [['FH60', 'FP59', 'GI59'], ['FH70', 'FP69', 'GI69'], ['FH80', 'FP79', 'GI79']]
102 |         bigy2, bigX2, bigyvars2, bigXvars2 = sur_dictxy(self.db, y_var2, x_var2)
103 |         bigyend2, bigyendvars2 = sur_dictZ(self.db, yend_var2)
104 |         bigq2, bigqvars2 = sur_dictZ(self.db, q_var2)
105 |         reg = SURlagIV(bigy2, bigX2, bigyend2, bigq2, w=self.w, name_bigy=bigyvars2, name_bigX=bigXvars2,
106 |                        name_bigyend=bigyendvars2, name_bigq=bigqvars2, spat_diag=True, name_ds="NAT", name_w="nat_queen")
107 |
108 | dict_compare(reg.b2SLS,{0: np.array([[-2.36265226],[ 1.69785946],[ 0.65777251],[-0.07519173],[ 2.15755822],\
109 | [ 0.69200015]]), 1: np.array([[ 8.13716008],[ 3.28583832],[ 0.90311859],[-0.21702098],[-1.04365606],\
110 | [ 2.8597322 ],[ 0.39935589]]), 2: np.array([[-5.8117312 ],[ 3.49934818],[ 0.56523782],[ 0.09653315],\
111 | [ 2.31166815],[ 0.20602185]])},RTOL)
112 | dict_compare(reg.b3SLS,{0: np.array([[-2.33115839],[ 1.43097732],[ 0.57312948],[ 0.03474891],[ 1.78825098],\
113 | [ 0.7145636 ]]), 1: np.array([[ 8.34932294],[ 3.28396774],[ 0.95119978],[-0.19323687],[-1.1750583 ],\
114 | [ 2.75925141],[ 0.38544424]]), 2: np.array([[-5.2395274 ],[ 3.38941755],[ 0.55897901],[ 0.08212108],\
115 | [ 2.19387428],[ 0.21582944]])},RTOL)
116 | dict_compare(reg.tsls_inf,{0: np.array([[ 7.31246733e-01, -3.18792315e+00, 1.43298614e-03],\
117 | [ 2.07089585e-01, 6.90994348e+00, 4.84846854e-12],\
118 | [ 1.15296751e-01, 4.97090750e+00, 6.66402399e-07],\
119 | [ 8.75272616e-02, 3.97006755e-01, 6.91362479e-01],\
120 | [ 3.10638495e-01, 5.75669472e+00, 8.57768262e-09],\
121 | [ 5.40333500e-02, 1.32244919e+01, 6.33639937e-40]]),\
122 | 1: np.array([[ 1.71703190e+00, 4.86264870e+00, 1.15825305e-06],\
123 | [ 2.79253520e-01, 1.17598079e+01, 6.28772226e-32],\
124 | [ 1.27575632e-01, 7.45596763e+00, 8.92106480e-14],\
125 | [ 3.31742265e-02, -5.82490950e+00, 5.71435564e-09],\
126 | [ 2.19785746e-01, -5.34638083e+00, 8.97303096e-08],\
127 | [ 3.29882178e-01, 8.36435430e+00, 6.04450321e-17],\
128 | [ 5.54968909e-02, 6.94533032e+00, 3.77575814e-12]]),\
129 | 2: np.array([[ 9.77398092e-01, -5.36068920e+00, 8.29050465e-08],\
130 | [ 1.67632600e-01, 2.02193222e+01, 6.61862485e-91],\
131 | [ 1.24321379e-01, 4.49624202e+00, 6.91650078e-06],\
132 | [ 6.94834624e-02, 1.18187957e+00, 2.37253491e-01],\
133 | [ 1.68013780e-01, 1.30577045e+01, 5.74336064e-39],\
134 | [ 4.16751208e-02, 5.17885587e+00, 2.23250870e-07]])},RTOL)
135 |         np.testing.assert_allclose(reg.joinrho, (215.897034, 3, 1.54744730e-46))
136 |
137 |     def test_3SLS_3eq_2or(self):  # Second-order spatial lags, no instrument lags
138 |         y_var2 = ['HR60', 'HR70', 'HR80']
139 |         x_var2 = [['RD60', 'PS60'], ['RD70', 'PS70', 'MA70'], ['RD80', 'PS80']]
140 |         yend_var2 = [['UE60', 'DV60'], ['UE70', 'DV70'], ['UE80', 'DV80']]
141 |         q_var2 = [['FH60', 'FP59', 'GI59'], ['FH70', 'FP69', 'GI69'], ['FH80', 'FP79', 'GI79']]
142 | 
143 |         bigy2, bigX2, bigyvars2, bigXvars2 = sur_dictxy(self.db, y_var2, x_var2)
144 |         bigyend2, bigyendvars2 = sur_dictZ(self.db, yend_var2)
145 |         bigq2, bigqvars2 = sur_dictZ(self.db, q_var2)
146 |         reg = SURlagIV(bigy2, bigX2, bigyend2, bigq2, w=self.w, w_lags=2, lag_q=False,
147 |                        name_bigy=bigyvars2, name_bigX=bigXvars2,
148 |                        name_bigyend=bigyendvars2, name_bigq=bigqvars2,
149 |                        name_ds="NAT", name_w="nat_queen")
150 |
151 | dict_compare(reg.b3SLS,{0: np.array([[-2.40071969],[ 1.2933015 ],[ 0.53165876],[ 0.04883189],[ 1.6663233 ],\
152 | [ 0.76473297]]), 1: np.array([[ 7.24987963],[ 2.96110365],[ 0.86322179],[-0.17847268],[-1.1332928 ],\
153 | [ 2.69573919],[ 0.48295237]]), 2: np.array([[-7.55692635],[ 3.17561152],[ 0.37487877],[ 0.1816544 ],\
154 | [ 2.45768258],[ 0.27716717]])},RTOL)
155 | dict_compare(reg.tsls_inf,{0: np.array([[ 7.28635609e-01, -3.29481522e+00, 9.84864177e-04],\
156 | [ 2.44756930e-01, 5.28402406e+00, 1.26376643e-07],\
157 | [ 1.26021571e-01, 4.21879172e+00, 2.45615028e-05],\
158 | [ 1.03323393e-01, 4.72612122e-01, 6.36489932e-01],\
159 | [ 3.48694501e-01, 4.77874843e+00, 1.76389726e-06],\
160 | [ 6.10435763e-02, 1.25276568e+01, 5.26966810e-36]]),\
161 | 1: np.array([[ 1.76286536e+00, 4.11255436e+00, 3.91305295e-05],\
162 | [ 2.78649343e-01, 1.06266306e+01, 2.24061686e-26],\
163 | [ 1.28607242e-01, 6.71207766e+00, 1.91872523e-11],\
164 | [ 3.21721548e-02, -5.54742685e+00, 2.89904383e-08],\
165 | [ 2.09773378e-01, -5.40246249e+00, 6.57322045e-08],\
166 | [ 3.06806758e-01, 8.78644007e+00, 1.54373978e-18],\
167 | [ 5.88231798e-02, 8.21023915e+00, 2.20748374e-16]]),\
168 | 2: np.array([[ 1.10429601e+00, -6.84320712e+00, 7.74395589e-12],\
169 | [ 1.81002635e-01, 1.75445597e+01, 6.54581911e-69],\
170 | [ 1.33983129e-01, 2.79795505e+00, 5.14272697e-03],\
171 | [ 7.56814009e-02, 2.40025154e+00, 1.63838090e-02],\
172 | [ 1.83365858e-01, 1.34031635e+01, 5.79398038e-41],\
173 | [ 4.61324726e-02, 6.00807101e+00, 1.87743612e-09]])},RTOL)
174 |
175 |
176 | if __name__ == '__main__':
177 | unittest.main()
178 |
179 |
--------------------------------------------------------------------------------
/spreg/w_utils.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import scipy.sparse as SPARSE
3 |
4 |
5 | def symmetrize(w):
6 |     """Generate a symmetric matrix that has the same eigenvalues as an
7 |     asymmetric, row-standardized spatial weights matrix w.
8 | 
9 |     Parameters
10 |     ----------
11 |     w : spatial weights object that has been row-standardized
12 | 
13 |     Returns
14 |     -------
15 |     a sparse symmetric matrix with the same eigenvalues as w
16 | 
17 |     """
18 |     current = w.transform
19 |     w.transform = "B"
20 |     d = w.sparse.sum(axis=1)  # row sums of the binary weights
21 |     d.shape = (w.n,)
22 |     d = np.sqrt(d)
23 |     Di12 = SPARSE.dia_matrix((1.0 / d, [0]), shape=(w.n, w.n))  # D^(-1/2)
24 |     D12 = SPARSE.dia_matrix((d, [0]), shape=(w.n, w.n))  # D^(1/2)
25 |     w.transform = current  # restore the original (row-standardized) transform
26 |     return D12 @ w.sparse @ Di12
27 |
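Since the docstring above is terse, here is a minimal usage sketch (illustrative only, not part of the repository; it assumes a libpysal lattice weights object). Because the result D^(1/2) W D^(-1/2) is a similarity transform of the row-standardized W, the two matrices share eigenvalues, and the symmetric form guarantees those eigenvalues are real:

```python
import numpy as np
from libpysal.weights import lat2W
from spreg.w_utils import symmetrize

w = lat2W(5, 5)    # rook weights on a 5x5 lattice
w.transform = "r"  # symmetrize expects a row-standardized w
ws = symmetrize(w)

# Eigenvalues of the symmetric matrix match those of w.sparse.
ev_sym = np.sort(np.linalg.eigvalsh(ws.toarray()))
ev_w = np.sort(np.linalg.eigvals(w.sparse.toarray()).real)
np.testing.assert_allclose(ev_sym, ev_w, atol=1e-8)
```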
--------------------------------------------------------------------------------
/tools/changelog_1.0.4.md:
--------------------------------------------------------------------------------
1 | # Changes
2 |
3 | Version 1.0.4 (2018-08-24)
4 |
5 | We closed a total of 16 issues (enhancements and bug fixes) through 7 pull requests, since our last release on 2018-05-11.
6 |
7 | ## Main Release Enhancements
8 |
9 | This release adds new estimators and tests for [seemingly-unrelated regression](https://en.wikipedia.org/wiki/Seemingly_unrelated_regressions) models with endogenous spatial lag and spatial error structures. The new estimators, `spreg.SURerrorGM` and `spreg.SURerrorML`, have also been extended to incorporate spatial regimes regression.
10 |
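For orientation, a minimal sketch of calling the new GM error estimator (argument names are inferred from the package's test suite, so treat them as assumptions rather than a definitive API):

```python
import libpysal
from libpysal.examples import load_example
from spreg.sur_utils import sur_dictxy
from spreg import SURerrorGM

# Assemble the stacked y and X dictionaries for two cross sections.
nat = load_example("NCOVR")
db = libpysal.io.open(nat.get_path("NAT.dbf"), "r")
w = libpysal.weights.Queen.from_shapefile(nat.get_path("NAT.shp"))
w.transform = "r"

bigy, bigX, bigyvars, bigXvars = sur_dictxy(
    db, ["HR80", "HR90"], [["PS80", "UE80"], ["PS90", "UE90"]]
)

# GM estimation of a SUR model with a spatial error structure.
reg = SURerrorGM(bigy, bigX, w=w, name_bigy=bigyvars, name_bigX=bigXvars,
                 name_ds="NAT", name_w="nat_queen")
print(reg.summary)
```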
11 | ## Issues Closed
12 | - Libpysal refresh (#11)
13 | - update docstrings for libpysal API changes (#9)
14 | - Merging in spanel & spreg2 code necessary for new spatial panel & GeoDaSpace (#10)
15 | - move to silence_warnings from current libpysal (#7)
16 | - add init to ensure tests are shipped (#6)
17 | - weights typechecking will only accept things from `pysal`. (#3)
18 | - relax error checking in check_weights (#4)
19 | - simplify testing (#5)
20 | - Convert spreg to common subset 2,3 code (#2)
21 |
22 | ## Pull Requests
23 | - Libpysal refresh (#11)
24 | - Merging in spanel & spreg2 code necessary for new spatial panel & GeoDaSpace (#10)
25 | - move to silence_warnings from current libpysal (#7)
26 | - add init to ensure tests are shipped (#6)
27 | - relax error checking in check_weights (#4)
28 | - simplify testing (#5)
29 | - Convert spreg to common subset 2,3 code (#2)
30 |
31 | The following individuals contributed to this release:
32 |
33 | - Luc Anselin (@lanselin)
34 | - Pedro V. Amaral (@pedrovma)
35 | - Levi John Wolf (@levijohnwolf)
--------------------------------------------------------------------------------
/tools/changelog_1.1.1.md:
--------------------------------------------------------------------------------
1 | # Changes
2 |
3 | Version 1.1.1 (2020-02-24)
4 |
5 | We closed a total of 14 issues (enhancements and bug fixes) through 6 pull requests, since our last release on 2019-06-29.
6 |
7 | ## Issues Closed
8 | - use flatten instead of setting shape directly (#34)
9 | - (Bug) deal with libpysal example data sets change and fix inline doctests (#30)
10 | - add online docs badge to readme (#28)
11 | - Docstrings use references to older pysal bibliography (#1)
12 | - spreg Documentation website (#27)
13 | - pysal.github.io issue #98 (#20)
14 | - REL: bumping version (#25)
15 | - (docs, bug) change to spatial error model description for spreg.ML_Error (#24)
16 |
17 | ## Pull Requests
18 | - use flatten instead of setting shape directly (#34)
19 | - (Bug) deal with libpysal example data sets change and fix inline doctests (#30)
20 | - add online docs badge to readme (#28)
21 | - spreg Documentation website (#27)
22 | - REL: bumping version (#25)
23 | - (docs, bug) change to spatial error model description for spreg.ML_Error (#24)
24 |
25 | The following individuals contributed to this release:
26 |
27 | - Pedro Amaral
28 | - Eli Knaap
29 | - Wei Kang
30 | - James Gaboardi
--------------------------------------------------------------------------------
/tools/changelog_1.1.2.md:
--------------------------------------------------------------------------------
1 | # Changes
2 |
3 | Version 1.1.2 (2020-08-05)
4 |
5 | We closed a total of 24 issues (enhancements and bug fixes) through 8 pull requests, since our last release on 2020-02-24.
6 |
7 | ## Issues Closed
8 | - fix doctest of check_constant (#48)
9 | - Fixed Effects Panel - Spatial Lag model (#42)
10 | - Fixed Effects Panel - Spatial Lag (#41)
11 | - Removing unused spat_diag argument. (#43)
12 | - Remove `spat_diag` options altogether for `ML_Lag` & `ML_Error` or document they're ignored (#13)
13 | - spreg 1.1.0 was not released on pypi? (#26)
14 | - spreg GitHub home needs title (#29)
15 | - transfer of closed pr from pysal-meta (#18)
16 | - transfer of closed pr in pysal-meta to pysal/spreg (#17)
17 | - Rebasing panel (#40)
18 | - branch "panel" for gsoc 2020 (#39)
19 | - Updates on all spreg functions (#37)
20 | - Adding spatial panels: KKP (#31)
21 | - ml_lag example failing? (#33)
22 | - release prep (#36)
23 | - Allowing single dimensions array as y and fixing BaseGM_Lag (#35)
24 |
25 | ## Pull Requests
26 | - fix doctest of check_constant (#48)
27 | - Fixed Effects Panel - Spatial Lag (#41)
28 | - Removing unused spat_diag argument. (#43)
29 | - Rebasing panel (#40)
30 | - branch "panel" for gsoc 2020 (#39)
31 | - Updates on all spreg functions (#37)
32 | - release prep (#36)
33 | - Allowing single dimensions array as y and fixing BaseGM_Lag (#35)
34 |
35 | The following individuals contributed to this release:
36 |
37 | - Pedro Amaral
38 | - Wei Kang
39 | - Eli Knaap
40 |
--------------------------------------------------------------------------------
/tools/changelog_1.1.2.post1.md:
--------------------------------------------------------------------------------
1 | # Changes
2 |
3 | Version 1.1.2.post1 (2020-08-06)
4 |
5 | We closed a total of 24 issues (enhancements and bug fixes) through 8 pull requests, since our last release on 2020-02-24.
6 |
7 | ## Issues Closed
8 | - fix doctest of check_constant (#48)
9 | - Fixed Effects Panel - Spatial Lag model (#42)
10 | - Fixed Effects Panel - Spatial Lag (#41)
11 | - Removing unused spat_diag argument. (#43)
12 | - Remove `spat_diag` options altogether for `ML_Lag` & `ML_Error` or document they're ignored (#13)
13 | - spreg 1.1.0 was not released on pypi? (#26)
14 | - spreg GitHub home needs title (#29)
15 | - transfer of closed pr from pysal-meta (#18)
16 | - transfer of closed pr in pysal-meta to pysal/spreg (#17)
17 | - Rebasing panel (#40)
18 | - branch "panel" for gsoc 2020 (#39)
19 | - Updates on all spreg functions (#37)
20 | - Adding spatial panels: KKP (#31)
21 | - ml_lag example failing? (#33)
22 | - release prep (#36)
23 | - Allowing single dimensions array as y and fixing BaseGM_Lag (#35)
24 |
25 | ## Pull Requests
26 | - fix doctest of check_constant (#48)
27 | - Fixed Effects Panel - Spatial Lag (#41)
28 | - Removing unused spat_diag argument. (#43)
29 | - Rebasing panel (#40)
30 | - branch "panel" for gsoc 2020 (#39)
31 | - Updates on all spreg functions (#37)
32 | - release prep (#36)
33 | - Allowing single dimensions array as y and fixing BaseGM_Lag (#35)
34 |
35 | The following individuals contributed to this release:
36 |
37 | - Pedro Amaral
38 | - Wei Kang
39 | - Eli Knaap
--------------------------------------------------------------------------------
/tools/changelog_1.2.0.md:
--------------------------------------------------------------------------------
1 | # Changes
2 |
3 | Version 1.2.0 (2020-12-30)
4 |
5 | We closed a total of 9 issues (enhancements and bug fixes) through 3 pull requests, since our last release on 2020-08-07.
6 |
7 | ## Issues Closed
8 | - Spatial dependence diagnostics returns nan (#47)
9 | - Random Effects for Spatial Panels (#46)
10 | - Random Effects Panel (#50)
11 | - Add GM_Lag_example notebook (#38)
12 | - Fixed Effects Panel - Spatial Error model (#44)
13 | - Fixed Effects Panel - Spatial Error (#45)
14 |
15 | ## Pull Requests
16 | - Random Effects Panel (#50)
17 | - Fixed Effects Panel (#45)
18 | - Add GM_Lag_example notebook (#38)
19 |
20 | The following individuals contributed to this release:
21 |
22 | - Pedro Amaral
23 | - Pabloestradac
24 |
--------------------------------------------------------------------------------
/tools/changelog_1.2.0.post1.md:
--------------------------------------------------------------------------------
1 | # Changes
2 |
3 | Version 1.2.0.post1 (2020-12-30)
4 |
5 | We closed a total of 11 issues (enhancements and bug fixes) through 4 pull requests, since our last release on 2020-08-07.
6 |
7 | ## Issues Closed
8 | - update env variable syntax (#52)
9 | - Spatial dependence diagnostics returns nan (#47)
10 | - Random Effects for Spatial Panels (#46)
11 | - Random Effects Panel (#50)
12 | - Add GM_Lag_example notebook (#38)
13 | - Fixed Effects Panel - Spatial Error model (#44)
14 | - Fixed Effects Panel - Spatial Error (#45)
15 |
16 | ## Pull Requests
17 | - update env variable syntax (#52)
18 | - Random Effects Panel (#50)
19 | - Fixed Effects Panel (#45)
20 | - Add GM_Lag_example notebook (#38)
21 |
22 | The following individuals contributed to this release:
23 |
24 | - Pedro Amaral
25 | - Pabloestradac
26 |
--------------------------------------------------------------------------------
/tools/changelog_1.2.1.md:
--------------------------------------------------------------------------------
1 | # Changes
2 |
3 | Version 1.2.1 (2021-01-06)
4 |
5 | We closed a total of 19 issues (enhancements and bug fixes) through 7 pull requests, since our last release on 2020-12-30.
6 |
7 | ## Issues Closed
8 | - Docs To Do (#54)
9 | - Missing classes in API docs (#63)
10 | - notebook and API docs cleanup (#64)
11 | - Add tutorials to docs (#60)
12 | - Travis --> GHA (#56)
13 | - CI with GitHub Actions (#55)
14 | - Migrate from RTD to GH-pages for automated doc builds and hosting (#53)
15 | - update env variable syntax (#52)
16 | - Spatial dependence diagnostics returns nan (#47)
17 | - Random Effects for Spatial Panels (#46)
18 | - Random Effects Panel (#50)
19 | - Add GM_Lag_example notebook (#38)
20 |
21 | ## Pull Requests
22 | - notebook and API docs cleanup (#64)
23 | - Add tutorials to docs (#60)
24 | - CI with GitHub Actions (#55)
25 | - Migrate from RTD to GH-pages for automated doc builds and hosting (#53)
26 | - update env variable syntax (#52)
27 | - Random Effects Panel (#50)
28 | - Add GM_Lag_example notebook (#38)
29 |
30 | The following individuals contributed to this release:
31 |
32 | - James Gaboardi
33 | - Pedro Amaral
--------------------------------------------------------------------------------