├── .circleci └── config.yml ├── .drone.yml ├── .gh-pages-skeleton └── .nojekyll ├── .gitignore ├── .travis.yml ├── AUTHORS ├── CHANGES.rst ├── LICENSE ├── MANIFEST.in ├── README.rst ├── appveyor.yml ├── benchmarks ├── asv.conf.json └── benchmarks │ ├── Lambdify.py │ ├── __init__.py │ └── differentiation.py ├── conda-recipe └── meta.yaml ├── examples ├── diff.ipynb ├── differentiation.py └── tests │ └── test_examples.py ├── scripts ├── benchmark.sh ├── build_conda_recipe.sh ├── check_clean_repo_on_master.sh ├── ci.sh ├── coverage_badge.py ├── dir_to_branch.sh ├── generate_docs.sh ├── post_release.sh ├── prepare_deploy.sh ├── release.sh ├── render_examples.sh ├── render_index.sh ├── render_notebooks.sh ├── run_tests.sh └── update-gh-pages.sh ├── setup.cfg ├── setup.py └── sym ├── __init__.py ├── _release.py ├── _sympy_Lambdify.py ├── backend.py ├── tests ├── __init__.py ├── test_Dummy.py ├── test_Lambdify.py ├── test_Matrix.py ├── test_Symbol.py ├── test_banded_jacobian.py ├── test_cse.py ├── test_sparse_jacobian.py ├── test_symarray.py ├── test_sympy_Lambdify.py └── test_util.py └── util.py /.circleci/config.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | 3 | jobs: 4 | build: 5 | docker: 6 | - image: continuumio/miniconda3 7 | steps: 8 | - checkout 9 | - run: apt-get update && apt-get --quiet --assume-yes install gcc g++ 10 | - run: conda config --set always_yes yes 11 | - run: conda update python 12 | - run: conda install conda-build 13 | - run: conda config --add channels conda-forge 14 | - run: conda config --add channels bjodah 15 | - run: conda config --set show_channel_urls true 16 | - run: conda build conda-recipe 17 | -------------------------------------------------------------------------------- /.drone.yml: -------------------------------------------------------------------------------- 1 | pipeline: 2 | build: 3 | image: bjodah/bjodahimg20dot:2.1.1.2 4 | environment: 5 | - CC=gcc-10 6 | - CXX=g++-10 7 | - SymEngine_DIR=/opt/symengine-46090cf-rel 8 | commands: 9 | - python3 -m pip install https://github.com/symengine/symengine.py/archive/62a0d89b5b9ad00814c5cd2f72e697aa1c21dccd.tar.gz 10 | - SYM_STRICT_TESTING=1 ./scripts/ci.sh sym 11 | - ./scripts/prepare_deploy.sh 12 | 13 | deploy: 14 | image: drillster/drone-rsync 15 | when: 16 | event: [push] 17 | hosts: [ "hera.physchem.kth.se" ] 18 | port: 22 19 | user: sym 20 | secrets: [ rsync_key ] # secret only set from event "push" not "pull_request" 21 | source: ./deploy/public_html 22 | target: ~/ 23 | -------------------------------------------------------------------------------- /.gh-pages-skeleton/.nojekyll: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bjodah/sym/33e8dcb938ebe45a486ff322c392e3561f1a9e11/.gh-pages-skeleton/.nojekyll -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | **/*.pyc 2 | **/__pycache__ 3 | .*cache/ 4 | dist/ 5 | MANIFEST 6 | build/ 7 | **/.ipynb_checkpoints/ 8 | doc/ 9 | examples/*.wav 10 | examples/*.html 11 | examples/thumbs/ 12 | **.egg-info 13 | benchmarks/* 14 | !benchmarks/asv.conf.json 15 | !benchmarks/benchmarks 16 | !benchmarks/profile.py 17 | !benchmarks/run_profile.sh 18 | **.so 19 | _*.c 20 | cython_debug/ 21 | -------------------------------------------------------------------------------- /.travis.yml: 
-------------------------------------------------------------------------------- 1 | language: generic 2 | os: osx 3 | osx_image: xcode6.4 4 | 5 | before_install: 6 | - | 7 | echo "" 8 | echo "Removing homebrew from Travis CI to avoid conflicts." 9 | curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/uninstall > ~/uninstall_homebrew 10 | chmod +x ~/uninstall_homebrew 11 | ~/uninstall_homebrew -fq 12 | rm ~/uninstall_homebrew 13 | 14 | install: 15 | - | 16 | echo "" 17 | echo "Installing a fresh version of Miniconda." 18 | MINICONDA_URL="https://repo.continuum.io/miniconda" 19 | MINICONDA_FILE="Miniconda3-latest-MacOSX-x86_64.sh" 20 | curl -L -O "${MINICONDA_URL}/${MINICONDA_FILE}" 21 | bash $MINICONDA_FILE -b 22 | 23 | # Configure conda. 24 | - | 25 | echo "" 26 | echo "Configuring conda." 27 | source /Users/travis/miniconda3/bin/activate root 28 | conda config --set always_yes yes 29 | conda update python 30 | conda install conda-build 31 | conda config --remove channels defaults 32 | conda config --add channels defaults 33 | conda config --add channels conda-forge 34 | conda config --set show_channel_urls true 35 | 36 | script: 37 | - sed -i.bak0 '/pysym/d' conda-recipe/meta.yaml 38 | - sed -i.bak1 '/symcxx/d' conda-recipe/meta.yaml 39 | - conda build conda-recipe 40 | 41 | notifications: 42 | email: false 43 | -------------------------------------------------------------------------------- /AUTHORS: -------------------------------------------------------------------------------- 1 | Bjoern I. Dahlgren 2 | -------------------------------------------------------------------------------- /CHANGES.rst: -------------------------------------------------------------------------------- 1 | v0.3.5 2 | ====== 3 | - Lambdify accepts kwarg 'cse' 4 | - Updates to interface to e.g. symengine.py 5 | 6 | v0.3.4 7 | ====== 8 | - DenseMatrix got 2 new methods: 9 | - sparse_jacobian_csc 10 | - sparse_jacobian_csr 11 | 12 | v0.3.3 13 | ====== 14 | - Backend now support ``cse`` & ``ccode`` for symengine 15 | 16 | v0.3.2 17 | ====== 18 | - Lambdify now supports ``sign`` 19 | 20 | v0.3.0 21 | ====== 22 | - Lambdify now supports multiple outputs 23 | 24 | v0.2.0 25 | ====== 26 | - linear_rref now handles symbolic entries in the augmented part. 27 | 28 | v0.1.8 29 | ====== 30 | - Provisional support for mpmath, numpy now coerces to float64 31 | 32 | v0.1.7 33 | ====== 34 | - Fix sdist / conda package. 35 | 36 | v0.1.6 37 | ====== 38 | - Added ``.util.check_transforms`` 39 | 40 | v0.1 41 | ==== 42 | - Support differentiation 43 | - Support for numerical evaluation 44 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (c) 2016, Björn Dahlgren 2 | All rights reserved. 3 | 4 | Redistribution and use in source and binary forms, with or without modification, 5 | are permitted provided that the following conditions are met: 6 | 7 | Redistributions of source code must retain the above copyright notice, this 8 | list of conditions and the following disclaimer. 9 | 10 | Redistributions in binary form must reproduce the above copyright notice, this 11 | list of conditions and the following disclaimer in the documentation and/or 12 | other materials provided with the distribution. 
13 | 14 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND 15 | ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED 16 | WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 17 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR 18 | ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES 19 | (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; 20 | LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON 21 | ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 22 | (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS 23 | SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 24 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include AUTHORS 2 | include CHANGES.rst 3 | include LICENSE 4 | include README.rst 5 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | sym 2 | === 3 | .. image:: http://hera.physchem.kth.se:9090/api/badges/bjodah/sym/status.svg 4 | :target: http://hera.physchem.kth.se:9090/bjodah/sym 5 | :alt: Build status 6 | .. image:: https://circleci.com/gh/bjodah/sym.svg?style=svg 7 | :target: https://circleci.com/gh/bjodah/sym 8 | :alt: Build status on CircleCI 9 | .. image:: https://secure.travis-ci.org/bjodah/sym.svg?branch=master 10 | :target: http://travis-ci.org/bjodah/sym 11 | :alt: Build status on Travis-CI 12 | .. image:: https://img.shields.io/pypi/v/sym.svg 13 | :target: https://pypi.python.org/pypi/sym 14 | :alt: PyPI version 15 | .. image:: https://img.shields.io/badge/python-2.7,3.5,3.6-blue.svg 16 | :target: https://www.python.org/ 17 | :alt: Python version 18 | .. image:: https://img.shields.io/pypi/l/sym.svg 19 | :target: https://github.com/bjodah/sym/blob/master/LICENSE 20 | :alt: License 21 | .. image:: http://img.shields.io/badge/benchmarked%20by-asv-green.svg?style=flat 22 | :target: http://hera.physchem.kth.se/~sym/benchmarks 23 | :alt: airspeedvelocity 24 | .. image:: http://hera.physchem.kth.se/~sym/branches/master/htmlcov/coverage.svg 25 | :target: http://hera.physchem.kth.se/~sym/branches/master/htmlcov 26 | :alt: coverage 27 | 28 | 29 | ``sym`` provides a unified wrapper to some 30 | symbolic manipulation libraries in Python. It makes it easy for library authors 31 | to test their packages against several symbolic manipulation libraries. 32 | 33 | Currently the following Python pacakges are available as "backends": 34 | 35 | - `SymPy `_ 36 | - `SymEngine `_ 37 | - `PySym `_ 38 | - `SymCXX `_ 39 | - `Diofant `_ 40 | 41 | The capabilities exposed here are those needed by 42 | 43 | - `pyodesys `_ 44 | - `pyneqsys `_ 45 | 46 | and include: 47 | 48 | - Differentiation 49 | - Numerical evaluation (including "lambdify" support) 50 | 51 | see `tests `_ for examples. 52 | Note that ``pyodesys`` and ``pyneqsys`` also act as test suits for this package. 53 | 54 | 55 | Documentation 56 | ------------- 57 | Auto-generated API documentation for the latest stable release is found here: 58 | ``_ 59 | (and the development version for the current master branch is found here: 60 | ``_). 
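
A minimal sketch of the ``Lambdify`` numerical-evaluation capability listed above,
adapted from the ``Backend.Lambdify`` docstring in ``sym/backend.py`` (the doctest
is marked as skipped since exact array formatting varies between NumPy versions):

.. code:: python

   >>> from sym import Backend
   >>> be = Backend('sympy')  # any of the installed backends works here
   >>> x, y = map(be.Symbol, 'xy')
   >>> lmb = be.Lambdify([x, y], [x + y + 1, x*y**2])
   >>> lmb([2.0, 3.0])  # doctest: +SKIP
   array([  6.,  18.])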
61 | 62 | Installation 63 | ------------ 64 | Simplest way to install sym and its (optional) dependencies is to use pip: 65 | 66 | :: 67 | 68 | $ pip install --user sym pytest 69 | $ python -m pytest --pyargs sym 70 | 71 | or the `conda package manager `_: 72 | 73 | :: 74 | 75 | $ conda install -c bjodah sym pytest 76 | $ python -m pytest --pyargs sym 77 | 78 | Source distribution is available here: 79 | ``_ 80 | 81 | Example 82 | ------- 83 | Differentiation 84 | 85 | .. code:: python 86 | 87 | >>> from sym import Backend 88 | >>> be = Backend('pysym') # just an example, use SymPy rather than pysym 89 | >>> x, y = map(be.Symbol, 'x y'.split()) 90 | >>> expr = x*y**2 - be.tan(2*x) 91 | >>> print(expr.diff(x)) 92 | ((y**2) - ((1 + (tan((2*x))**2))*2)) 93 | 94 | 95 | for more examples, see `examples/ `_, and rendered jupyter notebooks here: 96 | ``_ 97 | 98 | License 99 | ------- 100 | The source code is Open Source and is released under the simplified 2-clause BSD license. See `LICENSE `_ for further details. 101 | Contributors are welcome to suggest improvements at https://github.com/bjodah/sym 102 | 103 | Author 104 | ------ 105 | Björn I. Dahlgren, contact: 106 | 107 | - gmail address: bjodah 108 | -------------------------------------------------------------------------------- /appveyor.yml: -------------------------------------------------------------------------------- 1 | environment: 2 | 3 | matrix: 4 | - TARGET_ARCH: x86 5 | CONDA_PY: 35 6 | CONDA_INSTALL_LOCN: C:\\Miniconda35 7 | 8 | - TARGET_ARCH: x64 9 | CONDA_PY: 35 10 | CONDA_INSTALL_LOCN: C:\\Miniconda35-x64 11 | 12 | platform: 13 | - x64 14 | 15 | install: 16 | # Cywing's git breaks conda-build. https://github.com/conda-forge/conda-smithy-feedstock/pull/2 17 | - cmd: rmdir C:\cygwin /s /q 18 | 19 | # Add path, activate `conda` and update conda. 20 | - cmd: call %CONDA_INSTALL_LOCN%\Scripts\activate.bat 21 | - cmd: conda update --yes --quiet conda 22 | 23 | - cmd: set PYTHONUNBUFFERED=1 24 | 25 | - cmd: conda config --set show_channel_urls true 26 | - cmd: conda install --yes --quiet conda-build 27 | - cmd: conda config --add channels conda-forge 28 | 29 | # Skip .NET project specific build phase. 30 | build: off 31 | 32 | test_script: 33 | - "sed -i '/pysym/d' conda-recipe/meta.yaml" 34 | - "sed -i '/symcxx/d' conda-recipe/meta.yaml" 35 | - conda.exe build conda-recipe --quiet 36 | -------------------------------------------------------------------------------- /benchmarks/asv.conf.json: -------------------------------------------------------------------------------- 1 | { 2 | // The version of the config file format. Do not change, unless 3 | // you know what you are doing. 4 | "version": 1, 5 | 6 | // The name of the project being benchmarked 7 | "project": "sym", 8 | 9 | // The project's homepage 10 | "project_url": "http://github.com/bjodah/sym", 11 | 12 | // The URL or local path of the source code repository for the 13 | // project being benchmarked 14 | "repo": "https://github.com/bjodah/sym.git", 15 | 16 | // List of branches to benchmark. If not provided, defaults to "master" 17 | // (for git) or "tip" (for mercurial). 18 | // "branches": ["master"], // for git 19 | // "branches": ["tip"], // for mercurial 20 | 21 | // The DVCS being used. If not set, it will be automatically 22 | // determined from "repo" by looking at the protocol in the URL 23 | // (if remote), or by looking for special directories, such as 24 | // ".git" (if local). 25 | // "dvcs": "git", 26 | 27 | // The tool to use to create environments. 
May be "conda", 28 | // "virtualenv" or other value depending on the plugins in use. 29 | // If missing or the empty string, the tool will be automatically 30 | // determined by looking for tools on the PATH environment 31 | // variable. 32 | "environment_type": "conda", 33 | 34 | // the base URL to show a commit for the project. 35 | "show_commit_url": "http://github.com/bjodah/sym/commit/", 36 | 37 | // The Pythons you'd like to test against. If not provided, defaults 38 | // to the current version of Python used to run `asv`. 39 | // "pythons": ["2.7", "3.3"], 40 | 41 | // The matrix of dependencies to test. Each key is the name of a 42 | // package (in PyPI) and the values are version numbers. An empty 43 | // list indicates to just test against the default (latest) 44 | // version. 45 | "matrix": { 46 | "numpy": [], 47 | "numba": [], 48 | "sympy": [], 49 | "fastcache": [], 50 | "python-symengine": [], 51 | "pysym": [], 52 | "symcxx": [], 53 | }, 54 | 55 | // The directory (relative to the current directory) that benchmarks are 56 | // stored in. If not provided, defaults to "benchmarks" 57 | // "benchmark_dir": "benchmarks", 58 | 59 | // The directory (relative to the current directory) to cache the Python 60 | // environments in. If not provided, defaults to "env" 61 | // "env_dir": "env", 62 | 63 | 64 | // The directory (relative to the current directory) that raw benchmark 65 | // results are stored in. If not provided, defaults to "results". 66 | // "results_dir": "results", 67 | 68 | // The directory (relative to the current directory) that the html tree 69 | // should be written to. If not provided, defaults to "html". 70 | // "html_dir": "html", 71 | 72 | // The number of characters to retain in the commit hashes. 73 | // "hash_length": 8, 74 | 75 | // `asv` will cache wheels of the recent builds in each 76 | // environment, making them faster to install next time. This is 77 | // number of builds to keep, per environment. 
78 | // "wheel_cache_size": 0 79 | } 80 | -------------------------------------------------------------------------------- /benchmarks/benchmarks/Lambdify.py: -------------------------------------------------------------------------------- 1 | from functools import reduce 2 | from operator import add 3 | import numpy as np 4 | import sym 5 | 6 | # Real-life example (ion speciation problem in water chemistry) 7 | 8 | _ref = np.array([37.252574322668998, 22.321937961124899, 10.9011158998744, 9 | 20.190422234652999, 27.8679190043357, 33.933606208922598, 10 | 33.552055153126204, 31.440168027241697, 37.999293413509498, 11 | 41.071619997204103, -20.619381941508539, 111.68831884983794, 12 | 29.210791083803763, 18.901100113049495, 17.18281828459045]) 13 | 14 | 15 | def get_syms_exprs(backend): 16 | x = backend.symarray('x', 14) 17 | p = backend.symarray('p', 14) 18 | syms = np.concatenate((x, p)) 19 | exp = backend.exp 20 | exprs = [ 21 | x[0] + x[1] - x[4] + 36.252574322669, 22 | x[0] - x[2] + x[3] + 21.3219379611249, 23 | x[3] + x[5] - x[6] + 9.9011158998744, 24 | 2*x[3] + x[5] - x[7] + 18.190422234653, 25 | 3*x[3] + x[5] - x[8] + 24.8679190043357, 26 | 4*x[3] + x[5] - x[9] + 29.9336062089226, 27 | -x[10] + 5*x[3] + x[5] + 28.5520551531262, 28 | 2*x[0] + x[11] - 2*x[4] - 2*x[5] + 32.4401680272417, 29 | 3*x[1] - x[12] + x[5] + 34.9992934135095, 30 | 4*x[1] - x[13] + x[5] + 37.0716199972041, 31 | ( 32 | p[0] - p[1] + 2*p[10] + 2*p[11] - p[12] - 2*p[13] + 33 | p[2] + 2*p[5] + 2*p[6] + 2*p[7] + 2*p[8] + 2*p[9] - 34 | exp(x[0]) + exp(x[1]) - 2*exp(x[10]) - 2*exp(x[11]) + 35 | exp(x[12]) + 2*exp(x[13]) - exp(x[2]) - 2*exp(x[5]) - 36 | 2*exp(x[6]) - 2*exp(x[7]) - 2*exp(x[8]) - 2*exp(x[9]) 37 | ), ( 38 | -p[0] - p[1] - 15*p[10] - 2*p[11] - 3*p[12] - 4*p[13] - 39 | 4*p[2] - 3*p[3] - 2*p[4] - 3*p[6] - 6*p[7] - 9*p[8] - 40 | 12*p[9] + exp(x[0]) + exp(x[1]) + 15*exp(x[10]) + 41 | 2*exp(x[11]) + 3*exp(x[12]) + 4*exp(x[13]) + 4*exp(x[2]) + 42 | 3*exp(x[3]) + 2*exp(x[4]) + 3*exp(x[6]) + 6*exp(x[7]) + 43 | 9*exp(x[8]) + 12*exp(x[9]) 44 | ), ( 45 | -5*p[10] - p[2] - p[3] - p[6] - 2*p[7] - 3*p[8] - 4*p[9] + 46 | 5*exp(x[10]) + exp(x[2]) + exp(x[3]) + exp(x[6]) + 47 | 2*exp(x[7]) + 3*exp(x[8]) + 4*exp(x[9]) 48 | ), ( 49 | -p[1] - 2*p[11] - 3*p[12] - 4*p[13] - p[4] + exp(x[1]) + 50 | 2*exp(x[11]) + 3*exp(x[12]) + 4*exp(x[13]) + exp(x[4]) 51 | ), ( 52 | -p[10] - 2*p[11] - p[12] - p[13] - p[5] - p[6] - p[7] - 53 | p[8] - p[9] + exp(x[10]) + 2*exp(x[11]) + exp(x[12]) + 54 | exp(x[13]) + exp(x[5]) + exp(x[6]) + exp(x[7]) + 55 | exp(x[8]) + exp(x[9]) 56 | ) 57 | ] 58 | return syms, exprs 59 | 60 | 61 | class TimeLambdifyInit: 62 | 63 | params = ['sympy', 'symengine', 'pysym', 'symcxx'] 64 | 65 | def time_init(self, name): 66 | be = sym.Backend(name) 67 | self.syms, self.exprs = get_syms_exprs(be) 68 | cb = be.Lambdify(self.syms, self.exprs) 69 | 70 | 71 | backend_names = list(sym.Backend.backends.keys()) 72 | n_backends = len(backend_names) 73 | _backend_numba = list(zip(backend_names, zip(*[[False]*n_backends]*2))) + [('sympy', (True, False)), ('sympy', (True, True))] 74 | 75 | 76 | 77 | class TimeLambdifyEval: 78 | 79 | params = ([1, 100], _backend_numba) 80 | param_names = ('n', 'backend_numba') 81 | 82 | def setup(self, n, backend_numba): 83 | name, (use_numba, warm_up) = backend_numba 84 | self.inp = np.ones(28) 85 | self.backend = sym.Backend(name) 86 | self.syms, self.exprs = get_syms_exprs(self.backend) 87 | kwargs = {'use_numba': use_numba} if name == 'sympy' else {} 88 | self.lmb = 
self.backend.Lambdify(self.syms, self.exprs, **kwargs) 89 | self.values = {} 90 | if warm_up: 91 | self.time_evaluate(n, backend_numba) 92 | 93 | def time_evaluate(self, n, backend_numba): 94 | name, (use_numba, warm_up) = backend_numba 95 | for i in range(n): 96 | res = self.lmb(self.inp) 97 | if not np.allclose(res, _ref): 98 | raise ValueError('Incorrect result') 99 | 100 | 101 | def _mk_long_evaluator(backend, n, **kwargs): 102 | x = backend.symarray('x', n) 103 | p, q, r = 17, 42, 13 104 | terms = [i*s for i, s in enumerate(x, p)] 105 | exprs = [reduce(add, terms), r + x[0], -99] 106 | callback = backend.Lambdify(x, exprs, **kwargs) 107 | input_arr = np.arange(q, q + n*n).reshape((n, n)) 108 | ref = np.empty((n, 3)) 109 | coeffs = np.arange(p, p + n) 110 | for i in range(n): 111 | ref[i, 0] = coeffs.dot(np.arange(q + n*i, q + n*(i+1))) 112 | ref[i, 1] = q + n*i + r 113 | ref[:, 2] = -99 114 | return callback, input_arr, ref 115 | 116 | 117 | class TimeLambdifyManyArgs: 118 | 119 | params = ([100, 200, 300], _backend_numba) 120 | param_names = ('n', 'backend_numba') 121 | 122 | def setup(self, n, backend_numba): 123 | name, (use_numba, warm_up) = backend_numba 124 | self.backend = sym.Backend(name) 125 | kwargs = {'use_numba': use_numba} if name == 'sympy' else {} 126 | self.callback, self.input_arr, self.ref = _mk_long_evaluator(self.backend, n, **kwargs) 127 | if warm_up: 128 | self.time_evaluate(n, backend_numba) 129 | 130 | 131 | def time_evaluate(self, n, backend_numba): 132 | name, (use_numba, warm_up) = backend_numba 133 | out = self.callback(self.input_arr) 134 | if not np.allclose(out, self.ref): 135 | raise ValueError('Incorrect result') 136 | -------------------------------------------------------------------------------- /benchmarks/benchmarks/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bjodah/sym/33e8dcb938ebe45a486ff322c392e3561f1a9e11/benchmarks/benchmarks/__init__.py -------------------------------------------------------------------------------- /benchmarks/benchmarks/differentiation.py: -------------------------------------------------------------------------------- 1 | from __future__ import (absolute_import, division, print_function) 2 | 3 | 4 | import functools 5 | import operator 6 | 7 | import pysym 8 | 9 | 10 | class TimeDiff: 11 | 12 | def setup(self): 13 | x, y, z = self.symbols = map(pysym.Symbol, 'x y z'.split()) 14 | self.expr = functools.reduce(operator.add, [x**i/(y**i - i/z) for i in range(3)]) 15 | 16 | def time_diff_x(self): 17 | self.expr.diff(self.symbols[0]) 18 | 19 | def time_diff_y(self): 20 | self.expr.diff(self.symbols[1]) 21 | 22 | def time_diff_z(self): 23 | self.expr.diff(self.symbols[2]) 24 | -------------------------------------------------------------------------------- /conda-recipe/meta.yaml: -------------------------------------------------------------------------------- 1 | {% set name = "sym" %} 2 | {% set version = "0.3.0.dev0+git" %} 3 | 4 | package: 5 | name: {{ name }} 6 | version: {{ version }} 7 | 8 | source: 9 | git_url: ../ 10 | 11 | build: 12 | noarch: python 13 | number: 0 14 | script: python -m pip install --no-deps --ignore-installed . 
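  # "noarch: python" yields a single platform-independent package; the pip
  # invocation above installs only this package (--no-deps), while its
  # dependencies are declared under "requirements" below.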
15 | 16 | requirements: 17 | build: 18 | - python 19 | - setuptools 20 | - numpy 21 | run: 22 | - python 23 | - numpy 24 | 25 | test: 26 | imports: 27 | - {{ name }} 28 | 29 | requires: 30 | - pytest 31 | - numpy 32 | - sympy 33 | - symcxx 34 | - python-symengine 35 | - pysym 36 | 37 | commands: 38 | - py.test -k "not diofant" --pyargs {{ name }} 39 | 40 | about: 41 | home: https://github.com/bjodah/{{ name }} 42 | license: BSD 2-Clause 43 | license_file: LICENSE 44 | summary: 'Unified wrapper to symbolic manipulation libraries in Python.' 45 | doc_url: https://bjodah.github.io/{{ name }}/latest 46 | 47 | extra: 48 | recipe-maintainers: 49 | - bjodah 50 | -------------------------------------------------------------------------------- /examples/diff.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": null, 6 | "metadata": { 7 | "collapsed": true 8 | }, 9 | "outputs": [], 10 | "source": [ 11 | "from sym import Backend" 12 | ] 13 | }, 14 | { 15 | "cell_type": "code", 16 | "execution_count": null, 17 | "metadata": { 18 | "collapsed": false 19 | }, 20 | "outputs": [], 21 | "source": [ 22 | "be = Backend('sympy')\n", 23 | "x, y = map(be.Symbol, 'x y'.split())\n", 24 | "print((3*y+4).diff(y))" 25 | ] 26 | }, 27 | { 28 | "cell_type": "code", 29 | "execution_count": null, 30 | "metadata": { 31 | "collapsed": false 32 | }, 33 | "outputs": [], 34 | "source": [ 35 | "lmb = be.Lambdify([x, y], [x**2 + y**2, x+y, x/y])\n", 36 | "lmb([2, 3])" 37 | ] 38 | } 39 | ], 40 | "metadata": { 41 | "kernelspec": { 42 | "display_name": "Python 2", 43 | "language": "python", 44 | "name": "python2" 45 | }, 46 | "language_info": { 47 | "codemirror_mode": { 48 | "name": "ipython", 49 | "version": 2 50 | }, 51 | "file_extension": ".py", 52 | "mimetype": "text/x-python", 53 | "name": "python", 54 | "nbconvert_exporter": "python", 55 | "pygments_lexer": "ipython2", 56 | "version": "2.7.6" 57 | } 58 | }, 59 | "nbformat": 4, 60 | "nbformat_minor": 0 61 | } 62 | -------------------------------------------------------------------------------- /examples/differentiation.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | from sym import Backend 5 | 6 | 7 | def main(): 8 | for key in 'sympy pysym symengine'.split(): 9 | print(key) 10 | print(' Differentiation:') 11 | be = Backend(key) 12 | x, y = map(be.Symbol, 'x y'.split()) 13 | expr = (x - be.acos(y))*be.exp(x + y) 14 | print(expr) 15 | Dexpr = expr.diff(y) 16 | print(Dexpr) 17 | print("") 18 | 19 | if __name__ == '__main__': 20 | main() 21 | -------------------------------------------------------------------------------- /examples/tests/test_examples.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | import glob 4 | import os 5 | import subprocess 6 | import sys 7 | 8 | import pytest 9 | 10 | 11 | tests = glob.glob(os.path.join(os.path.dirname(__file__), '../*.py')) 12 | 13 | 14 | @pytest.mark.parametrize('pypath', tests) 15 | def test_examples(pypath): 16 | py_exe = 'python3' if sys.version_info.major == 3 else 'python' 17 | p = subprocess.Popen([py_exe, pypath]) 18 | assert p.wait() == 0 # SUCCESS==0 19 | 20 | py_exe = 'python3' if sys.version_info.major == 3 else 'python' 21 | p = subprocess.Popen([py_exe, pypath]) 22 | assert p.wait() == 0 # SUCCESS==0 23 | 
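
# Each example script above (e.g. examples/differentiation.py) is executed in a
# subprocess and a zero exit status is taken as success; for local debugging a
# script can of course also be run directly:
#
#     $ python examples/differentiation.py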
-------------------------------------------------------------------------------- /scripts/benchmark.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | asv run --show-stderr >asv-run.log 3 | asv publish 4 | -------------------------------------------------------------------------------- /scripts/build_conda_recipe.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -ex 2 | # Usage: 3 | # 4 | # $ ./scripts/build_conda_recipe.sh v1.2.3 5 | # 6 | if [[ $1 != v* ]]; then 7 | echo "Argument does not start with 'v'" 8 | exit 1 9 | fi 10 | ./scripts/check_clean_repo_on_master.sh 11 | echo ${1#v}>__conda_version__.txt 12 | trap "rm __conda_version__.txt" EXIT SIGINT SIGTERM 13 | for CPY in {27,34}; do 14 | CONDA_PY=$CPY conda build --no-test conda-recipe 15 | done 16 | -------------------------------------------------------------------------------- /scripts/check_clean_repo_on_master.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | if [[ $(git rev-parse --abbrev-ref HEAD) != master ]]; then 3 | echo "We are not on the master branch. Aborting..." 4 | exit 1 5 | fi 6 | if [[ ! -z $(git status -s) ]]; then 7 | echo "'git status' show there are some untracked/uncommited changes. Aborting..." 8 | exit 1 9 | fi 10 | if grep -e "^v" CHANGES.rst >/dev/null; then 11 | if ! grep -e "^$1" CHANGES.rst >/dev/null; then 12 | >&2 echo "CHANGES.rst does not contain an entry for: $1" 13 | exit 1 14 | fi 15 | else 16 | >&2 echo "CHANGES.rst does not start with v*" 17 | exit 1 18 | fi 19 | -------------------------------------------------------------------------------- /scripts/ci.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -xe 2 | if [[ "$DRONE_BRANCH" =~ ^v[0-9]+.[0-9]?* ]]; then 3 | eval export ${1^^}_RELEASE_VERSION=\$CI_BRANCH 4 | fi 5 | 6 | python3 -m pip install symengine 7 | python3 -m pip install --user .[all] 8 | ./scripts/run_tests.sh 9 | ./scripts/render_notebooks.sh examples/ 10 | ./scripts/generate_docs.sh 11 | 12 | ! grep "DO-NOT-MERGE!" -R . 
--exclude ci.sh 13 | -------------------------------------------------------------------------------- /scripts/coverage_badge.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | """ 5 | This script generates a "coverage" badge as a svg file from 6 | the html report from coverage.py 7 | 8 | Usage: 9 | 10 | $ ./coverage_badge.py htmlcov/ coverage.svg 11 | 12 | """ 13 | 14 | from __future__ import (absolute_import, division, print_function) 15 | import os 16 | 17 | # this template was generated from shields.io on 2015-10-11 18 | template = """ 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 34 | coverage 35 | coverage 36 | {1:s}% 37 | {1:s}% 38 | 39 | 40 | """ 41 | 42 | 43 | def get_coverage(htmldir): 44 | for line in open(os.path.join(htmldir, 'index.html'), 'rt'): 45 | if 'pc_cov' in line: 46 | return int(line.split('pc_cov')[1].split( 47 | '>')[1].split('<')[0].rstrip('%')) 48 | raise ValueError("Could not find pc_cov in index.html") 49 | 50 | 51 | def write_cov_badge_svg(path, percent): 52 | colors = '#e05d44 #fe7d37 #dfb317 #a4a61d #97CA00 #4c1'.split() 53 | limits_le = 50, 60, 70, 80, 90, 100 54 | c = next(clr for lim, clr in zip(limits_le, colors) if percent <= lim) 55 | with open(path, 'wt') as f: 56 | f.write(template.format(c, str(percent))) 57 | 58 | if __name__ == '__main__': 59 | import sys 60 | assert len(sys.argv) == 3 61 | cov_percent = get_coverage(sys.argv[1]) 62 | write_cov_badge_svg(sys.argv[2], cov_percent) 63 | -------------------------------------------------------------------------------- /scripts/dir_to_branch.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -eux 2 | # 3 | # Careful - this script rebases and pushes forcefully! 4 | # 5 | # Remember to set user and email in git: 6 | # 7 | # $ git config --global user.name "First Lastname" 8 | # $ git config --global user.email "first.lastname@email.domain" 9 | # 10 | 11 | UPLOAD_DIR=$1 12 | GITHUB_USER=$2 13 | GITHUB_REPO=$3 14 | OVERWRITE_UPLOAD_BRANCH=$4 15 | OUTPUTDIR=$5 16 | WORKDIR=$(pwd) 17 | git clone --quiet git://github.com/${GITHUB_USER}/${GITHUB_REPO} $OUTPUTDIR > /dev/null 18 | cd $OUTPUTDIR 19 | git checkout --orphan $OVERWRITE_UPLOAD_BRANCH 20 | git rm -rf . > /dev/null 21 | cd $WORKDIR 22 | cp -r ${UPLOAD_DIR}/. $OUTPUTDIR/ 23 | cd $OUTPUTDIR 24 | git add -f . > /dev/null 25 | git commit -m "Lastest docs from successful drone build (hash: ${DRONE_COMMIT})" 26 | #git push -f origin $OVERWRITE_UPLOAD_BRANCH 27 | -------------------------------------------------------------------------------- /scripts/generate_docs.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -xe 2 | # 3 | # Usage: 4 | # 5 | # $ ./scripts/generate_docs.sh 6 | # 7 | # Usage if doc/ is actually published in master branch on github: 8 | # 9 | # $ ./scripts/generate_docs.sh my_github_username my_github_repo master 10 | # 11 | NARGS=$# 12 | PKG=$(find . -maxdepth 2 -name __init__.py -print0 | xargs -0 -n1 dirname | xargs basename) 13 | AUTHOR=$(head -n 1 AUTHORS) 14 | sphinx-apidoc --full --force -A "$AUTHOR" --module-first --doc-version=$(python3 setup.py --version) -F -o doc $PKG/ $PKG/tests/ 15 | #sed -i 's/Contents/.. include:: ..\/README.rst\n\nContents/g' doc/index.rst 16 | echo -e "\n.. 
include:: ../README.rst" >>doc/index.rst 17 | sed -i "s/\('sphinx.ext.viewcode'\)/\1,\n 'sphinx.ext.autosummary',\n 'numpydoc'/g" doc/conf.py 18 | sed -i "s/alabaster/sphinx_rtd_theme/g" doc/conf.py 19 | if [[ $NARGS -eq 3 ]]; then 20 | cat <>doc/conf.py 21 | context = { 22 | 'conf_py_path': '/doc/', 23 | 'github_user': '$1', 24 | 'github_repo': '$2', 25 | 'github_version': '$3', 26 | 'display_github': True, 27 | 'source_suffix': '.rst', 28 | } 29 | 30 | if 'html_context' in globals(): 31 | html_context.update(context) 32 | else: 33 | html_context = context 34 | EOF 35 | fi 36 | echo "numpydoc_class_members_toctree = False" >>doc/conf.py 37 | ABS_REPO_PATH=$(unset CDPATH && cd "$(dirname "$0")/.." && echo $PWD) 38 | ( cd doc; PYTHONPATH=$ABS_REPO_PATH make html ) 39 | -------------------------------------------------------------------------------- /scripts/post_release.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -xeu 2 | # Usage: 3 | # 4 | # $ ./scripts/post_release.sh v1.2.3 githubuser myserver 5 | # 6 | VERSION=${1#v} 7 | GITHUBUSER=$2 8 | SERVER=$3 9 | PKG=$(find . -maxdepth 2 -name __init__.py -print0 | xargs -0 -n1 dirname | xargs basename) 10 | PKG_UPPER=$(echo $PKG | tr '[:lower:]' '[:upper:]') 11 | SDIST_FILE=dist/${PKG}-$VERSION.tar.gz 12 | if [[ ! -f "$SDIST_FILE" ]]; then 13 | >&2 echo "Nonexistent file $SDIST_FILE" 14 | exit 1 15 | fi 16 | SHA256=$(openssl sha256 "$SDIST_FILE" | cut -f2 -d' ') 17 | if [[ -d "dist/conda-recipe-$VERSION" ]]; then 18 | rm -r "dist/conda-recipe-$VERSION" 19 | fi 20 | cp -r conda-recipe/ dist/conda-recipe-$VERSION 21 | sed -i -E \ 22 | -e "s/\{\% set version(.+)/\{\% set version = \"$VERSION\" \%\}\n\{\% set sha256 = \"$SHA256\" \%\}/" \ 23 | -e "s/git_url:(.+)/fn: \{\{ name \}\}-\{\{ version \}\}.tar.gz\n url: https:\/\/pypi.io\/packages\/source\/\{\{ name\[0\] \}\}\/\{\{ name \}\}\/\{\{ name \}\}-\{\{ version \}\}.tar.gz\n sha256: \{\{ sha256 \}\}/" \ 24 | dist/conda-recipe-$VERSION/meta.yaml 25 | 26 | ssh $PKG@$SERVER 'mkdir -p ~/public_html/conda-packages; mkdir -p ~/public_html/conda-recipes' 27 | 28 | # https://github.com/bjodah/anfilte 29 | anfilte-build . 
dist/conda-recipe-$VERSION dist/ 30 | scp dist/noarch/${PKG}-${VERSION}*.bz2 $PKG@$SERVER:~/public_html/conda-packages/ 31 | scp -r dist/conda-recipe-$VERSION/ $PKG@$SERVER:~/public_html/conda-recipes/ 32 | scp "$SDIST_FILE" "$PKG@$SERVER:~/public_html/releases/" 33 | ./scripts/update-gh-pages.sh v$VERSION 34 | -------------------------------------------------------------------------------- /scripts/prepare_deploy.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | touch doc/_build/html/.nojekyll 3 | cp LICENSE doc/_build/html/.nojekyll 4 | git config --global user.name "drone" 5 | git config --global user.email "drone@nohost.com" 6 | mkdir -p deploy/public_html/branches/"${CI_BRANCH}" deploy/script_queue 7 | cp -r dist/* htmlcov/ examples/ doc/_build/html/ deploy/public_html/branches/"${CI_BRANCH}"/ 8 | if bash -c '[[ "$CI_BRANCH" == "master" ]]'; then 9 | sed -e "s/\$1/public_html\/branches\/${CI_BRANCH}\/html/" -e "s/\$2/bjodah/" -e "s/\$3/${CI_REPO}/" -e 's/$4/gh-pages/' deploy/script_queue/gh-pages.sh 10 | chmod +x deploy/script_queue/gh-pages.sh 11 | fi 12 | -------------------------------------------------------------------------------- /scripts/release.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -xeu 2 | # Usage: 3 | # 4 | # $ ./scripts/release.sh v1.2.3 GITHUB_USER GITHUB_REPO 5 | # 6 | 7 | if [[ $1 != v* ]]; then 8 | echo "Argument does not start with 'v'" 9 | exit 1 10 | fi 11 | VERSION=${1#v} 12 | find . -type f -iname "*.pyc" -exec rm {} + 13 | find . -type f -iname "*.o" -exec rm {} + 14 | find . -type f -iname "*.so" -exec rm {} + 15 | find . -type d -name "__pycache__" -exec rmdir {} + 16 | ./scripts/check_clean_repo_on_master.sh 17 | cd $(dirname $0)/.. 18 | # PKG will be name of the directory one level up containing "__init__.py" 19 | PKG=$(find . -maxdepth 2 -name __init__.py -print0 | xargs -0 -n1 dirname | xargs basename) 20 | ! grep --include "*.py" "will_be_missing_in='$VERSION'" -R $PKG/ # see deprecation() 21 | PKG_UPPER=$(echo $PKG | tr '[:lower:]' '[:upper:]') 22 | ./scripts/run_tests.sh 23 | env ${PKG_UPPER}_RELEASE_VERSION=v$VERSION python3 setup.py sdist 24 | if [[ -e ./scripts/generate_docs.sh ]]; then 25 | env ${PKG_UPPER}_RELEASE_VERSION=v$VERSION ./scripts/generate_docs.sh 26 | fi 27 | 28 | # All went well, add a tag and push it. 
29 | git tag -a v$VERSION -m v$VERSION 30 | git push 31 | git push --tags 32 | twine upload dist/${PKG}-$VERSION.tar.gz 33 | 34 | set +x 35 | echo "" 36 | echo " You may now create a new github release at with the tag \"v$VERSION\", here is a link:" 37 | echo " https://github.com/$2/${3:-$PKG}/releases/new " 38 | echo " name the release \"${PKG}-${VERSION}\", and don't foreget to manually attach the file:" 39 | echo " $(openssl sha256 $(pwd)/dist/${PKG}-${VERSION}.tar.gz)" 40 | echo " Then run:" 41 | echo "" 42 | echo " $ ./scripts/post_release.sh $1 $2 " 43 | echo "" 44 | -------------------------------------------------------------------------------- /scripts/render_examples.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | jupyter nbconvert --to=html --debug --ExecutePreprocessor.enabled=True --ExecutePreprocessor.timeout=300 examples/*.ipynb 3 | ./scripts/render_index.sh *.html 4 | -------------------------------------------------------------------------------- /scripts/render_index.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | # 3 | # Usage (assuming: shopt -s extglob): 4 | # 5 | # $ cd examples/ && ../scripts/render_index.sh !(index).html 6 | # 7 | mkdir -p thumbs 8 | tmpdir=$(mktemp -d) 9 | trap "rm -r $tmpdir" INT TERM EXIT 10 | cat <index.html 11 | 12 | 13 | 14 | Notebook gallery 15 | 16 | 17 | EOF 18 | for f in $@; do 19 | img=$(basename $f .html).png 20 | wkhtmltopdf $f $tmpdir/$img # --crop-w 1200px --crop-h 900px 21 | convert $tmpdir/$img -resize 400x300 thumbs/$img 22 | cat <>index.html 23 |

24 |     <a href="$f">
25 |     <img src="thumbs/$img">
26 |     $f
27 |     </a>
28 | EOF 29 | done 30 | cat <>index.html 31 | 32 | 33 | EOF 34 | -------------------------------------------------------------------------------- /scripts/render_notebooks.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | QUIET_EXIT_CODE=0 3 | function quiet_unless_fail { 4 | # suppresses function output unless exit status is != 0 5 | OUTPUT_FILE=$(tempfile) 6 | #/bin/rm --force /tmp/suppress.out 2>/dev/null 7 | EXECMD=${1+"$@"} 8 | $EXECMD > ${OUTPUT_FILE} 2>&1 9 | QUIET_EXIT_CODE=$? 10 | if [ ${QUIET_EXIT_CODE} -ne 0 ]; then 11 | cat ${OUTPUT_FILE} 12 | echo "The following command exited with exit status ${QUIET_EXIT_CODE}: ${EXECMD}" 13 | /bin/rm ${OUTPUT_FILE} 14 | fi 15 | /bin/rm ${OUTPUT_FILE} 16 | } 17 | 18 | if [ -f index.ipynb ]; then 19 | sed -i.bak0 's/ipynb/html/' index.ipynb 20 | sed -i.bak1 's/filepath=index.html/filepath=index.ipynb/' index.ipynb # mybinder link fix 21 | fi 22 | set +e 23 | for dir in $@; do 24 | cd $dir 25 | for fname in *.ipynb; do 26 | echo "rendering ${fname}..." 27 | quiet_unless_fail jupyter nbconvert --debug --to=html --ExecutePreprocessor.enabled=True --ExecutePreprocessor.timeout=300 "${fname}" \ 28 | | grep -v -e "^\[NbConvertApp\] content: {'data':.*'image/png'" 29 | if [ ${QUIET_EXIT_CODE} -ne 0 ]; then 30 | exit ${QUIET_EXIT_CODE} 31 | fi 32 | done 33 | cd - 34 | done 35 | set -e 36 | cd examples/ 37 | ../scripts/render_index.sh *.html 38 | -------------------------------------------------------------------------------- /scripts/run_tests.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -ex 2 | # Usage 3 | # $ ./scripts/run_tests.sh 4 | # or 5 | # $ ./scripts/run_tests.sh --cov sym --cov-report html 6 | SYM_USE_CSE=1 ${PYTHON:-python3} -m pytest sym/tests/test_Lambdify.py 7 | SYM_USE_NUMBA=1 ${PYTHON:-python3} -m pytest sym/tests/test_Lambdify.py 8 | SYM_USE_CSE=1 SYM_USE_NUMBA=1 ${PYTHON:-python3} -m pytest sym/tests/test_Lambdify.py 9 | ${PYTHON:-python3} -m pytest --doctest-modules $@ # --flake8 10 | ${PYTHON:-python3} -m doctest README.rst 11 | -------------------------------------------------------------------------------- /scripts/update-gh-pages.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -x 2 | # 3 | # Usage: 4 | # 5 | # $ ./scripts/update-gh-pages.sh v0.6.0 origin 6 | # 7 | 8 | tag=${1:-master} 9 | remote=${2:-origin} 10 | 11 | ori_branch=$(git rev-parse --symbolic-full-name --abbrev-ref HEAD) 12 | tmpdir=$(mktemp -d) 13 | cleanup() { 14 | rm -r $tmpdir 15 | } 16 | trap cleanup INT TERM 17 | 18 | cp -r doc/_build/html/ $tmpdir 19 | git ls-files --others | tar cf $tmpdir/untracked.tar -T - 20 | if [[ -d .gh-pages-skeleton ]]; then 21 | cp -r .gh-pages-skeleton $tmpdir 22 | fi 23 | 24 | git fetch $remote 25 | git checkout gh-pages 26 | if [[ $? -ne 0 ]]; then 27 | git checkout --orphan gh-pages 28 | if [[ $? -ne 0 ]]; then 29 | >&2 echo "Failed to switch to 'gh-pages' branch." 
30 | cleanup 31 | exit 1 32 | fi 33 | preexisting=0 34 | else 35 | preexisting=1 36 | git pull 37 | fi 38 | 39 | if [[ $preexisting == 1 ]]; then 40 | while [[ "$(git log -1 --pretty=%B)" == Volatile* ]]; do 41 | # overwrite previous docs 42 | git reset --hard HEAD~1 43 | done 44 | else 45 | git reset --hard 46 | fi 47 | 48 | git clean -xfd 49 | if [[ $preexisting == 1 ]]; then 50 | mv v*/ $tmpdir 51 | git rm -rf * > /dev/null 52 | fi 53 | cp -r $tmpdir/html/ $tag 54 | if [[ $preexisting == 1 ]]; then 55 | mv $tmpdir/v*/ . 56 | fi 57 | if [[ -d $tmpdir/.gh-pages-skeleton ]]; then 58 | cp -r $tmpdir/.gh-pages-skeleton/. . 59 | fi 60 | if [[ "$tag" == v* ]]; then 61 | if [[ -L latest ]]; then 62 | rm latest 63 | fi 64 | ln -s $tag latest 65 | commit_msg="Release docs for $tag" 66 | else 67 | if [[ $preexisting == 1 ]]; then 68 | commit_msg="Volatile ($tag) docs" 69 | else 70 | commit_msg="Initial commit" 71 | fi 72 | fi 73 | git add -f . >/dev/null 74 | git commit -m "$commit_msg" 75 | if [[ $preexisting == 1 ]]; then 76 | git push -f $remote gh-pages 77 | else 78 | git push --set-upstream $remote gh-pages 79 | fi 80 | git checkout $ori_branch 81 | tar xf $tmpdir/untracked.tar 82 | cleanup 83 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [tool:pytest] 2 | norecursedirs = .git .cache scripts build dist conda-recipe benchmarks __pycache__ doc 3 | flake8-max-line-length=119 4 | flake8-ignore = 5 | __init__.py UnusedImport 6 | doc/conf.py ALL 7 | 8 | [bdist_wheel] 9 | universal=1 10 | 11 | # https://github.com/pytest-dev/pytest/issues/1445 12 | [easy_install] 13 | zip_ok = 0 14 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | import sys 5 | import io 6 | from itertools import chain 7 | import os 8 | import shutil 9 | import warnings 10 | 11 | from setuptools import setup 12 | 13 | 14 | pkg_name = 'sym' 15 | url = 'https://github.com/bjodah/' + pkg_name 16 | license = 'BSD' 17 | 18 | SYM_RELEASE_VERSION = os.environ.get('SYM_RELEASE_VERSION', '') # v* 19 | 20 | # http://conda.pydata.org/docs/build.html#environment-variables-set-during-the-build-process 21 | if os.environ.get('CONDA_BUILD', '0') == '1': 22 | try: 23 | SYM_RELEASE_VERSION = 'v' + open( 24 | '__conda_version__.txt', 'rt').readline().rstrip() 25 | except IOError: 26 | pass 27 | 28 | 29 | def _path_under_setup(*args): 30 | return os.path.join(os.path.dirname(__file__), *args) 31 | 32 | release_py_path = _path_under_setup(pkg_name, '_release.py') 33 | 34 | if len(SYM_RELEASE_VERSION) > 0: 35 | if SYM_RELEASE_VERSION[0] == 'v': 36 | TAGGED_RELEASE = True 37 | __version__ = SYM_RELEASE_VERSION[1:] 38 | else: 39 | raise ValueError("Ill formated version") 40 | else: 41 | TAGGED_RELEASE = False 42 | # read __version__ attribute from _release.py: 43 | exec(open(release_py_path).read()) 44 | 45 | 46 | classifiers = [ 47 | "Development Status :: 3 - Alpha", 48 | 'License :: OSI Approved :: BSD License', 49 | 'Operating System :: OS Independent', 50 | 'Topic :: Scientific/Engineering', 51 | 'Topic :: Scientific/Engineering :: Mathematics', 52 | ] 53 | 54 | tests = [ 55 | 'sym.tests', 56 | ] 57 | 58 | with open(_path_under_setup(pkg_name, '__init__.py'), 'rt') as f: 59 | short_description = f.read().split('"""')[1].split('\n')[1] 
60 | if not 10 < len(short_description) < 255: 61 | warnings.warn("Short description from __init__.py proably not read correctly") 62 | long_descr = io.open(_path_under_setup('README.rst'), encoding='utf-8').read() 63 | if not len(long_descr) > 100: 64 | warnings.warn("Long description from README.rst probably not read correctly.") 65 | _author, _author_email = open(_path_under_setup('AUTHORS'), 'rt').readline().split('<') 66 | 67 | extras_req = { 68 | 'symbolic': ['sympy>=1.0', 'pysym', 'symcxx>=0.1.10'], # use conda for symengine 69 | 'docs': ['Sphinx', 'sphinx_rtd_theme', 'numpydoc'], 70 | 'testing': ['pytest', 'pytest-cov', 'pytest-flakes', 'pytest-pep8'] 71 | } 72 | if sys.version_info[0] > 2: 73 | extras_req['symbolic'].append('diofant') 74 | extras_req['all'] = list(chain(extras_req.values())) 75 | 76 | 77 | setup_kwargs = dict( 78 | name=pkg_name, 79 | version=__version__, 80 | description=short_description, 81 | long_description=long_descr, 82 | classifiers=classifiers, 83 | author=_author, 84 | author_email=_author_email.split('>')[0].strip(), 85 | url=url, 86 | license=license, 87 | packages=[pkg_name] + tests, 88 | install_requires=['numpy'], 89 | extras_require=extras_req 90 | ) 91 | 92 | if __name__ == '__main__': 93 | try: 94 | if TAGGED_RELEASE: 95 | # Same commit should generate different sdist 96 | # depending on tagged version (set $SYM_RELEASE_VERSION) 97 | # e.g.: $ SYM_RELEASE_VERSION=v1.2.3 python setup.py sdist 98 | # this will ensure source distributions contain the correct version 99 | shutil.move(release_py_path, release_py_path+'__temp__') 100 | open(release_py_path, 'wt').write( 101 | "__version__ = '{}'\n".format(__version__)) 102 | setup(**setup_kwargs) 103 | finally: 104 | if TAGGED_RELEASE: 105 | shutil.move(release_py_path+'__temp__', release_py_path) 106 | -------------------------------------------------------------------------------- /sym/__init__.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | Unified wrapper for symbolic manipulation libraries in Python. 4 | """ 5 | 6 | from __future__ import (absolute_import, division, print_function) 7 | 8 | from ._release import __version__ 9 | from .backend import Backend 10 | -------------------------------------------------------------------------------- /sym/_release.py: -------------------------------------------------------------------------------- 1 | __version__ = '0.4.0.dev0+git' 2 | -------------------------------------------------------------------------------- /sym/_sympy_Lambdify.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from __future__ import (absolute_import, division, print_function) 3 | 4 | import math 5 | import os 6 | from functools import reduce 7 | from operator import mul 8 | 9 | import numpy as np # Lambdify requires numpy 10 | import warnings 11 | 12 | class _Lambdify(object): 13 | """ See docstring of symengine.Lambdify """ 14 | # Note that this is a reimplementation of symengine.Lambdify. 15 | # If any modifications are to be made, they need to be implemented 16 | # in symengine.Lambdify *first*, and then reimplemented here. 
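    #
    # Illustrative usage (a sketch assuming SymPy and NumPy are installed;
    # this class is normally reached via ``sym.Backend('sympy').Lambdify``):
    #
    #   >>> import sympy
    #   >>> x, y = sympy.symbols('x y')
    #   >>> lmb = _Lambdify([x, y], [x + y, x*y])
    #   >>> lmb([2.0, 3.0])
    #   array([5., 6.])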
17 | 18 | def __init__(self, args, *exprs, **kwargs): 19 | real = kwargs.pop('real', True) 20 | order = kwargs.pop('order', 'C') 21 | module = kwargs.pop('module', 'numpy') 22 | use_numba = kwargs.pop('use_numba', None) 23 | cse = kwargs.pop('cse', os.environ.get('SYM_USE_CSE', '0') == '1') 24 | backend = kwargs.pop('backend', 'sympy') 25 | self._backend = __import__(backend) 26 | self.args = np.asanyarray(args) 27 | self.args_size = self.args.size 28 | self.exprs = tuple(np.asanyarray(expr) for expr in exprs) 29 | self.out_shapes = [expr.shape for expr in self.exprs] 30 | self.n_exprs = len(self.exprs) 31 | 32 | out_sizes, self.accum_out_sizes = [], [] 33 | self.tot_out_size = 0 34 | for idx, shape in enumerate(self.out_shapes): 35 | out_sizes.append(reduce(mul, shape or (1,))) 36 | self.tot_out_size += out_sizes[idx] 37 | for i in range(self.n_exprs + 1): 38 | self.accum_out_sizes.append(0) 39 | for j in range(i): 40 | self.accum_out_sizes[i] += out_sizes[j] 41 | 42 | args_, outs_ = [], [] 43 | self.order = order 44 | for arg in np.ravel(self.args, order=self.order): 45 | args_.append(self._backend.sympify(arg)) 46 | 47 | for curr_expr in self.exprs: 48 | if curr_expr.ndim == 0: 49 | outs_.append(self._backend.sympify(curr_expr.item())) 50 | else: 51 | for e in np.ravel(curr_expr, order=self.order): 52 | outs_.append(self._backend.sympify(e)) 53 | 54 | self.real = real 55 | self.dtype = kwargs.pop('dtype', np.float64 if self.real else np.complex128) 56 | if use_numba is None and module == 'numpy': 57 | _true = ('1', 't', 'true') 58 | use_numba = os.environ.get('SYM_USE_NUMBA', '0').lower() in _true 59 | elif use_numba and module != 'numpy': 60 | raise ValueError("Numba only available when using numpy as module.") 61 | self.use_numba = use_numba 62 | self._callback = _callback_factory(args_, outs_, module, self.dtype, 63 | self.order, self.use_numba, backend, cse=cse) 64 | if kwargs: 65 | warnings.warn("unused keywords: %s" % ", ".join(kwargs)) 66 | 67 | def __call__(self, inp, out=None): 68 | try: 69 | inp = np.asanyarray(inp, dtype=self.dtype) 70 | except TypeError: 71 | inp = np.fromiter(inp, dtype=self.dtype) 72 | 73 | if inp.size < self.args_size or inp.size % self.args_size != 0: 74 | raise ValueError("Broadcasting failed (input/arg size mismatch)") 75 | nbroadcast = inp.size // self.args_size 76 | 77 | if inp.ndim > 1: 78 | if self.args_size > 1: 79 | if self.order == 'C': 80 | if inp.shape[inp.ndim-1] != self.args_size: 81 | raise ValueError(("C order implies last dim (%d) == len(args)" 82 | " (%d)") % (inp.shape[inp.ndim-1], self.args_size)) 83 | extra_dim = inp.shape[:inp.ndim-1] 84 | elif self.order == 'F': 85 | if inp.shape[0] != self.args_size: 86 | raise ValueError("F order implies first dim (%d) == len(args) (%d)" 87 | % (inp.shape[0], self.args_size)) 88 | extra_dim = inp.shape[1:] 89 | else: 90 | extra_dim = inp.shape 91 | else: 92 | if nbroadcast > 1 and inp.ndim == 1: 93 | extra_dim = (nbroadcast,) # special case 94 | else: 95 | extra_dim = () 96 | extra_left = extra_dim if self.order == 'C' else () 97 | extra_right = () if self.order == 'C' else extra_dim 98 | new_out_shapes = [extra_left + out_shape + extra_right 99 | for out_shape in self.out_shapes] 100 | 101 | new_tot_out_size = nbroadcast * self.tot_out_size 102 | if out is None: 103 | out = np.empty(new_tot_out_size, dtype=self.dtype, order=self.order) 104 | else: 105 | if out.size < new_tot_out_size: 106 | raise ValueError("Incompatible size of output argument") 107 | if out.ndim > 1: 108 | if 
len(self.out_shapes) > 1: 109 | raise ValueError("output array with ndim > 1 assumes one output") 110 | out_shape, = self.out_shapes 111 | if self.order == 'C': 112 | if not out.flags['C_CONTIGUOUS']: 113 | raise ValueError("Output argument needs to be C-contiguous") 114 | if out.shape[-len(out_shape):] != tuple(out_shape): 115 | raise ValueError("shape mismatch for output array") 116 | elif self.order == 'F': 117 | if not out.flags['F_CONTIGUOUS']: 118 | raise ValueError("Output argument needs to be F-contiguous") 119 | if out.shape[:len(out_shape)] != tuple(out_shape): 120 | raise ValueError("shape mismatch for output array") 121 | else: 122 | if not out.flags['F_CONTIGUOUS']: # or C_CONTIGUOUS (ndim <= 1) 123 | raise ValueError("Output array need to be contiguous") 124 | if not out.flags['WRITEABLE']: 125 | raise ValueError("Output argument needs to be writeable") 126 | out = out.ravel(order=self.order) 127 | 128 | inp = np.ascontiguousarray(inp.ravel(order=self.order)) 129 | res_exprs = self._callback(inp if nbroadcast == 1 else inp.reshape( 130 | (nbroadcast, inp.size//nbroadcast) 131 | )) 132 | assert len(res_exprs) == self.tot_out_size 133 | for idx, res in enumerate(res_exprs): 134 | out.flat[idx::self.tot_out_size] = res 135 | 136 | if self.order == 'C': 137 | out = out.reshape((nbroadcast, self.tot_out_size), order='C') 138 | result = [ 139 | out[:, self.accum_out_sizes[idx]:self.accum_out_sizes[idx+1]].reshape( 140 | new_out_shapes[idx], order='C') for idx in range(self.n_exprs) 141 | ] 142 | elif self.order == 'F': 143 | out = out.reshape((self.tot_out_size, nbroadcast), order='F') 144 | result = [ 145 | out[self.accum_out_sizes[idx]:self.accum_out_sizes[idx+1], :].reshape( 146 | new_out_shapes[idx], order='F') for idx in range(self.n_exprs) 147 | ] 148 | if self.n_exprs == 1: 149 | return result[0] 150 | else: 151 | return result 152 | 153 | 154 | def mk_func(v): 155 | def prnt(self, e): 156 | return '%s(%s)' % (v, ', '.join(self._print(a) for a in e.args)) 157 | return prnt 158 | 159 | 160 | def _callback_factory(args, flat_exprs, module, dtype, order, use_numba=False, backend='sympy', cse=False): 161 | if module == 'numpy': 162 | TRANSLATIONS = { 163 | "acos": "arccos", 164 | "acosh": "arccosh", 165 | "arg": "angle", 166 | "asin": "arcsin", 167 | "asinh": "arcsinh", 168 | "atan": "arctan", 169 | "atan2": "arctan2", 170 | "atanh": "arctanh", 171 | "ceiling": "ceil", 172 | "E": "e", 173 | "im": "imag", 174 | "ln": "log", 175 | "Mod": "mod", 176 | "oo": "inf", 177 | "re": "real", 178 | "SparseMatrix": "array", 179 | "ImmutableSparseMatrix": "array", 180 | "Matrix": "array", 181 | "MutableDenseMatrix": "array", 182 | "ImmutableDenseMatrix": "array", 183 | "__NONEXISTANT__": "inf", 184 | "sign": "sign", 185 | } 186 | NumPyPrinter = __import__(backend + '.printing.lambdarepr', 187 | fromlist=['NumPyPrinter']).NumPyPrinter 188 | 189 | class MyPrinter(NumPyPrinter): 190 | pass 191 | 192 | for k, v in TRANSLATIONS.items(): 193 | setattr(MyPrinter, '_print_%s' % k, mk_func(v)) 194 | 195 | p = MyPrinter() 196 | 197 | def lambdarepr(_x): 198 | return p.doprint(_x) 199 | else: 200 | lambdarepr = __import__(backend + '.printing.lambdarepr', 201 | fromlist=['lambdarepr']).lambdarepr 202 | if module == 'mpmath': 203 | TRANSLATIONS = { 204 | "Abs": "fabs", 205 | "elliptic_k": "ellipk", 206 | "elliptic_f": "ellipf", 207 | "elliptic_e": "ellipe", 208 | "elliptic_pi": "ellippi", 209 | "ceiling": "ceil", 210 | "chebyshevt": "chebyt", 211 | "chebyshevu": "chebyu", 212 | "E": "e", 213 | "I": "j", 
214 | "ln": "log", 215 | # "lowergamma":"lower_gamma", 216 | "oo": "inf", 217 | # "uppergamma":"upper_gamma", 218 | "LambertW": "lambertw", 219 | "MutableDenseMatrix": "matrix", 220 | "ImmutableDenseMatrix": "matrix", 221 | "conjugate": "conj", 222 | "dirichlet_eta": "altzeta", 223 | "Ei": "ei", 224 | "Shi": "shi", 225 | "Chi": "chi", 226 | "Si": "si", 227 | "Ci": "ci", 228 | "sign": "sign", 229 | } 230 | 231 | elif module == 'sympy': 232 | TRANSLATIONS = {} 233 | else: 234 | raise NotImplementedError("Lambdify does not yet support %s" % module) 235 | 236 | ordering = '..., %d' # if order == 'C' else '%d, ...' 237 | mod = __import__(backend) 238 | indices = [mod.Symbol(ordering % i) for i in range(len(args))] 239 | dummy_subs = dict(zip(args, [mod.Symbol('x[%s]' % i) for i in indices])) 240 | dummified = [expr.xreplace(dummy_subs) for expr in flat_exprs] 241 | body = [] 242 | if cse is True: 243 | cse = mod.cse 244 | if cse: 245 | cses, dummified = cse(dummified) 246 | else: 247 | cses = () 248 | for s, e in cses: 249 | body.append("{} = {}".format(s, lambdarepr(e))) 250 | body += ["return " + lambdarepr(dummified)] 251 | mod = __import__(module) 252 | namespace = mod.__dict__.copy() 253 | 254 | # NumPyPrinter incomplete: https://github.com/sympy/sympy/issues/11023 255 | for k, v in TRANSLATIONS.items(): 256 | namespace[k] = namespace[v] 257 | 258 | if module != 'mpmath': 259 | namespace['Abs'] = abs 260 | 261 | namespace['numpy'] = np 262 | namespace['math'] = math 263 | # namespace['_transpose'] = _transpose 264 | exec("""def _SYM_generated(x): 265 | {} 266 | """.format("\n ".join(body)), namespace) 267 | func = namespace['_SYM_generated'] 268 | if use_numba: 269 | from numba import jit 270 | func = jit(func) 271 | if module == 'numpy': 272 | def wrapper(x): 273 | arg = np.atleast_1d(np.asanyarray(x, dtype=dtype)) 274 | res = func(arg) 275 | return res 276 | else: 277 | wrapper = func 278 | wrapper.__doc__ = "\n ".join(body) + '\n\n' 279 | return wrapper 280 | -------------------------------------------------------------------------------- /sym/backend.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from __future__ import (absolute_import, division, print_function) 3 | 4 | import os 5 | import sys 6 | import numpy as np 7 | 8 | from .util import banded_jacobian, sparse_jacobian_csc, sparse_jacobian_csr 9 | 10 | 11 | def _DenseMatrix(be, *args, **kwargs): 12 | if len(args) == 1: 13 | return be.Matrix(len(args[0]), 1, args[0], **kwargs) 14 | else: 15 | nr, nc, elems = args 16 | return be.Matrix(nr, nc, elems, **kwargs) 17 | 18 | 19 | class _Base(object): 20 | 21 | def __getattr__(self, key): 22 | return getattr(self.__sym_backend__, key) 23 | 24 | def banded_jacobian(self, exprs, dep, ml, mu): 25 | """ Wraps Matrix around result of .util.banded_jacobian """ 26 | exprs = banded_jacobian(exprs, dep, ml, mu) 27 | return self.Matrix(ml+mu+1, len(dep), list(exprs.flat)) 28 | 29 | def sparse_jacobian_csc(self, exprs, dep): 30 | """ Wraps Matrix/ndarray around results of .util.sparse_jacobian_csc """ 31 | jac_exprs, colptrs, rowvals = sparse_jacobian_csc(exprs, dep) 32 | nnz = len(jac_exprs) 33 | return ( 34 | self.Matrix(1, nnz, jac_exprs), 35 | np.asarray(colptrs, dtype=int), 36 | np.asarray(rowvals, dtype=int) 37 | ) 38 | 39 | def sparse_jacobian_csr(self, exprs, dep): 40 | """ Wraps Matrix/ndarray around results of .util.sparse_jacobian_csr """ 41 | jac_exprs, rowptrs, colvals = sparse_jacobian_csr(exprs, dep) 42 | nnz = 
len(jac_exprs) 43 | return ( 44 | self.Matrix(1, nnz, jac_exprs), 45 | np.asarray(rowptrs, dtype=int), 46 | np.asarray(colvals, dtype=int) 47 | ) 48 | 49 | 50 | class _SymPy(_Base): 51 | 52 | def __init__(self): 53 | self.__sym_backend__ = __import__('sympy') 54 | from ._sympy_Lambdify import _Lambdify 55 | self.Lambdify = _Lambdify 56 | 57 | def real_symarray(self, prefix, shape): 58 | return self.symarray(prefix, shape, real=True) 59 | 60 | DenseMatrix = _DenseMatrix 61 | 62 | 63 | class _SymPySymEngine(_SymPy): 64 | 65 | def __init__(self): 66 | self.__sym_backend__ = __import__('sympy') 67 | from symengine import Lambdify 68 | self.Lambdify = Lambdify 69 | 70 | 71 | class _Diofant(_SymPy): 72 | 73 | def __init__(self): 74 | self.__sym_backend__ = __import__('diofant') 75 | from ._sympy_Lambdify import _Lambdify 76 | 77 | class DiofantLambdify(_Lambdify): 78 | def __init__(self, args, *exprs, **kwargs): 79 | kwargs['backend'] = 'diofant' 80 | super().__init__(args, *exprs, **kwargs) 81 | 82 | self.Lambdify = DiofantLambdify 83 | 84 | DenseMatrix = _DenseMatrix 85 | 86 | __sym_backend_name__ = 'diofant' 87 | 88 | 89 | class _SymEngine(_Base): 90 | 91 | _dummy_counter = [0] 92 | 93 | def __init__(self): 94 | self.__sym_backend__ = __import__('symengine') 95 | # cse isn't in any symengine release yet; only in dev version 96 | # this will allow backend use with older symengine versions, 97 | # failing gracefully only if cse is invoked 98 | self._cse = getattr(self.__sym_backend__, 'cse', None) 99 | 100 | def Matrix(self, *args, **kwargs): 101 | return self.DenseMatrix(*args, **kwargs) 102 | 103 | def real_symarray(self, prefix, shape): 104 | return self.symarray(prefix, shape) 105 | 106 | def Dummy(self): 107 | self._dummy_counter[0] += 1 108 | return self.Symbol('Dummy_'+str(self._dummy_counter[0] - 1)) 109 | 110 | def numbered_symbols(self, prefix='x', cls=None, start=0, exclude=None, *args): 111 | exclude = set(exclude or []) 112 | if cls is None: 113 | cls = self.Symbol 114 | 115 | while True: 116 | name = '%s%s' % (prefix, start) 117 | s = cls(name, *args) 118 | if s not in exclude: 119 | yield s 120 | start += 1 121 | 122 | def cse(self, exprs, symbols=None): 123 | # symengine's cse, but augmented with custom cse symbols ala sympy 124 | if self._cse is None: 125 | raise NotImplementedError("CSE not yet supported in symengine version %s" % 126 | self.__sym_backend__.__version__) 127 | if symbols is None: 128 | symbols = self.numbered_symbols() 129 | else: 130 | symbols = iter(symbols) 131 | 132 | old_repl, old_reduced = self._cse(exprs) 133 | if not old_repl: 134 | return old_repl, old_reduced 135 | old_cse_symbols, old_cses = zip(*old_repl) 136 | cse_symbols = [next(symbols) for _ in range(len(old_cse_symbols))] 137 | subsd = dict(zip(old_cse_symbols, cse_symbols)) 138 | 139 | cses = [c.xreplace(subsd) for c in old_cses] 140 | reduced = [e.xreplace(subsd) for e in old_reduced] 141 | return list(zip(cse_symbols, cses)), reduced 142 | 143 | 144 | class _PySym(_Base): 145 | 146 | def __init__(self): 147 | self.__sym_backend__ = __import__('pysym') 148 | 149 | def real_symarray(self, prefix, shape): 150 | return self.symarray(prefix, shape) 151 | 152 | DenseMatrix = _DenseMatrix 153 | 154 | 155 | class _SymCXX(_Base): 156 | 157 | def __init__(self): 158 | self.__sym_backend__ = __import__('symcxx').NameSpace() 159 | 160 | def real_symarray(self, prefix, shape): 161 | return self.symarray(prefix, shape) 162 | 163 | DenseMatrix = _DenseMatrix 164 | 165 | 166 | def Backend(name=None, 
envvar='SYM_BACKEND', default='sympy'): 167 | """ Backend for the underlying symbolic manipulation packages 168 | 169 | Parameters 170 | ---------- 171 | name: str (default: None) 172 | Name of package e.g. 'sympy' 173 | envvar: str (default: 'SYM_BACKEND') 174 | name of environment variable to read name from (when name is ``None``) 175 | default: str 176 | name to use when the environment variable described by ``envvar`` is 177 | unset or empty (default: 'sympy') 178 | 179 | Examples 180 | -------- 181 | >>> be = Backend('sympy') # or e.g. 'symengine' 182 | >>> x, y = map(be.Symbol, 'xy') 183 | >>> exprs = [x + y + 1, x*y**2] 184 | >>> lmb = be.Lambdify([x, y], exprs) 185 | >>> import numpy as np 186 | >>> lmb(np.arange(6.0).reshape((3, 2))) # doctest: +NORMALIZE_WHITESPACE 187 | array([[ 2., 0.], 188 | [ 6., 18.], 189 | [ 10., 100.]]) 190 | 191 | """ 192 | if name is None: 193 | name = os.environ.get(envvar, '') or default 194 | if isinstance(name, _Base): 195 | return name 196 | else: 197 | return Backend.backends[name]() 198 | 199 | 200 | Backend.backends = { 201 | 'sympy': _SymPy, 202 | 'symengine': _SymEngine, 203 | 'sympysymengine': _SymPySymEngine, # uses selected parts from SymEngine to augment SymPy 204 | 'pysym': _PySym, 205 | 'symcxx': _SymCXX, 206 | } 207 | 208 | if sys.version_info[0] > 2: 209 | Backend.backends['diofant'] = _Diofant 210 | -------------------------------------------------------------------------------- /sym/tests/__init__.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from __future__ import (absolute_import, division, print_function) 3 | 4 | import os 5 | from .. import Backend 6 | 7 | if os.environ.get('SYM_STRICT_TESTING', '0')[:1].lower() in ('1', 't'): 8 | AVAILABLE_BACKENDS = list(Backend.backends) 9 | else: 10 | AVAILABLE_BACKENDS = [] 11 | 12 | for k in Backend.backends: 13 | try: 14 | __import__(k) 15 | except ImportError: 16 | pass 17 | else: 18 | AVAILABLE_BACKENDS.append(k) 19 | -------------------------------------------------------------------------------- /sym/tests/test_Dummy.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from __future__ import (absolute_import, division, print_function) 3 | 4 | import pytest 5 | from .. import Backend 6 | from . import AVAILABLE_BACKENDS 7 | 8 | 9 | @pytest.mark.parametrize('key', AVAILABLE_BACKENDS) 10 | def test_Dummy(key): 11 | be = Backend(key) 12 | d0 = be.Dummy() 13 | d1 = be.Dummy() 14 | assert d0 == d0 15 | assert d0 != d1 16 | -------------------------------------------------------------------------------- /sym/tests/test_Lambdify.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from __future__ import (absolute_import, division, print_function) 3 | 4 | from functools import reduce 5 | from operator import add, mul 6 | 7 | import math 8 | import numpy as np 9 | 10 | import pytest 11 | from pytest import raises 12 | from .. import Backend 13 | 14 | # This tests Lambdify (see SymEngine), it offers essentially the same 15 | # functionality as SymPy's lambdify but works for arbitrarily long input 16 | 17 | backends = [] 18 | for bk in Backend.backends.keys(): 19 | try: 20 | _be = Backend(bk) 21 | except ImportError: 22 | continue 23 | 24 | _x = _be.Symbol('x') 25 | try: 26 | _be.Lambdify([_x], [_x**2], order='F') # new signature in symengine 0.4 (?) 
27 | except: 28 | continue 29 | 30 | backends.append(bk) 31 | 32 | 33 | @pytest.mark.parametrize('key', backends) 34 | def test_Lambdify_single_arg(key): 35 | be = Backend(key) 36 | x = be.Symbol('x') 37 | lmb = be.Lambdify([x], [x**2]) 38 | assert np.allclose([4], lmb([2.0])) 39 | 40 | 41 | @pytest.mark.parametrize('key', backends) 42 | def test_Lambdify_Abs(key): 43 | if key == 'symengine': # currently no Abs in symengine.py 44 | return # (see https://github.com/symengine/symengine.py/commit/699d49ef09e2f5262381ae971f723ea4a284f0de) 45 | 46 | be = Backend(key) 47 | x = be.Symbol('x') 48 | lmb = be.Lambdify([x], [be.Abs(x)]) 49 | assert np.allclose([2], lmb([-2.0])) 50 | 51 | 52 | @pytest.mark.parametrize('key', backends) 53 | def test_Lambdify_sign(key): 54 | if key.endswith('symengine'): # currently no sign in symengine.py 55 | return # (see https://github.com/symengine/symengine.py/commit/699d49ef09e2f5262381ae971f723ea4a284f0de) 56 | 57 | be = Backend(key) 58 | x = be.Symbol('x') 59 | lmb = be.Lambdify([x], [be.sign(x)]) 60 | assert np.allclose([1], lmb([3.0])) 61 | assert np.allclose([-1], lmb([-2.0])) 62 | assert np.allclose([0], lmb([0.0])) 63 | 64 | 65 | @pytest.mark.parametrize('key', backends) 66 | def test_Lambdify_matrix(key): 67 | be = Backend(key) 68 | x, y = arr = be.symarray('x', 2) 69 | mat = be.Matrix(2, 2, [x, 1+y, 2*y*x**2, 3]) 70 | lmb = be.Lambdify(arr, mat) 71 | result = lmb([3, 5]) 72 | assert result.shape == (2, 2) 73 | assert np.allclose(result, [[3, 6], [90, 3]]) 74 | 75 | 76 | @pytest.mark.parametrize('key', backends) 77 | def test_Lambdify_jacobian(key): 78 | be = Backend(key) 79 | x = be.Symbol('x') 80 | y = be.Symbol('y') 81 | a = be.Matrix(2, 1, [x+y, y*x**2]) 82 | b = be.Matrix(2, 1, [x, y]) 83 | J = a.jacobian(b) 84 | lmb = be.Lambdify(b, J) 85 | result = lmb([3, 5]) 86 | assert result.shape == (2, 2) 87 | assert np.allclose(result, [[1, 1], [2*3*5, 3**2]]) 88 | 89 | 90 | @pytest.mark.parametrize('key', filter(lambda k: k not in ('pysym',), 91 | backends)) 92 | def test_broadcast(key): # test is from symengine test suite 93 | be = Backend(key) 94 | a = np.linspace(-np.pi, np.pi) 95 | inp = np.vstack((np.cos(a), np.sin(a))).T # 50 rows 2 cols 96 | x, y = be.symbols('x y') 97 | distance = be.Lambdify([x, y], [be.sqrt(x**2 + y**2)]) 98 | dists = distance(inp) 99 | assert np.allclose(distance([inp[0, 0], inp[0, 1]]), [1]) 100 | assert dists.shape == (50, 1) 101 | assert np.allclose(dists, 1) 102 | 103 | 104 | @pytest.mark.parametrize('key', filter(lambda k: k not in ('pysym',), 105 | backends)) 106 | def test_broadcast_shapes(key): # test is from symengine test suite 107 | be = Backend(key) 108 | x, y = be.symbols('x y') 109 | lmb = be.Lambdify([x, y], [x+y, x-y, x/y]) 110 | assert lmb(np.asarray([2, 3])).shape == (3,) 111 | assert lmb(np.asarray([[2, 3]])).shape == (1, 3) 112 | assert lmb(np.asarray([[[2, 3]]])).shape == (1, 1, 3) 113 | assert lmb(np.arange(5*7*6*2).reshape((5, 7, 6, 2))).shape == (5, 7, 6, 3) 114 | 115 | 116 | @pytest.mark.parametrize('key', filter(lambda k: k not in ('pysym', 'symcxx'), 117 | backends)) 118 | def test_broadcast_multiple_extra_dimensions(key): 119 | se = Backend(key) 120 | inp = np.arange(12.).reshape((4, 3, 1)) 121 | x = se.symbols('x') 122 | cb = se.Lambdify([x], [x**2, x**3]) 123 | assert np.allclose(cb([inp[0, 2]]), [4, 8]) 124 | out = cb(inp) 125 | assert out.shape == (4, 3, 1, 2) 126 | out = out.squeeze() 127 | assert abs(out[2, 1, 0] - 7**2) < 1e-14 128 | assert abs(out[2, 1, 1] - 7**3) < 1e-14 129 | assert 
abs(out[-1, -1, 0] - 11**2) < 1e-14 130 | assert abs(out[-1, -1, 1] - 11**3) < 1e-14 131 | 132 | 133 | @pytest.mark.parametrize('key', filter(lambda k: k not in ('pysym',), 134 | backends)) 135 | def test_more_than_255_args(key): 136 | # SymPy's lambdify can handle at most 255 arguments 137 | # this is a proof of concept that this limitation does 138 | # not affect SymEngine's Lambdify class 139 | se = Backend(key) 140 | for n in [130, 257]: 141 | x = se.symarray('x', n) 142 | p, q, r = 17, 42, 13 143 | terms = [i*s for i, s in enumerate(x, p)] 144 | exprs = [reduce(add, terms), r + x[0], -99] 145 | callback = se.Lambdify(x, exprs) 146 | input_arr = np.arange(q, q + n*n, dtype=np.float64).reshape((n, n)) 147 | out = callback(input_arr) 148 | ref = np.empty((n, 3)) 149 | coeffs = np.arange(p, p + n, dtype=np.float64) 150 | for i in range(n): 151 | ref[i, 0] = coeffs.dot(np.arange(q + n*i, q + n*(i+1), dtype=np.float64)) 152 | ref[i, 1] = q + n*i + r 153 | ref[:, 2] = -99 154 | assert np.allclose(out, ref) 155 | 156 | 157 | @pytest.mark.parametrize('key', backends) 158 | def test_Lambdify(key): 159 | se = Backend(key) 160 | n = 7 161 | args = x, y, z = se.symbols('x y z') 162 | l = se.Lambdify(args, [x+y+z, x**2, (x-y)/z, x*y*z]) 163 | assert np.allclose(l(range(n, n+len(args))), 164 | [3*n+3, n**2, -1/(n+2), n*(n+1)*(n+2)]) 165 | 166 | 167 | def _get_2_to_2by2_numpy(se): 168 | args = x, y = se.symbols('x y') 169 | exprs = np.array([[x+y+1.0, x*y], 170 | [x/y, x**y]]) 171 | l = se.Lambdify(args, exprs) 172 | 173 | def check(A, inp): 174 | X, Y = inp 175 | assert abs(A[0, 0] - (X+Y+1.0)) < 1e-15 176 | assert abs(A[0, 1] - (X*Y)) < 1e-15 177 | assert abs(A[1, 0] - (X/Y)) < 1e-15 178 | assert abs(A[1, 1] - (X**Y)) < 1e-13 179 | return l, check 180 | 181 | 182 | @pytest.mark.parametrize('key', backends) 183 | def test_Lambdify_2dim_numpy(key): 184 | se = Backend(key) 185 | lmb, check = _get_2_to_2by2_numpy(se) 186 | for inp in [(5, 7), np.array([5, 7]), [5.0, 7.0]]: 187 | A = lmb(inp) 188 | assert A.shape == (2, 2) 189 | check(A, inp) 190 | 191 | 192 | @pytest.mark.parametrize('key', filter(lambda k: k not in ('pysym',), 193 | backends)) 194 | def test_Lambdify_invalid_args(key): 195 | se = Backend(key) 196 | x = se.Symbol('x') 197 | log = se.Lambdify([x], [se.log(x)]) 198 | div = se.Lambdify([x], [1/x]) 199 | assert math.isnan(log([-1])[0]) 200 | assert math.isinf(-log([0])[0]) 201 | assert math.isinf(div([0])[0]) 202 | assert math.isinf(-div([-0])[0]) 203 | 204 | 205 | def test_Lambdify_mpamath_mpf(): 206 | import mpmath 207 | from mpmath import mpf 208 | mpmath.mp.dps = 30 209 | p0 = [mpf('0.7'), mpf('1.3')] 210 | p1 = [3] 211 | be = Backend('sympy') 212 | x, y, z = map(be.Symbol, 'xyz') 213 | lmb = be.Lambdify([x, y, z], [x*y*z - 1, -1 + be.exp(-y) + be.exp(-z) - 1/x], module='mpmath') 214 | p = np.concatenate((p0, p1)) 215 | lmb(p) 216 | 217 | lmb2 = be.Lambdify([x], [1-x], module='mpmath', dtype=object) 218 | assert 9e-21 < (1 - lmb2(mpf('1e-20'))) < 11e-21 219 | 220 | 221 | def _Lambdify_heterogeneous_output(se): 222 | x, y = se.symbols('x, y') 223 | args = se.DenseMatrix(2, 1, [x, y]) 224 | v = se.DenseMatrix(2, 1, [x**3 * y, (x+1)*(y+1)]) 225 | jac = v.jacobian(args) 226 | exprs = [jac, x+y, v, (x+1)*(y+1)] 227 | lmb = se.Lambdify(args, *exprs) 228 | inp0 = 7, 11 229 | inp1 = 8, 13 230 | inp2 = 5, 9 231 | inp = np.array([inp0, inp1, inp2]) 232 | o_j, o_xpy, o_v, o_xty = lmb(inp) 233 | for idx, (X, Y) in enumerate([inp0, inp1, inp2]): 234 | assert np.allclose(o_j[idx, ...], [[3 * X**2 * 
Y, X**3], 235 | [Y + 1, X + 1]]) 236 | assert np.allclose(o_xpy[idx, ...], [X+Y]) 237 | assert np.allclose(o_v[idx, ...], [[X**3 * Y], [(X+1)*(Y+1)]]) 238 | assert np.allclose(o_xty[idx, ...], [(X+1)*(Y+1)]) 239 | 240 | 241 | @pytest.mark.parametrize('key', filter(lambda k: k not in ('pysym', 'symcxx'), 242 | backends)) 243 | def test_Lambdify_heterogeneous_output(key): 244 | _Lambdify_heterogeneous_output(se=Backend(key)) 245 | 246 | 247 | def _test_Lambdify_scalar_vector_matrix(se): 248 | args = x, y = se.symbols('x y') 249 | vec = se.DenseMatrix([x+y, x*y]) 250 | jac = vec.jacobian(se.DenseMatrix(args)) 251 | f = se.Lambdify(args, x**y, vec, jac) 252 | assert f.n_exprs == 3 253 | s, v, m = f([2, 3]) 254 | assert s == 2**3 255 | assert np.allclose(v, [[2+3], [2*3]]) 256 | assert np.allclose(m, [ 257 | [1, 1], 258 | [3, 2] 259 | ]) 260 | 261 | for inp in [[2, 3, 5, 7], np.array([[2, 3], [5, 7]])]: 262 | s2, v2, m2 = f(inp) 263 | assert np.allclose(s2, [2**3, 5**7]) 264 | assert np.allclose(v2, [ 265 | [[2+3], [2*3]], 266 | [[5+7], [5*7]] 267 | ]) 268 | assert np.allclose(m2, [ 269 | [ 270 | [1, 1], 271 | [3, 2] 272 | ], 273 | [ 274 | [1, 1], 275 | [7, 5] 276 | ] 277 | ]) 278 | 279 | 280 | @pytest.mark.parametrize('key', filter(lambda k: k not in ('pysym', 'symcxx'), 281 | backends)) 282 | def test_Lambdify_scalar_vector_matrix(key): 283 | _test_Lambdify_scalar_vector_matrix(se=Backend(key)) 284 | 285 | 286 | @pytest.mark.parametrize('key', filter(lambda k: k not in ('pysym', 'symcxx'), 287 | backends)) 288 | def test_Lambdify_gh174(key): 289 | # Tests array broadcasting if the expressions form an N-dimensional array 290 | # of say shape (k, l, m) and it contains 'n' arguments (x1, ... xn), then 291 | # if the user provides a Fortran ordered (column-major) input array of shape 292 | # (n, o, p, q), then the returned array will be of shape (k, l, m, o, p, q) 293 | se = Backend(key) 294 | args = x, y = se.symbols('x y') 295 | vec1 = se.DenseMatrix([x, x**2, x**3]) 296 | assert vec1.shape == (3, 1) 297 | assert np.asarray(vec1).shape == (3, 1) 298 | lmb1 = se.Lambdify([x], vec1) 299 | out1 = lmb1(3) 300 | assert out1.shape == (3, 1) 301 | assert np.all(out1 == [[3], [9], [27]]) 302 | assert lmb1([2, 3]).shape == (2, 3, 1) 303 | lmb1.order = 'F' # change order 304 | out1a = lmb1([2, 3]) 305 | assert out1a.shape == (3, 1, 2) 306 | ref1a_squeeze = [[2, 3], 307 | [4, 9], 308 | [8, 27]] 309 | assert np.all(out1a.squeeze() == ref1a_squeeze) 310 | assert out1a.flags['F_CONTIGUOUS'] 311 | assert not out1a.flags['C_CONTIGUOUS'] 312 | 313 | lmb2c = se.Lambdify(args, vec1, x+y, order='C') 314 | lmb2f = se.Lambdify(args, vec1, x+y, order='F') 315 | for out2a in [lmb2c([2, 3]), lmb2f([2, 3])]: 316 | assert np.all(out2a[0] == [[2], [4], [8]]) 317 | assert out2a[0].ndim == 2 318 | assert out2a[1] == 5 319 | assert out2a[1].ndim == 0 320 | inp2b = np.array([ 321 | [2.0, 3.0], 322 | [1.0, 2.0], 323 | [0.0, 6.0] 324 | ]) 325 | raises(ValueError, lambda: (lmb2c(inp2b.T))) 326 | out2c = lmb2c(inp2b) 327 | out2f = lmb2f(np.asfortranarray(inp2b.T)) 328 | assert out2c[0].shape == (3, 3, 1) 329 | assert out2f[0].shape == (3, 1, 3) 330 | for idx, (_x, _y) in enumerate(inp2b): 331 | assert np.all(out2c[0][idx, ...] 
== [[_x], [_x**2], [_x**3]]) 332 | 333 | assert np.all(out2c[1] == [5, 3, 6]) 334 | assert np.all(out2f[1] == [5, 3, 6]) 335 | assert out2c[1].shape == (3,) 336 | assert out2f[1].shape == (3,) 337 | 338 | def _mtx3(_x, _y): 339 | return [[_x**row_idx + _y**col_idx for col_idx in range(3)] 340 | for row_idx in range(4)] 341 | mtx3c = np.array(_mtx3(x, y), order='C') 342 | mtx3f = np.array(_mtx3(x, y), order='F') 343 | lmb3c = se.Lambdify([x, y], x*y, mtx3c, vec1, order='C') 344 | lmb3f = se.Lambdify([x, y], x*y, mtx3f, vec1, order='F') 345 | inp3c = np.array([[2., 3], [3, 4], [5, 7], [6, 2], [3, 1]]) 346 | inp3f = np.asfortranarray(inp3c.T) 347 | raises(ValueError, lambda: (lmb3c(inp3c.T))) 348 | out3c = lmb3c(inp3c) 349 | assert out3c[0].shape == (5,) 350 | assert out3c[1].shape == (5, 4, 3) 351 | assert out3c[2].shape == (5, 3, 1) # user can apply numpy.squeeze if they want to. 352 | for a, b in zip(out3c, lmb3c(np.ravel(inp3c))): 353 | assert np.all(a == b) 354 | 355 | out3f = lmb3f(inp3f) 356 | assert out3f[0].shape == (5,) 357 | assert out3f[1].shape == (4, 3, 5) 358 | assert out3f[2].shape == (3, 1, 5) # user can apply numpy.squeeze if they want to. 359 | for a, b in zip(out3f, lmb3f(np.ravel(inp3f, order='F'))): 360 | assert np.all(a == b) 361 | 362 | for idx, (_x, _y) in enumerate(inp3c): 363 | assert out3c[0][idx] == _x*_y 364 | assert out3f[0][idx] == _x*_y 365 | assert np.all(out3c[1][idx, ...] == _mtx3(_x, _y)) 366 | assert np.all(out3f[1][..., idx] == _mtx3(_x, _y)) 367 | assert np.all(out3c[2][idx, ...] == [[_x], [_x**2], [_x**3]]) 368 | assert np.all(out3f[2][..., idx] == [[_x], [_x**2], [_x**3]]) 369 | 370 | 371 | def _get_Ndim_args_exprs_funcs(order, se): 372 | args = x, y = se.symbols('x y') 373 | 374 | # Higher dimensional inputs 375 | def f_a(index, _x, _y): 376 | a, b, c, d = index 377 | return _x**a + _y**b + (_x+_y)**-d 378 | 379 | nd_exprs_a = np.zeros((3, 5, 1, 4), dtype=object, order=order) 380 | for index in np.ndindex(*nd_exprs_a.shape): 381 | nd_exprs_a[index] = f_a(index, x, y) 382 | 383 | def f_b(index, _x, _y): 384 | a, b, c = index 385 | return b/(_x + _y) 386 | 387 | nd_exprs_b = np.zeros((1, 7, 1), dtype=object, order=order) 388 | for index in np.ndindex(*nd_exprs_b.shape): 389 | nd_exprs_b[index] = f_b(index, x, y) 390 | return args, nd_exprs_a, nd_exprs_b, f_a, f_b 391 | 392 | 393 | @pytest.mark.parametrize('key', filter(lambda k: k not in ('pysym', 'symcxx'), 394 | backends)) 395 | def test_Lambdify_Ndimensional_order_C(key): 396 | se = Backend(key) 397 | args, nd_exprs_a, nd_exprs_b, f_a, f_b = _get_Ndim_args_exprs_funcs(order='C', se=se) 398 | lmb4 = se.Lambdify(args, nd_exprs_a, nd_exprs_b, order='C') 399 | nargs = len(args) 400 | 401 | inp_extra_shape = (3, 5, 4) 402 | inp_shape = inp_extra_shape + (nargs,) 403 | inp4 = np.arange(reduce(mul, inp_shape)*1.0).reshape(inp_shape, order='C') 404 | out4a, out4b = lmb4(inp4) 405 | assert out4a.ndim == 7 406 | assert out4a.shape == inp_extra_shape + nd_exprs_a.shape 407 | assert out4b.ndim == 6 408 | assert out4b.shape == inp_extra_shape + nd_exprs_b.shape 409 | raises(ValueError, lambda: (lmb4(inp4.T))) 410 | for b, c, d in np.ndindex(inp_extra_shape): 411 | _x, _y = inp4[b, c, d, :] 412 | for index in np.ndindex(*nd_exprs_a.shape): 413 | assert np.isclose(out4a[(b, c, d) + index], f_a(index, _x, _y)) 414 | for index in np.ndindex(*nd_exprs_b.shape): 415 | assert np.isclose(out4b[(b, c, d) + index], f_b(index, _x, _y)) 416 | 417 | 418 | @pytest.mark.parametrize('key', filter(lambda k: k not in ('pysym', 
'symcxx'), 419 | backends)) 420 | def test_Lambdify_Ndimensional_order_F(key): 421 | se = Backend(key) 422 | args, nd_exprs_a, nd_exprs_b, f_a, f_b = _get_Ndim_args_exprs_funcs(order='F', se=se) 423 | lmb4 = se.Lambdify(args, nd_exprs_a, nd_exprs_b, order='F') 424 | nargs = len(args) 425 | 426 | inp_extra_shape = (3, 5, 4) 427 | inp_shape = (nargs,)+inp_extra_shape 428 | inp4 = np.arange(reduce(mul, inp_shape)*1.0).reshape(inp_shape, order='F') 429 | out4a, out4b = lmb4(inp4) 430 | assert out4a.ndim == 7 431 | assert out4a.shape == nd_exprs_a.shape + inp_extra_shape 432 | assert out4b.ndim == 6 433 | assert out4b.shape == nd_exprs_b.shape + inp_extra_shape 434 | raises(ValueError, lambda: (lmb4(inp4.T))) 435 | for b, c, d in np.ndindex(inp_extra_shape): 436 | _x, _y = inp4[:, b, c, d] 437 | for index in np.ndindex(*nd_exprs_a.shape): 438 | assert np.isclose(out4a[index + (b, c, d)], f_a(index, _x, _y)) 439 | for index in np.ndindex(*nd_exprs_b.shape): 440 | assert np.isclose(out4b[index + (b, c, d)], f_b(index, _x, _y)) 441 | 442 | 443 | @pytest.mark.parametrize('key', filter(lambda k: k not in ('pysym', 'symcxx'), 444 | backends)) 445 | def test_Lambdify_inp_exceptions(key): 446 | se = Backend(key) 447 | args = x, y = se.symbols('x y') 448 | lmb1 = se.Lambdify([x], x**2) 449 | raises(ValueError, lambda: (lmb1([]))) 450 | assert lmb1(4) == 16 451 | assert np.all(lmb1([4, 2]) == [16, 4]) 452 | 453 | lmb2 = se.Lambdify(args, x**2+y**2) 454 | assert lmb2([2, 3]) == 13 455 | raises(ValueError, lambda: lmb2([])) 456 | raises(ValueError, lambda: lmb2([2])) 457 | raises(ValueError, lambda: lmb2([2, 3, 4])) 458 | assert np.all(lmb2([2, 3, 4, 5]) == [13, 16+25]) 459 | 460 | def _mtx(_x, _y): 461 | return [ 462 | [_x-_y, _y**2], 463 | [_x+_y, _x**2], 464 | [_x*_y, _x**_y] 465 | ] 466 | 467 | mtx = np.array(_mtx(x, y), order='F') 468 | lmb3 = se.Lambdify(args, mtx, order='F') 469 | inp3a = [2, 3] 470 | assert np.all(lmb3(inp3a) == _mtx(*inp3a)) 471 | inp3b = np.array([2, 3, 4, 5, 3, 2, 1, 5]) 472 | for inp in [inp3b, inp3b.tolist(), inp3b.reshape((2, 4), order='F')]: 473 | out3b = lmb3(inp) 474 | assert out3b.shape == (3, 2, 4) 475 | for i in range(4): 476 | assert np.all(out3b[..., i] == _mtx(*inp3b[2*i:2*(i+1)])) 477 | raises(ValueError, lambda: lmb3(inp3b.reshape((4, 2)))) 478 | raises(ValueError, lambda: lmb3(inp3b.reshape((2, 4)).T)) 479 | -------------------------------------------------------------------------------- /sym/tests/test_Matrix.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from __future__ import (absolute_import, division, print_function) 3 | 4 | import numpy as np 5 | import pytest 6 | from .. import Backend 7 | from . 
import AVAILABLE_BACKENDS 8 | 9 | 10 | @pytest.mark.parametrize('key', AVAILABLE_BACKENDS) 11 | def test_Matrix(key): 12 | be = Backend(key) 13 | x = be.Symbol('x') 14 | mat = be.Matrix(2, 2, [x, 1, x**2, 3]) 15 | assert mat[0, 0] == x 16 | assert mat[0, 1] == 1 17 | assert mat[1, 0] == x**2 18 | assert mat[1, 1] == 3 19 | 20 | 21 | @pytest.mark.parametrize('key', AVAILABLE_BACKENDS) 22 | def test_Matrix_jacobian(key): 23 | be = Backend(key) 24 | x = be.Symbol('x') 25 | y = be.Symbol('y') 26 | a = be.Matrix(2, 1, [x+y, y*x**2]) 27 | b = be.Matrix(2, 1, [x, y]) 28 | J = a.jacobian(b) 29 | assert J[0, 0] == 1 30 | assert J[0, 1] == 1 31 | lmb = be.Lambdify([x, y], [J[1, 0], J[1, 1]]) 32 | assert np.allclose(lmb(np.array([3, 5])), [2*3*5, 9]) 33 | -------------------------------------------------------------------------------- /sym/tests/test_Symbol.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from __future__ import (absolute_import, division, print_function) 3 | 4 | import pytest 5 | from .. import Backend 6 | from . import AVAILABLE_BACKENDS 7 | 8 | 9 | @pytest.mark.parametrize('key', AVAILABLE_BACKENDS) 10 | def test_Symbol(key): 11 | be = Backend(key) 12 | x = be.Symbol('x') 13 | y = be.Symbol('y') 14 | assert x != y 15 | assert x == be.Symbol('x') 16 | assert x != be.Symbol('y') 17 | assert x - x == 0 18 | assert x*x == x**2 # this is starting to look like a CAS requirement 19 | -------------------------------------------------------------------------------- /sym/tests/test_banded_jacobian.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from __future__ import (absolute_import, division, print_function) 3 | 4 | import numpy as np 5 | import pytest 6 | 7 | from .. import Backend 8 | from . import AVAILABLE_BACKENDS 9 | 10 | 11 | @pytest.mark.parametrize('key', AVAILABLE_BACKENDS) 12 | def test_banded_jacobian(key): 13 | be = Backend(key) 14 | n = 3 15 | x = be.real_symarray('x', n) 16 | exprs = [-x[0]] + [x[i-1] - x[i] for i in range(1, n-1)] + [x[-2]] 17 | bj = be.banded_jacobian(exprs, x, 1, 0) 18 | cb = be.Lambdify(x, bj) 19 | inp = np.arange(3.0, 3.0 + n) 20 | out = cb(inp) 21 | assert np.allclose(out, [[-1, -1, 0], [1, 1, 0]]) 22 | -------------------------------------------------------------------------------- /sym/tests/test_cse.py: -------------------------------------------------------------------------------- 1 | from .. 
import Backend 2 | import pytest 3 | 4 | 5 | backends = [] 6 | for bk in Backend.backends.keys(): 7 | try: 8 | _be = Backend(bk) 9 | except ImportError: 10 | continue 11 | 12 | _x = _be.Symbol('x') 13 | try: 14 | _be.cse([_x]) 15 | except: 16 | continue 17 | 18 | backends.append(bk) 19 | 20 | 21 | def _inverse_cse(subs_cses, cse_exprs): 22 | subs = dict(subs_cses) 23 | return [expr.subs(subs) for expr in cse_exprs] 24 | 25 | 26 | @pytest.mark.parametrize('key', backends) 27 | def test_basic_cse(key): 28 | be = Backend(key) 29 | x, y = map(be.Symbol, "xy") 30 | exprs = [x**2 + y**2 + 3, be.exp(x**2 + y**2)] 31 | subs_cses, cse_exprs = be.cse(exprs) 32 | subs, cses = zip(*subs_cses) 33 | assert cses[0] == x**2 + y**2 34 | for cse_expr in cse_exprs: 35 | assert x not in cse_expr.atoms() 36 | assert y not in cse_expr.atoms() 37 | assert _inverse_cse(subs_cses, cse_exprs) == exprs 38 | 39 | 40 | @pytest.mark.parametrize('key', backends) 41 | def test_moot_cse(key): 42 | be = Backend(key) 43 | x, y = map(be.Symbol, "xy") 44 | exprs = [x**2 + y**2, y] 45 | subs_cses, cse_exprs = be.cse(exprs) 46 | assert not subs_cses 47 | assert _inverse_cse(subs_cses, cse_exprs) == exprs 48 | 49 | 50 | @pytest.mark.parametrize('key', backends) 51 | def test_cse_with_symbols(key): 52 | be = Backend(key) 53 | x = be.Symbol('x') 54 | exprs = [x**2, 1/(1 + x**2), be.log(x + 2), be.exp(x + 2)] 55 | subs_cses, cse_exprs = be.cse(exprs, symbols=be.numbered_symbols('y')) 56 | subs, cses = zip(*subs_cses) 57 | assert subs[0] == be.Symbol('y0') 58 | assert subs[1] == be.Symbol('y1') 59 | assert _inverse_cse(subs_cses, cse_exprs) == exprs 60 | 61 | 62 | @pytest.mark.parametrize('key', backends) 63 | def test_cse_with_symbols_overlap(key): 64 | be = Backend(key) 65 | x0, x1, y = map(be.Symbol, "x0 x1 y".split()) 66 | exprs = [x0**2, x0**2 + be.exp(y)**2 + 3, x1 * be.exp(y), be.sin(x1 * be.exp(y) + 1)] 67 | subs_cses, cse_exprs = be.cse(exprs) 68 | assert _inverse_cse(subs_cses, cse_exprs) == exprs 69 | -------------------------------------------------------------------------------- /sym/tests/test_sparse_jacobian.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from __future__ import (absolute_import, division, print_function) 3 | 4 | import numpy as np 5 | import pytest 6 | 7 | from .. import Backend 8 | from . 
import AVAILABLE_BACKENDS 9 | 10 | 11 | @pytest.mark.parametrize('key', AVAILABLE_BACKENDS) 12 | def test_sparse_jacobian_csr(key): 13 | be = Backend(key) 14 | n = 3 15 | x = be.real_symarray('x', n) 16 | exprs = [-x[0]] + [x[i-1] - x[i] for i in range(1, n-1)] + [x[-2]] 17 | sj, colptrs, rowvals = be.sparse_jacobian_csc(exprs, x) 18 | cb = be.Lambdify(x, sj) 19 | inp = np.arange(3.0, 3.0 + n) 20 | out = cb(inp) 21 | assert np.allclose(out, [-1, 1, -1, 1]) 22 | assert np.all(colptrs == np.array([0, 2, 4, 4], dtype=int)) 23 | assert np.all(rowvals == np.array([0, 1, 1, 2], dtype=int)) 24 | 25 | 26 | @pytest.mark.parametrize('key', AVAILABLE_BACKENDS) 27 | def test_sparse_jacobian_csc(key): 28 | be = Backend(key) 29 | n = 3 30 | x = be.real_symarray('x', n) 31 | exprs = [-x[0]] + [x[i-1] - x[i] for i in range(1, n-1)] + [x[-2]] 32 | sj, rowptrs, colvals = be.sparse_jacobian_csr(exprs, x) 33 | cb = be.Lambdify(x, sj) 34 | inp = np.arange(3.0, 3.0 + n) 35 | out = cb(inp) 36 | assert np.allclose(out, [-1, 1, -1, 1]) 37 | assert np.all(rowptrs == np.array([0, 1, 3, 4], dtype=int)) 38 | assert np.all(colvals == np.array([0, 0, 1, 1], dtype=int)) 39 | -------------------------------------------------------------------------------- /sym/tests/test_symarray.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from __future__ import (absolute_import, division, print_function) 3 | 4 | import pytest 5 | from .. import Backend 6 | from . import AVAILABLE_BACKENDS 7 | 8 | 9 | @pytest.mark.parametrize('key', AVAILABLE_BACKENDS) 10 | def test_symarray(key): 11 | be = Backend(key) 12 | x0, x1, x2 = be.symarray('x', 3) 13 | 14 | assert be.symarray('x', (3, 2)).shape == (3, 2) 15 | -------------------------------------------------------------------------------- /sym/tests/test_sympy_Lambdify.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from __future__ import (absolute_import, division, print_function) 3 | 4 | from .._sympy_Lambdify import _callback_factory 5 | 6 | from sympy import symbols, atan 7 | import numpy as np 8 | import pytest 9 | 10 | try: 11 | import numba 12 | except ImportError: 13 | numba = None 14 | 15 | 16 | def test_callback_factory(): 17 | args = x, y = symbols('x y') 18 | expr = x + atan(y) 19 | cb = _callback_factory(args, [expr], 'numpy', np.float64, 'C') 20 | inp = np.array([17, 1]) 21 | ref = 17 + np.arctan(1) 22 | assert np.allclose(cb(inp), ref) 23 | 24 | 25 | def test_callback_factory__broadcast(): 26 | args = x, y = symbols('x y') 27 | expr = x + atan(y) 28 | cb = _callback_factory(args, [expr], 'numpy', np.float64, 'C') 29 | inp = np.array([[17, 1], [18, 2]]) 30 | ref = [17 + np.arctan(1), 18 + np.arctan(2)] 31 | assert np.allclose(cb(inp), ref) 32 | 33 | inp2 = np.array([ 34 | [[17, 1], [18, 2]], 35 | [[27, 21], [28, 22]] 36 | ]) 37 | ref2 = [ 38 | [17 + np.arctan(1), 18 + np.arctan(2)], 39 | [27 + np.arctan(21), 28 + np.arctan(22)] 40 | ] 41 | assert np.allclose(cb(inp2), ref2) 42 | 43 | 44 | @pytest.mark.skipif(numba is None, reason='numba not available') 45 | def test_callback_factory__numba(): 46 | args = x, y = symbols('x y') 47 | expr = x + atan(y) 48 | cb = _callback_factory(args, [expr], 'numpy', np.float64, 'C', use_numba=True) 49 | n = 500 50 | inp = np.empty((n, 2)) 51 | inp[:, 0] = np.linspace(0, 1, n) 52 | inp[:, 1] = np.linspace(-10, 10, n) 53 | assert np.allclose(cb(inp), inp[:, 0] + np.arctan(inp[:, 1])) 54 | 
-------------------------------------------------------------------------------- /sym/tests/test_util.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from __future__ import (absolute_import, division, print_function) 3 | 4 | import pytest 5 | from ..util import linear_rref 6 | from .. import Backend 7 | 8 | 9 | @pytest.mark.parametrize('key', ['sympy']) 10 | def test_linear_rref(key): 11 | be = Backend(key) 12 | A1 = be.Matrix([[8, 8, 4], [8, 8, 5], [2, 4, 4]]) 13 | b1 = [80, 88, 52] 14 | A2, b2, colidxs = linear_rref(A1, b1, backend=be) 15 | A2ref = be.Matrix([[1, 0, 0], [0, 1, 0], [0, 0, 1]]) 16 | assert A2 - A2ref == A2*0 17 | assert (b2[0], b2[1], b2[2]) == (2, 4, 8) 18 | assert colidxs == [0, 1, 2] 19 | 20 | 21 | @pytest.mark.parametrize('key', ['sympy']) 22 | def test_linear_rref_aug(key): 23 | be = Backend(key) 24 | x, y, z = map(be.Symbol, 'x y z'.split()) 25 | A1 = be.Matrix([[1, 2, 3], [3, 4, 7], [6, 5, 9]]) 26 | B1a = [[0], [2], [11]] 27 | R1a, B1a, pivots1a = linear_rref(A1, B1a, backend=be) 28 | assert R1a == be.eye(3) and pivots1a == [0, 1, 2] and B1a == be.Matrix([[4], [1], [-2]]) 29 | 30 | B1b = [[x], [y], [z]] 31 | R1b, B1b, pivots1b = linear_rref(A1, B1b, backend=be) 32 | delta1b = B1b - be.Matrix([ 33 | [(x - 3*y + 2*z)/4], 34 | [(15*x - 9*y + 2*z)/4], 35 | [(7*y - 2*z - 9*x)/4] 36 | ]) 37 | delta1b.simplify() 38 | assert R1b == be.eye(3) and pivots1b == [0, 1, 2] and delta1b == be.Matrix([[0], [0], [0]]) 39 | -------------------------------------------------------------------------------- /sym/util.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from __future__ import (absolute_import, division, print_function) 3 | 4 | import numpy as np 5 | 6 | 7 | def banded_jacobian(y, x, ml, mu): 8 | """ Calculates a banded version of the jacobian 9 | 10 | Compatible with the format requested by 11 | :func:`scipy.integrate.ode` (for SciPy >= v0.15). 
12 | 13 | Parameters 14 | ---------- 15 | y: array_like of expressions 16 | x: array_like of symbols 17 | ml: int 18 | number of lower bands 19 | mu: int 20 | number of upper bands 21 | 22 | Returns 23 | ------- 24 | 2D array of shape ``(1+ml+mu, len(x))`` 25 | 26 | """ 27 | ny = len(y) 28 | nx = len(x) 29 | packed = np.zeros((mu+ml+1, nx), dtype=object) 30 | 31 | def set(ri, ci, val): 32 | packed[ri-ci+mu, ci] = val 33 | 34 | for ri in range(ny): 35 | for ci in range(max(0, ri-ml), min(nx, ri+mu+1)): 36 | set(ri, ci, y[ri].diff(x[ci])) 37 | return packed 38 | 39 | 40 | def sparse_jacobian_csc(y, x): 41 | """ Calculates a compressed sparse column (CSC) 42 | version of the jacobian 43 | 44 | Parameters 45 | ---------- 46 | y: array_like of expressions 47 | x: array_like of symbols 48 | 49 | Returns 50 | ------- 51 | jac_exprs: flattened list of expressions for nonzero entries of dy/dx in column-major order 52 | colptrs: list of length ``len(x) + 1``, where ``jac_exprs[colptrs[i]:colptrs[i+1]]`` are 53 | the nonzero entries of column ``i`` in ``dy/dx`` 54 | rowvals: list of length ``len(jac_exprs)``, denoting the row index in ``dy/dx`` for each 55 | entry in ``jac_exprs`` 56 | """ 57 | n = len(x) 58 | try: 59 | # backends with free_symbols and hashable symbols 60 | idx = dict(zip(x, range(n))) 61 | cols = [[] for _ in range(n)] 62 | fs = [yi.free_symbols for yi in y] 63 | for i, fi in enumerate(fs): 64 | for j in sorted(list(map(idx.get, fi))): 65 | cols[j].append(i) 66 | except (AttributeError, TypeError): 67 | # backends without free_symbols or with unhashable symbols 68 | cols = [[i for i, yi in enumerate(y) if yi.has(xj)] for xj in x] 69 | 70 | rowvals = [i for col in cols for i in col] 71 | colptrs = np.cumsum([0] + list(map(len, cols))).astype(int) 72 | jac_exprs = [y[i].diff(xj) for j, xj in enumerate(x) for i in cols[j]] 73 | return jac_exprs, colptrs, rowvals 74 | 75 | 76 | def sparse_jacobian_csr(y, x): 77 | """ Calculates a compressed sparse row (CSR) 78 | version of the jacobian 79 | 80 | Parameters 81 | ---------- 82 | y: array_like of expressions 83 | x: array_like of symbols 84 | 85 | Returns 86 | ------- 87 | jac_exprs: flattened list of expressions for nonzero entries of dy/dx in row-major order 88 | rowptrs: list of length ``len(y) + 1``, where ``jac_exprs[rowptrs[i]:rowptrs[i+1]]`` are 89 | the nonzero entries of row ``i`` in ``dy/dx`` 90 | colvals: list of length ``len(jac_exprs)``, denoting the column index in ``dy/dx`` for each 91 | entry in ``jac_exprs`` 92 | """ 93 | n = len(x) 94 | try: 95 | # backends with free_symbols and hashable symbols 96 | idx = dict(zip(x, range(n))) 97 | rows = [sorted(list(map(idx.get, yi.free_symbols))) for yi in y] 98 | except (AttributeError, TypeError): 99 | # backends without free_symbols or with unhashable symbols 100 | rows = [[j for j, xj in enumerate(x) if yi.has(xj)] for yi in y] 101 | 102 | colvals = [j for row in rows for j in row] 103 | rowptrs = np.cumsum([0] + list(map(len, rows))).astype(int) 104 | jac_exprs = [yi.diff(x[j]) for i, yi in enumerate(y) for j in rows[i]] 105 | return jac_exprs, rowptrs, colvals 106 | 107 | 108 | def check_transforms(fw, bw, symbs): 109 | """ Verify validity of a pair of forward and backward transformations 110 | 111 | Parameters 112 | ---------- 113 | fw: iterable of expressions 114 | forward transformations 115 | bw: iterable of expressions 116 | backward transformations 117 | symbs: iterable of symbols 118 | the variables that are transformed 119 | """ 120 | for f, b, y in zip(fw, bw, symbs): 121 | if f.subs(y, b) - y !=
0: 122 | raise ValueError('Cannot prove correctness (did you set real=True?) fw: %s' 123 | % str(f)) 124 | if b.subs(y, f) - y != 0: 125 | raise ValueError('Cannot prove correctness (did you set real=True?) bw: %s' 126 | % str(b)) 127 | 128 | 129 | def _map2(cb, iterable): 130 | if cb is None: # identity function is assumed 131 | return iterable 132 | else: 133 | return map(cb, iterable) 134 | 135 | 136 | def _map2l(cb, iterable): # Py2-style (eager) map in Py3 137 | return list(_map2(cb, iterable)) 138 | 139 | 140 | def linear_rref(A, b, backend): 141 | """ Transform a linear system to reduced row-echelon form 142 | 143 | Transforms both the matrix and right-hand side of a linear 144 | system of equations to reduced row echelon form 145 | 146 | Parameters 147 | ---------- 148 | A: Matrix-like 149 | iterable of rows 150 | b: iterable 151 | 152 | Returns 153 | ------- 154 | A', b', colidxs - transformed versions of ``A`` and ``b``, and the indices of the pivot columns 155 | 156 | """ 157 | try: 158 | b = b.as_mutable() 159 | except: 160 | b = backend.MutableMatrix(b) 161 | 162 | Aug = A.col_insert(A.cols, backend.eye(A.rows)) 163 | rAug, pivots = Aug.rref() 164 | colidxs = [i for i in pivots if i < A.cols] 165 | b = backend.Matrix(rAug[:, A.cols:]*b) 166 | return rAug[:, :A.cols], b, colidxs 167 | --------------------------------------------------------------------------------
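# A minimal usage sketch of ``linear_rref`` and ``check_transforms`` from sym/util.py,
# assuming the 'sympy' backend is installed; the 2x2 system and the exp/log transform
# pair below are illustrative choices, not taken from the repository's test suite.
from sym import Backend
from sym.util import linear_rref, check_transforms

be = Backend('sympy')
A = be.Matrix([[2, 1], [1, 3]])               # represents 2*x + y = 4
b = [4, 7]                                    #            x + 3*y = 7
R, bp, pivots = linear_rref(A, b, backend=be)
assert R == be.eye(2)                         # reduced to identity => unique solution
assert (bp[0], bp[1]) == (1, 2)               # x = 1, y = 2
assert pivots == [0, 1]

y = be.Symbol('y', real=True)                 # real=True lets exp/log cancel symbolically
check_transforms([be.exp(y)], [be.log(y)], [y])  # raises ValueError if not mutual inverses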