├── .coveragerc ├── .gitignore ├── .travis.yml ├── CHANGELOG.rst ├── LICENSE ├── MANIFEST.in ├── README.rst ├── clean.py ├── covrun.py ├── docs ├── Makefile ├── make.bat └── source │ ├── _ext │ └── sphinxtogithub.py │ ├── _static │ ├── .keep │ └── net.jpg │ ├── _templates │ ├── layout.html │ ├── sidebarintro.html │ └── sidebarlogo.html │ ├── _theme │ └── pulsar │ │ ├── static │ │ └── pulsar.css_t │ │ └── theme.conf │ ├── api │ ├── fields.rst │ ├── index.rst │ ├── models.rst │ └── utility.rst │ ├── backends │ ├── api.rst │ ├── index.rst │ └── redis.rst │ ├── changelog.rst │ ├── conf.py │ ├── examples │ ├── asynchronous.rst │ ├── columnts.rst │ ├── extensions.rst │ ├── index.rst │ ├── json.rst │ ├── local.rst │ ├── performance.rst │ ├── permissions.rst │ ├── primary.rst │ ├── query.rst │ ├── registration.rst │ ├── related.rst │ ├── search.rst │ ├── serialise.rst │ ├── sessions.rst │ ├── sorting.rst │ ├── structures.rst │ ├── ts.py │ ├── tutorial.rst │ ├── twitter.rst │ └── underscore.rst │ ├── faq.rst │ ├── index.rst │ ├── overview.rst │ └── stdnetredis.rst ├── examples ├── __init__.py ├── data.py ├── dynamo.py ├── m2m.py ├── models.py ├── observer.py ├── permissions.py ├── redis.lua ├── spelling │ ├── __init__.py │ └── spelling.py ├── sql.py ├── tsmodels.py └── wordsearch │ ├── __init__.py │ ├── basicwords.py │ └── models.py ├── luatests.lua ├── requirements.txt ├── requirements_dev.txt ├── runtests.py ├── setup.py ├── stdnet ├── __init__.py ├── apps │ ├── __init__.py │ ├── columnts │ │ ├── __init__.py │ │ ├── models.py │ │ ├── npts.py │ │ └── redis.py │ ├── searchengine │ │ ├── __init__.py │ │ ├── models.py │ │ └── processors │ │ │ ├── __init__.py │ │ │ ├── ignore.py │ │ │ ├── metaphone.py │ │ │ └── porter.py │ └── tasks │ │ ├── __init__.py │ │ └── models.py ├── backends │ ├── __init__.py │ └── redisb │ │ ├── __init__.py │ │ ├── client │ │ ├── __init__.py │ │ ├── async.py │ │ ├── client.py │ │ ├── extensions.py │ │ └── prefixed.py │ │ └── lua │ │ ├── columnts │ │ ├── columnts.lua │ │ ├── reduce.lua │ │ ├── runts.lua │ │ └── stats.lua │ │ ├── commands │ │ ├── keyinfo.lua │ │ ├── move2set.lua │ │ ├── timeseries.lua │ │ ├── utils.lua │ │ ├── zdiffstore.lua │ │ └── zpop.lua │ │ ├── numberarray.lua │ │ ├── odm.lua │ │ ├── structures.lua │ │ ├── tabletools.lua │ │ ├── ts.lua │ │ └── where.lua ├── odm │ ├── __init__.py │ ├── base.py │ ├── fields.py │ ├── globals.py │ ├── mapper.py │ ├── models.py │ ├── query.py │ ├── related.py │ ├── search.py │ ├── session.py │ ├── struct.py │ ├── structfields.py │ └── utils.py └── utils │ ├── __init__.py │ ├── dates.py │ ├── encoders.py │ ├── exceptions.py │ ├── fallbacks │ ├── __init__.py │ ├── _collections.py │ ├── _importlib.py │ └── py2 │ │ └── __init__.py │ ├── importer.py │ ├── jsontools.py │ ├── populate.py │ ├── py2py3.py │ ├── skiplist.py │ ├── structures.py │ ├── test.py │ ├── version.py │ └── zset.py └── tests ├── __init__.py ├── all ├── __init__.py ├── apps │ ├── __init__.py │ ├── columnts │ │ ├── __init__.py │ │ ├── evaluate.py │ │ ├── field.py │ │ ├── main.py │ │ ├── manipulate.py │ │ ├── npts.py │ │ ├── readonly.py │ │ └── test1.lua │ └── searchengine │ │ ├── __init__.py │ │ ├── add.py │ │ ├── meta.py │ │ └── search.py ├── backends │ ├── __init__.py │ ├── interface.py │ └── redis │ │ ├── __init__.py │ │ ├── async.py │ │ ├── client.py │ │ ├── info.py │ │ └── prefixed.py ├── benchmarks │ └── __init__.py ├── fields │ ├── __init__.py │ ├── fk.py │ ├── fknotrequired.py │ ├── id.py │ ├── integer.py │ ├── jsonfield.py │ ├── meta.py │ ├── pickle.py │ ├── 
pk.py │ └── scalar.py ├── lib │ ├── __init__.py │ ├── autoincrement.py │ ├── local.py │ ├── me.py │ ├── meta.py │ └── register.py ├── multifields │ ├── __init__.py │ ├── hash.py │ ├── list.py │ ├── set.py │ ├── string.py │ ├── struct.py │ └── timeseries.py ├── query │ ├── __init__.py │ ├── contains.py │ ├── delete.py │ ├── get_field.py │ ├── instruments.py │ ├── load_only.py │ ├── load_related.py │ ├── manager.py │ ├── manytomany.py │ ├── meta.py │ ├── ranges.py │ ├── related.py │ ├── session.py │ ├── signal.py │ ├── slice.py │ ├── sorting.py │ ├── transaction.py │ ├── unique.py │ └── where.py ├── serialize │ ├── __init__.py │ ├── base.py │ ├── csv.py │ └── json.py ├── structures │ ├── __init__.py │ ├── base.py │ ├── hash.py │ ├── list.py │ ├── numarray.py │ ├── set.py │ ├── string.py │ ├── ts.py │ └── zset.py ├── topics │ ├── __init__.py │ ├── finance.py │ ├── observer.py │ ├── permissions.py │ └── twitter.py └── utils │ ├── __init__.py │ ├── intervals.py │ ├── tools.py │ └── zset.py └── lua ├── columnts.lua ├── odm.lua ├── redis.lua ├── rserver.lua └── utils.lua /.coveragerc: -------------------------------------------------------------------------------- 1 | [run] 2 | source = stdnet 3 | omit = 4 | *stdnet/apps/searchengine/processors/metaphone.py 5 | *stdnet/apps/searchengine/processors/porter.py 6 | *stdnet/utils/fallbacks/* 7 | *stdnet/apps/columnts/npts.py 8 | 9 | 10 | [report] 11 | # Regexes for lines to exclude from consideration 12 | exclude_lines = 13 | (?i)# *pragma[: ]*no *cover 14 | raise NotImplementedError -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.pyc 2 | *.egg 3 | *.so 4 | *.prof 5 | *.pyd 6 | *.o 7 | *.def 8 | dist 9 | __pycache__ 10 | extensions/src/cparser.cpp 11 | build 12 | .settings 13 | docs/build 14 | .coverage 15 | htmlcov 16 | htmlprof 17 | .project 18 | .pydevproject 19 | MANIFEST 20 | Include 21 | Lib 22 | Scripts 23 | *~ 24 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: python 2 | 3 | python: 4 | - "2.6" 5 | - "2.7" 6 | - "3.2" 7 | - "3.3" 8 | - "pypy" 9 | 10 | install: 11 | - if [[ $TRAVIS_PYTHON_VERSION == '2.6' ]]; then pip install --use-mirrors argparse unittest2; fi 12 | - pip install -r requirements_dev.txt --use-mirrors 13 | - git clone https://github.com/quantmind/pulsar.git 14 | - cd pulsar 15 | - python setup.py install 16 | - cd .. 17 | - sudo rm -rf pulsar 18 | - python setup.py install 19 | - sudo rm -rf /dev/shm && sudo ln -s /run/shm /dev/shm 20 | 21 | services: 22 | - redis-server 23 | 24 | script: 25 | - pep8 stdnet --exclude stdnet/apps/searchengine/processors 26 | - sudo rm -rf stdnet 27 | - python -m covrun 28 | 29 | notifications: 30 | email: false 31 | 32 | # Only test master and dev 33 | branches: 34 | only: 35 | - master 36 | - dev 37 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (c) 2010-2013, Luca Sbardella 2 | All rights reserved. 
3 | 4 | Redistribution and use in source and binary forms, with or without modification, 5 | are permitted provided that the following conditions are met: 6 | 7 | * Redistributions of source code must retain the above copyright notice, 8 | this list of conditions and the following disclaimer. 9 | * Redistributions in binary form must reproduce the above copyright notice, 10 | this list of conditions and the following disclaimer in the documentation 11 | and/or other materials provided with the distribution. 12 | * Neither the name of the author nor the names of its contributors 13 | may be used to endorse or promote products derived from this software without 14 | specific prior written permission. 15 | 16 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND 17 | ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED 18 | WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 19 | IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, 20 | INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, 21 | BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 22 | DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF 23 | LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE 24 | OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED 25 | OF THE POSSIBILITY OF SUCH DAMAGE. 26 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include LICENSE 2 | include MANIFEST.in 3 | include README.rst 4 | include CHANGELOG.rst 5 | include requirements.txt 6 | include docs/make.bat 7 | include docs/Makefile 8 | include docs/source/conf.py 9 | include stdnet/utils/dispatch/license.txt 10 | recursive-include stdnet/backends/redisb/lua *.lua 11 | recursive-include extensions * 12 | recursive-include docs *.rst 13 | exclude extensions/src/cparser.cpp -------------------------------------------------------------------------------- /clean.py: -------------------------------------------------------------------------------- 1 | import os 2 | import shutil 3 | 4 | def rmgeneric(path, __func__): 5 | try: 6 | __func__(path) 7 | #print 'Removed ', path 8 | return 1 9 | except OSError as e: 10 | print('Could not remove {0}, {1}'.format(path,e)) 11 | return 0 12 | 13 | 14 | def rmfiles(path, ext = None, rmcache = True): 15 | if not os.path.isdir(path): 16 | return 0 17 | trem = 0 18 | tall = 0 19 | files = os.listdir(path) 20 | for f in files: 21 | fullpath = os.path.join(path, f) 22 | if os.path.isfile(fullpath): 23 | sf = f.split('.') 24 | if len(sf) == 2: 25 | if ext == None or sf[1] == ext: 26 | tall += 1 27 | trem += rmgeneric(fullpath, os.remove) 28 | elif f == '__pycache__' and rmcache: 29 | shutil.rmtree(fullpath) 30 | tall += 1 31 | elif os.path.isdir(fullpath): 32 | r,ra = rmfiles(fullpath, ext) 33 | trem += r 34 | tall += ra 35 | return trem, tall 36 | 37 | 38 | 39 | if __name__ == '__main__': 40 | path = os.curdir 41 | removed, allfiles = rmfiles(path,'pyc') 42 | print('removed {0} pyc files out of {1}'.format(removed, allfiles)) 43 | 44 | -------------------------------------------------------------------------------- /covrun.py: -------------------------------------------------------------------------------- 1 | import sys 2 | import os 3 | 4 | from 
runtests import run 5 | 6 | 7 | if __name__ == '__main__': 8 | if sys.version_info > (3, 3): 9 | run(coverage=True, coveralls=True) 10 | else: 11 | run() 12 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 5 | SPHINXOPTS = 6 | SPHINXBUILD = sphinx-build 7 | PAPER = 8 | BUILDDIR = ../../docs/python-stdnet 9 | 10 | # Internal variables. 11 | PAPEROPT_a4 = -D latex_paper_size=a4 12 | PAPEROPT_letter = -D latex_paper_size=letter 13 | ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source 14 | 15 | .PHONY: help clean html dirhtml pickle json htmlhelp qthelp latex changes linkcheck doctest 16 | 17 | help: 18 | @echo "Please use \`make ' where is one of" 19 | @echo " html to make standalone HTML files" 20 | @echo " dirhtml to make HTML files named index.html in directories" 21 | @echo " pickle to make pickle files" 22 | @echo " json to make JSON files" 23 | @echo " htmlhelp to make HTML files and a HTML help project" 24 | @echo " qthelp to make HTML files and a qthelp project" 25 | @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" 26 | @echo " changes to make an overview of all changed/added/deprecated items" 27 | @echo " linkcheck to check all external links for integrity" 28 | @echo " doctest to run all doctests embedded in the documentation (if enabled)" 29 | 30 | clean: 31 | -rm -rf $(BUILDDIR)/* 32 | 33 | html: 34 | $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html 35 | @echo 36 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." 37 | 38 | dirhtml: 39 | $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml 40 | @echo 41 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." 42 | 43 | pickle: 44 | $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle 45 | @echo 46 | @echo "Build finished; now you can process the pickle files." 47 | 48 | json: 49 | $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json 50 | @echo 51 | @echo "Build finished; now you can process the JSON files." 52 | 53 | htmlhelp: 54 | $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp 55 | @echo 56 | @echo "Build finished; now you can run HTML Help Workshop with the" \ 57 | ".hhp project file in $(BUILDDIR)/htmlhelp." 58 | 59 | qthelp: 60 | $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp 61 | @echo 62 | @echo "Build finished; now you can run "qcollectiongenerator" with the" \ 63 | ".qhcp project file in $(BUILDDIR)/qthelp, like this:" 64 | @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/PythonStdNet.qhcp" 65 | @echo "To view the help file:" 66 | @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/PythonStdNet.qhc" 67 | 68 | latex: 69 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 70 | @echo 71 | @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." 72 | @echo "Run \`make all-pdf' or \`make all-ps' in that directory to" \ 73 | "run these through (pdf)latex." 74 | 75 | changes: 76 | $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes 77 | @echo 78 | @echo "The overview file is in $(BUILDDIR)/changes." 79 | 80 | linkcheck: 81 | $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck 82 | @echo 83 | @echo "Link check complete; look for any errors in the above output " \ 84 | "or in $(BUILDDIR)/linkcheck/output.txt." 
85 | 86 | doctest: 87 | $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest 88 | @echo "Testing of doctests in the sources finished, look at the " \ 89 | "results in $(BUILDDIR)/doctest/output.txt." 90 | -------------------------------------------------------------------------------- /docs/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | REM Command file for Sphinx documentation 4 | 5 | set SPHINXBUILD=sphinx-build 6 | set BUILDDIR=build 7 | set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% source 8 | if NOT "%PAPER%" == "" ( 9 | set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% 10 | ) 11 | 12 | if "%1" == "" goto help 13 | 14 | if "%1" == "help" ( 15 | :help 16 | echo.Please use `make ^` where ^ is one of 17 | echo. html to make standalone HTML files 18 | echo. dirhtml to make HTML files named index.html in directories 19 | echo. pickle to make pickle files 20 | echo. json to make JSON files 21 | echo. htmlhelp to make HTML files and a HTML help project 22 | echo. qthelp to make HTML files and a qthelp project 23 | echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter 24 | echo. changes to make an overview over all changed/added/deprecated items 25 | echo. linkcheck to check all external links for integrity 26 | echo. doctest to run all doctests embedded in the documentation if enabled 27 | goto end 28 | ) 29 | 30 | if "%1" == "clean" ( 31 | for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i 32 | del /q /s %BUILDDIR%\* 33 | goto end 34 | ) 35 | 36 | if "%1" == "html" ( 37 | %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html 38 | echo. 39 | echo.Build finished. The HTML pages are in %BUILDDIR%/html. 40 | goto end 41 | ) 42 | 43 | if "%1" == "dirhtml" ( 44 | %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml 45 | echo. 46 | echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. 47 | goto end 48 | ) 49 | 50 | if "%1" == "pickle" ( 51 | %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle 52 | echo. 53 | echo.Build finished; now you can process the pickle files. 54 | goto end 55 | ) 56 | 57 | if "%1" == "json" ( 58 | %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json 59 | echo. 60 | echo.Build finished; now you can process the JSON files. 61 | goto end 62 | ) 63 | 64 | if "%1" == "htmlhelp" ( 65 | %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp 66 | echo. 67 | echo.Build finished; now you can run HTML Help Workshop with the ^ 68 | .hhp project file in %BUILDDIR%/htmlhelp. 69 | goto end 70 | ) 71 | 72 | if "%1" == "qthelp" ( 73 | %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp 74 | echo. 75 | echo.Build finished; now you can run "qcollectiongenerator" with the ^ 76 | .qhcp project file in %BUILDDIR%/qthelp, like this: 77 | echo.^> qcollectiongenerator %BUILDDIR%\qthelp\PythonStdNet.qhcp 78 | echo.To view the help file: 79 | echo.^> assistant -collectionFile %BUILDDIR%\qthelp\PythonStdNet.ghc 80 | goto end 81 | ) 82 | 83 | if "%1" == "latex" ( 84 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex 85 | echo. 86 | echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. 87 | goto end 88 | ) 89 | 90 | if "%1" == "changes" ( 91 | %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes 92 | echo. 93 | echo.The overview file is in %BUILDDIR%/changes. 94 | goto end 95 | ) 96 | 97 | if "%1" == "linkcheck" ( 98 | %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck 99 | echo. 
100 | echo.Link check complete; look for any errors in the above output ^ 101 | or in %BUILDDIR%/linkcheck/output.txt. 102 | goto end 103 | ) 104 | 105 | if "%1" == "doctest" ( 106 | %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest 107 | echo. 108 | echo.Testing of doctests in the sources finished, look at the ^ 109 | results in %BUILDDIR%/doctest/output.txt. 110 | goto end 111 | ) 112 | 113 | :end 114 | -------------------------------------------------------------------------------- /docs/source/_static/.keep: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lsbardel/python-stdnet/78db5320bdedc3f28c5e4f38cda13a4469e35db7/docs/source/_static/.keep -------------------------------------------------------------------------------- /docs/source/_static/net.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lsbardel/python-stdnet/78db5320bdedc3f28c5e4f38cda13a4469e35db7/docs/source/_static/net.jpg -------------------------------------------------------------------------------- /docs/source/_templates/layout.html: -------------------------------------------------------------------------------- 1 | {% extends "!layout.html" %} 2 | 3 | {% block relbar1 %} 4 | {{ super() }} 5 |
6 |
7 | {% if release_version %} 8 |

9 | Documentation for stdnet {{ version }}. For development docs, 10 | go here. 11 |

12 | {% else %} 13 |

14 | Documentation for stdnet's DEVELOPMENT version. Get the 15 | release docs here. 16 |

17 | {% endif %} 18 |
19 |
20 | {% endblock %} 21 | {% block relbar2 %}{% endblock %} 22 | 23 | {% block footer %} 24 | {{ super() }} 25 | 42 | {% endblock %} -------------------------------------------------------------------------------- /docs/source/_templates/sidebarintro.html: -------------------------------------------------------------------------------- 1 | 2 |
3 | 4 | 11 |
12 |

Python Stdnet

13 |

14 | Stdnet is an object data mapper and advanced query manager for non-relational databases. 15 | Designed to be fast, memory efficient and highly customisable. 16 |

17 | 22 |

Useful Links

23 | 38 | -------------------------------------------------------------------------------- /docs/source/_templates/sidebarlogo.html: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /docs/source/_theme/pulsar/theme.conf: -------------------------------------------------------------------------------- 1 | [theme] 2 | inherit = basic 3 | stylesheet = pulsar.css 4 | 5 | [options] -------------------------------------------------------------------------------- /docs/source/api/index.rst: -------------------------------------------------------------------------------- 1 | .. _model-index: 2 | 3 | .. module:: stdnet.odm 4 | 5 | ============================ 6 | API 7 | ============================ 8 | 9 | Stdnet is an object data mapper for non-relational databases or nosql_ as 10 | they are known. It is also a lightweight module, which deal only with data mapping, 11 | advanced queries and nothing else. 12 | 13 | 14 | **Contents** 15 | 16 | .. toctree:: 17 | :maxdepth: 2 18 | 19 | models 20 | fields 21 | utility 22 | 23 | 24 | .. _nosql: http://nosql-database.org/ -------------------------------------------------------------------------------- /docs/source/api/utility.rst: -------------------------------------------------------------------------------- 1 | .. _utility-index: 2 | 3 | ============================ 4 | Internals and Utilities 5 | ============================ 6 | 7 | .. _settings: 8 | 9 | Default Settings 10 | ========================= 11 | 12 | .. automodule:: stdnet.utils.conf 13 | 14 | 15 | .. module:: stdnet.odm 16 | 17 | 18 | .. _serialize-models: 19 | 20 | Serialization 21 | ====================== 22 | 23 | Stdnet comes with a bunch of extendible utilities for 24 | :ref:`serializing model ` data into different formats. 25 | 26 | Get serializer 27 | ~~~~~~~~~~~~~~~~~~~~~~~~ 28 | 29 | .. autofunction:: get_serializer 30 | 31 | 32 | Register serializer 33 | ~~~~~~~~~~~~~~~~~~~~~~~~ 34 | 35 | .. autofunction:: register_serializer 36 | 37 | 38 | Serializer 39 | ~~~~~~~~~~~~~~~~~~~~~~~~ 40 | 41 | .. autoclass:: Serializer 42 | :members: 43 | :member-order: bysource 44 | 45 | 46 | JsonSerializer 47 | ~~~~~~~~~~~~~~~~~~~~~~~~ 48 | 49 | .. autoclass:: JsonSerializer 50 | :members: 51 | :member-order: bysource 52 | 53 | 54 | .. module:: stdnet.utils 55 | 56 | JSON utilities 57 | ===================== 58 | 59 | .. automodule:: stdnet.utils.jsontools 60 | 61 | .. _encoders: 62 | 63 | Encoders 64 | ======================= 65 | 66 | .. automodule:: stdnet.utils.encoders 67 | 68 | 69 | .. module:: stdnet 70 | 71 | Exceptions 72 | ============================ 73 | 74 | .. autoclass:: StdNetException 75 | :members: 76 | :member-order: bysource 77 | 78 | .. autoclass:: ImproperlyConfigured 79 | :members: 80 | :member-order: bysource 81 | 82 | .. autoclass:: QuerySetError 83 | :members: 84 | :member-order: bysource 85 | 86 | .. autoclass:: FieldError 87 | :members: 88 | :member-order: bysource 89 | 90 | .. autoclass:: FieldValueError 91 | :members: 92 | :member-order: bysource 93 | 94 | 95 | .. _signal-api: 96 | 97 | Signals 98 | ===================== 99 | Stdnet includes a signal dispatcher which helps allow decoupled 100 | applications get notified when actions occur elsewhere in the framework. 101 | In a nutshell, signals allow certain senders to notify a set of receivers 102 | that some action has taken place. 
103 | They are especially useful when many pieces of code may be interested in 104 | the same events. 105 | 106 | The data mapper provide with the following built-in signals in the :mod:`stdnet.odm` 107 | module: 108 | 109 | * ``pre_commit`` triggered before new instances or changes on existing instances 110 | are committed to the backend server. 111 | * ``post_commit`` triggered after new instances or changes on existing instances 112 | are committed to the backend server. 113 | 114 | It is also possible to add callback to single instances in the following way:: 115 | 116 | instance = MyModel(...) 117 | instance.post_commit(callable) 118 | 119 | 120 | Miscellaneous 121 | ============================ 122 | 123 | Populate 124 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~ 125 | 126 | .. autofunction:: stdnet.utils.populate 127 | 128 | 129 | .. _api-testing: 130 | 131 | Testing 132 | ====================== 133 | 134 | .. automodule:: stdnet.utils.test -------------------------------------------------------------------------------- /docs/source/backends/api.rst: -------------------------------------------------------------------------------- 1 | .. _db-api: 2 | 3 | .. module:: stdnet 4 | 5 | =========== 6 | API 7 | =========== 8 | 9 | High Level Functions 10 | ======================== 11 | 12 | getdb 13 | ~~~~~~~~~~~~~~~~ 14 | 15 | .. autofunction:: getdb 16 | 17 | 18 | getcache 19 | ~~~~~~~~~~~~~~~~ 20 | 21 | .. autofunction:: getcache 22 | 23 | 24 | Interfaces 25 | ======================== 26 | 27 | Backend data server 28 | ~~~~~~~~~~~~~~~~~~~~~~~~~ 29 | 30 | .. autoclass:: BackendDataServer 31 | :members: 32 | :member-order: bysource 33 | 34 | 35 | Cache Server 36 | ~~~~~~~~~~~~~~~~~~~~~~~~ 37 | 38 | .. autoclass:: CacheServer 39 | :members: 40 | :member-order: bysource 41 | 42 | 43 | Backend Structure 44 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~ 45 | 46 | .. autoclass:: BackendStructure 47 | :members: 48 | :member-order: bysource 49 | 50 | 51 | Asynchronous Components 52 | =============================== 53 | 54 | .. automodule:: stdnet.utils.async 55 | 56 | 57 | .. _JSON: http://www.json.org/ -------------------------------------------------------------------------------- /docs/source/backends/index.rst: -------------------------------------------------------------------------------- 1 | .. _db-index: 2 | 3 | .. module:: stdnet 4 | 5 | ================================ 6 | Backend Servers 7 | ================================ 8 | 9 | Behind the scenes we have the database. 10 | Currently stdnet supports Redis_. 11 | 12 | Backends 13 | =========== 14 | 15 | .. toctree:: 16 | :maxdepth: 1 17 | 18 | api 19 | redis 20 | 21 | .. _connection-string: 22 | 23 | A backend instance is usually obtained via the :func:`getdb` function by 24 | passing a valid connection string:: 25 | 26 | from pulsar import getdb 27 | 28 | b1 = getdb('redis://127.0.0.1:9739?db=7&namespace=test.') 29 | 30 | Check :ref:`redis connection strings ` for a 31 | full list of valid parameters. 32 | 33 | .. _Redis: http://redis.io/ 34 | -------------------------------------------------------------------------------- /docs/source/changelog.rst: -------------------------------------------------------------------------------- 1 | .. _changelog: ========================= Changelog ========================= .. 
include:: ../../CHANGELOG.rst -------------------------------------------------------------------------------- /docs/source/conf.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 3 | # Python StdNet documentation build configuration file, created by 4 | # sphinx-quickstart on Thu Jun 17 11:24:36 2010. 5 | # 6 | # This file is execfile()d with the current directory set to its containing dir. 7 | # 8 | # Note that not all possible configuration values are present in this 9 | # autogenerated file. 10 | # 11 | # All configuration values have a default; values that are commented out 12 | # serve to show the default. 13 | 14 | import sys, os 15 | 16 | # If your extensions are in another directory, add it here. 17 | source_dir = os.path.split(os.path.abspath(__file__))[0] 18 | docs_dir = os.path.split(source_dir)[0] 19 | base_dir = os.path.split(docs_dir)[0] 20 | sys.path.append(os.path.join(source_dir, "_ext")) 21 | sys.path.append(base_dir) 22 | import stdnet 23 | version = stdnet.__version__ 24 | release = version 25 | import runtests # so that it import pulsar if available 26 | # -- General configuration ----------------------------------------------------- 27 | 28 | # Add any Sphinx extension module names here, as strings. They can be extensions 29 | # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. 30 | extensions = ['sphinx.ext.autodoc', 'sphinx.ext.todo', 'sphinx.ext.pngmath'] 31 | 32 | # Beta version is published in github pages 33 | if stdnet.VERSION[3] == 'beta': 34 | extensions.append('sphinxtogithub') 35 | html_context = {'release_version': stdnet.VERSION[3] == 'final'} 36 | 37 | # The suffix of source filenames. 38 | source_suffix = '.rst' 39 | 40 | # The encoding of source files. 41 | #source_encoding = 'utf-8' 42 | 43 | # The master toctree document. 44 | master_doc = 'index' 45 | 46 | # General information about the project. 47 | project = 'python stdnet' 48 | copyright = '2010-2013, Luca Sbardella' 49 | 50 | html_theme = 'pulsar' 51 | 52 | templates_path = ['_templates'] 53 | html_static_path = ['_static'] 54 | html_theme_path = ["_theme"] 55 | html_sidebars = { 56 | 'index': ['sidebarlogo.html', 'sidebarintro.html', 57 | 'sourcelink.html', 'searchbox.html'], 58 | '**': ['sidebarlogo.html', 'localtoc.html', 'relations.html', 59 | 'sourcelink.html', 'searchbox.html'], 60 | } 61 | exclude_trees = [] 62 | pygments_style = 'sphinx' 63 | 64 | # Output file base name for HTML help builder. 65 | htmlhelp_basename = 'PythonStdNetdoc' 66 | 67 | # -- Options for LaTeX output -------------------------------------------------- 68 | 69 | # The paper size ('letter' or 'a4'). 70 | #latex_paper_size = 'letter' 71 | 72 | # The font size ('10pt', '11pt' or '12pt'). 73 | #latex_font_size = '10pt' 74 | 75 | # Grouping the document tree into LaTeX files. List of tuples 76 | # (source start file, target name, title, author, documentclass [howto/manual]). 77 | latex_documents = [ 78 | ('index', 'PythonStdNet.tex', 'Python Stdnet Documentation', 79 | 'Luca Sbardella', 'manual'), 80 | ] 81 | 82 | # The name of an image file (relative to this directory) to place at the top of 83 | # the title page. 84 | #latex_logo = None 85 | 86 | # For "manual" documents, if this is true, then toplevel headings are parts, 87 | # not chapters. 88 | #latex_use_parts = False 89 | 90 | # Additional stuff for the LaTeX preamble. 91 | #latex_preamble = '' 92 | 93 | # Documents to append as an appendix to all manuals. 
94 | #latex_appendices = [] 95 | 96 | # If false, no module index is generated. 97 | #latex_use_modindex = True 98 | -------------------------------------------------------------------------------- /docs/source/examples/asynchronous.rst: -------------------------------------------------------------------------------- 1 | .. _tutorial-asynchronous: 2 | 3 | 4 | ======================== 5 | Asynchronous usage 6 | ======================== 7 | 8 | Stdnet has been designed so that it can operate in fully asynchronous mode 9 | if the backend connection class is asynchronous. 10 | The :ref:`redis backend ` is shipped with an asychronous client 11 | written using pulsar_. 12 | 13 | When using stdnet in asynchronous mode, a user writes generator method 14 | which pulsar_ treats as asynchronous coroutines. In this way adding instances 15 | of Found model in :ref:`our tutorial application ` 16 | becomes:: 17 | 18 | fund = yield Fund(name='Markowitz', ccy='EUR').save() 19 | 20 | or using a :ref:`session `:: 21 | 22 | with Fund.objects.session().begin() as t: 23 | t.add(Fund(name='Markowitz', ccy='EUR')) 24 | yield t.on_result 25 | 26 | 27 | 28 | .. _pulsar: http://quantmind.github.com/pulsar/ 29 | -------------------------------------------------------------------------------- /docs/source/examples/columnts.rst: -------------------------------------------------------------------------------- 1 | .. _apps-timeserie: 2 | 3 | =========================== 4 | Multivariate Time Series 5 | =========================== 6 | 7 | .. automodule:: stdnet.apps.columnts 8 | 9 | -------------------------------------------------------------------------------- /docs/source/examples/extensions.rst: -------------------------------------------------------------------------------- 1 | .. _c-extensions: 2 | 3 | ======================= 4 | C/C++ Extensions 5 | ======================= 6 | 7 | If cython_ is available during installation, stdnet will compile and install 8 | a set of extensions which greatly speed-up the library when large amount of data 9 | is retrieved or saved. 10 | 11 | 12 | Installing Cython 13 | ====================== 14 | 15 | In linux or Mac OS it is as simple as:: 16 | 17 | pip install cython 18 | 19 | for windows, you are better off to download and install a binary distribution 20 | from `this site`_. 21 | 22 | 23 | .. _cython: http://cython.org/ 24 | .. _`this site`: http://www.lfd.uci.edu/~gohlke/pythonlibs/#cython -------------------------------------------------------------------------------- /docs/source/examples/index.rst: -------------------------------------------------------------------------------- 1 | .. _intro-example: 2 | 3 | ====================== 4 | Tutorials & Examples 5 | ====================== 6 | 7 | The best way to get familiar with stdnet API is to dive into the tutorials. 8 | In this section we will walk you though all the main aspects of the library, 9 | following simple applications as examples. 10 | We will refer back to the :ref:`library API ` 11 | as much as possible so that 12 | advanced configuration parameters and functionalities can be investigated. 13 | 14 | The collection of tutorials examples is available in the 15 | :mod:`examples` module in the source distribution. 16 | 17 | **Tutorials** 18 | 19 | .. toctree:: 20 | :maxdepth: 1 21 | 22 | tutorial 23 | registration 24 | query 25 | sorting 26 | search 27 | sessions 28 | twitter 29 | permissions 30 | columnts 31 | 32 | 33 | **Topics** 34 | 35 | .. 
toctree:: 36 | :maxdepth: 1 37 | 38 | related 39 | structures 40 | json 41 | primary 42 | performance 43 | underscore 44 | serialise 45 | asynchronous 46 | local 47 | extensions -------------------------------------------------------------------------------- /docs/source/examples/json.rst: -------------------------------------------------------------------------------- 1 | .. _field-generator: 2 | 3 | .. module:: stdnet.odm 4 | 5 | ======================= 6 | Field Generator 7 | ======================= 8 | 9 | This tutorial is about the use of :class:`JSONField` with parameter 10 | :class:`JSONField.as_string` set to ``False``. 11 | Lets start with a model for a general data feed:: 12 | 13 | class Feed(odm.StdModel): 14 | name = odm.SymbolField() 15 | data = odm.JSONField(as_string=False) 16 | 17 | lets create an instance:: 18 | 19 | feed = Feed(name='goog').save() 20 | feed.data = {'price': {'bid': 750, 'offer':751}, 21 | 'volume': 2762355371, 22 | 'mkt_cap': '255B', 23 | 'pe': 23} 24 | feed.save() 25 | 26 | When loading the instance one can access all the fields in the following way:: 27 | 28 | feed.data['price']['bid'] 29 | feed.data['price']['offer'] 30 | 31 | or equivalently:: 32 | 33 | feed.data__price__bid 34 | feed.data__price__offer 35 | 36 | 37 | 38 | -------------------------------------------------------------------------------- /docs/source/examples/local.rst: -------------------------------------------------------------------------------- 1 | .. _local-models: 2 | 3 | .. module:: stdnet.odm 4 | 5 | ====================== 6 | Local Models 7 | ====================== 8 | 9 | Stdnet provides a tool for creating and manipulating :class:`Model` which 10 | are not backed by a :class:`BackendDataServer` but requires an 11 | interface similar to :class:`StdModel` classes. 12 | 13 | 14 | Creating a model 15 | ======================= 16 | 17 | The primary tool for creating local models is the :func:`create_model` 18 | API function:: 19 | 20 | create_model('RedisDb', 'db') 21 | -------------------------------------------------------------------------------- /docs/source/examples/permissions.rst: -------------------------------------------------------------------------------- 1 | .. _permissions-example: 2 | 3 | ============================== 4 | Role based access control 5 | ============================== 6 | 7 | .. automodule:: examples.permissions -------------------------------------------------------------------------------- /docs/source/examples/primary.rst: -------------------------------------------------------------------------------- 1 | .. _tutorial-primary-unique: 2 | 3 | .. module:: stdnet.odm 4 | 5 | ======================================= 6 | Primary Keys and Unique Fields 7 | ======================================= 8 | 9 | Each :class:`StdModel` must have a primary key. The model primary key :class:`Field` 10 | can be obtained via the :meth:`StdModel.pk` class method. A model can have one and only one 11 | primary key which is specified by passing ``primary_key=True`` during 12 | model definition:: 13 | 14 | class MyModel(odm.StdModel): 15 | id = odm.SymbolField(primary_key=True) 16 | ... 17 | 18 | A primary key field has the :attr:`Field.primary_key` attribute ``True``. 19 | 20 | .. _tutorial-compositeid: 21 | 22 | Composite ID 23 | ========================= 24 | 25 | The :class:`CompositeIdField` enforces a group of fields in a model to be 26 | unique (together). 
Let's consider the following model:: 27 | 28 | class Vote(StdModel): 29 | full_name = odm.SymbolField() 30 | address = odm.SymbolField() 31 | result = odm.SymbolField() 32 | 33 | Now I want to make ``full_name`` and ``address`` unique together, so that 34 | given a name and address I can uniquely identify a vote. 35 | This is achieved by introducing a :class:`CompositeIdField`:: 36 | 37 | class Vote(StdModel): 38 | id = odm.CompositeIdField('full_name', 'address') 39 | full_name = odm.SymbolField() 40 | address = odm.SymbolField() 41 | result = odm.SymbolField() 42 | 43 | .. note:: 44 | 45 | The :class:`CompositeIdField` is used, behind the scenes, 46 | by the :ref:`through model ` in the :class:`ManyToManyField`. 47 | 48 | 49 | .. _tutorial-unique: 50 | 51 | Unique Fields 52 | ========================= 53 | 54 | A :class:`Field` can be enforced to be unique by passing ``unique=True`` 55 | during model definition. The following model specifies two fields to be unique 56 | across all model instances:: 57 | 58 | class MyModel(odm.StdModel): 59 | full_name = odm.SymbolField() 60 | username = odm.SymbolField(unique=True) 61 | email = odm.SymbolField(unique=True) -------------------------------------------------------------------------------- /docs/source/examples/search.rst: -------------------------------------------------------------------------------- 1 | .. _tutorial-search: 2 | 3 | .. module:: stdnet.odm 4 | 5 | ========================================= 6 | Full text search 7 | ========================================= 8 | 9 | Full text search in most key-value stores is not provided out of the box as 10 | it is in traditional relational databases. 11 | One may argue that no-sql databases don't need to provide such a feature since 12 | search engines such as Solr_, ElasticSearch_ or Sphinx_ can be used to 13 | provide a full text search solution. 14 | 15 | Stdnet provides the :class:`stdnet.odm.SearchEngine` interface 16 | for implementing full text search of stdnet models. The interface 17 | can be customized with third party applications. 18 | 19 | 20 | Redis based solution 21 | ======================== 22 | 23 | .. automodule:: stdnet.apps.searchengine 24 | 25 | .. _solr: http://lucene.apache.org/solr/ 26 | .. _ElasticSearch: http://www.elasticsearch.org/ 27 | .. _Sphinx: http://sphinxsearch.com/ 28 | 29 | -------------------------------------------------------------------------------- /docs/source/examples/serialise.rst: -------------------------------------------------------------------------------- 1 | .. _tutorial-serialise: 2 | 3 | .. module:: stdnet.odm 4 | 5 | ======================= 6 | Export & Load data 7 | ======================= 8 | 9 | Stdnet comes with utilities for exporting and loading models from files. These 10 | :ref:`serialization utilities ` are useful for backing up 11 | your models, porting your data to other databases or creating test databases. 12 | 13 | There are two serializers included in the standard distribution: **json** and **csv**. 14 | 15 | Exporting Data 16 | ==================== 17 | 18 | Exporting data is quite straightforward: you create a :class:`Query` and 19 | pass it to the :meth:`Serializer.dump` method. Using the 20 | :ref:`models router ` in the first tutorial:: 21 | 22 | >>> from stdnet import odm 23 | >>> json = odm.get_serializer('json') 24 | >>> qs = models.instrument.query() 25 | >>> json.dump(qs) 26 | 27 | So far the ``json`` serializer has not written anything to file.
Therefore 28 | we can add additional queries:: 29 | 30 | >>> qs = models.fund.query() 31 | >>> json.dump(qs) 32 | 33 | To write to file we use the :meth:`Serializer.write` methods:: 34 | 35 | >>> with open('data.json','w') as stream: 36 | >>> json.write(stream) 37 | 38 | 39 | Loading Data 40 | ==================== 41 | 42 | To load data from a file or a stream we starts from a :class:`Router` 43 | which contains all the models we need:: 44 | 45 | >>> json = odm.get_serializer('json') 46 | >>> with open('data.json','r') as f: 47 | >>> data = f.read() 48 | >>> json.load(models, data) 49 | 50 | 51 | Creating a Serializer 52 | ========================== 53 | To create a new serializer, one starts by subclassing the :class:`Serializer` 54 | and implement the :meth:`Serializer.dump` and :meth:`Serializer.load` and 55 | :meth:`Serializer.write` methods:: 56 | 57 | from stdnet import dm 58 | 59 | class MySerializer(odm.Serializer): 60 | 61 | def dump(self, qs): 62 | ... 63 | 64 | def write(self, stream=None): 65 | ... 66 | 67 | def load(self, stream, model=None): 68 | ... 69 | 70 | To be able to use the ``MySerializer`` it needs to be registered via 71 | the :func:`register_serializer`:: 72 | 73 | odm.register_serializer('custom', MySerializer) -------------------------------------------------------------------------------- /docs/source/examples/sessions.rst: -------------------------------------------------------------------------------- 1 | .. _model-session: 2 | 3 | .. module:: stdnet.odm 4 | 5 | ============================ 6 | Sessions 7 | ============================ 8 | 9 | A :class:`Session` is a lightweight component which establishes all 10 | conversations with backend databases. It is the middleware 11 | between :class:`Model` and :class:`Router` on one side and the 12 | :class:`stdnet.BackendDataServer` on the other side. 13 | 14 | 15 | Obtaining a session 16 | ===================== 17 | 18 | :class:`Session` is a regular Python class which is obtained from 19 | a :class:`Router` via the :meth:`Router.session` method. We continue to use the 20 | :ref:`models router ` 21 | created for our :ref:`tutorial application `:: 22 | 23 | session = models.session() 24 | session2 = models.session() 25 | 26 | 27 | Query a model 28 | ==================== 29 | 30 | Once a session is obtained, one can create a query on a model by simply invoking 31 | the :meth:`Session.query` method:: 32 | 33 | query = session.query(models.fund) 34 | 35 | A less verbose way of obtaining a query is to use the :meth:`Manager.query` 36 | method directly:: 37 | 38 | query = models.fund.query() 39 | 40 | 41 | 42 | .. _transactional-state: 43 | 44 | Transactional State 45 | ========================= 46 | 47 | A :class:`Session` is said to be in a **transactional state** when its 48 | :class:`Session.transaction` attribute is not ``None``. A transactional state is 49 | obtained via the :meth:`Session.begin` method:: 50 | 51 | transaction = session.begin() 52 | 53 | The returned transaction instance is the same as the value stored at the 54 | :class:`Session.transaction` attribute. Note that if we try to obtain a new transaction 55 | from a session already in a transactional state an :class:`InvalidTransaction` 56 | exception will occur. 57 | 58 | -------------------------------------------------------------------------------- /docs/source/examples/sorting.rst: -------------------------------------------------------------------------------- 1 | .. _sorting: 2 | 3 | .. 
module:: stdnet.odm 4 | 5 | ======================= 6 | Sorting and Ordering 7 | ======================= 8 | Stdnet can sort instances of a model in three different ways: 9 | 10 | * :ref:`Explicit sorting ` using the 11 | :attr:`Query.sort_by` method. 12 | * :ref:`Implicit sorting ` via the 13 | :attr:`Metaclass.ordering` attribute of the model metaclass. 14 | * :ref:`Incremental sorting `, a variant of the 15 | implicit sorting for models which require to keep track how many 16 | times instances with the same id are created. 17 | 18 | 19 | .. _explicit-sorting: 20 | 21 | Explicit Sorting 22 | ======================= 23 | 24 | Sorting is usually achieved by using the :meth:`Query.sort_by` 25 | method with a field name as parameter. Lets consider the following model:: 26 | 27 | class SportActivity(odm.StdNet): 28 | person = odm.SymbolField() 29 | activity = odm.SymbolField() 30 | dt = odm.DateTimeField() 31 | 32 | models = odm.Router() 33 | models.register(SportActivity) 34 | 35 | To obtained a sorted query on dates for a given person:: 36 | 37 | qs = models.sportactivity.filter(person='pippo').sort_by('-dt') 38 | 39 | The negative sign in front of ``dt`` indicates descending order. 40 | 41 | 42 | .. _implicit-sorting: 43 | 44 | Implicit Sorting 45 | =================== 46 | 47 | Implicit sorting is achieved by setting the :attr:`Metaclass.ordering` 48 | attribute in the model ``Meta`` class. 49 | Let's consider the following Log model example:: 50 | 51 | class Log(odm.StdModel): 52 | '''A database log entry''' 53 | timestamp = odm.DateTimeField(default=datetime.now) 54 | level = odm.SymbolField() 55 | msg = odm.CharField() 56 | source = odm.CharField() 57 | host = odm.CharField() 58 | user = odm.SymbolField(required=False) 59 | client = odm.CharField() 60 | 61 | class Meta: 62 | ordering = '-timestamp' 63 | 64 | models.register(Log) 65 | 66 | It makes lots of sense to have the log entries always sorted in a descending 67 | order with respect to the ``timestamp`` field. 68 | This solution always returns :class:`Query` in this order, without the need to 69 | call ``sort_by`` method. 70 | 71 | .. note:: Implicit sorting is a much faster solution than explicit sorting, 72 | since there is no sorting step involved (which is a ``N log(N)`` 73 | time complexity algorithm). Instead, the order is maintained by using 74 | sorted sets as indices rather than sets. 75 | 76 | 77 | .. _incremental-sorting: 78 | 79 | Incremental Sorting 80 | ======================== 81 | 82 | -------------------------------------------------------------------------------- /docs/source/examples/ts.py: -------------------------------------------------------------------------------- 1 | from datetime import date 2 | 3 | from stdnet import odm 4 | from stdnet.contrib.timeseries.models import TimeSeries 5 | 6 | 7 | class FinanceTimeSeries(TimeSeries): 8 | ticker = odm.SymbolField(unique = True) 9 | 10 | def __unicode__(self): 11 | return '%s - %s' % (self.ticker,self.data.size()) 12 | 13 | 14 | 15 | if __name__ == '__main__': 16 | odm.register(FinanceTimeSeries) 17 | ts = FinanceTimeSeries(ticker = 'GOOG').save() 18 | ts.data[date(2010,2,25)] = 610.5 19 | ts.save() -------------------------------------------------------------------------------- /docs/source/examples/twitter.rst: -------------------------------------------------------------------------------- 1 | .. 
_twitter-example: 2 | 3 | ============================== 4 | A twitter clone 5 | ============================== 6 | 7 | This is the stdnet equivalent of the `redis twitter clone`_ example. 8 | It illustrates the use of :class:`stdnet.odm.ManyToManyField` and 9 | :ref:`implicit sorting `:: 10 | 11 | from datetime import datetime 12 | from stdnet import odm 13 | 14 | class Post(odm.StdModel): 15 | timestamp = odm.DateTimeField(default = datetime.now) 16 | data = odm.CharField() 17 | user = odm.ForeignKey("User") 18 | 19 | def __unicode__(self): 20 | return self.data 21 | 22 | class Meta: 23 | ordering = '-timestamp' 24 | 25 | 26 | class User(odm.StdModel): 27 | '''A model for holding information about users''' 28 | username = odm.SymbolField(unique = True) 29 | password = odm.CharField(required = True) 30 | following = odm.ManyToManyField(model = 'self', 31 | related_name = 'followers') 32 | 33 | def __unicode__(self): 34 | return self.username 35 | 36 | def newupdate(self, data): 37 | return Post(data = data, user = self).save() 38 | 39 | 40 | These models are available in the :mod:`stdnet.tests` module. 41 | We can import them by using:: 42 | 43 | from stdnet.tests.examples.models import Post, User 44 | 45 | Before using the models, :ref:`we need to register ` 46 | them to a back-end. If your redis server is running locally type:: 47 | 48 | >>> from stdnet import odm 49 | >>> models = odm.Router('redis://') 50 | >>> models.register(User) 51 | >>> models.register(Post) 52 | 53 | Now lets try it out:: 54 | 55 | >>> u = models.user.new(username='pluto', password='bla') 56 | >>> u 57 | User: pluto 58 | 59 | Ok we have a user. Lets add few updates:: 60 | 61 | >>> u.newupdate('my name is Luka and I live on second floor') 62 | Post: my name is Luka and I live on second floor 63 | >>> u.newupdate('ciao') 64 | Post: ciao 65 | >>> u.save() 66 | User: pluto 67 | >>> u.updates.size() 68 | 2 69 | >>> for p in u.updates: 70 | ... print('%s : %s' % (p.dt,p)) 71 | ... 72 | 2010-11-10 18:05:59 : ciao 73 | 2010-11-10 18:05:24 : my name is Luka and I live on second floor 74 | >>> 75 | 76 | 77 | .. _redis twitter clone: http://redis.io/topics/twitter-clone 78 | -------------------------------------------------------------------------------- /docs/source/examples/underscore.rst: -------------------------------------------------------------------------------- 1 | .. _tutorial-underscore: 2 | 3 | .. module:: stdnet.odm 4 | 5 | ======================================= 6 | Double Underscore Notation 7 | ======================================= 8 | 9 | Stdnet makes extensive use of the ``__`` **double-underscore notation** in 10 | several parts of the API. 11 | 12 | * A :class:`Query` with :ref:`range ` and 13 | :ref:`text ` lookups:: 14 | 15 | >>> from stdnet import odm 16 | >>> models = odm.Router() 17 | >>> models.register(MyModel) 18 | >>> qs = odm.mymodel.filter(size__gt=40, description__contains='technology') 19 | 20 | * An :class:`Query` on a :class:`Field` of a related model. 
For example, in the 21 | :ref:`Position model ` one can :ref:`filter `, 22 | :ref:`exclude ` or :ref:`sort `, with respect 23 | the instrument ``ccy`` :class:`Field` in this way:: 24 | 25 | qs = models.position.filter(instrument__ccy='EUR') 26 | qs = models.position.exclude(instrument__ccy='EUR') 27 | qs = models.position.query().sort_by('instrument__ccy') 28 | 29 | * In conjunction with :ref:`load_only ` query method when 30 | you need to load only a subset of a related model fields:: 31 | 32 | qs = models.position.query().load_only('size', 'instrument__ccy') 33 | 34 | * In the :meth:`StdModel.get_attr_value` method, for example:: 35 | 36 | p.get_attr_value('instrument') 37 | # same as 38 | p.instrument 39 | 40 | and:: 41 | 42 | p.get_attr_value('instrument__ccy') 43 | # same as 44 | p.instrument.ccy 45 | 46 | 47 | -------------------------------------------------------------------------------- /docs/source/faq.rst: -------------------------------------------------------------------------------- 1 | .. _faq: 2 | 3 | .. module:: stdnet 4 | 5 | FAQ 6 | =========== 7 | 8 | This is a list of Frequently Asked Questions regarding stdnet. 9 | 10 | .. contents:: 11 | :local: 12 | 13 | 14 | General 15 | --------------------- 16 | 17 | What is python-stdnet? 18 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 19 | Check the :ref:`overview `. 20 | 21 | Why should I use stdnet? 22 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 23 | * Organise your data in :ref:`models `, in one place, no repetitions. 24 | * Rich and speedy :ref:`query api ` for retrieving data 25 | in an efficient and elegant way. 26 | * Out of the box, fully customizable, :ref:`full text search `. 27 | 28 | 29 | Fields 30 | -------------------- 31 | 32 | Can I specify a Field to be unique? 33 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 34 | Yes, by settings the ``unique`` parameter to ``True``. Check the 35 | :ref:`primary keys and unique fields ` documentation. 36 | -------------------------------------------------------------------------------- /docs/source/index.rst: -------------------------------------------------------------------------------- 1 | .. _stdnet-doc: 2 | 3 | ====================== 4 | Python Stdnet 5 | ====================== 6 | 7 | .. rubric:: Object data mapper and advanced query manager for redis. 8 | 9 | .. _requirements: 10 | 11 | Requirements 12 | ================ 13 | 14 | * redis-py_ for :ref:`redis backend `. Redis 2.6 or above. 15 | * Optional cython_ for :ref:`ultra-fast C-extensions `. Recommended. 16 | * Optional pymongo_ for :ref:`mongo db backend ` (pre-alpha). 17 | * Optional pulsar_ for :ref:`asynchronous database connection `. 18 | 19 | .. _contents: 20 | 21 | Contents 22 | =========== 23 | 24 | .. toctree:: 25 | :maxdepth: 1 26 | 27 | overview 28 | examples/index 29 | backends/index 30 | changelog 31 | faq 32 | api/index 33 | 34 | Indices and tables 35 | ================== 36 | 37 | * :ref:`genindex` 38 | * :ref:`modindex` 39 | 40 | 41 | Legacy 42 | ============ 43 | 44 | .. toctree:: 45 | :maxdepth: 1 46 | 47 | stdnetredis 48 | 49 | .. _Redis: http://redis.io/ 50 | .. _redis-py: https://github.com/andymccurdy/redis-py 51 | .. _cython: http://cython.org/ 52 | .. _Mongodb: http://www.mongodb.org/ 53 | .. _pymongo: http://api.mongodb.org/python/current/index.html 54 | .. _pulsar: https://pypi.python.org/pypi/pulsar 55 | -------------------------------------------------------------------------------- /docs/source/overview.rst: -------------------------------------------------------------------------------- 1 | .. 
_intro-overview: 2 | 3 | ============================== 4 | Overview and Installation 5 | ============================== 6 | 7 | .. include:: ../../README.rst -------------------------------------------------------------------------------- /examples/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lsbardel/python-stdnet/78db5320bdedc3f28c5e4f38cda13a4469e35db7/examples/__init__.py -------------------------------------------------------------------------------- /examples/dynamo.py: -------------------------------------------------------------------------------- 1 | from stdnet import odm 2 | 3 | 4 | class Base(odm.StdModel): 5 | name = odm.SymbolField(primary_key=True) 6 | ccy = odm.SymbolField() 7 | 8 | def __unicode__(self): 9 | return self.name 10 | 11 | class Meta: 12 | abstract = True 13 | 14 | 15 | class Instrument(Base): 16 | type = odm.SymbolField() 17 | description = odm.CharField() 18 | -------------------------------------------------------------------------------- /examples/m2m.py: -------------------------------------------------------------------------------- 1 | from stdnet import odm 2 | 3 | 4 | class Element(odm.StdModel): 5 | name = odm.SymbolField() 6 | 7 | 8 | class CompositeElement(odm.StdModel): 9 | weight = odm.FloatField() 10 | 11 | 12 | class Composite(odm.StdModel): 13 | name = odm.SymbolField() 14 | elements = odm.ManyToManyField(Element, through=CompositeElement, 15 | related_name='composites') -------------------------------------------------------------------------------- /examples/observer.py: -------------------------------------------------------------------------------- 1 | '''This example is an implementation of the Observer design-pattern 2 | when Observers receives multiple updates from several instances they are 3 | observing. 4 | ''' 5 | from time import time 6 | from stdnet import odm 7 | from stdnet.odm import struct 8 | from stdnet.backends import redisb 9 | 10 | 11 | class update_observer(redisb.RedisScript): 12 | '''Script for adding/updating an observer. 
The ARGV contains the member 13 | value, the initial score (usually a timestamp) and the increment for 14 | subsequent additions.''' 15 | script = '''\ 16 | local key = KEYS[1] 17 | local index = 0 18 | local n = 0 19 | while index < # ARGV do 20 | local score = ARGV[index+1]+0 21 | local penalty = ARGV[index+2]+0 22 | local member = ARGV[index+3] 23 | index = index + 3 24 | if redis.call('zrank', key, member) then 25 | redis.call('zincrby', key, -penalty, member) 26 | else 27 | n = n + redis.call('zadd', key, score, member) 28 | end 29 | end 30 | return n 31 | ''' 32 | 33 | 34 | class RedisUpdateZset(redisb.Zset): 35 | '''Redis backend structure overriding Zset''' 36 | def flush(self): 37 | cache = self.instance.cache 38 | result = None 39 | if cache.toadd: 40 | flat = tuple(self.flat(cache.toadd.items())) 41 | self.client.execute_script('update_observer', (self.id,), *flat) 42 | result = True 43 | if cache.toremove: 44 | flat = tuple((el[1] for el in cache.toremove)) 45 | self.client.zrem(self.id, *flat) 46 | result = True 47 | return result 48 | 49 | def flat(self, zs): 50 | for s, el in zs: 51 | yield s 52 | yield el[1] 53 | yield el[2] 54 | 55 | 56 | class UpdateZset(odm.Zset): 57 | penalty = 0 # penalty in seconds 58 | 59 | def __init__(self, *args, **kwargs): 60 | self.penalty = kwargs.pop('penalty', self.penalty) 61 | super(UpdateZset, self).__init__(*args, **kwargs) 62 | 63 | def dump_data(self, instances): 64 | dt = time() 65 | for n, instance in enumerate(instances): 66 | if hasattr(instance, 'pkvalue'): 67 | instance = instance.pkvalue() 68 | # put n so that it allows for repeated values 69 | yield dt, (n, self.penalty, instance) 70 | 71 | # Register the new structure with redis backend 72 | redisb.BackendDataServer.struct_map['updatezset'] = RedisUpdateZset 73 | 74 | 75 | class UpdatesField(odm.StructureField): 76 | 77 | def structure_class(self): 78 | return UpdateZset 79 | 80 | 81 | class Observable(odm.StdModel): 82 | name = odm.CharField() 83 | 84 | 85 | class Observer(odm.StdModel): 86 | # Underlyings are the Observables this Observer is tracking for updates 87 | name = odm.CharField() 88 | underlyings = odm.ManyToManyField(Observable, related_name='observers') 89 | 90 | # field with a 5 second penalty 91 | updates = UpdatesField(class_field=True, penalty=5) 92 | 93 | 94 | def update_observers(signal, sender, instances=None, session=None, **kwargs): 95 | # This callback must be registered with the router 96 | # post_commit method 97 | # Instances of Observable got an update. Loop through the updated 98 | # observables and push all of their observers into the Observer 99 | # class-wide updates structure.
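# The chained query below uses the auto-generated through model of the
# ManyToManyField: it filters the through instances by the updated
# observables, extracts the related ``observer`` ids, and ``backend.execute``
# feeds that result to ``observers.updates.update``. The ``update_observer``
# script then adds each id with the current timestamp as score, or applies
# the penalty to ids already present in the sorted set.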
100 | models = session.router 101 | observers = models.observer 102 | through = models[observers.underlyings.model] 103 | return through.backend.execute( 104 | through.filter(observable=instances).get_field('observer').all(), 105 | observers.updates.update) 106 | -------------------------------------------------------------------------------- /examples/spelling/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lsbardel/python-stdnet/78db5320bdedc3f28c5e4f38cda13a4469e35db7/examples/spelling/__init__.py -------------------------------------------------------------------------------- /examples/spelling/spelling.py: -------------------------------------------------------------------------------- 1 | # 2 | # 3 | # idea from 4 | # http://norvig.com/spell-correct.html 5 | # 6 | # To train you can use 7 | # http://norvig.com/big.txt 8 | # 9 | import os 10 | import re 11 | import collections 12 | 13 | CURDIR = os.path.split(os.path.abspath(__file__))[0] 14 | 15 | 16 | def words(text): 17 | return re.findall('[a-z]+', text.lower()) 18 | 19 | 20 | def train(features): 21 | model = collections.defaultdict(lambda: 1) 22 | for f in features: 23 | model[f] += 1 24 | return model 25 | 26 | NWORDS = train(words(open(os.path.join(CURDIR, 'big.txt')).read())) 27 | 28 | 29 | alphabet = 'abcdefghijklmnopqrstuvwxyz' 30 | 31 | 32 | def edits1(word): 33 | s = [(word[:i], word[i:]) for i in range(len(word) + 1)] 34 | deletes = [a + b[1:] for a, b in s if b] 35 | transposes = [a + b[1] + b[0] + b[2:] for a, b in s if len(b) > 1] 36 | replaces = [a + c + b[1:] for a, b in s for c in alphabet if b] 37 | inserts = [a + c + b for a, b in s for c in alphabet] 38 | return set(deletes + transposes + replaces + inserts) 39 | 40 | 41 | def known_edits2(word): 42 | return set(e2 for e1 in edits1(word) for e2 in edits1(e1) if e2 in NWORDS) 43 | 44 | 45 | def known(words): 46 | return set(w for w in words if w in NWORDS) 47 | 48 | 49 | def correct(word): 50 | candidates = (known([word]) or known(edits1(word)) or known_edits2(word) 51 | or [word]) 52 | return max(candidates, key=NWORDS.get) 53 | -------------------------------------------------------------------------------- /examples/sql.py: -------------------------------------------------------------------------------- 1 | from sqlalchemy.ext.declarative import declarative_base 2 | from sqlalchemy import Column, Integer, String 3 | 4 | Base = declarative_base() 5 | 6 | 7 | class User(Base): 8 | __tablename__ = 'users' 9 | id = Column(Integer, primary_key=True) 10 | fullname = Column(String) 11 | password = Column(String) 12 | email = Column(String) -------------------------------------------------------------------------------- /examples/tsmodels.py: -------------------------------------------------------------------------------- 1 | from stdnet import odm 2 | from stdnet.utils import encoders, todatetime, todate, missing_intervals 3 | from stdnet.apps.columnts import ColumnTSField 4 | 5 | 6 | class TimeSeries(odm.StdModel): 7 | ticker = odm.SymbolField(unique=True) 8 | data = odm.TimeSeriesField() 9 | 10 | def todate(self, v): 11 | return todatetime(v) 12 | 13 | def dates(self): 14 | return self.data 15 | 16 | def items(self): 17 | return self.data.items() 18 | 19 | def __get_start(self): 20 | r = self.data.front() 21 | if r: 22 | return r[0] 23 | data_start = property(__get_start) 24 | 25 | def __get_end(self): 26 | r = self.data.back() 27 | if r: 28 | return r[0] 29 | data_end = 
property(__get_end) 30 | 31 | def size(self): 32 | '''number of dates in timeseries''' 33 | return self.data.size() 34 | 35 | def intervals(self, startdate, enddate, parseinterval=None): 36 | '''Given ``startdate`` and ``enddate`` dates, evaluate the 37 | date intervals for which data is not available. It returns a list 38 | of two-element tuples containing the start and end date of each 39 | interval. The list can contain 0, 1 or 2 tuples.''' 40 | return missing_intervals(startdate, enddate, self.data_start, 41 | self.data_end, dateconverter=self.todate, 42 | parseinterval=parseinterval) 43 | 44 | 45 | class DateTimeSeries(TimeSeries): 46 | data = odm.TimeSeriesField(pickler=encoders.DateConverter()) 47 | 48 | def todate(self, v): 49 | return todate(v) 50 | 51 | 52 | class BigTimeSeries(DateTimeSeries): 53 | data = odm.TimeSeriesField(pickler=encoders.DateConverter(), 54 | value_pickler=encoders.PythonPickle()) 55 | 56 | 57 | class ColumnTimeSeries(odm.StdModel): 58 | ticker = odm.SymbolField(unique=True) 59 | data = ColumnTSField() 60 | -------------------------------------------------------------------------------- /examples/wordsearch/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lsbardel/python-stdnet/78db5320bdedc3f28c5e4f38cda13a4469e35db7/examples/wordsearch/__init__.py -------------------------------------------------------------------------------- /examples/wordsearch/models.py: -------------------------------------------------------------------------------- 1 | from stdnet import odm 2 | 3 | 4 | class RelatedItem(odm.StdModel): 5 | name = odm.SymbolField() 6 | 7 | 8 | class Item(odm.StdModel): 9 | name = odm.SymbolField() 10 | content = odm.CharField() 11 | counter = odm.IntegerField() 12 | related = odm.ForeignKey(RelatedItem, required=False) 13 | secret = odm.CharField(hidden=True) 14 | -------------------------------------------------------------------------------- /luatests.lua: -------------------------------------------------------------------------------- 1 | -- TEST SUITE FOR LUA SCRIPTS 2 | -- Requires lunatest from https://github.com/silentbicycle/lunatest 3 | -- To run tests simply 4 | -- lua luatests.lua 5 | -- 6 | package.path = package.path .. 
";stdnet/lib/lua/?.lua;tests/lua/?.lua" 7 | -- To run the debugger in eclipse you need to install the DBGp Client 8 | -- http://wiki.eclipse.org/Koneki/LDT/User_Guide/Concepts/Debugger#Source_Mapping 9 | -- Then create a lua project where to run the debug server 10 | pcall(function() require("debugger")() end) 11 | 12 | require("lunatest") 13 | print '==============================' 14 | print('To run just some tests, add "-t [pattern]"') 15 | print '==============================' 16 | 17 | lunatest.suite("tests/lua/utils") 18 | lunatest.suite("tests/lua/odm") 19 | lunatest.suite("tests/lua/columnts") 20 | 21 | 22 | lunatest.run() -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | redis -------------------------------------------------------------------------------- /requirements_dev.txt: -------------------------------------------------------------------------------- 1 | redis 2 | pep8 3 | cython 4 | coverage 5 | mock 6 | pulsar==0.7.4 7 | -------------------------------------------------------------------------------- /runtests.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | '''Stdnet asynchronous test suite. Requires pulsar.''' 3 | import sys 4 | import os 5 | from multiprocessing import current_process 6 | 7 | ## This is for dev environment with pulsar and dynts. 8 | ## If not available, some tests won't run 9 | p = os.path 10 | dir = p.dirname(p.dirname(p.abspath(__file__))) 11 | try: 12 | import pulsar 13 | except ImportError: 14 | pdir = p.join(dir, 'pulsar') 15 | if os.path.isdir(pdir): 16 | sys.path.append(pdir) 17 | import pulsar 18 | from pulsar.apps.test import TestSuite 19 | from pulsar.apps.test.plugins import bench, profile 20 | from pulsar.utils.path import Path 21 | # 22 | try: 23 | import dynts 24 | except ImportError: 25 | pdir = p.join(dir, 'dynts') 26 | if os.path.isdir(pdir): 27 | sys.path.append(pdir) 28 | try: 29 | import dynts 30 | except ImportError: 31 | pass 32 | 33 | 34 | def run(**params): 35 | args = params.get('argv', sys.argv) 36 | if '--coverage' in args or params.get('coverage'): 37 | import coverage 38 | p = current_process() 39 | p._coverage = coverage.coverage(data_suffix=True) 40 | p._coverage.start() 41 | runtests(**params) 42 | 43 | 44 | def runtests(**params): 45 | import stdnet 46 | from stdnet.utils import test 47 | # 48 | strip_dirs = [Path(stdnet.__file__).parent.parent, os.getcwd()] 49 | # 50 | suite = TestSuite(description='Stdnet Asynchronous test suite', 51 | modules=('tests.all',), 52 | plugins=(test.StdnetPlugin(), 53 | bench.BenchMark(), 54 | profile.Profile()), 55 | **params) 56 | suite.bind_event('tests', test.create_tests) 57 | suite.start() 58 | # 59 | if suite.cfg.coveralls: 60 | from pulsar.utils.cov import coveralls 61 | coveralls(strip_dirs=strip_dirs, 62 | stream=suite.stream, 63 | repo_token='ZQinNe5XNbzQ44xYGTljP8R89jrQ5xTKB') 64 | 65 | 66 | if __name__ == '__main__': 67 | run() 68 | -------------------------------------------------------------------------------- /stdnet/__init__.py: -------------------------------------------------------------------------------- 1 | '''Object data mapper and advanced query manager for non relational 2 | databases. 
3 | ''' 4 | from .utils.exceptions import * 5 | from .utils.version import get_version, stdnet_version 6 | from .backends import * 7 | 8 | VERSION = stdnet_version(0, 9, 0, 'alpha', 3) 9 | 10 | 11 | __version__ = version = get_version(VERSION) 12 | __license__ = "BSD" 13 | __author__ = "Luca Sbardella" 14 | __contact__ = "luca.sbardella@gmail.com" 15 | __homepage__ = "https://github.com/lsbardel/python-stdnet" 16 | CLASSIFIERS = ['Development Status :: 4 - Beta', 17 | 'Environment :: Plugins', 18 | 'Environment :: Console', 19 | 'Environment :: Web Environment', 20 | 'Intended Audience :: Developers', 21 | 'License :: OSI Approved :: BSD License', 22 | 'Operating System :: OS Independent', 23 | 'Programming Language :: Python', 24 | 'Programming Language :: Python :: 2', 25 | 'Programming Language :: Python :: 2.6', 26 | 'Programming Language :: Python :: 2.7', 27 | 'Programming Language :: Python :: 3', 28 | 'Programming Language :: Python :: 3.2', 29 | 'Programming Language :: Python :: 3.3', 30 | 'Programming Language :: Python :: Implementation :: PyPy', 31 | 'Topic :: Utilities', 32 | 'Topic :: Database', 33 | 'Topic :: Internet' 34 | ] 35 | -------------------------------------------------------------------------------- /stdnet/apps/__init__.py: -------------------------------------------------------------------------------- 1 | '''\ 2 | Collection of applications which may be relevant or not to the user. 3 | They show off the main features of the core library. 4 | ''' 5 | -------------------------------------------------------------------------------- /stdnet/apps/columnts/__init__.py: -------------------------------------------------------------------------------- 1 | '''\ 2 | **backends**: :ref:`Redis `. 3 | 4 | An application which implements a specialised remote 5 | :class:`stdnet.odm.Structure` for managing numeric multivariate 6 | timeseries_ and perform remote analysis on them. The main classes 7 | for this application are :class:`ColumnTS`, the stand alone data structure, 8 | and the correspondent :class:`ColumnTSField` which can be used as 9 | a :class:`stdnet.odm.StructureField` on a :class:`stdnet.odm.StdModel`. 10 | 11 | 12 | The API is straightforward:: 13 | 14 | from datetime import date 15 | from stdnet.apps.columnts import ColumnTS 16 | 17 | ts = ColumnTS(id='test') 18 | ts.add(date(2012,2,21), {'open': 603.87, 'close': 614.00}) 19 | 20 | It can also be used as a :ref:`datastructure fields `. 21 | For example:: 22 | 23 | from stdnet import odm 24 | from stdnet.apps.columnts import ColumnTSField 25 | 26 | class Ticker(odm.StdModel): 27 | code = odm.SymbolField() 28 | data = ColumnTSField() 29 | 30 | 31 | Statistical Analysis 32 | ================================= 33 | 34 | istats & stats 35 | ~~~~~~~~~~~~~~~~~ 36 | 37 | These two methods execute statistical analysis on the data stored in one 38 | :class:`ColumnTS`. The :class:`ColumnTS.istats` method performs analysis by 39 | selecting time range by rank, while :class:`ColumnTS.stats` method performs 40 | analysis by selecting ranges by a *start* and an *end* date (or datetime). 41 | 42 | multi 43 | ~~~~~~~ 44 | 45 | 46 | evaluate 47 | ~~~~~~~~~~~~~~~ 48 | To perform analysis you write lua scripts:: 49 | 50 | self:range() 51 | 52 | ts.evaluate(script) 53 | 54 | API 55 | ====== 56 | 57 | ColumnTS 58 | ~~~~~~~~~~~~~~ 59 | 60 | .. autoclass:: ColumnTS 61 | :members: 62 | :member-order: bysource 63 | 64 | 65 | ColumnTSField 66 | ~~~~~~~~~~~~~~ 67 | 68 | .. 
autoclass:: ColumnTSField 69 | :members: 70 | :member-order: bysource 71 | 72 | 73 | Redis Implementation 74 | ======================== 75 | The implementation uses several redis structures for a given 76 | :class:`ColumnTS` instance. 77 | 78 | * A *zset* for holding times in an ordered fashion. 79 | * A *set* for holding *fields* names, obtained via the :meth:`ColumnTS.fields` 80 | method. 81 | * A *string* for each *field* to hold numeric values. 82 | 83 | This composite data-structure looks and feels like a redis zset. 84 | However, the ordered set doesn't actually store the data, it is there to 85 | maintain order and facilitate retrieval by times (scores) and rank. 86 | 87 | For a given *field*, the data is stored in a sequence of 9-bytes string 88 | with the initial byte (``byte0``) indicating the type of data:: 89 | 90 | 91 | 92 | 93 | 94 | .. _timeseries: http://en.wikipedia.org/wiki/Time_series 95 | ''' 96 | from . import redis 97 | from .models import * 98 | -------------------------------------------------------------------------------- /stdnet/apps/columnts/npts.py: -------------------------------------------------------------------------------- 1 | '''Experimental! 2 | This is an experimental module for converting ColumnTS into 3 | dynts.timeseries. It requires dynts_. 4 | 5 | .. _dynts: https://github.com/quantmind/dynts 6 | ''' 7 | from collections import Mapping 8 | 9 | from . import models as columnts 10 | 11 | import numpy as ny 12 | 13 | from dynts import timeseries, tsname 14 | 15 | 16 | class ColumnTS(columnts.ColumnTS): 17 | '''Integrate stdnet timeseries with dynts_ TimeSeries''' 18 | 19 | def front(self, *fields): 20 | '''Return the front pair of the structure''' 21 | ts = self.irange(0, 0, fields=fields) 22 | if ts: 23 | return ts.start(), ts[0] 24 | 25 | def back(self, *fields): 26 | '''Return the back pair of the structure''' 27 | ts = self.irange(-1, -1, fields=fields) 28 | if ts: 29 | return ts.end(), ts[0] 30 | 31 | def load_data(self, result): 32 | loads = self.pickler.loads 33 | vloads = self.value_pickler.loads 34 | dt, va = result 35 | if result[0] and va: 36 | dates = ny.array([loads(t) for t in dt]) 37 | fields = [] 38 | vals = [] 39 | if not isinstance(va, Mapping): 40 | va = dict(va) 41 | for f in sorted(va): 42 | fields.append(f) 43 | data = va[f] 44 | vals.append((vloads(v) for v in data)) 45 | values = ny.array(list(zip(*vals))) 46 | name = tsname(*fields) 47 | else: 48 | name = None 49 | dates = None 50 | values = None 51 | return timeseries(name=name, date=dates, data=values) 52 | 53 | def _get(self, result): 54 | ts = self.load_data(result) 55 | return ts[0] 56 | 57 | 58 | class ColumnTSField(columnts.ColumnTSField): 59 | 60 | def structure_class(self): 61 | return ColumnTS 62 | -------------------------------------------------------------------------------- /stdnet/apps/searchengine/models.py: -------------------------------------------------------------------------------- 1 | '''\ 2 | Search Engine and Tagging models. Just two of them, one for storing Words and 3 | one for linking other objects to Words. 
4 | ''' 5 | from inspect import isclass 6 | 7 | from stdnet import odm 8 | 9 | 10 | class WordItemManager(odm.Manager): 11 | 12 | def for_model(self, model): 13 | q = self.query() 14 | if not isclass(model): 15 | return q.filter(model_type=model.__class__, object_id=model.id) 16 | else: 17 | return q.filter(model_type=model) 18 | 19 | 20 | class WordItem(odm.StdModel): 21 | '''A model for associating a word with general 22 | :class:`stdnet.odm.StdModel` instance.''' 23 | id = odm.CompositeIdField('word', 'model_type', 'object_id') 24 | word = odm.SymbolField() 25 | model_type = odm.ModelField() 26 | object_id = odm.SymbolField() 27 | 28 | def __unicode__(self): 29 | return self.word 30 | 31 | manager_class = WordItemManager 32 | 33 | class Meta: 34 | ordering = -odm.autoincrement() 35 | 36 | def object(self, session): 37 | '''Instance of :attr:`model_type` with id :attr:`object_id`.''' 38 | if not hasattr(self, '_object'): 39 | pkname = self.model_type._meta.pkname() 40 | query = session.query(self.model_type).filter(**{pkname: 41 | self.object_id}) 42 | return query.items(callback=self.__set_object) 43 | else: 44 | return self._object 45 | 46 | def __set_object(self, items): 47 | try: 48 | self._object = self.get_unique_instance(items) 49 | except self.DoesNotExist: 50 | self._object = None 51 | return self._object 52 | -------------------------------------------------------------------------------- /stdnet/apps/searchengine/processors/__init__.py: -------------------------------------------------------------------------------- 1 | from .ignore import STOP_WORDS, PUNCTUATION_CHARS 2 | from .metaphone import dm as double_metaphone 3 | from .porter import PorterStemmer 4 | 5 | 6 | class stopwords: 7 | 8 | def __init__(self, stp=None): 9 | self.stp = stp if stp is not None else STOP_WORDS 10 | 11 | def __call__(self, words): 12 | stp = self.stp 13 | for word in words: 14 | if word not in stp: 15 | yield word 16 | 17 | 18 | def metaphone_processor(words): 19 | '''Double metaphone word processor.''' 20 | for word in words: 21 | for w in double_metaphone(word): 22 | if w: 23 | w = w.strip() 24 | if w: 25 | yield w 26 | 27 | 28 | def tolerant_metaphone_processor(words): 29 | '''Double metaphone word processor slightly modified so that when no 30 | words are returned by the algorithm, the original word is returned.''' 31 | for word in words: 32 | r = 0 33 | for w in double_metaphone(word): 34 | if w: 35 | w = w.strip() 36 | if w: 37 | r += 1 38 | yield w 39 | if not r: 40 | yield word 41 | 42 | 43 | def stemming_processor(words): 44 | '''Porter Stemmer word processor''' 45 | stem = PorterStemmer().stem 46 | for word in words: 47 | word = stem(word, 0, len(word)-1) 48 | yield word 49 | -------------------------------------------------------------------------------- /stdnet/apps/searchengine/processors/ignore.py: -------------------------------------------------------------------------------- 1 | __test__ = False 2 | # from 3 | # http://www.textfixer.com/resources/common-english-words.txt 4 | STOP_WORDS = set('''a,able,about,across,after,all,almost,also,am,among,an,and,\ 5 | any,are,as,at,be,because,been,but,by,can,cannot,could,dear,did,do,does,either,\ 6 | else,ever,every,for,from,get,got,had,has,have,he,her,hers,him,his,how,however,\ 7 | i,if,in,into,is,it,its,just,least,let,like,likely,may,me,might,most,must,my,\ 8 | neither,no,nor,not,of,off,often,on,only,or,other,our,own,rather,said,say,says,\ 9 | she,should,since,so,some,than,that,the,their,them,then,there,these,they,this,\ 10 | 
tis,to,too,twas,us,wants,was,we,were,what,when,where,which,while,who,whom,\ 11 | why,will,with,would,yet,you,your 12 | '''.split(',')) 13 | 14 | 15 | ALPHABET = 'abcdefghijklmnopqrstuvwxyz' 16 | NUMBERS = '0123456789' 17 | ALPHA_NUMERIC = ALPHABET+NUMBERS 18 | 19 | # Consider these characters to be punctuation 20 | # they will be replaced with spaces prior to word extraction 21 | PUNCTUATION_CHARS = ",.;:'@~#[]{}`!$%^&*()_-+" 22 | -------------------------------------------------------------------------------- /stdnet/apps/tasks/__init__.py: -------------------------------------------------------------------------------- 1 | from pulsar.apps import data 2 | from pulsar.apps import tasks 3 | 4 | from .models import TaskData 5 | 6 | 7 | class Store(data.Store): 8 | pass 9 | 10 | 11 | class TaskBackend(tasks.TaskBackend): 12 | 13 | def get_task(self, task_id=None, timeout=1): 14 | task_manager = self.task_manager() 15 | # 16 | if not task_id: 17 | task_id = yield task_manager.queue.block_pop_front(timeout=timeout) 18 | if task_id: 19 | task_data = yield self._get_task(task_id) 20 | if task_data: 21 | yield task_data.as_task() 22 | 23 | 24 | tasks.task_backends['stdnet'] = TaskBackend 25 | 26 | 27 | data.register_store('redis', 'stdnet.apps.tasks.Store') 28 | -------------------------------------------------------------------------------- /stdnet/apps/tasks/models.py: -------------------------------------------------------------------------------- 1 | 2 | from stdnet import odm 3 | 4 | 5 | class TaskData(odm.StdModel): 6 | id = odm.SymbolField(primary_key=True) 7 | overlap_id = odm.SymbolField(required=False) 8 | name = odm.SymbolField() 9 | status = odm.SymbolField() 10 | args = odm.PickleObjectField() 11 | kwargs = odm.PickleObjectField() 12 | result = odm.PickleObjectField() 13 | from_task = odm.SymbolField(required=False) 14 | time_executed = odm.DateTimeField(index=False) 15 | time_started = odm.DateTimeField(required=False, index=False) 16 | time_ended = odm.DateTimeField(required=False, index=False) 17 | expiry = odm.DateTimeField(required=False, index=False) 18 | meta = odm.JSONField() 19 | # 20 | # List where all TaskData ids are queued 21 | queue = odm.ListField(class_field=True) 22 | # Set where TaskData ids under execution are stored 23 | executing = odm.SetField(class_field=True) 24 | 25 | class Meta: 26 | app_label = 'tasks' 27 | 28 | def as_task(self): 29 | params = dict(self.meta or {}) 30 | for field in self._meta.scalarfields: 31 | params[field.name] = getattr(self, field.attname, None) 32 | return backends.Task(self.id, **params) 33 | 34 | def __unicode__(self): 35 | return '%s (%s)' % (self.name, self.status) 36 | -------------------------------------------------------------------------------- /stdnet/backends/redisb/client/__init__.py: -------------------------------------------------------------------------------- 1 | try: 2 | from . import async 3 | except ImportError: 4 | async = None 5 | 6 | from .extensions import (RedisScript, read_lua_file, redis, get_script, 7 | RedisDb, RedisKey, RedisDataFormatter) 8 | from .client import Redis 9 | 10 | RedisError = redis.RedisError 11 | 12 | __all__ = ['redis_client', 'RedisScript', 'read_lua_file', 'RedisError', 13 | 'RedisDb', 'RedisKey', 'RedisDataFormatter', 'get_script'] 14 | 15 | 16 | def redis_client(address=None, connection_pool=None, timeout=None, 17 | parser=None, **kwargs): 18 | '''Get a new redis client. 19 | 20 | :param address: a ``host``, ``port`` tuple. 21 | :param connection_pool: optional connection pool. 
22 | :param timeout: socket timeout. 23 | :param timeout: socket timeout. 24 | ''' 25 | if not connection_pool: 26 | if timeout == 0: 27 | if not async: 28 | raise ImportError('Asynchronous connection requires async ' 29 | 'bindings installed.') 30 | return async.pool.redis(address, **kwargs) 31 | else: 32 | kwargs['socket_timeout'] = timeout 33 | return Redis(address[0], address[1], **kwargs) 34 | else: 35 | return Redis(connection_pool=connection_pool) 36 | -------------------------------------------------------------------------------- /stdnet/backends/redisb/client/async.py: -------------------------------------------------------------------------------- 1 | '''The :mod:`stdnet.backends.redisb.async` module implements an asynchronous 2 | connector for redis-py_. It uses pulsar_ asynchronous framework. 3 | To use this connector, 4 | add ``timeout=0`` to redis :ref:`connection string `:: 5 | 6 | 'redis://127.0.0.1:6378?password=bla&timeout=0' 7 | 8 | Usage:: 9 | 10 | from stdnet import getdb 11 | 12 | db = getdb('redis://127.0.0.1:6378?password=bla&timeout=0') 13 | 14 | ''' 15 | from pulsar.apps import redis 16 | from pulsar.apps.redis.client import BasePipeline 17 | 18 | from .extensions import (RedisExtensionsMixin, get_script, RedisError, 19 | all_loaded_scripts) 20 | from .prefixed import PrefixedRedisMixin 21 | 22 | 23 | class Redis(RedisExtensionsMixin, redis.Redis): 24 | 25 | @property 26 | def is_async(self): 27 | return True 28 | 29 | def address(self): 30 | return self.connection_info[0] 31 | 32 | def prefixed(self, prefix): 33 | '''Return a new :class:`PrefixedRedis` client. 34 | ''' 35 | return PrefixedRedis(self, prefix) 36 | 37 | def pipeline(self, transaction=True, shard_hint=None): 38 | return Pipeline(self, self.response_callbacks, transaction, shard_hint) 39 | 40 | def execute_script(self, name, keys, *args, **options): 41 | '''Execute a script. 42 | 43 | makes sure all required scripts are loaded. 44 | ''' 45 | script = get_script(name) 46 | if not script: 47 | raise redis.RedisError('No such script "%s"' % name) 48 | address = self.address() 49 | if address not in all_loaded_scripts: 50 | all_loaded_scripts[address] = set() 51 | loaded = all_loaded_scripts[address] 52 | toload = script.required_scripts.difference(loaded) 53 | for name in toload: 54 | s = get_script(name) 55 | yield self.script_load(s.script) 56 | loaded.update(toload) 57 | yield script(self, keys, args, options) 58 | 59 | 60 | class PrefixedRedis(PrefixedRedisMixin, Redis): 61 | pass 62 | 63 | 64 | class Pipeline(BasePipeline, Redis): 65 | 66 | def execute_script(self, name, keys, *args, **options): 67 | '''Execute a script. 68 | 69 | makes sure all required scripts are loaded. 
70 | ''' 71 | script = get_script(name) 72 | if not script: 73 | raise redis.RedisError('No such script "%s"' % name) 74 | address = self.address() 75 | if address not in all_loaded_scripts: 76 | all_loaded_scripts[address] = set() 77 | loaded = all_loaded_scripts[address] 78 | toload = script.required_scripts.difference(loaded) 79 | for name in toload: 80 | s = get_script(name) 81 | self.script_load(s.script) 82 | loaded.update(toload) 83 | return script(self, keys, args, options) 84 | 85 | 86 | class RedisPool(redis.RedisPool): 87 | 88 | def redis(self, address, db=0, password=None, timeout=None, **kw): 89 | timeout = int(timeout or self.timeout) 90 | info = redis.connection_info(address, db, password, timeout) 91 | return Redis(self, info, **kw) 92 | 93 | 94 | pool = RedisPool() 95 | -------------------------------------------------------------------------------- /stdnet/backends/redisb/client/client.py: -------------------------------------------------------------------------------- 1 | '''The :mod:`stdnet.backends.redisb.client` implements several extensions 2 | to the standard redis client in redis-py_ 3 | 4 | 5 | Client 6 | ~~~~~~~~~~~~~~ 7 | 8 | .. autoclass:: Redis 9 | :members: 10 | :member-order: bysource 11 | 12 | Prefixed Client 13 | ~~~~~~~~~~~~~~~~~~ 14 | 15 | .. autoclass:: PrefixedRedis 16 | :members: 17 | :member-order: bysource 18 | 19 | RedisScript 20 | ~~~~~~~~~~~~~~~ 21 | 22 | .. autoclass:: RedisScript 23 | :members: 24 | :member-order: bysource 25 | 26 | ''' 27 | import os 28 | import io 29 | import socket 30 | from copy import copy 31 | 32 | from .extensions import RedisExtensionsMixin, redis, BasePipeline 33 | from .prefixed import PrefixedRedisMixin 34 | 35 | 36 | class Redis(RedisExtensionsMixin, redis.StrictRedis): 37 | 38 | @property 39 | def encoding(self): 40 | return self.connection_pool.connection_kwargs.get('encoding', 'utf-8') 41 | 42 | def address(self): 43 | kw = self.connection_pool.connection_kwargs 44 | return (kw['host'], kw['port']) 45 | 46 | def prefixed(self, prefix): 47 | '''Return a new :class:`PrefixedRedis` client. 
48 | ''' 49 | return PrefixedRedis(self, prefix) 50 | 51 | def pipeline(self, transaction=True, shard_hint=None): 52 | return Pipeline( 53 | self, 54 | transaction, 55 | shard_hint) 56 | 57 | 58 | class PrefixedRedis(PrefixedRedisMixin, Redis): 59 | pass 60 | 61 | 62 | class Pipeline(BasePipeline, Redis): 63 | 64 | def __init__(self, client, transaction, shard_hint): 65 | self.client = client 66 | self.response_callbacks = client.response_callbacks 67 | self.transaction = transaction 68 | self.shard_hint = shard_hint 69 | self.watching = False 70 | self.connection = None 71 | self.reset() 72 | 73 | @property 74 | def connection_pool(self): 75 | return self.client.connection_pool 76 | 77 | @property 78 | def is_pipeline(self): 79 | return True 80 | -------------------------------------------------------------------------------- /stdnet/backends/redisb/lua/columnts/reduce.lua: -------------------------------------------------------------------------------- 1 | local command = ARGV[1] 2 | local start = ARGV[2] 3 | local stop = ARGV[3] 4 | local points = math.max(ARGV[4] + 0, 2) 5 | local method = ARGV[5] -- One of 'mean', 'geometric', 'ma' 6 | local alpha = ARGV[6] + 0 -- paramether for moving avaerage reduction 7 | local num_fields = ARGV[7] 8 | local fields = tabletools.slice(ARGV, 8, -1) 9 | local ts = columnts:new(KEYS[1]) 10 | local time,values = unpack(ts:range(command, start, stop, fields)) 11 | local N = # times 12 | if N < points then 13 | return {times,values} 14 | else 15 | local space = math.floor(N/points) -- spacing for reduction 16 | while N/space > points do 17 | space = space + 1 18 | end 19 | local rtime, reduced = {}, {} 20 | for field,value do 21 | local index, stop, fvalues = 1, 0, {} 22 | table.insert(reduced,field) 23 | table.insert(reduced,fvalues) 24 | while stop <= N do 25 | start = stop + 1 26 | stop = N - (points-index)*space 27 | rtime[index] = time[stop] 28 | if method == 'mean' then 29 | fvalues[index] = reduce_mean(value,start,stop) 30 | elseif method == 'geometric' then 31 | fvalues[index] = reduce_geo(value,start,stop) 32 | elseif method == 'ma' then 33 | fvalues[index] = reduce_ma(value,start,stop,alpha) 34 | else 35 | fvalues[index] = value[stop] 36 | end 37 | index = index + 1 38 | end 39 | end 40 | return {rtime, reduced} 41 | end 42 | 43 | 44 | -------------------------------------------------------------------------------- /stdnet/backends/redisb/lua/commands/keyinfo.lua: -------------------------------------------------------------------------------- 1 | -- Retrieve information about keys 2 | -- A list of keys, sorted in alphabetical order, is returned 3 | local start, stop, keys, num 4 | if # ARGV > 0 then -- If argv is provided, it is the pattern to search 5 | keys = redis.call('KEYS', ARGV[1]) 6 | if # ARGV > 1 then 7 | start = ARGV[2] + 0 8 | num = ARGV[3] + 0 9 | else 10 | start = 1 11 | num = # keys 12 | end 13 | elseif # KEYS > 0 then -- If keys are provided, use them 14 | keys = KEYS 15 | start = 1 16 | num = # keys 17 | else -- Nothing to do 18 | return {} 19 | end 20 | local type_table = {} 21 | type_table['set'] = 'scard' 22 | type_table['zset'] = 'zcard' 23 | type_table['list'] = 'llen' 24 | type_table['hash'] = 'hlen' 25 | type_table['ts'] = 'tslen' -- stdnet branch 26 | type_table['string'] = 'strlen' 27 | local typ, command, len, key, idletime 28 | local stats = {} 29 | local num_keys = # keys 30 | local j = 0 31 | while j < num and start+j <= num_keys do 32 | key = keys[start+j] 33 | j = j + 1 34 | idletime = 
redis.call('object','idletime',key) 35 | typ = redis.call('type',key)['ok'] 36 | command = type_table[typ] 37 | len = 0 38 | if command then 39 | len = len + redis.call(command, key) 40 | end 41 | stats[j] = {key,typ,len,redis.call('ttl',key), 42 | redis.call('object','encoding',key), 43 | idletime} 44 | end 45 | return stats -------------------------------------------------------------------------------- /stdnet/backends/redisb/lua/commands/move2set.lua: -------------------------------------------------------------------------------- 1 | -- Move a list of keys to sets or zsets 2 | local N = # KEYS 3 | local moved = 0 4 | local s = ARGV[1] 5 | if N > 0 then 6 | local typ = 'set' 7 | if s == 'z' then 8 | typ = 'zset' 9 | end 10 | local n = 0 11 | while n < N do 12 | n = n + 1 13 | local key = KEYS[n] 14 | if redis_type(key) ~= typ then 15 | local vals = redis_members(key) 16 | local timeout = redis.call('ttl', key) + 0 17 | redis.call('del',key) 18 | moved = moved + 1 19 | if typ == 'set' then 20 | for i,m in pairs(vals) do 21 | redis.call('sadd', key, m) 22 | end 23 | else 24 | for i,m in pairs(vals) do 25 | redis.call('zadd', key, 0, m) 26 | end 27 | end 28 | if timeout > 0 then 29 | redis.call('expire',key,timeout) 30 | end 31 | end 32 | end 33 | end 34 | 35 | return {N, moved} -------------------------------------------------------------------------------- /stdnet/backends/redisb/lua/commands/utils.lua: -------------------------------------------------------------------------------- 1 | -- Collection of utilities used across scripts. 2 | -- Included in all scripts 3 | -- SCRIPT_START_TIME = os.clock() 4 | local type_table = {} 5 | type_table['set'] = 'scard' 6 | type_table['zset'] = 'zcard' 7 | type_table['list'] = 'llen' 8 | type_table['hash'] = 'hlen' 9 | type_table['ts'] = 'tslen' 10 | type_table['string'] = 'strlen' 11 | 12 | local function redis_result(result) 13 | return {result,os.clock() - SCRIPT_START_TIME} 14 | end 15 | 16 | 17 | local function redis_type(key) 18 | return redis.call('type', key)['ok'] 19 | end 20 | 21 | -- The length of any structure in redis 22 | local function redis_len(key) 23 | local command = type_table[redis_type(key)] 24 | if command then 25 | return redis.call(command, key) + 0 26 | else 27 | return 0 28 | end 29 | end 30 | 31 | -- Create a unique random key 32 | local function redis_randomkey(prefix) 33 | local rnd_key = prefix .. ':tmp:' .. math.random(1,100000000) 34 | if redis.call('exists', rnd_key) + 0 == 1 then 35 | return randomkey() 36 | else 37 | return rnd_key 38 | end 39 | end 40 | 41 | -- table of all members at key. 42 | -- If the key is a string returns an empty table 43 | -- If an argumnet is passed with value true all elements of the structure are returned. 
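-- (for a zset with *all* the reply alternates member and score; for a hash with
-- *all* it alternates field and value, mirroring the raw redis reply layout)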
44 | local function redis_members(key, all, typ) 45 | if not typ then 46 | typ = redis.call('type',key)['ok'] 47 | end 48 | if typ == 'set' then 49 | return redis.call('smembers', key) 50 | elseif typ == 'zset' then 51 | if all then 52 | return redis.call('zrange', key, 0, -1, 'withscores') 53 | else 54 | return redis.call('zrange', key, 0, -1) 55 | end 56 | elseif typ == 'list' then 57 | return redis.call('lrange', key, 0, -1) 58 | elseif typ == 'hash' then 59 | if all then 60 | return redis.call('hgetall', key) 61 | else 62 | return redis.call('hkeys', key) 63 | end 64 | elseif typ == 'ts' then 65 | return timeseries.call('irange', key, 0, -1) 66 | else 67 | return {} 68 | end 69 | end 70 | 71 | -- delete keys from a table 72 | local function redis_delete(keys) 73 | local n = table.getn(keys) 74 | if n > 0 then 75 | return redis.call('del', unpack(keys)) + 0 76 | end 77 | return n 78 | end 79 | -------------------------------------------------------------------------------- /stdnet/backends/redisb/lua/commands/zdiffstore.lua: -------------------------------------------------------------------------------- 1 | -- Implements the ZDIFFSTORE command 2 | local dest = KEYS[1] 3 | local withscores = ARGV[1] 4 | local key = KEYS[2] 5 | if dest ~= key then -- Small optimization which takes care of the -= operation 6 | redis.call('del', dest) 7 | local data = redis.call('zrange',key,0,-1,'WITHSCORES') 8 | local i = 0 9 | while i < # data do 10 | redis.call('zadd',dest,data[i+2],data[i+1]) 11 | i = i + 2 12 | end 13 | end 14 | local i = 2 15 | if withscores == 'withscores' then -- REMOVE ONLY IF SUBTRACTING SCORES IS EQUAL TO 0 16 | while i < # KEYS do 17 | local data = redis.call('zrange',KEYS[i+1],0,-1,'WITHSCORES') 18 | i = i + 1 19 | local j = 0 20 | while j < # data do 21 | local value, score = data[j+1], data[j+2] 22 | j = j + 2 23 | if redis.call('zscore', dest, value) then 24 | redis.call('zincrby', dest, -score, value) 25 | end 26 | end 27 | end 28 | redis.call('zremrangebyscore', dest, 0, 0) 29 | else -- REMOVE REGARDLESS OF SCORE 30 | while i < # KEYS do 31 | local data = redis.call('zrange',KEYS[i+1],0,-1) 32 | i = i + 1 33 | for _,value in pairs(data) do 34 | redis.call('zrem',dest,value) 35 | end 36 | end 37 | end 38 | return redis.call('zcard', dest) 39 | 40 | -------------------------------------------------------------------------------- /stdnet/backends/redisb/lua/commands/zpop.lua: -------------------------------------------------------------------------------- 1 | -- REDIS ZPOP command. 
you can pop one or more elements form the sorted set 2 | -- either by range or by score 3 | local key = KEYS[1] 4 | local start = ARGV[2] 5 | local stop = ARGV[3] 6 | local desc = ARGV[4] + 0 7 | local args = {start, stop} 8 | local range 9 | 10 | if ARGV[5] + 0 == 1 then 11 | args[3] = 'withscores' 12 | end 13 | 14 | -- POP by RANK 15 | if ARGV[1] == 'rank' then 16 | if desc == 1 then 17 | range = redis.call('ZREVRANGE', key, unpack(args)) 18 | else 19 | range = redis.call('ZRANGE', key, unpack(args)) 20 | end 21 | redis.call('ZREMRANGEBYRANK', key, start, stop) 22 | -- POP by SCORE 23 | else 24 | if desc == 1 then 25 | range = redis.call('ZREVRANGEBYSCORE', key, unpack(args)) 26 | else 27 | range = redis.call('ZRANGEBYSCORE', key, unpack(args)) 28 | end 29 | redis.call('ZREMRANGEBYSCORE', key, start, stop) 30 | end 31 | 32 | return range -------------------------------------------------------------------------------- /stdnet/backends/redisb/lua/numberarray.lua: -------------------------------------------------------------------------------- 1 | -- An array of numbers for redis-lua 2 | 3 | -- Not a number 4 | local nan = 0/0 5 | -- 8 bytes string for nil data 6 | local nildata = string.char(0,0,0,0,0,0,0,0) 7 | 8 | local array = { 9 | -- 10 | -- Initialize with key and optional initial size and value 11 | init = function (self, key, size, value) 12 | self.key = key 13 | self:resize(size, value) 14 | end, 15 | -- length of array 16 | length = function (self) 17 | return (redis.call('strlen', self.key) + 0)/8 18 | end, 19 | -- Allocate extra size for the array 20 | resize = function (self, size, value) 21 | if size then 22 | size = size + 0 23 | local length = self:length() 24 | if size > length then 25 | if value then 26 | value = self:pack(value) 27 | else 28 | value = nildata 29 | end 30 | value = string.rep(value,size-length) 31 | redis.call('setrange', self.key, 8*length, value) 32 | end 33 | end 34 | return self:length() 35 | end, 36 | -- 37 | get = function (self, index, packed) 38 | index = index + 0 39 | assert(index > 0 and index <= self:length(),"Out of bound.") 40 | local start = 8*(index - 1) 41 | local val = redis.call('getrange', self.key, start, start+7) 42 | if packed then 43 | return val 44 | else 45 | return self:unpack(val) 46 | end 47 | end, 48 | set = function(self, index, value, packed) 49 | index = index + 0 50 | assert(index > 0 and index <= self:length(),"Out of bound.") 51 | local start = 8*(index - 1) 52 | if packed then 53 | value = self:pack(value) 54 | end 55 | return redis.call('setrange', self.key, start, value) 56 | end, 57 | -- 58 | -- push_back 59 | push_back = function(self, value, packed) 60 | local start = 8*self:length() 61 | if not packed then 62 | value = self:pack(value) 63 | end 64 | redis.call('setrange', self.key, start, value) 65 | end, 66 | -- 67 | all_raw = function(self) 68 | local start 69 | local data = {} 70 | local i=0 71 | while i < self:length() do 72 | start = 8*i 73 | i = i + 1 74 | data[i] = redis.call('getrange', self.key, start, start+7) 75 | end 76 | return data 77 | end, 78 | -- 79 | -- Internal functions 80 | pack = function(self, value) 81 | return pack('>d',value) 82 | end, 83 | unpack = function(self, value) 84 | return unpack('>d',value) 85 | end 86 | } 87 | 88 | 89 | local columnts_meta = { 90 | __index = function(self,index) 91 | return self:get(index) 92 | end, 93 | __newindex = function(self,index) 94 | return self:set(index,value) 95 | end 96 | } 97 | -- Constructor 98 | function array:new(key) 99 | local result = {} 
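    -- copy the prototype's methods onto the new instance; numeric indexing
    -- then falls through to get/set via the metatable attached below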
100 | for k,v in pairs(array) do 101 | result[k] = v 102 | end 103 | result:init(key) 104 | return setmetatable(result, columnts_meta) 105 | end 106 | -------------------------------------------------------------------------------- /stdnet/backends/redisb/lua/structures.lua: -------------------------------------------------------------------------------- 1 | local types = {} 2 | for _, key in ipairs(KEYS) do 3 | table.insert(types, redis.call('type', key)) 4 | end 5 | return types -------------------------------------------------------------------------------- /stdnet/backends/redisb/lua/tabletools.lua: -------------------------------------------------------------------------------- 1 | local tabletools = {} 2 | 3 | -- Initialize an array of size *size* fill with *value* 4 | tabletools.init = function (size, value) 5 | local vector = {} 6 | for i = 1, size do 7 | vector[i] = value 8 | end 9 | return vector 10 | end 11 | 12 | -- Check if two arrays are equals 13 | tabletools.equal = function (v1, v2) 14 | if # v1 == # v2 then 15 | for i, v in ipairs(v1) do 16 | if v ~= v2[i] then 17 | return false 18 | end 19 | end 20 | return true 21 | else 22 | return false 23 | end 24 | end 25 | 26 | -- Slice a lua table between i1 and i2 27 | tabletools.slice = function (values, i1, i2) 28 | local res = {} 29 | local n = #values 30 | -- default values for range 31 | i1 = i1 or 1 32 | i2 = i2 or n 33 | if i2 < 0 then 34 | i2 = n + i2 + 1 35 | elseif i2 > n then 36 | i2 = n 37 | end 38 | if i1 < 1 or i1 > n then 39 | return {} 40 | end 41 | local k = 1 42 | for i = i1,i2 do 43 | res[k] = values[i] 44 | k = k + 1 45 | end 46 | return res 47 | end 48 | 49 | -- Convert a dictionary into a flat array. For example {bla = 'foo', planet = 'mars'} 50 | -- becomes {'bla', 'foo', 'planet', 'mars'} 51 | tabletools.flat = function (tbl) 52 | local result = {} 53 | for name,value in pairs(tbl) do 54 | table.insert(result,name) 55 | table.insert(result,value) 56 | end 57 | return result 58 | end 59 | 60 | tabletools.asdict = function (tbl) 61 | local result, key = {} 62 | for i, value in ipairs(tbl) do 63 | if 2*math.floor(i/2) == i then 64 | result[key] = value 65 | else 66 | key = value 67 | end 68 | end 69 | return result 70 | end 71 | 72 | tabletools.load_code = function(code, environment) 73 | if setfenv and loadstring then 74 | local f = assert(loadstring(code)) 75 | setfenv(f, environment) 76 | return f 77 | else 78 | return assert(load(code, nil,"t",environment)) 79 | end 80 | end 81 | 82 | tabletools.json_clean = function (meta) 83 | local m, t = {} 84 | for k, v in pairs(meta) do 85 | t = type(v) 86 | -- json return null as a function while cjson as userdata. In both 87 | -- cases we don't want the values. 88 | if t ~= 'function' and t ~= 'userdata' then 89 | if t == 'table' then 90 | v = tabletools.json_clean(v) 91 | end 92 | m[k] = v 93 | end 94 | end 95 | return m 96 | end 97 | 98 | -- Return the module only when this module is not in REDIS 99 | if not (KEYS and ARGV) then 100 | return tabletools 101 | end 102 | -------------------------------------------------------------------------------- /stdnet/backends/redisb/lua/ts.lua: -------------------------------------------------------------------------------- 1 | -- THE FIRST ARGUMENT IS THE COMMAND NAME 2 | if # ARGV == 0 then 3 | error('The first argument must be the name of the script. 
Got nothing.') 4 | end 5 | return timeseries.call_mult(ARGV[1], KEYS, unpack(tabletools.slice(ARGV, 2, -1))) 6 | -------------------------------------------------------------------------------- /stdnet/backends/redisb/lua/where.lua: -------------------------------------------------------------------------------- 1 | if redis then 2 | -- THE FIRST ARGUMENT IS THE NAME OF THE SCRIPT 3 | if # ARGV < 1 then 4 | error('Wrong number of arguments.') 5 | end 6 | if # KEYS < 2 then 7 | error('Wrong number of keys.') 8 | end 9 | local destkey, key = KEYS[1], KEYS[2] 10 | local meta = cjson.decode(ARGV[1]) 11 | local load_only 12 | local ids = redis.call('smembers', key) 13 | if destkey == key then 14 | redis.call('del', key) 15 | end 16 | if # ARGV == 2 then 17 | load_only = cjson.decode(ARGV[2]) 18 | end 19 | 20 | local function setnumber(this, name, field) 21 | this[name] = field + 0 22 | end 23 | 24 | for _, id in ipairs(ids) do 25 | local okey = meta.namespace .. ':obj:' .. id 26 | local this = {{}} 27 | if load_only == nil then 28 | local fields = redis.call('hgetall', okey) 29 | local name = nil 30 | for _, field in ipairs(fields) do 31 | if name == nil then 32 | name = field 33 | else 34 | if pcall(setnumber, this, name, field) == false then 35 | this[name] = field 36 | end 37 | name = nil 38 | end 39 | end 40 | else 41 | local fields = redis.call('hmget', okey, unpack(load_only)) 42 | for i, field in ipairs(fields) do 43 | local name = load_only[i] 44 | if pcall(setnumber, this, name, field) == false then 45 | this[name] = field 46 | end 47 | end 48 | end 49 | if {0[where_clause]} then 50 | redis.call('sadd', destkey, id) 51 | end 52 | end 53 | end -------------------------------------------------------------------------------- /stdnet/odm/__init__.py: -------------------------------------------------------------------------------- 1 | from .query import * 2 | from .session import * 3 | from .related import * 4 | from .fields import * 5 | from .base import * 6 | from .mapper import * 7 | from .models import * 8 | from .struct import * 9 | from .structfields import * 10 | from .globals import * 11 | from .utils import * 12 | from .search import * 13 | -------------------------------------------------------------------------------- /stdnet/odm/globals.py: -------------------------------------------------------------------------------- 1 | import hashlib 2 | from collections import namedtuple 3 | 4 | from stdnet.utils import to_bytes, JSPLITTER 5 | 6 | __all__ = ['get_model_from_hash', 7 | 'get_hash_from_model', 8 | 'hashmodel', 9 | 'JSPLITTER'] 10 | 11 | # Information about a lookup in a query 12 | lookup_value = namedtuple('lookup_value', 'lookup value') 13 | 14 | # Utilities for sorting and range lookups 15 | orderinginfo = namedtuple('orderinginfo', 'name field desc model nested auto') 16 | 17 | # attribute name, field, model where to do lookup, nested lookup_info 18 | range_lookup_info = namedtuple('range_lookup_info', 'name field model nested') 19 | 20 | 21 | class ModelDict(dict): 22 | 23 | def from_hash(self, hash): 24 | return self.get(hash) 25 | 26 | def to_hash(self, model): 27 | return model._meta.hash 28 | 29 | _model_dict = ModelDict() 30 | 31 | 32 | def get_model_from_hash(hash): 33 | return _model_dict.from_hash(hash) 34 | 35 | 36 | def get_hash_from_model(model): 37 | return _model_dict.to_hash(model) 38 | 39 | 40 | def hashmodel(model, library=None): 41 | '''Calculate the Hash id of metaclass ``meta``''' 42 | library = library or 'python-stdnet' 43 | meta = model._meta 44 | 
sha = hashlib.sha1(to_bytes('{0}({1})'.format(library, meta))) 45 | hash = sha.hexdigest()[:8] 46 | meta.hash = hash 47 | if hash in _model_dict: 48 | raise KeyError('Model "{0}" already in hash table.\ 49 | Rename your model or the module containing the model.'.format(meta)) 50 | _model_dict[hash] = model 51 | 52 | 53 | def _make_id(target): 54 | if hasattr(target, '__func__'): 55 | return (id(target.__self__), id(target.__func__)) 56 | return id(target) 57 | 58 | 59 | class Event: 60 | 61 | def __init__(self): 62 | self.callbacks = [] 63 | 64 | def bind(self, callback, sender=None): 65 | '''Bind a ``callback`` for a given ``sender``.''' 66 | key = (_make_id(callback), _make_id(sender)) 67 | self.callbacks.append((key, callback)) 68 | 69 | def fire(self, sender=None, **params): 70 | '''Fire callbacks from a ``sender``.''' 71 | keys = (_make_id(None), _make_id(sender)) 72 | results = [] 73 | for (_, key), callback in self.callbacks: 74 | if key in keys: 75 | results.append(callback(self, sender, **params)) 76 | return results 77 | 78 | def unbind(self, callback, sender=None): 79 | key = (_make_id(callback), _make_id(sender)) 80 | for index, key_cbk in enumerate(self.callbacks): 81 | if key == key_cbk[0]: 82 | del self.callbacks[index] 83 | break 84 | -------------------------------------------------------------------------------- /stdnet/utils/__init__.py: -------------------------------------------------------------------------------- 1 | from inspect import istraceback 2 | from itertools import chain 3 | from collections import Mapping 4 | from uuid import uuid4 5 | 6 | from .py2py3 import * 7 | 8 | if ispy3k: # pragma: no cover 9 | import pickle 10 | unichr = chr 11 | 12 | def raise_error_trace(err, traceback): 13 | if istraceback(traceback): 14 | raise err.with_traceback(traceback) 15 | else: 16 | raise err 17 | 18 | else: # pragma: no cover 19 | import cPickle as pickle 20 | unichr = unichr 21 | from .fallbacks.py2 import raise_error_trace 22 | 23 | from .jsontools import * 24 | from .populate import populate 25 | from .dates import * 26 | 27 | 28 | def gen_unique_id(short=True): 29 | id = str(uuid4()) 30 | if short: 31 | id = id[:8] 32 | return id 33 | 34 | 35 | def iterpair(iterable): 36 | if isinstance(iterable, Mapping): 37 | return iteritems(iterable) 38 | else: 39 | return iterable 40 | 41 | 42 | def int_or_float(v): 43 | v = float(v) 44 | i = int(v) 45 | return i if v == i else v 46 | 47 | 48 | def grouper(n, iterable, padvalue=None): 49 | '''grouper(3, 'abcdefg', 'x') --> ('a','b','c'), ('d','e','f'), 50 | ('g','x','x')''' 51 | return zip_longest(*[iter(iterable)]*n, fillvalue=padvalue) 52 | 53 | 54 | def _format_int(val): 55 | positive = val >= 0 56 | sval = ''.join(reversed(','.join(( 57 | ''.join(g) for g in grouper(3, reversed(str(abs(val))), ''))))) 58 | return sval if positive else '-'+sval 59 | 60 | 61 | def format_int(val): 62 | try: # for python 2.7 and up 63 | return '{:,}'.format(val) 64 | except ValueError: # pragma nocover 65 | _format_int(val) 66 | 67 | 68 | def flat_mapping(mapping): 69 | items = [] 70 | extend = items.extend 71 | for pair in iterpair(mapping): 72 | extend(pair) 73 | return items 74 | 75 | 76 | def _flat2d_gen(iterable): 77 | for v in iterable: 78 | yield v[0] 79 | yield v[1] 80 | 81 | 82 | def flat2d(iterable): 83 | if hasattr(iterable, '__len__'): 84 | return chain(*iterable) 85 | else: 86 | return _flat2d_gen(iterable) 87 | 88 | 89 | def _flatzsetdict(kwargs): 90 | for k, v in iteritems(kwargs): 91 | yield v 92 | yield k 93 | 94 | 95 | def 
flatzset(iterable=None, kwargs=None): 96 | if iterable: 97 | c = flat2d(iterable) 98 | if kwargs: 99 | c = chain(c, _flatzsetdict(kwargs)) 100 | elif kwargs: 101 | c = _flatzsetdict(kwargs) 102 | return tuple(c) 103 | 104 | 105 | def unique_tuple(*iterables): 106 | vals = [] 107 | for v in chain(*[it for it in iterables if it]): 108 | if v not in vals: 109 | vals.append(v) 110 | return tuple(vals) 111 | -------------------------------------------------------------------------------- /stdnet/utils/exceptions.py: -------------------------------------------------------------------------------- 1 | 2 | class StdNetException(Exception): 3 | '''A general StdNet exception''' 4 | pass 5 | 6 | 7 | class ConnectionError(StdNetException): 8 | pass 9 | 10 | 11 | class SessionNotAvailable(StdNetException): 12 | pass 13 | 14 | 15 | class ModelNotAvailable(StdNetException): 16 | pass 17 | 18 | 19 | class InvalidTransaction(StdNetException): 20 | '''A :class:`StdNetException` raised when trying to create a transaction 21 | with models registered with different backends.''' 22 | pass 23 | 24 | 25 | class ResponseError(StdNetException): 26 | '''Raised when an invalid response is returned from the backend server.''' 27 | pass 28 | 29 | 30 | class CommitException(ResponseError): 31 | '''A :class:`StdNetException` raised when trying to create a transaction 32 | with models registered with different backends.''' 33 | def __init__(self, msg, failures=1): 34 | self.failures = failures 35 | super(CommitException, self).__init__(msg) 36 | 37 | 38 | class AlreadyRegistered(StdNetException): 39 | pass 40 | 41 | 42 | class ObjectNotValidated(StdNetException): 43 | '''A :class:`StdNetException` raised when an instance of a 44 | :class:`stdnet.odm.StdModel` fails to validate 45 | (probably required :class:`stdnet.odm.Field` are missing from the 46 | instance). 47 | ''' 48 | pass 49 | 50 | 51 | class ImproperlyConfigured(StdNetException): 52 | ''''A :class:`stdnet.StdNetException` raised when stdnet is somehow 53 | improperly configured''' 54 | pass 55 | 56 | 57 | class BadCacheDataStructure(StdNetException): 58 | pass 59 | 60 | 61 | class FieldError(StdNetException): 62 | '''Generic Field error''' 63 | pass 64 | 65 | 66 | class StructureFieldError(StdNetException): 67 | '''A :class:`stdnet.FieldError` for :class:stdnet.odm.StructureField`.''' 68 | pass 69 | 70 | 71 | class FieldValueError(FieldError): 72 | '''A :class:`stdnet.FieldError` raised when passing a wrong 73 | value to a field. 
This exception is cought during the model instance 74 | validation algorithm in :meth:`stdnet.odm.base.Metaclass.is_valid`.''' 75 | pass 76 | 77 | 78 | class QuerySetError(StdNetException): 79 | '''A :class:`stdnet.StdNetException` raised by a 80 | :class:`stdnet.odm.Query`.''' 81 | pass 82 | 83 | 84 | class ManyToManyError(QuerySetError): 85 | pass 86 | 87 | 88 | class ObjectNotFound(QuerySetError): 89 | '''A :class:`QuerySetError` raised when an object is not found.''' 90 | pass 91 | -------------------------------------------------------------------------------- /stdnet/utils/fallbacks/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lsbardel/python-stdnet/78db5320bdedc3f28c5e4f38cda13a4469e35db7/stdnet/utils/fallbacks/__init__.py -------------------------------------------------------------------------------- /stdnet/utils/fallbacks/_collections.py: -------------------------------------------------------------------------------- 1 | from UserDict import DictMixin 2 | 3 | __all__ = ['OrderedDict'] 4 | 5 | 6 | class OrderedDict(dict, DictMixin): 7 | '''Drop-in substitute for Py2.7's new collections.OrderedDict. 8 | The recipe has big-oh performance that matches regular dictionaries 9 | (amortized O(1) insertion/deletion/lookup and O(n) 10 | iteration/repr/copy/equality_testing). 11 | 12 | From http://code.activestate.com/recipes/576693/''' 13 | 14 | def __init__(self, *args, **kwds): 15 | if len(args) > 1: 16 | raise TypeError('expected at most 1 arguments, got %d' % len(args)) 17 | try: 18 | self.__end 19 | except AttributeError: 20 | self.clear() 21 | self.update(*args, **kwds) 22 | 23 | def clear(self): 24 | self.__end = end = [] 25 | end += [None, end, end] # sentinel node for doubly linked list 26 | self.__map = {} # key --> [key, prev, next] 27 | dict.clear(self) 28 | 29 | def __setitem__(self, key, value): 30 | if key not in self: 31 | end = self.__end 32 | curr = end[1] 33 | curr[2] = end[1] = self.__map[key] = [key, curr, end] 34 | dict.__setitem__(self, key, value) 35 | 36 | def __delitem__(self, key): 37 | dict.__delitem__(self, key) 38 | key, prev, next = self.__map.pop(key) 39 | prev[2] = next 40 | next[1] = prev 41 | 42 | def __iter__(self): 43 | end = self.__end 44 | curr = end[2] 45 | while curr is not end: 46 | yield curr[0] 47 | curr = curr[2] 48 | 49 | def __reversed__(self): 50 | end = self.__end 51 | curr = end[1] 52 | while curr is not end: 53 | yield curr[0] 54 | curr = curr[1] 55 | 56 | def popitem(self, last=True): 57 | if not self: 58 | raise KeyError('dictionary is empty') 59 | if last: 60 | key = reversed(self).next() 61 | else: 62 | key = iter(self).next() 63 | value = self.pop(key) 64 | return key, value 65 | 66 | def __reduce__(self): 67 | items = [[k, self[k]] for k in self] 68 | tmp = self.__map, self.__end 69 | del self.__map, self.__end 70 | inst_dict = vars(self).copy() 71 | self.__map, self.__end = tmp 72 | if inst_dict: 73 | return (self.__class__, (items,), inst_dict) 74 | return self.__class__, (items,) 75 | 76 | def keys(self): 77 | return list(self) 78 | 79 | setdefault = DictMixin.setdefault 80 | update = DictMixin.update 81 | pop = DictMixin.pop 82 | values = DictMixin.values 83 | items = DictMixin.items 84 | iterkeys = DictMixin.iterkeys 85 | itervalues = DictMixin.itervalues 86 | iteritems = DictMixin.iteritems 87 | 88 | def __repr__(self): 89 | if not self: 90 | return '%s()' % (self.__class__.__name__,) 91 | return '%s(%r)' % (self.__class__.__name__, 
self.items()) 92 | 93 | def copy(self): 94 | return self.__class__(self) 95 | 96 | @classmethod 97 | def fromkeys(cls, iterable, value=None): 98 | d = cls() 99 | for key in iterable: 100 | d[key] = value 101 | return d 102 | 103 | def __eq__(self, other): 104 | if isinstance(other, OrderedDict): 105 | return len(self) == len(other) and self.items() == other.items() 106 | return dict.__eq__(self, other) 107 | 108 | def __ne__(self, other): 109 | return not self == other 110 | -------------------------------------------------------------------------------- /stdnet/utils/fallbacks/_importlib.py: -------------------------------------------------------------------------------- 1 | # Taken from Python 2.7 2 | import sys 3 | 4 | 5 | def _resolve_name(name, package, level): 6 | """Return the absolute name of the module to be imported.""" 7 | if not hasattr(package, 'rindex'): 8 | raise ValueError("'package' not set to a string") 9 | dot = len(package) 10 | for x in xrange(level, 1, -1): 11 | try: 12 | dot = package.rindex('.', 0, dot) 13 | except ValueError: 14 | raise ValueError("attempted relative import beyond top-level " 15 | "package") 16 | return "%s.%s" % (package[:dot], name) 17 | 18 | 19 | def import_module(name, package=None): 20 | """Import a module. 21 | 22 | The 'package' argument is required when performing a relative import. It 23 | specifies the package to use as the anchor point from which to resolve the 24 | relative import to an absolute import. 25 | 26 | """ 27 | if name.startswith('.'): 28 | if not package: 29 | raise TypeError("relative imports require the 'package' argument") 30 | level = 0 31 | for character in name: 32 | if character != '.': 33 | break 34 | level += 1 35 | name = _resolve_name(name[level:], package, level) 36 | __import__(name) 37 | return sys.modules[name] 38 | -------------------------------------------------------------------------------- /stdnet/utils/fallbacks/py2/__init__.py: -------------------------------------------------------------------------------- 1 | # This is for python 2.x 2 | from inspect import istraceback 3 | 4 | 5 | def raise_error_trace(err, traceback): 6 | if istraceback(traceback): 7 | raise err.__class__, err, traceback 8 | else: 9 | raise err.__class__, err, None 10 | -------------------------------------------------------------------------------- /stdnet/utils/importer.py: -------------------------------------------------------------------------------- 1 | try: # pragma nocover 2 | from importlib import * 3 | except ImportError: # pragma nocover 4 | from .fallbacks._importlib import * 5 | -------------------------------------------------------------------------------- /stdnet/utils/populate.py: -------------------------------------------------------------------------------- 1 | from datetime import date, timedelta 2 | from random import uniform, randint, choice 3 | import string 4 | 5 | from stdnet.utils import ispy3k 6 | 7 | if ispy3k: # pragma nocover 8 | characters = string.ascii_letters + string.digits 9 | else: # pragma nocover 10 | characters = string.letters + string.digits 11 | range = xrange 12 | 13 | def_converter = lambda x: x 14 | 15 | 16 | def populate(datatype='string', size=10, start=None, end=None, 17 | converter=None, choice_from=None, **kwargs): 18 | '''Utility function for populating lists with random data. 19 | Useful for populating database with data for fuzzy testing. 
20 | Supported data-types 21 | 22 | * *string* 23 | For example:: 24 | 25 | populate('string',100, min_len=3, max_len=10) 26 | 27 | create a 100 elements list with random strings 28 | with random length between 3 and 10 29 | 30 | * *date* 31 | For example:: 32 | 33 | from datetime import date 34 | populate('date',200, start = date(1997,1,1), end = date.today()) 35 | 36 | create a 200 elements list with random datetime.date objects 37 | between *start* and *end* 38 | 39 | * *integer* 40 | For example:: 41 | 42 | populate('integer',200, start = 0, end = 1000) 43 | 44 | create a 200 elements list with random int between *start* and *end* 45 | 46 | * *float* 47 | For example:: 48 | 49 | populate('float', 200, start = 0, end = 10) 50 | 51 | create a 200 elements list with random floats between *start* and *end* 52 | 53 | * *choice* (elements of an iterable) 54 | For example:: 55 | 56 | populate('choice', 200, choice_from = ['pippo','pluto','blob']) 57 | 58 | create a 200 elements list with random elements from *choice_from*. 59 | ''' 60 | data = [] 61 | converter = converter or def_converter 62 | if datatype == 'date': 63 | date_end = end or date.today() 64 | date_start = start or date(1990, 1, 1) 65 | delta = date_end - date_start 66 | for s in range(size): 67 | data.append(converter(random_date(date_start, delta.days))) 68 | elif datatype == 'integer': 69 | start = start or 0 70 | end = end or 1000000 71 | for s in range(size): 72 | data.append(converter(randint(start, end))) 73 | elif datatype == 'float': 74 | start = start or 0 75 | end = end or 10 76 | for s in range(size): 77 | data.append(converter(uniform(start, end))) 78 | elif datatype == 'choice' and choice_from: 79 | for s in range(size): 80 | data.append(choice(list(choice_from))) 81 | else: 82 | for s in range(size): 83 | data.append(converter(random_string(**kwargs))) 84 | return data 85 | 86 | 87 | def random_string(min_len=3, max_len=20, **kwargs): 88 | len = randint(min_len, max_len) if max_len > min_len else min_len 89 | return ''.join((choice(characters) for s in range(len))) 90 | 91 | 92 | def random_date(date_start, delta): 93 | return date_start + timedelta(days=randint(0, delta)) 94 | -------------------------------------------------------------------------------- /stdnet/utils/py2py3.py: -------------------------------------------------------------------------------- 1 | '''\ 2 | Simple python script which helps writing python 2.6 \ 3 | forward compatible code with python 3''' 4 | import os 5 | import sys 6 | import types 7 | 8 | ispy3k = int(sys.version[0]) >= 3 9 | 10 | 11 | # Python 3 12 | if ispy3k: 13 | string_type = str 14 | itervalues = lambda d: d.values() 15 | iteritems = lambda d: d.items() 16 | int_type = int 17 | zip = zip 18 | map = map 19 | long = int 20 | range = range 21 | 22 | from urllib import parse as urlparse 23 | from io import StringIO, BytesIO 24 | from itertools import zip_longest 25 | 26 | urlencode = urlparse.urlencode 27 | 28 | class UnicodeMixin(object): 29 | 30 | def __unicode__(self): 31 | return '{0} object'.format(self.__class__.__name__) 32 | 33 | def __str__(self): 34 | return self.__unicode__() 35 | 36 | def __repr__(self): 37 | return '%s: %s' % (self.__class__.__name__, self) 38 | 39 | def native_str(s, encoding='utf-8'): 40 | if isinstance(s, bytes): 41 | return s.decode(encoding) 42 | return s 43 | 44 | # Python 2 45 | else: # pragma: no cover 46 | string_type = unicode 47 | itervalues = lambda d: d.itervalues() 48 | iteritems = lambda d: d.iteritems() 49 | int_type = 
(types.IntType, types.LongType) 50 | from itertools import izip as zip, imap as map, izip_longest as zip_longest 51 | range = xrange 52 | long = long 53 | 54 | import urlparse 55 | from urllib import urlencode 56 | from cStringIO import StringIO 57 | BytesIO = StringIO 58 | 59 | class UnicodeMixin(object): 60 | 61 | def __unicode__(self): 62 | return unicode('{0} object'.format(self.__class__.__name__)) 63 | 64 | def __str__(self): 65 | return self.__unicode__().encode('utf-8', 'ignore') 66 | 67 | def __repr__(self): 68 | return '%s: %s' % (self.__class__.__name__, self) 69 | 70 | def native_str(s, encoding='utf-8'): 71 | if isinstance(s, unicode): 72 | return s.encode(encoding) 73 | return s 74 | 75 | 76 | is_string = lambda x: isinstance(x, string_type) 77 | is_int = lambda x: isinstance(x, int_type) 78 | 79 | 80 | def to_bytes(s, encoding=None, errors='strict'): 81 | """Returns a bytestring version of 's', 82 | encoded as specified in 'encoding'.""" 83 | encoding = encoding or 'utf-8' 84 | if isinstance(s, bytes): 85 | if encoding != 'utf-8': 86 | return s.decode('utf-8', errors).encode(encoding, errors) 87 | else: 88 | return s 89 | if not is_string(s): 90 | s = string_type(s) 91 | return s.encode(encoding, errors) 92 | 93 | 94 | def to_string(s, encoding=None, errors='strict'): 95 | """Inverse of to_bytes""" 96 | encoding = encoding or 'utf-8' 97 | if isinstance(s, bytes): 98 | return s.decode(encoding, errors) 99 | if not is_string(s): 100 | s = string_type(s) 101 | return s 102 | -------------------------------------------------------------------------------- /stdnet/utils/structures.py: -------------------------------------------------------------------------------- 1 | import sys 2 | from collections import * 3 | 4 | if sys.version_info < (2, 7): # pragma nocover 5 | from .fallbacks._collections import * 6 | -------------------------------------------------------------------------------- /stdnet/utils/version.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | import os 3 | import subprocess 4 | from collections import namedtuple 5 | 6 | 7 | class stdnet_version(namedtuple('stdnet_version', 8 | 'major minor micro releaselevel serial')): 9 | __impl = None 10 | 11 | def __new__(cls, *args, **kwargs): 12 | if cls.__impl is None: 13 | cls.__impl = super(stdnet_version, cls).__new__(cls, *args, 14 | **kwargs) 15 | return cls.__impl 16 | else: 17 | raise TypeError('cannot create stdnet_version instances') 18 | 19 | 20 | def get_version(version): 21 | "Returns a PEP 386-compliant version number from *version*." 22 | assert len(version) == 5 23 | assert version[3] in ('alpha', 'beta', 'rc', 'final') 24 | parts = 2 if version[2] == 0 else 3 25 | main = '.'.join(map(str, version[:parts])) 26 | sub = '' 27 | if version[3] == 'alpha' and version[4] == 0: 28 | git_changeset = get_git_changeset() 29 | if git_changeset: 30 | sub = '.dev%s' % git_changeset 31 | elif version[3] != 'final': 32 | mapping = {'alpha': 'a', 'beta': 'b', 'rc': 'c'} 33 | sub = mapping[version[3]] + str(version[4]) 34 | return main + sub 35 | 36 | 37 | def get_git_changeset(): 38 | """Returns a numeric identifier of the latest git changeset. 39 | 40 | The result is the UTC timestamp of the changeset in YYYYMMDDHHMMSS format. 41 | This value isn't guaranteed to be unique, but collisions are very unlikely, 42 | so it's sufficient for generating the development version numbers. 
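    For example, a changeset committed at 2013-06-01 12:30:45 UTC yields
    '20130601123045', which get_version() above would turn into a
    development version string such as '0.8.1.dev20130601123045'.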
43 | """ 44 | repo_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) 45 | git_show = subprocess.Popen('git show --pretty=format:%ct --quiet HEAD', 46 | stdout=subprocess.PIPE, stderr=subprocess.PIPE, 47 | shell=True, cwd=repo_dir, 48 | universal_newlines=True) 49 | timestamp = git_show.communicate()[0].partition('\n')[0] 50 | try: 51 | timestamp = datetime.datetime.utcfromtimestamp(int(timestamp)) 52 | except ValueError: 53 | return None 54 | return timestamp.strftime('%Y%m%d%H%M%S') 55 | -------------------------------------------------------------------------------- /stdnet/utils/zset.py: -------------------------------------------------------------------------------- 1 | import sys 2 | 3 | from .skiplist import skiplist 4 | 5 | ispy3k = int(sys.version[0]) >= 3 6 | 7 | __all__ = ['zset'] 8 | 9 | 10 | class zset(object): 11 | '''Ordered-set equivalent of redis zset.''' 12 | def __init__(self): 13 | self.clear() 14 | 15 | def __repr__(self): 16 | return repr(self._sl) 17 | 18 | def __str__(self): 19 | return str(self._sl) 20 | 21 | def __len__(self): 22 | return len(self._dict) 23 | 24 | def __iter__(self): 25 | for _, value in self._sl: 26 | yield value 27 | 28 | def items(self): 29 | '''Iterable over ordered score, value pairs of this :class:`zset` 30 | ''' 31 | return iter(self._sl) 32 | 33 | def add(self, score, val): 34 | r = 1 35 | if val in self._dict: 36 | sc = self._dict[val] 37 | if sc == score: 38 | return 0 39 | self._sl.remove(sc) 40 | r = 0 41 | self._dict[val] = score 42 | self._sl.insert(score, val) 43 | return r 44 | 45 | def update(self, score_vals): 46 | '''Update the :class:`zset` with an iterable over pairs of 47 | scores and values.''' 48 | add = self.add 49 | for score, value in score_vals: 50 | add(score, value) 51 | 52 | def remove(self, item): 53 | '''Remove ``item`` for the :class:`zset` it it exists. 
54 | If found it returns the score of the item removed.''' 55 | score = self._dict.pop(item, None) 56 | if score is not None: 57 | self._sl.remove(score) 58 | return score 59 | 60 | def clear(self): 61 | '''Clear this :class:`zset`.''' 62 | self._sl = skiplist() 63 | self._dict = {} 64 | 65 | def rank(self, item): 66 | '''Return the rank (index) of ``item`` in this :class:`zset`.''' 67 | score = self._dict.get(item) 68 | if score is not None: 69 | return self._sl.rank(score) 70 | 71 | def flat(self): 72 | return self._sl.flat() 73 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lsbardel/python-stdnet/78db5320bdedc3f28c5e4f38cda13a4469e35db7/tests/__init__.py -------------------------------------------------------------------------------- /tests/all/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lsbardel/python-stdnet/78db5320bdedc3f28c5e4f38cda13a4469e35db7/tests/all/__init__.py -------------------------------------------------------------------------------- /tests/all/apps/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lsbardel/python-stdnet/78db5320bdedc3f28c5e4f38cda13a4469e35db7/tests/all/apps/__init__.py -------------------------------------------------------------------------------- /tests/all/apps/columnts/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lsbardel/python-stdnet/78db5320bdedc3f28c5e4f38cda13a4469e35db7/tests/all/apps/columnts/__init__.py -------------------------------------------------------------------------------- /tests/all/apps/columnts/evaluate.py: -------------------------------------------------------------------------------- 1 | from datetime import date 2 | 3 | from stdnet.utils import test 4 | from stdnet.apps.columnts import ColumnTS 5 | 6 | from .main import ColumnMixin 7 | 8 | 9 | class TestEvaluate(ColumnMixin, test.TestCase): 10 | 11 | def test_simple(self): 12 | ts = self.empty() 13 | l = yield ts.evaluate('return self:length()') 14 | self.assertEqual(l, 0) 15 | yield ts.update({date(2012,5,15): {'open':605}, 16 | date(2012,5,16): {'open':617}}) 17 | yield self.async.assertEqual(ts.evaluate('return self:length()'), 2) 18 | yield self.async.assertEqual(ts.evaluate('return self:fields()'), 19 | [b'open']) 20 | #Return the change from last open with respect previous open 21 | change = "return self:rank_value(-1,'open')-"\ 22 | "self:rank_value(-2,'open')" 23 | change = yield ts.evaluate(change) 24 | self.assertEqual(change, 12) 25 | -------------------------------------------------------------------------------- /tests/all/apps/columnts/field.py: -------------------------------------------------------------------------------- 1 | from stdnet.utils import test 2 | 3 | from tests.all.multifields.struct import MultiFieldMixin 4 | 5 | from examples.tsmodels import ColumnTimeSeries 6 | 7 | from .npts import ColumnTimeSeriesNumpy, skipUnless 8 | 9 | 10 | class TestColumnTSField(MultiFieldMixin, test.TestCase): 11 | model = ColumnTimeSeries 12 | 13 | def testModel(self): 14 | meta = self.model._meta 15 | self.assertTrue(len(meta.multifields),1) 16 | m = meta.multifields[0] 17 | self.assertEqual(m.name,'data') 18 | self.assertTrue(isinstance(m.value_pickler, 
encoders.Double)) 19 | 20 | 21 | @skipUnless(ColumnTimeSeriesNumpy, 'Requires stdnet-redis and dynts') 22 | class TestColumnTSField(TestColumnTSField): 23 | model = ColumnTimeSeriesNumpy 24 | 25 | def setUp(self): 26 | self.register() 27 | 28 | def testMeta(self): 29 | meta = self.model._meta 30 | self.assertTrue(len(meta.multifields),1) 31 | m = meta.multifields[0] 32 | self.assertEqual(m.name, 'data') 33 | self.assertTrue(isinstance(m.value_pickler, encoders.Double)) 34 | -------------------------------------------------------------------------------- /tests/all/apps/columnts/manipulate.py: -------------------------------------------------------------------------------- 1 | from datetime import date, datetime 2 | 3 | from stdnet.utils import test 4 | 5 | from .main import ColumnMixin 6 | 7 | 8 | class TestManipulate(ColumnMixin, test.TestCase): 9 | 10 | def create(self): 11 | return self.data.data1.create(self) 12 | 13 | def pop_range(self, byrank, ts, start, end, num_popped, sl, sl2=None): 14 | all_dates, all_fields = yield ts.irange() 15 | self.assertEqual(len(all_fields), 6) 16 | if byrank: 17 | dates, fields = yield ts.irange(start, end) 18 | dt, fs = yield ts.ipop_range(start, end) 19 | else: 20 | dates, fields = yield ts.range(start, end) 21 | dt, fs = yield ts.pop_range(start, end) 22 | self.assertEqual(len(dt), num_popped) 23 | size = yield ts.size() 24 | self.assertEqual(size, len(all_dates)-num_popped) 25 | self.assertEqual(dates, dt) 26 | self.assertEqual(fields, fs) 27 | # 28 | dates = all_dates[sl] 29 | fields = dict(((f, all_fields[f][sl]) for f in all_fields)) 30 | if sl2: 31 | dates.extend(all_dates[sl2]) 32 | for f in fields: 33 | fields[f].extend(all_fields[f][sl2]) 34 | dt, fs = yield ts.irange() 35 | self.assertEqual(len(dates), len(dt)) 36 | self.assertEqual(dates, dt) 37 | for f in fields: 38 | self.assertEqual(len(fields[f]), len(fs[f])) 39 | self.assertEqual(fields, fs) 40 | 41 | def test_ipop_range_back(self): 42 | ts = yield self.create() 43 | yield self.pop_range(True, ts, -2, -1, 2, slice(0,-2)) 44 | 45 | def test_ipop_range_middle(self): 46 | ts = yield self.create() 47 | all_dates, all_fields = yield ts.irange() 48 | yield self.pop_range(True, ts, -10, -5, 6, slice(0,-10), slice(-4, None)) 49 | 50 | def test_ipop_range_start(self): 51 | ts = yield self.create() 52 | # popping the first 11 records 53 | yield self.pop_range(True, ts, 0, 10, 11, slice(11, None)) 54 | 55 | def test_pop_range_back(self): 56 | ts = yield self.create() 57 | start, end = yield ts.itimes(-2) 58 | yield self.pop_range(False, ts, start, end, 2, slice(0,-2)) 59 | 60 | def test_contains(self): 61 | ts = yield self.create() 62 | all_dates = yield ts.itimes() 63 | dt = all_dates[10] 64 | self.assertTrue(dt in ts) 65 | # now lets pop dt 66 | v = ts.pop(dt) 67 | self.assertEqual(len(v), 6) 68 | self.assertFalse(dt in ts) 69 | # 70 | dn = datetime.now() 71 | self.assertFalse(dn in ts) -------------------------------------------------------------------------------- /tests/all/apps/columnts/npts.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | from stdnet import odm 4 | from stdnet.utils import test, encoders 5 | try: 6 | from stdnet.apps.columnts import npts 7 | from dynts import tsname 8 | 9 | nptsColumnTS = npts.ColumnTS 10 | 11 | class ColumnTimeSeriesNumpy(odm.StdModel): 12 | ticker = odm.SymbolField(unique = True) 13 | data = npts.ColumnTSField() 14 | 15 | except ImportError: 16 | nptsColumnTS = None 17 | ColumnTimeSeriesNumpy = 
None 18 | 19 | from . import main 20 | 21 | 22 | skipUnless = test.unittest.skipUnless 23 | 24 | 25 | @skipUnless(nptsColumnTS, 'Requires dynts') 26 | class TestDynTsIntegration(main.TestOperations): 27 | ColumnTS = nptsColumnTS 28 | 29 | def testGetFields(self): 30 | ts1 = self.create() 31 | ts = ts1.irange() 32 | self.assertEqual(ts.count(),6) 33 | d1,v1 = ts1.front() 34 | d2,v2 = ts1.back() 35 | self.assertTrue(d2>d1) 36 | 37 | def testEmpty(self): 38 | session = self.session() 39 | ts1 = session.add(self.ColumnTS()) 40 | ts = ts1.irange() 41 | self.assertEqual(len(ts),0) 42 | self.assertFalse(ts1.front()) 43 | self.assertFalse(ts1.back()) 44 | 45 | def testgetFieldInOrder(self): 46 | ts1 = self.create() 47 | ts = ts1.irange(fields = ('a','b','c')) 48 | self.assertEqual(ts.count(), 3) 49 | self.assertEqual(ts.name, tsname('a','b','c')) 50 | 51 | def testgetItem(self): 52 | ts1 = self.create() 53 | dates = list(ts1) 54 | N = len(dates) 55 | self.assertTrue(N) 56 | n = N//2 57 | dte = dates[n] 58 | v = ts1[dte] 59 | -------------------------------------------------------------------------------- /tests/all/apps/columnts/test1.lua: -------------------------------------------------------------------------------- 1 | 2 | local ts = columnts:new(KEYS[1]) 3 | 4 | assert(ts.key == KEYS[1]) 5 | assert(ts:fieldkey('myfield') == KEYS[1] .. ':field:myfield') 6 | assert(# ts:fields() == 0) 7 | 8 | return {ok = 'OK'} 9 | -------------------------------------------------------------------------------- /tests/all/apps/searchengine/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lsbardel/python-stdnet/78db5320bdedc3f28c5e4f38cda13a4469e35db7/tests/all/apps/searchengine/__init__.py -------------------------------------------------------------------------------- /tests/all/apps/searchengine/search.py: -------------------------------------------------------------------------------- 1 | '''search a mock database.''' 2 | from stdnet import odm 3 | from stdnet.utils import test, populate 4 | 5 | from examples.wordsearch.models import Item, RelatedItem 6 | 7 | from .meta import SearchMixin 8 | 9 | 10 | class TestBigSearch(SearchMixin, test.TestCase): 11 | 12 | @classmethod 13 | def after_setup(cls): 14 | cls.mapper.set_search_engine(cls.make_engine()) 15 | cls.mapper.search_engine.register(Item, ('related',)) 16 | cls.mapper.search_engine.register(RelatedItem) 17 | return cls.data.make_items(cls, content=True) 18 | 19 | def test_meta_session(self): 20 | models = self.mapper 21 | self.assertFalse(models.search_engine.backend) 22 | session = models.search_engine.session() 23 | self.assertEqual(session.router, models) 24 | 25 | def testSearchWords(self): 26 | engine = self.mapper.search_engine 27 | words = list(engine.words_from_text('python gains')) 28 | self.assertTrue(len(words)>=2) 29 | 30 | def test_items(self): 31 | engine = self.mapper.search_engine 32 | wis = engine.worditems(Item) 33 | yield self.async.assertTrue(wis.count()) 34 | 35 | def __test_big_search(self): 36 | #TODO: 37 | #this test sometimes fails. 
Need to be fixed 38 | models = self.mapper 39 | sw = ' '.join(populate('choice', 1, choice_from=self.words)) 40 | qs = yield models.item.search(sw).all() 41 | self.assertTrue(qs) 42 | for item in qs: 43 | self.assertTrue(sw in item.name or sw in item.content) 44 | 45 | def testSearch(self): 46 | engine = self.mapper.search_engine 47 | text = ' '.join(populate('choice', 1, choice_from=self.words)) 48 | result = yield engine.search(text) 49 | self.assertTrue(result) 50 | 51 | def testNoWords(self): 52 | models = self.mapper 53 | query = models.item.query() 54 | q1 = yield query.search('').all() 55 | all = yield query.all() 56 | self.assertTrue(q1) 57 | self.assertEqual(set(q1), set(all)) 58 | 59 | def testInSearch(self): 60 | models = self.mapper 61 | query = models.item.query() 62 | sw = ' '.join(populate('choice', 5, choice_from=self.words)) 63 | res1 = yield query.search(sw).all() 64 | res2 = yield query.search(sw, lookup='in').all() 65 | self.assertTrue(res2) 66 | self.assertTrue(len(res1) < len(res2)) 67 | 68 | def testEmptySearch(self): 69 | engine = self.mapper.search_engine 70 | queries = engine.search('') 71 | self.assertEqual(len(queries), 1) 72 | qs = yield queries[0].all() 73 | qs2 = yield engine.worditems().all() 74 | self.assertTrue(qs) 75 | self.assertEqual(set(qs), set(qs2)) 76 | 77 | def test_bad_lookup(self): 78 | engine = self.mapper.search_engine 79 | self.assertRaises(ValueError, engine.search, 80 | 'first second ', lookup='foo') 81 | 82 | 83 | -------------------------------------------------------------------------------- /tests/all/backends/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lsbardel/python-stdnet/78db5320bdedc3f28c5e4f38cda13a4469e35db7/tests/all/backends/__init__.py -------------------------------------------------------------------------------- /tests/all/backends/interface.py: -------------------------------------------------------------------------------- 1 | from stdnet import odm, getdb, BackendDataServer, ModelNotAvailable,\ 2 | SessionNotAvailable, BackendStructure 3 | from stdnet.utils import test 4 | 5 | from examples.models import SimpleModel 6 | 7 | 8 | class DummyBackendDataServer(BackendDataServer): 9 | default_port = 9090 10 | def setup_connection(self, address): 11 | pass 12 | 13 | 14 | class TestBackend(test.TestCase): 15 | multipledb = False 16 | 17 | def get_backend(self, **kwargs): 18 | return DummyBackendDataServer(**kwargs) 19 | 20 | def testVirtuals(self): 21 | self.assertRaises(NotImplementedError, BackendDataServer, '', '') 22 | b = self.get_backend() 23 | self.assertEqual(str(b), 'dummy://127.0.0.1:9090') 24 | self.assertFalse(b.clean(None)) 25 | self.assertRaises(NotImplementedError, b.execute_session, None, None) 26 | self.assertRaises(NotImplementedError, b.model_keys, None) 27 | self.assertRaises(NotImplementedError, b.flush) 28 | 29 | def testMissingStructure(self): 30 | l = odm.List() 31 | self.assertRaises(AttributeError, l.backend_structure) 32 | 33 | def testRedis(self): 34 | b = getdb('redis://') 35 | self.assertEqual(b.name, 'redis') 36 | self.assertEqual(b.connection_string, 'redis://127.0.0.1:6379?db=0') 37 | 38 | def testBackendStructure_error(self): 39 | s = BackendStructure(None, None, None) 40 | self.assertRaises(NotImplementedError, s.flush) 41 | self.assertRaises(NotImplementedError, s.delete) 42 | self.assertRaises(NotImplementedError, s.size) 43 | -------------------------------------------------------------------------------- 
/tests/all/backends/redis/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lsbardel/python-stdnet/78db5320bdedc3f28c5e4f38cda13a4469e35db7/tests/all/backends/redis/__init__.py -------------------------------------------------------------------------------- /tests/all/backends/redis/async.py: -------------------------------------------------------------------------------- 1 | '''Test the asynchronous redis client''' 2 | from copy import copy 3 | 4 | import pulsar 5 | 6 | from stdnet.utils import test 7 | from stdnet.utils.async import async_binding 8 | 9 | from examples.data import FinanceTest 10 | 11 | def check_connection(self, command_name): 12 | redis = self.mapper.default_backend.client 13 | client = redis.connection_pool 14 | self.assertIsInstance(client, pulsar.Client) 15 | for pool in client.connection_pools.values(): 16 | for conn in pool._concurrent_connections: 17 | consumer = conn.current_consumer 18 | request = consumer.current_request 19 | self.assertEqual(client.available_connections, 1) 20 | 21 | 22 | @test.skipUnless(async_binding, 'Requires asynchronous binding') 23 | class TestRedisAsyncClient(test.TestWrite): 24 | multipledb = 'redis' 25 | 26 | @classmethod 27 | def after_setup(cls): 28 | return cls.data.create(cls) 29 | 30 | @classmethod 31 | def backend_params(cls): 32 | return {'timeout': 0} 33 | 34 | def test_client(self): 35 | redis = self.mapper.default_backend.client 36 | self.assertFalse(redis.full_response) 37 | redis = copy(redis) 38 | redis.full_response = True 39 | ping = yield redis.execute_command('PING').on_finished 40 | self.assertTrue(ping.result) 41 | self.assertTrue(ping.connection) 42 | echo = yield redis.echo('Hello!').on_finished 43 | self.assertEqual(echo.result, b'Hello!') 44 | self.assertTrue(echo.connection) 45 | 46 | -------------------------------------------------------------------------------- /tests/all/backends/redis/prefixed.py: -------------------------------------------------------------------------------- 1 | from stdnet.utils import test 2 | from stdnet.utils import gen_unique_id 3 | 4 | 5 | class TestRedisPrefixed(test.TestCase): 6 | multipledb = 'redis' 7 | 8 | def get_client(self, prefix=None): 9 | prefix = prefix or gen_unique_id() 10 | c = self.backend.client.prefixed(prefix + self.namespace) 11 | if c.prefix not in self.clients: 12 | self.clients[c.prefix] = c 13 | return self.clients[c.prefix] 14 | 15 | def setUp(self): 16 | self.clients = {} 17 | 18 | def tearDown(self): 19 | for c in self.clients.values(): 20 | yield c.flushdb() 21 | 22 | def test_meta(self): 23 | c = self.get_client('yyy') 24 | self.assertTrue(c.prefix) 25 | self.assertTrue(c.prefix.startswith('yyy')) 26 | self.assertTrue(c.client) 27 | self.assertFalse(c.client.prefix) 28 | 29 | def test_delete(self): 30 | c1 = self.get_client() 31 | c2 = self.get_client() 32 | yield c1.set('bla', 'foo') 33 | yield c2.set('bla', 'foo') 34 | yield self.async.assertEqual(c1.dbsize(), 1) 35 | yield self.async.assertEqual(c2.dbsize(), 1) 36 | yield c1.flushdb() 37 | yield self.async.assertEqual(c1.dbsize(), 0) 38 | yield self.async.assertEqual(c2.dbsize(), 1) 39 | yield c2.flushdb() 40 | yield self.async.assertEqual(c2.dbsize(), 0) 41 | 42 | def test_error(self): 43 | c = self.get_client() 44 | self.assertRaises(NotImplementedError, c.execute_command, 'FLUSHDB') 45 | self.assertRaises(NotImplementedError, c.execute_command, 'FLUSHALL') 46 | 47 | 48 | 
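# A minimal sketch (not part of the test suite) of the behaviour exercised
# above: a prefixed client namespaces every key it touches, so two prefixed
# clients can share one Redis database while flushdb() clears only the
# caller's namespace. It assumes a Redis server on the default local port
# and a synchronous client; getdb(), prefixed(), set(), dbsize() and
# flushdb() are the same calls used by TestRedisPrefixed.
if __name__ == '__main__':
    from stdnet import getdb
    from stdnet.utils import gen_unique_id

    backend = getdb('redis://127.0.0.1:6379?db=0')
    c1 = backend.client.prefixed(gen_unique_id())
    c2 = backend.client.prefixed(gen_unique_id())
    c1.set('bla', 'foo')         # stored under '<prefix1>bla'
    c2.set('bla', 'foo')         # stored under '<prefix2>bla'
    assert c1.dbsize() == 1      # each client sees only its own keys
    c1.flushdb()                 # removes only keys with prefix1
    assert c2.dbsize() == 1      # the other namespace is untouched
    c2.flushdb()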
-------------------------------------------------------------------------------- /tests/all/benchmarks/__init__.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | from stdnet.utils import test 4 | 5 | from examples.models import Instrument, Fund, Position, PortfolioView,\ 6 | UserDefaultView 7 | from examples.data import finance_data, INSTS_TYPES, CCYS_TYPES 8 | 9 | 10 | class Benchmarks(test.TestWrite): 11 | __benchmark__ = True 12 | data_cls = finance_data 13 | models = (Instrument, Fund, Position) 14 | 15 | def test_create(self): 16 | session = yield self.data.create(self) 17 | 18 | -------------------------------------------------------------------------------- /tests/all/fields/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lsbardel/python-stdnet/78db5320bdedc3f28c5e4f38cda13a4469e35db7/tests/all/fields/__init__.py -------------------------------------------------------------------------------- /tests/all/fields/fk.py: -------------------------------------------------------------------------------- 1 | import stdnet 2 | from stdnet import odm, FieldError 3 | from stdnet.utils import test 4 | 5 | from examples.models import Person, Group 6 | 7 | 8 | class TestForeignKey(test.TestCase): 9 | models = (Person, Group) 10 | 11 | @classmethod 12 | def after_setup(cls): 13 | session = cls.mapper.session() 14 | with session.begin() as t: 15 | t.add(Group(name='bla')) 16 | yield t.on_result 17 | g = yield session.query(Group).get(name='bla') 18 | with session.begin() as t: 19 | t.add(Person(name='foo', group=g)) 20 | yield t.on_result 21 | 22 | def testSimple(self): 23 | session = self.session() 24 | query = session.query(Person) 25 | yield self.async.assertEqual(query.count(), 1) 26 | p = yield query.get(name='foo') 27 | self.assertTrue(p.group_id) 28 | p.group = None 29 | self.assertEqual(p.group_id, None) 30 | 31 | def testOldRelatedNone(self): 32 | models = self.mapper 33 | p = yield models.person.get(name='foo') 34 | g = yield p.group 35 | self.assertTrue(g) 36 | self.assertEqual(g, p.group) 37 | self.assertEqual(g.id, p.group_id) 38 | p.group = None 39 | self.assertEqual(p.group_id, None) 40 | yield self.async.assertRaises(stdnet.FieldValueError, p.session.add, p) 41 | 42 | def testCoverage(self): 43 | self.assertRaises(FieldError, odm.ForeignKey, None) 44 | 45 | 46 | class TestForeignKeyWrite(test.TestWrite): 47 | models = (Person, Group) 48 | 49 | def test_create(self): 50 | models = self.mapper 51 | group = yield models.group.new(name='quant') 52 | self.assertEqual(group.name, 'quant') 53 | self.assertEqualId(group, 1) 54 | person = yield models.person.new(name='luca', group=group) 55 | self.assertEqualId(person, 1) 56 | self.assertEqual(group.id, person.group_id) 57 | self.assertEqual(group, person.group) -------------------------------------------------------------------------------- /tests/all/fields/integer.py: -------------------------------------------------------------------------------- 1 | from stdnet import FieldValueError 2 | from stdnet.utils import test 3 | 4 | from examples.models import Page 5 | 6 | 7 | class TestIntegerField(test.TestCase): 8 | model = Page 9 | 10 | def test_default_value(self): 11 | models = self.mapper 12 | p = Page() 13 | self.assertEqual(p.in_navigation, 1) 14 | p = Page(in_navigation='4') 15 | self.assertEqual(p.in_navigation, 4) 16 | self.assertRaises(FieldValueError, p=Page, in_navigation='foo') 17 | yield 
self.session().add(p) 18 | self.assertEqual(p.in_navigation, 4) 19 | p = yield models.page.get(id=p.id) 20 | self.assertEqual(p.in_navigation, 4) 21 | 22 | def testNotValidated(self): 23 | models = self.mapper 24 | p = yield models.page.new() 25 | self.assertEqual(p.in_navigation, 1) 26 | self.assertRaises(ValueError, Page, in_navigation='bla') 27 | 28 | def testZeroValue(self): 29 | models = self.mapper 30 | p = models.page(in_navigation=0) 31 | self.assertEqual(p.in_navigation, 0) 32 | yield models.session().add(p) 33 | self.assertEqual(p.in_navigation, 0) 34 | p = yield models.page.get(id=p.id) 35 | self.assertEqual(p.in_navigation, 0) 36 | -------------------------------------------------------------------------------- /tests/all/fields/meta.py: -------------------------------------------------------------------------------- 1 | '''Field metadata and full coverage.''' 2 | import stdnet 3 | from stdnet import odm, FieldError 4 | from stdnet.utils import test 5 | 6 | 7 | class TestFields(test.TestCase): 8 | 9 | def testBaseClass(self): 10 | self.assertRaises(TypeError, odm.Field, kaputt=True) 11 | f = odm.Field() 12 | self.assertEqual(f.to_python(self), self) 13 | f = odm.StructureField() 14 | self.assertEqual(f.model, None) 15 | self.assertEqual(f.to_python(self), None) 16 | self.assertRaises(NotImplementedError, f.structure_class) 17 | 18 | def testDoublePK(self): 19 | def bad_class(): 20 | class MyBadClass(odm.StdModel): 21 | id = odm.IntegerField(primary_key=True) 22 | code = odm.SymbolField(primary_key=True) 23 | self.assertRaises(FieldError, bad_class) 24 | 25 | def test_defaults(self): 26 | f = odm.Field() 27 | self.assertEqual(f.default, None) 28 | f = odm.Field(default = 'bla') 29 | self.assertEqual(f.default, 'bla') 30 | 31 | def test_id(self): 32 | f = odm.Field() 33 | self.assertEqual(f.id(None), None) 34 | 35 | def test_get_value(self): 36 | f = odm.Field() 37 | self.assertRaises(AttributeError, f.get_value, 1) 38 | -------------------------------------------------------------------------------- /tests/all/fields/pickle.py: -------------------------------------------------------------------------------- 1 | from stdnet.utils import test 2 | 3 | from examples.models import Environment 4 | 5 | 6 | 7 | class TestPickleObjectField(test.TestCase): 8 | model = Environment 9 | 10 | def testMetaData(self): 11 | field = self.model._meta.dfields['data'] 12 | self.assertEqual(field.type,'object') 13 | self.assertEqual(field.internal_type,'bytes') 14 | self.assertEqual(field.index,False) 15 | self.assertEqual(field.name,field.attname) 16 | return field 17 | 18 | def testOkObject(self): 19 | session = self.session() 20 | v = self.model(data=['ciao','pippo']) 21 | self.assertEqual(v.data, ['ciao','pippo']) 22 | yield session.add(v) 23 | self.assertEqual(v.data, ['ciao','pippo']) 24 | v = yield session.query(self.model).get(id=v.id) 25 | self.assertEqual(v.data, ['ciao','pippo']) 26 | 27 | def testRecursive(self): 28 | '''Silly test to test both pickle field and picklable instance''' 29 | session = self.session() 30 | v = yield session.add(self.model(data=('ciao','pippo', 4, {}))) 31 | v2 = self.model(data=v) 32 | self.assertEqual(v2.data, v) 33 | yield session.add(v2) 34 | self.assertEqual(v2.data, v) 35 | v2 = yield session.query(self.model).get(id=v2.id) 36 | self.assertEqual(v2.data, v) -------------------------------------------------------------------------------- /tests/all/fields/pk.py: -------------------------------------------------------------------------------- 1 | import 
stdnet 2 | from stdnet import odm, FieldError 3 | from stdnet.utils import test 4 | 5 | from examples.models import Parent, Child 6 | 7 | 8 | class TestForeignKey(test.TestCase): 9 | models = (Parent, Child) 10 | 11 | def test_custom_pk(self): 12 | models = self.mapper 13 | parent = yield models.parent.new(name='test') 14 | self.assertEqual(parent.pkvalue(), 'test') 15 | self.assertEqual(parent.pk().name, 'name') 16 | 17 | def test_add_parent_and_child(self): 18 | models = self.mapper 19 | with models.session().begin() as t: 20 | parent = models.parent(name='test2') 21 | child = models.child(parent=parent, name='foo') 22 | self.assertEqual(child.parent, parent) 23 | self.assertEqual(child.parent_id, parent.pkvalue()) 24 | t.add(parent) 25 | t.add(child) 26 | yield t.on_result 27 | 28 | 29 | class TestQuery(test.TestCase): 30 | models = (Parent, Child) 31 | 32 | def test_non_id_pk(self): 33 | ''' 34 | Models with non-'id' primary keys should be queryable (regression test) 35 | 36 | ''' 37 | models = self.mapper 38 | with models.session().begin() as t: 39 | parent = models.parent(name='test2') 40 | child = models.child(parent=parent, name='foo') 41 | t.add(parent) 42 | t.add(child) 43 | yield t.on_result 44 | with models.session().begin() as t: 45 | parents = t.query(Parent).all() 46 | self.assertEqual(len(parents), 1) 47 | yield t.on_result 48 | 49 | 50 | class TestManyToMany(test.TestCase): 51 | models = (Parent, Child) 52 | 53 | def test_non_id_pk(self): 54 | ''' 55 | Models with non-'id' primary keys should be queryable from a ManyToMany 56 | relation (regression test) 57 | 58 | ''' 59 | models = self.mapper 60 | with models.session().begin() as t: 61 | parent = models.parent(name='test2') 62 | uncle = models.parent(name='test3') 63 | child = models.child(parent=parent, name='foo') 64 | t.add(parent) 65 | t.add(uncle) 66 | t.add(child) 67 | yield t.on_result 68 | with models.session().begin() as t: 69 | child.uncles.add(uncle) 70 | self.assertEqual(len(child.uncles.all()), 1) 71 | yield t.on_result 72 | -------------------------------------------------------------------------------- /tests/all/lib/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lsbardel/python-stdnet/78db5320bdedc3f28c5e4f38cda13a4469e35db7/tests/all/lib/__init__.py -------------------------------------------------------------------------------- /tests/all/lib/autoincrement.py: -------------------------------------------------------------------------------- 1 | from stdnet import odm 2 | from stdnet.apps.searchengine.models import WordItem 3 | from stdnet.utils import test 4 | 5 | from examples.models import SimpleModel 6 | 7 | 8 | class TestCase(test.TestWrite): 9 | multipledb = 'redis' 10 | models = (WordItem, SimpleModel) 11 | 12 | def testAutoIncrement(self): 13 | a = odm.autoincrement() 14 | self.assertEqual(a.incrby, 1) 15 | self.assertEqual(a.desc, False) 16 | self.assertEqual(str(a), 'autoincrement(1)') 17 | a = odm.autoincrement(3) 18 | self.assertEqual(a.incrby, 3) 19 | self.assertEqual(a.desc, False) 20 | self.assertEqual(str(a), 'autoincrement(3)') 21 | b = -a 22 | self.assertEqual(str(a), 'autoincrement(3)') 23 | self.assertEqual(b.desc, True) 24 | self.assertEqual(str(b), '-autoincrement(3)') 25 | 26 | def testSimple(self): 27 | session = self.session() 28 | m = yield session.add(SimpleModel(code='pluto')) 29 | w = yield session.add(WordItem(word='ciao', model_type=SimpleModel, 30 | object_id=m.id)) 31 | yield 
self.async.assertEqual(session.query(WordItem).count(), 1) 32 | w = yield session.add(WordItem(word='ciao', model_type=SimpleModel, 33 | object_id=m.id)) 34 | yield self.async.assertEqual(session.query(WordItem).count(), 1) 35 | self.assertEqual(w.get_state().score, 2) 36 | # 37 | w = yield session.add(WordItem(word='ciao', model_type=SimpleModel, 38 | object_id=m.id)) 39 | yield self.async.assertEqual(session.query(WordItem).count(), 1) 40 | self.assertEqual(w.get_state().score, 3) -------------------------------------------------------------------------------- /tests/all/lib/local.py: -------------------------------------------------------------------------------- 1 | from stdnet import odm 2 | from stdnet.utils import test 3 | 4 | class TestModel(test.TestCase): 5 | multipledb = False 6 | 7 | def test_create(self): 8 | User = odm.create_model('User', 'name', 'email', 'name') 9 | self.assertTrue(isinstance(User, odm.ModelType)) 10 | self.assertEqual(User._meta.attributes, ('name', 'email')) 11 | 12 | def test_create_name(self): 13 | User = odm.create_model('UserBase', 'name', 'email', 'name', 14 | abstract=True) 15 | self.assertEqual(User.__name__, 'UserBase') 16 | self.assertTrue(User._meta.abstract) 17 | self.assertRaises(AttributeError, User._meta.pkname) 18 | 19 | def test_init(self): 20 | User = odm.create_model('User', 'name', 'email') 21 | user = User(name='luca') 22 | self.assertEqual(user.name, 'luca') 23 | self.assertEqual(user.email, None) 24 | self.assertRaises(ValueError, User, bla='foo') 25 | 26 | def test_init_args(self): 27 | User = odm.create_model('User', 'name', 'email') 28 | user = User('luca') 29 | self.assertEqual(user.name, 'luca') 30 | self.assertEqual(user.email, None) 31 | user = User('bla', 'bla@foo') 32 | self.assertEqual(user.name, 'bla') 33 | self.assertEqual(user.email, 'bla@foo') 34 | self.assertRaises(ValueError, User, 'foo', 'jhjh', 'gjgj') 35 | 36 | def test_router(self): 37 | models = odm.Router() 38 | User = odm.create_model('User', 'name', 'email', 'name') 39 | models.register(User) 40 | self.assertEqual(models.user.model, User) -------------------------------------------------------------------------------- /tests/all/lib/me.py: -------------------------------------------------------------------------------- 1 | from stdnet.utils import test 2 | from stdnet import settings 3 | import stdnet as me 4 | 5 | 6 | class TestInitFile(test.TestCase): 7 | multipledb = False 8 | 9 | def test_version(self): 10 | self.assertTrue(len(me.VERSION), 5) 11 | version = me.__version__ 12 | self.assertTrue(version) 13 | self.assertEqual(me.__version__, me.get_version(me.VERSION)) 14 | 15 | def testStdnetVersion(self): 16 | self.assertRaises(TypeError, me.stdnet_version, 1, 2, 3, 4, 5) 17 | 18 | def test_meta(self): 19 | for m in ("__author__", "__contact__", "__homepage__", "__doc__"): 20 | self.assertTrue(getattr(me, m, None)) 21 | 22 | -------------------------------------------------------------------------------- /tests/all/lib/register.py: -------------------------------------------------------------------------------- 1 | '''Test router registration''' 2 | from stdnet import odm, AlreadyRegistered 3 | from stdnet.utils import test 4 | 5 | from examples.models import SimpleModel 6 | 7 | 8 | class TestRegistration(test.TestWrite): 9 | 10 | def register(self): 11 | router = odm.Router(self.backend) 12 | self.assertEqual(router.default_backend, self.backend) 13 | router.register_applications('examples') 14 | self.assertTrue(router) 15 | return router 16 | 17 | def 
test_registered_models(self): 18 | router = self.register() 19 | for meta in router.registered_models: 20 | name = meta.name 21 | self.assertEqual(meta.app_label, 'examples') 22 | manager = router[meta] 23 | model = manager.model 24 | self.assertEqual(manager, getattr(router, name)) 25 | self.assertEqual(manager, router[manager]) 26 | self.assertEqual(manager, router[model]) 27 | self.assertEqual(model._meta, manager._meta) 28 | self.assertFalse(meta.abstract) 29 | self.assertTrue(manager.backend) 30 | 31 | def test_unregister_all(self): 32 | router = self.register() 33 | self.assertTrue(router.registered_models) 34 | self.assertEqual(router.unregister(self), None) 35 | self.assertTrue(router.registered_models) 36 | N = len(router.registered_models) 37 | managers = router.unregister() 38 | self.assertEqual(N, len(managers)) 39 | self.assertFalse(router.registered_models) 40 | 41 | def testFlushModel(self): 42 | router = self.register() 43 | yield router.flush() 44 | 45 | def test_flush_exclude(self): 46 | models = self.register() 47 | s = yield models.simplemodel.new(code='test') 48 | all = yield models.simplemodel.all() 49 | self.assertEqual(len(all), 1) 50 | yield models.flush(exclude=('examples.simplemodel',)) 51 | all = yield models.simplemodel.all() 52 | self.assertEqual(len(all), 1) 53 | self.assertEqual(all[0], s) 54 | yield models.flush() 55 | all = yield models.simplemodel.all() 56 | self.assertFalse(all) 57 | 58 | def testFromUuid(self): 59 | models = self.register() 60 | s = yield models.simplemodel.new(code='test') 61 | uuid = s.uuid 62 | s2 = yield models.from_uuid(s.uuid) 63 | self.assertEqual(s, s2) 64 | yield self.async.assertRaises(odm.StdModel.DoesNotExist, 65 | models.from_uuid, 'ccdscscds') 66 | yield self.async.assertRaises(odm.StdModel.DoesNotExist, 67 | models.from_uuid, 'ccdscscds.1') 68 | a,b = tuple(uuid.split('.')) 69 | yield self.async.assertRaises(odm.StdModel.DoesNotExist, 70 | models.from_uuid, '{0}.5'.format(a)) 71 | 72 | def testFailedHashModel(self): 73 | self.assertRaises(KeyError, odm.hashmodel, SimpleModel) 74 | 75 | -------------------------------------------------------------------------------- /tests/all/multifields/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lsbardel/python-stdnet/78db5320bdedc3f28c5e4f38cda13a4469e35db7/tests/all/multifields/__init__.py -------------------------------------------------------------------------------- /tests/all/multifields/set.py: -------------------------------------------------------------------------------- 1 | '''tests for odm.SetField''' 2 | from datetime import datetime 3 | from itertools import chain 4 | 5 | from stdnet import getdb 6 | from stdnet.utils import test, populate, zip 7 | 8 | from examples.models import Calendar, DateValue, Collection, Group 9 | 10 | 11 | class ZsetData(test.DataGenerator): 12 | 13 | def generate(self): 14 | self.dates = self.populate('date') 15 | self.values = self.populate('string', min_len=10, max_len=120) 16 | 17 | 18 | class TestSetField(test.TestCase): 19 | models = (Collection, Group) 20 | 21 | def test_simple(self): 22 | m = yield self.session().add(self.model()) 23 | yield m.numbers.add(1) 24 | yield m.numbers.update((1, 2, 3, 4, 5)) 25 | yield self.async.assertEqual(m.numbers.size(), 5) 26 | 27 | 28 | class TestOrderedSet(test.TestCase): 29 | multipledb = 'redis' 30 | models = (Calendar, DateValue) 31 | data_cls = ZsetData 32 | 33 | def fill(self, update=False): 34 | session = 
self.session() 35 | c = yield session.add(Calendar(name=self.data.random_string())) 36 | with session.begin() as t: 37 | for dt, value in zip(self.data.dates, self.data.values): 38 | t.add(DateValue(dt=dt, value=value)) 39 | yield t.on_result 40 | items = t.saved[DateValue._meta] 41 | with session.begin() as t: 42 | if update: 43 | c.data.update(items) 44 | else: 45 | for value in items: 46 | c.data.add(value) 47 | yield t.on_result 48 | yield c 49 | 50 | def test_add(self): 51 | return self.fill() 52 | 53 | def test_update(self): 54 | return self.fill(True) 55 | 56 | def test_order(self): 57 | c = yield self.fill() 58 | yield self.async.assertEqual(c.data.size(), self.data.size) 59 | dprec = None 60 | events = yield c.data.items() 61 | for event in events: 62 | if dprec: 63 | self.assertTrue(event.dt >= dprec) 64 | dprec = event.dt 65 | 66 | def test_rank(self): 67 | c = yield self.fill() 68 | data = c.data 69 | vals = yield data.items() 70 | self.assertEqual(vals, data.cache.cache) 71 | data.cache.clear() 72 | self.assertEqual(data.cache.cache, None) 73 | ranks = [] 74 | for v in vals: 75 | ranks.append(data.rank(v)) 76 | ranks = yield self.multi_async(ranks) 77 | 78 | 79 | -------------------------------------------------------------------------------- /tests/all/multifields/string.py: -------------------------------------------------------------------------------- 1 | '''tests for odm.StringField''' 2 | from stdnet.utils import test, populate, zip, iteritems, to_string 3 | 4 | from examples.models import SimpleString 5 | 6 | from .struct import MultiFieldMixin 7 | 8 | 9 | class TestStringField(MultiFieldMixin, test.TestCase): 10 | multipledb = 'redis' 11 | model = SimpleString 12 | 13 | def adddata(self, li): 14 | '''Add elements to a list without using transactions.''' 15 | for elem in self.data.names: 16 | li.data.push_back(elem) 17 | yield self.async.assertEqual(li.data.size(), 18 | len(''.join(self.data.names))) 19 | 20 | def test_incr(self): 21 | m = yield self.session().add(self.model()) 22 | self.async.assertEqual(m.data.incr(), 1) 23 | self.async.assertEqual(m.data.incr(), 2) 24 | self.async.assertEqual(m.data.incr(3), 5) 25 | self.async.assertEqual(m.data.incr(-7), -2) 26 | 27 | 28 | -------------------------------------------------------------------------------- /tests/all/multifields/struct.py: -------------------------------------------------------------------------------- 1 | __test__ = False 2 | from time import sleep 3 | 4 | from stdnet import StructureFieldError 5 | from stdnet.utils import test, populate, zip, to_string 6 | 7 | 8 | class StringData(test.DataGenerator): 9 | 10 | def generate(self): 11 | self.names = self.populate() 12 | 13 | 14 | class MultiFieldMixin(object): 15 | '''Test class which add a couple of tests for multi fields.''' 16 | attrname = 'data' 17 | data_cls = StringData 18 | 19 | def setUp(self): 20 | self.names = test.populate('string', size=10) 21 | self.name = self.names[0] 22 | 23 | def defaults(self): 24 | return {} 25 | 26 | def get_object_and_field(self, save=True, **kwargs): 27 | models = self.mapper 28 | params = self.defaults() 29 | params.update(kwargs) 30 | m = self.model(**params) 31 | if save: 32 | yield models.session().add(m) 33 | yield m, getattr(m, self.attrname) 34 | 35 | def adddata(self, obj): 36 | raise NotImplementedError 37 | 38 | def test_RaiseStructFieldError(self): 39 | yield self.async.assertRaises(StructureFieldError, 40 | self.get_object_and_field, False) 41 | 42 | def test_multi_field_meta(self): 43 | '''Here we 
check for multifield specific stuff like the instance 44 | related keys (keys which are related to the instance rather than the model).''' 45 | # get instance and field, the field has no data here 46 | models = self.mapper 47 | # 48 | obj, field = yield self.get_object_and_field() 49 | # 50 | self.assertTrue(field.field) 51 | self.assertEqual(field.field.model, self.model) 52 | self.assertEqual(field._pkvalue, obj.pkvalue()) 53 | self.assertEqual(field.session, obj.session) 54 | # 55 | be = field.backend_structure() 56 | self.assertEqual(be.backend, models[self.model].backend) 57 | self.assertEqual(be.instance, field) 58 | # 59 | if be.backend.name == 'redis': 60 | yield self.check_redis_structure(obj, be) 61 | 62 | def check_redis_structure(self, obj, be): 63 | session = obj.session 64 | backend = be.backend 65 | # 66 | # field id should be in instance keys 67 | keys = backend.instance_keys(obj) 68 | self.assertTrue(be.id in keys) 69 | # 70 | # the field has no data, so there is no key in the database 71 | lkeys = yield backend.model_keys(self.model._meta) 72 | self.assertFalse(be.id in lkeys) 73 | # 74 | # Lets add data 75 | yield self.adddata(obj) 76 | # The field id should be in the server keys 77 | if backend.name == 'redis': 78 | lkeys = yield backend.model_keys(self.model._meta) 79 | self.assertTrue(be.id in lkeys) 80 | # 81 | # Delete the object 82 | yield session.delete(obj) 83 | # The backend id should not be in all model keys 84 | lkeys = yield backend.model_keys(self.model._meta) 85 | self.assertFalse(be.id in lkeys) 86 | -------------------------------------------------------------------------------- /tests/all/query/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lsbardel/python-stdnet/78db5320bdedc3f28c5e4f38cda13a4469e35db7/tests/all/query/__init__.py -------------------------------------------------------------------------------- /tests/all/query/contains.py: -------------------------------------------------------------------------------- 1 | from stdnet.utils import test 2 | from stdnet.utils.py2py3 import zip 3 | 4 | from examples.models import SimpleModel 5 | from examples.wordsearch.basicwords import basic_english_words 6 | 7 | class TextGenerator(test.DataGenerator): 8 | sizes = {'tiny': (20, 5), 9 | 'small': (100, 20), 10 | 'normal': (500, 50), 11 | 'big': (2000, 100), 12 | 'huge': (10000, 200)} 13 | 14 | def generate(self): 15 | size, words = self.size 16 | self.descriptions = [] 17 | self.names = self.populate('string', size, min_len=10, max_len=30) 18 | for s in range(size): 19 | d = ' '.join(self.populate('choice', words, choice_from=basic_english_words)) 20 | self.descriptions.append(d) 21 | 22 | 23 | class TestFieldSerach(test.TestCase): 24 | model = SimpleModel 25 | data_cls = TextGenerator 26 | 27 | @classmethod 28 | def after_setup(cls): 29 | with cls.session().begin() as t: 30 | for name, des in zip(cls.data.names, cls.data.descriptions): 31 | t.add(cls.model(code=name, description=des)) 32 | yield t.on_result 33 | 34 | def test_contains(self): 35 | session = self.session() 36 | qs = session.query(self.model) 37 | all = yield qs.filter(description__contains='ll').all() 38 | self.assertTrue(all) 39 | for m in all: 40 | self.assertTrue('ll' in m.description) 41 | all = yield qs.filter(description__contains='llllll').all() 42 | self.assertFalse(all) 43 | 44 | def test_startswith(self): 45 | session = self.session() 46 | qs = session.query(self.model) 47 | all = yield qs.all() 48 | 
count = {} 49 | for m in all: 50 | start = m.description.split(' ')[0][:2] 51 | if start in count: 52 | count[start] += 1 53 | else: 54 | count[start] = 1 55 | ordered = [k for k, _ in sorted(count.items(), key=lambda x: x[1])] 56 | start = ordered[-1] 57 | all = yield qs.filter(description__startswith=start).all() 58 | self.assertTrue(all) 59 | for m in all: 60 | self.assertTrue(m.description.startswith(start)) 61 | self.assertEqual(len(all), count[start]) -------------------------------------------------------------------------------- /tests/all/query/meta.py: -------------------------------------------------------------------------------- 1 | '''Test query meta and corner cases''' 2 | from stdnet import QuerySetError, odm 3 | from stdnet.utils import test 4 | 5 | from examples.models import Instrument 6 | from examples.data import FinanceTest 7 | 8 | 9 | class TestMeta(FinanceTest): 10 | 11 | def test_session_meta(self): 12 | models = self.mapper 13 | session = models.session() 14 | self.assertEqual(session.router, models) 15 | self.assertEqual(session.transaction, None) 16 | 17 | def testQueryMeta(self): 18 | models = self.mapper 19 | qs = models.instrument.query() 20 | self.assertIsInstance(qs, odm.Query) 21 | self.assertEqual(qs.model, models.instrument.model) 22 | 23 | def test_empty_query(self): 24 | empty = self.session().empty(Instrument) 25 | self.assertEqual(empty.meta, Instrument._meta) 26 | self.assertEqual(len(empty), 0) 27 | self.assertEqual(empty.count(), 0) 28 | self.assertEqual(list(empty), []) 29 | self.assertEqual(empty.executed, True) 30 | self.assertEqual(empty.construct(), empty) 31 | self.assertEqual(empty.items(), []) 32 | self.assertEqual(empty.all(), []) 33 | self.assertEqual(empty.intersect(self.query()), empty) 34 | all = self.query() 35 | all2 = empty.union(all) 36 | all = yield all.all() 37 | all2 = yield all2.all() 38 | self.assertEqual(set(all), set(all2)) 39 | q = self.query().filter(ccy__in=()) 40 | yield self.async.assertEqual(q.count(), 0) 41 | 42 | def testProperties(self): 43 | query = self.query() 44 | self.assertFalse(query.executed) 45 | 46 | def test_getfield(self): 47 | query = self.query() 48 | self.assertRaises(QuerySetError, query.get_field, 'waaaaaaa') 49 | query = query.get_field('id') 50 | query2 = query.get_field('id') 51 | self.assertEqual(query, query2) 52 | 53 | def testFilterError(self): 54 | query = self.query().filter(whoaaaaa='foo') 55 | self.assertRaises(QuerySetError, query.all) 56 | 57 | def testEmptyParameters(self): 58 | query = self.query().filter(ccy='USD') 59 | self.assertEqual(query, query.filter()) 60 | self.assertEqual(query, query.exclude()) 61 | 62 | 63 | class TestMetaWithData(FinanceTest): 64 | 65 | @classmethod 66 | def after_setup(cls): 67 | return cls.data.create(cls) 68 | 69 | def test_repr(self): 70 | models = self.mapper 71 | # make sure there is at least one of them 72 | yield models.instrument.new(name='a123345566', ccy='EUR', type='future') 73 | query = self.query().filter(ccy='EUR')\ 74 | .exclude(type=('equity', 'bond')) 75 | self.assertTrue(str(query)) 76 | # The query is still lazy 77 | self.assertFalse(query.executed) 78 | v = yield query.all() 79 | self.assertTrue(v) 80 | self.assertEqual(str(query), str(v)) 81 | -------------------------------------------------------------------------------- /tests/all/query/session.py: -------------------------------------------------------------------------------- 1 | '''Sessions and transactions management''' 2 | from stdnet import odm, getdb 3 | from stdnet.utils 
import test, gen_unique_id 4 | 5 | from examples.models import SimpleModel, Instrument 6 | 7 | 8 | class TestSession(test.TestWrite): 9 | models = (SimpleModel, Instrument) 10 | 11 | def test_simple_create(self): 12 | models = self.mapper 13 | session = models.session() 14 | self.assertFalse(session.transaction) 15 | session.begin() 16 | self.assertTrue(session.transaction) 17 | m = models.simplemodel(code='pluto', group='planet') 18 | self.assertEqual(m, session.add(m)) 19 | self.assertTrue(m in session) 20 | sm = session.model(m) 21 | self.assertEqual(len(sm.new), 1) 22 | self.assertEqual(len(sm.modified), 0) 23 | self.assertEqual(len(sm.deleted), 0) 24 | self.assertTrue(m in sm.new) 25 | t = yield session.commit() 26 | self.assertTrue(t) 27 | self.assertEqualId(m, 1) 28 | self.assertFalse(session.dirty) 29 | 30 | def test_create_objects(self): 31 | # Tests a session with two models. This was for a bug 32 | models = self.mapper 33 | with models.session().begin() as t: 34 | t.add(models.simplemodel(code='pluto',group='planet')) 35 | t.add(models.instrument(name='bla',ccy='EUR',type='equity')) 36 | # The transaction is complete when the on_commit is not asynchronous 37 | yield t.on_result 38 | yield self.async.assertEqual(models.simplemodel.query().count(), 1) 39 | yield self.async.assertEqual(models.instrument.query().count(), 1) 40 | 41 | def test_simple_filter(self): 42 | models = self.mapper 43 | session = models.session() 44 | with session.begin() as t: 45 | t.add(SimpleModel(code='pluto', group='planet')) 46 | t.add(SimpleModel(code='venus', group='planet')) 47 | t.add(SimpleModel(code='sun', group='star')) 48 | yield t.on_result 49 | query = session.query(SimpleModel) 50 | yield self.async.assertEqual(query.count(), 3) 51 | all = yield query.all() 52 | self.assertEqual(len(all), 3) 53 | qs = query.filter(group='planet') 54 | self.assertFalse(qs.executed) 55 | yield self.async.assertEqual(qs.count(), 2) 56 | self.assertTrue(qs.executed) 57 | qs = query.filter(group='star') 58 | yield self.async.assertEqual(qs.count(), 1) 59 | qs = query.filter(group='bla') 60 | yield self.async.assertEqual(qs.count(), 0) 61 | 62 | def test_modify_index_field(self): 63 | session = self.session() 64 | with session.begin() as t: 65 | t.add(SimpleModel(code='pluto', group='planet')) 66 | yield t.on_result 67 | query = session.query(SimpleModel) 68 | qs = query.filter(group='planet') 69 | yield self.async.assertEqual(qs.count(), 1) 70 | el = yield qs[0] 71 | id = self.assertEqualId(el, 1) 72 | session = self.session() 73 | el.group = 'smallplanet' 74 | with session.begin() as t: 75 | t.add(el) 76 | yield t.on_result 77 | yield self.async.assertEqual(session.query(self.model).count(), 1) 78 | self.assertEqualId(el, id, True) 79 | # lets get it from the server 80 | qs = session.query(self.model).filter(id=id) 81 | yield self.async.assertEqual(qs.count(), 1) 82 | el = yield qs[0] 83 | self.assertEqual(el.code, 'pluto') 84 | self.assertEqual(el.group, 'smallplanet') 85 | # now filter on group 86 | qs = session.query(self.model).filter(group='smallplanet') 87 | yield self.async.assertEqual(qs.count(), 1) 88 | el = yield qs[0] 89 | self.assertEqual(el.id, id) 90 | # now filter on old group 91 | qs = session.query(self.model).filter(group='planet') 92 | yield self.async.assertEqual(qs.count(), 0) 93 | -------------------------------------------------------------------------------- /tests/all/query/signal.py: -------------------------------------------------------------------------------- 1 | from stdnet.utils 
import test 2 | from stdnet import odm 3 | 4 | from examples.models import Group, Person 5 | 6 | 7 | class TestSignals(test.TestWrite): 8 | models = (Group, Person) 9 | 10 | def setUp(self): 11 | models = self.mapper 12 | models.post_commit.bind(self.addPerson, sender=Group) 13 | 14 | def addPerson(self, signal, sender, instances=None, session=None, 15 | **kwargs): 16 | models = session.router 17 | self.assertEqual(models, self.mapper) 18 | session = models.session() 19 | with session.begin() as t: 20 | for instance in instances: 21 | self.counter += 1 22 | if instance.name == 'user': 23 | t.add(models.person(name='luca', group=instance)) 24 | return t.on_result 25 | 26 | def testPostCommit(self): 27 | self.counter = 0 28 | session = self.session() 29 | with session.begin() as t: 30 | g = Group(name='user') 31 | t.add(g) 32 | t.add(Group(name='admin')) 33 | yield t.on_result 34 | self.assertEqualId(g, 1) 35 | users = session.query(Person).filter(group__name='user') 36 | admins = session.query(Person).filter(group__name='admin') 37 | yield self.async.assertEqual(users.count(), 1) 38 | yield self.async.assertEqual(admins.count(), 0) 39 | self.assertEqual(self.counter, 2) 40 | -------------------------------------------------------------------------------- /tests/all/query/slice.py: -------------------------------------------------------------------------------- 1 | '''Slice Query to obtain subqueries.''' 2 | from stdnet import QuerySetError 3 | from stdnet.utils import test 4 | 5 | from examples.data import FinanceTest 6 | 7 | 8 | class TestFilter(FinanceTest): 9 | 10 | @classmethod 11 | def after_setup(cls): 12 | yield cls.data.create(cls) 13 | 14 | def test_all(self): 15 | session = self.session() 16 | qs = session.query(self.model) 17 | c = yield qs.count() 18 | self.assertTrue(c > 0) 19 | q1 = yield qs.all() 20 | self.assertEqual(len(q1), qs.count()) 21 | 22 | def testUnsortedSliceSimple(self): 23 | session = self.session() 24 | qs = session.query(self.model) 25 | c = yield qs.count() 26 | self.assertTrue(c > 0) 27 | q1 = yield qs[0:2] 28 | self.assertEqual(len(q1), 2) 29 | self.assertEqual([q.id for q in q1], [1, 2]) 30 | 31 | def testUnsortedSliceComplex(self): 32 | session = self.session() 33 | qs = session.query(self.model) 34 | N = yield qs.count() 35 | self.assertTrue(N) 36 | q1 = yield qs[0:-1] 37 | self.assertEqual(len(q1), N-1) 38 | for id, q in enumerate(q1,1): 39 | self.assertEqual(q.id, id) 40 | q1 = yield qs[2:4] 41 | self.assertEqual(len(q1), 2) 42 | self.assertEqual(q1[0].id, 3) 43 | self.assertEqual(q1[1].id, 4) 44 | 45 | def testUnsortedSliceToEnd(self): 46 | session = self.session() 47 | qs = session.query(self.model) 48 | N = qs.count() 49 | self.assertTrue(N) 50 | q1 = yield qs[0:] 51 | self.assertEqual(len(q1), N) 52 | # This time the result is sorted by ids 53 | q1 = yield qs[3:] 54 | self.assertEqual(len(q1), N-3) 55 | for id, q in enumerate(q1, 4): 56 | self.assertEqual(q.id, id) 57 | 58 | def testSliceBack(self): 59 | session = self.session() 60 | qs = session.query(self.model) 61 | N = qs.count() 62 | self.assertTrue(N) 63 | q1 = yield qs[-2:] 64 | self.assertEqual(len(q1), 2) 65 | self.assertEqual(q1[0].id, N-1) 66 | self.assertEqual(q1[1].id, N) 67 | # This time the result is sorted by ids 68 | q1 = yield qs[-2:-1] 69 | self.assertEqual(len(q1),1) 70 | self.assertEqual(q1[0].id,N-1) 71 | 72 | def testSliceGetField(self): 73 | '''test slice in conjunction with get_field method''' 74 | session = self.session() 75 | qs = session.query(self.model).get_field('id') 
76 | yield self.async.assertRaises(QuerySetError, lambda: qs[:2]) 77 | -------------------------------------------------------------------------------- /tests/all/query/where.py: -------------------------------------------------------------------------------- 1 | from . import ranges 2 | 3 | 4 | class TestWhere(ranges.NumericTest): 5 | multipledb = ('redis', 'mongo') 6 | 7 | def testWhere(self): 8 | session = self.session() 9 | qs = session.query(self.model).where('this.vega > this.delta') 10 | qs = yield qs.all() 11 | self.assertTrue(qs) 12 | for m in qs: 13 | self.assertTrue(m.vega > m.delta) 14 | 15 | def testConcatenation(self): 16 | session = self.session() 17 | qs = session.query(self.model) 18 | qs = qs.filter(pv__gt=0).where('this.vega > this.delta') 19 | qs = yield qs.all() 20 | self.assertTrue(qs) 21 | for m in qs: 22 | self.assertTrue(m.pv > 0) 23 | self.assertTrue(m.vega > m.delta) 24 | 25 | def testLoadOnly(self): 26 | '''load only is only used in redis''' 27 | session = self.session() 28 | qs = session.query(self.model).where('this.vega > this.delta', 29 | load_only=('vega','foo','delta')) 30 | qs = yield qs.all() 31 | self.assertTrue(qs) 32 | for m in qs: 33 | self.assertTrue(m.vega > m.delta) -------------------------------------------------------------------------------- /tests/all/serialize/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lsbardel/python-stdnet/78db5320bdedc3f28c5e4f38cda13a4469e35db7/tests/all/serialize/__init__.py -------------------------------------------------------------------------------- /tests/all/serialize/base.py: -------------------------------------------------------------------------------- 1 | import os 2 | import tempfile 3 | 4 | from stdnet import odm 5 | from stdnet.utils import test, BytesIO, to_bytes 6 | 7 | 8 | class Tempfile(object): 9 | 10 | def __init__(self, data, text=True): 11 | fd, path = tempfile.mkstemp(text=text) 12 | self.handler = None 13 | self.path = path 14 | os.write(fd, to_bytes(data)) 15 | os.close(fd) 16 | 17 | def __enter__(self): 18 | return self 19 | 20 | def write(self, data): 21 | if self.fd: 22 | os.write(self.fd,data) 23 | os.close(self.fd) 24 | self.fd = None 25 | 26 | def close(self): 27 | if self.handler: 28 | self.handler.close() 29 | self.handler = None 30 | 31 | def open(self): 32 | if self.handler: 33 | raise RuntimeError('File is already opened') 34 | self.handler = open(self.path, 'r') 35 | return self.handler 36 | 37 | def __exit__(self, type, value, trace): 38 | self.close() 39 | os.remove(self.path) 40 | 41 | 42 | class BaseSerializerMixin(object): 43 | serializer = 'json' 44 | 45 | @classmethod 46 | def after_setup(cls): 47 | yield cls.data.create(cls) 48 | 49 | def get(self, **options): 50 | s = odm.get_serializer(self.serializer) 51 | if not s.default_options: 52 | self.assertEqual(s.options, options) 53 | self.assertFalse(s.data) 54 | self.assertTrue(s) 55 | return s 56 | 57 | def dump(self): 58 | models = self.mapper 59 | s = self.get() 60 | qs = yield models.instrument.query().sort_by('id').all() 61 | s.dump(qs) 62 | self.assertTrue(s.data) 63 | self.assertEqual(len(s.data), 1) 64 | yield s 65 | 66 | 67 | class SerializerMixin(BaseSerializerMixin): 68 | 69 | def testMeta(self): 70 | self.get() 71 | 72 | def test_dump(self): 73 | return self.dump() 74 | 75 | def test_write(self): 76 | s = yield self.dump() 77 | data = s.write() 78 | self.assertTrue(data) 79 | 80 | 81 | class 
LoadSerializerMixin(BaseSerializerMixin): 82 | 83 | def testLoad(self): 84 | models = self.mapper 85 | s = yield self.dump() 86 | qs = yield models.instrument.query().sort_by('id').all() 87 | self.assertTrue(qs) 88 | data = s.write().getvalue() 89 | with Tempfile(data) as tmp: 90 | yield models.instrument.flush() 91 | yield s.load(models, tmp.open(), self.model) 92 | qs2 = yield models.instrument.query().sort_by('id').all() 93 | self.assertEqual(qs, qs2) 94 | 95 | 96 | class DummySerializer(odm.Serializer): 97 | '''A Serializer for testing registration''' 98 | pass 99 | 100 | 101 | class TestMeta(test.TestCase): 102 | 103 | def testBadSerializer(self): 104 | self.assertRaises(ValueError, odm.get_serializer, 'djsbvjchvsdjcvsdj') 105 | 106 | def testRegisterUnregister(self): 107 | odm.register_serializer('dummy', DummySerializer()) 108 | s = odm.get_serializer('dummy') 109 | self.assertTrue('dummy' in odm.all_serializers()) 110 | self.assertTrue(isinstance(s, DummySerializer)) 111 | self.assertRaises(NotImplementedError, s.dump, None) 112 | self.assertRaises(NotImplementedError, s.write) 113 | self.assertRaises(NotImplementedError, s.load, None, None) 114 | self.assertTrue(odm.unregister_serializer('dummy')) 115 | self.assertRaises(ValueError, odm.get_serializer, 'dummy') 116 | -------------------------------------------------------------------------------- /tests/all/serialize/csv.py: -------------------------------------------------------------------------------- 1 | '''Test the CSV serializer''' 2 | from stdnet import odm 3 | 4 | from examples.data import FinanceTest, Fund 5 | 6 | from . import base 7 | 8 | 9 | class TestFinanceCSV(base.SerializerMixin, FinanceTest): 10 | serializer = 'csv' 11 | 12 | def testTwoModels(self): 13 | models = self.mapper 14 | s = yield self.dump() 15 | self.assertEqual(len(s.data), 1) 16 | funds = yield models.fund.all() 17 | self.assertRaises(ValueError, s.dump, funds) 18 | self.assertEqual(len(s.data), 1) 19 | 20 | def testLoadError(self): 21 | s = yield self.dump() 22 | self.assertRaises(ValueError, s.load, self.mapper, 'bla') 23 | 24 | 25 | class TestLoadFinanceCSV(base.LoadSerializerMixin, FinanceTest): 26 | serializer = 'csv' 27 | -------------------------------------------------------------------------------- /tests/all/serialize/json.py: -------------------------------------------------------------------------------- 1 | '''Test the JSON serializer''' 2 | from stdnet import odm 3 | 4 | from examples.data import FinanceTest, Fund 5 | 6 | from . 
import base 7 | 8 | 9 | class TestFinanceJSON(base.SerializerMixin, FinanceTest): 10 | serializer = 'json' 11 | 12 | def testTwoModels(self): 13 | models = self.mapper 14 | s = yield self.dump() 15 | d = s.data[0] 16 | self.assertEqual(d['model'], str(self.model._meta)) 17 | all = yield models.fund.query().sort_by('id').all() 18 | s.dump(all) 19 | self.assertEqual(len(s.data), 2) 20 | 21 | 22 | class TestLoadFinanceJSON(base.LoadSerializerMixin, FinanceTest): 23 | serializer = 'json' 24 | 25 | 26 | -------------------------------------------------------------------------------- /tests/all/structures/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lsbardel/python-stdnet/78db5320bdedc3f28c5e4f38cda13a4469e35db7/tests/all/structures/__init__.py -------------------------------------------------------------------------------- /tests/all/structures/base.py: -------------------------------------------------------------------------------- 1 | __test__ = False 2 | 3 | from stdnet import odm, InvalidTransaction 4 | 5 | 6 | class StructMixin(object): 7 | multipledb = 'redis' 8 | structure = None 9 | name = None 10 | 11 | def create_one(self): 12 | '''Create a structure and add few elements. Must return an instance 13 | of the :attr:`structure`.''' 14 | raise NotImplementedError 15 | 16 | def empty(self): 17 | models = self.mapper 18 | l = models.register(self.structure()) 19 | self.assertTrue(l.id) 20 | models.session().add(l) 21 | self.assertTrue(l.session is not None) 22 | return l 23 | 24 | def not_empty(self): 25 | models = self.mapper 26 | l = models.register(self.create_one()) 27 | self.assertTrue(l.id) 28 | yield models.session().add(l) 29 | self.assertTrue(l.session is not None) 30 | yield l 31 | 32 | def test_no_session(self): 33 | l = self.create_one() 34 | self.assertFalse(l.session) 35 | self.assertTrue(l.id) 36 | session = self.mapper.session() 37 | self.assertRaises(InvalidTransaction, session.add, l) 38 | 39 | def test_meta(self): 40 | models = self.mapper 41 | l = models.register(self.create_one()) 42 | self.assertTrue(l.id) 43 | session = models.session() 44 | with session.begin() as t: 45 | t.add(l) # add the structure to the session 46 | self.assertEqual(l.session, session) 47 | self.assertEqual(l._meta.name, self.name) 48 | self.assertEqual(l._meta.model._model_type, 'structure') 49 | #Structure have always the persistent flag set to True 50 | self.assertTrue(l.get_state().persistent) 51 | self.assertTrue(l in session) 52 | size = yield l.size() 53 | self.assertEqual(size, 0) 54 | yield t.on_result 55 | yield l 56 | 57 | def test_commit(self): 58 | l = yield self.test_meta() 59 | yield self.async.assertTrue(l.size()) 60 | 61 | def test_delete(self): 62 | models = self.mapper 63 | l = models.register(self.create_one()) 64 | self.assertTrue(l.id) 65 | session = models.session() 66 | yield session.add(l) 67 | yield self.async.assertTrue(l.size()) 68 | yield session.delete(l) 69 | yield self.async.assertEqual(l.size(), 0) 70 | self.assertEqual(l.session, session) 71 | 72 | def test_empty(self): 73 | '''Create an empty structure''' 74 | models = self.mapper 75 | l = models.register(self.structure()) 76 | self.assertTrue(l.id) 77 | session = models.session() 78 | with session.begin() as t: 79 | t.add(l) 80 | yield t.on_result 81 | yield self.async.assertEqual(l.size(), 0) 82 | self.assertEqual(l.session, session) 83 | -------------------------------------------------------------------------------- 
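The structure tests above and below all drive stdnet data structures through the same register / session / transaction flow that StructMixin codifies. As a standalone illustration of that flow (a minimal sketch, not part of the suite: the odm.Router backend URL is an assumption, and outside the yield-based test harness the calls are used synchronously rather than yielded):

    from stdnet import odm

    # A router (mapper) bound to an assumed redis backend; the URL is illustrative only.
    models = odm.Router('redis://127.0.0.1:6379?db=7')

    # Register a stand-alone structure with the router, as StructMixin does in its helpers.
    ht = models.register(odm.HashTable())

    # Writes are buffered on the structure cache and sent to the backend on commit.
    session = models.session()
    with session.begin() as t:
        t.add(ht)
        ht['bla'] = 'foo'
        ht['pluto'] = 3

    # After the transaction has committed, reads go back to the backend.
    assert ht.size() == 2
    assert ht['bla'] == 'foo'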
/tests/all/structures/hash.py: -------------------------------------------------------------------------------- 1 | from pulsar import multi_async 2 | 3 | from stdnet import odm 4 | from stdnet.utils import test 5 | 6 | from .base import StructMixin 7 | 8 | 9 | class TestHash(StructMixin, test.TestCase): 10 | structure = odm.HashTable 11 | name = 'hashtable' 12 | 13 | def create_one(self): 14 | h = odm.HashTable() 15 | h['bla'] = 'foo' 16 | h['pluto'] = 3 17 | return h 18 | 19 | def test_get_empty(self): 20 | d = self.empty() 21 | result = yield d.get('blaxxx', 3) 22 | self.assertEqual(result, 3) 23 | 24 | def test_pop(self): 25 | models = self.mapper 26 | d = models.register(self.create_one()) 27 | session = models.session() 28 | with session.begin() as t: 29 | d = t.add(d) 30 | d['foo'] = 'ciao' 31 | yield t.on_result 32 | yield self.async.assertEqual(d.size(), 3) 33 | yield self.async.assertEqual(d['foo'], 'ciao') 34 | yield self.async.assertRaises(KeyError, d.pop, 'blascd') 35 | yield self.async.assertEqual(d.pop('xxx', 56), 56) 36 | self.assertRaises(TypeError, d.pop, 'xxx', 1, 2) 37 | yield self.async.assertEqual(d.pop('foo'), 'ciao') 38 | yield self.async.assertEqual(d.size(), 2) 39 | 40 | def test_get(self): 41 | models = self.mapper 42 | d = models.register(self.structure()) 43 | session = models.session() 44 | with session.begin() as t: 45 | d = t.add(d) 46 | d['baba'] = 'foo' 47 | d['bee'] = 3 48 | self.assertEqual(len(d.cache.toadd), 2) 49 | yield t.on_result 50 | result = yield multi_async((d['baba'], d.get('bee'), d.get('ggg'), 51 | d.get('ggg', 1))) 52 | self.assertEqual(result, ['foo', 3, None, 1]) 53 | yield self.async.assertRaises(KeyError, lambda : d['gggggg']) 54 | 55 | def test_keys(self): 56 | models = self.mapper 57 | d = models.register(self.create_one()) 58 | session = models.session() 59 | yield session.add(d) 60 | values = yield d.keys() 61 | self.assertEqual(set(('bla', 'pluto')), set(values)) 62 | 63 | def test_values(self): 64 | models = self.mapper 65 | d = models.register(self.create_one()) 66 | session = models.session() 67 | yield session.add(d) 68 | values = yield d.values() 69 | self.assertEqual(set(('foo', 3)), set(values)) 70 | 71 | def test_items(self): 72 | models = self.mapper 73 | d = models.register(self.create_one()) 74 | session = models.session() 75 | yield session.add(d) 76 | values = yield d.items() 77 | data = {'bla': 'foo', 'pluto': 3} 78 | self.assertNotEqual(data, values) 79 | self.assertEqual(data, dict(values)) 80 | -------------------------------------------------------------------------------- /tests/all/structures/list.py: -------------------------------------------------------------------------------- 1 | from stdnet import odm 2 | from stdnet.utils import test, encoders 3 | 4 | from .base import StructMixin 5 | 6 | 7 | class TestList(StructMixin, test.TestCase): 8 | structure = odm.List 9 | name = 'list' 10 | 11 | def create_one(self): 12 | l = odm.List() 13 | l.push_back(3) 14 | l.push_back(5.6) 15 | return l 16 | 17 | def test_items(self): 18 | l = yield self.test_meta() 19 | self.assertFalse(l.session.transaction) 20 | yield l.push_back('save') 21 | yield l.push_back({'test': 1}) 22 | yield self.async.assertEqual(l.size(), 4) 23 | result = [3,5.6,'save',"{'test': 1}"] 24 | yield self.async.assertEqual(l.items(), result) 25 | 26 | def test_json_list(self): 27 | models = self.mapper 28 | l = models.register(self.structure(value_pickler=encoders.Json())) 29 | self.assertIsInstance(l.value_pickler, encoders.Json) 30 | 
self.assertTrue(l.id) 31 | session = models.session() 32 | with session.begin() as t: 33 | t.add(l) 34 | l.push_back(3) 35 | l.push_back(5.6) 36 | l.push_back('save') 37 | l.push_back({'test': 1}) 38 | l.push_back({'test': 2}) 39 | self.assertEqual(len(l.cache.back), 5) 40 | yield t.on_result 41 | yield self.async.assertEqual(l.size(), 5) 42 | result = [3, 5.6, 'save', {'test': 1}, {'test': 2}] 43 | yield self.async.assertEqual(l.items(), result) 44 | self.assertEqual(list(l), result) 45 | 46 | def test_pop_front(self): 47 | list = yield self.not_empty() 48 | elem = yield list.pop_front() 49 | self.assertEqual(elem, 3) 50 | elem = yield list.pop_front() 51 | self.assertEqual(elem, 5.6) 52 | elem = yield list.pop_front() 53 | self.assertEqual(elem, None) 54 | 55 | def test_pop_back(self): 56 | list = yield self.not_empty() 57 | elem = yield list.pop_back() 58 | self.assertEqual(elem, 5.6) 59 | elem = yield list.pop_back() 60 | self.assertEqual(elem, 3) 61 | elem = yield list.pop_back() 62 | self.assertEqual(elem, None) 63 | 64 | def test_block_pop_front(self): 65 | list = yield self.not_empty() 66 | elem = yield list.block_pop_front(1) 67 | self.assertEqual(elem, 3) 68 | elem = yield list.block_pop_front(1) 69 | self.assertEqual(elem, 5.6) 70 | elem = yield list.block_pop_front(1) 71 | self.assertEqual(elem, None) 72 | 73 | def test_block_pop_back(self): 74 | list = yield self.not_empty() 75 | elem = yield list.block_pop_back(1) 76 | self.assertEqual(elem, 5.6) 77 | elem = yield list.block_pop_back(1) 78 | self.assertEqual(elem, 3) 79 | elem = yield list.block_pop_back(1) 80 | self.assertEqual(elem, None) 81 | -------------------------------------------------------------------------------- /tests/all/structures/numarray.py: -------------------------------------------------------------------------------- 1 | import os 2 | from datetime import date 3 | 4 | from stdnet import odm, InvalidTransaction 5 | from stdnet.utils import test, encoders, zip 6 | from stdnet.utils.populate import populate 7 | 8 | from .base import StructMixin 9 | 10 | class TestNumberArray(StructMixin, test.TestCase): 11 | structure = odm.NumberArray 12 | name = 'numberarray' 13 | 14 | def create_one(self): 15 | a = self.structure() 16 | return a.push_back(56).push_back(-78.6) 17 | 18 | def testSizeResize(self): 19 | a = yield self.not_empty() 20 | yield self.async.assertEqual(a.size(), 2) 21 | yield self.async.assertEqual(len(a), 2) 22 | yield self.async.assertEqual(a.resize(10), 10) 23 | data = yield a.items() 24 | self.assertEqual(len(data), 10) 25 | self.assertAlmostEqual(data[0], 56.0) 26 | self.assertAlmostEqual(data[1], -78.6) 27 | for v in data[2:]: 28 | self.assertNotEqual(v,v) 29 | 30 | def testSetGet(self): 31 | a = yield self.not_empty() 32 | yield self.async.assertEqual(a.size(), 2) 33 | value = yield a[1] 34 | self.assertAlmostEqual(value, -78.6) -------------------------------------------------------------------------------- /tests/all/structures/set.py: -------------------------------------------------------------------------------- 1 | from stdnet import odm 2 | from stdnet.utils import test 3 | 4 | from .base import StructMixin 5 | 6 | 7 | class TestSet(StructMixin, test.TestCase): 8 | structure = odm.Set 9 | name = 'set' 10 | 11 | def create_one(self): 12 | s = self.structure() 13 | s.update((1,2,3,4,5,5)) 14 | return s 15 | 16 | def test_update(self): 17 | # Typical usage. 
Add a set to a session 18 | s = self.empty() 19 | s.session.add(s) 20 | yield s.add(8) 21 | yield self.async.assertEqual(s.size(), 1) 22 | yield s.update((1,2,3,4,5,5)) 23 | yield self.async.assertEqual(s.size(), 6) 24 | 25 | def test_update_delete(self): 26 | s = self.empty() 27 | with s.session.begin() as t: 28 | t.add(s) 29 | s.update((1,2,3,4,5,5)) 30 | s.discard(2) 31 | s.discard(67) 32 | s.remove(4) 33 | s.remove(46) 34 | s.difference_update((1,56,89)) 35 | yield t.on_result 36 | yield self.async.assertEqual(s.size(), 2) 37 | yield s.difference_update((3,5,6,7)) 38 | yield self.async.assertEqual(s.size(), 0) 39 | 40 | -------------------------------------------------------------------------------- /tests/all/structures/string.py: -------------------------------------------------------------------------------- 1 | import os 2 | from datetime import date 3 | 4 | from stdnet import odm, InvalidTransaction 5 | from stdnet.utils import test, encoders, zip 6 | from stdnet.utils.populate import populate 7 | 8 | from .base import StructMixin 9 | 10 | 11 | class TestString(StructMixin, test.TestCase): 12 | structure = odm.String 13 | name = 'string' 14 | 15 | def create_one(self): 16 | a = self.structure() 17 | a.push_back('this is a test') 18 | return a 19 | 20 | def test_incr(self): 21 | a = self.empty() 22 | a.session.add(a) 23 | yield self.async.assertEqual(a.incr(), 1) 24 | yield self.async.assertEqual(a.incr(), 2) 25 | yield self.async.assertEqual(a.incr(3), 5) 26 | yield self.async.assertEqual(a.incr(-7), -2) 27 | 28 | -------------------------------------------------------------------------------- /tests/all/structures/ts.py: -------------------------------------------------------------------------------- 1 | import os 2 | from datetime import date 3 | 4 | from stdnet import odm 5 | from stdnet.utils import test, encoders, zip 6 | 7 | from tests.all.multifields.timeseries import TsData 8 | 9 | from .base import StructMixin 10 | 11 | 12 | class TestTS(StructMixin, test.TestCase): 13 | structure = odm.TS 14 | data_cls = TsData 15 | name = 'ts' 16 | 17 | def create_one(self): 18 | ts = self.structure() 19 | ts.update(zip(self.data.dates, self.data.values)) 20 | self.assertFalse(ts.cache.cache) 21 | self.assertTrue(ts.cache.toadd) 22 | self.assertFalse(ts.cache.toremove) 23 | return ts 24 | 25 | def test_empty2(self): 26 | ts = self.empty() 27 | yield self.async.assertEqual(ts.front(), None) 28 | yield self.async.assertEqual(ts.back(), None) 29 | 30 | def test_range(self): 31 | ts = yield self.not_empty() 32 | yield self.async.assertEqual(ts.size(), len(set(self.data.dates))) 33 | front = yield ts.front() 34 | back = yield ts.back() 35 | self.assertTrue(back[0] > front[0]) 36 | all_dates = yield ts.itimes() 37 | N = len(all_dates) 38 | start = N // 4 39 | end = 3 * N // 4 40 | range = yield ts.range(all_dates[start], all_dates[end]) 41 | self.assertTrue(range) 42 | for time, val in range: 43 | self.assertTrue(time>=front[0]) 44 | self.assertTrue(time<=back[0]) 45 | 46 | def test_get(self): 47 | ts = yield self.not_empty() 48 | dt1 = self.data.dates[0] 49 | val1 = yield ts[dt1] 50 | self.assertTrue(val1) 51 | yield self.async.assertEqual(ts.get(dt1), val1) 52 | yield self.async.assertEqual(ts.get(date(1990,1,1)),None) 53 | yield self.async.assertEqual(ts.get(date(1990,1,1),1),1) 54 | yield self.async.assertRaises(KeyError, lambda : ts[date(1990,1,1)]) 55 | 56 | def test_pop(self): 57 | ts = yield self.not_empty() 58 | dt = self.data.dates[5] 59 | yield self.async.assertTrue(dt in ts) 60 | 
v = yield ts.pop(dt) 61 | self.assertTrue(v) 62 | yield self.async.assertFalse(dt in ts) 63 | yield self.async.assertRaises(KeyError, ts.pop, dt) 64 | yield self.async.assertEqual(ts.pop(dt,'bla'), 'bla') 65 | 66 | def test_rank_ipop(self): 67 | ts = yield self.not_empty() 68 | dt = self.data.dates[5] 69 | value = yield ts.get(dt) 70 | r = yield ts.rank(dt) 71 | all_dates = yield ts.itimes() 72 | self.assertEqual(all_dates[r].date(), dt) 73 | value2 = yield ts.ipop(r) 74 | self.assertEqual(value, value2) 75 | yield self.async.assertFalse(dt in ts) 76 | 77 | def test_pop_range(self): 78 | ts = yield self.not_empty() 79 | all_dates = yield ts.itimes() 80 | N = len(all_dates) 81 | start = N // 4 82 | end = 3 * N // 4 83 | range = yield ts.range(all_dates[start],all_dates[end]) 84 | self.assertTrue(range) 85 | range2 = yield ts.pop_range(all_dates[start], all_dates[end]) 86 | self.assertEqual(range, range2) 87 | all_dates = yield ts.itimes() 88 | all_dates = set(all_dates) 89 | self.assertTrue(all_dates) 90 | for dt,_ in range: 91 | self.assertFalse(dt in all_dates) 92 | -------------------------------------------------------------------------------- /tests/all/structures/zset.py: -------------------------------------------------------------------------------- 1 | import os 2 | from datetime import date 3 | 4 | from stdnet import odm 5 | from stdnet.utils import test, encoders, zip 6 | from stdnet.utils.populate import populate 7 | 8 | from .base import StructMixin 9 | 10 | dates = list(set(populate('date',100,start=date(2009,6,1),end=date(2010,6,6)))) 11 | values = populate('float',len(dates),start=0,end=1000) 12 | 13 | 14 | class TestZset(StructMixin, test.TestCase): 15 | structure = odm.Zset 16 | name = 'zset' 17 | result = [(0.0022,'pluto'), 18 | (0.06,'mercury'), 19 | (0.11,'mars'), 20 | (0.82,'venus'), 21 | (1,'earth'), 22 | (14.6,'uranus'), 23 | (17.2,'neptune'), 24 | (95.2,'saturn'), 25 | (317.8,'juppiter')] 26 | 27 | def create_one(self): 28 | l = self.structure() 29 | l.add(1,'earth') 30 | l.add(0.06,'mercury') 31 | l.add(317.8,'juppiter') 32 | l.update(((95.2,'saturn'),\ 33 | (0.82,'venus'),\ 34 | (14.6,'uranus'),\ 35 | (0.11,'mars'), 36 | (17.2,'neptune'), 37 | (0.0022,'pluto'))) 38 | self.assertEqual(len(l.cache.toadd), 9) 39 | self.assertFalse(l.cache.cache) 40 | self.assertTrue(l.cache.toadd) 41 | self.assertFalse(l.cache.toremove) 42 | return l 43 | 44 | def planets(self): 45 | models = self.mapper 46 | l = models.register(self.create_one()) 47 | self.assertTrue(l.id) 48 | session = models.session() 49 | with session.begin() as t: 50 | t.add(l) 51 | size = yield l.size() 52 | self.assertEqual(size, 0) 53 | yield t.on_result 54 | self.assertTrue(l.get_state().persistent) 55 | size = yield l.size() 56 | self.assertEqual(size, 9) 57 | yield l 58 | 59 | def test_irange(self): 60 | l = yield self.planets() 61 | # Get the whole range without the scores 62 | r = yield l.irange(withscores=False) 63 | self.assertEqual(r, [v[1] for v in self.result]) 64 | 65 | def test_irange_withscores(self): 66 | l = yield self.planets() 67 | # Get the whole range 68 | r = yield l.irange() 69 | self.assertEqual(r, self.result) 70 | 71 | def test_range(self): 72 | l = yield self.planets() 73 | r = yield l.range(0.5, 20, withscores=False) 74 | self.assertEqual(r, ['venus', 'earth', 'uranus', 'neptune']) 75 | 76 | def test_range_withscores(self): 77 | l = yield self.planets() 78 | r = yield l.range(0.5,20) 79 | self.assertTrue(r) 80 | k1 = 0.5 81 | for k, v in r: 82 | self.assertTrue(k>=k1) 83 | 
self.assertTrue(k<=20) 84 | k1 = k 85 | 86 | def test_iter(self): 87 | '''test a very simple zset with integer''' 88 | l = yield self.planets() 89 | r = list(l) 90 | v = [t[1] for t in self.result] 91 | self.assertEqual(r,v) 92 | 93 | def test_items(self): 94 | '''test a very simple zset with integer''' 95 | l = yield self.planets() 96 | r = list(l.items()) 97 | self.assertEqual(r, [r[1] for r in self.result]) 98 | 99 | -------------------------------------------------------------------------------- /tests/all/topics/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lsbardel/python-stdnet/78db5320bdedc3f28c5e4f38cda13a4469e35db7/tests/all/topics/__init__.py -------------------------------------------------------------------------------- /tests/all/topics/permissions.py: -------------------------------------------------------------------------------- 1 | from random import choice 2 | from examples.permissions import User, Group, Role, Permission 3 | 4 | from stdnet import odm 5 | from stdnet.utils import test, zip 6 | 7 | read = 10 8 | comment = 15 9 | create = 20 10 | update = 30 11 | delete = 40 12 | 13 | class MyModel(odm.StdModel): 14 | pass 15 | 16 | 17 | class NamesGenerator(test.DataGenerator): 18 | 19 | def generate(self): 20 | group_size = self.size // 2 21 | self.usernames = self.populate(min_len=5, max_len=20) 22 | self.passwords = self.populate(min_len=7, max_len=20) 23 | self.groups = self.populate(size=group_size, min_len=5, max_len=10) 24 | 25 | 26 | 27 | class TestPermissions(test.TestCase): 28 | models = (User, Group, Role, Permission, MyModel) 29 | data_cls = NamesGenerator 30 | 31 | @classmethod 32 | def after_setup(cls): 33 | d = cls.data 34 | models = cls.mapper 35 | groups = [] 36 | groups.append(models.group.create_user(username='stdnet', 37 | can_login=False)) 38 | for username, password in zip(d.usernames, d.passwords): 39 | groups.append(models.group.create_user(username=username, 40 | password=password)) 41 | yield cls.multi_async(groups) 42 | session = models.session() 43 | groups = yield session.query(Group).all() 44 | with models.session().begin() as t: 45 | for group in groups: 46 | group.create_role('family') # create the group-family role 47 | group.create_role('friends') # create the group-friends role 48 | yield t.on_result 49 | 50 | def random_group(self, *excludes): 51 | if excludes: 52 | name = choice(list(set(self.data.usernames)-set(excludes))) 53 | else: 54 | name = choice(self.data.usernames) 55 | return self.mapper.group.get(name=name) 56 | 57 | def test_group_query(self): 58 | groups = self.mapper.group 59 | cache = groups._meta.dfields['user'].get_cache_name() 60 | groups = yield groups.all() 61 | for g in groups: 62 | self.assertTrue(hasattr(g, cache)) 63 | self.assertEqual(g.user.username, g.name) 64 | 65 | def test_create_role(self, name=None): 66 | # Create a new role 67 | name = name or self.data.random_string() 68 | models = self.mapper 69 | group = yield self.random_group() 70 | role = yield group.create_role(name) # add a random role 71 | self.assertEqual(role.name, name) 72 | self.assertEqual(role.owner, group) 73 | permission = yield role.add_permission(MyModel, read) 74 | self.assertEqual(permission.model_type, MyModel) 75 | self.assertEqual(permission.object_pk, '') 76 | self.assertEqual(permission.operation, read) 77 | # 78 | # the role should have only one permission 79 | permissions = yield role.permissions.all() 80 | self.assertTrue(len(permissions)>=1) 
81 | yield role 82 | 83 | def test_role_assignto_group(self): 84 | role = yield self.test_create_role() 85 | group = yield self.random_group(role.owner.name) 86 | # we add role to group 87 | # return the Subject Assignment (SA) for this role-subject link 88 | sa = yield role.assignto(group) 89 | self.assertEqual(sa.role, role) 90 | self.assertEqual(sa.group, group) 91 | # 92 | # group has a new role 93 | roles = yield group.roles.all() 94 | self.assertTrue(role in roles) 95 | -------------------------------------------------------------------------------- /tests/all/utils/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lsbardel/python-stdnet/78db5320bdedc3f28c5e4f38cda13a4469e35db7/tests/all/utils/__init__.py -------------------------------------------------------------------------------- /tests/all/utils/intervals.py: -------------------------------------------------------------------------------- 1 | from stdnet.utils import test, Interval, Intervals, pickle 2 | 3 | 4 | class TestInterval(test.TestCase): 5 | 6 | def intervals(self): 7 | a = Interval(4,6) 8 | b = Interval(8,10) 9 | intervals = Intervals((b,a)) 10 | self.assertEqual(len(intervals),2) 11 | self.assertEqual(intervals[0],a) 12 | self.assertEqual(intervals[1],b) 13 | return intervals 14 | 15 | def testSimple(self): 16 | a = Interval(4,6) 17 | self.assertEqual(a.start,4) 18 | self.assertEqual(a.end,6) 19 | self.assertEqual(tuple(a),(4,6)) 20 | self.assertRaises(ValueError, Interval, 6, 3) 21 | 22 | def testPickle(self): 23 | a = Interval(4,6) 24 | s = pickle.dumps(a) 25 | b = pickle.loads(s) 26 | self.assertEqual(type(b),tuple) 27 | self.assertEqual(len(b),2) 28 | self.assertEqual(b[0],4) 29 | self.assertEqual(b[1],6) 30 | 31 | def testPickleIntervals(self): 32 | a = self.intervals() 33 | s = pickle.dumps(a) 34 | b = pickle.loads(s) 35 | self.assertEqual(type(b),list) 36 | self.assertEqual(len(b),len(a)) 37 | 38 | def testmultiple(self): 39 | i = self.intervals() 40 | a = Interval(20,30) 41 | i.append(a) 42 | self.assertEqual(len(i),3) 43 | self.assertEqual(i[-1],a) 44 | i.append(Interval(18,21)) 45 | self.assertEqual(len(i),3) 46 | self.assertNotEqual(i[-1],a) 47 | self.assertEqual(i[-1].start,18) 48 | self.assertEqual(i[-1].end,30) 49 | i.append(Interval(8,10)) 50 | self.assertEqual(len(i),3) 51 | self.assertEqual(i[-2].start,8) 52 | self.assertEqual(i[-2].end,10) 53 | i.append(Interval(8,25)) 54 | self.assertEqual(len(i),2) 55 | self.assertEqual(i[-1].start,8) 56 | self.assertEqual(i[-1].end,30) 57 | i.append(Interval(1,40)) 58 | self.assertEqual(len(i),1) 59 | self.assertEqual(i[0].start,1) 60 | self.assertEqual(i[0].end,40) 61 | 62 | def testAppendtuple(self): 63 | i = self.intervals() 64 | i.append((18,21)) 65 | self.assertEqual(len(i),3) 66 | self.assertEqual(i[-1].start,18) 67 | self.assertEqual(i[-1].end,21) 68 | self.assertRaises(TypeError, i.append, 3) 69 | self.assertRaises(ValueError, i.append, (8,2)) 70 | -------------------------------------------------------------------------------- /tests/all/utils/zset.py: -------------------------------------------------------------------------------- 1 | from random import randint 2 | from stdnet.utils import test 3 | from stdnet.utils.zset import zset 4 | 5 | class TestPythonZset(test.TestCase): 6 | 7 | def test_add(self): 8 | s = zset() 9 | s.add(3, 'ciao') 10 | s.add(4, 'bla') 11 | self.assertEqual(len(s), 2) 12 | s.add(-1, 'bla') 13 | self.assertEqual(len(s), 2) 14 | data = list(s) 15 | 
self.assertEqual(data, ['bla', 'ciao']) 16 | 17 | def test_rank(self): 18 | s = zset() 19 | s.add(3, 'ciao') 20 | s.add(4, 'bla') 21 | s.add(2, 'foo') 22 | s.add(20, 'pippo') 23 | s.add(-1, 'bla') 24 | self.assertEqual(len(s), 4) 25 | self.assertEqual(s.rank('bla'), 0) 26 | self.assertEqual(s.rank('foo'), 1) 27 | self.assertEqual(s.rank('ciao'), 2) 28 | self.assertEqual(s.rank('pippo'), 3) 29 | self.assertEqual(s.rank('xxxx'), None) 30 | 31 | def test_update(self): 32 | string = test.populate('string', size=100) 33 | values = test.populate('float', size=100) 34 | s = zset() 35 | s.update(zip(values,string)) 36 | self.assertTrue(s) 37 | prev = None 38 | for score, _ in s.items(): 39 | if prev is not None: 40 | self.assertTrue(score>=prev) 41 | prev = score 42 | return s 43 | 44 | def test_remove(self): 45 | s = self.test_update() 46 | values = list(s) 47 | while values: 48 | index = randint(0, len(values)-1) 49 | val = values.pop(index) 50 | self.assertTrue(val in s) 51 | self.assertNotEqual(s.remove(val), None) 52 | self.assertFalse(val in s) 53 | self.assertFalse(s) 54 | 55 | -------------------------------------------------------------------------------- /tests/lua/odm.lua: -------------------------------------------------------------------------------- 1 | local tabletools = require("tabletools") 2 | local cjson = require("json") 3 | local odm = require("odm") 4 | local redis = require("rserver") 5 | odm.redis = redis 6 | 7 | -- Connect to redis server 8 | redis.connect('fibpalap1d', 6379) 9 | redis.call('select', 15) 10 | 11 | local suite = {} 12 | 13 | local model_meta = { 14 | namespace = 'test:simplemodel', 15 | id_name = 'id', 16 | id_type = 1, 17 | indices = {code=true, group=false} 18 | } 19 | 20 | local function commit_data (model, data) 21 | local num, ctx, flat = # data, 0, {} 22 | for _, sdata in ipairs(data) do 23 | table.insert(flat, sdata.action or '') 24 | table.insert(flat, sdata.id or '') 25 | table.insert(flat, sdata.score or '') 26 | local odata = sdata.data or {} 27 | local count = 0 28 | table.insert(flat, 0) 29 | ctx = # flat 30 | for field, value in pairs(odata) do 31 | table.insert(flat, field) 32 | table.insert(flat, value) 33 | count = count + 1 34 | end 35 | flat[ctx] = 2 * count 36 | end 37 | return model:commit(# data, flat) 38 | end 39 | 40 | suite.test_range_selectors = function () 41 | assert_true(odm.range_selectors['ge'](3, 2)) 42 | assert_true(odm.range_selectors['ge'](3, 3)) 43 | assert_false(odm.range_selectors['ge'](3, 4)) 44 | end 45 | 46 | suite.test_commit_simple = function () 47 | odm.redis.call('flushdb') 48 | local model = odm.model(model_meta) 49 | local ids = commit_data(model, {{action='add', data={code = 'bla'}}, 50 | {action='add', data={code = 'foo'}}}) 51 | assert_equal(# ids, 2) 52 | assert_equal(ids[1][1], 1) 53 | assert_equal(ids[1][2], 1) 54 | assert_equal(ids[2][1], 2) 55 | assert_equal(ids[2][2], 1) 56 | -- 57 | assert_equal(redis.call('scard', model.idset), 2) 58 | -- 59 | -- this should contain an error 60 | local ids = commit_data(model, {{action='add', data={code = 'bla'}}, 61 | {action='add', data={code = 'pippo'}}}) 62 | assert_equal(# ids, 2) 63 | assert_equal('', ids[1][1]) 64 | assert_equal(0, ids[1][2]) 65 | assert_equal(3, ids[2][1]) 66 | assert_equal(1, ids[2][2]) 67 | -- 68 | assert_equal(redis.call('scard', model.idset), 3) 69 | end 70 | 71 | suite.test_commit_update_index = function () 72 | odm.redis.call('flushdb') 73 | local model = odm.model(model_meta) 74 | local ids = commit_data(model, {{action='add', 
data={code='pluto', group='planet'}}}) 75 | assert_equal(# ids, 1) 76 | assert_equal(ids[1][1], 1) 77 | -- Update the instance with a different group 78 | ids = commit_data(model, {{action='change', id=1, data={code='pluto', group='smallplanet'}}}) 79 | assert_equal(# ids, 1) 80 | assert_equal(1, ids[1][1]) 81 | end 82 | 83 | 84 | suite.test_query = function () 85 | odm.redis.call('flushdb') 86 | local model = odm.model(model_meta) 87 | local ids = commit_data(model, {{action='add', data={code = 'bla'}}, 88 | {action='add', data={code = 'foo'}}, 89 | {action='add', data={code = 'pluto', group='planet'}}}) 90 | local r = model:query('group', model:temp_key(), {'value', ''}) 91 | assert_equal(r, 2) 92 | r = model:query('group', model:temp_key(), {'value', 'planet'}) 93 | assert_equal(r, 1) 94 | r = model:query('group', model:temp_key(), {'value', 'xxxxx'}) 95 | assert_equal(r, 0) 96 | end 97 | 98 | return suite 99 | -------------------------------------------------------------------------------- /tests/lua/rserver.lua: -------------------------------------------------------------------------------- 1 | -- Obtain the redis client 2 | local client = require("redis") 3 | local function bool2int (result) 4 | if result then 5 | return 1 6 | else 7 | return 0 8 | end 9 | end 10 | 11 | local redis = { 12 | hsetnx = bool2int, 13 | exists = bool2int 14 | } 15 | 16 | redis.connect = function (...) 17 | redis.client = client.connect(unpack(arg)) 18 | end 19 | 20 | redis.call = function (command, ...) 21 | local lcom = string.lower(command) 22 | local com = redis.client[lcom] 23 | if com then 24 | local callback = redis[lcom] 25 | local res = com(redis.client, unpack(arg)) 26 | if callback then 27 | res = callback(res) 28 | end 29 | return res 30 | else 31 | error('Unknown redis command ' .. command) 32 | end 33 | end 34 | 35 | return redis -------------------------------------------------------------------------------- /tests/lua/utils.lua: -------------------------------------------------------------------------------- 1 | local t = require("tabletools") 2 | 3 | local suite = {} 4 | 5 | suite.test_init = function () 6 | assert_true(# t.init(5,-3) == 5) 7 | assert_true(# t.init(0,7) == 0) 8 | assert_true(# t.init(-1,0) == 0) 9 | end 10 | 11 | 12 | suite.test_init_value = function () 13 | local vector = t.init(7, 98) 14 | assert_true(# vector == 7) 15 | for i,v in ipairs(vector) do 16 | assert(v == 98) 17 | end 18 | end 19 | 20 | 21 | 22 | return suite 23 | --------------------------------------------------------------------------------
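For reference, the pure-Python zset exercised in tests/all/utils/zset.py behaves as a score-ordered set. The sketch below is based only on the behaviour those tests assert (re-adding a member replaces its score, rank() is the zero-based position by ascending score, iteration yields members in score order, and items() yields (score, member) pairs); it is illustrative and not part of the suite:

    from stdnet.utils.zset import zset

    s = zset()
    s.add(3, 'ciao')
    s.add(4, 'bla')
    s.add(-1, 'bla')            # re-adding 'bla' keeps a single entry with the new score
    assert len(s) == 2
    assert s.rank('bla') == 0   # lowest score ranks first
    assert s.rank('ciao') == 1
    assert list(s) == ['bla', 'ciao']
    assert [m for _, m in s.items()] == ['bla', 'ciao']
    assert s.remove('bla') is not None
    assert 'bla' not in s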