├── .coveragerc ├── .gitignore ├── .travis.yml ├── AUTHORS.rst ├── LICENSE ├── README.rst ├── docs ├── Makefile ├── conf.py └── index.rst ├── example ├── chinook.sqlite └── ipydb_pandas_DataFrame.ipynb ├── ipydb ├── __init__.py ├── asciitable.py ├── completion.py ├── engine.py ├── magic.py ├── metadata │ ├── __init__.py │ ├── model.py │ └── persist.py ├── plugin.py └── utils.py ├── setup.py └── tests ├── __init__.py ├── dbs ├── chinook.sqlite └── test.sql ├── test_completion.py ├── test_integration.py ├── test_magic.py ├── test_metadata.py ├── test_model.py ├── test_persist.py └── test_plugin.py /.coveragerc: -------------------------------------------------------------------------------- 1 | [report] 2 | omit = 3 | */python?.?/* 4 | */site-packages/nose/* 5 | */tests/* 6 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # docs build 2 | _build 3 | .DS_Store 4 | cover 5 | .coverage 6 | dist 7 | *.swp 8 | *.pyc 9 | .ropeproject 10 | build 11 | *.egg-info 12 | *.orig 13 | 14 | # catags 15 | tags 16 | 17 | # java things 18 | .project 19 | .settings 20 | .classpath 21 | .checkstyle 22 | target/ 23 | 24 | # temp file: 25 | *~ 26 | .ipynb_checkpoints 27 | 28 | # DB files generated by tests 29 | tests/dbs/temp.sqlite 30 | tests/dbs/temp.sqlite-journal 31 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | # travis-ci configuration 2 | language: python 3 | python: 4 | - "2.7" 5 | - "3.3" 6 | - "3.4" 7 | install: 8 | - pip install -e . 
--use-mirrors 9 | - pip install coveralls --use-mirrors 10 | - pip install flake8 coverage --use-mirrors 11 | - pip install mock==1.0.1 --use-mirrors 12 | script: 13 | - nosetests --with-coverage --cover-package=ipydb 14 | - flake8 --exit-zero ipydb 15 | after_success: 16 | - coveralls 17 | -------------------------------------------------------------------------------- /AUTHORS.rst: -------------------------------------------------------------------------------- 1 | - Jay Sweeney (`@jaysw `_) 2 | 3 | - Kristian Perkins (`@krockode `_) 4 | 5 | - litaotao (`@litaotao `_) 6 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (c) 2014, Jay Sweeney 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 14 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | ipydb: Work with databases in IPython 2 | ===================================== 3 | 4 | .. image:: https://travis-ci.org/jaysw/ipydb.svg?branch=master 5 | :target: https://travis-ci.org/jaysw/ipydb 6 | 7 | 8 | .. 
image:: https://coveralls.io/repos/jaysw/ipydb/badge.svg?branch=master&service=github 9 | :target: https://coveralls.io/r/jaysw/ipydb?branch=master 10 | 11 | 12 | ipydb is an `IPython `_ plugin for running SQL queries and viewing their results. 13 | 14 | Usage 15 | ----- 16 | Some demonstration videos are available in the `documentation `_. 17 | 18 | .. code-block:: pycon 19 | 20 | $ ipython 21 | In [1] : %load_ext ipydb 22 | In [2] : %automagic on 23 | Automagic is ON, % prefix IS NOT needed for line magics. 24 | 25 | In [3] : connecturl mysql://user:pass@localhost/employees 26 | In [4] localhost/employees: tables 27 | departments 28 | dept_emp 29 | dept_manager 30 | employees 31 | salaries 32 | titles 33 | 34 | In [5] localhost/employees: fields departments 35 | departments 36 | ----------- 37 | dept_name VARCHAR(40) 38 | dept_no CHAR(4) 39 | 40 | In [6] localhost/employees: select * from departments order by dept_name 41 | +---------+--------------------+ 42 | | dept_no | dept_name | 43 | +---------+--------------------+ 44 | | d009 | Customer Service | 45 | | d005 | Development | 46 | | d002 | Finance | 47 | | d003 | Human Resources | 48 | | d001 | Marketing | 49 | | d004 | Production | 50 | | d006 | Quality Management | 51 | | d008 | Research | 52 | | d007 | Sales | 53 | 54 | 55 | Features 56 | -------- 57 | 58 | - Tab-completion of table names, fields and joins 59 | - View query results in ascii-table format piped through less 60 | - Single-line or multi-line query editing 61 | - Tab-completion metadata is read in the background and persisted across sessions 62 | - Cross-database support, thanks to SqlAlchemy: `supported databases `_ 63 | 64 | 65 | Installation 66 | ------------ 67 | 68 | To install ipydb: 69 | 70 | .. code-block:: bash 71 | 72 | $ pip install ipydb 73 | 74 | You will need a python driver for your database of choice. For example: 75 | 76 | .. 
code-block:: bash 77 | 78 | $ pip install mysql-python 79 | 80 | ipydb uses `SqlAlchemy `_ to interact with databases. 81 | See the `Supported Databases `_ page 82 | for a (large!) list of supported `DB-API 2.0 `_ drivers and how to 83 | write a connection URL for your particular database. 84 | 85 | 86 | Start ipython and load the ipydb plugin: 87 | 88 | .. code-block:: bash 89 | 90 | $ ipython 91 | In [1]: load_ext ipydb 92 | 93 | 94 | Documentation 95 | ------------- 96 | 97 | Documentation is available at: http://ipydb.readthedocs.org 98 | 99 | 100 | Connecting to Databases 101 | ----------------------- 102 | 103 | There are two ways to connect to a database with ipydb. Directly via a connection url, using 104 | the ``connecturl`` magic function, or, using a connection 'nickname' with the ``connect`` magic function. 105 | 106 | 1. Using ``connecturl`` 107 | ^^^^^^^^^^^^^^^^^^^^^^^ 108 | 109 | You can connect to a database using an SqlAlchemy style url as follows: 110 | 111 | .. code-block:: pycon 112 | 113 | %connecturl drivername://username:password@host/database 114 | 115 | Some examples: 116 | 117 | .. code-block:: pycon 118 | 119 | In [3] : connecturl mysql://myuser:mypass@localhost/mydatabase 120 | In [4] : connecturl sqlite:///path/to/mydb.sqlite 121 | In [5] : connecturl sqlite:///:memory: 122 | 123 | See the `SqlAlchemy Documentation `_ for further information. 124 | 125 | 2. Using ``connect`` and a ``.db-connections`` configuration file 126 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ 127 | 128 | For this to work, you need to create a file called 129 | ``.db-connections`` located in your home directory. 130 | ``.db-connections`` is an "ini" formatted file, 131 | parsable by python's ConfigParser module. 132 | 133 | Here's an example of what ``~/.db-connections`` might look like: 134 | 135 | .. 
code-block:: ini 136 | 137 | [mydb] ; nickname 138 | type = mysql 139 | username = root 140 | password = xxxx 141 | host = localhost 142 | database = employees 143 | 144 | [myotherdb] ; nickname 145 | type = sqlite 146 | database = /path/to/file.sqlite 147 | 148 | Each database connection defined in ``~/.db-connections`` is 149 | then referenceable via its [section heading]. So with the 150 | above ``.db-connections`` file, the following examples would work: 151 | 152 | .. code-block:: pycon 153 | 154 | In [6] : connect mydb 155 | In [7] mydb : connect myotherdb 156 | 157 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 5 | SPHINXOPTS = 6 | SPHINXBUILD = sphinx-build 7 | PAPER = 8 | BUILDDIR = _build 9 | 10 | # User-friendly check for sphinx-build 11 | ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1) 12 | $(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/) 13 | endif 14 | 15 | # Internal variables. 16 | PAPEROPT_a4 = -D latex_paper_size=a4 17 | PAPEROPT_letter = -D latex_paper_size=letter 18 | ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 19 | # the i18n builder cannot share the environment and doctrees with the others 20 | I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 
21 | 22 | .PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext 23 | 24 | help: 25 | @echo "Please use \`make ' where is one of" 26 | @echo " html to make standalone HTML files" 27 | @echo " dirhtml to make HTML files named index.html in directories" 28 | @echo " singlehtml to make a single large HTML file" 29 | @echo " pickle to make pickle files" 30 | @echo " json to make JSON files" 31 | @echo " htmlhelp to make HTML files and a HTML help project" 32 | @echo " qthelp to make HTML files and a qthelp project" 33 | @echo " devhelp to make HTML files and a Devhelp project" 34 | @echo " epub to make an epub" 35 | @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" 36 | @echo " latexpdf to make LaTeX files and run them through pdflatex" 37 | @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" 38 | @echo " text to make text files" 39 | @echo " man to make manual pages" 40 | @echo " texinfo to make Texinfo files" 41 | @echo " info to make Texinfo files and run them through makeinfo" 42 | @echo " gettext to make PO message catalogs" 43 | @echo " changes to make an overview of all changed/added/deprecated items" 44 | @echo " xml to make Docutils-native XML files" 45 | @echo " pseudoxml to make pseudoxml-XML files for display purposes" 46 | @echo " linkcheck to check all external links for integrity" 47 | @echo " doctest to run all doctests embedded in the documentation (if enabled)" 48 | 49 | clean: 50 | rm -rf $(BUILDDIR)/* 51 | 52 | html: 53 | $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html 54 | @echo 55 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." 56 | 57 | dirhtml: 58 | $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml 59 | @echo 60 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." 
61 | 62 | singlehtml: 63 | $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml 64 | @echo 65 | @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." 66 | 67 | pickle: 68 | $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle 69 | @echo 70 | @echo "Build finished; now you can process the pickle files." 71 | 72 | json: 73 | $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json 74 | @echo 75 | @echo "Build finished; now you can process the JSON files." 76 | 77 | htmlhelp: 78 | $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp 79 | @echo 80 | @echo "Build finished; now you can run HTML Help Workshop with the" \ 81 | ".hhp project file in $(BUILDDIR)/htmlhelp." 82 | 83 | qthelp: 84 | $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp 85 | @echo 86 | @echo "Build finished; now you can run "qcollectiongenerator" with the" \ 87 | ".qhcp project file in $(BUILDDIR)/qthelp, like this:" 88 | @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/ipydb.qhcp" 89 | @echo "To view the help file:" 90 | @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/ipydb.qhc" 91 | 92 | devhelp: 93 | $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp 94 | @echo 95 | @echo "Build finished." 96 | @echo "To view the help file:" 97 | @echo "# mkdir -p $$HOME/.local/share/devhelp/ipydb" 98 | @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/ipydb" 99 | @echo "# devhelp" 100 | 101 | epub: 102 | $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub 103 | @echo 104 | @echo "Build finished. The epub file is in $(BUILDDIR)/epub." 105 | 106 | latex: 107 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 108 | @echo 109 | @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." 110 | @echo "Run \`make' in that directory to run these through (pdf)latex" \ 111 | "(use \`make latexpdf' here to do that automatically)." 
112 | 113 | latexpdf: 114 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 115 | @echo "Running LaTeX files through pdflatex..." 116 | $(MAKE) -C $(BUILDDIR)/latex all-pdf 117 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 118 | 119 | latexpdfja: 120 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 121 | @echo "Running LaTeX files through platex and dvipdfmx..." 122 | $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja 123 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 124 | 125 | text: 126 | $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text 127 | @echo 128 | @echo "Build finished. The text files are in $(BUILDDIR)/text." 129 | 130 | man: 131 | $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man 132 | @echo 133 | @echo "Build finished. The manual pages are in $(BUILDDIR)/man." 134 | 135 | texinfo: 136 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 137 | @echo 138 | @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." 139 | @echo "Run \`make' in that directory to run these through makeinfo" \ 140 | "(use \`make info' here to do that automatically)." 141 | 142 | info: 143 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 144 | @echo "Running Texinfo files through makeinfo..." 145 | make -C $(BUILDDIR)/texinfo info 146 | @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." 147 | 148 | gettext: 149 | $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale 150 | @echo 151 | @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." 152 | 153 | changes: 154 | $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes 155 | @echo 156 | @echo "The overview file is in $(BUILDDIR)/changes." 157 | 158 | linkcheck: 159 | $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck 160 | @echo 161 | @echo "Link check complete; look for any errors in the above output " \ 162 | "or in $(BUILDDIR)/linkcheck/output.txt." 
163 | 164 | doctest: 165 | $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest 166 | @echo "Testing of doctests in the sources finished, look at the " \ 167 | "results in $(BUILDDIR)/doctest/output.txt." 168 | 169 | xml: 170 | $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml 171 | @echo 172 | @echo "Build finished. The XML files are in $(BUILDDIR)/xml." 173 | 174 | pseudoxml: 175 | $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml 176 | @echo 177 | @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." 178 | -------------------------------------------------------------------------------- /docs/conf.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 3 | # ipydb documentation build configuration file, created by 4 | # sphinx-quickstart on Sun Sep 21 15:24:56 2014. 5 | # 6 | # This file is execfile()d with the current directory set to its 7 | # containing dir. 8 | # 9 | # Note that not all possible configuration values are present in this 10 | # autogenerated file. 11 | # 12 | # All configuration values have a default; values that are commented out 13 | # serve to show the default. 14 | 15 | import sys 16 | import os 17 | 18 | # If extensions (or modules to document with autodoc) are in another directory, 19 | # add these directories to sys.path here. If the directory is relative to the 20 | # documentation root, use os.path.abspath to make it absolute, like shown here. 21 | #sys.path.insert(0, os.path.abspath('.')) 22 | 23 | # -- General configuration ------------------------------------------------ 24 | 25 | # If your documentation needs a minimal Sphinx version, state it here. 26 | #needs_sphinx = '1.0' 27 | 28 | # Add any Sphinx extension module names here, as strings. They can be 29 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 30 | # ones. 
31 | extensions = [] 32 | 33 | # Add any paths that contain templates here, relative to this directory. 34 | templates_path = ['_templates'] 35 | 36 | # The suffix of source filenames. 37 | source_suffix = '.rst' 38 | 39 | # The encoding of source files. 40 | #source_encoding = 'utf-8-sig' 41 | 42 | # The master toctree document. 43 | master_doc = 'index' 44 | 45 | # General information about the project. 46 | project = u'ipydb' 47 | copyright = u'2014, Jay Sweeney' 48 | 49 | # The version info for the project you're documenting, acts as replacement for 50 | # |version| and |release|, also used in various other places throughout the 51 | # built documents. 52 | # 53 | # The short X.Y version. 54 | version = '0.0.2' 55 | # The full version, including alpha/beta/rc tags. 56 | release = '0.0.2' 57 | 58 | # The language for content autogenerated by Sphinx. Refer to documentation 59 | # for a list of supported languages. 60 | #language = None 61 | 62 | # There are two options for replacing |today|: either, you set today to some 63 | # non-false value, then it is used: 64 | #today = '' 65 | # Else, today_fmt is used as the format for a strftime call. 66 | #today_fmt = '%B %d, %Y' 67 | 68 | # List of patterns, relative to source directory, that match files and 69 | # directories to ignore when looking for source files. 70 | exclude_patterns = ['_build'] 71 | 72 | # The reST default role (used for this markup: `text`) to use for all 73 | # documents. 74 | #default_role = None 75 | 76 | # If true, '()' will be appended to :func: etc. cross-reference text. 77 | #add_function_parentheses = True 78 | 79 | # If true, the current module name will be prepended to all description 80 | # unit titles (such as .. function::). 81 | #add_module_names = True 82 | 83 | # If true, sectionauthor and moduleauthor directives will be shown in the 84 | # output. They are ignored by default. 85 | #show_authors = False 86 | 87 | # The name of the Pygments (syntax highlighting) style to use. 
88 | pygments_style = 'sphinx' 89 | 90 | # A list of ignored prefixes for module index sorting. 91 | #modindex_common_prefix = [] 92 | 93 | # If true, keep warnings as "system message" paragraphs in the built documents. 94 | #keep_warnings = False 95 | 96 | 97 | # -- Options for HTML output ---------------------------------------------- 98 | 99 | # The theme to use for HTML and HTML Help pages. See the documentation for 100 | # a list of builtin themes. 101 | 102 | # on_rtd is whether we are on readthedocs.org, this line of code grabbed from docs.readthedocs.org 103 | on_rtd = os.environ.get('READTHEDOCS', None) == 'True' 104 | 105 | if not on_rtd: # only import and set the theme if we're building docs locally 106 | import sphinx_rtd_theme 107 | html_theme = 'sphinx_rtd_theme' 108 | html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] 109 | 110 | # otherwise, readthedocs.org uses their theme by default, so no need to specify it 111 | #html_theme = 'sphinx_rtd_theme' 112 | 113 | # Theme options are theme-specific and customize the look and feel of a theme 114 | # further. For a list of options available for each theme, see the 115 | # documentation. 116 | #html_theme_options = {} 117 | 118 | # Add any paths that contain custom themes here, relative to this directory. 119 | #html_theme_path = ['_themes'] 120 | 121 | # The name for this set of Sphinx documents. If None, it defaults to 122 | # " v documentation". 123 | #html_title = None 124 | 125 | # A shorter title for the navigation bar. Default is the same as html_title. 126 | #html_short_title = None 127 | 128 | # The name of an image file (relative to this directory) to place at the top 129 | # of the sidebar. 130 | #html_logo = None 131 | 132 | # The name of an image file (within the static path) to use as favicon of the 133 | # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 134 | # pixels large. 
135 | #html_favicon = None 136 | 137 | # Add any paths that contain custom static files (such as style sheets) here, 138 | # relative to this directory. They are copied after the builtin static files, 139 | # so a file named "default.css" will overwrite the builtin "default.css". 140 | html_static_path = ['_static'] 141 | 142 | # Add any extra paths that contain custom files (such as robots.txt or 143 | # .htaccess) here, relative to this directory. These files are copied 144 | # directly to the root of the documentation. 145 | #html_extra_path = [] 146 | 147 | # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, 148 | # using the given strftime format. 149 | #html_last_updated_fmt = '%b %d, %Y' 150 | 151 | # If true, SmartyPants will be used to convert quotes and dashes to 152 | # typographically correct entities. 153 | #html_use_smartypants = True 154 | 155 | # Custom sidebar templates, maps document names to template names. 156 | #html_sidebars = {} 157 | 158 | # Additional templates that should be rendered to pages, maps page names to 159 | # template names. 160 | #html_additional_pages = {} 161 | 162 | # If false, no module index is generated. 163 | #html_domain_indices = True 164 | 165 | # If false, no index is generated. 166 | #html_use_index = True 167 | 168 | # If true, the index is split into individual pages for each letter. 169 | #html_split_index = False 170 | 171 | # If true, links to the reST sources are added to the pages. 172 | #html_show_sourcelink = True 173 | 174 | # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. 175 | #html_show_sphinx = True 176 | 177 | # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. 178 | #html_show_copyright = True 179 | 180 | # If true, an OpenSearch description file will be output, and all pages will 181 | # contain a tag referring to it. The value of this option must be the 182 | # base URL from which the finished HTML is served. 
183 | #html_use_opensearch = '' 184 | 185 | # This is the file name suffix for HTML files (e.g. ".xhtml"). 186 | #html_file_suffix = None 187 | 188 | # Output file base name for HTML help builder. 189 | htmlhelp_basename = 'ipydbdoc' 190 | 191 | 192 | # -- Options for LaTeX output --------------------------------------------- 193 | 194 | latex_elements = { 195 | # The paper size ('letterpaper' or 'a4paper'). 196 | #'papersize': 'letterpaper', 197 | 198 | # The font size ('10pt', '11pt' or '12pt'). 199 | #'pointsize': '10pt', 200 | 201 | # Additional stuff for the LaTeX preamble. 202 | #'preamble': '', 203 | } 204 | 205 | # Grouping the document tree into LaTeX files. List of tuples 206 | # (source start file, target name, title, 207 | # author, documentclass [howto, manual, or own class]). 208 | latex_documents = [ 209 | ('index', 'ipydb.tex', u'ipydb Documentation', 210 | u'Jay Sweeney', 'manual'), 211 | ] 212 | 213 | # The name of an image file (relative to this directory) to place at the top of 214 | # the title page. 215 | #latex_logo = None 216 | 217 | # For "manual" documents, if this is true, then toplevel headings are parts, 218 | # not chapters. 219 | #latex_use_parts = False 220 | 221 | # If true, show page references after internal links. 222 | #latex_show_pagerefs = False 223 | 224 | # If true, show URL addresses after external links. 225 | #latex_show_urls = False 226 | 227 | # Documents to append as an appendix to all manuals. 228 | #latex_appendices = [] 229 | 230 | # If false, no module index is generated. 231 | #latex_domain_indices = True 232 | 233 | 234 | # -- Options for manual page output --------------------------------------- 235 | 236 | # One entry per manual page. List of tuples 237 | # (source start file, name, description, authors, manual section). 238 | man_pages = [ 239 | ('index', 'ipydb', u'ipydb Documentation', 240 | [u'Jay Sweeney'], 1) 241 | ] 242 | 243 | # If true, show URL addresses after external links. 
244 | #man_show_urls = False 245 | 246 | 247 | # -- Options for Texinfo output ------------------------------------------- 248 | 249 | # Grouping the document tree into Texinfo files. List of tuples 250 | # (source start file, target name, title, author, 251 | # dir menu entry, description, category) 252 | texinfo_documents = [ 253 | ('index', 'ipydb', u'ipydb Documentation', 254 | u'Jay Sweeney', 'ipydb', 'One line description of project.', 255 | 'Miscellaneous'), 256 | ] 257 | 258 | # Documents to append as an appendix to all manuals. 259 | #texinfo_appendices = [] 260 | 261 | # If false, no module index is generated. 262 | #texinfo_domain_indices = True 263 | 264 | # How to display URL addresses: 'footnote', 'no', or 'inline'. 265 | #texinfo_show_urls = 'footnote' 266 | 267 | # If true, do not generate a @detailmenu in the "Top" node's menu. 268 | #texinfo_no_detailmenu = False 269 | -------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | .. ipydb documentation master file, created by 2 | sphinx-quickstart on Sun Sep 21 15:24:56 2014. 3 | You can adapt this file completely to your liking, but it should at least 4 | contain the root `toctree` directive. 5 | 6 | ipydb: Work with databases in IPython 7 | ===================================== 8 | 9 | ipydb is an `IPython `_ plugin for running SQL queries and viewing their results 10 | 11 | 12 | Here's some asciicasts showing off some of ipydb's features. 13 | 14 | 1) Running queries, tab-completion and tab-expansions with the ``%sql`` command. 15 | 16 | .. raw:: html 17 | 18 | 19 | 20 | 2) Schema browsing with ``tables``, ``fields``, ``fks``, ``relationships`` and 21 | ``describe`` 22 | 23 | .. raw:: html 24 | 25 | 26 | 27 | 28 | Contents: 29 | 30 | .. 
toctree:: 31 | :maxdepth: 2 32 | 33 | 34 | 35 | Indices and tables 36 | ================== 37 | 38 | * :ref:`genindex` 39 | * :ref:`modindex` 40 | * :ref:`search` 41 | 42 | -------------------------------------------------------------------------------- /example/chinook.sqlite: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jaysw/ipydb/ecb1014af9b7f2cd0ab2f4d31b0b18e52f3c813c/example/chinook.sqlite -------------------------------------------------------------------------------- /example/ipydb_pandas_DataFrame.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "### load extention" 8 | ] 9 | }, 10 | { 11 | "cell_type": "code", 12 | "execution_count": 1, 13 | "metadata": { 14 | "collapsed": false, 15 | "scrolled": true 16 | }, 17 | "outputs": [ 18 | { 19 | "name": "stdout", 20 | "output_type": "stream", 21 | "text": [ 22 | "Welcome to ipydb 0.0.2!\n", 23 | "type %ipydb_help for documentation of ipydb\n" 24 | ] 25 | } 26 | ], 27 | "source": [ 28 | "%load_ext ipydb" 29 | ] 30 | }, 31 | { 32 | "cell_type": "code", 33 | "execution_count": 2, 34 | "metadata": { 35 | "collapsed": false 36 | }, 37 | "outputs": [ 38 | { 39 | "name": "stdout", 40 | "output_type": "stream", 41 | "text": [ 42 | "chinook.sqlite ipydb_pandas_DataFrame.ipynb\r\n" 43 | ] 44 | } 45 | ], 46 | "source": [ 47 | "!ls" 48 | ] 49 | }, 50 | { 51 | "cell_type": "code", 52 | "execution_count": 5, 53 | "metadata": { 54 | "collapsed": false 55 | }, 56 | "outputs": [ 57 | { 58 | "name": "stdout", 59 | "output_type": "stream", 60 | "text": [ 61 | "ipydb is connecting to: postgresql://:xxx@localhost:5432/hacking\n", 62 | "ipydb is fetching database metadata\n" 63 | ] 64 | } 65 | ], 66 | "source": [ 67 | "%connect mydb" 68 | ] 69 | }, 70 | { 71 | "cell_type": "code", 72 | "execution_count": 6, 73 | "metadata": { 74 | 
"collapsed": false 75 | }, 76 | "outputs": [ 77 | { 78 | "name": "stdout", 79 | "output_type": "stream", 80 | "text": [ 81 | "ipydb is connecting to: sqlite:///chinook.sqlite\n", 82 | "ipydb is fetching database metadata\n" 83 | ] 84 | } 85 | ], 86 | "source": [ 87 | "%connecturl sqlite:///chinook.sqlite" 88 | ] 89 | }, 90 | { 91 | "cell_type": "markdown", 92 | "metadata": {}, 93 | "source": [ 94 | "### fetch data and return as a pandas DataFrame" 95 | ] 96 | }, 97 | { 98 | "cell_type": "code", 99 | "execution_count": 5, 100 | "metadata": { 101 | "collapsed": true 102 | }, 103 | "outputs": [], 104 | "source": [ 105 | "data = %sql -P select * from Album;" 106 | ] 107 | }, 108 | { 109 | "cell_type": "code", 110 | "execution_count": 6, 111 | "metadata": { 112 | "collapsed": false 113 | }, 114 | "outputs": [ 115 | { 116 | "data": { 117 | "text/html": [ 118 | "
\n", 119 | "\n", 120 | " \n", 121 | " \n", 122 | " \n", 123 | " \n", 124 | " \n", 125 | " \n", 126 | " \n", 127 | " \n", 128 | " \n", 129 | " \n", 130 | " \n", 131 | " \n", 132 | " \n", 133 | " \n", 134 | " \n", 135 | " \n", 136 | " \n", 137 | " \n", 138 | " \n", 139 | " \n", 140 | " \n", 141 | " \n", 142 | " \n", 143 | " \n", 144 | " \n", 145 | " \n", 146 | " \n", 147 | " \n", 148 | " \n", 149 | " \n", 150 | " \n", 151 | " \n", 152 | " \n", 153 | " \n", 154 | " \n", 155 | " \n", 156 | " \n", 157 | " \n", 158 | " \n", 159 | " \n", 160 | "
AlbumIdTitleArtistId
01For Those About To Rock We Salute You1
12Balls to the Wall2
23Restless and Wild2
34Let There Be Rock1
45Big Ones3
\n", 161 | "
" 162 | ], 163 | "text/plain": [ 164 | " AlbumId Title ArtistId\n", 165 | "0 1 For Those About To Rock We Salute You 1\n", 166 | "1 2 Balls to the Wall 2\n", 167 | "2 3 Restless and Wild 2\n", 168 | "3 4 Let There Be Rock 1\n", 169 | "4 5 Big Ones 3" 170 | ] 171 | }, 172 | "execution_count": 6, 173 | "metadata": {}, 174 | "output_type": "execute_result" 175 | } 176 | ], 177 | "source": [ 178 | "data.head(5)" 179 | ] 180 | }, 181 | { 182 | "cell_type": "code", 183 | "execution_count": 7, 184 | "metadata": { 185 | "collapsed": false, 186 | "scrolled": false 187 | }, 188 | "outputs": [ 189 | { 190 | "name": "stdout", 191 | "output_type": "stream", 192 | "text": [ 193 | "\n", 194 | "Int64Index: 347 entries, 0 to 346\n", 195 | "Data columns (total 3 columns):\n", 196 | "AlbumId 347 non-null int64\n", 197 | "Title 347 non-null object\n", 198 | "ArtistId 347 non-null int64\n", 199 | "dtypes: int64(2), object(1)\n", 200 | "memory usage: 10.8+ KB\n" 201 | ] 202 | } 203 | ], 204 | "source": [ 205 | "data.info()" 206 | ] 207 | }, 208 | { 209 | "cell_type": "code", 210 | "execution_count": 8, 211 | "metadata": { 212 | "collapsed": false 213 | }, 214 | "outputs": [ 215 | { 216 | "data": { 217 | "text/html": [ 218 | "
\n", 219 | "\n", 220 | " \n", 221 | " \n", 222 | " \n", 223 | " \n", 224 | " \n", 225 | " \n", 226 | " \n", 227 | " \n", 228 | " \n", 229 | " \n", 230 | " \n", 231 | " \n", 232 | " \n", 233 | " \n", 234 | " \n", 235 | " \n", 236 | " \n", 237 | " \n", 238 | " \n", 239 | " \n", 240 | " \n", 241 | " \n", 242 | "
AlbumIdTitleArtistId
9091Use Your Illusion I88
9192Use Your Illusion II88
\n", 243 | "
" 244 | ], 245 | "text/plain": [ 246 | " AlbumId Title ArtistId\n", 247 | "90 91 Use Your Illusion I 88\n", 248 | "91 92 Use Your Illusion II 88" 249 | ] 250 | }, 251 | "execution_count": 8, 252 | "metadata": {}, 253 | "output_type": "execute_result" 254 | } 255 | ], 256 | "source": [ 257 | "data[(data.ArtistId < 100) & (data.ArtistId > 80) & (data.Title.str.contains('Use'))]" 258 | ] 259 | }, 260 | { 261 | "cell_type": "code", 262 | "execution_count": null, 263 | "metadata": { 264 | "collapsed": true 265 | }, 266 | "outputs": [], 267 | "source": [] 268 | }, 269 | { 270 | "cell_type": "code", 271 | "execution_count": 9, 272 | "metadata": { 273 | "collapsed": false 274 | }, 275 | "outputs": [ 276 | { 277 | "name": "stdout", 278 | "output_type": "stream", 279 | "text": [ 280 | "Welcome to ipydb 0.0.2!\n", 281 | "\n", 282 | "ipydb has added the following `magic` commands to your ipython session:\n", 283 | "-------------------\n", 284 | " %begin Start a transaction.\n", 285 | " %commit Commit active transaction, if one exists.\n", 286 | " %connect Connect to a database using a configuration 'nickname'.\n", 287 | " %connecturl Connect to a database using an SqlAlchemy style connection URL.\n", 288 | " %debug_ipydb Toggle debugging mode for ipydb.\n", 289 | " %describe Print information about table: columns and keys.\n", 290 | " %engine Returns the current SqlAlchemy engine/connection.\n", 291 | " %fields Show a list of fields and data types for the given table.\n", 292 | " %fks Shows a list of foreign keys for the given table.\n", 293 | " %flushmetadata Flush ipydb's schema caches for the current connection.\n", 294 | " %get_ipydb Return the active ipdyb plugin instance.\n", 295 | " %ipydb_help Show this help message.\n", 296 | " %joins Shows a list of all joins involving a given table.\n", 297 | " %references Shows a list of all foreign keys that reference the given field.\n", 298 | " %rereflect Force re-loading of completion metadata.\n", 299 | " %rollback Rollback 
active transaction, if one exists.\n", 300 | " %runsql Run delimited SQL statements from a file\n", 301 | " %saveconnection Save current connection to ~/.db-connections file.\n", 302 | " %set_reflection Toggle schema reflection.\n", 303 | " %showsql Toggle SQL statement logging from SqlAlchemy.\n", 304 | " %sql ::\n", 305 | " %sqlformat Change the output format.\n", 306 | " %tables Show a list of tables for the current db connection.\n", 307 | " %views Show a list of views for the current db connection.\n", 308 | "-------------------\n", 309 | "\n", 310 | "You can get detailed usage information for any of the above commands \n", 311 | "by typing %magic_command_name? For example, to get help on %connect, type\n", 312 | "\n", 313 | " %connect?\n", 314 | "\n", 315 | "Get started by connecting to a database using %connect_url or %connect\n" 316 | ] 317 | } 318 | ], 319 | "source": [ 320 | "%ipydb_help" 321 | ] 322 | }, 323 | { 324 | "cell_type": "code", 325 | "execution_count": null, 326 | "metadata": { 327 | "collapsed": true 328 | }, 329 | "outputs": [], 330 | "source": [] 331 | }, 332 | { 333 | "cell_type": "code", 334 | "execution_count": null, 335 | "metadata": { 336 | "collapsed": true 337 | }, 338 | "outputs": [], 339 | "source": [] 340 | } 341 | ], 342 | "metadata": { 343 | "kernelspec": { 344 | "display_name": "Python 2", 345 | "language": "python", 346 | "name": "python2" 347 | }, 348 | "language_info": { 349 | "codemirror_mode": { 350 | "name": "ipython", 351 | "version": 2 352 | }, 353 | "file_extension": ".py", 354 | "mimetype": "text/x-python", 355 | "name": "python", 356 | "nbconvert_exporter": "python", 357 | "pygments_lexer": "ipython2", 358 | "version": "2.7.6" 359 | } 360 | }, 361 | "nbformat": 4, 362 | "nbformat_minor": 0 363 | } 364 | -------------------------------------------------------------------------------- /ipydb/__init__.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | 
""" 4 | ipydb: An IPython extension to help you type and execute SQL queries. 5 | 6 | Usage: 7 | $ ipython 8 | In [1]: %load_ext ipydb 9 | In [2]: %connect_url mysql://user:pass@localhost/mydbname 10 | In [3]: %select * from person order by id desc 11 | 12 | :copyright: (c) 2012 by Jay Sweeney. 13 | :license: see LICENSE for more details. 14 | """ 15 | from __future__ import print_function 16 | 17 | import logging 18 | import os 19 | 20 | __title__ = 'ipydb' 21 | __version__ = '0.0.2' 22 | __author__ = 'Jay Sweeney' 23 | __license__ = 'Apache 2.0' 24 | __copyright__ = 'Copyright 2012 Jay Sweeney' 25 | 26 | PLUGIN_NAME = 'ipydb' 27 | _loaded = False 28 | _backup_prompt1 = '' 29 | CONFIG_FILE = os.path.join(os.path.expanduser('~'), '.db-connections') 30 | 31 | 32 | def load_ipython_extension(ip): 33 | """Load the ipydb into the active ipython session""" 34 | from .plugin import SqlPlugin 35 | global _loaded 36 | if not _loaded: 37 | plugin = SqlPlugin(shell=ip, config=ip.config) 38 | configure_prompt(plugin) 39 | _loaded = True 40 | print(u"Welcome to ipydb %s!" % __version__) 41 | print(u"type %ipydb_help for documentation of ipydb") 42 | logging.basicConfig() 43 | 44 | 45 | def configure_prompt(ipydb): 46 | from IPython.core.prompts import LazyEvaluate 47 | global _backup_prompt1 48 | ip = ipydb.shell 49 | ip.prompt_manager.lazy_evaluate_fields['_ipydb'] = LazyEvaluate( 50 | ipydb.get_db_ps1) 51 | ip.prompt_manager.lazy_evaluate_fields['_reflecting'] = LazyEvaluate( 52 | ipydb.get_reflecting_ps1) 53 | ip.prompt_manager.lazy_evaluate_fields['_tx'] = LazyEvaluate( 54 | ipydb.get_transaction_ps1) 55 | tmpl = ip.prompt_manager.in_template 56 | _backup_prompt1 = tmpl 57 | tmpl = tmpl.rstrip(': ') 58 | tmpl += '{color.LightPurple}{_reflecting}' \ 59 | '{color.Cyan}{_ipydb}' \ 60 | '{color.LightRed}{_tx}' \ 61 | '{color.Green}: ' 62 | ip.prompt_manager.in_template = tmpl 63 | 64 | 65 | def ipydb_help(): 66 | msg = u"Welcome to ipydb %s!" 
def get_brief_help():
    """Return (magic_name, summary) pairs for every magic ipydb defines.

    The summary is the magic's ``__description__`` attribute when one is
    present, otherwise the first line of its docstring (or '' when the
    magic has no documentation at all).
    """
    from .magic import SqlMagics
    registered = {}
    registered.update(SqlMagics.magics.get('cell', {}))
    registered.update(SqlMagics.magics.get('line', {}))
    summaries = []
    for name in sorted(registered):
        method = getattr(SqlMagics, name, None)
        if not method:
            continue
        if hasattr(method, '__description__'):
            text = method.__description__
        else:
            text = getattr(method, '__doc__', '')
        if text is not None:
            text = text.strip()
        if not text:
            text = ''
        summaries.append((name, text.split('\n')[0]))
    return summaries
def isublists(l, n):
    """Yield successive *n*-sized chunks of iterable *l* as tuples.

    Uses the classic ``zip_longest`` "grouper" idiom: the final chunk is
    padded with ``None`` when ``len(l)`` is not a multiple of *n*.
    """
    chunker = iter(l)
    return itertools.zip_longest(*(n * [chunker]))
61 | """ 62 | 63 | def heading_line(sizes): 64 | for size in sizes: 65 | out.write(b'+' + b'-' * (size + 2)) 66 | out.write(b'+\n') 67 | 68 | def draw_headings(headings, sizes): 69 | heading_line(sizes) 70 | for idx, size in enumerate(sizes): 71 | fmt = '| %%-%is ' % size 72 | out.write((fmt % headings[idx]).encode('utf8')) 73 | out.write(b'|\n') 74 | heading_line(sizes) 75 | 76 | cols, lines = termsize() 77 | headings = cursor.keys() 78 | sizes = list(map(lambda x: len(x), headings)) 79 | if paginate: 80 | cursor = isublists(cursor, lines - 4) 81 | # else we assume cursor arrive here pre-paginated 82 | for screenrows in cursor: 83 | for row in screenrows: 84 | if row is None: 85 | break 86 | for idx, value in enumerate(row): 87 | if not isinstance(value, string_types): 88 | value = str(value) 89 | size = max(sizes[idx], len(value)) 90 | sizes[idx] = min(size, max_fieldsize) 91 | draw_headings(headings, sizes) 92 | for rw in screenrows: 93 | if rw is None: 94 | break # from isublists impl 95 | for idx, size in enumerate(sizes): 96 | fmt = '| %%-%is ' % size 97 | value = rw[idx] 98 | if not isinstance(value, string_types): 99 | value = str(value) 100 | if len(value) > max_fieldsize: 101 | value = value[:max_fieldsize - 5] + '[...]' 102 | value = value.replace('\n', '^') 103 | value = value.replace('\r', '^').replace('\t', ' ') 104 | try: 105 | value = fmt % value 106 | except UnicodeDecodeError: 107 | value = fmt % value.decode('utf8') 108 | out.write(value.encode('utf8')) 109 | out.write(b'|\n') 110 | if not paginate: 111 | heading_line(sizes) 112 | out.write(b'\n') 113 | -------------------------------------------------------------------------------- /ipydb/completion.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | """ 4 | This module provides functionality for readline-style 5 | tab-completion of SQL statements and other ipydb commands. 
6 | """ 7 | from __future__ import print_function 8 | import itertools 9 | import logging 10 | import re 11 | 12 | from sqlalchemy.sql.compiler import RESERVED_WORDS 13 | 14 | from ipydb.engine import getconfigs 15 | from ipydb.magic import SQL_ALIASES 16 | 17 | log = logging.getLogger(__name__) 18 | reassignment = re.compile(r'^\w+\s*=\s*%((\w+).*)') 19 | 20 | 21 | def get_ipydb(ipython): 22 | """Return the active ipydb instance.""" 23 | return ipython.magic('get_ipydb') 24 | 25 | 26 | def ipydb_complete(self, event): 27 | """Returns a list of suggested completions for event.symbol. 28 | 29 | Note: This function is bound to an ipython shell instance 30 | and called on tab-presses by ipython. 31 | Args: 32 | event: see IPython.core.completer 33 | Returns: 34 | A list of candidate strings which complete the input text 35 | or None to propagate completion to other handlers or 36 | return [] to suppress further completion 37 | """ 38 | sqlplugin = get_ipydb(self) 39 | try: 40 | if sqlplugin: 41 | if sqlplugin.debug: 42 | print('complete: sym=[%s] line=[%s] tuc=[%s]' % ( 43 | event.symbol, event.line, event.text_until_cursor)) 44 | completions = sqlplugin.completer.complete(event) 45 | if sqlplugin.debug: 46 | print('completions:', completions) 47 | return completions 48 | except Exception as e: 49 | print(repr(e)) 50 | if sqlplugin and sqlplugin.debug: 51 | import traceback 52 | traceback.print_exc() 53 | return None 54 | 55 | 56 | def match_lists(lists, text, appendfunc=None, sort=True): 57 | """Helper to substring-match text in a list-of-lists.""" 58 | n = len(text) 59 | results = None 60 | if appendfunc is None: 61 | results = [] 62 | for word in itertools.chain(*lists): 63 | if word[:n] == text: 64 | if appendfunc: 65 | appendfunc(word) 66 | else: 67 | results.append(word) 68 | if appendfunc is None: 69 | return results 70 | 71 | 72 | class MonkeyString(str): 73 | """This is to avoid the restriction in 74 | i.c.completer.IPCompleter.dispatch_custom_completer where 
75 | matches must begin with the text being matched.""" 76 | 77 | def __new__(self, text, completion): 78 | self.text = text 79 | return str.__new__(self, completion) 80 | 81 | def startswith(self, text): 82 | if self.text == text: 83 | return True 84 | else: 85 | return super(MonkeyString, self).startswith(text) 86 | 87 | 88 | class IpydbCompleter(object): 89 | """Readline completer functions for various ipython commands.""" 90 | 91 | restr = re.compile(r'TEXT|VARCHAR.*|CHAR.*') 92 | renumeric = re.compile(r'FLOAT.*|DECIMAL.*|INT.*' 93 | '|DOUBLE.*|FIXED.*|SHORT.*|NUMERIC.*|NUMBER.*') 94 | redate = re.compile(r'DATE|TIME|DATETIME|TIMESTAMP') 95 | 96 | def __init__(self, get_db): 97 | """ 98 | Args: 99 | get_db: callable that will return an 100 | instance of ipydb.metadata.model.Database 101 | """ 102 | self.get_db = get_db 103 | self.commands_completers = { 104 | 'connect': self.connection_nickname, 105 | 'sqlformat': self.sql_format, 106 | 'references': self.table_dot_field, 107 | 'fields': self.table_dot_field, 108 | 'tables': self.table_name, 109 | 'joins': self.table_name, 110 | 'fks': self.table_name, 111 | 'describe': self.table_name, 112 | 'sql': self.sql_statement, 113 | 'runsql': lambda _: None # delegate to ipython for file match 114 | } 115 | self.commands_completers.update( 116 | zip(SQL_ALIASES, [self.sql_statement] * len(SQL_ALIASES))) 117 | 118 | @property 119 | def db(self): 120 | return self.get_db() 121 | 122 | def complete(self, ev): 123 | """Locate completer for ev.command and call it. 
124 | Args: 125 | event: see IPython.core.completer 126 | Returns: 127 | list of strings which can complete event.symbol 128 | """ 129 | key = ev.command 130 | match_assign = reassignment.search(ev.line) 131 | if match_assign: 132 | key = match_assign.group(2) 133 | if ev.command.startswith('%'): 134 | key = ev.command[1:] 135 | func = self.commands_completers.get(key) 136 | if func is None: 137 | return None 138 | return func(ev) 139 | 140 | def connection_nickname(self, ev): 141 | """Return completions for %connect.""" 142 | keys = sorted(getconfigs()[1].keys()) 143 | if not ev.symbol: 144 | return keys 145 | matches = match_lists([keys], ev.symbol) 146 | matches.sort() 147 | return matches 148 | 149 | def sql_format(self, ev): 150 | """Return completions for %sql_format.""" 151 | from ipydb.plugin import SQLFORMATS 152 | if not ev.symbol: 153 | return SQLFORMATS 154 | matches = match_lists([SQLFORMATS], ev.symbol) 155 | matches.sort() 156 | return matches 157 | 158 | def sql_statement(self, ev): 159 | """Completions for %sql commands""" 160 | chunks = ev.line.split() 161 | if len(chunks) == 2: 162 | first, second = chunks 163 | starters = 'select insert'.split() # TODO: delete, update 164 | if first in starters and (second in self.db.tablenames() or 165 | self.is_valid_join_expression(second)): 166 | return self.expand_two_token_sql(ev) 167 | if '**' in ev.symbol: # special join syntax t1**t2 168 | return self.join_shortcut(ev) 169 | if ev.symbol.count('.') == 1: # something.other 170 | return self.dotted_expression(ev, expansion=True) 171 | # single token, no dot 172 | matches = match_lists([self.db.tablenames(), self.db.fieldnames(), 173 | RESERVED_WORDS], ev.symbol) 174 | matches.sort() 175 | return matches 176 | 177 | def table_dot_field(self, ev): 178 | """completes table.fieldname""" 179 | if ev.symbol.count('.') == 1: # something.other 180 | return self.dotted_expression(ev, expansion=False) 181 | matches = match_lists([self.db.tablenames()], ev.symbol) 
182 | matches.sort() 183 | return matches 184 | 185 | def table_name(self, ev): 186 | matches = match_lists([self.db.tablenames()], ev.symbol) 187 | matches.sort() 188 | return matches 189 | 190 | def is_valid_join_expression(self, expr): 191 | 192 | def joining_tables(table): 193 | for fk in self.db.all_joins(table): 194 | yield fk.table if table != fk.table else fk.reftable 195 | if '**' not in expr: 196 | return False 197 | tables = expr.split('**') 198 | valid = True 199 | while len(tables) > 1: 200 | tail = tables.pop() 201 | jointables = joining_tables(tail) 202 | valid = bool(set(jointables) & set(tables)) 203 | if not valid: 204 | break 205 | return valid 206 | 207 | def expand_join_expression(self, expr): 208 | if not self.is_valid_join_expression(expr): 209 | log.debug('%s is not a valid join expr', expr) 210 | return expr 211 | tables = expr.split('**') 212 | ret = '' 213 | while len(tables) > 1: 214 | tail = tables.pop() 215 | # try to join to the other tables: 216 | for tbl in reversed(tables): 217 | joins = self.db.get_joins(tbl, tail) 218 | if joins: 219 | join = next(iter(joins)) # XXX: take a punt 220 | joinstr = 'inner join %s on ' % (tail) 221 | sep = '' 222 | for idx, col in enumerate(join.columns): 223 | joinstr += sep + '%s.%s = %s.%s' % ( 224 | join.table, col, join.reftable, 225 | join.refcolumns[idx]) 226 | sep = ' and ' 227 | ret = joinstr + ' ' + ret 228 | break 229 | ret = tables[0] + ' ' + ret 230 | return ret 231 | 232 | def join_shortcut(self, ev): 233 | matches = [] 234 | 235 | def _all_joining_tables(tables): 236 | ret = set() 237 | for tablename in tables: 238 | for fk in self.db.all_joins(tablename): 239 | tgt = fk.reftable if fk.table == tablename else fk.table 240 | ret.add(tgt) 241 | return ret 242 | 243 | if ev.symbol.endswith('**'): # incomplete stmt: t1**t2** 244 | for t in _all_joining_tables(ev.symbol.split('**')): 245 | matches.append(MonkeyString(ev.symbol, ev.symbol + t)) 246 | else: 247 | joinexpr = 
self.expand_join_expression(ev.symbol) 248 | if joinexpr != ev.symbol: # expand succeeded 249 | return [MonkeyString(ev.symbol, joinexpr)] 250 | # assume that end token is partial table name: 251 | bits = ev.symbol.split('**') 252 | toke = bits.pop() 253 | start = '**'.join(bits) 254 | all_joins = _all_joining_tables(bits) 255 | matches = [MonkeyString(ev.symbol, start + '**' + t) 256 | for t in all_joins if t.startswith(toke)] 257 | matches.sort() 258 | return matches 259 | 260 | def dotted_expression(self, ev, expansion=True): 261 | """Return completions for head.tail""" 262 | head, tail = ev.symbol.split('.') 263 | if expansion and head in self.db.tablenames() and tail == '*': 264 | # tablename.* -> expand all names 265 | matches = self.db.fieldnames(table=head, dotted=True) 266 | return [MonkeyString(ev.symbol, ', '.join(sorted(matches)))] 267 | matches = match_lists([self.db.fieldnames(dotted=True)], ev.symbol) 268 | if not len(matches): # head could be a table alias TODO: parse these. 269 | if tail == '': 270 | fields = map(lambda word: head + '.' 
def get_nicknames():
    """Return the sorted list of saved connection nicknames.

    Bug fix: ``getconfigs()`` returns a ``(default, configs)`` tuple, so
    the original ``getconfigs().keys()`` raised AttributeError on every
    call.  Index into the configs dict before taking its keys.
    """
    return sorted(getconfigs()[1].keys())
def from_url(url, connect_args=None):
    """Create an SqlAlchemy engine from a connection URL.

    Args:
        url: An SqlAlchemy-style DB connection URL (string or URL object).
        connect_args: Optional dict of extra arguments to be passed to
            the underlying DB-API driver.  (Bug fix: was a shared mutable
            default ``{}``; now defaults to None.)
    Returns:
        An sqlalchemy Engine bound to the URL.  (The original docstring
        incorrectly claimed a bool was returned.)
    """
    # Copy so we never mutate a dict owned by the caller.
    connect_args = dict(connect_args) if connect_args else {}
    url = sa.engine.url.make_url(str(url))
    if url.drivername == 'oracle':
        # not sure why we need this horrible _cxmakedsn hack -
        # I think there's some weirdness
        # with cx_oracle/oracle versions I'm using.
        import cx_Oracle
        if not getattr(cx_Oracle, '_cxmakedsn', None):
            setattr(cx_Oracle, '_cxmakedsn', cx_Oracle.makedsn)

        def newmakedsn(*args, **kw):
            return cx_Oracle._cxmakedsn(*args, **kw).replace(
                'SID', 'SERVICE_NAME')
        cx_Oracle.makedsn = newmakedsn
    elif url.drivername == 'mysql':
        import MySQLdb.cursors
        # Use server-side cursors by default (does this work with myISAM?)
        # Bug fix: previously this *replaced* any connect_args supplied
        # by the caller; setdefault only fills in the default.
        connect_args.setdefault('cursorclass', MySQLdb.cursors.SSCursor)
    return sa.engine.create_engine(url, connect_args=connect_args)
101 | """ 102 | return sa.engine.url.URL( 103 | drivername=config.get('type'), username=config.get('username'), 104 | password=config.get('password'), host=config.get('host'), 105 | port=config.get('port') or None, 106 | database=config.get('database'), 107 | query=dict(parse.parse_qsl(config.get('query', '')))) 108 | 109 | 110 | def save_connection(name, engine, overwrite=False): 111 | """Saves a connection configuration to ~/.db-connections.""" 112 | cp = getconfigparser() 113 | try: 114 | cp.add_section(name) 115 | except DuplicateSectionError: 116 | if overwrite: 117 | pass 118 | else: 119 | raise 120 | url = engine.url 121 | cp.set(name, 'type', url.drivername or '') 122 | cp.set(name, 'username', url.username or '') 123 | cp.set(name, 'password', url.password or '') 124 | cp.set(name, 'host', url.host or '') 125 | cp.set(name, 'database', url.database or '') 126 | cp.set(name, 'port', url.port or '') 127 | cp.set(name, 'query', url.query or '') 128 | with open(CONFIG_FILE, 'w') as fout: 129 | cp.write(fout) 130 | -------------------------------------------------------------------------------- /ipydb/magic.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | """ 4 | IPython magic commands registered by ipydb 5 | 6 | :copyright: (c) 2012 by Jay Sweeney. 7 | :license: see LICENSE for more details. 
8 | """ 9 | from __future__ import print_function 10 | import logging 11 | 12 | from IPython.core.magic import Magics, magics_class, \ 13 | line_magic, line_cell_magic 14 | from IPython.core.magic_arguments import magic_arguments, \ 15 | argument, parse_argstring 16 | from IPython.utils.process import arg_split 17 | 18 | import sqlparse 19 | from ipydb.asciitable import PivotResultSet 20 | 21 | SQL_ALIASES = 'select insert update delete create alter drop'.split() 22 | 23 | 24 | def create_sql_alias(alias, sqlmagics): 25 | """Returns a function which calls SqlMagics.sql 26 | 27 | For example, create_sql_alias('select', mm, sqlm) returns a function 28 | which when called like this: _sqlalias('-r -a --foo=bar * from thing') 29 | will rearrange options in the line and result in the following call: 30 | sqlmagics.sql("-r -a --foo=bar select * from thing") 31 | """ 32 | def _sqlalias(line, cell=None): 33 | """Alias to %sql""" 34 | opts, args = [], [] 35 | for chunk in arg_split(line): 36 | # XXX: what about math?!: 37 | # select -1 + 5 * something from blah; 38 | if chunk.startswith('-') and len(chunk.strip()) > 1: 39 | opts.append(chunk) 40 | else: 41 | args.append(chunk) 42 | line = '%s %s %s' % (' '.join(opts), alias, ' '.join(args)) 43 | return sqlmagics.sql(line, cell) 44 | return _sqlalias 45 | 46 | 47 | def register_sql_aliases(magic_manager, sqlmagics): 48 | """Creates and registers convenience aliases to SqlMagics.sql for 49 | %select, %insert, %update, ... 
50 | 51 | Args: 52 | magic_manager: ipython's shell.magic_manager instance 53 | sqlmagics: instance of SqlMagics 54 | """ 55 | for alias in SQL_ALIASES: 56 | magic_func = create_sql_alias(alias, sqlmagics) 57 | magic_func.func_name = alias 58 | magic_manager.register_function(magic_func, 'line', alias) 59 | magic_manager.register_function(magic_func, 'cell', alias) 60 | 61 | 62 | @magics_class 63 | class SqlMagics(Magics): 64 | 65 | def __init__(self, ipydb, shell, *a, **kw): 66 | super(SqlMagics, self).__init__(shell, *a, **kw) 67 | self.ipydb = ipydb 68 | 69 | @line_magic 70 | def ipydb_help(self, *args): 71 | """Show this help message.""" 72 | from ipydb import ipydb_help # XXX: recursive import problem... 73 | ipydb_help() 74 | 75 | @line_magic 76 | def set_reflection(self, arg): 77 | """Toggle schema reflection.""" 78 | if self.ipydb.do_reflection: 79 | self.ipydb.do_reflection = False 80 | else: 81 | self.ipydb.do_reflection = True 82 | print('Schema reflection: %s' % ( 83 | 'on' if self.ipydb.do_reflection else 'off')) 84 | 85 | @line_magic 86 | def engine(self, arg): 87 | """Returns the current SqlAlchemy engine/connection.""" 88 | return self.ipydb.get_engine() 89 | 90 | @line_magic 91 | def debug_ipydb(self, arg): 92 | """Toggle debugging mode for ipydb.""" 93 | if self.ipydb.debug: 94 | self.ipydb.set_debug(False) 95 | root_logger = logging.getLogger() 96 | root_logger.setLevel(logging.WARNING) 97 | else: 98 | self.ipydb.set_debug(True) 99 | root_logger = logging.getLogger() 100 | root_logger.setLevel(logging.DEBUG) 101 | print("ipydb debugging is", 'on' if self.ipydb.debug else 'off') 102 | 103 | @line_magic 104 | def begin(self, arg): 105 | """Start a transaction.""" 106 | self.ipydb.begin() 107 | 108 | @line_magic 109 | def commit(self, arg): 110 | """Commit active transaction, if one exists.""" 111 | self.ipydb.commit() 112 | 113 | @line_magic 114 | def rollback(self, arg): 115 | """Rollback active transaction, if one exists.""" 116 | 
    @magic_arguments()
    @argument('-r', '--return', dest='ret', action='store_true',
              help='Return a resultset instead of printing the results')
    @argument('-p', '--pivot', dest='single', action='store_true',
              help='View in "single record" mode')
    @argument('-m', '--multiparams', dest='multiparams', default=None,
              help='A collection of dictionaries of bind parameters')
    @argument('-a', '--params', dest='params', default=None,
              help='A dictionary of bind parameters for the sql statement')
    @argument('-f', '--format', action='store_true',
              help='pretty-print sql statement and exit')
    @argument('-o', '--output', action='store', dest='file',
              help='Write sql output as CSV to the given file')
    @argument('-P', '--pandas', action='store_true',
              help='Return data as pandas DataFrame')
    @argument('sql_statement', help='The SQL statement to run', nargs="*")

    @line_cell_magic
    def sql(self, args='', cell=None):
        """Run an sql statement against the current db connection.

        Examples:
            %sql select first_name from person where first_name like 'J%'

        Also works as a multi-line ipython command:

            %%sql
            select
                id, name, description
            from
                my_table
            where
                id < 10

        Returning a result set:
            To return a database cursor, use the -r option:

                results = %sql -r select first_name from employees
                for row in results:
                    do_things_with(row.first_name)

        Shortcut Aliases to %sql:
            ipydb defines some 'short-cut' aliases which call %sql.
            Aliases have been added for:

                {select, insert, update, delete, create, alter, drop}

            Using aliases, you can write 'natural' SQL statements like so:

                select * from my_table

            Which results in:

                %sql select * from my_table

        """
        # Parse option flags; the remaining tokens form the SQL text.
        args = parse_argstring(self.sql, args)
        params = None
        multiparams = None
        sql = ' '.join(args.sql_statement)
        if cell is not None:
            # Cell magic: the cell body is appended below any line text.
            sql += '\n' + cell
        if args.format:
            # -f: pretty-print only; the statement is never executed.
            sqlstr = sqlparse.format(sql, reindent=True)
            if args.ret:
                return sqlstr
            else:
                print("\n%s" % sqlstr)
            return
        if args.params:
            # -a/-m name variables in the user's IPython namespace that
            # hold bind parameters; missing names fall back to {} / [].
            params = self.shell.user_ns.get(args.params, {})
        if args.multiparams:
            multiparams = self.shell.user_ns.get(args.multiparams, [])
        cursor = self.ipydb.execute(sql, params=params,
                                    multiparams=multiparams)

        if not cursor:
            return None
        if not cursor.returns_rows:
            # Non-SELECT statements: report the affected row count.
            s = 's' if cursor.rowcount != 1 else ''
            print("%i row%s affected" % (cursor.rowcount, s))

        if args.pandas:
            # -P takes precedence over -r and rendering.
            return self.ipydb.build_dataframe(cursor)
        if args.ret:
            return cursor
        if cursor and cursor.returns_rows:
            if args.single:
                # -p: pivot to one (field, value) pair per output line.
                self.ipydb.render_result(
                    PivotResultSet(cursor), paginate=False, filepath=args.file)
            else:
                # Paginate interactively unless output goes to a file.
                self.ipydb.render_result(
                    cursor, paginate=not bool(args.file), filepath=args.file)
    # NOTE(review): this summary string appears truncated ("...against ")
    # -- confirm against upstream; expected something like
    # 'Run an sql statement against the current db connection'.
    sql.__description__ = 'Run an sql statement against '
Must be on a new line by itself') 217 | @argument('-i', '--interactive', action='store_true', default=False, 218 | help='Interactive mode - show and prompt each SQL statement') 219 | @argument('file', action='store', help='SQL script file') 220 | @line_magic 221 | def runsql(self, param=''): 222 | """Run delimited SQL statements from a file. 223 | 224 | SQL statements in the input file are expected to be delimited 225 | by '/' by itself on a new line. This can be overidden with the 226 | -d option. 227 | """ 228 | args = parse_argstring(self.runsql, param) 229 | self.ipydb.run_sql_script( 230 | args.file, 231 | interactive=args.interactive, 232 | delimiter=args.delimiter) 233 | runsql.__description__ = 'Run delimited SQL ' \ 234 | 'statements from a file' 235 | 236 | @line_magic 237 | def tables(self, param=''): 238 | """Show a list of tables for the current db connection. 239 | 240 | Usage: %tables [GLOB1 GLOB2...] 241 | 242 | Show tables matching GLOB if given 243 | Example usage: 244 | %tables 245 | : lists all avaiable tables for the current connection 246 | %tables *p* *z* 247 | : shows tables having a 'p' or a 'z' in their name 248 | 249 | """ 250 | self.ipydb.show_tables(*param.split()) 251 | 252 | @line_magic 253 | def views(self, param=''): 254 | """Show a list of views for the current db connection. 255 | 256 | Usage: %views [GLOB1 GLOB2...] 257 | 258 | Show views matching GLOB if given 259 | Example usage: 260 | %views 261 | : lists all avaiable views for the current connection 262 | %views *p* *z* 263 | : shows views having a 'p' or a 'z' in their name 264 | 265 | """ 266 | self.ipydb.show_tables(*param.split(), views=True) 267 | 268 | @line_magic 269 | def fields(self, param=''): 270 | """Show a list of fields and data types for the given table. 271 | 272 | Usage: %fields TABLE_GLOB[.FIELD_GLOB] [GLOB2...] 
273 | 274 | Examples: 275 | fields person 276 | : shows fields for person table 277 | fields person.*id* 278 | : show fields for person table having `id` in their name 279 | fields *person*.*id* 280 | : show fields having id in their name for all tables 281 | having 'person' in their name 282 | """ 283 | self.ipydb.show_fields(*param.split()) 284 | 285 | @line_magic 286 | def describe(self, param=''): 287 | """Print information about table: columns and keys.""" 288 | self.ipydb.describe(*param.split()) 289 | 290 | @line_magic 291 | def showsql(self, param=''): 292 | """Toggle SQL statement logging from SqlAlchemy.""" 293 | if self.ipydb.show_sql: 294 | level = logging.WARNING 295 | self.ipydb.show_sql = False 296 | else: 297 | level = logging.INFO 298 | self.ipydb.show_sql = True 299 | logging.getLogger('sqlalchemy.engine').setLevel(level) 300 | print('SQL logging %s' % ('on' if self.ipydb.show_sql else 'off')) 301 | 302 | @line_magic 303 | def references(self, param=""): 304 | """Shows a list of all foreign keys that reference the given field. 305 | 306 | Usage: %references TABLE_NAME[.FIELD_NAME] 307 | 308 | If FIELD_NAME is ommitted, all fields in TABLE_NAME are checked as 309 | the target of a foreign key reference 310 | 311 | Examples: 312 | references person.id 313 | : shows all fields having a foreign key referencing person.id 314 | """ 315 | if not param.strip() or len(param.split()) != 1: 316 | print("Usage: %references TABLE_NAME[.FIELD_NAME]") 317 | return 318 | self.ipydb.what_references(param) 319 | 320 | @line_magic 321 | def get_ipydb(self, param=''): 322 | """Return the active ipdyb plugin instance.""" 323 | return self.ipydb 324 | 325 | @line_magic 326 | def joins(self, param=""): 327 | """Shows a list of all joins involving a given table. 
328 | 329 | Usage: %joins TABLE_NAME 330 | """ 331 | if not param.strip() or len(param.split()) != 1: 332 | print("Usage: %show_joins TABLE_NAME") 333 | return 334 | self.ipydb.show_joins(param) 335 | 336 | @line_magic 337 | def fks(self, param=""): 338 | """Shows a list of foreign keys for the given table. 339 | 340 | Usage: %fks TABLE_NAME 341 | """ 342 | if not param.strip() or len(param.split()) != 1: 343 | print("Usage: %show_fks TABLE_NAME") 344 | return 345 | self.ipydb.show_fks(param) 346 | 347 | @line_magic 348 | def sqlformat(self, param=None): 349 | """Change the output format.""" 350 | from ipydb.plugin import SQLFORMATS 351 | if not param or param not in SQLFORMATS: 352 | print(self.sqlformat.__doc__) 353 | else: 354 | self.ipydb.sqlformat = param 355 | print("output format: %s" % self.ipydb.sqlformat) 356 | 357 | @line_magic 358 | def connect(self, param): 359 | """Connect to a database using a configuration 'nickname'. 360 | 361 | Usage: %connect NICKNAME 362 | 363 | For this to work, you need to create a file called 364 | ~/.db-connections. This file is an "ini" file, 365 | parsable by python's ConfigParser. 366 | 367 | Here's an example of what ~/.db-connections might look like: 368 | 369 | [mydb] 370 | type: mysql 371 | username: root 372 | password: xxxx 373 | host: localhost 374 | database: employees 375 | 376 | [myotherdb] 377 | type: sqlite 378 | database: /path/to/file.sqlite 379 | 380 | Each database connection defined in ~/.db-connections is 381 | then referenceable via its section heading, or NICKNAME. 382 | 383 | Note: Before you can connect, you will need to install a python driver 384 | for your chosen database. For a list of recommended drivers, 385 | see the SQLAlchemy documentation: 386 | 387 | http://bit.ly/J3TBJh 388 | """ 389 | self.ipydb.connect(param) 390 | 391 | @line_magic 392 | def connecturl(self, param): 393 | """Connect to a database using an SqlAlchemy style connection URL. 
394 | 395 | Usage: %connecturl drivername://username:password@host/database 396 | Examples: 397 | %connecturl mysql://root@localhost/mydatabase 398 | %connecturl sqlite:///:memory: 399 | 400 | Note: Before you can connect, you will need to install a python driver 401 | for your chosen database. For a list of recommended drivers, 402 | see the SQLAlchemy documentation: 403 | 404 | http://bit.ly/J3TBJh 405 | """ 406 | self.ipydb.connect_url(param) 407 | 408 | @line_magic 409 | def flushmetadata(self, arg): 410 | """Flush ipydb's schema caches for the current connection. 411 | 412 | Delete ipydb's in-memory cache of reflected schema information. 413 | Delete and re-create ipydb's sqlite information store. 414 | """ 415 | self.ipydb.flush_metadata() 416 | 417 | @line_magic 418 | def rereflect(self, arg): 419 | """Force re-loading of completion metadata.""" 420 | if not self.ipydb.connected: 421 | print(self.ipydb.not_connected_message) 422 | return 423 | self.ipydb.metadata_accessor.get_metadata( 424 | self.ipydb.engine, force=True, noisy=True) 425 | 426 | @line_magic 427 | def saveconnection(self, arg): 428 | """Save current connection to ~/.db-connections file. 429 | 430 | Usage: %saveconnection NICKNAME 431 | 432 | After you have saved the connection, you can use the following to 433 | connect: 434 | %connect NICKNAME 435 | Note: if a configuration exists for NICKNAME it will be overwritten 436 | with the current engine's connection parameters. 437 | """ 438 | if not self.ipydb.connected: 439 | print(self.ipydb.not_connected_message) 440 | return 441 | if not len(arg.strip()): 442 | print("Usage: %saveconnection NICKNAME. 
\n\n" 443 | "Please supply a NICKNAME to store the connection against.") 444 | return 445 | self.ipydb.save_connection(arg) 446 | -------------------------------------------------------------------------------- /ipydb/metadata/__init__.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | """ 4 | Reading and caching command-line completion strings from 5 | a database schema. 6 | 7 | :copyright: (c) 2012 by Jay Sweeney. 8 | :license: see LICENSE for more details. 9 | """ 10 | from __future__ import print_function 11 | 12 | import base64 13 | from collections import defaultdict 14 | from contextlib import contextmanager 15 | import datetime as dt 16 | import logging 17 | import multiprocessing 18 | from multiprocessing.pool import ThreadPool 19 | import os 20 | 21 | import sqlalchemy as sa 22 | from sqlalchemy import orm 23 | from sqlalchemy.engine.url import URL 24 | try: 25 | from IPython.paths import locate_profile 26 | except ImportError: 27 | # IPython 3 support 28 | from IPython.utils.path import locate_profile 29 | 30 | from ipydb.utils import timer 31 | from . import model as m 32 | from . import persist 33 | 34 | # invalidate db metadata if it is older than CACHE_MAX_AGE 35 | MAX_CACHE_AGE = dt.timedelta(minutes=180) 36 | 37 | log = logging.getLogger(__name__) 38 | 39 | Session = orm.sessionmaker() 40 | 41 | 42 | def get_metadata_engine(other_engine): 43 | """Create and return an SA engine for which will be used for 44 | storing ipydb db metadata about the input engine. 45 | 46 | Args: 47 | other_engine - SA engine for which we will be storing metadata for. 48 | Returns: 49 | tuple (dbname, sa_engine). dbname is a unique key for the input 50 | other_engine. 
sa_engine is the SA engine that will be used for storing 51 | metadata about `other_engine` 52 | """ 53 | path = os.path.join(locate_profile(), 'ipydb') 54 | if not os.path.exists(path): 55 | os.makedirs(path) 56 | dbfilename = get_db_filename(other_engine) 57 | dburl = u'sqlite:////%s' % os.path.join(path, dbfilename) 58 | return dbfilename, sa.create_engine(dburl) 59 | 60 | 61 | def get_db_filename(engine): 62 | """For the input SqlAlchemy engine, return a string name suitable for 63 | creating an sqlite database to store the engine's ipydb metadata. 64 | """ 65 | url = engine.url 66 | url = str(URL(url.drivername, url.username, host=url.host, 67 | port=url.port, database=url.database)) 68 | return str(base64.urlsafe_b64encode(url.encode('utf-8'))) 69 | 70 | 71 | @contextmanager 72 | def session_scope(engine): 73 | """Provide a transactional scope around a series of operations.""" 74 | Session.configure(bind=engine) 75 | session = Session() 76 | try: 77 | yield session 78 | session.commit() 79 | except: 80 | session.rollback() 81 | raise 82 | finally: 83 | session.close() 84 | 85 | 86 | def create_schema(engine): 87 | m.Base.metadata.create_all(engine) 88 | 89 | 90 | def delete_schema(engine): 91 | m.Base.metadata.drop_all(engine) 92 | 93 | 94 | class MetaDataAccessor(object): 95 | """Reads and writes database metadata. 96 | 97 | Database metadata is read from sqlalchemy.MetaData, converted to 98 | a simplified schema (ipydb.metadata.model) and saved to an sqlite 99 | database for successive fast-loading. sqlalchemy.MetaData.reflect() 100 | can be very slow for large/complicated database schemas (this was observed 101 | for large oracle schemas). The approach taken here is to reflect and 102 | update database metadata in a background thread prevent ipydb 103 | from becoming unresponsive to the user. 
104 | """ 105 | 106 | pool = ThreadPool(multiprocessing.cpu_count() * 2) 107 | debug = False 108 | 109 | def __init__(self): 110 | self.databases = defaultdict(m.Database) 111 | 112 | def read_expunge(self, ipydb_engine): 113 | with session_scope(ipydb_engine) as session, \ 114 | timer('Read-Expunge', log=log): 115 | db = persist.read(session) 116 | session.expunge_all() # unhook SA 117 | return db 118 | 119 | def get_metadata(self, engine, noisy=False, force=False, do_reflection=True): 120 | """Fetch metadata for an sqlalchemy engine""" 121 | db_key, ipydb_engine = get_metadata_engine(engine) 122 | create_schema(ipydb_engine) 123 | db = self.databases[db_key] 124 | if db.reflecting: 125 | log.debug('Is already reflecting') 126 | # we're already busy 127 | return db 128 | if not do_reflection: 129 | return db 130 | if force: 131 | log.debug('was foreced to re-reflect') 132 | # return sqlite data, re-reflect 133 | db = self.read_expunge(ipydb_engine) 134 | self.databases[db_key] = db 135 | if noisy: 136 | print("ipydb is fetching database metadata") 137 | self.spawn_reflection_thread(db_key, db, engine.url) 138 | return db 139 | if db.age > MAX_CACHE_AGE: 140 | log.debug('Cache expired age:%s reading from sqlite', db.age) 141 | # read from sqlite, should be fast enough to do synchronously 142 | db = self.read_expunge(ipydb_engine) 143 | self.databases[db_key] = db 144 | if db.age > MAX_CACHE_AGE: 145 | log.debug('Sqlite data too old: %s, re-reflecting', db.age) 146 | # Metadata is empty or too old. 
147 | # Spawn a thread to do the slow sqlalchemy reflection, 148 | # return whatever we have 149 | if noisy: 150 | print("ipydb is fetching database metadata") 151 | self.spawn_reflection_thread(db_key, db, engine.url) 152 | return db 153 | 154 | def spawn_reflection_thread(self, db_key, db, dburl_to_reflect): 155 | if not self.debug: 156 | self.pool.apply_async(self.reflect_db, 157 | (db_key, db, dburl_to_reflect)) 158 | else: 159 | self.reflect_db(db_key, db, dburl_to_reflect) 160 | 161 | def reflect_db(self, db_key, db, dburl_to_reflect): 162 | """runs in a new thread""" 163 | db.reflecting = True 164 | target_engine = sa.create_engine(dburl_to_reflect) 165 | db_key, ipydb_engine = get_metadata_engine(target_engine) 166 | db.sa_metadata.bind = target_engine 167 | with timer('sa reflect', log=log): 168 | db.sa_metadata.reflect() 169 | with timer('drop-recreate schema', log=log): 170 | delete_schema(ipydb_engine) 171 | create_schema(ipydb_engine) 172 | with timer('Persist sa data', log=log): 173 | persist.write_sa_metadata(ipydb_engine, db.sa_metadata) 174 | # make sure that everything was eager loaded, and update 175 | # db metadata from other thread XXX: dicey 176 | with session_scope(ipydb_engine) as session: 177 | with timer('read-expunge after write', log=log): 178 | database = persist.read(session) 179 | db.update_tables(database.tables.values()) 180 | db.sa_metadata = database.sa_metadata 181 | session.expunge_all() # unhook SA 182 | db.reflecting = False 183 | 184 | def flush(self, engine): 185 | """Delete all metadata associated with engine.""" 186 | self.pool.terminate() 187 | self.pool.join() 188 | db_key, ipydb_engine = get_metadata_engine(engine) 189 | del self.databases[db_key] 190 | delete_schema(ipydb_engine) 191 | create_schema(ipydb_engine) 192 | self.pool = ThreadPool(multiprocessing.cpu_count() * 2) 193 | 194 | def reflecting(self, engine): 195 | db_key = get_db_filename(engine) 196 | return self.databases[db_key].reflecting 197 | 
-------------------------------------------------------------------------------- /ipydb/metadata/model.py: -------------------------------------------------------------------------------- 1 | """A simple SQLAlchemy model for describing database metadata. 2 | 3 | Stores information about tables, columns, indexes, and foreign-keys. 4 | Database (non persistent) gives a high-level API to a collection 5 | of Tables objects from a given database schema. 6 | """ 7 | import collections 8 | import datetime as dt 9 | import itertools 10 | import logging 11 | import re 12 | 13 | import future 14 | from future.utils import viewvalues 15 | import sqlalchemy as sa 16 | from sqlalchemy import orm 17 | from sqlalchemy.ext.declarative import declarative_base 18 | 19 | ZERODATE = dt.datetime(dt.MINYEAR, 1, 1) 20 | Base = declarative_base() 21 | log = logging.getLogger(__name__) 22 | 23 | 24 | class Database(object): 25 | """Database metadata for a particular database. 26 | 27 | Databases are identified by the sqlalchemy connection url 28 | without the password (dbkey) and contain a dictionary of 29 | model.Table objects keyed by table name. 30 | There be dragons: another thread can be writing 31 | to self.tables at any time, so we need to lock for 32 | reads/writes. 
(that would be XXX TODO) 33 | """ 34 | 35 | def __init__(self, tables=None): 36 | self.tables = {} 37 | self.modified = None 38 | self.reflecting = False 39 | self.sa_metadata = sa.MetaData() 40 | if tables is None: 41 | tables = [] 42 | self.update_tables(tables) 43 | 44 | def isempty(self): 45 | return bool(self.tables) 46 | 47 | def update_tables(self, tables): 48 | """Update table definitions from a list of tables.""" 49 | for t in tables: 50 | self.isempty = False 51 | self.tables[t.name] = t 52 | if self.modified is None: 53 | self.modified = t.modified 54 | self.modified = min(self.modified, t.modified, 55 | key=lambda x: '' if x is None else x) 56 | 57 | @property 58 | def views(self): 59 | for t in viewvalues(self.tables): 60 | if t.isview: 61 | yield t 62 | 63 | def tablenames(self): 64 | return list(self.tables) 65 | 66 | @property 67 | def columns(self): 68 | for t in viewvalues(self.tables): 69 | for c in t.columns: 70 | yield c 71 | 72 | def fieldnames(self, table=None, dotted=False): 73 | ret = set() 74 | if table is None: # all field names 75 | for t in viewvalues(self.tables): 76 | if dotted: 77 | ret.update(['%s.%s' % (t.name, c.name) for c in t.columns]) 78 | else: 79 | ret.update([c.name for c in t.columns]) 80 | 81 | return ret 82 | if table not in self.tables: 83 | return set() 84 | t = self.tables[table] 85 | if dotted: 86 | return {'%s.%s' % (t.name, c.name) for c in t.columns} 87 | return {c.name for c in t.columns} 88 | 89 | def get_joins(self, tbl1, tbl2): 90 | if tbl1 not in self.tables or tbl2 not in self.tables: 91 | return set() 92 | t1 = self.tables[tbl1] 93 | t2 = self.tables[tbl2] 94 | joins = set() 95 | for src, tgt in [(t1, t2), (t2, t1)]: 96 | for c in src.columns: 97 | if (c.referenced_column and 98 | c.referenced_column.table.name == tgt.name): 99 | joins.add(ForeignKey( 100 | src.name, (c.name,), 101 | tgt.name, (c.referenced_column.name,))) 102 | return joins 103 | 104 | def tables_referencing(self, tbl): 105 | if tbl not in 
self.tables: 106 | return set() 107 | reftables = set() 108 | for c in self.tables[tbl].columns: 109 | reftables.update({col.table.name for col in c.referenced_by}) 110 | return reftables 111 | 112 | def fields_referencing(self, tbl, column=None): 113 | if tbl not in self.tables: 114 | raise StopIteration() 115 | for c in self.tables[tbl].columns: 116 | for r in c.referenced_by: 117 | if column is None or column == r.referenced_column.name: 118 | yield ForeignKey(r.table.name, (r.name,), tbl, (c.name,)) 119 | 120 | def foreign_keys(self, tbl): 121 | if tbl not in self.tables: 122 | raise StopIteration() 123 | for c in self.tables[tbl].columns: 124 | if c.referenced_column: 125 | yield ForeignKey(tbl, (c.name,), 126 | c.referenced_column.table.name, 127 | (c.referenced_column.name,)) 128 | 129 | def all_joins(self, tbl): 130 | return itertools.chain(self.foreign_keys(tbl), 131 | self.fields_referencing(tbl)) 132 | 133 | def insert_statement(self, tbl): 134 | if tbl not in self.tables: 135 | return '' 136 | t = self.tables[tbl] 137 | sql = 'insert into {table} ({columns}) values ({defaults})' 138 | columns = ', '.join(c.name for c in t.columns) 139 | defaults = ', '.join(sql_default(c) for c in t.columns) 140 | return sql.format(table=tbl, columns=columns, defaults=defaults) 141 | 142 | @property 143 | def age(self): 144 | """return age of this metadata as a datetime.timedelta""" 145 | return dt.datetime.now() - (self.modified or ZERODATE) 146 | 147 | def indexes(self, tbl): 148 | if tbl not in self.tables: 149 | raise StopIteration() 150 | for index in self.tables[tbl].indexes: 151 | yield index 152 | 153 | 154 | fkclass = collections.namedtuple('ForeignKey', 155 | 'table columns reftable refcolumns') 156 | 157 | 158 | class ForeignKey(fkclass): 159 | """Simplistic representation of a foreign key""" 160 | __slots__ = () 161 | 162 | def __str__(self): 163 | return '%s(%s) references %s(%s)' % ( 164 | self.table, ','.join(self.columns), 165 | self.reftable, 
','.join(self.refcolumns)) 166 | 167 | def as_join(self, reverse=False): 168 | """Return a string formatted as an SQL join expression.""" 169 | tables = [self.reftable, self.table] 170 | if reverse: 171 | tables.reverse() 172 | tables = tuple(tables) 173 | joinstr = '%s inner join %s on ' % tables 174 | sep = '' 175 | for idx, col in enumerate(self.columns): 176 | joinstr += sep + '%s.%s = %s.%s' % ( 177 | self.reftable, self.refcolumns[idx], 178 | self.table, col) 179 | sep = ' and ' 180 | return joinstr 181 | 182 | 183 | restr = re.compile(r'TEXT|VARCHAR.*|CHAR.*', re.I) 184 | renumeric = re.compile(r'FLOAT.*|DECIMAL.*|INT.*|DOUBLE.*|' 185 | 'FIXED.*|SHORT.*|NUMBER.*|NUMERIC.*', re.I) 186 | redate = re.compile(r'DATE|TIME|DATETIME|TIMESTAMP', re.I) 187 | 188 | 189 | def sql_default(column): 190 | """Return an acceptable default value for the given column. 191 | col is an ipydb.model.Column. 192 | """ 193 | if column.default_value: 194 | return "'%s'" % column.default_value 195 | if column.nullable: 196 | return 'NULL' 197 | typ = str(column.type).lower().strip() 198 | value = '' 199 | if redate.search(typ): 200 | log.debug('%s is a date', typ) 201 | head = typ.split()[0] 202 | if head == 'date': 203 | value = "current_date" 204 | elif head == 'time': 205 | value = "current_time" 206 | elif head in ('datetime', 'timestamp'): 207 | value = "current_timestamp" 208 | elif restr.search(typ): 209 | log.debug('%s is a string', typ) 210 | value = "'hello'" 211 | elif renumeric.search(typ): 212 | log.debug('%s is a number', typ) 213 | value = "0" 214 | else: 215 | log.debug('no match for type: %s', typ) 216 | return value 217 | 218 | 219 | class TimesMixin(object): 220 | created = sa.Column(sa.DateTime, default=dt.datetime.now) 221 | modified = sa.Column(sa.DateTime, default=dt.datetime.now, 222 | onupdate=dt.datetime.now) 223 | 224 | 225 | class Table(Base, TimesMixin): 226 | __tablename__ = 'dbtable' 227 | id = sa.Column(sa.Integer, primary_key=True) 228 | name = 
sa.Column(sa.String, index=True, unique=True) 229 | isview = sa.Column(sa.Boolean, default=False, nullable=False) 230 | 231 | def column(self, name): 232 | for column in self.columns: 233 | if column.name == name: 234 | return column 235 | else: 236 | raise KeyError("Column %s not found in table %s" % 237 | (name, self.name)) 238 | 239 | 240 | class Column(Base): 241 | __tablename__ = 'dbcolumn' 242 | __table_args__ = ( 243 | sa.UniqueConstraint('table_id', 'name'), 244 | ) 245 | id = sa.Column(sa.Integer, primary_key=True) 246 | table_id = sa.Column(sa.Integer, sa.ForeignKey('dbtable.id')) 247 | name = sa.Column(sa.String, index=True) 248 | type = sa.Column(sa.String) 249 | referenced_column_id = sa.Column(sa.Integer, sa.ForeignKey('dbcolumn.id')) 250 | constraint_name = sa.Column(sa.String, nullable=True) 251 | primary_key = sa.Column(sa.Boolean) 252 | nullable = sa.Column(sa.Boolean) 253 | default_value = sa.Column(sa.String, nullable=True) 254 | 255 | table = orm.relationship('Table', backref='columns', order_by=name) 256 | referenced_column = orm.relationship( 257 | 'Column', backref='referenced_by', remote_side=[id]) 258 | 259 | 260 | index_column_table = sa.Table( 261 | 'dbindex_dbcolumn', Base.metadata, 262 | sa.Column('dbindex_id', sa.Integer, sa.ForeignKey('dbindex.id')), 263 | sa.Column('dbcolumn_id', sa.Integer, sa.ForeignKey('dbcolumn.id'))) 264 | 265 | 266 | class Index(Base): 267 | __tablename__ = 'dbindex' 268 | id = sa.Column(sa.Integer, primary_key=True) 269 | name = sa.Column(sa.String, index=True) 270 | unique = sa.Column(sa.Boolean) 271 | table_id = sa.Column(sa.Integer, sa.ForeignKey('dbtable.id')) 272 | 273 | table = orm.relationship('Table', backref='indexes', order_by=name) 274 | columns = orm.relationship('Column', secondary=lambda: index_column_table, 275 | backref='indexes') 276 | -------------------------------------------------------------------------------- /ipydb/metadata/persist.py: 
-------------------------------------------------------------------------------- 1 | """Persists (and reads) SQLAlchemy metadata representations to a local db.""" 2 | import logging 3 | 4 | import sqlalchemy as sa 5 | from sqlalchemy import orm 6 | 7 | from ipydb.metadata import model as m 8 | 9 | log = logging.getLogger(__name__) 10 | 11 | 12 | def get_viewdata(metadata): 13 | views = [] 14 | try: 15 | res = metadata.bind.execute( 16 | ''' 17 | select 18 | table_name 19 | from 20 | information_schema.views 21 | where 22 | table_schema = 'public' 23 | ''') 24 | views = [{'name': row.table_name, 'isview': True} for row in res] 25 | except sa.exc.OperationalError: 26 | log.debug('Error fetching view from information_schema', exc_info=1) 27 | return views 28 | 29 | 30 | def write_sa_metadata(engine, sa_metadata): 31 | """Bulk import of SqlAlchemy metadata into sqlite engine. 32 | 33 | We can assume that engine is a bunch of empty tables, hence 34 | should not need to do upsert/existence checking. 35 | Args: 36 | engine - SA engine for the ipydb sqlite db 37 | sa_metadata - bound metadata object for the 38 | currently connected user db. 
39 | """ 40 | data = [{'name': t.name} for t in sa_metadata.sorted_tables] 41 | if data: 42 | engine.execute(m.Table.__table__.insert(), data) 43 | viewdata = get_viewdata(sa_metadata) 44 | if viewdata: 45 | engine.execute(m.Table.__table__.insert(), viewdata) 46 | result = engine.execute('select name, id from dbtable') 47 | tableidmap = dict(result.fetchall()) 48 | 49 | def get_column_data(table): 50 | for column in table.columns: 51 | data = { 52 | 'table_id': tableidmap[table.name], 53 | 'name': column.name, 54 | 'type': str(column.type), 55 | 'primary_key': column.primary_key, 56 | 'default_value': column.default, 57 | 'nullable': column.nullable 58 | } 59 | yield data 60 | 61 | def all_col_data(): 62 | for t in sa_metadata.sorted_tables: 63 | for coldata in get_column_data(t): 64 | yield coldata 65 | for vdata in viewdata: 66 | view = sa.Table(vdata['name'], sa_metadata, autoload=True) 67 | for coldata in get_column_data(view): 68 | yield coldata 69 | # XXX: SA doesn't like a generator? 
70 | data = list(all_col_data()) 71 | if data: 72 | engine.execute(m.Column.__table__.insert(), data) 73 | result = engine.execute( 74 | """ 75 | select 76 | t.name || c.name, 77 | c.id 78 | from 79 | dbcolumn c 80 | inner join dbtable t on t.id = c.table_id 81 | """) 82 | columnidmap = dict(result.fetchall()) 83 | 84 | def get_index_data(): 85 | for table in sa_metadata.sorted_tables: 86 | for index in table.indexes: 87 | yield { 88 | 'name': index.name, 89 | 'unique': index.unique, 90 | 'table_id': tableidmap[table.name], 91 | } 92 | data = list(get_index_data()) 93 | if data: 94 | engine.execute(m.Index.__table__.insert(), data) 95 | result = engine.execute( 96 | """ 97 | select 98 | t.name || i.name, 99 | i.id 100 | from 101 | dbindex i 102 | inner join dbtable t on t.id = i.table_id 103 | """) 104 | indexidmap = dict(result.fetchall()) 105 | 106 | def get_index_column_data(): 107 | for table in sa_metadata.sorted_tables: 108 | for index in table.indexes: 109 | for column in index.columns: 110 | index_id = indexidmap[table.name + index.name] 111 | column_id = columnidmap[table.name + column.name] 112 | yield { 113 | 'dbindex_id': index_id, 114 | 'dbcolumn_id': column_id 115 | } 116 | ins = m.index_column_table.insert() 117 | ins.values(dbindex_id=sa.bindparam('dbindex_id'), 118 | dbcolumn_id=sa.bindparam('dbcolumn_id')) 119 | 120 | data = list(get_index_column_data()) 121 | if data: 122 | engine.execute(ins, data) 123 | 124 | def get_fk_data(): 125 | for table in sa_metadata.sorted_tables: 126 | for column in table.columns: 127 | for fk in column.foreign_keys: 128 | column_id = columnidmap[table.name + column.name] 129 | reftable_name = fk.column.table.name 130 | ref_column_id = columnidmap[reftable_name + fk.column.name] 131 | constraint_name = fk.constraint.name 132 | yield { 133 | 'column_id': column_id, 134 | 'referenced_column_id': ref_column_id, 135 | 'constraint_name': constraint_name, 136 | } 137 | break # XXX: only one per fk field for now! 
138 | col = m.Column.__table__ 139 | upd = col.update().\ 140 | where(col.c.id == sa.bindparam('column_id')).\ 141 | values( 142 | referenced_column_id=sa.bindparam('referenced_column_id'), 143 | constraint_name=sa.bindparam('constraint_name')) 144 | data = list(get_fk_data()) 145 | if data: 146 | engine.execute(upd, data) 147 | 148 | 149 | def read(session): 150 | tables = session.query(m.Table).\ 151 | options( 152 | orm.joinedload('columns') 153 | .joinedload('referenced_by'), 154 | orm.joinedload('columns') 155 | .joinedload('referenced_column'), 156 | orm.joinedload('indexes') 157 | .joinedload('columns') 158 | ).all() 159 | # XXX: for some reason this is the only way that I could 160 | # force eager-loading of the column.referenced_column, 161 | # no idea why or how else to do it. 162 | for t in tables: 163 | for c in t.columns: 164 | if c.referenced_column: 165 | c.referenced_column.table 166 | for r in c.referenced_by: 167 | r.table 168 | return m.Database(tables=tables) 169 | -------------------------------------------------------------------------------- /ipydb/plugin.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | """ 4 | The ipydb plugin. 5 | 6 | :copyright: (c) 2012 by Jay Sweeney. 7 | :license: see LICENSE for more details. 
8 | """ 9 | from __future__ import print_function 10 | from configparser import DuplicateSectionError 11 | import fnmatch 12 | import functools 13 | import logging 14 | import os 15 | import shlex 16 | import subprocess 17 | import sys 18 | 19 | try: 20 | from traitlets.config.configurable import Configurable 21 | except ImportError: 22 | # IPython 3 support 23 | from IPython.config.configurable import Configurable 24 | 25 | from future.utils import viewvalues 26 | import sqlalchemy as sa 27 | 28 | from ipydb.utils import multi_choice_prompt, UnicodeWriter 29 | from ipydb.metadata import MetaDataAccessor 30 | from ipydb import asciitable 31 | from ipydb.asciitable import FakedResult 32 | from ipydb.completion import IpydbCompleter, ipydb_complete, reassignment 33 | from ipydb import engine 34 | from ipydb.magic import SqlMagics, register_sql_aliases 35 | from ipydb.metadata import model 36 | 37 | # pandas as a extra requirement 38 | _has_pandas = False 39 | 40 | try: 41 | import pandas as pd 42 | _has_pandas = True 43 | except ImportError: 44 | pass 45 | 46 | log = logging.getLogger(__name__) 47 | 48 | SQLFORMATS = ['csv', 'table'] 49 | 50 | os.environ['PYTHONIOENCODING'] = 'utf-8' 51 | 52 | 53 | def connected(f): 54 | """Decorator - bail if not connected""" 55 | @functools.wraps(f) 56 | def wrapper(plugin, *args, **kw): 57 | if not plugin.connected: 58 | print(plugin.not_connected_message) 59 | return 60 | return f(plugin, *args, **kw) 61 | return wrapper 62 | 63 | 64 | class Popen(subprocess.Popen): 65 | 66 | def __enter__(self): 67 | return self 68 | 69 | def __exit__(self, type, value, traceback): 70 | if self.stdout: 71 | self.stdout.close() 72 | if self.stderr: 73 | self.stderr.close() 74 | if self.stdin: 75 | self.stdin.close() 76 | # Wait for the process to terminate, to avoid zombies. 
def pager():
    """Return a ``less`` subprocess whose stdin ipydb writes output to.

    -F: quit if output fits one screen, -X: no termcap init/deinit,
    -R: pass ANSI colour codes through, -i: smart-case search,
    -S: chop long lines rather than wrapping.
    """
    return Popen(shlex.split('less -FXRiS'), stdin=subprocess.PIPE)


class SqlPlugin(Configurable):
    """The ipydb plugin - manipulate databases from ipython."""

    max_fieldsize = 100  # configurable?
    metadata_accessor = MetaDataAccessor()
    sqlformats = "table csv".split()
    not_connected_message = "ipydb is not connected to a database. " \
        "Try:\n\t%connect CONFIGNAME\nor try:\n\t" \
        "%connect_url dbdriver://user:pass@host/dbname\n"

    def __init__(self, shell=None, config=None):
        """Constructor.

        Args:
            shell: An instance of IPython.core.InteractiveShell.
            config: IPython's config object.
        """
        super(SqlPlugin, self).__init__(shell=shell, config=config)
        self.auto_magics = SqlMagics(self, shell)
        shell.register_magics(self.auto_magics)
        register_sql_aliases(shell.magics_manager, self.auto_magics)
        self.sqlformat = 'table'  # 'table' | 'csv'
        self.do_reflection = True
        self.connected = False
        self.engine = None
        self.nickname = None
        self.autocommit = False
        self.trans_ctx = None
        self.debug = False
        self.show_sql = False
        default, configs = engine.getconfigs()
        self.init_completer()
        # auto-connect if ~/.db-connections nominates a default config
        if default:
            self.connect(default)

    def init_completer(self):
        """Setup ipydb sql completion."""
        # to complete things like table.* we need to
        # change the ipydb splitter delims:
        delims = self.shell.Completer.splitter.delims.replace('*', '')
        # BUG FIX: was assigning to `.delim` (no trailing 's'), which is a
        # different attribute to the `.delims` read above, so the new
        # delimiter set was never installed.
        self.shell.Completer.splitter.delims = delims
        if self.shell.Completer.readline:
            self.shell.Completer.readline.set_completer_delims(delims)
        self.completer = IpydbCompleter(self.get_metadata)
        for command in self.completer.commands_completers.keys():
            # register each completer under its ipython magic ('%') name
            self.shell.set_hook('complete_command', ipydb_complete,
                                str_key='%' + command)
        # add a regex dispatch for assignments: res = %select -r ...
        self.shell.set_hook('complete_command',
                            ipydb_complete, re_key=reassignment)

    def set_debug(self, debug):
        """Toggle debug mode for the plugin and its metadata accessor."""
        self.debug = debug
        self.metadata_accessor.debug = debug

    @connected
    def get_engine(self):
        """Returns current sqlalchemy engine reference, if there was one."""
        return self.engine

    def get_db_ps1(self, *args, **kwargs):
        """ Return current host/db for use in ipython's prompt PS1. """
        if not self.connected:
            return ''
        if self.nickname:
            return " " + self.nickname
        host = self.engine.url.host or ''
        if '.' in host:
            host = host.split('.')[0]
        host = host[:15]  # don't like long hostnames
        db = '?'
        if self.engine.url.database:
            db = self.engine.url.database[:15]
        if host:
            url = "%s/%s" % (host, db)
        else:
            url = db
        return " " + url

    def get_transaction_ps1(self, *args, **kw):
        """Return '*' if ipydb has an active transaction."""
        if not self.connected:
            return ''
        # I want this: ⚡
        # but looks like IPython is expecting ascii for the PS1.
        if self.trans_ctx and self.trans_ctx.transaction.is_active:
            return ' *'
        else:
            return ''

    def get_reflecting_ps1(self, *args, **kw):
        """
        Return a string indicator if background schema reflection is running.
        """
        if not self.connected:
            return ''
        return ' !' if self.metadata_accessor.reflecting(self.engine) else ''

    def safe_url(self, url_string):
        """Return url_string with password removed."""
        url = None
        try:
            url = sa.engine.url.make_url(str(url_string))
            url.password = 'xxx'
        except Exception:  # pragma: no cover
            # best-effort only: an unparsable URL yields None
            pass
        return url

    def get_metadata(self):
        """Returns database metadata for the currect connection.

        Returns:
            Instance of ipydb.metadata.Database().
        """
        if not self.connected:
            return model.Database()
        return self.metadata_accessor.get_metadata(
            self.engine, do_reflection=self.do_reflection)

    def save_connection(self, configname):
        """Save the current connection to ~/.db-connections."""
        try:
            engine.save_connection(configname, self.engine)
        except DuplicateSectionError:
            over = self.shell.ask_yes_no(
                '`%s` exists, Overwrite (y/n)?' % configname)
            if over:
                engine.save_connection(
                    configname, self.engine, overwrite=True)
            else:
                print("Save aborted")
                return
        print("`%s` saved to ~/.db-connections" % (configname,))

    def connect(self, configname=None):
        """Connect to a database based upon its `nickname`.

        See ipydb.magic.connect() for details.
        """
        default, configs = engine.getconfigs()
        success = False

        def available():
            print(self.connect.__doc__)
            print("Available connection nicknames: %s" % (
                ' '.join(sorted(configs.keys()))))
        if not configname:
            available()
        elif configname not in configs:
            print("Config `%s` not found. " % configname)
            available()
        else:
            config = configs[configname]
            connect_args = {}
            success = self.connect_url(
                engine.make_connection_url(config), connect_args)
            if success:
                self.nickname = configname
        return success

    def connect_url(self, url, connect_args=None):
        """Connect to a database using an SqlAlchemy URL.

        Args:
            url: An SqlAlchemy-style DB connection URL.
            connect_args: extra arguments to be passed to the underlying
                DB-API driver (optional).
        Returns:
            True if connection was successful.
        """
        # BUG FIX: connect_args previously defaulted to a shared mutable {}.
        if connect_args is None:
            connect_args = {}
        if self.trans_ctx and self.trans_ctx.transaction.is_active:
            print("You have an active transaction, either %commit or "
                  "%rollback before connecting to a new database.")
            return False
        try:
            parsed_url = sa.engine.url.make_url(str(url))
        except sa.exc.ArgumentError as e:
            print(e)
            return False
        safe_url = self.safe_url(parsed_url)
        if safe_url:
            print("ipydb is connecting to: %s" % safe_url)
        try:
            self.engine = engine.from_url(parsed_url,
                                          connect_args=connect_args)
        except ImportError:  # pragma: nocover
            print("It looks like you don't have a driver for %s.\n"
                  "See the following URL for supported "
                  "database drivers:\n\t%s" % (
                      parsed_url.drivername,
                      'http://docs.sqlalchemy.org/en/latest/'
                      'dialects/index.html#included-dialects'))
            return False
        # force a connect so that we can fail early if the connection url won't
        # work
        try:
            with self.engine.connect():
                pass
        except sa.exc.OperationalError as e:  # pragma: nocover
            print(e)
            return False

        self.connected = True
        self.nickname = None
        if self.do_reflection:
            self.metadata_accessor.get_metadata(self.engine, noisy=True)
        return True

    @connected
    def flush_metadata(self):
        """Delete cached schema information"""
        print("Deleting metadata...")
        if self.do_reflection:
            self.metadata_accessor.flush(self.engine)
            self.metadata_accessor.get_metadata(self.engine, noisy=True)
300 | print("Deleting metadata...") 301 | if self.do_reflection: 302 | self.metadata_accessor.flush(self.engine) 303 | self.metadata_accessor.get_metadata(self.engine, noisy=True) 304 | 305 | @connected 306 | def execute(self, query, params=None, multiparams=None): 307 | """Execute query against current db connection, return result set. 308 | 309 | Args: 310 | query: String query to execute. 311 | args: Dictionary of bind parameters for the query. 312 | multiargs: Collection/iterable of dictionaries of bind parameters. 313 | Returns: 314 | Sqlalchemy's DB-API cursor-like object. 315 | """ 316 | rereflect = False 317 | ddl_commands = 'create drop alter truncate rename'.split() 318 | want_tx = 'insert update delete merge replace'.split() 319 | result = None 320 | if params is None: 321 | params = {} 322 | if multiparams is None: 323 | multiparams = [] 324 | bits = query.split() 325 | if (len(bits) == 2 and bits[0].lower() == 'select' and 326 | bits[1] in self.get_metadata().tables): 327 | query = 'select * from %s' % bits[1] 328 | elif (bits[0].lower() in want_tx and 329 | not self.trans_ctx and not self.autocommit): 330 | self.begin() # create tx before doing modifications 331 | elif bits[0].lower() in ddl_commands: 332 | rereflect = True 333 | conn = self.engine 334 | if self.trans_ctx and self.trans_ctx.transaction.is_active: 335 | conn = self.trans_ctx.conn 336 | try: 337 | result = conn.execute(query, *multiparams, **params) 338 | if rereflect and self.do_reflection: # schema changed 339 | self.metadata_accessor.get_metadata(self.engine, 340 | force=True, noisy=True) 341 | except Exception as e: # pragma: nocover 342 | if self.debug: 343 | raise 344 | print(e.message) 345 | return result 346 | 347 | @connected 348 | def run_sql_script(self, script, interactive=False, delimiter='/'): 349 | """Run all SQL statments found in a text file. 350 | 351 | Args: 352 | script: path to file containing SQL statments. 
353 | interactive: run in ineractive mode, showing and prompting each 354 | statement. default: False. 355 | delimiter: SQL statement delimiter, must be on a new line 356 | by itself. default: '/'. 357 | """ 358 | with open(script) as fin: 359 | current = '' 360 | while True: 361 | line = fin.readline() 362 | if line.strip() == delimiter or (line == '' and current): 363 | if interactive: 364 | print(current) 365 | choice = multi_choice_prompt( 366 | 'Run this statement ' 367 | '([y]es, [n]o, [a]ll, [q]uit):', 368 | {'y': 'y', 'n': 'n', 'a': 'a', 'q': 'q'}) 369 | if choice == 'y': 370 | pass 371 | elif choice == 'n': 372 | current = '' 373 | elif choice == 'a': 374 | interactive = False 375 | elif choice == 'q': 376 | break 377 | if current: 378 | if current.strip().lower() == 'commit': 379 | self.commit() 380 | elif current.strip().lower() == 'rollback': 381 | self.rollback() 382 | else: 383 | self.execute(current) 384 | current = '' 385 | else: 386 | current += line 387 | if line == '': 388 | break 389 | 390 | @connected 391 | def begin(self): 392 | """Start a new transaction against the current db connection.""" 393 | if not self.trans_ctx or not self.trans_ctx.transaction.is_active: 394 | self.trans_ctx = self.engine.begin() 395 | else: 396 | print("You are already in a transaction" 397 | " block and nesting is not supported") 398 | 399 | @connected 400 | def commit(self): 401 | """Commit current transaction if there was one.""" 402 | if self.trans_ctx: 403 | with self.trans_ctx: 404 | pass 405 | self.trans_ctx = None 406 | else: 407 | print("No active transaction") 408 | 409 | @connected 410 | def rollback(self): 411 | """Rollback current transaction if there was one.""" 412 | if self.trans_ctx: 413 | self.trans_ctx.transaction.rollback() 414 | self.trans_ctx = None 415 | else: 416 | print("No active transaction") 417 | 418 | @connected 419 | def show_tables(self, *globs, **kw): 420 | """Print a list of tablenames matching input glob/s. 
    @connected
    def show_tables(self, *globs, **kw):
        """Print a list of tablenames matching input glob/s.

        All table names are printed if no glob is given, otherwise
        just those table names matching any of the *globs are printed.

        Args:
            *glob: zero or more globs to match against table names.
        Kwargs:
            views: (bool) show views only

        """
        matches = set()
        if kw.get('views'):
            tablenames = [v.name for v in self.get_metadata().views]
        else:
            tablenames = self.get_metadata().tables
        if not globs:
            matches = tablenames
        else:
            # union of all glob matches
            for glob in globs:
                matches.update(fnmatch.filter(tablenames, glob))
        matches = sorted(matches)
        # render through the standard result pipeline (pager/csv)
        self.render_result(FakedResult(((r,) for r in matches), ['Table']))
        # print '\n'.join(sorted(matches))

    @connected
    def describe(self, table):
        """Print information about a table.

        Writes a columns table, primary key, foreign keys (both
        directions) and indexes to a pager. All writes are bytes
        because the pager's stdin is a byte pipe.
        """
        if table not in self.get_metadata().tables:
            print("Table not found: %s" % table)
            return
        tbl = self.get_metadata().tables[table]

        def nullstr(nullable):
            # render a column's nullability for display
            return 'NULL' if nullable else 'NOT NULL'

        def namestr(c):
            # primary key columns are flagged with a leading '*'
            return ('*%s' if c.primary_key else '%s') % c.name

        with pager() as out:
            items = ((namestr(c), c.type, nullstr(c.nullable))
                     for c in tbl.columns)
            out.write(b'Columns' + b'\n')
            asciitable.draw(
                FakedResult(sorted(items), 'Name Type Nullable'.split()),
                out, paginate=True,
                max_fieldsize=5000)
            out.write(b'\n')
            out.write(b'Primary Key (*)\n')
            out.write(b'---------------\n')
            pk = ', '.join(c.name for c in tbl.columns if c.primary_key)
            out.write(b'  ')
            if not pk:
                out.write(b'(None Found!)')
            else:
                out.write(pk.encode('utf8'))
            out.write(b'\n\n')
            out.write(b'Foreign Keys\n')
            out.write(b'------------\n')
            fks = self.get_metadata().foreign_keys(table)
            # fk doubles as a "saw at least one" flag after the loop
            fk = None
            for fk in fks:
                out.write(('  %s\n' % str(fk)).encode('utf8'))
            if fk is None:
                out.write(b'  (None Found)')
            out.write(('\n\nReferences to %s\n' % table).encode('utf8'))
            out.write(b'--------------' + b'-' * len(table) + b'\n')
            fks = self.get_metadata().fields_referencing(table)
            fk = None
            for fk in fks:
                out.write(b'  ' + str(fk).encode('utf8') + b'\n')
            if fk is None:
                out.write(b'  (None found)\n')
            out.write(b'\n\nIndexes\n')

            def items():
                # (name, comma-joined column names, uniqueness) per index
                for idx in self.get_metadata().indexes(table):
                    yield (idx.name, ', '.join(c.name for c in idx.columns),
                           idx.unique)
            asciitable.draw(
                FakedResult(sorted(items()), 'Name Columns Unique'.split()),
                out, paginate=True, max_fieldsize=5000)

    @connected
    def show_fields(self, *globs):
        """
        Print a list of fields matching the input glob tableglob[.fieldglob].

        See ipydb.magic.show_fields for examples.

        Args:
            *globs: list of [tableglob].[fieldglob] strings
        """

        def starname(col):
            # primary key columns are flagged with a leading '*'
            star = '*' if col.primary_key else ''
            return star + col.name

        def glob_columns(table):
            # yield columns of `table` whose 'table.column' name matches
            # any input glob; a glob without a '.' implies '.*' (all fields)
            for c in table.columns:
                for glob in globs:
                    bits = glob.split('.', 1)
                    if len(bits) == 1:
                        glob += '.*'
                    if fnmatch.fnmatch('%s.%s' % (table.name, c.name), glob):
                        yield c

        with pager() as out:
            for table in viewvalues(self.get_metadata().tables):
                if globs:
                    columns = list(glob_columns(table))
                else:
                    columns = table.columns
                # map display name -> column for sorted output
                columns = {starname(c): c for c in columns}
                if columns:
                    out.write(table.name.encode('utf8') + b'\n')
                    out.write(b'-' * len(table.name) + b'\n')
                    for starcol in sorted(columns):
                        col = columns[starcol]
                        output = "  %-35s%s %s\n" % (
                            starcol,
                            col.type,
                            'NULL' if col.nullable else 'NOT NULL')
                        out.write(output.encode('utf8'))
                    # NOTE(review): this inner `if columns` is always true
                    # inside the outer `if columns` branch — confirm intent.
                    if columns:
                        out.write(b'\n')
table): 549 | """Show all incoming and outgoing joins possible for a table. 550 | Args: 551 | table: Table name. 552 | """ 553 | with pager() as out: 554 | for fk in self.get_metadata().foreign_keys(table): 555 | out.write(fk.as_join(reverse=True).encode('utf8') + b'\n') 556 | for fk in self.get_metadata().fields_referencing(table): 557 | out.write(fk.as_join().encode('utf8') + b'\n') 558 | 559 | @connected 560 | def what_references(self, arg): 561 | """Show fields referencing the input table/field arg. 562 | 563 | If arg is a tablename, then print fields which reference 564 | any field in tablename. If arg is a field (specified by 565 | tablename.fieldname), then print only fields which reference 566 | the specified table.field. 567 | 568 | Args: 569 | arg: Either a table name or a [table.field] name""" 570 | with pager() as out: 571 | bits = arg.split('.', 1) 572 | tablename = bits[0] 573 | fieldname = bits[1] if len(bits) > 1 else None 574 | fks = self.get_metadata().fields_referencing(tablename, fieldname) 575 | for fk in fks: 576 | out.write(str(fk).encode('utf8') + b'\n') 577 | 578 | @connected 579 | def show_fks(self, table): 580 | """Show foreign keys for the given table 581 | 582 | Args: 583 | table: A table name.""" 584 | with pager() as out: 585 | fks = self.get_metadata().foreign_keys(table) 586 | for fk in fks: 587 | out.write(str(fk).encode('utf8') + b'\n') 588 | 589 | def render_result(self, cursor, paginate=True, 590 | filepath=None, sqlformat=None): 591 | """Render a result set and pipe through less. 592 | 593 | Args: 594 | cursor: iterable of tuples, with one special method: 595 | cursor.keys() which returns a list of string columns 596 | headings for the tuples. 
597 | """ 598 | if not sqlformat: 599 | sqlformat = self.sqlformat 600 | if filepath: 601 | out = open(filepath, 'w') 602 | sqlformat = 'csv' 603 | else: 604 | out = pager() 605 | with out as stdout: 606 | if sqlformat == 'csv': 607 | self.format_result_csv(cursor, out=stdout) 608 | else: 609 | asciitable.draw(cursor, out=stdout, 610 | paginate=paginate, 611 | max_fieldsize=self.max_fieldsize) 612 | 613 | def format_result_csv(self, cursor, out=sys.stdout): 614 | """Render an sql cursor set in CSV format. 615 | 616 | Args: 617 | cursor: cursor-like object: see render_result() 618 | out: file-like object to write results to. 619 | """ 620 | writer = UnicodeWriter(out) 621 | writer.writerow(cursor.keys()) 622 | writer.writerows(cursor) 623 | 624 | def build_dataframe(self, cursor): 625 | """Reture an sql result set in pandas DataFrame format. 626 | 627 | Args: 628 | cursor: a sqlalchemy connection cursor 629 | """ 630 | if not _has_pandas: 631 | print("Warning: Pandas support not installed." 632 | "Please use `pip install 'ipydb[notebook]'` " 633 | "to add support for pandas dataframes in ipydb.") 634 | return None 635 | 636 | data = cursor.fetchall() # XXX: clamp upper limit on fetch here 637 | columns = cursor.keys() 638 | frame = pd.DataFrame.from_records(data, columns=columns) 639 | return frame 640 | -------------------------------------------------------------------------------- /ipydb/utils.py: -------------------------------------------------------------------------------- 1 | """Helpers and utils.""" 2 | 3 | import codecs 4 | import csv 5 | from io import BytesIO as StringIO 6 | import time 7 | 8 | from builtins import input 9 | from past.builtins import basestring 10 | 11 | 12 | class UnicodeWriter: 13 | """ 14 | A CSV writer which will write rows to CSV file "f", 15 | which is encoded in the given encoding. 
16 | """ 17 | 18 | def __init__(self, f, dialect=csv.excel, encoding="utf-8", **kwds): 19 | # Redirect output to a queue 20 | self.queue = StringIO() 21 | self.writer = csv.writer(self.queue, dialect=dialect, **kwds) 22 | self.stream = f 23 | self.encoder = codecs.getincrementalencoder(encoding)() 24 | 25 | def writerow(self, row): 26 | try: 27 | self.writer.writerow([s.decode('utf8').encode("utf-8") if isinstance(s, basestring) 28 | else s for s in row]) 29 | except: 30 | self.writer.writerow([s.encode("utf-8") if isinstance(s, basestring) 31 | else s for s in row]) 32 | # Fetch UTF-8 output from the queue ... 33 | data = self.queue.getvalue() 34 | data = data.decode("utf-8") 35 | # ... and reencode it into the target encoding 36 | data = self.encoder.encode(data) 37 | # write to the target stream 38 | self.stream.write(data) 39 | # empty queue 40 | self.queue.truncate(0) 41 | 42 | def writerows(self, rows): 43 | for row in rows: 44 | self.writerow(row) 45 | 46 | 47 | def multi_choice_prompt(prompt, choices, default=None): 48 | ans = None 49 | while ans not in choices.keys(): 50 | try: 51 | ans = input(prompt + ' ').lower() 52 | if not ans: # response was an empty string 53 | ans = default 54 | except KeyboardInterrupt: 55 | pass 56 | except EOFError: 57 | if default in choices.keys(): 58 | ans = default 59 | print() 60 | else: 61 | raise 62 | 63 | return choices[ans] 64 | 65 | 66 | class timer(object): 67 | """Timer Context Manager. 
68 | 69 | Usage: 70 | with(timer("doing something")): 71 | time.sleep(10) 72 | """ 73 | def __init__(self, name='timer', log=None): 74 | self.name = name 75 | self.log = log 76 | 77 | def __enter__(self): 78 | self.start = time.time() 79 | 80 | def __exit__(self, ty, val, tb): 81 | end = time.time() 82 | msg = "%s : %0.3f ms" % (self.name, (end - self.start) * 1000) 83 | if self.log and hasattr(self.log, 'debug'): 84 | self.log.debug(msg) 85 | else: 86 | print(msg) 87 | return False 88 | 89 | 90 | def termsize(): 91 | """Try to figure out the size of the current terminal. 92 | 93 | Returns: 94 | Size of the terminal as a tuple: (height, width). 95 | """ 96 | import os 97 | env = os.environ 98 | 99 | def ioctl_GWINSZ(fd): 100 | try: 101 | import fcntl 102 | import termios 103 | import struct 104 | cr = struct.unpack('hh', fcntl.ioctl(fd, termios.TIOCGWINSZ, 105 | '1234')) 106 | except: 107 | return None 108 | return cr 109 | cr = ioctl_GWINSZ(0) or ioctl_GWINSZ(1) or ioctl_GWINSZ(2) 110 | if not cr: 111 | try: 112 | fd = os.open(os.ctermid(), os.O_RDONLY) 113 | cr = ioctl_GWINSZ(fd) 114 | os.close(fd) 115 | except: 116 | pass 117 | if not cr: 118 | try: 119 | cr = (env['LINES'], env['COLUMNS']) 120 | except: 121 | cr = (25, 80) 122 | return int(cr[1]), int(cr[0]) 123 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | from codecs import open 3 | 4 | from setuptools import setup 5 | import ipydb 6 | 7 | requires = ['SQLAlchemy', 'ipython>=1.0', 'python-dateutil', 'sqlparse', 8 | 'future'] 9 | tests_require = ['nose', 'mock==1.0.1'] 10 | extras_require = {'doc': ['Sphinx==1.2.3', 'sphinx-rtd-theme==0.1.6'], 11 | 'notebook': ['pandas>=0.16.2'] 12 | } 13 | description = "An IPython extension to help you write and run SQL statements" 14 | 15 | with open('README.rst', 'r', 'utf-8') as f: 16 | readme = f.read() 17 | 18 | 
setup( 19 | name='ipydb', 20 | version=ipydb.__version__, 21 | description=description, 22 | long_description=readme, 23 | author='Jay Sweeney', 24 | author_email='writetojay@gmail.com', 25 | url='http://github.com/jaysw/ipydb', 26 | packages=['ipydb', 'ipydb.metadata'], 27 | package_dir={'ipydb': 'ipydb'}, 28 | package_data={'': ['LICENSE']}, 29 | include_package_data=True, 30 | zip_safe=False, 31 | license='Apache 2.0', 32 | install_requires=requires, 33 | extras_require=extras_require, 34 | test_suite='nose.collector', 35 | tests_require=tests_require, 36 | classifiers=( 37 | "Development Status :: 4 - Beta", 38 | 'Intended Audience :: Developers', 39 | 'Natural Language :: English', 40 | 'License :: OSI Approved :: Apache Software License', 41 | 'Programming Language :: Python', 42 | 'Programming Language :: Python :: 2.6', 43 | 'Programming Language :: Python :: 2.7', 44 | 'Environment :: Console', 45 | 'Framework :: IPython', 46 | 'Topic :: Database', 47 | ) 48 | ) 49 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jaysw/ipydb/ecb1014af9b7f2cd0ab2f4d31b0b18e52f3c813c/tests/__init__.py -------------------------------------------------------------------------------- /tests/dbs/chinook.sqlite: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jaysw/ipydb/ecb1014af9b7f2cd0ab2f4d31b0b18e52f3c813c/tests/dbs/chinook.sqlite -------------------------------------------------------------------------------- /tests/dbs/test.sql: -------------------------------------------------------------------------------- 1 | select * from Album where AlbumId < 2 2 | / 3 | select * from Album where AlbumId < 3 4 | / 5 | -------------------------------------------------------------------------------- /tests/test_completion.py: 
import collections
import itertools
import unittest

import mock
from mock import patch
import nose.tools as nt

from ipydb import completion
from ipydb.metadata import model as m


class Event(object):
    """Minimal stand-in for an IPython completion event."""

    def __init__(self, command='', line='', symbol='', text_until_cursor=''):
        self.command = command
        self.line = line
        self.symbol = symbol
        self.text_until_cursor = text_until_cursor


class CompleterTest(unittest.TestCase):
    """Unit tests for completion.IpydbCompleter against a mocked Database."""

    def setUp(self):
        # fake schema: table name -> list of column names
        self.db = mock.Mock(spec=m.Database)
        self.completer = completion.IpydbCompleter(get_db=lambda: self.db)
        self.data = {
            'foo': ['first', 'second', 'third'],
            'bar': ['thing'],
            'baz': ['other'],
            'lur': ['foo_id', 'bar_id']
        }

        self.db.tablenames.return_value = self.data.keys()
        self.db.fieldnames = mock.MagicMock(side_effect=self.mock_fieldnames)
        # setup some joins: lur references both foo and bar
        lur_foo = m.ForeignKey(table='lur', columns=('foo_id',),
                               reftable='foo', refcolumns=('first',))
        lur_bar = m.ForeignKey(table='lur', columns=('bar_id',),
                               reftable='bar', refcolumns=('thing',))
        joins = collections.defaultdict(set)
        joins.update({
            'foo': {lur_foo},
            'bar': {lur_bar},
            'baz': set(),
            'lur': {lur_foo, lur_bar},
        })
        self.db.all_joins = mock.MagicMock(
            side_effect=lambda t: joins.get(t, set()))

        def mock_get_joins(t1, t2):
            # joins common to both tables
            return joins.get(t1, set()) & joins.get(t2, set())

        self.db.get_joins = mock.MagicMock(side_effect=mock_get_joins)

    def mock_fieldnames(self, table=None, dotted=False):
        """Pretends to be Database.fieldnames() using self.data"""
        if table is None:
            if not dotted:
                return itertools.chain(*self.data.values())
            else:
                return ['%s.%s' % (t, c) for t, cols in self.data.items()
                        for c in cols]
        if dotted:
            return ['%s.%s' % (table, col) for col in self.data[table]]
        else:
            return self.data[table]

    def test_table_name(self):
        # prefix 'ba' should match bar and baz
        result = self.completer.table_name(Event(symbol='ba'))
        nt.assert_equal(sorted(result), ['bar', 'baz'])

    def test_dotted_expressions(self):
        result = self.completer.dotted_expression(Event(symbol='foo.'))
        nt.assert_equal(result, ['foo.first', 'foo.second', 'foo.third'])

        result = self.completer.dotted_expression(Event(symbol='foo.se'))
        nt.assert_equal(result, ['foo.second'])

        result = self.completer.dotted_expression(Event(symbol='bar.'))
        nt.assert_equal(result, ['bar.thing'])

        # where the table is unknown (e.g. some alias that we haven't parsed)
        # return a match on ANY field. I think this is better than returning
        # no matches for now.
        result = self.completer.dotted_expression(
            Event(symbol='something.thin'))
        nt.assert_equal(result, ['thing'])

        # this one is a bit crazy. show every possible fieldname...
        result = self.completer.dotted_expression(
            Event(symbol='something.'))
        nt.assert_equal(result,
                        sorted('something.' + c
                               for c in itertools.chain(*self.data.values())))

    def test_expand_table_dot_star(self):
        # 'table.*' expands to the full comma-separated column list
        result = self.completer.dotted_expression(Event(symbol='foo.*'))
        nt.assert_equal(result, ['foo.first, foo.second, foo.third'])

    def test_expand_simple_select(self):
        # 'select <table>' expands to 'select table.* from table'
        result = self.completer.expand_two_token_sql(
            Event(line="select foo", symbol="foo"))
        nt.assert_equal(result, ['foo.* from foo'])

    def test_expand_insert_statement(self):
        # 'insert <table>' expands using the db's insert template
        insert_statement = 'insert into foo blah sure thing'
        self.db.insert_statement.return_value = insert_statement
        result = self.completer.expand_two_token_sql(
            Event(line="insert foo", symbol="foo"))
        nt.assert_equal(result, [' into foo blah sure thing'])

    def test_is_valid_join_expression(self):
        # a**b is valid when a and b share a foreign-key relationship
        valid_joins = [
            'lur**foo',
            'foo**lur',
            'lur**bar',
            'bar**lur',
            'bar**lur**foo',
            'bar**lur**foo**foo**bar',
        ]
        invalid_joins = [
            'bar**baz',
            'foo**baz',
            'bar**z',
            'bar**',
        ]
        for valid in valid_joins:
            nt.assert_true(self.completer.is_valid_join_expression(valid))
        for invalid in invalid_joins:
            nt.assert_false(self.completer.is_valid_join_expression(invalid))

    def test_expand_join_expression(self):
        # unknown joins pass through unchanged; known joins expand to SQL
        expansions = {
            'not**real': 'not**real',
            'lur**foo': 'lur inner join foo on lur.foo_id = foo.first ',
        }
        for k, v in expansions.items():
            nt.assert_equal(self.completer.expand_join_expression(k), v)

    def test_join_shortcut(self):
        expectations = {
            'lur**': ['lur**bar', 'lur**foo'],
            'foo**': ['foo**lur'],
            'baz**': [],
            'lur**foo**': ['lur**foo**bar', 'lur**foo**foo', 'lur**foo**lur'],
            'lur**ba': ['lur**bar'],
            'lur**bar**f': ['lur**bar**foo'],
            'lur**bar': ['lur inner join bar on lur.bar_id = bar.thing '],

        }
        for symbol, expected in expectations.items():
            actual = self.completer.join_shortcut(Event(symbol=symbol))
            nt.assert_equal(expected, actual)

    def test_sql_format(self):
        expectations = {
            '': ['csv', 'table'],
            'cs': ['csv'],
            'ta': ['table']
        }
        for symbol, expected in expectations.items():
            actual = self.completer.sql_format(Event(symbol=symbol))
            nt.assert_equal(expected, actual)

    def mock_config(self, mock_getconfigs):
        """mocks out the getconfigs() call in ipydb.completion"""
        confignames = 'employees northwind something'.split()
        # getconfigs()[1].keys()
        mock_getconfigs.return_value.__getitem__.return_value\
            .keys.return_value = confignames

    @patch('ipydb.completion.getconfigs')
    def test_connection_nickname(self, mock_getconfigs):
        self.mock_config(mock_getconfigs)
        expectations = {
            '': ['employees', 'northwind', 'something'],
            'emp': ['employees'],
            'no': ['northwind']
        }
        for symbol, expected in expectations.items():
            actual = self.completer.connection_nickname(Event(symbol=symbol))
            nt.assert_equal(expected, actual)

    def test_sql_statement(self):
        expectations = {
            ('select foo', 'foo'): ['foo.* from foo'],
            ('select foo.fir', 'foo.fir'): ['foo.first'],
            ('select foo**lu', 'foo**lu'): ['foo**lur'],
            ('select foo.first foo.se', 'foo.se'): ['foo.second'],
        }
        for (line, symbol), expected in expectations.items():
            actual = self.completer.sql_statement(
                Event(line=line, symbol=symbol))
            nt.assert_equal(expected, actual)

    @patch('ipydb.completion.getconfigs')
    def test_complete(self, mock_getconfigs):
        # end-to-end dispatch: (line, command, symbol) -> completions
        self.mock_config(mock_getconfigs)
        expectations = {
            ('connect nor', 'connect', 'nor'): ['northwind'],
            ('sqlformat ta', 'sqlformat', 'ta'): ['table'],
            ('references ba', 'references', 'ba'): ['bar', 'baz'],
            ('tables ba', 'tables', 'ba'): ['bar', 'baz'],
            ('fields fo', 'fields', 'fo'): ['foo'],  # needs more!
            ('joins fo', 'joins', 'fo'): ['foo'],
            ('fks fo', 'fks', 'fo'): ['foo'],
            ('describe fo', 'describe', 'fo'): ['foo'],
            ('sql sele', 'sql', 'sele'): ['select'],
            ('%sql sele', '%sql', 'sele'): ['select'],
            ('sql select foo.fi', 'sql', 'foo.fi'): ['foo.first'],
            ('select foo.fi', 'sql', 'foo.fi'): ['foo.first'],
            ('runsql anything', 'runsql', 'anything'): None,
            ('foo = %select -r foo.fi', 'select', 'foo.fi'): ['foo.first'],
            ('zzzz', 'zzzz', 'zzzz'): None,
        }
        for (line, command, symbol), expected in expectations.items():
            actual = self.completer.complete(
                Event(line=line, symbol=symbol, command=command))
            nt.assert_equal(expected, actual)

    def mock_ipy_magic(self, s):
        """mock for completion.get_ipydb and completion.ipydb_complete()"""
        if s != 'get_ipydb':
            raise Exception('something bad happened')
        m = mock.MagicMock()
        sqlplugin = m.return_value
        sqlplugin.debug = True
        sqlplugin.completer = self.completer
        return sqlplugin

    def test_ipydb_complete(self):
        mock_ipy = mock.MagicMock()
        mock_ipy.magic = mock.MagicMock(side_effect=self.mock_ipy_magic)
        result = completion.ipydb_complete(
            mock_ipy,
            Event(line='select fo', command='select', symbol='fo',
                  text_until_cursor='select fo'))
        nt.assert_true('foo', result)

    def test_monkey_string(self):
        # MonkeyString reports startswith for the typed prefix while
        # comparing equal to the full replacement text
        ms = completion.MonkeyString('hello w', 'something hello w')
        nt.assert_true(ms.startswith('hello w'))
        nt.assert_equal(ms, 'something hello w')
        nt.assert_false(ms.startswith('other unrelated thing'))

    def test_exceptions_are_surpressed(self):
        # when debug is off, completer errors must not propagate to IPython
        mock_ipy = mock.MagicMock()
        mock_ipydb = self.mock_ipy_magic('get_ipydb')
        mock_ipydb.debug = False
        mock_ipy.magic.return_value = mock_ipydb

        def kaboom(*args, **kw):
            raise Exception('ka ka ka boo boo booom!')

        mock_ipydb.completer = mock.MagicMock()
        mock_ipydb.completer.complete = mock.MagicMock(side_effect=kaboom)
        completion.ipydb_complete(
            mock_ipy,
            Event(line='select fo', command='select', symbol='fo',
                  text_until_cursor='select fo'))
"""Some integration tests using the chinook example db."""
from __future__ import print_function

from io import BytesIO, StringIO
import shutil
import sys

from IPython.terminal.interactiveshell import TerminalInteractiveShell
import mock
import nose.tools as nt

import ipydb
from ipydb import plugin, engine


# tests run against a throwaway copy of the chinook sample db
EXAMPLEDB = 'sqlite:///tests/dbs/temp.sqlite'


class DStringIO(StringIO):
    """StringIO that also accepts bytes, decoding them as utf8 first."""
    def write(self, s):
        if isinstance(s, bytes):
            s = s.decode('utf8')
        super(DStringIO, self).write(s)


class TestIntegration(object):
    """Drive SqlPlugin/SqlMagics against a real sqlite database.

    The IPython shell, config lookup, metadata engine and pager are all
    mocked; the SQL itself runs for real against a temp sqlite copy.
    """

    def setup(self):
        # fresh writable copy of the sample db for each test
        shutil.copyfile('tests/dbs/chinook.sqlite', 'tests/dbs/temp.sqlite')
        self.pgetconfigs = mock.patch('ipydb.plugin.engine.getconfigs')
        mgetconfigs = self.pgetconfigs.start()
        mgetconfigs.return_value = None, []
        self.ipython = mock.MagicMock(spec=TerminalInteractiveShell)
        # ipydb's own metadata cache lives in an in-memory sqlite db
        self.pget_metadata_engine = mock.patch(
            'ipydb.metadata.get_metadata_engine')
        mget_engine = self.pget_metadata_engine.start()
        self.md_engine = engine.from_url('sqlite:///:memory:')
        mget_engine.return_value = ('memory', self.md_engine)
        self.ipython.config = None
        self.ipython.register_magics = mock.MagicMock()
        self.ipython.Completer = mock.MagicMock()
        self.ipydb = plugin.SqlPlugin(shell=self.ipython)
        self.ipydb.metadata_accessor.debug = True  # turn off threading
        self.m = self.ipydb.auto_magics
        # capture pager output in a byte buffer instead of spawning less
        self.out = BytesIO()
        self.ppager = mock.patch('ipydb.plugin.pager', spec=plugin.pager)
        self.mockpager = self.ppager.start()
        self.mockpager.return_value.__enter__.return_value = self.out

    def test_it(self):
        self.m.connecturl(EXAMPLEDB)
        self.ipydb.get_reflecting_ps1()
        self.m.flushmetadata('')
        self.m.describe('Album')
        print(self.out.getvalue())

    def test_debug(self):
        self.m.debug_ipydb('')
        nt.assert_true(self.ipydb.debug)
        nt.assert_true(self.ipydb.metadata_accessor.debug)

    def test_help(self):
        ipydb.ipydb_help()  # XXX: assert somthing...

    def test_other(self):
        # smoke-test a spread of magics against the live db
        self.m.connecturl(EXAMPLEDB)
        self.m.sql('-p select * from Album', 'where albumId = 1')
        self.m.sql('-f select * from Album')
        self.m.runsql('tests/dbs/test.sql')
        self.m.sqlformat('vsc')
        self.m.sqlformat('csv')
        self.m.rereflect('')
        print(self.out.getvalue())

    def test_insert(self):
        self.m.connecturl(EXAMPLEDB)
        output = ''
        # capture stdout to check the rowcount message
        try:
            sys.stdout = DStringIO()
            self.m.sql("insert into Genre (Name) values ('Cronk')")
            output = sys.stdout.getvalue()
        finally:
            sys.stdout = sys.__stdout__
        nt.assert_in(u'1 row affected', output)

    def teardown(self):
        # stop all patches started in setup()
        self.pgetconfigs.stop()
        self.pget_metadata_engine.stop()
        self.ppager.stop()
import unittest

from IPython.terminal.interactiveshell import TerminalInteractiveShell
import nose.tools as nt
import mock

from ipydb import magic, plugin
mock.MagicMock(spec=TerminalInteractiveShell) 15 | self.ipython.config = None 16 | self.magics = magic.SqlMagics(self.ipydb, self.ipython) 17 | 18 | def test_create_sql_alias(self): 19 | m = mock.MagicMock() 20 | alias = magic.create_sql_alias('select', m) 21 | alias(' -a -b -c * from table where thing = 0') 22 | m.sql.assert_called_with( 23 | '-a -b -c select * from table where thing = 0', 24 | None) 25 | 26 | @mock.patch('ipydb.ipydb_help') 27 | def test_ipydb_help(self, mockhelp): 28 | self.magics.ipydb_help() 29 | mockhelp.assert_called_once_with() 30 | 31 | def test_set_reflection(self): 32 | self.ipydb.do_reflection = False 33 | self.magics.set_reflection('') 34 | nt.assert_true(self.ipydb.do_reflection) 35 | self.magics.set_reflection('') 36 | nt.assert_false(self.ipydb.do_reflection) 37 | 38 | def test_engine(self): 39 | self.ipydb.get_engine.return_value = 'barry' 40 | eng = self.magics.engine('') 41 | nt.assert_equal('barry', eng) 42 | 43 | def test_useless_tests_for_coverage_sake(self): 44 | self.magics.commit('') 45 | self.ipydb.commit.assert_called() 46 | self.magics.begin('') 47 | self.ipydb.begin.assert_called() 48 | self.magics.rollback('') 49 | self.ipydb.rollback.assert_called() 50 | self.magics.tables('') 51 | self.ipydb.show_tables.assert_called() 52 | self.magics.fields('') 53 | self.ipydb.show_fields.assert_called() 54 | self.ipydb.show_sql = True 55 | self.magics.showsql('') 56 | nt.assert_false(self.ipydb.show_sql) 57 | self.magics.references('a') 58 | self.ipydb.what_references.assert_called() 59 | self.magics.joins('a') 60 | self.ipydb.show_joins.assert_called() 61 | self.magics.fks('a') 62 | self.ipydb.show_fks.assert_called() 63 | self.magics.connect('a') 64 | self.ipydb.connect.assert_called() 65 | self.magics.connecturl('a') 66 | self.ipydb.connect_url.assert_called() 67 | 68 | def test_sql(self): 69 | thing = self.magics.sql('-r -f select * from blah where something = 1') 70 | nt.assert_is_not_none(thing) # uhm, not sure what to 
check... 71 | lst = [{'x': 'y'}, {'e': 'f'}] 72 | d = {'a': 'b'} 73 | dct = { 74 | 'zzz': d, 75 | 'yyy': lst 76 | } 77 | 78 | # params and multiparams 79 | ret = self.ipydb.execute.return_value 80 | ret.returns_rows = True 81 | self.ipython.user_ns = dct 82 | self.magics.sql('-a zzz -m yyy select * from foo') 83 | self.ipydb.execute.assert_called_with( 84 | 'select * from foo', 85 | params=d, multiparams=lst) 86 | 87 | ret.returns_rows = False 88 | ret.rowount = 2 89 | self.magics.sql('-a zzz -m yyy select * from foo') 90 | 91 | r = self.magics.sql('-r select * from foo') 92 | nt.assert_equal(ret, r) 93 | -------------------------------------------------------------------------------- /tests/test_metadata.py: -------------------------------------------------------------------------------- 1 | import logging 2 | 3 | import sqlalchemy as sa 4 | 5 | from ipydb import metadata 6 | from ipydb.metadata import model as m 7 | 8 | 9 | logging.basicConfig() 10 | 11 | ipengine = None # in memory ipydb engine with schema 12 | ipsession = None 13 | 14 | show_sql = False 15 | 16 | if show_sql: 17 | logging.getLogger('sqlalchemy.engine').setLevel(logging.INFO) 18 | 19 | 20 | def setup_ipydb_schema(): 21 | global ipsession, ipengine 22 | ipengine = sa.create_engine('sqlite:///:memory:') 23 | m.Base.metadata.create_all(ipengine) 24 | metadata.Session.configure(bind=ipengine) 25 | ipsession = metadata.Session() 26 | 27 | 28 | def teardown_ipydb_schema(): 29 | try: 30 | ipsession.commit() 31 | except Exception as e: 32 | ipsession.rollback() 33 | raise e 34 | finally: 35 | ipsession.close() 36 | ipengine.dispose() 37 | 38 | 39 | def get_user_table(): 40 | metadata = sa.MetaData() 41 | user = sa.Table( 42 | 'user', metadata, 43 | sa.Column('user_id', sa.Integer, primary_key=True), 44 | sa.Column('user_name', sa.String(16), nullable=False), 45 | sa.Column('email_address', sa.String(60)), 46 | sa.Column('password', sa.String(20), nullable=False) 47 | ) 48 | return user 49 | 50 | 51 | 
def setup(*args, **kw):
    """Module-level nose setup hook; currently a no-op."""
    pass


def teardown(*args, **kw):
    """Module-level nose teardown hook; currently a no-op."""
    pass


def test_get_metadata():
    # nothing to see here just yet...
    pass
50 | lur.columns[0].referenced_column = foo.columns[0] 51 | lur.columns[1].referenced_column = bar.columns[0] 52 | foo.columns[0].referenced_by = [lur.columns[0]] 53 | bar.columns[0].referenced_by = [lur.columns[1]] 54 | self.lur_foo = {m.ForeignKey('lur', ('foo_id',), 'foo', ('first',))} 55 | self.lur_bar = {m.ForeignKey('lur', ('bar_id',), 'bar', ('thing',))} 56 | 57 | # an index 58 | self.idx = m.Index(id=1, name='myidx', unique=False, table_id=4, 59 | table=lur, columns=[lur.columns[0]]) 60 | lur.indexes = [self.idx] 61 | lur.columns[0].indexes = [self.idx] 62 | 63 | def test_init(self): 64 | nt.assert_false(self.db.isempty) 65 | nt.assert_false(self.db.reflecting) 66 | expected = ['foo', 'bar', 'baz', 'lur'] 67 | nt.assert_equal(sorted(expected), sorted(self.db.tablenames())) 68 | nt.assert_equal(self.foo.columns[0], self.foo.column('first')) 69 | 70 | def test_sql_default(self): 71 | expectations = { 72 | ('sometype', True, None): 'NULL', 73 | ('sometype', False, None): '', 74 | ('sometype', False, 'somedefault'): "'somedefault'", 75 | ('INT', False, None): '0', 76 | ('VARCHAR', False, None): "'hello'", 77 | ('DATE', False, None): "current_date", 78 | ('TIME', False, None): "current_time", 79 | ('TIMESTAMP', False, None): "current_timestamp", 80 | } 81 | for (typ, nullable, default), expected in viewitems(expectations): 82 | col = m.Column(id=1, table_id=1, name='first', 83 | primary_key=True, 84 | type=typ, 85 | nullable=nullable, 86 | default_value=default) 87 | nt.assert_equal(expected, m.sql_default(col)) 88 | 89 | def test_columns(self): 90 | cols = itertools.chain(*[t.columns for t in self.tables]) 91 | nt.assert_equal(set(cols), set(self.db.columns)) 92 | 93 | def test_fieldnames(self): 94 | cols = itertools.chain(*[t.columns for t in self.tables]) 95 | fieldnames = [c.name for c in cols] 96 | nt.assert_equal(sorted(fieldnames), sorted(self.db.fieldnames())) 97 | 98 | lur_fields = ['bar_id', 'foo_id'] 99 | nt.assert_equal(lur_fields, 
sorted(self.db.fieldnames('lur'))) 100 | 101 | lur_dfields = ['lur.bar_id', 'lur.foo_id'] 102 | nt.assert_equal(lur_dfields, 103 | sorted(self.db.fieldnames('lur', dotted=True))) 104 | 105 | nt.assert_equal(set(), self.db.fieldnames('asfd')) 106 | 107 | def test_get_joins(self): 108 | 109 | nt.assert_equal(self.lur_foo, self.db.get_joins('lur', 'foo')) 110 | nt.assert_equal(self.lur_bar, self.db.get_joins('lur', 'bar')) 111 | nt.assert_equal(set(), self.db.get_joins('foo', 'bar')) 112 | nt.assert_equal(set(), self.db.get_joins('xxx', 'bar')) 113 | 114 | def test_tables_referencing(self): 115 | nt.assert_equal({'lur'}, self.db.tables_referencing('foo')) 116 | nt.assert_equal({'lur'}, self.db.tables_referencing('bar')) 117 | nt.assert_equal(set(), self.db.tables_referencing('xx')) 118 | 119 | def test_fields_referencing(self): 120 | nt.assert_equal(self.lur_foo, set(self.db.fields_referencing('foo'))) 121 | nt.assert_equal(self.lur_bar, set(self.db.fields_referencing('bar'))) 122 | nt.assert_equal(set(), set(self.db.fields_referencing('baz'))) 123 | 124 | def test_foreign_keys(self): 125 | exp = set() 126 | exp.update(self.lur_foo, self.lur_bar) 127 | nt.assert_equal(exp, set(self.db.foreign_keys('lur'))) 128 | nt.assert_equal(set(), set(self.db.foreign_keys('foo'))) 129 | nt.assert_equal(set(), set(self.db.foreign_keys('not_a_table'))) 130 | 131 | def test_all_joins(self): 132 | exp = set() 133 | exp.update(self.lur_foo, self.lur_bar) 134 | nt.assert_equal(exp, set(self.db.all_joins('lur'))) 135 | nt.assert_equal(self.lur_foo, set(self.db.all_joins('foo'))) 136 | 137 | def test_insert_statement(self): 138 | exp = ("insert into foo (first, second, third) " 139 | "values ('hello', 0, 'bananas')") 140 | nt.assert_equal(exp, self.db.insert_statement('foo')) 141 | 142 | def test_indexes(self): 143 | nt.assert_equal({self.idx}, set(self.db.indexes('lur'))) 144 | nt.assert_equal(set(), set(self.db.indexes('foo'))) 145 | 146 | def test_as_join(self): 147 | fk = 
def setup_ipydb_schema():
    """Create an in-memory ipydb metadata schema and bind a session to it."""
    global ipsession, ipengine
    ipengine = sa.create_engine('sqlite:///:memory:')
    m.Base.metadata.create_all(ipengine)
    metadata.Session.configure(bind=ipengine)
    ipsession = metadata.Session()


def teardown_ipydb_schema():
    """Commit (or roll back) outstanding work, then dispose of the engine."""
    try:
        ipsession.commit()
    except Exception:
        ipsession.rollback()
        # bare ``raise`` preserves the original traceback; ``raise e``
        # (as previously written) re-raises from this frame instead
        raise
    finally:
        ipsession.close()
        ipengine.dispose()


def get_user_table():
    """Return a throwaway SQLAlchemy ``user`` table for persistence tests."""
    # named ``md`` so we don't shadow the module-level ``metadata`` import
    md = sa.MetaData()
    user = sa.Table(
        'user', md,
        sa.Column('user_id', sa.Integer, primary_key=True),
        sa.Column('user_name', sa.String(16), nullable=False),
        sa.Column('email_address', sa.String(60)),
        sa.Column('password', sa.String(20), nullable=False)
    )
    return user
column == col2 64 | -------------------------------------------------------------------------------- /tests/test_plugin.py: -------------------------------------------------------------------------------- 1 | from configparser import DuplicateSectionError 2 | import re 3 | from io import BytesIO, StringIO 4 | 5 | from IPython.terminal.interactiveshell import TerminalInteractiveShell 6 | import nose.tools as nt 7 | import mock 8 | 9 | from ipydb import plugin 10 | from ipydb.metadata import model as m 11 | from ipydb.metadata.model import Database 12 | 13 | 14 | class TestSqlPlugin(object): 15 | 16 | def setup(self): 17 | self.pmeta = mock.patch('ipydb.metadata.MetaDataAccessor') 18 | self.md_accessor = self.pmeta.start() 19 | self.mock_db = mock.MagicMock(spec=Database) 20 | self.md_accessor.get_metadata.return_value = self.mock_db 21 | self.pengine = mock.patch('ipydb.plugin.engine') 22 | self.mengine = self.pengine.start() 23 | configs = { 24 | 'con1': { 25 | 'type': 'mysql', 26 | 'username': 'barry', 27 | 'password': 'xyz', 28 | 'host': 'zing', 29 | 'database': 'db' 30 | } 31 | } 32 | self.mengine.getconfigs.return_value = ('con1', configs) 33 | self.mock_db_url = 'mysql://barry:xyz@zing.com/db' 34 | self.mengine.make_connection_url.return_value = self.mock_db_url 35 | self.sa_engine = mock.MagicMock() 36 | self.sa_engine.url.host = 'zing.com' 37 | self.sa_engine.url.database = 'db' 38 | self.mengine.from_url.return_value = self.sa_engine 39 | plugin.SqlPlugin.metadata_accessor = self.md_accessor 40 | self.ipython = mock.MagicMock(spec=TerminalInteractiveShell) 41 | self.ipython.config = None 42 | self.ipython.register_magics = mock.MagicMock() 43 | self.ipython.Completer = mock.MagicMock() 44 | self.ip = plugin.SqlPlugin(shell=self.ipython) 45 | 46 | def setup_run_sql(self, runsetup=False): 47 | if runsetup: 48 | self.setup() 49 | self.ip.engine.begin.return_value = self.ip.engine 50 | self.ip.trans_ctx = self.ip.engine 51 | self.ip.trans_ctx.conn = 
self.ip.engine 52 | s1 = u"update table foo set bar = 1 where baz = 2\n" 53 | s2 = u'delete from spam where eggs = 1\n' 54 | statements = u"{s1}/\n{s2}/\n".format(s1=s1, s2=s2) 55 | sio = StringIO(statements) 56 | mo = mock.mock_open(read_data=statements) 57 | handle = mo.return_value 58 | handle.readline = sio.readline 59 | self.ppatcher = mock.patch('ipydb.plugin.multi_choice_prompt') 60 | self.mock_open = mo 61 | self.s1 = s1 62 | self.s2 = s2 63 | 64 | def test_run_sql_script(self): 65 | self.setup_run_sql() 66 | with mock.patch('ipydb.plugin.open', self.mock_open, create=True): 67 | self.ip.run_sql_script('something', interactive=False) 68 | self.ip.engine.execute.assert_any_call(self.s1) 69 | self.ip.engine.execute.assert_any_call(self.s2) 70 | 71 | def run_sql_check(self, keypress): 72 | self.setup_run_sql() 73 | with mock.patch('ipydb.plugin.open', self.mock_open, create=True), \ 74 | self.ppatcher as prompt: 75 | prompt.return_value = keypress 76 | self.ip.run_sql_script('something', interactive=True) 77 | if keypress in 'qn': 78 | nt.assert_equal(0, self.ip.engine.execute.call_count) 79 | elif keypress in 'ya': 80 | self.ip.engine.execute.assert_any_call(self.s1) 81 | self.ip.engine.execute.assert_any_call(self.s2) 82 | 83 | def test_run_sql_script_interactive(self): 84 | for keypress in 'ynqa': 85 | yield self.run_sql_check, keypress 86 | 87 | def test_rollback(self): 88 | self.ip.connected = False 89 | self.ip.rollback() 90 | nt.assert_is_none(self.ip.trans_ctx) 91 | self.ip.connected = True 92 | mockctx = mock.MagicMock() 93 | self.ip.trans_ctx = mockctx 94 | self.ip.rollback() 95 | mockctx.transaction.rollback.assert_called_once_with() 96 | 97 | def test_get_engine(self): 98 | self.ip.connected = False 99 | e = self.ip.get_engine() 100 | nt.assert_is_none(e) 101 | self.ip.connected = True 102 | e = self.ip.get_engine() 103 | nt.assert_equal(self.sa_engine, e) 104 | 105 | def test_transaction_prompt(self): 106 | self.ip.trans_ctx = mock.MagicMock() 107 
| self.ip.trans_ctx.transaction.is_active = True 108 | nt.assert_equal(' *', self.ip.get_transaction_ps1()) 109 | self.ip.connected = False 110 | nt.assert_equal('', self.ip.get_transaction_ps1()) 111 | 112 | def test_prompt(self): 113 | self.ip.connected = False 114 | nt.assert_equal('', self.ip.get_db_ps1()) 115 | self.ip.connected = True 116 | nt.assert_equal(' con1', self.ip.get_db_ps1()) 117 | nt.assert_equal('', self.ip.get_transaction_ps1()) 118 | self.md_accessor.reflecting.return_value = True 119 | nt.assert_equal(' !', self.ip.get_reflecting_ps1()) 120 | self.ip.connected = False 121 | nt.assert_equal('', self.ip.get_reflecting_ps1()) 122 | self.ip.connect_url(self.mock_db_url) 123 | nt.assert_equal(' zing/db', self.ip.get_db_ps1()) 124 | 125 | def test_execute(self): 126 | self.mock_db.tables = ['foo'] 127 | self.ip.execute('select foo') 128 | self.sa_engine.execute.assert_called_with('select * from foo') 129 | 130 | def test_execute_autotransaction(self): 131 | self.ip.flush_metadata() 132 | self.mock_db.tables = ['foo'] 133 | stmt = 'insert into foo(id) values (1)' 134 | self.ip.execute(stmt) 135 | self.sa_engine.begin.assert_called_with() 136 | self.sa_engine.begin.return_value.conn.execute.assert_called_with(stmt) 137 | self.ip.commit() # XXX: what to assert? 
138 | 139 | def test_save_connection(self): 140 | self.ipython.ask_yes_no = mock.MagicMock(return_value=True) 141 | 142 | def sidey(name, engine, overwrite=False): 143 | if not overwrite: 144 | raise DuplicateSectionError('boom') 145 | self.mengine.save_connection = mock.MagicMock( 146 | side_effect=sidey) 147 | self.ip.save_connection('con1') 148 | self.mengine.save_connection.assert_called_with('con1', self.ip.engine, 149 | overwrite=True) 150 | 151 | @mock.patch('ipydb.plugin.pager') 152 | def test_get_tables(self, pager): 153 | pagerio = BytesIO() 154 | pager.return_value.__enter__.return_value = pagerio 155 | self.ip.connected = False 156 | self.ip.show_tables() 157 | nt.assert_equal(0, pager.call_count) 158 | self.ip.connected = True 159 | self.mock_db.tables = 'foo bar'.split() 160 | self.ip.show_tables() 161 | output = pagerio.getvalue() 162 | nt.assert_in(b'foo', output) 163 | nt.assert_in(b'bar', output) 164 | 165 | @mock.patch('ipydb.plugin.pager') 166 | def test_get_tables_glob(self, pager): 167 | pagerio = BytesIO() 168 | pager.return_value.__enter__.return_value = pagerio 169 | self.mock_db.tables = 'foo bar'.split() 170 | self.ip.show_tables('f*') 171 | output = pagerio.getvalue() 172 | nt.assert_in(b'foo', output) 173 | nt.assert_not_in(b'bar', output) 174 | 175 | def setup_mock_describe_db(self, pager): 176 | self.pagerio = BytesIO() 177 | pager.return_value.__enter__.return_value = self.pagerio 178 | company = m.Table(id=1, name='company') 179 | cols = [ 180 | m.Column(id=1, table_id=1, name='id', primary_key=True, 181 | type="INTEGER", nullable=False, table=company), 182 | m.Column(id=2, table_id=1, name='name', type="INTEGER", 183 | nullable=False, table=company), 184 | ] 185 | company.columns = cols 186 | company.indexes = [ 187 | m.Index(name='someindex', id=1, table_id=1, table=company, 188 | columns=[company.column('name')], unique=True) 189 | ] 190 | customer = m.Table(id=2, name='customer') 191 | columns = [ 192 | m.Column(id=3, 
table_id=2, name='id', primary_key=True, 193 | type="INTEGER", nullable=False, table=customer), 194 | m.Column(id=4, table_id=2, name='name', type="INTEGER", 195 | nullable=False, table=customer), 196 | m.Column(id=5, table_id=2, name='company_id', type="INTEGER", 197 | nullable=True, referenced_column_id=1, 198 | constraint_name='company_id_fk', 199 | referenced_column=company.column('id'), 200 | table=customer) 201 | ] 202 | customer.columns = columns 203 | self.database = m.Database(tables=[company, customer]) 204 | self.md_accessor.get_metadata.return_value = self.database 205 | 206 | @mock.patch('ipydb.plugin.pager') 207 | def test_describe_company(self, pager): 208 | self.setup_mock_describe_db(pager) 209 | self.ip.describe('company') 210 | output = self.pagerio.getvalue() 211 | nt.assert_regexp_matches( 212 | output.decode('utf8'), r'\*id\s+\|\s+INTEGER\s+\|\s+NOT NULL') 213 | nt.assert_regexp_matches( 214 | output.decode('utf8'), r'name\s+\|\s+INTEGER\s+\|\s+NOT NULL') 215 | pkre = re.compile(r'Primary Key \(\*\)\n\-+\s+id', re.M | re.I) 216 | nt.assert_regexp_matches(output.decode('utf8'), pkre) 217 | refs_re = re.compile( 218 | 'References to company\n\-+\s+' 219 | 'customer\(company_id\) references company\(id\)', 220 | re.M | re.I) 221 | nt.assert_regexp_matches(output.decode('utf8'), refs_re) 222 | 223 | @mock.patch('ipydb.plugin.pager') 224 | def test_describe_customer(self, pager): 225 | self.setup_mock_describe_db(pager) 226 | self.ip.describe('customer') 227 | output = self.pagerio.getvalue() 228 | nt.assert_regexp_matches(output.decode('utf8'), r'\*id.*INTEGER.*NOT NULL') 229 | nt.assert_regexp_matches(output.decode('utf8'), r'name.*INTEGER.*NOT NULL') 230 | nt.assert_regexp_matches( 231 | output.decode('utf8'), r'company_id\s+\|\s+INTEGER\s+\|\s+NULL') 232 | pkre = re.compile(r'Primary Key \(\*\)\n\-+\s+id', re.M | re.I) 233 | nt.assert_regexp_matches(output.decode('utf8'), pkre) 234 | fkre = re.compile( 235 | 'Foreign Keys\n\-+\s+' 236 | 
'customer\(company_id\) references company\(id\)', 237 | re.M | re.I) 238 | nt.assert_regexp_matches(output.decode('utf8'), fkre) 239 | 240 | @mock.patch('ipydb.plugin.pager') 241 | def test_get_columns(self, pager): 242 | self.setup_mock_describe_db(pager) 243 | self.ip.show_fields() 244 | output = self.pagerio.getvalue() 245 | nt.assert_regexp_matches( 246 | output.decode('utf8'), 'company_id\s+INTEGER NULL') 247 | 248 | @mock.patch('ipydb.plugin.pager') 249 | def test_show_joins(self, pager): 250 | self.setup_mock_describe_db(pager) 251 | self.ip.show_joins('customer') 252 | output = self.pagerio.getvalue() 253 | expected = (b'customer inner join company on company.id = ' 254 | b'customer.company_id\n') 255 | nt.assert_equal(expected, output) 256 | 257 | @mock.patch('ipydb.plugin.pager') 258 | def test_what_references(self, pager): 259 | self.setup_mock_describe_db(pager) 260 | self.ip.what_references('company') 261 | output = self.pagerio.getvalue() 262 | expected = b'customer(company_id) references company(id)\n' 263 | nt.assert_equal(expected, output) 264 | 265 | @mock.patch('ipydb.plugin.pager') 266 | def test_show_fks(self, pager): 267 | self.setup_mock_describe_db(pager) 268 | self.ip.show_fks('customer') 269 | output = self.pagerio.getvalue() 270 | expected = b'customer(company_id) references company(id)\n' 271 | nt.assert_equal(expected, output) 272 | 273 | @mock.patch('ipydb.plugin.pager') 274 | def test_get_columns_glob(self, pager): 275 | self.setup_mock_describe_db(pager) 276 | self.ip.show_fields('*ustomer.na*') 277 | output = self.pagerio.getvalue() 278 | myre = re.compile(r'customer\n\-+\s+name\s+INTEGER NOT NULL') 279 | nt.assert_regexp_matches(output.decode('utf8'), myre) 280 | 281 | def teardown(self): 282 | self.pmeta.stop() 283 | self.pengine.stop() 284 | --------------------------------------------------------------------------------