├── .github └── ISSUE_TEMPLATE │ └── report-a-bug-or-feature-request.md ├── .gitignore ├── LICENSE ├── README.md ├── build.sla ├── contrib ├── ansible-cmdb.man.1 ├── debian │ ├── DEBIAN │ │ └── control │ ├── changelog │ └── copyright ├── release_Makefile ├── screenshot-detail.png └── screenshot-overview.png ├── docs ├── dev.md ├── faq.md ├── index.md ├── installation.md └── usage.md ├── example ├── ansible.cfg ├── cust_cols.conf ├── dyninv_test.py ├── dyninv_vbox.py ├── generate.sh ├── group_vars │ ├── all │ ├── db │ ├── db.json │ └── db.yml ├── host_vars │ ├── all │ ├── debian.dev.local │ ├── eek.electricmonk.nl │ ├── eek.electricmonk.nl.json │ └── zoltar.electricmonk.nl │ │ └── sysconfig.yml ├── hosts ├── hosts_complicated ├── hostsdir │ ├── hosts │ └── hosts2 ├── html_fancy.html ├── out │ ├── .invalid_file │ ├── app.uat.local │ ├── centos.dev.local │ ├── custfact.test.local │ ├── db01.prod.local │ ├── db02.prod.local │ ├── db03.prod.local │ ├── dead.dev.local │ ├── debian.dev.local │ ├── eek.electricmonk.nl │ ├── facter.test.local │ ├── host5.example.com │ ├── jib.electricmonk.nl │ ├── no_fqdn.err │ ├── openbsd.dev.local │ ├── openvz.debian.local │ ├── sol_host │ ├── win.dev.local │ ├── win2k8r2.local │ └── zoltar.electricmonk.nl ├── out_custom │ └── custfact.test.local ├── out_extend │ └── openbsd.dev.local ├── out_factcache │ └── debian.dev.local ├── ssh.config └── txt_table.txt ├── lib ├── jsonxs.py ├── mako │ ├── __init__.py │ ├── _ast_util.py │ ├── ast.py │ ├── cache.py │ ├── cmd.py │ ├── codegen.py │ ├── compat.py │ ├── exceptions.py │ ├── ext │ │ ├── __init__.py │ │ ├── autohandler.py │ │ ├── babelplugin.py │ │ ├── beaker_cache.py │ │ ├── extract.py │ │ ├── linguaplugin.py │ │ ├── preprocessors.py │ │ ├── pygmentplugin.py │ │ └── turbogears.py │ ├── filters.py │ ├── lexer.py │ ├── lookup.py │ ├── parsetree.py │ ├── pygen.py │ ├── pyparser.py │ ├── runtime.py │ ├── template.py │ └── util.py ├── ushlex.py ├── yaml │ ├── __init__.py │ ├── composer.py │ ├── 
constructor.py │ ├── cyaml.py │ ├── dumper.py │ ├── emitter.py │ ├── error.py │ ├── events.py │ ├── loader.py │ ├── nodes.py │ ├── parser.py │ ├── reader.py │ ├── representer.py │ ├── resolver.py │ ├── scanner.py │ ├── serializer.py │ └── tokens.py └── yaml3 │ ├── __init__.py │ ├── composer.py │ ├── constructor.py │ ├── cyaml.py │ ├── dumper.py │ ├── emitter.py │ ├── error.py │ ├── events.py │ ├── loader.py │ ├── nodes.py │ ├── parser.py │ ├── reader.py │ ├── representer.py │ ├── resolver.py │ ├── scanner.py │ ├── serializer.py │ └── tokens.py ├── mkdocs.yml ├── requirements.txt ├── setup.py ├── src ├── ansible-cmdb ├── ansible-cmdb.py └── ansiblecmdb │ ├── __init__.py │ ├── ansible.py │ ├── data │ ├── VERSION │ ├── static │ │ ├── images │ │ │ ├── sort_asc.png │ │ │ ├── sort_both.png │ │ │ └── sort_desc.png │ │ └── js │ │ │ ├── jquery-1.10.2.min.js │ │ │ ├── jquery.dataTables.css │ │ │ └── jquery.dataTables.js │ └── tpl │ │ ├── csv.tpl │ │ ├── html_fancy.tpl │ │ ├── html_fancy_defs.html │ │ ├── html_fancy_split.py │ │ ├── html_fancy_split_detail.tpl │ │ ├── html_fancy_split_overview.tpl │ │ ├── json.tpl │ │ ├── markdown.tpl │ │ ├── markdown_split.py │ │ ├── markdown_split_detail.tpl │ │ ├── markdown_split_overview.tpl │ │ ├── sql.tpl │ │ └── txt_table.tpl │ ├── ihateyaml.py │ ├── parser.py │ ├── render.py │ └── util.py └── test ├── f_extend ├── extend │ └── debian.dev.local └── out_setup │ └── debian.dev.local ├── f_factcache ├── hosts └── out │ └── debian.dev.local ├── f_hostparse ├── hosts └── out │ └── db.dev.local ├── f_inventory ├── dyninv.py ├── hostsdir │ ├── hosts_db │ └── hosts_dev ├── mixeddir │ ├── config.ini │ ├── dyninv.py │ └── hosts └── out │ └── db.dev.local ├── test.py └── test.sh /.github/ISSUE_TEMPLATE/report-a-bug-or-feature-request.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Report a bug or feature request 3 | about: Report a bug of feature request 4 | 5 | --- 6 | 7 | If you're 
reporting a bug: 8 | 9 | * Don't worry if you're new to reporting bugs or don't know which information 10 | to provide. Just try to follow the guidelines below, and we'll figure it 11 | out. 12 | * Please provide output of `ansible-cmdb --debug` 13 | * Please reduce your facts (just delete some of the `ansible -m setup` output 14 | files) until you're left with a single fact or fact file that still 15 | reproduces the problem. 16 | * If possible, please attach the problematic fact file. 17 | 18 | If you're report a feature request: 19 | 20 | * Please do not file feature requests for column additions. Every user of 21 | ansible-cmdb has different requirements and adding an endless variety of 22 | columns is both unpractical and is quite a burden on the developer of 23 | ansible-cmdb. You can easily add custom columns yourself with the [Custom 24 | Columns](https://ansible-cmdb.readthedocs.io/en/latest/usage/#custom-columns) 25 | feature. Pull requests are welcome if you believe the columns are useful for 26 | a wide audience. 
27 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | /.envrc 2 | /cmdb.html 3 | /README.html 4 | /ansible-cmdb*.deb 5 | /ansible-cmdb*.rpm 6 | /ansible-cmdb*.tar.gz 7 | /ansible-cmdb*.zip 8 | /example/gen_* 9 | /build/ 10 | /dist/ 11 | /*.whl 12 | /src/ansible_cmdb.egg-info/ 13 | /example/cmdb/ 14 | __pycache__ 15 | *.pyc 16 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | Ansible Configuration Management Database 2 | ========================================= 3 | 4 | ![Status: Stable](https://img.shields.io/badge/status-stable-green.svg) 5 | ![Build Status](http://build.electricmonk.nl/job/ansible-cmdb/shield) 6 | ![Activity: Active development](https://img.shields.io/badge/activity-active%20development-green.svg) 7 | ![License: GPLv3](https://img.shields.io/badge/license-GPLv3-blue.svg) 8 | 9 | About 10 | ----- 11 | 12 | Ansible-cmdb takes the output of Ansible's fact gathering and converts it into 13 | a static HTML overview page (and other things) containing system configuration 14 | information. 15 | 16 | It supports multiple types of output (html, csv, sql, etc) and extending 17 | information gathered by Ansible with custom data. For each host it also shows 18 | the groups, host variables, custom variables and machine-local facts. 19 | 20 | ![](https://raw.githubusercontent.com/fboender/ansible-cmdb/master/contrib/screenshot-overview.png) 21 | 22 | ![](https://raw.githubusercontent.com/fboender/ansible-cmdb/master/contrib/screenshot-detail.png) 23 | 24 | [HTML example](https://rawgit.com/fboender/ansible-cmdb/master/example/html_fancy.html) output. 
25 | 26 | 27 | Features 28 | -------- 29 | 30 | (Not all features are supported by all templates) 31 | 32 | * Multiple formats / templates: 33 | * Fancy HTML (`--template html_fancy`), as seen in the screenshots above. 34 | * Fancy HTML Split (`--template html_fancy_split`), with each host's details 35 | in a separate file (for large number of hosts). 36 | * CSV (`--template csv`), the trustworthy and flexible comma-separated format. 37 | * JSON (`--template json`), a dump of all facts in JSON format. 38 | * Markdown (`--template markdown`), useful for copy-pasting into Wiki's and 39 | such. 40 | * Markdown Split (`--template markdown_split`), with each host's details 41 | in a seperate file (for large number of hosts). 42 | * SQL (`--template sql`), for importing host facts into a (My)SQL database. 43 | * Plain Text table (`--template txt_table`), for the console gurus. 44 | * and of course, any custom template you're willing to make. 45 | * Host overview and detailed host information. 46 | * Host and group variables. 47 | * Gathered host facts and manual custom facts. 48 | * Adding and extending facts of existing hosts and manually adding entirely 49 | new hosts. 50 | * Custom columns 51 | 52 | 53 | Getting started 54 | --------------- 55 | 56 | Links to the full documentation can be found below, but here's a rough 57 | indication of how Ansible-cmdb works to give you an idea: 58 | 59 | 1. Install Ansible-cmdb from [source, a release 60 | package](https://github.com/fboender/ansible-cmdb/releases) or through pip: `pip 61 | install ansible-cmdb`. 62 | 63 | 1. Fetch your host's facts through ansible: 64 | 65 | $ mkdir out 66 | $ ansible -m setup --tree out/ all 67 | 68 | 1. Generate the CMDB HTML with Ansible-cmdb: 69 | 70 | $ ansible-cmdb out/ > overview.html 71 | 72 | 1. Open `overview.html` in your browser. 73 | 74 | That's it! Please do read the full documentation on usage, as there are some 75 | caveats to how you can use the generated HTML. 
76 | 77 | Documentation 78 | ------------- 79 | 80 | All documentation can be viewed at [readthedocs.io](http://ansible-cmdb.readthedocs.io/en/latest/). 81 | 82 | * [Full documentation](http://ansible-cmdb.readthedocs.io/en/latest/) 83 | * [Requirements and installation](http://ansible-cmdb.readthedocs.io/en/latest/installation/) 84 | * [Usage](http://ansible-cmdb.readthedocs.io/en/latest/usage/) 85 | * [Contributing and development](http://ansible-cmdb.readthedocs.io/en/latest/dev/) 86 | 87 | 88 | License 89 | ------- 90 | 91 | Ansible-cmdb is licensed under the GPLv3: 92 | 93 | This program is free software: you can redistribute it and/or modify 94 | it under the terms of the GNU General Public License as published by 95 | the Free Software Foundation, either version 3 of the License, or 96 | (at your option) any later version. 97 | 98 | This program is distributed in the hope that it will be useful, 99 | but WITHOUT ANY WARRANTY; without even the implied warranty of 100 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 101 | GNU General Public License for more details. 102 | 103 | You should have received a copy of the GNU General Public License 104 | along with this program. If not, see . 105 | 106 | For the full license, see the LICENSE file. 107 | -------------------------------------------------------------------------------- /build.sla: -------------------------------------------------------------------------------- 1 | # 2 | # This is a script containing functions that are used as build rules. You can 3 | # use the Simple Little Automator (https://github.com/fboender/sla.git) to run 4 | # these rules, or you can run them directly in your shell: 5 | # 6 | # $ bash -c ". 
build.sla && test" 7 | # 8 | 9 | PROG="ansible-cmdb" 10 | 11 | test () { 12 | # Run tests 13 | OLD_PWD="$(pwd)" 14 | cd test && ./test.sh 15 | cd "$OLD_PWD" 16 | example/generate.sh 17 | } 18 | 19 | example () { 20 | # Generate example cmdb 21 | PYTHONPATH=lib src/ansible-cmdb -q -i example/hosts example/out example/out_custom > cmdb.html 22 | } 23 | 24 | doc () { 25 | # Generate documentation 26 | markdown_py README.md > README.html 27 | } 28 | 29 | clean () { 30 | # Remove build artifacts and other trash 31 | rm -rf rel_deb 32 | rm -f README.html 33 | find ./ -name "*.pyc" -delete 34 | find ./ -name "__pycache__" -type d -delete 35 | rm -f example/gen_* 36 | rm -rf example/cmdb/ 37 | rm -rf build/ 38 | rm -rf dist/ 39 | rm -rf src/ansible_cmdb.egg-info/ 40 | } 41 | 42 | _release_check() { 43 | # Verify and prepare for release 44 | 45 | # Only run this rule once 46 | if [ -z "$RELEASE_CHECK_DONE" ]; then 47 | RELEASE_CHECK_DONE=1 48 | 49 | # Prepare project for release 50 | clean 51 | doc 52 | mkdir dist 53 | 54 | # Check that REL_VERSION is set 55 | if [ ! -z "$1" ]; then 56 | REL_VERSION="$1" 57 | shift 58 | else 59 | echo "REL_VERSION not set. Aborting" >&2 60 | exit 1 61 | fi 62 | 63 | echo "$REL_VERSION" > src/ansiblecmdb/data/VERSION 64 | fi 65 | } 66 | 67 | release_src () { 68 | # Create release package (source tar.gz) 69 | _release_check "$*" 70 | 71 | # Cleanup. 
Only on release, since REL_VERSION doesn't exist otherwise 72 | rm -rf $PROG-$REL_VERSION 73 | 74 | # Prepare source 75 | mkdir $PROG-$REL_VERSION 76 | cp -a src/* $PROG-$REL_VERSION/ 77 | cp -r lib/* $PROG-$REL_VERSION/ 78 | cp LICENSE $PROG-$REL_VERSION/ 79 | cp README.md $PROG-$REL_VERSION/ 80 | cp contrib/release_Makefile $PROG-$REL_VERSION/Makefile 81 | cp contrib/ansible-cmdb.man.1 $PROG-$REL_VERSION/ 82 | 83 | # Bump version numbers 84 | find $PROG-$REL_VERSION/ -type f -print0 | xargs -0 sed -i "s/%%MASTER%%/$REL_VERSION/g" 85 | 86 | # Create archives 87 | zip -q -r dist/$PROG-$REL_VERSION.zip $PROG-$REL_VERSION 88 | tar -czf dist/$PROG-$REL_VERSION.tar.gz $PROG-$REL_VERSION 89 | 90 | # Remove source dir 91 | rm -rf $PROG-$REL_VERSION 92 | } 93 | 94 | release_deb () { 95 | # Create release package (debian / ubuntu) 96 | _release_check "$*" 97 | 98 | mkdir -p rel_deb/usr/bin 99 | mkdir -p rel_deb/usr/lib/${PROG} 100 | mkdir -p rel_deb/usr/share/doc/$PROG 101 | mkdir -p rel_deb/usr/share/man/man1 102 | 103 | # Copy the source to the release directory structure. 
104 | cp README.md rel_deb/usr/share/doc/$PROG/ 105 | cp README.html rel_deb/usr/share/doc/$PROG/ 106 | cp -r src/* rel_deb/usr/lib/${PROG}/ 107 | cp -r lib/* rel_deb/usr/lib/${PROG}/ 108 | ln -s ../lib/$PROG/ansible-cmdb rel_deb/usr/bin/ansible-cmdb 109 | cp -a contrib/debian/DEBIAN rel_deb/ 110 | cp contrib/debian/copyright rel_deb/usr/share/doc/$PROG/ 111 | cp contrib/debian/changelog rel_deb/usr/share/doc/$PROG/ 112 | gzip -9 rel_deb/usr/share/doc/$PROG/changelog 113 | cp -a contrib/ansible-cmdb.man.1 rel_deb/usr/share/man/man1/ansible-cmdb.1 114 | gzip -9 rel_deb/usr/share/man/man1/ansible-cmdb.1 115 | 116 | # Bump version numbers 117 | find rel_deb/ -type f -print0 | xargs -0 sed -i "s/%%MASTER%%/$REL_VERSION/g" 118 | 119 | # Create debian pacakge 120 | fakeroot dpkg-deb --build rel_deb > /dev/null 121 | mv rel_deb.deb dist/$PROG-$REL_VERSION.deb 122 | 123 | # Cleanup 124 | rm -rf rel_deb 125 | rm -rf $PROG-$REL_VERSION 126 | } 127 | 128 | release_wheel () { 129 | # Create release package (wheel) 130 | _release_check "$*" 131 | 132 | python setup.py -q bdist_wheel --universal 133 | rm -rf build 134 | echo `git rev-parse --abbrev-ref HEAD | tr "[:lower:]" "[:upper:]"` > src/ansiblecmdb/data/VERSION 135 | } 136 | 137 | release () { 138 | # Create release packages 139 | release_src "$*" 140 | release_deb "$*" 141 | release_wheel "$*" 142 | } 143 | 144 | pypi_upload () { 145 | # Upload new release to PyPi 146 | _release_check "$*" 147 | python setup.py sdist upload 148 | } 149 | 150 | install () { 151 | # Install ansible-cmdb 152 | PREFIX=${PREFIX:-/usr/local} 153 | umask 0022 && mkdir -p $PREFIX/lib/$PROG 154 | umask 0022 && mkdir -p $PREFIX/man/man1 155 | umask 0022 && cp -a src/* $PREFIX/lib/$PROG 156 | umask 0022 && cp -r lib/* $PREFIX/lib/$PROG 157 | umask 0022 && cp LICENSE $PREFIX/lib/$PROG 158 | umask 0022 && cp README.md $PREFIX/lib/$PROG 159 | umask 0022 && gzip -9 -c contrib/ansible-cmdb.man.1 > $PREFIX/man/man1/ansible-cmdb.man.1.gz 160 | if [ -f 
"$PREFIX/bin/ansible-cmdb" ]; then 161 | rm "$PREFIX/bin/ansible-cmdb" 162 | fi 163 | umask 0022 && ln -s $PREFIX/lib/ansible-cmdb/ansible-cmdb $PREFIX/bin/ansible-cmdb 164 | echo "Installed in $PREFIX/" 165 | } 166 | 167 | uninstall () { 168 | # Uninstall ansible-cmdb 169 | PREFIX=${PREFIX:-/usr/local} 170 | rm -rf $PREFIX/lib/$PROG 171 | rm -rf $PREFIX/man/man/ansible-cmdb* 172 | rm -rf $PREFIX/bin/ansible-cmdb 173 | } 174 | -------------------------------------------------------------------------------- /contrib/debian/DEBIAN/control: -------------------------------------------------------------------------------- 1 | Package: ansible-cmdb 2 | Version: %%MASTER%% 3 | Maintainer: Ferry Boender 4 | Section: utils 5 | Priority: optional 6 | Architecture: all 7 | Description: Generate host overview from Ansible 'setup' module output 8 | ansible-cmdb takes the output of Ansible's 'setup' module output (fact 9 | gathering) and transforms it into an host information overview. It supports 10 | supplementing gathered information with your own information. It can also scan 11 | the ansible 'hosts' file to get groups and variables to be included in the 12 | host information overview. 
13 | -------------------------------------------------------------------------------- /contrib/debian/changelog: -------------------------------------------------------------------------------- 1 | ansible-cmdb (%%VERSION%%); urgency=low 2 | 3 | * Please see the CHANGELOG file in the doc directory 4 | 5 | -- Ferry Boender Sat, 22 Mar 2014 19:58:21 -0700 6 | 7 | 8 | -------------------------------------------------------------------------------- /contrib/debian/copyright: -------------------------------------------------------------------------------- 1 | Format: http://www.debian.org/doc/packaging-manuals/copyright-format/1.0/ 2 | Upstream-Name: ansible-cmdb 3 | Source: https://github.com/fboender/ansible-cmdb 4 | 5 | Files: * 6 | Copyright: 2015 Ferry Boender 7 | License: GPL-3.0+ 8 | 9 | License: GPL-3.0+ 10 | This program is free software: you can redistribute it and/or modify 11 | it under the terms of the GNU General Public License as published by 12 | the Free Software Foundation, either version 3 of the License, or 13 | (at your option) any later version. 14 | . 15 | This package is distributed in the hope that it will be useful, 16 | but WITHOUT ANY WARRANTY; without even the implied warranty of 17 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 18 | GNU General Public License for more details. 19 | . 20 | You should have received a copy of the GNU General Public License 21 | along with this program. If not, see . 22 | . 23 | On Debian systems, the complete text of the GNU General 24 | Public License version 3 can be found in "/usr/share/common-licenses/GPL-3". 
25 | -------------------------------------------------------------------------------- /contrib/release_Makefile: -------------------------------------------------------------------------------- 1 | .PHONY: doc 2 | PROG=ansible-cmdb 3 | 4 | fake: 5 | # NOOP 6 | 7 | install: 8 | umask 0022 && mkdir -p /usr/local/lib/${PROG} 9 | umask 0022 && mkdir -p /usr/local/man/man1 10 | umask 0022 && cp -a * /usr/local/lib/${PROG}/ 11 | cp -a ansible-cmdb.man.1 /usr/local/man/man1/ansible-cmdb.1 12 | ln -s /usr/local/lib/${PROG}/ansible-cmdb /usr/local/bin/ansible-cmdb 13 | if command -v mandb >/dev/null; then mandb -p -q; fi 14 | 15 | uninstall: 16 | rm -rf /usr/local/lib/${PROG} 17 | rm -rf /usr/local/bin/ansible-cmdb 18 | rm -rf /usr/local/share/man/man1/ansible-cmdb.* 19 | -------------------------------------------------------------------------------- /contrib/screenshot-detail.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fboender/ansible-cmdb/3f3e412d2a7be91c97c5a1842f4e57cc85b06961/contrib/screenshot-detail.png -------------------------------------------------------------------------------- /contrib/screenshot-overview.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fboender/ansible-cmdb/3f3e412d2a7be91c97c5a1842f4e57cc85b06961/contrib/screenshot-overview.png -------------------------------------------------------------------------------- /docs/dev.md: -------------------------------------------------------------------------------- 1 | # Contributions 2 | 3 | If you wish to contribute code, please consider the following: 4 | 5 | * Code should be reasonably PEP8-like. I'm not too strict on this. 6 | * One logical change per merge request. 
7 | * By putting in a merge request or putting code in comments, you automatically 8 | grant me permission to include this code in ansible-cmdb under the license 9 | (GPLv3) that ansible-cmdb uses. The copyright for contributed code is 10 | retained by the contributor. 11 | * Please don't be disappointed or angry if your contributions end up unused. 12 | It's not that they aren't appreciated, but I can be somewhat strict when it 13 | comes to code quality, feature creep, etc. 14 | 15 | When in doubt, just open a pull request and post a comment on what you're 16 | unclear of, and we'll figure it out. 17 | 18 | # Inner workings 19 | 20 | Here's a quick introduction on how ansible-cmdb works internally. 21 | 22 | 1. The main section in `ansible-cmdb` reads the commandline params and 23 | instantiates an `Ansible` object. 24 | 1. The `Ansible` object first reads in all the facts by calling 25 | `Ansible.parse_fact_dir()` for each argument. This includes the user-extended 26 | facts. 27 | 1. If hosts file(s) should be parsed (`-i` option), ansible calls 28 | `Ansible.parse_hosts_inventory()`. This first reads in all found hosts files 29 | into one big string, and then it parses it. For this it uses the 30 | `AnsibleHostParser` class. 31 | 1. The `AnsibleHostParser` class first parses the inventory and then creates a 32 | dictionary with all known ansible node names (hosts) as the keys, but with 33 | empty values. It then goes through the 'children', 'vars' and normal 34 | sections from the inventory and applies the found information to the hosts 35 | dictionary. 36 | 1. When `AnsibleHostParser` is done, the `Ansible` class takes all the parsed 37 | hosts information and updates its own version of the hosts dictionary. 38 | 1. Finally, the output is generated by the main section. 39 | 40 | Updating a host in the `Ansible` object is done using the `Ansible.update_host` 41 | method. This method does a deep-update of a dictionary. 
This lets ansible-cmdb 42 | overlay information from the facts dir, extended / manual facts and hosts 43 | inventory files. 44 | 45 | # Running from the git repo 46 | 47 | If you want to run ansible-cmdb directly from the Git repo: 48 | 49 | $ cd ansible-cmdb 50 | $ export PYTHONPATH="$(readlink -f lib)" 51 | $ src/ansible-cmdb 52 | 53 | # Building 54 | 55 | ## Build system 56 | 57 | Ansible-cmdb uses [sla (the Simple Little 58 | Automator)](https://github.com/fboender/sla) to do builds and run tests. 59 | 60 | You don't need to have `sla` installed. You can run rules directly in your 61 | shell. For example, to run the `test` rule: 62 | 63 | $ . build.sla && test 64 | 65 | Sla makes everything much easier though. 66 | 67 | ## Build targets 68 | 69 | The following build targets are available: 70 | 71 | - `test`: Run tests 72 | - `example`: Generate example cmdb 73 | - `doc`: Generate documentation 74 | - `clean`: Remove build artifacts and other trash 75 | - `release_src`: Create release package (source tar.gz) 76 | - `release_deb`: Create release package (debian / ubuntu) 77 | - `release_wheel`: Create release package (wheel) 78 | - `release`: Create release packages 79 | - `install`: Install ansible-cmdb 80 | - `uninstall`: Uninstall ansible-cmdb 81 | 82 | ## Build packages and source-ball 83 | 84 | To build Debian, RedHat and source-packages for ansible-cmdb you'll need a 85 | Debian based operating system and you'll have to install the following 86 | dependencies: 87 | 88 | - git 89 | - make 90 | - python-markdown 91 | - zip 92 | - fakeroot 93 | - alien 94 | - Python 'wheel' package 95 | 96 | You can then build the packages with 97 | 98 | sla release 99 | 100 | where `VERSION` is a (arbitrary) version number. 101 | 102 | In order to build releases, your repository will have to be completely clean: 103 | everything must be commited and there must be no untracked files. 
If you want 104 | to build a test release, you can temporary stash your untracked changes: 105 | 106 | git stash -u 107 | 108 | # Testing 109 | 110 | Testing releases: 111 | 112 | Running from github source: 113 | 114 | $ cd ansible-cmdb 115 | $ export PYTHONPATH=lib 116 | $ src/ansible-cmdb examples/out > ~/cmdb.html 117 | $ src/ansible-cmdb -i examples/hosts examples/out > ~/cmdb.html 118 | $ src/ansible-cmdb -p local_js=1 -i examples/hosts examples/out > ~/cmdb.html 119 | 120 | Installing from github source: 121 | 122 | $ cd ansible-cmdb 123 | $ sudo python ./setup.py install 124 | $ ansible-cmdb ansible-cmdb/examples/out > ~/cmdb.html 125 | $ ansible-cmdb -i ansible-cmdb/examples/hosts ansible-cmdb/examples/out > ~/cmdb.html 126 | $ ansible-cmdb -p local_js=1 -i ansible-cmdb/examples/hosts ansible-cmdb/examples/out > ~/cmdb.html 127 | $ sudo pip uninstall mako 128 | $ sudo pip uninstall pyyaml 129 | $ sudo pip uninstall ansible-cmdb 130 | $ sudo rm /usr/local/bin/ansible-cmdb 131 | 132 | Installing from source tarbal: 133 | 134 | $ tar -vxzf ansible-cmdb-*.tar.gz 135 | $ cd ansible-cmdb-* 136 | $ sudo make install 137 | $ ansible-cmdb out > ~/cmdb.html 138 | $ ansible-cmdb -i hosts out > ~/cmdb.html 139 | $ ansible-cmdb -p local_js=1 -i hosts out > ~/cmdb.html 140 | $ cd ansible-cmdb-* 141 | $ sudo make uninstall 142 | 143 | Installing from .deb file: 144 | 145 | $ dpkg -i -i ansible-cmdb-*.deb 146 | $ ansible-cmdb out > ~/cmdb.html 147 | $ ansible-cmdb -i hosts out > ~/cmdb.html 148 | $ ansible-cmdb -p local_js=1 -i hosts out > ~/cmdb.html 149 | $ sudo dpkg --purge ansible-cmdb 150 | 151 | Installing from .rpm file: 152 | 153 | $ sudo yum install ansible-cmdb*.rpm 154 | $ ansible-cmdb out > ~/cmdb.html 155 | $ ansible-cmdb -i hosts out > ~/cmdb.html 156 | $ ansible-cmdb -p local_js=1 -i hosts out > ~/cmdb.html 157 | $ sudo yum uninstall ansible-cmdb 158 | 159 | 160 | # License 161 | 162 | Ansible-cmdb is licensed under the GPLv3: 163 | 164 | This program is 
free software: you can redistribute it and/or modify 165 | it under the terms of the GNU General Public License as published by 166 | the Free Software Foundation, either version 3 of the License, or 167 | (at your option) any later version. 168 | 169 | This program is distributed in the hope that it will be useful, 170 | but WITHOUT ANY WARRANTY; without even the implied warranty of 171 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 172 | GNU General Public License for more details. 173 | 174 | You should have received a copy of the GNU General Public License 175 | along with this program. If not, see . 176 | 177 | For the full license, see the LICENSE file. 178 | 179 | # History 180 | 181 | Ansible-cmdb started as a short Python script, which I [blogged 182 | about](http://www.electricmonk.nl/log/2015/01/21/host-inventory-overview-using-ansibles-facts/). 183 | 184 | [Cris van Pelt](https://melkfl.es/) then took that and expanded it into a HTML 185 | page. Eventually I forked it to Github and made it public, adding features. 186 | [Many people collaborated](https://github.com/fboender/ansible-cmdb/graphs/contributors) to 187 | make Ansible-cmdb into what it is today. 188 | -------------------------------------------------------------------------------- /docs/faq.md: -------------------------------------------------------------------------------- 1 | ## Solaris machines have no disk information 2 | 3 | Ansible currently does not include disk size information for Solaris hosts. As 4 | such, we can't include it in the output of Ansible-cmdb. See issue #24 for more 5 | information. 6 | 7 | ## The output HTML file doesn't work on other computers. 8 | 9 | When you transfer the output HTML file of ansible-cmdb and try to open it in 10 | the browser on another computer, you'll find that it doesn't work properly. 
11 | 12 | This is because HTML files opened on a local computer (those that start with a 13 | `file://` url) are not allowed to fetch the required Javascript files from the 14 | internet (urls that start with `http://` and `https://`). For this reason, 15 | Ansible-cmdb installs those required files when you install ansible-cmdb. 16 | Naturally, another PC won't have those files locally available. 17 | 18 | The solution is to generate the output with the `-p local_js=0` parameter and 19 | host the resulting HTML file(s) on a webserver somewhere. 20 | -------------------------------------------------------------------------------- /docs/index.md: -------------------------------------------------------------------------------- 1 | ## About 2 | 3 | [Ansible-cmdb](https://github.com/fboender/ansible-cmdb) takes the output of 4 | Ansible's [fact 5 | gathering](http://docs.ansible.com/ansible/latest/modules/setup_module.html) 6 | and converts it into a static HTML overview page (and other things) containing 7 | system configuration information. 8 | 9 | It supports multiple types of output (html, csv, sql, etc) and extending 10 | information gathered by Ansible with custom data. For each host it also shows 11 | the groups, host variables, custom variables and machine-local facts. 12 | 13 | ## Example output 14 | 15 | ![](https://raw.githubusercontent.com/fboender/ansible-cmdb/master/contrib/screenshot-overview.png) 16 | 17 | ![](https://raw.githubusercontent.com/fboender/ansible-cmdb/master/contrib/screenshot-detail.png) 18 | 19 | [HTML example](https://rawgit.com/fboender/ansible-cmdb/master/example/html_fancy.html) output. 20 | 21 | ## Output formats 22 | 23 | Supported output formats / templates: 24 | 25 | * Fancy HTML (`--template html_fancy`), as seen in the screenshots above. 26 | * Fancy HTML Split (`--template html_fancy_split`), with each host's details 27 | in a separate file (for large number of hosts). 
28 | * CSV (`--template csv`), the trustworthy and flexible comma-separated format. 29 | * JSON (`--template json`), a dump of all facts in JSON format. 30 | * Markdown (`--template markdown`), useful for copy-pasting into Wiki's and 31 | such. 32 | * Markdown Split ('--template markdown_split'), with each host's details 33 | in a seperate file (for large number of hosts). 34 | * SQL (`--template sql`), for importing host facts into a (My)SQL database. 35 | * Plain Text table (`--template txt_table`), for the console gurus. 36 | * and of course, any custom template you're willing to make. 37 | -------------------------------------------------------------------------------- /docs/installation.md: -------------------------------------------------------------------------------- 1 | ## Requirements 2 | 3 | Ansible-cmdb requires **Python v2.7+ / 3.0+**. 4 | 5 | In theory, it should work on any system that can run Python, including BSD, 6 | Linux, Windows, Solaris and MacOS. In practice, ansible-cmdb is developed on 7 | Ubuntu 16.04 and tested on the latest stable versions of Debian, Ubuntu and 8 | Centos. 9 | 10 | ## Installation 11 | 12 | 13 | Ansible-cmdb can be installed using `pip`, the [Python package 14 | manager](https://pypi.org/project/pip/). There are also stand-alone packages 15 | for various Linux distributions. Alternatively, you can use brew or plain old 16 | `make install`. 17 | 18 | ### Through Pip 19 | 20 | For **installation via Pip**: 21 | 22 | Install `pip` [for your distribution](https://packaging.python.org/install_requirements_linux/) 23 | if you don't have it yet. 
24 | 25 | Install Ansible-cmdb through Pip: 26 | 27 | sudo pip install ansible-cmdb 28 | 29 | You can also upgrade Ansible-cmdb through Pip: 30 | 31 | sudo pip install --upgrade ansible-cmdb 32 | 33 | ### Through distribution packages 34 | 35 | Get the package for your distribution from the [Releases 36 | page](https://github.com/fboender/ansible-cmdb/releases) (Not required for 37 | MacOS X install) 38 | 39 | For **Debian / Ubuntu** systems: 40 | 41 | sudo dpkg -i ansible-cmdb*.deb 42 | 43 | Support for all other package managers (RPM, etc) has been dropped. Please use 44 | the `pip` method instead, or install from tar.gz. 45 | 46 | ### For other systems 47 | 48 | For **MacOS X** systems: 49 | 50 | brew install ansible-cmdb 51 | 52 | For **Other** systems: 53 | 54 | tar -vxzf ansible-cmdb*.tar.gz 55 | cd ansible-cmdb* 56 | sudo make install 57 | 58 | Installation from **Git** repository: 59 | 60 | git clone https://github.com/fboender/ansible-cmdb.git 61 | cd ansible-cmdb 62 | sudo bash -c ". build.sla && install" 63 | -------------------------------------------------------------------------------- /example/ansible.cfg: -------------------------------------------------------------------------------- 1 | [defaults] 2 | hostfile=hosts 3 | remote_user=fboender 4 | forks=20 5 | 6 | [ssh_connection] 7 | ssh_args = -F ssh.config -q 8 | -------------------------------------------------------------------------------- /example/cust_cols.conf: -------------------------------------------------------------------------------- 1 | [ 2 | # Show whether AppArmor is enabled 3 | { 4 | "title": "AppArmor", 5 | "id": "apparmor", 6 | "sType": "string", 7 | "visible": False, 8 | "jsonxs": "ansible_facts.ansible_apparmor.status" 9 | }, 10 | # Show the nameservers configured on the host 11 | { 12 | "title": "Nameservers", 13 | "id": "nameservers", 14 | "sType": "string", 15 | "visible": True, 16 | "tpl": """ 17 |
    18 | <% 19 | # Get ansible_facts.ansible_dns.nameservers 20 | facts = host.get('ansible_facts', {}) 21 | dns = facts.get('ansible_dns', {}) 22 | nameservers = dns.get('nameservers', []) 23 | %> 24 | % for nameserver in nameservers: 25 |
  • ${nameserver}
  • 26 | % endfor 27 |
28 | """ 29 | }, 30 | # Show the nameservers configured on the host, but use jsonxs. 31 | { 32 | "title": "Nameservers2", 33 | "id": "nameservers2", 34 | "sType": "string", 35 | "visible": True, 36 | "tpl": """ 37 |
    38 | <% 39 | # Get ansible_facts.ansible_dns.nameservers using jsonxs 40 | nameservers = jsonxs(host, 'ansible_facts.ansible_dns.nameservers', default=[]) 41 | %> 42 | % for nameserver in nameservers: 43 |
  • ${nameserver}
  • 44 | % endfor 45 |
46 | """ 47 | 48 | } 49 | ] 50 | -------------------------------------------------------------------------------- /example/dyninv_test.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | 3 | import json 4 | 5 | inventory = { 6 | "dyninv_group_dev" : { 7 | "hosts" : [ "debian.dev.local" ] 8 | }, 9 | "dyninv_group_test" : { 10 | "hosts" : [ "facter.test.local", "custfact.test.local" ], 11 | "vars" : { 12 | "net_config" : { 13 | "eth0" : { 14 | "bootproto" : "dhcp", 15 | "onboot" : "yes", 16 | "nozeroconf" : "yes", 17 | "persistent_dhclient" : "yes", 18 | "nm_controlled" : "no" 19 | } 20 | } 21 | } 22 | } 23 | } 24 | 25 | print json.dumps(inventory, indent=2) 26 | -------------------------------------------------------------------------------- /example/dyninv_vbox.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | # This file is part of Ansible, 4 | # 5 | # Ansible is free software: you can redistribute it and/or modify 6 | # it under the terms of the GNU General Public License as published by 7 | # the Free Software Foundation, either version 3 of the License, or 8 | # (at your option) any later version. 9 | # 10 | # Ansible is distributed in the hope that it will be useful, 11 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 12 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 13 | # GNU General Public License for more details. 14 | # 15 | # You should have received a copy of the GNU General Public License 16 | # along with Ansible. If not, see . 
# Dynamic inventory script that lists VirtualBox VMs via `VBoxManage`.
# Originally part of Ansible (GPLv3+); see the license header above.

import sys
from subprocess import Popen, PIPE

try:
    import json
except ImportError:
    # Very old Python without a stdlib json module.
    import simplejson as json


class SetEncoder(json.JSONEncoder):
    """JSON encoder that serializes `set` objects as JSON lists."""

    def default(self, obj):
        if isinstance(obj, set):
            return list(obj)
        return json.JSONEncoder.default(self, obj)


VBOX = "VBoxManage"


def get_hosts(host=None):
    """
    Query VirtualBox through `VBoxManage` and build inventory data.

    When `host` is given, return a dict with that single host's variables.
    Otherwise return the full inventory: group name -> set of VM names,
    plus an '_metadata' key holding per-host variables.

    Exits with status 1 when `VBoxManage` cannot be executed.
    """
    returned = {}
    try:
        if host:
            # universal_newlines=True makes stdout text (str) on Python 3;
            # without it readlines() yields bytes and split(':') below fails.
            p = Popen([VBOX, 'showvminfo', host], stdout=PIPE,
                      universal_newlines=True)
        else:
            returned = {'all': set(), '_metadata': {}}
            p = Popen([VBOX, 'list', '-l', 'vms'], stdout=PIPE,
                      universal_newlines=True)
    except OSError:
        # VBoxManage missing or not executable. The original used a bare
        # `except:`, which also swallowed KeyboardInterrupt/SystemExit.
        sys.exit(1)

    hostvars = {}
    prevkey = pref_k = ''
    curname = None  # name of the VM currently being parsed

    for line in p.stdout.readlines():
        try:
            k, v = line.split(':', 1)
        except ValueError:
            # Not a "key: value" line; ignore it.
            continue

        if k == '':
            continue

        v = v.strip()
        if k.startswith('Name'):
            if v not in hostvars:
                curname = v
                hostvars[curname] = {}
                try:
                    # Best effort: ask the guest agent for its IPv4 address.
                    x = Popen([VBOX, 'guestproperty', 'get', curname,
                               "/VirtualBox/GuestInfo/Net/0/V4/IP"],
                              stdout=PIPE, universal_newlines=True)
                    ipinfo = x.stdout.read()
                    if 'Value' in ipinfo:
                        a, ip = ipinfo.split(':', 1)
                        hostvars[curname]['ansible_ssh_host'] = ip.strip()
                except (OSError, ValueError):
                    # Guest property lookup is optional; skip on failure.
                    pass

            continue

        if not host:
            if k == 'Groups':
                for group in v.split('/'):
                    if group:
                        if group not in returned:
                            returned[group] = set()
                        returned[group].add(curname)
                returned['all'].add(curname)
                continue

        pref_k = 'vbox_' + k.strip().replace(' ', '_')
        if k.startswith(' '):
            # Indented keys are sub-values of the previous top-level key.
            if prevkey not in hostvars[curname]:
                hostvars[curname][prevkey] = {}
            hostvars[curname][prevkey][pref_k] = v
        else:
            if v != '':
                hostvars[curname][pref_k] = v

        prevkey = pref_k

    if not host:
        returned['_metadata']['hostvars'] = hostvars
    else:
        returned = hostvars[host]
    return returned


if __name__ == '__main__':

    inventory = {}
    hostname = None

    # Ansible calls dynamic inventories with either --list or --host <name>.
    if len(sys.argv) > 1:
        if sys.argv[1] == "--host":
            hostname = sys.argv[2]

    if hostname:
        inventory = get_hosts(hostname)
    else:
        inventory = get_hosts()

    sys.stdout.write(json.dumps(inventory, indent=2, cls=SetEncoder))
gen_sql_3.md
python3 ../src/ansible-cmdb.py -q -t html_fancy_split -i hosts out
python3 ../src/ansible-cmdb.py -q -i hosts -f out_factcache > gen_fact_cache_3.html
# Bug fix: this command previously redirected to gen_fact_cust_cols_2.html,
# silently clobbering the Python 2 output generated earlier in this script.
# The Python 3 run writes to its own _3 file, matching every other py3 command.
python3 ../src/ansible-cmdb.py -q -i hosts -C cust_cols.conf out > gen_fact_cust_cols_3.html
/example/host_vars/eek.electricmonk.nl: -------------------------------------------------------------------------------- 1 | host_var_test: Hello 2 | data_dir: /opt 3 | fact_with_int_keys: { 4 | 1: "hoi", 5 | 2: 1 6 | } 7 | password: !vault | 8 | $ANSIBLE_VAULT;1.1;AES256 9 | 35366338653734663863626465336664363333383433326233343339356231363064346366656232 10 | 6163653539393537653532306464313738633330363561390a363530636531346338343338383962 11 | 62626663376161393639393962653862363931356431376635613034616534363266633665363436 12 | 6364616662346362370a373337316636396438623438383965636164633138386435626333643566 13 | 6439 14 | 15 | -------------------------------------------------------------------------------- /example/host_vars/eek.electricmonk.nl.json: -------------------------------------------------------------------------------- 1 | { 2 | "host_var_test2": "from host_vars/eek.electricmonk.nl.json" 3 | } 4 | -------------------------------------------------------------------------------- /example/host_vars/zoltar.electricmonk.nl/sysconfig.yml: -------------------------------------------------------------------------------- 1 | fail2ban_whitelist_ips: 2 | - 10.0.0.1/8 3 | -------------------------------------------------------------------------------- /example/hosts: -------------------------------------------------------------------------------- 1 | [dev] 2 | debian.dev.local dtap=dev 3 | centos.dev.local dtap=dev 4 | 5 | [db] 6 | db[01:03].prod.local dtap=prod 7 | 8 | [prod] 9 | eek.electricmonk.nl 10 | zoltar.electricmonk.nl comment="Trés Load balancer" 11 | jib.electricmonk.nl comment="Workstation" 12 | 13 | [prod:children] 14 | db 15 | 16 | [prod:vars] 17 | dtap=prod 18 | # This appears to be allowed, but is undocumented 19 | json_like_vars=[{'name': 'htpasswd_auth', 'login': 'true', 'challenge': 'true', 'kind': 'HTPasswdPasswordIdentityProvider', 'filename': '/etc/origin/master/htpasswd'}] 20 | 21 | [virtual] 22 | debian.dev.local 23 | centos.dev.local 24 | 
25 | [uat] 26 | app.uat.local dtap=uat 27 | 28 | [solaris] 29 | sol_host 30 | 31 | [openbsd] 32 | openbsd.dev.local 33 | 34 | [windows] 35 | win2k8r2.local 36 | win.dev.local 37 | 38 | [openvz] 39 | openvz.debian.local 40 | 41 | [test] 42 | facter.test.local ansible_host=192.168.56.7 43 | custfact.test.local ansible_host=192.168.56.7 abc: [1, 2, 3] 44 | onlyinhosts.test.local 45 | çyrillic.test.local 46 | -------------------------------------------------------------------------------- /example/hosts_complicated: -------------------------------------------------------------------------------- 1 | centos.dev.local dtap=dev 2 | 3 | [dev] 4 | debian.dev.local dtap=dev 5 | 6 | [db] 7 | db[01:03].prod.local dtap=prod 8 | 9 | [prod] 10 | eek.electricmonk.nl 11 | zoltar.electricmonk.nl comment="Load balancer" 12 | jib.electricmonk.nl comment="Workstation" foo="baz" 13 | 14 | [prod:children] 15 | db 16 | 17 | [prod:vars] 18 | dtap=prod 19 | from='vars' 20 | 21 | [virtual] 22 | debian.dev.local 23 | centos.dev.local 24 | 25 | [virtual:vars] 26 | type="virtual" 27 | 28 | [uat] 29 | app.uat.local dtap=uat 30 | 31 | [supersuper] 32 | some.super.host 33 | 34 | [supersuper:children] 35 | super foo="bar" 36 | 37 | [supersuper:vars] 38 | test="heyo" 39 | 40 | [super:children] 41 | prod 42 | 43 | -------------------------------------------------------------------------------- /example/hostsdir/hosts: -------------------------------------------------------------------------------- 1 | [dev] 2 | debian.dev.local dtap=dev 3 | centos.dev.local dtap=dev 4 | 5 | [db] 6 | db[01:03].prod.local dtap=prod 7 | 8 | [prod] 9 | eek.electricmonk.nl 10 | zoltar.electricmonk.nl comment="Load balancer" 11 | jib.electricmonk.nl comment="Workstation" 12 | 13 | -------------------------------------------------------------------------------- /example/hostsdir/hosts2: -------------------------------------------------------------------------------- 1 | [prod:children] 2 | db 3 | 4 | [prod:vars] 5 | 
dtap=prod 6 | 7 | [virtual] 8 | debian.dev.local 9 | centos.dev.local 10 | 11 | [uat] 12 | app.uat.local dtap=uat 13 | -------------------------------------------------------------------------------- /example/out/.invalid_file: -------------------------------------------------------------------------------- 1 | this is invalid! 2 | -------------------------------------------------------------------------------- /example/out/dead.dev.local: -------------------------------------------------------------------------------- 1 | {"changed": false, "msg": "Failed to connect to the host via ssh: ", "unreachable": true} 2 | -------------------------------------------------------------------------------- /example/out/openvz.debian.local: -------------------------------------------------------------------------------- 1 | 2 | 3 | { 4 | "ansible_facts": { 5 | "ansible_all_ipv4_addresses": [ 6 | "10.0.4.8" 7 | ], 8 | "ansible_all_ipv6_addresses": [ 9 | 10 | ], 11 | "ansible_architecture": "x86_64", 12 | "ansible_bios_date": "NA", 13 | "ansible_bios_version": "NA", 14 | "ansible_cmdline": { 15 | "quiet": true 16 | }, 17 | "ansible_date_time": { 18 | "date": "2016-02-01", 19 | "day": "01", 20 | "epoch": "1454362393", 21 | "hour": "15", 22 | "iso8601": "2016-02-01T21:33:13Z", 23 | "iso8601_basic": "20160201T153313941831", 24 | "iso8601_basic_short": "20160201T153313", 25 | "iso8601_micro": "2016-02-01T21:33:13.941910Z", 26 | "minute": "33", 27 | "month": "02", 28 | "second": "13", 29 | "time": "15:33:13", 30 | "tz": "CST", 31 | "tz_offset": "-0600", 32 | "weekday": "Monday", 33 | "weekday_number": "1", 34 | "weeknumber": "05", 35 | "year": "2016" 36 | }, 37 | "ansible_default_ipv4": { 38 | "address": "10.0.4.8", 39 | "alias": "venet0:0", 40 | "broadcast": "10.0.4.8", 41 | "interface": "venet0", 42 | "macaddress": "", 43 | "mtu": 1500, 44 | "netmask": "255.255.255.255", 45 | "network": "10.0.4.8", 46 | "type": "unknown" 47 | }, 48 | "ansible_default_ipv6": { 49 | "address": "::1", 
50 | "interface": "venet0", 51 | "macaddress": "00:00:00:00:00:00", 52 | "mtu": 16436, 53 | "prefix": "128", 54 | "scope": "host", 55 | "type": "loopback" 56 | }, 57 | "ansible_devices": { 58 | 59 | }, 60 | "ansible_distribution": "Debian", 61 | "ansible_distribution_major_version": "7", 62 | "ansible_distribution_release": "wheezy", 63 | "ansible_distribution_version": "7.8", 64 | "ansible_dns": { 65 | "nameservers": [ 66 | "8.8.8.8" 67 | ], 68 | "search": [ 69 | "local" 70 | ] 71 | }, 72 | "ansible_domain": "openvz.debian.local", 73 | "ansible_env": { 74 | "HOME": "\/root", 75 | "LANG": "en_US.UTF-8", 76 | "LANGUAGE": "en_US.UTF-8", 77 | "LC_ALL": "en_US.UTF-8", 78 | "LC_MESSAGES": "en_US.UTF-8", 79 | "LOGNAME": "root", 80 | "MAIL": "\/var\/mail\/root", 81 | "PATH": "\/usr\/local\/sbin:\/usr\/local\/bin:\/usr\/sbin:\/usr\/bin:\/sbin:\/bin:\/usr\/bin\/X11", 82 | "PWD": "\/root", 83 | "SHELL": "\/bin\/bash", 84 | "SHLVL": "1", 85 | "SSH_CLIENT": "10.0.0.1 42562 22", 86 | "SSH_CONNECTION": "10.0.0.1 12432 10.0.4.8 22", 87 | "USER": "root", 88 | "XDG_SESSION_COOKIE": "3940bc301a95021r04cb01ef0350981204a908e098b0c020821e023195ae", 89 | "_": "\/bin\/sh" 90 | }, 91 | "ansible_fips": false, 92 | "ansible_form_factor": "NA", 93 | "ansible_fqdn": "openvz.debian.local", 94 | "ansible_hostname": "openvz", 95 | "ansible_interfaces": [ 96 | "lo", 97 | "venet0_0", 98 | "venet0" 99 | ], 100 | "ansible_kernel": "2.6.32-19-pve", 101 | "ansible_lo": { 102 | "active": true, 103 | "device": "lo", 104 | "ipv4": { 105 | "address": "127.0.0.1", 106 | "broadcast": "host", 107 | "netmask": "255.0.0.0", 108 | "network": "127.0.0.0" 109 | }, 110 | "ipv6": [ 111 | { 112 | "address": "::1", 113 | "prefix": "128", 114 | "scope": "host" 115 | } 116 | ], 117 | "mtu": 16436, 118 | "promisc": false, 119 | "type": "loopback" 120 | }, 121 | "ansible_local": { 122 | }, 123 | "ansible_lsb": { 124 | "codename": "wheezy", 125 | "description": "Debian GNU\/Linux 7.8 (wheezy)", 126 | "id": "Debian", 127 | 
"major_release": "7", 128 | "release": "7.8" 129 | }, 130 | "ansible_lvm": { 131 | "lvs": { 132 | 133 | }, 134 | "vgs": { 135 | 136 | } 137 | }, 138 | "ansible_machine": "x86_64", 139 | "ansible_machine_id": "074d624c295e0bcb014e9a70315710f1", 140 | "ansible_memfree_mb": 6840, 141 | "ansible_memory_mb": { 142 | "nocache": { 143 | "free": null, 144 | "used": null 145 | }, 146 | "real": { 147 | "free": 6840, 148 | "total": 8192, 149 | "used": 1352 150 | }, 151 | "swap": { 152 | "cached": null, 153 | "free": 390, 154 | "total": 512, 155 | "used": 122 156 | } 157 | }, 158 | "ansible_memtotal_mb": 8192, 159 | "ansible_mounts": [ 160 | { 161 | "device": "\/dev\/simfs", 162 | "fstype": "simfs", 163 | "mount": "\/", 164 | "options": "rw,relatime", 165 | "size_available": 38382993408, 166 | "size_total": 42949672960, 167 | "uuid": "NA" 168 | } 169 | ], 170 | "ansible_nodename": "test", 171 | "ansible_os_family": "Debian", 172 | "ansible_pkg_mgr": "apt", 173 | "ansible_processor": [ 174 | "GenuineIntel", 175 | "Intel(R) Xeon(R) CPU W3520 @ 2.67GHz", 176 | "GenuineIntel", 177 | "Intel(R) Xeon(R) CPU W3520 @ 2.67GHz", 178 | "GenuineIntel", 179 | "Intel(R) Xeon(R) CPU W3520 @ 2.67GHz", 180 | "GenuineIntel", 181 | "Intel(R) Xeon(R) CPU W3520 @ 2.67GHz" 182 | ], 183 | "ansible_processor_cores": 4, 184 | "ansible_processor_count": 1, 185 | "ansible_processor_threads_per_core": 2, 186 | "ansible_processor_vcpus": 8, 187 | "ansible_product_name": "NA", 188 | "ansible_product_serial": "NA", 189 | "ansible_product_uuid": "NA", 190 | "ansible_product_version": "NA", 191 | "ansible_python_version": "2.7.3", 192 | "ansible_selinux": false, 193 | "ansible_service_mgr": "init", 194 | "ansible_ssh_host_key_dsa_public": "", 195 | "ansible_ssh_host_key_rsa_public": "", 196 | "ansible_swapfree_mb": 390, 197 | "ansible_swaptotal_mb": 512, 198 | "ansible_system": "Linux", 199 | "ansible_system_vendor": "NA", 200 | "ansible_uptime_seconds": 14878160, 201 | "ansible_user_dir": "\/root", 202 | 
"ansible_user_gecos": "root", 203 | "ansible_user_gid": 0, 204 | "ansible_user_id": "root", 205 | "ansible_user_shell": "\/bin\/bash", 206 | "ansible_user_uid": 0, 207 | "ansible_userspace_architecture": "x86_64", 208 | "ansible_userspace_bits": "64", 209 | "ansible_venet0": { 210 | "active": true, 211 | "device": "venet0", 212 | "ipv4": { 213 | "address": "127.0.0.2", 214 | "broadcast": "host", 215 | "netmask": "255.255.255.255", 216 | "network": "127.0.0.2" 217 | }, 218 | "mtu": 1500, 219 | "promisc": false 220 | }, 221 | "ansible_venet0_0": { 222 | "ipv4": { 223 | "address": "10.0.4.8", 224 | "broadcast": "10.0.4.8", 225 | "netmask": "255.255.255.255", 226 | "network": "10.0.4.8" 227 | } 228 | }, 229 | "ansible_virtualization_role": "guest", 230 | "ansible_virtualization_type": "openvz", 231 | "module_setup": true 232 | }, 233 | "changed": false 234 | } 235 | 236 | -------------------------------------------------------------------------------- /example/out/win.dev.local: -------------------------------------------------------------------------------- 1 | { 2 | "ansible_facts": { 3 | "ansible_architecture": "x86_64", 4 | "ansible_distribution": "Windows", 5 | "ansible_distribution_major_version": "2012", 6 | "ansible_distribution_release": "server", 7 | "ansible_distribution_version": "2012", 8 | "ansible_domain": "win-dev.local", 9 | "ansible_form_factor": "VPS", 10 | "ansible_fqdn": "win.dev.local", 11 | "ansible_hostname": "win-dev", 12 | "ansible_memtotal_mb": 4096, 13 | "ansible_memfree_mb": 4096, 14 | "ansible_nodename": "win-dev", 15 | "ansible_os_family": "Windows", 16 | "ansible_virtualization_role": "guest", 17 | "ansible_virtualization_type": "VMware", 18 | "ansible_default_ipv4": { 19 | "address": "10.0.0.3", 20 | "interface": "eth0" 21 | }, 22 | "ansible_interfaces": [ 23 | { 24 | "connection_name": "Ethernet", 25 | "default_gateway": "172.1.1.1", 26 | "dns_domain": "win1.dev.local", 27 | "interface_index": 4, 28 | "interface_name": "Intel(R) 
82579LM Gigabit Network Connection", 29 | "macaddress": "FF:B1:1C:FF:7D:23" 30 | }, 31 | { 32 | "connection_name": "VirtualBox Host-Only Network", 33 | "default_gateway": null, 34 | "dns_domain": null, 35 | "interface_index": 3, 36 | "interface_name": "VirtualBox Host-Only Ethernet Adapter", 37 | "macaddress": "0A:00:27:00:00:03" 38 | } 39 | ], 40 | "ansible_all_ipv4_addresses": [ 41 | "10.0.0.3" 42 | ], 43 | "ansible_userspace_architecture": "x86_64", 44 | "ansible_userspace_bits": "64", 45 | "ansible_mounts": [ 46 | ], 47 | "ansible_system_vendor": "VMware, Inc.", 48 | "ansible_product_name": "VMware Virtual Platform", 49 | "ansible_product_serial": "NA", 50 | "ansible_machine": "x86_64" 51 | }, 52 | "changed": false 53 | } 54 | -------------------------------------------------------------------------------- /example/out/win2k8r2.local: -------------------------------------------------------------------------------- 1 | { 2 | "ansible_facts": { 3 | "ansible_architecture": "64-bit", 4 | "ansible_date_time": { 5 | "date": "08/01/2016", 6 | "day": "08", 7 | "hour": "23", 8 | "iso8601": "2016-01-08T23:10:56", 9 | "minute": "10", 10 | "month": "01", 11 | "year": "2016" 12 | }, 13 | "ansible_distribution": "Microsoft Windows NT 6.1.7601 Service Pack 1", 14 | "ansible_distribution_version": "6.1.7601.65536", 15 | "ansible_fqdn": "win2k8r2.local", 16 | "ansible_hostname": "win2k8r2", 17 | "ansible_interfaces": [ 18 | { 19 | "default_gateway": "192.168.1.1", 20 | "dns_domain": "home", 21 | "interface_index": 11, 22 | "interface_name": "Intel(R) PRO/1000 MT Desktop Adapter" 23 | } 24 | ], 25 | "ansible_ip_addresses": [ 26 | "192.168.1.94", 27 | "fe80::aca8:571c:f3e2:afd7" 28 | ], 29 | "ansible_lastboot": "2016-01-08 23:08:57Z", 30 | "ansible_os_family": "Windows", 31 | "ansible_os_name": "Microsoft Windows Server 2008 R2 Standard", 32 | "ansible_powershell_version": 3, 33 | "ansible_system": "Win32NT", 34 | "ansible_totalmem": 2147016704, 35 | "ansible_uptime_seconds": 
119, 36 | "ansible_winrm_certificate_expires": "2016-10-10 19:34:00" 37 | }, 38 | "changed": false 39 | } 40 | -------------------------------------------------------------------------------- /example/out_custom/custfact.test.local: -------------------------------------------------------------------------------- 1 | { 2 | "custom_facts": { 3 | "software": { 4 | "apache": { 5 | "version": "2.4", 6 | "install_src": "backport_deb" 7 | }, 8 | "mysql-server": { 9 | "version": "5.5", 10 | "install_src": "manual_compile" 11 | }, 12 | "redis": { 13 | "version": "3.0.7", 14 | "install_src": "manual_compile" 15 | } 16 | } 17 | }, 18 | "ansible_facts": { 19 | "ansible_local": { 20 | "foo": "bar" 21 | } 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /example/out_extend/openbsd.dev.local: -------------------------------------------------------------------------------- 1 | { 2 | "ansible_facts": { 3 | "ansible_userspace_architecture": "x86_64" 4 | } 5 | } 6 | -------------------------------------------------------------------------------- /example/out_factcache/debian.dev.local: -------------------------------------------------------------------------------- 1 | {"ansible_all_ipv4_addresses": ["192.168.56.2"], "ansible_all_ipv6_addresses": ["fe80::a00:27ff:fef9:98a7"], "ansible_architecture": "x86_64", "ansible_bios_date": "12/01/2006", "ansible_bios_version": "VirtualBox", "ansible_cmdline": {"BOOT_IMAGE": "/vmlinuz-2.6.32-5-amd64", "quiet": true, "ro": true, "root": "/dev/mapper/debian-root"}, "ansible_date_time": {"date": "2015-08-30", "day": "30", "epoch": "1440923780", "hour": "10", "iso8601": "2015-08-30T08:36:20Z", "iso8601_micro": "2015-08-30T08:36:20.457036Z", "minute": "36", "month": "08", "second": "20", "time": "10:36:20", "tz": "CEST", "tz_offset": "+0200", "weekday": "Sunday", "year": "2015"}, "ansible_default_ipv4": {"address": "192.168.56.2", "alias": "eth0", "gateway": "192.168.56.1", "interface": "eth0", 
"macaddress": "08:00:27:f9:98:a7", "mtu": 1500, "netmask": "255.255.255.0", "network": "192.168.56.0", "type": "ether"}, "ansible_default_ipv6": {}, "ansible_devices": {"sda": {"holders": [], "host": "SATA controller: Intel Corporation 82801HBM/HEM (ICH8M/ICH8M-E) SATA AHCI Controller (rev 02)", "model": "VBOX HARDDISK", "partitions": {"sda1": {"sectors": "497664", "sectorsize": 512, "size": "243.00 MB", "start": "2048"}, "sda2": {"sectors": "2", "sectorsize": 512, "size": "1.00 KB", "start": "501758"}, "sda5": {"sectors": "209211392", "sectorsize": 512, "size": "99.76 GB", "start": "501760"}}, "removable": "0", "rotational": "1", "scheduler_mode": "cfq", "sectors": "209715200", "sectorsize": "512", "size": "100.00 GB", "support_discard": null, "vendor": "ATA"}, "sr0": {"holders": [], "host": "IDE interface: Intel Corporation 82371AB/EB/MB PIIX4 IDE (rev 01)", "model": "CD-ROM", "partitions": {}, "removable": "1", "rotational": "1", "scheduler_mode": "cfq", "sectors": "2097151", "sectorsize": "512", "size": "1024.00 MB", "support_discard": null, "vendor": "VBOX"}}, "ansible_distribution": "Debian", "ansible_distribution_major_version": "6", "ansible_distribution_release": "NA", "ansible_distribution_version": "6.0.10", "ansible_domain": "", "ansible_env": {"EDITOR": "vim", "GDK_USE_XFT": "1", "HOME": "/home/fboender", "LANG": "en_US.UTF-8", "LANGUAGE": "en_US.UTF-8", "LC_ALL": "en_US.UTF-8", "LC_CTYPE": "en_US.UTF-8", "LESS": "-RgiMSx4 -FX", "LOGNAME": "fboender", "LS_COLORS": 
"rs=0:di=01;34:ln=01;36:mh=00:pi=40;33:so=01;35:do=01;35:bd=40;33;01:cd=40;33;01:or=40;31;01:su=37;41:sg=30;43:ca=30;41:tw=30;42:ow=34;42:st=37;44:ex=01;32:*.tar=01;31:*.tgz=01;31:*.arj=01;31:*.taz=01;31:*.lzh=01;31:*.lzma=01;31:*.tlz=01;31:*.txz=01;31:*.zip=01;31:*.z=01;31:*.Z=01;31:*.dz=01;31:*.gz=01;31:*.lz=01;31:*.xz=01;31:*.bz2=01;31:*.bz=01;31:*.tbz=01;31:*.tbz2=01;31:*.tz=01;31:*.deb=01;31:*.rpm=01;31:*.jar=01;31:*.rar=01;31:*.ace=01;31:*.zoo=01;31:*.cpio=01;31:*.7z=01;31:*.rz=01;31:*.jpg=01;35:*.jpeg=01;35:*.gif=01;35:*.bmp=01;35:*.pbm=01;35:*.pgm=01;35:*.ppm=01;35:*.tga=01;35:*.xbm=01;35:*.xpm=01;35:*.tif=01;35:*.tiff=01;35:*.png=01;35:*.svg=01;35:*.svgz=01;35:*.mng=01;35:*.pcx=01;35:*.mov=01;35:*.mpg=01;35:*.mpeg=01;35:*.m2v=01;35:*.mkv=01;35:*.ogm=01;35:*.mp4=01;35:*.m4v=01;35:*.mp4v=01;35:*.vob=01;35:*.qt=01;35:*.nuv=01;35:*.wmv=01;35:*.asf=01;35:*.rm=01;35:*.rmvb=01;35:*.flc=01;35:*.avi=01;35:*.fli=01;35:*.flv=01;35:*.gl=01;35:*.dl=01;35:*.xcf=01;35:*.xwd=01;35:*.yuv=01;35:*.cgm=01;35:*.emf=01;35:*.axv=01;35:*.anx=01;35:*.ogv=01;35:*.ogx=01;35:*.aac=00;36:*.au=00;36:*.flac=00;36:*.mid=00;36:*.midi=00;36:*.mka=00;36:*.mp3=00;36:*.mpc=00;36:*.ogg=00;36:*.ra=00;36:*.wav=00;36:*.axa=00;36:*.oga=00;36:*.spx=00;36:*.xspf=00;36::ow=103;30;01:", "PAGER": "less", "PATH": "~/bin:/usr/local/bin:/usr/bin:/bin:/usr/bin/X11:/usr/games:/usr/local/sbin:/usr/sbin:/sbin:~/bin/:", "PWD": "/home/fboender", "SHELL": "/bin/bash", "SHLVL": "1", "SSH_CLIENT": "192.168.56.1 60106 22", "SSH_CONNECTION": "192.168.56.1 60106 192.168.56.2 22", "SSH_TTY": "/dev/pts/1", "TERM": "xterm-256color", "TNS_ADMIN": "/usr/local/lib/instantclient_10_2/network/admin/", "USER": "fboender", "_": "/bin/sh"}, "ansible_eth0": {"active": true, "device": "eth0", "ipv4": {"address": "192.168.56.2", "netmask": "255.255.255.0", "network": "192.168.56.0"}, "ipv6": [{"address": "fe80::a00:27ff:fef9:98a7", "prefix": "64", "scope": "link"}], "macaddress": "08:00:27:f9:98:a7", "module": "pcnet32", "mtu": 
1500, "promisc": false, "type": "ether"}, "ansible_fips": false, "ansible_form_factor": "Other", "ansible_fqdn": "localhost", "ansible_hostname": "dev", "ansible_interfaces": ["lo", "eth0"], "ansible_kernel": "2.6.32-5-amd64", "ansible_lo": {"active": true, "device": "lo", "ipv4": {"address": "127.0.0.1", "netmask": "255.0.0.0", "network": "127.0.0.0"}, "ipv6": [{"address": "::1", "prefix": "128", "scope": "host"}], "mtu": 16436, "promisc": false, "type": "loopback"}, "ansible_lsb": {"codename": "squeeze", "description": "Debian GNU/Linux 6.0.10 (squeeze)", "id": "Debian", "major_release": "6", "release": "6.0.10"}, "ansible_machine": "x86_64", "ansible_machine_id": "00a3ac55878f7a9340c879050000036c", "ansible_memfree_mb": 217, "ansible_memory_mb": {"nocache": {"free": 395, "used": 101}, "real": {"free": 217, "total": 496, "used": 279}, "swap": {"cached": 0, "free": 727, "total": 727, "used": 0}}, "ansible_memtotal_mb": 496, "ansible_mounts": [{"device": "/dev/mapper/debian-root", "fstype": "ext3", "mount": "/", "options": "rw,errors=remount-ro", "size_available": 92955344896, "size_total": 104680742912, "uuid": "NA"}, {"device": "/dev/sda1", "fstype": "ext2", "mount": "/boot", "options": "rw", "size_available": 209807360, "size_total": 238787584, "uuid": "NA"}], "ansible_nodename": "dev.local", "ansible_os_family": "Debian", "ansible_pkg_mgr": "apt", "ansible_processor": ["GenuineIntel", "Intel(R) Core(TM) i7-4712HQ CPU @ 2.30GHz"], "ansible_processor_cores": 1, "ansible_processor_count": 1, "ansible_processor_threads_per_core": 1, "ansible_processor_vcpus": 1, "ansible_product_name": "VirtualBox", "ansible_product_serial": "NA", "ansible_product_uuid": "NA", "ansible_product_version": "1.2", "ansible_python_version": "2.6.6", "ansible_selinux": false, "ansible_ssh_host_key_dsa_public": 
"AAAAB3NzaC1kc3MAAACBAOJWOpQVltXw3wNsRq20+r37aOHiD11hNvNttywbVkNPLo+s7Q0Y0lctaOWl9WR4b3EK55t+a7/sCqS7Qy5CGwtMmsg3ayUUNZLSwwAkAZ2UISyYRbb3AJwMb3HqBXu6P/lm5GRDEycU+bQUUdVOsBe6kwMEKUdBtsa++ipCCCcNAAAAFQCUI0c8CwmSvtwcuj7JTjEJnhBKiQAAAIEA4Mhgav6N18adoQ6xvgHNVrdf/ilNOv1tFUpL2pFlH21zrONj19/hT/HSyj7CeDV0Hpfwg1gGYI12TNgf+9NDOfz2ceXef6QVfG1Nf8j7HAp9KoSU50MCM9la3oTUnN4AwwPGp8ItuHwzmGubt1UaVaBPpeeNrMCWqewHF8bgZmAAAACAB68uE+BWPsGpKqdXeaohvinF296nWc0urbXQ6yPVaATT96UP+vT2QToZY+4Zkcs6l3gL0kS7s8Y/50AxbvO1yKFhIqBnH/p4tV21jdTnXL066bbU60f5tjC5/ty+zYQREKAm3XiLxOSRyyC0M34bFVIqCtZ5tMax2xtaRndDlys=", "ansible_ssh_host_key_rsa_public": "AAAAB3NzaC1yc2EAAAADAQABAAABAQDFwue0q1kD5CgZczAKg10/DFyKWgxoSK1J/r/Tk9PqvckNjwVx7Yn78rElXgo4SCMceWPIucb8Yl8FpmdnuXH8/yn5i+snOpBQddoFun/CiB3HUw28T2M7Y9q4QtEcMiULBq1oiCoYJfNU9o3aD2caxk8OhcrF5k7Ec5DIyAGN8doYxey6icl6ohUJR6x6jnZO+6uoSKyHwxS3HBZ+6RrVY7ckCuRk/w24P7YM5sEnHZ9dnS4uTVCYKrJpygYUbN/HrSNuIIAQpvitZWua6t7mFy1zugCc0Lj8QbPStnsntIVWoIwWY+iFnFrS6N3IiGHAyOv6Jla0P3HEFmrhoVIH", "ansible_swapfree_mb": 727, "ansible_swaptotal_mb": 727, "ansible_system": "Linux", "ansible_system_vendor": "innotek GmbH", "ansible_user_dir": "/home/fboender", "ansible_user_gecos": "fboender,,,", "ansible_user_gid": 1000, "ansible_user_id": "fboender", "ansible_user_shell": "/bin/bash", "ansible_user_uid": 1000, "ansible_userspace_architecture": "x86_64", "ansible_userspace_bits": "64", "ansible_virtualization_role": "guest", "ansible_virtualization_type": "virtualbox", "module_setup": true} -------------------------------------------------------------------------------- /example/ssh.config: -------------------------------------------------------------------------------- 1 | Host eek.electricmonk.nl 2 | Hostname 192.168.0.10 3 | 4 | Host debian.dev.local 5 | Hostname 192.168.56.2 6 | 7 | Host centos.dev.local 8 | Hostname 192.168.56.8 9 | 10 | Host jib.electricmonk.nl 11 | Hostname 127.0.0.1 12 | -------------------------------------------------------------------------------- 
/example/txt_table.txt: -------------------------------------------------------------------------------- 1 | Name OS IP Arch Mem CPUsVirt Disk avail 2 | ---------------------- ------------- ------------- ------------- --- - ---------------- -------------------- 3 | jib.electricmonk.nl Linuxmint 17 192.168.0.3 x86_64/x86_64 16g 1 kvm/host 0.0g, 400.0g 4 | app.uat.local Debian 6.0.10 192.168.57.1 x86_64/x86_64 1g 1 virtualbox/guest 180.0g, 0.0g 5 | eek.electricmonk.nl Ubuntu 14.04 192.168.0.10 i386/i386 3g 1 kvm/host 277.0g, 955.0g, 0.0g 6 | db01.prod.local Debian 6.0.10 192.168.58.1 x86_64/x86_64 0g 1 virtualbox/guest 84.0g, 0.0g 7 | debian.dev.local Debian 6.0.10 192.168.56.2 x86_64/x86_64 1g 1 virtualbox/guest 180.0g, 0.0g 8 | db02.prod.local Debian 6.0.10 192.168.58.2 x86_64/x86_64 0g 1 virtualbox/guest 84.0g, 0.0g 9 | centos.dev.local CentOS 6.6 192.168.56.8 x86_64/x86_64 1g 1 virtualbox/guest 27.0g, 0.0g 10 | win.dev.local Windows 2012 10.0.0.3 x86_64/x86_64 4g 0 VMware/guest 11 | host5.example.com Debian 6.0.10 192.168.57.1 x86_64/x86_64 1g 1 virtualbox/guest 180.0g, 0.0g 12 | db03.prod.local Debian 6.0.10 192.168.58.3 x86_64/x86_64 0g 1 virtualbox/guest 84.0g, 0.0g 13 | zoltar.electricmonk.nl Ubuntu 14.04 194.187.79.11 x86_64/x86_64 4g 2 VMware/guest 27.0g, 0.0g 14 | 15 | -------------------------------------------------------------------------------- /lib/jsonxs.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | """ 4 | jsonxs uses a path expression string to get and set values in JSON and Python 5 | datastructures. 6 | 7 | For example: 8 | 9 | >>> d = { 10 | ... 'feed': { 11 | ... 'id': 'my_feed', 12 | ... 'url': 'http://example.com/feed.rss', 13 | ... 'tags': ['devel', 'example', 'python'], 14 | ... 'short.desc': 'A feed', 15 | ... } 16 | ... 
# Action constants accepted by jsonxs().
ACTION_GET = 'get'
ACTION_SET = 'set'
ACTION_DEL = 'del'
ACTION_APPEND = 'append'
ACTION_INSERT = 'insert'
ACTION_MKDICT = 'mkdict'
ACTION_MKLIST = 'mklist'


def tokenize(expr):
    """
    Parse a string expression into a list of tokens that can be used as a
    path into a Python datastructure.

    Dict keys stay strings, ``[n]`` list indices become ints, and a
    backslash escapes the next character (so ``short\\.desc`` is a single
    key containing a literal dot).
    """
    tokens = []
    escape = False
    cur_token = ''

    for c in expr:
        if escape:
            # Previous char was a backslash: take this char literally.
            cur_token += c
            escape = False
        else:
            if c == '\\':
                # Next char will be escaped
                escape = True
                continue
            elif c == '[':
                # Next token is of type index (list)
                if cur_token:
                    tokens.append(cur_token)
                cur_token = ''
            elif c == ']':
                # End of index token. Next token defaults to a key (dict)
                if cur_token:
                    tokens.append(int(cur_token))
                cur_token = ''
            elif c == '.':
                # End of key token. Next token defaults to a key (dict)
                if cur_token:
                    tokens.append(cur_token)
                cur_token = ''
            else:
                # Append char to token name
                cur_token += c
    if cur_token:
        tokens.append(cur_token)

    return tokens


def jsonxs(data, expr, action=ACTION_GET, value=None, default=None):
    """
    Get, set, delete values in a JSON structure. `expr` is a JSONpath-like
    expression pointing to the desired value. `action` determines the action
    to perform. See the module-level `ACTION_*` constants. `value` should be
    given if action is `ACTION_SET`. If `default` is set and `expr` isn't
    found, return `default` instead. This will override all exceptions.
    """
    tokens = tokenize(expr)

    # Walk through the list of tokens to reach the correct path in the data
    # structure.
    try:
        prev_path = None
        cur_path = data
        for token in tokens:
            prev_path = cur_path
            if token not in cur_path and \
                    action in (ACTION_SET, ACTION_MKDICT, ACTION_MKLIST):
                # When setting values or creating dicts/lists, the key can be
                # missing from the data structure.
                # NOTE(review): when cur_path is a list, `in` tests element
                # *values*, not index validity — preserved from the original.
                continue
            cur_path = cur_path[token]
    except Exception:
        if default is not None:
            return default
        else:
            raise

    # Perform action the user requested. For any action other than GET,
    # `token` is the last path component and `prev_path` its container.
    # NOTE(review): an empty `expr` with a non-GET action raises NameError
    # (no tokens were walked) — preserved from the original.
    if action == ACTION_GET:
        return cur_path
    elif action == ACTION_DEL:
        del prev_path[token]
    elif action == ACTION_SET:
        prev_path[token] = value
    elif action == ACTION_APPEND:
        prev_path[token].append(value)
    elif action == ACTION_INSERT:
        prev_path.insert(token, value)
    elif action == ACTION_MKDICT:
        prev_path[token] = {}
    elif action == ACTION_MKLIST:
        prev_path[token] = []
    else:
        raise ValueError("Invalid action: {}".format(action))


if __name__ == "__main__":
    import doctest
    doctest.testmod()
expressions and blocks of Python 8 | code, as well as generating Python from AST nodes""" 9 | 10 | from mako import exceptions, pyparser, compat 11 | import re 12 | 13 | class PythonCode(object): 14 | """represents information about a string containing Python code""" 15 | def __init__(self, code, **exception_kwargs): 16 | self.code = code 17 | 18 | # represents all identifiers which are assigned to at some point in 19 | # the code 20 | self.declared_identifiers = set() 21 | 22 | # represents all identifiers which are referenced before their 23 | # assignment, if any 24 | self.undeclared_identifiers = set() 25 | 26 | # note that an identifier can be in both the undeclared and declared 27 | # lists. 28 | 29 | # using AST to parse instead of using code.co_varnames, 30 | # code.co_names has several advantages: 31 | # - we can locate an identifier as "undeclared" even if 32 | # its declared later in the same block of code 33 | # - AST is less likely to break with version changes 34 | # (for example, the behavior of co_names changed a little bit 35 | # in python version 2.5) 36 | if isinstance(code, compat.string_types): 37 | expr = pyparser.parse(code.lstrip(), "exec", **exception_kwargs) 38 | else: 39 | expr = code 40 | 41 | f = pyparser.FindIdentifiers(self, **exception_kwargs) 42 | f.visit(expr) 43 | 44 | class ArgumentList(object): 45 | """parses a fragment of code as a comma-separated list of expressions""" 46 | def __init__(self, code, **exception_kwargs): 47 | self.codeargs = [] 48 | self.args = [] 49 | self.declared_identifiers = set() 50 | self.undeclared_identifiers = set() 51 | if isinstance(code, compat.string_types): 52 | if re.match(r"\S", code) and not re.match(r",\s*$", code): 53 | # if theres text and no trailing comma, insure its parsed 54 | # as a tuple by adding a trailing comma 55 | code += "," 56 | expr = pyparser.parse(code, "exec", **exception_kwargs) 57 | else: 58 | expr = code 59 | 60 | f = pyparser.FindTuple(self, PythonCode, 
**exception_kwargs) 61 | f.visit(expr) 62 | 63 | class PythonFragment(PythonCode): 64 | """extends PythonCode to provide identifier lookups in partial control 65 | statements 66 | 67 | e.g. 68 | for x in 5: 69 | elif y==9: 70 | except (MyException, e): 71 | etc. 72 | """ 73 | def __init__(self, code, **exception_kwargs): 74 | m = re.match(r'^(\w+)(?:\s+(.*?))?:\s*(#|$)', code.strip(), re.S) 75 | if not m: 76 | raise exceptions.CompileException( 77 | "Fragment '%s' is not a partial control statement" % 78 | code, **exception_kwargs) 79 | if m.group(3): 80 | code = code[:m.start(3)] 81 | (keyword, expr) = m.group(1,2) 82 | if keyword in ['for','if', 'while']: 83 | code = code + "pass" 84 | elif keyword == 'try': 85 | code = code + "pass\nexcept:pass" 86 | elif keyword == 'elif' or keyword == 'else': 87 | code = "if False:pass\n" + code + "pass" 88 | elif keyword == 'except': 89 | code = "try:pass\n" + code + "pass" 90 | elif keyword == 'with': 91 | code = code + "pass" 92 | else: 93 | raise exceptions.CompileException( 94 | "Unsupported control keyword: '%s'" % 95 | keyword, **exception_kwargs) 96 | super(PythonFragment, self).__init__(code, **exception_kwargs) 97 | 98 | 99 | class FunctionDecl(object): 100 | """function declaration""" 101 | def __init__(self, code, allow_kwargs=True, **exception_kwargs): 102 | self.code = code 103 | expr = pyparser.parse(code, "exec", **exception_kwargs) 104 | 105 | f = pyparser.ParseFunc(self, **exception_kwargs) 106 | f.visit(expr) 107 | if not hasattr(self, 'funcname'): 108 | raise exceptions.CompileException( 109 | "Code '%s' is not a function declaration" % code, 110 | **exception_kwargs) 111 | if not allow_kwargs and self.kwargs: 112 | raise exceptions.CompileException( 113 | "'**%s' keyword argument not allowed here" % 114 | self.kwargnames[-1], **exception_kwargs) 115 | 116 | def get_argument_expressions(self, as_call=False): 117 | """Return the argument declarations of this FunctionDecl as a printable 118 | list. 
119 | 120 | By default the return value is appropriate for writing in a ``def``; 121 | set `as_call` to true to build arguments to be passed to the function 122 | instead (assuming locals with the same names as the arguments exist). 123 | """ 124 | 125 | namedecls = [] 126 | 127 | # Build in reverse order, since defaults and slurpy args come last 128 | argnames = self.argnames[::-1] 129 | kwargnames = self.kwargnames[::-1] 130 | defaults = self.defaults[::-1] 131 | kwdefaults = self.kwdefaults[::-1] 132 | 133 | # Named arguments 134 | if self.kwargs: 135 | namedecls.append("**" + kwargnames.pop(0)) 136 | 137 | for name in kwargnames: 138 | # Keyword-only arguments must always be used by name, so even if 139 | # this is a call, print out `foo=foo` 140 | if as_call: 141 | namedecls.append("%s=%s" % (name, name)) 142 | elif kwdefaults: 143 | default = kwdefaults.pop(0) 144 | if default is None: 145 | # The AST always gives kwargs a default, since you can do 146 | # `def foo(*, a=1, b, c=3)` 147 | namedecls.append(name) 148 | else: 149 | namedecls.append("%s=%s" % ( 150 | name, pyparser.ExpressionGenerator(default).value())) 151 | else: 152 | namedecls.append(name) 153 | 154 | # Positional arguments 155 | if self.varargs: 156 | namedecls.append("*" + argnames.pop(0)) 157 | 158 | for name in argnames: 159 | if as_call or not defaults: 160 | namedecls.append(name) 161 | else: 162 | default = defaults.pop(0) 163 | namedecls.append("%s=%s" % ( 164 | name, pyparser.ExpressionGenerator(default).value())) 165 | 166 | namedecls.reverse() 167 | return namedecls 168 | 169 | @property 170 | def allargnames(self): 171 | return tuple(self.argnames) + tuple(self.kwargnames) 172 | 173 | class FunctionArgs(FunctionDecl): 174 | """the argument portion of a function declaration""" 175 | 176 | def __init__(self, code, **kwargs): 177 | super(FunctionArgs, self).__init__("def ANON(%s):pass" % code, 178 | **kwargs) 179 | 
# mako/cmd.py
# Copyright (C) 2006-2015 the Mako authors and contributors
#
# This module is part of Mako and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
from argparse import ArgumentParser
from os.path import isfile, dirname
import sys
from mako.template import Template
from mako.lookup import TemplateLookup
from mako import exceptions


def varsplit(var):
    """Split a ``name=value`` option into a ``(name, value)`` tuple.

    A bare ``name`` (no ``=``) yields ``(name, "")``. Only the first
    ``=`` splits, so values may themselves contain ``=``.
    """
    if "=" not in var:
        return (var, "")
    # Return a tuple for consistency with the branch above (the original
    # returned a list here, giving the function two different return types).
    return tuple(var.split("=", 1))


def _exit():
    """Render the current Mako error as a text traceback and exit with 1."""
    sys.stderr.write(exceptions.text_error_template().render())
    sys.exit(1)


def cmdline(argv=None):
    """Command-line entry point: render a Mako template to stdout.

    Reads the template from the file given as positional argument, or from
    stdin when the argument is ``-`` (the default). ``--var name=value``
    options become template variables; ``--template-dir`` sets the lookup
    directories.
    """
    parser = ArgumentParser("usage: %prog [FILENAME]")
    parser.add_argument("--var", default=[], action="append",
                        help="variable (can be used multiple times, use name=value)")
    parser.add_argument("--template-dir", default=[], action="append",
                        help="Directory to use for template lookup (multiple "
                        "directories may be provided). If not given then if the "
                        "template is read from stdin, the value defaults to be "
                        "the current directory, otherwise it defaults to be the "
                        "parent directory of the file provided.")
    parser.add_argument('input', nargs='?', default='-')

    options = parser.parse_args(argv)
    if options.input == '-':
        lookup_dirs = options.template_dir or ["."]
        lookup = TemplateLookup(lookup_dirs)
        # except Exception (not a bare except) so SystemExit and
        # KeyboardInterrupt propagate instead of being rendered as
        # template errors.
        try:
            template = Template(sys.stdin.read(), lookup=lookup)
        except Exception:
            _exit()
    else:
        filename = options.input
        if not isfile(filename):
            raise SystemExit("error: can't find %s" % filename)
        lookup_dirs = options.template_dir or [dirname(filename)]
        lookup = TemplateLookup(lookup_dirs)
        try:
            template = Template(filename=filename, lookup=lookup)
        except Exception:
            _exit()

    kw = dict(varsplit(var) for var in options.var)
    try:
        print(template.render(**kw))
    except Exception:
        _exit()


if __name__ == "__main__":
    cmdline()
lit) 31 | 32 | else: 33 | import __builtin__ as compat_builtins 34 | try: 35 | from cStringIO import StringIO 36 | except: 37 | from StringIO import StringIO 38 | 39 | byte_buffer = StringIO 40 | 41 | from urllib import quote_plus, unquote_plus 42 | from htmlentitydefs import codepoint2name, name2codepoint 43 | string_types = basestring, 44 | binary_type = str 45 | text_type = unicode 46 | 47 | def u(s): 48 | return unicode(s, "utf-8") 49 | 50 | def b(s): 51 | return s 52 | 53 | def octal(lit): 54 | return eval("0" + lit) 55 | 56 | 57 | if py33: 58 | from importlib import machinery 59 | def load_module(module_id, path): 60 | return machinery.SourceFileLoader(module_id, path).load_module() 61 | else: 62 | import imp 63 | def load_module(module_id, path): 64 | fp = open(path, 'rb') 65 | try: 66 | return imp.load_source(module_id, path, fp) 67 | finally: 68 | fp.close() 69 | 70 | 71 | if py3k: 72 | def reraise(tp, value, tb=None, cause=None): 73 | if cause is not None: 74 | value.__cause__ = cause 75 | if value.__traceback__ is not tb: 76 | raise value.with_traceback(tb) 77 | raise value 78 | else: 79 | exec("def reraise(tp, value, tb=None, cause=None):\n" 80 | " raise tp, value, tb\n") 81 | 82 | 83 | def exception_as(): 84 | return sys.exc_info()[1] 85 | 86 | try: 87 | import threading 88 | if py3k: 89 | import _thread as thread 90 | else: 91 | import thread 92 | except ImportError: 93 | import dummy_threading as threading 94 | if py3k: 95 | import _dummy_thread as thread 96 | else: 97 | import dummy_thread as thread 98 | 99 | if win32 or jython: 100 | time_func = time.clock 101 | else: 102 | time_func = time.time 103 | 104 | try: 105 | from functools import partial 106 | except: 107 | def partial(func, *args, **keywords): 108 | def newfunc(*fargs, **fkeywords): 109 | newkeywords = keywords.copy() 110 | newkeywords.update(fkeywords) 111 | return func(*(args + fargs), **newkeywords) 112 | return newfunc 113 | 114 | 115 | all = all 116 | import json 117 | 118 | def 
exception_name(exc): 119 | return exc.__class__.__name__ 120 | 121 | try: 122 | from inspect import CO_VARKEYWORDS, CO_VARARGS 123 | def inspect_func_args(fn): 124 | if py3k: 125 | co = fn.__code__ 126 | else: 127 | co = fn.func_code 128 | 129 | nargs = co.co_argcount 130 | names = co.co_varnames 131 | args = list(names[:nargs]) 132 | 133 | varargs = None 134 | if co.co_flags & CO_VARARGS: 135 | varargs = co.co_varnames[nargs] 136 | nargs = nargs + 1 137 | varkw = None 138 | if co.co_flags & CO_VARKEYWORDS: 139 | varkw = co.co_varnames[nargs] 140 | 141 | if py3k: 142 | return args, varargs, varkw, fn.__defaults__ 143 | else: 144 | return args, varargs, varkw, fn.func_defaults 145 | except ImportError: 146 | import inspect 147 | def inspect_func_args(fn): 148 | return inspect.getargspec(fn) 149 | 150 | if py3k: 151 | def callable(fn): 152 | return hasattr(fn, '__call__') 153 | else: 154 | callable = callable 155 | 156 | 157 | ################################################ 158 | # cross-compatible metaclass implementation 159 | # Copyright (c) 2010-2012 Benjamin Peterson 160 | def with_metaclass(meta, base=object): 161 | """Create a base class with a metaclass.""" 162 | return meta("%sBase" % meta.__name__, (base,), {}) 163 | ################################################ 164 | 165 | 166 | def arg_stringname(func_arg): 167 | """Gets the string name of a kwarg or vararg 168 | In Python3.4 a function's args are 169 | of _ast.arg type not _ast.name 170 | """ 171 | if hasattr(func_arg, 'arg'): 172 | return func_arg.arg 173 | else: 174 | return str(func_arg) 175 | -------------------------------------------------------------------------------- /lib/mako/ext/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fboender/ansible-cmdb/3f3e412d2a7be91c97c5a1842f4e57cc85b06961/lib/mako/ext/__init__.py -------------------------------------------------------------------------------- 
/lib/mako/ext/autohandler.py: -------------------------------------------------------------------------------- 1 | # ext/autohandler.py 2 | # Copyright (C) 2006-2015 the Mako authors and contributors 3 | # 4 | # This module is part of Mako and is released under 5 | # the MIT License: http://www.opensource.org/licenses/mit-license.php 6 | 7 | """adds autohandler functionality to Mako templates. 8 | 9 | requires that the TemplateLookup class is used with templates. 10 | 11 | usage: 12 | 13 | <%! 14 | from mako.ext.autohandler import autohandler 15 | %> 16 | <%inherit file="${autohandler(template, context)}"/> 17 | 18 | 19 | or with custom autohandler filename: 20 | 21 | <%! 22 | from mako.ext.autohandler import autohandler 23 | %> 24 | <%inherit file="${autohandler(template, context, name='somefilename')}"/> 25 | 26 | """ 27 | 28 | import posixpath, os, re 29 | 30 | def autohandler(template, context, name='autohandler'): 31 | lookup = context.lookup 32 | _template_uri = template.module._template_uri 33 | if not lookup.filesystem_checks: 34 | try: 35 | return lookup._uri_cache[(autohandler, _template_uri, name)] 36 | except KeyError: 37 | pass 38 | 39 | tokens = re.findall(r'([^/]+)', posixpath.dirname(_template_uri)) + [name] 40 | while len(tokens): 41 | path = '/' + '/'.join(tokens) 42 | if path != _template_uri and _file_exists(lookup, path): 43 | if not lookup.filesystem_checks: 44 | return lookup._uri_cache.setdefault( 45 | (autohandler, _template_uri, name), path) 46 | else: 47 | return path 48 | if len(tokens) == 1: 49 | break 50 | tokens[-2:] = [name] 51 | 52 | if not lookup.filesystem_checks: 53 | return lookup._uri_cache.setdefault( 54 | (autohandler, _template_uri, name), None) 55 | else: 56 | return None 57 | 58 | def _file_exists(lookup, path): 59 | psub = re.sub(r'^/', '',path) 60 | for d in lookup.directories: 61 | if os.path.exists(d + '/' + psub): 62 | return True 63 | else: 64 | return False 65 | 66 | 
-------------------------------------------------------------------------------- /lib/mako/ext/babelplugin.py: -------------------------------------------------------------------------------- 1 | # ext/babelplugin.py 2 | # Copyright (C) 2006-2015 the Mako authors and contributors 3 | # 4 | # This module is part of Mako and is released under 5 | # the MIT License: http://www.opensource.org/licenses/mit-license.php 6 | 7 | """gettext message extraction via Babel: http://babel.edgewall.org/""" 8 | from babel.messages.extract import extract_python 9 | from mako.ext.extract import MessageExtractor 10 | 11 | 12 | class BabelMakoExtractor(MessageExtractor): 13 | def __init__(self, keywords, comment_tags, options): 14 | self.keywords = keywords 15 | self.options = options 16 | self.config = { 17 | 'comment-tags': u' '.join(comment_tags), 18 | 'encoding': options.get('input_encoding', 19 | options.get('encoding', None)), 20 | } 21 | super(BabelMakoExtractor, self).__init__() 22 | 23 | def __call__(self, fileobj): 24 | return self.process_file(fileobj) 25 | 26 | def process_python(self, code, code_lineno, translator_strings): 27 | comment_tags = self.config['comment-tags'] 28 | for lineno, funcname, messages, python_translator_comments \ 29 | in extract_python(code, 30 | self.keywords, comment_tags, self.options): 31 | yield (code_lineno + (lineno - 1), funcname, messages, 32 | translator_strings + python_translator_comments) 33 | 34 | 35 | def extract(fileobj, keywords, comment_tags, options): 36 | """Extract messages from Mako templates. 37 | 38 | :param fileobj: the file-like object the messages should be extracted from 39 | :param keywords: a list of keywords (i.e. 
function names) that should be 40 | recognized as translation functions 41 | :param comment_tags: a list of translator tags to search for and include 42 | in the results 43 | :param options: a dictionary of additional options (optional) 44 | :return: an iterator over ``(lineno, funcname, message, comments)`` tuples 45 | :rtype: ``iterator`` 46 | """ 47 | extractor = BabelMakoExtractor(keywords, comment_tags, options) 48 | for message in extractor(fileobj): 49 | yield message 50 | -------------------------------------------------------------------------------- /lib/mako/ext/beaker_cache.py: -------------------------------------------------------------------------------- 1 | """Provide a :class:`.CacheImpl` for the Beaker caching system.""" 2 | 3 | from mako import exceptions 4 | 5 | from mako.cache import CacheImpl 6 | 7 | try: 8 | from beaker import cache as beaker_cache 9 | except: 10 | has_beaker = False 11 | else: 12 | has_beaker = True 13 | 14 | _beaker_cache = None 15 | 16 | 17 | class BeakerCacheImpl(CacheImpl): 18 | """A :class:`.CacheImpl` provided for the Beaker caching system. 19 | 20 | This plugin is used by default, based on the default 21 | value of ``'beaker'`` for the ``cache_impl`` parameter of the 22 | :class:`.Template` or :class:`.TemplateLookup` classes. 
23 | 24 | """ 25 | 26 | def __init__(self, cache): 27 | if not has_beaker: 28 | raise exceptions.RuntimeException( 29 | "Can't initialize Beaker plugin; Beaker is not installed.") 30 | global _beaker_cache 31 | if _beaker_cache is None: 32 | if 'manager' in cache.template.cache_args: 33 | _beaker_cache = cache.template.cache_args['manager'] 34 | else: 35 | _beaker_cache = beaker_cache.CacheManager() 36 | super(BeakerCacheImpl, self).__init__(cache) 37 | 38 | def _get_cache(self, **kw): 39 | expiretime = kw.pop('timeout', None) 40 | if 'dir' in kw: 41 | kw['data_dir'] = kw.pop('dir') 42 | elif self.cache.template.module_directory: 43 | kw['data_dir'] = self.cache.template.module_directory 44 | 45 | if 'manager' in kw: 46 | kw.pop('manager') 47 | 48 | if kw.get('type') == 'memcached': 49 | kw['type'] = 'ext:memcached' 50 | 51 | if 'region' in kw: 52 | region = kw.pop('region') 53 | cache = _beaker_cache.get_cache_region(self.cache.id, region, **kw) 54 | else: 55 | cache = _beaker_cache.get_cache(self.cache.id, **kw) 56 | cache_args = {'starttime': self.cache.starttime} 57 | if expiretime: 58 | cache_args['expiretime'] = expiretime 59 | return cache, cache_args 60 | 61 | def get_or_create(self, key, creation_function, **kw): 62 | cache, kw = self._get_cache(**kw) 63 | return cache.get(key, createfunc=creation_function, **kw) 64 | 65 | def put(self, key, value, **kw): 66 | cache, kw = self._get_cache(**kw) 67 | cache.put(key, value, **kw) 68 | 69 | def get(self, key, **kw): 70 | cache, kw = self._get_cache(**kw) 71 | return cache.get(key, **kw) 72 | 73 | def invalidate(self, key, **kw): 74 | cache, kw = self._get_cache(**kw) 75 | cache.remove_value(key, **kw) 76 | -------------------------------------------------------------------------------- /lib/mako/ext/extract.py: -------------------------------------------------------------------------------- 1 | import re 2 | from mako import compat 3 | from mako import lexer 4 | from mako import parsetree 5 | 6 | 7 | class 
MessageExtractor(object): 8 | def process_file(self, fileobj): 9 | template_node = lexer.Lexer( 10 | fileobj.read(), 11 | input_encoding=self.config['encoding']).parse() 12 | for extracted in self.extract_nodes(template_node.get_children()): 13 | yield extracted 14 | 15 | def extract_nodes(self, nodes): 16 | translator_comments = [] 17 | in_translator_comments = False 18 | comment_tags = list( 19 | filter(None, re.split(r'\s+', self.config['comment-tags']))) 20 | 21 | for node in nodes: 22 | child_nodes = None 23 | if in_translator_comments and \ 24 | isinstance(node, parsetree.Text) and \ 25 | not node.content.strip(): 26 | # Ignore whitespace within translator comments 27 | continue 28 | 29 | if isinstance(node, parsetree.Comment): 30 | value = node.text.strip() 31 | if in_translator_comments: 32 | translator_comments.extend( 33 | self._split_comment(node.lineno, value)) 34 | continue 35 | for comment_tag in comment_tags: 36 | if value.startswith(comment_tag): 37 | in_translator_comments = True 38 | translator_comments.extend( 39 | self._split_comment(node.lineno, value)) 40 | continue 41 | 42 | if isinstance(node, parsetree.DefTag): 43 | code = node.function_decl.code 44 | child_nodes = node.nodes 45 | elif isinstance(node, parsetree.BlockTag): 46 | code = node.body_decl.code 47 | child_nodes = node.nodes 48 | elif isinstance(node, parsetree.CallTag): 49 | code = node.code.code 50 | child_nodes = node.nodes 51 | elif isinstance(node, parsetree.PageTag): 52 | code = node.body_decl.code 53 | elif isinstance(node, parsetree.CallNamespaceTag): 54 | code = node.expression 55 | child_nodes = node.nodes 56 | elif isinstance(node, parsetree.ControlLine): 57 | if node.isend: 58 | in_translator_comments = False 59 | continue 60 | code = node.text 61 | elif isinstance(node, parsetree.Code): 62 | in_translator_comments = False 63 | code = node.code.code 64 | elif isinstance(node, parsetree.Expression): 65 | code = node.code.code 66 | else: 67 | continue 68 | 69 | # Comments 
don't apply unless they immediately preceed the message 70 | if translator_comments and \ 71 | translator_comments[-1][0] < node.lineno - 1: 72 | translator_comments = [] 73 | 74 | translator_strings = [ 75 | comment[1] for comment in translator_comments] 76 | 77 | if isinstance(code, compat.text_type): 78 | code = code.encode('ascii', 'backslashreplace') 79 | 80 | used_translator_comments = False 81 | code = compat.byte_buffer(code) 82 | 83 | for message in self.process_python( 84 | code, node.lineno, translator_strings): 85 | yield message 86 | used_translator_comments = True 87 | 88 | if used_translator_comments: 89 | translator_comments = [] 90 | in_translator_comments = False 91 | 92 | if child_nodes: 93 | for extracted in self.extract_nodes(child_nodes): 94 | yield extracted 95 | 96 | @staticmethod 97 | def _split_comment(lineno, comment): 98 | """Return the multiline comment at lineno split into a list of 99 | comment line numbers and the accompanying comment line""" 100 | return [(lineno + index, line) for index, line in 101 | enumerate(comment.splitlines())] 102 | -------------------------------------------------------------------------------- /lib/mako/ext/linguaplugin.py: -------------------------------------------------------------------------------- 1 | import io 2 | from lingua.extractors import Extractor 3 | from lingua.extractors import Message 4 | from lingua.extractors import get_extractor 5 | from mako.ext.extract import MessageExtractor 6 | from mako import compat 7 | 8 | 9 | class LinguaMakoExtractor(Extractor, MessageExtractor): 10 | '''Mako templates''' 11 | extensions = ['.mako'] 12 | default_config = { 13 | 'encoding': 'utf-8', 14 | 'comment-tags': '', 15 | } 16 | 17 | def __call__(self, filename, options, fileobj=None): 18 | self.options = options 19 | self.filename = filename 20 | self.python_extractor = get_extractor('x.py') 21 | if fileobj is None: 22 | fileobj = open(filename, 'rb') 23 | return self.process_file(fileobj) 24 | 25 | def 
process_python(self, code, code_lineno, translator_strings): 26 | source = code.getvalue().strip() 27 | if source.endswith(compat.b(':')): 28 | source += compat.b(' pass') 29 | code = io.BytesIO(source) 30 | for msg in self.python_extractor( 31 | self.filename, self.options, code, code_lineno): 32 | if translator_strings: 33 | msg = Message(msg.msgctxt, msg.msgid, msg.msgid_plural, 34 | msg.flags, 35 | compat.u(' ').join( 36 | translator_strings + [msg.comment]), 37 | msg.tcomment, msg.location) 38 | yield msg 39 | -------------------------------------------------------------------------------- /lib/mako/ext/preprocessors.py: -------------------------------------------------------------------------------- 1 | # ext/preprocessors.py 2 | # Copyright (C) 2006-2015 the Mako authors and contributors 3 | # 4 | # This module is part of Mako and is released under 5 | # the MIT License: http://www.opensource.org/licenses/mit-license.php 6 | 7 | """preprocessing functions, used with the 'preprocessor' 8 | argument on Template, TemplateLookup""" 9 | 10 | import re 11 | 12 | def convert_comments(text): 13 | """preprocess old style comments. 
# ext/pygmentplugin.py
# Copyright (C) 2006-2015 the Mako authors and contributors
#
# This module is part of Mako and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
#
# BUGFIX(review): the regex table below had its HTML-like markup
# (``</%...>``, ``<%doc>...</%doc>``, the tag-close patterns) stripped by
# an HTML-entity/tag pass at some point, leaving patterns like ``(r'()')``
# that match nothing useful; restored to match upstream Mako/Pygments.

from pygments.lexers.web import \
    HtmlLexer, XmlLexer, JavascriptLexer, CssLexer
from pygments.lexers.agile import PythonLexer, Python3Lexer
from pygments.lexer import DelegatingLexer, RegexLexer, bygroups, \
    include, using
from pygments.token import \
    Text, Comment, Operator, Keyword, Name, String, Other
from pygments.formatters.html import HtmlFormatter
from pygments import highlight
from mako import compat


class MakoLexer(RegexLexer):
    """Pygments lexer for Mako template source; highlights template
    directives itself and delegates embedded code to the Python lexer."""

    name = 'Mako'
    aliases = ['mako']
    filenames = ['*.mao']

    tokens = {
        'root': [
            # ``% endif`` style closing control lines
            (r'(\s*)(\%)(\s*end(?:\w+))(\n|\Z)',
             bygroups(Text, Comment.Preproc, Keyword, Other)),
            # ``% if x:`` style control lines; the body is Python
            (r'(\s*)(\%(?!%))([^\n]*)(\n|\Z)',
             bygroups(Text, Comment.Preproc, using(PythonLexer), Other)),
            # ``##`` single-line comments
            (r'(\s*)(##[^\n]*)(\n|\Z)',
             bygroups(Text, Comment.Preproc, Other)),
            # multiline documentation comment
            (r'(?s)<%doc>.*?</%doc>', Comment.Preproc),
            # opening custom tag, e.g. ``<%def name="...">``
            (r'(<%)([\w\.\:]+)',
             bygroups(Comment.Preproc, Name.Builtin), 'tag'),
            # closing custom tag, e.g. ``</%def>``
            (r'(</%)([\w\.\:]+)(>)',
             bygroups(Comment.Preproc, Name.Builtin, Comment.Preproc)),
            (r'<%(?=([\w\.\:]+))', Comment.Preproc, 'ondeftags'),
            # ``<% ... %>`` / ``<%! ... %>`` embedded code blocks
            (r'(<%(?:!?))(.*?)(%>)(?s)',
             bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
            # ``${...}`` expression substitution
            (r'(\$\{)(.*?)(\})',
             bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
            (r'''(?sx)
                (.+?)                 # anything, followed by:
                (?:
                 (?<=\n)(?=%(?!%)|\#\#) |  # an eval or comment line
                 (?=\#\*) |           # multiline comment
                 (?=</?%) |           # a python block
                                      # call start or end
                 (?=\$\{) |           # a substitution
                 (?<=\n)(?=\s*%) |
                                      # - don't consume
                 (\\\n) |             # an escaped newline
                 \Z                   # end of string
                )
            ''', bygroups(Other, Operator)),
            (r'\s+', Text),
        ],
        'ondeftags': [
            (r'<%', Comment.Preproc),
            (r'(?<=<%)(include|inherit|namespace|page)', Name.Builtin),
            include('tag'),
        ],
        'tag': [
            (r'((?:\w+)\s*=)\s*(".*?")',
             bygroups(Name.Attribute, String)),
            (r'/?\s*>', Comment.Preproc, '#pop'),
            (r'\s+', Text),
        ],
        'attr': [
            ('".*?"', String, '#pop'),
            ("'.*?'", String, '#pop'),
            (r'[^\s>]+', String, '#pop'),
        ],
    }


class MakoHtmlLexer(DelegatingLexer):
    """Highlight Mako templates whose payload is HTML."""

    name = 'HTML+Mako'
    aliases = ['html+mako']

    def __init__(self, **options):
        super(MakoHtmlLexer, self).__init__(HtmlLexer, MakoLexer,
                                            **options)


class MakoXmlLexer(DelegatingLexer):
    """Highlight Mako templates whose payload is XML."""

    name = 'XML+Mako'
    aliases = ['xml+mako']

    def __init__(self, **options):
        super(MakoXmlLexer, self).__init__(XmlLexer, MakoLexer,
                                           **options)


class MakoJavascriptLexer(DelegatingLexer):
    """Highlight Mako templates whose payload is JavaScript."""

    name = 'JavaScript+Mako'
    aliases = ['js+mako', 'javascript+mako']

    def __init__(self, **options):
        super(MakoJavascriptLexer, self).__init__(JavascriptLexer,
                                                  MakoLexer, **options)


class MakoCssLexer(DelegatingLexer):
    """Highlight Mako templates whose payload is CSS."""

    name = 'CSS+Mako'
    aliases = ['css+mako']

    def __init__(self, **options):
        super(MakoCssLexer, self).__init__(CssLexer, MakoLexer,
                                           **options)


# Shared formatter instance used by the closures returned below.
pygments_html_formatter = HtmlFormatter(cssclass='syntax-highlighted',
                                        linenos=True)


def syntax_highlight(filename='', language=None):
    """Return a callable rendering source text to highlighted HTML.

    Mako templates (``memory:`` pseudo-filenames, or ``language='mako'``)
    get the Mako lexer; everything else is highlighted as Python, using
    the lexer matching the running interpreter's major version.
    """
    mako_lexer = MakoLexer()
    if compat.py3k:
        python_lexer = Python3Lexer()
    else:
        python_lexer = PythonLexer()
    if filename.startswith('memory:') or language == 'mako':
        return lambda string: highlight(string, mako_lexer,
                                        pygments_html_formatter)
    return lambda string: highlight(string, python_lexer,
                                    pygments_html_formatter)
# ext/turbogears.py
# Copyright (C) 2006-2015 the Mako authors and contributors
#
# This module is part of Mako and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

import inspect
from mako import compat
from mako.lookup import TemplateLookup
from mako.template import Template


class TGPlugin(object):
    """TurboGears compatible Template Plugin."""

    def __init__(self, extra_vars_func=None, options=None, extension='mak'):
        """
        :param extra_vars_func: optional callable returning a dict that is
            merged into the render variables on every :meth:`render` call.
        :param options: TurboGears option dict; ``mako.``-prefixed keys and
            a few whitelisted plain keys are forwarded to TemplateLookup.
        :param extension: filename extension appended when resolving dotted
            template names in :meth:`load_template`.
        """
        self.extra_vars_func = extra_vars_func
        self.extension = extension
        if not options:
            options = {}

        # Pull the options out and initialize the lookup
        lookup_options = {}
        for k, v in options.items():
            if k.startswith('mako.'):
                lookup_options[k[5:]] = v
            elif k in ['directories', 'filesystem_checks', 'module_directory']:
                lookup_options[k] = v
        self.lookup = TemplateLookup(**lookup_options)

        self.tmpl_options = {}
        # transfer lookup args to template args, based on those available
        # in the Template constructor signature.
        # BUGFIX: inspect.getargspec() was removed in Python 3.11; prefer
        # getfullargspec() and keep getargspec() as a fallback for very old
        # interpreters.
        try:
            template_args = inspect.getfullargspec(Template.__init__)[0]
        except AttributeError:
            template_args = inspect.getargspec(Template.__init__)[0]
        for kw in template_args:
            if kw in lookup_options:
                self.tmpl_options[kw] = lookup_options[kw]

    def load_template(self, templatename, template_string=None):
        """Loads a template from a file or a string"""
        if template_string is not None:
            return Template(template_string, **self.tmpl_options)
        # Translate TG dot notation to normal / template path
        if '/' not in templatename:
            templatename = '/' + templatename.replace('.', '/') + '.' +\
                self.extension

        # Lookup template
        return self.lookup.get_template(templatename)

    def render(self, info, format="html", fragment=False, template=None):
        """Render *template* with the variables in *info*.

        *format* and *fragment* are accepted for TurboGears API
        compatibility only and are ignored.
        """
        if isinstance(template, compat.string_types):
            template = self.load_template(template)

        # Load extra vars func if provided
        if self.extra_vars_func:
            info.update(self.extra_vars_func())

        return template.render(**info)


# ---------------------------------------------------------------------------
# lib/mako/filters.py (module head)
# ---------------------------------------------------------------------------
# mako/filters.py
# Copyright (C) 2006-2015 the Mako authors and contributors
#
# This module is part of Mako and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php


import re
import codecs

from mako.compat import quote_plus, unquote_plus, codepoint2name, \
    name2codepoint

from mako import compat

# BUGFIX: these escape tables had been run through an HTML-entity decoder
# at some point, turning every replacement into the character itself and
# making the escape filters no-ops; restored to the upstream entity
# strings.
xml_escapes = {
    '&': '&amp;',
    '>': '&gt;',
    '<': '&lt;',
    '"': '&#34;',   # also &quot; in html-only
    "'": '&#39;'    # also &apos; in html-only
}

# XXX: &#34; is valid in HTML and XML
#      &apos; is not valid HTML, but is valid XML


def legacy_html_escape(s):
    """legacy HTML escape for non-unicode mode."""
    s = s.replace("&", "&amp;")
    s = s.replace(">", "&gt;")
    s = s.replace("<", "&lt;")
    s = s.replace('"', "&#34;")
    s = s.replace("'", "&#39;")
    return s


try:
    import markupsafe
    html_escape = markupsafe.escape
except ImportError:
    html_escape = legacy_html_escape


def xml_escape(string):
    """Escape XML special characters using the table above."""
    return re.sub(r'([&<"\'>])', lambda m: xml_escapes[m.group()], string)


def url_escape(string):
    # convert into a list of octets
    string = string.encode("utf8")
    return quote_plus(string)


def legacy_url_escape(string):
    # convert into a list of octets
    return quote_plus(string)
def url_unescape(string):
    """Reverse of url_escape(): decode percent-escapes and ``+``.

    Under the Python 2 path a non-ASCII result is additionally decoded
    from utf-8; under Python 3 unquote_plus already returns text.
    """
    text = unquote_plus(string)
    if not is_ascii_str(text):
        text = text.decode("utf8")
    return text


def trim(string):
    """Filter stripping leading and trailing whitespace."""
    return string.strip()


class Decode(object):
    """Attribute-style decoder factory: ``decode.utf8(x)`` decodes *x*
    as utf-8; non-bytes input is stringified (then returned as text)."""

    def __getattr__(self, key):
        def decode(x):
            if isinstance(x, compat.text_type):
                return x
            elif not isinstance(x, compat.binary_type):
                return decode(str(x))
            else:
                return compat.text_type(x, encoding=key)
        return decode

decode = Decode()


_ASCII_re = re.compile(r'\A[\x00-\x7f]*\Z')


def is_ascii_str(text):
    """True for a native ``str`` consisting solely of ASCII characters."""
    return isinstance(text, str) and _ASCII_re.match(text)

################################################################


class XMLEntityEscaper(object):
    """Escape/unescape characters to and from XML/HTML named entities."""

    def __init__(self, codepoint2name, name2codepoint):
        # codepoint -> u'&name;' for every named entity
        self.codepoint2entity = dict([(c, compat.text_type('&%s;' % n))
                                      for c, n in codepoint2name.items()])
        self.name2codepoint = name2codepoint

    def escape_entities(self, text):
        """Replace characters with their character entity references.

        Only characters corresponding to a named entity are replaced.
        """
        return compat.text_type(text).translate(self.codepoint2entity)

    def __escape(self, m):
        codepoint = ord(m.group())
        try:
            return self.codepoint2entity[codepoint]
        except (KeyError, IndexError):
            # no named entity: fall back to a numeric character reference
            return '&#x%X;' % codepoint

    __escapable = re.compile(r'["&<>]|[^\x00-\x7f]')

    def escape(self, text):
        """Replace characters with their character references.

        Replace characters by their named entity references.
        Non-ASCII characters, if they do not have a named entity reference,
        are replaced by numerical character references.

        The return value is guaranteed to be ASCII.
        """
        return self.__escapable.sub(self.__escape, compat.text_type(text)
                                    ).encode('ascii')

    # XXX: This regexp will not match all valid XML entity names__.
    # (It punts on details involving involving CombiningChars and Extenders.)
    #
    # .. __: http://www.w3.org/TR/2000/REC-xml-20001006#NT-EntityRef
    __characterrefs = re.compile(r'''& (?:
                                          \#(\d+)
                                        | \#x([\da-f]+)
                                        | ( (?!\d) [:\w] [-.:\w]+ )
                                        ) ;''',
                                 re.X | re.UNICODE)

    def __unescape(self, m):
        dval, hval, name = m.groups()
        if dval:
            codepoint = int(dval)
        elif hval:
            codepoint = int(hval, 16)
        else:
            codepoint = self.name2codepoint.get(name, 0xfffd)
            # U+FFFD = "REPLACEMENT CHARACTER"
        # BUGFIX(cleanup): the original had an ``if codepoint < 128``
        # branch whose two arms were identical ``chr(codepoint)`` calls
        # (a leftover of a Python-2 chr/unichr split); collapsed to a
        # single return with identical behavior.
        return chr(codepoint)

    def unescape(self, text):
        """Unescape character references.

        All character references (both entity references and numerical
        character references) are unescaped.
        """
        return self.__characterrefs.sub(self.__unescape, text)


_html_entities_escaper = XMLEntityEscaper(codepoint2name, name2codepoint)

html_entities_escape = _html_entities_escaper.escape_entities
html_entities_unescape = _html_entities_escaper.unescape
168 | """ 169 | if isinstance(ex, UnicodeEncodeError): 170 | # Handle encoding errors 171 | bad_text = ex.object[ex.start:ex.end] 172 | text = _html_entities_escaper.escape(bad_text) 173 | return (compat.text_type(text), ex.end) 174 | raise ex 175 | 176 | codecs.register_error('htmlentityreplace', htmlentityreplace_errors) 177 | 178 | 179 | # TODO: options to make this dynamic per-compilation will be added in a later 180 | # release 181 | DEFAULT_ESCAPES = { 182 | 'x': 'filters.xml_escape', 183 | 'h': 'filters.html_escape', 184 | 'u': 'filters.url_escape', 185 | 'trim': 'filters.trim', 186 | 'entity': 'filters.html_entities_escape', 187 | 'unicode': 'unicode', 188 | 'decode': 'decode', 189 | 'str': 'str', 190 | 'n': 'n' 191 | } 192 | 193 | if compat.py3k: 194 | DEFAULT_ESCAPES.update({ 195 | 'unicode': 'str' 196 | }) 197 | 198 | NON_UNICODE_ESCAPES = DEFAULT_ESCAPES.copy() 199 | NON_UNICODE_ESCAPES['h'] = 'filters.legacy_html_escape' 200 | NON_UNICODE_ESCAPES['u'] = 'filters.legacy_url_escape' 201 | 202 | -------------------------------------------------------------------------------- /lib/yaml/composer.py: -------------------------------------------------------------------------------- 1 | 2 | __all__ = ['Composer', 'ComposerError'] 3 | 4 | from error import MarkedYAMLError 5 | from events import * 6 | from nodes import * 7 | 8 | class ComposerError(MarkedYAMLError): 9 | pass 10 | 11 | class Composer(object): 12 | 13 | def __init__(self): 14 | self.anchors = {} 15 | 16 | def check_node(self): 17 | # Drop the STREAM-START event. 18 | if self.check_event(StreamStartEvent): 19 | self.get_event() 20 | 21 | # If there are more documents available? 22 | return not self.check_event(StreamEndEvent) 23 | 24 | def get_node(self): 25 | # Get the root node of the next document. 26 | if not self.check_event(StreamEndEvent): 27 | return self.compose_document() 28 | 29 | def get_single_node(self): 30 | # Drop the STREAM-START event. 
class Composer(object):
    """Builds the node graph for each document from parser events.

    Mixed into a Loader alongside the Parser (which supplies
    check_event/get_event/peek_event) and the Resolver (which supplies
    resolve/descend_resolver/ascend_resolver).  ``self.anchors`` maps
    anchor names to already-composed nodes for alias resolution.
    """

    def __init__(self):
        # anchor name -> composed node; reset after each document
        self.anchors = {}

    def check_node(self):
        # Drop the STREAM-START event.
        if self.check_event(StreamStartEvent):
            self.get_event()

        # If there are more documents available?
        return not self.check_event(StreamEndEvent)

    def get_node(self):
        # Get the root node of the next document.
        if not self.check_event(StreamEndEvent):
            return self.compose_document()

    def get_single_node(self):
        # Drop the STREAM-START event.
        self.get_event()

        # Compose a document if the stream is not empty.
        document = None
        if not self.check_event(StreamEndEvent):
            document = self.compose_document()

        # Ensure that the stream contains no more documents.
        if not self.check_event(StreamEndEvent):
            event = self.get_event()
            raise ComposerError("expected a single document in the stream",
                    document.start_mark, "but found another document",
                    event.start_mark)

        # Drop the STREAM-END event.
        self.get_event()

        return document

    def compose_document(self):
        # Drop the DOCUMENT-START event.
        self.get_event()

        # Compose the root node.
        node = self.compose_node(None, None)

        # Drop the DOCUMENT-END event.
        self.get_event()

        # Anchors do not carry over between documents.
        self.anchors = {}
        return node

    def compose_node(self, parent, index):
        if self.check_event(AliasEvent):
            event = self.get_event()
            anchor = event.anchor
            if anchor not in self.anchors:
                raise ComposerError(None, None, "found undefined alias %r"
                        % anchor.encode('utf-8'), event.start_mark)
            # Aliases return the very same node object, preserving identity.
            return self.anchors[anchor]
        event = self.peek_event()
        anchor = event.anchor
        if anchor is not None:
            if anchor in self.anchors:
                raise ComposerError("found duplicate anchor %r; first occurence"
                        % anchor.encode('utf-8'), self.anchors[anchor].start_mark,
                        "second occurence", event.start_mark)
        self.descend_resolver(parent, index)
        if self.check_event(ScalarEvent):
            node = self.compose_scalar_node(anchor)
        elif self.check_event(SequenceStartEvent):
            node = self.compose_sequence_node(anchor)
        elif self.check_event(MappingStartEvent):
            node = self.compose_mapping_node(anchor)
        self.ascend_resolver()
        return node

    def compose_scalar_node(self, anchor):
        event = self.get_event()
        tag = event.tag
        # A missing tag or the non-specific '!' tag means: resolve the
        # tag from the scalar value itself.
        if tag is None or tag == u'!':
            tag = self.resolve(ScalarNode, event.value, event.implicit)
        node = ScalarNode(tag, event.value,
                event.start_mark, event.end_mark, style=event.style)
        if anchor is not None:
            self.anchors[anchor] = node
        return node

    def compose_sequence_node(self, anchor):
        start_event = self.get_event()
        tag = start_event.tag
        if tag is None or tag == u'!':
            tag = self.resolve(SequenceNode, None, start_event.implicit)
        node = SequenceNode(tag, [],
                start_event.start_mark, None,
                flow_style=start_event.flow_style)
        # Register the anchor before composing children so that aliases
        # may refer to the (possibly recursive) node under construction.
        if anchor is not None:
            self.anchors[anchor] = node
        index = 0
        while not self.check_event(SequenceEndEvent):
            node.value.append(self.compose_node(node, index))
            index += 1
        end_event = self.get_event()
        node.end_mark = end_event.end_mark
        return node

    def compose_mapping_node(self, anchor):
        start_event = self.get_event()
        tag = start_event.tag
        if tag is None or tag == u'!':
            tag = self.resolve(MappingNode, None, start_event.implicit)
        node = MappingNode(tag, [],
                start_event.start_mark, None,
                flow_style=start_event.flow_style)
        if anchor is not None:
            self.anchors[anchor] = node
        # Mapping values are kept as a list of (key, value) pairs rather
        # than a dict, since keys may be unhashable nodes at this stage;
        # duplicate-key detection is left commented out upstream.
        while not self.check_event(MappingEndEvent):
            #key_event = self.peek_event()
            item_key = self.compose_node(node, None)
            #if item_key in node.value:
            #    raise ComposerError("while composing a mapping", start_event.start_mark,
            #            "found duplicate key", key_event.start_mark)
            item_value = self.compose_node(node, item_key)
            #node.value[item_key] = item_value
            node.value.append((item_key, item_value))
        end_event = self.get_event()
        node.end_mark = end_event.end_mark
        return node
__all__ = ['CBaseLoader', 'CSafeLoader', 'CLoader',
        'CBaseDumper', 'CSafeDumper', 'CDumper']

# Bindings that replace the pure-Python scanner/parser and emitter stages
# with the libyaml C extension (_yaml); the constructor, representer and
# resolver stages remain the Python implementations.
from _yaml import CParser, CEmitter

from constructor import *

from serializer import *
from representer import *

from resolver import *

class CBaseLoader(CParser, BaseConstructor, BaseResolver):
    # C-accelerated loader with no implicit typing beyond the basics.

    def __init__(self, stream):
        CParser.__init__(self, stream)
        BaseConstructor.__init__(self)
        BaseResolver.__init__(self)

class CSafeLoader(CParser, SafeConstructor, Resolver):
    # C-accelerated loader restricted to standard YAML types (safe).

    def __init__(self, stream):
        CParser.__init__(self, stream)
        SafeConstructor.__init__(self)
        Resolver.__init__(self)

class CLoader(CParser, Constructor, Resolver):
    # C-accelerated loader with full (unsafe) Python-object construction.

    def __init__(self, stream):
        CParser.__init__(self, stream)
        Constructor.__init__(self)
        Resolver.__init__(self)

class CBaseDumper(CEmitter, BaseRepresenter, BaseResolver):
    # C-accelerated dumper counterpart of CBaseLoader.  CEmitter takes
    # over both the serializer and emitter roles, so all stream options
    # are forwarded to it.

    def __init__(self, stream,
            default_style=None, default_flow_style=None,
            canonical=None, indent=None, width=None,
            allow_unicode=None, line_break=None,
            encoding=None, explicit_start=None, explicit_end=None,
            version=None, tags=None):
        CEmitter.__init__(self, stream, canonical=canonical,
                indent=indent, width=width, encoding=encoding,
                allow_unicode=allow_unicode, line_break=line_break,
                explicit_start=explicit_start, explicit_end=explicit_end,
                version=version, tags=tags)
        Representer.__init__(self, default_style=default_style,
                default_flow_style=default_flow_style)
        Resolver.__init__(self)

class CSafeDumper(CEmitter, SafeRepresenter, Resolver):
    # C-accelerated dumper restricted to standard YAML types (safe).

    def __init__(self, stream,
            default_style=None, default_flow_style=None,
            canonical=None, indent=None, width=None,
            allow_unicode=None, line_break=None,
            encoding=None, explicit_start=None, explicit_end=None,
            version=None, tags=None):
        CEmitter.__init__(self, stream, canonical=canonical,
                indent=indent, width=width, encoding=encoding,
                allow_unicode=allow_unicode, line_break=line_break,
                explicit_start=explicit_start, explicit_end=explicit_end,
                version=version, tags=tags)
        SafeRepresenter.__init__(self, default_style=default_style,
                default_flow_style=default_flow_style)
        Resolver.__init__(self)

class CDumper(CEmitter, Serializer, Representer, Resolver):
    # C-accelerated dumper with full Python-object representation.

    def __init__(self, stream,
            default_style=None, default_flow_style=None,
            canonical=None, indent=None, width=None,
            allow_unicode=None, line_break=None,
            encoding=None, explicit_start=None, explicit_end=None,
            version=None, tags=None):
        CEmitter.__init__(self, stream, canonical=canonical,
                indent=indent, width=width, encoding=encoding,
                allow_unicode=allow_unicode, line_break=line_break,
                explicit_start=explicit_start, explicit_end=explicit_end,
                version=version, tags=tags)
        Representer.__init__(self, default_style=default_style,
                default_flow_style=default_flow_style)
        Resolver.__init__(self)
explicit_end=explicit_end, 22 | version=version, tags=tags) 23 | Representer.__init__(self, default_style=default_style, 24 | default_flow_style=default_flow_style) 25 | Resolver.__init__(self) 26 | 27 | class SafeDumper(Emitter, Serializer, SafeRepresenter, Resolver): 28 | 29 | def __init__(self, stream, 30 | default_style=None, default_flow_style=None, 31 | canonical=None, indent=None, width=None, 32 | allow_unicode=None, line_break=None, 33 | encoding=None, explicit_start=None, explicit_end=None, 34 | version=None, tags=None): 35 | Emitter.__init__(self, stream, canonical=canonical, 36 | indent=indent, width=width, 37 | allow_unicode=allow_unicode, line_break=line_break) 38 | Serializer.__init__(self, encoding=encoding, 39 | explicit_start=explicit_start, explicit_end=explicit_end, 40 | version=version, tags=tags) 41 | SafeRepresenter.__init__(self, default_style=default_style, 42 | default_flow_style=default_flow_style) 43 | Resolver.__init__(self) 44 | 45 | class Dumper(Emitter, Serializer, Representer, Resolver): 46 | 47 | def __init__(self, stream, 48 | default_style=None, default_flow_style=None, 49 | canonical=None, indent=None, width=None, 50 | allow_unicode=None, line_break=None, 51 | encoding=None, explicit_start=None, explicit_end=None, 52 | version=None, tags=None): 53 | Emitter.__init__(self, stream, canonical=canonical, 54 | indent=indent, width=width, 55 | allow_unicode=allow_unicode, line_break=line_break) 56 | Serializer.__init__(self, encoding=encoding, 57 | explicit_start=explicit_start, explicit_end=explicit_end, 58 | version=version, tags=tags) 59 | Representer.__init__(self, default_style=default_style, 60 | default_flow_style=default_flow_style) 61 | Resolver.__init__(self) 62 | 63 | -------------------------------------------------------------------------------- /lib/yaml/error.py: -------------------------------------------------------------------------------- 1 | 2 | __all__ = ['Mark', 'YAMLError', 'MarkedYAMLError'] 3 | 4 | class 
class Mark(object):
    """A position (name, index, line, column) inside a YAML source
    buffer, able to render a caret-annotated snippet of the text."""

    def __init__(self, name, index, line, column, buffer, pointer):
        self.name = name
        self.index = index
        self.line = line
        self.column = column
        self.buffer = buffer
        self.pointer = pointer

    def get_snippet(self, indent=4, max_length=75):
        """Return an indented excerpt around the mark with a ``^`` caret
        on the following line, or None when no buffer is available."""
        if self.buffer is None:
            return None
        limit = max_length / 2 - 1
        breaks = u'\0\r\n\x85\u2028\u2029'
        head = ''
        tail = ''
        # Walk left to the start of the line, truncating with ' ... '
        # once half the budget is used.
        begin = self.pointer
        while begin > 0 and self.buffer[begin - 1] not in breaks:
            begin -= 1
            if self.pointer - begin > limit:
                head = ' ... '
                begin += 5
                break
        # Walk right to the end of the line, same truncation rule.
        end = self.pointer
        while end < len(self.buffer) and self.buffer[end] not in breaks:
            end += 1
            if end - self.pointer > limit:
                tail = ' ... '
                end -= 5
                break
        snippet = self.buffer[begin:end].encode('utf-8')
        caret_pad = ' ' * (indent + self.pointer - begin + len(head))
        return ' ' * indent + head + snippet + tail + '\n' + caret_pad + '^'

    def __str__(self):
        where = ' in "%s", line %d, column %d' \
            % (self.name, self.line + 1, self.column + 1)
        snippet = self.get_snippet()
        if snippet is not None:
            where += ":\n" + snippet
        return where


class YAMLError(Exception):
    """Base class for every error raised by this YAML package."""
    pass


class MarkedYAMLError(YAMLError):
    """YAMLError carrying context/problem descriptions plus the marks
    pointing at where each arose in the source."""

    def __init__(self, context=None, context_mark=None,
            problem=None, problem_mark=None, note=None):
        self.context = context
        self.context_mark = context_mark
        self.problem = problem
        self.problem_mark = problem_mark
        self.note = note

    def __str__(self):
        parts = []
        if self.context is not None:
            parts.append(self.context)
        # The context mark is shown only when it adds information beyond
        # the problem mark (different file or position).
        redundant_mark = (self.problem is not None
                and self.problem_mark is not None
                and self.context_mark is not None
                and self.context_mark.name == self.problem_mark.name
                and self.context_mark.line == self.problem_mark.line
                and self.context_mark.column == self.problem_mark.column)
        if self.context_mark is not None and not redundant_mark:
            parts.append(str(self.context_mark))
        if self.problem is not None:
            parts.append(self.problem)
        if self.problem_mark is not None:
            parts.append(str(self.problem_mark))
        if self.note is not None:
            parts.append(self.note)
        return '\n'.join(parts)
35 | 36 | class StreamStartEvent(Event): 37 | def __init__(self, start_mark=None, end_mark=None, encoding=None): 38 | self.start_mark = start_mark 39 | self.end_mark = end_mark 40 | self.encoding = encoding 41 | 42 | class StreamEndEvent(Event): 43 | pass 44 | 45 | class DocumentStartEvent(Event): 46 | def __init__(self, start_mark=None, end_mark=None, 47 | explicit=None, version=None, tags=None): 48 | self.start_mark = start_mark 49 | self.end_mark = end_mark 50 | self.explicit = explicit 51 | self.version = version 52 | self.tags = tags 53 | 54 | class DocumentEndEvent(Event): 55 | def __init__(self, start_mark=None, end_mark=None, 56 | explicit=None): 57 | self.start_mark = start_mark 58 | self.end_mark = end_mark 59 | self.explicit = explicit 60 | 61 | class AliasEvent(NodeEvent): 62 | pass 63 | 64 | class ScalarEvent(NodeEvent): 65 | def __init__(self, anchor, tag, implicit, value, 66 | start_mark=None, end_mark=None, style=None): 67 | self.anchor = anchor 68 | self.tag = tag 69 | self.implicit = implicit 70 | self.value = value 71 | self.start_mark = start_mark 72 | self.end_mark = end_mark 73 | self.style = style 74 | 75 | class SequenceStartEvent(CollectionStartEvent): 76 | pass 77 | 78 | class SequenceEndEvent(CollectionEndEvent): 79 | pass 80 | 81 | class MappingStartEvent(CollectionStartEvent): 82 | pass 83 | 84 | class MappingEndEvent(CollectionEndEvent): 85 | pass 86 | 87 | -------------------------------------------------------------------------------- /lib/yaml/loader.py: -------------------------------------------------------------------------------- 1 | 2 | __all__ = ['BaseLoader', 'SafeLoader', 'Loader'] 3 | 4 | from reader import * 5 | from scanner import * 6 | from parser import * 7 | from composer import * 8 | from constructor import * 9 | from resolver import * 10 | 11 | class BaseLoader(Reader, Scanner, Parser, Composer, BaseConstructor, BaseResolver): 12 | 13 | def __init__(self, stream): 14 | Reader.__init__(self, stream) 15 | 
Scanner.__init__(self) 16 | Parser.__init__(self) 17 | Composer.__init__(self) 18 | BaseConstructor.__init__(self) 19 | BaseResolver.__init__(self) 20 | 21 | class SafeLoader(Reader, Scanner, Parser, Composer, SafeConstructor, Resolver): 22 | 23 | def __init__(self, stream): 24 | Reader.__init__(self, stream) 25 | Scanner.__init__(self) 26 | Parser.__init__(self) 27 | Composer.__init__(self) 28 | SafeConstructor.__init__(self) 29 | Resolver.__init__(self) 30 | 31 | class Loader(Reader, Scanner, Parser, Composer, Constructor, Resolver): 32 | 33 | def __init__(self, stream): 34 | Reader.__init__(self, stream) 35 | Scanner.__init__(self) 36 | Parser.__init__(self) 37 | Composer.__init__(self) 38 | Constructor.__init__(self) 39 | Resolver.__init__(self) 40 | 41 | -------------------------------------------------------------------------------- /lib/yaml/nodes.py: -------------------------------------------------------------------------------- 1 | 2 | class Node(object): 3 | def __init__(self, tag, value, start_mark, end_mark): 4 | self.tag = tag 5 | self.value = value 6 | self.start_mark = start_mark 7 | self.end_mark = end_mark 8 | def __repr__(self): 9 | value = self.value 10 | #if isinstance(value, list): 11 | # if len(value) == 0: 12 | # value = '' 13 | # elif len(value) == 1: 14 | # value = '<1 item>' 15 | # else: 16 | # value = '<%d items>' % len(value) 17 | #else: 18 | # if len(value) > 75: 19 | # value = repr(value[:70]+u' ... 
') 20 | # else: 21 | # value = repr(value) 22 | value = repr(value) 23 | return '%s(tag=%r, value=%s)' % (self.__class__.__name__, self.tag, value) 24 | 25 | class ScalarNode(Node): 26 | id = 'scalar' 27 | def __init__(self, tag, value, 28 | start_mark=None, end_mark=None, style=None): 29 | self.tag = tag 30 | self.value = value 31 | self.start_mark = start_mark 32 | self.end_mark = end_mark 33 | self.style = style 34 | 35 | class CollectionNode(Node): 36 | def __init__(self, tag, value, 37 | start_mark=None, end_mark=None, flow_style=None): 38 | self.tag = tag 39 | self.value = value 40 | self.start_mark = start_mark 41 | self.end_mark = end_mark 42 | self.flow_style = flow_style 43 | 44 | class SequenceNode(CollectionNode): 45 | id = 'sequence' 46 | 47 | class MappingNode(CollectionNode): 48 | id = 'mapping' 49 | 50 | -------------------------------------------------------------------------------- /lib/yaml/serializer.py: -------------------------------------------------------------------------------- 1 | 2 | __all__ = ['Serializer', 'SerializerError'] 3 | 4 | from error import YAMLError 5 | from events import * 6 | from nodes import * 7 | 8 | class SerializerError(YAMLError): 9 | pass 10 | 11 | class Serializer(object): 12 | 13 | ANCHOR_TEMPLATE = u'id%03d' 14 | 15 | def __init__(self, encoding=None, 16 | explicit_start=None, explicit_end=None, version=None, tags=None): 17 | self.use_encoding = encoding 18 | self.use_explicit_start = explicit_start 19 | self.use_explicit_end = explicit_end 20 | self.use_version = version 21 | self.use_tags = tags 22 | self.serialized_nodes = {} 23 | self.anchors = {} 24 | self.last_anchor_id = 0 25 | self.closed = None 26 | 27 | def open(self): 28 | if self.closed is None: 29 | self.emit(StreamStartEvent(encoding=self.use_encoding)) 30 | self.closed = False 31 | elif self.closed: 32 | raise SerializerError("serializer is closed") 33 | else: 34 | raise SerializerError("serializer is already opened") 35 | 36 | def close(self): 37 
class Serializer(object):
    """Turns a node graph back into an event stream for the Emitter.

    Mixed into a Dumper alongside the Emitter (supplies ``emit``) and
    the Resolver (supplies ``resolve``/``descend_resolver``/
    ``ascend_resolver``).  Nodes referenced more than once are emitted
    once with a generated anchor and afterwards as aliases.
    """

    # Pattern for auto-generated anchor names ("id001", "id002", ...).
    ANCHOR_TEMPLATE = u'id%03d'

    def __init__(self, encoding=None,
            explicit_start=None, explicit_end=None, version=None, tags=None):
        self.use_encoding = encoding
        self.use_explicit_start = explicit_start
        self.use_explicit_end = explicit_end
        self.use_version = version
        self.use_tags = tags
        self.serialized_nodes = {}
        self.anchors = {}
        self.last_anchor_id = 0
        # closed is tri-state: None = never opened, False = open,
        # True = closed.
        self.closed = None

    def open(self):
        # Emit STREAM-START exactly once; reopening is an error.
        if self.closed is None:
            self.emit(StreamStartEvent(encoding=self.use_encoding))
            self.closed = False
        elif self.closed:
            raise SerializerError("serializer is closed")
        else:
            raise SerializerError("serializer is already opened")

    def close(self):
        # Emit STREAM-END once; idempotent after the first close.
        if self.closed is None:
            raise SerializerError("serializer is not opened")
        elif not self.closed:
            self.emit(StreamEndEvent())
            self.closed = True

    #def __del__(self):
    #    self.close()

    def serialize(self, node):
        """Emit one document for *node*; the serializer must be open."""
        if self.closed is None:
            raise SerializerError("serializer is not opened")
        elif self.closed:
            raise SerializerError("serializer is closed")
        self.emit(DocumentStartEvent(explicit=self.use_explicit_start,
            version=self.use_version, tags=self.use_tags))
        # First pass assigns anchors to shared nodes, second pass emits.
        self.anchor_node(node)
        self.serialize_node(node, None, None)
        self.emit(DocumentEndEvent(explicit=self.use_explicit_end))
        # Per-document state is reset between documents.
        self.serialized_nodes = {}
        self.anchors = {}
        self.last_anchor_id = 0

    def anchor_node(self, node):
        # First visit records the node with no anchor (None); a second
        # visit upgrades it to a generated anchor name.
        if node in self.anchors:
            if self.anchors[node] is None:
                self.anchors[node] = self.generate_anchor(node)
        else:
            self.anchors[node] = None
            if isinstance(node, SequenceNode):
                for item in node.value:
                    self.anchor_node(item)
            elif isinstance(node, MappingNode):
                for key, value in node.value:
                    self.anchor_node(key)
                    self.anchor_node(value)

    def generate_anchor(self, node):
        # Sequential anchors; *node* is unused but kept for subclasses.
        self.last_anchor_id += 1
        return self.ANCHOR_TEMPLATE % self.last_anchor_id

    def serialize_node(self, node, parent, index):
        alias = self.anchors[node]
        if node in self.serialized_nodes:
            # Already emitted once: refer back to it by alias.
            self.emit(AliasEvent(alias))
        else:
            self.serialized_nodes[node] = True
            self.descend_resolver(parent, index)
            if isinstance(node, ScalarNode):
                # implicit is a pair: (tag matches plain-style resolution,
                # tag matches non-plain/default resolution).
                detected_tag = self.resolve(ScalarNode, node.value, (True, False))
                default_tag = self.resolve(ScalarNode, node.value, (False, True))
                implicit = (node.tag == detected_tag), (node.tag == default_tag)
                self.emit(ScalarEvent(alias, node.tag, implicit, node.value,
                    style=node.style))
            elif isinstance(node, SequenceNode):
                implicit = (node.tag
                            == self.resolve(SequenceNode, node.value, True))
                self.emit(SequenceStartEvent(alias, node.tag, implicit,
                    flow_style=node.flow_style))
                index = 0
                for item in node.value:
                    self.serialize_node(item, node, index)
                    index += 1
                self.emit(SequenceEndEvent())
            elif isinstance(node, MappingNode):
                implicit = (node.tag
                            == self.resolve(MappingNode, node.value, True))
                self.emit(MappingStartEvent(alias, node.tag, implicit,
                    flow_style=node.flow_style))
                for key, value in node.value:
                    self.serialize_node(key, node, None)
                    self.serialize_node(value, node, key)
                self.emit(MappingEndEvent())
            self.ascend_resolver()
BlockSequenceStartToken(Token): 43 | id = '' 44 | 45 | class BlockMappingStartToken(Token): 46 | id = '' 47 | 48 | class BlockEndToken(Token): 49 | id = '' 50 | 51 | class FlowSequenceStartToken(Token): 52 | id = '[' 53 | 54 | class FlowMappingStartToken(Token): 55 | id = '{' 56 | 57 | class FlowSequenceEndToken(Token): 58 | id = ']' 59 | 60 | class FlowMappingEndToken(Token): 61 | id = '}' 62 | 63 | class KeyToken(Token): 64 | id = '?' 65 | 66 | class ValueToken(Token): 67 | id = ':' 68 | 69 | class BlockEntryToken(Token): 70 | id = '-' 71 | 72 | class FlowEntryToken(Token): 73 | id = ',' 74 | 75 | class AliasToken(Token): 76 | id = '' 77 | def __init__(self, value, start_mark, end_mark): 78 | self.value = value 79 | self.start_mark = start_mark 80 | self.end_mark = end_mark 81 | 82 | class AnchorToken(Token): 83 | id = '' 84 | def __init__(self, value, start_mark, end_mark): 85 | self.value = value 86 | self.start_mark = start_mark 87 | self.end_mark = end_mark 88 | 89 | class TagToken(Token): 90 | id = '' 91 | def __init__(self, value, start_mark, end_mark): 92 | self.value = value 93 | self.start_mark = start_mark 94 | self.end_mark = end_mark 95 | 96 | class ScalarToken(Token): 97 | id = '' 98 | def __init__(self, value, plain, start_mark, end_mark, style=None): 99 | self.value = value 100 | self.plain = plain 101 | self.start_mark = start_mark 102 | self.end_mark = end_mark 103 | self.style = style 104 | 105 | -------------------------------------------------------------------------------- /lib/yaml3/composer.py: -------------------------------------------------------------------------------- 1 | 2 | __all__ = ['Composer', 'ComposerError'] 3 | 4 | from .error import MarkedYAMLError 5 | from .events import * 6 | from .nodes import * 7 | 8 | class ComposerError(MarkedYAMLError): 9 | pass 10 | 11 | class Composer: 12 | 13 | def __init__(self): 14 | self.anchors = {} 15 | 16 | def check_node(self): 17 | # Drop the STREAM-START event. 
18 | if self.check_event(StreamStartEvent): 19 | self.get_event() 20 | 21 | # If there are more documents available? 22 | return not self.check_event(StreamEndEvent) 23 | 24 | def get_node(self): 25 | # Get the root node of the next document. 26 | if not self.check_event(StreamEndEvent): 27 | return self.compose_document() 28 | 29 | def get_single_node(self): 30 | # Drop the STREAM-START event. 31 | self.get_event() 32 | 33 | # Compose a document if the stream is not empty. 34 | document = None 35 | if not self.check_event(StreamEndEvent): 36 | document = self.compose_document() 37 | 38 | # Ensure that the stream contains no more documents. 39 | if not self.check_event(StreamEndEvent): 40 | event = self.get_event() 41 | raise ComposerError("expected a single document in the stream", 42 | document.start_mark, "but found another document", 43 | event.start_mark) 44 | 45 | # Drop the STREAM-END event. 46 | self.get_event() 47 | 48 | return document 49 | 50 | def compose_document(self): 51 | # Drop the DOCUMENT-START event. 52 | self.get_event() 53 | 54 | # Compose the root node. 55 | node = self.compose_node(None, None) 56 | 57 | # Drop the DOCUMENT-END event. 
58 | self.get_event() 59 | 60 | self.anchors = {} 61 | return node 62 | 63 | def compose_node(self, parent, index): 64 | if self.check_event(AliasEvent): 65 | event = self.get_event() 66 | anchor = event.anchor 67 | if anchor not in self.anchors: 68 | raise ComposerError(None, None, "found undefined alias %r" 69 | % anchor, event.start_mark) 70 | return self.anchors[anchor] 71 | event = self.peek_event() 72 | anchor = event.anchor 73 | if anchor is not None: 74 | if anchor in self.anchors: 75 | raise ComposerError("found duplicate anchor %r; first occurence" 76 | % anchor, self.anchors[anchor].start_mark, 77 | "second occurence", event.start_mark) 78 | self.descend_resolver(parent, index) 79 | if self.check_event(ScalarEvent): 80 | node = self.compose_scalar_node(anchor) 81 | elif self.check_event(SequenceStartEvent): 82 | node = self.compose_sequence_node(anchor) 83 | elif self.check_event(MappingStartEvent): 84 | node = self.compose_mapping_node(anchor) 85 | self.ascend_resolver() 86 | return node 87 | 88 | def compose_scalar_node(self, anchor): 89 | event = self.get_event() 90 | tag = event.tag 91 | if tag is None or tag == '!': 92 | tag = self.resolve(ScalarNode, event.value, event.implicit) 93 | node = ScalarNode(tag, event.value, 94 | event.start_mark, event.end_mark, style=event.style) 95 | if anchor is not None: 96 | self.anchors[anchor] = node 97 | return node 98 | 99 | def compose_sequence_node(self, anchor): 100 | start_event = self.get_event() 101 | tag = start_event.tag 102 | if tag is None or tag == '!': 103 | tag = self.resolve(SequenceNode, None, start_event.implicit) 104 | node = SequenceNode(tag, [], 105 | start_event.start_mark, None, 106 | flow_style=start_event.flow_style) 107 | if anchor is not None: 108 | self.anchors[anchor] = node 109 | index = 0 110 | while not self.check_event(SequenceEndEvent): 111 | node.value.append(self.compose_node(node, index)) 112 | index += 1 113 | end_event = self.get_event() 114 | node.end_mark = 
end_event.end_mark 115 | return node 116 | 117 | def compose_mapping_node(self, anchor): 118 | start_event = self.get_event() 119 | tag = start_event.tag 120 | if tag is None or tag == '!': 121 | tag = self.resolve(MappingNode, None, start_event.implicit) 122 | node = MappingNode(tag, [], 123 | start_event.start_mark, None, 124 | flow_style=start_event.flow_style) 125 | if anchor is not None: 126 | self.anchors[anchor] = node 127 | while not self.check_event(MappingEndEvent): 128 | #key_event = self.peek_event() 129 | item_key = self.compose_node(node, None) 130 | #if item_key in node.value: 131 | # raise ComposerError("while composing a mapping", start_event.start_mark, 132 | # "found duplicate key", key_event.start_mark) 133 | item_value = self.compose_node(node, item_key) 134 | #node.value[item_key] = item_value 135 | node.value.append((item_key, item_value)) 136 | end_event = self.get_event() 137 | node.end_mark = end_event.end_mark 138 | return node 139 | 140 | -------------------------------------------------------------------------------- /lib/yaml3/cyaml.py: -------------------------------------------------------------------------------- 1 | 2 | __all__ = ['CBaseLoader', 'CSafeLoader', 'CLoader', 3 | 'CBaseDumper', 'CSafeDumper', 'CDumper'] 4 | 5 | from _yaml import CParser, CEmitter 6 | 7 | from .constructor import * 8 | 9 | from .serializer import * 10 | from .representer import * 11 | 12 | from .resolver import * 13 | 14 | class CBaseLoader(CParser, BaseConstructor, BaseResolver): 15 | 16 | def __init__(self, stream): 17 | CParser.__init__(self, stream) 18 | BaseConstructor.__init__(self) 19 | BaseResolver.__init__(self) 20 | 21 | class CSafeLoader(CParser, SafeConstructor, Resolver): 22 | 23 | def __init__(self, stream): 24 | CParser.__init__(self, stream) 25 | SafeConstructor.__init__(self) 26 | Resolver.__init__(self) 27 | 28 | class CLoader(CParser, Constructor, Resolver): 29 | 30 | def __init__(self, stream): 31 | CParser.__init__(self, stream) 32 | 
Constructor.__init__(self) 33 | Resolver.__init__(self) 34 | 35 | class CBaseDumper(CEmitter, BaseRepresenter, BaseResolver): 36 | 37 | def __init__(self, stream, 38 | default_style=None, default_flow_style=None, 39 | canonical=None, indent=None, width=None, 40 | allow_unicode=None, line_break=None, 41 | encoding=None, explicit_start=None, explicit_end=None, 42 | version=None, tags=None): 43 | CEmitter.__init__(self, stream, canonical=canonical, 44 | indent=indent, width=width, encoding=encoding, 45 | allow_unicode=allow_unicode, line_break=line_break, 46 | explicit_start=explicit_start, explicit_end=explicit_end, 47 | version=version, tags=tags) 48 | Representer.__init__(self, default_style=default_style, 49 | default_flow_style=default_flow_style) 50 | Resolver.__init__(self) 51 | 52 | class CSafeDumper(CEmitter, SafeRepresenter, Resolver): 53 | 54 | def __init__(self, stream, 55 | default_style=None, default_flow_style=None, 56 | canonical=None, indent=None, width=None, 57 | allow_unicode=None, line_break=None, 58 | encoding=None, explicit_start=None, explicit_end=None, 59 | version=None, tags=None): 60 | CEmitter.__init__(self, stream, canonical=canonical, 61 | indent=indent, width=width, encoding=encoding, 62 | allow_unicode=allow_unicode, line_break=line_break, 63 | explicit_start=explicit_start, explicit_end=explicit_end, 64 | version=version, tags=tags) 65 | SafeRepresenter.__init__(self, default_style=default_style, 66 | default_flow_style=default_flow_style) 67 | Resolver.__init__(self) 68 | 69 | class CDumper(CEmitter, Serializer, Representer, Resolver): 70 | 71 | def __init__(self, stream, 72 | default_style=None, default_flow_style=None, 73 | canonical=None, indent=None, width=None, 74 | allow_unicode=None, line_break=None, 75 | encoding=None, explicit_start=None, explicit_end=None, 76 | version=None, tags=None): 77 | CEmitter.__init__(self, stream, canonical=canonical, 78 | indent=indent, width=width, encoding=encoding, 79 | 
allow_unicode=allow_unicode, line_break=line_break, 80 | explicit_start=explicit_start, explicit_end=explicit_end, 81 | version=version, tags=tags) 82 | Representer.__init__(self, default_style=default_style, 83 | default_flow_style=default_flow_style) 84 | Resolver.__init__(self) 85 | 86 | -------------------------------------------------------------------------------- /lib/yaml3/dumper.py: -------------------------------------------------------------------------------- 1 | 2 | __all__ = ['BaseDumper', 'SafeDumper', 'Dumper'] 3 | 4 | from .emitter import * 5 | from .serializer import * 6 | from .representer import * 7 | from .resolver import * 8 | 9 | class BaseDumper(Emitter, Serializer, BaseRepresenter, BaseResolver): 10 | 11 | def __init__(self, stream, 12 | default_style=None, default_flow_style=None, 13 | canonical=None, indent=None, width=None, 14 | allow_unicode=None, line_break=None, 15 | encoding=None, explicit_start=None, explicit_end=None, 16 | version=None, tags=None): 17 | Emitter.__init__(self, stream, canonical=canonical, 18 | indent=indent, width=width, 19 | allow_unicode=allow_unicode, line_break=line_break) 20 | Serializer.__init__(self, encoding=encoding, 21 | explicit_start=explicit_start, explicit_end=explicit_end, 22 | version=version, tags=tags) 23 | Representer.__init__(self, default_style=default_style, 24 | default_flow_style=default_flow_style) 25 | Resolver.__init__(self) 26 | 27 | class SafeDumper(Emitter, Serializer, SafeRepresenter, Resolver): 28 | 29 | def __init__(self, stream, 30 | default_style=None, default_flow_style=None, 31 | canonical=None, indent=None, width=None, 32 | allow_unicode=None, line_break=None, 33 | encoding=None, explicit_start=None, explicit_end=None, 34 | version=None, tags=None): 35 | Emitter.__init__(self, stream, canonical=canonical, 36 | indent=indent, width=width, 37 | allow_unicode=allow_unicode, line_break=line_break) 38 | Serializer.__init__(self, encoding=encoding, 39 | explicit_start=explicit_start, 
explicit_end=explicit_end, 40 | version=version, tags=tags) 41 | SafeRepresenter.__init__(self, default_style=default_style, 42 | default_flow_style=default_flow_style) 43 | Resolver.__init__(self) 44 | 45 | class Dumper(Emitter, Serializer, Representer, Resolver): 46 | 47 | def __init__(self, stream, 48 | default_style=None, default_flow_style=None, 49 | canonical=None, indent=None, width=None, 50 | allow_unicode=None, line_break=None, 51 | encoding=None, explicit_start=None, explicit_end=None, 52 | version=None, tags=None): 53 | Emitter.__init__(self, stream, canonical=canonical, 54 | indent=indent, width=width, 55 | allow_unicode=allow_unicode, line_break=line_break) 56 | Serializer.__init__(self, encoding=encoding, 57 | explicit_start=explicit_start, explicit_end=explicit_end, 58 | version=version, tags=tags) 59 | Representer.__init__(self, default_style=default_style, 60 | default_flow_style=default_flow_style) 61 | Resolver.__init__(self) 62 | 63 | -------------------------------------------------------------------------------- /lib/yaml3/error.py: -------------------------------------------------------------------------------- 1 | 2 | __all__ = ['Mark', 'YAMLError', 'MarkedYAMLError'] 3 | 4 | class Mark: 5 | 6 | def __init__(self, name, index, line, column, buffer, pointer): 7 | self.name = name 8 | self.index = index 9 | self.line = line 10 | self.column = column 11 | self.buffer = buffer 12 | self.pointer = pointer 13 | 14 | def get_snippet(self, indent=4, max_length=75): 15 | if self.buffer is None: 16 | return None 17 | head = '' 18 | start = self.pointer 19 | while start > 0 and self.buffer[start-1] not in '\0\r\n\x85\u2028\u2029': 20 | start -= 1 21 | if self.pointer-start > max_length/2-1: 22 | head = ' ... ' 23 | start += 5 24 | break 25 | tail = '' 26 | end = self.pointer 27 | while end < len(self.buffer) and self.buffer[end] not in '\0\r\n\x85\u2028\u2029': 28 | end += 1 29 | if end-self.pointer > max_length/2-1: 30 | tail = ' ... 
' 31 | end -= 5 32 | break 33 | snippet = self.buffer[start:end] 34 | return ' '*indent + head + snippet + tail + '\n' \ 35 | + ' '*(indent+self.pointer-start+len(head)) + '^' 36 | 37 | def __str__(self): 38 | snippet = self.get_snippet() 39 | where = " in \"%s\", line %d, column %d" \ 40 | % (self.name, self.line+1, self.column+1) 41 | if snippet is not None: 42 | where += ":\n"+snippet 43 | return where 44 | 45 | class YAMLError(Exception): 46 | pass 47 | 48 | class MarkedYAMLError(YAMLError): 49 | 50 | def __init__(self, context=None, context_mark=None, 51 | problem=None, problem_mark=None, note=None): 52 | self.context = context 53 | self.context_mark = context_mark 54 | self.problem = problem 55 | self.problem_mark = problem_mark 56 | self.note = note 57 | 58 | def __str__(self): 59 | lines = [] 60 | if self.context is not None: 61 | lines.append(self.context) 62 | if self.context_mark is not None \ 63 | and (self.problem is None or self.problem_mark is None 64 | or self.context_mark.name != self.problem_mark.name 65 | or self.context_mark.line != self.problem_mark.line 66 | or self.context_mark.column != self.problem_mark.column): 67 | lines.append(str(self.context_mark)) 68 | if self.problem is not None: 69 | lines.append(self.problem) 70 | if self.problem_mark is not None: 71 | lines.append(str(self.problem_mark)) 72 | if self.note is not None: 73 | lines.append(self.note) 74 | return '\n'.join(lines) 75 | 76 | -------------------------------------------------------------------------------- /lib/yaml3/events.py: -------------------------------------------------------------------------------- 1 | 2 | # Abstract classes. 
3 | 4 | class Event(object): 5 | def __init__(self, start_mark=None, end_mark=None): 6 | self.start_mark = start_mark 7 | self.end_mark = end_mark 8 | def __repr__(self): 9 | attributes = [key for key in ['anchor', 'tag', 'implicit', 'value'] 10 | if hasattr(self, key)] 11 | arguments = ', '.join(['%s=%r' % (key, getattr(self, key)) 12 | for key in attributes]) 13 | return '%s(%s)' % (self.__class__.__name__, arguments) 14 | 15 | class NodeEvent(Event): 16 | def __init__(self, anchor, start_mark=None, end_mark=None): 17 | self.anchor = anchor 18 | self.start_mark = start_mark 19 | self.end_mark = end_mark 20 | 21 | class CollectionStartEvent(NodeEvent): 22 | def __init__(self, anchor, tag, implicit, start_mark=None, end_mark=None, 23 | flow_style=None): 24 | self.anchor = anchor 25 | self.tag = tag 26 | self.implicit = implicit 27 | self.start_mark = start_mark 28 | self.end_mark = end_mark 29 | self.flow_style = flow_style 30 | 31 | class CollectionEndEvent(Event): 32 | pass 33 | 34 | # Implementations. 
35 | 36 | class StreamStartEvent(Event): 37 | def __init__(self, start_mark=None, end_mark=None, encoding=None): 38 | self.start_mark = start_mark 39 | self.end_mark = end_mark 40 | self.encoding = encoding 41 | 42 | class StreamEndEvent(Event): 43 | pass 44 | 45 | class DocumentStartEvent(Event): 46 | def __init__(self, start_mark=None, end_mark=None, 47 | explicit=None, version=None, tags=None): 48 | self.start_mark = start_mark 49 | self.end_mark = end_mark 50 | self.explicit = explicit 51 | self.version = version 52 | self.tags = tags 53 | 54 | class DocumentEndEvent(Event): 55 | def __init__(self, start_mark=None, end_mark=None, 56 | explicit=None): 57 | self.start_mark = start_mark 58 | self.end_mark = end_mark 59 | self.explicit = explicit 60 | 61 | class AliasEvent(NodeEvent): 62 | pass 63 | 64 | class ScalarEvent(NodeEvent): 65 | def __init__(self, anchor, tag, implicit, value, 66 | start_mark=None, end_mark=None, style=None): 67 | self.anchor = anchor 68 | self.tag = tag 69 | self.implicit = implicit 70 | self.value = value 71 | self.start_mark = start_mark 72 | self.end_mark = end_mark 73 | self.style = style 74 | 75 | class SequenceStartEvent(CollectionStartEvent): 76 | pass 77 | 78 | class SequenceEndEvent(CollectionEndEvent): 79 | pass 80 | 81 | class MappingStartEvent(CollectionStartEvent): 82 | pass 83 | 84 | class MappingEndEvent(CollectionEndEvent): 85 | pass 86 | 87 | -------------------------------------------------------------------------------- /lib/yaml3/loader.py: -------------------------------------------------------------------------------- 1 | 2 | __all__ = ['BaseLoader', 'SafeLoader', 'Loader'] 3 | 4 | from .reader import * 5 | from .scanner import * 6 | from .parser import * 7 | from .composer import * 8 | from .constructor import * 9 | from .resolver import * 10 | 11 | class BaseLoader(Reader, Scanner, Parser, Composer, BaseConstructor, BaseResolver): 12 | 13 | def __init__(self, stream): 14 | Reader.__init__(self, stream) 15 | 
Scanner.__init__(self) 16 | Parser.__init__(self) 17 | Composer.__init__(self) 18 | BaseConstructor.__init__(self) 19 | BaseResolver.__init__(self) 20 | 21 | class SafeLoader(Reader, Scanner, Parser, Composer, SafeConstructor, Resolver): 22 | 23 | def __init__(self, stream): 24 | Reader.__init__(self, stream) 25 | Scanner.__init__(self) 26 | Parser.__init__(self) 27 | Composer.__init__(self) 28 | SafeConstructor.__init__(self) 29 | Resolver.__init__(self) 30 | 31 | class Loader(Reader, Scanner, Parser, Composer, Constructor, Resolver): 32 | 33 | def __init__(self, stream): 34 | Reader.__init__(self, stream) 35 | Scanner.__init__(self) 36 | Parser.__init__(self) 37 | Composer.__init__(self) 38 | Constructor.__init__(self) 39 | Resolver.__init__(self) 40 | 41 | -------------------------------------------------------------------------------- /lib/yaml3/nodes.py: -------------------------------------------------------------------------------- 1 | 2 | class Node(object): 3 | def __init__(self, tag, value, start_mark, end_mark): 4 | self.tag = tag 5 | self.value = value 6 | self.start_mark = start_mark 7 | self.end_mark = end_mark 8 | def __repr__(self): 9 | value = self.value 10 | #if isinstance(value, list): 11 | # if len(value) == 0: 12 | # value = '' 13 | # elif len(value) == 1: 14 | # value = '<1 item>' 15 | # else: 16 | # value = '<%d items>' % len(value) 17 | #else: 18 | # if len(value) > 75: 19 | # value = repr(value[:70]+u' ... 
') 20 | # else: 21 | # value = repr(value) 22 | value = repr(value) 23 | return '%s(tag=%r, value=%s)' % (self.__class__.__name__, self.tag, value) 24 | 25 | class ScalarNode(Node): 26 | id = 'scalar' 27 | def __init__(self, tag, value, 28 | start_mark=None, end_mark=None, style=None): 29 | self.tag = tag 30 | self.value = value 31 | self.start_mark = start_mark 32 | self.end_mark = end_mark 33 | self.style = style 34 | 35 | class CollectionNode(Node): 36 | def __init__(self, tag, value, 37 | start_mark=None, end_mark=None, flow_style=None): 38 | self.tag = tag 39 | self.value = value 40 | self.start_mark = start_mark 41 | self.end_mark = end_mark 42 | self.flow_style = flow_style 43 | 44 | class SequenceNode(CollectionNode): 45 | id = 'sequence' 46 | 47 | class MappingNode(CollectionNode): 48 | id = 'mapping' 49 | 50 | -------------------------------------------------------------------------------- /lib/yaml3/serializer.py: -------------------------------------------------------------------------------- 1 | 2 | __all__ = ['Serializer', 'SerializerError'] 3 | 4 | from .error import YAMLError 5 | from .events import * 6 | from .nodes import * 7 | 8 | class SerializerError(YAMLError): 9 | pass 10 | 11 | class Serializer: 12 | 13 | ANCHOR_TEMPLATE = 'id%03d' 14 | 15 | def __init__(self, encoding=None, 16 | explicit_start=None, explicit_end=None, version=None, tags=None): 17 | self.use_encoding = encoding 18 | self.use_explicit_start = explicit_start 19 | self.use_explicit_end = explicit_end 20 | self.use_version = version 21 | self.use_tags = tags 22 | self.serialized_nodes = {} 23 | self.anchors = {} 24 | self.last_anchor_id = 0 25 | self.closed = None 26 | 27 | def open(self): 28 | if self.closed is None: 29 | self.emit(StreamStartEvent(encoding=self.use_encoding)) 30 | self.closed = False 31 | elif self.closed: 32 | raise SerializerError("serializer is closed") 33 | else: 34 | raise SerializerError("serializer is already opened") 35 | 36 | def close(self): 37 | if 
self.closed is None: 38 | raise SerializerError("serializer is not opened") 39 | elif not self.closed: 40 | self.emit(StreamEndEvent()) 41 | self.closed = True 42 | 43 | #def __del__(self): 44 | # self.close() 45 | 46 | def serialize(self, node): 47 | if self.closed is None: 48 | raise SerializerError("serializer is not opened") 49 | elif self.closed: 50 | raise SerializerError("serializer is closed") 51 | self.emit(DocumentStartEvent(explicit=self.use_explicit_start, 52 | version=self.use_version, tags=self.use_tags)) 53 | self.anchor_node(node) 54 | self.serialize_node(node, None, None) 55 | self.emit(DocumentEndEvent(explicit=self.use_explicit_end)) 56 | self.serialized_nodes = {} 57 | self.anchors = {} 58 | self.last_anchor_id = 0 59 | 60 | def anchor_node(self, node): 61 | if node in self.anchors: 62 | if self.anchors[node] is None: 63 | self.anchors[node] = self.generate_anchor(node) 64 | else: 65 | self.anchors[node] = None 66 | if isinstance(node, SequenceNode): 67 | for item in node.value: 68 | self.anchor_node(item) 69 | elif isinstance(node, MappingNode): 70 | for key, value in node.value: 71 | self.anchor_node(key) 72 | self.anchor_node(value) 73 | 74 | def generate_anchor(self, node): 75 | self.last_anchor_id += 1 76 | return self.ANCHOR_TEMPLATE % self.last_anchor_id 77 | 78 | def serialize_node(self, node, parent, index): 79 | alias = self.anchors[node] 80 | if node in self.serialized_nodes: 81 | self.emit(AliasEvent(alias)) 82 | else: 83 | self.serialized_nodes[node] = True 84 | self.descend_resolver(parent, index) 85 | if isinstance(node, ScalarNode): 86 | detected_tag = self.resolve(ScalarNode, node.value, (True, False)) 87 | default_tag = self.resolve(ScalarNode, node.value, (False, True)) 88 | implicit = (node.tag == detected_tag), (node.tag == default_tag) 89 | self.emit(ScalarEvent(alias, node.tag, implicit, node.value, 90 | style=node.style)) 91 | elif isinstance(node, SequenceNode): 92 | implicit = (node.tag 93 | == 
self.resolve(SequenceNode, node.value, True)) 94 | self.emit(SequenceStartEvent(alias, node.tag, implicit, 95 | flow_style=node.flow_style)) 96 | index = 0 97 | for item in node.value: 98 | self.serialize_node(item, node, index) 99 | index += 1 100 | self.emit(SequenceEndEvent()) 101 | elif isinstance(node, MappingNode): 102 | implicit = (node.tag 103 | == self.resolve(MappingNode, node.value, True)) 104 | self.emit(MappingStartEvent(alias, node.tag, implicit, 105 | flow_style=node.flow_style)) 106 | for key, value in node.value: 107 | self.serialize_node(key, node, None) 108 | self.serialize_node(value, node, key) 109 | self.emit(MappingEndEvent()) 110 | self.ascend_resolver() 111 | 112 | -------------------------------------------------------------------------------- /lib/yaml3/tokens.py: -------------------------------------------------------------------------------- 1 | 2 | class Token(object): 3 | def __init__(self, start_mark, end_mark): 4 | self.start_mark = start_mark 5 | self.end_mark = end_mark 6 | def __repr__(self): 7 | attributes = [key for key in self.__dict__ 8 | if not key.endswith('_mark')] 9 | attributes.sort() 10 | arguments = ', '.join(['%s=%r' % (key, getattr(self, key)) 11 | for key in attributes]) 12 | return '%s(%s)' % (self.__class__.__name__, arguments) 13 | 14 | #class BOMToken(Token): 15 | # id = '' 16 | 17 | class DirectiveToken(Token): 18 | id = '' 19 | def __init__(self, name, value, start_mark, end_mark): 20 | self.name = name 21 | self.value = value 22 | self.start_mark = start_mark 23 | self.end_mark = end_mark 24 | 25 | class DocumentStartToken(Token): 26 | id = '' 27 | 28 | class DocumentEndToken(Token): 29 | id = '' 30 | 31 | class StreamStartToken(Token): 32 | id = '' 33 | def __init__(self, start_mark=None, end_mark=None, 34 | encoding=None): 35 | self.start_mark = start_mark 36 | self.end_mark = end_mark 37 | self.encoding = encoding 38 | 39 | class StreamEndToken(Token): 40 | id = '' 41 | 42 | class 
BlockSequenceStartToken(Token): 43 | id = '' 44 | 45 | class BlockMappingStartToken(Token): 46 | id = '' 47 | 48 | class BlockEndToken(Token): 49 | id = '' 50 | 51 | class FlowSequenceStartToken(Token): 52 | id = '[' 53 | 54 | class FlowMappingStartToken(Token): 55 | id = '{' 56 | 57 | class FlowSequenceEndToken(Token): 58 | id = ']' 59 | 60 | class FlowMappingEndToken(Token): 61 | id = '}' 62 | 63 | class KeyToken(Token): 64 | id = '?' 65 | 66 | class ValueToken(Token): 67 | id = ':' 68 | 69 | class BlockEntryToken(Token): 70 | id = '-' 71 | 72 | class FlowEntryToken(Token): 73 | id = ',' 74 | 75 | class AliasToken(Token): 76 | id = '' 77 | def __init__(self, value, start_mark, end_mark): 78 | self.value = value 79 | self.start_mark = start_mark 80 | self.end_mark = end_mark 81 | 82 | class AnchorToken(Token): 83 | id = '' 84 | def __init__(self, value, start_mark, end_mark): 85 | self.value = value 86 | self.start_mark = start_mark 87 | self.end_mark = end_mark 88 | 89 | class TagToken(Token): 90 | id = '' 91 | def __init__(self, value, start_mark, end_mark): 92 | self.value = value 93 | self.start_mark = start_mark 94 | self.end_mark = end_mark 95 | 96 | class ScalarToken(Token): 97 | id = '' 98 | def __init__(self, value, plain, start_mark, end_mark, style=None): 99 | self.value = value 100 | self.plain = plain 101 | self.start_mark = start_mark 102 | self.end_mark = end_mark 103 | self.style = style 104 | 105 | -------------------------------------------------------------------------------- /mkdocs.yml: -------------------------------------------------------------------------------- 1 | site_name: Ansible-CMDB 2 | theme: readthedocs 3 | markdown_extensions: 4 | - admonition 5 | pages: 6 | - About: index.md 7 | - Installation: installation.md 8 | - Usage: usage.md 9 | - FAQ: faq.md 10 | - Development: dev.md 11 | -------------------------------------------------------------------------------- /requirements.txt: 
-------------------------------------------------------------------------------- 1 | mako 2 | pyyaml 3 | ushlex 4 | jsonxs 5 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | import os 3 | import sys 4 | import re 5 | from distutils.core import setup 6 | from setuptools import find_packages 7 | 8 | def get_long_description(): 9 | path = os.path.join(os.path.dirname(__file__), 'README.md') 10 | with open(path) as f: 11 | return f.read() 12 | 13 | def get_version(): 14 | return open('src/ansiblecmdb/data/VERSION', 'r').read().strip() 15 | 16 | def get_data_files(path, strip='', prefix=''): 17 | data_files = [] 18 | for dirpath, dirnames, filenames in os.walk(path): 19 | files = [os.path.join(dirpath, filename) for filename in filenames] 20 | data_files.append( [prefix + dirpath[len(strip):], files] ) 21 | return data_files 22 | 23 | 24 | if sys.argv[-1] == 'publish': 25 | os.system('python setup.py sdist upload') 26 | print('You should also add a git tag for this version:') 27 | print(' git tag {0}'.format(get_version())) 28 | print(' git push --tags') 29 | sys.exit() 30 | 31 | setup( 32 | name='ansible-cmdb', 33 | version=get_version(), 34 | license='GPLv3', 35 | description='Generate host overview from ansible fact gathering output', 36 | long_description=get_long_description(), 37 | url='https://github.com/fboender/ansible-cmdb', 38 | 39 | author='Ferry Boender', 40 | author_email='ferry.boender@electricmonk.nl', 41 | 42 | package_dir={'': 'src'}, 43 | packages=find_packages('src'), 44 | include_package_data=True, 45 | data_files=\ 46 | get_data_files( 47 | 'src/ansiblecmdb/data', 48 | strip='src', 49 | prefix='lib' 50 | ) + 51 | [['lib/ansiblecmdb/', ['src/ansible-cmdb.py']]], 52 | zip_safe=False, 53 | install_requires=['mako', 'pyyaml', 'ushlex', 'jsonxs'], 54 | scripts=[ 55 | 'src/ansible-cmdb', 56 | ], 57 | 
58 | classifiers=[ 59 | 'Development Status :: 5 - Production/Stable', 60 | 'Environment :: Console', 61 | 'Intended Audience :: Developers', 62 | 'Intended Audience :: Information Technology', 63 | 'Intended Audience :: System Administrators', 64 | 'License :: OSI Approved :: GNU General Public License v3 (GPLv3)', 65 | 'Natural Language :: English', 66 | 'Operating System :: POSIX', 67 | 'Programming Language :: Python', 68 | 'Programming Language :: Python :: 2.6', 69 | 'Programming Language :: Python :: 2.7', 70 | 'Programming Language :: Python :: 3', 71 | 'Topic :: System :: Installation/Setup', 72 | 'Topic :: System :: Systems Administration', 73 | 'Topic :: Utilities', 74 | ], 75 | ) 76 | -------------------------------------------------------------------------------- /src/ansible-cmdb: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | # 4 | # Wrapper script to find python version to use. 5 | # 6 | 7 | # Debug message helper 8 | dbg () { 9 | [ "$DEBUG" -eq 1 ] && echo "$*" >&2 10 | } 11 | 12 | # Find suitable python binary 13 | find_py_bin () { 14 | which -a python | while read -r TRY_PY_BIN 15 | do 16 | dbg "Trying python bin: $TRY_PY_BIN" 17 | 18 | PY_VMAJOR=$($TRY_PY_BIN -c "import sys; print(sys.version_info[0])") 19 | PY_VMINOR=$($TRY_PY_BIN -c "import sys; print(sys.version_info[1])") 20 | 21 | if [ "$PY_VMAJOR" -eq 3 ]; then 22 | echo "$TRY_PY_BIN" 23 | exit 0 24 | elif [ "$PY_VMAJOR" -eq 2 ] && [ "$PY_VMINOR" -gt "6" ]; then 25 | echo "$TRY_PY_BIN" 26 | exit 0 27 | fi 28 | done 29 | } 30 | 31 | # Find path to the real ansible-cmdb python script 32 | find_cmdb_bin () { 33 | BIN_DIR=$(dirname "$0") 34 | if [ -f "$BIN_DIR/ansible-cmdb.py" ]; then 35 | dbg "Trying ansible-cmdb bin: $BIN_DIR/ansible-cmdb.py" 36 | echo "$BIN_DIR/ansible-cmdb.py" 37 | elif [ -f "$BIN_DIR/../lib/ansible-cmdb/ansible-cmdb.py" ]; then 38 | dbg "Trying ansible-cmdb bin: $BIN_DIR/../lib/ansible-cmdb/ansible-cmdb.py" 39 | echo 
"$BIN_DIR/../lib/ansible-cmdb/ansible-cmdb.py" 40 | elif [ -f "$BIN_DIR/../lib/ansiblecmdb/ansible-cmdb.py" ]; then 41 | dbg "Trying ansible-cmdb bin: $BIN_DIR/../lib/ansiblecmdb/ansible-cmdb.py" 42 | echo "$BIN_DIR/../lib/ansiblecmdb/ansible-cmdb.py" 43 | else 44 | echo "Couldn't find $BIN_DIR/ansible-cmdb.py in . or $BIN_DIR/../lib/ansible-cmdb/ or $BIN_DIR/../lib/ansiblecmdb/ (cwd=$PWD)" >&2 45 | exit 2 46 | fi 47 | } 48 | 49 | DEBUG=0 50 | if [ "$1" = "-d" ] || [ "$1" = "--debug" ]; then 51 | DEBUG=1 52 | fi 53 | 54 | PY_BIN="$(find_py_bin)" 55 | if [ -z "$PY_BIN" ]; then 56 | echo "No suitable python version found (v2.7 or higher required). Aborting" >&2 57 | exit 1 58 | fi 59 | 60 | CMDB_BIN="$(find_cmdb_bin)" 61 | if [ -z "$CMDB_BIN" ]; then 62 | echo "Couldn't find ansible-cmdb.py. Aborting" >&2 63 | exit 2 64 | fi 65 | 66 | # Run it 67 | dbg "Using python bin $PY_BIN" 68 | dbg "Using ansible-cmdb bin $CMDB_BIN" 69 | "$PY_BIN" "$CMDB_BIN" "$@" 70 | -------------------------------------------------------------------------------- /src/ansiblecmdb/__init__.py: -------------------------------------------------------------------------------- 1 | from . 
<%
import sys
import csv
import logging

log = logging.getLogger(__name__)

# Column definitions. Every 'field' callable takes a single host dict ('h')
# and must reference only that argument. The original lambdas referenced the
# template-level loop variable 'host' instead of their own parameter 'h';
# that only worked by accident because they are called from inside the loop
# below. They now consistently use 'h'.
cols = [
    {"title": "Name", "id": "name", "visible": True, "field": lambda h: h.get('name', '')},
    {"title": "OS", "id": "os", "visible": True, "field": lambda h: h['ansible_facts'].get('ansible_distribution', '') + ' ' + h['ansible_facts'].get('ansible_distribution_version', '')},
    {"title": "IP", "id": "ip", "visible": True, "field": lambda h: h['ansible_facts'].get('ansible_default_ipv4', {}).get('address', '')},
    {"title": "Arch", "id": "arch", "visible": True, "field": lambda h: h['ansible_facts'].get('ansible_architecture', 'Unk') + '/' + h['ansible_facts'].get('ansible_userspace_architecture', 'Unk')},
    {"title": "Mem", "id": "mem", "visible": True, "field": lambda h: '%0.0fg' % (int(h['ansible_facts'].get('ansible_memtotal_mb', 0)) / 1000.0)},
    {"title": "MemFree", "id": "memfree", "visible": True, "field": lambda h: '%0.0fg' % (int(h['ansible_facts'].get('ansible_memfree_mb', 0)) / 1000.0)},
    {"title": "MemUsed", "id": "memused", "visible": True, "field": lambda h: '%0.0fg' % (int(h['ansible_facts'].get('ansible_memory_mb', {}).get('real', {}).get('used', 0)) / 1000.0)},
    {"title": "CPUs", "id": "cpus", "visible": True, "field": lambda h: str(h['ansible_facts'].get('ansible_processor_count', 0))},
    {"title": "Virt", "id": "virt", "visible": True, "field": lambda h: h['ansible_facts'].get('ansible_virtualization_type', 'Unk') + '/' + h['ansible_facts'].get('ansible_virtualization_role', 'Unk')},
    {"title": "Disk avail", "id": "disk_avail", "visible": True, "field": lambda h: ', '.join(['{0:0.1f}g'.format(i['size_available'] / 1048576000) for i in h['ansible_facts'].get('ansible_mounts', []) if 'size_available' in i and i['size_available'] > 1])},
]

# Show only the columns requested with '--columns'; show everything otherwise.
if columns is not None:
    for col in cols:
        col["visible"] = col["id"] in columns

def get_cols():
    """Return the columns that should be included in the output."""
    return [col for col in cols if col['visible'] is True]

# Write the CSV header followed by one row per host (sorted by hostname).
# Hosts without collected facts are skipped with a warning, since their
# columns cannot be filled in.
fieldnames = [col['title'] for col in get_cols()]

writer = csv.writer(sys.stdout, delimiter=',', quotechar='"', quoting=csv.QUOTE_ALL)
writer.writerow(fieldnames)
for hostname, host in sorted(hosts.items()):
    if 'ansible_facts' not in host:
        log.warning(u'{0}: No info collected.'.format(hostname))
    else:
        writer.writerow([col['field'](host) for col in get_cols()])
%>
#!/usr/bin/env python

import sys
import os
import codecs
from mako.template import Template
from mako.lookup import TemplateLookup


def render(hosts, vars=None, tpl_dirs=None):
    """
    Render the 'html_fancy' template in split mode.

    Writes one 'cmdb/index.html' overview page plus one detail page per host
    ('cmdb/<hostname>.html'). The 'cmdb' output directory is created in the
    current working directory if it doesn't exist yet.

    hosts: dict mapping hostname -> host info dict.
    vars: extra variables passed through into the templates.
    tpl_dirs: directories searched by Mako for the templates.
    """
    # None-sentinels instead of mutable default arguments ({} / []); mutable
    # defaults are shared between calls, a classic Python pitfall.
    if vars is None:
        vars = {}
    if tpl_dirs is None:
        tpl_dirs = []

    if not os.path.isdir('cmdb'):
        os.mkdir('cmdb')

    lookup = TemplateLookup(directories=tpl_dirs,
                            default_filters=['decode.utf8'],
                            input_encoding='utf-8',
                            output_encoding='utf-8',
                            encoding_errors='replace')

    # Render the host overview page.
    template = lookup.get_template('html_fancy_split_overview.tpl')
    out_file = os.path.join('cmdb', 'index.html')
    output = template.render(hosts=hosts, **vars).lstrip().decode('utf8')
    with codecs.open(out_file, 'w', encoding='utf8') as f:
        f.write(output)

    # Render one detail page per host.
    template = lookup.get_template('html_fancy_split_detail.tpl')
    for hostname, host in hosts.items():
        out_file = os.path.join('cmdb', u'{0}.html'.format(hostname))
        output = template.render(host=host, **vars).lstrip().decode('utf8')
        with codecs.open(out_file, 'w', encoding='utf8') as f:
            f.write(output)
-------------------------------------------------------------------------------- 1 | ## -*- coding: utf-8 -*- 2 | <%! from ansiblecmdb.util import to_bool %> 3 | 4 | <%namespace name="defs" file="/html_fancy_defs.html" import="*" /> 5 | 6 | <% 7 | # Default parameter values 8 | local_js = to_bool(context.get('local_js', '0')) 9 | collapsed = to_bool(context.get('collapsed', '0')) 10 | 11 | # Set the Javascript resource URL (local disk or CDN) 12 | if local_js is False: 13 | res_url = "https://cdn.datatables.net/1.10.2/" 14 | else: 15 | res_url = "file://" + data_dir + "/static/" 16 | %> 17 | 18 | <% html_header(host['name'], local_js, res_url) %> 19 | <% html_header_bar(host['name']) %> 20 |
21 | <% html_host_detail(host, collapsed=collapsed, skip_empty=skip_empty, is_split=True) %> 22 |
23 | <% html_footer_bar(version) %> 24 | 25 | 30 | 31 | <% html_footer() %> 32 | -------------------------------------------------------------------------------- /src/ansiblecmdb/data/tpl/html_fancy_split_overview.tpl: -------------------------------------------------------------------------------- 1 | ## -*- coding: utf-8 -*- 2 | <%! from ansiblecmdb.util import to_bool %> 3 | 4 | <%namespace name="defs" file="/html_fancy_defs.html" import="*" /> 5 | 6 | <% 7 | # Default parameter values 8 | local_js = to_bool(context.get('local_js', '0')) 9 | collapsed = to_bool(context.get('collapsed', '0')) 10 | host_details = to_bool(context.get('host_details', '1')) 11 | skip_empty = to_bool(context.get('skip_empty', '0')) 12 | 13 | # Get column definitions from html_fancy_defs.html 14 | cols = var_cols(columns, exclude_columns) 15 | 16 | # Extend default columns with custom columns 17 | cols.extend(cust_cols) 18 | 19 | # Set the Javascript resource URL (local disk or CDN) 20 | if local_js is False: 21 | res_url = "https://cdn.datatables.net/1.10.2/" 22 | else: 23 | res_url = "file://" + data_dir + "/static/" 24 | 25 | # Set the link type for the host overview table's 'host' column (the link that 26 | # takes you to the host details). 
#!/usr/bin/env python

import sys
import os
import codecs
from mako.template import Template
from mako.lookup import TemplateLookup


def render(hosts, vars=None, tpl_dirs=None):
    """
    Render the 'markdown' template in split mode.

    Writes a 'cmdb/overview.md' index plus one 'cmdb/<hostname>.md' detail
    file per host. The 'cmdb' output directory is created in the current
    working directory if it doesn't exist yet.

    hosts: dict mapping hostname -> host info dict.
    vars: extra variables passed through into the templates.
    tpl_dirs: directories searched by Mako for the templates.
    """
    # None-sentinels instead of mutable default arguments ({} / []); mutable
    # defaults are shared between calls, a classic Python pitfall.
    if vars is None:
        vars = {}
    if tpl_dirs is None:
        tpl_dirs = []

    if not os.path.isdir('cmdb'):
        os.mkdir('cmdb')

    lookup = TemplateLookup(directories=tpl_dirs,
                            default_filters=['decode.utf8'],
                            input_encoding='utf-8',
                            output_encoding='utf-8',
                            encoding_errors='replace')

    # Render the host overview page.
    template = lookup.get_template('markdown_split_overview.tpl')
    out_file = os.path.join('cmdb', 'overview.md')
    output = template.render(hosts=hosts, **vars).lstrip().decode('utf8')
    with codecs.open(out_file, 'w', encoding='utf8') as f:
        f.write(output)

    # Render one detail page per host, in stable (sorted) order.
    template = lookup.get_template('markdown_split_detail.tpl')
    for hostname, host in sorted(hosts.items()):
        out_file = os.path.join('cmdb', '{0}.md'.format(hostname))
        output = template.render(hostname=hostname, host=host, **vars).lstrip().decode('utf8')
        with codecs.open(out_file, 'w', encoding='utf8') as f:
            f.write(output)
name="col_system_type(host)"><% 29 | return jsonxs(host, 'ansible_facts.ansible_system', default='') 30 | %> 31 | <%def name="col_kernel(host)"><% 32 | return jsonxs(host, 'ansible_facts.ansible_kernel', default='') 33 | %> 34 | <%def name="col_arch_hardware(host)"><% 35 | return jsonxs(host, 'ansible_facts.ansible_architecture', default='') 36 | %> 37 | <%def name="col_arch_userspace(host)"><% 38 | return jsonxs(host, 'ansible_facts.ansible_userspace_architecture', default='') 39 | %> 40 | <%def name="col_virt_type(host)"><% 41 | return jsonxs(host, 'ansible_facts.ansible_virtualization_type', default='?') 42 | %> 43 | <%def name="col_virt_role(host)"><% 44 | return jsonxs(host, 'ansible_facts.ansible_virtualization_role', default='?') 45 | %> 46 | <%def name="col_cpu_type(host)"><% 47 | cpu_type = jsonxs(host, 'ansible_facts.ansible_processor', default=0) 48 | if isinstance(cpu_type, list) and len(cpu_type) > 0: 49 | return cpu_type[-1] 50 | else: 51 | return '' 52 | %> 53 | <%def name="col_vcpus(host)"><% 54 | if jsonxs(host, 'ansible_facts.ansible_distribution', default='') in ["OpenBSD"]: 55 | return jsonxs(host, 'ansible_facts.ansible_processor_count', default=0) 56 | else: 57 | return jsonxs(host, 'ansible_facts.ansible_processor_vcpus', default=jsonxs(host, 'ansible_facts.ansible_processor_cores', default=0)) 58 | endif 59 | %> 60 | <%def name="col_ram(host)"><% 61 | return '%0.1f' % ((int(jsonxs(host, 'ansible_facts.ansible_memtotal_mb', default=0)) / 1024.0)) 62 | %> 63 | <%def name="col_disk_total(host)"><% 64 | for i in jsonxs(host, 'ansible_facts.ansible_mounts', default=[]): 65 | if i["mount"] == '/': 66 | return round(i.get('size_total', 0) / 1073741824.0, 1) 67 | endif 68 | endfor 69 | return 0 70 | %> 71 | <%def name="col_disk_free(host)"><% 72 | for i in jsonxs(host, 'ansible_facts.ansible_mounts', default=[]): 73 | if i["mount"] == '/': 74 | try: 75 | return round(i["size_available"] / 1073741824.0, 1) 76 | except: 77 | return 0 78 | endtry 79 | 
<%
import sys
import logging

log = logging.getLogger(__name__)

# Number of spaces between output columns.
col_space = 2

# Column definitions. Every 'field' callable takes a single host dict ('h')
# and must reference only that argument. The original lambdas referenced the
# template-level loop variable 'host' instead of their own parameter 'h';
# that only worked by accident because they are called from inside the loops
# below. They now consistently use 'h'.
cols = [
    {"title": "Name", "id": "name", "visible": True, "field": lambda h: h.get('name', '')},
    {"title": "OS", "id": "os", "visible": True, "field": lambda h: h['ansible_facts'].get('ansible_distribution', '') + ' ' + h['ansible_facts'].get('ansible_distribution_version', '')},
    {"title": "IP", "id": "ip", "visible": True, "field": lambda h: h['ansible_facts'].get('ansible_default_ipv4', {}).get('address', '')},
    {"title": "Mac", "id": "mac", "visible": True, "field": lambda h: h['ansible_facts'].get('ansible_default_ipv4', {}).get('macaddress', '')},
    {"title": "Arch", "id": "arch", "visible": True, "field": lambda h: h['ansible_facts'].get('ansible_architecture', 'Unk') + '/' + h['ansible_facts'].get('ansible_userspace_architecture', 'Unk')},
    {"title": "Mem", "id": "mem", "visible": True, "field": lambda h: '%0.0fg' % (int(h['ansible_facts'].get('ansible_memtotal_mb', 0)) / 1000.0)},
    {"title": "MemFree", "id": "memfree", "visible": True, "field": lambda h: '%0.0fg' % (int(h['ansible_facts'].get('ansible_memfree_mb', 0)) / 1000.0)},
    {"title": "MemUsed", "id": "memused", "visible": True, "field": lambda h: '%0.0fg' % (int(h['ansible_facts'].get('ansible_memory_mb', {}).get('real', {}).get('used', 0)) / 1000.0)},
    {"title": "CPUs", "id": "cpus", "visible": True, "field": lambda h: str(h['ansible_facts'].get('ansible_processor_count', 0))},
    {"title": "Virt", "id": "virt", "visible": True, "field": lambda h: h['ansible_facts'].get('ansible_virtualization_type', 'Unk') + '/' + h['ansible_facts'].get('ansible_virtualization_role', 'Unk')},
    {"title": "Disk avail", "id": "disk_avail", "visible": True, "field": lambda h: ', '.join(['{0:0.1f}g'.format(i['size_available'] / 1048576000) for i in h['ansible_facts'].get('ansible_mounts', []) if 'size_available' in i and i['size_available'] > 1])},
]

# Show only the columns requested with '--columns'; show everything otherwise.
if columns is not None:
    for col in cols:
        col["visible"] = col["id"] in columns

def get_cols():
    """Return the columns that should be included in the output."""
    return [col for col in cols if col['visible'] is True]

# Determine each column's display width: the longest value seen in that
# column, with the column title as the minimum width.
col_longest = {}
for col in get_cols():
    col_longest[col['title']] = len(col['title'])

for hostname, host in hosts.items():
    for col in get_cols():
        try:
            field_value = col['field'](host)
            if len(field_value) > col_longest.get(col['title'], 0):
                col_longest[col['title']] = len(field_value)
        except KeyError:
            # Host is missing the facts this column needs (e.g. no
            # 'ansible_facts' at all); the output loop below warns about it.
            pass

# Print the header row and the separator row.
for col in get_cols():
    sys.stdout.write(col['title'].ljust(col_longest[col['title']] + col_space))
sys.stdout.write('\n')

for col in get_cols():
    sys.stdout.write(u'-' * col_longest[col['title']] + (u' ' * col_space))
sys.stdout.write('\n')

# Print one row per host, sorted by hostname. Hosts without collected facts
# are skipped with a warning.
for hostname, host in sorted(hosts.items()):
    if 'ansible_facts' not in host:
        log.warning(u'{0}: No info collected.'.format(hostname))
    else:
        for col in get_cols():
            sys.stdout.write(col['field'](host).ljust(col_longest[col['title']]) + (' ' * col_space))
        sys.stdout.write('\n')
%>
import os


class Render:
    """
    Wrapper class to facilitate rendering.

    This is mostly a helper class for finding template locations and
    initializing Mako properly. It can also call executable "templates"
    (python scripts) for rendering.
    """
    def __init__(self, tpl, tpl_dirs):
        # 'tpl' is either a direct path to a template file, or a bare
        # template name that is looked up as '<name>.tpl' / '<name>.py' in
        # each of 'tpl_dirs'.
        self.tpl = tpl
        self.tpl_dirs = tpl_dirs
        self.tpl_possibilities = self._tpl_possibilities()
        self.tpl_file = self._find_tpl()  # None if no template was found

    def _tpl_possibilities(self):
        """
        Construct a list of possible paths to templates.
        """
        tpl_possibilities = [
            os.path.realpath(self.tpl)
        ]
        for tpl_dir in self.tpl_dirs:
            tpl_possibilities.append(os.path.realpath(os.path.join(tpl_dir, "{0}.tpl".format(self.tpl))))
            tpl_possibilities.append(os.path.realpath(os.path.join(tpl_dir, "{0}.py".format(self.tpl))))

        return tpl_possibilities

    def _find_tpl(self):
        """
        Return the first existing file from the list of possible template
        paths, or None if none exists.
        """
        for tpl_possibility in self.tpl_possibilities:
            if os.path.isfile(tpl_possibility):
                return tpl_possibility

        return None

    def render(self, hosts, vars=None):
        """
        Render 'hosts' through the template found during construction.

        Raises ValueError if no template was found (previously this crashed
        with an obscure AttributeError on None) or if the template file type
        is not recognized.
        """
        if vars is None:
            vars = {}
        if self.tpl_file is None:
            raise ValueError("Template '{0}' not found in {1}".format(self.tpl, self.tpl_dirs))
        if self.tpl_file.endswith(".tpl"):
            return self._render_mako(hosts, vars)
        elif self.tpl_file.endswith(".py"):
            return self._render_py(hosts, vars)
        else:
            raise ValueError("Don't know how to handle '{0}'".format(self.tpl_file))

    def _render_mako(self, hosts, vars=None):
        """Render a Mako (.tpl) template and return the output."""
        # Imported lazily so this module can be loaded even when Mako isn't
        # importable (e.g. when only .py templates are used).
        from mako.template import Template
        from mako.lookup import TemplateLookup
        if vars is None:
            vars = {}
        lookup = TemplateLookup(directories=self.tpl_dirs,
                                default_filters=['decode.utf8'],
                                input_encoding='utf-8',
                                output_encoding='utf-8',
                                encoding_errors='replace')
        template = Template(filename=self.tpl_file,
                            lookup=lookup,
                            default_filters=['decode.utf8'],
                            input_encoding='utf-8',
                            output_encoding='utf-8')
        return template.render(hosts=hosts, **vars)

    def _render_py(self, hosts, vars=None):
        """Execute a python (.py) "template" script's render() function."""
        if vars is None:
            vars = {}
        module = self._load_py_module(self.tpl_file)
        return module.render(hosts, vars=vars, tpl_dirs=self.tpl_dirs)

    @staticmethod
    def _load_py_module(path):
        """
        Load a python source file as a module. Prefers importlib, because
        the 'imp' module was removed in python 3.12; falls back to imp on
        old interpreters (python 2) that lack importlib.util.
        """
        try:
            import importlib.util
        except ImportError:
            import imp
            return imp.load_source('r', path)
        spec = importlib.util.spec_from_file_location('r', path)
        module = importlib.util.module_from_spec(spec)
        spec.loader.exec_module(module)
        return module
def deepupdate(target, src, overwrite=True):
    """Deep-update dict `target` with the contents of dict `src`.

    For each key/value pair in src: if the key doesn't exist in target, the
    value is deep-copied from src to target. Otherwise: if the value is a
    list, target's list is extended with it; if it is a set, target's set is
    updated with it; if it is a dict, it is recursively deep-updated. If
    `overwrite` is False, values already present in target are left alone
    (lists are not extended, sets not merged, scalars not replaced).

    Raises TypeError when a set in src meets a target value that is neither
    a list nor a set.

    Examples:
        >>> t = {'name': 'Ferry', 'hobbies': ['programming', 'sci-fi']}
        >>> deepupdate(t, {'hobbies': ['gaming']})
        >>> t['hobbies']
        ['programming', 'sci-fi', 'gaming']
    """
    for key, value in src.items():
        # isinstance instead of type() ==, so subclasses (e.g. OrderedDict)
        # are handled the same way as their base type.
        if isinstance(value, list):
            if key not in target:
                target[key] = copy.deepcopy(value)
            elif overwrite:
                target[key].extend(value)
        elif isinstance(value, dict):
            if key not in target:
                target[key] = copy.deepcopy(value)
            else:
                deepupdate(target[key], value, overwrite=overwrite)
        elif isinstance(value, set):
            if key not in target:
                target[key] = value.copy()
            elif overwrite:
                if isinstance(target[key], list):
                    target[key].extend(value)
                elif isinstance(target[key], set):
                    target[key].update(value)
                else:
                    raise TypeError("Cannot update {} with {}".format(type(target[key]), type(value)))
        else:
            # Scalar (or any other type): shallow-copy it in.
            if key not in target or overwrite:
                target[key] = copy.copy(value)
68 | 69 | Examples: 70 | 71 | >>> to_bool("true") 72 | True 73 | >>> to_bool("0") 74 | False 75 | >>> to_bool(True) 76 | True 77 | """ 78 | if isinstance(s, bool): 79 | return s 80 | elif s.lower() in ['true', '1']: 81 | return True 82 | elif s.lower() in ['false', '0']: 83 | return False 84 | else: 85 | raise ValueError("Can't cast '%s' to bool" % (s)) 86 | -------------------------------------------------------------------------------- /test/f_extend/extend/debian.dev.local: -------------------------------------------------------------------------------- 1 | { 2 | "ansible_facts": { 3 | "ansible_env": { 4 | "EDITOR": "nano" 5 | } 6 | }, 7 | "software": [ 8 | "Apache2", 9 | "MySQL5.5" 10 | ] 11 | } 12 | -------------------------------------------------------------------------------- /test/f_extend/out_setup/debian.dev.local: -------------------------------------------------------------------------------- 1 | { 2 | "ansible_facts": { 3 | "ansible_env": { 4 | "EDITOR": "vim" 5 | } 6 | }, 7 | "changed": false 8 | } 9 | -------------------------------------------------------------------------------- /test/f_factcache/hosts: -------------------------------------------------------------------------------- 1 | [dev] 2 | debian.dev.local dtap=dev 3 | -------------------------------------------------------------------------------- /test/f_factcache/out/debian.dev.local: -------------------------------------------------------------------------------- 1 | {"ansible_all_ipv4_addresses": ["192.168.56.2"], "ansible_all_ipv6_addresses": ["fe80::a00:27ff:fef9:98a7"], "ansible_architecture": "x86_64", "ansible_bios_date": "12/01/2006", "ansible_bios_version": "VirtualBox", "ansible_cmdline": {"BOOT_IMAGE": "/vmlinuz-2.6.32-5-amd64", "quiet": true, "ro": true, "root": "/dev/mapper/debian-root"}, "ansible_date_time": {"date": "2015-08-30", "day": "30", "epoch": "1440923780", "hour": "10", "iso8601": "2015-08-30T08:36:20Z", "iso8601_micro": "2015-08-30T08:36:20.457036Z", "minute": 
"36", "month": "08", "second": "20", "time": "10:36:20", "tz": "CEST", "tz_offset": "+0200", "weekday": "Sunday", "year": "2015"}, "ansible_default_ipv4": {"address": "192.168.56.2", "alias": "eth0", "gateway": "192.168.56.1", "interface": "eth0", "macaddress": "08:00:27:f9:98:a7", "mtu": 1500, "netmask": "255.255.255.0", "network": "192.168.56.0", "type": "ether"}, "ansible_default_ipv6": {}, "ansible_devices": {"sda": {"holders": [], "host": "SATA controller: Intel Corporation 82801HBM/HEM (ICH8M/ICH8M-E) SATA AHCI Controller (rev 02)", "model": "VBOX HARDDISK", "partitions": {"sda1": {"sectors": "497664", "sectorsize": 512, "size": "243.00 MB", "start": "2048"}, "sda2": {"sectors": "2", "sectorsize": 512, "size": "1.00 KB", "start": "501758"}, "sda5": {"sectors": "209211392", "sectorsize": 512, "size": "99.76 GB", "start": "501760"}}, "removable": "0", "rotational": "1", "scheduler_mode": "cfq", "sectors": "209715200", "sectorsize": "512", "size": "100.00 GB", "support_discard": null, "vendor": "ATA"}, "sr0": {"holders": [], "host": "IDE interface: Intel Corporation 82371AB/EB/MB PIIX4 IDE (rev 01)", "model": "CD-ROM", "partitions": {}, "removable": "1", "rotational": "1", "scheduler_mode": "cfq", "sectors": "2097151", "sectorsize": "512", "size": "1024.00 MB", "support_discard": null, "vendor": "VBOX"}}, "ansible_distribution": "Debian", "ansible_distribution_major_version": "6", "ansible_distribution_release": "NA", "ansible_distribution_version": "6.0.10", "ansible_domain": "", "ansible_env": {"EDITOR": "vim", "GDK_USE_XFT": "1", "HOME": "/home/fboender", "LANG": "en_US.UTF-8", "LANGUAGE": "en_US.UTF-8", "LC_ALL": "en_US.UTF-8", "LC_CTYPE": "en_US.UTF-8", "LESS": "-RgiMSx4 -FX", "LOGNAME": "fboender", "LS_COLORS": 
"rs=0:di=01;34:ln=01;36:mh=00:pi=40;33:so=01;35:do=01;35:bd=40;33;01:cd=40;33;01:or=40;31;01:su=37;41:sg=30;43:ca=30;41:tw=30;42:ow=34;42:st=37;44:ex=01;32:*.tar=01;31:*.tgz=01;31:*.arj=01;31:*.taz=01;31:*.lzh=01;31:*.lzma=01;31:*.tlz=01;31:*.txz=01;31:*.zip=01;31:*.z=01;31:*.Z=01;31:*.dz=01;31:*.gz=01;31:*.lz=01;31:*.xz=01;31:*.bz2=01;31:*.bz=01;31:*.tbz=01;31:*.tbz2=01;31:*.tz=01;31:*.deb=01;31:*.rpm=01;31:*.jar=01;31:*.rar=01;31:*.ace=01;31:*.zoo=01;31:*.cpio=01;31:*.7z=01;31:*.rz=01;31:*.jpg=01;35:*.jpeg=01;35:*.gif=01;35:*.bmp=01;35:*.pbm=01;35:*.pgm=01;35:*.ppm=01;35:*.tga=01;35:*.xbm=01;35:*.xpm=01;35:*.tif=01;35:*.tiff=01;35:*.png=01;35:*.svg=01;35:*.svgz=01;35:*.mng=01;35:*.pcx=01;35:*.mov=01;35:*.mpg=01;35:*.mpeg=01;35:*.m2v=01;35:*.mkv=01;35:*.ogm=01;35:*.mp4=01;35:*.m4v=01;35:*.mp4v=01;35:*.vob=01;35:*.qt=01;35:*.nuv=01;35:*.wmv=01;35:*.asf=01;35:*.rm=01;35:*.rmvb=01;35:*.flc=01;35:*.avi=01;35:*.fli=01;35:*.flv=01;35:*.gl=01;35:*.dl=01;35:*.xcf=01;35:*.xwd=01;35:*.yuv=01;35:*.cgm=01;35:*.emf=01;35:*.axv=01;35:*.anx=01;35:*.ogv=01;35:*.ogx=01;35:*.aac=00;36:*.au=00;36:*.flac=00;36:*.mid=00;36:*.midi=00;36:*.mka=00;36:*.mp3=00;36:*.mpc=00;36:*.ogg=00;36:*.ra=00;36:*.wav=00;36:*.axa=00;36:*.oga=00;36:*.spx=00;36:*.xspf=00;36::ow=103;30;01:", "PAGER": "less", "PATH": "~/bin:/usr/local/bin:/usr/bin:/bin:/usr/bin/X11:/usr/games:/usr/local/sbin:/usr/sbin:/sbin:~/bin/:", "PWD": "/home/fboender", "SHELL": "/bin/bash", "SHLVL": "1", "SSH_CLIENT": "192.168.56.1 60106 22", "SSH_CONNECTION": "192.168.56.1 60106 192.168.56.2 22", "SSH_TTY": "/dev/pts/1", "TERM": "xterm-256color", "TNS_ADMIN": "/usr/local/lib/instantclient_10_2/network/admin/", "USER": "fboender", "_": "/bin/sh"}, "ansible_eth0": {"active": true, "device": "eth0", "ipv4": {"address": "192.168.56.2", "netmask": "255.255.255.0", "network": "192.168.56.0"}, "ipv6": [{"address": "fe80::a00:27ff:fef9:98a7", "prefix": "64", "scope": "link"}], "macaddress": "08:00:27:f9:98:a7", "module": "pcnet32", "mtu": 
1500, "promisc": false, "type": "ether"}, "ansible_fips": false, "ansible_form_factor": "Other", "ansible_fqdn": "localhost", "ansible_hostname": "dev", "ansible_interfaces": ["lo", "eth0"], "ansible_kernel": "2.6.32-5-amd64", "ansible_lo": {"active": true, "device": "lo", "ipv4": {"address": "127.0.0.1", "netmask": "255.0.0.0", "network": "127.0.0.0"}, "ipv6": [{"address": "::1", "prefix": "128", "scope": "host"}], "mtu": 16436, "promisc": false, "type": "loopback"}, "ansible_lsb": {"codename": "squeeze", "description": "Debian GNU/Linux 6.0.10 (squeeze)", "id": "Debian", "major_release": "6", "release": "6.0.10"}, "ansible_machine": "x86_64", "ansible_machine_id": "00a3ac55878f7a9340c879050000036c", "ansible_memfree_mb": 217, "ansible_memory_mb": {"nocache": {"free": 395, "used": 101}, "real": {"free": 217, "total": 496, "used": 279}, "swap": {"cached": 0, "free": 727, "total": 727, "used": 0}}, "ansible_memtotal_mb": 496, "ansible_mounts": [{"device": "/dev/mapper/debian-root", "fstype": "ext3", "mount": "/", "options": "rw,errors=remount-ro", "size_available": 92955344896, "size_total": 104680742912, "uuid": "NA"}, {"device": "/dev/sda1", "fstype": "ext2", "mount": "/boot", "options": "rw", "size_available": 209807360, "size_total": 238787584, "uuid": "NA"}], "ansible_nodename": "dev.local", "ansible_os_family": "Debian", "ansible_pkg_mgr": "apt", "ansible_processor": ["GenuineIntel", "Intel(R) Core(TM) i7-4712HQ CPU @ 2.30GHz"], "ansible_processor_cores": 1, "ansible_processor_count": 1, "ansible_processor_threads_per_core": 1, "ansible_processor_vcpus": 1, "ansible_product_name": "VirtualBox", "ansible_product_serial": "NA", "ansible_product_uuid": "NA", "ansible_product_version": "1.2", "ansible_python_version": "2.6.6", "ansible_selinux": false, "ansible_ssh_host_key_dsa_public": 
"AAAAB3NzaC1kc3MAAACBAOJWOpQVltXw3wNsRq20+r37aOHiD11hNvNttywbVkNPLo+s7Q0Y0lctaOWl9WR4b3EK55t+a7/sCqS7Qy5CGwtMmsg3ayUUNZLSwwAkAZ2UISyYRbb3AJwMb3HqBXu6P/lm5GRDEycU+bQUUdVOsBe6kwMEKUdBtsa++ipCCCcNAAAAFQCUI0c8CwmSvtwcuj7JTjEJnhBKiQAAAIEA4Mhgav6N18adoQ6xvgHNVrdf/ilNOv1tFUpL2pFlH21zrONj19/hT/HSyj7CeDV0Hpfwg1gGYI12TNgf+9NDOfz2ceXef6QVfG1Nf8j7HAp9KoSU50MCM9la3oTUnN4AwwPGp8ItuHwzmGubt1UaVaBPpeeNrMCWqewHF8bgZmAAAACAB68uE+BWPsGpKqdXeaohvinF296nWc0urbXQ6yPVaATT96UP+vT2QToZY+4Zkcs6l3gL0kS7s8Y/50AxbvO1yKFhIqBnH/p4tV21jdTnXL066bbU60f5tjC5/ty+zYQREKAm3XiLxOSRyyC0M34bFVIqCtZ5tMax2xtaRndDlys=", "ansible_ssh_host_key_rsa_public": "AAAAB3NzaC1yc2EAAAADAQABAAABAQDFwue0q1kD5CgZczAKg10/DFyKWgxoSK1J/r/Tk9PqvckNjwVx7Yn78rElXgo4SCMceWPIucb8Yl8FpmdnuXH8/yn5i+snOpBQddoFun/CiB3HUw28T2M7Y9q4QtEcMiULBq1oiCoYJfNU9o3aD2caxk8OhcrF5k7Ec5DIyAGN8doYxey6icl6ohUJR6x6jnZO+6uoSKyHwxS3HBZ+6RrVY7ckCuRk/w24P7YM5sEnHZ9dnS4uTVCYKrJpygYUbN/HrSNuIIAQpvitZWua6t7mFy1zugCc0Lj8QbPStnsntIVWoIwWY+iFnFrS6N3IiGHAyOv6Jla0P3HEFmrhoVIH", "ansible_swapfree_mb": 727, "ansible_swaptotal_mb": 727, "ansible_system": "Linux", "ansible_system_vendor": "innotek GmbH", "ansible_user_dir": "/home/fboender", "ansible_user_gecos": "fboender,,,", "ansible_user_gid": 1000, "ansible_user_id": "fboender", "ansible_user_shell": "/bin/bash", "ansible_user_uid": 1000, "ansible_userspace_architecture": "x86_64", "ansible_userspace_bits": "64", "ansible_virtualization_role": "guest", "ansible_virtualization_type": "virtualbox", "module_setup": true} -------------------------------------------------------------------------------- /test/f_hostparse/hosts: -------------------------------------------------------------------------------- 1 | 2 | [db] 3 | db.dev.local 4 | 5 | [db:vars] 6 | function=db 7 | 8 | [dev_local] 9 | db.dev.local 10 | 11 | [dev:children] 12 | dev_local 13 | 14 | [dev:vars] 15 | dtap=dev 16 | 17 | [web] 18 | web[01:03].dev.local 19 | 20 | [frontend] 21 | fe[01:03].dev[01:02].local 22 | 
-------------------------------------------------------------------------------- /test/f_hostparse/out/db.dev.local: -------------------------------------------------------------------------------- 1 | { 2 | "ansible_facts": { 3 | "ansible_fqdn": "localhost", 4 | "ansible_hostname": "dev", 5 | "ansible_nodename": "db.dev.local", 6 | "module_setup": true 7 | }, 8 | "changed": false 9 | } 10 | -------------------------------------------------------------------------------- /test/f_inventory/dyninv.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | 3 | import sys 4 | 5 | inv = """\ 6 | { 7 | "databases": { 8 | "hosts": ["host1.example.com", "host2.example.com"], 9 | "vars": { 10 | "a": true 11 | } 12 | }, 13 | "webservers": [ "host2.example.com", "host3.example.com" ], 14 | "atlanta": { 15 | "hosts": [ "host1.example.com", "host4.example.com", "host5.example.com" ], 16 | "vars": { 17 | "b": false 18 | }, 19 | "children": [ "marietta", "5points" ] 20 | }, 21 | "marietta": [ "host6.example.com" ], 22 | "5points": [ "host7.example.com" ], 23 | "_meta" : { 24 | "hostvars": { 25 | "moocow.example.com": { 26 | "asdf" : 1234 27 | }, 28 | "llama.example.com": { 29 | "asdf" : 5678 30 | } 31 | } 32 | } 33 | } 34 | """ 35 | 36 | sys.stdout.write(inv) 37 | -------------------------------------------------------------------------------- /test/f_inventory/hostsdir/hosts_db: -------------------------------------------------------------------------------- 1 | [db] 2 | db.dev.local 3 | 4 | [db:vars] 5 | function=db 6 | -------------------------------------------------------------------------------- /test/f_inventory/hostsdir/hosts_dev: -------------------------------------------------------------------------------- 1 | [dev_local] 2 | db.dev.local 3 | 4 | [dev:children] 5 | dev_local 6 | 7 | [dev:vars] 8 | dtap=dev 9 | -------------------------------------------------------------------------------- 
#!/usr/bin/python

# Mock Ansible dynamic-inventory script (test/f_inventory/mixeddir/dyninv.py).
# Deliberately a copy of test/f_inventory/dyninv.py: it sits in a directory
# mixed with static hosts files and an ini file, so the tests can verify
# that executable inventory scripts inside an inventory *directory* are run
# and their JSON output merged (InventoryTestCase.testMixedDir).

import sys

# The exact bytes of this document are the test contract; tests assert on
# the hosts, vars and _meta hostvars defined here.
inv = """\
{
    "databases": {
        "hosts": ["host1.example.com", "host2.example.com"],
        "vars": {
            "a": true
        }
    },
    "webservers": [ "host2.example.com", "host3.example.com" ],
    "atlanta": {
        "hosts": [ "host1.example.com", "host4.example.com", "host5.example.com" ],
        "vars": {
            "b": false
        },
        "children": [ "marietta", "5points" ]
    },
    "marietta": [ "host6.example.com" ],
    "5points": [ "host7.example.com" ],
    "_meta" : {
        "hostvars": {
            "moocow.example.com": {
                "asdf" : 1234
            },
            "llama.example.com": {
                "asdf" : 5678
            }
        }
    }
}
"""

# write() rather than print(): the document already ends in a newline and
# must not gain a trailing blank line.
sys.stdout.write(inv)
import sys
import unittest
import imp
import os

# Make the bundled libraries and the ansiblecmdb package importable when the
# suite is run from the test/ directory.
sys.path.insert(0, os.path.realpath('../lib'))
sys.path.insert(0, os.path.realpath('../src'))
import ansiblecmdb


class ExtendTestCase(unittest.TestCase):
    """
    Test the extending of facts.
    """
    def testExtendOverrideParams(self):
        """
        Test that we can override a native fact
        """
        fact_dirs = ['f_extend/out_setup', 'f_extend/extend']
        ansible = ansiblecmdb.Ansible(fact_dirs)
        env_editor = ansible.hosts['debian.dev.local']['ansible_facts']['ansible_env']['EDITOR']
        self.assertEqual(env_editor, 'nano')

    def testExtendAddParams(self):
        """
        Test that we can add new facts
        """
        fact_dirs = ['f_extend/out_setup', 'f_extend/extend']
        ansible = ansiblecmdb.Ansible(fact_dirs)
        software = ansible.hosts['debian.dev.local']['software']
        self.assertIn('Apache2', software)


class HostParseTestCase(unittest.TestCase):
    """
    Test specifics of the hosts inventory parser
    """
    def testChildGroupHosts(self):
        """
        Test that children groups contain all hosts they should.
        """
        fact_dirs = ['f_hostparse/out']
        inventories = ['f_hostparse/hosts']
        ansible = ansiblecmdb.Ansible(fact_dirs, inventories)
        groups = ansible.hosts['db.dev.local']['groups']
        self.assertIn('db', groups)
        self.assertIn('dev', groups)
        self.assertIn('dev_local', groups)

    def testChildGroupVars(self):
        """
        Test that all vars applied against a child group are set on the hosts.
        """
        fact_dirs = ['f_hostparse/out']
        inventories = ['f_hostparse/hosts']
        ansible = ansiblecmdb.Ansible(fact_dirs, inventories)
        host_vars = ansible.hosts['db.dev.local']['hostvars']
        self.assertEqual(host_vars['function'], 'db')
        self.assertEqual(host_vars['dtap'], 'dev')

    def testExpandHostDef(self):
        """
        Verify that host ranges are properly expanded. E.g. db[01-03].local ->
        db01.local, db02.local, db03.local.
        """
        fact_dirs = ['f_hostparse/out']
        inventories = ['f_hostparse/hosts']
        ansible = ansiblecmdb.Ansible(fact_dirs, inventories)
        self.assertIn('web02.dev.local', ansible.hosts)
        self.assertIn('fe03.dev02.local', ansible.hosts)


class InventoryTestCase(unittest.TestCase):
    """
    Test the different ways of specifying inventories (file, directory,
    dynamic inventory script, mixed directory).
    """
    def testHostsDir(self):
        """
        Verify that we can specify a directory as the hosts inventory file and
        that all files are parsed.
        """
        fact_dirs = ['f_inventory/out']
        inventories = ['f_inventory/hostsdir']
        ansible = ansiblecmdb.Ansible(fact_dirs, inventories)
        host_vars = ansible.hosts['db.dev.local']['hostvars']
        groups = ansible.hosts['db.dev.local']['groups']
        self.assertEqual(host_vars['function'], 'db')
        self.assertIn('db', groups)

    def testDynInv(self):
        """
        Verify that we can specify a path to a dynamic inventory as the
        inventory file, and it will be executed, its output parsed and added
        as available hosts.
        """
        fact_dirs = ['f_inventory/out']
        inventories = ['f_inventory/dyninv.py']
        ansible = ansiblecmdb.Ansible(fact_dirs, inventories)
        self.assertIn('host5.example.com', ansible.hosts)
        host_vars = ansible.hosts['host5.example.com']['hostvars']
        groups = ansible.hosts['host5.example.com']['groups']
        self.assertEqual(host_vars['b'], False)
        self.assertIn("atlanta", groups)

    def testMixedDir(self):
        """
        Verify that a mixed dir of hosts files and dynamic inventory scripts is
        parsed correctly.
        """
        fact_dirs = ['f_inventory/out']
        inventories = ['f_inventory/mixeddir']
        ansible = ansiblecmdb.Ansible(fact_dirs, inventories)
        # results from dynamic inventory
        self.assertIn("host4.example.com", ansible.hosts)
        self.assertIn("moocow.example.com", ansible.hosts)
        # results from normal hosts file.
        self.assertIn("web03.dev.local", ansible.hosts)
        # INI file ignored.
        self.assertNotIn("ini_setting", ansible.hosts)


class FactCacheTestCase(unittest.TestCase):
    """
    Test that we properly read fact-cached output dirs.
    """
    def testFactCache(self):
        fact_dirs = ['f_factcache/out']
        inventories = ['f_factcache/hosts']
        ansible = ansiblecmdb.Ansible(fact_dirs, inventories, fact_cache=True)
        host_vars = ansible.hosts['debian.dev.local']['hostvars']
        groups = ansible.hosts['debian.dev.local']['groups']
        ansible_facts = ansible.hosts['debian.dev.local']['ansible_facts']
        self.assertIn('dev', groups)
        self.assertEqual(host_vars['dtap'], 'dev')
        self.assertIn('ansible_env', ansible_facts)


if __name__ == '__main__':
    # BUGFIX: unittest.main(exit=True) calls sys.exit() internally, so any
    # code placed after it never executes and the cleanup below was dead
    # code. Run with exit=False, do the cleanup, then exit with the proper
    # status ourselves so callers (test.sh, CI) still see test failures.
    program = unittest.main(exit=False)
    try:
        # Byte-compiled artifact that appears in the source tree when the
        # suite runs — presumably a side effect of how ansible-cmdb gets
        # imported (TODO confirm). Remove it so the tree stays clean.
        os.unlink('../src/ansible-cmdbc')
    except OSError:
        # Best-effort cleanup: it is fine if the file was never created.
        pass
    sys.exit(not program.result.wasSuccessful())