├── .github ├── FUNDING.yml └── workflows │ └── main.yml ├── .gitignore ├── .pre-commit-config.yaml ├── LICENSE ├── README.md ├── docs └── source │ ├── API reference.rst │ ├── CLI tool.rst │ ├── Doc Tag │ └── Doc Tag.rst │ ├── Extend Tag │ └── Extend Tag.rst │ ├── FAQ.rst │ ├── Forming Results Structure │ ├── Absolute path.rst │ ├── Anonymous group.rst │ ├── Dynamic Path.rst │ ├── Dynamic path with path formatters.rst │ ├── Group Name Attribute.rst │ ├── Null path name attribute.rst │ ├── Path formatters.rst │ └── index.rst │ ├── Groups │ ├── Attributes.rst │ ├── Functions.rst │ └── index.rst │ ├── Inputs │ ├── Attributes.rst │ ├── Functions.rst │ └── index.rst │ ├── Installation.rst │ ├── Lookup Tables │ └── Lookup Tables.rst │ ├── Macro Tag │ └── Macro Tag.rst │ ├── Match Variables │ ├── Functions.rst │ ├── Indicators.rst │ ├── Patterns.rst │ └── index.rst │ ├── Outputs │ ├── Attributes.rst │ ├── Formatters.rst │ ├── Functions.rst │ ├── Returners.rst │ └── index.rst │ ├── Overview.rst │ ├── Performance.rst │ ├── Quick start.rst │ ├── TTP Internals │ └── index.rst │ ├── TTP Templates Collection.rst │ ├── Template Tag │ └── Template Tag.rst │ ├── Template Variables │ ├── Attributes.rst │ ├── Getters.rst │ └── index.rst │ ├── Writing templates │ ├── How to filter with TTP.rst │ ├── How to parse hierarchical (configuration) data.rst │ ├── How to parse show commands output.rst │ ├── How to parse text tables.rst │ ├── How to produce time series data with TTP.rst │ └── index.rst │ ├── _images │ ├── cdp_diagram.png │ ├── groups_vaidate_fun_example_1.png │ └── terminal_returner_colorama.png │ ├── conf.py │ ├── index.rst │ └── spelling_wordlist.txt ├── poetry.lock ├── pylintrc ├── pyproject.toml ├── readthedocs.yaml ├── test ├── pytest │ ├── assets │ │ ├── TTP_TEMPLATES_DIR_TEST │ │ │ └── test_ttp_templates_dir_env_variable.txt │ │ ├── excel_out_test_excel_formatter_update_source.xlsx │ │ ├── extend_groups_filter_test.txt │ │ ├── extend_groups_recursive_extend_load.txt │ │ ├── extend_groups_recursive_extend_load_several_top_groups.txt │ │ ├── extend_test_inputs_filter.txt │ │ ├── extend_test_lookups_filter.txt │ │ ├── extend_test_nested_group_filter.txt │ │ ├── extend_test_output_filter.txt │ │ ├── extend_test_vars_filter.txt │ │ ├── extend_vars_and_lookup_tag.txt │ │ ├── extend_vlan.txt │ │ ├── extend_vlan_anon.txt │ │ ├── extend_vlan_with_template_tag.txt │ │ ├── test_extend_tag_bgp_config.txt │ │ ├── test_extend_tag_within_group.txt │ │ ├── test_extend_tag_within_group_with_anonymous_group.txt │ │ ├── test_extend_tag_within_group_with_multiple_groups.txt │ │ ├── test_extend_tag_within_group_with_non_hierarch_template.txt │ │ ├── test_lookup_include_csv.csv │ │ ├── test_lookup_include_yaml.txt │ │ └── yaml_vars.txt │ ├── mock_data │ │ ├── dataset_1 │ │ │ ├── data_1.txt │ │ │ ├── data_2.txt │ │ │ └── data_XYZ.txt │ │ └── dataset_2 │ │ │ ├── data_1.log │ │ │ └── data_2.txt │ ├── test_N2G_formatter.py │ ├── test_anonymous_group.py │ ├── test_answers_and_docs.py │ ├── test_empty_group_results.py │ ├── test_extend_tag.py │ ├── test_group_chain_attribute.py │ ├── test_group_functions.py │ ├── test_group_lookup.py │ ├── test_group_name_attribute.py │ ├── test_headers_indicator.py │ ├── test_inputs.py │ ├── test_line_indicator.py │ ├── test_lookups.py │ ├── test_match_condition_fun.py │ ├── test_match_functions.py │ ├── test_misc.py │ ├── test_output_formatters.py │ ├── test_output_functions.py │ ├── test_output_returners.py │ ├── test_regexes.py │ ├── test_structure.py │ ├── test_syslog_returner.py │ ├── test_template_variables.py │ ├── test_ttp_parser_methods.py │ └── yang_modules │ │ ├── iana-if-type@2017-01-19.yang │ │ ├── ietf-inet-types@2013-07-15.yang │ │ ├── ietf-interfaces@2018-02-20.yang │ │ ├── ietf-ip@2018-02-22.yang │ │ ├── ietf-yang-types@2013-07-15.yang │ │ └── library │ │ └── yang-library.json ├── test_logging_from_module.py ├── test_structure.py ├── ttp_dns_test.txt └── ttp_tests.txt └── ttp ├── __init__.py ├── formatters ├── __init__.py ├── csv_formatter.py ├── excel_formatter.py ├── jinja2_formatter.py ├── json_formatter.py ├── n2g_formatter.py ├── pprint_formatter.py ├── raw_formatter.py ├── table_formatter.py ├── tabulate_formatter.py └── yaml_formatter.py ├── group ├── __init__.py ├── contains.py ├── contains_val.py ├── containsall.py ├── delete.py ├── equal.py ├── exclude.py ├── exclude_val.py ├── excludeall.py ├── expand.py ├── itemize.py ├── items2dict.py ├── lookup.py ├── macro.py ├── record.py ├── set_.py ├── sformat.py ├── to_converters.py ├── to_ip.py ├── validate_cerberus.py └── void.py ├── input ├── __init__.py ├── commands.py ├── macro.py └── test.py ├── lookup ├── __init__.py └── geoip2.py ├── match ├── __init__.py ├── copy_.py ├── count.py ├── dns_lookups.py ├── geoip_lookup.py ├── ip.py ├── item.py ├── joinmatches.py ├── let.py ├── lookup.py ├── mac_eui.py ├── macro.py ├── raise_.py ├── re_.py ├── record.py ├── set_.py ├── string.py ├── to.py ├── unrange.py ├── uptimeparse.py └── void.py ├── output ├── __init__.py ├── deepdiffer.py ├── is_equal.py ├── macro.py ├── transform.py ├── validate_cerberus.py └── validate_yangson.py ├── patterns ├── __init__.py └── get_pattern.py ├── returners ├── __init__.py ├── file_returner.py ├── self_returner.py ├── syslog_returner.py └── terminal_returner.py ├── ttp.py ├── utils ├── __init__.py ├── get_attributes.py ├── guess.py ├── load_python_exec_py2.py ├── load_python_exec_py3.py ├── loaders.py └── quick_parse.py └── variable ├── __init__.py ├── getfilename.py ├── gethostname.py └── time_funcs.py /.github/FUNDING.yml: -------------------------------------------------------------------------------- 1 | custom: ["https://paypal.me/dmulyalin"] 2 | -------------------------------------------------------------------------------- /.github/workflows/main.yml: -------------------------------------------------------------------------------- 1 | name: TTP Tests 2 | on: 3 | push: 4 | branches: 5 | - master 6 | pull_request: 7 | branches: 8 | - master 9 | 10 | jobs: 11 | linters: 12 | name: linters 13 | strategy: 14 | matrix: 15 | python-version: [ '3.7' ] 16 | platform: [windows-latest] 17 | runs-on: ${{ matrix.platform }} 18 | steps: 19 | - uses: actions/checkout@v2 20 | - name: Setup python 21 | uses: actions/setup-python@v1 22 | with: 23 | python-version: ${{ matrix.python-version }} 24 | architecture: x64 25 | - name: Install Dependencies 26 | run: | 27 | python -m pip install poetry 28 | python -m poetry install 29 | - name: Run pre-commit 30 | run: python -m poetry run pre-commit run --all-files 31 | 32 | run_tests: 33 | name: Testing on Python ${{ matrix.python-version }} (${{ matrix.platform}}) 34 | defaults: 35 | run: 36 | # that is to run pytest from within tests directory 37 | working-directory: test/pytest 38 | shell: bash 39 | strategy: 40 | fail-fast: false 41 | matrix: 42 | python-version: [ '3.7', '3.8', '3.9', '3.10' ] 43 | platform: [ubuntu-latest, macOS-10.15, windows-latest] 44 | runs-on: ${{ matrix.platform }} 45 | 46 | steps: 47 | - uses: actions/checkout@v2 48 | - name: Setup Python 49 | uses: actions/setup-python@v1 50 | with: 51 | python-version: ${{ matrix.python-version }} 52 | 53 | - name: Install dependencies 54 | run: | 55 | python -m pip install poetry 56 | python -m poetry install -E full 57 | - name: Run pytest 58 | run: python -m poetry run pytest -vv 59 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.pyc 2 | Data/* 3 | Output/* 4 | ttp.egg-info/* 5 | dist/* 6 | build/* 7 | private/* 8 | how to upload to pypi.txt 9 | docs/build/* 10 | docs/make.bat 11 | docs/Makefile 12 | .pytest_cache/v/cache/nodeids 13 | test/pytest/Output/* 14 | test/pytest/nornir.log 15 | ttp/ttp_dict_cache.pickle 16 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | # to run pre-commit on demand using below configuration: 2 | # pre-commit run --all-files 3 | 4 | repos: 5 | - repo: https://github.com/pre-commit/pre-commit-hooks 6 | rev: v2.3.0 7 | hooks: 8 | - id: check-ast 9 | exclude: .+_py2.py # skip Python2 files 10 | - id: end-of-file-fixer 11 | exclude: ^test/ 12 | - id: requirements-txt-fixer 13 | - id: trailing-whitespace 14 | exclude: ^test/ 15 | - id: check-added-large-files 16 | args: ['--maxkb=1000'] 17 | exclude: ^test/ 18 | # black 19 | - repo: https://github.com/psf/black 20 | rev: 22.8.0 21 | hooks: 22 | - id: black 23 | exclude: ^test/|^docs/|gitignore|README|LICENSE|requirements|pre-commit-config|pylintrc|readthedocs|.+_py2.py 24 | args: 25 | - "--line-length=88" 26 | # uncomment and fix it whenevere have time: 27 | # # flake8 28 | # - repo: https://github.com/pre-commit/pre-commit-hooks 29 | # rev: v2.3.0 30 | # hooks: 31 | # - id: flake8 32 | # exclude: ^test/|^docs/|gitignore|README|LICENSE|requirements|pre-commit-config|pylintrc|readthedocs 33 | # args: 34 | # - "--max-line-length=88" 35 | # - "--max-complexity=19" 36 | # - "--select=B,C,E,F,W,T4,B9" 37 | # - "--ignore=F403,E402,E722,E203,W503,C901,E501,F821" 38 | # # bandit - security checks 39 | # - repo: https://github.com/PyCQA/bandit 40 | # rev: 1.7.0 41 | # hooks: 42 | # - id: bandit 43 | # exclude: ^test/|^docs/|gitignore|README|LICENSE|requirements|pre-commit-config|pylintrc|readthedocs 44 | # # pylint, to run it directly use: python3 -m pylint ./ttr/ --rcfile=pylintrc 45 | # - repo: local 46 | # hooks: 47 | # - id: pylint 48 | # name: pylint 49 | # entry: python3 -m pylint 50 | # language: system 51 | # args: 52 | # - "--rcfile=pylintrc" 53 | # exclude: ^test/|^docs/|gitignore|README|LICENSE|requirements|pre-commit-config|pylintrc|readthedocs 54 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2020 apraksim 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /docs/source/API reference.rst: -------------------------------------------------------------------------------- 1 | API reference 2 | ============= 3 | 4 | API reference for TTP module. 5 | 6 | .. automodule:: ttp 7 | 8 | .. autoclass:: ttp 9 | :members: 10 | 11 | .. autofunction:: quick_parse 12 | -------------------------------------------------------------------------------- /docs/source/CLI tool.rst: -------------------------------------------------------------------------------- 1 | CLI tool 2 | ======== 3 | 4 | TTP comes with simple CLI tool that takes path to data, path to template and produces parsing results. Results can be represented in one of formats supported by CLI tool - yaml, json, raw or pprint, results will be printer to screen. Alternatively, format can be specified using template output tags and printed to screen or returned to file using returners. 5 | 6 | Sample usage:: 7 | 8 | ttp --data "/path/to/data/" --template "path/to/template.txt" --outputter json 9 | 10 | results will be printed to screen in JSON format. 11 | 12 | **Available options** 13 | 14 | * ``-d, --data`` Path to data file or directory with files to process 15 | * ``-dp, --data-prefix`` OS base path to folder with data separated across additional folders as specified in TTP input tags 16 | * ``-t, --template`` Path to text file with template content 17 | * ``-tn, --template-name`` Name of template within file referenced by -t option if file has python (.py) extension 18 | * ``-o, --outputter`` Format results using yaml, json, raw or pprint formatter and prints them to terminal 19 | * ``-ot, --out-template`` Name of template to output results for 20 | * ``-l, --logging`` Logging level - "DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL" 21 | * ``-lf, --log-file`` OS path to file where to write logs instead of printing them to terminal 22 | * ``-T, --Timing`` Print simple timing information to screen about time spent on parsing data 23 | * ``s, --structure`` Final results structure - 'list', 'dictionary' or 'flat_list' 24 | * ``-v, --vars`` JSON string containing variables to add to TTP object 25 | * ``--one`` Forcefully run parsing using single process 26 | * ``--multi`` Forcefully run parsing in multiple processes 27 | -------------------------------------------------------------------------------- /docs/source/Doc Tag/Doc Tag.rst: -------------------------------------------------------------------------------- 1 | Doc Tag 2 | ======= 3 | 4 | Doc tag allows to add any notes or documentation to TTP template. In addition content of all ``doc`` tags 5 | assigned to ``__doc__`` attribute of the template objects within TTP parser object. 6 | 7 | .. warning:: Doc tag cannot contain ``<`` and ``>`` characters, have to use escape sequences instead - ``<`` and ``>`` 8 | 9 | Single template can contain as many ```` tags as required at the top level of the document. 10 | 11 | **Example** 12 | 13 | In this template ```` tag helps to document information about the template and it's usage:: 14 | 15 | 16 | TTP template to parse Cisco IOS "show ip arp" output. 17 | 18 | Template can be invoked using Netmiko run_ttp method like this: 19 | 20 | import pprint 21 | from netmiko import ConnectHandler 22 | 23 | net_connect = ConnectHandler( 24 | device_type="cisco_ios", 25 | host="1.2.3.4", 26 | username="admin", 27 | password="admin", 28 | ) 29 | 30 | res = net_connect.run_ttp("ttp://misc/netmiko/cisco.ios.arp.txt", res_kwargs={"structure": "flat_list"}) 31 | 32 | pprint.pprint(res) 33 | 34 | 35 | 36 | 37 | commands = [ 38 | "show ip arp" 39 | ] 40 | 41 | 42 | 43 | {{ protocol }} {{ ip | IP }} {{ age | replace("-", "-1") }} {{ mac | mac_eui }} {{ type | let("interface", "Uncknown") }} 44 | {{ protocol }} {{ ip | IP }} {{ age | replace("-", "-1") }} {{ mac | mac_eui }} {{ type }} {{ interface | resuball("short_interface_names") }} 45 | 46 | -------------------------------------------------------------------------------- /docs/source/Forming Results Structure/Anonymous group.rst: -------------------------------------------------------------------------------- 1 | Anonymous group 2 | =============== 3 | 4 | If no nested dictionary functionality required or results structure needs to be kept as flat as possible, templates without tag can be used - so called *non hierarchical templates*. 5 | 6 | Top tag can also lack of name attribute, making at anonymous group - group without a name. 7 | 8 | In both cases above, TTP will automatically reconstruct tag name attribute making it equal to ``_anonymous_*`` value, note ``*`` path formatter, that is to make sure that anonymous group results will **always be a list**. 9 | 10 | At the end ``_anonymous_`` group results merged with the rest of groups' results. Because of how results combined, template that has anonymous groups will always produce a list results structure. 11 | 12 | .. note:: 13 | 14 | tag without name attribute does have support for all group attributes and functions as well as nested groups. However, keep in mind that for nested groups name attribute inherited from parent groups. 15 | 16 | **Example** 17 | 18 | Example for without *name* attribute. 19 | 20 | Data:: 21 | 22 | interface Port-Chanel11 23 | description Storage 24 | ! 25 | interface Loopback0 26 | description RID 27 | ip address 10.0.0.3/24 28 | ! 29 | interface Vlan777 30 | description Management 31 | ip address 192.168.0.1/24 32 | vrf MGMT 33 | ! 34 | 35 | Template:: 36 | 37 | 38 | interface {{ interface }} 39 | description {{ description }} 40 | 41 | ip address {{ ip }}/{{ mask }} 42 | 43 | vrf {{ vrf }} 44 | !{{_end_}} 45 | 46 | 47 | Result:: 48 | 49 | [ 50 | [ 51 | { 52 | "description": "Storage", 53 | "interface": "Port-Chanel11" 54 | }, 55 | { 56 | "description": "RID", 57 | "interface": "Loopback0", 58 | "ips": { 59 | "ip": "10.0.0.3", 60 | "mask": "24" 61 | } 62 | }, 63 | { 64 | "description": "Management", 65 | "interface": "Vlan777", 66 | "ips": { 67 | "ip": "192.168.0.1", 68 | "mask": "24" 69 | }, 70 | "vrf": "MGMT" 71 | } 72 | ] 73 | ] 74 | -------------------------------------------------------------------------------- /docs/source/Forming Results Structure/Dynamic path with path formatters.rst: -------------------------------------------------------------------------------- 1 | Dynamic path with path formatters 2 | ================================= 3 | 4 | Dynamic path with path formatters is also supported. In example below child for *interfaces* will be a list. 5 | 6 | **Example** 7 | 8 | Data:: 9 | 10 | interface Port-Chanel11 11 | description Storage 12 | ! 13 | interface Loopback0 14 | description RID 15 | ip address 10.0.0.3/24 16 | ! 17 | interface Vlan777 18 | description Management 19 | ip address 192.168.0.1/24 20 | vrf MGMT 21 | 22 | Template:: 23 | 24 | 25 | interface {{ interface }} 26 | description {{ description }} 27 | ip address {{ ip }}/{{ mask }} 28 | vrf {{ vrf }} 29 | 30 | 31 | Result:: 32 | 33 | [ 34 | { 35 | "interfaces": [ 36 | { 37 | "Loopback0": { 38 | "description": "RID", 39 | "ip": "10.0.0.3", 40 | "mask": "24" 41 | }, 42 | "Port-Chanel11": { 43 | "description": "Storage" 44 | }, 45 | "Vlan777": { 46 | "description": "Management", 47 | "ip": "192.168.0.1", 48 | "mask": "24", 49 | "vrf": "MGMT" 50 | } 51 | } 52 | ] 53 | } 54 | ] 55 | -------------------------------------------------------------------------------- /docs/source/Forming Results Structure/Group Name Attribute.rst: -------------------------------------------------------------------------------- 1 | Group Name Attribute 2 | ==================== 3 | 4 | Group attribute *name* used to uniquely identify group and its results within results structure. This attribute is a dot separated string, there is every dot represents a next level in hierarchy. This string is split into **path items** using dot character and converted into nested hierarchy of dictionaries and/or lists. 5 | 6 | Consider a group with this name attribute value:: 7 | 8 | 9 | interface {{ interface }} 10 | description {{ description }} 11 | ip address {{ ip }}/{{ mask }} 12 | vrf {{ vrf }} 13 | 14 | 15 | If below data parsed with that template:: 16 | 17 | interface Vlan777 18 | description Management 19 | ip address 192.168.0.1/24 20 | vrf MGMT 21 | 22 | This result will be produced:: 23 | 24 | [ 25 | { 26 | "interfaces": { 27 | "vlan": { 28 | "L3": { 29 | "vrf-enabled": { 30 | "description": "Management", 31 | "interface": "Vlan777", 32 | "ip": "192.168.0.1", 33 | "mask": "24", 34 | "vrf": "MGMT" 35 | } 36 | } 37 | } 38 | } 39 | } 40 | ] 41 | 42 | Name attribute allows to from arbitrary (from practical perspective) depth structure in deterministic fashion, enabling further programmatic consumption of produced results. 43 | -------------------------------------------------------------------------------- /docs/source/Forming Results Structure/Null path name attribute.rst: -------------------------------------------------------------------------------- 1 | Null path name attribute 2 | ======================== 3 | 4 | It is possible to specify null path as a name, null path looks like ``name="_"`` or null path can be used as a first item in the path - ``name="_.nextlevel"``. 5 | 6 | Special handling implemented for null path - TTP will merge results with parent for group with null path, as a result null path ``_`` will not appear in results. 7 | 8 | One of the use cases for this feature is to create a group that will behave like a normal group in terms of results forming and processing, but will merge with parent in the process of saving into overall results. 9 | 10 | **Example** 11 | 12 | In this example peer_software used together with _line_ indicator to extract results, however, for _line_ to behave properly it was defined within separate group with explicit ``_start_`` and ``_end_`` indicators. First, this is how template would look like without null path:: 13 | 14 | 15 | Device ID: switch-2.net 16 | IP address: 10.251.1.49 17 | 18 | Version : 19 | Cisco Internetwork Operating System Software 20 | IOS (tm) s72033_rp Software (s72033_rp-PK9SV-M), Version 12.2(17d)SXB11a, RELEASE SOFTWARE (fc1) 21 | 22 | advertisement version: 2 23 | 24 | 25 | 26 | Device ID: {{ peer_hostname }} 27 | IP address: {{ peer_ip }} 28 | 29 | 30 | Version : {{ _start_ }} 31 | {{ peer_software | _line_ }} 32 | {{ _end_ }} 33 | 34 | 35 | 36 | 37 | And result would be:: 38 | 39 | [ 40 | [ 41 | { 42 | "peer_hostname": "switch-2.net", 43 | "peer_ip": "10.251.1.49", 44 | "peer_software": { 45 | "peer_software": "Cisco Internetwork Operating System Software \nIOS (tm) s72033_rp Software (s72033_rp-PK9SV-M), Version 12.2(17d)SXB11a, RELEASE SOFTWARE (fc1)" 46 | } 47 | } 48 | ] 49 | ] 50 | 51 | Above results have a bit of redundancy in them as they have unnecessary hierarchy to store peer_software details, to avoid that, null path can be used:: 52 | 53 | 54 | Device ID: switch-2.net 55 | IP address: 10.251.1.49 56 | 57 | Version : 58 | Cisco Internetwork Operating System Software 59 | IOS (tm) s72033_rp Software (s72033_rp-PK9SV-M), Version 12.2(17d)SXB11a, RELEASE SOFTWARE (fc1) 60 | 61 | advertisement version: 2 62 | 63 | 64 | 65 | Device ID: {{ peer_hostname }} 66 | IP address: {{ peer_ip }} 67 | 68 | 69 | Version : {{ _start_ }} 70 | {{ peer_software | _line_ }} 71 | {{ _end_ }} 72 | 73 | 74 | 75 | 76 | Results with new template:: 77 | 78 | [ 79 | [ 80 | { 81 | "peer_hostname": "switch-2.net", 82 | "peer_ip": "10.251.1.49", 83 | "peer_software": "Cisco Internetwork Operating System Software \nIOS (tm) s72033_rp Software (s72033_rp-PK9SV-M), Version 12.2(17d)SXB11a, RELEASE SOFTWARE (fc1)" 84 | } 85 | ] 86 | ] 87 | 88 | Even though peer_software match variable was defined in separate group, because of null path, it was merged with parent group, flattening results structure. 89 | -------------------------------------------------------------------------------- /docs/source/Forming Results Structure/Path formatters.rst: -------------------------------------------------------------------------------- 1 | .. _path_formatters: 2 | 3 | Path formatters 4 | =============== 5 | 6 | By default ttp assumes that all the *path items* must be joined into a dictionary structure, in other words group name "item1.item2.item3" will be transformed into nested dictionary:: 7 | 8 | {"item1": 9 | {"item2": 10 | {"item3": {} 11 | } 12 | } 13 | } 14 | 15 | That structure will be populated with results as parsing progresses, but in case if for "item3" more than single result datum needs to be saved, ttp will transform "item3" child to list and save further results by appending them to that list. That process happens automatically but can be influenced using *path formatters*. 16 | 17 | Supported path formatters \* and \*\* for group *name* attribute can be used following below rules: 18 | 19 | * If single start character \* used as a suffix (appended to the end) of path item, next level (child) of this path item always will be a list 20 | * If double start character \*\* used as a suffix (appended to the end) of path item, next level (child) of this path item always will be a dictionary 21 | 22 | **Example** 23 | 24 | Consider this group with name attribute formed in such a way that interfaces item child will be a list and child of L3 path item also will be a list.:: 25 | 26 | 27 | interface {{ interface }} 28 | description {{ description }} 29 | ip address {{ ip }}/{{ mask }} 30 | vrf {{ vrf }} 31 | 32 | 33 | If below data parsed with that template:: 34 | 35 | interface Vlan777 36 | description Management 37 | ip address 192.168.0.1/24 38 | vrf MGMT 39 | 40 | This result will be produced:: 41 | 42 | [ 43 | { 44 | "interfaces": [ <----this is the start of nested list 45 | { 46 | "vlan": { 47 | "L3": [ <----this is the start of another nested list 48 | { 49 | "vrf-enabled": { 50 | "description": "Management", 51 | "interface": "Vlan777", 52 | "ip": "192.168.0.1", 53 | "mask": "24", 54 | "vrf": "MGMT" 55 | } 56 | } 57 | ] 58 | } 59 | } 60 | ] 61 | } 62 | ] 63 | -------------------------------------------------------------------------------- /docs/source/Forming Results Structure/index.rst: -------------------------------------------------------------------------------- 1 | .. _results_structure: 2 | 3 | Forming Results Structure 4 | ========================= 5 | 6 | TTP supports variety of techniques to influence results structure. Majority of them revolving around group name attribute, which represents dot separated path of keys within results structure - that is generally helps for results within given template. Other methods can influence results representation across several templates. 7 | 8 | .. toctree:: 9 | :maxdepth: 2 10 | :titlesonly: 11 | 12 | Group Name Attribute 13 | Path formatters 14 | Dynamic Path 15 | Dynamic path with path formatters 16 | Anonymous group 17 | Null path name attribute 18 | Absolute path 19 | 20 | Expanding Match Variables 21 | ------------------------- 22 | 23 | Match variables can have name with dot characters in it. Group function :ref:`Groups/Functions:expand` can be used to transform names in a nested dictionary. However, path expansion contained within this group this particular results datum only. 24 | 25 | Template results mode 26 | --------------------- 27 | 28 | Templates support :ref:`Template Tag/Template Tag:results` attribute that can help to influence results structure within given template. 29 | 30 | TTP object results structure 31 | ---------------------------- 32 | 33 | TTP object ``result`` method have support for ``structure`` keyword, allowing to combine results across several templates in either a list or dictionary manner. 34 | -------------------------------------------------------------------------------- /docs/source/Groups/index.rst: -------------------------------------------------------------------------------- 1 | Groups 2 | ====== 3 | 4 | Groups are the core component of ttp together with match variables. Group is a collection of regular expressions derived from template, groups denoted using XML group tag (, , ) and can be nested to form hierarchy. Parsing results for each group combined into a single datum - dictionary, that dictionary merged with bigger set of results data. 5 | 6 | As ttp was developed primarily for parsing semi-structured configuration data of various network elements, groups concept stems from the fact that majority of configuration data can be divided in distinctive pieces of information, each of which can denote particular property or feature configured on device, moreover, it is not uncommon that these pieces of information can be broken down into even smaller pieces of repetitive data. TTP helps to combine regular expressions in groups for the sake of parsing small, repetitive pieces of text data. 7 | 8 | For example, this is how industry standard CLI configuration data for interfaces might look like:: 9 | 10 | interface Vlan163 11 | description [OOB management] 12 | ip address 10.0.10.3 255.255.255.0 13 | ! 14 | interface GigabitEthernet6/41 15 | description [uplink to core] 16 | ip address 192.168.10.3 255.255.255.0 17 | 18 | It is easy to notice that there is a lot of data which is the same and there is a lot of information which is different as well, if we would say that overall device's interfaces configuration is a collection of repetitive data, with interfaces being a smallest available datum, we can outline it in ttp template below and use it parse valuable information from text data:: 19 | 20 | 21 | interface {{ interface }} 22 | description {{ description | PHRASE }} 23 | ip address {{ ip }} {{ mask }} 24 | 25 | 26 | After parsing this configuration data with that template results will be:: 27 | 28 | [ 29 | { 30 | "interfaces": [ 31 | { 32 | "description": "[OOB management]", 33 | "interface": "Vlan163", 34 | "ip": "10.0.10.3", 35 | "mask": "255.255.255.0" 36 | }, 37 | { 38 | "description": "[uplink to core]", 39 | "interface": "GigabitEthernet6/41", 40 | "ip": "192.168.10.3", 41 | "mask": "255.255.255.0" 42 | } 43 | ] 44 | } 45 | ] 46 | 47 | As a result each interfaces group produced separate dictionary and all interfaces dictionaries were combined in a list under *interfaces* key which is derived from group name. 48 | 49 | Group reference 50 | ------------------- 51 | 52 | .. toctree:: 53 | :maxdepth: 2 54 | 55 | Attributes 56 | Functions 57 | -------------------------------------------------------------------------------- /docs/source/Inputs/Functions.rst: -------------------------------------------------------------------------------- 1 | Functions 2 | =================== 3 | 4 | Input tag support functions to pre-process data. 5 | 6 | .. list-table:: 7 | :widths: 10 90 8 | :header-rows: 1 9 | 10 | * - Attribute 11 | - Description 12 | * - `functions attribute`_ 13 | - pipe-separated list of functions 14 | * - `macro`_ 15 | - comma-separated list of macro functions to run input data through 16 | * - `extract_commands`_ 17 | - comma-separated list of commands output to extract from text data 18 | * - `test`_ 19 | - Test function to verify input function handling 20 | 21 | functions attribute 22 | ------------------------------------------------------------------------ 23 | ``functions="function1('attributes') | function2('attributes') | ... | functionN('attributes')"`` 24 | 25 | * functionN - name of the input function together with it's attributes 26 | 27 | This attribute allow to define a sequence of function, the main advantage of using string of functions against defining functions directly in the input tag is the fact that functions order will be honored, otherwise functionality is the same. 28 | 29 | .. warning:: pipe '|' symbol must be used to separate function names, not comma 30 | 31 | macro 32 | ------------------------------------------------------------------------ 33 | ``macro="name1, name2, ... , nameN"`` 34 | 35 | * nameN - comma separated string of macro functions names that should be used to run input data through. The sequence is *preserved* and macros executed in specified order, in other words macro named name2 will run after macro name1. 36 | 37 | Macro brings Python language capabilities to input data processing and validation during TTP module execution, as it allows to run custom python functions. Macro functions referenced by their name in input tag macro definitions. 38 | 39 | Macro function must accept only one attribute to hold input data text. 40 | 41 | Depending on data returned by macro function, TTP will behave differently according to these rules: 42 | 43 | * If macro returns True or False - original data unchanged, macro handled as condition functions, stopping further functions execution on False and keeps processing input data on True 44 | * If macro returns None - data processing continues, no additional logic associated 45 | * If macro returns single item - that item replaces original data supplied to macro and processed further by other input tag functions 46 | 47 | extract_commands 48 | ------------------------------------------------------------------------ 49 | ``extract_commands="command1, command2, ... , commandN"`` 50 | 51 | Purpose of this function is for each network device command string TTP can extract associated data from input text, so that input groups will only process data they designed to parse 52 | 53 | ..note:: to be able to successfully extract show commands output, text data should contain device hostname together with command itself. ``gethostname`` function will be called on data to extract hostname 54 | 55 | **Example** 56 | 57 | In below template, only "show interfaces" command output will be processed, as only that command specified in input ``extract_commands`` attribute. 58 | 59 | Template:: 60 | 61 | 62 | cpe1#show int 63 | GigabitEthernet33 is up, line protocol is up 64 | Hardware is CSR vNIC, address is 0800.2779.9999 (bia 0800.2779.9999) 65 | cpe1#show interfaces 66 | GigabitEthernet44 is up, line protocol is up 67 | Hardware is CSR vNIC, address is 0800.2779.e896 (bia 0800.2779.e896) 68 | cpe1#show interf 69 | GigabitEthernet55 is up, line protocol is up 70 | Hardware is CSR vNIC, address is 0800.2779.e888 (bia 0800.2779.e888) 71 | 72 | 73 | 74 | {{ interface }} is up, line protocol is up 75 | Hardware is CSR vNIC, address is {{ mac }} (bia {{ bia_mac }}) 76 | 77 | 78 | Result:: 79 | 80 | [ 81 | [ 82 | { 83 | "interfaces_status": { 84 | "bia_mac": "0800.2779.e896", 85 | "interface": "GigabitEthernet44", 86 | "mac": "0800.2779.e896" 87 | } 88 | } 89 | ] 90 | ] 91 | 92 | test 93 | ------------------------------------------------------------------------ 94 | ``test=""`` 95 | 96 | Test function to verify input function call, test simply prints informational message to the screen, indicating that input test function was called. 97 | -------------------------------------------------------------------------------- /docs/source/Inputs/index.rst: -------------------------------------------------------------------------------- 1 | Inputs 2 | ======= 3 | 4 | Inputs can be used to specify data location and how it should be loaded or filtered. Inputs can be attached to groups for parsing, for instance this particular input data should be parsed by this set of groups only. That can help to increase the overall performance as only data belonging to particular group will be parsed. 5 | 6 | .. note:: Order of inputs preserved as internally they represented using OrderedDict object, that can be useful if data produced by first input needs to bused by other inputs. 7 | 8 | Assuming we have this folders structure to store data that needs to be parsed:: 9 | 10 | /my/base/path/ 11 | Data/ 12 | Inputs/ 13 | data-1/ 14 | sw-1.conf 15 | sw-1.txt 16 | data-2/ 17 | sw-2.txt 18 | sw3.txt 19 | 20 | Where content:: 21 | 22 | [sw-1.conf] 23 | interface GigabitEthernet3/7 24 | switchport access vlan 700 25 | ! 26 | interface GigabitEthernet3/8 27 | switchport access vlan 800 28 | ! 29 | 30 | [sw-1.txt] 31 | interface GigabitEthernet3/2 32 | switchport access vlan 500 33 | ! 34 | interface GigabitEthernet3/3 35 | switchport access vlan 600 36 | ! 37 | 38 | [sw-2.txt] 39 | interface Vlan221 40 | ip address 10.8.14.130/25 41 | 42 | interface Vlan223 43 | ip address 10.10.15.130/25 44 | 45 | [sw3.txt] 46 | interface Vlan220 47 | ip address 10.9.14.130/24 48 | 49 | interface Vlan230 50 | ip address 10.11.15.130/25 51 | 52 | Template below uses inputs in such a way that for "data-1" folder only files that have ".txt" extension will be parsed by group "interfaces1", for input named "dataset-2" only files with names matching "sw\-\d.*" regular expression will be parsed by "interfaces2" group. In addition, base path provided that will be appended to each url within *url* input parameter. Tag text for input "dataset-1" structured using YAML representation, while "dataset-2" uses python language definition. 53 | 54 | As a result of inputs filtering, only "sw-1.txt" will be processed by "dataset-1" input because it is the only file that has ".txt" extension, only "sw-2.txt" will be processed by input "dataset-2" because "sw3.txt" not matched by "sw\-\d.*" regular expression. 55 | 56 | Template:: 57 | 58 | 79 | 80 | And result would be:: 81 | 82 | [ 83 | { 84 | "interfaces1": [ 85 | { 86 | "access_vlan": "500", 87 | "interface": "GigabitEthernet3/2" 88 | }, 89 | { 90 | "access_vlan": "600", 91 | "interface": "GigabitEthernet3/3" 92 | } 93 | ] 94 | }, 95 | { 96 | "interfaces2": [ 97 | { 98 | "interface": "Vlan221", 99 | "ip": "10.8.14.130", 100 | "mask": "25" 101 | }, 102 | { 103 | "interface": "Vlan223", 104 | "ip": "10.10.15.130", 105 | "mask": "25" 106 | } 107 | ] 108 | } 109 | ] 110 | 111 | Inputs reference 112 | ------------------- 113 | 114 | .. toctree:: 115 | :maxdepth: 2 116 | 117 | Attributes 118 | Functions 119 | -------------------------------------------------------------------------------- /docs/source/Installation.rst: -------------------------------------------------------------------------------- 1 | Installation 2 | ============ 3 | 4 | Using pip:: 5 | 6 | pip install ttp 7 | 8 | Or download code from GitHub, unzip, navigate to folder and run:: 9 | 10 | python setup.py install or python -m pip install . 11 | 12 | Or install GIT and run installation of latest source code from GitHub master brunch:: 13 | 14 | python -m pip install git+https://github.com/dmulyalin/ttp 15 | 16 | Install all optional dependencies:: 17 | 18 | pip install ttp[full] 19 | 20 | 21 | Additional dependencies 22 | ----------------------- 23 | 24 | TTP mainly uses built-in libraries. However, additional modules need to be installed on the system for certain features to work. 25 | 26 | **Group Functions** 27 | 28 | * :ref:`Groups/Functions:cerberus` - requires `Cerberus library `_ 29 | 30 | **Output Formatters** 31 | 32 | * :ref:`Outputs/Formatters:yaml` - requires `PyYAML module `_ 33 | * :ref:`Outputs/Formatters:tabulate` - requires `tabulate module `_ 34 | * :ref:`Outputs/Formatters:jinja2` - requires `Jinja2 module `_ 35 | * :ref:`Outputs/Formatters:excel` - requires `openpyxl `_ 36 | * :ref:`Outputs/Formatters:N2G` - requires `N2G module `_ 37 | 38 | **Output Functions** 39 | 40 | * :ref:`Outputs/Functions:deepdiff` - requires `deepdiff library `_ 41 | 42 | **Lookup Tables** 43 | 44 | * INI lookup tables - requires `configparser `_ 45 | * :ref:`Lookup Tables/Lookup Tables:geoip2 database` - requires `GeoIP2 `_ 46 | 47 | All above optional dependencies can be installed using `full` extras: 48 | 49 | pip install ttp[full] 50 | -------------------------------------------------------------------------------- /docs/source/Macro Tag/Macro Tag.rst: -------------------------------------------------------------------------------- 1 | Macro Tag 2 | ========= 3 | 4 | TTP has a number of built-in function for various systems - function for groups, functions for outputs, functions for variables and functions for match variables. To extend this functionality even further, TTP allows to define custom functions using tags. 5 | 6 | Macro is a python code within tag text. This code can contain a number of function definitions, these functions can be referenced within TTP templates. 7 | 8 | .. warning:: Python `exec `_ function used to load macro code, as a result it is unsafe to use templates from untrusted sources, as code within macro tag will be executed on template load. 9 | 10 | For further details check: 11 | 12 | * Match variables :ref:`Match Variables/Functions:macro` 13 | * Groups :ref:`Groups/Functions:macro` 14 | * Outputs :ref:`Outputs/Functions:macro` 15 | * Inputs :ref:`Inputs/Functions:macro` 16 | 17 | TTP internally uses ``_ttp_`` dictionary to contain reference to all groups, inputs, outputs, match variables and getter functions. That dictionary injected in global space of macro function and can be used to call TTP functions. 18 | -------------------------------------------------------------------------------- /docs/source/Match Variables/index.rst: -------------------------------------------------------------------------------- 1 | Match Variables 2 | =============== 3 | 4 | Match variables are used as names (keys) for information (values) that needs to be extracted from text data. 5 | You can declare a match variable by naming it within double curly brackets, ``{{`` and ``}}``. For instance:: 6 | 7 | 8 | interface {{ interface }} 9 | switchport trunk allowed vlan add {{ trunk_vlans }} 10 | 11 | 12 | The match variables ``interface`` and ``trunk_vlans`` will store matching values extracted from this sample data:: 13 | 14 | interface GigabitEthernet3/4 15 | switchport trunk allowed vlan add 771,893 16 | ! 17 | interface GigabitEthernet3/5 18 | switchport trunk allowed vlan add 138,166-173 19 | 20 | After parsing, TTP will produce this result:: 21 | 22 | [ 23 | { 24 | "interfaces": { 25 | "interface": "GigabitEthernet3/4", 26 | "trunk_vlans": "771,893" 27 | }, 28 | { 29 | "interface": "GigabitEthernet3/5", 30 | "trunk_vlans": "138,166-173" 31 | } 32 | } 33 | ] 34 | 35 | You can also combine match variables with indicators, functions, and/or regular expression patterns. 36 | These help define the way your data is parsed, processed and structured - especially when combined with groups. 37 | 38 | 39 | Match Variables reference 40 | ------------------------- 41 | 42 | .. toctree:: 43 | :maxdepth: 2 44 | 45 | Indicators 46 | Functions 47 | Patterns 48 | -------------------------------------------------------------------------------- /docs/source/Outputs/Attributes.rst: -------------------------------------------------------------------------------- 1 | Attributes 2 | ========== 3 | 4 | There are a number of attributes that outputs system can use. Some attributes can be specific to output itself (name, description), others can be used by formatters or returners. 5 | 6 | .. list-table:: 7 | :widths: 10 90 8 | :header-rows: 1 9 | 10 | * - Name 11 | - Description 12 | * - `name`_ 13 | - name of the output, can be referenced in group *output* attribute 14 | * - `description`_ 15 | - attribute to contain description of outputter 16 | * - `load`_ 17 | - name of the loader to use to load output tag text 18 | * - `returner`_ 19 | - returner to use to return data e.g. self, file, terminal 20 | * - `format`_ 21 | - formatter to use to format results 22 | * - `condition`_ 23 | - condition to check before running output 24 | 25 | name 26 | ****************************************************************************** 27 | ``name="output_name"`` 28 | 29 | Name of the output, optional attribute, can be used to reference it in groups :ref:`Groups/Attributes:output` attribute, in that case that output will become group specific and will only process results for this group. 30 | 31 | description 32 | ****************************************************************************** 33 | ``name="desription_string"`` 34 | 35 | desription_string, optional string that contains output description or notes, can serve documentation purposes. 36 | 37 | load 38 | ****************************************************************************** 39 | ``load="loader_name"`` 40 | 41 | Name of the loader to use to render supplied output tag text data, default is python. 42 | 43 | Supported loaders: 44 | 45 | * python - uses python `exec `_ method to load data structured in native Python formats 46 | * yaml - relies on `PyYAML `_ to load YAML structured data 47 | * json - used to load JSON formatted variables data 48 | * ini - `configparser `_ Python standard module used to read variables from ini structured file 49 | * csv - csv formatted data loaded with Python *csv* standard library module 50 | 51 | returner 52 | ****************************************************************************** 53 | ``returner=returner_name"`` 54 | 55 | Name of the returner to use to return results. 56 | 57 | format 58 | ****************************************************************************** 59 | ``format=formatter_name"`` 60 | 61 | Name of the formatter to use to format results. 62 | 63 | condition 64 | ****************************************************************************** 65 | ``condition="template_variable_name, template_variable_value"`` 66 | 67 | Where: 68 | 69 | * ``template_variable_name`` - name of template variable to use for condition check 70 | * ``template_variable_value`` - value to evaluate 71 | 72 | Attribute to check condition for equality - if ``template_variable_value`` parameter equal to value of 73 | template variable with name ``template_variable_name`` condition satisfied. 74 | 75 | Used to conditionally run outputters - if condition met, outputter will run, outputter skipped otherwise. 76 | 77 | **Example** 78 | 79 | Here we conditionally run csv output formatter using ``convert_to_csv`` template variable:: 80 | 81 | data = """ 82 | interface GigabitEthernet1/3.251 83 | description Customer #32148 84 | encapsulation dot1q 251 85 | ip address 172.16.33.10 255.255.255.128 86 | shutdown 87 | ! 88 | interface GigabitEthernet1/4 89 | description vCPEs access control 90 | ip address 172.16.33.10 255.255.255.128 91 | """ 92 | template = """ 93 | 94 | interface {{ interface }} 95 | description {{ description }} 96 | encapsulation dot1q {{ vlan }} 97 | ip address {{ ip }} {{ mask }} 98 | 99 | 100 | 101 | """ 102 | parser1 = ttp(data=data, template=template, vars={"convert_to_csv": False}) 103 | parser1.parse() 104 | res1 = parser1.result() 105 | 106 | parser2 = ttp(data=data, template=template, vars={"convert_to_csv": True}) 107 | parser2.parse() 108 | res2 = parser2.result() 109 | 110 | pprint.pprint(res1) 111 | # prints: 112 | # [[[{'interface': 'GigabitEthernet1/3.251', 113 | # 'ip': '172.16.33.10', 114 | # 'mask': '255.255.255.128', 115 | # 'vlan': '251'}, 116 | # {'interface': 'GigabitEthernet1/4', 117 | # 'ip': '172.16.33.10', 118 | # 'mask': '255.255.255.128'}]]] 119 | 120 | pprint.pprint(res2) 121 | # prints: 122 | # ['"interface","ip","mask","vlan"\n' 123 | # '"GigabitEthernet1/3.251","172.16.33.10","255.255.255.128","251"\n' 124 | # '"GigabitEthernet1/4","172.16.33.10","255.255.255.128",""'] 125 | 126 | Outputter ```` indicates that this outputter will only run if 127 | ``convert_to_csv`` template variable set to ``True`` 128 | -------------------------------------------------------------------------------- /docs/source/Outputs/index.rst: -------------------------------------------------------------------------------- 1 | Outputs 2 | ======= 3 | 4 | Outputs system allows to process parsing results, format them in certain way and return results to various destination. For instance, using yaml formatter results can take a form of YAML syntax and using file returner these results can be saved into file. 5 | 6 | Outputs can be chained, say results after passing through first outputter will serve as an input for next outputter. That allows to implement complex processing logic of results produced by ttp. 7 | 8 | The opposite way would be that each output defined in template will work with parsing results, transform them in different way and return to different destinations. An example of such a behavior might be the case when first outputter form csv table and saves it onto the file, while second outputter will render results with Jinja2 template and print them to the screen. 9 | 10 | In addition two types of outputter exists - template specific and group specific. Template specific outputs will process template overall results, while group-specific will work with results of this particular group only. 11 | 12 | There is a set of function available in outputs to process/modify results further. 13 | 14 | .. note:: If several outputs provided - they run sequentially in the order defined in template. Within single output, processing order is - functions run first, after that formatters, followed by returners. 15 | 16 | Outputs reference 17 | ------------------- 18 | 19 | .. toctree:: 20 | :maxdepth: 2 21 | 22 | Attributes 23 | Functions 24 | Formatters 25 | Returners 26 | -------------------------------------------------------------------------------- /docs/source/Overview.rst: -------------------------------------------------------------------------------- 1 | Overview 2 | ========= 3 | 4 | TTP is a Python module that allows relatively fast performance parsing of semi-structured text data using templates. TTP was developed to enable programmatic access to data produced by CLI of networking devices, but, it can be used to parse any semi-structured text that contains distinctive repetition patterns. 5 | 6 | In the simplest case ttp takes two files as an input - data that needs to be parsed and parsing template, returning results structure with extracted information. 7 | 8 | Same data can be parsed by several templates producing results accordingly, templates are easy to create and users encouraged to write their own ttp templates. 9 | 10 | Motivation 11 | ---------- 12 | 13 | While networking devices continue to develop API capabilities, there is a big footprint of legacy and not-so devices in the field, these devices are lacking of any well developed API to retrieve structured data, the closest they can get is SNMP and CLI text output. Moreover, even if some devices have API capable of representing their configuration or state data in the format that can be consumed programmatically, in certain cases, the amount of work that needs to be done to make use of these capabilities outweighs the benefits or value of produced results. 14 | 15 | There are a number of tools available to parse text data, but, author of TTP believes that parsing data is only part of the work flow, where the ultimate goal is to make use of the actual data. 16 | 17 | Say we have configuration files and we want to create a report of all IP addresses configured on devices together with VRFs and interface descriptions, report should have csv format. To do that we have (1) collect data from various inputs and maybe sort and prepare it, (2) parse that data, (3) format it in certain way and (4) save it somewhere or pass to other program(s). TTP has built-in capabilities to address all of these steps to produce desired outcome. 18 | 19 | Core Functionality 20 | ------------------ 21 | 22 | TTP has a number of systems built into it: 23 | 24 | * Groups system - help to define results hierarchy and data processing functions with filtering 25 | * Parsing system - uses regular expressions derived out of templates to parse and process data 26 | * Input system - used to define various input data sources, help to retrieve data, prepare it and map to the groups for parsing 27 | * Output system - allows to format parsing results and return them to certain destinations 28 | * Macro - inline Python code that can be used to process results and extend TTP functionality, having access to _ttp_ dictionary containing all groups, match, inputs, outputs functions 29 | * Lookup tables - helps to enrich results with additional information or reference results across different templates or groups to combine them 30 | * Template variables - variables store, accessible during template execution for caching or retrieving values 31 | * Template tags - to define several independent templates within single file together with results forming mode 32 | * Extend tags - helps to extend template with other templates to facilitate re-use of templates 33 | * CLI tool - allows to run templates directly from command line 34 | * Lazy loader system - TTP only imports function it uses within the templates, that significantly decreases start time 35 | * Multiprocessing system - controls the start and data exchange between several Python processes to increase parsing performance 36 | * Logging system - helps to troubleshoot and debug TTP 37 | -------------------------------------------------------------------------------- /docs/source/Performance.rst: -------------------------------------------------------------------------------- 1 | Performance 2 | =========== 3 | 4 | TTP has performance of approximately 211 lines per millisecond on Intel Core i5-3320M CPU @ 2.6GHz (CPU End-of-Life July 2014) if running in multiprocess mode, dataset of 3,262,464 lines can be parsed in under 16 seconds best case and under 22 seconds worst case. Multiprocessing mode approximately 30-40% faster compared to running in single process, the difference is more significant the more data has to be parsed. 5 | 6 | When TTP ready to parse data it goes through decision logic to determine parsing mode following below rules: 7 | 8 | * run in single process if ``one=True`` was set for TTP parse method 9 | * run in multiprocess if ``multi=True`` was set for TTP parse method 10 | * run in single process if overall size of loaded data less then 5MByte 11 | * run in multiprocess if overall size of loaded data more then 5MByte and at least two datums loaded 12 | 13 | In multiprocessing mode, TTP starts one process per each CPU core on the system and forms a queue of work, there each item contains data for single input datum. For instance we have a folder with 100 files to process, TTP forms queue of 100 chunks of work, each chunk containing text data from single file, in multiprocessing mode that work distributed across several cores in such a way that as long as chunk of work finished by the process it picks up another chunk, without waiting for other processes to finish. 14 | 15 | Multiprocessing mode restrictions 16 | --------------------------------- 17 | 18 | While multiprocessing mode has obvious processing speed increase benefits, it comes with several restrictions. 19 | 20 | * per_template results mode not supported with multiprocessing as no results shared between processes, only per_input mode supported with multiprocessing 21 | * startup time for multiprocessing is slower compared to single process, as each process takes time to initiate 22 | * global variables space not shared between processes, as a result a number of functions will not be able to operate properly, such as: 23 | 24 | * match variable count function - ``globvar`` will not have access to global variables 25 | * match variable record function - record cannot save variables in global namespace 26 | * match variable lookup function - will not work if reference group that parse different inputs due to ``_ttp_['template_obj']`` not shared between processes 27 | 28 | General performance considerations 29 | ----------------------------------- 30 | 31 | Keep data processing out of TTP if you are after best performance, the more processing/functions TTP has to run, the more time it will take to finish parsing. 32 | 33 | During parsing, avoid use of broad match regular expressions, such as ``.*`` unless no other options left, one such expression used for ``_line_`` indicator internally. As a result of excessive matches, processing time can increase significantly. Strongly consider using ``_end_`` indicator together with any broad match regexes to limit the scope of text processed. 34 | 35 | Consider providing TTP with as clean data as possible - data that contains only text that will be matched by TTP. That will help to save CPU cycles by not processing unrelated data, also that will guarantee that no false positive matches exist. For instance, input ``commands`` function can be used to pre-process data and present only required commands output to certain groups. 36 | -------------------------------------------------------------------------------- /docs/source/Quick start.rst: -------------------------------------------------------------------------------- 1 | Quick start 2 | =========== 3 | 4 | TTP can be used as a module, as a CLI tool or as a script. 5 | 6 | As a module 7 | ----------- 8 | 9 | Sample code:: 10 | 11 | from ttp import ttp 12 | 13 | data_to_parse = """ 14 | interface Loopback0 15 | description Router-id-loopback 16 | ip address 192.168.0.113/24 17 | ! 18 | interface Vlan778 19 | description CPE_Acces_Vlan 20 | ip address 2002::fd37/124 21 | ip vrf CPE1 22 | ! 23 | """ 24 | 25 | ttp_template = """ 26 | interface {{ interface }} 27 | ip address {{ ip }}/{{ mask }} 28 | description {{ description }} 29 | ip vrf {{ vrf }} 30 | """ 31 | 32 | # create parser object and parse data using template: 33 | parser = ttp(data=data_to_parse, template=ttp_template) 34 | parser.parse() 35 | 36 | # print result in JSON format 37 | results = parser.result(format='json')[0] 38 | print(results) 39 | [ 40 | [ 41 | { 42 | "description": "Router-id-loopback", 43 | "interface": "Loopback0", 44 | "ip": "192.168.0.113", 45 | "mask": "24" 46 | }, 47 | { 48 | "description": "CPE_Acces_Vlan", 49 | "interface": "Vlan778", 50 | "ip": "2002::fd37", 51 | "mask": "124", 52 | "vrf": "CPE1" 53 | } 54 | ] 55 | ] 56 | 57 | # or in csv format 58 | csv_results = parser.result(format='csv')[0] 59 | print(csv_results) 60 | description,interface,ip,mask,vrf 61 | Router-id-loopback,Loopback0,192.168.0.113,24, 62 | CPE_Acces_Vlan,Vlan778,2002::fd37,124,CPE1 63 | 64 | As a CLI tool 65 | ------------- 66 | 67 | Sample command to run in terminal:: 68 | 69 | ttp --data "path/to/data_to_parse.txt" --template "path/to/ttp_template.txt" --outputter json 70 | 71 | [ 72 | [ 73 | { 74 | "description": "Router-id-loopback", 75 | "interface": "Loopback0", 76 | "ip": "192.168.0.113", 77 | "mask": "24" 78 | }, 79 | { 80 | "description": "CPE_Acces_Vlan", 81 | "interface": "Vlan778", 82 | "ip": "2002::fd37", 83 | "mask": "124", 84 | "vrf": "CPE1" 85 | } 86 | ] 87 | ] 88 | 89 | Where file ``path/to/data_to_parse.txt`` contains:: 90 | 91 | interface Loopback0 92 | description Router-id-loopback 93 | ip address 192.168.0.113/24 94 | ! 95 | interface Vlan778 96 | description CPE_Acces_Vlan 97 | ip address 2002::fd37/124 98 | ip vrf CPE1 99 | ! 100 | 101 | And file ``path/to/ttp_template.txt`` contains:: 102 | 103 | interface {{ interface }} 104 | ip address {{ ip }}/{{ mask }} 105 | description {{ description }} 106 | ip vrf {{ vrf }} 107 | -------------------------------------------------------------------------------- /docs/source/TTP Templates Collection.rst: -------------------------------------------------------------------------------- 1 | TTP Templates Collection 2 | ======================== 3 | 4 | `TTP Templates `_ repository contains a number of TTP templates. 5 | 6 | Install:: 7 | 8 | pip install ttp_templates 9 | 10 | To reference templates from ``ttp_templates``, ttp parser ``template`` argument should be of ``ttp://`` format, where ``path`` is an OS path to template text file within ``ttp_templates`` repository. 11 | 12 | Sample code:: 13 | 14 | from ttp import ttp 15 | import pprint 16 | 17 | data = """ 18 | 19 | interface Lo0 20 | ip address 124.171.238.50 32 21 | ! 22 | interface Lo1 23 | description this interface has description 24 | ip address 1.1.1.1 32 25 | 26 | """ 27 | 28 | parser = ttp(data=data, template="ttp://platform/test_platform_show_run_pipe_sec_interface.txt") 29 | parser.parse() 30 | res = parser.result() 31 | 32 | pprint.pprint(res) 33 | 34 | # prints: 35 | # 36 | # [[[{'interface': 'Lo0', 'ip': '124.171.238.50', 'mask': '32'}, 37 | # {'description': 'this interface has description', 38 | # 'interface': 'Lo1', 39 | # 'ip': '1.1.1.1', 40 | # 'mask': '32'}]]] 41 | 42 | Where ``platform/test_platform_show_run_pipe_sec_interface.txt`` is a text file from ``ttp_templates`` repository with content:: 43 | 44 | 45 | interface {{ interface }} 46 | description {{ description | re(".+") }} 47 | encapsulation dot1q {{ dot1q }} 48 | ip address {{ ip }} {{ mask }} 49 | shutdown {{ disabled | set(True) }} 50 | 51 | -------------------------------------------------------------------------------- /docs/source/Template Variables/Getters.rst: -------------------------------------------------------------------------------- 1 | Getters 2 | ======= 3 | 4 | TTP template variables also support a number of getters - functions targeted to get some information and assign it to variable. Getters called for each input datum. 5 | 6 | .. list-table:: 7 | :widths: 10 90 8 | :header-rows: 1 9 | 10 | * - Function 11 | - Description 12 | * - `gethostname`_ 13 | - this function tries to extract hostname out of source data prompts 14 | * - `getfilename`_ 15 | - returns a name of the source data 16 | * - `get_time`_ 17 | - returns current time 18 | * - `get_date`_ 19 | - returns current date 20 | * - `get_timestamp`_ 21 | - returns combination of current date and time 22 | * - `get_timestamp_ms`_ 23 | - returns combination of current date and time with milliseconds 24 | * - `get_timestamp_iso`_ 25 | - returns timestamp in ISO format in UTC timezone 26 | * - `get_time_ns`_ 27 | - returns current time in nanoseconds since Epoch 28 | 29 | gethostname 30 | ------------------------------------------------------------------------------ 31 | ``var_name="gethostname"`` 32 | 33 | Using this getter function TTP tries to extract device's hostname out of it prompt. 34 | 35 | Supported prompts are: 36 | 37 | * Juniper such as ``some.user@hostname>`` 38 | * Huawei such as ```` 39 | * Cisco IOS Exec such as ``hostname>`` 40 | * Cisco IOS XR such as ``RP/0/4/CPU0:hostname#`` 41 | * Cisco IOS Privileged such as ``hostname#`` 42 | * Fortigate such as ``hostname (context) #`` 43 | * Nokia (ALU) SROS such as ``A:hostname>``, ``*A:hostname#`` or ``*A:ALA-12>config#`` 44 | 45 | **Example** 46 | 47 | Template:: 48 | 49 | 50 | switch1#show run int 51 | interface GigabitEthernet3/11 52 | description input_1_data 53 | 54 | 55 | 56 | hostname_var = "gethostname" 57 | 58 | 59 | 60 | interface {{ interface }} 61 | description {{ description }} 62 | 63 | 64 | Result:: 65 | 66 | [ 67 | { 68 | "interfaces": { 69 | "description": "input_1_data", 70 | "interface": "GigabitEthernet3/11" 71 | }, 72 | "vars": { 73 | "hostname_var": "switch1" 74 | } 75 | } 76 | ] 77 | 78 | getfilename 79 | ------------------------------------------------------------------------------ 80 | ``var_name="getfilename"`` 81 | 82 | This function returns the name of input data file if data was loaded from file, if data was loaded from text it will return "text_data". 83 | 84 | get_time 85 | ------------------------------------------------------------------------------ 86 | ``var_name="get_time"`` 87 | 88 | Returns current time in ``%H:%M:%S`` format. 89 | 90 | get_date 91 | ------------------------------------------------------------------------------ 92 | ``var_name="get_date"`` 93 | 94 | Returns current date in ``%Y-%m-%d`` format. 95 | 96 | get_timestamp 97 | ------------------------------------------------------------------------------ 98 | ``var_name="get_timestamp"`` 99 | 100 | Returns current timestamp in ``%Y-%m-%d %H:%M:%S`` format. 101 | 102 | get_timestamp_ms 103 | ------------------------------------------------------------------------------ 104 | ``var_name="get_timestamp_ms"`` 105 | 106 | Returns current timestamp but with milliseconds precision in a format of ``%Y-%m-%d %H:%M:%S.%ms`` 107 | 108 | get_timestamp_iso 109 | ------------------------------------------------------------------------------ 110 | ``var_name="get_timestamp_iso"`` 111 | 112 | Returns current timestamp in ISO format with UTC timezone e.g. ``2020-06-30T11:07:01.212349+00:00``. Uses python datetime function to produce timestamp. 113 | 114 | get_time_ns 115 | ------------------------------------------------------------------------------ 116 | ``var_name="get_time_ns"`` 117 | 118 | This function uses time.time_ns method to return current time in nanoseconds since Epoch 119 | -------------------------------------------------------------------------------- /docs/source/Template Variables/index.rst: -------------------------------------------------------------------------------- 1 | Template Variables 2 | ================== 3 | 4 | TTP supports definition of arbitrary variables using dedicated xml tags , or . Withing these tags variables can be defined in various formats and loaded using one of supported loaders. Variables can also be defined in external text files and loaded using *include* attribute. 5 | 6 | Various values can be recorded in template variables before, during or after parsing. That additional data can be added to results, used for dynamic path constructions. 7 | 8 | Inputs reference 9 | ------------------- 10 | 11 | .. toctree:: 12 | :maxdepth: 2 13 | 14 | Attributes 15 | Getters 16 | -------------------------------------------------------------------------------- /docs/source/Writing templates/How to filter with TTP.rst: -------------------------------------------------------------------------------- 1 | How to filter with TTP 2 | ====================== 3 | -------------------------------------------------------------------------------- /docs/source/Writing templates/How to parse show commands output.rst: -------------------------------------------------------------------------------- 1 | How to parse show commands output 2 | ================================= 3 | 4 | Show commands output parsing with TTP is the same as parsing any text data that contains repetitive patterns and require a certain level of familiarity with tools built into TTP to not only parse but also process match results. 5 | 6 | As a use case let's consider parsing "show cdp neighbors detail" command output of Cisco IOS device, source data:: 7 | 8 | my_switch_1#show cdp neighbors detail 9 | ------------------------- 10 | Device ID: switch-2.net 11 | Entry address(es): 12 | IP address: 10.251.1.49 13 | Platform: cisco WS-C6509, Capabilities: Router Switch IGMP 14 | Interface: GigabitEthernet4/6, Port ID (outgoing port): GigabitEthernet1/5 15 | Holdtime : 130 sec 16 | 17 | Version : 18 | Cisco Internetwork Operating System Software 19 | IOS (tm) s72033_rp Software (s72033_rp-PK9SV-M), Version 12.2(17d)SXB11a, RELEASE SOFTWARE (fc1) 20 | Technical Support: http://www.cisco.com/techsupport 21 | Copyright (c) 1986-2006 by cisco Systems, Inc. 22 | Compiled Thu 13-Apr-06 04:50 by kehsiao 23 | 24 | advertisement version: 2 25 | VTP Management Domain: '' 26 | Duplex: full 27 | Unidirectional Mode: off 28 | 29 | ------------------------- 30 | Device ID: switch-2 31 | Entry address(es): 32 | IP address: 10.151.28.7 33 | Platform: cisco WS-C3560-48TS, Capabilities: Switch IGMP 34 | Interface: GigabitEthernet1/1, Port ID (outgoing port): GigabitEthernet0/1 35 | Holdtime : 165 sec 36 | 37 | Version : 38 | Cisco IOS Software, C3560 Software (C3560-IPBASE-M), Version 12.2(25)SEB2, RELEASE SOFTWARE (fc1) 39 | Copyright (c) 1986-2005 by Cisco Systems, Inc. 40 | Compiled Tue 07-Jun-05 23:34 by yenanh 41 | 42 | advertisement version: 2 43 | Protocol Hello: OUI=0x00000C, Protocol ID=0x0112; payload len=27, value=00000000FFFFFFFF010221FF00000000000000152BC02D80FF0000 44 | VTP Management Domain: '' 45 | Native VLAN: 500 46 | Duplex: full 47 | Unidirectional Mode: off 48 | 49 | The goal is to get this results structure:: 50 | 51 | { 52 | local_hostname: str, 53 | local_interface: str, 54 | peer_hostname: str, 55 | peer_interface: str, 56 | peer_ip: str, 57 | peer_platform: str, 58 | peer_capabilities: [cap1, cap2], 59 | peer_software: str 60 | } 61 | 62 | 63 | Template to achieve this:: 64 | 65 | 66 | hostname="gethostname" 67 | 68 | 69 | 70 | ------------------------- {{ _start_ }} 71 | Device ID: {{ peer_hostname }} 72 | IP address: {{ peer_ip }} 73 | Platform: {{ peer_platform | ORPHRASE }}, Capabilities: {{ peer_capabilities | ORPHRASE | split(" ") }} 74 | Interface: {{ local_interface }}, Port ID (outgoing port): {{ peer_interface }} 75 | {{ local_hostname | set("hostname") }} 76 | 77 | 78 | Version : {{ _start_ }} 79 | {{ peer_software | _line_ }} 80 | {{ _end_ }} 81 | 82 | 83 | 84 | 85 | Results:: 86 | 87 | [[[ 88 | { 89 | "local_hostname": "my_switch_1", 90 | "local_interface": "GigabitEthernet4/6", 91 | "peer_capabilities": [ 92 | "Router", 93 | "Switch", 94 | "IGMP" 95 | ], 96 | "peer_hostname": "switch-2.net", 97 | "peer_interface": "GigabitEthernet1/5", 98 | "peer_ip": "10.251.1.49", 99 | "peer_platform": "cisco WS-C6509", 100 | "peer_software": "Cisco Internetwork Operating System Software \nIOS (tm) s72033_rp Software (s72033_rp-PK9SV-M), Version 12.2(17d)SXB11a, RELEASE SOFTWARE (fc1)\nTechnical Support: http://www.cisco.com/techsupport\nCopyright (c) 1986-2006 by cisco Systems, Inc.\nCompiled Thu 13-Apr-06 04:50 by kehsiao" 101 | }, 102 | { 103 | "local_hostname": "my_switch_1", 104 | "local_interface": "GigabitEthernet1/1", 105 | "peer_capabilities": [ 106 | "Switch", 107 | "IGMP" 108 | ], 109 | "peer_hostname": "switch-2", 110 | "peer_interface": "GigabitEthernet0/1", 111 | "peer_ip": "10.151.28.7", 112 | "peer_platform": "cisco WS-C3560-48TS", 113 | "peer_software": "Cisco IOS Software, C3560 Software (C3560-IPBASE-M), Version 12.2(25)SEB2, RELEASE SOFTWARE (fc1)\nCopyright (c) 1986-2005 by Cisco Systems, Inc.\nCompiled Tue 07-Jun-05 23:34 by yenanh" 114 | } 115 | ]]] 116 | 117 | Special attention should be paid to this aspects of above template: 118 | 119 | * use of explicit _start_ indicator to define start of the group 120 | * ORPHRASE regex formatter to match a single word and collection of words 121 | * _line_ indicator used within separate group to combine software version description, that group has special null path - "_" - indicating that result for this group should be merged with parent group 122 | * explicit use of _end_ indicator to make sure that only relevant information matched 123 | * special handling of peer_capabilities match result by converting into list by splitting match result using space character 124 | -------------------------------------------------------------------------------- /docs/source/Writing templates/How to produce time series data with TTP.rst: -------------------------------------------------------------------------------- 1 | How to produce time series data with TTP 2 | ======================================== 3 | 4 | Time stamped data is very easy to produce with TTP, as it has built-in time related functions, allowing to add timestamp to match results. For example, interface counters can be parsed with TTP every X number of seconds, marked with timestamp, producing simple time series data. 5 | 6 | Consider this source data:: 7 | 8 | GigabitEthernet1 is up, line protocol is up 9 | 297 packets input, 25963 bytes, 0 no buffer 10 | 160 packets output, 26812 bytes, 0 underruns 11 | GigabitEthernet2 is up, line protocol is up 12 | 150 packets input, 2341 bytes, 0 no buffer 13 | 351 output errors, 3459 collisions, 0 interface resets 14 | 15 | And the goal is to get this result:: 16 | 17 | { 18 | timestamp: { 19 | interface: { 20 | in_pkts: int, 21 | out_pkts: int 22 | } 23 | } 24 | } 25 | 26 | Template to produce above structure is:: 27 | 28 | 29 | timestamp = "get_timestamp_ms" 30 | 31 | 32 | 33 | {{ interface }} is up, line protocol is up 34 | {{ in_pkts}} packets input, 25963 bytes, 0 no buffer 35 | {{ out_pkts }} packets output, 26812 bytes, 0 underruns 36 | 37 | 38 | Results after parsing above data with template:: 39 | 40 | [ 41 | [ 42 | { 43 | "2019-11-10 16:18:32.523": { 44 | "GigabitEthernet1": { 45 | "in_pkts": "297", 46 | "out_pkts": "160" 47 | }, 48 | "GigabitEthernet2": { 49 | "in_pkts": "150" 50 | } 51 | } 52 | } 53 | ] 54 | ] 55 | 56 | Attention should be paid to the fact, that timestamps produced using local time of the system that happens to parse text data, as a result get_time_ns function can be used to produce time in nanoseconds since the epoch (midnight, 1st of January, 1970) in UTC. 57 | -------------------------------------------------------------------------------- /docs/source/Writing templates/index.rst: -------------------------------------------------------------------------------- 1 | Writing templates 2 | ================= 3 | 4 | Writing templates is simple. 5 | 6 | To create template, take data that needs to be parsed and replace portions of it with match variables:: 7 | 8 | # Data we want to parse 9 | interface Loopback0 10 | description Router-id-loopback 11 | ip address 192.168.0.113/24 12 | ! 13 | interface Vlan778 14 | description CPE_Acces_Vlan 15 | ip address 2002::fd37/124 16 | ip vrf CPE1 17 | ! 18 | 19 | # TTP template 20 | interface {{ interface }} 21 | ip address {{ ip }}/{{ mask }} 22 | description {{ description }} 23 | ip vrf {{ vrf }} 24 | 25 | Above data and template can be saved in two files, and ttp CLI tool can be used to parse it with command:: 26 | 27 | ttp -d "/path/to/data/file.txt" -t "/path/to/template.txt" --outputter json 28 | 29 | And get these results:: 30 | 31 | [ 32 | [ 33 | { 34 | "description": "Router-id-loopback", 35 | "interface": "Loopback0", 36 | "ip": "192.168.0.113", 37 | "mask": "24" 38 | }, 39 | { 40 | "description": "CPE_Acces_Vlan", 41 | "interface": "Vlan778", 42 | "ip": "2002::fd37", 43 | "mask": "124", 44 | "vrf": "CPE1" 45 | } 46 | ] 47 | ] 48 | 49 | .. note:: TTP match variables names used as regular expressions group names, hence they should be valid Python identifiers. However, TTP sanitizes match variable name prior to using it in regex by replacing all non-alpha characters with underscore and prepending underscore if variable name starts with digit. Original variable name used as is to store results. 50 | 51 | Above process is very similar to writing `Jinja2 `_ templates but in reverse direction - we have text and we need to transform it into structured data, as opposed to having structured data, that needs to be rendered with Jinja2 template to produce text. 52 | 53 | .. warning:: Indentation is important. Trailing spaces and tabs are ignored by TTP. 54 | 55 | TTP use leading spaces and tabs to produce better match results, exact number of leading spaces and tabs used to form regular expressions. There is a way to ignore indentation by the use of :ref:`Match Variables/Indicators:ignore` indicator coupled with ``[\s\t]*`` or ``\s+`` or ``\s{1,3}`` or ``\t+`` etc. regular expressions. 56 | 57 | TTP supports various output formats, for instance, if we need to emit data not in json but csv format we can use outputter and write this template:: 58 | 59 | 60 | interface {{ interface }} 61 | ip address {{ ip }}/{{ mask }} 62 | description {{ description }} 63 | ip vrf {{ vrf }} 64 | 65 | 66 | 67 | 68 | Run ttp CLI tool without -o option to print only results produced by outputter defined within template:: 69 | 70 | ttp -d "/path/to/data/file.txt" -t "/path/to/template.txt" 71 | 72 | We told TTP that ``returner="terminal"``, because of that results will be printed to terminal screen:: 73 | 74 | description,interface,ip,mask,vrf 75 | Router-id-loopback,Loopback0,192.168.0.113,24, 76 | CPE_Acces_Vlan,Vlan778,2002::fd37,124,CPE1 77 | 78 | XML Primer 79 | ---------- 80 | 81 | TBD 82 | 83 | HOW TOs 84 | ------- 85 | 86 | .. toctree:: 87 | :maxdepth: 2 88 | 89 | How to parse hierarchical (configuration) data 90 | How to parse text tables 91 | How to parse show commands output 92 | How to filter with TTP 93 | How to produce time series data with TTP 94 | -------------------------------------------------------------------------------- /docs/source/_images/cdp_diagram.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dmulyalin/ttp/d6476d387ca2078e0306c2546148850d3ceb246c/docs/source/_images/cdp_diagram.png -------------------------------------------------------------------------------- /docs/source/_images/groups_vaidate_fun_example_1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dmulyalin/ttp/d6476d387ca2078e0306c2546148850d3ceb246c/docs/source/_images/groups_vaidate_fun_example_1.png -------------------------------------------------------------------------------- /docs/source/_images/terminal_returner_colorama.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dmulyalin/ttp/d6476d387ca2078e0306c2546148850d3ceb246c/docs/source/_images/terminal_returner_colorama.png -------------------------------------------------------------------------------- /docs/source/conf.py: -------------------------------------------------------------------------------- 1 | # Configuration file for the Sphinx documentation builder. 2 | # 3 | # This file only contains a selection of the most common options. For a full 4 | # list see the documentation: 5 | # http://www.sphinx-doc.org/en/master/config 6 | 7 | # -- Path setup -------------------------------------------------------------- 8 | 9 | # If extensions (or modules to document with autodoc) are in another directory, 10 | # add these directories to sys.path here. If the directory is relative to the 11 | # documentation root, use os.path.abspath to make it absolute, like shown here. 12 | # 13 | import os 14 | import sys 15 | sys.path.insert(0, "../../ttp/") 16 | 17 | 18 | # -- Project information ----------------------------------------------------- 19 | 20 | project = 'ttp' 21 | copyright = '2021, dmulyalin' 22 | author = 'dmulyalin' 23 | 24 | # The full version, including alpha/beta/rc tags 25 | release = '0.9.0' 26 | 27 | 28 | # -- General configuration --------------------------------------------------- 29 | 30 | # Add any Sphinx extension module names here, as strings. They can be 31 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 32 | # ones. 33 | extensions = [ 34 | 'sphinx.ext.autodoc', 35 | 'sphinx.ext.napoleon', 36 | 'sphinx.ext.autosectionlabel', 37 | 'sphinxcontrib.spelling', 38 | ] 39 | 40 | # Add any paths that contain templates here, relative to this directory. 41 | templates_path = ['_templates'] 42 | 43 | # List of patterns, relative to source directory, that match files and 44 | # directories to ignore when looking for source files. 45 | # This pattern also affects html_static_path and html_extra_path. 46 | exclude_patterns = [] 47 | 48 | # use index.rst instead of contents.rst: 49 | master_doc = 'index' 50 | 51 | # -- Options for HTML output ------------------------------------------------- 52 | 53 | # The theme to use for HTML and HTML Help pages. See the documentation for 54 | # a list of builtin themes. 55 | # 56 | on_rtd = os.environ.get('READTHEDOCS') == 'True' 57 | if not on_rtd: 58 | html_theme = 'classic' 59 | else: 60 | html_theme = 'sphinx_rtd_theme' 61 | # add level to nav bar - https://stackoverflow.com/questions/27669376/show-entire-toctree-in-read-the-docs-sidebar 62 | # and this - https://sphinx-rtd-theme.readthedocs.io/en/stable/configuring.html#table-of-contents-options 63 | html_theme_options = { 64 | 'navigation_depth': 4, 65 | 'collapse_navigation': True, 66 | 'sticky_navigation': False 67 | } 68 | 69 | # Add any paths that contain custom static files (such as style sheets) here, 70 | # relative to this directory. They are copied after the builtin static files, 71 | # so a file named "default.css" will overwrite the builtin "default.css". 72 | html_static_path = ['_static'] 73 | 74 | # to make crossreferencing section names between documents work 75 | autosectionlabel_prefix_document = True 76 | -------------------------------------------------------------------------------- /docs/source/index.rst: -------------------------------------------------------------------------------- 1 | Template Text Parser 2 | ==================== 3 | .. toctree:: 4 | :maxdepth: 2 5 | :titlesonly: 6 | 7 | Overview 8 | Installation 9 | Quick start 10 | FAQ 11 | Match Variables/index 12 | Groups/index 13 | Forming Results Structure/index 14 | Inputs/index 15 | Outputs/index 16 | Template Tag/Template Tag 17 | Template Variables/index 18 | Lookup Tables/Lookup Tables 19 | Macro Tag/Macro Tag 20 | Doc Tag/Doc Tag 21 | Extend Tag/Extend Tag 22 | Writing templates/index 23 | CLI tool 24 | TTP Templates Collection 25 | API reference 26 | Performance 27 | TTP Internals/index 28 | -------------------------------------------------------------------------------- /docs/source/spelling_wordlist.txt: -------------------------------------------------------------------------------- 1 | IPv 2 | ip 3 | url 4 | ttp 5 | outputtters 6 | formatters 7 | getters 8 | outputter 9 | formatter 10 | boolean 11 | lookups 12 | kwargs 13 | args 14 | TTP 15 | Mbyte 16 | Mbytes 17 | regexes 18 | bgp 19 | config 20 | prepended 21 | prepend 22 | hostname 23 | iso 24 | ini 25 | Huawei 26 | Fortigate 27 | getfilename 28 | dataset 29 | pathchar 30 | py 31 | namespace 32 | datum 33 | datums 34 | multiprocess 35 | Multiprocess 36 | programmatically 37 | syslog 38 | pre 39 | txt 40 | -------------------------------------------------------------------------------- /pylintrc: -------------------------------------------------------------------------------- 1 | # example pylintrc file - https://github.com/PyCQA/pylint/blob/main/pylintrc 2 | 3 | [MESSAGES CONTROL] 4 | 5 | # Disable the message, report, category or checker with the given id(s). You 6 | # can either give multiple identifiers separated by comma (,) or put this 7 | # option multiple times (only on the command line, not in the configuration 8 | # file where it should appear only once).You can also use "--disable=all" to 9 | # disable everything first and then reenable specific checks. For example, if 10 | # you want to run only the similarities checker, you can use "--disable=all 11 | # --enable=similarities". If you want to run only the classes checker, but have 12 | # no Warning level messages displayed, use"--disable=all --enable=classes 13 | # --disable=W" 14 | 15 | disable= 16 | line-too-long, 17 | logging-format-interpolation, 18 | broad-except, 19 | invalid-name, 20 | bare-except, 21 | too-many-locals, 22 | too-many-arguments, 23 | raise-missing-from, 24 | # done by black 25 | format, 26 | missing-module-docstring, 27 | undefined-variable, 28 | 29 | [DESIGN] 30 | 31 | # Maximum number of arguments for function / method 32 | max-args=50 33 | 34 | # Maximum number of locals for function / method body 35 | max-locals=50 36 | 37 | # Maximum number of return / yield for function / method body 38 | max-returns=20 39 | 40 | # Maximum number of attributes for a class (see R0902). 41 | max-attributes=50 42 | 43 | [SPELLING] 44 | # need - pip install pyenchant 45 | spelling-dict=en_US 46 | 47 | # List of comma separated words that should not be checked. 48 | spelling-ignore-words=ttr,multitemplate,xlsx,pprint,ip,datums,vrf,vid,openpyxl,CSV,csv,yaml,json,CLI,BGP,txt,subfolders,argparser,nonconfig,config,Yangson,autofunction,filepath,kwargs,args,hostname,ydir,submodule,yangson,subfolder,yml,stringio,autoclass,renderes,validator,noindex,yfile,DictReader,ret,inet,ipmask,namespace,typedef,ietf,noqa,formatters,TTP,ttp,outputter,strftime,regexes,ETree,XML,bool,funcs,getters,endswith,joinchar,joinmatches,startempty,iterable,unicode,ini,configparser,py,boolean,endswith,lookahead,xml 49 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.poetry] 2 | name = "ttp" 3 | version = "0.9.5" 4 | description = "Template Text Parser" 5 | authors = ["Denis Mulyalin "] 6 | maintainers = ["Denis Mulyalin "] 7 | license = "MIT" 8 | readme = "README.md" 9 | homepage = "https://github.com/dmulyalin/ttp" 10 | repository = "https://github.com/dmulyalin/ttp" 11 | documentation = "https://ttp.readthedocs.io/" 12 | keywords = ["Parsing", "TTP", "regex"] 13 | classifiers = [ 14 | "Topic :: Utilities", 15 | "Programming Language :: Python :: 2.7", 16 | "Programming Language :: Python :: 3.6", 17 | "Programming Language :: Python :: 3.7", 18 | "Programming Language :: Python :: 3.8", 19 | "Programming Language :: Python :: 3.9", 20 | "Programming Language :: Python :: 3.10", 21 | "License :: OSI Approved :: MIT License", 22 | "Operating System :: POSIX :: Linux", 23 | ] 24 | 25 | [tool.poetry.dependencies] 26 | python = ">=2.7,<4.0" 27 | 28 | # optional dependencies for extras definition 29 | cerberus = { version = "1.3.*", optional = true, markers = "python_version >= '3.7'" } 30 | jinja2 = { version = "3.0.*", optional = true, markers = "python_version >= '3.7'" } 31 | pyyaml = { version = "6.0", optional = true, markers = "python_version >= '3.7'" } 32 | deepdiff = { version = "5.8.*", optional = true, markers = "python_version >= '3.7'" } 33 | openpyxl = { version = "3.0.*", optional = true, markers = "python_version >= '3.7'" } 34 | tabulate = { version = "0.8.*", optional = true, markers = "python_version >= '3.7'" } 35 | ttp_templates = { version = "0.*", optional = true, markers = "python_version >= '3.7'" } 36 | yangson = { version = "1.4.*", optional = true, markers = "python_version >= '3.7'" } 37 | n2g = { version = "0.2.*", optional = true, markers = "python_version >= '3.7'" } 38 | 39 | # docs dependencies for extras definition 40 | readthedocs-sphinx-search = { version = "0.1.1", optional = true, markers = "python_version >= '3.7'" } 41 | Sphinx = { version = "4.3.0", optional = true, markers = "python_version >= '3.7'" } 42 | sphinx_rtd_theme = { version = "1.0.0", optional = true, markers = "python_version >= '3.7'" } 43 | sphinxcontrib-applehelp = { version = "1.0.1", optional = true, markers = "python_version >= '3.7'" } 44 | sphinxcontrib-devhelp = { version = "1.0.1", optional = true, markers = "python_version >= '3.7'" } 45 | sphinxcontrib-htmlhelp = { version = "2.0.0", optional = true, markers = "python_version >= '3.7'" } 46 | sphinxcontrib-jsmath = { version = "1.0.1", optional = true, markers = "python_version >= '3.7'" } 47 | sphinxcontrib-napoleon = { version = "0.7", optional = true, markers = "python_version >= '3.7'" } 48 | sphinxcontrib-qthelp = { version = "1.0.2", optional = true, markers = "python_version >= '3.7'" } 49 | sphinxcontrib-serializinghtml = { version = "1.1.5", optional = true, markers = "python_version >= '3.7'" } 50 | sphinxcontrib-spelling = { version = "7.2.1", optional = true, markers = "python_version >= '3.7'" } 51 | 52 | [tool.poetry.dev-dependencies] 53 | bandit = { version = "*", markers = "python_version >= '3.7'" } 54 | black = { version = "*", markers = "python_version >= '3.7'" } 55 | flake8 = { version = "*", markers = "python_version >= '3.7'" } 56 | pre-commit = { version = "*", markers = "python_version >= '3.7'" } 57 | pyenchant = { version = "*", markers = "python_version >= '3.7'" } 58 | pylint = { version = "*", markers = "python_version >= '3.7'" } 59 | pytest = { version = ">=7.1", markers = "python_version >= '3.7'" } 60 | 61 | [build-system] 62 | requires = ["poetry-core>=1.0.0"] 63 | build-backend = "poetry.core.masonry.api" 64 | 65 | [tool.poetry.scripts] 66 | ttp = 'ttp.ttp:cli_tool' 67 | 68 | [tool.poetry.extras] 69 | full = [ 70 | "cerberus", 71 | "jinja2", 72 | "pyyaml", 73 | "deepdiff", 74 | "openpyxl", 75 | "tabulate", 76 | "ttp_templates", 77 | "yangson", 78 | "n2g", 79 | ] 80 | docs = [ 81 | "readthedocs-sphinx-search", 82 | "Sphinx", 83 | "sphinx_rtd_theme", 84 | "sphinxcontrib-applehelp", 85 | "sphinxcontrib-devhelp", 86 | "sphinxcontrib-htmlhelp", 87 | "sphinxcontrib-jsmath", 88 | "sphinxcontrib-napoleon", 89 | "sphinxcontrib-qthelp", 90 | "sphinxcontrib-serializinghtml", 91 | "sphinxcontrib-spelling", 92 | ] 93 | -------------------------------------------------------------------------------- /readthedocs.yaml: -------------------------------------------------------------------------------- 1 | version: 2 2 | 3 | build: 4 | os: "ubuntu-20.04" 5 | tools: 6 | python: "3.7" 7 | 8 | sphinx: 9 | configuration: docs/source/conf.py 10 | 11 | # Python requirements required to build the docs 12 | python: 13 | install: 14 | - method: pip 15 | path: . 16 | - method: pip 17 | path: . 18 | extra_requirements: 19 | - docs 20 | system_packages: true 21 | -------------------------------------------------------------------------------- /test/pytest/assets/TTP_TEMPLATES_DIR_TEST/test_ttp_templates_dir_env_variable.txt: -------------------------------------------------------------------------------- 1 | 2 | vlan {{ vlan }} 3 | name {{ name }} 4 | -------------------------------------------------------------------------------- /test/pytest/assets/excel_out_test_excel_formatter_update_source.xlsx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dmulyalin/ttp/d6476d387ca2078e0306c2546148850d3ceb246c/test/pytest/assets/excel_out_test_excel_formatter_update_source.xlsx -------------------------------------------------------------------------------- /test/pytest/assets/extend_groups_filter_test.txt: -------------------------------------------------------------------------------- 1 | 2 | vlan {{ vlan_2 }} 3 | name {{ name_2 }} 4 | 5 | 6 | 7 | vlan {{ vlan }} 8 | name {{ name }} 9 | -------------------------------------------------------------------------------- /test/pytest/assets/extend_groups_recursive_extend_load.txt: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | logging host {{ syslog }} 5 | 6 | 7 | 8 | 9 | 10 | 11 | -------------------------------------------------------------------------------- /test/pytest/assets/extend_groups_recursive_extend_load_several_top_groups.txt: -------------------------------------------------------------------------------- 1 | 2 | logging host {{ syslog }} 3 | 4 | 5 | 6 | 7 | -------------------------------------------------------------------------------- /test/pytest/assets/extend_test_inputs_filter.txt: -------------------------------------------------------------------------------- 1 | 2 | vlan 1234 3 | name some_vlan 4 | ! 5 | vlan 910 6 | name one_more 7 | ! 8 | 9 | 10 | 11 | 12 | vlan 4321 13 | name vlan_name_4321 14 | ! 15 | vlan 700 16 | name another_vlan 17 | ! 18 | 19 | 20 | 21 | 22 | vlan 123 23 | name vlan_name_123 24 | ! 25 | vlan 800 26 | name another_vlan_800 27 | ! 28 | 29 | 30 | 31 | 32 | vlan 777 33 | name vlan_name_777 34 | 35 | 36 | 37 | 38 | a = 1 39 | b = 2 40 | -------------------------------------------------------------------------------- /test/pytest/assets/extend_test_lookups_filter.txt: -------------------------------------------------------------------------------- 1 | 2 | '65100': 3 | as_description: Private ASN 1 4 | as_name: Subs 5 | prefix_num: '734' 6 | '65101': 7 | as_description: Cust-1 ASN 1 8 | as_name: Cust1 9 | prefix_num: '156' 10 | 11 | 12 | 13 | 14 | '65100': 15 | as_description: Private ASN 16 | as_name: Subs 17 | prefix_num: '734' 18 | '65101': 19 | as_description: Cust-1 ASN 20 | as_name: Cust1 21 | prefix_num: '156' 22 | 23 | 24 | 25 | 26 | '65100': 27 | as_description: Private ASN 2 28 | as_name: Subs 29 | prefix_num: '734' 30 | '65101': 31 | as_description: Cust-1 ASN 2 32 | as_name: Cust1 33 | prefix_num: '156' 34 | 35 | 36 | 37 | 38 | var_1: value_1 39 | var_2: value_2 40 | var_2: [1,2,3,4,"a"] 41 | INTF_RE: "GigabitEthernet\\S+|Fast\\S+" 42 | -------------------------------------------------------------------------------- /test/pytest/assets/extend_test_nested_group_filter.txt: -------------------------------------------------------------------------------- 1 | 2 | ip address {{ ip }} {{ mask }} 3 | 4 | 5 | 6 | ip address {{ ip }} {{ mask }} secondary 7 | -------------------------------------------------------------------------------- /test/pytest/assets/extend_test_output_filter.txt: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /test/pytest/assets/extend_test_vars_filter.txt: -------------------------------------------------------------------------------- 1 | 2 | c = 3 3 | d = 4 4 | 5 | 6 | 7 | e = 5 8 | f = 6 9 | 10 | 11 | 12 | g = 7 13 | 14 | 15 | 16 | r = 8 17 | -------------------------------------------------------------------------------- /test/pytest/assets/extend_vars_and_lookup_tag.txt: -------------------------------------------------------------------------------- 1 | 2 | '65100': 3 | as_description: Private ASN 4 | as_name: Subs 5 | prefix_num: '734' 6 | '65101': 7 | as_description: Cust-1 ASN 8 | as_name: Cust1 9 | prefix_num: '156' 10 | 11 | 12 | 13 | var_1: value_1 14 | var_2: value_2 15 | var_2: [1,2,3,4,"a"] 16 | INTF_RE: "GigabitEthernet\\S+|Fast\\S+" 17 | -------------------------------------------------------------------------------- /test/pytest/assets/extend_vlan.txt: -------------------------------------------------------------------------------- 1 | 2 | vlan {{ vlan }} 3 | name {{ name }} 4 | -------------------------------------------------------------------------------- /test/pytest/assets/extend_vlan_anon.txt: -------------------------------------------------------------------------------- 1 | vlan {{ vlan }} 2 | name {{ name }} -------------------------------------------------------------------------------- /test/pytest/assets/extend_vlan_with_template_tag.txt: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /test/pytest/assets/test_extend_tag_bgp_config.txt: -------------------------------------------------------------------------------- 1 | 2 | router-id {{ rid }} 3 | -------------------------------------------------------------------------------- /test/pytest/assets/test_extend_tag_within_group.txt: -------------------------------------------------------------------------------- 1 | 2 | router-id {{ rid }} 3 | -------------------------------------------------------------------------------- /test/pytest/assets/test_extend_tag_within_group_with_anonymous_group.txt: -------------------------------------------------------------------------------- 1 | 2 | router-id {{ rid }} 3 | -------------------------------------------------------------------------------- /test/pytest/assets/test_extend_tag_within_group_with_multiple_groups.txt: -------------------------------------------------------------------------------- 1 | 2 | router-id {{ rid }} 3 | 4 | 5 | 6 | address-family ipv4 vrf {{ vrf }} 7 | -------------------------------------------------------------------------------- /test/pytest/assets/test_extend_tag_within_group_with_non_hierarch_template.txt: -------------------------------------------------------------------------------- 1 | router-id {{ rid }} -------------------------------------------------------------------------------- /test/pytest/assets/test_lookup_include_csv.csv: -------------------------------------------------------------------------------- 1 | ASN,as_name,as_description 2 | 65100,Customer_1,Private ASN for CN451275 3 | 65101,CPEs,Private ASN for FTTB CPEs -------------------------------------------------------------------------------- /test/pytest/assets/test_lookup_include_yaml.txt: -------------------------------------------------------------------------------- 1 | '65100': 2 | as_description: Private ASN 3 | as_name: Subs 4 | prefix_num: '734' 5 | '65101': 6 | as_description: Cust-1 ASN 7 | as_name: Cust1 8 | prefix_num: '156' -------------------------------------------------------------------------------- /test/pytest/assets/yaml_vars.txt: -------------------------------------------------------------------------------- 1 | var_1: value_1 2 | var_2: value_2 3 | var_2: [1,2,3,4,"a"] 4 | INTF_RE: "GigabitEthernet\\S+|Fast\\S+" -------------------------------------------------------------------------------- /test/pytest/mock_data/dataset_1/data_1.txt: -------------------------------------------------------------------------------- 1 | interface Lo0 2 | description data_1 file 3 | ip address 1.0.0.0 32 4 | ! 5 | interface Lo1 6 | description this interface has description 7 | ip address 1.1.1.1 32 8 | ! -------------------------------------------------------------------------------- /test/pytest/mock_data/dataset_1/data_2.txt: -------------------------------------------------------------------------------- 1 | interface Lo2 2 | description data-2 file 3 | ip address 2.2.2.2 32 4 | ! 5 | interface Lo3 6 | description this interface has description 7 | ip address 3.3.3.3 32 8 | ! -------------------------------------------------------------------------------- /test/pytest/mock_data/dataset_1/data_XYZ.txt: -------------------------------------------------------------------------------- 1 | interface Lo10 2 | ip address 1.100.0.0 32 3 | ! 4 | interface Lo11 5 | description this interface from XYZ dataset 6 | ip address 1.11.1.1 32 7 | ! -------------------------------------------------------------------------------- /test/pytest/mock_data/dataset_2/data_1.log: -------------------------------------------------------------------------------- 1 | interface Lo0 2 | description data_1.log file 3 | ip address 1.0.0.0 32 4 | ! 5 | interface Lo1 6 | description this interface has description 7 | ip address 1.1.1.1 32 8 | ! -------------------------------------------------------------------------------- /test/pytest/mock_data/dataset_2/data_2.txt: -------------------------------------------------------------------------------- 1 | interface Lo2 2 | ip address 2.2.2.2 32 3 | ! 4 | interface Lo3 5 | description this interface has description 6 | ip address 3.3.3.3 32 7 | ! -------------------------------------------------------------------------------- /test/pytest/test_anonymous_group.py: -------------------------------------------------------------------------------- 1 | import sys 2 | 3 | sys.path.insert(0, "../..") 4 | import pprint 5 | 6 | from ttp import ttp 7 | 8 | 9 | def test_simple_anonymous_template(): 10 | template_1 = """interface {{ interface }} 11 | description {{ description | ORPHRASE }}""" 12 | 13 | data_1 = """ 14 | interface Port-Chanel11 15 | description Storage Management 16 | interface Loopback0 17 | description RID 18 | """ 19 | parser = ttp(template=template_1, data=data_1) 20 | # check that data added: 21 | datums_added = { 22 | "{}:{}".format(template.name, input_name): input_obj.data 23 | for template in parser._templates 24 | for input_name, input_obj in template.inputs.items() 25 | } 26 | # pprint.pprint(datums_added) 27 | parser.parse() 28 | res = parser.result() 29 | # pprint.pprint(res) 30 | # assert res == [[[{'description': 'Storage Management', 'interface': 'Port-Chanel11'}, {'description': 'RID', 'interface': 'Loopback0'}]]] 31 | 32 | 33 | # test_simple_anonymous_template() 34 | 35 | 36 | def test_anonymous_group_with_vars(): 37 | template = """ 38 | 39 | interface Port-Chanel11 40 | description Storage Management 41 | interface Loopback0 42 | description RID 43 | 44 | 45 | 46 | a = 1 47 | b = 2 48 | 49 | 50 | 51 | interface {{ interface }} 52 | description {{ description | ORPHRASE }} 53 | 54 | """ 55 | parser = ttp(template=template) 56 | parser.parse() 57 | res = parser.result() 58 | # pprint.pprint(res) 59 | assert res == [ 60 | [ 61 | [ 62 | {"description": "Storage Management", "interface": "Port-Chanel11"}, 63 | {"description": "RID", "interface": "Loopback0"}, 64 | {"my": {"var": {"s": {"a": 1, "b": 2}}}}, 65 | ] 66 | ] 67 | ] 68 | 69 | 70 | # test_anonymous_group_with_vars() 71 | 72 | 73 | def test_anonymous_group_with_child_group_empty_absolute_path(): 74 | template = """ 75 | 101 | """ 102 | parser = ttp(template=template) 103 | parser.parse() 104 | res = parser.result() 105 | # pprint.pprint(res) 106 | assert res == [ 107 | [ 108 | {"ip": "10.123.89.55", "mask": "255.255.255.0"}, 109 | {"ip": "10.123.89.56", "mask": "255.255.255.0"}, 110 | {"ip": "10.123.89.55", "mask": "255.255.255.0"}, 111 | ] 112 | ] 113 | 114 | 115 | # test_anonymous_group_with_child_group_empty_absolute_path() 116 | 117 | 118 | def test_anonymous_group_with_per_template_mode(): 119 | template = """ 120 | 132 | """ 133 | datum_1 = """ 134 | hostname r2 135 | ! 136 | interface GigabitEthernet1 137 | vrf forwarding MGMT 138 | ip address 10.123.89.55 255.255.255.0 139 | """ 140 | datum_2 = """ 141 | hostname r1 142 | ! 143 | interface GigabitEthernet1 144 | description some info 145 | vrf forwarding MGMT 146 | ip address 10.123.89.56 255.255.255.0 147 | interface GigabitEthernet2 148 | ip address 10.123.89.55 255.255.255.0 149 | """ 150 | parser_a = ttp(template=template) 151 | parser_a.add_input(datum_1) 152 | parser_a.add_input(datum_2) 153 | parser_a.parse() 154 | res = parser_a.result() 155 | # pprint.pprint(res) 156 | assert res == [ 157 | [ 158 | { 159 | "hostname": "r2", 160 | "interface": "GigabitEthernet1", 161 | "ip": "10.123.89.55", 162 | "mask": "255.255.255.0", 163 | }, 164 | { 165 | "description": "some info", 166 | "hostname": "r1", 167 | "interface": "GigabitEthernet1", 168 | "ip": "10.123.89.56", 169 | "mask": "255.255.255.0", 170 | }, 171 | { 172 | "hostname": "r1", 173 | "interface": "GigabitEthernet2", 174 | "ip": "10.123.89.55", 175 | "mask": "255.255.255.0", 176 | }, 177 | ] 178 | ] 179 | 180 | 181 | # test_anonymous_group_with_per_template_mode() 182 | -------------------------------------------------------------------------------- /test/pytest/test_empty_group_results.py: -------------------------------------------------------------------------------- 1 | import sys 2 | 3 | sys.path.insert(0, "../..") 4 | import pprint 5 | 6 | from ttp import ttp 7 | 8 | 9 | def test_empty_group_results(): 10 | template = """ 11 | 12 | vlan 1234 13 | name some_vlan 14 | vlan 5678 15 | vlan 910 16 | name one_more 17 | vlan 777 18 | 19 | 20 | 21 | vlan {{ vlan }} 22 | name {{ name }} 23 | 24 | """ 25 | parser = ttp(template=template) 26 | parser.parse() 27 | res = parser.result() 28 | # pprint.pprint(res) 29 | assert res == [ 30 | [ 31 | { 32 | "vlans": { 33 | "1234": {"name": "some_vlan"}, 34 | "5678": {}, 35 | "777": {}, 36 | "910": {"name": "one_more"}, 37 | } 38 | } 39 | ] 40 | ] 41 | 42 | 43 | # test_empty_group_results() 44 | -------------------------------------------------------------------------------- /test/pytest/test_group_name_attribute.py: -------------------------------------------------------------------------------- 1 | import sys 2 | 3 | sys.path.insert(0, "../..") 4 | import pprint 5 | 6 | import logging 7 | 8 | logging.basicConfig(level=logging.DEBUG) 9 | 10 | from ttp import ttp 11 | 12 | 13 | def test_group_same_null_path_for_several_groups(): 14 | data = """ 15 | vrf xyz 16 | address-family ipv4 unicast 17 | import route-target 18 | 65000:3507 19 | 65000:3511 20 | 65000:5453 21 | 65000:5535 22 | ! 23 | export route-target 24 | 65000:5453 25 | 65000:5535 26 | ! 27 | ! 28 | ! 29 | """ 30 | template = """ 31 | 32 | vrf {{name}} 33 | 34 | import route-target {{ _start_ }} 35 | {{ import | to_list | joinmatches }} 36 | 37 | ! 38 | 39 | export route-target {{ _start_ }} 40 | {{ export | to_list | joinmatches }} 41 | 42 | 43 | """ 44 | parser = ttp(data=data, template=template, log_level="ERROR") 45 | parser.parse() 46 | res = parser.result() 47 | pprint.pprint(res) 48 | assert res == [ 49 | [ 50 | { 51 | "vrfs": { 52 | "export": ["65000:5453", "65000:5535"], 53 | "import": ["65000:3507", "65000:3511", "65000:5453", "65000:5535"], 54 | "name": "xyz", 55 | } 56 | } 57 | ] 58 | ] 59 | 60 | 61 | # test_group_same_null_path_for_several_groups() 62 | -------------------------------------------------------------------------------- /test/pytest/test_line_indicator.py: -------------------------------------------------------------------------------- 1 | import sys 2 | 3 | sys.path.insert(0, "../..") 4 | import pprint 5 | import logging 6 | 7 | logging.basicConfig(level=logging.DEBUG) 8 | 9 | from ttp import ttp 10 | 11 | 12 | def test_line_when_its_start_re(): 13 | data_to_parse_a = """ 14 | R1#sh vrrp 15 | GigabitEthernet1 - Group 100 16 | DC-LAN Subnet 17 | State is Master 18 | Virtual IP address is 192.168.10.1 19 | Virtual MAC address is 0000.5e00.0164 20 | Advertisement interval is 1.000 sec 21 | Preemption enabled 22 | Priority is 120 23 | VRRS Group name DC_LAN 24 | Track object 1 state Up decrement 30 25 | Authentication text "hash" 26 | Master Router is 192.168.1.233 (local), priority is 120 27 | Master Advertisement interval is 1.000 sec 28 | Master Down interval is 3.531 sec 29 | Some other line 30 | """ 31 | 32 | data_to_parse_b = """ 33 | R2#sh vrrp 34 | GigabitEthernet1 - Group 100 35 | State is Init 36 | Virtual IP address is 192.168.10.1 37 | Virtual MAC address is 0000.5e00.0164 38 | Advertisement interval is 1.000 sec 39 | Preemption enabled 40 | Priority is 115 41 | Authentication text "hash" 42 | Master Router is 192.168.1.233, priority is 120 43 | Master Advertisement interval is 1.000 sec 44 | Master Down interval is 3.550 sec 45 | """ 46 | 47 | ttp_template = """ 48 | 76 | """ 77 | parser = ttp(template=ttp_template) 78 | parser.add_input(data_to_parse_a, template_name="vrrp") 79 | parser.add_input(data_to_parse_b, template_name="vrrp") 80 | parser.parse() 81 | 82 | res = parser.result(structure="dictionary") 83 | # pprint.pprint(res, width=100) 84 | 85 | assert res == { 86 | "vrrp": { 87 | "R1": { 88 | "GigabitEthernet1": { 89 | "Group-100": { 90 | "Auth_Text": '"hash"', 91 | "Group_Name": "DC_LAN", 92 | "Master_IP": "192.168.1.233", 93 | "VRRP_Description": "DC-LAN Subnet", 94 | "VRRP_MAC": "0000.5e00.0164", 95 | "VRRP_Preempt": "enabled", 96 | "VRRP_Priority": "120", 97 | "VRRP_Virtual_IP": "192.168.10.1", 98 | "adv_interval": "1.000", 99 | "master_down": "3.531", 100 | "master_int": "1.000", 101 | "priority": "120", 102 | "track_obj": "1", 103 | "track_obj_decrement": "30", 104 | "track_obj_status": "Up", 105 | } 106 | } 107 | }, 108 | "R2": { 109 | "GigabitEthernet1": { 110 | "Group-100": { 111 | "Auth_Text": '"hash"', 112 | "Master_IP": "192.168.1.233", 113 | "VRRP_MAC": "0000.5e00.0164", 114 | "VRRP_Preempt": "enabled", 115 | "VRRP_Priority": "115", 116 | "VRRP_State": "VRRP Slave for this Group", 117 | "VRRP_Virtual_IP": "192.168.10.1", 118 | "adv_interval": "1.000", 119 | "master_down": "3.550", 120 | "master_int": "1.000", 121 | "priority": "120", 122 | } 123 | } 124 | }, 125 | } 126 | } 127 | 128 | 129 | # test_line_when_its_start_re() 130 | -------------------------------------------------------------------------------- /test/pytest/test_lookups.py: -------------------------------------------------------------------------------- 1 | import sys 2 | 3 | sys.path.insert(0, "../..") 4 | import pprint 5 | 6 | import logging 7 | 8 | logging.basicConfig(level="INFO") 9 | 10 | from ttp import ttp 11 | 12 | 13 | def test_lookup_include_csv(): 14 | template = """ 15 | 16 | 17 | 18 | router bgp 65101 19 | 20 | 21 | 22 | router bgp {{ bgp_as | lookup(ASNs, add_field="as_info") }} 23 | 24 | """ 25 | parser = ttp(template=template) 26 | parser.parse() 27 | lookup_data = parser._templates[0].lookups 28 | res = parser.result() 29 | assert lookup_data == { 30 | "ASNs": { 31 | "65100": { 32 | "as_description": "Private ASN for CN451275", 33 | "as_name": "Customer_1", 34 | }, 35 | "65101": {"as_description": "Private ASN for FTTB CPEs", "as_name": "CPEs"}, 36 | } 37 | } 38 | assert res == [ 39 | [ 40 | { 41 | "bgp_config": { 42 | "as_info": { 43 | "as_description": "Private ASN for FTTB CPEs", 44 | "as_name": "CPEs", 45 | }, 46 | "bgp_as": "65101", 47 | } 48 | } 49 | ] 50 | ] 51 | 52 | 53 | # test_lookup_include_csv() 54 | 55 | 56 | def test_lookup_include_yaml(): 57 | template = """ 58 | 59 | 60 | 61 | 62 | router bgp 65100 63 | 64 | 65 | 66 | router bgp {{ bgp_as | lookup("yaml_look", add_field="as_details") }} 67 | 68 | """ 69 | parser = ttp(template=template) 70 | parser.parse() 71 | lookup_data = parser._templates[0].lookups 72 | res = parser.result() 73 | # pprint.pprint(lookup_data) 74 | # pprint.pprint(res) 75 | assert lookup_data == { 76 | "yaml_look": { 77 | "65100": { 78 | "as_description": "Private ASN", 79 | "as_name": "Subs", 80 | "prefix_num": "734", 81 | }, 82 | "65101": { 83 | "as_description": "Cust-1 ASN", 84 | "as_name": "Cust1", 85 | "prefix_num": "156", 86 | }, 87 | } 88 | } 89 | assert res == [ 90 | [ 91 | { 92 | "bgp_config": { 93 | "as_details": { 94 | "as_description": "Private ASN", 95 | "as_name": "Subs", 96 | "prefix_num": "734", 97 | }, 98 | "bgp_as": "65100", 99 | } 100 | } 101 | ] 102 | ] 103 | 104 | 105 | # test_lookup_include_yaml() 106 | -------------------------------------------------------------------------------- /test/pytest/test_output_returners.py: -------------------------------------------------------------------------------- 1 | import sys 2 | 3 | sys.path.insert(0, "../..") 4 | import pprint 5 | 6 | import logging 7 | 8 | logging.basicConfig(level="ERROR") 9 | 10 | from ttp import ttp 11 | 12 | 13 | def test_file_returner_format_raw(): 14 | template_1 = """ 15 | 16 | interface Port-Chanel11 17 | vlan 10 18 | interface Loopback0 19 | vlan 20 20 | 21 | 22 | 23 | interface {{ interface }} 24 | vlan {{ vlan | to_int }} 25 | 26 | 27 | 32 | """ 33 | parser = ttp(template=template_1) 34 | parser.parse() 35 | # res=parser.result() 36 | # pprint.pprint(res) 37 | with open("./Output/out_test_file_returner.txt", "r") as f: 38 | assert ( 39 | f.read() 40 | == "[[{'vlan': 10, 'interface': 'Port-Chanel11'}, {'vlan': 20, 'interface': 'Loopback0'}]]" 41 | ) 42 | 43 | 44 | # test_file_returner() 45 | 46 | 47 | def test_file_returner_format_raw_incomplete_url(): 48 | template_1 = """ 49 | 50 | interface Port-Chanel11 51 | vlan 10 52 | interface Loopback0 53 | vlan 20 54 | 55 | 56 | 57 | interface {{ interface }} 58 | vlan {{ vlan | to_int }} 59 | 60 | 61 | 66 | """ 67 | parser = ttp(template=template_1) 68 | parser.parse() 69 | # res=parser.result() 70 | # pprint.pprint(res) 71 | with open("./Output/out_test_file_returner_2.txt", "r") as f: 72 | assert ( 73 | f.read() 74 | == "[[{'vlan': 10, 'interface': 'Port-Chanel11'}, {'vlan': 20, 'interface': 'Loopback0'}]]" 75 | ) 76 | 77 | 78 | # test_file_returner_format_raw_incomplete_url() 79 | 80 | 81 | def test_file_returner_format_raw_1(): 82 | template_1 = """ 83 | 84 | interface Port-Chanel11 85 | vlan 10 86 | interface Loopback0 87 | vlan 20 88 | 89 | 90 | 91 | interface {{ interface }} 92 | vlan {{ vlan | to_int }} 93 | 94 | 95 | 96 | returner="file" 97 | url="./Output/" 98 | filename="out_test_file_returner_1.txt" 99 | 100 | """ 101 | parser = ttp(template=template_1) 102 | parser.parse() 103 | # res=parser.result() 104 | # pprint.pprint(res) 105 | with open("./Output/out_test_file_returner_1.txt", "r") as f: 106 | assert ( 107 | f.read() 108 | == "[[{'vlan': 10, 'interface': 'Port-Chanel11'}, {'vlan': 20, 'interface': 'Loopback0'}]]" 109 | ) 110 | 111 | 112 | def test_file_returner_filename_format(): 113 | template_1 = """ 114 | 115 | switch-sw1# show run interfaces 116 | interface Port-Chanel11 117 | vlan 10 118 | interface Loopback0 119 | vlan 20 120 | 121 | 122 | 123 | switch-sw2# show run interfaces 124 | interface Port-Chanel11 125 | vlan 10 126 | interface Loopback0 127 | vlan 20 128 | 129 | 130 | 131 | host_name = "gethostname" 132 | 133 | 134 | 135 | interface {{ interface }} 136 | vlan {{ vlan | to_int }} 137 | {{ hostname | set(host_name) }} 138 | 139 | 140 | 141 | returner="file" 142 | url="./Output/" 143 | filename="out_test_file_returner_{host_name}.txt" 144 | 145 | """ 146 | parser = ttp(template=template_1) 147 | parser.parse() 148 | # res=parser.result() 149 | # pprint.pprint(res) 150 | with open("./Output/out_test_file_returner_switch-sw2.txt", "r") as f: 151 | assert ( 152 | f.read() 153 | == "[[{'vlan': 10, 'interface': 'Port-Chanel11', 'hostname': 'switch-sw1'}, {'vlan': 20, 'interface': 'Loopback0', 'hostname': 'switch-sw1'}], [{'vlan': 10, 'interface': 'Port-Chanel11', 'hostname': 'switch-sw2'}, {'vlan': 20, 'interface': 'Loopback0', 'hostname': 'switch-sw2'}]]" 154 | ) 155 | 156 | 157 | # test_file_returner_filename_format() 158 | 159 | 160 | def test_file_returner_filename_format_group_specific_output(): 161 | template_1 = """ 162 | 163 | switch-sw1# show run interfaces 164 | interface Port-Chanel11 165 | vlan 10 166 | interface Loopback0 167 | vlan 20 168 | 169 | 170 | 171 | switch-sw2# show run interfaces 172 | interface Port-Chanel11 173 | vlan 10 174 | interface Loopback0 175 | vlan 20 176 | 177 | 178 | 179 | host_name = "gethostname" 180 | 181 | 182 | 183 | interface {{ interface }} 184 | vlan {{ vlan | to_int }} 185 | 186 | 187 | 188 | returner="file" 189 | url="./Output/" 190 | filename="out_test_file_returner_{host_name}_group_specific_outputter.txt" 191 | 192 | """ 193 | parser = ttp(template=template_1) 194 | parser.parse() 195 | with open( 196 | "./Output/out_test_file_returner_switch-sw1_group_specific_outputter.txt", "r" 197 | ) as f: 198 | assert ( 199 | f.read() 200 | == "{'_anonymous_': [{'vlan': 10, 'interface': 'Port-Chanel11'}, {'vlan': 20, 'interface': 'Loopback0'}]}" 201 | ) 202 | with open( 203 | "./Output/out_test_file_returner_switch-sw2_group_specific_outputter.txt", "r" 204 | ) as f: 205 | assert ( 206 | f.read() 207 | == "{'_anonymous_': [{'vlan': 10, 'interface': 'Port-Chanel11'}, {'vlan': 20, 'interface': 'Loopback0'}]}" 208 | ) 209 | 210 | 211 | # test_file_returner_filename_format_group_specific_output() 212 | -------------------------------------------------------------------------------- /test/pytest/test_structure.py: -------------------------------------------------------------------------------- 1 | import sys 2 | 3 | sys.path.insert(0, "../..") 4 | 5 | from ttp import ttp 6 | 7 | template_1 = """ 8 | 9 | interface Lo0 10 | ip address 192.168.0.1 32 11 | ! 12 | interface Lo1 13 | ip address 1.1.1.1 32 14 | 15 | 16 | 17 | interface Lo2 18 | ip address 2.2.2.2 32 19 | ! 20 | interface Lo3 21 | ip address 3.3.3.3 32 22 | 23 | 24 | 25 | interface {{ interface }} 26 | ip address {{ ip }} {{ mask }} 27 | 28 | """ 29 | 30 | template_2 = """ 31 | 64 | """ 65 | 66 | 67 | list_structure = [ 68 | [ 69 | [ 70 | {"interface": "Lo0", "ip": "192.168.0.1", "mask": "32"}, 71 | {"interface": "Lo1", "ip": "1.1.1.1", "mask": "32"}, 72 | ], 73 | [ 74 | {"interface": "Lo2", "ip": "2.2.2.2", "mask": "32"}, 75 | {"interface": "Lo3", "ip": "3.3.3.3", "mask": "32"}, 76 | ], 77 | ] 78 | ] 79 | 80 | dictionary_structure = { 81 | "first": [ 82 | [ 83 | {"interface": "Lo0", "ip": "124.171.238.50", "mask": "32"}, 84 | {"interface": "Lo1", "ip": "1.1.1.1", "mask": "32"}, 85 | ] 86 | ], 87 | "second": [ 88 | [ 89 | {"interface": "Lo2", "ip": "124.171.238.50", "mask": "32"}, 90 | {"interface": "Lo3", "ip": "2.2.2.2", "mask": "32"}, 91 | ] 92 | ], 93 | } 94 | 95 | flat_list_structure = [ 96 | {"interface": "Lo0", "ip": "192.168.0.1", "mask": "32"}, 97 | {"interface": "Lo1", "ip": "1.1.1.1", "mask": "32"}, 98 | {"interface": "Lo2", "ip": "2.2.2.2", "mask": "32"}, 99 | {"interface": "Lo3", "ip": "3.3.3.3", "mask": "32"}, 100 | ] 101 | 102 | 103 | def test_list_structure(): 104 | parser = ttp(template=template_1) 105 | parser.parse() 106 | res = parser.result() 107 | assert res == list_structure 108 | 109 | 110 | def test_list_structure_with_outputter(): 111 | parser = ttp(template=template_1) 112 | parser.parse() 113 | res = parser.result(returner="self") 114 | assert res == list_structure 115 | 116 | 117 | def test_dictionary_structure(): 118 | parser = ttp(template=template_2) 119 | parser.parse() 120 | res = parser.result(structure="dictionary") 121 | assert res == dictionary_structure 122 | 123 | 124 | def test_dictionary_structure_with_outputter(): 125 | parser = ttp(template=template_2) 126 | parser.parse() 127 | res = parser.result(structure="dictionary", returner="self") 128 | assert res == dictionary_structure 129 | 130 | 131 | def test_flat_list_structure(): 132 | parser = ttp(template=template_1) 133 | parser.parse() 134 | # just return the results 135 | res = parser.result(structure="flat_list") 136 | assert res == flat_list_structure 137 | 138 | 139 | def test_flat_list_structure_with_outputter(): 140 | parser = ttp(template=template_1) 141 | parser.parse() 142 | res = parser.result(structure="flat_list", returner="self") 143 | assert res == flat_list_structure 144 | -------------------------------------------------------------------------------- /test/pytest/test_syslog_returner.py: -------------------------------------------------------------------------------- 1 | import sys 2 | 3 | sys.path.insert(0, "../..") 4 | import pprint 5 | 6 | from ttp import ttp 7 | 8 | 9 | def test_syslog_returner(): 10 | template = """ 11 | 12 | router-2-lab#show ip arp 13 | Protocol Address Age (min) Hardware Addr Type Interface 14 | Internet 10.1.13.4 - 0050.5685.14d6 ARPA GigabitEthernet3.13 15 | Internet 10.1.13.5 - 0050.5685.14d7 ARPA GigabitEthernet4.14 16 | 17 | 18 | 19 | router-3-lab#show ip arp 20 | Protocol Address Age (min) Hardware Addr Type Interface 21 | Internet 10.1.13.1 98 0050.5685.5cd1 ARPA GigabitEthernet1.11 22 | Internet 10.1.13.3 - 0050.5685.14d5 ARPA GigabitEthernet2.12 23 | 24 | 25 | hostname="gethostname" 26 | 27 | 28 | Internet {{ ip }} {{ age | DIGIT }} {{ mac }} ARPA {{ interface }} 29 | Internet {{ ip }} - {{ mac }} ARPA {{ interface }} 30 | {{ hostname | set(hostname) }} 31 | 32 | 33 | 34 | servers=["192.168.1.175", "192.168.1.105"] 35 | port="10514" 36 | path="arp_table" 37 | iterate=True 38 | facility=77 39 | 40 | """ 41 | parser = ttp(template=template) 42 | parser.parse() 43 | 44 | 45 | # uncomment to test, need some syslog server running to test, for instance all-in-one graylog VM 46 | # test_syslog_returner() 47 | -------------------------------------------------------------------------------- /test/pytest/yang_modules/library/yang-library.json: -------------------------------------------------------------------------------- 1 | { 2 | "ietf-yang-library:modules-state": { 3 | "module-set-id": "", 4 | "module": [ 5 | { 6 | "name": "iana-if-type", 7 | "revision": "2017-01-19", 8 | "namespace": "urn:ietf:params:xml:ns:yang:iana-if-type", 9 | "conformance-type": "implement" 10 | }, 11 | { 12 | "name": "ietf-inet-types", 13 | "revision": "2013-07-15", 14 | "namespace": "urn:ietf:params:xml:ns:yang:ietf-inet-types", 15 | "conformance-type": "import" 16 | }, 17 | { 18 | "name": "ietf-interfaces", 19 | "revision": "2018-02-20", 20 | "namespace": "urn:ietf:params:xml:ns:yang:ietf-interfaces", 21 | "feature": [ 22 | "arbitrary-names", 23 | "pre-provisioning", 24 | "if-mib" 25 | ], 26 | "conformance-type": "implement" 27 | }, 28 | { 29 | "name": "ietf-ip", 30 | "revision": "2018-02-22", 31 | "namespace": "urn:ietf:params:xml:ns:yang:ietf-ip", 32 | "feature": [ 33 | "ipv4-non-contiguous-netmasks", 34 | "ipv6-privacy-autoconf" 35 | ], 36 | "conformance-type": "implement" 37 | }, 38 | { 39 | "name": "ietf-yang-types", 40 | "revision": "2013-07-15", 41 | "namespace": "urn:ietf:params:xml:ns:yang:ietf-yang-types", 42 | "conformance-type": "import" 43 | } 44 | ] 45 | } 46 | } -------------------------------------------------------------------------------- /test/test_logging_from_module.py: -------------------------------------------------------------------------------- 1 | from ttp import ttp 2 | 3 | test217=""" 4 | 5 | user.name@host-site-sw1> show configuration interfaces | display set 6 | set interfaces vlan unit 17 description "som if descript" 7 | set interfaces vlan unit 17 family inet address 20.17.1.253/23 vrrp-group 25 virtual-address 20.17.1.254 8 | set interfaces vlan unit 17 family inet address 20.17.1.252/23 9 | 10 | 11 | 12 | set interfaces {{ interface }} unit {{ unit }} family inet address {{ ip | to_ip }} vrrp-group {{ vrrp_id }} virtual-address {{ vrrp_vip }} 13 | set interfaces {{ interface }} unit {{ unit }} description "{{ description | ORPHRASE }}" 14 | {{ hostname | set("hostname") }} 15 | {{ group | set("group-0") }} 16 | 17 | """ 18 | 19 | parser_Obj = ttp(template=test217, log_level="DEBUG", log_file="myfile.txt") 20 | parser_Obj.result(format='json', returner='terminal') -------------------------------------------------------------------------------- /test/test_structure.py: -------------------------------------------------------------------------------- 1 | from ttp import ttp 2 | import json 3 | 4 | template=""" 5 | 15 | 16 | 26 | """ 27 | 28 | parser=ttp(template=template) 29 | parser.parse() 30 | results = parser.result(structure="dictionary") 31 | print(json.dumps(results, sort_keys=True, indent=4, separators=(',', ': '))) -------------------------------------------------------------------------------- /test/ttp_dns_test.txt: -------------------------------------------------------------------------------- 1 | 2 | interface GigabitEthernet3/11 3 | description bbc.com 4 | switchport trunk allowed vlan add 111,222 5 | ! 6 | 7 | 8 | 9 | interface {{ interface }} 10 | switchport trunk allowed vlan add {{ trunk_vlans }} 11 | description {{ description | dns(record='A', servers='192.168.1.100') }} 12 | !{{ _end_ }} 13 | 14 | 15 | 16 | interface {{ interface }} 17 | switchport trunk allowed vlan add {{ trunk_vlans }} 18 | description {{ description | dns }} 19 | !{{ _end_ }} 20 | 21 | 22 | 23 | interface {{ interface }} 24 | switchport trunk allowed vlan add {{ trunk_vlans }} 25 | description {{ description | dns(record='AAAA') }} 26 | !{{ _end_ }} 27 | 28 | 29 | 30 | interface {{ interface }} 31 | switchport trunk allowed vlan add {{ trunk_vlans }} 32 | description {{ description | dns(record='A', servers='8.8.8.8') }} 33 | !{{ _end_ }} 34 | 35 | 36 | 37 | interface {{ interface }} 38 | switchport trunk allowed vlan add {{ trunk_vlans }} 39 | description {{ description | dns(record='AAAA', add_field='ips') }} 40 | !{{ _end_ }} 41 | 42 | 43 | 48 | { 49 | "interfaces_dnsv4_timeout_test": { 50 | "description": "bbc.com", 51 | "interface": "GigabitEthernet3/11", 52 | "trunk_vlans": "111,222" 53 | }} 54 | 55 | 56 | 61 | { 62 | "interfaces_dnsv4": { 63 | "description": [ 64 | "151.101.128.81", 65 | "151.101.192.81", 66 | "151.101.64.81", 67 | "151.101.0.81" 68 | ], 69 | "interface": "GigabitEthernet3/11", 70 | "trunk_vlans": "111,222" 71 | } 72 | } 73 | 74 | 75 | 80 | { 81 | "interfaces_dnsv6": { 82 | "description": [ 83 | "2a04:4e42:600::81", 84 | "2a04:4e42:400::81", 85 | "2a04:4e42:200::81", 86 | "2a04:4e42::81" 87 | ], 88 | "interface": "GigabitEthernet3/11", 89 | "trunk_vlans": "111,222" 90 | } 91 | } 92 | 93 | 94 | 99 | { 100 | "interfaces_dnsv4_google": { 101 | "description": [ 102 | "151.101.0.81", 103 | "151.101.192.81", 104 | "151.101.64.81", 105 | "151.101.128.81" 106 | ], 107 | "interface": "GigabitEthernet3/11", 108 | "trunk_vlans": "111,222" 109 | } 110 | } 111 | 112 | 113 | 114 | 119 | { 120 | "interfaces_dnsv6_add_field": { 121 | "description": "bbc.com", 122 | "interface": "GigabitEthernet3/11", 123 | "ips": [ 124 | "2a04:4e42:400::81", 125 | "2a04:4e42:600::81", 126 | "2a04:4e42::81", 127 | "2a04:4e42:200::81" 128 | ], 129 | "trunk_vlans": "111,222" 130 | } 131 | } 132 | 133 | 134 | 135 | -------------------------------------------------------------------------------- /ttp/__init__.py: -------------------------------------------------------------------------------- 1 | name = "ttp" 2 | 3 | __all__ = ["ttp"] 4 | __author__ = "Denis Mulyalin " 5 | __version__ = "0.9.5" 6 | from sys import version_info 7 | 8 | # get python version: 9 | python_major_version = version_info.major 10 | 11 | if python_major_version == 3: 12 | from ttp.ttp import ttp 13 | from ttp.utils.quick_parse import quick_parse 14 | elif python_major_version == 2: 15 | from ttp import ttp 16 | from utils.quick_parse import quick_parse 17 | -------------------------------------------------------------------------------- /ttp/formatters/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dmulyalin/ttp/d6476d387ca2078e0306c2546148850d3ceb246c/ttp/formatters/__init__.py -------------------------------------------------------------------------------- /ttp/formatters/csv_formatter.py: -------------------------------------------------------------------------------- 1 | _name_map_ = {"csv_formatter": "csv"} 2 | 3 | 4 | def csv_formatter(data, **kwargs): 5 | """Method to dump list of dictionaries into table 6 | using provided separator, default is comma - ',' 7 | """ 8 | result = "" 9 | # form table - list of lists 10 | table = _ttp_["formatters"]["table"]( 11 | data, **kwargs 12 | ) # pylint: disable=undefined-variable 13 | sep = kwargs.get("sep", ",") 14 | quote = kwargs.get("quote", '"') 15 | sep = "{q}{s}{q}".format(s=sep, q=quote) 16 | row_formatter = "\n{q}{{}}{q}".format(q=quote) 17 | # form results: 18 | result = "{q}{d}{q}".format(d=sep.join(table[0]), q=quote) 19 | for row in table[1:]: 20 | try: 21 | result += row_formatter.format(sep.join(row)) 22 | except TypeError: # might happen if not all values in row are strings 23 | result += row_formatter.format(sep.join([str(i) for i in row])) 24 | return result 25 | -------------------------------------------------------------------------------- /ttp/formatters/excel_formatter.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import os 3 | 4 | log = logging.getLogger(__name__) 5 | 6 | try: 7 | from openpyxl import Workbook, load_workbook 8 | except ImportError: 9 | log.critical( 10 | "output.formatter_excel: openpyxl not installed, install: 'python -m pip install openpyxl'. Exiting" 11 | ) 12 | raise SystemExit() 13 | 14 | _name_map_ = {"excel_formatter": "excel"} 15 | 16 | 17 | def excel_formatter(data, **kwargs): 18 | """Method to format data as an .xlsx table using openpyxl module.""" 19 | # get arguments 20 | try: 21 | table = kwargs["table"] 22 | except KeyError: 23 | log.critical( 24 | "output.formatter_excel: output tag missing table definition. Exiting" 25 | ) 26 | raise SystemExit() 27 | update = kwargs.get("update") 28 | url = kwargs.get("url", "./Output/") 29 | 30 | # from filename 31 | filename = kwargs.get("filename") 32 | if not filename.endswith(".xlsx"): 33 | filename = "{}.xlsx".format(filename) 34 | 35 | # form table_tabs - list of dictionaries 36 | table_tabs = [] 37 | for index, tab_det in enumerate(table): 38 | tab_name = ( 39 | tab_det.pop("tab_name") 40 | if "tab_name" in tab_det 41 | else "Sheet{}".format(index) 42 | ) 43 | headers = tab_det.get("headers", None) 44 | if isinstance(headers, str): 45 | headers = [i.strip() for i in headers.split(",")] 46 | # get attributes out of tab_det 47 | tab_kwargs = { 48 | "path": [i.strip() for i in tab_det.get("path", "").split(".")], 49 | "headers": headers, 50 | "missing": tab_det.get("missing", ""), 51 | "key": tab_det.get("key", ""), 52 | "strict": tab_det.get("strict", True), 53 | } 54 | # form tab table 55 | tab_table_data = _ttp_["formatters"]["table"](data, **tab_kwargs) 56 | table_tabs.append({"name": tab_name, "data": tab_table_data}) 57 | 58 | # check if need to load existing workbook 59 | if update and os.path.exists(os.path.join(url, filename)): 60 | wb = load_workbook(os.path.join(url, filename)) 61 | # create workbook 62 | else: 63 | wb = Workbook(write_only=True) 64 | 65 | # add data to workbook 66 | for tab in table_tabs: 67 | # check if need to add to existing tab 68 | if tab["name"] in wb and update: 69 | ws = wb[tab["name"]] 70 | for row in tab["data"][1:]: 71 | ws.append(row) 72 | # create new tab 73 | else: 74 | ws = wb.create_sheet(title=tab["name"]) 75 | for row in tab["data"]: 76 | ws.append(row) 77 | return wb 78 | -------------------------------------------------------------------------------- /ttp/formatters/jinja2_formatter.py: -------------------------------------------------------------------------------- 1 | import logging 2 | 3 | log = logging.getLogger(__name__) 4 | 5 | _name_map_ = {"jinja2_formatter": "jinja2"} 6 | 7 | 8 | def jinja2_formatter(data, **kwargs): 9 | """Method to render output template using results data.""" 10 | try: 11 | from jinja2 import Environment 12 | except ImportError: 13 | log.critical( 14 | "output.formatter_jinja2: Jinja2 not installed, install: 'python -m pip install jinja2'. Exiting" 15 | ) 16 | raise SystemExit() 17 | # load template: 18 | template_obj = Environment( 19 | loader="BaseLoader", trim_blocks=True, lstrip_blocks=True 20 | ).from_string(_ttp_["output_object"].tag_load) 21 | # render data making whole results accessible from _data_ variable in Jinja2 22 | result = template_obj.render(_data_=data) 23 | return result 24 | -------------------------------------------------------------------------------- /ttp/formatters/json_formatter.py: -------------------------------------------------------------------------------- 1 | _name_map_ = {"json_formatter": "json"} 2 | 3 | 4 | def json_formatter(data, **kwargs): 5 | """Method returns parsing result in json format.""" 6 | from json import dumps 7 | 8 | return dumps(data, sort_keys=True, indent=4, separators=(",", ": ")) 9 | -------------------------------------------------------------------------------- /ttp/formatters/n2g_formatter.py: -------------------------------------------------------------------------------- 1 | import logging 2 | 3 | log = logging.getLogger(__name__) 4 | 5 | 6 | def n2g(data, **kwargs): 7 | # load kwargs 8 | module = kwargs.get("module", "yed") 9 | method = kwargs.get("method", "from_list") 10 | path = kwargs.get("path", []) 11 | node_dups = kwargs.get("node_duplicates", "skip") 12 | link_dups = kwargs.get("link_duplicates", "skip") 13 | method_kwargs = kwargs.get("method_kwargs", {}) 14 | algo = kwargs.get("algo", None) 15 | # import N2G library 16 | try: 17 | if module.lower() == "yed": 18 | from N2G import yed_diagram as create_diagram 19 | elif module.lower() == "drawio": 20 | from N2G import drawio_diagram as create_diagram 21 | else: 22 | log.error( 23 | "No N2G module '{}', supported values are 'yEd', 'DrawIO'".format( 24 | module 25 | ) 26 | ) 27 | return data 28 | except ImportError: 29 | log.error("Failed to import N2G '{}' module".format(module)) 30 | return data 31 | diagram_obj = create_diagram(node_duplicates=node_dups, link_duplicates=link_dups) 32 | # normalize results_data to list: 33 | if isinstance(data, dict): # handle the case for group specific output 34 | data = [data] 35 | # make graph 36 | for result in data: 37 | result_datum = _ttp_["output"]["traverse"](result, path) 38 | getattr(diagram_obj, method)(result_datum, **method_kwargs) 39 | # layout graph 40 | if algo: 41 | diagram_obj.layout(algo=algo) 42 | # return results XML 43 | data = diagram_obj.dump_xml() 44 | return data 45 | -------------------------------------------------------------------------------- /ttp/formatters/pprint_formatter.py: -------------------------------------------------------------------------------- 1 | _name_map_ = {"pprint_formatter": "pprint"} 2 | 3 | 4 | def pprint_formatter(data, **kwargs): 5 | """Method to pprint format results""" 6 | from pprint import pformat 7 | 8 | return pformat(data, indent=4) 9 | -------------------------------------------------------------------------------- /ttp/formatters/raw_formatter.py: -------------------------------------------------------------------------------- 1 | def raw(data, **kwargs): 2 | """Method returns parsing results as python list or dictionary.""" 3 | return data 4 | -------------------------------------------------------------------------------- /ttp/formatters/table_formatter.py: -------------------------------------------------------------------------------- 1 | def table(data, path=None, missing="", key="", strict=True, headers=None, **kwargs): 2 | """Method to form table there table is list of lists, 3 | first item - headers row. Method used by csv/tabulate/excel 4 | formatters. 5 | 6 | :param path: (list) path items list to result within data 7 | :param strict: (bool) strict attribute to use with traverse function, if True 8 | will raise KeyError in case of path not within data 9 | :param missing: (str) value to use for missing headers 10 | :param key: (str) name of key to use for transforming dictionary to list 11 | :param headers: (list) list of table headers 12 | """ 13 | headers = headers or [] 14 | path = path or [] 15 | table = [] 16 | data_to_table = [] 17 | source_data = [] 18 | # normalize source_data to list: 19 | if isinstance(data, list): # handle the case for template/global output 20 | source_data += data 21 | elif isinstance(data, dict): # handle the case for group specific output 22 | source_data.append(data) 23 | # form data_to_table: 24 | for datum in source_data: 25 | item = _ttp_["output"]["traverse"](datum, path, strict) 26 | if not item: # skip empty results 27 | continue 28 | elif isinstance(item, list): 29 | data_to_table += item 30 | elif isinstance(item, dict): 31 | # flatten dictionary data if key was given 32 | if key: 33 | data_to_table += _ttp_["output"]["dict_to_list"]( 34 | data=item, key_name=key 35 | ) 36 | else: 37 | data_to_table.append(item) 38 | # create headers: 39 | if not headers: 40 | headers = set() 41 | for item in data_to_table: 42 | headers.update(list(item.keys())) 43 | headers = sorted(list(headers)) 44 | # save headers row in table: 45 | table.insert(0, headers) 46 | # fill in table with data: 47 | for item in data_to_table: 48 | row = [missing for _ in headers] 49 | for k, v in item.items(): 50 | if k in headers: 51 | row[headers.index(k)] = v 52 | table.append(row) 53 | return table 54 | -------------------------------------------------------------------------------- /ttp/formatters/tabulate_formatter.py: -------------------------------------------------------------------------------- 1 | import logging 2 | 3 | log = logging.getLogger(__name__) 4 | 5 | _name_map_ = {"tabulate_formatter": "tabulate"} 6 | 7 | 8 | def tabulate_formatter(data, **kwargs): 9 | """Method to format data as a table using tabulate module.""" 10 | try: 11 | from tabulate import tabulate 12 | except ImportError: 13 | log.critical( 14 | "output.formatter_tabulate: tabulate not installed, install: 'python -m pip install tabulate'. Exiting" 15 | ) 16 | raise SystemExit() 17 | # form table - list of lists 18 | table = _ttp_["formatters"]["table"](data, **kwargs) 19 | headers = table.pop(0) 20 | attribs = _ttp_["output_object"].attributes.get( 21 | "format_attributes", {"args": [], "kwargs": {}} 22 | ) 23 | # run tabulate: 24 | return tabulate(table, headers=headers, *attribs["args"], **attribs["kwargs"]) 25 | -------------------------------------------------------------------------------- /ttp/formatters/yaml_formatter.py: -------------------------------------------------------------------------------- 1 | _name_map_ = {"yaml_formatter": "yaml"} 2 | 3 | 4 | def yaml_formatter(data, **kwargs): 5 | """Method returns parsing results in yaml format.""" 6 | try: 7 | from yaml import dump 8 | except ImportError: 9 | import logging 10 | 11 | log = logging.getLogger(__name__) 12 | log.critical( 13 | "output.yaml_formatter: yaml not installed, install: 'python -m pip install pyyaml'. Exiting" 14 | ) 15 | raise SystemExit() 16 | return dump(data, default_flow_style=False) 17 | -------------------------------------------------------------------------------- /ttp/group/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dmulyalin/ttp/d6476d387ca2078e0306c2546148850d3ceb246c/ttp/group/__init__.py -------------------------------------------------------------------------------- /ttp/group/contains.py: -------------------------------------------------------------------------------- 1 | def contains(data, *args): 2 | # args = ('v4address', 'v4mask',) 3 | for var in args: 4 | if var in data: 5 | if var in _ttp_["results_object"].record["DEFAULTS"]: 6 | if _ttp_["results_object"].record["DEFAULTS"][var] == data[var]: 7 | return data, False 8 | return data, True 9 | return data, False 10 | -------------------------------------------------------------------------------- /ttp/group/contains_val.py: -------------------------------------------------------------------------------- 1 | def contains_val(data, key, value): 2 | """ 3 | check if certain key has certain value, return true if so and false otherwise 4 | """ 5 | # try to get value variable from parser specific variables 6 | value = _ttp_["vars"].get(value, value) 7 | if not value in data.get(key, ""): 8 | return data, False 9 | return data, None 10 | -------------------------------------------------------------------------------- /ttp/group/containsall.py: -------------------------------------------------------------------------------- 1 | def containsall(data, *args): 2 | # args = ('v4address', 'v4mask',) 3 | for var in args: 4 | if var in data: 5 | if var in _ttp_["results_object"].record["DEFAULTS"]: 6 | if _ttp_["results_object"].record["DEFAULTS"][var] == data[var]: 7 | return data, False 8 | else: 9 | return data, False 10 | return data, None 11 | -------------------------------------------------------------------------------- /ttp/group/delete.py: -------------------------------------------------------------------------------- 1 | _name_map_ = {"delete_func": "del"} 2 | 3 | 4 | def delete_func(data, *args): 5 | for key in args: 6 | if key in data: 7 | _ = data.pop(key) 8 | return data, None 9 | -------------------------------------------------------------------------------- /ttp/group/equal.py: -------------------------------------------------------------------------------- 1 | def equal(data, key, value): 2 | """ 3 | check if certain key has certain value, return true if so and false otherwise 4 | """ 5 | # try to get value variable from parser specific variables 6 | value = _ttp_["vars"].get(value, value) 7 | if data.get(key) != value: 8 | return data, False 9 | return data, None 10 | -------------------------------------------------------------------------------- /ttp/group/exclude.py: -------------------------------------------------------------------------------- 1 | def exclude(data, *args): 2 | # args = ('v4address', 'v4mask',) 3 | for var in args: 4 | if var in data: 5 | if var in _ttp_["results_object"].record["DEFAULTS"]: 6 | continue 7 | return data, False 8 | return data, True 9 | -------------------------------------------------------------------------------- /ttp/group/exclude_val.py: -------------------------------------------------------------------------------- 1 | def exclude_val(data, key, value): 2 | """ 3 | check if certain key has certain value, return False if so and True otherwise 4 | """ 5 | # try to get value variable from parser specific variables 6 | value = _ttp_["vars"].get(value, value) 7 | try: 8 | if data[key] == value: 9 | return data, False 10 | else: 11 | return data, True 12 | except KeyError: 13 | return data, True 14 | -------------------------------------------------------------------------------- /ttp/group/excludeall.py: -------------------------------------------------------------------------------- 1 | def excludeall(data, *args): 2 | # args = ('v4address', 'v4mask',) 3 | found_vars = [] 4 | for var in args: 5 | if var in data: 6 | if var in _ttp_["results_object"].record["DEFAULTS"]: 7 | continue 8 | found_vars.append(var) 9 | if list(args) == found_vars: 10 | return data, False 11 | else: 12 | return data, True 13 | -------------------------------------------------------------------------------- /ttp/group/expand.py: -------------------------------------------------------------------------------- 1 | def expand(data): 2 | """ 3 | Function to expand dot separated match variable names 4 | to nested dictionary structure 5 | """ 6 | # do sanity check on data 7 | if not isinstance(data, dict): 8 | return data, True 9 | ret = {} 10 | # expand match variable names to dictionary 11 | for key, value in data.items(): 12 | ref = ret 13 | keys = key.split(".") 14 | while True: 15 | new_key = keys.pop(0) 16 | # handle last item in keys 17 | if not keys: 18 | if isinstance(ref, dict): 19 | ref[new_key] = value 20 | break 21 | # expand dictionary tree 22 | ref = ref.setdefault(new_key, {}) 23 | del data 24 | return ret, True 25 | -------------------------------------------------------------------------------- /ttp/group/itemize.py: -------------------------------------------------------------------------------- 1 | def itemize(data, key, path=""): 2 | # if no path given use group path to save results 3 | if key in data and not path: 4 | res_path = _ttp_["results_object"].record["PATH"] 5 | # modify path to have single '*' at the end 6 | if res_path[-1].endswith("**") or not res_path[-1].endswith("*"): 7 | res_path[-1] = "{}*".format(res_path[-1].rstrip("*")) 8 | _ttp_["results_object"].record["PATH"] = res_path 9 | # update path cache with current matches 10 | _ttp_["results_object"].dyn_path_cache.update(data) 11 | return data.pop(key), None 12 | # use path provided 13 | elif key in data and path: 14 | # update path cache with current matches 15 | _ttp_["results_object"].dyn_path_cache.update(data) 16 | # form and check path 17 | path = [i.strip() for i in path.split(".")] 18 | if path[-1].endswith("**") or not path[-1].endswith("*"): 19 | path[-1] = "{}*".format(path[-1].rstrip("*")) 20 | processed_path = _ttp_["results_object"].form_path(path) 21 | # save item into results: 22 | _ttp_["results_object"].save_curelements( 23 | result_data=data[key], result_path=processed_path 24 | ) 25 | # group considered to be invalid if no key in it 26 | else: 27 | return data, False 28 | return data, None 29 | -------------------------------------------------------------------------------- /ttp/group/items2dict.py: -------------------------------------------------------------------------------- 1 | def items2dict(data, key_name, value_name): 2 | """ 3 | Function to combine values of key_name and value_name keys in 4 | a key-value pair. 5 | """ 6 | # do sanity checks 7 | if key_name not in data or value_name not in data: 8 | return data, False 9 | # combine values 10 | data[data.pop(key_name)] = data.pop(value_name) 11 | 12 | return data, None 13 | -------------------------------------------------------------------------------- /ttp/group/lookup.py: -------------------------------------------------------------------------------- 1 | import logging 2 | 3 | log = logging.getLogger(__name__) 4 | 5 | 6 | def lookup( 7 | data, 8 | key, 9 | name=None, 10 | template=None, 11 | group=None, 12 | add_field=False, 13 | replace=True, 14 | update=False, 15 | ): 16 | found_value = None 17 | lookup_data = {} 18 | # try get lookup dictionary/data from lookup tags: 19 | if name: 20 | path = [i.strip() for i in name.split(".")] 21 | lookup_data = _ttp_["parser_object"].lookups 22 | # get lookup data from template results 23 | elif template: 24 | path = [i.strip() for i in template.split(".")] 25 | for template in _ttp_["ttp_object"]._templates: 26 | if template.name == path[0]: 27 | # use first input results in the template: 28 | if isinstance(template.results, list): 29 | lookup_data = template.results[0] 30 | # if not list its dictionary, per_template results mode used 31 | else: 32 | lookup_data = template.results 33 | path = path[1:] 34 | break 35 | # get lookup data from group results 36 | elif group: 37 | path = [i.strip() for i in group.split(".")] 38 | # look for group that parses same input 39 | if path[0] in _ttp_["parser_object"].main_results: 40 | lookup_data = _ttp_["parser_object"].main_results[path[0]] 41 | # try to find results for group that parsed different input 42 | else: 43 | for result in _ttp_["template_obj"].results: 44 | if path[0] in result: 45 | lookup_data = result[path[0]] 46 | break 47 | path = path[1:] 48 | else: 49 | log.info("ttp.group.lookup no lookup data found, doing nothing.") 50 | return data, None 51 | # traverse to lookup data 52 | for i in path: 53 | lookup_data = lookup_data.get(i, {}) 54 | # perform lookup: 55 | try: 56 | if isinstance(lookup_data, dict): 57 | found_value = lookup_data[data[key]] 58 | except KeyError: 59 | return data, None 60 | # decide what action to do with found value 61 | if add_field: 62 | try: 63 | data[add_field] = found_value 64 | except: 65 | log.error("ttp.group.lookup failed to add new field '{}'".format(add_field)) 66 | elif update is True and isinstance(found_value, dict): 67 | data.update(found_value) 68 | elif replace: 69 | data[key] = found_value 70 | else: 71 | log.warning( 72 | "ttp.group.lookup nothing done, make sure action directives are correct" 73 | ) 74 | return data, None 75 | -------------------------------------------------------------------------------- /ttp/group/macro.py: -------------------------------------------------------------------------------- 1 | def macro(data, *macro): 2 | result = data 3 | # extract macro names 4 | macro_names_list = [i.strip() for item in macro for i in item.split(",")] 5 | # run macro 6 | for macro_item in macro_names_list: 7 | if macro_item in _ttp_["macro"]: 8 | res = _ttp_["macro"][macro_item](result) 9 | if res is False: 10 | return result, False 11 | elif res in [True, None]: 12 | continue 13 | else: 14 | result = res 15 | return result, True 16 | -------------------------------------------------------------------------------- /ttp/group/record.py: -------------------------------------------------------------------------------- 1 | def record(data, source, target="_use_source_"): 2 | if source in data: 3 | # get source var value: 4 | source_var_value = data[source] 5 | # get target var name: 6 | if target == "_use_source_": 7 | target = source 8 | # record variable: 9 | _ttp_["vars"].update({target: source_var_value}) 10 | _ttp_["global_vars"].update({target: source_var_value}) 11 | return data, None 12 | -------------------------------------------------------------------------------- /ttp/group/set_.py: -------------------------------------------------------------------------------- 1 | _name_map_ = {"set_func": "set"} 2 | 3 | 4 | def set_func(data, source, target="_use_source_", default="_no_default_value_"): 5 | # source - name of source variable to retrieve value 6 | # target - name of variable to save into 7 | if source in _ttp_["vars"]: 8 | source_var_value = _ttp_["vars"][source] 9 | elif source in _ttp_["global_vars"]: 10 | source_var_value = _ttp_["global_vars"][source] 11 | elif default != "_no_default_value_": 12 | source_var_value = default 13 | else: 14 | source_var_value = source 15 | # get target var name: 16 | if target == "_use_source_": 17 | target = source 18 | data.update({target: source_var_value}) 19 | return data, None 20 | -------------------------------------------------------------------------------- /ttp/group/sformat.py: -------------------------------------------------------------------------------- 1 | def sformat(data, string, add_field): 2 | """Function to format string with group match results. 3 | 4 | **Arguments** 5 | 6 | * data - match results data 7 | * string - string to format 8 | * add_field - name of the key to assign formatting results to 9 | """ 10 | try: 11 | data[add_field] = string.format(**data) 12 | except ( 13 | KeyError 14 | ): # KeyError happens when not enough keys in **data supplied to format method 15 | kwargs = _ttp_["global_vars"].copy() 16 | kwargs.update(_ttp_["vars"]) 17 | kwargs.update(data) 18 | try: 19 | data[add_field] = string.format(**kwargs) 20 | except KeyError: 21 | pass 22 | return data, True 23 | -------------------------------------------------------------------------------- /ttp/group/to_converters.py: -------------------------------------------------------------------------------- 1 | import logging 2 | 3 | log = logging.getLogger(__name__) 4 | 5 | 6 | def str_to_unicode(data): 7 | if _ttp_["python_major_version"] == 3: # pylint: disable=undefined-variable 8 | return data, None 9 | for key, value in data.items(): 10 | if isinstance(value, str): 11 | data[key] = unicode(value) # pylint: disable=undefined-variable 12 | return data, None 13 | 14 | 15 | def to_int(data, *keys, intlist=False): 16 | if not keys: 17 | keys = list(data.keys()) 18 | for k in keys: 19 | # check if given key exists 20 | try: 21 | v = data[k] 22 | except KeyError: 23 | continue 24 | 25 | # do best effort string to int conversion 26 | try: 27 | data[k] = int(v) 28 | except: 29 | try: 30 | data[k] = float(v) 31 | except: 32 | pass 33 | 34 | # convert list of integer strings to list of integers 35 | if intlist is True and isinstance(data[k], list): 36 | converted_list = [] 37 | for i in data[k]: 38 | try: 39 | converted_list.append(int(i)) 40 | except: 41 | try: 42 | converted_list.append(float(i)) 43 | except: 44 | converted_list.append(i) 45 | 46 | data[k] = converted_list 47 | 48 | return data, None 49 | -------------------------------------------------------------------------------- /ttp/group/to_ip.py: -------------------------------------------------------------------------------- 1 | def to_ip(data, ip_key, mask_key): 2 | """This method takes ip_key and mask_key and tries to 3 | convert them into ip object 4 | """ 5 | if ip_key in data and mask_key in data: 6 | ip_string = "{}/{}".format(data[ip_key], data[mask_key]) 7 | try: 8 | data[ip_key] = _ttp_["match"]["to_ip"](ip_string)[0] 9 | except: 10 | pass 11 | return data, None 12 | -------------------------------------------------------------------------------- /ttp/group/validate_cerberus.py: -------------------------------------------------------------------------------- 1 | import logging 2 | 3 | log = logging.getLogger(__name__) 4 | 5 | try: 6 | from cerberus import Validator 7 | 8 | HAS_LIBS = True 9 | except ImportError: 10 | log.error( 11 | "ttp.cerberus, failed to import Cerberus library, make sure it is installed" 12 | ) 13 | HAS_LIBS = False 14 | 15 | if HAS_LIBS: 16 | validator_engine = Validator() 17 | 18 | _name_map_ = {"cerberus_validate": "cerberus"} 19 | 20 | 21 | def cerberus_validate( 22 | data, schema, log_errors=False, allow_unknown=True, add_errors=False 23 | ): 24 | """Function to validate data using validation libraries, such as Cerberus.""" 25 | if not HAS_LIBS: 26 | return data, None 27 | # get validation schema 28 | schema_data = _ttp_["vars"].get(schema, None) 29 | if not schema_data: 30 | log.error("ttp.validate, schema '{}' not found".format(schema)) 31 | return data, None 32 | # run validation 33 | validator_engine.allow_unknown = allow_unknown 34 | ret = validator_engine.validate(document=data, schema=schema_data) 35 | if ret == False: 36 | if log_errors: 37 | log.warning( 38 | "ttp.validate, data: '{}', Cerberus validation errors: {}".format( 39 | data, str(validator_engine.errors) 40 | ) 41 | ) 42 | if add_errors: 43 | data["validation_errors"] = validator_engine.errors 44 | return data, None 45 | return data, ret 46 | 47 | 48 | def validate(data, schema, result="valid", info="", errors="", allow_unknown=True): 49 | """Function to validate data using Cerberus validation library and 50 | updated data with this dictionary 51 | { 52 | result field: True|False 53 | info: user defined information string 54 | errors field: validation errors 55 | } 56 | Args:: 57 | * schema - schema template variable name 58 | * result - name of the field to assign validation result 59 | * info - string, contain additional information about test, 60 | will be formatted using .format(data) 61 | * errors - name of the field to assign validation errors 62 | * allow_unknown - informs cerberus to ignore uncknown keys 63 | """ 64 | if not HAS_LIBS: 65 | return data, None 66 | # get validation schema 67 | schema_data = _ttp_["vars"].get(schema, None) 68 | if not schema_data: 69 | log.error("ttp.validate, schema '{}' not found".format(schema)) 70 | return data, None 71 | # run validation 72 | validator_engine.allow_unknown = allow_unknown 73 | ret = validator_engine.validate(document=data, schema=schema_data) 74 | # form results 75 | data[result] = ret 76 | # add validation errors if requested to do so 77 | if info: 78 | data, _ = _ttp_["group"]["sformat"](data, string=info, add_field="info") 79 | if errors: 80 | data[errors] = validator_engine.errors 81 | return data, None 82 | -------------------------------------------------------------------------------- /ttp/group/void.py: -------------------------------------------------------------------------------- 1 | def void(data): 2 | # function to invalidate results 3 | return data, False 4 | -------------------------------------------------------------------------------- /ttp/input/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dmulyalin/ttp/d6476d387ca2078e0306c2546148850d3ceb246c/ttp/input/__init__.py -------------------------------------------------------------------------------- /ttp/input/commands.py: -------------------------------------------------------------------------------- 1 | from re import search 2 | 3 | 4 | def extract_commands(data, *commands): 5 | """Input function to find commands output in the "data" text""" 6 | ret = "" 7 | hostname = _ttp_["variable"]["gethostname"](data, "input find_command function") 8 | if hostname: 9 | for command in commands: 10 | regex = r"{}[#>] *{} *\n([\S\s]+?)(?={}[#>]|$)".format( 11 | hostname, command, hostname 12 | ) 13 | match = search(regex, data) 14 | if match: 15 | ret += "\n{}\n".format(match.group()) 16 | if ret: 17 | return ret, None 18 | return data, None 19 | -------------------------------------------------------------------------------- /ttp/input/macro.py: -------------------------------------------------------------------------------- 1 | def macro(data, macro_name): 2 | result = None 3 | if macro_name in _ttp_["macro"]: 4 | result = _ttp_["macro"][macro_name](data) 5 | # process macro result 6 | if result is True: 7 | return data, True 8 | elif result is False: 9 | return data, False 10 | elif result is None: 11 | return data, None 12 | return result, None 13 | -------------------------------------------------------------------------------- /ttp/input/test.py: -------------------------------------------------------------------------------- 1 | def test(data): 2 | print("Running input test function, data length {} symbols".format(len(data))) 3 | return data, None 4 | -------------------------------------------------------------------------------- /ttp/lookup/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dmulyalin/ttp/d6476d387ca2078e0306c2546148850d3ceb246c/ttp/lookup/__init__.py -------------------------------------------------------------------------------- /ttp/lookup/geoip2.py: -------------------------------------------------------------------------------- 1 | import logging 2 | 3 | log = logging.getLogger(__name__) 4 | 5 | try: 6 | import geoip2.database 7 | 8 | HAS_LIBS = True 9 | except ImportError: 10 | log.error( 11 | "ttp.lookup.geoip2: failed to import geoip2 library, install: pip install geoip2" 12 | ) 13 | HAS_LIBS = False 14 | 15 | 16 | def geoip2_db_loader(lookup_tag_data): 17 | """ 18 | Function takes lookup_tag_data python dictionary, loads 19 | geoip2 moduleand creates reader objects to lookup data. 20 | 21 | lookup_tag_data:: 22 | { 23 | 'city': './path/to/GeoLite2-City.mmdb', 24 | 'asn': './path/to/GeoLite2-ASN.mmdb', 25 | 'country': './path/to/GeoLite2-Country.mmdb' 26 | } 27 | 28 | Returns deictionary of:: 29 | { 30 | "city": GeoLite2-City.mmdb reader object, 31 | "ASN": GeoLite2-ASN.mmdb rteader object, 32 | "country": GeoLite2-Country.mmdb reader object 33 | } 34 | """ 35 | ret = {"city": None, "asn": None, "country": None} 36 | if HAS_LIBS: 37 | for dbname, path_to_db in lookup_tag_data.items(): 38 | if dbname.lower() in ret: 39 | _ = ret.pop(dbname.lower()) 40 | try: 41 | ret[dbname] = geoip2.database.Reader(path_to_db) 42 | except: 43 | log.error( 44 | "ttp.lookup.geoip2: something went wrong, failed to load '{}' mmdb from '{}' path".format( 45 | dbname, path_to_db 46 | ) 47 | ) 48 | return ret 49 | -------------------------------------------------------------------------------- /ttp/match/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dmulyalin/ttp/d6476d387ca2078e0306c2546148850d3ceb246c/ttp/match/__init__.py -------------------------------------------------------------------------------- /ttp/match/copy_.py: -------------------------------------------------------------------------------- 1 | from copy import deepcopy 2 | 3 | _name_map_ = {"copy_func": "copy"} 4 | 5 | 6 | def copy_func(data, name): 7 | return data, {"new_field": {name: deepcopy(data)}} 8 | -------------------------------------------------------------------------------- /ttp/match/count.py: -------------------------------------------------------------------------------- 1 | def count(data, var=None, globvar=None): 2 | if var: 3 | try: 4 | _ttp_["vars"][var] += 1 5 | except KeyError: 6 | _ttp_["vars"][var] = 1 7 | 8 | if globvar: 9 | try: 10 | _ttp_["global_vars"][globvar] += 1 11 | except KeyError: 12 | _ttp_["global_vars"][globvar] = 1 13 | 14 | return data, None 15 | -------------------------------------------------------------------------------- /ttp/match/dns_lookups.py: -------------------------------------------------------------------------------- 1 | import dns.resolver as dnspython 2 | import dns.reversename 3 | 4 | _name_map_ = {"dns_forward": "dns", "dns_reverse": "rdns"} 5 | 6 | dns_resolver_obj = dnspython.dns.resolver.Resolver() 7 | 8 | 9 | def dns_forward(data, record="A", timeout=1, servers=None, add_field=False): 10 | """Performs forward dns lookup using dns_resolver_obj global object""" 11 | servers = servers or [] 12 | dns_resolver_obj.timeout = timeout 13 | if servers: 14 | if isinstance(servers, str): 15 | servers = [i.strip() for i in servers.split(",")] 16 | dns_resolver_obj.nameservers = sorted(servers) 17 | dns_resolver_obj.lifetime = timeout * len(dns_resolver_obj.nameservers) 18 | try: 19 | dns_records = [i.to_text() for i in dns_resolver_obj.query(data, record)] 20 | if add_field and isinstance(add_field, str): 21 | return data, {"new_field": {add_field: dns_records}} 22 | else: 23 | return dns_records, None 24 | except dnspython.dns.resolver.NXDOMAIN: 25 | pass 26 | except dnspython.dns.resolver.NoAnswer: 27 | pass 28 | except dnspython.dns.exception.Timeout: 29 | # re-initialize dns_resolver object, as it will fail to resolve names for 30 | # whatever reason after it timeouts 31 | globals()["dns_resolver_obj"] = dnspython.dns.resolver.Resolver() 32 | return data, None 33 | 34 | 35 | def dns_reverse(data, timeout=1, servers=None, add_field=False): 36 | """Performs reverse dns lookup using global dns_resolver_obj 37 | data - ip address string, e.g. 192.168.0.1 38 | """ 39 | servers = servers or [] 40 | dns_resolver_obj.timeout = timeout 41 | if servers: 42 | if isinstance(servers, str): 43 | servers = [i.strip() for i in servers.split(",")] 44 | dns_resolver_obj.nameservers = servers 45 | dns_resolver_obj.lifetime = timeout * len(dns_resolver_obj.nameservers) 46 | rev_name = dns.reversename.from_address(data) 47 | try: 48 | reverse_record = str(dns_resolver_obj.query(rev_name, "PTR")[0]).rstrip(".") 49 | if add_field and isinstance(add_field, str): 50 | return data, {"new_field": {add_field: reverse_record}} 51 | else: 52 | return reverse_record, None 53 | except dnspython.dns.resolver.NXDOMAIN: 54 | pass 55 | except dnspython.dns.resolver.NoAnswer: 56 | pass 57 | except dnspython.dns.exception.Timeout: 58 | globals()["dns_resolver_obj"] = dnspython.dns.resolver.Resolver() 59 | return data, None 60 | -------------------------------------------------------------------------------- /ttp/match/geoip_lookup.py: -------------------------------------------------------------------------------- 1 | import logging 2 | 3 | log = logging.getLogger(__name__) 4 | 5 | 6 | def geoip_lookup(data, db_name, add_field="geoip_lookup"): 7 | lookup_data = _ttp_["parser_object"].lookups 8 | path = [i.strip() for i in db_name.split(".")] 9 | # get reference to geoip2 reader object 10 | for i in path: 11 | lookup_data = lookup_data.get(i, {}) 12 | if not lookup_data: 13 | return data, None 14 | found_value = None 15 | # lookup country 16 | if path[-1].lower() == "country": 17 | try: 18 | found_value = lookup_data.country(data) 19 | found_value = { 20 | "continent": found_value.continent.names.get("en", ""), 21 | "continent_code": found_value.continent.code, 22 | "country_iso_code": found_value.country.iso_code, 23 | "country": found_value.country.names.get("en", ""), 24 | "network": found_value.traits.network.with_prefixlen, 25 | } 26 | except: 27 | log.error( 28 | "ttp.match.geoip_lookup: something went wrong searching for '{}' IP in database '{}'".format( 29 | data, db_name 30 | ) 31 | ) 32 | # lookup city 33 | elif path[-1].lower() == "city": 34 | try: 35 | found_value = lookup_data.city(data) 36 | found_value = { 37 | "city": found_value.city.name, 38 | "continent": found_value.continent.names.get("en", ""), 39 | "country_iso_code": found_value.country.iso_code, 40 | "country": found_value.country.names.get("en", ""), 41 | "latitude": found_value.location.latitude, 42 | "longitude": found_value.location.longitude, 43 | "accuracy_radius": found_value.location.accuracy_radius, 44 | "postal_code": found_value.postal.code, 45 | "state": found_value.subdivisions.most_specific.name, 46 | "state_iso_code": found_value.subdivisions.most_specific.iso_code, 47 | "network": found_value.traits.network.with_prefixlen, 48 | } 49 | except: 50 | log.error( 51 | "ttp.match.geoip_lookup: something went wrong searching for '{}' IP in database '{}'".format( 52 | data, db_name 53 | ) 54 | ) 55 | # lookup asn 56 | elif path[-1].lower() == "asn": 57 | try: 58 | found_value = lookup_data.asn(data) 59 | found_value = { 60 | "ASN": found_value.autonomous_system_number, 61 | "organization": found_value.autonomous_system_organization, 62 | "network": found_value.network.with_prefixlen, 63 | } 64 | except: 65 | log.error( 66 | "ttp.match.geoip_lookup: something went wrong searching for '{}' IP in database '{}'".format( 67 | data, db_name 68 | ) 69 | ) 70 | # return data 71 | if found_value: 72 | return data, {"new_field": {add_field: found_value}} 73 | return data, None 74 | -------------------------------------------------------------------------------- /ttp/match/item.py: -------------------------------------------------------------------------------- 1 | def item(data, item_index): 2 | """Method to return item of iterable at given index""" 3 | item_index = int(item_index) 4 | # item_index not out of range 5 | if 0 <= item_index and item_index <= len(data) - 1: 6 | return data[item_index], None 7 | # item_index out of right range - return last item 8 | elif 0 <= item_index and item_index >= len(data) - 1: 9 | return data[-1], None 10 | # negative item_index not out of range 11 | elif 0 >= item_index and abs(item_index) <= len(data) - 1: 12 | return data[item_index], None 13 | # negative item_index out of range - return first item 14 | elif 0 >= item_index and abs(item_index) >= len(data): 15 | return data[0], None 16 | -------------------------------------------------------------------------------- /ttp/match/joinmatches.py: -------------------------------------------------------------------------------- 1 | def joinmatches(data, *args, **kwargs): 2 | return data, None 3 | -------------------------------------------------------------------------------- /ttp/match/let.py: -------------------------------------------------------------------------------- 1 | def let(data, name_or_value, var_value="__Undefined_Var_Value__"): 2 | if var_value == "__Undefined_Var_Value__": 3 | data = name_or_value 4 | return data, None 5 | else: 6 | return data, {"new_field": {name_or_value: var_value}} 7 | -------------------------------------------------------------------------------- /ttp/match/lookup.py: -------------------------------------------------------------------------------- 1 | import logging 2 | 3 | log = logging.getLogger(__name__) 4 | 5 | 6 | def lookup(data, name=None, template=None, group=None, add_field=False): 7 | found_value = None 8 | lookup_data = {} 9 | # try get lookup dictionary/data from lookup tags: 10 | if name: 11 | path = [i.strip() for i in name.split(".")] 12 | lookup_data = _ttp_["parser_object"].lookups 13 | # get lookup data from template results 14 | elif template: 15 | path = [i.strip() for i in template.split(".")] 16 | for template in _ttp_["ttp_object"]._templates: 17 | if template.name == path[0]: 18 | # use first input results in the template: 19 | if isinstance(template.results, list): 20 | lookup_data = template.results[0] 21 | # if not list its dictionary, per_template results mode used 22 | else: 23 | lookup_data = template.results 24 | path = path[1:] 25 | break 26 | # get lookup data from group results 27 | elif group: 28 | path = [i.strip() for i in group.split(".")] 29 | for result in _ttp_["template_obj"].results: 30 | if path[0] in result: 31 | lookup_data = result[path[0]] 32 | break 33 | path = path[1:] 34 | else: 35 | log.info("ttp.lookup no lookup data name provided, doing nothing.") 36 | return data, None 37 | for i in path: 38 | lookup_data = lookup_data.get(i, {}) 39 | # perform lookup: 40 | try: 41 | found_value = lookup_data[data] 42 | except KeyError: 43 | return data, None 44 | # decide to replace match result or add new field: 45 | if add_field is not False: 46 | return data, {"new_field": {add_field: found_value}} 47 | else: 48 | return found_value, None 49 | 50 | 51 | def rlookup(data, name, add_field=False): 52 | path = [i.strip() for i in name.split(".")] 53 | found_value = None 54 | # get lookup dictionary/data: 55 | try: 56 | rlookup = _ttp_["parser_object"].lookups 57 | for i in path: 58 | rlookup = rlookup.get(i, {}) 59 | except KeyError: 60 | return data, None 61 | # perfrom rlookup: 62 | if isinstance(rlookup, dict) is False: 63 | return data, None 64 | for key in rlookup.keys(): 65 | if key in data: 66 | found_value = rlookup[key] 67 | break 68 | # decide to replace match result or add new field: 69 | if found_value is None: 70 | return data, None 71 | elif add_field is not False: 72 | return data, {"new_field": {add_field: found_value}} 73 | else: 74 | return found_value, None 75 | 76 | 77 | def gpvlookup(data, name, add_field=False, record=False, multimatch=False): 78 | path = [i.strip() for i in name.split(".")] 79 | found_value = [] 80 | # get lookup dictionary/data: 81 | try: 82 | lookup_data = _ttp_["parser_object"].lookups 83 | for i in path: 84 | lookup_data = lookup_data.get(i, {}) 85 | except KeyError: 86 | log.error("gpvlookup: lookup data not found") 87 | return data, None 88 | # perform glob pattern values lookup 89 | if not isinstance(lookup_data, dict): 90 | log.error( 91 | "gpvlookup: lookup data is not dictionary - {}".format(type(lookup_data)) 92 | ) 93 | return data, None 94 | # import library 95 | from fnmatch import fnmatch 96 | 97 | # find first match and stop 98 | if multimatch is False: 99 | for key, patterns in lookup_data.items(): 100 | for pattern in patterns: 101 | if fnmatch(data, pattern): 102 | found_value.append(key) 103 | break 104 | if found_value: 105 | break 106 | # iterate over all patterns and collect all matches 107 | elif multimatch is True: 108 | for key, patterns in lookup_data.items(): 109 | found_value += [key for pattern in patterns if fnmatch(data, pattern)] 110 | # record found_value if told to do so: 111 | if record is not False: 112 | _ttp_["vars"].update({record: found_value}) 113 | _ttp_["global_vars"].update({record: found_value}) 114 | # decide to replace match result or add new field: 115 | if not found_value: 116 | return data, None 117 | elif add_field is not False: 118 | return data, {"new_field": {add_field: found_value}} 119 | else: 120 | return found_value, None 121 | -------------------------------------------------------------------------------- /ttp/match/mac_eui.py: -------------------------------------------------------------------------------- 1 | """ 2 | Function to convert MAC address onti EUI style format. 3 | 4 | Creds to https://stackoverflow.com/a/29446103 unswers on stackoeverflow 5 | and NAPALM base helpers module 6 | """ 7 | 8 | 9 | from re import sub 10 | 11 | 12 | def mac_eui(data): 13 | mac = str(data) 14 | # remove delimiters and convert to lower case 15 | mac = sub("[.:-]", "", mac).lower() 16 | # mac should only contain letters and numbers, also 17 | # if length now not 12 (eg. 008041aefd7e), staff up to 18 | # 12 with "0" - can happen with some vendors 19 | if mac.isalnum(): 20 | if not len(mac) == 12: 21 | mac += "0" * (12 - len(mac)) 22 | else: 23 | return data, None 24 | # convert mac in canonical form (eg. 00:80:41:ae:fd:7e) 25 | mac = ":".join([mac[i : i + 2] for i, j in enumerate(mac) if not (i % 2)]) 26 | return mac, None 27 | -------------------------------------------------------------------------------- /ttp/match/macro.py: -------------------------------------------------------------------------------- 1 | def macro(data, macro_name): 2 | result = None 3 | if macro_name in _ttp_["macro"]: 4 | result = _ttp_["macro"][macro_name](data) 5 | # process macro result 6 | if result is True: 7 | return data, True 8 | elif result is False: 9 | return data, False 10 | elif result is None: 11 | return data, None 12 | elif isinstance(result, tuple): 13 | if len(result) == 2: 14 | if isinstance(result[1], dict): 15 | return result[0], {"new_field": result[1]} 16 | return result, None 17 | -------------------------------------------------------------------------------- /ttp/match/raise_.py: -------------------------------------------------------------------------------- 1 | _name_map_ = {"raise_func": "raise"} 2 | 3 | 4 | def raise_func(data, message=""): 5 | raise RuntimeError(message) 6 | -------------------------------------------------------------------------------- /ttp/match/re_.py: -------------------------------------------------------------------------------- 1 | import re 2 | 3 | 4 | def startswith_re(data, pattern): 5 | if re.search("^{}".format(pattern), data): 6 | return data, True 7 | return data, False 8 | 9 | 10 | def endswith_re(data, pattern): 11 | if re.search("{}$".format(pattern), data): 12 | return data, True 13 | return data, False 14 | 15 | 16 | def contains_re(data, pattern): 17 | if re.search(pattern, data): 18 | return data, True 19 | return data, False 20 | 21 | 22 | def notstartswith_re(data, pattern): 23 | if not re.search("^{}".format(pattern), data): 24 | return data, True 25 | return data, False 26 | 27 | 28 | def notendswith_re(data, pattern): 29 | if not re.search("{}$".format(pattern), data): 30 | return data, True 31 | return data, False 32 | 33 | 34 | def exclude_re(data, pattern): 35 | if not re.search(pattern, data): 36 | return data, True 37 | return data, False 38 | 39 | 40 | def resub(data, old, new, count=1): 41 | vars = _ttp_["vars"] 42 | if old in vars: 43 | return re.sub(re.escape(vars[old]), new, data, count=count), None 44 | return re.sub(old, new, data, count=count), None 45 | 46 | 47 | def resuball(data, *args): 48 | vars = _ttp_["vars"] 49 | args = list(args) 50 | new = "" 51 | if len(args) > 1: 52 | new = args.pop(0) 53 | for oldValue in args: 54 | if oldValue in vars: 55 | if isinstance(vars[oldValue], list): 56 | for oldVal in vars[oldValue]: 57 | if isinstance(oldVal, str): 58 | data = re.sub(oldVal, new, data) 59 | elif isinstance(vars[oldValue], dict): 60 | for newVal, oldVal in vars[oldValue].items(): 61 | if isinstance(oldVal, list): 62 | for i in oldVal: 63 | if isinstance(i, str): 64 | data = re.sub(i, newVal, data) 65 | elif isinstance(oldVal, str): 66 | data = re.sub(oldVal, newVal, data) 67 | elif isinstance(vars[oldValue], str): 68 | data = re.sub(re.escape(vars[oldValue]), new, data) 69 | else: 70 | data = re.sub(oldValue, new, data) 71 | return data, None 72 | -------------------------------------------------------------------------------- /ttp/match/record.py: -------------------------------------------------------------------------------- 1 | def record(data, record): 2 | _ttp_["vars"].update({record: data}) 3 | _ttp_["global_vars"].update({record: data}) 4 | return data, None 5 | -------------------------------------------------------------------------------- /ttp/match/set_.py: -------------------------------------------------------------------------------- 1 | _name_map_ = {"set_func": "set"} 2 | 3 | 4 | def set_func(data, value, match_line): 5 | vars = _ttp_["vars"] 6 | if data.rstrip() == match_line: 7 | if isinstance(value, str): 8 | if value in vars: 9 | return vars[value], None 10 | elif value in _ttp_["global_vars"]: 11 | return _ttp_["global_vars"][value], None 12 | return value, None 13 | else: 14 | return data, False 15 | -------------------------------------------------------------------------------- /ttp/match/string.py: -------------------------------------------------------------------------------- 1 | _name_map_ = {"sprint": "print"} 2 | 3 | 4 | def exclude(data, pattern): 5 | if pattern not in data: 6 | return data, True 7 | return data, False 8 | 9 | 10 | def equal(data, value): 11 | return data, data == value 12 | 13 | 14 | def notequal(data, value): 15 | return data, data != value 16 | 17 | 18 | def contains(data, *patterns): 19 | for pattern in patterns: 20 | if pattern in data: 21 | return data, True 22 | return data, False 23 | 24 | 25 | def sformat(data, string): 26 | ret = string.format(data) 27 | return ret, None 28 | 29 | 30 | def isdigit(data): 31 | return data, data.strip().isdigit() 32 | 33 | 34 | def notdigit(data): 35 | if not data.strip().isdigit(): 36 | return data, True 37 | return data, False 38 | 39 | 40 | def greaterthan(data, value): 41 | if data.strip().isdigit() and value.strip().isdigit(): 42 | if int(data.strip()) > int(value.strip()): 43 | return data, True 44 | return data, False 45 | 46 | 47 | def lessthan(data, value): 48 | if data.strip().isdigit() and value.strip().isdigit(): 49 | if int(data.strip()) < int(value.strip()): 50 | return data, True 51 | return data, False 52 | 53 | 54 | def join(data, char): 55 | if isinstance(data, list): 56 | return char.join(data), None 57 | else: 58 | return data, None 59 | 60 | 61 | def append(data, char): 62 | # try to get char from global variables 63 | char_value = _ttp_["global_vars"].get(char, char) 64 | # try to get from input specific variables 65 | char_value = _ttp_["vars"].get(char, char) 66 | if isinstance(data, str): 67 | return (data + char_value), None 68 | elif isinstance(data, list): 69 | data.append(char_value) 70 | return data, None 71 | else: 72 | return data, None 73 | 74 | 75 | def prepend(data, char): 76 | # try to get char from global variables 77 | char_value = _ttp_["global_vars"].get(char, char) 78 | # try to get from input specific variables 79 | char_value = _ttp_["vars"].get(char, char) 80 | if isinstance(data, str): 81 | return (char_value + data), None 82 | elif isinstance(data, list): 83 | data.insert(0, char_value) 84 | return data, None 85 | else: 86 | return data, None 87 | 88 | 89 | def sprint(data): 90 | print(data) 91 | return data, None 92 | 93 | 94 | def replaceall(data, *args): 95 | vars_ = _ttp_["vars"] 96 | args = list(args) 97 | new = "" 98 | if len(args) > 1: 99 | new = args.pop(0) 100 | for oldValue in args: 101 | if oldValue in vars_: 102 | if isinstance(vars_[oldValue], list): 103 | for oldVal in vars_[oldValue]: 104 | if isinstance(oldVal, str): 105 | data = data.replace(oldVal, new) 106 | elif isinstance(vars_[oldValue], dict): 107 | for newVal, oldVal in vars_[oldValue].items(): 108 | if isinstance(oldVal, list): 109 | for i in oldVal: 110 | if isinstance(i, str): 111 | data = data.replace(i, newVal) 112 | elif isinstance(oldVal, str): 113 | data = data.replace(oldVal, newVal) 114 | else: 115 | data = data.replace(oldValue, new) 116 | return data, None 117 | 118 | 119 | def truncate(data, truncate): 120 | d_split = data.split(" ") 121 | if len(d_split) >= truncate: 122 | data = " ".join(d_split[0:truncate]) 123 | return data, None 124 | -------------------------------------------------------------------------------- /ttp/match/to.py: -------------------------------------------------------------------------------- 1 | import logging 2 | 3 | log = logging.getLogger(__name__) 4 | 5 | 6 | def to_str(data): 7 | return str(data), None 8 | 9 | 10 | def to_list(data): 11 | return [data], None 12 | 13 | 14 | def to_int(data): 15 | try: 16 | return int(data), None 17 | except ValueError: 18 | log.error( 19 | "ttp.to_int: ValueError, failed to convert value '{}' to integer".format( 20 | data 21 | ) 22 | ) 23 | return data, None 24 | 25 | 26 | def to_float(data): 27 | try: 28 | return float(data), None 29 | except TypeError: 30 | log.error( 31 | "ttp.to_int: TypeError, failed to convert value '{}' to float".format(data) 32 | ) 33 | return data, None 34 | 35 | 36 | def to_unicode(data): 37 | if _ttp_["python_major_version"] == 2: 38 | try: 39 | return unicode(data), None # pylint: disable=undefined-variable 40 | except: 41 | log.error( 42 | "ttp.to_int: failed to convert value '{}' to unicode string".format( 43 | data 44 | ) 45 | ) 46 | return data, None 47 | -------------------------------------------------------------------------------- /ttp/match/unrange.py: -------------------------------------------------------------------------------- 1 | import logging 2 | 3 | log = logging.getLogger(__name__) 4 | 5 | 6 | def unrange(data, rangechar, joinchar): 7 | """ 8 | data - string, e.g. '8,10-13,20' 9 | rangechar - e.g. '-' for above string 10 | joinchar - e.g.',' for above string 11 | returns - e.g. '8,10,11,12,13,20 string 12 | """ 13 | result = [] 14 | # check if range char actually in data: 15 | if rangechar not in data: 16 | return data, None 17 | 18 | for item in data.split(rangechar): 19 | # form split list checking that i is not empty 20 | item_split = [i for i in item.split(joinchar) if i] 21 | if result: 22 | start_int = int(result[-1]) 23 | try: 24 | end_int = int(item_split[0]) 25 | except ValueError as e: 26 | log.error( 27 | "ttp.match.unrange: Unrange failed, data '{}', rangechar '{}', joinchar '{}', error: {}".format( 28 | data, rangechar, joinchar, e 29 | ) 30 | ) 31 | return data, None 32 | list_of_ints_range = [str(i) for i in list(range(start_int, end_int))] 33 | result += list_of_ints_range[1:] + item_split 34 | else: 35 | result = item_split 36 | data = joinchar.join(result) 37 | return data, None 38 | -------------------------------------------------------------------------------- /ttp/match/uptimeparse.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | This function took most of the code from 'parsetime' python library: 4 | https://pypi.org/project/python-parsetime/ 5 | 6 | but code was stripped to provide matching for required time formats only 7 | """ 8 | 9 | import re 10 | 11 | YEARS = r"(?P\d+)\s*(?:ys?|yrs?.?|years?)" 12 | MONTHS = r"(?P\d+)\s*(?:mos?.?|mths?.?|months?)" 13 | WEEKS = r"(?P[\d.]+)\s*(?:w|wks?|weeks?)" 14 | DAYS = r"(?P[\d.]+)\s*(?:d|dys?|days?)" 15 | HOURS = r"(?P[\d.]+)\s*(?:h|hrs?|hours?)" 16 | MINS = r"(?P[\d.]+)\s*(?:m|(mins?)|(minutes?))" 17 | SECS = r"(?P[\d.]+)\s*(?:s|secs?|seconds?)" 18 | SEPARATORS = r"[,/]" 19 | 20 | OPT = lambda x: r"(?:{x})?".format(x=x) 21 | OPTSEP = lambda x: r"(?:{x}\s*(?:{SEPARATORS}\s*)?)?".format(x=x, SEPARATORS=SEPARATORS) 22 | 23 | TIMEFORMATS = [ 24 | r"{YEARS}\s*{MONTHS}\s*{WEEKS}\s*{DAYS}\s*{HOURS}\s*{MINS}\s*{SECS}".format( 25 | YEARS=OPTSEP(YEARS), 26 | MONTHS=OPTSEP(MONTHS), 27 | WEEKS=OPTSEP(WEEKS), 28 | DAYS=OPTSEP(DAYS), 29 | HOURS=OPTSEP(HOURS), 30 | MINS=OPTSEP(MINS), 31 | SECS=OPT(SECS), 32 | ) 33 | ] 34 | 35 | COMPILED_TIMEFORMATS = [ 36 | re.compile(r"\s*" + timefmt + r"\s*$", re.I) for timefmt in TIMEFORMATS 37 | ] 38 | 39 | MULTIPLIERS = dict( 40 | [ 41 | ("years", 60 * 60 * 24 * 365), 42 | ("months", 60 * 60 * 24 * 30), 43 | ("weeks", 60 * 60 * 24 * 7), 44 | ("days", 60 * 60 * 24), 45 | ("hours", 60 * 60), 46 | ("mins", 60), 47 | ("secs", 1), 48 | ] 49 | ) 50 | 51 | 52 | def uptimeparse(data, format="seconds"): 53 | """ 54 | Parse a time expression like: 55 | 2 years, 27 weeks, 3 days, 10 hours, 46 minutes 56 | 27 weeks, 3 days, 10 hours, 48 minutes 57 | 58 | returning a number of seconds. 59 | """ 60 | for timefmt in COMPILED_TIMEFORMATS: 61 | match = timefmt.match(data) 62 | if match and match.group(0).strip(): 63 | mdict = match.groupdict() 64 | # if all of the fields are integer numbers 65 | if all(v.isdigit() for v in list(mdict.values()) if v): 66 | if format == "seconds": 67 | return ( 68 | sum( 69 | [ 70 | MULTIPLIERS[k] * int(v, 10) 71 | for (k, v) in list(mdict.items()) 72 | if v is not None 73 | ] 74 | ), 75 | None, 76 | ) 77 | elif format == "dict": 78 | return {k: v for k, v in mdict.items() if v is not None}, None 79 | break 80 | return data, None 81 | -------------------------------------------------------------------------------- /ttp/match/void.py: -------------------------------------------------------------------------------- 1 | def void(data): 2 | # function to invalidate results 3 | return data, False 4 | -------------------------------------------------------------------------------- /ttp/output/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dmulyalin/ttp/d6476d387ca2078e0306c2546148850d3ceb246c/ttp/output/__init__.py -------------------------------------------------------------------------------- /ttp/output/deepdiffer.py: -------------------------------------------------------------------------------- 1 | import logging 2 | 3 | log = logging.getLogger(__name__) 4 | 5 | try: 6 | from deepdiff import DeepDiff 7 | 8 | HAS_LIBS = True 9 | except ImportError: 10 | HAS_LIBS = False 11 | log.error( 12 | "ttp.output failed to import deepdiff library, install 'pip install deepdiff'" 13 | ) 14 | 15 | _name_map_ = {"deepdiff_func": "deepdiff"} 16 | 17 | 18 | def deepdiff_func( 19 | data, 20 | input_before=None, 21 | input_after=None, 22 | template_before=None, 23 | mode="bulk", 24 | add_field=False, 25 | var_before=None, 26 | **kwargs 27 | ): 28 | """ 29 | Function to compare two structures. 30 | 31 | * data - list of dictionaries, results data 32 | * before - name of input that contains old data 33 | * after - name of input that contains new data 34 | * add_key - name of the key to add to data instead of replacing it 35 | * kwargs - arguments supported by deepdiff DeepDiff class e.g. ignore_order or verbose_level 36 | * mode - 'bulk' or 'iterate' 37 | """ 38 | if HAS_LIBS is False: 39 | return data 40 | # get template object of this output 41 | template_obj = _ttp_["output_object"].template_obj 42 | 43 | # get data_before - data to compare with 44 | if input_before: 45 | # get inputs names to results index mapping, e.g.: 46 | # {'input_after': [3, 4], 'input_before': [0, 1], 'one_more': [2]} 47 | if template_obj.results_method.lower() == "per_input": 48 | input_to_results_index = {} 49 | counter = 0 50 | for input_name, details in template_obj.inputs.items(): 51 | data_len = len(details.data) 52 | input_to_results_index[input_name] = [ 53 | i + counter for i in range(data_len) 54 | ] 55 | counter += data_len 56 | data_before = [ 57 | data[index] for index in input_to_results_index[input_before] 58 | ] 59 | elif template_obj.results_method.lower() == "per_template": 60 | log.error( 61 | "ttp.output.deepdiff; Template 'per_template' results method not supported with input_before as a reference to source data" 62 | ) 63 | return data 64 | # if template name provided - source data from template results 65 | elif template_before: 66 | for template in _ttp_["ttp_object"]._templates: 67 | if template.name == template_before: 68 | data_before = template.results 69 | break 70 | # if need to use vars content 71 | elif var_before: 72 | data_before = template_obj.vars[var_before] 73 | 74 | # get data after - data to compare against 75 | data_after = data 76 | if input_after: 77 | data_after = [data[index] for index in input_to_results_index[input_after]] 78 | 79 | # run compare 80 | result = {} 81 | if mode == "bulk": 82 | result = DeepDiff(data_before, data_after, **kwargs) 83 | elif mode == "iterate": 84 | result = [DeepDiff(data_before[0], item, **kwargs) for item in data_after] 85 | else: 86 | log.error( 87 | "ttp.output.deepdiff; Unsupported compare mode: '{}', supported are 'bulk' or 'iterate'".format( 88 | mode 89 | ) 90 | ) 91 | return data 92 | 93 | # return results 94 | if add_field: 95 | if isinstance(data, list): 96 | data.append({add_field: result}) 97 | elif isinstance(data, dict): 98 | data[add_field] = result 99 | return data 100 | else: 101 | return result 102 | -------------------------------------------------------------------------------- /ttp/output/is_equal.py: -------------------------------------------------------------------------------- 1 | def is_equal(data): 2 | data_to_compare_with = _ttp_["output_object"].tag_load 3 | is_equal = False 4 | if "_anonymous_" in data: 5 | if data["_anonymous_"] == data_to_compare_with: 6 | is_equal = True 7 | elif data == data_to_compare_with: 8 | is_equal = True 9 | return { 10 | "output_name": _ttp_["output_object"].name, 11 | "output_description": _ttp_["output_object"].attributes.get( 12 | "description", "None" 13 | ), 14 | "is_equal": is_equal, 15 | } 16 | -------------------------------------------------------------------------------- /ttp/output/macro.py: -------------------------------------------------------------------------------- 1 | def macro(data, macro_name): 2 | result = None 3 | if macro_name in _ttp_["macro"]: 4 | result = _ttp_["macro"][macro_name](data) 5 | # process macro result 6 | if result is None: 7 | return data 8 | return result 9 | -------------------------------------------------------------------------------- /ttp/output/transform.py: -------------------------------------------------------------------------------- 1 | import logging 2 | 3 | log = logging.getLogger(__name__) 4 | 5 | 6 | def traverse(data, path, strict=True): 7 | """Method to traverse dictionary data and return element 8 | at given path. 9 | 10 | strict - if True will fail with KeyError if path item not in data 11 | will return empty dict if fails to find path item in data 12 | """ 13 | result = data 14 | # need to check path, in case if standalone function use 15 | if isinstance(path, str): 16 | path = [i.strip() for i in path.split(".")] 17 | if isinstance(data, dict): 18 | for i in path: 19 | result = result[i] if strict else result.get(i, {}) 20 | elif isinstance(data, list): 21 | result = [traverse(i, path, strict) for i in data] 22 | return result 23 | 24 | 25 | def _set_data(data, path, new_data): 26 | # not yet tested, hence not available 27 | # add new_data to data at given path or override existing results 28 | if isinstance(path, str): 29 | path = [i.strip() for i in path.split(".")] 30 | last_index = len(path) - 1 31 | datum = data 32 | for path_index, path_item in enumerate(path): 33 | if not isinstance(datum, dict): 34 | break 35 | if last_index == path_index: 36 | datum[path_item] = new_data 37 | break 38 | if path_item in datum: 39 | datum = datum[path_item] 40 | else: 41 | datum[path_item] = {} 42 | 43 | 44 | def dict_to_list(data, key_name="key", path=None, strict=False): 45 | """Flatten dictionary data, e.g. if data is this: 46 | { "Fa0" : {"admin": "administratively down"}, 47 | "Ge0/1": {"access_vlan": "24"}} 48 | and key_name="interface", it will become this list: 49 | [ {"admin": "administratively down", "interface": "Fa0"}, 50 | {"access_vlan": "24", "interface": "Ge0/1"} ] 51 | """ 52 | result = [] 53 | traversed_data = data 54 | if path: 55 | traversed_data = traverse(data, path, strict) 56 | if isinstance(traversed_data, dict): 57 | for k, v in traversed_data.items(): 58 | if not isinstance(v, dict): 59 | return traversed_data 60 | v.update({key_name: k}) 61 | result.append(v) 62 | elif isinstance(traversed_data, list): 63 | # run recusrsion 64 | result = [dict_to_list(data=item, key_name=key_name) for item in traversed_data] 65 | return result 66 | -------------------------------------------------------------------------------- /ttp/output/validate_cerberus.py: -------------------------------------------------------------------------------- 1 | import logging 2 | 3 | log = logging.getLogger(__name__) 4 | 5 | try: 6 | from cerberus import Validator 7 | 8 | HAS_LIBS = True 9 | except ImportError: 10 | log.error( 11 | "ttp.validate, failed to import Cerberus library, make sure it is installed" 12 | ) 13 | HAS_LIBS = False 14 | 15 | if HAS_LIBS: 16 | validator_engine = Validator() 17 | 18 | 19 | def _run_validation(data, schema_data, info, errors, result, validator_engine): 20 | ret = {result: validator_engine.validate(document=data, schema=schema_data)} 21 | if info: 22 | try: 23 | formatted, _ = _ttp_["group"]["sformat"](data, string=info, add_field="inf") 24 | ret["info"] = formatted["inf"] 25 | except: 26 | ret["info"] = info 27 | if errors: 28 | ret[errors] = validator_engine.errors 29 | return ret 30 | 31 | 32 | def validate(data, schema, result="valid", info="", errors="", allow_unknown=True): 33 | """Function to validate data using Cerberus validation library. 34 | Args:: 35 | * schema - schema template variable name 36 | * result - name of the field to assign validation result 37 | * info - string, contains additional information about test 38 | * errors - name of the field to assign validation errors 39 | * allow_unknown - informs cerberus to ignore uncknown keys 40 | """ 41 | if not HAS_LIBS: 42 | return data 43 | # get validation schema from template variables 44 | schema_data = _ttp_["output_object"].template_obj.vars.get(schema, None) 45 | if not schema_data: 46 | log.error("ttp.output.validate, schema '{}' not found".format(schema)) 47 | return data 48 | validator_engine.allow_unknown = allow_unknown 49 | # run validation 50 | if isinstance(data, dict): 51 | return _run_validation( 52 | data, schema_data, info, errors, result, validator_engine 53 | ) 54 | elif isinstance(data, list): 55 | return [ 56 | _run_validation(i, schema_data, info, errors, result, validator_engine) 57 | for i in data 58 | if isinstance(i, dict) 59 | ] 60 | -------------------------------------------------------------------------------- /ttp/patterns/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dmulyalin/ttp/d6476d387ca2078e0306c2546148850d3ceb246c/ttp/patterns/__init__.py -------------------------------------------------------------------------------- /ttp/patterns/get_pattern.py: -------------------------------------------------------------------------------- 1 | PHRASE = r"(\S+ {1})+?\S+" 2 | ROW = r"(\S+ +)+?\S+" 3 | ORPHRASE = r"\S+|(\S+ {1})+?\S+" 4 | DIGIT = r"\d+" 5 | IP = r"(?:[0-9]{1,3}\.){3}[0-9]{1,3}" 6 | PREFIX = r"(?:[0-9]{1,3}\.){3}[0-9]{1,3}/[0-9]{1,2}" 7 | IPV6 = r"(?:[a-fA-F0-9]{1,4}:|:){1,7}(?:[a-fA-F0-9]{1,4}|:?)" 8 | PREFIXV6 = r"(?:[a-fA-F0-9]{1,4}:|:){1,7}(?:[a-fA-F0-9]{1,4}|:?)/[0-9]{1,3}" 9 | _line_ = r".+" 10 | WORD = r"\S+" 11 | MAC = r"(?:[0-9a-fA-F]{2}(:|\.|\-)){5}([0-9a-fA-F]{2})|(?:[0-9a-fA-F]{4}(:|\.|\-)){2}([0-9a-fA-F]{4})" 12 | 13 | 14 | def get(name): 15 | try: 16 | re = globals()[name] 17 | return re 18 | except KeyError: 19 | return False 20 | -------------------------------------------------------------------------------- /ttp/returners/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dmulyalin/ttp/d6476d387ca2078e0306c2546148850d3ceb246c/ttp/returners/__init__.py -------------------------------------------------------------------------------- /ttp/returners/file_returner.py: -------------------------------------------------------------------------------- 1 | import time 2 | import logging 3 | import os 4 | 5 | _name_map_ = {"file_returner": "file"} 6 | 7 | log = logging.getLogger(__name__) 8 | ctime = time.strftime("%Y-%m-%d_%H-%M-%S") 9 | 10 | 11 | def file_returner(D, **kwargs): 12 | """Method to write data into file 13 | Args: 14 | url (str): os path there to save files 15 | filename (str): name of the file 16 | """ 17 | url = kwargs.get("url", "./Output/") 18 | if "filename" in kwargs: 19 | filename = kwargs["filename"] 20 | try: 21 | fkwargs = _ttp_["global_vars"].copy() 22 | fkwargs.update(_ttp_["vars"]) 23 | filename = filename.format(**fkwargs) 24 | except Exception as e: 25 | log.error("file_returner: failed format filename - '{}'".format(e)) 26 | else: 27 | filename = "output_{}.txt".format(ctime) 28 | # if no filename provided, add outputter name to filename 29 | if not kwargs.get("filename", False): 30 | filename = _ttp_["output_object"].name + "_" + filename 31 | # check if path exists already, create it if not: 32 | if not os.path.exists(url): 33 | os.mkdir(url) 34 | # form file path: 35 | file_path = os.path.join(url, filename) 36 | # save excel workbook to file: 37 | if hasattr(D, "save") and hasattr(D, "worksheets"): 38 | log.info("output.returner_file: saving excel workbook") 39 | if not file_path.endswith(".xlsx"): 40 | file_path += ".xlsx" 41 | D.save(file_path) 42 | # save data to text file 43 | else: 44 | log.info("output.returner_file: saving text results to file") 45 | with open(file_path, "w") as f: 46 | if not isinstance(D, str): 47 | f.write(str(D)) 48 | else: 49 | f.write(D) 50 | -------------------------------------------------------------------------------- /ttp/returners/self_returner.py: -------------------------------------------------------------------------------- 1 | _name_map_ = {"self_returner": "self"} 2 | 3 | 4 | def self_returner(D, **kwargs): 5 | """Method to indicate that processed data need to be returned""" 6 | _ttp_["output_object"].return_to_self = True 7 | -------------------------------------------------------------------------------- /ttp/returners/syslog_returner.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import logging.handlers 3 | import json 4 | import time 5 | import traceback 6 | 7 | log = logging.getLogger(__name__) 8 | 9 | 10 | def syslog(data, **kwargs): 11 | # get kwargs 12 | servers = kwargs.get("servers", None) 13 | servers = [servers] if isinstance(servers, str) else servers 14 | if not servers: 15 | log.error( 16 | "ttp.returners.syslog: no syslog servers addresses found, doing nothing..." 17 | ) 18 | return 19 | port = int(kwargs.get("port", 514)) 20 | facility = kwargs.get("facility", 77) 21 | path = kwargs.get("path", []) 22 | iterate = kwargs.get("iterate", True) 23 | interval = kwargs.get("interval", 1) / 1000 24 | # normalize source_data to list: 25 | source_data = data if isinstance(data, list) else [data] 26 | # initiate isolated logger 27 | syslog_logger = logging.getLogger("_Custom_Syslog_Logger_") 28 | syslog_logger.propagate = False 29 | syslog_logger.setLevel(logging.INFO) 30 | for server in servers: 31 | handler = logging.handlers.SysLogHandler( 32 | address=(server, port), facility=facility 33 | ) 34 | handler.append_nul = False 35 | syslog_logger.addHandler(handler) 36 | # send data 37 | for datum in source_data: 38 | if not datum: 39 | log.error( 40 | "TTP:syslog returner, datum '{}' is empty, path: '{}'".format( 41 | str(datum), path 42 | ) 43 | ) 44 | continue 45 | try: 46 | item = _ttp_["output"]["traverse"](datum, path) 47 | except: 48 | tb = traceback.format_exc() 49 | log.error( 50 | "TTP:syslog returner, failed traverse data, path: '{}', error:\n'{}\ndatum: {}..".format( 51 | path, tb, str(datum)[:120] 52 | ) 53 | ) 54 | continue 55 | if not item: # skip empty results 56 | continue 57 | elif isinstance(item, list) and iterate: 58 | for i in item: 59 | time.sleep(interval) 60 | try: 61 | syslog_logger.info(json.dumps(i)) 62 | except: 63 | tb = traceback.format_exc() 64 | log.error( 65 | "TTP:syslog returner, failed send log item; path: '{}', error:\n'{}".format( 66 | path, tb 67 | ) 68 | ) 69 | else: 70 | time.sleep(interval) 71 | try: 72 | syslog_logger.info(json.dumps(item)) 73 | except: 74 | tb = traceback.format_exc() 75 | log.error( 76 | "TTP:syslog returner, failed send log item; path: '{}', error:\n'{}".format( 77 | path, tb 78 | ) 79 | ) 80 | # clean up 81 | handler.close() 82 | syslog_logger.removeHandler(handler) 83 | del syslog_logger 84 | -------------------------------------------------------------------------------- /ttp/returners/terminal_returner.py: -------------------------------------------------------------------------------- 1 | _name_map_ = {"terminal_returner": "terminal"} 2 | 3 | 4 | def terminal_returner(data, **kwargs): 5 | """Returner that prints results to terminal""" 6 | # add colouring 7 | if "colour" in kwargs: 8 | from colorama import init 9 | 10 | init() 11 | R = "\033[0;31;40m" # RED 12 | G = "\033[0;32;40m" # GREEN 13 | Y = "\033[0;33;40m" # Yellow 14 | # B = "\033[0;34;40m" # Blue 15 | N = "\033[0m" # Reset 16 | fttr = "{}{}{}" # formatter 17 | # get colour words from output 18 | red_words = kwargs.get( 19 | "red", 20 | "False,No,Failed,Error,Failure,Fail,false,no,failed,error,failure,fail", 21 | ) 22 | green_words = kwargs.get("green", "True,Yes,Success,Ok,true,yes,success,ok") 23 | yellow_words = kwargs.get("yellow", "Warning,warning") 24 | # convert colour words to lists 25 | red_words = [i.strip() for i in red_words.split(",")] 26 | green_words = [i.strip() for i in green_words.split(",")] 27 | yellow_words = [i.strip() for i in yellow_words.split(",")] 28 | # add colouring to output 29 | for red_word in red_words: 30 | data = data.replace(red_word, fttr.format(R, red_word, N)) 31 | for green_word in green_words: 32 | data = data.replace(green_word, fttr.format(G, green_word, N)) 33 | for yeallow_word in yellow_words: 34 | data = data.replace(yeallow_word, fttr.format(Y, yeallow_word, N)) 35 | # print output 36 | if _ttp_["python_major_version"] is 2: 37 | if isinstance(data, str) or isinstance( 38 | data, unicode 39 | ): # pylint: disable=undefined-variable 40 | print(data) 41 | else: 42 | print(str(data).replace("\\n", "\n")) 43 | elif _ttp_["python_major_version"] is 3: 44 | if isinstance(data, str): 45 | print(data) 46 | else: 47 | print(str(data).replace("\\n", "\n")) 48 | -------------------------------------------------------------------------------- /ttp/utils/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dmulyalin/ttp/d6476d387ca2078e0306c2546148850d3ceb246c/ttp/utils/__init__.py -------------------------------------------------------------------------------- /ttp/utils/get_attributes.py: -------------------------------------------------------------------------------- 1 | import re 2 | import sys, traceback 3 | 4 | import logging 5 | 6 | log = logging.getLogger(__name__) 7 | 8 | 9 | class _UndefSubst(dict): 10 | # class that overrides dictionary missing method to return value instead 11 | # of raising KeyError, that will lead to eval 'NameError: name ... is not defined', 12 | # that is needed to support simpler syntax definition, e.g. func_name="bla, name=value" 13 | # instead of func_name="'bla', name='value'" 14 | def __missing__(self, key): 15 | if key.lower() == "false": 16 | return False 17 | elif key.lower() == "true": 18 | return True 19 | elif key.lower() == "none": 20 | return None 21 | else: 22 | return key 23 | 24 | 25 | def _get_args_kwargs(*args, **kwargs): 26 | # function to load args and kwargs 27 | return {"args": args, "kwargs": kwargs} 28 | 29 | 30 | def get_attributes(line): 31 | """Extract attributes from variable line string. 32 | Example: 33 | 'exclude(-VM-)' -> [{'name': 'exclude', 'args': ['-VM-'], 'kwargs': {}}] 34 | Args: 35 | line (str): string that contains variable attributes i.e. "contains('vlan') | upper | split('.')" 36 | Returns: 37 | List of opts dictionaries containing extracted attributes 38 | """ 39 | 40 | result = [] 41 | ATTRIBUTES = [i.strip() for i in line.split("|") if i.strip()] 42 | for item in ATTRIBUTES: 43 | opts = {"args": [], "kwargs": {}, "name": ""} 44 | # re search attributes like set(), upper, joinchar(',','-') 45 | try: 46 | itemDict = re.search( 47 | r"^(?P\S+?)\s?(\((?P.*)\))?$", item 48 | ).groupdict() 49 | except AttributeError as e: 50 | log.critical( 51 | "ttp.get_attributes failed to parse attributes for: '{}' with error: {};\nExiting...".format( 52 | item, e 53 | ) 54 | ) 55 | raise SystemExit() 56 | opts["name"] = itemDict["name"] 57 | options = itemDict["options"] 58 | # create options list from options string using eval: 59 | if options: 60 | try: 61 | args_kwargs = eval( 62 | "_get_args_kwargs({})".format(options), _UndefSubst(globals()) 63 | ) 64 | except: 65 | exc_type, exc_value, exc_traceback = sys.exc_info() 66 | traceback_error = "".join( 67 | traceback.format_exception(exc_type, exc_value, exc_traceback) 68 | ) 69 | log.critical( 70 | """Failed to load arg/kwargs from line '{}' for options '{}', traceback:\n{}""".format( 71 | line, options, traceback_error 72 | ) 73 | ) 74 | raise SystemExit() 75 | opts.update(args_kwargs) 76 | result.append(opts) 77 | return result 78 | -------------------------------------------------------------------------------- /ttp/utils/guess.py: -------------------------------------------------------------------------------- 1 | import difflib 2 | 3 | 4 | def guess(word, possibilities, count=3, cutoff=0.8): 5 | return difflib.get_close_matches(word, possibilities, n=count, cutoff=cutoff) 6 | -------------------------------------------------------------------------------- /ttp/utils/load_python_exec_py2.py: -------------------------------------------------------------------------------- 1 | def load_python_exec(text_data, builtins=None): 2 | """ 3 | Function to provide compatibility with python 2.6 for loading text formatted in 4 | python using exec built-in method. Exec syntaxes in pyton 2.6 different 5 | compared to python3.x and python3 spits "Invlaid Syntaxis error" while trying to 6 | run code below. 7 | """ 8 | data = {} 9 | globals_dict = {"__builtins__" : builtins, "_ttp_": _ttp_, "False": False, "True": True, "None": None} 10 | # below can run on python2.7 as exec is a statements not function for python2.7: 11 | try: 12 | exec compile(text_data, '', 'exec') in globals_dict, data # pylint: disable=syntax-error 13 | except NameError: 14 | # NameError can occur if we have "True" or "False" in text_data 15 | # that way eval will catch it, but exec will through and error: 16 | # NameError: name 'True' is not defined 17 | pass 18 | # add extracted functions to globals for recursion to work 19 | globals_dict.update(data) 20 | # run eval in case if data still empty as we might have python dictionary or list 21 | # expressed as string 22 | if not data: 23 | data = eval(text_data, None, None) 24 | return data 25 | -------------------------------------------------------------------------------- /ttp/utils/load_python_exec_py3.py: -------------------------------------------------------------------------------- 1 | def load_python_exec(text_data, builtins=None): 2 | """ 3 | Function to provide compatibility with python 3.7 for loading text formwatted in 4 | python using exec built-in method. Exec syntaxis in pyton 2.6 different 5 | compared to python3.x and python3 spits "Invlaid Syntaxis error" while trying to 6 | run code below. 7 | """ 8 | data = {} 9 | globals_dict = { 10 | "__builtins__": builtins, 11 | "_ttp_": _ttp_, 12 | "False": False, 13 | "True": True, 14 | "None": None, 15 | } 16 | # below can run on python3.7 as exec is a function not statement for python3.7: 17 | exec(compile(text_data, "", "exec"), globals_dict, data) 18 | # add extracted functions to globals for recursion to work 19 | globals_dict.update(data) 20 | # run eval in case if data still empty as we might have python dictionary or list 21 | # expressed as string 22 | if not data: 23 | data = eval(text_data, None, None) 24 | return data 25 | -------------------------------------------------------------------------------- /ttp/utils/quick_parse.py: -------------------------------------------------------------------------------- 1 | from ttp import ttp 2 | 3 | 4 | def quick_parse( 5 | data=None, template=None, ttp_kwargs=None, parse_kwargs=None, result_kwargs=None 6 | ): 7 | """ 8 | Function to parse data and return results. 9 | 10 | :param data: (str) data to parse 11 | :param template: (str) template string 12 | :param ttp_kwargs: (dict) kwargs to use while instantiating TTP object 13 | :param parse_kwargs: (dict ) kwargs to use with ``parse`` method call 14 | :param result_kwargs: (dict ) kwargs to use with ``result`` method call 15 | 16 | Sample usage:: 17 | 18 | from ttp import quick_parse 19 | 20 | template = ''' 21 | 22 | interface {{ interface }} 23 | description {{ description | ORPHRASE }} 24 | ip address {{ ip }} {{ mask }} 25 | 26 | ''' 27 | 28 | data = ''' 29 | interface Lo0 30 | ip address 124.171.238.50 32 31 | ! 32 | interface Lo1 33 | description this interface has description 34 | ip address 1.1.1.1 32 35 | ''' 36 | 37 | parsing_result = quick_parse(data, template) 38 | """ 39 | ttp_kwargs = ttp_kwargs or {} 40 | parse_kwargs = parse_kwargs or {} 41 | result_kwargs = result_kwargs or {} 42 | # instantiate TTP parser object 43 | if data: 44 | parser = ttp(data=data, template=template, **ttp_kwargs) 45 | else: 46 | parser = ttp(template=template, **ttp_kwargs) 47 | 48 | # parse 49 | parser.parse(**parse_kwargs) 50 | 51 | # return results 52 | return parser.result(**result_kwargs) 53 | -------------------------------------------------------------------------------- /ttp/variable/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dmulyalin/ttp/d6476d387ca2078e0306c2546148850d3ceb246c/ttp/variable/__init__.py -------------------------------------------------------------------------------- /ttp/variable/getfilename.py: -------------------------------------------------------------------------------- 1 | def getfilename(data, *args, **kwargs): 2 | """Return dataname""" 3 | return args[0] 4 | -------------------------------------------------------------------------------- /ttp/variable/gethostname.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | from re import finditer 4 | import logging 5 | 6 | log = logging.getLogger(__name__) 7 | 8 | 9 | def gethostname(data, *args, **kwargs): 10 | """Description: Method to find hostname in show 11 | command output, uses symbols '# ', '<', '>' to find hostname 12 | """ 13 | REs = [ 14 | { 15 | "alu_sros": r"\n\S{1,2}:(\S+?)[>#].*(?=\n)" 16 | }, # e.g. 'A:hostname>', '*A:hostname>', 'A:hostname#', '*A:hostname#', 17 | # 'A:ALA-12>config>system#', '*A:ALA-12>config>system#' 18 | # ios-xr prompt re must go before ios privilege prompt re 19 | {"ios_xr": r"\n\S+:(\S+)#.*(?=\n)"}, # e.g. 'RP/0/4/CPU0:hostname#' 20 | {"ios_priv": r"\n(\S+)#.*(?=\n)"}, # e.g. 'hostname#' 21 | {"juniper": r"\n\S*@(\S+)>.*(?=\n)"}, # e.g. 'some.user@router-fw-host>' 22 | {"huawei": r"\n<(\S+)>.*(?=\n)"}, # e.g. '' 23 | {"ios_exec": r"\n(\S+)>.*(?=\n)"}, # e.g. 'hostname>' 24 | { 25 | "fortigate": r"\n(\S+ \(\S+\)) #.*(?=\n)" 26 | }, # e.g. 'forti-hostname (Default) #' 27 | ] 28 | UTF_BOM = [ 29 | "", 30 | "þÿ", 31 | "ÿþ", 32 | "\ufeff", 33 | ] # byte order marks (BOM) to strip from beginning 34 | # of the hostname, some text files can have them 35 | for item in REs: 36 | name, regex = list(item.items())[0] 37 | match_iter = finditer(regex, data) 38 | try: 39 | match = next(match_iter) 40 | hostname_match = match.group(1) 41 | for i in UTF_BOM: 42 | hostname_match = hostname_match.lstrip(i) 43 | return hostname_match 44 | except StopIteration: 45 | continue 46 | log.error( 47 | 'ttp.functions.variable_gethostname: "{}" file, Hostname not found'.format( 48 | args[0] 49 | ) 50 | ) 51 | return False 52 | -------------------------------------------------------------------------------- /ttp/variable/time_funcs.py: -------------------------------------------------------------------------------- 1 | import time 2 | 3 | 4 | def get_time(*args, **kwargs): 5 | strformat = "%H:%M:%S" 6 | return time.strftime(strformat) 7 | 8 | 9 | def get_date(*args, **kwargs): 10 | strformat = "%Y-%m-%d" 11 | return time.strftime(strformat) 12 | 13 | 14 | def get_timestamp_ms(*args, **kwargs): 15 | strformat = "%Y-%m-%d %H:%M:%S.{ms}" 16 | return time.strftime(strformat).format(ms=str(time.time()).split(".")[-1][:3]) 17 | 18 | 19 | def get_timestamp(*args, **kwargs): 20 | strformat = "%Y-%m-%d %H:%M:%S" 21 | return time.strftime(strformat) 22 | 23 | 24 | def get_time_ns(*args, **kwargs): 25 | # Return the current time in nanoseconds since the Epoch 26 | return time.time_ns() 27 | 28 | 29 | def get_timestamp_iso(*args, **kwargs): 30 | # Returns timestamp in ISO format in UTC timezone 31 | # e.g.: 2020-04-07T05:27:17.613549+00:00 32 | import datetime 33 | 34 | class UTC(datetime.tzinfo): 35 | def utcoffset(self, dt): 36 | return datetime.timedelta(0) 37 | 38 | def tzname(self, dt): 39 | return "UTC" 40 | 41 | def dst(self, dt): 42 | return datetime.timedelta(0) 43 | 44 | utc = UTC() 45 | return datetime.datetime.now(utc).isoformat() 46 | --------------------------------------------------------------------------------