├── docs ├── source │ ├── _static │ │ └── .placeholder │ ├── _templates │ │ └── .placeholder │ ├── index.rst │ └── conf.py └── make.bat ├── requirements-dev.txt ├── requirements-doc.txt ├── requirements.txt ├── media └── architecture.png ├── fragscapy ├── _author.py ├── __main__.py ├── modifications │ ├── __init__.py │ ├── print.py │ ├── summary.py │ ├── echo.py │ ├── select.py │ ├── ipv4_frag.py │ ├── reorder.py │ ├── tcp_segment.py │ ├── ipv6_frag.py │ ├── drop_proba.py │ ├── ipv6_atomic_frag.py │ ├── drop_one.py │ ├── duplicate.py │ ├── ipv4_overlap.py │ ├── ipv6_overlap.py │ ├── field.py │ ├── delay.py │ ├── tcp_overlap.py │ ├── ipv6_ext_hdr_mixup.py │ ├── mod.py │ └── utils.py ├── __init__.py ├── modlist.py ├── _version.py ├── packetlist.py ├── config.py ├── commandline.py ├── netfilter.py ├── tests.py └── engine.py ├── config_examples ├── 01_ping_no_mod.json ├── 02_ping_duplicate.json ├── 03_http_fragment6.json ├── 06_tcp_segmentation.json ├── 04_http_proxy.json ├── 05_complete_mess.json ├── README.md └── 00_template.json ├── config_common ├── ipv4_tiny_fragments.json ├── ipv6_tiny_fragments.json ├── ipv6_atomic_fragments.json ├── ipv4_overlapping.json ├── ipv6_overlapping.json ├── tcp_segmentation.json ├── ipv4_fragmentation.json ├── ipv6_fragmentation.json ├── tcp_overlapping.json └── README.md ├── LICENSE.txt ├── .gitignore ├── setup.py ├── Makefile ├── README.md └── .pylintrc /docs/source/_static/.placeholder: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /requirements-dev.txt: -------------------------------------------------------------------------------- 1 | pylint 2 | -------------------------------------------------------------------------------- /requirements-doc.txt: -------------------------------------------------------------------------------- 1 | sphinx 2 | -------------------------------------------------------------------------------- /docs/source/_templates/.placeholder: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | scapy 2 | fnfqueue >= 1.1.1 3 | tqdm 4 | inflection 5 | -------------------------------------------------------------------------------- /media/architecture.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AMOSSYS/Fragscapy/HEAD/media/architecture.png -------------------------------------------------------------------------------- /fragscapy/_author.py: -------------------------------------------------------------------------------- 1 | """Defines the author of Fragscapy.""" 2 | 3 | __author__ = "Maël Kervella" 4 | -------------------------------------------------------------------------------- /fragscapy/__main__.py: -------------------------------------------------------------------------------- 1 | """Runs Fragscapy.main() as the main entry for terminal-based use.""" 2 | 3 | from fragscapy import main 4 | 5 | 6 | main() 7 | -------------------------------------------------------------------------------- /config_examples/01_ping_no_mod.json: -------------------------------------------------------------------------------- 1 | { 2 | "cmd": "/bin/ping -c 10 www.example.com", 3 | 4 | "nfrules": [ 5 | ], 6 | 7 | "input": [ 8 | ], 9 | 10 | "output": [ 
11 | ] 12 | } 13 | -------------------------------------------------------------------------------- /config_examples/02_ping_duplicate.json: -------------------------------------------------------------------------------- 1 | { 2 | "cmd": "/bin/ping -c 10 www.example.com", 3 | 4 | "nfrules": [ 5 | {"host": "www.example.com"} 6 | ], 7 | 8 | "input": [ 9 | ], 10 | 11 | "output": [ 12 | { 13 | "mod_name": "duplicate", 14 | "mod_opts": "first", 15 | "optional": true 16 | } 17 | ] 18 | } 19 | -------------------------------------------------------------------------------- /config_examples/03_http_fragment6.json: -------------------------------------------------------------------------------- 1 | { 2 | "cmd": "/usr/bin/wget http://www.example.com", 3 | 4 | "nfrules": [ 5 | {"host": "www.example.com", "port": 80} 6 | ], 7 | 8 | "input": [ 9 | ], 10 | 11 | "output": [ 12 | { 13 | "mod_name": "ipv6_frag", 14 | "mod_opts": "range 50 10000 50" 15 | } 16 | ] 17 | } 18 | -------------------------------------------------------------------------------- /config_examples/06_tcp_segmentation.json: -------------------------------------------------------------------------------- 1 | { 2 | "cmd": "/usr/bin/wget -T 2 -t 1 http://www.example.com -O run/results/index_{i}_{j}.html", 3 | 4 | "nfrules": [ 5 | {"host": "www.example.com", "port": 80} 6 | ], 7 | 8 | "input": [ 9 | ], 10 | 11 | "output": [ 12 | { 13 | "mod_name": "tcp_segment", 14 | "mod_opts": "range 4 200" 15 | } 16 | ] 17 | } 18 | -------------------------------------------------------------------------------- /config_common/ipv4_tiny_fragments.json: -------------------------------------------------------------------------------- 1 | { 2 | "cmd": "/usr/bin/curl -4 -f -m 1 http://www.example.com -o results/index_{i}_{j}.html", 3 | 4 | "nfrules": [ 5 | {"host": "www.example.com", "port": 80, "ipv6": false, "input_chain": false} 6 | ], 7 | 8 | "input": [ 9 | ], 10 | 11 | "output": [ 12 | { 13 | "mod_name": "ipv4_frag", 14 | "mod_opts": "range 1 100" 15 | } 16 | ] 17 | } 18 | -------------------------------------------------------------------------------- /config_common/ipv6_tiny_fragments.json: -------------------------------------------------------------------------------- 1 | { 2 | "cmd": "/usr/bin/curl -6 -f -m 1 http://www.example.com -o results/index_{i}_{j}.html", 3 | 4 | "nfrules": [ 5 | {"host": "www.example.com", "port": 80, "ipv4": false, "input_chain": false} 6 | ], 7 | 8 | "input": [ 9 | ], 10 | 11 | "output": [ 12 | { 13 | "mod_name": "ipv6_frag", 14 | "mod_opts": "range 1 100" 15 | } 16 | ] 17 | } 18 | -------------------------------------------------------------------------------- /config_common/ipv6_atomic_fragments.json: -------------------------------------------------------------------------------- 1 | { 2 | "cmd": "/usr/bin/curl -6 -f -m 1 http://www.example.com -o results/index_{i}_{j}.html", 3 | 4 | "nfrules": [ 5 | {"host": "www.example.com", "port": 80, "ipv4": false, "input_chain": false} 6 | ], 7 | 8 | "input": [ 9 | ], 10 | 11 | "output": [ 12 | { 13 | "mod_name": "ipv6_atomic_frag", 14 | "mod_opts": 65535 15 | } 16 | ] 17 | } 18 | -------------------------------------------------------------------------------- /config_common/ipv4_overlapping.json: -------------------------------------------------------------------------------- 1 | { 2 | "cmd": "/usr/bin/curl -4 -f -m 1 http://www.example.com -o results/index_{i}_{j}.html", 3 | 4 | "nfrules": [ 5 | {"host": "www.example.com", "port": 80, "ipv6": false, "input_chain": false} 6 
| ], 7 | 8 | "input": [ 9 | ], 10 | 11 | "output": [ 12 | { 13 | "mod_name": "ipv4_overlap", 14 | "mod_opts": ["range 1 1000", "range 1 20"] 15 | }, 16 | { 17 | "mod_name": "reorder", 18 | "mod_opts": "seq_str reverse random", 19 | "optional": true 20 | } 21 | ] 22 | } 23 | -------------------------------------------------------------------------------- /config_common/ipv6_overlapping.json: -------------------------------------------------------------------------------- 1 | { 2 | "cmd": "/usr/bin/curl -6 -f -m 1 http://www.example.com -o results/index_{i}_{j}.html", 3 | 4 | "nfrules": [ 5 | {"host": "www.example.com", "port": 80, "ipv4": false, "input_chain": false} 6 | ], 7 | 8 | "input": [ 9 | ], 10 | 11 | "output": [ 12 | { 13 | "mod_name": "ipv6_overlap", 14 | "mod_opts": ["range 1 1000", "range 1 20"] 15 | }, 16 | { 17 | "mod_name": "reorder", 18 | "mod_opts": "seq_str reverse random", 19 | "optional": true 20 | } 21 | ] 22 | } 23 | -------------------------------------------------------------------------------- /docs/source/index.rst: -------------------------------------------------------------------------------- 1 | .. FragScapy documentation master file, created by 2 | sphinx-quickstart on Fri May 31 16:40:11 2019. 3 | You can adapt this file completely to your liking, but it should at least 4 | contain the root `toctree` directive. 5 | 6 | Welcome to FragScapy's documentation! 7 | ===================================== 8 | 9 | .. toctree:: 10 | :maxdepth: 2 11 | :caption: Contents: 12 | 13 | 14 | 15 | Indices and tables 16 | ================== 17 | 18 | * :doc:`/modules` 19 | * :ref:`genindex` 20 | * :ref:`modindex` 21 | * :ref:`search` 22 | -------------------------------------------------------------------------------- /fragscapy/modifications/__init__.py: -------------------------------------------------------------------------------- 1 | """Package regrouping all the possible modifications. 2 | 3 | It already contains some basic modifications that can be used by default. 4 | For any new custom modification, they should be added in this package in 5 | order to be properly by the engine later. The discovery of new modifications 6 | should be automatic. 7 | 8 | There is one slightly different module in this package: `mod`. It is used to 9 | define `Mod`, the base class for any modifications. This is an abstract class. 10 | To write new modifications, one should subclass `Mod` and implements all the 11 | abstract methods. 
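For instance, a new modification can be as small as the following sketch (the `Nop` class is purely illustrative and is not part of the package; it mirrors the structure of the bundled `print` and `echo` modifications):

    from fragscapy.modifications.mod import Mod

    class Nop(Mod):
        # A do-nothing modification that forwards the packet list unchanged.
        name = "Nop"
        doc = "Does nothing.\nnop"
        _nb_args = 0

        def apply(self, pkt_list):
            # Nothing is altered, the list is simply handed back to the engine.
            return pkt_list

Saving such a module in this package should be enough for it to be picked up automatically.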
12 | """ 13 | -------------------------------------------------------------------------------- /config_examples/04_http_proxy.json: -------------------------------------------------------------------------------- 1 | { 2 | "cmd": "/usr/bin/wget -T 5 http://www.example.com:8080", 3 | 4 | "nfrules": [ 5 | {"host": "www.example.com", "port": 80, "output_chain": false}, 6 | {"host": "www.example.com", "port": 8080, "input_chain": false} 7 | ], 8 | 9 | "input": [ 10 | { 11 | "mod_name": "field", 12 | "mod_opts": ["TCP", "sport", 8080] 13 | }, 14 | { 15 | "mod_name": "field", 16 | "mod_opts": ["TCP", "chksum", "none"] 17 | } 18 | ], 19 | 20 | "output": [ 21 | { 22 | "mod_name": "field", 23 | "mod_opts": ["TCP", "dport", 80] 24 | }, 25 | { 26 | "mod_name": "field", 27 | "mod_opts": ["TCP", "chksum", "none"] 28 | } 29 | ] 30 | } 31 | -------------------------------------------------------------------------------- /fragscapy/modifications/print.py: -------------------------------------------------------------------------------- 1 | """Prints the content of a packet list.""" 2 | 3 | from fragscapy.modifications.mod import Mod 4 | 5 | 6 | class Print(Mod): 7 | """Prints the content of a packet list. 8 | 9 | Args: 10 | *args: The arguments of the mods. 11 | 12 | Raises: 13 | ValueError: Unrecognized or incorrect number of parameters. 14 | 15 | Examples: 16 | >>> Print() 17 | """ 18 | 19 | name = "Print" 20 | doc = ("Prints the content of the packet list.\n" 21 | "print") 22 | _nb_args = 0 23 | 24 | def apply(self, pkt_list): 25 | """Prints the content of each packet. See `Mod.apply` for more 26 | details.""" 27 | pkt_list.display() 28 | 29 | return pkt_list 30 | -------------------------------------------------------------------------------- /fragscapy/modifications/summary.py: -------------------------------------------------------------------------------- 1 | """Prints a 1-line summary of the packet.""" 2 | 3 | from fragscapy.modifications.mod import Mod 4 | 5 | 6 | class Summary(Mod): 7 | """Prints a 1-line summary of the packet. 8 | 9 | Args: 10 | *args: The arguments of the mods. 11 | 12 | Raises: 13 | ValueError: Unrecognized or incorrect number of parameters. 
14 | 15 | Examples: 16 | >>> Summary() 17 | """ 18 | 19 | name = "Summary" 20 | doc = ("Prints a 1-line summary of the packet.\n" 21 | "summary") 22 | _nb_args = 0 23 | 24 | def apply(self, pkt_list): 25 | """Prints the summary for each packet.See `Mod.apply` for more 26 | details.""" 27 | pkt_list.summary() 28 | 29 | return pkt_list 30 | -------------------------------------------------------------------------------- /config_common/tcp_segmentation.json: -------------------------------------------------------------------------------- 1 | { 2 | "cmd": "/usr/bin/curl -f -m 1 http://www.example.com -o results/index_{i}_{j}.html", 3 | 4 | "nfrules": [ 5 | {"host": "www.example.com", "port": 80, "input_chain": false} 6 | ], 7 | 8 | "input": [ 9 | ], 10 | 11 | "output": [ 12 | { 13 | "mod_name": "tcp_segment", 14 | "mod_opts": "range 1 1000" 15 | }, 16 | { 17 | "mod_name": "drop_proba", 18 | "mod_opts": "seq_float 0.1 0.2 0.3 0.4 0.5", 19 | "optional": true 20 | }, 21 | { 22 | "mod_name": "duplicate", 23 | "mod_opts": "seq_str first last random", 24 | "optional": true 25 | }, 26 | { 27 | "mod_name": "reorder", 28 | "mod_opts": "seq_str reverse random", 29 | "optional": true 30 | } 31 | ] 32 | } 33 | -------------------------------------------------------------------------------- /config_common/ipv4_fragmentation.json: -------------------------------------------------------------------------------- 1 | { 2 | "cmd": "/usr/bin/curl -4 -f -m 1 http://www.example.com -o results/index_{i}_{j}.html", 3 | 4 | "nfrules": [ 5 | {"host": "www.example.com", "port": 80, "ipv6": false, "input_chain": false} 6 | ], 7 | 8 | "input": [ 9 | ], 10 | 11 | "output": [ 12 | { 13 | "mod_name": "ipv4_frag", 14 | "mod_opts": "range 1 1000" 15 | }, 16 | { 17 | "mod_name": "drop_proba", 18 | "mod_opts": "seq_float 0.1 0.2 0.3 0.4 0.5", 19 | "optional": true 20 | }, 21 | { 22 | "mod_name": "duplicate", 23 | "mod_opts": "seq_str first last random", 24 | "optional": true 25 | }, 26 | { 27 | "mod_name": "reorder", 28 | "mod_opts": "seq_str reverse random", 29 | "optional": true 30 | } 31 | ] 32 | } 33 | -------------------------------------------------------------------------------- /config_common/ipv6_fragmentation.json: -------------------------------------------------------------------------------- 1 | { 2 | "cmd": "/usr/bin/curl -6 -f -m 1 http://www.example.com -o results/index_{i}_{j}.html", 3 | 4 | "nfrules": [ 5 | {"host": "www.example.com", "port": 80, "ipv4": false, "input_chain": false} 6 | ], 7 | 8 | "input": [ 9 | ], 10 | 11 | "output": [ 12 | { 13 | "mod_name": "ipv6_frag", 14 | "mod_opts": "range 1 1000" 15 | }, 16 | { 17 | "mod_name": "drop_proba", 18 | "mod_opts": "seq_float 0.1 0.2 0.3 0.4 0.5", 19 | "optional": true 20 | }, 21 | { 22 | "mod_name": "duplicate", 23 | "mod_opts": "seq_str first last random", 24 | "optional": true 25 | }, 26 | { 27 | "mod_name": "reorder", 28 | "mod_opts": "seq_str reverse random", 29 | "optional": true 30 | } 31 | ] 32 | } 33 | -------------------------------------------------------------------------------- /config_common/tcp_overlapping.json: -------------------------------------------------------------------------------- 1 | { 2 | "cmd": "/usr/bin/curl -f -m 1 http://www.example.com -o results/index_{i}_{j}.html", 3 | 4 | "nfrules": [ 5 | {"host": "www.example.com", "port": 80, "input_chain": false} 6 | ], 7 | 8 | "input": [ 9 | ], 10 | 11 | "output": [ 12 | { 13 | "mod_name": "tcp_overlap", 14 | "mod_opts": ["range 1 100", "range 1 20", "seq_str before after"] 15 | }, 16 | 
{ 17 | "mod_name": "drop_proba", 18 | "mod_opts": "seq_float 0.1 0.2 0.3 0.4 0.5", 19 | "optional": true 20 | }, 21 | { 22 | "mod_name": "duplicate", 23 | "mod_opts": "seq_str first last random", 24 | "optional": true 25 | }, 26 | { 27 | "mod_name": "reorder", 28 | "mod_opts": "seq_str reverse random", 29 | "optional": true 30 | } 31 | ] 32 | } 33 | -------------------------------------------------------------------------------- /docs/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | pushd %~dp0 4 | 5 | REM Command file for Sphinx documentation 6 | 7 | if "%SPHINXBUILD%" == "" ( 8 | set SPHINXBUILD=sphinx-build 9 | ) 10 | set SOURCEDIR=source 11 | set BUILDDIR=build 12 | 13 | if "%1" == "" goto help 14 | 15 | %SPHINXBUILD% >NUL 2>NUL 16 | if errorlevel 9009 ( 17 | echo. 18 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx 19 | echo.installed, then set the SPHINXBUILD environment variable to point 20 | echo.to the full path of the 'sphinx-build' executable. Alternatively you 21 | echo.may add the Sphinx directory to PATH. 22 | echo. 23 | echo.If you don't have Sphinx installed, grab it from 24 | echo.http://sphinx-doc.org/ 25 | exit /b 1 26 | ) 27 | 28 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% 29 | goto end 30 | 31 | :help 32 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% 33 | 34 | :end 35 | popd 36 | -------------------------------------------------------------------------------- /fragscapy/modifications/echo.py: -------------------------------------------------------------------------------- 1 | """Echoes a string.""" 2 | 3 | from fragscapy.modifications.mod import Mod 4 | 5 | class Echo(Mod): 6 | """Echoes a string. 7 | 8 | This modification neither alter the packet nor depend on it. It simply 9 | prints the string that was passed as a parameter. 10 | 11 | Args: 12 | *args: The arguments of the mods. 13 | 14 | Attributes: 15 | string: The string that will be echoed 16 | 17 | Examples: 18 | >>> Echo("Hello, world!").string 19 | Hello, world! 20 | >>> Echo("plop", "i", "plop").string 21 | plop i plop 22 | """ 23 | 24 | name = "Echo" 25 | doc = "Echo a string.\necho " 26 | 27 | def parse_args(self, *args): 28 | """See base class.""" 29 | self.string = " ".join(args) 30 | 31 | def apply(self, pkt_list): 32 | """Print the string. See `Mod.apply` for more details.""" 33 | print(self.string) 34 | 35 | return pkt_list 36 | -------------------------------------------------------------------------------- /LICENSE.txt: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2019 Maël KERVELLA 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
23 |
--------------------------------------------------------------------------------
/config_examples/05_complete_mess.json:
--------------------------------------------------------------------------------
1 | {
2 |     "cmd": "/usr/bin/wget http://www.example.com",
3 |
4 |     "nfrules": [
5 |         {"host": "www.example.com", "port": 80},
6 |         {"host": "www.example.com", "port": 443},
7 |         {"port": 22, "input_chain": false, "ipv4": false, "qnum":18}
8 |     ],
9 |
10 |     "input": [
11 |         {
12 |             "mod_name": "echo",
13 |             "mod_opts": "seq_str foo bar fuz",
14 |             "optional": true
15 |         },
16 |         {
17 |             "mod_name": "field",
18 |             "mod_opts": ["IPv6", "nh", 20]
19 |         },
20 |         {
21 |             "mod_name": "field",
22 |             "mod_opts": ["IPv6", "plen", "range 1 500"],
23 |             "optional": true
24 |         },
25 |         {
26 |             "mod_name": "summary"
27 |         }
28 |     ],
29 |
30 |     "output": [
31 |         {
32 |             "mod_name": "print"
33 |         },
34 |         {
35 |             "mod_name": "ipv6_frag",
36 |             "mod_opts": "range 1280 5000 50"
37 |         },
38 |         {
39 |             "mod_name": "drop_proba",
40 |             "mod_opts": "seq_float 0.1 0.25 0.5 0.75 0.9"
41 |         },
42 |         {
43 |             "mod_name": "reorder",
44 |             "mod_opts": "reverse",
45 |             "optional": true
46 |         },
47 |         {
48 |             "mod_name": "ipv6_ext_hdr_mixup"
49 |         },
50 |         {
51 |             "mod_name": "summary"
52 |         }
53 |     ]
54 | }
55 |
--------------------------------------------------------------------------------
/config_examples/README.md:
--------------------------------------------------------------------------------
1 | ## Fragscapy -- Configuration files examples
2 |
3 | This directory contains various configuration files that can be used with
4 | FragScapy. They are not intended to be relevant; they are just here to
5 | show the possibilities. Most of the configuration files are nonetheless valid.
6 |
7 | For more relevant, i.e. applicable to real testing, configuration files, see
8 | the [_config\_common_ directory](config_common/).
9 |
10 | ### List of configuration files
11 |
12 | | Name | Valid | Description |
13 | | ---- | ----- | ----------- |
14 | | 00_template.json | ✖ | Describes all the fields that can appear in a config file |
15 | | 01_ping_no_mod.json | ✔ | Starts a simple ping but does not apply any modification to the packets |
16 | | 02_ping_duplicate.json | ✔ | Pings a host but duplicates each packet sent out |
17 | | 03_http_fragment6.json | ✔ | Wget on a server. All IPv6 packets sent out are fragmented with a size that varies from 50 to 10000 with a step of 50 |
18 | | 04_http_proxy.json | ✔ | Creates a TCP-port proxy from 8080 to 80. The packets sent to 8080 are modified to be sent to 80 and packets received from 80 are modified to be received from 8080 |
19 | | 05_complete_mess.json | ✔ | Does not make a lot of sense. It just applies most of the modifications (with non-relevant parameters) onto a wget command.
Most of the tests should not pass because the resulting packets make no sense at all |
20 | | 06_tcp_segmentation.json | ✔ | Wget command on a server but TCP packets are segmented with a size that varies from 4 to 200 with a step of 1 |
21 |
--------------------------------------------------------------------------------
/config_common/README.md:
--------------------------------------------------------------------------------
1 | ## Fragscapy -- Commonly used configuration files
2 |
3 | This directory contains examples of configuration files that can be used to
4 | test specific behaviors that one may want to check. These files try to cover some
5 | corner cases.
6 |
7 | ### List of configuration files
8 |
9 | | Name | Description | Number of tests |
10 | | ---- | ----------- | --------------- |
11 | | ipv4_fragmentation.json | Fragments the IPv4 packets emitted and potentially drops or duplicates some of them and reorders the packets | 72 000 |
12 | | ipv4_overlapping.json | Tries to create overlapping IPv4 fragments that could be used to evade a firewall | 60 000 |
13 | | ipv4_tiny_fragments.json | Tests only the IPv4 tiny fragments attack, i.e. very small fragments | 100 |
14 | | ipv6_fragmentation.json | Fragments the IPv6 packets emitted and potentially drops or duplicates some of them and reorders the packets | 72 000 |
15 | | ipv6_atomic_fragments.json | A single test that creates IPv6 atomic fragments: fragmentation with only 1 fragment. It should not be valid in correct implementations | 1 |
16 | | ipv6_overlapping.json | Tries to create overlapping IPv6 fragments that could be used to evade a firewall | 60 000 |
17 | | ipv6_tiny_fragments.json | Tests only the IPv6 tiny fragments attack, i.e. very small fragments | 100 |
18 | | tcp_segmentation.json | Segments the TCP packets emitted and potentially drops or duplicates some of them and reorders the segments | 72 000 |
19 | | tcp_overlapping.json | Tries to create overlapping TCP segments that could be used to evade a firewall | 288 000 |
20 |
--------------------------------------------------------------------------------
/fragscapy/__init__.py:
--------------------------------------------------------------------------------
1 | """Tool to intercept and modify packets from the network.
2 |
3 | Fragscapy is a tool that can be used to intercept packets from the network
4 | and modify them using the `Scapy` package. It can be used to automate a lot
5 | of tests and run multiple configurations at once.
6 |
7 | The most basic usage is to apply the same modification (e.g. duplicating a
8 | packet, dropping a packet, using fragmentation, ...) to all packets. But it
9 | can also be used to generate a series of tests with slightly different
10 | parameters each.
11 |
12 | This is the intended usage. To use it, one can either import the different
13 | modules in a python script, use the `commandline` module to use Fragscapy
14 | from a terminal or even use the `fragscapy` command created during the
15 | installation.
16 |
17 | For more advanced usage, Fragscapy can also simply intercept packets and
18 | send them to the user as python objects that can be modified. It can also
19 | be used to apply a series of modifications to pre-generated Scapy packets,
20 | which may be useful when one needs to modify a lot of packets.
21 |
22 | Fragscapy can even be extended with many different kinds of modifications.
23 | Some basic modifications are already provided but one can add a new one that
24 | better matches their intentions.
Though be aware that even if the list of 25 | modifications availables is not limited to the one already existing but it 26 | might require some deep knowledge of the code to create a new one. 27 | """ 28 | 29 | from fragscapy._author import __author__ 30 | from fragscapy._version import __version__ 31 | from fragscapy.commandline import command as main 32 | -------------------------------------------------------------------------------- /fragscapy/modifications/select.py: -------------------------------------------------------------------------------- 1 | """Selects only some packets and drop the other ones.""" 2 | 3 | from fragscapy.modifications.mod import Mod 4 | from fragscapy.packetlist import PacketList 5 | 6 | 7 | class Select(Mod): 8 | """Selects only some packets and drop the other ones. 9 | 10 | The selection is specified by giving a sequence of the index to keep. 11 | 12 | Args: 13 | *args: The arguments of the mods. 14 | 15 | Attributes: 16 | sequence: A list of the index to keep. 17 | 18 | Raises: 19 | ValueError: Unrecognized or incorrect number of parameters. 20 | 21 | Examples: 22 | >>> Select(0, 2, 4, 6, 8).sequence 23 | [0, 2, 4, 6, 8] 24 | >>> Select().sequence 25 | [] 26 | """ 27 | 28 | name = "Select" 29 | doc = ("Select only some packet.\n" 30 | "select [id1 [id2 [id3 ...]]]") 31 | 32 | def parse_args(self, *args): 33 | """See base class.""" 34 | self.sequence = [] 35 | for arg in args: 36 | try: 37 | self.sequence.append(int(arg)) 38 | except ValueError: 39 | raise ValueError("Non integer parameter. " 40 | "Got {}".format(arg)) 41 | 42 | def apply(self, pkt_list): 43 | """Keeps only the wanted packets. See `Mod.apply` for more details.""" 44 | new_pl = PacketList() 45 | for i in self.sequence: 46 | new_pl.add_packet(pkt_list[i].pkt, pkt_list[i].delay) 47 | return new_pl 48 | 49 | def __str__(self): 50 | return "{name} {param}".format( 51 | name=self.name, 52 | param=" ".join(str(i) for i in self.sequence) 53 | ) 54 | -------------------------------------------------------------------------------- /fragscapy/modlist.py: -------------------------------------------------------------------------------- 1 | """Defines a list of `Mod` objects.""" 2 | 3 | class ModList(list): 4 | """A list modifications. 5 | 6 | Use the `ModList` as a classic python list, the only difference is the 7 | `.apply(packet_list)` method available to apply the list of modifications 8 | on a `PacketList` object. 9 | 10 | Examples: 11 | Assuming the all the mods and the packet list are already created: 12 | 13 | >>> ml = ModList() 14 | >>> ml.append(mod1) 15 | >>> ml.append(mod2) 16 | >>> ml.append(mod3) 17 | >>> len(ml) 18 | 3 19 | >>> ml.pop() 20 | >>> ml[-1] 21 | mod2 22 | >>> ml.insert(1, mod4) 23 | >>> for mod in ml: 24 | ... print(mod) 25 | mod1 26 | mod4 27 | mod2 28 | mod3 29 | >>> ml.apply(packet_list) 30 | """ 31 | def __str__(self): 32 | ret = [] 33 | ret.append("ModList [") 34 | for mod in self: 35 | ret.append(" - " + str(mod)) 36 | ret.append("]") 37 | return "\n".join(ret) 38 | 39 | def __repr__(self): 40 | ret = [] 41 | ret.append("ModList [") 42 | for mod in self: 43 | ret.append(" - " + repr(mod)) 44 | ret.append("]") 45 | return "\n".join(ret) 46 | 47 | def apply(self, pkt_list): 48 | """Applies all the modifications to a packet list. 49 | 50 | Args: 51 | pkt_list: The `PacketList` object to modify. 52 | 53 | Returns: 54 | The resulting `PacketList` with all modifications applied. 
55 |         """
56 |         for mod in self:
57 |             pkt_list = mod.apply(pkt_list)
58 |         return pkt_list
59 |
60 |     def is_deterministic(self):
61 |         """Are all the mods deterministic (i.e. non-random)."""
62 |         return all(mod.is_deterministic() for mod in self)
63 |
--------------------------------------------------------------------------------
/fragscapy/modifications/ipv4_frag.py:
--------------------------------------------------------------------------------
1 | """Fragments the IPv4 packets at the L3-layer."""
2 |
3 | import scapy.layers.inet
4 | import scapy.packet
5 |
6 | from fragscapy.modifications.mod import Mod
7 | from fragscapy.packetlist import PacketList
8 |
9 |
10 | class Ipv4Frag(Mod):
11 |     """Fragments the IPv4 packets at the L3-layer.
12 |
13 |     Fragments each IPv4 packet. The fragmentation size must be specified. It
14 |     represents the maximum size of each packet (including headers). It uses
15 |     Scapy's fragmentation function.
16 |
17 |     Args:
18 |         *args: The arguments of the mods.
19 |
20 |     Attributes:
21 |         fragsize: The fragmentation size (maximum length of a fragment).
22 |
23 |     Raises:
24 |         ValueError: Unrecognized or incorrect number of parameters.
25 |
26 |     Examples:
27 |         >>> Ipv4Frag(32).fragsize
28 |         32
29 |     """
30 |
31 |     name = "Ipv4Frag"
32 |     doc = ("Fragments the IPv4 packets at the L3-layer\n"
33 |            "ipv4_frag <fragsize>")
34 |     _nb_args = 1
35 |
36 |     def parse_args(self, *args):
37 |         """See base class."""
38 |         try:
39 |             self.fragsize = int(args[0])
40 |         except ValueError:
41 |             raise ValueError("Parameter 1 unrecognized. "
42 |                              "Got {}".format(args[0]))
43 |
44 |     def apply(self, pkt_list):
45 |         """Fragments each IPv4 packet. See `Mod.apply` for more details."""
46 |         new_pl = PacketList()
47 |
48 |         for pkt in pkt_list:
49 |             if pkt.pkt.haslayer('IP'):
50 |                 fragments = scapy.layers.inet.fragment(pkt.pkt, self.fragsize)
51 |
52 |                 index = len(new_pl) - 1
53 |                 for fragment in fragments:
54 |                     new_pl.add_packet(fragment)
55 |                 new_pl.edit_delay(index, pkt.delay)
56 |             else:
57 |                 # Not IPv4 so no fragmentation
58 |                 new_pl.add_packet(pkt.pkt, pkt.delay)
59 |
60 |         return new_pl
61 |
--------------------------------------------------------------------------------
/fragscapy/modifications/reorder.py:
--------------------------------------------------------------------------------
1 | """Reorder the packet listing."""
2 |
3 | import enum
4 | import random
5 |
6 | from fragscapy.modifications.mod import Mod
7 | from fragscapy.packetlist import PacketList
8 |
9 |
10 | METHOD = enum.Enum("METHOD", "REVERSE RANDOM")
11 |
12 |
13 | class Reorder(Mod):
14 |     """Reorder the packet listing.
15 |
16 |     The operation can either reverse the whole packet list or simply
17 |     randomly rearrange them.
18 |
19 |     Args:
20 |         *args: The arguments of the mods.
21 |
22 |     Attributes:
23 |         method: The method to use (reverse or random)
24 |
25 |     Raises:
26 |         ValueError: Unrecognized or incorrect number of parameters.
27 |
28 |     Examples:
29 |         >>> Reorder("reverse").method
30 |         REVERSE
31 |         >>> Reorder("random").method
32 |         RANDOM
33 |     """
34 |
35 |     name = "Reorder"
36 |     doc = ("Reorder the packet list.\n"
37 |            "reorder {reverse|random}")
38 |     _nb_args = 1
39 |
40 |     def parse_args(self, *args):
41 |         """See base class."""
42 |         if args[0] == "reverse":
43 |             self.method = METHOD.REVERSE
44 |         elif args[0] == "random":
45 |             self.method = METHOD.RANDOM
46 |         else:
47 |             raise ValueError("Parameter 1 unrecognized. "
" 48 | "Got {}".format(args[0])) 49 | 50 | def is_deterministic(self): 51 | """See base class.""" 52 | return self.method != METHOD.RANDOM 53 | 54 | def apply(self, pkt_list): 55 | """Reorder the packets. See `Mod.apply` for more details.""" 56 | if self.method == METHOD.REVERSE: 57 | sequence = list(range(len(pkt_list)-1, -1, -1)) 58 | elif self.method == METHOD.RANDOM: 59 | sequence = list(range(len(pkt_list))) 60 | random.shuffle(sequence) 61 | new_pl = PacketList() 62 | for i in sequence: 63 | new_pl.add_packet(pkt_list[i].pkt, pkt_list[i].delay) 64 | return new_pl 65 | -------------------------------------------------------------------------------- /fragscapy/modifications/tcp_segment.py: -------------------------------------------------------------------------------- 1 | """Segments the TCP packets at the L4-layer.""" 2 | 3 | from fragscapy.modifications.mod import Mod 4 | from fragscapy.modifications.utils import tcp_segment 5 | from fragscapy.packetlist import PacketList 6 | 7 | 8 | class TcpSegment(Mod): 9 | """Segments the TCP packets at the L4-layer. 10 | 11 | The segmentation size must be specified. It represents the size of the 12 | TCP data in each of the fragments. 13 | 14 | Args: 15 | *args: The arguments of the mods. 16 | 17 | Attributes: 18 | segmentsize: The segmentation size (bytes of TCP data to use). 19 | 20 | Raises: 21 | ValueError: Unrecognized or incorrect number of parameters. 22 | 23 | Examples: 24 | >>> TcpSegment(32).segmentsize 25 | 32 26 | """ 27 | 28 | name = "TcpSegment" 29 | doc = ("Segments the TCP packets at the L4-layer\n" 30 | "tcp_segment ") 31 | _nb_args = 1 32 | 33 | def parse_args(self, *args): 34 | """See base class.""" 35 | try: 36 | self.segmentsize = int(args[0]) 37 | except ValueError: 38 | raise ValueError("Parameter 1 unrecognized. " 39 | "Got '{}'".format(args[0])) 40 | if self.segmentsize < 0: 41 | raise ValueError("'segmentsize' shoudl be positive. " 42 | "Got '{}'".format(self.segmentsize)) 43 | 44 | def apply(self, pkt_list): 45 | """Segments each TCP packet. See `Mod.apply` for more details.""" 46 | new_pl = PacketList() 47 | 48 | for pkt in pkt_list: 49 | if pkt.pkt.haslayer('TCP'): 50 | segments = tcp_segment(pkt.pkt, self.segmentsize) 51 | 52 | index = len(new_pl) - 1 53 | for segment in segments: 54 | new_pl.add_packet(segment) 55 | new_pl.edit_delay(index, pkt.delay) 56 | else: 57 | # Not TCP so no segmentation 58 | new_pl.add_packet(segment, pkt.delay) 59 | 60 | return new_pl 61 | -------------------------------------------------------------------------------- /fragscapy/modifications/ipv6_frag.py: -------------------------------------------------------------------------------- 1 | """Fragments the IPv6 packets at the L3-layer.""" 2 | 3 | from fragscapy.modifications.mod import Mod 4 | from fragscapy.modifications.utils import fragment6 5 | from fragscapy.packetlist import PacketList 6 | 7 | 8 | class Ipv6Frag(Mod): 9 | """Fragments the IPv6 packets at the L3-layer. 10 | 11 | Fragment each IPv6 packet. the fragmentation size must be specified. It 12 | represents the maximum size of each packet (including headers). It uses 13 | the scapy's fragmentation function. 14 | 15 | Args: 16 | *args: The arguments of the mods. 17 | 18 | Attributes: 19 | fragsize: The fragmentation size (maximum length of a fragment). 20 | 21 | Raises: 22 | ValueError: Unrecognized or incorrect number of parameters. 
23 | 24 | Examples: 25 | >>> Ipv6Frag(1280).fragsize # Minimum MTU for IPv6 26 | 1280 27 | """ 28 | 29 | name = "Ipv6Frag" 30 | doc = ("Fragments the IPv6 packets at the L3-layer\n" 31 | "ipv6_frag ") 32 | _nb_args = 1 33 | 34 | def parse_args(self, *args): 35 | """See base class.""" 36 | try: 37 | self.fragsize = int(args[0]) 38 | except ValueError: 39 | raise ValueError("Parameter 1 unrecognized. " 40 | "Got {}".format(args[0])) 41 | 42 | def apply(self, pkt_list): 43 | """Fragment each IPv6 packet. See `Mod.apply` for more details.""" 44 | new_pl = PacketList() 45 | 46 | for pkt in pkt_list: 47 | # Checks the packet length to avoid creating atomic fragments 48 | if pkt.pkt.haslayer('IPv6') and len(pkt.pkt) > self.fragsize: 49 | fragments = fragment6(pkt.pkt, self.fragsize) 50 | 51 | index = len(new_pl) - 1 52 | for fragment in fragments: 53 | new_pl.add_packet(fragment) 54 | new_pl.edit_delay(index, pkt.delay) 55 | else: 56 | # Not IPv6 or too small so no fragmentation 57 | new_pl.add_packet(pkt.pkt, pkt.delay) 58 | 59 | return new_pl 60 | -------------------------------------------------------------------------------- /fragscapy/modifications/drop_proba.py: -------------------------------------------------------------------------------- 1 | """Drops each packet with a certain probability.""" 2 | 3 | import random 4 | 5 | from fragscapy.modifications.mod import Mod 6 | 7 | 8 | class DropProba(Mod): 9 | """Drops each packet with a certain probability. 10 | 11 | Delete the packet from the packet list. The parameter is the 12 | probability for each packet to be dropped. 13 | 14 | Args: 15 | *args: The arguments of the mods. 16 | 17 | Attributes: 18 | drop_proba: The probability with which a packet is to be dropped. 19 | None if random. 20 | 21 | Raises: 22 | ValueError: Unrecognized or incorrect number of parameters. 23 | 24 | Examples: 25 | >>> DropProba(0.25).drop_proba 26 | 0.25 27 | """ 28 | 29 | name = "DropProba" 30 | doc = ("Drops each packet with a certain probability.\n" 31 | "dropproba ") 32 | _nb_args = 1 33 | 34 | def parse_args(self, *args): 35 | """See base class.""" 36 | try: 37 | self.drop_proba = float(args[0]) 38 | except ValueError: 39 | raise ValueError("Parameter 1 should be between 0 and 1. " 40 | "Got {}".format(args[0])) 41 | 42 | if self.drop_proba < 0 or self.drop_proba > 1: 43 | raise ValueError("Parameter 1 should be between 0 and 1. " 44 | "Got {}".format(args[0])) 45 | 46 | def is_deterministic(self): 47 | """See base class.""" 48 | return False 49 | 50 | def apply(self, pkt_list): 51 | """Drops each packet with a certain probability. See `Mod.apply` for 52 | more details.""" 53 | # The function to determine if the packet should be kept 54 | condition = lambda _: random.random() < self.drop_proba 55 | # A list of decreasing indexes that should be removed 56 | to_remove = [i for i in range(len(pkt_list)-1, -1, -1) 57 | if condition(pkt_list[i])] 58 | # Remove the indexes (in decreasing order) 59 | for i in to_remove: 60 | pkt_list.remove_packet(i) 61 | 62 | return pkt_list 63 | -------------------------------------------------------------------------------- /docs/source/conf.py: -------------------------------------------------------------------------------- 1 | # Configuration file for the Sphinx documentation builder. 2 | # 3 | # This file only contains a selection of the most common options. 
For a full 4 | # list see the documentation: 5 | # http://www.sphinx-doc.org/en/master/config 6 | 7 | # -- Path setup -------------------------------------------------------------- 8 | 9 | # If extensions (or modules to document with autodoc) are in another directory, 10 | # add these directories to sys.path here. If the directory is relative to the 11 | # documentation root, use os.path.abspath to make it absolute, like shown here. 12 | # 13 | # import os 14 | # import sys 15 | # sys.path.insert(0, os.path.abspath('.')) 16 | 17 | 18 | # -- Project information ----------------------------------------------------- 19 | 20 | project = 'FragScapy' 21 | copyright = '2019, Maël KERVELLA' 22 | author = 'Maël KERVELLA' 23 | master_doc = 'index' 24 | 25 | # The full version, including alpha/beta/rc tags 26 | from fragscapy._version import __version__ 27 | release = __version__ 28 | 29 | 30 | # -- General configuration --------------------------------------------------- 31 | 32 | # Add any Sphinx extension module names here, as strings. They can be 33 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 34 | # ones. 35 | extensions = [ 36 | 'sphinx.ext.napoleon', 37 | ] 38 | 39 | # Add any paths that contain templates here, relative to this directory. 40 | templates_path = ['_templates'] 41 | 42 | # List of patterns, relative to source directory, that match files and 43 | # directories to ignore when looking for source files. 44 | # This pattern also affects html_static_path and html_extra_path. 45 | exclude_patterns = [] 46 | 47 | 48 | # -- Options for HTML output ------------------------------------------------- 49 | 50 | # The theme to use for HTML and HTML Help pages. See the documentation for 51 | # a list of builtin themes. 52 | # 53 | html_theme = 'alabaster' 54 | 55 | # Add any paths that contain custom static files (such as style sheets) here, 56 | # relative to this directory. They are copied after the builtin static files, 57 | # so a file named "default.css" will overwrite the builtin "default.css". 58 | html_static_path = ['_static'] 59 | -------------------------------------------------------------------------------- /fragscapy/modifications/ipv6_atomic_frag.py: -------------------------------------------------------------------------------- 1 | """Fragments the IPv6 packets at the L3-layer and creates atomic fragments.""" 2 | 3 | from fragscapy.modifications.mod import Mod 4 | from fragscapy.modifications.utils import fragment6 5 | from fragscapy.packetlist import PacketList 6 | 7 | 8 | class Ipv6AtomicFrag(Mod): 9 | """Fragments the IPv6 packets at the L3-layer. 10 | 11 | Fragment each IPv6 packet. the fragmentation size must be specified. It 12 | represents the maximum size of each packet (including headers). It uses 13 | the scapy's fragmentation function. 14 | 15 | The only difference with `Ipv6Frag` is that this modification will create 16 | atomic fragments if the packet size is smaller than the fragmentation 17 | size. 18 | 19 | Args: 20 | *args: The arguments of the mods. 21 | 22 | Attributes: 23 | fragsize: The fragmentation size (maximum length of a fragment). 24 | 25 | Raises: 26 | ValueError: Unrecognized or incorrect number of parameters. 
27 | 28 | Examples: 29 | >>> Ipv6AtomicFrag(1280).fragsize # Minimum MTU for IPv6 30 | 1280 31 | """ 32 | 33 | name = "Ipv6AtomicFrag" 34 | doc = ("Fragments the IPv6 packets at the L3-layer and creates atomic " 35 | "fragments\n" 36 | "ipv6_atomic_frag ") 37 | _nb_args = 1 38 | 39 | def parse_args(self, *args): 40 | """See base class.""" 41 | try: 42 | self.fragsize = int(args[0]) 43 | except ValueError: 44 | raise ValueError("Parameter 1 unrecognized. " 45 | "Got {}".format(args[0])) 46 | 47 | def apply(self, pkt_list): 48 | """Fragment each IPv6 packet. See `Mod.apply` for more details.""" 49 | new_pl = PacketList() 50 | 51 | for pkt in pkt_list: 52 | if pkt.pkt.haslayer('IPv6'): 53 | fragments = fragment6(pkt.pkt, self.fragsize) 54 | 55 | index = len(new_pl) - 1 56 | for fragment in fragments: 57 | new_pl.add_packet(fragment) 58 | new_pl.edit_delay(index, pkt.delay) 59 | else: 60 | # Not IPv6 so no fragmentation 61 | new_pl.add_packet(pkt.pkt, pkt.delay) 62 | 63 | return new_pl 64 | -------------------------------------------------------------------------------- /fragscapy/modifications/drop_one.py: -------------------------------------------------------------------------------- 1 | """Drops one of the packets.""" 2 | 3 | import random 4 | 5 | from fragscapy.modifications.mod import Mod 6 | 7 | 8 | class DropOne(Mod): 9 | """Drops one of the packets. 10 | 11 | Delete the packet from the packet list. Can be either the first one, the 12 | last one, a random one or a specific one (by id). 13 | 14 | Args: 15 | *args: The arguments of the mods. 16 | 17 | Attributes: 18 | drop_index: The index to drop. None if random. 19 | 20 | Raises: 21 | ValueError: Unrecognized or incorrect number of parameters. 22 | 23 | Examples: 24 | >>> DropOne("first").drop_index 25 | 0 26 | >>> DropOne(42).drop_index 27 | 42 28 | """ 29 | 30 | name = "DropOne" 31 | doc = ("Drop one of the packets.\n" 32 | "dropone {first|last|random|}") 33 | _nb_args = 1 34 | 35 | def parse_args(self, *args): 36 | """See base class.""" 37 | self.drop_index = None 38 | if args[0] == "first": 39 | self.drop_index = 0 40 | elif args[0] == "last": 41 | self.drop_index = -1 42 | elif args[0] == "random": 43 | pass # Drop index will be calculated later 44 | else: 45 | try: 46 | self.drop_index = int(args[0]) 47 | except ValueError: 48 | raise ValueError("Parameter 1 unrecognized. " 49 | "Got {}".format(args[0])) 50 | 51 | def is_deterministic(self): 52 | """See base class.""" 53 | return self.drop_index is not None # i.e. not random 54 | 55 | def apply(self, pkt_list): 56 | """Drops one packet. 
See `Mod.apply` for more details.""" 57 | l = len(pkt_list) 58 | if not l: # Avoid the trivial case 59 | return pkt_list 60 | 61 | i = self.drop_index 62 | 63 | if i is None: # Random 64 | if l == 1: # Avoid the case of randint(0, 0) 65 | i = 0 66 | else: 67 | i = random.randint(-l, l-1) 68 | 69 | if -l <= i <= l-1: 70 | pkt_list.remove_packet(i) 71 | 72 | return pkt_list 73 | 74 | def get_params(self): 75 | """See base class.""" 76 | return {k: v if v is not None else "random" 77 | for k, v in super(DropOne, self).get_params().items()} 78 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | 2 | # Created by https://www.gitignore.io/api/python 3 | # Edit at https://www.gitignore.io/?templates=python 4 | 5 | ### Python ### 6 | # Byte-compiled / optimized / DLL files 7 | __pycache__/ 8 | *.py[cod] 9 | *$py.class 10 | 11 | # C extensions 12 | *.so 13 | 14 | # Distribution / packaging 15 | .Python 16 | build/ 17 | develop-eggs/ 18 | dist/ 19 | downloads/ 20 | eggs/ 21 | .eggs/ 22 | lib/ 23 | lib64/ 24 | parts/ 25 | sdist/ 26 | var/ 27 | wheels/ 28 | pip-wheel-metadata/ 29 | share/python-wheels/ 30 | *.egg-info/ 31 | .installed.cfg 32 | *.egg 33 | MANIFEST 34 | 35 | # PyInstaller 36 | # Usually these files are written by a python script from a template 37 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 38 | *.manifest 39 | *.spec 40 | 41 | # Installer logs 42 | pip-log.txt 43 | pip-delete-this-directory.txt 44 | 45 | # Unit test / coverage reports 46 | htmlcov/ 47 | .tox/ 48 | .nox/ 49 | .coverage 50 | .coverage.* 51 | .cache 52 | nosetests.xml 53 | coverage.xml 54 | *.cover 55 | .hypothesis/ 56 | .pytest_cache/ 57 | 58 | # Translations 59 | *.mo 60 | *.pot 61 | 62 | # Django stuff: 63 | *.log 64 | local_settings.py 65 | db.sqlite3 66 | 67 | # Flask stuff: 68 | instance/ 69 | .webassets-cache 70 | 71 | # Scrapy stuff: 72 | .scrapy 73 | 74 | # Sphinx documentation 75 | docs/_build/ 76 | 77 | # PyBuilder 78 | target/ 79 | 80 | # Jupyter Notebook 81 | .ipynb_checkpoints 82 | 83 | # IPython 84 | profile_default/ 85 | ipython_config.py 86 | 87 | # pyenv 88 | .python-version 89 | 90 | # pipenv 91 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 92 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 93 | # having no cross-platform support, pipenv may install dependencies that don’t work, or not 94 | # install all needed dependencies. 
95 | #Pipfile.lock 96 | 97 | # celery beat schedule file 98 | celerybeat-schedule 99 | 100 | # SageMath parsed files 101 | *.sage.py 102 | 103 | # Environments 104 | .env 105 | .venv 106 | env/ 107 | venv/ 108 | ENV/ 109 | env.bak/ 110 | venv.bak/ 111 | 112 | # Spyder project settings 113 | .spyderproject 114 | .spyproject 115 | 116 | # Rope project settings 117 | .ropeproject 118 | 119 | # mkdocs documentation 120 | /site 121 | 122 | # mypy 123 | .mypy_cache/ 124 | .dmypy.json 125 | dmypy.json 126 | 127 | # Pyre type checker 128 | .pyre/ 129 | 130 | # End of https://www.gitignore.io/api/python 131 | 132 | # vim swap files 133 | *.swp 134 | -------------------------------------------------------------------------------- /fragscapy/modifications/duplicate.py: -------------------------------------------------------------------------------- 1 | """Duplicates one of the packets.""" 2 | 3 | import random 4 | 5 | from fragscapy.modifications.mod import Mod 6 | 7 | 8 | class Duplicate(Mod): 9 | """Duplicates one of the packets. 10 | 11 | The duplicate is placed juste after the original one in the list. Can be 12 | either the first one, the last one, a random one or a specific one (by 13 | id). 14 | 15 | Args: 16 | *args: The arguments of the mods. 17 | 18 | Attributes: 19 | duplicate_index: The index to duplicate. None if random. 20 | 21 | Raises: 22 | ValueError: Unrecognized or incorrect number of parameters. 23 | 24 | Examples: 25 | >>> Duplicate("first").duplicate_index 26 | 0 27 | >>> Duplicate(18).duplicate_index 28 | 18 29 | """ 30 | 31 | name = "Duplicate" 32 | doc = ("Duplicate one of the packets.\n" 33 | "duplicate {first|last|random|}") 34 | _nb_args = 1 35 | 36 | def parse_args(self, *args): 37 | """See base class.""" 38 | self.duplicate_index = None 39 | if args[0] == "first": 40 | self.duplicate_index = 0 41 | elif args[0] == "last": 42 | self.duplicate_index = -1 43 | elif args[0] == "random": 44 | pass # Duplicate index will be calculated later 45 | else: 46 | try: 47 | self.duplicate_index = int(args[0]) 48 | except ValueError: 49 | raise ValueError("Parameter 1 unrecognized. " 50 | "Got {}".format(args[0])) 51 | 52 | def is_deterministic(self): 53 | """See base class.""" 54 | return self.duplicate_index is not None # i.e. not random 55 | 56 | def apply(self, pkt_list): 57 | """Duplicates one packet. 
See `Mod.apply` for more details."""
58 |         l = len(pkt_list)
59 |         if not l:  # Avoid the trivial case
60 |             return pkt_list
61 |
62 |         i = self.duplicate_index
63 |
64 |         if i is None:  # Random
65 |             if l == 1:  # Avoid the case of randint(0, 0)
66 |                 i = 0
67 |             else:
68 |                 i = random.randint(-l, l-1)
69 |
70 |         if -l <= i <= l-1:
71 |             duplicate_packet = pkt_list[i].pkt.copy()
72 |             pkt_list.insert_packet(i, duplicate_packet)
73 |
74 |         return pkt_list
75 |
76 |     def get_params(self):
77 |         """See base class."""
78 |         return {k: v if v is not None else "random"
79 |                 for k, v in super(Duplicate, self).get_params().items()}
80 |
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | import os
4 | try:
5 |     from setuptools import setup, find_packages
6 | except ImportError:
7 |     from distutils.core import setup
8 |
9 |     def find_packages(where='.'):
10 |         # os.walk -> list[(dirname, list[subdirs], list[files])]
11 |         return [folder.replace("/", ".").lstrip(".")
12 |                 for (folder, _, fils) in os.walk(where)
13 |                 if "__init__.py" in fils]
14 | from io import open as io_open
15 |
16 | # Get version from fragscapy/_version.py
17 | __version__ = None
18 | src_dir = os.path.abspath(os.path.dirname(__file__))
19 | version_file = os.path.join(src_dir, 'fragscapy', '_version.py')
20 | with io_open(version_file, mode='r') as fh:
21 |     exec(fh.read())
22 |
23 | requirements = []
24 | with io_open("requirements.txt", mode='r') as fh:
25 |     requirements = [line.strip().split('#', 1)[0].strip()
26 |                     for line in fh.readlines()]
27 |
28 | requirements_doc = []
29 | with io_open("requirements-doc.txt", mode='r') as fh:
30 |     requirements_doc = [line.strip().split('#', 1)[0].strip()
31 |                         for line in fh.readlines()]
32 |
33 | long_description = ""
34 | with io_open("README.md", mode='r') as fh:
35 |     long_description = fh.read()
36 |
37 | setup(name='fragscapy',
38 |       version=__version__,
39 |       author='Maël Kervella',
40 |       author_email='dev@maelkervella.eu',
41 |       description="Catch and modify network packets on the fly with Scapy",
42 |       long_description=long_description,
43 |       long_description_content_type="text/markdown",
44 |       license="MIT License",
45 |       url='https://github.com/AMOSSYS/Fragscapy',
46 |       packages=['fragscapy'] + ['fragscapy.'+i for i in find_packages('fragscapy')],
47 |       install_requires=requirements,
48 |       extras_require={'doc': requirements_doc},
49 |       package_data={
50 |           'fragscapy': ['README.md', 'LICENSE.txt', 'config_examples/*'],
51 |       },
52 |       entry_points={
53 |           'console_scripts': [
54 |               'fragscapy=fragscapy:main',
55 |           ],
56 |       },
57 |       platforms=['Linux'],
58 |       classifiers=[
59 |           "Development Status :: 4 - Beta",
60 |           "Environment :: Console",
61 |           "Intended Audience :: Developers",
62 |           "Intended Audience :: Information Technology",
63 |           "Intended Audience :: Science/Research",
64 |           "Intended Audience :: System Administrators",
65 |           "Intended Audience :: Telecommunications Industry",
66 |           "License :: OSI Approved :: MIT License",
67 |           "Operating System :: POSIX :: Linux",
68 |           "Programming Language :: Python :: 3",
69 |           "Topic :: Internet",
70 |           "Topic :: Security",
71 |           "Topic :: System :: Networking",
72 |           "Topic :: System :: Networking :: Monitoring",
73 |       ],
74 |       keywords='scapy fragroute nfqueue firewall evaluation network packets',
75 |       )
76 |
--------------------------------------------------------------------------------
/fragscapy/modifications/ipv4_overlap.py:
--------------------------------------------------------------------------------
1 | """Creates overlapping fragments of the packets."""
2 |
3 | import random
4 |
5 | import scapy.layers.inet
6 |
7 | from fragscapy.modifications.mod import Mod
8 | from fragscapy.packetlist import PacketList
9 |
10 |
11 | class Ipv4Overlap(Mod):
12 |     """Creates overlapping fragments of the packets.
13 |
14 |     Args:
15 |         *args: The argument of the mods.
16 |
17 |     Attributes:
18 |         fragsize: The fragmentation size (maximum length of a fragment if
19 |             there was no overlapping).
20 |         overlapsize: The size of the random_data added that will overlap.
21 |
22 |     Raises:
23 |         ValueError: Unrecognized or incorrect number of parameters.
24 |
25 |     Examples:
26 |         >>> Ipv4Overlap(64, 32).fragsize
27 |         64
28 |         >>> Ipv4Overlap(64, 32).overlapsize
29 |         32
30 |     """
31 |
32 |     name = "Ipv4Overlap"
33 |     doc = ("Creates overlapping fragments of the packets.\n"
34 |            "ipv4overlap <fragsize> <overlapsize>\n"
35 |            "  - 'fragsize' is the fragmentation size in octets to use\n"
36 |            "    (not the size of the final packets but the size of the\n"
37 |            "    packets as if there was no overlapping)\n"
38 |            "  - 'overlapsize' is the size in octets of random data that\n"
39 |            "    overlaps\n"
40 |            "The final size of the packets is 'fragsize + overlapsize'.")
41 |
42 |     def parse_args(self, *args):
43 |         try:
44 |             self.fragsize = int(args[0])
45 |         except ValueError:
46 |             raise ValueError("Parameter 1 unrecognized. "
47 |                              "Got '{}'".format(args[0]))
48 |         if self.fragsize < 0:
49 |             raise ValueError("'fragsize' should be positive. "
50 |                              "Got '{}'".format(self.fragsize))
51 |
52 |         try:
53 |             self.overlap = int(args[1])
54 |         except ValueError:
55 |             raise ValueError("Parameter 2 unrecognized. "
56 |                              "Got '{}'".format(args[1]))
57 |         if self.overlap < 0:
58 |             raise ValueError("'overlap' should be positive. "
59 |                              "Got '{}'".format(self.overlap))
60 |
61 |
62 |     def apply(self, pkt_list):
63 |         new_pl = PacketList()
64 |
65 |         for pkt in pkt_list:
66 |             if pkt.pkt.haslayer('IP'):
67 |                 fragments = scapy.layers.inet.fragment(pkt.pkt, self.fragsize)
68 |
69 |                 index = len(new_pl) - 1
70 |                 for fragment in fragments:
71 |                     random_data = bytes(random.randrange(0, 0xff)
72 |                                         for _ in range(self.overlap))
73 |                     fragment = fragment/random_data
74 |                     new_pl.add_packet(fragment)
75 |                 new_pl.edit_delay(index, pkt.delay)
76 |             else:
77 |                 new_pl.add_packet(pkt.pkt, pkt.delay)
78 |
79 |         return new_pl
80 |
--------------------------------------------------------------------------------
/fragscapy/modifications/ipv6_overlap.py:
--------------------------------------------------------------------------------
1 | """Creates overlapping fragments of the packets."""
2 |
3 | import random
4 |
5 | from fragscapy.modifications.mod import Mod
6 | from fragscapy.modifications.utils import fragment6
7 | from fragscapy.packetlist import PacketList
8 |
9 |
10 | class Ipv6Overlap(Mod):
11 |     """Creates overlapping fragments of the packets.
12 |
13 |     Args:
14 |         *args: The argument of the mods.
15 |
16 |     Attributes:
17 |         fragsize: The fragmentation size (maximum length of a fragment if
18 |             there was no overlapping).
19 |         overlapsize: The size of the random_data added that will overlap.
20 |
21 |     Raises:
22 |         ValueError: Unrecognized or incorrect number of parameters.
23 | 24 | Examples: 25 | >>> Ipv6Overlap(64, 32).fragsize 26 | 64 27 | >>> Ipv6Overlap(64, 32).overlapsize 28 | 32 29 | """ 30 | 31 | name = "Ipv6Overlap" 32 | doc = ("Creates overlapping fragments of the packets.\n" 33 | "ipv6overlap \n" 34 | " - 'fragsize' is the fragmentation size in octets to use\n" 35 | " (not the size of the final packets but the size of the\n" 36 | " packets as if there was no overlapping)\n" 37 | " - 'overlapsize' is the size in octets of random data that\n" 38 | " overlaps\n" 39 | "The final size of the packets is 'fragsize + overlapsize'.") 40 | 41 | def parse_args(self, *args): 42 | try: 43 | self.fragsize = int(args[0]) 44 | except ValueError: 45 | raise ValueError("Parameter 1 unrecognized. " 46 | "Got '{}'".format(args[0])) 47 | if self.fragsize < 0: 48 | raise ValueError("'fragsize' should be positive." 49 | "Got '{}'".format(self.fragsize)) 50 | 51 | try: 52 | self.overlap = int(args[1]) 53 | except ValueError: 54 | raise ValueError("Parameter 2 unrecognized. " 55 | "Got '{}'".format(args[1])) 56 | if self.overlap < 0: 57 | raise ValueError("'overlap' should be positive." 58 | "Got '{}'".format(self.overlap)) 59 | 60 | 61 | def apply(self, pkt_list): 62 | new_pl = PacketList() 63 | 64 | for pkt in pkt_list: 65 | if pkt.pkt.haslayer('IPv6'): 66 | fragments = fragment6(pkt.pkt, self.fragsize) 67 | 68 | index = len(new_pl) - 1 69 | for fragment in fragments: 70 | random_data = bytes(random.randrange(0, 0xff) 71 | for _ in range(self.overlap)) 72 | fragment = fragment/random_data 73 | new_pl.add_packet(fragment) 74 | new_pl.edit_delay(index, pkt.delay) 75 | else: 76 | new_pl.add_packet(fragment, pkt.delay) 77 | 78 | return new_pl 79 | -------------------------------------------------------------------------------- /config_examples/00_template.json: -------------------------------------------------------------------------------- 1 | { 2 | "cmd": "The command to run everytime (absolute path prefered). {i} and {j} can be used in the command, they will be replace by the number of the current test and the number of the current iteration of this test", 3 | 4 | "nfrules": [ 5 | "This contains 1 object per rule to add", 6 | { 7 | "output_chain": "Set to false to not add to the OUTPUT chain", 8 | "input_chain": "Set to false to not add to the INPUT chain", 9 | "proto": "The proto to filter on, see iptables doc for format", 10 | "host": "The ipv4 or hostname to filter on, see iptables doc for format", 11 | "host6": "The ipv6 to filter on, see iptables doc for format", 12 | "port": "The tcp/udp port to filter on, see iptables doc for format", 13 | "ipv4": "Set to false to disable rules on IPv4 (iptables)", 14 | "ipv6": "Set to false to disable rules on IPv6 (ip6tables)", 15 | "qnum": "The NFQUEUE number to use, should be even" 16 | } 17 | ], 18 | 19 | "input": [ 20 | "This contains 1 object per modification to apply on the INPUT chain", 21 | { 22 | "mod_name": "Modification name (used to load the module)", 23 | "mod_opts": "Some options to pass to the mod (see mod usage)", 24 | "optional": "Set to true to make the mod optional. The mod is required if not specified." 
25 | }, 26 | { 27 | "mod_name": "Mod with 1 integer as a parameter", 28 | "mod_opts": 20 29 | }, 30 | { 31 | "mod_name": "Mod with 1 string as a parameter", 32 | "mod_opts": "The string" 33 | }, 34 | { 35 | "mod_name": "Mod with 3 parameters", 36 | "mod_opts": ["param1", 2, "param3"] 37 | }, 38 | { 39 | "mod_name": "Mod with 0 parameters" 40 | } 41 | ], 42 | 43 | "output": [ 44 | "This contains 1 object per modification to apply on the OUTPUT chain", 45 | { 46 | "mod_name": "Use the python 'None' value.", 47 | "mod_opts": "none" 48 | }, 49 | { 50 | "mod_name": "Force an integer as 1 parameter (same as using an int directly)", 51 | "mod_opts": "int 42" 52 | }, 53 | { 54 | "mod_name": "Force a string as 1 parameter (same as using a str directly with no prefix)", 55 | "mod_opts": "str string" 56 | }, 57 | { 58 | "mod_name": "Specify multiple possible integer as 1 parameter", 59 | "mod_opts": "seq_int 0 2 4 6 8" 60 | }, 61 | { 62 | "mod_name": "Specify a range of possible integer as 1 parameter", 63 | "mod_opts": "range " 64 | }, 65 | { 66 | "mod_name": "Specify a range of possible integer as 1 parameter", 67 | "mod_opts": "range 10 18 2" 68 | }, 69 | { 70 | "mod_name": "Specify a range of possible integer as 1 parameter", 71 | "mod_opts": "range 5" 72 | }, 73 | { 74 | "mod_name": "Specify multiple possible strings as 1 parameter", 75 | "mod_opts": "seq_str string1 string2 string3" 76 | }, 77 | { 78 | "mod_name": "Mixing multiple parameters", 79 | "mod_opts": ["range 5 10", "seq_str foo bar", 5, "int 423"] 80 | } 81 | ] 82 | } 83 | -------------------------------------------------------------------------------- /fragscapy/_version.py: -------------------------------------------------------------------------------- 1 | """Fetches the version of Fragscapy from the commit hash. 2 | 3 | Defines the contant `__version__` based on a version number and the current 4 | commit hash (if it can be found). 5 | This script was mostly "inspired" from `tdqm`'s own script to determine the 6 | version. 
7 | """ 8 | 9 | import os 10 | import io 11 | 12 | 13 | __all__ = ["__version__"] 14 | 15 | # major, minor, -extra 16 | VERSION_INFO = 1, 1 17 | 18 | 19 | def get_version(): 20 | """Computes the version based on the version info and the git hash.""" 21 | version = '.'.join(map(str, VERSION_INFO)) 22 | 23 | # auto -extra based on commit hash (if not tagged as release) 24 | scriptdir = os.path.dirname(__file__) 25 | gitdir = os.path.abspath(os.path.join(scriptdir, "..", ".git")) 26 | if os.path.isdir(gitdir): 27 | extra = None 28 | # Open config file to check if we are in tqdm project 29 | with io.open(os.path.join(gitdir, "config"), 'r') as fh_config: 30 | if 'fragscapy' in fh_config.read(): 31 | # Open the HEAD file 32 | with io.open(os.path.join(gitdir, "HEAD"), 'r') as fh_head: 33 | extra = fh_head.readline().strip() 34 | # in a branch => HEAD points to file containing last commit 35 | if 'ref:' in extra: 36 | # reference file path 37 | ref_file = extra[5:] 38 | branch_name = ref_file.rsplit('/', 1)[-1] 39 | 40 | ref_file_path = os.path.abspath(os.path.join( 41 | gitdir, ref_file 42 | )) 43 | # check that we are in git folder 44 | # (by stripping the git folder from the ref file path) 45 | if os.path.relpath( 46 | ref_file_path, gitdir 47 | ).replace('\\', '/') != ref_file: 48 | # out of git folder 49 | extra = None 50 | else: 51 | # open the ref file 52 | with io.open(ref_file_path, 'r') as fh_branch: 53 | commit_hash = fh_branch.readline().strip() 54 | extra = commit_hash[:8] 55 | if branch_name != "master": 56 | extra += '.' + branch_name 57 | 58 | # detached HEAD mode, already have commit hash 59 | else: 60 | extra = extra[:8] 61 | 62 | # Append commit hash (and branch) to version string if not tagged 63 | if extra is not None: 64 | try: 65 | with io.open(os.path.join(gitdir, "refs", "tags", 66 | 'v' + version)) as fdv: 67 | if fdv.readline().strip()[:8] != extra[:8]: 68 | version += '-' + extra 69 | except FileNotFoundError: 70 | version += '-' + extra 71 | 72 | return version 73 | 74 | 75 | # Nice string for the version 76 | __version__ = get_version() 77 | -------------------------------------------------------------------------------- /fragscapy/modifications/field.py: -------------------------------------------------------------------------------- 1 | """Modifies any field of a specific layer in a packet. Only applied if the 2 | layer and the field exists in the packet.""" 3 | 4 | import scapy.layers.all 5 | 6 | from fragscapy.modifications.mod import Mod 7 | 8 | 9 | class Field(Mod): 10 | """Modifies any field of a specific layer in a packet. 11 | 12 | This modification is only applied if both the required layer and the 13 | field exists in the packet. 14 | 15 | The name of the layer and the field refers to the name used by Scapy. 16 | 17 | Args: 18 | *args: The arguments of the mods. 19 | 20 | Attributes: 21 | layer_name: The name of the layer to look for. 22 | field_name: The name of the field to look for. 23 | randval: The volatile random object used by Scapy. 24 | value: The new value to insert. None if random. 25 | 26 | Raises: 27 | ValueError: Unrecognized or incorrect number of parameters. 
28 | 29 | Examples: 30 | >>> TcpSport(1234).sport 31 | 1234 32 | """ 33 | 34 | name = "Field" 35 | doc = ("Modifies any field of a specific layer in a packet.\n" 36 | "field {random|}") 37 | _nb_args = 3 38 | 39 | def parse_args(self, *args): 40 | """See base class.""" 41 | self.layer_name = args[0] 42 | self.field_name = args[1] 43 | 44 | layer_class = getattr(scapy.layers.all, self.layer_name) 45 | fieldtype = layer_class().fieldtype[self.field_name] 46 | self.randval = fieldtype.randval() 47 | 48 | self._random = False 49 | if args[2] == "random": 50 | self._random = True 51 | self.value = None # Exact value will be calculated later 52 | else: 53 | self.value = args[2] 54 | try: 55 | if (self.value > self.randval.max 56 | or self.value < self.randval.min): 57 | raise ValueError( 58 | "Parameter 3 must be beetween {} and {}. Got {}" 59 | .format(self.randval.min, self.randval.max, self.value) 60 | ) 61 | except (TypeError, AttributeError): 62 | # self.value cannot be compared 63 | pass 64 | 65 | def is_deterministic(self): 66 | """See base class.""" 67 | return not self._random 68 | 69 | def apply(self, pkt_list): 70 | """Modifies any field of a specific layer in a packet. See `Mod.apply` 71 | for more details.""" 72 | value = self.value 73 | if self._random: 74 | value = self.randval._fix() # pylint: disable=protected-access 75 | 76 | for pkt in pkt_list: 77 | if pkt.pkt.haslayer(self.layer_name): 78 | layer = pkt.pkt.getlayer(self.layer_name) 79 | try: 80 | layer.setfieldval(self.field_name, value) 81 | except AttributeError: 82 | pass # The field name does not exists 83 | 84 | return pkt_list 85 | 86 | def get_params(self): 87 | """See base class.""" 88 | return { 89 | 'layer_name': self.layer_name, 90 | 'field_name': self.field_name, 91 | 'value': self.value if not self._random else "random", 92 | } 93 | -------------------------------------------------------------------------------- /fragscapy/modifications/delay.py: -------------------------------------------------------------------------------- 1 | """Adds delay before one or all the packets.""" 2 | 3 | import enum 4 | import random 5 | 6 | from fragscapy.modifications.mod import Mod 7 | 8 | 9 | METHOD = enum.Enum("METHOD", "FIRST LAST RANDOM ID ALL") 10 | 11 | 12 | class Delay(Mod): 13 | """Adds delay before one or all the packets. 14 | 15 | This modification can add delay to one of the packets (specified either by 16 | 'first', 'last', 'random' or id) or to each one of the packets 17 | (specified by 'all'). 18 | 19 | Note that if the first packet is delayed, all the following packets are 20 | automatically delayed because they are send after the first one. 21 | 22 | Args: 23 | *args: The arguments of the mods. 24 | 25 | Attributes: 26 | delay_all: True if all packets are to be delayed 27 | delay_index: The index of the packet to delay 28 | delay: the delay to add in seconds 29 | 30 | Raises: 31 | ValueError: Unrecognized or incorrect number of parameters. 
32 | 33 | Examples: 34 | >>> Delay("first", 3).delay_index 35 | 0 36 | >>> Delay("first", 3).delay 37 | 3 38 | >>> Delay("all", 1).delay_all 39 | True 40 | """ 41 | 42 | name = "Delay" 43 | doc = ("Add some delay (in seconds) before one of or all the packets.\n" 44 | "delay {first|last|random|all|} ") 45 | _nb_args = 2 46 | 47 | def parse_args(self, *args): 48 | """See base class.""" 49 | self.delay_index = None 50 | self.delay_all = False 51 | 52 | # Parse arg1 53 | if args[0] == "first": 54 | self.delay_index = 0 55 | elif args[0] == "last": 56 | self.delay_index = -1 57 | elif args[0] == "random": 58 | pass # Drop index will be calculated later 59 | elif args[0] == "all": 60 | self.delay_all = True 61 | else: 62 | try: 63 | self.delay_index = int(args[0]) 64 | except ValueError: 65 | raise ValueError("Parameter 1 unrecognized. " 66 | "Got {}".format(args[0])) 67 | 68 | # Parse arg2 69 | try: 70 | self.delay = float(args[1]) 71 | except ValueError: 72 | raise ValueError("Parameter 2 unrecognized. " 73 | "Got {}".format(args[1])) 74 | 75 | def is_deterministic(self): 76 | """See base class.""" 77 | return self.delay_all or self.delay_index is not None 78 | 79 | def apply(self, pkt_list): 80 | """Delays the correct packet(s). See `Mod.apply` for more details.""" 81 | l = len(pkt_list) 82 | if not l: # Avoid the trivial case 83 | return pkt_list 84 | 85 | if self.delay_all: 86 | for i in range(l): 87 | pkt_list.edit_delay(i, self.delay) 88 | else: 89 | i = self.delay_index 90 | 91 | if i is None: # Random 92 | if l == 1: # Avoid the case of randint(0, 0) 93 | i = 0 94 | else: 95 | i = random.randint(-l, l-1) 96 | 97 | if -l <= i <= l-1: 98 | pkt_list.edit_delay(i, self.delay) 99 | 100 | return pkt_list 101 | 102 | def get_params(self): 103 | """See base class.""" 104 | return {k: v if v is not None else "random" 105 | for k, v in super(Delay, self).get_params().items()} 106 | -------------------------------------------------------------------------------- /fragscapy/modifications/tcp_overlap.py: -------------------------------------------------------------------------------- 1 | """Segments the TCP packets at the L4-layer.""" 2 | 3 | import random 4 | 5 | from fragscapy.modifications.mod import Mod 6 | from fragscapy.modifications.utils import tcp_segment 7 | from fragscapy.packetlist import PacketList 8 | 9 | 10 | class TcpOverlap(Mod): 11 | """Creates overlapping TCP segments. 12 | 13 | Args: 14 | *args: The arguments of the mods. 15 | 16 | Attributes: 17 | segmentsize: The segmentation size (bytes of TCP data to use). 18 | overlapsize: The size of the random_data that will be added 19 | append_before: True if the random_data should be added before the 20 | packet. 21 | 22 | Raises: 23 | ValueError: Unrecognized or incorrect number of parameters. 24 | 25 | Examples: 26 | >>> TcpOverlap(32, 8, "before").segmentsize 27 | 32 28 | >>> TcpOverlap(32, 8, "before").overlapsize 29 | 8 30 | >>> TcpOverlap(32, 9, "before").append_before 31 | True 32 | """ 33 | 34 | name = "TcpOverlap" 35 | doc = ("Creates overlapping TCP segments\n" 36 | "tcp_segment \n" 37 | " - 'segmentsize' is the segmentation size in octets to use\n" 38 | " (i.e. 
the size of the TCP payload per segment)\n" 39 | " - 'overlapsize' is the size in octets of random data that\n" 40 | " overlaps\n" 41 | " - 'append' is either 'before' or 'after' and indicates where to\n" 42 | " add the random data that overlaps.\n" 43 | "The final size of the TCP payload is 'segmentsize + overlapsize'") 44 | _nb_args = 3 45 | 46 | def parse_args(self, *args): 47 | """See base class.""" 48 | try: 49 | self.segmentsize = int(args[0]) 50 | except ValueError: 51 | raise ValueError("Parameter 1 unrecognized. " 52 | "Got '{}'".format(args[0])) 53 | if self.segmentsize < 0: 54 | raise ValueError("'segmentsize' should be positive. " 55 | "Got '{}'".format(self.segmentsize)) 56 | 57 | try: 58 | self.overlapsize = int(args[1]) 59 | except ValueError: 60 | raise ValueError("Parameter 2 unrecognized. " 61 | "Got '{}'".format(args[1])) 62 | if self.overlapsize < 0: 63 | raise ValueError("'overlapsize' should be positive. " 64 | "Got '{}'".format(self.overlapsize)) 65 | 66 | if args[2].lower() == "after": 67 | self.append_before = False 68 | elif args[2].lower() == "before": 69 | self.append_before = True 70 | else: 71 | raise ValueError("'append' should be either 'after' or 'before'. " 72 | "Got '{}'".format(args[2])) 73 | 74 | def apply(self, pkt_list): 75 | """Segments each TCP packet. See `Mod.apply` for more details.""" 76 | new_pl = PacketList() 77 | 78 | for pkt in pkt_list: 79 | if pkt.pkt.haslayer('TCP'): 80 | random_data = bytes(random.randrange(0, 0xff) 81 | for _ in range(self.overlapsize)) 82 | segments = tcp_segment(pkt.pkt, self.segmentsize, 83 | random_data, self.append_before) 84 | 85 | index = len(new_pl) 86 | for segment in segments: 87 | new_pl.add_packet(segment) 88 | new_pl.edit_delay(index, pkt.delay) 89 | else: 90 | # Not TCP so no segmentation 91 | new_pl.add_packet(pkt.pkt, pkt.delay) 92 | 93 | return new_pl 94 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | .PHONY: 2 | help 3 | buildclean 4 | pylintclean 5 | compileclean 6 | docclean 7 | clean 8 | pylint 9 | pylint-reports 10 | pyreverse 11 | pyreverse-mod 12 | dependencies 13 | build 14 | install 15 | build-dev 16 | dependencies-dev 17 | install-dev 18 | dependencies-doc 19 | build-doc 20 | 21 | # Help: display the main commands 22 | help: 23 | @echo "Makefile for fragscapy. Most used commands:" 24 | @echo "  make install           install Fragscapy" 25 | @echo "  make install-dev       install Fragscapy in dev mode" 26 | @echo "  make build-doc         build the documentation" 27 | @echo "  make clean             cleanup the non-necessary files" 28 | @echo "  make pylint            evaluate the code quality with pylint" 29 | @echo "  make pylint-reports    show the pylint reports" 30 | @echo "  make pyreverse         generate UML diagram of code without the mods" 31 | @echo "  make pyreverse-mod     generate UML diagram of code with the mods" 32 | 33 | # Clean commands: clean different kind of files each 34 | buildclean: 35 | @echo "Deleting build files" 36 | @rm -Rf build 37 | @rm -Rf dist 38 | @rm -Rf fragscapy.egg-info 39 | pylintclean: 40 | @echo "Deleting pylint files" 41 | @find . -type d -path './fragscapy*/__pycache__' -exec rm -Rf {} + 42 | @rm -f classes_fragscapy.dot packages_fragscapy.dot classes_fragscapy.svg packages_fragscapy.svg 43 | compileclean: 44 | @echo "Deleting compiled files" 45 | @find . 
-type f -path './fragscapy/*.py[co]' -delete 46 | docclean: 47 | @echo "Deleting documentation" 48 | @rm -rf docs/_build 49 | @find docs/source/ -not -path 'docs/source/' -not -path 'docs/source/_templates' -not -path 'docs/source/_templates/.placeholder' -not -path 'docs/source/index.rst' -not -path 'docs/source/conf.py' -not -path 'docs/source/_static' -not -path 'docs/source/_static/.placeholder' -print0 | xargs -0 rm -f -- 50 | clean: buildclean pylintclean compileclean docclean 51 | 52 | # Pylint-related commands 53 | pylint: 54 | @if ! command -v pylint > /dev/null; then echo "Pylint not found, run 'make dependencies-dev'."; exit 1; fi 55 | @pylint fragscapy; exit 0 56 | pylint-reports: 57 | @if ! command -v pylint > /dev/null; then echo "Pylint not found, run 'make dependencies-dev'."; exit 1; fi 58 | @pylint fragscapy --reports=y; exit 0 59 | pyreverse: 60 | @if ! command -v pyreverse > /dev/null; then echo "Pyreverse not found, run 'make dependencies-dev'."; exit 1; fi 61 | @if ! command -v dot > /dev/null; then echo "dot not found, install 'graphviz'"; exit 1; fi 62 | @find fragscapy/ -type f -not -path 'fragscapy/modifications/*' | xargs pyreverse -p fragscapy -- fragscapy/modifications/__init__.py fragscapy/modifications/mod.py fragscapy/modifications/utils.py 63 | @dot -Tsvg classes_fragscapy.dot > classes_fragscapy.svg 64 | @dot -Tsvg packages_fragscapy.dot > packages_fragscapy.svg 65 | @echo "Generated files 'classes_fragscapy.svg' and 'packages_fragscapy.svg'" 66 | pyreverse-mod: 67 | @if ! command -v pyreverse > /dev/null; then echo "Pyreverse not found, run 'make dependencies-dev'."; exit 1; fi 68 | @if ! command -v dot > /dev/null; then echo "dot not found, install 'graphviz'"; exit 1; fi 69 | @pyreverse -p fragscapy fragscapy/ 70 | @dot -Tsvg classes_fragscapy.dot > classes_fragscapy.svg 71 | @dot -Tsvg packages_fragscapy.dot > packages_fragscapy.svg 72 | @echo "Generated files 'classes_fragscapy.svg' and 'packages_fragscapy.svg'" 73 | 74 | # Standard install 75 | dependencies: 76 | pip3 install wheel 77 | pip3 install -r requirements.txt 78 | build: buildclean dependencies 79 | ./setup.py sdist bdist_wheel 80 | install: build 81 | ./setup.py install 82 | 83 | # Development install 84 | dependencies-dev: dependencies 85 | pip3 install wheel 86 | pip3 install -r requirements-dev.txt 87 | build-dev: buildclean dependencies-dev 88 | ./setup.py sdist bdist_wheel 89 | install-dev: build-dev 90 | ./setup.py develop --uninstall 91 | ./setup.py develop 92 | 93 | # Documentation building 94 | dependencies-doc: dependencies 95 | pip3 install wheel 96 | pip3 install -r requirements-doc.txt 97 | build-doc: dependencies-doc docclean 98 | mkdir docs/_build 99 | sphinx-apidoc -f -o docs/source fragscapy --separate 100 | sphinx-build -b html docs/source docs/_build 101 | -------------------------------------------------------------------------------- /fragscapy/modifications/ipv6_ext_hdr_mixup.py: -------------------------------------------------------------------------------- 1 | """Mixes-up the order of the IPv6 Extension Header of an IPv6 packet.""" 2 | 3 | import random 4 | 5 | import scapy.packet 6 | 7 | from fragscapy.modifications.mod import Mod 8 | 9 | 10 | IPV6_EXTHDR = ( 11 | "IPv6ExtHdrHopByHop", "IPv6ExtHdrRouting", "IPv6ExtHdrFragment", "ESP", 12 | "AH", "MobileIP", "IPv6ExtDestOpt") 13 | 14 | 15 | def name(layer): 16 | """Returns the class name of a protocol layer.""" 17 | return layer.__class__.__name__ 18 | 19 | 20 | def slice_exthdr(pkt): 21 | """Slices the packet in 
three parts: 22 | * the 'before the Extension Headers' part 23 | * the chain of 'Extension Headers' as a list 24 | * the 'after the Extension Headers' part 25 | 26 | Args: 27 | pkt: The packet to slice. 28 | 29 | Returns: 30 | A 3-tuple with the 3 parts described above. The parts no longer 31 | contains the other parts. It means, for instance, that the 'before' 32 | part's payload is `NoPayload()` and is not linked to the 'Extension 33 | Headers' nor the 'after' part anymore. 34 | 35 | Examples: 36 | >>> slice_exthdr(IPv6()/IPv6ExtHdrRouting()/AH()/TCP()/"PLOP") 37 | (>, 38 | [, ], 39 | >) 40 | """ 41 | current = pkt 42 | before = scapy.packet.NoPayload() 43 | chain = [] 44 | while current.payload is not scapy.packet.NoPayload(): 45 | if name(current.payload) in IPV6_EXTHDR: 46 | if not chain: 47 | # If this is the first Extension Header, store the 'before' 48 | before = current 49 | chain.append(current.payload) 50 | current = current.payload 51 | # The 'after' is the payload of the last Extension Header 52 | after = chain[-1].payload if chain else scapy.packet.NoPayload() 53 | 54 | # Removes the dependency between the Headers 55 | for hdr in chain: 56 | hdr.payload = scapy.packet.NoPayload() 57 | 58 | return before, chain, after 59 | 60 | 61 | def replace_exthdr(before, exthdr, after): 62 | """Rebuilds a packet from the three parts as in `slice_exthdr`. 63 | 64 | It does not return the packet but instead modifies it directly. 65 | This avoiding having the need to pass a reference to the first 66 | layer. 67 | 68 | Args: 69 | before: The 'before the Extension Headers' part. 70 | exthdr: The new chain of 'Extension Headers'. 71 | after: The 'after the Extension Headers' part. 72 | 73 | Examples: 74 | >>> pkt = IPv6()/IPv6ExtHdrRouting() 75 | >>> replace_exthdr( 76 | ... pkt, 77 | ... [IPv6ExtHdrRouting(), AH()], 78 | ... TCP()/"PLOP" 79 | ... ) 80 | >>> pkt 81 | >>>> 86 | """ 87 | if not exthdr: 88 | return 89 | 90 | new_chain = exthdr[0] 91 | current = new_chain 92 | i = 1 93 | while i < len(exthdr): 94 | current.payload = exthdr[i] 95 | i += 1 96 | current = current.payload 97 | 98 | # Add the 'before' before the new chain of Extension Headers 99 | before.payload = new_chain 100 | # Add the 'after' after the last Extension Header 101 | current.payload = after 102 | 103 | 104 | class Ipv6ExtHdrMixup(Mod): 105 | """Mixes-up the order of the extension headers in an IPv6 packet. 106 | 107 | Randomly changes the order to the Extension Headers of the IPv6 packet 108 | 109 | Args: 110 | *args: The arguments of the mods. 111 | 112 | Raises: 113 | ValueError: Unrecognized or incorrect number of parameters. 114 | 115 | Examples: 116 | >>> Ipv6ExtHdrMixup() 117 | """ 118 | 119 | name = "Ipv6ExtHdrMixup" 120 | doc = ("Mixes-up the order of the extension headers in an IPv6 packet\n" 121 | "ipv6_ext_hdr_mixup") 122 | _nb_args = 0 123 | 124 | def is_deterministic(self): 125 | """See base class.""" 126 | return False 127 | 128 | def apply(self, pkt_list): 129 | """Mixes-up the order of the Extension Headers for each IPv6 packet. 
130 | See `Mod.apply` for more info.""" 131 | for pkt in pkt_list: 132 | if pkt.pkt.haslayer('IPv6'): 133 | before, chain, after = slice_exthdr(pkt.pkt) 134 | random.shuffle(chain) 135 | replace_exthdr(before, chain, after) 136 | 137 | return pkt_list 138 | -------------------------------------------------------------------------------- /fragscapy/modifications/mod.py: -------------------------------------------------------------------------------- 1 | """Abstract definition of a modification. 2 | 3 | A modification is a transformation that can be applied to a list of packet. 4 | The `Mod` class defines the abstract base class that should be subclassed and 5 | concretized in order to create a new modification. 6 | """ 7 | 8 | import abc 9 | 10 | class Mod(abc.ABC): 11 | """Abstract object for defining a modification of a packet list. 12 | 13 | This the base class for defining a modification. Any subclass should 14 | redefine the __init__ method to parse the arguments. If the class' 15 | attribute `_nb_args` is redefined, `Mod.__init__()` automatically 16 | check the number of parameters and raises a ValueError if this is not 17 | the right number. 18 | 19 | In addition, any subclass should also redefine the `.apply()` method 20 | to define the behavior of the mod. 21 | 22 | For an even better implementation one could redefine the `.name` and 23 | `.doc` attribute in order to get cleaner usage. But defaults are provided 24 | (respectively the class name and "No usage documented"). 25 | 26 | Args: 27 | *args: The arguments of the mods. 28 | 29 | Attributes: 30 | name: The name of the modification. 31 | doc: A string that describes the goal and the syntax of the 32 | modification. It is displayed when requesting the usage. 33 | 34 | Raises: 35 | ValueError: incorrect number of parameters. 36 | """ 37 | 38 | name = None 39 | doc = None 40 | _nb_args = -1 41 | 42 | def __init__(self, *args): 43 | self.check_args(*args) 44 | self.parse_args(*args) 45 | 46 | def is_deterministic(self): # pylint: disable=no-self-use 47 | """Is the modification deterministic (no random).""" 48 | return True 49 | 50 | def parse_args(self, *args): 51 | """Parses the arguments and extract the necessary data from it. 52 | 53 | Args: 54 | *args: The argument received 55 | 56 | Raises: 57 | ValueError: At least one of the argument cannot be parsed. 58 | """ 59 | 60 | def check_args(self, *args): 61 | """Performs some checks on the arguments. 62 | 63 | Base class only check that the number of arguments is equal to 64 | `self._nb_args`. 65 | 66 | Args: 67 | *args: The arguments received. 68 | 69 | Raises: 70 | ValueError: The arguments are not correct. 71 | """ 72 | if self._nb_args >= 0 and len(args) != self._nb_args: 73 | raise ValueError( 74 | "Incorrect number of parameters specified. " 75 | "Got {}, expected {}.".format(len(args), self._nb_args) 76 | ) 77 | 78 | @abc.abstractmethod 79 | def apply(self, pkt_list): 80 | """Applies the modification to a list of packets. 81 | 82 | It always returns a `PacketList` object but might also modified the 83 | original. Actually the returned object might even be the same 84 | "in-memory" original object. There is no guarantee that `pkt_list` 85 | will not be modified. It depends on the implementation of the 86 | modification. 87 | 88 | Args: 89 | pkt_list: A `PacketList` on which to apply the modifications. 90 | 91 | Returns: 92 | The new `PacketList` object resulting from the modfications. 
93 | """ 94 | raise NotImplementedError 95 | 96 | @classmethod 97 | def usage(cls): 98 | """Prints the usage of the modification based on the `name` and `doc` 99 | attributes.""" 100 | if cls.name is None: 101 | print(cls.__class__.__name__.lower()) 102 | else: 103 | print(cls.name) 104 | print("==========") 105 | if cls.doc is None: 106 | print("No usage documented") 107 | else: 108 | print(" ", cls.doc.replace('\n', '\n '), sep='') 109 | 110 | def get_params(self): 111 | """Returns a dictionnary of the options defining the mod.""" 112 | return {k: v for k, v in vars(self).items() if k[0] != "_"} 113 | 114 | def __str__(self): 115 | params = " ".join(str(v) for v in self.get_params().values()) 116 | if params: 117 | return "{name} {params}".format(name=self.name, params=params) 118 | return "{name}".format(name=self.name) 119 | 120 | def __repr__(self): 121 | return "{name}<{params}>".format( 122 | name=self.name, 123 | params=", ".join("{}={}".format(k, v) 124 | for k, v in self.get_params().items()) 125 | ) 126 | -------------------------------------------------------------------------------- /fragscapy/modifications/utils.py: -------------------------------------------------------------------------------- 1 | """Functions and utilities that can be used by mutliple mods to avoid 2 | duplication.""" 3 | 4 | import scapy.layers.inet6 5 | import scapy.packet 6 | 7 | # The IPv6 Headers that needs to be processed for routing 8 | IPV6_PROCESS_HEADERS = ("IPv6", "IPv6ExtHdrHopByHop", "IPv6ExtHdrRouting") 9 | # Next Header code for IPv6ExtHdrFragment 10 | IPV6_NH_FRAG = 44 11 | 12 | 13 | def name(layer): 14 | """Returns the class name of a protocol layer.""" 15 | return layer.__class__.__name__ 16 | 17 | 18 | def fragment6(pkt, fragsize): 19 | """Fragment an IPv6 Scapy packet in fragments of size `fragsize`. 20 | 21 | `scapy.layers.inet6.fragment6` is not sufficient alone since it requires 22 | to already have a Fragment Extension Header in the packet (it used to 23 | indicate to Scapy where to split the Extension Header chain). This function 24 | inserts this Fragment Extension Header automatically according to the RFC 25 | and fragments the packets. 26 | 27 | Args: 28 | pkt: The IPv6 Scapy packet to fragment 29 | 30 | Returns: 31 | A list of fragments (IPv6 packets) of size `fragsize`. 32 | 33 | Examples: 34 | >>> fragment6(IPv6()/IPv6ExtHdrRouting()/AH()/TCP()/"PLOP"*100, 30) 35 | """ 36 | return scapy.layers.inet6.fragment6(ipv6_insert_frag_hdr(pkt), fragsize) 37 | 38 | 39 | 40 | def ipv6_get_per_frag_hdr(pkt): 41 | """Returns the last 'Scapy layer' of the "Per-Fragment Headers" part of 42 | the packet. 43 | 44 | The "Per-Fragment Headers" part is the chain of IPv6 headers that is 45 | repeated for every fragment because they are useful for routing and 46 | defragmenting. 47 | 48 | Args: 49 | pkt: The Scapy packet to examine. 50 | 51 | Returns: 52 | A reference to the last 'Scapy layer' of the "Per-Fragment Headers". 53 | 54 | Examples: 55 | >>> get_per_frag_hdr(IPv6()/IPv6ExtHdrRouting()/AH()/TCP()/"PLOP") 56 | >>> 57 | """ 58 | current = pkt 59 | ret = current 60 | while current is not scapy.packet.NoPayload(): 61 | if name(current) in IPV6_PROCESS_HEADERS: 62 | ret = current 63 | current = current.payload 64 | return ret 65 | 66 | 67 | def ipv6_insert_frag_hdr(pkt): 68 | """Inserts a "Fragment Extension Header" in a packet just after the 69 | "Per-Fragment Headers" part. 70 | 71 | Args: 72 | pkt: The packet to modify. 
73 | 74 | Returns: 75 | The same packet with a well-placed Fragment Extension Header. 76 | 77 | Examples: 78 | >>> insert_frag_hdr(IPv6()/IPv6ExtHdrRouting()/AH()/TCP()/"PLOP") 79 | >>>>> 85 | """ 86 | current = ipv6_get_per_frag_hdr(pkt) 87 | current.payload = ( 88 | scapy.layers.inet6.IPv6ExtHdrFragment(nh=current.nh) 89 | / current.payload 90 | ) 91 | try: 92 | current.nh = IPV6_NH_FRAG 93 | except AttributeError: 94 | pass 95 | return pkt 96 | 97 | def tcp_segment(pkt, size, overlap=None, overlap_before=False): 98 | """Segment a TCP packet to a certain size. 99 | 100 | Args: 101 | pkt: The packet to segment 102 | size: The size of the TCP data after segmentation 103 | overlap: A string of data at the beginning or the end that overlaps 104 | the other fragments 105 | overlap_before: Should the overlap data be added at the beginning. 106 | Else it is added at the end. Default is False. 107 | 108 | Returns: 109 | A list of L2-packets with TCP segments 110 | 111 | Examples: 112 | >>> tcp_segment(IP()/TCP()/"PLOP", 3) 113 | [ 114 | >>, 117 | >> 120 | ] 121 | """ 122 | 123 | payload = bytes(pkt.getlayer('TCP').payload) 124 | tcp_l = len(payload) 125 | if not tcp_l: # Trivial case 126 | return [pkt] 127 | 128 | nb_segments = (tcp_l-1)//size + 1 129 | segments = [payload[i*size:(i+1)*size] for i in range(nb_segments)] 130 | 131 | ret = [] 132 | for i, segment in enumerate(segments): 133 | new_pkt = pkt.copy() 134 | if overlap is not None: 135 | # Add some data that overlaps the previous/next fragment 136 | if overlap_before and i != 0: 137 | # All segments except the first one 138 | segment = overlap + segment 139 | elif not overlap_before and i == len(segments) - 1: 140 | # All segments except the last one 141 | segment = segment + overlap 142 | new_pkt.getlayer('TCP').payload = scapy.packet.Raw(segment) 143 | new_pkt.getlayer('TCP').chksum = None 144 | new_pkt.getlayer('TCP').seq = pkt.getlayer('TCP').seq + i*size 145 | if new_pkt.haslayer('IP'): 146 | new_pkt.getlayer('IP').len = None 147 | new_pkt.getlayer('IP').chksum = None 148 | elif new_pkt.haslayer('IPv6'): 149 | new_pkt.getlayer('IPv6').plen = None 150 | new_pkt.getlayer('IPv6').chksum = None 151 | ret.append(new_pkt) 152 | 153 | return ret 154 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # FragScapy 2 | 3 | This project is a command-line tool that can be used to generate 4 | 'fragroute-like' tests using Scapy. 5 | [Fragroute](https://www.monkey.org/~dugsong/fragroute/)'s goal was to 6 | intercept network packets and modify them "on-the-fly" according to the 7 | configuration defined in a config file. So automating tests of different 8 | modifications, requires to kill Fragroute, write a new config file and restart 9 | Fragroute over and over. 10 | Fragscapy is a rewrite in Python using `fnfqueue` and `scapy` packages that 11 | aims to generate a test suite based on a config file. In a certain way, it 12 | will generates a series of 'fragroute-like' tests and run them consecutively 13 | without having to reload everything. Moreover, the modifications can be 14 | extended by adding some python modules to the project. So instead of fragroute, 15 | the set of possible modifications is not definitive and any modification can be 16 | applied to packets as long one can write them using Scapy. 
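For instance, a minimal custom modification can be written in a few lines of Scapy-powered Python. The `Reverse` mod below is not shipped with Fragscapy; it is only an illustrative sketch of what such an extension could look like:

```python
# Hypothetical example mod (not part of Fragscapy): sends the intercepted
# packets in reverse order.
from fragscapy.modifications.mod import Mod
from fragscapy.packetlist import PacketList


class Reverse(Mod):
    """Sends the packets of the list in reverse order."""

    name = "Reverse"
    doc = "Sends the packets in reverse order.\nreverse"
    _nb_args = 0

    def apply(self, pkt_list):
        new_pl = PacketList()
        # Walk the original list backwards, keeping each packet's delay
        for pkt in reversed(list(pkt_list)):
            new_pl.add_packet(pkt.pkt, pkt.delay)
        return new_pl
```

Dropped into _fragscapy/modifications/reverse.py_, such a file would be picked up automatically (see "Adding modifications" below).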
17 | 18 | 19 | ## Setup 20 | 21 | ### Building dependencies 22 | 23 | The project requires some tools to be installed. Those are: 24 | * git (for cloning the repository) 25 | * python >= 3.5 26 | * python-dev >= 3.5 (for building the dependencies with pip) 27 | * gcc (for building the dependencies with pip) 28 | * cffi (for building the dependencies with pip 29 | * make (for simplification of tasks) 30 | * pip (installed by default in a venv) 31 | 32 | The recommended setup is to use a 33 | [virtual environment](https://docs.python.org/3.5/library/venv.html). Once it 34 | is started (or not if using a global installation), the Makefile can do most 35 | of the common tasks. 36 | 37 | On Debian or Ubuntu: 38 | ```bash 39 | apt install -y git python3 python3-dev gcc python3-cffi-backend make python3-venv 40 | python3 -m venv venv 41 | source venv/bin/activate 42 | ``` 43 | 44 | ### Install 45 | 46 | To install fragscapy, run: 47 | ```bash 48 | make install 49 | ``` 50 | 51 | ### Development install 52 | 53 | Fragscapy can be installed in development mode. It means the changes made to 54 | the code will be taken into account without the need to rebuild everything 55 | each time: 56 | ```bash 57 | make install-dev 58 | ``` 59 | 60 | ## Documentation 61 | 62 | To build the development documentation, run: 63 | ```bash 64 | make build-doc 65 | ``` 66 | It is then accessible as HTML files under _docs/\_build/_ 67 | 68 | 69 | ## Usage 70 | 71 | Once installed, there are 3 ways to access fragscapy features: 72 | 1. Using the command `fragscapy`. It was installed together with the package. 73 | 2. Using the python module directly : `python -m fragscapy`. It is nearly the 74 | same as using the command except options can be passed to the python 75 | interpreter. 76 | 3. Importing the package in a python interpreter: e.g. ```from fragscapy.mod import ModList``` 77 | 78 | 79 | ### Command line options 80 | 81 | Run `fragscapy [-h | --help]` for displaying the usage. 82 | 83 | 84 | ### Configuration files 85 | 86 | The main way (except if using fragscapy as an imported python package) to 87 | run a test suite is to describe everything in a config file and pass it to the 88 | command line as an argument. 89 | These config files have a predefined syntax that needs to be respected, else 90 | errors will be thrown. 91 | 92 | Examples of possible configuration files and templates can be found under the 93 | [_config\_examples_ directory](config_examples) and the [_config\_common_ directory](config_common). 94 | 95 | The command `fragscapy checkconfig` can be used to verify a config file is 96 | valid and understandable. It checks that a maximum of operations will run 97 | without errors (but never runs the command itself). 98 | 99 | 100 | ### Modifications 101 | 102 | Various modifications are already available but more can be added. To list 103 | the modifications that are currently detected (and can be used), run 104 | `fragscapy list`. For the details about the options and how to use a specific 105 | modifications, run `fragscapy usage `. 106 | 107 | 108 | ### Results 109 | 110 | At the end of the tests, a summary of the results is displayed based on the 111 | exit code of the command: `0` means the test succeeded and any other value 112 | means the test failed. To adapt any command that does not respect this 113 | convention, one the following can be appended to the command in the 114 | configuration file: 115 | ```bash 116 | my_cmd; if [ $? 
-eq 4 ]; then return 0; else return 1; fi 117 | my_cmd; e=$?; if [ $e -eq 3 ] || [ $e -eq 2 ]; then return 0; else return $e; fi 118 | ``` 119 | 120 | 121 | ## Adding modifications 122 | 123 | Adding a modifications is meant to be as easy as possible: a python module 124 | needs to be added to the _fragscapy/modifications/_ directory and that's all, 125 | it will be detected automatically. 126 | However, the content of the file needs to respect specific rules to work 127 | correctly: 128 | 1. The name of the file should be the name of the modification using 129 | [snake_case](https://en.wikipedia.org/wiki/Snake_case). It is used to 130 | import the correct module based on the modification name. 131 | 2. The module should define a class whose name is the name of the modification 132 | using [UpperCamelCase/CapWords](https://en.wikipedia.org/wiki/Camel_case). 133 | It is used to import the correct object based on the modification name. 134 | 3. This class should subclass `fragscapy.modifications.mod.Mod` class. It is 135 | used to define the default behavior and an interface every modification 136 | should respect. 137 | 4. This class should override the `__init__(self, *args)` method where `args` 138 | contains a list of strings with the options to create the modification 139 | 5. This class should override the `apply(self, pkt_list)` method where 140 | `pkt_list` is a `PacketList` object that contains the scapy packets to 141 | modify. 142 | This is all the rules that need to be respected. Anything else (adding other 143 | classes, methods, sumodules, ...) is left to the developer to decide according 144 | to its needs. 145 | 146 | The already existing mods can be used as a reference on how to develop a mod 147 | since they all respect these rules too. 148 | 149 | ## Code architecture 150 | 151 | Here is a schema of how the different objects interact together and what they 152 | are doing. 153 | 154 | ![media/architecture.png](media/architecture.png "The architecture of the project") 155 | -------------------------------------------------------------------------------- /fragscapy/packetlist.py: -------------------------------------------------------------------------------- 1 | """A list of wrappers around Scapy packets and metadata (e.g. delay).""" 2 | 3 | import time 4 | 5 | import scapy.sendrecv 6 | 7 | 8 | # The minimum time (in seconds) a packet will be delayed 9 | MIN_TIME_DELAY = 0.01 10 | 11 | 12 | def _safe_delay(delay): 13 | """Checks that `delay` is a positive float number else raises a 14 | ValueError.""" 15 | try: 16 | delay = float(delay) 17 | except ValueError: 18 | raise ValueError("{} is not a valid delay (not a number)".format(delay)) 19 | if delay < 0: 20 | raise ValueError("{} is not a valid delay (not positive)".format(delay)) 21 | return delay 22 | 23 | 24 | class PacketStruct(object): 25 | """Wrapper around a Scapy packet and a delay. 26 | 27 | The delay is used when sending the Scapy packet, it is delayed by the same 28 | amount of seconds. That way, the user can easily control the delay between 29 | each packet to be sent. 30 | 31 | Args: 32 | pkt: The Scapy packet. 33 | delay: The delay (in seconds) before sending the packet. 34 | 35 | Attributes: 36 | pkt: The Scapy packet. 37 | 38 | Examples: 39 | >>> pkt = PacketStruct(IP()/TCP()/"PLOP", 25) 40 | >>> pkt.display() 41 | Delay of 25.0 seconds 42 | ###[ IP ]### 43 | version = 4 44 | [...] 45 | ###[ TCP ]### 46 | sport = ftp_data 47 | [...] 
48 | ###[ Raw ]### 49 | load = 'PLOP' 50 | >>> print(repr(pkt)) 51 | PacketStruct(pkt=44B, delay=25.0s) 52 | """ 53 | def __init__(self, pkt, delay): 54 | self.pkt = pkt 55 | self._delay = _safe_delay(delay) 56 | 57 | @property 58 | def delay(self): 59 | """The delay to wait before sending the packet.""" 60 | return self._delay 61 | 62 | @delay.setter 63 | def delay(self, val): 64 | self._delay = _safe_delay(val) 65 | 66 | def send(self): 67 | """Sends the packet as a Layer-3 packet.""" 68 | # Only sleep if above the min limit 69 | if self.delay > MIN_TIME_DELAY: 70 | time.sleep(self.delay) 71 | scapy.sendrecv.send(self.pkt) 72 | 73 | def sendp(self): 74 | """Sends the packet as a Layer-2 packet.""" 75 | # Only sleep if above the min limit 76 | if self.delay > MIN_TIME_DELAY: 77 | time.sleep(self.delay) 78 | scapy.sendrecv.sendp(self.pkt) 79 | 80 | def display(self): 81 | """Displays the content of the packet. 82 | 83 | Displays the delay (if any) followed by the details of the underlying 84 | Scapy packet. 85 | """ 86 | if self.delay > MIN_TIME_DELAY: 87 | print("Delay of {} seconds".format(self.delay)) 88 | self.pkt.display() 89 | 90 | def copy(self): 91 | """Returns a copy of the packet.""" 92 | return PacketStruct(self.pkt, self.delay) 93 | 94 | def __str__(self): 95 | ret = [] 96 | if self.delay > MIN_TIME_DELAY: 97 | ret.append(str(self.delay) + "s") 98 | ret.append(str(self.pkt)) 99 | return "\n".join(ret) 100 | 101 | def __bytes__(self): 102 | ret = [] 103 | if self.delay > MIN_TIME_DELAY: 104 | ret.append(bytes(str(self.delay), encoding='ascii') + b"s") 105 | ret.append(bytes(self.pkt)) 106 | return b"\n".join(ret) 107 | 108 | def __repr__(self): 109 | return "PacketStruct(pkt={}B, delay={}s)".format( 110 | len(self.pkt), self.delay 111 | ) 112 | 113 | 114 | class PacketList(object): 115 | """A list of PacketStruct to be sent. 116 | 117 | This list can be altered (edit, append, insert, remove) before being 118 | really sent. For each packet a delay can be specified. This delay will 119 | be respected and waited before actually sending the packet. 120 | 121 | Attributes: 122 | pkts: The list of packets 123 | 124 | Examples: 125 | >>> pl = PacketList() 126 | >>> pl.add_packet(IP()/TCP()/"PLOP", 25) 127 | >>> pl.add_packet(IP()/TCP()/"PLIP", 2) 128 | >>> pl.display() 129 | Delay of 25.0 seconds 130 | ###[ IP ]### 131 | version = 4 132 | [...] 133 | ###[ TCP ]### 134 | sport = ftp_data 135 | [...] 136 | ###[ Raw ]### 137 | load = 'PLOP' 138 | Delay of 2.0 seconds 139 | ###[ IP ]### 140 | version = 4 141 | [...] 142 | ###[ TCP ]### 143 | sport = ftp_data 144 | [...] 145 | ###[ Raw ]### 146 | load = 'PLIP' 147 | >>> repr(pl) 148 | 'PacketList(pkts=[PacketStruct(pkt=44B, delay=25.0s), 149 | PacketStruct(pkt=44B, delay=2.0s)])' 150 | """ 151 | def __init__(self): 152 | self.pkts = [] 153 | 154 | def __getitem__(self, index): 155 | return self.pkts[index] 156 | 157 | def __len__(self): 158 | return len(self.pkts) 159 | 160 | def __iter__(self): 161 | return iter(self.pkts) 162 | 163 | def add_packet(self, pkt, delay=0): 164 | """Adds a new Scapy packet at the end of the list. 165 | 166 | Args: 167 | pkt: The Scapy packet to add. 168 | delay: The delay to respect before sending the packet. 169 | """ 170 | self.pkts.append(PacketStruct(pkt, delay)) 171 | 172 | def edit_delay(self, index, delay): 173 | """Changes the delay before packet emission. 174 | 175 | Args: 176 | index: Position of the packet to change. 177 | delay: The new delay. 
178 | """ 179 | self.pkts[index].delay = delay 180 | 181 | def edit_packet(self, index, pkt): 182 | """Changes the Scapy packet. 183 | 184 | Args: 185 | index: Position of the packet to change. 186 | pkt: The new Scapy packet. 187 | """ 188 | self.pkts[index].pkt = pkt 189 | 190 | def remove_packet(self, index): 191 | """Removes a packet from the list. 192 | 193 | Args: 194 | index: Position of the packet to remove. 195 | """ 196 | del self.pkts[index] 197 | 198 | def insert_packet(self, index, pkt, delay=0): 199 | """Inserts a new packet in the list at the given index. 200 | 201 | Args: 202 | index: Position to insert the new packet. 203 | pkt: The new packet itself. 204 | delay: Delay to respect before sending packet. 205 | """ 206 | self.pkts.insert(index, PacketStruct(pkt, delay)) 207 | 208 | def send_all(self): 209 | """Sends all packets in the list as Layer3 packets.""" 210 | for pkt in self.pkts: 211 | pkt.send() 212 | 213 | def sendp_all(self): 214 | """Sends all packets in the list as Layer2 packets.""" 215 | for pkt in self.pkts: 216 | pkt.sendp() 217 | 218 | def display(self): 219 | """Displays the details of each packet of the packet list.""" 220 | for pkt in self.pkts: 221 | pkt.display() 222 | 223 | def __str__(self): 224 | ret = [] 225 | ret.append("PacketList [") 226 | for pkt in self.pkts: 227 | ret.append(str(pkt)) 228 | ret.append("]") 229 | return "\n".join(ret) 230 | 231 | def __bytes__(self): 232 | ret = [] 233 | ret.append(b"PacketList [") 234 | for pkt in self.pkts: 235 | ret.append(bytes(pkt)) 236 | ret.append(b"]") 237 | return b"\n".join(ret) 238 | 239 | def __repr__(self): 240 | return "PacketList(pkts=[{}])".format( 241 | ', '.join(repr(pkt) for pkt in self.pkts) 242 | ) 243 | -------------------------------------------------------------------------------- /fragscapy/config.py: -------------------------------------------------------------------------------- 1 | """Configuration parser and helper for fragscapy. 2 | 3 | The `config module` is responsible for sanitizing, checking and parsing a 4 | configfile provided by the user. It raises `ConfigError` and `ConfigWarning` 5 | if wrong parameters are detected. 6 | """ 7 | 8 | import json 9 | import warnings 10 | 11 | class ConfigError(ValueError): 12 | """Raises a configuration error about `key`.""" 13 | def __init__(self, key): 14 | self.key = key 15 | super(ConfigError, self).__init__( 16 | "Error: Unable to read '{}'".format(key) 17 | ) 18 | 19 | 20 | class ConfigWarning(Warning): 21 | """Warning during the configuration parsing.""" 22 | 23 | 24 | def config_warning(msg): 25 | """Raises a warning about something, details in `msg`.""" 26 | warnings.warn( 27 | "{}".format(msg), 28 | ConfigWarning 29 | ) 30 | 31 | 32 | def json_loadf(filename): 33 | """Wrapper arround `json.load` to load directly from filename.""" 34 | with open(filename) as f: 35 | return json.load(f) 36 | 37 | 38 | class Config(object): 39 | """Configuration parser wrapper. 40 | 41 | Parse some given data to load the configuration to run fragscapy with. 42 | This is a wrapper that checks the correct format of the data and raises 43 | errors or warning when an anomaly is found. If everything is alright, it 44 | exposes the configuration as read-only data. 45 | 46 | Args: 47 | data: The data to parse. It may be a file or a string depending on 48 | the parser used. If the default parser is used, it should be a 49 | filename. 50 | parser: The parser to use. It should respect the data types 51 | expected in the configuration. Default is `json_loadf`. 
52 | 53 | Attributes: 54 | data: The data to parse. It may be a file or a string depending on 55 | the parser used. If the default parser is used, it should be a 56 | filename. 57 | parser: The parser to use. It should respect the data types 58 | expected in the configuration. Default is `json_loadf`. 59 | 60 | Examples: 61 | >>> config = Config('config.json') 62 | >>> config.nfrules 63 | [{'host': 'www.lmddgtfy.com', 'port': 8080}, 64 | {'host': 'www.lmddgtfy.com', 'port': 80}] 65 | >>> config.input 66 | ['tcp_sport 8080', 'echo "80 -> 8080"'] 67 | >>> config.output 68 | ['tcp_dport 8080', 'echo "8080 -> 80"'] 69 | """ 70 | def __init__(self, data, parser=None): 71 | if parser is None: 72 | parser = json_loadf 73 | 74 | self.parser = parser 75 | self.data = data 76 | 77 | self._cmd = "" 78 | self._nfrules = list() 79 | self._input = list() 80 | self._output = list() 81 | 82 | self._parse() 83 | 84 | @property 85 | def cmd(self): 86 | """The command to run for each test.""" 87 | return self._cmd 88 | 89 | @property 90 | def nfrules(self): 91 | """A list of key-words args to pass to NFQueueRule.""" 92 | return self._nfrules 93 | 94 | @property 95 | def output(self): 96 | """A list of args to pass to a modification for the output chain.""" 97 | return self._output 98 | 99 | @property 100 | def input(self): 101 | """A list of args to pass to a modification for the input chain.""" 102 | return self._input 103 | 104 | def _parse(self): 105 | """Parses the data, fill the attributes and raises ConfigError when 106 | needed.""" 107 | # Parse the data and interrupt if not readable 108 | try: 109 | user_data = self.parser(self.data) 110 | except Exception: 111 | raise ConfigError('.not_parsable') 112 | 113 | if not isinstance(user_data, dict): 114 | raise ConfigError('.not_dict') 115 | 116 | # Parse all the data coming from the user 117 | # and warn about unknown options 118 | for key, value in user_data.items(): 119 | if key == 'cmd': 120 | self._parse_cmd(value) 121 | elif key == 'nfrules': 122 | self._parse_nfrules(value) 123 | elif key == 'input': 124 | self._parse_input(value) 125 | elif key == 'output': 126 | self._parse_output(value) 127 | else: 128 | config_warning( 129 | "Unrecognized option found : '.{}'".format(key) 130 | ) 131 | 132 | # Warning in the case of no nfrules which is weird but doable. 133 | if not self._nfrules: 134 | config_warning( 135 | "No Netfilter rules configured, which means no data will be " 136 | "intercepted. Be sure this is the intended behavior." 137 | ) 138 | 139 | def _parse_cmd(self, user_cmd): 140 | """Parses the section of the command from the data. 141 | 142 | Args: 143 | user_cmd: The section read from the data that is about the 144 | command. 145 | 146 | Raises: 147 | ConfigError: The command is not a string. 148 | """ 149 | if not isinstance(user_cmd, str): 150 | raise ConfigError('.cmd.not_str') 151 | if not user_cmd[0] == '/': 152 | config_warning( 153 | "The command is relative, you should consider using absolute " 154 | "commands for better stability." 155 | ) 156 | self._cmd = user_cmd 157 | 158 | def _parse_nfrules(self, user_nfrules): 159 | """Parses the section of the netfilter rules from the data. 160 | 161 | Args: 162 | user_nfrules: The section read from the data that is about the 163 | netfilter rules. 164 | 165 | Raises: 166 | ConfigError: The NF rules specification has not the right format. 167 | See message for details. 
168 | """ 169 | if not isinstance(user_nfrules, list): 170 | raise ConfigError('.nfrules.not_list') 171 | 172 | self._nfrules = list() 173 | for i, user_nfrule in enumerate(user_nfrules): 174 | if not isinstance(user_nfrule, dict): 175 | raise ConfigError('.nfrules.{}.not_dict'.format(i)) 176 | self._nfrules.append(user_nfrule) 177 | 178 | def _parse_input(self, user_input): 179 | """Parses the section of the INPUT modification list from the data. 180 | 181 | Args: 182 | user_input: The section read from the data that is about the INPUT 183 | modification list. 184 | 185 | Raises: 186 | ConfigError: The 'user_cmd' object has not the right format. See 187 | message for details. 188 | """ 189 | if not isinstance(user_input, list): 190 | raise ConfigError('.input.not_list') 191 | 192 | self._input = list() 193 | for i, mod in enumerate(user_input): 194 | try: 195 | self._input.append(_parse_mod(mod)) 196 | except ConfigError as e: 197 | raise ConfigError('.input.{}{}'.format(i, e.key)) 198 | 199 | def _parse_output(self, user_output): 200 | """Parses the section of the OUTPUT modification list from the data. 201 | 202 | Args: 203 | user_output: The section read from the data that is about the 204 | OUTPUT modification list. 205 | 206 | Raises: 207 | ConfigError: The 'user_cmd' object has not the right format. See 208 | message for details. 209 | """ 210 | if not isinstance(user_output, list): 211 | raise ConfigError('.output.not_list') 212 | 213 | self._output = list() 214 | for i, mod in enumerate(user_output): 215 | try: 216 | self._output.append(_parse_mod(mod)) 217 | except ConfigError as e: 218 | raise ConfigError('.output.{}{}'.format(i, e.key)) 219 | 220 | 221 | def _parse_mod(mod): 222 | """Parses a modification from the user data config. 223 | 224 | Args: 225 | mod: The dictionary that was extracted from the data and that should 226 | represent a modification. 227 | 228 | Returns: 229 | A sanitized dictionary representing the modification. For example : 230 | 231 | {"mod_name": "echo", "mod_opts", ["seq_str plap plop plip"]} 232 | 233 | Raises: 234 | ConfigError: See details in the 'key' parameter. 235 | """ 236 | if not isinstance(mod, dict): 237 | raise ConfigError('.not_dict') 238 | 239 | mod_name = None 240 | mod_opts = list() 241 | optional = False 242 | 243 | for key, value in mod.items(): 244 | if key == "mod_name": 245 | mod_name = value 246 | elif key == "mod_opts": 247 | if not isinstance(value, list): 248 | value = [value] 249 | mod_opts = value 250 | elif key == "optional": 251 | if not isinstance(value, bool): 252 | raise ConfigError('.optional.not_bool') 253 | else: 254 | optional = value 255 | else: 256 | config_warning("Unrecognized option : {}".format(key)) 257 | 258 | if mod_name is None: 259 | raise ConfigError('.missing_mod_name') 260 | 261 | return { 262 | "mod_name": mod_name, 263 | "mod_opts": mod_opts, 264 | "optional": optional, 265 | } 266 | -------------------------------------------------------------------------------- /fragscapy/commandline.py: -------------------------------------------------------------------------------- 1 | """Command-line specific operations and parsing. 2 | 3 | Handles everything related to the command line and its many options. The main 4 | entry point is `command()` which will parse the arguments from `sys.args` and 5 | triggers the correct function depending on the arguments given. 
6 | """ 7 | 8 | import argparse 9 | import logging 10 | import traceback 11 | 12 | import scapy.config 13 | 14 | from fragscapy._author import __author__ 15 | from fragscapy._version import __version__ 16 | from fragscapy.config import Config 17 | from fragscapy.engine import Engine 18 | from fragscapy.modgenerator import get_all_mods, get_mod 19 | 20 | 21 | PROG_NAME = "Fragscapy" 22 | DESCRIPTION = ("Runs a series of tests on the network and modify the packets " 23 | "on the fly in order to test the behavior of the machines on " 24 | "the network") 25 | EPILOG = "Fragscapy {version} - {author}".format( 26 | version=__version__, author=__author__) 27 | 28 | 29 | def command(): 30 | """Parses the arguments from the command line and trigger the action. 31 | 32 | The main sub-commands are: 33 | * 'list' for listing the mods that can be detected 34 | * 'usage' for detailling the usage of one (or multiple) mods 35 | * 'checkconfig' to check various aspects of a configuration file 36 | * 'start' to run the test suite described by a config file 37 | """ 38 | parser = argparse.ArgumentParser( 39 | description=DESCRIPTION, epilog=EPILOG, prog=PROG_NAME 40 | ) 41 | 42 | parser.add_argument( 43 | '-V', '--version', 44 | action='version', 45 | version="Fragscapy {version}".format(version=__version__) 46 | ) 47 | 48 | subparsers = parser.add_subparsers(dest='subcmd') 49 | 50 | # fragscapy list 51 | subparsers.add_parser('list', help="List the available mods") 52 | 53 | # fragscapy usage 54 | parser_usage = subparsers.add_parser( 55 | 'usage', 56 | help="Details the usage of a mod" 57 | ) 58 | parser_usage.add_argument( 59 | 'mod', 60 | type=str, 61 | nargs='+', 62 | help="The name of a mod to show the usage" 63 | ) 64 | 65 | # fragscapy checkconfig 66 | parser_checkconfig = subparsers.add_parser( 67 | 'checkconfig', 68 | help="Parse and check a config file without running the test suite" 69 | ) 70 | parser_checkconfig.add_argument( 71 | 'config_files', 72 | nargs='+', 73 | type=str, 74 | metavar='', 75 | help="The config file to use" 76 | ) 77 | parser_checkconfig.add_argument( 78 | '--modif-file', 79 | type=str, 80 | metavar='', 81 | help="Where to write the modifications, default is 'modifications.txt'" 82 | ) 83 | parser_checkconfig.add_argument( 84 | '--traceback', '--tb', 85 | action='store_true', 86 | help="Show the traceback when an error occurs" 87 | ) 88 | parser_checkconfig.add_argument( 89 | '--no-progressbar', 90 | action='store_true', 91 | help=("Disable the progressbar. Can be useful in non interactive " 92 | "terminals") 93 | ) 94 | parser_checkconfig.add_argument( 95 | '--append', '-a', 96 | action='store_true', 97 | help=("Do not delete the result files. Instead append the new results " 98 | "to them.") 99 | ) 100 | 101 | # fragscapy start 102 | parser_start = subparsers.add_parser('start', help="Start the tests") 103 | parser_start.add_argument( 104 | 'config_files', 105 | nargs='+', 106 | type=str, 107 | metavar='', 108 | help="The config file to use" 109 | ) 110 | parser_start.add_argument( 111 | '--modif-file', 112 | type=str, 113 | metavar='', 114 | help="Where to write the modifications, default is 'modifications.txt'" 115 | ) 116 | parser_start.add_argument( 117 | '--stdout', '-o', 118 | type=str, 119 | default=0, 120 | metavar='', 121 | nargs='?', 122 | help=("Where to redirect stdout. {i} and {j} can be used to include " 123 | "respectively the modification number and the iteration number " 124 | "in the filename. If not specified, stdout is dropped. 
If " 125 | "specified with no arguments, stdout is displayed to stdout.") 126 | ) 127 | parser_start.add_argument( 128 | '--stderr', '-e', 129 | type=str, 130 | default=0, 131 | metavar='', 132 | nargs='?', 133 | help=("Where to redirect stderr. {i} and {j} can be used to include " 134 | "respectively the modification number and the iteration number " 135 | "in the filename. If not specified, stderr is dropped. If " 136 | "specified with no arguments, stderr is displayed to stderr.") 137 | ) 138 | parser_start.add_argument( 139 | '--scapy-output', 140 | action='store_true', 141 | help="Enable the standard scapy output for each packet sent" 142 | ) 143 | parser_start.add_argument( 144 | '--no-progressbar', 145 | action='store_true', 146 | help=("Disable the progressbar. Can be useful in non interactive " 147 | "terminals") 148 | ) 149 | parser_start.add_argument( 150 | '--no-results', 151 | action='store_true', 152 | help=("Disable the display of the results at the end.") 153 | ) 154 | parser_start.add_argument( 155 | '--local-pcap', '-W', 156 | type=str, 157 | metavar='', 158 | help=("Dump the content of the packets sent and received by " 159 | "localhost (packets as the command see them)") 160 | ) 161 | parser_start.add_argument( 162 | '--remote-pcap', '-w', 163 | type=str, 164 | metavar='', 165 | help=("Dump the content of the packets sent to and received from " 166 | "the remote host (packets as the remote host see them)") 167 | ) 168 | parser_start.add_argument( 169 | '--append', '-a', 170 | action='store_true', 171 | help=("Do not delete the result files. Instead append the new results " 172 | "to them.") 173 | ) 174 | parser_start.add_argument( 175 | '--repeat', '-r', 176 | type=int, 177 | metavar='', 178 | default='10', 179 | help=("How many times should the non-deterministic tests be repeated. " 180 | "Some tests have random behavior, they can be repeated multiple " 181 | "times with the same configuration. Default is 10.") 182 | ) 183 | 184 | args = parser.parse_args() 185 | 186 | if args.subcmd == 'list': 187 | list_mods() 188 | elif args.subcmd == 'usage': 189 | usage(args) 190 | elif args.subcmd == 'checkconfig': 191 | checkconfig(args) 192 | elif args.subcmd == 'start': 193 | start(args) 194 | else: 195 | parser.print_usage() 196 | 197 | 198 | def list_mods(): 199 | """Lists all the mods that can be detected.""" 200 | all_mods_name = sorted(map( 201 | lambda x: x.name or x.__class__.__name__.lower(), 202 | get_all_mods() 203 | )) 204 | print("Found {} available mods:".format(len(all_mods_name))) 205 | for mod in all_mods_name: 206 | print(" - {}".format(mod)) 207 | 208 | 209 | def start(args): 210 | """Runs the test suite. 211 | 212 | Args: 213 | args: The arguments found in the `argparse.ArgumentParser` 214 | """ 215 | if not args.scapy_output: 216 | # Removes warning messages 217 | logging.getLogger("scapy.runtime").setLevel(logging.ERROR) 218 | # Removes verbose send messages 219 | scapy.config.conf.verb = 0 220 | 221 | for i, config_file in enumerate(args.config_files): 222 | print("[{}]".format(config_file)) 223 | config = Config(config_file) 224 | kwargs = _filter_kwargs( 225 | args, 226 | ['modif_file', 'local_pcap', 'remote_pcap', 'append', 'repeat'] 227 | ) 228 | kwargs['progressbar'] = not args.no_progressbar 229 | kwargs['display_results'] = not args.no_results 230 | # To distinguish between '', '-o' and '-o plop', we tricked the option 231 | # into default to 0 in the first case (None for the second and plop the 232 | # thrid). 
233 | if args.stdout != 0: 234 | kwargs['stdout'] = args.stdout 235 | if args.stderr != 0: 236 | kwargs['stderr'] = args.stderr 237 | kwargs = _format_config_name(kwargs, i) 238 | engine = Engine(config, **kwargs) 239 | engine.start() 240 | print() 241 | 242 | 243 | def usage(args): 244 | """Prints the usage for specific mods. 245 | 246 | Args: 247 | args: The arguments found in the `argparse.ArgumentParser` 248 | """ 249 | for mod_name in args.mod: 250 | try: 251 | mod = get_mod(mod_name) 252 | mod.usage() 253 | print("") 254 | except ModuleNotFoundError: 255 | print("Unknown modification: '{}'".format(mod_name)) 256 | 257 | 258 | def checkconfig(args): 259 | """Checks that the config file looks correct. 260 | 261 | It does not guarantee that there will be no crash during the test suite 262 | but it tries to catch everything before running it. 263 | 264 | Args: 265 | args: The arguments found in the `argparse.ArgumentParser` 266 | """ 267 | for i, config_file in enumerate(args.config_files): 268 | print("[{}]".format(config_file)) 269 | try: 270 | print(">>> Loading config file") 271 | config = Config(config_file) 272 | print(">>> Loading engine") 273 | kwargs = _filter_kwargs(args, ['modif_file', 'append']) 274 | kwargs['progressbar'] = not args.no_progressbar 275 | kwargs = _format_config_name(kwargs, i) 276 | engine = Engine(config, **kwargs) 277 | print(">>> Checking Netfilter rules") 278 | engine.check_nfrules() 279 | print(">>> Checking mod list generation (output to '{}')" 280 | .format(args.modif_file)) 281 | engine.check_modlist_generation() 282 | engine.unbind_queues() 283 | except BaseException as e: # pylint: disable=broad-except 284 | if args.traceback: 285 | traceback.print_tb(e.__traceback__) 286 | print("{name}: {msg}".format(name=e.__class__.__name__, msg=e)) 287 | print() 288 | 289 | 290 | def _format_config_name(kwargs, config): 291 | for key, value in kwargs.items(): 292 | if key in ['modif_file', 'stdout', 'stderr', 'local_pcap', 293 | 'remote_pcap']: 294 | kwargs[key] = value.replace('{conf}', str(config)) 295 | return kwargs 296 | 297 | 298 | def _filter_kwargs(args, keys): 299 | """Filters and transforms argparse's args to a kwargs. 300 | 301 | Args: 302 | args: The arguments found in the `argparse.ArgumentParser` 303 | keys: The keys to keep 304 | """ 305 | kwargs = dict() 306 | for k in keys: 307 | if hasattr(args, k) and getattr(args, k) is not None: 308 | kwargs[k] = getattr(args, k) 309 | return kwargs 310 | 311 | 312 | if __name__ == '__main__': 313 | command() 314 | -------------------------------------------------------------------------------- /fragscapy/netfilter.py: -------------------------------------------------------------------------------- 1 | """Netfilter-related manipulations (i.e. NF rules and packets in NFQUEUE). 2 | 3 | NFQUEUE is a special Netfilter target that can be used to send packet to 4 | userland and modify them before sending them. To access them, this module uses 5 | `fnfqueue` module (which makes use of libnfqueue). 6 | 7 | Here it can be used to capture traffic (with an optional filter on protocol, 8 | host and/or port) and cast them to Scapy packets so they can be manipulated. 9 | Once the python modification is done, one would simply invoke the `.mangle()` 10 | or `.drop()` methods to notify Netfilter of either a new packet or the want to 11 | drop the packet. So far, the only L3-protocols supported are IPv4 and IPv6. 12 | Other protocols are not yet supported and those packets and accepted without 13 | being sent to the user. 
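For example, a minimal capture-modify-reinject session built on the two
classes described below could look like this (the host, port and checksum
reset are purely illustrative):

    >>> rule = NFQueueRule(host="www.example.com", port=80)
    >>> rule.insert()            # add the iptables/ip6tables rules
    >>> queue = NFQueue()
    >>> for pkt in queue:
    ...     if pkt.haslayer('TCP'):
    ...         pkt.getlayer('TCP').chksum = None  # force checksum recomputation
    ...     pkt.mangle()         # hand the (possibly modified) packet back to Netfilter
    >>> queue.stop()
    >>> rule.remove()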
14 | 15 | The main objects to use are `NFQueueRule` which is used to manipulate iptables 16 | and ip6tables rules and `NFQueue`, the queue that can be iterated over to 17 | access the packets in the NFQUEUE target. 18 | """ 19 | 20 | import abc 21 | import collections 22 | import os 23 | import subprocess 24 | 25 | import fnfqueue 26 | 27 | import scapy.data 28 | import scapy.layers.inet 29 | import scapy.layers.inet6 30 | import scapy.sendrecv 31 | 32 | 33 | # Define a constant structure that holds the options for iptables together 34 | Chain = collections.namedtuple('Chain', 35 | ['name', 'host_opt', 'port_opt', 'qnum']) 36 | OUTPUT = Chain('OUTPUT', '-d', '--dport', 0) 37 | INPUT = Chain('INPUT', '-s', '--sport', 1) 38 | 39 | 40 | class NFQueueRule(object): # pylint: disable=too-many-instance-attributes 41 | """A Netfilter rule to enable/disable the NFQUEUE. 42 | 43 | Manipulates the iptables and ip6tables to make use of the NFQUEUE for the 44 | packets that are to be routed through python. It is used to insert and 45 | remove the correct iptables/ip6tables rules with the correct optional 46 | filters that configure netfilter to send the matching packets to the 47 | NFQUEUE target. 48 | 49 | Args: 50 | output_chain: Apply the rule on the output chain if 'True'. Default is 51 | 'True'. 52 | input_chain: Apply the rule on the input chain if 'True'. Default is 53 | 'True'. 54 | proto: The protocol name (iptables-style) to filter on. If set to 55 | 'None' and `port` is set, defaults to 'tcp', else defaults to 56 | 'None' and all protocols will match. 57 | host: The hostname or IPv4 to filter on. Default is 'None', which 58 | means all hosts will match. 59 | host6: The IPv6 to filter on. Default is 'None'. If `host` is also set 60 | to 'None', all hosts (IPv4 and IPv6) will match, else, if `host` 61 | is set to a hostname, the same hostname is used for IPv6 (iptables 62 | resolves once and for all to IPv4 and IPv6 when the rules are 63 | created). 64 | port: The TCP/UDP port to filter on. If sets to 'None', which is the 65 | default, all ports will match. 66 | ipv4: Enable IPv4 if 'True'. Default is 'True'. 67 | ipv6: Enable IPv6 if 'True'. Default is 'True'. 68 | qnum: The Queue number for the NFQUEUE target. Default is '0'. To 69 | respect, how this modules uses NFQUEUE, it should be even: qnum is 70 | used for OUTPUT and qnum+1 is used for INPUT. If qnum is odd, a 71 | `ValueError` is raised. 72 | 73 | Attributes: 74 | output_chain: Apply the rule on the output chain if 'True'. 75 | input_chain: Apply the rule on the input chain if 'True'. 76 | proto: The protocol name (iptables-style) to filter on. 'None' means 77 | all proto will match. 78 | host: The hostname or IPv4 to filter on. 'None' means all hosts will 79 | match. 80 | host6: The IPv6 to filter on. 'None' means all hosts will match. 81 | port: The TCP/UDP port to filter on. 'None' means all ports will 82 | match. 83 | ipv4: Enable IPv4 if 'True'. 84 | ipv6: Enable IPv6 if 'True'. 85 | qnum: The Queue number for the NFQUEUE target. 86 | 87 | Raises: 88 | ValueError: See the message for details. Wrong combination of 89 | parameters. 90 | 91 | Examples: 92 | >>> # Prepare the rules with the correct filter 93 | >>> http_alt_rule = NFQueueRule( 94 | ... input_chain=False, host="www.lmddgtfy.com", port=8080) 95 | >>> http_rule = NFQueueRule( 96 | ... 
output_chain=False, host="www.lmddgtfy.com", port=80) 97 | >>> # Insert those rules 98 | >>> http_alt_rule.insert() 99 | >>> http_rule.insert() 100 | >>> # Remove the rules when finished 101 | >>> http_alt_rule.remove() 102 | >>> http_rule.remove() 103 | """ 104 | # pylint: disable=too-many-arguments 105 | def __init__(self, output_chain=True, input_chain=True, proto=None, 106 | host=None, host6=None, port=None, ipv4=True, ipv6=True, 107 | qnum=0): 108 | if not output_chain and not input_chain: 109 | raise ValueError("Can not deactivate both output_chain and " 110 | "input_chain") 111 | 112 | if not ipv4 and not ipv6: 113 | raise ValueError("Can not deactivate both IPv4 and IPv6") 114 | 115 | if qnum % 2: 116 | raise ValueError("qnum should be even") 117 | 118 | if proto is None and port is not None: 119 | # proto default to 'tcp' when only the port is specified 120 | proto = 'tcp' 121 | if host6 is None: 122 | # host6 default to the same as host if not specified 123 | host6 = host 124 | 125 | self.output_chain = output_chain 126 | self.input_chain = input_chain 127 | self.proto = proto 128 | self.host = host if ipv4 else None 129 | self.host6 = host6 if ipv6 else None 130 | self.port = ( 131 | port if proto is not None and proto.lower() in ('tcp', 'udp') 132 | else None 133 | ) 134 | self.ipv4 = ipv4 135 | self.ipv6 = ipv6 136 | self.qnum = qnum 137 | 138 | def _build_nfqueue_opt(self, h, chain): 139 | """Returns the options to use for building the NFQUEUE rule. 140 | 141 | Args: 142 | h: The current hostname to filter on. 143 | chain: The current chain (changes the direction "src/dst" for 144 | some options and the queue number). 145 | 146 | Returns: 147 | A list of parameters that can be used as options in an 148 | ip(6)tables command. 149 | """ 150 | opt = [] # A list of iptables options 151 | opt.append(chain.name) # OUTPUT or INPUT 152 | if h is not None: 153 | opt.append(chain.host_opt) # -d or -s 154 | opt.append(h) # 155 | if self.proto is not None: 156 | opt.append('-p') # -p 157 | opt.append(self.proto) # 158 | if self.port is not None: 159 | opt.append(chain.port_opt) # --dport or --sport 160 | opt.append(str(self.port)) # 161 | opt.append('-j') # -j 162 | opt.append('NFQUEUE') # NFQUEUE 163 | opt.append('--queue-num') # --queue-num 164 | opt.append(str(self.qnum + chain.qnum)) # or +1 165 | return opt 166 | 167 | def _build_rst_opt(self, h, chain): # pylint: disable=no-self-use 168 | """Returns the options to use for building the "reset TCP's RST flag" 169 | rule. 170 | 171 | Args: 172 | h: The current hostname to filter on. 173 | chain: The current chain (changes the direction "src/dst" for 174 | the port). 175 | 176 | Returns: 177 | A list of parameters that can be used as options in an 178 | ip(6)tables command. 179 | """ 180 | opt = [] # A list of iptabales options 181 | opt.append("OUTPUT") # OUTPUT 182 | if h is not None: 183 | opt.append('-d') # -d 184 | opt.append(h) # 185 | opt.append('-p') # -p 186 | opt.append(self.proto) # 187 | if self.port is not None: 188 | opt.append(chain.port_opt) # --dport or --sport 189 | opt.append(str(self.port)) # 190 | opt.append("--tcp-flags") # --tcp-flags 191 | opt.append("RST") # RST 192 | opt.append("RST") # RST 193 | opt.append("-j") # -j 194 | opt.append("DROP") # DROP 195 | return opt 196 | 197 | def _insert_or_remove(self, insert=True): 198 | """Build and then insert or remove the netfilter rules. 199 | 200 | Both operations are regrouped as they are very similary built. 
The 201 | only difference is a '-I' or a '-D' in the options. 202 | 203 | Args: 204 | insert: Inserts the rule if 'True', removes it if 'False'. 205 | 206 | Raises: 207 | CalledProcessError: An error occurred while running the 208 | sub-command ip(6)tables. 209 | """ 210 | # Pre-catch non root errors here instead of letting iptables fail. 211 | # Because it returns an exitcode of 2 which can indicate something 212 | # else. 213 | if os.geteuid() != 0: 214 | raise PermissionError("You should be root") 215 | 216 | # The binaries to use (IPv4 and/or IPv6) with the associated hostname 217 | bin_host = [] 218 | if self.ipv4: 219 | bin_host.append(("/sbin/iptables", self.host)) 220 | if self.ipv6: 221 | bin_host.append(("/sbin/ip6tables", self.host6)) 222 | 223 | # The chains to use (OUTUT and/or INPUT) 224 | chains = [] 225 | if self.output_chain: 226 | chains.append(OUTPUT) 227 | if self.input_chain: 228 | chains.append(INPUT) 229 | 230 | # The options builders (_build_nfqueue_opt and/or _build_rst_opt) 231 | opt_builders = [self._build_nfqueue_opt] 232 | if self.proto is not None and self.proto.lower() == 'tcp': 233 | opt_builders.append(self._build_rst_opt) 234 | 235 | for binary, h in bin_host: 236 | for chain in chains: 237 | for opt_builder in opt_builders: 238 | # Build the iptables/ip6tables resulting command 239 | cmd = [] 240 | cmd.append(binary) 241 | if insert: 242 | cmd.append('-I') 243 | else: 244 | cmd.append('-D') 245 | cmd.extend(opt_builder(h, chain)) 246 | # Run the command and raise an exception if an error occurs 247 | subprocess.run(cmd, check=True) 248 | 249 | def insert(self): 250 | """Builds and insert the resulting rules in iptables and ip6tables. 251 | 252 | Raises: 253 | CalledProcessError: exception is raised if an error occurs in the 254 | process. 255 | """ 256 | self._insert_or_remove(insert=True) 257 | 258 | def remove(self): 259 | """Removes the previously inserted rules in iptables and ip6tables. 260 | 261 | Raises: 262 | CalledProcessError: exception is raised if an error occurs in the 263 | process. 264 | """ 265 | self._insert_or_remove(insert=False) 266 | 267 | 268 | class NFQueue(object): 269 | """ 270 | Queue object that contains the different packets in the NFQUEUE target. 271 | It can be iterated over in a for-loop to access them one by one or call 272 | the `next_packet()` method to access only one packet. The packets are 273 | either `fragscapy.netfilter.IP` objects or `fragscapy.netfilter.IPv6` 274 | objects, depending on the `ethertype` parameters received from Layer-2. 275 | 276 | Here is an example of how to setup a proxy from port 8080 to port 80 277 | (see `fragscapy.netfilter.IP` documentation for how to use the packets): 278 | 279 | >>> q = NFQueue() 280 | >>> for p in q: 281 | ... if p.haslayer('TCP'): 282 | ... t = p.getlayer('TCP') 283 | ... print("{}:{} -> ".format(t.sport, t.dport), end='') 284 | ... if t.sport == 80: 285 | ... t.sport = 8080 286 | ... if t.dport == 8080: 287 | ... t.dport = 80 288 | ... t.chksum = None 289 | ... print("{}:{}".format(t.sport, t.dport)) 290 | ... p.mangle() 291 | 292 | :param qnum: The queue number to use. For the same reasons explained in 293 | `NFQueue`'s documentations, qnum should be even and will raise a 294 | `ValueError` exception if not. 
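Once the processing is over, `stop()` can be called (possibly from another
thread) to close the underlying `fnfqueue` connection; the iteration above
then terminates:

    >>> q.stop()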
295 | """ 296 | def __init__(self, qnum=0): 297 | if qnum % 2: 298 | raise ValueError('qnum should be even') 299 | 300 | self._conn = fnfqueue.Connection() 301 | self._conn.bind(qnum).set_mode( 302 | fnfqueue.MAX_PAYLOAD, fnfqueue.COPY_PACKET 303 | ) 304 | self._conn.bind(qnum+1).set_mode( 305 | fnfqueue.MAX_PAYLOAD, fnfqueue.COPY_PACKET 306 | ) 307 | 308 | # Has the nfqueue been stopped ? 309 | self._stopped = False 310 | 311 | def __iter__(self): 312 | return self 313 | 314 | def __next__(self): 315 | return self.next_packet() 316 | 317 | def next_packet(self): 318 | """Returns the next packet in NFQUEUE.""" 319 | if self.is_stopped(): 320 | raise StopIteration 321 | for p in self._conn: 322 | if p.hw_protocol == scapy.data.ETH_P_IP: 323 | return IP(p) 324 | if p.hw_protocol == scapy.data.ETH_P_IPV6: 325 | return IPv6(p) 326 | p.accept() 327 | raise StopIteration 328 | 329 | def is_stopped(self): 330 | """Has the nfqueue been stopped (i.e. cannot be used anymore) ?""" 331 | return self._stopped 332 | 333 | def stop(self): 334 | """Stops the process of the nfqueue by closing the connection.""" 335 | self._stopped = True 336 | self._conn.close() 337 | 338 | def unbind(self): 339 | """Unbind all the NFQUEUES.""" 340 | for queue in list(self._conn.queue.values()): 341 | queue.unbind() 342 | 343 | 344 | class PacketWrapper(abc.ABC): 345 | """ 346 | A Scapy representation of the data received from the NFQUEUE target. 347 | In depth, it is the junction of a `scapy` IP (or IPv6) packet and a 348 | `fnfqueue` packet so both modules can be used : the `scapy` methods 349 | are available the same way as for a scapy packet but the method to 350 | validate (or drop) the `fnfqueue` packets are still usable. 351 | See the corresponding documentation to learn how to use those modules. 352 | 353 | :param pkt: the `fnfqueue` packet received 354 | :param output: Is the packet from the OUTPUT chain. Default is True 355 | """ 356 | 357 | def __init__(self, pkt): 358 | self.scapy_pkt = self.l3_layer(pkt.payload) 359 | self.fnfqueue_pkt = pkt 360 | self._output = pkt.packet.queue_id % 2 == 0 361 | 362 | @property 363 | @abc.abstractmethod 364 | def l3_layer(self): 365 | """The `scapy` l3-layer constructor to use 366 | (e.g. `scapy.layers.inet.IP` or `scapy.layers.inet6.IPv6`).""" 367 | raise NotImplementedError 368 | 369 | @property 370 | def is_input(self): 371 | """True if the packet comes from INPUT chain.""" 372 | return not self._output 373 | 374 | @property 375 | def is_output(self): 376 | """True if the packet comes from OUTPUT chain.""" 377 | return self._output 378 | 379 | def _apply_modifications(self): 380 | """Reports the modifications in the Scapy packet to the fnfqueue 381 | packet.""" 382 | self.fnfqueue_pkt.payload = bytes(self.scapy_pkt) 383 | 384 | def __dir__(self): 385 | ret = ['scapy_pkt', 'fnfqueue_pkt', 'l3_layer', 'is_input', 386 | 'is_output'] 387 | ret.extend(dir(self.scapy_pkt)) 388 | ret.extend(dir(self.fnfqueue_pkt)) 389 | return ret 390 | 391 | def __getattr__(self, name): 392 | try: 393 | return getattr(self.scapy_pkt, name) 394 | except AttributeError: 395 | pass 396 | ret = getattr(self.fnfqueue_pkt, name) 397 | # When accessing the underlying fnfqueue methods, 398 | # force to apply the modifications (useful before calling methods 399 | # such as accept, mangle, verdict, ...) 400 | self._apply_modifications() 401 | return ret 402 | 403 | def raw_send(self): 404 | """Sends the scapy packet directly on a raw socket. 
405 | 406 | The charge of dropping the nfqueue packet is left to the user 407 | (if necessary). 408 | """ 409 | scapy.sendrecv.send(self.scapy_pkt) 410 | 411 | 412 | 413 | class IP(PacketWrapper): 414 | """See PacketWrapper documentation.""" 415 | l3_layer = scapy.layers.inet.IP 416 | __doc__ = PacketWrapper.__doc__ 417 | 418 | 419 | class IPv6(PacketWrapper): 420 | """See PacketWrapper documentation.""" 421 | l3_layer = scapy.layers.inet6.IPv6 422 | __doc__ = PacketWrapper.__doc__ 423 | -------------------------------------------------------------------------------- /fragscapy/tests.py: -------------------------------------------------------------------------------- 1 | """A collection of structures to represent the tests that can be run (and 2 | tested). 3 | 4 | The `TestSuite` object defines a modification to use (for both INPUT 5 | and OUTPUT chains) and yields a `RepeatedTestCase` object. This object defines 6 | the exact command and filenames to use in the test and yields a `TestCase` 7 | object. This final object contains all the information about a single and can 8 | run the required command. 9 | 10 | Schema of the hierarchy of test objects:: 11 | 12 | TestSuite 13 | +---> RepeatedTestCase with modification n°1 14 | | +---> TestCase with modification n°1 and repetition n°1 15 | | +---> TestCase with modification n°1 and repetition n°2 16 | | +---> TestCase with modification n°1 and repetition n°3 17 | | +---> ... 18 | +---> RepeatedTestCase with modification n°2 19 | | +---> TestCase with modification n°2 and repetition n°1 20 | | +---> ... 21 | +---> RepeatedTestCase with modification n°3 22 | | +---> TestCase with modification n°4 and repetition n°1 23 | | +---> ... 24 | +---> ... 25 | """ 26 | 27 | 28 | import glob 29 | import os 30 | import string 31 | import subprocess 32 | 33 | 34 | def rm_pattern(pattern): 35 | """Deletes all the files that match a formatting pattern.""" 36 | # Build the args and kwargs to use '*' in the pattern 37 | args = list() 38 | kwargs = dict() 39 | for _, name, _, _ in string.Formatter().parse(pattern): 40 | if name is None: 41 | continue 42 | if name: 43 | kwargs[name] = '*' 44 | else: 45 | args.append('*') 46 | 47 | # Remove the corresponding files 48 | for f in glob.glob(pattern.format(*args, **kwargs)): 49 | os.remove(f) 50 | 51 | 52 | # pylint: disable=too-many-instance-attributes 53 | class TestPatterns(object): 54 | """Regroups all the patterns that creates the command and filenames at 55 | each test iteration. 56 | 57 | Any pattern uses `{i}` and `{j}` as placeholders for respectively the 58 | number of the current modification and the number of the current iteration 59 | of this modification. 60 | 61 | Args: 62 | cmd_pattern: The pattern for the command to execute. 63 | stdout: 'True' if the standard output of the test should be captured. 64 | Default is 'False'. 65 | stdout_pattern: The pattern of the filename to redirect the standard 66 | output of the test to. Use 'None' to redirect to stdout. Default 67 | is 'None' 68 | stderr: 'True' if the standard error of the test should be captured. 69 | Default is 'False'. 70 | stderr_pattern: The pattern of the filename to redirect the standard 71 | error of the test to. Use 'None' to redirect to stdout. Default is 72 | 'None'. 73 | local_pattern: The pattern of the name of the pcap file to which the 74 | local packets details should be dumped to. Use 'None' to not dump 75 | the packet details. Default is 'None'. 
76 | remote_pattern: The pattern of the name of the pcap file to which the 77 | remote packets details should be dumped to. Use 'None' to not dump 78 | the packet details. Default is 'None'. 79 | 80 | Attributes: 81 | cmd_pattern: The pattern for the command to execute. 82 | stdout: 'True' if the standard output of the test should be captured 83 | stdout_pattern: The pattern of the filename to redirect the standard 84 | output of the test to. Use 'None' to redirect to stdout. 85 | stderr: 'True' if the standard error of the test should be captured 86 | stderr_pattern: The pattern of the filename to redirect the standard 87 | error of the test to. Use 'None' to redirect to stdout. 88 | local_pattern: The pattern of the name of the pcap file to which the 89 | local packets details should be dumped to. Use 'None' to not dump 90 | the packet details. 91 | remote_pattern: The pattern of the name of the pcap file to which the 92 | remote packets details should be dumped to. Use 'None' to not dump 93 | the packet details. 94 | open_fd: A dictionnary that link a filename and its filedescriptor if 95 | it is already openned (avoid openning the same file multiple 96 | times). 97 | """ 98 | def __init__(self, **kwargs): 99 | self.cmd_pattern = kwargs.pop("cmd_pattern") 100 | self.stdout = kwargs.pop("stdout", False) 101 | self.stdout_pattern = kwargs.pop("stdout_pattern", None) 102 | self.stderr = kwargs.pop("stderr", False) 103 | self.stderr_pattern = kwargs.pop("stderr_pattern", None) 104 | self.local_pcap_pattern = kwargs.pop("local_pcap_pattern", None) 105 | self.remote_pcap_pattern = kwargs.pop("remote_pcap_pattern", None) 106 | 107 | self.open_fd = dict() 108 | 109 | def get_cmd(self, i, j): 110 | """Returns the command based on its pattern and the given `i` and 111 | `j`.""" 112 | return self.cmd_pattern.format(i=i, j=j) 113 | 114 | def get_stdout(self, i, j): 115 | """Returns the stdout file descriptor based on its pattern and the 116 | given `i` and `j`.""" 117 | if self.stdout: 118 | if self.stdout_pattern is None: 119 | return None 120 | fname = self.stdout_pattern.format(i=i, j=j) 121 | if fname not in self.open_fd: 122 | if os.path.dirname(fname): 123 | os.makedirs(os.path.dirname(fname), exist_ok=True) 124 | self.open_fd[fname] = open(fname, "ab") 125 | return self.open_fd[fname] 126 | return subprocess.PIPE 127 | 128 | def get_stderr(self, i, j): 129 | """Returns the stderr file descriptor based on its pattern and the 130 | given `i` and `j`.""" 131 | if self.stderr: 132 | if self.stderr_pattern is None: 133 | return None 134 | fname = self.stderr_pattern.format(i=i, j=j) 135 | if fname not in self.open_fd: 136 | if os.path.dirname(fname): 137 | os.makedirs(os.path.dirname(fname), exist_ok=True) 138 | self.open_fd[fname] = open(fname, "ab") 139 | return self.open_fd[fname] 140 | return subprocess.PIPE 141 | 142 | def get_local_pcap(self, i, j): 143 | """Returns the local pcap filename based on its pattern and the given 144 | `i` and `j`.""" 145 | if self.local_pcap_pattern is None: 146 | return None 147 | fname = self.local_pcap_pattern.format(i=i, j=j) 148 | if os.path.dirname(fname): 149 | os.makedirs(os.path.dirname(fname), exist_ok=True) 150 | return fname 151 | 152 | def get_remote_pcap(self, i, j): 153 | """Returns the remote pcap filename based on its pattern and the given 154 | `i` and `j`.""" 155 | if self.remote_pcap_pattern is None: 156 | return None 157 | fname = self.remote_pcap_pattern.format(i=i, j=j) 158 | if os.path.dirname(fname): 159 | 
os.makedirs(os.path.dirname(fname), exist_ok=True) 160 | return fname 161 | 162 | def get(self, i, j): 163 | """Returns a dictionnary of all the generated objects based on their 164 | patterns and the given `i` and `j`. It can be used directly as an 165 | input for `TestCase`.""" 166 | return { 167 | "cmd": self.get_cmd(i, j), 168 | "stdout": self.get_stdout(i, j), 169 | "stderr": self.get_stderr(i, j), 170 | "local_pcap": self.get_local_pcap(i, j), 171 | "remote_pcap": self.get_remote_pcap(i, j), 172 | } 173 | 174 | def close_stdout(self, i, j): 175 | """Closes the stdout file descriptor based on its pattern and the 176 | given `i` and `j`.""" 177 | if self.stdout and self.stdout_pattern is not None: 178 | fname = self.stdout_pattern.format(i=i, j=j) 179 | if fname in self.open_fd: 180 | self.open_fd[fname].close() 181 | del self.open_fd[fname] 182 | 183 | def close_stderr(self, i, j): 184 | """Closes the stderr file descriptor based on its pattern and the 185 | given `i` and `j`.""" 186 | if self.stderr and self.stderr_pattern is not None: 187 | fname = self.stderr_pattern.format(i=i, j=j) 188 | if fname in self.open_fd: 189 | self.open_fd[fname].close() 190 | del self.open_fd[fname] 191 | 192 | def close(self, i, j): 193 | """Closes all file descriptor based on its pattern and the 194 | given `i` and `j`.""" 195 | self.close_stderr(i, j) 196 | self.close_stdout(i, j) 197 | 198 | def close_all(self): 199 | """Closes all open file descriptors.""" 200 | for fname in self.open_fd: 201 | self.open_fd[fname].close() 202 | self.open_fd = dict() 203 | 204 | def remove_all(self): 205 | """Removes all files that can match the patterns of `stdout_pattern`, 206 | `stderr_pattern`, `local_pcap_pattern` and `remote_pcap_pattern`.""" 207 | if self.stdout and self.stdout_pattern is not None: 208 | rm_pattern(self.stdout_pattern) 209 | if self.stderr and self.stderr_pattern is not None: 210 | rm_pattern(self.stderr_pattern) 211 | if self.local_pcap_pattern is not None: 212 | rm_pattern(self.local_pcap_pattern) 213 | if self.remote_pcap_pattern is not None: 214 | rm_pattern(self.remote_pcap_pattern) 215 | 216 | 217 | class TestCase(object): 218 | """A situation to be tested. It contains all the informations (filenames, 219 | id, command and result) that represents the test. 220 | 221 | Args: 222 | cmd: The command to run. 223 | stdout: The file descriptor to redirect standard output to. 'None' is 224 | for stdout. Default is 'None'. 225 | stderr: The file descriptor to redirect standard error to. 'None' is 226 | for stderr. Default is 'None'. 227 | local_pcap: The filename of the pcap file to dump local packets 228 | details to. 'None' is for not dumping the packet details. Default 229 | is 'None' 230 | remote_pcap: The filename of the pcap file to dump remote packets 231 | details to. 'None' is for not dumping the packet details. Default 232 | is 'None'. 233 | test_id: The number of this test a.k.a. `j`. 234 | 235 | Attributes: 236 | cmd: The command to run. 237 | stdout: The file descriptor to redirect standard output to. 'None' is 238 | for stdout. 239 | stderr: The file descriptor to redirect standard error to. 'None' is 240 | for stderr. 241 | local_pcap: The filename of the pcap file to dump local packets 242 | details to. 'None' is for not dumping the packet details. 243 | remote_pcap: The filename of the pcap file to dump remote packets 244 | details to. 'None' is for not dumping the packet details. 245 | test_id: The number of this test a.k.a. `j`. 
246 | result: The `ProcessCompleted` returned by the subprocess or 'None' if 247 | not run yet. 248 | """ 249 | 250 | def __init__(self, **kwargs): 251 | self.cmd = kwargs.pop("cmd") 252 | self.stdout = kwargs.pop("stdout", None) 253 | self.stderr = kwargs.pop("stderr", None) 254 | self.local_pcap = kwargs.pop("local_pcap", None) 255 | self.remote_pcap = kwargs.pop("remote_pcap", None) 256 | self.test_id = kwargs.pop("test_id") 257 | self.result = None 258 | 259 | def run(self): 260 | """Executes the user command in a sub-process. Redirect stdout and 261 | stderr to the corresponding files.""" 262 | self.result = subprocess.run(self.cmd, stdout=self.stdout, 263 | stderr=self.stderr, shell=True) 264 | 265 | def is_done(self): 266 | """Returns 'True' if the command has been run at least once.""" 267 | return self.result is not None 268 | 269 | def is_success(self): 270 | """Returns 'True' if the command returned a zero exitcode.""" 271 | return self.is_done() and self.result.returncode == 0 272 | 273 | def is_failure(self): 274 | """Returns 'True' if the command returned a non-zero exitcode.""" 275 | return self.is_done() and self.result.returncode != 0 276 | 277 | 278 | # pylint: disable=too-few-public-methods 279 | class RepeatedTestCase(object): 280 | """A series of `TestCase` that might be repeated mulitple times if 281 | the modifications are not deterministic. 282 | 283 | All the repeated tests will be run with the same modifications. 284 | 285 | Args: 286 | modlists: A 2-tuple of `(input_modlist, output_modlist)`. 287 | modif_file: The name of the modification file (the same for all 288 | repeated tests because it does not change) 289 | test_id: The number of this test a.k.a. 'i'. 290 | test_patterns: The `TestPattern` object to use for each of the tests. 291 | repeat: The number of times the test case should be repeated (only 292 | used if the tests are non-deterministic) 293 | 294 | Attributes: 295 | input_modlist: The modification list applied on INPUT chain. 296 | output_modlist: The modification list applied on OUTPUT chain. 297 | modif_file: The name of the modification file (the same for all 298 | repeated tests because it does not change) 299 | test_id: The number of this test a.k.a. 'i'. 300 | test_pattern: The `TestPattern` object to use for each of the tests. 301 | repeat: The number of times a test case must be repeated. 302 | test_generated: A list of all `TestCase` objects generated so far. 303 | """ 304 | # pylint: disable=too-many-arguments 305 | def __init__(self, modlists, modif_file, test_id, test_patterns, repeat): 306 | self.input_modlist = modlists[0] 307 | self.output_modlist = modlists[1] 308 | self.modif_file = modif_file 309 | self.test_id = test_id 310 | self.test_patterns = test_patterns 311 | 312 | # Only repeat if the tests are non-deterministic 313 | if (self.input_modlist.is_deterministic() 314 | and self.output_modlist.is_deterministic()): 315 | self.repeat = 1 316 | else: 317 | self.repeat = repeat 318 | 319 | self.tests_generated = list() 320 | 321 | def __iter__(self): 322 | for j in range(self.repeat): 323 | test = TestCase( 324 | **self.test_patterns.get(self.test_id, j), 325 | test_id=j, 326 | ) 327 | self.tests_generated.append(test) 328 | yield test 329 | # The files must be closed right after being used 330 | # to avoid keeping too many file descriptors opened 331 | self.test_patterns.close(self.test_id, j) 332 | 333 | 334 | class TestSuite(object): 335 | """A series of tests to run described by some patterns and the 336 | `ModListGenerator`. 
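A typical iteration looks like the following sketch (here `ml_iterator` is
assumed to yield `(input_modlist, output_modlist)` pairs and the command is
only an example):

        >>> suite = TestSuite(ml_iterator=ml_iterator,
        ...                   modif_file_pattern="modif_{i}.txt",
        ...                   repeat=10,
        ...                   cmd_pattern="ping -c 1 www.example.com")
        >>> for repeated_test in suite:
        ...     for test in repeated_test:
        ...         test.run()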
337 | 338 | Args: 339 | ml_iterator: An iterator over all possible 2-tuples of 340 | `(input_modlist, output_modlist)` that needs to be tested. 341 | modif_file_pattern: The pattern for the modification file (separated 342 | from the others because it does not require the `j` argument). 343 | repeat: The number of times a test case should be repeated (only used 344 | if the tests are non-deterministic) 345 | **kwargs: All other arguments are passed to the constructor of 346 | `TestPatterns`. 347 | 348 | Attributes: 349 | ml_iterator: An iterator over all possible 2-tuples of 350 | `(input_modlist, output_modlist)` that needs to be tested. 351 | modif_file_pattern: The pattern for the modification file (separated 352 | from the others because it does not require the `j` argument). 353 | repeat: The number of times a test case must be repeated. 354 | test_patterns: The `TestPatterns` object that will be used to generate 355 | the filenames and commands of each test case. 356 | test_generated: A list of all `RepeatedTestCase` objects generated so 357 | far. 358 | """ 359 | def __init__(self, **kwargs): 360 | self.ml_iterator = kwargs.pop("ml_iterator") 361 | self.modif_file_pattern = kwargs.pop("modif_file_pattern") 362 | self.repeat = kwargs.pop("repeat") 363 | self.test_patterns = TestPatterns(**kwargs) 364 | self.tests_generated = list() 365 | 366 | def flush_modif_files(self): 367 | """Deletes all the files that match `modif_file_pattern`.""" 368 | rm_pattern(self.modif_file_pattern) 369 | 370 | def flush_all_files(self): 371 | """Deletes all the files that could be generated during the process of 372 | the tests.""" 373 | self.flush_modif_files() 374 | self.test_patterns.remove_all() 375 | 376 | def __iter__(self): 377 | for test_id, modlists in enumerate(self.ml_iterator): 378 | # Makes sure the modif_file directory exists 379 | modif_file = self.modif_file_pattern.format(i=test_id) 380 | if os.path.dirname(modif_file): 381 | os.makedirs(os.path.dirname(modif_file), exist_ok=True) 382 | 383 | # Creates a RepeatedTestCase with these modlists 384 | repeated_test_case = RepeatedTestCase( 385 | modlists, 386 | modif_file, 387 | test_id, 388 | self.test_patterns, 389 | self.repeat, 390 | ) 391 | self.tests_generated.append(repeated_test_case) 392 | yield repeated_test_case 393 | 394 | # Closing all remaining files for safety 395 | self.test_patterns.close_all() 396 | -------------------------------------------------------------------------------- /.pylintrc: -------------------------------------------------------------------------------- 1 | [MASTER] 2 | 3 | # A comma-separated list of package or module names from where C extensions may 4 | # be loaded. Extensions are loading into the active Python interpreter and may 5 | # run arbitrary code. 6 | extension-pkg-whitelist= 7 | 8 | # Add files or directories to the blacklist. They should be base names, not 9 | # paths. 10 | ignore=CVS 11 | 12 | # Add files or directories matching the regex patterns to the blacklist. The 13 | # regex matches against base names, not paths. 14 | ignore-patterns= 15 | 16 | # Python code to execute, usually for sys.path manipulation such as 17 | # pygtk.require(). 18 | #init-hook= 19 | 20 | # Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the 21 | # number of processors available to use. 22 | jobs=1 23 | 24 | # Control the amount of potential inferred values when inferring a single 25 | # object. 
This can help the performance when dealing with large functions or 26 | # complex, nested conditions. 27 | limit-inference-results=100 28 | 29 | # List of plugins (as comma separated values of python modules names) to load, 30 | # usually to register additional checkers. 31 | load-plugins= 32 | 33 | # Pickle collected data for later comparisons. 34 | persistent=yes 35 | 36 | # Specify a configuration file. 37 | #rcfile= 38 | 39 | # When enabled, pylint would attempt to guess common misconfiguration and emit 40 | # user-friendly hints instead of false-positive error messages. 41 | suggestion-mode=yes 42 | 43 | # Allow loading of arbitrary C extensions. Extensions are imported into the 44 | # active Python interpreter and may run arbitrary code. 45 | unsafe-load-any-extension=no 46 | 47 | 48 | [MESSAGES CONTROL] 49 | 50 | # Only show warnings with the listed confidence levels. Leave empty to show 51 | # all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED. 52 | confidence= 53 | 54 | # Disable the message, report, category or checker with the given id(s). You 55 | # can either give multiple identifiers separated by comma (,) or put this 56 | # option multiple times (only on the command line, not in the configuration 57 | # file where it should appear only once). You can also use "--disable=all" to 58 | # disable everything first and then reenable specific checks. For example, if 59 | # you want to run only the similarities checker, you can use "--disable=all 60 | # --enable=similarities". If you want to run only the classes checker, but have 61 | # no Warning level messages displayed, use "--disable=all --enable=classes 62 | # --disable=W". 63 | disable=print-statement, 64 | parameter-unpacking, 65 | unpacking-in-except, 66 | old-raise-syntax, 67 | backtick, 68 | long-suffix, 69 | old-ne-operator, 70 | old-octal-literal, 71 | import-star-module-level, 72 | non-ascii-bytes-literal, 73 | raw-checker-failed, 74 | bad-inline-option, 75 | locally-disabled, 76 | file-ignored, 77 | suppressed-message, 78 | useless-suppression, 79 | deprecated-pragma, 80 | use-symbolic-message-instead, 81 | apply-builtin, 82 | basestring-builtin, 83 | buffer-builtin, 84 | cmp-builtin, 85 | coerce-builtin, 86 | execfile-builtin, 87 | file-builtin, 88 | long-builtin, 89 | raw_input-builtin, 90 | reduce-builtin, 91 | standarderror-builtin, 92 | unicode-builtin, 93 | xrange-builtin, 94 | coerce-method, 95 | delslice-method, 96 | getslice-method, 97 | setslice-method, 98 | no-absolute-import, 99 | old-division, 100 | dict-iter-method, 101 | dict-view-method, 102 | next-method-called, 103 | metaclass-assignment, 104 | indexing-exception, 105 | raising-string, 106 | reload-builtin, 107 | oct-method, 108 | hex-method, 109 | nonzero-method, 110 | cmp-method, 111 | input-builtin, 112 | round-builtin, 113 | intern-builtin, 114 | unichr-builtin, 115 | map-builtin-not-iterating, 116 | zip-builtin-not-iterating, 117 | range-builtin-not-iterating, 118 | filter-builtin-not-iterating, 119 | using-cmp-argument, 120 | eq-without-hash, 121 | div-method, 122 | idiv-method, 123 | rdiv-method, 124 | exception-message-attribute, 125 | invalid-str-codec, 126 | sys-max-int, 127 | bad-python3-import, 128 | deprecated-string-function, 129 | deprecated-str-translate-call, 130 | deprecated-itertools-function, 131 | deprecated-types-field, 132 | next-method-defined, 133 | dict-items-not-iterating, 134 | dict-keys-not-iterating, 135 | dict-values-not-iterating, 136 | deprecated-operator-function, 137 | deprecated-urllib-function, 138 | 
xreadlines-attribute, 139 | deprecated-sys-function, 140 | exception-escape, 141 | comprehension-escape, 142 | similarities, 143 | useless-super-delegation, 144 | useless-object-inheritance 145 | 146 | # Enable the message, report, category or checker with the given id(s). You can 147 | # either give multiple identifier separated by comma (,) or put this option 148 | # multiple time (only on the command line, not in the configuration file where 149 | # it should appear only once). See also the "--disable" option for examples. 150 | enable=c-extension-no-member 151 | 152 | 153 | [REPORTS] 154 | 155 | # Python expression which should return a note less than 10 (10 is the highest 156 | # note). You have access to the variables errors warning, statement which 157 | # respectively contain the number of errors / warnings messages and the total 158 | # number of statements analyzed. This is used by the global evaluation report 159 | # (RP0004). 160 | evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10) 161 | 162 | # Template used to display messages. This is a python new-style format string 163 | # used to format the message information. See doc for all details. 164 | #msg-template= 165 | 166 | # Set the output format. Available formats are text, parseable, colorized, json 167 | # and msvs (visual studio). You can also give a reporter class, e.g. 168 | # mypackage.mymodule.MyReporterClass. 169 | output-format=text 170 | 171 | # Tells whether to display a full report or only the messages. 172 | reports=no 173 | 174 | # Activate the evaluation score. 175 | score=yes 176 | 177 | 178 | [REFACTORING] 179 | 180 | # Maximum number of nested blocks for function / method body 181 | max-nested-blocks=5 182 | 183 | # Complete name of functions that never returns. When checking for 184 | # inconsistent-return-statements if a never returning function is called then 185 | # it will be considered as an explicit return statement and no message will be 186 | # printed. 187 | never-returning-functions=sys.exit 188 | 189 | 190 | [VARIABLES] 191 | 192 | # List of additional names supposed to be defined in builtins. Remember that 193 | # you should avoid defining new builtins when possible. 194 | additional-builtins= 195 | 196 | # Tells whether unused global variables should be treated as a violation. 197 | allow-global-unused-variables=yes 198 | 199 | # List of strings which can identify a callback function by name. A callback 200 | # name must start or end with one of those strings. 201 | callbacks=cb_, 202 | _cb 203 | 204 | # A regular expression matching the name of dummy variables (i.e. expected to 205 | # not be used). 206 | dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_ 207 | 208 | # Argument names that match this expression will be ignored. Default to name 209 | # with leading underscore. 210 | ignored-argument-names=_.*|^ignored_|^unused_ 211 | 212 | # Tells whether we should check for unused import in __init__ files. 213 | init-import=no 214 | 215 | # List of qualified module names which can have objects that can redefine 216 | # builtins. 217 | redefining-builtins-modules=six.moves,past.builtins,future.builtins,builtins,io 218 | 219 | 220 | [STRING] 221 | 222 | # This flag controls whether the implicit-str-concat-in-sequence should 223 | # generate a warning on implicit string concatenation in sequences defined over 224 | # several lines. 
225 | check-str-concat-over-line-jumps=no 226 | 227 | 228 | [TYPECHECK] 229 | 230 | # List of decorators that produce context managers, such as 231 | # contextlib.contextmanager. Add to this list to register other decorators that 232 | # produce valid context managers. 233 | contextmanager-decorators=contextlib.contextmanager 234 | 235 | # List of members which are set dynamically and missed by pylint inference 236 | # system, and so shouldn't trigger E1101 when accessed. Python regular 237 | # expressions are accepted. 238 | generated-members= 239 | 240 | # Tells whether missing members accessed in mixin class should be ignored. A 241 | # mixin class is detected if its name ends with "mixin" (case insensitive). 242 | ignore-mixin-members=yes 243 | 244 | # Tells whether to warn about missing members when the owner of the attribute 245 | # is inferred to be None. 246 | ignore-none=yes 247 | 248 | # This flag controls whether pylint should warn about no-member and similar 249 | # checks whenever an opaque object is returned when inferring. The inference 250 | # can return multiple potential results while evaluating a Python object, but 251 | # some branches might not be evaluated, which results in partial inference. In 252 | # that case, it might be useful to still emit no-member and other checks for 253 | # the rest of the inferred objects. 254 | ignore-on-opaque-inference=yes 255 | 256 | # List of class names for which member attributes should not be checked (useful 257 | # for classes with dynamically set attributes). This supports the use of 258 | # qualified names. 259 | ignored-classes=optparse.Values,thread._local,_thread._local 260 | 261 | # List of module names for which member attributes should not be checked 262 | # (useful for modules/projects where namespaces are manipulated during runtime 263 | # and thus existing member attributes cannot be deduced by static analysis. It 264 | # supports qualified module names, as well as Unix pattern matching. 265 | ignored-modules= 266 | 267 | # Show a hint with possible names when a member name was not found. The aspect 268 | # of finding the hint is based on edit distance. 269 | missing-member-hint=yes 270 | 271 | # The minimum edit distance a name should have in order to be considered a 272 | # similar match for a missing member name. 273 | missing-member-hint-distance=1 274 | 275 | # The total number of similar names that should be taken in consideration when 276 | # showing a hint for a missing member. 277 | missing-member-max-choices=1 278 | 279 | 280 | [SIMILARITIES] 281 | 282 | # Ignore comments when computing similarities. 283 | ignore-comments=yes 284 | 285 | # Ignore docstrings when computing similarities. 286 | ignore-docstrings=yes 287 | 288 | # Ignore imports when computing similarities. 289 | ignore-imports=no 290 | 291 | # Minimum lines number of a similarity. 292 | min-similarity-lines=4 293 | 294 | 295 | [FORMAT] 296 | 297 | # Expected format of line ending, e.g. empty (any line ending), LF or CRLF. 298 | expected-line-ending-format= 299 | 300 | # Regexp for a line that is allowed to be longer than the limit. 301 | ignore-long-lines=^\s*(# )??$ 302 | 303 | # Number of spaces of indent required inside a hanging or continued line. 304 | indent-after-paren=4 305 | 306 | # String used as indentation unit. This is usually " " (4 spaces) or "\t" (1 307 | # tab). 308 | indent-string=' ' 309 | 310 | # Maximum number of characters on a single line. 311 | max-line-length=80 312 | 313 | # Maximum number of lines in a module. 
314 | max-module-lines=1000 315 | 316 | # List of optional constructs for which whitespace checking is disabled. `dict- 317 | # separator` is used to allow tabulation in dicts, etc.: {1 : 1,\n222: 2}. 318 | # `trailing-comma` allows a space between comma and closing bracket: (a, ). 319 | # `empty-line` allows space-only lines. 320 | no-space-check=trailing-comma, 321 | dict-separator 322 | 323 | # Allow the body of a class to be on the same line as the declaration if body 324 | # contains single statement. 325 | single-line-class-stmt=no 326 | 327 | # Allow the body of an if to be on the same line as the test if there is no 328 | # else. 329 | single-line-if-stmt=no 330 | 331 | 332 | [SPELLING] 333 | 334 | # Limits count of emitted suggestions for spelling mistakes. 335 | max-spelling-suggestions=4 336 | 337 | # Spelling dictionary name. Available dictionaries: none. To make it working 338 | # install python-enchant package.. 339 | spelling-dict= 340 | 341 | # List of comma separated words that should not be checked. 342 | spelling-ignore-words= 343 | 344 | # A path to a file that contains private dictionary; one word per line. 345 | spelling-private-dict-file= 346 | 347 | # Tells whether to store unknown words to indicated private dictionary in 348 | # --spelling-private-dict-file option instead of raising a message. 349 | spelling-store-unknown-words=no 350 | 351 | 352 | [MISCELLANEOUS] 353 | 354 | # List of note tags to take in consideration, separated by a comma. 355 | notes=FIXME, 356 | XXX, 357 | TODO 358 | 359 | 360 | [BASIC] 361 | 362 | # Naming style matching correct argument names. 363 | argument-naming-style=snake_case 364 | 365 | # Regular expression matching correct argument names. Overrides argument- 366 | # naming-style. 367 | #argument-rgx= 368 | 369 | # Naming style matching correct attribute names. 370 | attr-naming-style=snake_case 371 | 372 | # Regular expression matching correct attribute names. Overrides attr-naming- 373 | # style. 374 | #attr-rgx= 375 | 376 | # Bad variable names which should always be refused, separated by a comma. 377 | bad-names=foo, 378 | bar, 379 | baz, 380 | toto, 381 | tutu, 382 | tata 383 | 384 | # Naming style matching correct class attribute names. 385 | class-attribute-naming-style=any 386 | 387 | # Regular expression matching correct class attribute names. Overrides class- 388 | # attribute-naming-style. 389 | #class-attribute-rgx= 390 | 391 | # Naming style matching correct class names. 392 | class-naming-style=PascalCase 393 | 394 | # Regular expression matching correct class names. Overrides class-naming- 395 | # style. 396 | #class-rgx= 397 | 398 | # Naming style matching correct constant names. 399 | const-naming-style=UPPER_CASE 400 | 401 | # Regular expression matching correct constant names. Overrides const-naming- 402 | # style. 403 | #const-rgx= 404 | 405 | # Minimum line length for functions/classes that require docstrings, shorter 406 | # ones are exempt. 407 | docstring-min-length=-1 408 | 409 | # Naming style matching correct function names. 410 | function-naming-style=snake_case 411 | 412 | # Regular expression matching correct function names. Overrides function- 413 | # naming-style. 414 | #function-rgx= 415 | 416 | # Good variable names which should always be accepted, separated by a comma. 
417 | good-names=e, 418 | f, 419 | h, 420 | i, 421 | j, 422 | k, 423 | l, 424 | n, 425 | nh, 426 | m, 427 | p, 428 | q, 429 | s, 430 | ex, 431 | Run, 432 | _, 433 | __ 434 | 435 | # Include a hint for the correct naming format with invalid-name. 436 | include-naming-hint=no 437 | 438 | # Naming style matching correct inline iteration names. 439 | inlinevar-naming-style=any 440 | 441 | # Regular expression matching correct inline iteration names. Overrides 442 | # inlinevar-naming-style. 443 | #inlinevar-rgx= 444 | 445 | # Naming style matching correct method names. 446 | method-naming-style=snake_case 447 | 448 | # Regular expression matching correct method names. Overrides method-naming- 449 | # style. 450 | #method-rgx= 451 | 452 | # Naming style matching correct module names. 453 | module-naming-style=snake_case 454 | 455 | # Regular expression matching correct module names. Overrides module-naming- 456 | # style. 457 | #module-rgx= 458 | 459 | # Colon-delimited sets of names that determine each other's naming style when 460 | # the name regexes allow several styles. 461 | name-group= 462 | 463 | # Regular expression which should only match function or class names that do 464 | # not require a docstring. 465 | no-docstring-rgx=^_ 466 | 467 | # List of decorators that produce properties, such as abc.abstractproperty. Add 468 | # to this list to register other decorators that produce valid properties. 469 | # These decorators are taken in consideration only for invalid-name. 470 | property-classes=abc.abstractproperty 471 | 472 | # Naming style matching correct variable names. 473 | variable-naming-style=snake_case 474 | 475 | # Regular expression matching correct variable names. Overrides variable- 476 | # naming-style. 477 | #variable-rgx= 478 | 479 | 480 | [LOGGING] 481 | 482 | # Format style used to check logging format string. `old` means using % 483 | # formatting, while `new` is for `{}` formatting. 484 | logging-format-style=new 485 | 486 | # Logging modules to check that the string format arguments are in logging 487 | # function parameter format. 488 | logging-modules=logging 489 | 490 | 491 | [CLASSES] 492 | 493 | # List of method names used to declare (i.e. assign) instance attributes. 494 | defining-attr-methods=__init__, 495 | __new__, 496 | setUp 497 | 498 | # List of member names, which should be excluded from the protected access 499 | # warning. 500 | exclude-protected=_asdict, 501 | _fields, 502 | _replace, 503 | _source, 504 | _make 505 | 506 | # List of valid names for the first argument in a class method. 507 | valid-classmethod-first-arg=cls 508 | 509 | # List of valid names for the first argument in a metaclass class method. 510 | valid-metaclass-classmethod-first-arg=cls 511 | 512 | 513 | [DESIGN] 514 | 515 | # Maximum number of arguments for function / method. 516 | max-args=5 517 | 518 | # Maximum number of attributes for a class (see R0902). 519 | max-attributes=7 520 | 521 | # Maximum number of boolean expressions in an if statement. 522 | max-bool-expr=5 523 | 524 | # Maximum number of branch for function / method body. 525 | max-branches=12 526 | 527 | # Maximum number of locals for function / method body. 528 | max-locals=15 529 | 530 | # Maximum number of parents for a class (see R0901). 531 | max-parents=7 532 | 533 | # Maximum number of public methods for a class (see R0904). 534 | max-public-methods=20 535 | 536 | # Maximum number of return / yield for function / method body. 
537 | max-returns=6 538 | 539 | # Maximum number of statements in function / method body. 540 | max-statements=50 541 | 542 | # Minimum number of public methods for a class (see R0903). 543 | min-public-methods=2 544 | 545 | 546 | [IMPORTS] 547 | 548 | # Allow wildcard imports from modules that define __all__. 549 | allow-wildcard-with-all=no 550 | 551 | # Analyse import fallback blocks. This can be used to support both Python 2 and 552 | # 3 compatible code, which means that the block might have code that exists 553 | # only in one or another interpreter, leading to false positives when analysed. 554 | analyse-fallback-blocks=no 555 | 556 | # Deprecated modules which should not be used, separated by a comma. 557 | deprecated-modules=optparse,tkinter.tix 558 | 559 | # Create a graph of external dependencies in the given file (report RP0402 must 560 | # not be disabled). 561 | ext-import-graph= 562 | 563 | # Create a graph of every (i.e. internal and external) dependencies in the 564 | # given file (report RP0402 must not be disabled). 565 | import-graph= 566 | 567 | # Create a graph of internal dependencies in the given file (report RP0402 must 568 | # not be disabled). 569 | int-import-graph= 570 | 571 | # Force import order to recognize a module as part of the standard 572 | # compatibility libraries. 573 | known-standard-library= 574 | 575 | # Force import order to recognize a module as part of a third party library. 576 | known-third-party=enchant 577 | 578 | 579 | [EXCEPTIONS] 580 | 581 | # Exceptions that will emit a warning when being caught. Defaults to 582 | # "BaseException, Exception". 583 | overgeneral-exceptions=BaseException, 584 | Exception 585 | -------------------------------------------------------------------------------- /fragscapy/engine.py: -------------------------------------------------------------------------------- 1 | """Runs the test suite based on the config of the user. 2 | 3 | The `Engine` is the main engine for fragscapy. It is used to setup the 4 | Netfilter rules, generate the mod lists, run the test suite and cleanup 5 | everything at the end. 6 | 7 | The `EngineThread` is a thread in charge of modifying the intercept packets 8 | and sending them back to the network. 9 | """ 10 | 11 | import threading 12 | import warnings 13 | 14 | import scapy.utils 15 | import tqdm 16 | 17 | from fragscapy.modgenerator import ModListGenerator 18 | from fragscapy.netfilter import NFQueue, NFQueueRule 19 | from fragscapy.packetlist import PacketList 20 | from fragscapy.tests import TestSuite 21 | 22 | 23 | MODIF_FILE = "modifications.txt" # Details of each mod on this file 24 | 25 | 26 | class EngineError(ValueError): 27 | """An Error during the execution of the engine.""" 28 | 29 | 30 | class EngineWarning(Warning): 31 | """Warning during the execution of the engine.""" 32 | 33 | 34 | def engine_warning(msg): 35 | """Raises a warning about the engine, details in `msg`.""" 36 | warnings.warn( 37 | "{}".format(msg), 38 | EngineWarning 39 | ) 40 | 41 | 42 | def _append_to_display_list(display_list, i, j, limit): 43 | """Utility function to add the test i_j to the list that will be displayed 44 | within the limit given.""" 45 | if len(display_list) < limit: 46 | display_list.append("n°{}_{}".format(i, j)) 47 | elif len(display_list) == limit: 48 | display_list.append("...") 49 | 50 | 51 | def mlgen_product(in_ml, out_ml): 52 | """Optimized equivalent of `itertools.product`. 
53 | 54 | It relies on the fact that the ModListGenerator objects can be iterated 55 | over multiple times (which is not the case for all iterables) to avoid 56 | storing intermediate values. 57 | """ 58 | return ((x, y) for x in in_ml for y in out_ml) 59 | 60 | 61 | # pylint: disable=too-many-instance-attributes 62 | class EngineThread(threading.Thread): 63 | """Thread of the engine modifying the packets in NFQUEUE. 64 | 65 | This thread, once started, catches and transforms the packets in the 66 | NFQUEUE. The thread applies the `input_modlist` (resp. the 67 | `output_modlist`) to the packets caught on the INPUT chain (resp. the 68 | OUTPUT chain). 69 | 70 | These two mod lists can be thread-safely replaced at any time. 71 | 72 | Args: 73 | nfqueue (:obj:`NFQueue`): The NF queue used to catch the packets. 74 | input_modlist (:obj:`ModList`, optional): The list of modifications to 75 | apply to the packets on the INPUT chain. If not set, it should be 76 | set before starting the thread. 77 | output_modlist (:obj:`ModList`, optional): The list of modifications 78 | to apply to the packets on the OUTPUT chain. If not set, it should 79 | be set before starting the thread. 80 | local_pcap (str, optional): A pcap file where the packets of the local 81 | side should be dumped to. Default is 'None' which means the packets 82 | are not dumped. 83 | remote_pcap (str, optional): A pcap file where the packets of the 84 | remote side should be dumped to. Default is 'None' which means the 85 | packets are not dumped. 86 | *args: The args passed to the `Thread` class. 87 | **kwargs: The kwargs passed to the `Thread` class. 88 | 89 | Examples: 90 | Assuming the nfqueue, modlist1, modlist2 and modlist3 objects exist 91 | 92 | >>> engine_th = EngineThread(nfqueue, input_modlist=modlist1, output_modlist=modlist2) 93 | >>> engine_th.start() # Start processing the packets 94 | >>> engine_th.input_modlist # Thread-safe copy of the input modlist 95 | >>> engine_th.output_modlist = modlist3 # Thread-safe modification 96 | """ 97 | 98 | def __init__(self, nfqueue, *args, **kwargs): 99 | self._nfqueue = nfqueue 100 | self._nfqueue_lock = threading.RLock() 101 | self._input_modlist = kwargs.pop("input_modlist", None) 102 | self._output_modlist = kwargs.pop("output_modlist", None) 103 | self._input_lock = threading.Lock() 104 | self._output_lock = threading.Lock() 105 | self._local_pcap = kwargs.pop("local_pcap", None) 106 | self._remote_pcap = kwargs.pop("remote_pcap", None) 107 | self._local_pcap_lock = threading.Lock() 108 | self._remote_pcap_lock = threading.Lock() 109 | super(EngineThread, self).__init__(*args, **kwargs) 110 | 111 | @property 112 | def input_modlist(self): 113 | """The modlist applied to the packets on INPUT chain. Read/Write is 114 | thread-safe.""" 115 | with self._input_lock: 116 | return self._input_modlist.copy() 117 | 118 | @input_modlist.setter 119 | def input_modlist(self, new): 120 | with self._input_lock: 121 | self._input_modlist = new 122 | 123 | @property 124 | def output_modlist(self): 125 | """The modlist applied to the packets on OUTPUT chain. Read/Write is 126 | thread-safe.""" 127 | with self._output_lock: 128 | return self._output_modlist.copy() 129 | 130 | @output_modlist.setter 131 | def output_modlist(self, new): 132 | with self._output_lock: 133 | self._output_modlist = new 134 | 135 | @property 136 | def local_pcap(self): 137 | """A pcap file where the packets of the local side should be dumped 138 | to. 'None' means the packets are not dumped. Read/Write is
Read/Write is 139 | thread-safe.""" 140 | with self._local_pcap_lock: 141 | return self._local_pcap 142 | 143 | @local_pcap.setter 144 | def local_pcap(self, new): 145 | with self._local_pcap_lock: 146 | self._local_pcap = new 147 | 148 | @property 149 | def remote_pcap(self): 150 | """A pcap file where the packets of the remote side should be dumped 151 | to. 'None' means the packets are not dumped. Read/Write is 152 | thread-safe.""" 153 | with self._remote_pcap_lock: 154 | return self._remote_pcap 155 | 156 | @remote_pcap.setter 157 | def remote_pcap(self, new): 158 | with self._remote_pcap_lock: 159 | self._remote_pcap = new 160 | 161 | def _process_input(self, packet): 162 | """Applies the input modifications on `packet`.""" 163 | # Dump the packet before anything else 164 | if self.remote_pcap is not None: 165 | scapy.utils.wrpcap(self.remote_pcap, packet.scapy_pkt, 166 | append=True) 167 | 168 | # Checks that the INPUT modlist is populated 169 | with self._input_lock: 170 | if self._input_modlist is None: 171 | raise EngineError( 172 | "Can't run the engine with no INPUT modlist" 173 | ) 174 | 175 | # Put the packet in a packet list 176 | packetlist = PacketList() 177 | packetlist.add_packet(packet.scapy_pkt) 178 | 179 | with self._input_lock: 180 | packetlist = self._input_modlist.apply(packetlist) 181 | 182 | pl_len = len(packetlist) 183 | 184 | # Warning if there is creation of a packet (more than 1) 185 | if pl_len > 1: 186 | engine_warning( 187 | "More than 1 packet resulting in the INPUT chain. " 188 | "Got {} packets in the result. They can't be sent to " 189 | "the NFQUEUE. Only the first one will be reinserted " 190 | "to the NFQUEUE.".format(pl_len) 191 | ) 192 | 193 | if pl_len == 0: 194 | # If there is no packet in the result, 195 | # Inform libnfqueue to drop the waiting packet 196 | packet.drop() 197 | else: 198 | # If there is at least 1 packet in the result, send it 199 | # Modify the initial packet with the new content 200 | packet.scapy_pkt = packetlist[0].pkt 201 | # Dump the packet just before sending it 202 | if self.local_pcap is not None: 203 | scapy.utils.wrpcap(self.local_pcap, packet.scapy_pkt, 204 | append=True) 205 | # Mangle the packet to the NFQUEUE (so it is sent 206 | # correctly to the local application) 207 | packet.mangle() 208 | 209 | def _process_output(self, packet): 210 | """Applies the output modifications on `packet`.""" 211 | # Dump the packet before anything else 212 | if self.local_pcap is not None: 213 | scapy.utils.wrpcap(self.local_pcap, packet.scapy_pkt, 214 | append=True) 215 | 216 | # Checks that the OUTPUT modlist is populated 217 | with self._output_lock: 218 | if self._output_modlist is None: 219 | raise EngineError( 220 | "Can't run the engine with no OUTPUT modlist" 221 | ) 222 | 223 | # Put the packet in a packet list 224 | packetlist = PacketList() 225 | packetlist.add_packet(packet.scapy_pkt) 226 | 227 | with self._output_lock: 228 | packetlist = self._output_modlist.apply(packetlist) 229 | 230 | # Dump the packets just before sending it 231 | if self.remote_pcap is not None: 232 | scapy.utils.wrpcap( 233 | self.remote_pcap, 234 | [pkt.pkt for pkt in packetlist], 235 | append=True 236 | ) 237 | # Send all the packets resulting 238 | packetlist.send_all() 239 | # Drop the old packet in NFQUEUE 240 | packet.drop() 241 | 242 | def run(self): 243 | """Runs the main loop of the thread. 
244 | 
245 | Checks that the initial input_modlist and output_modlist are set and
246 | then starts catching the packets in the NFQUEUE and processing them.
247 | 
248 | Raises:
249 | EngineError: There is a modlist (input or output) missing.
250 | """
251 | # Process the queue infinitely
252 | if not self.is_stopped():
253 | for packet in self._nfqueue:
254 | with self._nfqueue_lock:
255 | if self.is_stopped():
256 | break
257 | if packet.is_input:
258 | self._process_input(packet)
259 | else:
260 | self._process_output(packet)
261 | 
262 | def is_stopped(self):
263 | """Has the thread been stopped?"""
264 | with self._nfqueue_lock:
265 | return self._nfqueue.is_stopped()
266 | 
267 | def stop(self):
268 | """Stops the thread by stopping the nfqueue processing."""
269 | with self._nfqueue_lock:
270 | self._nfqueue.stop()
271 | 
272 | 
273 | # pylint: disable=too-many-instance-attributes
274 | class Engine(object):
275 | """Main engine to run fragscapy, given a `Config` object.
276 | 
277 | The engine will parse the configuration, extract the necessary
278 | `NFQueueRule` and `NFQueue` objects. It also extracts the
279 | `ModListGenerator` for the INPUT and the OUTPUT chain. Finally, it also
280 | creates the `EngineThread` that will process the packets thanks to all
281 | these objects.
282 | 
283 | Once the initialisation is done, all the internal objects are ready to be
284 | started and to process the incoming packets. The engine can be started with
285 | the `.start()` method.
286 | 
287 | Once started, the engine sets up the NF rules necessary to intercept the
288 | packets and starts the thread(s) that process them.
289 | 
290 | The next step of the process is the main loop that generates 2 `ModList`
291 | (input and output) from the `ModListGenerator`, sets those in the threads
292 | that process the packets and runs the command that was specified in the
293 | config. It then loops back to generating the next `ModList`.
294 | 
295 | All the arguments about files can use the formatting '{i}' and '{j}' to
296 | have a different file for each test. They respectively contain the
297 | id of the current modification and the number of the current iteration
298 | of the same test (in case of non-deterministic tests). Note the only
299 | exception is 'modif_file' which only accepts '{i}' because it does not
300 | change when only '{j}' changes.
301 | 
302 | Unless `append=True` is specified, the modif, stdout and stderr filenames
303 | that match the provided patterns are removed before running the test so
304 | the results are not appended to previous files.
305 | 
306 | Args:
307 | config (:obj:`Config`): The configuration to use to get all the
308 | necessary data.
309 | progressbar (bool, optional): Show a progressbar during the process.
310 | Default is 'True'.
311 | display_results (bool, optional): Display the results at the end of
312 | the tests. Default is 'True'.
313 | modif_file (str, optional): The filename where to write the
314 | modifications. Default is 'modifications.txt'.
315 | stdout (str, optional): The filename where to redirect stdout.
316 | If not specified (the default), the output is dropped.
317 | If set to 'None', the output is redirected to stdout.
318 | stderr (str, optional): The filename where to redirect stderr.
319 | If not specified (the default), the error output is
320 | dropped. If set to 'None', the error output is redirected to
321 | stderr.
322 | local_pcap (str, optional): A pcap file where the packets of the local
323 | side should be dumped to.
Default is 'None' which means the packets
324 | are not dumped.
325 | remote_pcap (str, optional): A pcap file where the packets of the
326 | remote side should be dumped to. Default is 'None' which means the
327 | packets are not dumped.
328 | append (bool, optional): If 'True', do not erase the existing files
329 | (modif, stdout and stderr), append the results to them instead.
330 | Default is 'False'.
331 | 
332 | Attributes:
333 | progressbar (bool): Shows a progressbar during the process if True.
334 | display_results (bool): Display the results at the end of the tests if
335 | True.
336 | modif_file (str, optional): The filename where to write the
337 | modifications.
338 | stdout (bool): 'False' if stdout of the command should be dropped.
339 | stdout_file (str): The filename where to redirect stdout.
340 | 'None' means the output is dropped.
341 | stderr (bool): 'False' if stderr of the command should be dropped.
342 | stderr_file (str): The filename where to redirect stderr.
343 | 'None' means the error output is dropped.
344 | local_pcap (str): A pcap file where the packets of the local side
345 | should be dumped to. 'None' means the packets are not dumped.
346 | remote_pcap (str): A pcap file where the packets of the remote side
347 | should be dumped to. 'None' means the packets are not dumped.
348 | append (bool): If 'True', do not erase the existing files
349 | (modif, stdout and stderr), append the results to them instead.
350 | 
351 | Examples:
352 | >>> engine = Engine(Config("my_conf.json"))
353 | >>> engine.start()
354 | 100%|████████████████████████████| 200/200 [00:00<00:00, 21980.42it/s]
355 | 
356 | >>> engine = Engine(Config("my_conf.json"), progressbar=False)
357 | >>> engine.start()
358 | """
359 | 
360 | # Template of the infos for each modification
361 | MODIF_TEMPLATE = (
362 | "Modification n°{i}{repeat}:\n"
363 | "> INPUT:\n"
364 | "{input_modlist}\n"
365 | "\n"
366 | "> OUTPUT:\n"
367 | "{output_modlist}\n"
368 | "=================================================="
369 | "\n"
370 | "\n"
371 | )
372 | # Template used to display the results
373 | RESULTS_TEMPLATE = (
374 | "Results ({nb_tests} tests done over {nb_mods} scenarios)\n"
375 | "==================\n"
376 | "Pass : {nb_passed}\n"
377 | " {display_passed}\n"
378 | "Fail : {nb_failed}\n"
379 | " {display_failed}\n"
380 | "Not Done : {nb_not_done}\n"
381 | " {display_not_done}"
382 | )
383 | 
384 | 
385 | def __init__(self, config, **kwargs):
386 | self.progressbar = kwargs.pop("progressbar", True)
387 | self.display_results = kwargs.pop("display_results", True)
388 | 
389 | # Build the generator for all mods
390 | in_ml = ModListGenerator(config.input)
391 | out_ml = ModListGenerator(config.output)
392 | ml_iterator = mlgen_product(in_ml, out_ml)
393 | if self.progressbar: # Use tqdm for showing progressbar
394 | ml_iterator = tqdm.tqdm(ml_iterator, total=len(in_ml)*len(out_ml))
395 | 
396 | # The test suite object
397 | self.test_suite = TestSuite(
398 | ml_iterator=ml_iterator,
399 | cmd_pattern=config.cmd,
400 | modif_file_pattern=kwargs.pop("modif_file", MODIF_FILE),
401 | repeat=kwargs.pop("repeat", 10),
402 | stdout="stdout" in kwargs,
403 | stdout_pattern=kwargs.pop("stdout", None),
404 | stderr="stderr" in kwargs,
405 | stderr_pattern=kwargs.pop("stderr", None),
406 | local_pcap_pattern=kwargs.pop("local_pcap", None),
407 | remote_pcap_pattern=kwargs.pop("remote_pcap", None)
408 | )
409 | self.append = kwargs.pop("append", False)
410 | 
411 | # Populate the NFQUEUE-related objects
412 | self._nfrules = list()
413
| self._nfqueues = list() 414 | self._qnums = set() 415 | for nfrule in config.nfrules: 416 | self._nfrules.append(NFQueueRule(**nfrule)) 417 | qnum = nfrule.get('qnum', 0) 418 | if not qnum % 2: 419 | self._qnums.add(qnum) 420 | for qnum in self._qnums: 421 | self._nfqueues.append(NFQueue(qnum=qnum)) 422 | 423 | # Prepare the threads that catches, modify and send the packets 424 | self._engine_threads = list() 425 | for nfqueue in self._nfqueues: 426 | self._engine_threads.append(EngineThread(nfqueue)) 427 | 428 | def _write_modlist_to_file(self, repeated_test_case): 429 | """Writes the modification details to the 'modif_file'.""" 430 | repeat = ("(repeated {} times)".format(repeated_test_case.repeat) 431 | if repeated_test_case.repeat > 1 432 | else "") 433 | with open(repeated_test_case.modif_file, "a") as mod_file: 434 | mod_file.write(self.MODIF_TEMPLATE.format( 435 | i=repeated_test_case.test_id, 436 | repeat=repeat, 437 | input_modlist=repeated_test_case.input_modlist, 438 | output_modlist=repeated_test_case.output_modlist 439 | )) 440 | 441 | def _update_modlists(self, repeated_test_case): 442 | """Changes the modlist in all the threads.""" 443 | for engine_thread in self._engine_threads: 444 | engine_thread.input_modlist = repeated_test_case.input_modlist 445 | engine_thread.output_modlist = repeated_test_case.output_modlist 446 | self._write_modlist_to_file(repeated_test_case) 447 | 448 | def _update_pcap_files(self, test_case): 449 | """Changes the pcap files in all the threads.""" 450 | for engine_thread in self._engine_threads: 451 | engine_thread.local_pcap = test_case.local_pcap 452 | engine_thread.remote_pcap = test_case.remote_pcap 453 | 454 | def _insert_nfrules(self): 455 | """Inserts all the NF rules using `ip(6)tables`.""" 456 | for nfrule in self._nfrules: 457 | nfrule.insert() 458 | 459 | def _remove_nfrules(self): 460 | """Removes all the NF rules using `ip(6)tables`.""" 461 | for nfrule in self._nfrules: 462 | nfrule.remove() 463 | 464 | def _start_threads(self): 465 | """Starts the engine threads used to process the packets.""" 466 | for engine_thread in self._engine_threads: 467 | engine_thread.start() 468 | 469 | def _stop_threads(self): 470 | """Send the signal to stop the threads used to process the packets.""" 471 | for engine_thread in self._engine_threads: 472 | engine_thread.stop() 473 | 474 | def _join_threads(self): 475 | """Joins the engine threads used to process the packets.""" 476 | for engine_thread in self._engine_threads: 477 | engine_thread.join() 478 | 479 | def pre_run(self): 480 | """Runs all the actions that need to be run before `.run()`.""" 481 | if not self.append: 482 | self.test_suite.flush_all_files() 483 | self._insert_nfrules() 484 | self._start_threads() 485 | 486 | def run(self): 487 | """Runs the test suite. 488 | 489 | Generates a modlist, run the command and do it over and over until all 490 | the possible modlists are exhausted. 
491 | """ 492 | # When interrupted in the middle of the test_suite, we continue to 493 | # generate all the TestCase so they will appear in the 'not done' 494 | # sections of the results 495 | interrupted = False 496 | 497 | for repeated_test_case in self.test_suite: 498 | try: 499 | if not interrupted: 500 | self._update_modlists(repeated_test_case) 501 | for test_case in repeated_test_case: 502 | try: 503 | if not interrupted: 504 | self._update_pcap_files(test_case) 505 | test_case.run() 506 | except (KeyboardInterrupt, ProcessLookupError): 507 | interrupted = True 508 | except (KeyboardInterrupt, ProcessLookupError): 509 | interrupted = True 510 | 511 | def post_run(self): 512 | """Runs all the actions that need to be run after `.run()`.""" 513 | self.unbind_queues() 514 | self._stop_threads() 515 | self._join_threads() 516 | self._remove_nfrules() 517 | 518 | def unbind_queues(self): 519 | """Unbind any NFQUEUE open by the engine previously.""" 520 | for nfqueue in self._nfqueues: 521 | nfqueue.unbind() 522 | 523 | def print_results(self): 524 | """Prints a summary of which test passed and which did not.""" 525 | display_limit = 80 // len("n°ii_j, ") # Max 80 chars 526 | 527 | nb_tests, nb_mods, nb_passed, nb_failed, nb_not_done = 0, 0, 0, 0, 0 528 | display_passed = list() 529 | display_failed = list() 530 | display_not_done = list() 531 | for repeated_test_case in self.test_suite.tests_generated: 532 | nb_mods += 1 533 | for test_case in repeated_test_case.tests_generated: 534 | nb_tests += 1 535 | if test_case.is_success(): 536 | nb_passed += 1 537 | display_list = display_passed 538 | elif test_case.is_failure(): 539 | nb_failed += 1 540 | display_list = display_failed 541 | else: 542 | nb_not_done += 1 543 | display_list = display_not_done 544 | _append_to_display_list( 545 | display_list, 546 | repeated_test_case.test_id, 547 | test_case.test_id, 548 | display_limit 549 | ) 550 | 551 | results = self.RESULTS_TEMPLATE.format( 552 | nb_tests=nb_tests, 553 | nb_mods=nb_mods, 554 | nb_passed=nb_passed, 555 | display_passed=", ".join(display_passed), 556 | nb_failed=nb_failed, 557 | display_failed=", ".join(display_failed), 558 | nb_not_done=nb_not_done, 559 | display_not_done=", ".join(display_not_done), 560 | ) 561 | print(results) 562 | 563 | def start(self): 564 | """Starts the test suite by running `.pre_run()`, `.run()` and 565 | finally `.post_run()`.""" 566 | self.pre_run() 567 | self.run() 568 | self.post_run() 569 | if self.display_results: 570 | self.print_results() 571 | 572 | def check_nfrules(self): 573 | """Checks that the NF rules should work without errors.""" 574 | self._insert_nfrules() 575 | self._remove_nfrules() 576 | 577 | def check_modlist_generation(self): 578 | """Checks that the ModListGenerator will generate all mods.""" 579 | if not self.append: 580 | self.test_suite.flush_modif_files() 581 | for repeated_test_case in self.test_suite: 582 | self._write_modlist_to_file(repeated_test_case) 583 | --------------------------------------------------------------------------------
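For reference, a minimal usage sketch of the `Engine` API defined in fragscapy/engine.py above. The configuration filename and import paths are assumptions based on the repository layout (see config_examples/ for real configuration files), and running it requires root privileges since the engine inserts ip(6)tables rules.

    from fragscapy.config import Config
    from fragscapy.engine import Engine

    # Load a JSON configuration and build the engine
    # (the filename is illustrative; any file from config_examples/ works).
    engine = Engine(Config("my_conf.json"), progressbar=True,
                    modif_file="modifications.txt")

    # Optional dry-run: insert then immediately remove the NF rules to check them.
    engine.check_nfrules()

    # pre_run() + run() + post_run(), then print the pass/fail summary.
    engine.start()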