├── .gitattributes
├── .gitignore
├── CONFIG.md
├── CONTRIBUTION.md
├── LICENSE
├── README.md
├── SUPPORT.md
├── elrond.pdf
├── elrond
│   ├── CONFIG.md
│   ├── SUPPORT.md
│   ├── VIRTUALMACHINE.md
│   ├── __init__.py
│   ├── config.sh
│   ├── elrond.py
│   ├── elrond.sh
│   ├── images
│   │   ├── elrond_background.jpg
│   │   ├── elrond_meme.jpeg
│   │   ├── elrond_screenshot.jpg
│   │   ├── logo_trans_big.png
│   │   ├── logo_trans_small.png
│   │   ├── logo_white.png
│   │   ├── splunkicon_clear.png
│   │   ├── splunkicon_white.png
│   │   ├── splunklogo_clear.png
│   │   ├── splunklogo_white.png
│   │   ├── wwhf.jpg
│   │   └── wwhf.png
│   ├── rivendell
│   │   ├── analysis
│   │   │   ├── analysis.py
│   │   │   ├── ioc_exclusions
│   │   │   ├── iocs.py
│   │   │   └── keywords.py
│   │   ├── audit.py
│   │   ├── collect
│   │   │   ├── collect.py
│   │   │   ├── files
│   │   │   │   ├── carve.py
│   │   │   │   ├── compare.py
│   │   │   │   ├── files.py
│   │   │   │   ├── i30.py
│   │   │   │   └── select.py
│   │   │   ├── linux.py
│   │   │   ├── mac.py
│   │   │   ├── reorganise.py
│   │   │   ├── users
│   │   │   │   ├── linux.py
│   │   │   │   ├── mac.py
│   │   │   │   └── windows.py
│   │   │   └── windows.py
│   │   ├── core
│   │   │   ├── core.py
│   │   │   ├── gandalf.py
│   │   │   └── identify.py
│   │   ├── main.py
│   │   ├── memory
│   │   │   ├── extract.py
│   │   │   ├── memory.py
│   │   │   ├── plugins.py
│   │   │   ├── profiles.py
│   │   │   └── volcore.py
│   │   ├── meta.py
│   │   ├── mount.py
│   │   ├── post
│   │   │   ├── clam.py
│   │   │   ├── clean.py
│   │   │   ├── elastic
│   │   │   │   ├── config.py
│   │   │   │   └── ingest.py
│   │   │   ├── mitre
│   │   │   │   ├── nav_attack.py
│   │   │   │   └── nav_config.py
│   │   │   ├── splunk
│   │   │   │   ├── app
│   │   │   │   │   ├── app.py
│   │   │   │   │   ├── nav.py
│   │   │   │   │   ├── transforms.py
│   │   │   │   │   └── views
│   │   │   │   │       ├── cyberchef.py
│   │   │   │   │       ├── pages.py
│   │   │   │   │       └── views.py
│   │   │   │   ├── config.py
│   │   │   │   └── ingest.py
│   │   │   └── yara.py
│   │   └── process
│   │       ├── browser.py
│   │       ├── extractions
│   │       │   ├── clipboard.py
│   │       │   ├── evtx.py
│   │       │   ├── mail.py
│   │       │   ├── mft.py
│   │       │   ├── plist.py
│   │       │   ├── registry
│   │       │   │   ├── dumpreg.py
│   │       │   │   ├── profile.py
│   │       │   │   └── system.py
│   │       │   ├── shimcache.py
│   │       │   ├── sru.py
│   │       │   ├── usb.py
│   │       │   ├── usn.py
│   │       │   ├── wbem.py
│   │       │   └── wmi.py
│   │       ├── linux.py
│   │       ├── mac.py
│   │       ├── nix.py
│   │       ├── process.py
│   │       ├── select.py
│   │       ├── timeline.py
│   │       └── windows.py
│   └── tools
│       ├── config
│       │   ├── mitre.py
│       │   ├── rip.pl
│       │   ├── scripts
│       │   │   ├── VMware.sh
│       │   │   ├── apfs-fuse.sh
│       │   │   ├── cloud.sh
│       │   │   ├── finish.sh
│       │   │   ├── indx.sh
│       │   │   ├── navigator.sh
│       │   │   ├── nsrl.sh
│       │   │   ├── regrip.sh
│       │   │   ├── repo.sh
│       │   │   ├── tools.sh
│       │   │   ├── update.sh
│       │   │   └── volatility3.sh
│       │   └── vmware-install.pl
│       └── srum_dump
│           ├── SRUM_TEMPLATE3.xlsx
│           └── srum_dump.py
├── make.sh
└── update.sh
/.gitattributes:
--------------------------------------------------------------------------------
1 | *.deb filter=lfs diff=lfs merge=lfs -text
2 | *.zip filter=lfs diff=lfs merge=lfs -text
3 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Testing Environment
2 | TEST/*
3 | rivendell/*/__pycache__/*
4 |
5 | # Python Tools for Visual Studio (PTVS)
6 | __pycache__/
7 | *.pyc
8 | .vscode/*
9 |
10 | # Mac
11 | .DS_Store
12 | xcode/
--------------------------------------------------------------------------------
/CONFIG.md:
--------------------------------------------------------------------------------
1 | # elrond Configuration
2 |
3 | [Prepare Virtual Machine](https://github.com/cyberg3cko/elrond/blob/main/elrond/VIRTUALMACHINE.md)
4 |
5 | ---
6 |
7 |
8 | ⚠️ _The following script will partition and format /dev/sdb. If you have not configured the second HDD as recommended above and another drive is mounted at that location, its data may be deleted. You can change this location by editing the [init.sh](https://github.com/cyberg3cko/elrond/blob/main/elrond/tools/scripts/init.sh) script_
9 |
10 | `sudo git clone https://github.com/cyberg3cko/elrond.git /opt/elrond && sudo /opt/elrond/./make.sh`
11 | - ↓ ↓ `ENTER c g` *(apfs-fuse on x64 architecture only)*
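
Before running `make.sh`, it is worth double-checking that `/dev/sdb` really is the new, empty second disk rather than a data drive. A minimal sanity check, assuming the second HDD was added as described in the virtual machine guide:

```bash
# The second disk should appear as "sdb" with the expected size and no mountpoints
lsblk -o NAME,SIZE,TYPE,MOUNTPOINT

# A blank disk should have no filesystem signature; blkid then prints nothing and returns non-zero
sudo blkid /dev/sdb || echo "no filesystem signature on /dev/sdb"
```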
12 |
13 | ---
14 |
15 |
16 | [Revert Virtual Machine](https://github.com/cyberg3cko/elrond/blob/main/elrond/VIRTUALMACHINE.md)
--------------------------------------------------------------------------------
/CONTRIBUTION.md:
--------------------------------------------------------------------------------
1 |
2 |
3 | ## Contributing
4 |
5 | Contributions are what make the open source community such an amazing place to learn, inspire, and create. Any contributions you make are **greatly appreciated**.
6 |
7 | 1. Fork the Project
8 | 2. Create your Feature Branch (`git checkout -b cyber1of3/AdditionalFeature`)
9 | 3. Commit your Changes (`git commit -m 'Add some AdditionalFeature'`)
10 | 4. Push to the Branch (`git push origin cyber1of3/AdditionalFeature`)
11 | 5. Open a Pull Request
12 |
13 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2020 cyberg3cko
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/SUPPORT.md:
--------------------------------------------------------------------------------
1 | # Additional Tools & Commands to Facilitate elrond Analysis
2 |
3 | Additional commands and tools to help get data ready for elrond
4 |
5 | # Table of Contents
6 |
7 | * [Preparing Disk Images](#Preparing-Disk-Images)
8 | * [Merging Multiple VMDK Files](#Merging-multiple-VMDK-Files)
9 | * [Convert DMG to E01](#Convert-DMG-to-E01)
10 | * [Preparing Memory Images](#Preparing-Memory-Images)
11 | * [Capturing Memory](#Capturing-Memory)
12 | * [Creating Symbol Tables (volatility3)](#Creating-Symbol-Tables-volatility3)
13 | * [Creating Profiles (volatility2.6)](#Creating-Profiles-volatility26)
14 | * [Exporting VM as OVA](#Exporting-VM-as-OVA)
15 | * [Appendix](#Appendix)
16 |
17 |
18 | # Preparing Disk Images
19 |
20 | ## Merging multiple VMDK Files
21 | * VMware Fusion
22 |
23 | `/Applications/VMware\ Fusion.app/Contents/Library/vmware-vdiskmanager -r .vmwarevm/Virtual\ Disk.vmdk -t 0 .vmdk`
24 | * VMware Workstation
25 |
26 | `C:\Program Files (x86)\VMware\VMware Player\vmware-vdiskmanager.exe -r .vmwarevm\VirtualDisk.vmdk -t 0 .vmdk`
27 |
28 |
29 |
30 | ## Convert DMG to E01
31 | If you have collected a macOS disk image in the form of a DMG, you can convert it into an E01. Note that this can only be done on a macOS device (preferably not the same host from which the disk was acquired).
32 | `brew install libewf`
33 | >
34 | `hdiutil attach -nomount .dmg`
35 | >
36 | `diskutil list`
37 | * Confirm the device name to which the DMG has been attached
38 |
39 | `ewfacquire -t evidence -v /dev/diskN`
40 | * Create evidence.E01 from /dev/diskN (N being the number it has been assigned - usually 3 or 4 but depends on how many additional disks or images are mounted)
41 | * Adjust 'Evidence segment file size' to a value larger than the size of the DMG - this forces ewfacquire to create a single E01 file instead of multiple segments
42 |
43 | `hdiutil detach /dev/diskN`
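
As an illustration, the whole sequence for a hypothetical image named `evidence.dmg` that attaches as `/dev/disk4` might look like this:

```bash
brew install libewf                    # provides ewfacquire
hdiutil attach -nomount evidence.dmg   # attach the DMG without mounting its volumes
diskutil list                          # note the device it attached as, e.g. /dev/disk4
ewfacquire -t evidence -v /dev/disk4   # acquire to evidence.E01; set the segment size larger than the DMG
hdiutil detach /dev/disk4              # release the DMG once acquisition has finished
```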
44 |
45 |
46 |
47 |
48 | # Preparing Memory Images
49 |
50 | ## Capturing Memory
51 | ### macOS
52 | * Download osxpmem from https://github.com/cyberg3cko/elrond/tree/main/tools/
53 |
54 | `sudo chown -R root:wheel osxpmem.app/ && sudo chmod +x osxpmem.app/osxpmem && sudo osxpmem.app/osxpmem -o mem.aff4 && sudo osxpmem.app/osxpmem -e /dev/pmem -o mem.raw mem.aff4`
55 |
56 | ### Linux
57 | * Download avml from https://github.com/cyberg3cko/elrond/tree/main/tools/
58 |
59 | `sudo chmod +x avml && sudo ./avml $(uname -r).mem`
60 |
61 |
62 |
63 | ## Creating Symbol Tables (volatility3)
64 | ### Linux
65 | #### Analysis Machine
66 | * Download the relevant debug symbols and execute the following commands (as applicable to your distro); a worked Ubuntu example is shown below:
67 |
68 | Obtain the relevant debug symbol
69 | `wget `
70 | * RHEL: https://access.redhat.com/solutions/9907
71 | `yum install /tmp/`
72 | * Ubuntu: http://ddebs.ubuntu.com/ubuntu/pool/main/l/linux/
73 | `dpkg -x /tmp/`
74 |
75 | Copy the created symbol table to the Analysis Machine
76 | `sudo ./dwarf2json linux --elf /tmp/usr/lib/debug/boot/.ddeb > .../volatility3/volatility3/symbols/linux/.json`
77 |
78 | See Appendix for information on additional Linux distros
79 |
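For example, on Ubuntu the end-to-end flow might look like the following; the kernel release `5.4.0-42-generic` and the exact ddeb filename are illustrative placeholders - use the release reported by `uname -r` on the target and the matching package from the ddebs repository:

```bash
# Download the debug-symbol package for the target kernel (filename is illustrative)
wget -P /tmp http://ddebs.ubuntu.com/ubuntu/pool/main/l/linux/linux-image-unsigned-5.4.0-42-generic-dbgsym_5.4.0-42.46_amd64.ddeb

# Extract it; the debug kernel is placed under usr/lib/debug/boot/
dpkg -x /tmp/linux-image-unsigned-5.4.0-42-generic-dbgsym_5.4.0-42.46_amd64.ddeb /tmp/kernel-dbgsym

# Generate the symbol table and copy it into the volatility3 Linux symbols directory (path elided as above)
sudo ./dwarf2json linux --elf /tmp/kernel-dbgsym/usr/lib/debug/boot/vmlinux-5.4.0-42-generic > Ubuntu-5.4.0-42-generic.json
cp Ubuntu-5.4.0-42-generic.json .../volatility3/volatility3/symbols/linux/
```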
80 |
81 | ## Creating Profiles (volatility2.6)
82 | ### macOS
83 | #### Target Machine
84 | * Identify the relevant Kernel for the version of macOS you are analysing
85 |
86 | #### Analysis Machine
87 | * Download the relevant Kernel Debug Kit: http://developer.apple.com/hardwaredrivers
88 | * Download volatility3 from https://github.com/cyberg3cko/elrond/tree/main/tools/
89 |
90 | `unzip volatility3.zip`
91 | >
92 | `dwarfdump -arch x86_64 /Library/Developer/KDKs/KDK__16D32.kdk/System/Library/Kernels/kernel.dSYM > _x64.dwarfdump`
93 | >
94 | `python tools/mac/convert.py .dwarfdump converted-_x64.dwarfdump`
95 | >
96 | `python tools/mac/convert.py converted-_x64.dwarfdump > 10.12.3.64bit.vtypes`
97 | >
98 | `dsymutil -s -arch x86_64 /Library/Developer/KDKs/KDK__16D32.kdk/System/Library/Kernels/kernel > .64bit.symbol.dsymutil`
99 | >
100 | `zip .64bit.zip .64bit.symbol.dsymutil .64bit.vtypes`
101 | >
102 | `cp .64bit.zip volatility/plugins/overlays/mac/`
103 |
104 |
105 |
106 |
107 | ### Linux
108 | #### Target Machine
109 | * Identify the relevant Kernel for the version of Linux you are analysing
110 |
111 | `uname -r`
112 | >
113 | `cd Downloads/ && sudo apt-get install build-essential && sudo apt-get install dwarfdump && git clone https://github.com/volatilityfoundation/volatility.git && cd volatility/tools/linux/ && sudo make -C /lib/modules/$(uname -r)-generic/build/ CONFIG_DEBUG_INFO=y M=$PWD modules && sudo rm -rf module.dwarf && sudo dwarfdump -di ./module.o > module.dwarf && sudo zip Ubuntu64-$(uname -r)-generic.zip module.dwarf /boot/System.map-$(uname -r)-generic && ls -lah`
114 |
115 |
116 |
117 | ### Analysis Machine
118 | `cp [RHEL|Ubuntu]64-$(uname -r).zip volatility/plugins/overlays/linux/`
119 |
120 | * Download volatility from https://github.com/cyberg3cko/elrond/tree/main/tools/
121 |
122 | `sudo apt-get install build-essential && sudo apt-get install dwarfdump`
123 | >
124 | `unzip volatility.zip && sudo rm -rf __MACOSX/ && cd volatility/tools/linux/ && HOSTNAME=$(uname -r)`
125 | >
126 | `sudo make -C /lib/modules/$(uname -r)/build/ CONFIG_DEBUG_INFO=y M=$PWD modules`
127 | >
128 | `sudo rm -rf module.dwarf && sudo dwarfdump -di ./module.o > module.dwarf`
129 | >
130 | `sudo zip [RHEL|Ubuntu]64-$(uname -r).zip module.dwarf /boot/System.map-$(uname -r)`
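
After copying the resulting zip into `volatility/plugins/overlays/linux/`, a quick way to confirm that volatility2.6 has registered the new profile (the memory image name and profile name below are placeholders; use the profile name exactly as `--info` reports it):

```bash
# Run from the volatility2.6 directory; the new Linux profile should appear in the list
python2 vol.py --info | grep -i Linux

# Then reference it when running plugins against the captured memory image
python2 vol.py -f memory.raw --profile=<ProfileName> linux_pslist
```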
131 |
132 |
133 |
134 | # Exporting VM as OVA
135 | * VMware Fusion
136 |
137 | `/Applications/VMware\ Fusion.app/Contents/Library/VMware\ OVF\ Tool/./ovftool --acceptAllEulas .vmwarevm/.vmx .ova`
138 |
139 |
140 |
141 | # Appendix
142 | ### Additional Linux Distro debuginfo
143 | These are required for volatility3 symbol tables; obtain the relevant debuginfo and install it in accordance with your Linux distro:
144 | * CentOS: http://debuginfo.centos.org
145 | * SUSE: http://blog.dynofu.me/post/2015/08/31/linux-kernel-dbuginfo.html
146 | * Debian: https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=365349
147 | * Fedora: https://fedoraproject.org/wiki/Packaging:Debuginfo
148 | * Oracle UEK: https://oss.oracle.com/ol6/debuginfo/
149 |
150 |
151 | ### Building avml (Analysis Machine)
152 | **Only required if execution of avml fails**
153 | `sudo mkdir /tmp/make_avml && sudo cd /tmp/make_avml && sudo apt-get install musl-dev musl-tools musl && sudo chmod 777 -R /tmp && curl https://sh.rustup.rs -sSf | sh -s -- -y && sudo snap install rustup --classic && rustup install stable && rustup target add x86_64-unknown-linux-musl && cargo new avml-main --bin && cd avml-main/ && cargo build --release --target x86_64-unknown-linux-musl && cp target/x86_64-unknown-linux-musl/release/avml `
154 | >
155 | `cd target/x86_64-unknown-linux-musl/release/` (directory path might be slightly different)
156 |
157 |
158 |
159 | ### Pre-created Profiles (volatility2.6)
160 |
161 | For a full list/repository of currently developed profiles for volatility2.6, please visit https://github.com/cyberg3cko/profiles
162 | Of course, if you develop your own, please adhere to the following naming conventions:
163 | * Uploading to GitHub (directory structure):
164 | * **profiles -> Mac -> 10.11 -> ElCapitan_10.11.1_15B42.zip**
165 | * **profiles -> Linux -> x64 -> Ubuntu1010[-4.4.0-203-generic].zip**
166 | * Importing into volatility locally (.../volatility/plugins/overlays/[mac|linux]/):
167 | * **10.11/ElCapitan_10.11.1_15B42.zip**
168 | * **LinuxUbuntu1010[-4.4.0-203-generic]x64.zip**
169 |
--------------------------------------------------------------------------------
/elrond.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cyberg3cko/elrond/5cfbed2461433cbcd91b35d0d36659cf32d59c6f/elrond.pdf
--------------------------------------------------------------------------------
/elrond/CONFIG.md:
--------------------------------------------------------------------------------
1 | # elrond Configuration
2 |
3 | [Prepare Virtual Machine](https://github.com/cyberg3cko/elrond/blob/main/elrond/VIRTUALMACHINE.md)
4 |
5 | ---
6 |
7 |
8 | ⚠️ _The following script will partition and format /dev/sdb. If you have not configured the second HDD as recommended above and another drive is mounted at that location, its data may be deleted. You can change this location by editing the [init.sh](https://github.com/cyberg3cko/elrond/blob/main/elrond/tools/scripts/init.sh) script_
9 |
10 | `sudo git clone https://github.com/cyberg3cko/elrond.git /opt/elrond && sudo /opt/elrond/./make.sh`
11 | - ↓ ↓ `ENTER c g` *(apfs-fuse on x64 architecture only)*
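
Before running `make.sh`, it is worth double-checking that `/dev/sdb` really is the new, empty second disk rather than a data drive. A minimal sanity check, assuming the second HDD was added as described in the virtual machine guide:

```bash
# The second disk should appear as "sdb" with the expected size and no mountpoints
lsblk -o NAME,SIZE,TYPE,MOUNTPOINT

# A blank disk should have no filesystem signature; blkid then prints nothing and returns non-zero
sudo blkid /dev/sdb || echo "no filesystem signature on /dev/sdb"
```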
12 |
13 | ---
14 |
15 |
16 | [Revert Virtual Machine](https://github.com/cyberg3cko/elrond/blob/main/elrond/VIRTUALMACHINE.md)
--------------------------------------------------------------------------------
/elrond/SUPPORT.md:
--------------------------------------------------------------------------------
1 | # Additional Tools & Commands to Facilitate elrond Analysis
2 |
3 | Additional commands and tools to help get data ready for elrond
4 |
5 | # Table of Contents
6 |
7 | * [Preparing Disk Images](#Preparing-Disk-Images)
8 | * [Merging Multiple VMDK Files](#Merging-multiple-VMDK-Files)
9 | * [Convert DMG to E01](#Convert-DMG-to-E01)
10 | * [Preparing Memory Images](#Preparing-Memory-Images)
11 | * [Capturing Memory](#Capturing-Memory)
12 | * [Creating Symbol Tables (volatility3)](#Creating-Symbol-Tables-volatility3)
13 | * [Creating Profiles (volatility2.6)](#Creating-Profiles-volatility26)
14 | * [Exporting VM as OVA](#Exporting-VM-as-OVA)
15 | * [Appendix](#Appendix)
16 |
17 |
18 | # Preparing Disk Images
19 |
20 | ## Merging multiple VMDK Files
21 | * VMware Fusion
22 |
23 | `/Applications/VMware\ Fusion.app/Contents/Library/vmware-vdiskmanager -r .vmwarevm/Virtual\ Disk.vmdk -t 0 .vmdk`
24 | * VMware Workstation
25 |
26 | `C:\Program Files (x86)\VMware\VMware Player\vmware-vdiskmanager.exe -r .vmwarevm\VirtualDisk.vmdk -t 0 .vmdk`
27 |
28 |
29 |
30 | ## Convert DMG to E01
31 | If you have collected a macOS disk image in the form of a DMG, you can convert it into an E01. Note that this can only be done on a macOS device (preferably not the same host from which the disk was acquired).
32 | `brew install libewf`
33 | >
34 | `hdiutil attach -nomount .dmg`
35 | >
36 | `diskutil list`
37 | * Confirm the device name to which the DMG has been attached
38 |
39 | `ewfacquire -t evidence -v /dev/diskN`
40 | * Create evidence.E01 from /dev/diskN (N being the number it has been assigned - usually 3 or 4 but depends on how many additional disks or images are mounted)
41 | * Adjust 'Evidence segment file size' to a value larger than the size of the DMG - this forces ewfacquire to create a single E01 file instead of multiple segments
42 |
43 | `hdiutil detach /dev/diskN`
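
As an illustration, the whole sequence for a hypothetical image named `evidence.dmg` that attaches as `/dev/disk4` might look like this:

```bash
brew install libewf                    # provides ewfacquire
hdiutil attach -nomount evidence.dmg   # attach the DMG without mounting its volumes
diskutil list                          # note the device it attached as, e.g. /dev/disk4
ewfacquire -t evidence -v /dev/disk4   # acquire to evidence.E01; set the segment size larger than the DMG
hdiutil detach /dev/disk4              # release the DMG once acquisition has finished
```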
44 |
45 |
46 |
47 |
48 | # Preparing Memory Images
49 |
50 | ## Capturing Memory
51 | ### macOS
52 | * Download osxpmem from https://github.com/cyberg3cko/elrond/tree/main/tools/
53 |
54 | `sudo chown -R root:wheel osxpmem.app/ && sudo chmod +x osxpmem.app/osxpmem && sudo osxpmem.app/osxpmem -o mem.aff4 && sudo osxpmem.app/osxpmem -e /dev/pmem -o mem.raw mem.aff4`
55 |
56 | ### Linux
57 | * Download avml from https://github.com/cyberg3cko/elrond/tree/main/tools/
58 |
59 | `sudo chmod +x avml && sudo ./avml $(uname -r).mem`
60 |
61 |
62 |
63 | ## Creating Symbol Tables (volatility3)
64 | ### Linux
65 | #### Analysis Machine
66 | * Download the relevant debug symbols and execute the following commands (as applicable to your distro); a worked Ubuntu example is shown below:
67 |
68 | Obtain the relevant debug symbol
69 | `wget `
70 | * RHEL: https://access.redhat.com/solutions/9907
71 | `yum install /tmp/`
72 | * Ubuntu: http://ddebs.ubuntu.com/ubuntu/pool/main/l/linux/
73 | `dpkg -x /tmp/`
74 |
75 | Copy the created symbol table to the Analysis Machine
76 | `sudo ./dwarf2json linux --elf /tmp/usr/lib/debug/boot/.ddeb > .../volatility3/volatility3/symbols/linux/.json`
77 |
78 | See Appendix for information on additional Linux distros
79 |
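For example, on Ubuntu the end-to-end flow might look like the following; the kernel release `5.4.0-42-generic` and the exact ddeb filename are illustrative placeholders - use the release reported by `uname -r` on the target and the matching package from the ddebs repository:

```bash
# Download the debug-symbol package for the target kernel (filename is illustrative)
wget -P /tmp http://ddebs.ubuntu.com/ubuntu/pool/main/l/linux/linux-image-unsigned-5.4.0-42-generic-dbgsym_5.4.0-42.46_amd64.ddeb

# Extract it; the debug kernel is placed under usr/lib/debug/boot/
dpkg -x /tmp/linux-image-unsigned-5.4.0-42-generic-dbgsym_5.4.0-42.46_amd64.ddeb /tmp/kernel-dbgsym

# Generate the symbol table and copy it into the volatility3 Linux symbols directory (path elided as above)
sudo ./dwarf2json linux --elf /tmp/kernel-dbgsym/usr/lib/debug/boot/vmlinux-5.4.0-42-generic > Ubuntu-5.4.0-42-generic.json
cp Ubuntu-5.4.0-42-generic.json .../volatility3/volatility3/symbols/linux/
```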
80 |
81 | ## Creating Profiles (volatility2.6)
82 | ### macOS
83 | #### Target Machine
84 | * Identify the relevant Kernel for the version of macOS you are analysing
85 |
86 | #### Analysis Machine
87 | * Download the relevant Kernel Debug Kit: http://developer.apple.com/hardwaredrivers
88 | * Download volatility3 from https://github.com/cyberg3cko/elrond/tree/main/tools/
89 |
90 | `unzip volatility3.zip`
91 | >
92 | `dwarfdump -arch x86_64 /Library/Developer/KDKs/KDK__16D32.kdk/System/Library/Kernels/kernel.dSYM > _x64.dwarfdump`
93 | >
94 | `python tools/mac/convert.py .dwarfdump converted-_x64.dwarfdump`
95 | >
96 | `python tools/mac/convert.py converted-_x64.dwarfdump > 10.12.3.64bit.vtypes`
97 | >
98 | `dsymutil -s -arch x86_64 /Library/Developer/KDKs/KDK__16D32.kdk/System/Library/Kernels/kernel > .64bit.symbol.dsymutil`
99 | >
100 | `zip .64bit.zip .64bit.symbol.dsymutil .64bit.vtypes`
101 | >
102 | `cp .64bit.zip volatility/plugins/overlays/mac/`
103 |
104 |
105 |
106 |
107 | ### Linux
108 | #### Target Machine
109 | * Identify the relevant Kernel for the version of Linux you are analysing
110 |
111 | `uname -r`
112 | >
113 | `cd Downloads/ && sudo apt-get install build-essential && sudo apt-get install dwarfdump && git clone https://github.com/volatilityfoundation/volatility.git && cd volatility/tools/linux/ && sudo make -C /lib/modules/$(uname -r)-generic/build/ CONFIG_DEBUG_INFO=y M=$PWD modules && sudo rm -rf module.dwarf && sudo dwarfdump -di ./module.o > module.dwarf && sudo zip Ubuntu64-$(uname -r)-generic.zip module.dwarf /boot/System.map-$(uname -r)-generic && ls -lah`
114 |
115 |
116 |
117 | ### Analysis Machine
118 | `cp [RHEL|Ubuntu]64-$(uname -r).zip volatility/plugins/overlays/linux/`
119 |
120 | * Download volatility from https://github.com/cyberg3cko/elrond/tree/main/tools/
121 |
122 | `sudo apt-get install build-essential && sudo apt-get install dwarfdump`
123 | >
124 | `unzip volatility.zip && sudo rm -rf __MACOSX/ && cd volatility/tools/linux/ && HOSTNAME=$(uname -r)`
125 | >
126 | `sudo make -C /lib/modules/$(uname -r)/build/ CONFIG_DEBUG_INFO=y M=$PWD modules`
127 | >
128 | `sudo rm -rf module.dwarf && sudo dwarfdump -di ./module.o > module.dwarf`
129 | >
130 | `sudo zip [RHEL|Ubuntu]64-$(uname -r).zip module.dwarf /boot/System.map-$(uname -r)`
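
After copying the resulting zip into `volatility/plugins/overlays/linux/`, a quick way to confirm that volatility2.6 has registered the new profile (the memory image name and profile name below are placeholders; use the profile name exactly as `--info` reports it):

```bash
# Run from the volatility2.6 directory; the new Linux profile should appear in the list
python2 vol.py --info | grep -i Linux

# Then reference it when running plugins against the captured memory image
python2 vol.py -f memory.raw --profile=<ProfileName> linux_pslist
```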
131 |
132 |
133 |
134 | # Exporting VM as OVA
135 | * VMware Fusion
136 |
137 | `/Applications/VMware\ Fusion.app/Contents/Library/VMware\ OVF\ Tool/./ovftool --acceptAllEulas .vmwarevm/.vmx .ova`
138 |
139 |
140 |
141 | # Appendix
142 | ### Additional Linux Distro debuginfo
143 | These are required for volatility3 symbol tables; obtain the relevant debuginfo and install it in accordance with your Linux distro:
144 | * CentOS: http://debuginfo.centos.org
145 | * SUSE: http://blog.dynofu.me/post/2015/08/31/linux-kernel-dbuginfo.html
146 | * Debian: https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=365349
147 | * Fedora: https://fedoraproject.org/wiki/Packaging:Debuginfo
148 | * Oracle UEK: https://oss.oracle.com/ol6/debuginfo/
149 |
150 |
151 | ### Building avml (Analysis Machine)
152 | **Only required if execution of avml fails**
153 | `sudo mkdir /tmp/make_avml && sudo cd /tmp/make_avml && sudo apt-get install musl-dev musl-tools musl && sudo chmod 777 -R /tmp && curl https://sh.rustup.rs -sSf | sh -s -- -y && sudo snap install rustup --classic && rustup install stable && rustup target add x86_64-unknown-linux-musl && cargo new avml-main --bin && cd avml-main/ && cargo build --release --target x86_64-unknown-linux-musl && cp target/x86_64-unknown-linux-musl/release/avml `
154 | >
155 | `cd target/x86_64-unknown-linux-musl/release/` (directory path might be slightly different)
156 |
157 |
158 |
159 | ### Pre-created Profiles (volatility2.6)
160 |
161 | For a full list/repository of currently developed profiles for volatility2.6, please visit https://github.com/cyberg3cko/profiles
162 | Of course, if you develop your own, please adhere to the following naming conventions:
163 | * Uploading to GitHub (directory structure):
164 | * **profiles -> Mac -> 10.11 -> ElCapitan_10.11.1_15B42.zip**
165 | * **profiles -> Linux -> x64 -> Ubuntu1010[-4.4.0-203-generic].zip**
166 | * Importing into volatility locally (.../volatility/plugins/overlays/[mac|linux]/):
167 | * **10.11/ElCapitan_10.11.1_15B42.zip**
168 | * **LinuxUbuntu1010[-4.4.0-203-generic]x64.zip**
169 |
--------------------------------------------------------------------------------
/elrond/VIRTUALMACHINE.md:
--------------------------------------------------------------------------------
1 | # Preparing Virtual Machine
2 |
3 | * [Parallels](#Parallels)
4 | * [VMware](#VMware)
5 |
6 | ## Parallels
7 |
8 | Download and import [Ubuntu 22.04](https://ubuntu.com/download/server/arm) (adjust settings such as RAM if desired)
9 |
10 | ##### **Menu Bar -> Actions -> Configure... ->**
11 | - Hardware...
12 | - +...
13 | - Hard Disk...
14 | - **Type: `New image file`**
15 | - **Location: `Ubuntu 22.04-0.hdd`**
16 | - **Size: `20.0GB`** (more, if desired)
17 | - **OK**
18 | - Start Ubuntu
19 |
20 |
21 | ### Reverting Virtual Machine
22 |
23 | ##### **Menu Bar -> Actions -> Configure... ->**
24 |
25 | - **Security -> Isolate Linux from ...**
26 |
27 |
28 | ## VMware
29 |
30 | ### _Virtual Machine Settings_
31 | Download and import [SANS SIFT Workstation](https://www.sans.org/tools/sift-workstation/) (adjust settings such as RAM if desired)
32 |
33 | ##### **Menu Bar -> Virtual Machine -> Settings... ->**
34 |
35 | - **Network Adaptor -> NAT/Bridged**
36 |
37 | ##### **Menu Bar -> Virtual Machine -> Settings... ->**
38 | - Add Device...
39 | - New Hard Disk...
40 | - Add...
41 | - **File name: `Virtual Disk 2.vmdk`**
42 | - **Disk Size: `20.00GB`** (more, if desired)
43 | - **Advanced options: `None`**
44 | - **Apply**
45 | - Start SIFT
46 |
47 |
48 | ### Reverting Virtual Machine
49 |
50 | - ##### **Menu Bar -> Virtual Machine -> Settings... ->**
51 | - **Network Adaptor -> Custom (Private)**
52 |
53 | ---
54 |
55 | _If you experience any issues, please try building a new SIFT Workstation VM and then raise an issue inline with the instructions in the [README.md](https://github.com/cyberg3cko/elrond/blob/main/elrond/README.md)_
56 |
--------------------------------------------------------------------------------
/elrond/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cyberg3cko/elrond/5cfbed2461433cbcd91b35d0d36659cf32d59c6f/elrond/__init__.py
--------------------------------------------------------------------------------
/elrond/config.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | USERPROFILE=$(cat /etc/passwd | grep 1000 | cut -d ":" -f 1)
4 | HOSTNAME=$(hostname)
5 |
6 | clear
7 | printf "\n\n +--------- \e[1;31mOPTIONAL\e[m: RDS Hash Sets Download ---------+\n\n \e[0;36m$ /opt/cyber1of3/elrond/tools/config/scripts/./nsrl.sh\e[m \n\n\n"
8 | sleep 20
9 |
10 | sudo chmod 777 /etc/sysctl.conf
11 | echo fs.inotify.max_user_watches=1048576 | sudo tee -a /etc/sysctl.conf
12 | sudo chmod 644 /etc/sysctl.conf
13 | # creating linux_swap space
14 | sudo swapon /dev/sdb
15 | sudo swapoff /dev/sdb
16 | sudo umount /dev/sdb
17 | sudo mkswap /dev/sdb
18 | sudo swapon /dev/sdb
19 | sudo cp /etc/fstab /etc/fstab.orig
20 | sudo chmod 777 /etc/fstab
21 | echo "/dev/sdb swap swap defaults 0 0" >> /etc/fstab
22 | sudo chmod 664 /etc/fstab
23 |
24 | #/opt/cyber1of3/elrond/tools/config/scripts/./cloud.sh
25 | /opt/cyber1of3/elrond/tools/config/scripts/./tools.sh
26 |
27 | # setting hostname to cyber1of3 if not SANS SIFT
28 | if [[ "$(hostname)" != *"siftworkstation"* ]]; then
29 | sudo hostnamectl set-hostname cyber1of3
30 | fi
31 |
32 | # installing vmware-tools if applicable
33 | if [[ "$(sudo dmesg | grep -E "DMI|Hypervisor")" == *"VMware"* ]]; then
34 | # installing vmware_tools
35 | /opt/cyber1of3/elrond/tools/config/scripts/./VMware.sh
36 | fi
37 |
38 | # installing apfs-fuse if architecture is not ARM
39 | if [[ "$(uname -a)" != *"aarch"* ]]; then
40 | # installing apfs-fuse
41 | /opt/cyber1of3/elrond/tools/config/scripts/./apfs-fuse.sh
42 | wget -O /tmp/vscode.deb https://vscode.download.prss.microsoft.com/dbazure/download/stable/b58957e67ee1e712cebf466b995adf4c5307b2bd/code_1.89.0-1714530869_amd64.deb
43 | else
44 | wget -O /tmp/vscode.deb https://vscode.download.prss.microsoft.com/dbazure/download/stable/b58957e67ee1e712cebf466b995adf4c5307b2bd/code_1.89.0-1714529372_arm64.deb
45 | fi
46 | # installing code
47 | sudo dpkg -i /tmp/vscode.deb
48 |
49 | # installing ShimCacheParser if not installed
50 | if [ -f "/usr/local/bin/ShimCacheParser.py" ]; then
51 | : # skip - ShimCacheParser.py is already installed
52 | else
53 | wget https://raw.githubusercontent.com/mandiant/ShimCacheParser/refs/heads/master/ShimCacheParser.py
54 | sudo mv ShimCacheParser.py /usr/local/bin/
55 | sudo chown -R root:root /usr/local/bin/ShimCacheParser.py
56 | sudo chmod -R 755 /usr/local/bin/ShimCacheParser.py
57 | sudo chmod +x /usr/local/bin/ShimCacheParser.py
58 | fi
59 |
60 | # installing regripper if not installed
61 | if [ -d "/usr/local/src/regripper" ]; then
62 | # updating regripper
63 | sudo cp /usr/share/regripper/rip.pl /usr/share/regripper/rip.pl.old
64 | sudo sed -i 's/my \$VERSION/# Add: Define the variable plugindir\nmy \$plugindir = File::Spec->catfile\(\$scriptdir, "plugins"\);\n\nmy \$VERSION/' /usr/share/regripper/rip.pl
65 | else
66 | sudo /opt/cyber1of3/elrond/tools/config/scripts/./regrip.sh
67 | fi
68 |
69 | /opt/cyber1of3/elrond/tools/config/scripts/./volatility3.sh
70 | printf "\n -> Downloading MITRE ATT&CK Framework Enterprise v15.1..."
71 | sudo mkdir /opt/cyber1of3/elrond/tools/attack-navigator
72 | sudo chmod -R 744 /opt/cyber1of3/elrond/tools/attack-navigator
73 | sudo chown -R "$USERPROFILE":"$USERPROFILE" /opt/cyber1of3/elrond/tools/attack-navigator
74 | sudo python3 /opt/cyber1of3/elrond/tools/config/mitre.py
75 |
76 | # configuring elastic
77 | sudo /bin/systemctl daemon-reload
78 | sudo /bin/systemctl enable elasticsearch.service
79 | sudo /bin/systemctl enable kibana.service
80 | sudo sysctl -w vm.max_map_count=262144
81 | echo vm.max_map_count=262144 | sudo tee -a /etc/sysctl.conf
82 | sleep 1
83 | sudo sysctl -p
84 |
85 | /opt/cyber1of3/elrond/tools/config/scripts/./navigator.sh
86 | /opt/cyber1of3/elrond/tools/config/scripts/./finish.sh
87 | sleep 2
88 |
89 | clear
90 | printf "\n\n -> '"$(hostname)"' has been successfully configured for elrond; a reboot is required. Press ENTER to continue..."
91 | read answer
92 | echo '' | sudo tee ~/.bash_history
93 | history -c
94 | sudo reboot
95 |
--------------------------------------------------------------------------------
/elrond/elrond.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | USERPROFILE=$(cat /etc/passwd | grep 1000 | cut -d ":" -f 1)
4 | HOSTNAME=$(hostname)
5 |
6 | # change desktop background
7 | gsettings set org.gnome.desktop.background picture-uri file:///opt/elrond/elrond/images/elrond_background.jpg
8 |
9 | # change favourite apps
10 | gsettings set org.gnome.shell favorite-apps "['org.gnome.seahorse.Application.desktop', 'org.gnome.Nautilus.desktop', 'org.flameshot.Flameshot.desktop', 'firefox_firefox.desktop', 'firefox.desktop', 'org.gnome.Terminal.desktop', 'code.desktop', 'bless.desktop', 'cyberchef_cyberchef.desktop', 'wireshark.desktop', 'cutter-re.desktop', 'sqlitebrowser_sqlitebrowser.desktop', 'maltego.desktop']"
11 |
12 | # configure terminal to launch on login
13 | sudo rm -rf /home/$USERPROFILE/.config/autostart/gnome-terminal.desktop
14 | sudo rm -rf gnome-terminal.desktop
15 | echo "[Desktop Entry]
16 | Type=Application
17 | Exec=gnome-terminal
18 | Hidden=false
19 | NoDisplay=false
20 | X-GNOME-Autostart-enabled=true
21 | Name[en_NG]=Terminal
22 | Name=Terminal
23 | Comment[en_NG]=Start Terminal On Startup
24 | Comment=Start Terminal On Startup" > gnome-terminal.desktop
25 | sudo chmod 744 gnome-terminal.desktop
26 | sudo chown -R "$USERPROFILE":"$USERPROFILE" gnome-terminal.desktop
27 | mkdir -p /home/$USERPROFILE/.config/autostart
28 | sudo mv gnome-terminal.desktop /home/$USERPROFILE/.config/autostart/
29 | sudo chmod 744 /home/$USERPROFILE/.config/autostart/gnome-terminal.desktop
30 | sudo chmod -R 744 ~/Desktop/CobaltStrike-Defence
31 |
32 | echo '' | sudo tee ~/.bash_history
33 | history -c
34 | clear
35 | sudo reboot
--------------------------------------------------------------------------------
/elrond/images/elrond_background.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cyberg3cko/elrond/5cfbed2461433cbcd91b35d0d36659cf32d59c6f/elrond/images/elrond_background.jpg
--------------------------------------------------------------------------------
/elrond/images/elrond_meme.jpeg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cyberg3cko/elrond/5cfbed2461433cbcd91b35d0d36659cf32d59c6f/elrond/images/elrond_meme.jpeg
--------------------------------------------------------------------------------
/elrond/images/elrond_screenshot.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cyberg3cko/elrond/5cfbed2461433cbcd91b35d0d36659cf32d59c6f/elrond/images/elrond_screenshot.jpg
--------------------------------------------------------------------------------
/elrond/images/logo_trans_big.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cyberg3cko/elrond/5cfbed2461433cbcd91b35d0d36659cf32d59c6f/elrond/images/logo_trans_big.png
--------------------------------------------------------------------------------
/elrond/images/logo_trans_small.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cyberg3cko/elrond/5cfbed2461433cbcd91b35d0d36659cf32d59c6f/elrond/images/logo_trans_small.png
--------------------------------------------------------------------------------
/elrond/images/logo_white.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cyberg3cko/elrond/5cfbed2461433cbcd91b35d0d36659cf32d59c6f/elrond/images/logo_white.png
--------------------------------------------------------------------------------
/elrond/images/splunkicon_clear.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cyberg3cko/elrond/5cfbed2461433cbcd91b35d0d36659cf32d59c6f/elrond/images/splunkicon_clear.png
--------------------------------------------------------------------------------
/elrond/images/splunkicon_white.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cyberg3cko/elrond/5cfbed2461433cbcd91b35d0d36659cf32d59c6f/elrond/images/splunkicon_white.png
--------------------------------------------------------------------------------
/elrond/images/splunklogo_clear.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cyberg3cko/elrond/5cfbed2461433cbcd91b35d0d36659cf32d59c6f/elrond/images/splunklogo_clear.png
--------------------------------------------------------------------------------
/elrond/images/splunklogo_white.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cyberg3cko/elrond/5cfbed2461433cbcd91b35d0d36659cf32d59c6f/elrond/images/splunklogo_white.png
--------------------------------------------------------------------------------
/elrond/images/wwhf.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cyberg3cko/elrond/5cfbed2461433cbcd91b35d0d36659cf32d59c6f/elrond/images/wwhf.jpg
--------------------------------------------------------------------------------
/elrond/images/wwhf.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cyberg3cko/elrond/5cfbed2461433cbcd91b35d0d36659cf32d59c6f/elrond/images/wwhf.png
--------------------------------------------------------------------------------
/elrond/rivendell/analysis/keywords.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3 -tt
2 | import os
3 | import re
4 | import time
5 | from datetime import datetime
6 |
7 | from rivendell.audit import write_audit_log_entry
8 |
9 |
10 | def write_keywords(
11 | output_directory,
12 | verbosity,
13 | img,
14 | vssimage,
15 | keyword_search_file,
16 | keywords_target_file,
17 | eachkeyword,
18 | encoding_choice,
19 | vsstext,
20 | ):
21 | keyword_line_number = 1
22 | for eachline in keyword_search_file:
23 | if eachkeyword.lower().strip() in eachline.lower().strip():
24 | (
25 | entry,
26 | prnt,
27 | ) = "{},{},keyword identified,{} (line {}) found in {}\n".format(
28 | datetime.now().isoformat(),
29 | vssimage,
30 | eachkeyword.strip(),
31 | keyword_line_number,
32 | keywords_target_file.split("/")[-1],
33 | ), " -> {} -> identified keyword '{}' on line {} in '{}' from {}{}".format(
34 | datetime.now().isoformat().replace("T", " "),
35 | eachkeyword.strip(),
36 | keyword_line_number,
37 | keywords_target_file.split("/")[-1],
38 | vssimage,
39 | vsstext,
40 | )
41 | write_audit_log_entry(verbosity, output_directory, entry, prnt)
42 | keyword_match_entry = "{},{},{},{},{},{},{}\n".format(
43 | str(
44 | datetime.fromtimestamp(
45 | os.path.getctime(keywords_target_file.split(": ")[0])
46 | )
47 | ),
48 | str(
49 | datetime.fromtimestamp(
50 | os.path.getatime(keywords_target_file.split(": ")[0])
51 | )
52 | ),
53 | str(
54 | datetime.fromtimestamp(
55 | os.path.getmtime(keywords_target_file.split(": ")[0])
56 | )
57 | ),
58 | eachkeyword.strip(),
59 | keywords_target_file.replace(",", "%2C"),
60 | str(keyword_line_number),
61 | eachline.strip().replace(",", "%2C").replace("\n", "\\n"),
62 | )
63 | kw_match_entry = (
64 | str(keyword_match_entry.split())[2:-2]
65 | .replace("', '", " ")
66 | .replace("\\x", "\\\\x")
67 | .replace("\\\\\\", "\\\\")
68 | )
69 | if len(keyword_match_entry.split(",")[-1]) > 200:
70 | kw_match_entry = (
71 | ",".join(keyword_match_entry.split(",")[0:-1])
72 | + ","
73 | + keyword_match_entry.split(",")[-1][0:200]
74 | + "<>TRUNCATED<>\n"
75 | )
76 | else:
77 | kw_match_entry = kw_match_entry + "\n"
78 | with open(
79 | output_directory + img.split("::")[0] + "/analysis/keyword_matches.csv",
80 | "a",
81 | encoding=encoding_choice,
82 | ) as keyword_matches_results_file:
83 | keyword_matches_results_file.write(kw_match_entry)
84 | keyword_line_number += 1
85 |
86 |
87 | def search_keywords(
88 | verbosity,
89 | output_directory,
90 | img,
91 | keywords,
92 | keywords_target_list,
93 | vssimage,
94 | insert,
95 | vsstext,
96 | ):
97 | if not os.path.exists(output_directory + img.split("::")[0] + "/analysis/"):
98 | os.mkdir(output_directory + img.split("::")[0] + "/analysis/")
99 | with open(
100 | output_directory + img.split("::")[0] + "/analysis/keyword_matches.csv",
101 | "a",
102 | ) as keyword_matches_results_file:
103 | keyword_matches_results_file.write(
104 | "CreationTime,LastAccessTime,LastWriteTime,keyword,Filename,line_number,line_entry\n"
105 | )
106 | with open(keywords[0], "r") as keywords_source_file:
107 | for eachkeyword in keywords_source_file:
108 | if verbosity != "":
109 | print(
110 | " Searching for keyword '{}' from {}...".format(
111 | eachkeyword.strip(), insert
112 | )
113 | )
114 | for keywords_target_file in keywords_target_list:
115 | try:
116 | encoding_choice = "UTF-8"
117 | with open(
118 | keywords_target_file, "r", encoding=encoding_choice
119 | ) as keyword_search_file:
120 | write_keywords(
121 | output_directory,
122 | verbosity,
123 | img,
124 | vssimage,
125 | keyword_search_file,
126 | keywords_target_file,
127 | eachkeyword,
128 | encoding_choice,
129 | vsstext,
130 | )
131 | except:
132 | encoding_choice = "ISO-8859-1"
133 | with open(
134 | keywords_target_file, "r", encoding=encoding_choice
135 | ) as keyword_search_file:
136 | write_keywords(
137 | output_directory,
138 | verbosity,
139 | img,
140 | vssimage,
141 | keyword_search_file,
142 | keywords_target_file,
143 | eachkeyword,
144 | encoding_choice,
145 | vsstext,
146 | )
147 |
148 |
149 | def build_keyword_list(mnt):
150 | keywords_target_list = []
151 | for keyword_search_root, _, keyword_search_file in os.walk(mnt):
152 | for keyword_search_file in keyword_search_file:
153 | try:
154 | if (
155 | os.stat(
156 | os.path.join(keyword_search_root, keyword_search_file)
157 | ).st_size
158 | > 0
159 | and os.stat(
160 | os.path.join(keyword_search_root, keyword_search_file)
161 | ).st_size
162 | < 100000000
163 | and not os.path.islink(
164 | os.path.join(keyword_search_root, keyword_search_file)
165 | ) # 100MB
166 | ):
167 | with open(
168 | os.path.join(keyword_search_root, keyword_search_file), "r"
169 | ) as filetest:
170 | filetest.readline()
171 | keywords_target_list.append(
172 | os.path.join(keyword_search_root, keyword_search_file)
173 | )
174 | except:
175 | pass
176 | try:
177 | if (
178 | os.stat(
179 | os.path.join(keyword_search_root, keyword_search_file)
180 | ).st_size
181 | > 0
182 | and os.stat(
183 | os.path.join(keyword_search_root, keyword_search_file)
184 | ).st_size
185 | < 100000000
186 | and not os.path.islink(
187 | os.path.join(keyword_search_root, keyword_search_file)
188 | ) # 100MB
189 | ):
190 | with open(
191 | os.path.join(keyword_search_root, keyword_search_file),
192 | "r",
193 | encoding="ISO-8859-1",
194 | ) as filetest:
195 | filetest.readline()
196 | keywords_target_list.append(
197 | os.path.join(keyword_search_root, keyword_search_file)
198 | )
199 | except:
200 | pass
201 | return keywords_target_list
202 |
203 |
204 | def prepare_keywords(verbosity, output_directory, auto, imgs, flags, keywords, stage):
205 | if stage == "mounting":
206 | if not auto:
207 | yes_kw = input(
208 | " Do you wish to conduct Keyword Searching for '{}'? Y/n [Y] ".format(
209 | img.split("::")[0]
210 | )
211 | )
212 | if auto or yes_kw != "n":
213 | print(
214 | "\n\n -> \033[1;36mCommencing Keyword Searching Phase...\033[1;m\n ----------------------------------------"
215 | )
216 | time.sleep(1)
217 | for mnt, img in imgs.items():
218 | stage = "keyword searching"
219 | if "vss" in img.split("::")[1]:
220 | vssimage, vsstext = (
221 | "'"
222 | + img.split("::")[0]
223 | + "' ("
224 | + img.split("::")[1]
225 | .split("_")[1]
226 | .replace("vss", "volume shadow copy #")
227 | + ")",
228 | " ("
229 | + img.split("::")[1]
230 | .split("_")[1]
231 | .replace("vss", "volume shadow copy #")
232 | + ")",
233 | )
234 | else:
235 | vssimage, vsstext = "'" + img.split("::")[0] + "'", ""
236 | print(" Conducting Keyword Searching for {}...".format(vssimage))
237 | entry, prnt = "{},{},{},commenced\n".format(
238 | datetime.now().isoformat(), vssimage.replace("'", ""), stage
239 | ), " -> {} -> {} commenced for '{}'{}".format(
240 | datetime.now().isoformat().replace("T", " "),
241 | stage,
242 | img.split("::")[0],
243 | vsstext,
244 | )
245 | write_audit_log_entry(verbosity, output_directory, entry, prnt)
246 | print(
247 | " Assessing readable files in {} before searching for keywords...".format(
248 | vssimage
249 | )
250 | )
251 | keywords_target_list = build_keyword_list(mnt)
252 |
253 | search_keywords(
254 | verbosity,
255 | output_directory,
256 | img,
257 | keywords,
258 | keywords_target_list,
259 | vssimage,
260 | vssimage,
261 | vsstext,
262 | )
263 | print(" -> Completed Keyword Searching Phase for {}".format(vssimage))
264 | entry, prnt = "{},{},{},completed\n".format(
265 | datetime.now().isoformat(),
266 | vssimage.replace("'", ""),
267 | "keyword searching",
268 | ), " -> {} -> keyword searching completed for {}".format(
269 | datetime.now().isoformat().replace("T", " "), vssimage
270 | )
271 | write_audit_log_entry(verbosity, output_directory, entry, prnt)
272 | print()
273 | print(
274 | " ----------------------------------------\n -> Completed Keyword Searching Phase.\n"
275 | )
276 | time.sleep(1)
277 | else:
278 | for each in imgs:
279 | if os.path.exists(
280 | os.path.join(output_directory, each.split("::")[0], "artefacts")
281 | ):
282 | mnt = os.path.join(output_directory, each.split("::")[0], "artefacts")
283 | keywords_target_list = build_keyword_list(mnt)
284 | search_keywords(
285 | verbosity,
286 | output_directory,
287 | each.split("::")[0],
288 | keywords,
289 | keywords_target_list,
290 | each.split("::")[0],
291 | "collected/processed artefacts",
292 | "",  # no volume shadow copy context when searching collected/processed artefacts
293 | )
294 | if os.path.exists(
295 | os.path.join(output_directory, each.split("::")[0], "files")
296 | ): # for office documents and archives - extract and then build keyword search list
297 | mnt = os.path.join(output_directory, each.split("::")[0], "files")
298 | keywords_target_list = build_keyword_list(mnt)
299 | search_keywords(
300 | verbosity,
301 | output_directory,
302 | each.split("::")[0],
303 | keywords,
304 | keywords_target_list,
305 | each.split("::")[0],
306 | "collected files",
307 | "",  # no volume shadow copy context when searching collected files
308 | )
309 | if "keyword searching" not in str(flags):
310 | flags.append("03keyword searching")
311 |
--------------------------------------------------------------------------------
/elrond/rivendell/audit.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3 -tt
2 | from datetime import datetime
3 |
4 |
5 | def write_audit_log_entry(verbosity, output_directory, entry, prnt):
6 | if "LastWriteTime,elrond_host,elrond_stage,elrond_log_entry\n" in entry:
7 | writemode = "w"
8 | else:
9 | writemode = "a"
10 | with open(
11 | output_directory
12 | + str(str(prnt.split("'")[-2]).split("/")[-1]).split("::")[0]
13 | + "/log.audit",
14 | writemode,
15 | ) as logentry:
16 | logentry.write(entry.replace("'", ""))
17 | if prnt != "":
18 | print(prnt)
19 |
20 |
21 | def manage_error(output_directory, verbosity, error, state, img, item, vsstext):
22 | entry, prnt = "{},{},{} failed ({}),'{}'\n".format(
23 | datetime.now().isoformat(),
24 | img.split("::")[0],
25 | state,
26 | str(error).split("] ")[-1],
27 | item.strip("/").split("/")[-1],
28 | ), " -> {} -> ERROR - {}: {}; {} failed for '{}'{} from '{}'".format(
29 | datetime.now().isoformat().replace("T", " "),
30 | str(error).split("] ")[-1].split(": ")[0],
31 | "'" + str(error).split("] ")[-1].split(": ")[1].strip("'")[-24:-4] + "'",
32 | state,
33 | item.strip("/").split("/")[-1],
34 | vsstext.replace("vss", "volume shadow copy #"),
35 | img.split("::")[0],
36 | )
37 | write_audit_log_entry(verbosity, output_directory, entry, prnt)
38 |
--------------------------------------------------------------------------------
/elrond/rivendell/collect/files/carve.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3 -tt
2 | import os
3 | import subprocess
4 | import time
5 | from datetime import datetime
6 |
7 | from rivendell.audit import write_audit_log_entry
8 |
9 |
10 | def carve_files(output_directory, verbosity, d, artefact_directory, img, vssimage):
11 | print(
12 | "\n \033[1;33mCarving files from unallocated space for {}...\033[1;m".format(
13 | vssimage
14 | )
15 | )
16 | subprocess.Popen(
17 | [
18 | "foremost",
19 | d + img.split("::")[0],
20 | "-o",
21 | artefact_directory + "/carved",
22 | ],
23 | stdout=subprocess.PIPE,
24 | stderr=subprocess.PIPE,
25 | ).communicate()
26 | if os.path.exists(artefact_directory + "/carved/audit.txt"):
27 | os.remove(artefact_directory + "/carved/audit.txt")
28 | for eachdir in os.listdir(artefact_directory + "/carved"):
29 | for eachfile in os.listdir(artefact_directory + "/carved/" + eachdir):
30 | print(" Successfully carved '{}' from {}".format(eachfile, vssimage))
31 | entry, prnt = "{},{},{},'{}'\n".format(
32 | datetime.now().isoformat(),
33 | vssimage.replace("'", ""),
34 | "carving",
35 | eachfile,
36 | ), " -> {} -> {} artefact '{}' for {}".format(
37 | datetime.now().isoformat().replace("T", " "),
38 | "carved",
39 | eachfile,
40 | vssimage,
41 | )
42 | write_audit_log_entry(verbosity, output_directory, entry, prnt)
43 | time.sleep(0.5)
44 |
45 | entry, prnt = "{},{},{},completed\n".format(
46 | datetime.now().isoformat(), vssimage.replace("'", ""), "carving"
47 | ), " -> {} -> {} artefacts from {}".format(
48 | datetime.now().isoformat().replace("T", " "),
49 | "carved",
50 | vssimage,
51 | )
52 | write_audit_log_entry(verbosity, output_directory, entry, prnt)
53 | print(
54 | " \033[1;33mCarved all available files, from {}\n\033[1;m".format(
55 | vssimage
56 | )
57 | )
58 |
--------------------------------------------------------------------------------
/elrond/rivendell/collect/files/compare.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3 -tt
2 | import os
3 | import shutil
4 | from datetime import datetime
5 |
6 | from rivendell.audit import write_audit_log_entry
7 |
8 |
9 | def multiple_files(source, destination, increment):
10 | def copy_files(source, destination, increment):
11 | if os.path.exists(source):
12 | shutil.copy2(source, destination + "." + str(increment))
13 |
14 | if os.path.exists(destination + "." + str(increment)):
15 | increment += 1
16 | multiple_files(source, destination, increment)
17 | else:
18 | copy_files(source, destination, increment)
19 | increment += 1
20 |
21 |
22 | def compare_include_exclude(
23 | output_directory,
24 | verbosity,
25 | stage,
26 | img,
27 | vssimage,
28 | recpath,
29 | filetype,
30 | recovered_file_root,
31 | recovered_file,
32 | increment,
33 | collectfiles,
34 | ):
35 | def successful_copy(
36 | verbosity, output_directory, img, stage, vssimage, recovered_file, filetype
37 | ):
38 | (
39 | entry,
40 | prnt,
41 | ) = "{},{},{},{} '{}'\n".format(
42 | datetime.now().isoformat(),
43 | img.split("::")[0],
44 | stage,
45 | filetype,
46 | recovered_file,
47 | ), " -> {} -> {} {} '{}' from {}".format(
48 | datetime.now().isoformat().replace("T", " "),
49 | stage.replace(",", " &"),
50 | filetype,
51 | recovered_file,
52 | vssimage,
53 | )
54 | write_audit_log_entry(verbosity, output_directory, entry, prnt)
55 |
56 | if os.path.exists(
57 | output_directory + img.split("::")[0] + recpath + recovered_file
58 | ): # multiple files with the same name
59 | if collectfiles != True:
60 | with open(collectfiles.split(":")[1]) as include_or_exclude_selection_file:
61 | for inc_ex_line in include_or_exclude_selection_file:
62 | if collectfiles.split(":")[0] == "include" and (
63 | inc_ex_line.strip() in recovered_file
64 | ):
65 | multiple_files(
66 | os.path.join(recovered_file_root, recovered_file),
67 | output_directory
68 | + img.split("::")[0]
69 | + recpath
70 | + recovered_file,
71 | increment,
72 | )
73 | successful_copy(
74 | verbosity,
75 | output_directory,
76 | img,
77 | stage,
78 | vssimage,
79 | recovered_file,
80 | filetype,
81 | )
82 | elif collectfiles.split(":")[0] == "exclude" and (
83 | inc_ex_line.strip() not in recovered_file
84 | ):
85 | multiple_files(
86 | os.path.join(recovered_file_root, recovered_file),
87 | output_directory
88 | + img.split("::")[0]
89 | + recpath
90 | + recovered_file,
91 | increment,
92 | )
93 | successful_copy(
94 | verbosity,
95 | output_directory,
96 | img,
97 | stage,
98 | vssimage,
99 | recovered_file,
100 | filetype,
101 | )
102 | else:
103 | multiple_files(
104 | os.path.join(recovered_file_root, recovered_file),
105 | output_directory + img.split("::")[0] + recpath + recovered_file,
106 | increment,
107 | )
108 | else: # files with unique name
109 | if collectfiles != True:
110 | with open(collectfiles.split(":")[1]) as include_or_exclude_selection_file:
111 | for inc_ex_line in include_or_exclude_selection_file:
112 | if collectfiles.split(":")[0] == "include" and (
113 | inc_ex_line.strip() in recovered_file
114 | ):
115 | if os.path.exists(
116 | os.path.join(recovered_file_root, recovered_file)
117 | ):
118 | shutil.copy2(
119 | os.path.join(recovered_file_root, recovered_file),
120 | output_directory + img.split("::")[0] + recpath,
121 | )
122 | successful_copy(
123 | verbosity,
124 | output_directory,
125 | img,
126 | stage,
127 | vssimage,
128 | recovered_file,
129 | filetype,
130 | )
131 | else:
132 | copy_success = False
133 | elif collectfiles.split(":")[0] == "exclude" and (
134 | inc_ex_line.strip() not in recovered_file
135 | ):
136 | if os.path.exists(
137 | os.path.join(recovered_file_root, recovered_file)
138 | ):
139 | shutil.copy2(
140 | os.path.join(recovered_file_root, recovered_file),
141 | output_directory + img.split("::")[0] + recpath,
142 | )
143 | successful_copy(
144 | verbosity,
145 | output_directory,
146 | img,
147 | stage,
148 | vssimage,
149 | recovered_file,
150 | filetype,
151 | )
152 | else:
153 | copy_success = False
154 | else:
155 | copy_success = False
156 | else:
157 | if os.path.exists(os.path.join(recovered_file_root, recovered_file)):
158 | shutil.copy2(
159 | os.path.join(recovered_file_root, recovered_file),
160 | output_directory + img.split("::")[0] + recpath,
161 | )
162 | successful_copy(
163 | verbosity,
164 | output_directory,
165 | img,
166 | stage,
167 | vssimage,
168 | recovered_file,
169 | filetype,
170 | )
171 | else:
172 | copy_success = False
173 |
--------------------------------------------------------------------------------
/elrond/rivendell/collect/files/i30.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3 -tt
2 | import os
3 | import shutil
4 | import subprocess
5 | from datetime import datetime
6 |
7 | from rivendell.audit import write_audit_log_entry
8 | from rivendell.mount import obtain_offset
9 |
10 |
11 | def rip_i30(output_directory, img, offset):
12 | if not os.path.exists(
13 | output_directory + img.split("::")[0] + "/" + "artefacts/I30_" + offset + ".csv"
14 | ):
15 | indxripper_result = subprocess.Popen(
16 | [
17 | "sudo",
18 | "python3.9",
19 | "/opt/elrond/elrond/tools/INDXRipper/INDXRipper.py",
20 | "-w",
21 | "csv",
22 | "-o",
23 | "{}".format(offset),
24 | "/mnt/i30_{}/ewf1".format(img.split("::")[0]),
25 | output_directory
26 | + img.split("::")[0]
27 | + "/"
28 | + "artefacts/I30_"
29 | + offset
30 | + ".csv",
31 | ],
32 | stdout=subprocess.PIPE,
33 | stderr=subprocess.PIPE,
34 | ).communicate()
35 | else:
36 | indxripper_result = ["", ""]
37 | return indxripper_result
38 |
39 |
40 | def extract_i30(
41 | output_directory,
42 | verbosity,
43 | stage,
44 | d,
45 | img,
46 | vssimage,
47 | ):
48 | if ("Windows" in img.split("::")[1] and "memory_" not in img.split("::")[1]) and (
49 | "I30_"
50 | not in str(
51 | os.listdir(output_directory + img.split("::")[0] + "/" + "artefacts")
52 | )
53 | ):
54 | if verbosity != "":
55 | print(
56 | " Extracting '$I30' records from '{}'...".format(img.split("::")[0])
57 | )
58 | for image_root, _, image_files in os.walk(d):
59 | for image_file in image_files:
60 | if (
61 | image_file.endswith(".E01") or image_file.endswith(".e01")
62 | ) and img.split("::")[0] in image_file:
63 | i30_source = os.path.join(image_root, image_file)
64 | if os.path.exists("/mnt/i30_{}".format(img.split("::")[0])):
65 | subprocess.Popen(
66 | ["umount", "/mnt/i30_{}".format(img.split("::")[0])],
67 | stdout=subprocess.PIPE,
68 | stderr=subprocess.PIPE,
69 | ).communicate()
70 | shutil.rmtree("/mnt/i30_{}".format(img.split("::")[0]))
71 | os.mkdir("/mnt/i30_{}".format(img.split("::")[0]))
72 | os.chmod("/mnt/i30_{}".format(img.split("::")[0]), 0o0777)
73 | if img.split("::")[0].endswith(".E01") or img.split("::")[
74 | 0
75 | ].endswith(".e01"):
76 | subprocess.Popen(
77 | [
78 | "ewfmount",
79 | i30_source,
80 | "/mnt/i30_{}".format(img.split("::")[0]),
81 | ],
82 | stdout=subprocess.PIPE,
83 | stderr=subprocess.PIPE,
84 | ).communicate()
85 | subprocess.Popen(
86 | [
87 | "chmod",
88 | "-f",
89 | "777",
90 | "/mnt/i30_{}".format(img.split("::")[0]),
91 | ]
92 | ).communicate()
93 | subprocess.Popen(
94 | [
95 | "chmod",
96 | "-f",
97 | "777",
98 | "/mnt/i30_{}/ewf1".format(img.split("::")[0]),
99 | ]
100 | ).communicate()
101 | indxripper_result = rip_i30(output_directory, img, "0")
102 | if (
103 | "invalid volume boot record"
104 | in str(indxripper_result[1])[2:-3]
105 | ):
106 | offset_values = obtain_offset(
107 | "/mnt/i30_{}/ewf1".format(img.split("::")[0])
108 | )
109 | for eachoffset in offset_values:
110 | if verbosity != "":
111 | print(
112 | " Extracting '$I30' records from offset '#{}' for '{}'...".format(
113 | eachoffset, img.split("::")[0]
114 | )
115 | )
116 | indxripper_result = rip_i30(
117 | output_directory, img, str(eachoffset)
118 | )
119 | if str(indxripper_result[1]) != "b''":
120 | (
121 | entry,
122 | prnt,
123 | ) = "{},{},recovery,$I30 records (failed)\n".format(
124 | datetime.now().isoformat(),
125 | vssimage.replace("'", ""),
126 | ), " -> {} -> recovery of $I30 records failed from {}".format(
127 | datetime.now().isoformat().replace("T", " "),
128 | vssimage,
129 | )
130 | write_audit_log_entry(
131 | verbosity, output_directory, entry, prnt
132 | )
133 | elif str(indxripper_result[1]) == "b''":
134 | (
135 | entry,
136 | prnt,
137 | ) = "{},{},{},$I30 records (#{})\n".format(
138 | datetime.now().isoformat(),
139 | vssimage.replace("'", ""),
140 | stage,
141 | eachoffset,
142 | ), " -> {} -> {} $I30 records (#{}) from {}".format(
143 | datetime.now().isoformat().replace("T", " "),
144 | stage,
145 | eachoffset,
146 | vssimage,
147 | )
148 | write_audit_log_entry(
149 | verbosity, output_directory, entry, prnt
150 | )
151 | else:
152 | entry, prnt = "{},{},{},$I30 records\n".format(
153 | datetime.now().isoformat(),
154 | vssimage.replace("'", ""),
155 | stage,
156 | ), " -> {} -> {} $I30 records from {}".format(
157 | datetime.now().isoformat().replace("T", " "),
158 | stage,
159 | vssimage,
160 | )
161 | write_audit_log_entry(
162 | verbosity, output_directory, entry, prnt
163 | )
164 | subprocess.Popen(
165 | ["umount", "/mnt/i30_{}".format(img.split("::")[0])],
166 | stdout=subprocess.PIPE,
167 | stderr=subprocess.PIPE,
168 | ).communicate()
169 | shutil.rmtree("/mnt/i30_{}".format(img.split("::")[0]))
170 |
--------------------------------------------------------------------------------
/elrond/rivendell/core/core.py:
--------------------------------------------------------------------------------
1 | import os
2 | import shutil
3 | import sys
4 | import time
5 | from datetime import datetime
6 |
7 | from rivendell.analysis.analysis import analyse_artefacts
8 | from rivendell.analysis.keywords import prepare_keywords
9 | from rivendell.audit import write_audit_log_entry
10 | from rivendell.collect.collect import collect_artefacts
11 | from rivendell.collect.reorganise import reorganise_artefacts
12 | from rivendell.process.select import select_pre_process_artefacts
13 | from rivendell.process.timeline import create_plaso_timeline
14 |
15 |
16 | def collect_process_keyword_analysis_timeline(
17 | auto,
18 | collect,
19 | process,
20 | analysis,
21 | extractiocs,
22 | timeline,
23 | vss,
24 | collectfiles,
25 | nsrl,
26 | keywords,
27 | volatility,
28 | metacollected,
29 | superquick,
30 | quick,
31 | reorganise,
32 | symlinks,
33 | userprofiles,
34 | verbose,
35 | d,
36 | cwd,
37 | sha256,
38 | flags,
39 | system_artefacts,
40 | output_directory,
41 | verbosity,
42 | f,
43 | allimgs,
44 | imgs,
45 | path,
46 | volchoice,
47 | vssmem,
48 | memtimeline,
49 | stage,
50 | ):
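|     # drives the collection, processing, keyword-searching, analysis and timelining phases in turn, based on the supplied flags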
51 | if (collect and len(imgs) != 0) or reorganise:
52 | if collect:
53 | collect_artefacts(
54 | auto,
55 | vss,
56 | collectfiles,
57 | nsrl,
58 | keywords,
59 | volatility,
60 | metacollected,
61 | superquick,
62 | quick,
63 | symlinks,
64 | userprofiles,
65 | verbose,
66 | d,
67 | cwd,
68 | sha256,
69 | flags,
70 | system_artefacts,
71 | output_directory,
72 | verbosity,
73 | f,
74 | imgs,
75 | path,
76 | volchoice,
77 | vssmem,
78 | memtimeline,
79 | stage,
80 | )
81 | else:
82 | imgs = reorganise_artefacts(
83 | output_directory, verbosity, d, allimgs, flags, auto, volatility
84 | )
85 | for eachdir in os.listdir(output_directory):
86 | if (
87 | os.path.isdir(os.path.join(output_directory, eachdir))
88 | and eachdir != ".DS_Store"
89 | ):
90 | if len(os.listdir(os.path.join(output_directory, eachdir))) == 0:
91 | os.rmdir(os.path.join(output_directory, eachdir))
92 | if process:
93 | select_pre_process_artefacts(
94 | output_directory,
95 | verbosity,
96 | d,
97 | flags,
98 | stage,
99 | cwd,
100 | imgs,
101 | f,
102 | path,
103 | vssmem,
104 | volatility,
105 | volchoice,
106 | vss,
107 | memtimeline,
108 | collectfiles,
109 | )
110 | if os.path.exists("/opt/elrond/elrond/tools/.profiles"):
111 | os.remove("/opt/elrond/elrond/tools/.profiles")
112 | if keywords:
113 | if not os.path.exists(keywords[0]):
114 | continue_with_kw = input(
115 | "\n {} is an invalid path because it does not exist. Continue? Y/n [Y] \n".format(
116 | keywords[0]
117 | )
118 | )
119 | if continue_with_kw == "n":
120 | sys.exit()
121 | else:
122 | print(
123 |                 "\n\n -> \033[1;36mCommencing Keyword Searching Phase for processed artefacts...\033[1;m\n ----------------------------------------"
124 | )
125 | time.sleep(1)
126 | prepare_keywords(
127 | verbosity,
128 | output_directory,
129 | auto,
130 | imgs,
131 | flags,
132 | keywords,
133 | "keyword searching",
134 | )
135 | print(
136 |                 " ----------------------------------------\n -> Completed Keyword Searching Phase for processed artefacts.\n"
137 | )
138 | time.sleep(1)
139 | if analysis or extractiocs:
140 | alysdirs = []
141 | for eachdir in os.listdir(output_directory):
142 | if os.path.exists(output_directory + eachdir + "/artefacts"):
143 | alysdirs.append(output_directory + eachdir + "/artefacts")
144 | if len(alysdirs) > 0:
145 | print(
146 | "\n\n -> \033[1;36mCommencing Analysis Phase...\033[1;m\n ----------------------------------------"
147 | )
148 | time.sleep(1)
149 | for mnt, img in imgs.items():
150 | if "vss" in img.split("::")[1]:
151 | vssimage = (
152 | "'"
153 | + img.split("::")[0]
154 | + "' ("
155 | + img.split("::")[1]
156 | .split("_")[1]
157 | .replace("vss", "volume shadow copy #")
158 | + ")"
159 | )
160 | else:
161 | vssimage = "'" + img.split("::")[0] + "'"
162 | analyse_artefacts(
163 | verbosity,
164 | output_directory,
165 | img,
166 | mnt,
167 | analysis,
168 | extractiocs,
169 | vssimage,
170 | )
171 | else:
172 | print(
173 | " -> Analysis could not be conducted as there are no artefacts processed (-P), please try again.\n"
174 | )
175 | flags.append("04analysis")
176 | print(
177 | " ----------------------------------------\n -> Completed Analysis Phase.\n"
178 | )
179 | time.sleep(1)
180 | if timeline:
181 | stage, timelineimages = "timeline", []
182 | print(
183 | "\n\n -> \033[1;36mCommencing Timeline Phase...\033[1;m\n ----------------------------------------"
184 | )
185 | time.sleep(1)
186 | for img in imgs: # Identifying images for timelining
187 | if not img.split("::")[1].endswith("memory"):
188 | timelineimages.append(img.split("::")[0])
189 | if len(timelineimages) > 0:
190 | for each in os.listdir(output_directory):
191 | if each + "/" == output_directory or each == img.split("::")[0]:
192 | if not os.path.exists(
193 | output_directory + img.split("::")[0] + "/artefacts/"
194 | ):
195 | os.makedirs(
196 | output_directory + img.split("::")[0] + "/artefacts/"
197 | )
198 | for timelineimage in timelineimages:
199 | timelineexist = input(
200 | " Does a timeline already exist for '{}'? Y/n [n] ".format(
201 | timelineimage
202 | )
203 | )
204 | if timelineexist != "Y":
205 | create_plaso_timeline(
206 | verbosity, output_directory, stage, img, d, timelineimage
207 | )
208 | else:
209 |
210 | def doTimelineFile(timelinepath):
211 | if not os.path.exists(timelinepath):
212 | timelinepath = input(
213 | " '{}' does not exist and/or is an invalid csv file.\n Please provide a valid file path: ".format(
214 | timelinepath
215 | )
216 | )
217 |                                     return doTimelineFile(timelinepath)
218 | return timelinepath
219 |
220 | timelinepath = input(
221 | " Please provide the full file path of the timeline: "
222 | )
223 | timelinefile = doTimelineFile(timelinepath)
224 | if os.path.exists(".plaso"):
225 | shutil.rmtree("./.plaso")
226 | with open(timelinefile) as tlf:
227 | firstline = tlf.readline()
228 | if "Message" not in firstline and "Artefact" not in firstline:
229 | os.mkdir(".plaso")
230 | shutil.copy2(timelinefile, "./.plaso/plaso_timeline.csvtmp")
231 |                                 create_plaso_timeline(verbosity, output_directory, stage, img, d, timelineimage)
232 | else:
233 | shutil.copy2(
234 | timelinefile,
235 | output_directory
236 | + timelineimage
237 | + "/artefacts/plaso_timeline.csv",
238 | )
239 | print(" -> Completed Timeline Phase for '{}'.".format(timelineimage))
240 | entry, prnt = "{},{},{},{}\n".format(
241 | datetime.now().isoformat(), timelineimage, stage, timelineimage
242 | ), " -> {} -> {} completed for '{}'".format(
243 | datetime.now().isoformat().replace("T", " "),
244 | stage,
245 | timelineimage,
246 | )
247 | write_audit_log_entry(verbosity, output_directory, entry, prnt)
248 | print()
249 | flags.append("05timelining")
250 | print(
251 | " ----------------------------------------\n -> Completed Timelining Phase.\n"
252 | )
253 | time.sleep(1)
254 |
--------------------------------------------------------------------------------
/elrond/rivendell/core/identify.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3 -tt
2 | import os
3 | import time
4 | from datetime import datetime
5 |
6 | from rivendell.audit import write_audit_log_entry
7 | from rivendell.meta import extract_metadata
8 | from rivendell.memory.memory import process_memory
9 |
10 |
11 | def print_identification(verbosity, output_directory, disk_image, osplatform):
12 | print(" Identified platform of '{}' for '{}'.".format(osplatform, disk_image))
13 | entry, prnt = "{},{},identified platform,{}\n".format(
14 | datetime.now().isoformat(),
15 | disk_image,
16 | osplatform,
17 | ), " -> {} -> identified platform of '{}' for '{}'".format(
18 | datetime.now().isoformat().replace("T", " "),
19 | osplatform,
20 | disk_image,
21 | )
22 | write_audit_log_entry(verbosity, output_directory, entry, prnt)
23 |
24 |
25 | def identify_disk_image(verbosity, output_directory, disk_image, mount_location):
26 | if not mount_location.endswith("/"):
27 | mount_location = mount_location + "/"
28 | if len(os.listdir(mount_location)) > 0:
29 | if (
30 | "MFTMirr" in str(os.listdir(mount_location))
31 | or ("Bitmap" in str(os.listdir(mount_location)))
32 | or ("LogFile" in str(os.listdir(mount_location)))
33 | or ("Boot" in str(os.listdir(mount_location)))
34 | or ("Windows" in str(os.listdir(mount_location)))
35 | ):
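|             # fingerprint the Windows release from directory names present in the mounted volume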
36 | if "MSOCache" in str(os.listdir(mount_location)):
37 | windows_os = "Windows7"
38 | elif "Windows" in str(os.listdir(mount_location)) or "Boot" in str(
39 | os.listdir(mount_location)
40 | ):
41 | if (
42 | "BrowserCore" in str(os.listdir(mount_location + "Windows/"))
43 | or "Containers" in str(os.listdir(mount_location + "Windows/"))
44 | or "IdentityCRL" in str(os.listdir(mount_location + "Windows/"))
45 | ):
46 | windows_os = "Windows Server 2022"
47 | elif (
48 | "DsfrAdmin" in str(os.listdir(mount_location + "Windows/"))
49 | and "WaaS" in str(os.listdir(mount_location + "Windows/"))
50 | and "WMSysPr9.prx" in str(os.listdir(mount_location + "Windows/"))
51 | ):
52 | windows_os = "Windows Server 2019"
53 | elif "InfusedApps" in str(os.listdir(mount_location + "Windows/")):
54 | windows_os = "Windows Server 2016"
55 | elif "ToastData" in str(os.listdir(mount_location + "Windows/")):
56 | windows_os = "Windows Server 2012R2"
57 | else:
58 | windows_os = "Windows Server"
59 | else:
60 | windows_os = "Windows10"
61 | """
62 | else:
63 | windows_os = "Windows11"
64 | """
65 | print_identification(verbosity, output_directory, disk_image, windows_os)
66 | disk_image = disk_image + "::" + windows_os
67 | elif "root" in str(os.listdir(mount_location)) and "media" in str(
68 | os.listdir(mount_location)
69 | ):
70 | print_identification(verbosity, output_directory, disk_image, "Linux")
71 | disk_image = disk_image + "::Linux"
72 | elif os.path.exists(mount_location + "root"):
73 | if "Applications" in str(os.listdir(mount_location + "root")):
74 | print_identification(verbosity, output_directory, disk_image, "macOS")
75 | disk_image = disk_image + "::macOS"
76 | return disk_image
77 |
78 |
79 | def identify_memory_image(
80 | verbosity,
81 | output_directory,
82 | flags,
83 | auto,
84 | superquick,
85 | quick,
86 | metacollected,
87 | cwd,
88 | sha256,
89 | nsrl,
90 | f,
91 | ot,
92 | d,
93 | path,
94 | volchoice,
95 | vss,
96 | vssmem,
97 | memtimeline,
98 | ):
99 | if not auto:
100 | wtm = input(" Do you wish to process '{}'? Y/n [Y] ".format(f))
101 | else:
102 | wtm = "y"
103 | if wtm != "n":
104 | if not superquick and not quick and not metacollected:
105 | extract_metadata(
106 | verbosity,
107 | output_directory,
108 | f,
109 | path,
110 | "metadata",
111 | sha256,
112 | nsrl,
113 | )
114 | entry, prnt = (
115 | "LastWriteTime,elrond_host,elrond_stage,elrond_log_entry\n",
116 | " -> {} -> created audit log file for '{}'".format(
117 | datetime.now().isoformat().replace("T", " "), f
118 | ),
119 | )
120 | write_audit_log_entry(verbosity, output_directory, entry, prnt)
121 | if volchoice == "2.6":
122 | symbolprofile, vssmem = process_memory(
123 | output_directory,
124 | verbosity,
125 | d,
126 | "process",
127 | f,
128 | path,
129 | "2.6",
130 | vss,
131 | vssmem,
132 | memtimeline,
133 | )
134 | elif volchoice == "3":
135 | symbolprofile, vssmem = process_memory(
136 | output_directory,
137 | verbosity,
138 | d,
139 | "process",
140 | f,
141 | path,
142 | "3",
143 | vss,
144 | vssmem,
145 | memtimeline,
146 | )
147 | else:
148 | symbolprofile, vssmem = process_memory(
149 | output_directory,
150 | verbosity,
151 | d,
152 | "process",
153 | f,
154 | path,
155 | "2.6",
156 | vss,
157 | vssmem,
158 | memtimeline,
159 | )
160 | symbolprofile, vssmem = process_memory(
161 | output_directory,
162 | verbosity,
163 | d,
164 | "process",
165 | f,
166 | path,
167 | "3",
168 | vss,
169 | vssmem,
170 | memtimeline,
171 | )
172 | if "Win" in symbolprofile or "win" in symbolprofile:
173 | memoryplatform = "Windows memory"
174 | elif (
175 | "macOS" == symbolprofile
176 | or "Mac" in symbolprofile
177 | or "11." in symbolprofile
178 | or "10." in symbolprofile
179 | ):
180 | memoryplatform = "macOS memory"
181 | else:
182 | memoryplatform = "Linux memory"
183 | ot[d] = "{}::{}_{}".format(
184 | f,
185 | memoryplatform.replace(" ", "_").split("_")[1],
186 | memoryplatform.replace(" ", "_").split("_")[0],
187 | )
188 | if "02processing" not in str(flags):
189 | flags.append("02processing")
190 | os.chdir(cwd)
191 | else:
192 | print(" OK. '{}' will not be processed.\n".format(f))
193 | return ot
194 |
195 |
196 | def identify_gandalf_host(output_directory, verbosity, host_info_file):
197 | time.sleep(2)
198 | with open(host_info_file) as host_info:
199 | gandalf_host, osplatform = host_info.readline().strip().split("::")
200 | print_identification(verbosity, output_directory, gandalf_host, osplatform)
201 | return gandalf_host, osplatform
202 |
--------------------------------------------------------------------------------
/elrond/rivendell/memory/memory.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3 -tt
2 | import os
3 | import shutil
4 | import subprocess
5 | import time
6 | from datetime import datetime
7 |
8 | from rivendell.audit import write_audit_log_entry
9 | from rivendell.memory.profiles import identify_profile
10 | from rivendell.memory.volcore import (
11 | assess_volatility_choice,
12 | )
13 | from rivendell.memory.volcore import (
14 | dump_vol3_ziphex,
15 | )
16 | from rivendell.memory.volcore import (
17 | choose_custom_profile,
18 | )
19 | from rivendell.memory.volatility3.Linux import Linux
20 | from rivendell.memory.volatility3.macOS1 import macOS1
21 | from rivendell.memory.volatility3.macOS2 import macOS2
22 |
23 |
24 | def vol3_check_os(artefact, memext, plugin):
25 | vol3oscheck = str(
26 | subprocess.Popen(
27 | [
28 | "python3",
29 | "/usr/local/lib/python3.8/dist-packages/volatility3/vol.py",
30 | "-f",
31 | artefact + memext,
32 | plugin,
33 | ],
34 | stdout=subprocess.PIPE,
35 | stderr=subprocess.PIPE,
36 | ).communicate()[0]
37 | )[2:-1]
38 | return vol3oscheck
39 |
40 |
41 | def process_memory(
42 | output_directory,
43 | verbosity,
44 | d,
45 | stage,
46 | img,
47 | artefact,
48 | volchoice,
49 | vss,
50 | vssmem,
51 | memtimeline,
52 | ):
53 | if artefact.endswith("hiberfil.sys"):
54 | memext = ".raw"
55 | else:
56 | memext = ""
57 | if stage == "processing":
58 | if "vss" in artefact:
59 | mempath, volprefix, vssimage = (
60 | artefact.split("/")[-5]
61 | + "/artefacts/cooked/"
62 | + artefact.split("/")[-2]
63 | + "/memory/",
64 | " ",
65 | "'"
66 | + img.split("::")[0]
67 | + "' ("
68 | + img.split("::")[1]
69 | .split("_")[1]
70 | .replace("vss", "volume shadow copy #")
71 | + ")",
72 | )
73 | else:
74 | mempath, volprefix, vssimage = (
75 | img.split("::")[0].split("/")[-1] + "/artefacts/cooked/memory/",
76 | " ",
77 | "'" + img.split("::")[0] + "'",
78 | )
79 | else:
80 | mempath, volprefix, vssimage = (
81 | artefact.split("/")[-1],
82 | " ",
83 | "'" + img.split("::")[0] + "'",
84 | )
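|     # Volatility 2.6 needs a named profile (and offers to prune custom overlay profiles); Volatility 3 probes the image per OS and can fall back to a custom symbol table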
85 | if volchoice != "3":
86 | profile, vssmem = identify_profile(
87 | output_directory,
88 | verbosity,
89 | d,
90 | stage,
91 | img,
92 | vss,
93 | vssimage,
94 | vssmem,
95 | artefact,
96 | volchoice,
97 | volprefix,
98 | mempath,
99 | memext,
100 | memtimeline,
101 | )
102 | vssmem = profile
103 | profiledirs = []
104 | for eachfile in os.listdir(
105 | "/usr/local/lib/python2.7/dist-packages/volatility/plugins/overlays/mac/"
106 | ):
107 | if eachfile.endswith(".zip"):
108 |                 profiledirs.append("/usr/local/lib/python2.7/dist-packages/volatility/plugins/overlays/mac/" + eachfile)
109 | for eachfile in os.listdir(
110 | "/usr/local/lib/python2.7/dist-packages/volatility/plugins/overlays/linux/"
111 | ):
112 | if eachfile.endswith(".zip"):
113 |                 profiledirs.append("/usr/local/lib/python2.7/dist-packages/volatility/plugins/overlays/linux/" + eachfile)
114 | if len(profiledirs) > 0:
115 | deleteprofiles = input(
116 | "\tIt is not good practice to keep too many custom profiles in volatility as it can cause volatility to run extremely slowly\n\tWould you like to remove the following custom profiles?\n\t {}\t Y/n [Y] ".format(
117 | str(profiledirs)[2:-2].replace("', '", "\n\t ")
118 | )
119 | )
120 | if deleteprofiles != "n":
121 |             # entries in profiledirs are full paths under the Volatility 2.6
122 |             # plugins/overlays/{mac,linux} directories, so they can be removed directly
123 |             for eachprofiledir in profiledirs:
124 |                 if os.path.isdir(eachprofiledir):
125 |                     shutil.rmtree(
126 |                         eachprofiledir
127 |                     )
128 |                 else:
129 |                     os.remove(
130 |                         eachprofiledir
131 |                     )
132 | else:
133 | if artefact.endswith("hiberfil.sys"):
134 | profile, vssmem = identify_profile(
135 | output_directory,
136 | verbosity,
137 | d,
138 | stage,
139 |                 img, vss,
140 | vssimage,
141 | vssmem,
142 | artefact,
143 | volchoice,
144 | volprefix,
145 | mempath,
146 | memext,
147 | memtimeline,
148 | )
149 | vol3oscheck = vol3_check_os(artefact, memext, "windows.info.Info")
150 | if (
151 | "Windows" in vol3oscheck
152 | and "windows" in vol3oscheck
153 | and "ntkrnl" in vol3oscheck
154 | ) or (vssmem.startswith("Win")):
155 | profile, profileplatform = "Windows", "Windows"
156 | else:
157 | profile, ziphexdump1, ziphexdump2 = "macOS", macOS1(), macOS2()
158 | dump_vol3_ziphex(d, profile, ziphexdump1 + ziphexdump2)
159 | vol3oscheck = vol3_check_os(artefact, memext, "mac.list_files.List_Files")
160 | if "MacOS" in vol3oscheck and "/System/Library/" in vol3oscheck:
161 | profileplatform = "macOS"
162 | else:
163 | profile, ziphexdump = "Linux", Linux()
164 | dump_vol3_ziphex(d, profile, ziphexdump)
165 | profileplatform = "Linux"
166 | vol3oscheck = vol3_check_os(artefact, memext, "linux.elfs.Elfs")
167 | if "linux" in vol3oscheck and "sudo" in vol3oscheck:
168 | pass
169 | else:
170 | print(
171 |                     " elrond has identified that there is no available symbol table for '{}'.\n You will need to create your own symbol table; information is provided in SUPPORT.md\n Once you have created the symbol table and placed it in the respective directory (.../volatility3/volatility3/symbols[/windows/mac/linux]/), return to elrond.".format(
172 | artefact
173 | )
174 | )
175 | time.sleep(5)
176 | customprofile = choose_custom_profile(volchoice)
177 | if customprofile != "SKIPPED" and customprofile != "S":
178 | if "::Windows" in customprofile:
179 | profileplatform = "Windows"
180 | elif "::macOS" in customprofile:
181 | profileplatform = "macOS"
182 | else:
183 | profileplatform = "Linux"
184 | profile = customprofile.split("::")[0]
185 | else:
186 | profile, profileplatform = "", ""
187 | if os.path.exists(
188 | "/usr/local/lib/python3.8/dist-packages/volatility3/volatility3/symbols/__pycache__"
189 | ):
190 | shutil.rmtree(
191 | "/usr/local/lib/python3.8/dist-packages/volatility3/volatility3/symbols/__pycache__"
192 | )
193 | if os.path.exists(
194 | "/usr/local/lib/python3.8/dist-packages/volatility3/volatility3/symbols/__MACOSX"
195 | ):
196 | shutil.rmtree(
197 | "/usr/local/lib/python3.8/dist-packages/volatility3/volatility3/symbols/__MACOSX"
198 | )
199 | if stage != "processing":
200 | if profile != "":
201 | entry, prnt = "{},identification,{},{} ({})\n".format(
202 | datetime.now().isoformat(),
203 | artefact.split("/")[-1],
204 | profileplatform,
205 | profile,
206 | ), " -> {} -> identified platform as '{}' for '{}'".format(
207 | datetime.now().isoformat().replace("T", " "),
208 | profileplatform,
209 | artefact.split("/")[-1],
210 | )
211 | print(
212 | " Identified platform of '{}' for '{}'.".format(
213 | profile, artefact.split("/")[-1]
214 | )
215 | )
216 | write_audit_log_entry(verbosity, output_directory, entry, prnt)
217 | else:
218 | entry, prnt = "{},identification,{},skipped\n".format(
219 | datetime.now().isoformat(),
220 | artefact.split("/")[-1],
221 | ), " -> {} -> identification of platform SKIPPED for '{}'".format(
222 | datetime.now().isoformat().replace("T", " "),
223 | artefact.split("/")[-1],
224 | )
225 | print(
226 | " Identification SKIPPED for '{}'.".format(
227 | artefact.split("/")[-1]
228 | )
229 | )
230 | write_audit_log_entry(verbosity, output_directory, entry, prnt)
231 | if profile != "" and profileplatform != "":
232 | assess_volatility_choice(
233 | verbosity,
234 | output_directory,
235 | volchoice,
236 | volprefix,
237 | artefact,
238 | profile,
239 | mempath,
240 | memext,
241 | vssimage,
242 | memtimeline,
243 | )
244 | return profile, vssmem
245 |
--------------------------------------------------------------------------------
/elrond/rivendell/meta.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3 -tt
2 | import os
3 | import re
4 | import subprocess
5 | from datetime import datetime
6 |
7 | from rivendell.audit import write_audit_log_entry
8 |
9 |
10 | def extract_metadata(
11 | verbosity, output_directory, img, imgloc, stage, sha256, nsrl
12 | ): # comment - do not meta file multiple times
13 | for hr, _, hf in os.walk(imgloc):
14 | for intgfile in hf:
15 | metaimg, metapath, unknowngoods = (
16 | img.split("::")[0],
17 | os.path.join(hr, intgfile),
18 | {},
19 | )
20 | if not os.path.exists(output_directory + metaimg + "/meta.audit"):
21 | with open(
22 | output_directory + metaimg + "/meta.audit", "w"
23 | ) as metaimglog:
24 | metaimglog.write(
25 | "Filename,SHA256,NSRL,Entropy,Filesize,LastWriteTime,LastAccessTime,LastInodeChangeTime,Permissions,FileType\n"
26 | )
27 | with open(output_directory + metaimg + "/meta.audit", "a") as metaimglog:
28 | try:
29 | iinfo = os.stat(metapath)
30 | isize = iinfo.st_size
31 | if (
32 | isize > 0
33 | and os.path.isfile(metapath)
34 | and not os.path.islink(metapath)
35 | and (
36 | ("Inbox" not in metapath)
37 | or ("Inbox" in metapath and "." in metapath.split("/")[-1])
38 | )
39 | ):
40 | if "_vss" in img and "/vss" in metapath:
41 | if stage == "processing":
42 | metaimage = (
43 | "'"
44 | + img.split("::")[0]
45 | + "' ("
46 | + metapath.split("cooked/")[1][0:4].replace(
47 | "vss", "volume shadow copy #"
48 | )
49 | + ")"
50 | )
51 | elif stage == "metadata":
52 | metaimage = (
53 | "'"
54 | + img.split("::")[0]
55 | + "' ("
56 | + img.split("::")[1]
57 | .split("_")[1]
58 | .replace("vss", "volume shadow copy #")
59 | + ")"
60 | )
61 | else:
62 | metaimage = "'" + img.split("::")[0] + "'"
63 | if verbosity != "":
64 | print(
65 | " Extracting metadata for '{}' for {}...".format(
66 | metapath.split("/")[-1], metaimage
67 | )
68 | )
69 | metaentry = metapath + ","
70 | try:
71 | with open(metapath, "rb") as metafile:
72 | buffer = metafile.read(262144)
73 | while len(buffer) > 0:
74 | sha256.update(buffer)
75 | buffer = metafile.read(262144)
76 | metaentry = metaentry + sha256.hexdigest() + ","
77 | if nsrl and "/files/" in metapath:
78 | entry, prnt = "{},{},{},{}: {}\n".format(
79 | datetime.now().isoformat(),
80 | metaimage.replace("'", ""),
81 | "metadata",
82 | metapath,
83 | metaentry.strip(),
84 | ), " -> {} -> calculating SHA256 hash digest for '{}' and comparing against NSRL for {}".format(
85 | datetime.now().isoformat().replace("T", " "),
86 | intgfile,
87 | metaimage,
88 | )
89 | write_audit_log_entry(
90 | verbosity, output_directory, entry, prnt
91 | )
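|                             # compare the digest against the NSRL reference set; the first line of NSRLFile.txt is a header row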
92 | with open(
93 | "/opt/elrond/elrond/tools/rds_modernm/NSRLFile.txt"
94 | ) as nsrlhashfile:
95 | for i, line in enumerate(nsrlhashfile):
96 | if i != 0:
97 | sha = re.findall(r"\"([^\"]{64})\"", line)
98 | if len(sha) > 0:
99 |                                             if sha256.hexdigest() == sha[0]:
100 |                                                 unknowngoods[sha256.hexdigest()] = "Y"
101 |                                             elif sha256.hexdigest() not in unknowngoods:
102 |                                                 unknowngoods[sha256.hexdigest()] = "N"
103 | for _, state in unknowngoods.items():
104 | if state == "Y":
105 | metaentry = metaentry + "Y,"
106 | else:
107 | metaentry = metaentry + "N,"
108 | else:
109 | entry, prnt = "{},{},{},{} ({})\n".format(
110 | datetime.now().isoformat(),
111 | metaimage.replace("'", ""),
112 | "metadata",
113 | metapath,
114 | sha256.hexdigest(),
115 | ), " -> {} -> calculating SHA256 hash digest for '{}' from {}".format(
116 | datetime.now().isoformat().replace("T", " "),
117 | intgfile,
118 | metaimage,
119 | )
120 | write_audit_log_entry(
121 | verbosity, output_directory, entry, prnt
122 | )
123 | metaentry = metaentry + "unknown,"
124 | except:
125 | metaentry = metaentry + "N/A,N/A,"
126 | if (
127 | "/files/binaries/" in metapath
128 | or "/files/documents/" in metapath
129 | or "/files/archives/" in metapath
130 | or "/files/scripts/" in metapath
131 | or "/files/lnk/" in metapath
132 | or "/files/web/" in metapath
133 | or "/files/mail/" in metapath
134 | or "/files/virtual/" in metapath
135 | or "{}/user_profiles/".format(img.split("::")[0])
136 | in metapath
137 | ): # do not assess entropy or extract metadata from raw or cooked artefacts - only files
138 | try:
139 | eout = subprocess.Popen(
140 | ["densityscout", "-r", metapath],
141 | stdout=subprocess.PIPE,
142 | stderr=subprocess.PIPE,
143 | ).communicate()[0]
144 | entry, prnt = "{},{},{},{}\n".format(
145 | datetime.now().isoformat(),
146 | metaimage,
147 | "metadata",
148 | str(eout)[88:-5].split("\\n(")[1].split(")")[0],
149 | ), " -> {} -> assessing entropy for '{}' from {}".format(
150 | datetime.now().isoformat().replace("T", " "),
151 | intgfile,
152 | metaimage,
153 | )
154 | write_audit_log_entry(
155 | verbosity, output_directory, entry, prnt
156 | )
157 | if str(eout)[2:-1] != "" and "\\n(" in str(eout)[88:-5]:
158 | metaentry = (
159 | metaentry
160 | + str(eout)[88:-5]
161 | .split("\\n(")[1]
162 | .split(")")[0]
163 | + ","
164 | )
165 | else:
166 | metaentry = metaentry + "N/A,"
167 | except:
168 | metaentry = metaentry + "N/A,"
169 | try:
170 | mout, exifinfo = (
171 | subprocess.Popen(
172 | ["exiftool", metapath],
173 | stdout=subprocess.PIPE,
174 | stderr=subprocess.PIPE,
175 | ).communicate()[0],
176 | [],
177 | )
178 | if str(mout)[2:-3] != "":
179 | mout = (
180 | "File Size"
181 | + str(mout)[2:-3].split("File Size")[1]
182 | )
183 | entry, prnt = "{},{},{},{}\n".format(
184 | datetime.now().isoformat(),
185 | metaimage,
186 | "metadata",
187 | str(exifinfo)
188 | .replace(", ", "||")
189 | .replace("'", "")[1:-1],
190 | ), " -> {} -> extracting exif metadata for '{}' from {}".format(
191 | datetime.now().isoformat().replace("T", " "),
192 | intgfile,
193 | metaimage,
194 | )
195 | write_audit_log_entry(
196 | verbosity, output_directory, entry, prnt
197 | )
198 | for meta in mout.split("\\n"):
199 | exifinfo.append(
200 | meta.replace(" ", "")
201 | .replace(" ", "")
202 | .replace(" : ", ": ")
203 | .replace(": ", ":")
204 | )
205 | metaentry = (
206 | metaentry
207 | + str(
208 | str(exifinfo)
209 | .replace(", ", ",")
210 | .replace("'", "")
211 | .replace("File Size:", "")
212 | .replace("File Modification Date/Time:", "")
213 | .replace("File Access Date/Time:", "")
214 | .replace("File Inode Change Date/Time:", "")
215 | .replace("File Permissions:", "")
216 | .replace("Error:", "")
217 | .replace(" file type", "")[1:-1]
218 | ).lower()
219 | )
220 | else:
221 | metaentry = metaentry + "N/A,N/A,N/A,N/A,N/A,N/A"
222 | except:
223 | metaentry = metaentry + "N/A,N/A,N/A,N/A,N/A,N/A"
224 | metaimglog.write(metaentry + "\n")
225 | except:
226 | pass
227 |
--------------------------------------------------------------------------------
/elrond/rivendell/post/clam.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3 -tt
2 | import os
3 | import re
4 | import subprocess
5 | import time
6 | from datetime import datetime
7 |
8 | from rivendell.audit import write_audit_log_entry
9 |
10 |
11 | def run_clamscan(verbosity, output_directory, loc, img, collectfiles):
12 | if collectfiles:
13 | all_or_collected = input(
14 | " Run ClamAV against all files or just those collected for '{}'?\n [A]ll [C]ollected\t[A]ll ".format(
15 | img.split("::")[0]
16 | )
17 | )
18 | else:
19 | all_or_collected = "A"
20 | if all_or_collected != "A":
21 | clam_dir = output_directory + img.split("::")[0] + "/files"
22 | else:
23 | clam_dir = loc
24 | print(
25 | " Scanning '{}' ({}/) with ClamAV, please stand by...".format(
26 | img.split("::")[0], clam_dir
27 | )
28 | )
29 | if not os.path.exists(output_directory + img.split("::")[0] + "/analysis"):
30 | os.mkdir(output_directory + img.split("::")[0] + "/analysis")
31 | if not os.path.exists(output_directory + img.split("::")[0] + "/analysis/ClamAV"):
32 | os.mkdir(output_directory + img.split("::")[0] + "/analysis/ClamAV")
33 | clam_results = subprocess.Popen(
34 | [
35 | "clamscan",
36 | "-raio",
37 | "--gen-json",
38 | "--leave-temps",
39 | "--tempdir={}/analysis/ClamAV".format(
40 | os.path.join(output_directory, img.split("::")[0])
41 | ),
42 | "--no-summary",
43 | "--log={}/analysis/ClamAVScan.log".format(
44 | os.path.join(output_directory, img.split("::")[0])
45 | ),
46 | clam_dir,
47 | ],
48 | stdout=subprocess.PIPE,
49 | stderr=subprocess.PIPE,
50 | ).communicate()[0]
51 | with open(
52 | "{}/analysis/ClamAVScan.log".format(
53 | os.path.join(output_directory, img.split("::")[0])
54 | )
55 | ) as clam_results:
56 |         count = 0
57 |         for count, _ in enumerate(clam_results):
58 |             pass
59 |     # the scan log is treated as containing findings when it has more than one line
60 |     if count > 0:
61 |         message = "{} instances of malware identified on ".format(count)
62 |     else:
63 |         message = "No evidence of malware identified on "
64 |     print(" {}'{}'".format(message, img.split("::")[0]))
65 |     (
66 |         entry,
67 |         prnt,
68 |     ) = "{},{},clamAV,{}malware found\n".format(
69 |         datetime.now().isoformat(),
70 |         img.split("::")[0],
71 |         message.split("malware ")[0].lower(),
72 |     ), " -> {} -> ClamAV identified {}malware on '{}'".format(
73 |         datetime.now().isoformat().replace("T", " "),
74 |         message.split("malware ")[0].lower(),
75 |         img.split("::")[0],
76 |     )
77 | write_audit_log_entry(verbosity, output_directory, entry, prnt)
78 |
--------------------------------------------------------------------------------
/elrond/rivendell/post/clean.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3 -tt
2 | import os
3 | import shutil
4 | import time
5 | from datetime import datetime
6 | from zipfile import ZipFile
7 |
8 | from rivendell.audit import write_audit_log_entry
9 |
10 |
11 | def archive_artefacts(verbosity, output_directory):
12 | stage = "archiving"
13 | print(
14 | "\n\n -> \033[1;36mCommencing Archive Phase...\033[1;m\n ----------------------------------------"
15 | )
16 | for each in os.listdir(output_directory):
17 | if os.path.exists(output_directory + each + "/artefacts"):
18 | alist.append(output_directory + each)
19 | for zeach in alist:
20 | print(" Archiving artefacts for {}...".format(zeach.split("/")[-1]))
21 | entry, prnt = "{},{},{},commenced\n".format(
22 | datetime.now().isoformat(), zeach.split("/")[-1], stage
23 | ), " -> {} -> {} artefacts for '{}'".format(
24 | datetime.now().isoformat().replace("T", " "),
25 | stage,
26 | zeach.split("/")[-1],
27 | )
28 | write_audit_log_entry(verbosity, output_directory, entry, prnt)
29 | print(" Creating archive for '{}'...".format(zeach.split("/")[-1]))
30 | z = ZipFile(zeach + "/" + zeach.split("/")[-1] + ".zip", "w")
31 | for ziproot, _, zipfiles in os.walk(zeach):
32 | for zf in zipfiles:
33 | name = ziproot + "/" + zf
34 | if not name.endswith(
35 | zeach.split("/")[-1] + "/" + zeach.split("/")[-1] + ".log"
36 | ) and not name.endswith(
37 | zeach.split("/")[-1] + "/" + zeach.split("/")[-1] + ".zip"
38 | ):
39 | z.write(name)
40 | print(" -> Completed Archiving Phase for '{}'".format(zeach.split("/")[-1]))
41 | entry, prnt = "{},{},{},completed\n".format(
42 | datetime.now().isoformat(), zeach.split("/")[-1], stage
43 | ), " -> {} -> archiving completed for '{}'".format(
44 | datetime.now().isoformat().replace("T", " "),
45 | zeach.split("/")[-1],
46 | )
47 | write_audit_log_entry(verbosity, output_directory, entry, prnt)
48 | print(
49 | " ----------------------------------------\n -> Completed Archiving Phase.\n"
50 | )
51 | time.sleep(1)
52 | alist.clear()
53 |
54 |
55 | def delete_artefacts(verbosity, output_directory):
56 | stage = "deleting"
57 | print(
58 | "\n\n -> \033[1;36mCommencing Deletion Phase...\033[1;m\n ----------------------------------------"
59 | )
60 | for each in os.listdir(output_directory):
61 | if os.path.exists(output_directory + each + "/artefacts"):
62 | alist.append(output_directory + each)
63 | for deach in alist:
64 | print(" Deleting artefacts for {}...".format(deach.split("/")[-1]))
65 | entry, prnt = "{},{},{},commenced\n".format(
66 | datetime.now().isoformat(), deach.split("/")[-1], stage
67 | ), " -> {} -> {} artefacts for '{}'".format(
68 | datetime.now().isoformat().replace("T", " "),
69 | stage,
70 | deach.split("/")[-1],
71 | )
72 | write_audit_log_entry(verbosity, output_directory, entry, prnt)
73 | print(" Deleting files for '{}'...".format(deach.split("/")[-1]))
74 | for droot, ddir, dfile in os.walk(deach):
75 | for eachdir in ddir:
76 | name = droot + "/" + eachdir
77 | if not name.endswith(deach):
78 | shutil.rmtree(droot + "/" + eachdir)
79 | for eachfile in dfile:
80 | name = droot + "/" + eachfile
81 | if not name.endswith(
82 | deach.split("/")[-1] + "/" + deach.split("/")[-1] + ".log"
83 | ) and not name.endswith(
84 | deach.split("/")[-1] + "/" + deach.split("/")[-1] + ".zip"
85 | ):
86 | os.remove(droot + "/" + eachfile)
87 | print(" -> Completed Deletion Phase for {}".format(deach.split("/")[-1]))
88 | entry, prnt = "{},{},{},completed\n".format(
89 | datetime.now().isoformat(), deach.split("/")[-1], stage
90 | ), " -> {} -> deletion completed for '{}'".format(
91 | datetime.now().isoformat().replace("T", " "),
92 | deach.split("/")[-1],
93 | )
94 | write_audit_log_entry(verbosity, output_directory, entry, prnt)
95 | print(
96 | " ----------------------------------------\n -> Completed Deletion Phase.\n"
97 | )
98 | time.sleep(1)
99 | alist.clear()
100 |
101 |
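| # working list shared by archive_artefacts and delete_artefacts; populated from the output directory on each run and cleared when the phase completes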
102 | alist = []
103 |
--------------------------------------------------------------------------------
/elrond/rivendell/post/splunk/app/views/views.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3 -tt
2 | from rivendell.post.splunk.app.views.pages import create_ascii
3 | from rivendell.post.splunk.app.views.pages import create_ports
4 | from rivendell.post.splunk.app.views.pages import create_subnet
5 | from rivendell.post.splunk.app.views.html.initial_access import (
6 | create_initial_access_html,
7 | )
8 | from rivendell.post.splunk.app.views.html.execution import create_execution_html
9 | from rivendell.post.splunk.app.views.html.persistence import create_persistence_html
10 | from rivendell.post.splunk.app.views.html.privilege_escalation import (
11 | create_privilege_escalation_html,
12 | )
13 | from rivendell.post.splunk.app.views.html.defense_evasion import (
14 | create_defense_evasion_html,
15 | )
16 | from rivendell.post.splunk.app.views.html.credential_access import (
17 | create_credential_access_html,
18 | )
19 | from rivendell.post.splunk.app.views.html.discovery import create_discovery_html
20 | from rivendell.post.splunk.app.views.html.lateral_movement import (
21 | create_lateral_movement_html,
22 | )
23 | from rivendell.post.splunk.app.views.html.collection import create_collection_html
24 | from rivendell.post.splunk.app.views.html.command_and_control import (
25 | create_command_and_control_html,
26 | )
27 | from rivendell.post.splunk.app.views.html.exfiltration import create_exfiltration_html
28 | from rivendell.post.splunk.app.views.html.impact import create_impact_html
29 | from rivendell.post.splunk.app.views.xml.initial_access import create_initial_access_xml
30 | from rivendell.post.splunk.app.views.xml.execution import create_execution_xml
31 | from rivendell.post.splunk.app.views.xml.persistence import create_persistence_xml
32 | from rivendell.post.splunk.app.views.xml.privilege_escalation import (
33 | create_privilege_escalation_xml,
34 | )
35 | from rivendell.post.splunk.app.views.xml.defense_evasion import (
36 | create_defense_evasion_xml,
37 | )
38 | from rivendell.post.splunk.app.views.xml.credential_access import (
39 | create_credential_access_xml,
40 | )
41 | from rivendell.post.splunk.app.views.xml.discovery import create_discovery_xml
42 | from rivendell.post.splunk.app.views.xml.lateral_movement import (
43 | create_lateral_movement_xml,
44 | )
45 | from rivendell.post.splunk.app.views.xml.collection import create_collection_xml
46 | from rivendell.post.splunk.app.views.xml.command_and_control import (
47 | create_command_and_control_xml,
48 | )
49 | from rivendell.post.splunk.app.views.xml.exfiltration import create_exfiltration_xml
50 | from rivendell.post.splunk.app.views.xml.impact import create_impact_xml
51 |
52 |
53 | def create_htmls(sd):
54 |     header = '\n\n \n Description \n '
55 |     headings = '\n \n \n \n Information \n \n \n ID | \n Operating Systems | \n Tactics | \n Detection | \n \n \n '
56 |     footer = ' | \n \n \n \n \n \n'
57 | create_initial_access_html(sd, header, headings, footer)
58 | create_execution_html(sd, header, headings, footer)
59 | create_persistence_html(sd, header, headings, footer) # unfinished: 1 custom
60 | create_privilege_escalation_html(sd, header, headings, footer)
61 | create_defense_evasion_html(sd, header, headings, footer)
62 | create_credential_access_html(sd, header, headings, footer)
63 | create_discovery_html(sd, header, headings, footer)
64 | create_lateral_movement_html(sd, header, headings, footer)
65 | create_collection_html(sd, header, headings, footer)
66 | create_command_and_control_html(sd, header, headings, footer)
67 | create_exfiltration_html(sd, header, headings, footer)
68 | create_impact_html(sd, header, headings, footer)
69 |
70 |
71 | def create_static_pages(sd):
72 | create_ascii(sd)
73 | create_ports(sd)
74 | create_subnet(sd)
75 |
76 |
77 | def create_xmls(sd):
78 | create_initial_access_xml(sd)
79 | create_execution_xml(sd)
80 | create_persistence_xml(sd) # unfinished: 1 custom
81 | create_privilege_escalation_xml(sd)
82 | create_defense_evasion_xml(sd)
83 | create_credential_access_xml(sd)
84 | create_discovery_xml(sd)
85 | create_lateral_movement_xml(sd)
86 | create_collection_xml(sd)
87 | create_command_and_control_xml(sd)
88 | create_exfiltration_xml(sd)
89 | create_impact_xml(sd)
90 |
--------------------------------------------------------------------------------
/elrond/rivendell/post/yara.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3 -tt
2 | import os
3 | import re
4 | import subprocess
5 | import time
6 | from datetime import datetime
7 |
8 | from rivendell.audit import write_audit_log_entry
9 |
10 |
11 | def validate_yara(verbosity, output_directory, img, yara_file, binary_dir):
12 | yara_valid = str(
13 | subprocess.run(
14 | [
15 | "yara",
16 | yara_file,
17 | "-r",
18 | "/opt/elrond/elrond/tools/scripts",
19 | "-s",
20 | "-w",
21 | "-p",
22 | "32",
23 | ],
24 | capture_output=True,
25 | encoding="UTF-8",
26 | )
27 | )
28 | print()
29 |     if "error" in yara_valid:
30 |         review_again = input(
31 |             " '{}' error: {}\n It is advisable to review the syntax of the yara file. Continue? Y/n [Y] ".format(yara_file.split("/")[-1], yara_valid.split(": error: ")[-1][0:-4])
32 |         )
33 |         # re-validate once the rule file has been reviewed, or skip it on 'n'
34 |         if review_again != "n":
35 |             validate_yara(verbosity, output_directory, img, yara_file, binary_dir)
36 |     else:
37 |         invoke_yara(verbosity, output_directory, img, yara_file, binary_dir)
38 |
39 |
40 | def invoke_yara(verbosity, output_directory, img, yara_file, binary_dir):
41 | print(
42 | " Invoking '{}' against '{}', please stand by...".format(
43 | yara_file.split("/")[-1], img.split("::")[0]
44 | )
45 | )
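|     # prefix each new rule-match line in the yara output with a sentinel ("±§§±") so the results can be split per match below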
46 | yara_results = re.sub(
47 | r"\\n([^\:\$\ ]+\ )",
48 | r"±§§±\1",
49 | str(
50 | subprocess.Popen(
51 | [
52 | "yara",
53 | yara_file,
54 | "-r",
55 | "/" + binary_dir.strip("/"),
56 | "-s",
57 | "-w",
58 | "-p",
59 | "32",
60 | ],
61 | stdout=subprocess.PIPE,
62 | stderr=subprocess.PIPE,
63 | ).communicate()[0]
64 | )[2:-3],
65 | )
66 | if len(yara_results) > 0:
67 | if not os.path.exists(output_directory + img.split("::")[0] + "/analysis"):
68 | os.mkdir(output_directory + img.split("::")[0] + "/analysis")
69 | with open(
70 | output_directory + img.split("::")[0] + "/analysis/yara.csv", "w"
71 | ) as yara_out_file:
72 | yara_out_file.write(
73 | "yara_rule,yara_file,file,path,memory_address,signature_name,result\n"
74 | )
75 | for each_yara_result in yara_results.split("±§§±"):
76 | sub_results = []
77 | for each_sub_result in each_yara_result.split("\\n")[1:]:
78 | sub_results.append(each_sub_result.split(":$")[1])
79 | sub_results = list(set(sub_results))
80 | for sub_result in sub_results:
81 | (
82 | entry,
83 | prnt,
84 | ) = "{},{},yara,rule '{}' (${}) matched '{}' in '{}'\n".format(
85 | datetime.now().isoformat(),
86 | img.split("::")[0],
87 | each_yara_result.split("\\n")[0].split(" ")[0],
88 | sub_result.split(": ")[0],
89 | sub_result.split(": ")[1],
90 | str(each_yara_result.split("\\n")[0].split(" ")[1:])[2:-2].replace(
91 | "', '", " "
92 | ),
93 | ), " -> {} -> condition '${}' matched '{}' in '{}' from '{}'".format(
94 | datetime.now().isoformat().replace("T", " "),
95 | sub_result.split(": ")[0],
96 | sub_result.split(": ")[1],
97 | str(each_yara_result.split("\\n")[0].split(" ")[1:])[2:-2]
98 | .replace("', '", " ")
99 | .split("/")[-1],
100 | img.split("::")[0],
101 | )
102 | write_audit_log_entry(verbosity, output_directory, entry, prnt)
103 | for each_yara_match in each_yara_result.split("\\n")[1:]:
104 | write_result = (
105 | each_yara_result.split("\\n")[0].split(" ")[0]
106 | + ","
107 | + yara_file
108 | + ","
109 | + each_yara_result.split("\\n")[0].split(" ")[1].split("/")[-1]
110 | + ","
111 | + "/".join(
112 | each_yara_result.split("\\n")[0].split(" ")[1].split("/")[:-1]
113 | )
114 | + ","
115 | + each_yara_match.split(":")[0]
116 | + ","
117 | + each_yara_match.split(":")[1]
118 | + ","
119 | + each_yara_match.split(":")[2].strip()
120 | )
121 | with open(
122 |                     output_directory + img.split("::")[0] + "/analysis/yara.csv",
123 | "a",
124 | ) as yara_out_file:
125 | yara_out_file.write(write_result)
126 | time.sleep(0.2)
127 |
128 | print(" Done.")
129 | else:
130 | print(
131 | " No evidence found based on '{}'.".format(yara_file.split("/")[-1])
132 | )
133 | time.sleep(2)
134 |
135 |
136 | def run_yara_signatures(
137 | verbosity, output_directory, img, loc, collectfiles, yara_files
138 | ):
139 | if collectfiles:
140 | all_or_collected = input(
141 | " Run Yara signatures against all files or just those collected for '{}'?\n [A]ll [C]ollected\t[A]ll ".format(
142 | img.split("::")[0]
143 | )
144 | )
145 | else:
146 | all_or_collected = "A"
147 | if all_or_collected != "A":
148 | binary_dir = output_directory + img.split("::")[0] + "/files"
149 | else:
150 | binary_dir = loc
151 | for yara_file in yara_files:
152 | validate_yara(verbosity, output_directory, img, yara_file, binary_dir)
153 |
--------------------------------------------------------------------------------
/elrond/rivendell/process/extractions/evtx.py:
--------------------------------------------------------------------------------
1 | import json
2 | import re
3 | import subprocess
4 | from datetime import datetime
5 |
6 | from rivendell.audit import write_audit_log_entry
7 |
8 |
9 | def extract_evtx(
10 | verbosity,
11 | vssimage,
12 | output_directory,
13 | img,
14 | vss_path_insert,
15 | stage,
16 | artefact,
17 | jsondict,
18 | jsonlist,
19 | evtjsonlist,
20 | ):
21 | with open(
22 | output_directory
23 | + img.split("::")[0]
24 | + "/artefacts/cooked"
25 | + vss_path_insert
26 | + "evt/"
27 | + artefact.split("/")[-1]
28 | + ".json",
29 | "a",
30 | ) as evtjson:
31 | entry, prnt = "{},{},{},'{}' event log\n".format(
32 | datetime.now().isoformat(),
33 | vssimage.replace("'", ""),
34 | stage,
35 | artefact.split("/")[-1],
36 | ), " -> {} -> {} {} for {}".format(
37 | datetime.now().isoformat().replace("T", " "),
38 | stage,
39 | artefact.split("/")[-1],
40 | vssimage,
41 | )
42 | write_audit_log_entry(verbosity, output_directory, entry, prnt)
43 | evtout = str(
44 | subprocess.Popen(
45 | [
46 | "/usr/local/bin/evtx_dump.py",
47 | output_directory
48 | + img.split("::")[0]
49 | + "/artefacts/raw"
50 | + vss_path_insert
51 | + "evt/"
52 | + artefact.split("/")[-1],
53 | ],
54 | stdout=subprocess.PIPE,
55 | stderr=subprocess.PIPE,
56 | ).communicate()
57 | )[3:-9]
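|     # walk the XML emitted by evtx_dump.py and convert each event's attribute/value pairs into JSON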
58 | for event in evtout.split("\\r\\n"):
59 | if (
60 | event
61 | != '\\n\\n\\n'
62 | ):
63 | for evtrow in event.split("\\n"):
64 | for eachkv in re.findall(
65 |                     r"(?:\ (?P<attr_k>(?!Name)[^\=]+)\=\"(?P<attr_v>[^\"]+)\"|\<(?P<elem_k>[^\>\/\=\ ]+)(?:\ \D+\=\"\"\>|\=\"|\>)(?P<elem_v>[^\"\>]+)(?:\"\>)?\<\/[^\>]+\>|\<Data\ Name\=\"(?P<data_k>[^\"]+)\"\>(?P<data_v>[^\<]+)\<\/Data\>)",  # named groups are placeholders; only their positions are used below
66 | evtrow,
67 | ):
68 | kv = list(filter(None, eachkv))
69 | if len(kv) > 0:
70 | jsondict[kv[0]] = kv[1]
71 | if len(jsondict) > 0:
72 | jsonlist.append(json.dumps(jsondict))
73 | for eachjson in jsonlist:
74 | try:
75 | eachjson = str(eachjson).replace('""', '"-"')
76 | if '"RegistryKey"' in eachjson:
77 | insert = ', "Registry{}'.format(
78 | str(
79 | str(
80 | re.findall(
81 | r"RegistryKey(\"\: \"[^\"]+\")",
82 | eachjson,
83 | )[0]
84 | ).lower()
85 | )
86 | .replace(" ", "_")
87 | .replace('":_"', '": "')
88 | )
89 | evtjsonlist.append(json.dumps(eachjson[0:-1] + insert + "}"))
90 | else:
91 | evtjsonlist.append(json.dumps(eachjson))
92 | except:
93 | pass
94 | if len(evtjsonlist) > 0:
95 | evtjson.write(
96 | re.sub(
97 | r"\d+\s(Public Primary Certification Authority)\s-\s\w\d",
98 | r"\1",
99 | str(evtjsonlist)
100 | .replace(
101 | "\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\",
102 | "/",
103 | )
104 | .replace("\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\", "/")
105 | .replace("\\\\\\\\\\\\\\\\", "/")
106 | .replace("\\\\\\\\", "/")
107 | .replace("\\\\", "/")
108 | .replace("\\", "/")
109 | .replace('/"', '"')
110 | .replace(
111 | " ",
112 | " ",
113 | )
114 | .replace(" ", " ")
115 | .replace(" ", " ")
116 | .replace(" ", " ")
117 | .replace(" ", " ")
118 | .replace(" ", " ")
119 | .replace(" ", "")
120 | .replace('" ', '"')
121 | .replace(' "', '"')
122 | .replace("//'", "'")
123 | .replace('":"', '": "')
124 | .replace('","', '", "')
125 | .replace('"}"\', \'"{"', '"}, {"')
126 | .replace('[\'"{"', '[{"')
127 | .replace('"}"\']', '"}]')
128 | .replace('/"', "/")
129 | .replace('/, "', '/", "')
130 | .replace('/}, {"', '/"}, {"')
131 | .replace("/}]", '/"}]')
132 | .replace("ProcessName", "Process"),
133 | )
134 | )
135 | evtjsonlist.clear()
136 | jsonlist.clear()
137 |
--------------------------------------------------------------------------------
/elrond/rivendell/process/extractions/mail.py:
--------------------------------------------------------------------------------
1 | import json
2 | import re
3 |
4 |
5 | def extract_email_artefacts(eachmesg, jsondict, mailjsonlist, ThreadNo, allLinks):
6 | try:
7 | ContentTypePattern = re.compile(
8 |             r"Content\-Type\:\ (?P<ContentType>[^\;]+)\;",
9 | re.IGNORECASE,
10 | )
11 | ContentType = re.findall(ContentTypePattern, eachmesg)[0]
12 | jsondict["ContentType"] = ContentType
13 | except:
14 | pass
15 | try:
16 |         CharsetPattern = re.compile(r"charset\=(?P<Charset>[^\\]+)\\n", re.IGNORECASE)
17 | Charset = re.findall(CharsetPattern, eachmesg)[0]
18 | jsondict["Charset"] = Charset
19 | except:
20 | pass
21 | try:
22 | ContentTransferEncodingPattern = re.compile(
23 |             r"Content\-Transfer\-Encoding\:\ (?P<ContentTransferEncoding>[^\\]+)\\n",
24 | re.IGNORECASE,
25 | )
26 | ContentTransferEncoding = re.findall(ContentTransferEncodingPattern, eachmesg)[
27 | 0
28 | ]
29 | jsondict["ContentTransferEncoding"] = ContentTransferEncoding
30 | except:
31 | pass
32 | MessageContent = re.findall(
33 |         r"\\n[^\\]+\\n[^\\]+\\n\'\,\ \'(?P<MessageContent>[\S\s]+)",
34 | eachmesg,
35 | )[0]
36 | links = re.findall(r"([A-Za-z]+\:\/\/[^\"\ ]+)", MessageContent[4:-14])
37 | MessageBody = re.sub(
38 | r"\<[^\>]+\>[^\\]+",
39 | r"",
40 | re.sub(
41 | r"[\"\']\,\ [\"\']",
42 | r"",
43 | re.sub(
44 | r"\<\S[^\>]+\>",
45 | r"",
46 | re.sub(
47 | r"\\n\d+\,\ [A-Z]\,\ ",
48 | r"\\n ",
49 | re.sub(
50 | r"\\n\\n",
51 | r"\\n",
52 | re.sub(
53 | r"\\n\\n\\n",
54 | r"",
55 | MessageContent[4:-14]
56 | .replace("', '", "")
57 | .replace("\\n\\n", "\\n")
58 | .replace("\\n\\n", "\\n")
59 | .replace("\\t", "")
60 | .replace("\\n ", "\\n"),
61 | ),
62 | ),
63 | ),
64 | )
65 | .replace("\\n\\n\\n", "")
66 | .replace("\\n\\n", "")
67 | .replace("\\n ", "")
68 | .strip("\\n"),
69 | ),
70 | )
71 | for eachlink in links:
72 | allLinks.append(
73 | eachlink.replace("\\n", "")
74 | .replace("\\t", "")
75 | .strip(",")
76 | .strip("'")
77 | .strip("\\")
78 | .strip(".")
79 | .strip("")
80 | .strip("=")
81 | )
82 | Links = list(set(allLinks))
83 | jsondict["Links"], jsondict["MessageBody"] = (
84 | Links,
85 | MessageBody,
86 | )
87 | ThreadNo += 1
88 | mailjsonlist.append(json.dumps(jsondict))
89 | jsondict.clear()
90 |
--------------------------------------------------------------------------------
/elrond/rivendell/process/extractions/mft.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3 -tt
2 | import re
3 |
4 |
5 | def extract_mft(
6 | output_directory,
7 | img,
8 | vss_path_insert,
9 | mftwrite,
10 | ):
11 | with open(
12 | output_directory
13 | + img.split("::")[0]
14 | + "/artefacts/cooked"
15 | + vss_path_insert
16 | + "..journal_mft.csv"
17 | ) as mftread:
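|         # rebuild each record from the parsed $MFT CSV, keeping a reduced set of columns per output row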
18 | for eachinfo in mftread:
19 | try:
20 | mftentries = (
21 | list(
22 | str(
23 | re.sub(
24 | r"([^\"])\,([^\"])",
25 | r"\1\2",
26 | eachinfo.strip(),
27 | ),
28 | ).split(",")
29 | ),
30 | )
31 | mftrow_information = (
32 | mftentries[0][0].strip('"').strip(",")
33 | + ","
34 | + mftentries[0][1].strip('"').strip(",")
35 | + ","
36 | + mftentries[0][2].strip('"').strip(",")
37 | + ","
38 | + mftentries[0][3].strip('"').strip(",")
39 | + ","
40 | + mftentries[0][4].strip('"').strip(",")
41 | + ","
42 | + mftentries[0][5].strip('"').strip(",")
43 | + ","
44 | + mftentries[0][6].strip('"').strip(",")
45 | + ","
46 | + mftentries[0][8].strip('"').strip(",")
47 | + ","
48 | + mftentries[0][9].strip('"').strip(",")
49 | + ","
50 | + mftentries[0][10].strip('"').strip(",")
51 | + ","
52 | + mftentries[0][11].strip('"').strip(",")
53 | + ","
54 | + mftentries[0][16].strip('"').strip(",")
55 | + ","
56 | + mftentries[0][17].strip('"').strip(",")
57 | + ","
58 | + mftentries[0][18].strip('"').strip(",")
59 | + ","
60 | + mftentries[0][19].strip('"').strip(",")
61 | + ","
62 | + mftentries[0][35].strip('"').strip(",")
63 | + ","
64 | + mftentries[0][36].strip('"').strip(",")
65 | + ","
66 | + mftentries[0][37].strip('"').strip(",")
67 | + ","
68 | + mftentries[0][38].strip('"').strip(",")
69 | + ","
70 | + mftentries[0][39].strip('"').strip(",")
71 | + ","
72 | + mftentries[0][40].strip('"').strip(",")
73 | + ","
74 | + mftentries[0][41].strip('"').strip(",")
75 | + ","
76 | + mftentries[0][42].strip('"').strip(",")
77 | + ","
78 | + mftentries[0][43].strip('"').strip(",")
79 | + ","
80 | + mftentries[0][44].strip('"').strip(",")
81 | + ","
82 | + mftentries[0][45].strip('"').strip(",")
83 | + ","
84 | + mftentries[0][46].strip('"').strip(",")
85 | + ","
86 | + mftentries[0][47].strip('"').strip(",")
87 | + ","
88 | + mftentries[0][48].strip('"').strip(",")
89 | + ","
90 | + mftentries[0][49].strip('"').strip(",")
91 | + ","
92 | + mftentries[0][50].strip('"').strip(",")
93 | + ","
94 | + mftentries[0][51].strip('"').strip(",")
95 | + ","
96 | + mftentries[0][52].strip('"').strip(",")
97 | + ","
98 | + mftentries[0][53].strip('"').strip(",")
99 | + ","
100 | + mftentries[0][54].strip('"').strip(",")
101 | + ","
102 | + mftentries[0][55].strip('"').strip(",")
103 | )
104 | mftrow = (
105 | mftrow_information
106 | + ","
107 | + mftentries[0][7].strip('"').strip(",")
108 | + ","
109 | + mftentries[0][12].strip('"').strip(",")
110 | + ","
111 | + mftentries[0][13].strip('"').strip(",")
112 | + ","
113 | + mftentries[0][14].strip('"').strip(",")
114 | + ","
115 | + mftentries[0][15].strip('"').strip(",")
116 | + ","
117 | + mftentries[0][13].strip('"').strip(",")
118 | )
119 | if len(mftentries[0][20].strip('"').strip(",")) > 0:
120 | mftrow = (
121 |                         mftrow + "\n"
122 | + mftrow_information
123 | + ","
124 | + mftentries[0][20].strip('"').strip(",")
125 | + ","
126 | + mftentries[0][21].strip('"').strip(",")
127 | + ","
128 | + mftentries[0][22].strip('"').strip(",")
129 | + ","
130 | + mftentries[0][23].strip('"').strip(",")
131 | + ","
132 | + mftentries[0][24].strip('"').strip(",")
133 | + ","
134 | + mftentries[0][22].strip('"').strip(",")
135 | )
136 | if len(mftentries[0][25].strip('"').strip(",")) > 0:
137 | mftrow = (
138 |                         mftrow + "\n"
139 | + mftrow_information
140 | + ","
141 | + mftentries[0][25].strip('"').strip(",")
142 | + ","
143 | + mftentries[0][26].strip('"').strip(",")
144 | + ","
145 | + mftentries[0][27].strip('"').strip(",")
146 | + ","
147 | + mftentries[0][28].strip('"').strip(",")
148 | + ","
149 | + mftentries[0][29].strip('"').strip(",")
150 | + ","
151 | + mftentries[0][27].strip('"').strip(",")
152 | )
153 | if len(mftentries[0][30].strip('"').strip(",")) > 0:
154 | mftrow = (
155 |                         mftrow + "\n"
156 | + mftrow_information
157 | + ","
158 | + mftentries[0][30].strip('"').strip(",")
159 | + ","
160 | + mftentries[0][31].strip('"').strip(",")
161 | + ","
162 | + mftentries[0][32].strip('"').strip(",")
163 | + ","
164 | + mftentries[0][33].strip('"').strip(",")
165 | + ","
166 | + mftentries[0][34].strip('"').strip(",")
167 | + ","
168 | + mftentries[0][32].strip('"').strip(",")
169 | )
170 | if (
171 | "record number,good,active,record type,sequence number,parent file rec"
172 | not in mftrow.lower()
173 | and "NoFNRecord,NoFNRecord,NoFNRecord,NoFNRecord,NoFNRecord,NoFNRecord"
174 | not in mftrow
175 | ):
176 | mftwrite.write(mftrow + "\n")
177 | except:
178 | pass
179 |
--------------------------------------------------------------------------------
/elrond/rivendell/process/extractions/registry/dumpreg.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3 -tt
2 | import re
3 | import subprocess
4 |
5 | from rivendell.process.extractions.registry.profile import use_profile_plugins
6 | from rivendell.process.extractions.registry.system import use_system_plugins
7 |
8 |
9 | def extract_dumpreg_system(
10 | artefact,
11 | jsondict,
12 | jsonlist,
13 | regjsonlist,
14 | ):
15 | with open(
16 | artefact + ".json",
17 | "a",
18 | ) as regjson:
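|         # run RegRipper (rip.pl) with the plugin profile named after the hive, taken from the artefact filename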
19 | rgrplistj = str(
20 | str(
21 | subprocess.Popen(
22 | [
23 | "rip.pl",
24 | "-r",
25 | artefact,
26 | "-f",
27 | artefact.split("/")[-1].split(".")[2].lower(),
28 | ],
29 | stdout=subprocess.PIPE,
30 | stderr=subprocess.PIPE,
31 | ).communicate()[0]
32 | )[2:-1],
33 | )
34 | if type(rgrplistj) == str:
35 | jsonlist, regjsonlist = use_system_plugins(
36 | artefact, jsondict, jsonlist, regjsonlist, rgrplistj, [], []
37 | )
38 | if len(regjsonlist) > 0:
39 | regjson.write(
40 | str(regjsonlist)
41 | .replace(
42 | "\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\",
43 | "/",
44 | )
45 | .replace("\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\", "/")
46 | .replace("\\\\\\\\\\\\\\\\", "/")
47 | .replace("\\\\\\\\", "/")
48 | .replace("\\\\", "/")
49 | .replace("\\", "/")
50 | .replace('/"', '"')
51 | .replace(
52 | " ",
53 | " ",
54 | )
55 | .replace(" ", " ")
56 | .replace(" ", " ")
57 | .replace(" ", " ")
58 | .replace(" ", " ")
59 | .replace(" ", " ")
60 | .replace(" ", "")
61 | .replace('" ', '"')
62 | .replace(' "', '"')
63 | .replace("//'", "'")
64 | .replace('":"', '": "')
65 | .replace('","', '", "')
66 | .replace('"}"\', \'"{"', '"}, {"')
67 | .replace('[\'"{"', '[{"')
68 | .replace('"}"\']', '"}]')
69 | )
70 | regjsonlist.clear()
71 | jsonlist.clear()
72 |
73 |
74 | def extract_dumpreg_profile(
75 | artefact,
76 | jsondict,
77 | jsonlist,
78 | regjsonlist,
79 | ):
80 | with open(
81 | artefact + ".json",
82 | "a",
83 | ) as regjson:
84 | rgrplistj = (
85 | str(
86 | subprocess.Popen(
87 | [
88 | "rip.pl",
89 | "-r",
90 | artefact,
91 | "-f",
92 | artefact.split("/")[-1]
93 | .split(".")[2]
94 | .lower()
95 | .replace("dat", ""),
96 | ],
97 | stdout=subprocess.PIPE,
98 | stderr=subprocess.PIPE,
99 | ).communicate()[0]
100 |             )[2:-1]
101 | )
102 | if type(rgrplistj) == str:
103 | jsonlist, regjsonlist = use_profile_plugins(
104 | artefact,
105 | jsondict,
106 | jsonlist,
107 | regjsonlist,
108 | rgrplistj,
109 | artefact.split("/")[-1].split(".")[2].lower().replace("dat", ""),
110 | "UNKNOWN (dumpreg)",
111 | )
112 | if len(regjsonlist) > 0:
113 | regjson.write(
114 | str(regjsonlist)
115 | .replace(
116 | "\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\",
117 | "/",
118 | )
119 | .replace("\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\", "/")
120 | .replace("\\\\\\\\\\\\\\\\", "/")
121 | .replace("\\\\\\\\", "/")
122 | .replace("\\\\", "/")
123 | .replace("\\", "/")
124 | .replace('/"', '"')
125 | .replace(
126 | " ",
127 | " ",
128 | )
129 | .replace(" ", " ")
130 | .replace(" ", " ")
131 | .replace(" ", " ")
132 | .replace(" ", " ")
133 | .replace(" ", " ")
134 | .replace(" ", "")
135 | .replace('" ', '"')
136 | .replace(' "', '"')
137 | .replace("//'", "'")
138 | .replace('":"', '": "')
139 | .replace('","', '", "')
140 | .replace('"}"\', \'"{"', '"}, {"')
141 | .replace('[\'"{"', '[{"')
142 | .replace('"}"\']', '"}]')
143 | )
144 | regjsonlist.clear()
145 | jsonlist.clear()
146 |
147 |
148 | def extract_dumpreg_guess(
149 | artefact,
150 | jsondict,
151 | jsonlist,
152 | regjsonlist,
153 | ):
154 | with open(
155 | artefact + ".json",
156 | "a",
157 | ) as regjson:
158 | rgrplistguess = str(
159 | (
160 | str(
161 | subprocess.Popen(
162 | [
163 | "rip.pl",
164 | "-r",
165 | artefact,
166 | "-g",
167 | ],
168 | stdout=subprocess.PIPE,
169 | stderr=subprocess.PIPE,
170 | ).communicate()[0]
171 | )[2:-1],
172 | )
173 | )
174 | hive_guess = re.findall(
175 | r"(sam|security|software|system|ntuser|usrclass)", rgrplistguess
176 | )
177 | if len(hive_guess) > 0:
178 | guessed_hive = hive_guess[0]
179 | if (
180 | guessed_hive == "sam"
181 | or guessed_hive == "security"
182 | or guessed_hive == "software"
183 | or guessed_hive == "system"
184 | or guessed_hive == "ntuser"
185 | or guessed_hive == "usrclass"
186 | ):
187 | rgrplistj = str(
188 | str(
189 | subprocess.Popen(
190 | [
191 | "rip.pl",
192 | "-r",
193 | artefact,
194 | "-f",
195 | guessed_hive,
196 | ],
197 | stdout=subprocess.PIPE,
198 | stderr=subprocess.PIPE,
199 | ).communicate()[0]
200 | )[2:-1],
201 | )
202 | if type(rgrplistj) == str:
203 | if (
204 | guessed_hive == "sam"
205 | or guessed_hive == "security"
206 | or guessed_hive == "software"
207 | or guessed_hive == "system"
208 | ):
209 | jsonlist, regjsonlist = use_system_plugins(
210 | artefact,
211 | jsondict,
212 | jsonlist,
213 | regjsonlist,
214 | rgrplistj,
215 | [],
216 | [],
217 | )
218 | else:
219 | jsonlist, regjsonlist = use_profile_plugins(
220 | artefact,
221 | jsondict,
222 | jsonlist,
223 | regjsonlist,
224 | rgrplistj,
225 | guessed_hive.lower(),
226 | "UNKNOWN (dumpreg)",
227 | )
228 | else:
229 | guessed_hive = ""
230 | if len(regjsonlist) > 0:
231 | regjson.write(
232 | str(regjsonlist)
233 | .replace(
234 | "\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\",
235 | "/",
236 | )
237 | .replace("\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\", "/")
238 | .replace("\\\\\\\\\\\\\\\\", "/")
239 | .replace("\\\\\\\\", "/")
240 | .replace("\\\\", "/")
241 | .replace("\\", "/")
242 | .replace('/"', '"')
243 | .replace(
244 | " ",
245 | " ",
246 | )
247 | .replace(" ", " ")
248 | .replace(" ", " ")
249 | .replace(" ", " ")
250 | .replace(" ", " ")
251 | .replace(" ", " ")
252 | .replace(" ", "")
253 | .replace('" ', '"')
254 | .replace(' "', '"')
255 | .replace("//'", "'")
256 | .replace('":"', '": "')
257 | .replace('","', '", "')
258 | .replace('"}"\', \'"{"', '"}, {"')
259 | .replace('[\'"{"', '[{"')
260 | .replace('"}"\']', '"}]')
261 | )
262 | else:
263 | guessed_hive = ""
264 | regjsonlist.clear()
265 | jsonlist.clear()
266 | else:
267 | guessed_hive = ""
268 | else:
269 | guessed_hive = ""
270 | return guessed_hive
271 |
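# --- Editor's note: illustrative sketch, not part of the original module. ---
# The chained .replace() calls above flatten runs of escaped backslashes and
# repeated spaces in the stringified rip.pl output before it is written out
# as JSON. A close (not byte-identical) approximation of that clean-up as an
# unused helper (re is already imported at the top of this module):
def normalise_rip_output(raw):
    """Collapse backslash runs and repeated spaces (sketch only, never called)."""
    raw = re.sub(r"\\+", "/", raw)  # any run of backslashes becomes "/"
    return re.sub(r" {2,}", " ", raw)  # squeeze repeated spaces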
--------------------------------------------------------------------------------
/elrond/rivendell/process/extractions/shimcache.py:
--------------------------------------------------------------------------------
1 | import os
2 | import re
3 | import subprocess
4 | from datetime import datetime
5 |
6 | from rivendell.audit import write_audit_log_entry
7 |
8 |
9 | def extract_shimcache(
10 | verbosity, vssimage, output_directory, img, vss_path_insert, stage
11 | ):
12 | with open(
13 | output_directory
14 | + img.split("::")[0]
15 | + "/artefacts/cooked"
16 | + vss_path_insert
17 | + ".shimcache.csv",
18 | "a",
19 | ):
20 | entry, prnt = "{},{},{},'ShimCache'\n".format(
21 | datetime.now().isoformat(),
22 | vssimage.replace("'", ""),
23 | stage,
24 | ), " -> {} -> {} ShimCache for {}".format(
25 | datetime.now().isoformat().replace("T", " "),
26 | stage,
27 | vssimage,
28 | )
29 | write_audit_log_entry(verbosity, output_directory, entry, prnt)
30 | subprocess.Popen(
31 | [
32 | "/usr/local/bin/ShimCacheParser.py",
33 | "-i",
34 | output_directory
35 | + img.split("::")[0]
36 | + "/artefacts/raw"
37 | + vss_path_insert
38 | + ".SYSTEM",
39 | "-o",
40 | output_directory
41 | + img.split("::")[0]
42 | + "/artefacts/cooked"
43 | + vss_path_insert
44 | + ".shimcache.csv",
45 | ],
46 | stdout=subprocess.PIPE,
47 | stderr=subprocess.PIPE,
48 | ).communicate()
49 | with open(
50 | output_directory
51 | + img.split("::")[0]
52 | + "/artefacts/cooked"
53 | + vss_path_insert
54 | + ".shimcache.csv",
55 | "r",
56 | ) as shimread:
57 | for shimline in shimread:
58 | winproc = str(
59 | re.findall(r"[^\,]+\,[^\,]+(\,[^\,]+).*", shimline)[0]
60 | ).lower()
61 | tempshimline = re.sub(
62 | r"([^\,]+\,[^\,]+)(\,[^\,]+)(.*)",
63 | r"\1\2\3_-_-_-_-_-_",
64 | shimline,
65 | )
66 | newshimline = tempshimline.replace("_-_-_-_-_-_", winproc)
67 | with open(
68 | output_directory
69 | + img.split("::")[0]
70 | + "/artefacts/cooked"
71 | + vss_path_insert
72 | + "shimcache.csv",
73 | "a",
74 | ) as shimwrite:
75 | shimwrite.write(
76 | newshimline.replace("Last Modified", "LastWriteTime")
77 | .replace(",path", ",Process")
78 | .replace("\\", "/")
79 | )
80 | try:
81 | os.remove(
82 | output_directory
83 | + img.split("::")[0]
84 | + "/artefacts/raw"
85 | + vss_path_insert
86 | + ".SYSTEM"
87 | )
88 | os.remove(
89 | output_directory
90 | + img.split("::")[0]
91 | + "/artefacts/cooked"
92 | + vss_path_insert
93 | + ".shimcache.csv"
94 | )
95 | except:
96 | pass
97 |
--------------------------------------------------------------------------------
/elrond/rivendell/process/extractions/sru.py:
--------------------------------------------------------------------------------
1 | import pandas as pd
2 | import re
3 | from datetime import datetime
4 |
5 | from rivendell.audit import write_audit_log_entry
6 |
7 |
8 | def extract_sru(
9 | verbosity, vssimage, output_directory, img, vss_path_insert, stage
10 | ):
11 | with open(
12 | output_directory
13 | + img.split("::")[0]
14 | + "/artefacts/cooked"
15 | + vss_path_insert
16 | + "SRUDB.json",
17 | "a",
18 | ):
19 | entry, prnt = "{},{},{},'SRUDB.dat'\n".format(
20 | datetime.now().isoformat(),
21 | vssimage.replace("'", ""),
22 | stage,
23 | ), " -> {} -> {} SRUDB.dat for {}".format(
24 | datetime.now().isoformat().replace("T", " "),
25 | stage,
26 | vssimage,
27 | )
28 | write_audit_log_entry(verbosity, output_directory, entry, prnt)
29 | dfs = pd.read_excel(output_directory + img.split("::")[0] + "/artefacts/cooked" + vss_path_insert + "SRUDB.dat.xlsx", sheet_name=None)
30 | rows = []
31 | # cycle through each worksheet tab
32 | for name, sheet in dfs.items():
33 | headers = list(sheet)
34 | # cycle through each row in respective tab
35 | for _, row in sheet.iterrows():
36 | columns = {}
37 | # cycle through each column in respective row
38 | for header in headers:
39 | columns["System Resource"] = name
40 | columns[header] = str(row[header])
41 | rows.append(columns)
42 | with open(output_directory + img.split("::")[0] + "/artefacts/cooked" + vss_path_insert + "SRUDB.json", "w") as srujson:
43 | valid_json = re.sub(r"'(\}, \{)'(System Resource\": \")", r'"\1"\2', str(rows)[3:-3].replace("', '", '", "').replace("': '", '": "'))
44 | srujson.write('[{{"{}"}}]'.format(valid_json))
45 |
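# --- Editor's note: illustrative sketch, not part of the original module. ---
# The regex/replace surgery above rebuilds valid JSON from str(rows). Because
# each entry in `rows` is a plain dict of strings, an equivalent JSON document
# could also be produced directly with the json module; unused helper:
def write_sru_rows(rows, json_path):
    """Serialise the list of SRUDB row dicts to a JSON file (sketch only)."""
    import json  # local import; the module above does not otherwise need json

    with open(json_path, "w") as srujson:
        json.dump(rows, srujson)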
--------------------------------------------------------------------------------
/elrond/rivendell/process/extractions/usb.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3 -tt
2 | import json
3 | import re
4 |
5 |
6 | def extract_usb(
7 | output_directory,
8 | img,
9 | vss_path_insert,
10 | jsondict,
11 | jsonlist,
12 | setupdata,
13 | ):
14 | for session in setupdata.split("[Boot Session: ")[0][21:-14].split(" "):
15 | for eachkv in re.findall(r"^(?P<key>[^\=]+)\ \=\ (?P<value>[\S]+)", session):
16 | kv = list(eachkv)
17 | if len(kv) > 0:
18 | jsondict[kv[0]] = kv[1]
19 | if len(jsondict) > 0:
20 | jsonlist.append(json.dumps(jsondict))
21 | jsondict.clear()
22 | for session in setupdata.split("[Boot Session: ")[1:]:
23 | for section in session.split(">>> ["):
24 | jsondict["BootDateStart"], jsondict["BootTimeStart"] = (
25 | session[0:10],
26 | session[11:23],
27 | )
28 | if len(section) > 26:
29 | (
30 | jsondict["Artefact"],
31 | jsondict["Status"],
32 | jsondict["StartDate"],
33 | jsondict["StartTime"],
34 | ) = (
35 | section.split(">>> ")[0][0:-2],
36 | section.split("<<< ")[-1].split(": ")[1][0:-4],
37 | section.split(">>> ")[1].split("<<< ")[0][14:24],
38 | section.split(">>> ")[1].split("<<< ")[0][25:37],
39 | )
40 | try:
41 | (
42 | jsondict["SectionEndDate"],
43 | jsondict["SectionEndTime"],
44 | ) = (
45 | section.split(">>> ")[1].split("<<< ")[1][12:][0:10],
46 | section.split(">>> ")[1].split("<<< ")[1][12:][11:23],
47 | )
48 | except:
49 | pass
50 | for eachinfo in (
51 | section.split(">>> ")[1].split("<<< ")[0][38:].split("\n")
52 | ):
53 | for eachkv in re.findall(
54 | r"^\ +(?P<instruction>[A-Za-z]+)\:\ +(?P<object>[\S\ ]+)",
55 | eachinfo.replace("!", "")
56 | .replace("__", "_")
57 | .replace("__", "_")
58 | .strip("."),
59 | ):
60 | kv = list(eachkv)
61 | if len(kv) > 0:
62 | (
63 | jsondict["ActionObject"],
64 | jsondict["ActionInstruction"],
65 | ) = (kv[1], kv[0])
66 | for eachkv in re.findall(
67 | r"\ (?P<time>\d{2}\:\d{2}\:\d{2}\.\d{3})",
68 | eachinfo.replace("__", "_").replace("__", "_").strip("."),
69 | ):
70 | jsondict["ActionTime"] = eachkv
71 | if len(jsondict) > 8:
72 | jsonlist.append(json.dumps(jsondict))
73 | jsondict.clear()
74 | if len(jsonlist) > 0:
75 | with open(
76 | output_directory
77 | + img.split("::")[0]
78 | + "/artefacts/cooked"
79 | + vss_path_insert
80 | + "usb.log.json",
81 | "a",
82 | ) as usbjson:
83 | usbout = (
84 | str(jsonlist)
85 | .replace("'{", "{")
86 | .replace("}'", "}")
87 | .replace("'[", "[")
88 | .replace("]'", "]")
89 | .replace("\\\\n", "")
90 | .replace("\\\\", "\\")
91 | )
92 | usbjson.write("[{")
93 | for eachinfo in usbout.split("}, {"):
94 | usbsd, usbst = re.findall(
95 | r"\"StartDate\"\:\ \"([^\"]+)", eachinfo
96 | ), re.findall(r"\"StartTime\"\:\ \"([^\"]+)", eachinfo)
97 | if len(usbsd) == 1 and len(usbst) == 1:
98 | usb_json = str(
99 | re.sub(
100 | r"([^,:] )\"",
101 | r"\1",
102 | str(
103 | re.sub(
104 | r"\"( )",
105 | r"\1",
106 | "}, {"
107 | + str(
108 | re.sub(
109 | r"(, \"StartTime\": \"[^\"]+\")",
110 | r"\1, \"LastWriteTime\": "
111 | + '"'
112 | + usbsd[0]
113 | + " "
114 | + usbst[0]
115 | + '"',
116 | eachinfo,
117 | )
118 | ).replace('\\"', '"'),
119 | )
120 | ),
121 | )
122 | ).replace(' , "', '" , "')
123 | usb_json = re.sub(
124 | r"([^\\]\\)([^\\])",
125 | r"\1\\\2",
126 | usb_json.replace('": ""', '": "')
127 | .replace('"", "', '", "')
128 | .replace('="', "="),
129 | )
130 | usbjson.write(usb_json)
131 |
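# --- Editor's note: illustrative sketch, not part of the original module. ---
# extract_usb() above walks setupapi.dev.log: text before the first
# "[Boot Session: " header is treated as "Key = Value" metadata, and each
# ">>> [" section becomes a device-install event. The key/value step on a
# single header line, as an unused helper:
def parse_setupapi_header_line(line):
    """Return (key, value) tuples from a 'Key = Value' setupapi header line."""
    return re.findall(r"^(?P<key>[^\=]+)\ \=\ (?P<value>[\S]+)", line)
# e.g. parse_setupapi_header_line("OS Version = 10.0.19041")
#      -> [("OS Version", "10.0.19041")]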
--------------------------------------------------------------------------------
/elrond/rivendell/process/extractions/usn.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3 -tt
2 | import subprocess
3 | from datetime import datetime
4 |
5 | from rivendell.audit import write_audit_log_entry
6 |
7 |
8 | def extract_usn(
9 | verbosity,
10 | vssimage,
11 | output_directory,
12 | img,
13 | vss_path_insert,
14 | stage,
15 | artefact,
16 | ):
17 | entry, prnt = "{},{},{},'{}' usn journal\n".format(
18 | datetime.now().isoformat(),
19 | vssimage.replace("'", ""),
20 | stage,
21 | artefact.split("/")[-1].split("_")[-1],
22 | ), " -> {} -> {} '{}' for {}".format(
23 | datetime.now().isoformat().replace("T", " "),
24 | stage,
25 | artefact.split("/")[-1].split("_")[-1],
26 | vssimage,
27 | )
28 | write_audit_log_entry(verbosity, output_directory, entry, prnt)
29 | # python usn.py --csv -f usnjournal -o usn.csv
30 | """print(
31 | "python3",
32 | "/opt/elrond/elrond/tools/USN-Journal-Parser/usn.py",
33 | "--csv",
34 | "-f",
35 | artefact,
36 | "-o",
37 | output_directory
38 | + img.split("::")[0]
39 | + "/artefacts/cooked"
40 | + vss_path_insert
41 | + artefact.split("/")[-1]
42 | + ".csv",
43 | )"""
44 | subprocess.Popen(
45 | [
46 | "python3",
47 | "/opt/elrond/elrond/tools/USN-Journal-Parser/usn.py",
48 | "--csv",
49 | "-f",
50 | artefact,
51 | "-o",
52 | output_directory
53 | + img.split("::")[0]
54 | + "/artefacts/cooked"
55 | + vss_path_insert
56 | + artefact.split("/")[-1]
57 | + ".csv",
58 | ],
59 | stdout=subprocess.PIPE,
60 | stderr=subprocess.PIPE,
61 | ).communicate()
62 |
--------------------------------------------------------------------------------
/elrond/rivendell/process/extractions/wbem.py:
--------------------------------------------------------------------------------
1 | import os
2 | import re
3 | import subprocess
4 | from datetime import datetime
5 |
6 | from rivendell.audit import write_audit_log_entry
7 |
8 |
9 | def extract_wbem(
10 | verbosity,
11 | vssimage,
12 | output_directory,
13 | img,
14 | vss_path_insert,
15 | stage,
16 | artefact,
17 | ):
18 | entry, prnt = "{},{},{},'{}' wbem evidence\n".format(
19 | datetime.now().isoformat(),
20 | vssimage.replace("'", ""),
21 | stage,
22 | artefact.split("/")[-1].split("_")[-1],
23 | ), " -> {} -> {} '{}' for {}".format(
24 | datetime.now().isoformat().replace("T", " "),
25 | stage,
26 | artefact.split("/")[-1].split("_")[-1],
27 | vssimage,
28 | )
29 | write_audit_log_entry(verbosity, output_directory, entry, prnt)
30 | subprocess.Popen( # CCM_RUA_Finder
31 | [
32 | "python2.7",
33 | "/opt/elrond/elrond/tools/WMI_Forensics/CCM_RUA_Finder.py",
34 | "-i",
35 | artefact,
36 | "-o",
37 | output_directory
38 | + img.split("::")[0]
39 | + "/artefacts/cooked"
40 | + vss_path_insert
41 | + "wbem/."
42 | + artefact.split("/")[-1]
43 | + ".tsv",
44 | ],
45 | stdout=subprocess.PIPE,
46 | stderr=subprocess.PIPE,
47 | ).communicate()
48 | if os.path.exists(
49 | output_directory
50 | + img.split("::")[0]
51 | + "/artefacts/cooked"
52 | + vss_path_insert
53 | + "wbem/."
54 | + artefact.split("/")[-1]
55 | + ".tsv"
56 | ):
57 | with open(
58 | output_directory
59 | + img.split("::")[0]
60 | + "/artefacts/cooked"
61 | + vss_path_insert
62 | + "wbem/."
63 | + artefact.split("/")[-1]
64 | + ".tsv"
65 | ) as wbem_tsv:
66 | with open(
67 | output_directory
68 | + img.split("::")[0]
69 | + "/artefacts/cooked"
70 | + vss_path_insert
71 | + "wbem/"
72 | + artefact.split("/")[-1]
73 | + "-CCM_RUA.csv",
74 | "a",
75 | ) as wbem_csv:
76 | for tab_line in wbem_tsv:
77 | wbem_csv.write(tab_line.replace(",", "‚").replace("\t", ","))
78 | # wbem_csv.write("{}\n".format(tab_line.replace(",", "‚").replace("\t", ","))) # during testing, there were no lines so unsure if a newline is provided automatically or not
79 | os.remove(
80 | output_directory
81 | + img.split("::")[0]
82 | + "/artefacts/cooked"
83 | + vss_path_insert
84 | + "wbem/."
85 | + artefact.split("/")[-1]
86 | + ".tsv"
87 | )
88 | try:
89 | pywmipf = subprocess.Popen( # PyWMIPersistenceFinder
90 | [
91 | "python2.7",
92 | "/opt/elrond/elrond/tools/WMI_Forensics/PyWMIPersistenceFinder.py",
93 | artefact,
94 | ],
95 | stdout=subprocess.PIPE,
96 | stderr=subprocess.PIPE,
97 | ).communicate()[0]
98 | persistence_pairs = (
99 | str(pywmipf)[8:-3]
100 | .split("Enumerating Filters and Consumers...\\n\\n ")[1]
101 | .split("\\n\\n\\n Thanks for using PyWMIPersistenceFinder")[0]
102 | .replace(":\\n\\n ", "::")
103 | .replace("\\n ", " ")
104 | .replace("\\n ", "||")
105 | .replace("Filter: \\n\\t\\t", "")
106 | .replace("\\n\\t\\t", "||")
107 | .replace(": ", "::")
108 | .replace(": ", "::")
109 | .replace("Bindings::", "\nBindings::")
110 | .replace("\\\\n", "")
111 | .replace("\\n", "")
112 | )
113 | with open(
114 | output_directory
115 | + img.split("::")[0]
116 | + "/artefacts/cooked"
117 | + vss_path_insert
118 | + "wbem/"
119 | + artefact.split("/")[-1]
120 | + "-Persistence.csv",
121 | "a",
122 | ) as wbemcsv:
123 | elements = []
124 | headers = (
125 | str(re.findall(r"([^:\|]+)::", persistence_pairs))[2:-2]
126 | .strip("\\n")
127 | .replace("', '", ",")
128 | )
129 | elements.append(
130 | str(re.findall(r"::([^:\|]+)", persistence_pairs))[2:-2].replace(
131 | "', '", ","
132 | )
133 | )
134 | wbemcsv.write("{}\n".format(headers))
135 | for element in elements:
136 | wbemcsv.write("{}\n".format(element))
137 | # need to add additional fields, such as timestamp and host
138 | except:
139 | pass
140 |
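# --- Editor's note: illustrative sketch, not part of the original module. ---
# The loop above converts the CCM_RUA_Finder TSV to CSV by swapping literal
# commas for U+201A before replacing tabs with commas. The csv module offers
# a quoting-based alternative for the same conversion; unused helper:
def tsv_to_csv(tsv_path, csv_path):
    """Rewrite a tab-separated file as a quoted CSV (sketch only)."""
    import csv  # local import; the module above does not otherwise need csv

    with open(tsv_path, newline="") as src, open(csv_path, "w", newline="") as dst:
        writer = csv.writer(dst)
        for row in csv.reader(src, delimiter="\t"):
            writer.writerow(row)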
--------------------------------------------------------------------------------
/elrond/rivendell/process/extractions/wmi.py:
--------------------------------------------------------------------------------
1 | import subprocess
2 | from datetime import datetime
3 |
4 | from rivendell.audit import write_audit_log_entry
5 |
6 |
7 | def extract_wmi(
8 | verbosity,
9 | vssimage,
10 | output_directory,
11 | img,
12 | vss_path_insert,
13 | stage,
14 | artefact,
15 | jsondict,
16 | jsonlist,
17 | wmijsonlist,
18 | ):
19 | with open(
20 | output_directory
21 | + img.split("::")[0]
22 | + "/artefacts/cooked"
23 | + vss_path_insert
24 | + "wmi/"
25 | + artefact.split("/")[-1]
26 | + ".json",
27 | "a",
28 | ) as wmijson:
29 | entry, prnt = "{},{},{},'{}' wmi evidence\n".format(
30 | datetime.now().isoformat(),
31 | vssimage.replace("'", ""),
32 | stage,
33 | artefact.split("/")[-1].split("_")[-1],
34 | ), " -> {} -> {} '{}' for {}".format(
35 | datetime.now().isoformat().replace("T", " "),
36 | stage,
37 | artefact.split("/")[-1].split("_")[-1],
38 | vssimage,
39 | )
40 | write_audit_log_entry(verbosity, output_directory, entry, prnt)
41 | # experiencing error: event_header_chunk = ChunkParser.parse(chunks[0].payload)
42 | subprocess.Popen(
43 | [
44 | "python3",
45 | "/opt/elrond/elrond/tools/etl-parser/bin/etl2xml",
46 | "-i",
47 | artefact,
48 | "-o",
49 | output_directory
50 | + img.split("::")[0]
51 | + "/artefacts/cooked"
52 | + vss_path_insert
53 | + "wmi/."
54 | + artefact.split("/")[-1]
55 | + ".xml",
56 | ],
57 | stdout=subprocess.PIPE,
58 | stderr=subprocess.PIPE,
59 | ).communicate()
60 | wmijsonlist.clear()
61 | jsonlist.clear()
62 |
--------------------------------------------------------------------------------
/elrond/rivendell/process/linux.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3 -tt
2 | import os
3 | import re
4 | import shlex
5 | import subprocess
6 | from datetime import datetime
7 |
8 | from rivendell.audit import write_audit_log_entry
9 |
10 |
11 | def tidy_journalentry(entry):
12 | journalentry = '{{"{}'.format(entry[0:-1].replace('\\n"', '"'))
13 | journalentry = re.sub(r'([^\\])\\("[^\\])', r"\1\2", journalentry)
14 | journalentry = re.sub(
15 | r'("MESSAGE":")([^_]+)"(\)?",")', r"\1\2'\3", journalentry
16 | )
17 | journalentry = re.sub(
18 | r'("MESSAGE":")([^_]+)"([^"\']+\'\)?",")', r"\1\2'\3", journalentry
19 | )
20 | journalentry = re.sub(
21 | r'("MESSAGE":")([^_]+)"(, \'[^"\']+\'\)?",")', r"\1\2'\3", journalentry
22 | )
23 | journalcount = 0
24 | while journalcount < 500:
25 | journalentry = re.sub(r'("MESSAGE":"[^_]+)"(, )"', r"\1'\2'", journalentry)
26 | journalcount += 1
27 | journalentry = re.sub(
28 | r'("MESSAGE":")([^_]+)"([^"\']+\', \')', r"\1\2'\3", journalentry
29 | )
30 | journalentry = re.sub(
31 | r'("MESSAGE":"[^"]+)"([^"]+)"([^"]+",")', r"\1'\2'\3", journalentry
32 | )
33 | journalentry = re.sub(r'("MESSAGE":"[^>]+)"(",")', r"\1'\2", journalentry)
34 | journalentry = re.sub(
35 | r'("MESSAGE":"[^>]+)"([^"\']+\'",")', r"\1'\2", journalentry
36 | )
37 | journalcount = 0
38 | while journalcount < 500:
39 | journalentry = re.sub(r'("MESSAGE":"[^>:]+)" ', r"\1' ", journalentry)
40 | journalcount += 1
41 | journalentry = re.sub(r'(\' [^=]+=)"([^"\']+)', r"\1'\2", journalentry)
42 | journalentry = re.sub(r'([^=:]+=)"([^"\']+)', r"\1'\2", journalentry)
43 | journalentry = re.sub(r'(\'[^"]+)" ', r"\1' ", journalentry)
44 | journalentry = re.sub(r'(=\'[^"]+)"(",")', r"\1'\2", journalentry)
45 | journalentry = re.sub(r'(","MESSAGE":"[^,]+)" ', r"\1 ", journalentry)
46 | journalentry = re.sub(r'("MESSAGE":"[^"]+ )"([^",]+)"', r"\1'\2'", journalentry)
47 | journalentry = re.sub(r'("MESSAGE":"[^"]+ )"([^",]+)"', r"\1'\2'", journalentry)
48 | journalentry = re.sub(r'("MESSAGE":"[^"]+ )"([^",]+)"', r"\1'\2'", journalentry)
49 | journalentry = re.sub(r'\'([^"\']+)"(\) \()"', r"\1'\2'", journalentry)
50 | journalentry = re.sub(r'(\'[^\']+\')(,"[^"]+":")', r'\1"\2', journalentry)
51 | journalentry = re.sub(
52 | r'(\'[^\']+\'[^\]]+)"(\][^"]+",")', r"\1'\2", journalentry
53 | )
54 | journalentry = re.sub(
55 | r'(","[^"]+":")"([^"]+)"(","_)', r"\1'\2'\3", journalentry
56 | )
57 | journalentry = re.sub(
58 | r'(\'[^\']+\'[^\']+)"(\) \([^\)]+\)"\})', r"\1'\2", journalentry
59 | )
60 | journalentry = re.sub(r'([^=]+=\'[^"]+)"("\})', r"\1'\2", journalentry)
61 | journalentry = re.sub(r'( \' [^\']+\'[^"]+)"\)(","_)', r"\1'\2", journalentry)
62 | journalentry = re.sub(
63 | r'((?:MESSAGE|SYSLOG_RAW)":\[)([^\]]+)(\],"_)', r'\1"\2"\3', journalentry
64 | )
65 | journalentry = re.sub(r'("[^\']+)\'(:"")', r'\1"\2', journalentry)
66 | journalentry = re.sub(r'(":"[^"]+)"([^"]+)"(")', r"\1'\2'\3", journalentry)
67 | journalentry = re.sub(r'("MESSAGE":"[^"]+)(\},)', r'\1"\2', journalentry)
68 | journalentry = re.sub(r'("[^\'"]+)\'(:)\'(",")', r'\1"\2"\3', journalentry)
69 | while journalcount < 500:
70 | journalentry = re.sub(
71 | r'(","MESSAGE":"[^"]+)"([^\}]+\}",")', r"\1'\2'\3 ", journalentry
72 | )
73 | journalcount += 1
74 | journalentry = re.sub(
75 | r'(":"[^"]+)"([^"]+)"([^"]+",")', r"\1'\2'\3", journalentry
76 | )
77 | journalentry = journalentry.replace("\\'", "'")
78 | journalentry = journalentry.replace("\\\\'", "'")
79 | journalentry = journalentry.replace("\\\\\\'", "'")
80 | journalentry = re.sub(r'("MESSAGE":"[^"\}]+\')(\})', r'\1"\2', journalentry)
81 | journalentry = re.sub(r'(, [^"\']+: )"([^"]+)"', r"\1'\2'", journalentry)
82 | journalentry = re.sub(
83 | r'(\()"([^"]+)", "([^"]+)"(\))', r"\1'\2', '\3'\4", journalentry
84 | )
85 | journalentry = re.sub(r'(\' \'[^"\']+)"("\})', r"\1'\2", journalentry)
86 | journalentry = re.sub(r'(\' \'[^"\']+\')\'(,")', r'\1"\2', journalentry)
87 | journalentry = re.sub(r'(":"[^"]+)"([^"]+"\})', r"\1'\2", journalentry)
88 | journalentry = re.sub(r'(":")"([^"]+)"(",")', r"\1'\2'\3", journalentry)
89 | journalentry = re.sub(r'(\', \'[^"]+)"("\})', r"\1'\2", journalentry)
90 | journalentry = re.sub(r'(":"[^"]+\')(,"[^"]+":")', r'\1"\2', journalentry)
91 | journalentry = re.sub(r'(")"([^"]+)"("\})', r"\1'\2'\3", journalentry)
92 | journalentry = re.sub(r'(","[^"]+":"[^"]+)"([^"]+",")', r"\1'\2", journalentry)
93 | journalentry = journalentry.replace(' "--', " '--")
94 | journalentry = journalentry.replace('" --', "' --")
95 | journalentry = journalentry.replace('" \\"--', "' '--")
96 | journalentry = journalentry.replace("\\'", "'")
97 | journalentry = re.sub(r"([^\\]\\)'", r"\1'", journalentry)
98 | journalentry = re.sub(r"([^\\]\\)x", r"\1\\x", journalentry)
99 | journalentry = re.sub(r"( [^=]+=)\\\\\"([^\"']+)(?:'|\")", r"\1'\2'", journalentry)
100 | journalentry = re.sub(r"([^\\])\\(' [^=]+=)", r"\1\2", journalentry)
101 | journalentry = re.sub(r"([^\\'\"])\\(\"\},)", r"\1'\2", journalentry)
102 | journalentry = re.sub(r"(\":\"[^\"]+)\\\\\"([^\"']+)(\")", r"\1'\2'\3", journalentry)
103 | journalentry = re.sub(r'(")\\\\"([^"]+)', r'\1\2', journalentry)
104 | journalentry = re.sub(r'\\\\"([^"]+)', r"'\1'", journalentry)
105 | journalentry = re.sub(r"('[^']+)\\\\(')", r"\1\2", journalentry)
106 | journalentry = re.sub(r"(')\"([^\}])", r"\1\2", journalentry)
107 | journalentry = re.sub(r'(\')(,"[^"]+":")', r"\1\2", journalentry)
108 | journalentry = re.sub(r"([^\\])\\(')", r"\1\2", journalentry)
109 | journalentry = re.sub(r'(":"[^"]+\')(,")', r'\1"\2', journalentry)
110 | journalentry = journalentry.strip("\\")
111 | journalentry = '{}"}},\n'.format(journalentry)
112 | journalentry = re.sub(r'([^\"])("\}\},)', r'\1"\2', journalentry)
113 | journalentry = journalentry.replace('":"},\n', '":""},\n')
114 | journalentry = journalentry.replace('\\\\""},', "'\"},")
115 | return journalentry
116 |
117 |
118 | def process_journal(
119 | verbosity, vssimage, output_directory, img, vss_path_insert, stage, artefact
120 | ):
121 | journal_tmpfile = output_directory + img.split("::")[0] + "/artefacts/cooked" + vss_path_insert + ".journalctl.json"
122 | journal_outfile = output_directory + img.split("::")[0] + "/artefacts/cooked" + vss_path_insert + "journalctl.json"
123 | if not os.path.exists(
124 | journal_outfile
125 | ):
126 | try:
127 | os.makedirs(
128 | output_directory
129 | + img.split("::")[0]
130 | + "/artefacts/cooked"
131 | + vss_path_insert
132 | )
133 | except:
134 | pass
135 | journal_command = shlex.split(
136 | 'journalctl -D {} --all --output=json'.format(
137 | "/".join(artefact.split("/")[0:-1])
138 | )
139 | )
140 | journal_command_output = str(subprocess.Popen(
141 | journal_command,
142 | stdout=subprocess.PIPE,
143 | stderr=subprocess.PIPE,
144 | ).communicate()[0])[2:-3]
145 | with open(
146 | output_directory
147 | + img.split("::")[0]
148 | + "/artefacts/cooked"
149 | + vss_path_insert
150 | + ".journalctl.json",
151 | "a",
152 | ) as journaljson:
153 | if verbosity != "":
154 | print(
155 | " Processing systemd journals for {}...".format(
156 | vssimage,
157 | )
158 | )
159 | entry, prnt = "{},{},{},systemd journals\n".format(
160 | datetime.now().isoformat(),
161 | vssimage.replace("'", ""),
162 | stage,
163 | ), " -> {} -> {} systemd journals from {}".format(
164 | datetime.now().isoformat().replace("T", " "),
165 | stage,
166 | vssimage,
167 | )
168 | write_audit_log_entry(verbosity, output_directory, entry, prnt)
169 | journal_command_output = re.sub(r'(:)(\[[^\["]+,\d+\])(\}\\n\{")', r'\1"\2"\3', journal_command_output)
170 | for entry in journal_command_output[2:-2].split('"}\\n{"'):
171 | journalentry = tidy_journalentry(entry)
172 | with open(journal_tmpfile, "a") as journaljson:
173 | journaljson.write(journalentry)
174 | with open(journal_tmpfile) as journaljson:
175 | journal = journaljson.read()
176 | with open(journal_outfile, "w") as finaljournaljson:
177 | finaljournaljson.write("[{}]".format(journal[0:-2]))
178 | os.remove(journal_tmpfile)
179 |
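# --- Editor's note: illustrative sketch, not part of the original module. ---
# `journalctl --output=json` emits one JSON object per line, so when the
# exported MESSAGE fields are well-formed the entries can also be loaded
# directly; tidy_journalentry() above exists to repair entries whose embedded
# quotes break that assumption. The direct route, as an unused helper:
def load_journal_entries(journal_dir):
    """Return journal entries from `journal_dir` as a list of dicts."""
    import json  # local import; this module does not otherwise need json

    raw = subprocess.Popen(
        ["journalctl", "-D", journal_dir, "--all", "--output=json"],
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
    ).communicate()[0]
    return [
        json.loads(line)
        for line in raw.decode(errors="replace").splitlines()
        if line.strip()
    ]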
--------------------------------------------------------------------------------
/elrond/rivendell/process/mac.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3 -tt
2 | import json
3 | import os
4 | import plistlib
5 | import re
6 | from datetime import datetime
7 |
8 | from rivendell.audit import write_audit_log_entry
9 | from rivendell.process.extractions.plist import (
10 | format_plist_extractions,
11 | )
12 |
13 |
14 | def repair_malformed_plist(plist_out):
15 | plist_out = re.sub(r"'(: \d+, )'", r'"\1"', plist_out)
16 | plist_out = re.sub(r"'(: )((?:True|False))(, )'", r'"\1"\2"\3"', plist_out)
17 | plist_out = plist_out.replace('[\\"', '["').replace('\\"]', '"]')
18 | plist_out = plist_out.replace('": "[{"', '": [{"').replace('"}]", "', '"}], "')
19 | plist_out = plist_out.replace('": "[\'', '": ["').replace('\']", "', '"], "')
20 | plist_out = re.sub(r"'(: \d+\}\])\"(, \")", r'"\1\2', plist_out)
21 | plist_out = (
22 | plist_out.replace(']"}]', '"]}]')
23 | .replace('""]}]', '"]}]')
24 | .replace('": "[\'', '": ["')
25 | .replace('\']", "', '"], "')
26 | .replace('": [">', '": ["')
27 | .replace('": "}, \'', '": {{}}, "')
28 | .replace("': [", '": [')
29 | .replace("]'}]\"}}]", "]}]}}]")
30 | .replace("': ['", '": ["')
31 | .replace('\']", "', '"], "')
32 | .replace("'}, '", '"}, "')
33 | .replace("': {'", '": {"')
34 | .replace("'}, {'", '"}, {"')
35 | .replace('": "[\'', '": ["')
36 | .replace("'}}, {'", '"}}, {"')
37 | .replace('": "[', '": ["')
38 | .replace('": [""', '": ["')
39 | .replace('\']"}], "', '"]}], "')
40 | .replace("']\"}}", '"]}}')
41 | .replace('": ["]", "', '": [], "')
42 | .replace('": ["]"}', '": []}')
43 | )
44 | plist_out = re.sub(r"': (-?\d+)\}, '", r'": "\1"}, "', plist_out)
45 | plist_out = re.sub(r"(\": \[\{\"[^']+)'(: )(b')", r'\1"\2"\3', plist_out)
46 | plist_out = re.sub(r'\'(: \d+\}\])"(\})', r'"\1\2', plist_out)
47 | plist_out = plist_out.replace('": [", "', '": ["')
48 | plist_out = re.sub(r'(\w+)(\])"([\}\]]{1,2})', r'\1"\2\3', plist_out)
49 | plist_out = plist_out.replace('\']"}, "', '"]}, "')
50 | plist_out = re.sub(r"(\w+)'(], \")", r'\1"\2', plist_out)
51 | plist_out = re.sub(r"(\w+)(: )(\[')", r'\1"\2"\3', plist_out)
52 | plist_out = plist_out.replace('[\\"', '["').replace('\\"]', '"]')
53 | plist_out = re.sub(r"(\w+)(\])(, \"\w+)", r"\1'\2\"\3", plist_out)
54 | plist_out = re.sub(r'(\w+)\'\}\]"\}, \{"', r'\1"}]}, {"', plist_out)
55 | plist_out = re.sub(
56 | r'(": "[^"]+", "[^\']+)\'(: )([^,]+)(, )\'([^"]+": ")',
57 | r'\1"\2"\3"\4"\5',
58 | plist_out,
59 | )
60 | plist_out = re.sub(r'(\w+)\'(\}\])"(\})', r'\1"\2\3', plist_out)
61 | plist_out = re.sub(
62 | r"(\d+\])'(\}\])\"(\})",
63 | r"\1\2\3",
64 | plist_out)
65 | plist_out = re.sub(r'(\w+)\'(\])"(\})', r'\1"\2\3', plist_out)
66 | plist_out = plist_out.replace("', {'", '", {"')
67 | plist_out = re.sub(r"'(: \d+}, {)'", r'"\1"', plist_out)
68 | return plist_out
69 |
70 |
71 | def process_plist(
72 | verbosity, vssimage, output_directory, img, vss_path_insert, stage, artefact
73 | ):
74 | if not os.path.exists(
75 | output_directory
76 | + img.split("::")[0]
77 | + "/artefacts/cooked"
78 | + vss_path_insert
79 | + "plists/"
80 | + artefact.split("/")[-1]
81 | + ".json"
82 | ):
83 | try:
84 | os.makedirs(
85 | output_directory
86 | + img.split("::")[0]
87 | + "/artefacts/cooked"
88 | + vss_path_insert
89 | + "plists"
90 | )
91 | except:
92 | pass
93 | with open(
94 | output_directory
95 | + img.split("::")[0]
96 | + "/artefacts/cooked"
97 | + vss_path_insert
98 | + "plists/"
99 | + artefact.split("/")[-1]
100 | + ".json",
101 | "a",
102 | ) as plistjson:
103 | try:
104 | with open(artefact, "rb") as plist:
105 | plistdata = plistlib.load(plist)
106 | if verbosity != "":
107 | print(
108 | " Processing '{}' plist for {}...".format(
109 | artefact.split("/")[-1].split("+")[-1],
110 | vssimage,
111 | )
112 | )
113 | entry, prnt = "{},{},{},'{}' plist file\n".format(
114 | datetime.now().isoformat(),
115 | vssimage.replace("'", ""),
116 | stage,
117 | artefact.split("/")[-1].split("+")[-1],
118 | ), " -> {} -> {} plist file '{}' from {}".format(
119 | datetime.now().isoformat().replace("T", " "),
120 | stage,
121 | artefact.split("/")[-1].split("+")[-1],
122 | vssimage,
123 | )
124 | write_audit_log_entry(verbosity, output_directory, entry, prnt)
125 | pliststr = format_plist_extractions(plistdata)
126 | if (
127 | '"Program"' in pliststr
128 | and '"ProgramArguments"' in pliststr
129 | and '"Label"' in pliststr
130 | ):
131 | insert = ', "Process{}, "CommandLine{}, "Plist{}'.format(
132 | str(
133 | str(
134 | re.findall(
135 | r"Program(\"\: \"[^\"]+\")",
136 | pliststr,
137 | )[0]
138 | ).lower()
139 | ),
140 | str(
141 | str(
142 | str(
143 | re.findall(
144 | r"ProgramArguments(\"\: \"\[[^\]]+\])",
145 | pliststr,
146 | )[0]
147 | ).lower()
148 | )
149 | )
150 | .replace('"', "")
151 | .replace(",", ""),
152 | str(
153 | str(
154 | re.findall(r"Label(\"\: \"[^\"]+\")", pliststr)[0]
155 | ).lower()
156 | ),
157 | )
158 | plistout = str(pliststr[0:-1] + insert + "}")
159 | elif '"Program"' in pliststr and '"ProgramArguments"' in pliststr:
160 | insert = ', "Process{}", "CommandLine{}'.format(
161 | str(
162 | str(
163 | re.findall(
164 | r"Program(\"\: \"[^\"]+\")",
165 | pliststr,
166 | )[0]
167 | ).lower()
168 | ),
169 | str(
170 | str(
171 | str(
172 | re.findall(
173 | r"ProgramArguments(\"\: \"\[[^\]]+\])",
174 | pliststr,
175 | )[0]
176 | ).lower()
177 | )
178 | )
179 | .replace('"', "")
180 | .replace(",", ""),
181 | )
182 | plistout = str(pliststr[0:-1] + insert + "}")
183 | elif '"Program"' in pliststr and '"Label"' in pliststr:
184 | insert = ', "Process{}", "Plist{}'.format(
185 | str(
186 | str(
187 | re.findall(
188 | r"Program(\"\: \"[^\"]+\")",
189 | pliststr,
190 | )[0]
191 | ).lower()
192 | ),
193 | str(
194 | str(
195 | re.findall(r"Label(\"\: \"[^\"]+\")", pliststr)[0]
196 | ).lower()
197 | ),
198 | )
199 | plistout = str(pliststr[0:-1] + insert + "}")
200 | elif '"ProgramArguments"' in pliststr and '"Label"' in pliststr:
201 | insert = ', "CommandLine{}, "Plist{}'.format(
202 | str(
203 | str(
204 | str(
205 | re.findall(
206 | r"ProgramArguments(\"\: \"\[[^\]]+\])",
207 | pliststr,
208 | )[0]
209 | ).lower()
210 | )
211 | )
212 | .replace('"', "")
213 | .replace(",", ""),
214 | str(
215 | str(
216 | re.findall(r"Label(\"\: \"[^\"]+\")", pliststr)[0]
217 | ).lower()
218 | ),
219 | )
220 | plistout = str(pliststr[0:-1] + insert + "}")
221 | elif '"Program"' in pliststr:
222 | insert = ', "Process{}"'.format(
223 | str(
224 | str(
225 | re.findall(
226 | r"Program(\"\: \"[^\"]+\")",
227 | pliststr,
228 | )[0]
229 | ).lower()
230 | )
231 | )
232 | plistout = str(pliststr[0:-1] + insert + "}")
233 | elif '"ProgramArguments"' in pliststr:
234 | insert = ', "Command{}'.format(
235 | str(
236 | str(
237 | str(
238 | re.findall(
239 | r"ProgramArguments(\"\: \"\[[^\]]+\])",
240 | pliststr,
241 | )[0]
242 | ).lower()
243 | )
244 | )
245 | .replace('"', "")
246 | .replace(",", "")
247 | )
248 | plistout = str(pliststr[0:-1] + insert + "}")
249 | elif '"Label"' in pliststr:
250 | insert = ', "Plist{}'.format(
251 | str(
252 | str(
253 | re.findall(r"Label(\"\: \"[^\"]+\")", pliststr)[0]
254 | ).lower()
255 | )
256 | )
257 | plistout = str(pliststr[0:-1] + insert + "}")
258 | else:
259 | plistout = pliststr
260 | plist_out = (
261 | plistout.replace("', '", '", "')
262 | .replace("': '", '": "')
263 | .replace('": "[{\'', '": "[{"')
264 | .replace('\'}]", "', '"}]", "')
265 | )
266 | plist_out = repair_malformed_plist(plist_out)
267 | plistjson.write("[{}]".format(plist_out))
268 |
269 | except:
270 | pass
271 |
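# --- Editor's note: illustrative sketch, not part of the original module. ---
# format_plist_extractions() plus repair_malformed_plist() above convert the
# str() of a loaded plist into JSON by patching quotes. Where byte values and
# dates are acceptable as strings, a plist can also be serialised directly
# (json and plistlib are already imported at the top of this module):
def plist_to_json_string(plist_path):
    """Return the plist at `plist_path` as a JSON string (sketch only)."""
    with open(plist_path, "rb") as fh:
        data = plistlib.load(fh)
    return json.dumps(data, default=str)  # bytes/datetime fall back to str()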
--------------------------------------------------------------------------------
/elrond/rivendell/process/timeline.py:
--------------------------------------------------------------------------------
1 | import os
2 | import re
3 | import shutil
4 | import subprocess
5 | import time
6 | from datetime import datetime
7 |
8 | from rivendell.audit import write_audit_log_entry
9 |
10 |
11 | def convert_plaso_timeline(verbosity, output_directory, stage, img):
12 | lineno = 0
13 | with open(
14 | output_directory + img.split("::")[0] + "/artefacts/plaso_timeline.csv",
15 | "a",
16 | ) as plasocsv:
17 | plasocsv.write(
18 | "LastWriteTime,timestamp_desc,logsource,source_long,message,parser,display_name,tag,Message,Artefact\n"
19 | )
20 | with open("./plaso_timeline.csvtmp", "r") as plasotmp:
21 | for eachline in plasotmp:
22 | if lineno != 0:
23 | (
24 | LastWriteTime,
25 | timestamp_desc__logsource__source_long,
26 | Message,
27 | parser,
28 | Artefact,
29 | tag,
30 | ) = re.findall(
31 | r"^([^,]+),([^,]+,[^,]+,[^,]+),([^,]+),([^,]+),([^,]+),([^,]+)",
32 | eachline,
33 | )[
34 | 0
35 | ]
36 | if (
37 | LastWriteTime != "0000-00-00T00:00:00"
38 | ): # removing all entries without timestamp to reduce size
39 | plasocsv.write(
40 | "{},{},{},{},{},{},{},{}\n".format(
41 | LastWriteTime,
42 | timestamp_desc__logsource__source_long,
43 | Message,
44 | parser,
45 | Artefact,
46 | tag,
47 | Message.lower().replace("\\\\", "/").replace("\\", "/"),
48 | Artefact.lower()
49 | .replace("\\\\", "/")
50 | .replace("\\", "/"),
51 | )
52 | )
53 | lineno += 1
54 | entry, prnt = "{},{},{},{}\n".format(
55 | datetime.now().isoformat(),
56 | img.split("::")[0],
57 | stage,
58 | img.split("::")[0],
59 | ), " -> {} -> {} '{}'".format(
60 | datetime.now().isoformat().replace("T", " "),
61 | stage,
62 | img.split("::")[0],
63 | )
64 | write_audit_log_entry(verbosity, output_directory, entry, prnt)
65 |
66 |
67 | def create_plaso_timeline(verbosity, output_directory, stage, img, d, timelineimage):
68 | print("\n Creating timeline for {}...".format(timelineimage))
69 | entry, prnt = "{},{},{},commenced\n".format(
70 | datetime.now().isoformat(), timelineimage, stage
71 | ), " -> {} -> creating timeline for '{}'".format(
72 | datetime.now().isoformat().replace("T", " "), timelineimage
73 | )
74 | write_audit_log_entry(verbosity, output_directory, entry, prnt)
75 | for image_directory in os.listdir(d):
76 | if os.path.exists(os.path.join(d, image_directory, img.split("::")[0])):
77 | timelineimagepath = os.path.join(d, image_directory, img.split("::")[0])
78 | print(
79 | " Entering plaso to create timeline for '{}', please stand by...".format(
80 | timelineimage
81 | )
82 | )
83 | time.sleep(2)
84 | if os.path.exists(".plaso"):
85 | shutil.rmtree("./.plaso")
86 | os.mkdir(".plaso")
87 | os.chdir("./.plaso")
88 | subprocess.Popen(
89 | [
90 | "psteal.py",
91 | "--source",
92 | timelineimagepath,
93 | "-o",
94 | "dynamic",
95 | "-w",
96 | "./plaso_timeline.csvtmp",
97 | ]
98 | ).communicate()[
99 | 0
100 | ] # https://plaso.readthedocs.io/en/latest/sources/user/Creating-a-timeline.html
101 | os.chdir("..")
102 | convert_plaso_timeline(verbosity, output_directory, stage, img)
103 | write_audit_log_entry(verbosity, output_directory, entry, prnt)
104 | shutil.rmtree("./.plaso")
105 |
--------------------------------------------------------------------------------
/elrond/tools/config/scripts/VMware.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | USERPROFILE=$(cat /etc/passwd | grep 1000 | cut -d ":" -f 1)
4 | HOSTNAME=$(hostname)
5 |
6 | sudo tar -xvf /opt/elrond/elrond/tools/config/VMwareTools-10.3.23-16594550.tar.gz
7 | sleep 1
8 | cd vmware-tools-distrib
9 | sudo rm -rf vmware-install.pl
10 | sudo cp /opt/elrond/elrond/tools/config/vmware-install.pl .
11 | sudo chmod 755 vmware-install.pl
12 | sudo apt remove open-vm-tools --purge -y
13 | sudo rm -rf /etc/vmware-tools/
14 | yes '' | sudo ./vmware-install.pl -f
15 | cd ..
16 | sudo rm -rf vmware-tools-distrib
17 | sleep 1
18 |
--------------------------------------------------------------------------------
/elrond/tools/config/scripts/apfs-fuse.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | USERPROFILE=$(cat /etc/passwd | grep 1000 | cut -d ":" -f 1)
4 | HOSTNAME=$(hostname)
5 |
6 | sleep 1
7 | clear
8 | printf "\n -> Installing & configuring apfs-fuse...\n\n"
9 | sudo apt install libbz2-dev libattr1-dev cmake cmake-curses-gui -y --fix-missing --allow-unauthenticated
10 | cd /usr/local/bin
11 | sudo git clone https://github.com/cyberg3cko/apfs-fuse.git
12 | cd apfs-fuse
13 | sudo git submodule init
14 | sudo git submodule update
15 | sudo mkdir build
16 | cd build
17 | sudo cmake ..
18 | sudo ccmake .
19 | sudo make
20 | sleep 1
--------------------------------------------------------------------------------
/elrond/tools/config/scripts/cloud.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | USERPROFILE=$(cat /etc/passwd | grep 1000 | cut -d ":" -f 1)
4 | HOSTNAME=$(hostname)
5 |
6 | # install azure cli
7 | curl -sL https://aka.ms/InstallAzureCLIDeb | sudo bash
8 |
9 | # install aws cli
10 | curl "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -o "awscliv2.zip"
11 | unzip awscliv2.zip
12 | sudo ./aws/install
13 | rm awscliv2.zip
14 |
15 | # install gcp cli
16 | sudo snap install google-cloud-cli
--------------------------------------------------------------------------------
/elrond/tools/config/scripts/finish.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | USERPROFILE=$(cat /etc/passwd | grep 1000 | cut -d ":" -f 1)
4 | HOSTNAME=$(hostname)
5 |
6 | # configure .bashrc
7 | echo '
8 | export PS1="\e[1;36m\u@\h:\e[m \e[0;32m\w\e[m\n$ "' >> /home/$USERPROFILE/.bashrc
9 | echo "export PATH=$PATH:/opt/elrond/elrond" >> /home/$USERPROFILE/.bashrc
10 | source ~/.bashrc
11 |
12 | # configure terminal to launch on login
13 | sudo rm -rf /home/$USERPROFILE/.config/autostart/gnome-terminal.desktop
14 | sudo rm -rf gnome-terminal.desktop
15 | echo "[Desktop Entry]
16 | Type=Application
17 | Exec=gnome-terminal -- /opt/elrond/elrond/elrond.sh
18 | Hidden=false
19 | NoDisplay=false
20 | X-GNOME-Autostart-enabled=true
21 | Name[en_NG]=Terminal
22 | Name=Terminal
23 | Comment[en_NG]=Start Terminal On Startup
24 | Comment=Start Terminal On Startup" > gnome-terminal.desktop
25 | sudo chmod 744 gnome-terminal.desktop
26 | sudo chown -R "$USERPROFILE":"$USERPROFILE" gnome-terminal.desktop
27 | mkdir -p /home/$USERPROFILE/.config/autostart
28 | sudo mv gnome-terminal.desktop /home/$USERPROFILE/.config/autostart/
29 | sudo chmod 744 /home/$USERPROFILE/.config/autostart/gnome-terminal.desktop
30 | cp /opt/elrond/elrond/elrond.sh ~/elrond.sh
31 | sudo chmod 744 ~/elrond.sh
32 | sudo chmod +x ~/elrond.sh
33 | sudo chmod 744 /opt/elrond
34 | sudo chown -R "$USERPROFILE":"$USERPROFILE" /opt/elrond
35 | /opt/elrond/elrond/./elrond.sh
36 |
37 | # cleaning unneeded applications
38 | sudo unminimize -y
39 | sudo du -sh /var/cache/apt/archives
40 | sudo apt update --allow-insecure-repositories
41 | sudo apt-get clean
42 | sudo apt update --allow-insecure-repositories
43 | sudo updatedb
44 |
45 | # making dirs if they do not exist
46 | sudo mkdir -p /mnt/shadow_mount
47 | sudo mkdir -p /mnt/vss
--------------------------------------------------------------------------------
/elrond/tools/config/scripts/indx.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | USERPROFILE=$(cat /etc/passwd | grep 1000 | cut -d ":" -f 1)
4 | HOSTNAME=$(hostname)
5 |
6 | sleep 1
7 | clear
8 | printf "\n -> Configuring python3.9...\n\n"
9 | python3 -m keyring --disable
10 |
11 | # installing python libraries
12 | USER=$USERPROFILE
13 | python3 -m pip install --upgrade pip
14 | sudo python3 -m pip install --upgrade pip
15 | python3 -m pip install requests pandas openpyxl jupyterlab notebook voila
16 | sudo python3 -m pip install requests pandas openpyxl jupyterlab notebook voila
17 | sudo chmod -R 744 /opt/elrond/
18 | sudo chown -R "$USERPROFILE":"$USERPROFILE" /opt/elrond
19 | cd /opt/etl-parser
20 | sudo python3 -m pip install -e .
21 | cd /opt/elrond/elrond
22 |
23 | PYTHON_KEYRING_BACKEND=keyring.backends.null.Keyring
24 | sudo git clone https://github.com/harelsegev/INDXRipper /opt/elrond/elrond/tools/INDXRipper
25 | /usr/bin/python3.9 -m pip install --upgrade pip
26 | /usr/bin/python3.8 -m pip install --upgrade pip
27 | /usr/bin/python3 -m pip install --upgrade pip
28 | /usr/bin/python -m pip install --upgrade pip
29 | python3.9 -m pip install setuptools
30 | python3.9 -m pip install construct==2.10.68
31 | python3.8 -m pip install setuptools
32 | python3.8 -m pip install construct==2.10.68
33 | python3 -m pip install construct
34 | sleep 1
--------------------------------------------------------------------------------
/elrond/tools/config/scripts/navigator.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | USERPROFILE=$(cat /etc/passwd | grep 1000 | cut -d ":" -f 1)
4 | HOSTNAME=$(hostname)
5 |
6 | sleep 1
7 | clear
8 | printf "\n -> Installing & configuring attack-navigator...\n\n"
9 | curl -fsSL https://deb.nodesource.com/setup_16.x | sudo -E bash -
10 | curl -sL https://dl.yarnpkg.com/debian/pubkey.gpg | gpg --dearmor | sudo tee /usr/share/keyrings/yarnkey.gpg >/dev/null
11 | echo "deb [signed-by=/usr/share/keyrings/yarnkey.gpg] https://dl.yarnpkg.com/debian stable main" | sudo tee /etc/apt/sources.list.d/yarn.list
12 | sudo apt-get update --allow-insecure-repositories
13 | sudo apt install npm nodejs yarn -y --fix-missing --allow-unauthenticated
14 | sleep 2
15 | npm cache clean -f
16 | sudo npm install n -g
17 | sudo -E env "PATH=$PATH" n 16
18 | sudo npm install -g @angular/cli
19 | sudo npm install -g pm2
20 | # sudo git clone https://github.com/mitre-attack/attack-navigator.git /opt/attack-navigator
21 | cd /opt/attack-navigator/nav-app
22 | sudo npm install
23 | sudo pm2 start --time --name="attack-navigator" ng -- serve
24 | sleep 1
25 | sudo curl https://raw.githubusercontent.com/mitre/cti/master/enterprise-attack/enterprise-attack.json -o /opt/attack-navigator/nav-app/src/assets/enterprise-attack.json
26 | sudo chmod -R 755 /opt/attack-navigator/
27 | sudo chown -R "$USERPROFILE":"$USERPROFILE" /opt/attack-navigator/
28 | # sudo pm2 stop attack-navigator
29 | cd /opt/elrond
30 | sleep 1
--------------------------------------------------------------------------------
/elrond/tools/config/scripts/nsrl.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | USERPROFILE=$(cat /etc/passwd | grep 1000 | cut -d ":" -f 1)
4 | HOSTNAME=$(hostname)
5 |
6 | wget -O /opt/elrond/elrond/tools/RDS_2024.03.1_modern.zip https://s3.amazonaws.com/rds.nsrl.nist.gov/RDS/rds_2024.03.1/RDS_2024.03.1_modern.zip
--------------------------------------------------------------------------------
/elrond/tools/config/scripts/regrip.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | USERPROFILE=$(cat /etc/passwd | grep 1000 | cut -d ":" -f 1)
4 | HOSTNAME=$(hostname)
5 |
6 | sleep 1
7 | clear
8 | printf "\n -> Installing and configuring regripper...\n\n"
9 | # downloading regripper for elrond
10 | sudo apt-get install libparse-win32registry-perl -y --fix-missing --allow-unauthenticated
11 | cd /usr/local/src/
12 | sudo rm -r /usr/local/src/regripper/ 2>/dev/null
13 | sudo rm -r /usr/share/regripper/plugins 2>/dev/null
14 | sudo git clone https://github.com/keydet89/RegRipper3.0.git
15 | sudo mv RegRipper3.0 regripper
16 | #sudo mkdir /usr/share/regripper
17 | ln -s /usr/local/src/regripper/plugins /usr/share/regripper/plugins 2>/dev/null
18 | sudo chmod -R 755 regripper/
19 | sudo cp regripper/File.pm /usr/share/perl5/Parse/Win32Registry/WinNT/File.pm
20 | sudo cp regripper/Key.pm /usr/share/perl5/Parse/Win32Registry/WinNT/Key.pm
21 | sudo cp regripper/Base.pm /usr/share/perl5/Parse/Win32Registry/Base.pm
22 | set +H
23 | sudo cp regripper/rip.pl regripper/rip.pl.linux
24 | sudo sed -i '77i my \$plugindir \= \"\/usr\/share\/regripper\/plugins\/\"\;' /usr/local/src/regripper/rip.pl.linux
25 | sudo sed -i '/^#! c:[\]perl[\]bin[\]perl.exe/d' /usr/local/src/regripper/rip.pl.linux
26 | sudo sed -i "1i #!`which perl`" /usr/local/src/regripper/rip.pl.linux
27 | sudo sed -i '2i use lib qw(/usr/lib/perl5/);' /usr/local/src/regripper/rip.pl.linux
28 | sudo cp regripper/rip.pl.linux /usr/local/bin/rip.pl
29 | sudo cp -r /usr/local/src/regripper/ /usr/share/
--------------------------------------------------------------------------------
/elrond/tools/config/scripts/repo.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | USERPROFILE=$(cat /etc/passwd | grep 1000 | cut -d ":" -f 1)
4 | HOSTNAME=$(hostname)
5 |
6 | sleep 1
7 | sudo apt install python3-pip -y --fix-missing --allow-unauthenticated
8 | sudo python3 -m pip install gdown python-evtx python-registry registry libesedb-python
9 | python3 -m pip install gdown python-evtx python-registry registry libesedb-python
10 | export PATH="$HOME/.local/bin:$PATH"
11 | source ~/.bashrc
12 | clear
13 | printf "\n -> Downloading additional elrond components...\n\n"
14 |
15 | # downloading additional tools for elrond
16 | cd /tmp
17 | gdown https://drive.google.com/uc?id=1mVrkLp84jchHRrAxqXvSpDdZhIKsH9Fi
18 | if [ -f "elrond-archive.zip" ]; then
19 | unzip elrond-archive.zip 1> /dev/null
20 | rm -rf __MACOSX/
21 | mv /tmp/VMwareTools-10.3.23-16594550.tar.gz /opt/elrond/elrond/tools/config/VMwareTools-10.3.23-16594550.tar.gz
22 | else
23 | printf "\n 'elrond-archive.zip' did not download successfully.\n Exiting. Please run 'sudo /opt/elrond/./make.sh' again."
24 | exit
25 | fi
26 |
27 | #/opt/elrond/elrond/tools/
28 | unzip tools.zip 1> /dev/null
29 | sudo mv avml /opt/elrond/elrond/tools/
30 | sudo mv avml.zip /opt/elrond/elrond/tools/
31 | sudo mv osxpmem.app.zip /opt/elrond/elrond/tools/
32 | sudo mv volatility.zip /opt/elrond/elrond/tools/
33 | sudo mv volatility3.zip /opt/elrond/elrond/tools/
34 |
35 | #/opt/elrond/elrond/rivendell/memory/
36 | unzip volatility-sym-prof-plug.zip 1> /dev/null
37 | sudo mv volatility /opt/elrond/elrond/rivendell/memory/
38 | sudo mv volatility3 /opt/elrond/elrond/rivendell/memory/
39 |
40 | #/opt/elrond/elrond/rivendell/post/splunk/
41 | unzip apps.zip 1> /dev/null
42 | sudo mkdir /opt/elrond/elrond/rivendell/post/splunk/apps/
43 | sudo mv /tmp/apps/*.py /opt/elrond/elrond/rivendell/post/splunk/apps/
44 | sudo rm -rf /tmp/*.zip
45 | cd ~
--------------------------------------------------------------------------------
/elrond/tools/config/scripts/tools.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | USERPROFILE=$(cat /etc/passwd | grep 1000 | cut -d ":" -f 1)
4 | HOSTNAME=$(hostname)
5 |
6 | # purging unwanted software
7 | sudo apt-get remove thunderbird rhythmbox yelp libreoffice* kdeconnect aisleriot gnome-mines gnome-sudoku gnome-mahjongg cheese ghex simple-scan wxhexeditor scite -y --auto-remove --purge
8 | sudo apt-get purge nodejs -y --auto-remove --purge
9 | sudo apt-get autoremove -y --auto-remove --purge
10 |
11 | # downloading additional repo files
12 | sudo add-apt-repository -y ppa:linuxgndu/sqlitebrowser > /dev/null 2>&1 # db browser for sqlite
13 | yes '' | sudo add-apt-repository ppa:deadsnakes/ppa # INDXRipper
14 | echo 'deb http://download.opensuse.org/repositories/home:/RizinOrg/xUbuntu_22.04/ /' | sudo tee /etc/apt/sources.list.d/home:RizinOrg.list # cutter-re
15 | curl -fsSL https://download.opensuse.org/repositories/home:RizinOrg/xUbuntu_22.04/Release.key | gpg --dearmor | sudo tee /etc/apt/trusted.gpg.d/home_RizinOrg.gpg > /dev/null # cutter-re
16 |
17 | # elastic 7.x
18 | wget -qO - https://artifacts.elastic.co/GPG-KEY-elasticsearch | sudo apt-key add -
19 | echo "deb https://artifacts.elastic.co/packages/7.x/apt stable main" | sudo tee /etc/apt/sources.list.d/elastic-7.x.list
20 |
21 | # elastic 8.x
22 | #echo "deb https://artifacts.elastic.co/packages/8.x/apt stable main" | sudo tee /etc/apt/sources.list.d/elastic-8.x.list
23 | #sudo /usr/share/elasticsearch/bin/elasticsearch-create-enrollment-token --scope kibana
24 | #sudo /usr/share/kibana/bin/kibana-verification-code
25 | #sudo /usr/share/elasticsearch/bin/elasticsearch-reset-password -u elastic
26 |
27 | # attack-navigator repos
28 | curl -fsSL https://deb.nodesource.com/setup_16.x | sudo -E bash -
29 | curl -sL https://dl.yarnpkg.com/debian/pubkey.gpg | gpg --dearmor | sudo tee /usr/share/keyrings/yarnkey.gpg >/dev/null
30 | echo "deb [signed-by=/usr/share/keyrings/yarnkey.gpg] https://dl.yarnpkg.com/debian stable main" | sudo tee /etc/apt/sources.list.d/yarn.list
31 | echo 'deb http://download.opensuse.org/repositories/home:/RizinOrg/xUbuntu_22.04/ /' | sudo tee /etc/apt/sources.list.d/home:RizinOrg.list
32 | curl -fsSL https://download.opensuse.org/repositories/home:RizinOrg/xUbuntu_22.04/Release.key | gpg --dearmor | sudo tee /etc/apt/trusted.gpg.d/home_RizinOrg.gpg > /dev/null
33 |
34 | /opt/elrond/elrond/tools/config/scripts/./repo.sh
35 |
36 | sudo wget https://www.netresec.com/?download=NetworkMiner -O /tmp/networkminer.zip
37 | sudo wget -O /tmp/Maltego.v4.7.0.deb https://downloads.maltego.com/maltego-v4/linux/Maltego.v4.7.0.deb
38 | sudo wget -O /opt/elrond/elrond/tools/.splunk.deb "https://download.splunk.com/products/splunk/releases/9.0.5/linux/splunk-9.0.5-e9494146ae5c-linux-2.6-amd64.deb"
39 |
40 | # installing additional features for elrond
41 | sudo apt update --allow-insecure-repositories
42 | sudo apt-get update --allow-insecure-repositories
43 | sudo apt install libewf-dev ewf-tools mlocate net-tools build-essential libreadline-dev libncursesw5-dev libssl-dev libc6-dev libffi-dev zlib1g-dev apt-transport-https software-properties-common systemd gnupg xz-utils sqlite3 mdbtools yara clamav clamav-daemon john gparted dos2unix sqlitebrowser python3-apt wireshark libguestfs-tools mono-devel openjdk-17-jdk openjdk-17-jre curl jq elasticsearch kibana python3.9 python3.9-venv bless flameshot cutter-re vim nodejs yarn -y --fix-missing --allow-unauthenticated
44 | sudo apt-get install qemu -y --fix-missing --allow-unauthenticated
45 | sudo apt-get install qemu-system -y --fix-missing --allow-unauthenticated
46 | sudo apt-get install checkinstall libgdbm-dev libreadline-dev libnss3-dev libsqlite3-dev tk-dev liblzma-dev -y --fix-missing --allow-unauthenticated
47 | sudo apt install clamav clamav-daemon -y --fix-missing --allow-unauthenticated
48 |
49 | # installing additional software via snap
50 | sudo snap install sqlitebrowser
51 | sudo snap install cyberchef
52 | # installing maltego
53 | sudo dpkg -i /tmp/Maltego.v4.7.0.deb
54 |
55 | # installing network-miner
56 | sudo unzip /tmp/networkminer.zip -d /opt/
57 | sudo chmod +x /opt/NetworkMiner_*/NetworkMiner.exe
58 | sudo chmod -R go+w /opt/NetworkMiner_*/AssembledFiles/
59 | sudo chmod -R go+w /opt/NetworkMiner_*/Captures/
60 |
61 | # initialising clamav
62 | sudo apt update --allow-insecure-repositories
63 | sudo systemctl stop clamav-freshclam
64 | sudo freshclam
65 | sudo systemctl start clamav-freshclam
66 | sudo systemctl stop clamav-freshclam
67 | sudo freshclam
68 | sudo systemctl start clamav-freshclam
69 |
70 | # installing additional github tools
71 | sudo rm -rf /opt/TZWorks
72 | sudo rm -rf /opt/BlueTeamPowerShell
73 | sudo rm -rf /opt/Sysmon/SysmonForLinux
74 | sudo rm -rf /home/$USERPROFILE/Desktop/CobaltStrike-Defence/content
75 | sudo git clone https://github.com/MichaelKoczwara/Awesome-CobaltStrike-Defence /home/$USERPROFILE/Desktop/CobaltStrike-Defence
76 | sudo mkdir -p /home/$USERPROFILE/Desktop/CobaltStrike-Defence/content
77 | sudo mkdir -p /opt/TZWorks /opt/BlueTeamPowerShell /opt/Sysmon/SysmonForLinux
78 |
79 | sudo rm -rf /opt/USN-Journal-Parser
80 | sudo git clone https://github.com/PoorBillionaire/USN-Journal-Parser.git /opt/USN-Journal-Parser
81 | sudo rm -rf /opt/KStrike
82 | sudo git clone https://github.com/cyberg3cko/KStrike.git /opt/KStrike
83 | sudo rm -rf /opt/plaso
84 | sudo git clone https://github.com/log2timeline/plaso.git /opt/plaso
85 | sudo rm -rf /opt/etl-parser
86 | sudo git clone https://github.com/cyberg3cko/etl-parser /opt/etl-parser
87 | sudo rm -rf /opt/bruce
88 | sudo git clone https://github.com/cyberg3cko/bruce.git /opt/bruce
89 | sudo rm -rf /opt/bookstack
90 | sudo git clone https://github.com/BookStackApp/BookStack.git /opt/bookstack
91 | sudo rm -rf /opt/gandalf
92 | sudo git clone https://github.com/cyberg3cko/gandalf.git /opt/gandalf
93 | sudo rm -rf /opt/sigma
94 | sudo git clone https://github.com/SigmaHQ/sigma.git /opt/sigma
95 | sudo rm -rf /opt/DeepBlueCLI
96 | sudo git clone https://github.com/sans-blue-team/DeepBlueCLI.git /opt/DeepBlueCLI
97 | sudo rm -rf /opt/KAPE
98 | sudo git clone https://github.com/EricZimmerman/KapeFiles.git /opt/KAPE
99 | sudo rm -rf /opt/MemProcFS
100 | sudo git clone https://github.com/ufrisk/MemProcFS.git /opt/MemProcFS
101 | sudo rm -rf /opt/WMIExplorer
102 | sudo git clone https://github.com/vinaypamnani/wmie2/ /opt/WMIExplorer
103 | sudo rm -rf /opt/WMI_Forensics
104 | sudo git clone https://github.com/cyberg3cko/WMI_Forensics /opt/WMI_Forensics
105 | sudo rm -rf /opt/PowerForensics
106 | sudo git clone https://github.com/Invoke-IR/PowerForensics.git /opt/PowerForensics
107 | sudo rm -rf /opt/freq
108 | sudo git clone https://github.com/MarkBaggett/freq.git /opt/freq
109 | sudo rm -rf /opt/dnstwist
110 | sudo git clone https://github.com/elceef/dnstwist.git /opt/dnstwist
111 | sudo rm -rf /opt/rdap
112 | sudo git clone https://github.com/cyberg3cko/rdap.git /opt/rdap
113 | sudo rm -rf /opt/sherlock
114 | sudo git clone https://github.com/sherlock-project/sherlock.git /opt/sherlock
115 | sudo rm -rf /opt/TweetScraper
116 | sudo git clone https://github.com/jonbakerfish/TweetScraper.git /opt/TweetScraper
117 | sudo rm -rf /opt/karma
118 | sudo git clone https://github.com/Dheerajmadhukar/karma_v2.git /opt/karma
119 | sudo rm -rf /opt/Sysmon/SysmonForLinux
120 | sudo git clone https://github.com/Sysinternals/SysmonForLinux.git /opt/Sysmon/SysmonForLinux
121 | sudo rm -rf /opt/httrack
122 | sudo git clone https://github.com/xroche/httrack.git --recurse /opt/httrack
123 | sudo rm -rf /opt/attack-navigator
124 | sudo git clone https://github.com/mitre-attack/attack-navigator.git /opt/attack-navigator
125 |
126 | # downloading indx-parser
127 | /opt/elrond/elrond/tools/config/scripts/./indx.sh
--------------------------------------------------------------------------------
/elrond/tools/config/scripts/update.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | USERPROFILE=$(awk -F: '$3 == 1000 {print $1}' /etc/passwd)
4 | HOSTNAME=$(hostname)
5 |
6 | # removing old versions
7 | sudo rm -rf /opt/elrond/elrond/tools/avml*
8 | sudo rm -rf /opt/elrond/elrond/tools/osxpmem.app.zip
9 | sudo rm -rf /opt/elrond/elrond/tools/volatility*
10 | sudo mv /tmp/apps/*.py /opt/elrond/elrond/rivendell/post/splunk/apps/
11 | clear
12 |
13 | # downloading latest version
14 | sudo git clone https://github.com/cyberg3cko/elrond.git
15 |
16 | # downloading latest version of additional repos
17 | /opt/elrond/elrond/tools/config/scripts/./repo.sh
18 | /opt/elrond/elrond/tools/config/scripts/./tools.sh
19 | /opt/elrond/elrond/tools/config/scripts/./volatility3.sh
20 | /opt/elrond/elrond/tools/config/scripts/./finish.sh
21 |
22 | # finalising update
23 | sudo chmod 744 -R /opt/elrond/
24 | sudo chown "$USERPROFILE":"$USERPROFILE" -R /opt/elrond/
25 | sudo chmod +x /opt/elrond/elrond/elrond.py
26 | echo 'export PATH=$PATH:/opt/elrond/elrond' >> /home/$USERPROFILE/.bashrc
27 | printf "\n\n -> '%s' has been successfully updated; a reboot is advised. Press ENTER to continue..." "$HOSTNAME"
28 | read answer
29 | sleep 1
30 | sudo updatedb
31 | echo '' | sudo tee ~/.bash_history
32 | history -c
33 | sleep 1
34 | clear
--------------------------------------------------------------------------------
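The PATH export appended to .bashrc in update.sh above is unconditional, so repeated updates would keep adding the same line. A guarded append (a sketch, assuming the same .bashrc location; the single quotes keep $PATH unexpanded until the shell sources the file) makes the step idempotent:

    # append the PATH line only if it is not already present (sketch)
    LINE='export PATH=$PATH:/opt/elrond/elrond'
    grep -qxF "$LINE" /home/$USERPROFILE/.bashrc || echo "$LINE" >> /home/$USERPROFILE/.bashrc
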
/elrond/tools/config/scripts/volatility3.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | USERPROFILE=$(awk -F: '$3 == 1000 {print $1}' /etc/passwd)
4 | HOSTNAME=$(hostname)
5 |
6 | # dwarf2json
7 | sleep 1
8 | clear
9 | printf "\n -> Installing & configuring dwarf2json...\n\n"
10 | wget https://golang.org/dl/go1.15.10.linux-amd64.tar.gz
11 | sudo tar -C /usr/local -xzf go1.15.10.linux-amd64.tar.gz
12 | sudo rm -rf go1.15.10.linux-amd64.tar.gz
13 | export PATH=$PATH:/usr/local/go/bin
14 | cd /opt/elrond/elrond/tools/
15 | sudo git clone https://github.com/cyberg3cko/dwarf2json.git
16 | cd /opt/elrond/elrond
17 | sleep 1
18 |
19 | # volatility3
20 | printf "\n -> Installing & configuring volatility3 and dependencies...\n\n"
21 | # sudo apt install build-essential libreadline-dev libncursesw5-dev libssl-dev libc6-dev libffi-dev zlib1g-dev -y --fix-missing --allow-unauthenticated
22 | # sudo apt-get install checkinstall libgdbm-dev libreadline-dev libnss3-dev libsqlite3-dev tk-dev liblzma-dev -y --fix-missing --allow-unauthenticated
23 | sudo git clone https://github.com/volatilityfoundation/volatility3.git /usr/local/lib/python3.8/dist-packages/volatility3
24 | sudo mkdir -p /usr/local/lib/python3.8/dist-packages/volatility3/volatility3/symbols/windows/ntkrnlmp.pdb/
25 | sudo mkdir -p /usr/local/lib/python3.8/dist-packages/volatility3/volatility3/symbols/windows/tcpip.pdb/
26 | sudo git clone https://github.com/JPCERTCC/Windows-Symbol-Tables.git
27 | sudo mv Windows-Symbol-Tables/symbols/windows/*.pdb/ /usr/local/lib/python3.8/dist-packages/volatility3/volatility3/symbols/windows/
28 | sudo rm -rf Windows-Symbol-Tables
29 | sudo chmod -R 755 /usr/local/lib/python3.8/dist-packages/volatility3/
30 | sudo chown -R root:root /usr/local/lib/python3.8/dist-packages/volatility3/
31 | sleep 1
--------------------------------------------------------------------------------
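volatility3.sh clones dwarf2json but does not appear to build it, and nothing confirms that the volatility3 checkout is usable. Something like the following (a sketch; the Go and volatility3 paths are the ones used in that script) covers both:

    # build dwarf2json from the clone above (sketch; assumes Go was unpacked to /usr/local/go)
    (cd /opt/elrond/elrond/tools/dwarf2json && /usr/local/go/bin/go build)

    # quick volatility3 sanity check against the clone location above
    VOL3=/usr/local/lib/python3.8/dist-packages/volatility3
    python3 "$VOL3/vol.py" -h >/dev/null 2>&1 && echo "volatility3 OK" || echo "volatility3 check failed" >&2
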
/elrond/tools/srum_dump/SRUM_TEMPLATE3.xlsx:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cyberg3cko/elrond/5cfbed2461433cbcd91b35d0d36659cf32d59c6f/elrond/tools/srum_dump/SRUM_TEMPLATE3.xlsx
--------------------------------------------------------------------------------
/make.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | USERPROFILE=$(awk -F: '$3 == 1000 {print $1}' /etc/passwd)
4 | HOSTNAME=$(hostname)
5 |
6 | sudo chmod -R 744 /opt/elrond/
7 | sudo chown -R "$USERPROFILE":"$USERPROFILE" /opt/elrond/
8 | sudo chmod +x /opt/elrond/elrond/config.sh
9 | sudo chmod +x /opt/elrond/elrond/elrond.py
10 | sudo apt install curl -y
11 | sudo /opt/elrond/elrond/./config.sh
--------------------------------------------------------------------------------
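make.sh hands straight off to config.sh, so a brief pre-flight check (a sketch, using only paths that make.sh itself references) can catch a missing or partially copied repository before configuration starts:

    # confirm the expected layout exists before configuration begins (sketch)
    for f in /opt/elrond/elrond/config.sh /opt/elrond/elrond/elrond.py; do
        [ -f "$f" ] || { echo "missing: $f" >&2; exit 1; }
    done
    command -v curl >/dev/null || sudo apt install curl -y
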
/update.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | cp /opt/elrond/elrond/tools/config/scripts/update.sh /tmp/
4 | cd /tmp/
5 | ./update.sh
6 | rm /tmp/update.sh
--------------------------------------------------------------------------------
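The top-level update.sh copies the real updater to /tmp before executing it, presumably so the running script is not overwritten while the repository is refreshed. An equivalent one-liner that does not depend on the copy keeping its execute bit (a sketch) is:

    cp /opt/elrond/elrond/tools/config/scripts/update.sh /tmp/ && bash /tmp/update.sh && rm /tmp/update.sh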