├── .gitignore
├── AUTHORS
├── LICENSE
├── README.md
├── derelocator.py
├── inter_modex.py
├── modex.py
└── tests.py
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | .Python
11 | build/
12 | develop-eggs/
13 | dist/
14 | downloads/
15 | eggs/
16 | .eggs/
17 | lib/
18 | lib64/
19 | parts/
20 | sdist/
21 | var/
22 | wheels/
23 | pip-wheel-metadata/
24 | share/python-wheels/
25 | *.egg-info/
26 | .installed.cfg
27 | *.egg
28 | MANIFEST
29 |
30 | # PyInstaller
31 | # Usually these files are written by a python script from a template
32 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
33 | *.manifest
34 | *.spec
35 |
36 | # Installer logs
37 | pip-log.txt
38 | pip-delete-this-directory.txt
39 |
40 | # Unit test / coverage reports
41 | htmlcov/
42 | .tox/
43 | .nox/
44 | .coverage
45 | .coverage.*
46 | .cache
47 | nosetests.xml
48 | coverage.xml
49 | *.cover
50 | *.py,cover
51 | .hypothesis/
52 | .pytest_cache/
53 |
54 | # Translations
55 | *.mo
56 | *.pot
57 |
58 | # Django stuff:
59 | *.log
60 | local_settings.py
61 | db.sqlite3
62 | db.sqlite3-journal
63 |
64 | # Flask stuff:
65 | instance/
66 | .webassets-cache
67 |
68 | # Scrapy stuff:
69 | .scrapy
70 |
71 | # Sphinx documentation
72 | docs/_build/
73 |
74 | # PyBuilder
75 | target/
76 |
77 | # Jupyter Notebook
78 | .ipynb_checkpoints
79 |
80 | # IPython
81 | profile_default/
82 | ipython_config.py
83 |
84 | # pyenv
85 | .python-version
86 |
87 | # pipenv
88 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
89 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
90 | # having no cross-platform support, pipenv may install dependencies that don't work, or not
91 | # install all needed dependencies.
92 | #Pipfile.lock
93 |
94 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow
95 | __pypackages__/
96 |
97 | # Celery stuff
98 | celerybeat-schedule
99 | celerybeat.pid
100 |
101 | # SageMath parsed files
102 | *.sage.py
103 |
104 | # Environments
105 | .env
106 | .venv
107 | env/
108 | venv/
109 | ENV/
110 | env.bak/
111 | venv.bak/
112 |
113 | # Spyder project settings
114 | .spyderproject
115 | .spyproject
116 |
117 | # Rope project settings
118 | .ropeproject
119 |
120 | # mkdocs documentation
121 | /site
122 |
123 | # mypy
124 | .mypy_cache/
125 | .dmypy.json
126 | dmypy.json
127 |
128 | # Pyre type checker
129 | .pyre/
130 |
131 | # Added by myself
132 | .idea/
133 | .DS_Store
134 | tests_log_*.txt
135 | inter_modex_output_*
--------------------------------------------------------------------------------
/AUTHORS:
--------------------------------------------------------------------------------
1 | modex was created in 2022 at RME, a part of the DisCo research group at the University of Zaragoza focused on software and systems security.
2 |
3 | Here is a list of contributors in alphabetical order (by surname):
4 | Pedro Fernández Álvarez (pedrofdez26)
5 |
6 | (c) RME-DisCo Research Group, University of Zaragoza (Spain)
7 | Visit our website to learn more about us! https://www.reversea.me
8 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | GNU GENERAL PUBLIC LICENSE
2 | Version 3, 29 June 2007
3 |
4 | Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
5 | Everyone is permitted to copy and distribute verbatim copies
6 | of this license document, but changing it is not allowed.
7 |
8 | Preamble
9 |
10 | The GNU General Public License is a free, copyleft license for
11 | software and other kinds of works.
12 |
13 | The licenses for most software and other practical works are designed
14 | to take away your freedom to share and change the works. By contrast,
15 | the GNU General Public License is intended to guarantee your freedom to
16 | share and change all versions of a program--to make sure it remains free
17 | software for all its users. We, the Free Software Foundation, use the
18 | GNU General Public License for most of our software; it applies also to
19 | any other work released this way by its authors. You can apply it to
20 | your programs, too.
21 |
22 | When we speak of free software, we are referring to freedom, not
23 | price. Our General Public Licenses are designed to make sure that you
24 | have the freedom to distribute copies of free software (and charge for
25 | them if you wish), that you receive source code or can get it if you
26 | want it, that you can change the software or use pieces of it in new
27 | free programs, and that you know you can do these things.
28 |
29 | To protect your rights, we need to prevent others from denying you
30 | these rights or asking you to surrender the rights. Therefore, you have
31 | certain responsibilities if you distribute copies of the software, or if
32 | you modify it: responsibilities to respect the freedom of others.
33 |
34 | For example, if you distribute copies of such a program, whether
35 | gratis or for a fee, you must pass on to the recipients the same
36 | freedoms that you received. You must make sure that they, too, receive
37 | or can get the source code. And you must show them these terms so they
38 | know their rights.
39 |
40 | Developers that use the GNU GPL protect your rights with two steps:
41 | (1) assert copyright on the software, and (2) offer you this License
42 | giving you legal permission to copy, distribute and/or modify it.
43 |
44 | For the developers' and authors' protection, the GPL clearly explains
45 | that there is no warranty for this free software. For both users' and
46 | authors' sake, the GPL requires that modified versions be marked as
47 | changed, so that their problems will not be attributed erroneously to
48 | authors of previous versions.
49 |
50 | Some devices are designed to deny users access to install or run
51 | modified versions of the software inside them, although the manufacturer
52 | can do so. This is fundamentally incompatible with the aim of
53 | protecting users' freedom to change the software. The systematic
54 | pattern of such abuse occurs in the area of products for individuals to
55 | use, which is precisely where it is most unacceptable. Therefore, we
56 | have designed this version of the GPL to prohibit the practice for those
57 | products. If such problems arise substantially in other domains, we
58 | stand ready to extend this provision to those domains in future versions
59 | of the GPL, as needed to protect the freedom of users.
60 |
61 | Finally, every program is threatened constantly by software patents.
62 | States should not allow patents to restrict development and use of
63 | software on general-purpose computers, but in those that do, we wish to
64 | avoid the special danger that patents applied to a free program could
65 | make it effectively proprietary. To prevent this, the GPL assures that
66 | patents cannot be used to render the program non-free.
67 |
68 | The precise terms and conditions for copying, distribution and
69 | modification follow.
70 |
71 | TERMS AND CONDITIONS
72 |
73 | 0. Definitions.
74 |
75 | "This License" refers to version 3 of the GNU General Public License.
76 |
77 | "Copyright" also means copyright-like laws that apply to other kinds of
78 | works, such as semiconductor masks.
79 |
80 | "The Program" refers to any copyrightable work licensed under this
81 | License. Each licensee is addressed as "you". "Licensees" and
82 | "recipients" may be individuals or organizations.
83 |
84 | To "modify" a work means to copy from or adapt all or part of the work
85 | in a fashion requiring copyright permission, other than the making of an
86 | exact copy. The resulting work is called a "modified version" of the
87 | earlier work or a work "based on" the earlier work.
88 |
89 | A "covered work" means either the unmodified Program or a work based
90 | on the Program.
91 |
92 | To "propagate" a work means to do anything with it that, without
93 | permission, would make you directly or secondarily liable for
94 | infringement under applicable copyright law, except executing it on a
95 | computer or modifying a private copy. Propagation includes copying,
96 | distribution (with or without modification), making available to the
97 | public, and in some countries other activities as well.
98 |
99 | To "convey" a work means any kind of propagation that enables other
100 | parties to make or receive copies. Mere interaction with a user through
101 | a computer network, with no transfer of a copy, is not conveying.
102 |
103 | An interactive user interface displays "Appropriate Legal Notices"
104 | to the extent that it includes a convenient and prominently visible
105 | feature that (1) displays an appropriate copyright notice, and (2)
106 | tells the user that there is no warranty for the work (except to the
107 | extent that warranties are provided), that licensees may convey the
108 | work under this License, and how to view a copy of this License. If
109 | the interface presents a list of user commands or options, such as a
110 | menu, a prominent item in the list meets this criterion.
111 |
112 | 1. Source Code.
113 |
114 | The "source code" for a work means the preferred form of the work
115 | for making modifications to it. "Object code" means any non-source
116 | form of a work.
117 |
118 | A "Standard Interface" means an interface that either is an official
119 | standard defined by a recognized standards body, or, in the case of
120 | interfaces specified for a particular programming language, one that
121 | is widely used among developers working in that language.
122 |
123 | The "System Libraries" of an executable work include anything, other
124 | than the work as a whole, that (a) is included in the normal form of
125 | packaging a Major Component, but which is not part of that Major
126 | Component, and (b) serves only to enable use of the work with that
127 | Major Component, or to implement a Standard Interface for which an
128 | implementation is available to the public in source code form. A
129 | "Major Component", in this context, means a major essential component
130 | (kernel, window system, and so on) of the specific operating system
131 | (if any) on which the executable work runs, or a compiler used to
132 | produce the work, or an object code interpreter used to run it.
133 |
134 | The "Corresponding Source" for a work in object code form means all
135 | the source code needed to generate, install, and (for an executable
136 | work) run the object code and to modify the work, including scripts to
137 | control those activities. However, it does not include the work's
138 | System Libraries, or general-purpose tools or generally available free
139 | programs which are used unmodified in performing those activities but
140 | which are not part of the work. For example, Corresponding Source
141 | includes interface definition files associated with source files for
142 | the work, and the source code for shared libraries and dynamically
143 | linked subprograms that the work is specifically designed to require,
144 | such as by intimate data communication or control flow between those
145 | subprograms and other parts of the work.
146 |
147 | The Corresponding Source need not include anything that users
148 | can regenerate automatically from other parts of the Corresponding
149 | Source.
150 |
151 | The Corresponding Source for a work in source code form is that
152 | same work.
153 |
154 | 2. Basic Permissions.
155 |
156 | All rights granted under this License are granted for the term of
157 | copyright on the Program, and are irrevocable provided the stated
158 | conditions are met. This License explicitly affirms your unlimited
159 | permission to run the unmodified Program. The output from running a
160 | covered work is covered by this License only if the output, given its
161 | content, constitutes a covered work. This License acknowledges your
162 | rights of fair use or other equivalent, as provided by copyright law.
163 |
164 | You may make, run and propagate covered works that you do not
165 | convey, without conditions so long as your license otherwise remains
166 | in force. You may convey covered works to others for the sole purpose
167 | of having them make modifications exclusively for you, or provide you
168 | with facilities for running those works, provided that you comply with
169 | the terms of this License in conveying all material for which you do
170 | not control copyright. Those thus making or running the covered works
171 | for you must do so exclusively on your behalf, under your direction
172 | and control, on terms that prohibit them from making any copies of
173 | your copyrighted material outside their relationship with you.
174 |
175 | Conveying under any other circumstances is permitted solely under
176 | the conditions stated below. Sublicensing is not allowed; section 10
177 | makes it unnecessary.
178 |
179 | 3. Protecting Users' Legal Rights From Anti-Circumvention Law.
180 |
181 | No covered work shall be deemed part of an effective technological
182 | measure under any applicable law fulfilling obligations under article
183 | 11 of the WIPO copyright treaty adopted on 20 December 1996, or
184 | similar laws prohibiting or restricting circumvention of such
185 | measures.
186 |
187 | When you convey a covered work, you waive any legal power to forbid
188 | circumvention of technological measures to the extent such circumvention
189 | is effected by exercising rights under this License with respect to
190 | the covered work, and you disclaim any intention to limit operation or
191 | modification of the work as a means of enforcing, against the work's
192 | users, your or third parties' legal rights to forbid circumvention of
193 | technological measures.
194 |
195 | 4. Conveying Verbatim Copies.
196 |
197 | You may convey verbatim copies of the Program's source code as you
198 | receive it, in any medium, provided that you conspicuously and
199 | appropriately publish on each copy an appropriate copyright notice;
200 | keep intact all notices stating that this License and any
201 | non-permissive terms added in accord with section 7 apply to the code;
202 | keep intact all notices of the absence of any warranty; and give all
203 | recipients a copy of this License along with the Program.
204 |
205 | You may charge any price or no price for each copy that you convey,
206 | and you may offer support or warranty protection for a fee.
207 |
208 | 5. Conveying Modified Source Versions.
209 |
210 | You may convey a work based on the Program, or the modifications to
211 | produce it from the Program, in the form of source code under the
212 | terms of section 4, provided that you also meet all of these conditions:
213 |
214 | a) The work must carry prominent notices stating that you modified
215 | it, and giving a relevant date.
216 |
217 | b) The work must carry prominent notices stating that it is
218 | released under this License and any conditions added under section
219 | 7. This requirement modifies the requirement in section 4 to
220 | "keep intact all notices".
221 |
222 | c) You must license the entire work, as a whole, under this
223 | License to anyone who comes into possession of a copy. This
224 | License will therefore apply, along with any applicable section 7
225 | additional terms, to the whole of the work, and all its parts,
226 | regardless of how they are packaged. This License gives no
227 | permission to license the work in any other way, but it does not
228 | invalidate such permission if you have separately received it.
229 |
230 | d) If the work has interactive user interfaces, each must display
231 | Appropriate Legal Notices; however, if the Program has interactive
232 | interfaces that do not display Appropriate Legal Notices, your
233 | work need not make them do so.
234 |
235 | A compilation of a covered work with other separate and independent
236 | works, which are not by their nature extensions of the covered work,
237 | and which are not combined with it such as to form a larger program,
238 | in or on a volume of a storage or distribution medium, is called an
239 | "aggregate" if the compilation and its resulting copyright are not
240 | used to limit the access or legal rights of the compilation's users
241 | beyond what the individual works permit. Inclusion of a covered work
242 | in an aggregate does not cause this License to apply to the other
243 | parts of the aggregate.
244 |
245 | 6. Conveying Non-Source Forms.
246 |
247 | You may convey a covered work in object code form under the terms
248 | of sections 4 and 5, provided that you also convey the
249 | machine-readable Corresponding Source under the terms of this License,
250 | in one of these ways:
251 |
252 | a) Convey the object code in, or embodied in, a physical product
253 | (including a physical distribution medium), accompanied by the
254 | Corresponding Source fixed on a durable physical medium
255 | customarily used for software interchange.
256 |
257 | b) Convey the object code in, or embodied in, a physical product
258 | (including a physical distribution medium), accompanied by a
259 | written offer, valid for at least three years and valid for as
260 | long as you offer spare parts or customer support for that product
261 | model, to give anyone who possesses the object code either (1) a
262 | copy of the Corresponding Source for all the software in the
263 | product that is covered by this License, on a durable physical
264 | medium customarily used for software interchange, for a price no
265 | more than your reasonable cost of physically performing this
266 | conveying of source, or (2) access to copy the
267 | Corresponding Source from a network server at no charge.
268 |
269 | c) Convey individual copies of the object code with a copy of the
270 | written offer to provide the Corresponding Source. This
271 | alternative is allowed only occasionally and noncommercially, and
272 | only if you received the object code with such an offer, in accord
273 | with subsection 6b.
274 |
275 | d) Convey the object code by offering access from a designated
276 | place (gratis or for a charge), and offer equivalent access to the
277 | Corresponding Source in the same way through the same place at no
278 | further charge. You need not require recipients to copy the
279 | Corresponding Source along with the object code. If the place to
280 | copy the object code is a network server, the Corresponding Source
281 | may be on a different server (operated by you or a third party)
282 | that supports equivalent copying facilities, provided you maintain
283 | clear directions next to the object code saying where to find the
284 | Corresponding Source. Regardless of what server hosts the
285 | Corresponding Source, you remain obligated to ensure that it is
286 | available for as long as needed to satisfy these requirements.
287 |
288 | e) Convey the object code using peer-to-peer transmission, provided
289 | you inform other peers where the object code and Corresponding
290 | Source of the work are being offered to the general public at no
291 | charge under subsection 6d.
292 |
293 | A separable portion of the object code, whose source code is excluded
294 | from the Corresponding Source as a System Library, need not be
295 | included in conveying the object code work.
296 |
297 | A "User Product" is either (1) a "consumer product", which means any
298 | tangible personal property which is normally used for personal, family,
299 | or household purposes, or (2) anything designed or sold for incorporation
300 | into a dwelling. In determining whether a product is a consumer product,
301 | doubtful cases shall be resolved in favor of coverage. For a particular
302 | product received by a particular user, "normally used" refers to a
303 | typical or common use of that class of product, regardless of the status
304 | of the particular user or of the way in which the particular user
305 | actually uses, or expects or is expected to use, the product. A product
306 | is a consumer product regardless of whether the product has substantial
307 | commercial, industrial or non-consumer uses, unless such uses represent
308 | the only significant mode of use of the product.
309 |
310 | "Installation Information" for a User Product means any methods,
311 | procedures, authorization keys, or other information required to install
312 | and execute modified versions of a covered work in that User Product from
313 | a modified version of its Corresponding Source. The information must
314 | suffice to ensure that the continued functioning of the modified object
315 | code is in no case prevented or interfered with solely because
316 | modification has been made.
317 |
318 | If you convey an object code work under this section in, or with, or
319 | specifically for use in, a User Product, and the conveying occurs as
320 | part of a transaction in which the right of possession and use of the
321 | User Product is transferred to the recipient in perpetuity or for a
322 | fixed term (regardless of how the transaction is characterized), the
323 | Corresponding Source conveyed under this section must be accompanied
324 | by the Installation Information. But this requirement does not apply
325 | if neither you nor any third party retains the ability to install
326 | modified object code on the User Product (for example, the work has
327 | been installed in ROM).
328 |
329 | The requirement to provide Installation Information does not include a
330 | requirement to continue to provide support service, warranty, or updates
331 | for a work that has been modified or installed by the recipient, or for
332 | the User Product in which it has been modified or installed. Access to a
333 | network may be denied when the modification itself materially and
334 | adversely affects the operation of the network or violates the rules and
335 | protocols for communication across the network.
336 |
337 | Corresponding Source conveyed, and Installation Information provided,
338 | in accord with this section must be in a format that is publicly
339 | documented (and with an implementation available to the public in
340 | source code form), and must require no special password or key for
341 | unpacking, reading or copying.
342 |
343 | 7. Additional Terms.
344 |
345 | "Additional permissions" are terms that supplement the terms of this
346 | License by making exceptions from one or more of its conditions.
347 | Additional permissions that are applicable to the entire Program shall
348 | be treated as though they were included in this License, to the extent
349 | that they are valid under applicable law. If additional permissions
350 | apply only to part of the Program, that part may be used separately
351 | under those permissions, but the entire Program remains governed by
352 | this License without regard to the additional permissions.
353 |
354 | When you convey a copy of a covered work, you may at your option
355 | remove any additional permissions from that copy, or from any part of
356 | it. (Additional permissions may be written to require their own
357 | removal in certain cases when you modify the work.) You may place
358 | additional permissions on material, added by you to a covered work,
359 | for which you have or can give appropriate copyright permission.
360 |
361 | Notwithstanding any other provision of this License, for material you
362 | add to a covered work, you may (if authorized by the copyright holders of
363 | that material) supplement the terms of this License with terms:
364 |
365 | a) Disclaiming warranty or limiting liability differently from the
366 | terms of sections 15 and 16 of this License; or
367 |
368 | b) Requiring preservation of specified reasonable legal notices or
369 | author attributions in that material or in the Appropriate Legal
370 | Notices displayed by works containing it; or
371 |
372 | c) Prohibiting misrepresentation of the origin of that material, or
373 | requiring that modified versions of such material be marked in
374 | reasonable ways as different from the original version; or
375 |
376 | d) Limiting the use for publicity purposes of names of licensors or
377 | authors of the material; or
378 |
379 | e) Declining to grant rights under trademark law for use of some
380 | trade names, trademarks, or service marks; or
381 |
382 | f) Requiring indemnification of licensors and authors of that
383 | material by anyone who conveys the material (or modified versions of
384 | it) with contractual assumptions of liability to the recipient, for
385 | any liability that these contractual assumptions directly impose on
386 | those licensors and authors.
387 |
388 | All other non-permissive additional terms are considered "further
389 | restrictions" within the meaning of section 10. If the Program as you
390 | received it, or any part of it, contains a notice stating that it is
391 | governed by this License along with a term that is a further
392 | restriction, you may remove that term. If a license document contains
393 | a further restriction but permits relicensing or conveying under this
394 | License, you may add to a covered work material governed by the terms
395 | of that license document, provided that the further restriction does
396 | not survive such relicensing or conveying.
397 |
398 | If you add terms to a covered work in accord with this section, you
399 | must place, in the relevant source files, a statement of the
400 | additional terms that apply to those files, or a notice indicating
401 | where to find the applicable terms.
402 |
403 | Additional terms, permissive or non-permissive, may be stated in the
404 | form of a separately written license, or stated as exceptions;
405 | the above requirements apply either way.
406 |
407 | 8. Termination.
408 |
409 | You may not propagate or modify a covered work except as expressly
410 | provided under this License. Any attempt otherwise to propagate or
411 | modify it is void, and will automatically terminate your rights under
412 | this License (including any patent licenses granted under the third
413 | paragraph of section 11).
414 |
415 | However, if you cease all violation of this License, then your
416 | license from a particular copyright holder is reinstated (a)
417 | provisionally, unless and until the copyright holder explicitly and
418 | finally terminates your license, and (b) permanently, if the copyright
419 | holder fails to notify you of the violation by some reasonable means
420 | prior to 60 days after the cessation.
421 |
422 | Moreover, your license from a particular copyright holder is
423 | reinstated permanently if the copyright holder notifies you of the
424 | violation by some reasonable means, this is the first time you have
425 | received notice of violation of this License (for any work) from that
426 | copyright holder, and you cure the violation prior to 30 days after
427 | your receipt of the notice.
428 |
429 | Termination of your rights under this section does not terminate the
430 | licenses of parties who have received copies or rights from you under
431 | this License. If your rights have been terminated and not permanently
432 | reinstated, you do not qualify to receive new licenses for the same
433 | material under section 10.
434 |
435 | 9. Acceptance Not Required for Having Copies.
436 |
437 | You are not required to accept this License in order to receive or
438 | run a copy of the Program. Ancillary propagation of a covered work
439 | occurring solely as a consequence of using peer-to-peer transmission
440 | to receive a copy likewise does not require acceptance. However,
441 | nothing other than this License grants you permission to propagate or
442 | modify any covered work. These actions infringe copyright if you do
443 | not accept this License. Therefore, by modifying or propagating a
444 | covered work, you indicate your acceptance of this License to do so.
445 |
446 | 10. Automatic Licensing of Downstream Recipients.
447 |
448 | Each time you convey a covered work, the recipient automatically
449 | receives a license from the original licensors, to run, modify and
450 | propagate that work, subject to this License. You are not responsible
451 | for enforcing compliance by third parties with this License.
452 |
453 | An "entity transaction" is a transaction transferring control of an
454 | organization, or substantially all assets of one, or subdividing an
455 | organization, or merging organizations. If propagation of a covered
456 | work results from an entity transaction, each party to that
457 | transaction who receives a copy of the work also receives whatever
458 | licenses to the work the party's predecessor in interest had or could
459 | give under the previous paragraph, plus a right to possession of the
460 | Corresponding Source of the work from the predecessor in interest, if
461 | the predecessor has it or can get it with reasonable efforts.
462 |
463 | You may not impose any further restrictions on the exercise of the
464 | rights granted or affirmed under this License. For example, you may
465 | not impose a license fee, royalty, or other charge for exercise of
466 | rights granted under this License, and you may not initiate litigation
467 | (including a cross-claim or counterclaim in a lawsuit) alleging that
468 | any patent claim is infringed by making, using, selling, offering for
469 | sale, or importing the Program or any portion of it.
470 |
471 | 11. Patents.
472 |
473 | A "contributor" is a copyright holder who authorizes use under this
474 | License of the Program or a work on which the Program is based. The
475 | work thus licensed is called the contributor's "contributor version".
476 |
477 | A contributor's "essential patent claims" are all patent claims
478 | owned or controlled by the contributor, whether already acquired or
479 | hereafter acquired, that would be infringed by some manner, permitted
480 | by this License, of making, using, or selling its contributor version,
481 | but do not include claims that would be infringed only as a
482 | consequence of further modification of the contributor version. For
483 | purposes of this definition, "control" includes the right to grant
484 | patent sublicenses in a manner consistent with the requirements of
485 | this License.
486 |
487 | Each contributor grants you a non-exclusive, worldwide, royalty-free
488 | patent license under the contributor's essential patent claims, to
489 | make, use, sell, offer for sale, import and otherwise run, modify and
490 | propagate the contents of its contributor version.
491 |
492 | In the following three paragraphs, a "patent license" is any express
493 | agreement or commitment, however denominated, not to enforce a patent
494 | (such as an express permission to practice a patent or covenant not to
495 | sue for patent infringement). To "grant" such a patent license to a
496 | party means to make such an agreement or commitment not to enforce a
497 | patent against the party.
498 |
499 | If you convey a covered work, knowingly relying on a patent license,
500 | and the Corresponding Source of the work is not available for anyone
501 | to copy, free of charge and under the terms of this License, through a
502 | publicly available network server or other readily accessible means,
503 | then you must either (1) cause the Corresponding Source to be so
504 | available, or (2) arrange to deprive yourself of the benefit of the
505 | patent license for this particular work, or (3) arrange, in a manner
506 | consistent with the requirements of this License, to extend the patent
507 | license to downstream recipients. "Knowingly relying" means you have
508 | actual knowledge that, but for the patent license, your conveying the
509 | covered work in a country, or your recipient's use of the covered work
510 | in a country, would infringe one or more identifiable patents in that
511 | country that you have reason to believe are valid.
512 |
513 | If, pursuant to or in connection with a single transaction or
514 | arrangement, you convey, or propagate by procuring conveyance of, a
515 | covered work, and grant a patent license to some of the parties
516 | receiving the covered work authorizing them to use, propagate, modify
517 | or convey a specific copy of the covered work, then the patent license
518 | you grant is automatically extended to all recipients of the covered
519 | work and works based on it.
520 |
521 | A patent license is "discriminatory" if it does not include within
522 | the scope of its coverage, prohibits the exercise of, or is
523 | conditioned on the non-exercise of one or more of the rights that are
524 | specifically granted under this License. You may not convey a covered
525 | work if you are a party to an arrangement with a third party that is
526 | in the business of distributing software, under which you make payment
527 | to the third party based on the extent of your activity of conveying
528 | the work, and under which the third party grants, to any of the
529 | parties who would receive the covered work from you, a discriminatory
530 | patent license (a) in connection with copies of the covered work
531 | conveyed by you (or copies made from those copies), or (b) primarily
532 | for and in connection with specific products or compilations that
533 | contain the covered work, unless you entered into that arrangement,
534 | or that patent license was granted, prior to 28 March 2007.
535 |
536 | Nothing in this License shall be construed as excluding or limiting
537 | any implied license or other defenses to infringement that may
538 | otherwise be available to you under applicable patent law.
539 |
540 | 12. No Surrender of Others' Freedom.
541 |
542 | If conditions are imposed on you (whether by court order, agreement or
543 | otherwise) that contradict the conditions of this License, they do not
544 | excuse you from the conditions of this License. If you cannot convey a
545 | covered work so as to satisfy simultaneously your obligations under this
546 | License and any other pertinent obligations, then as a consequence you may
547 | not convey it at all. For example, if you agree to terms that obligate you
548 | to collect a royalty for further conveying from those to whom you convey
549 | the Program, the only way you could satisfy both those terms and this
550 | License would be to refrain entirely from conveying the Program.
551 |
552 | 13. Use with the GNU Affero General Public License.
553 |
554 | Notwithstanding any other provision of this License, you have
555 | permission to link or combine any covered work with a work licensed
556 | under version 3 of the GNU Affero General Public License into a single
557 | combined work, and to convey the resulting work. The terms of this
558 | License will continue to apply to the part which is the covered work,
559 | but the special requirements of the GNU Affero General Public License,
560 | section 13, concerning interaction through a network will apply to the
561 | combination as such.
562 |
563 | 14. Revised Versions of this License.
564 |
565 | The Free Software Foundation may publish revised and/or new versions of
566 | the GNU General Public License from time to time. Such new versions will
567 | be similar in spirit to the present version, but may differ in detail to
568 | address new problems or concerns.
569 |
570 | Each version is given a distinguishing version number. If the
571 | Program specifies that a certain numbered version of the GNU General
572 | Public License "or any later version" applies to it, you have the
573 | option of following the terms and conditions either of that numbered
574 | version or of any later version published by the Free Software
575 | Foundation. If the Program does not specify a version number of the
576 | GNU General Public License, you may choose any version ever published
577 | by the Free Software Foundation.
578 |
579 | If the Program specifies that a proxy can decide which future
580 | versions of the GNU General Public License can be used, that proxy's
581 | public statement of acceptance of a version permanently authorizes you
582 | to choose that version for the Program.
583 |
584 | Later license versions may give you additional or different
585 | permissions. However, no additional obligations are imposed on any
586 | author or copyright holder as a result of your choosing to follow a
587 | later version.
588 |
589 | 15. Disclaimer of Warranty.
590 |
591 | THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
592 | APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
593 | HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
594 | OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
595 | THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
596 | PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
597 | IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
598 | ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
599 |
600 | 16. Limitation of Liability.
601 |
602 | IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
603 | WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
604 | THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
605 | GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
606 | USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
607 | DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
608 | PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
609 | EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
610 | SUCH DAMAGES.
611 |
612 | 17. Interpretation of Sections 15 and 16.
613 |
614 | If the disclaimer of warranty and limitation of liability provided
615 | above cannot be given local legal effect according to their terms,
616 | reviewing courts shall apply local law that most closely approximates
617 | an absolute waiver of all civil liability in connection with the
618 | Program, unless a warranty or assumption of liability accompanies a
619 | copy of the Program in return for a fee.
620 |
621 | END OF TERMS AND CONDITIONS
622 |
623 | How to Apply These Terms to Your New Programs
624 |
625 | If you develop a new program, and you want it to be of the greatest
626 | possible use to the public, the best way to achieve this is to make it
627 | free software which everyone can redistribute and change under these terms.
628 |
629 | To do so, attach the following notices to the program. It is safest
630 | to attach them to the start of each source file to most effectively
631 | state the exclusion of warranty; and each file should have at least
632 | the "copyright" line and a pointer to where the full notice is found.
633 |
634 |
635 | Copyright (C) <year>  <name of author>
636 |
637 | This program is free software: you can redistribute it and/or modify
638 | it under the terms of the GNU General Public License as published by
639 | the Free Software Foundation, either version 3 of the License, or
640 | (at your option) any later version.
641 |
642 | This program is distributed in the hope that it will be useful,
643 | but WITHOUT ANY WARRANTY; without even the implied warranty of
644 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
645 | GNU General Public License for more details.
646 |
647 | You should have received a copy of the GNU General Public License
648 | along with this program.  If not, see <https://www.gnu.org/licenses/>.
649 |
650 | Also add information on how to contact you by electronic and paper mail.
651 |
652 | If the program does terminal interaction, make it output a short
653 | notice like this when it starts in an interactive mode:
654 |
655 | <program>  Copyright (C) <year>  <name of author>
656 | This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
657 | This is free software, and you are welcome to redistribute it
658 | under certain conditions; type `show c' for details.
659 |
660 | The hypothetical commands `show w' and `show c' should show the appropriate
661 | parts of the General Public License. Of course, your program's commands
662 | might be different; for a GUI interface, you would use an "about box".
663 |
664 | You should also get your employer (if you work as a programmer) or school,
665 | if any, to sign a "copyright disclaimer" for the program, if necessary.
666 | For more information on this, and how to apply and follow the GNU GPL, see
667 | <https://www.gnu.org/licenses/>.
668 |
669 | The GNU General Public License does not permit incorporating your program
670 | into proprietary programs. If your program is a subroutine library, you
671 | may consider it more useful to permit linking proprietary applications with
672 | the library. If this is what you want to do, use the GNU Lesser General
673 | Public License instead of this License. But first, please read
674 | <https://www.gnu.org/licenses/why-not-lgpl.html>.
675 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Modex
2 |
3 | *Modex* is a *Volatility 3* plugin that extracts a module, as complete as possible, from a Windows 10 memory dump. To do so, the plugin collects all the pages of the requested module that are mapped in any process and then performs a mixture to obtain a single file containing as many of the module's pages as possible.
4 |
5 | [](https://www.gnu.org/licenses/gpl-3.0)
6 |
7 | ## Installation
8 |
9 | In order to run the plugin, [Python 3](https://www.python.org/downloads/) (version 3.9 or greater) and [Volatility 3](https://github.com/volatilityfoundation/volatility3) have to be installed on the machine where *Modex* is going to be used, along with the following Python packages: *py-tlsh* and *tabulate*. Additionally, the *Modex* plugin depends on the *SimplePteEnumerator* plugin (the file named *simple_pteenum.py* that can be found [here](https://github.com/f-block/volatility-plugins)). The *SimplePteEnumerator* plugin allows *Modex* to know which pages are mapped in a given range inside the private address space of a process, and to differentiate between private and shared pages.
10 |
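The required Python packages can usually be installed with *pip*; for example (a sketch, assuming the *pip3* command is available on your machine):

```bash
pip3 install py-tlsh tabulate
```
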
11 | After installing *Volatility 3*, the *modex.py* file from this repository and all the Python files available [here](https://github.com/f-block/volatility-plugins) must be placed in the *volatility3/framework/plugins/windows* directory, which is inside the cloned *Volatility 3* repository.
12 |
13 | ## Usage
14 |
15 | To use the *Modex* plugin, you must indicate the name of the module that you want to extract. Below is the command to extract the *kernel32.dll* module, as complete as possible, from a given memory dump:
16 |
17 | ```bash
18 | python3 vol.py -f MemoryDumpFile windows.modex --module kernel32.dll
19 | ```
20 |
21 | After running a command like the one above, *Modex* will generate a directory containing the following files:
22 |
23 | * *.dmp file*: The module after performing the mixture.
24 | * *.json file*: The metadata about the extracted module.
25 | * *.txt file*: A log file with information about the *Modex* execution.
26 |
27 | Sometimes, *Modex* finds anomalies. An anomaly happens when several shared pages at the same offset have different contents. In such cases, *Modex* detects the anomaly and reports it. If you want to dump the pages involved in the anomalies, you can supply the *--dump-anomalies* option. For more information, run the following command: *python3 vol.py windows.modex -h*.
28 |
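For instance, a sketch of the earlier extraction command with anomaly dumping enabled (*MemoryDumpFile* is a placeholder, as above):

```bash
python3 vol.py -f MemoryDumpFile windows.modex --module kernel32.dll --dump-anomalies
```
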
29 | **Note:** In the tests we have performed so far, *Modex* finishes with an *ImportError* (*ImportError: sys.meta_path is None, Python is likely shutting down*); however, this error does not affect the results. We are working to solve this issue, but it is irrelevant to the final *Modex* output.
30 |
31 | You can also check if the output generated by *Modex* is valid. For that, run the *tests.py* program as follows:
32 |
33 | ```bash
34 | python3 tests.py modex_output
35 | ```
36 | For more information about the *tests.py* program, use the command *python3 tests.py -h*.
37 |
38 | # InterModex
39 |
40 | In addition to the *Modex* plugin, this repository also contains the *InterModex* tool (*inter_modex.py*). *InterModex* uses the *Modex* plugin to extract the same module from different memory dumps taken from the same Windows 10 machine (before it was turned off), and then performs a mixture of all the extracted modules.
41 |
42 | ## Installation
43 | *InterModex* is a Python 3 command line tool that depends on the *Modex* plugin; as a result, the *Modex* plugin needs to be installed in order to use *InterModex*. Additionally, *Volatility 3* has to be installed as a Python package, which is not necessary for *Modex* but is for *InterModex*. The installation as a Python package must be done after copying the necessary files (mentioned in the *Installation* section of *Modex*) into the *volatility3/framework/plugins/windows* directory. If *Volatility 3* was installed as a package before copying those files, just run the following commands from the cloned *Volatility 3* repository to make it aware of the new plugins:
44 |
45 | ```bash
46 | python3 setup.py build
47 | python3 setup.py install
48 | ```
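
Depending on your *Volatility 3* version, installing it with *pip* from the root of the cloned repository may also work (a suggestion, not a documented requirement of this project):

```bash
pip3 install .
```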
49 |
50 | *InterModex* offers the option to perform a derelocation process on the final module. For this optional derelocation process, *InterModex* depends on [*SUM*](https://github.com/reverseame/similarity-unrelocated-module) (*Similarity Unrelocated Module*), so *SUM* has to be installed on the system where *InterModex* will be used. All the information needed to install *SUM* is in its [repository](https://github.com/reverseame/similarity-unrelocated-module). One aspect to take into account is that *SUM* is a Python 2 tool designed to work on Linux systems; as a result, we recommend using *InterModex* on a Linux system if you want to perform a derelocation process on the extracted module.
51 |
52 | ## Usage
53 |
54 | Here is how to use the *InterModex* tool:
55 |
56 | ```
57 | usage: inter_modex.py [-h] [-a] [-d MEMORY_DUMPS_DIRECTORY] [--detect] [-l {DEBUG,INFO,WARNING,ERROR,CRITICAL}] [-m MODULE] [-o MODEX_OUTPUTS_DIRECTORY] [-p] [-r]
58 | [-s SUM_PATH] [-t VOLATILITY_PATH] [-v]
59 |
60 | Extracts a module as complete as possible from multiple memory dumps
61 |
62 | optional arguments:
63 | -h, --help show this help message and exit
64 | -a, --dump-anomalies When there are different shared pages at the same offset, dump those pages
65 | -d MEMORY_DUMPS_DIRECTORY, --memory-dumps-directory MEMORY_DUMPS_DIRECTORY
66 | directory where the memory dumps are (the Modex plugin will be called)
67 | --detect detect the presence of the DLL hijacking technique
68 | -l {DEBUG,INFO,WARNING,ERROR,CRITICAL}, --log-level {DEBUG,INFO,WARNING,ERROR,CRITICAL}
69 | logging level
70 | -m MODULE, --module MODULE
71 | name of the module to extract
72 | -o MODEX_OUTPUTS_DIRECTORY, --modex-outputs-directory MODEX_OUTPUTS_DIRECTORY
73 | directory where the Modex outputs are (the Modex plugin will not be called)
74 | -p, --perform-derelocation
75 | perform a derelocation process after extracting the module
76 | -r, --remove-modex-outputs
77 | remove the outputs generated by the Modex plugin (only if the Modex plugin is called)
78 | -s SUM_PATH, --sum-path SUM_PATH
79 | path where the sum.py file is
80 | -t VOLATILITY_PATH, --volatility-path VOLATILITY_PATH
81 | path where the vol.py file is
82 | -v, --version show the program version and exit
83 | ```
84 |
85 | As a concrete example, in order to extract the *kernel32.dll* module as complete as possible, taking into account data from several memory dumps that are all inside the same directory, you can run the following command:
86 |
87 | ```bash
88 | python3 inter_modex.py --memory-dumps-directory MemoryDumpsDirectory --volatility-path VolatilityPath --module kernel32.dll
89 | ```
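
If you also want to derelocate the extracted module with *SUM*, a sketch of the same command with the derelocation options added (*SumPath* is a placeholder for the path to the *sum.py* file):

```bash
python3 inter_modex.py --memory-dumps-directory MemoryDumpsDirectory --volatility-path VolatilityPath --module kernel32.dll --perform-derelocation --sum-path SumPath
```
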
90 | Additionally, for *InterModex* to work, the *python3* command has to be available on the command line (and the *python2* command as well if you want to derelocate the extracted module with *SUM*). Finally, the outputs produced by *InterModex* are very similar to the ones generated by *Modex*, and the *tests.py* program can also be used to check whether an *InterModex* output is correct.
91 |
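Similarly, if you already have *Modex* outputs and do not want the plugin to be called again, a sketch using the *--modex-outputs-directory* option (*ModexOutputsDirectory* is a placeholder; check *python3 inter_modex.py -h* for any other options you may need):

```bash
python3 inter_modex.py --modex-outputs-directory ModexOutputsDirectory
```
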
92 | # DLL Hijacking Detection
93 |
94 | In addition to extracting a module as complete as possible, *Modex* and *InterModex* can also be used to detect the *DLL hijacking* technique. For that, the *--detect* option must be supplied. When this option is provided, the output is a directory that contains a JSON file with information about the detection and a text file that logs details about the execution.
95 |
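For example, a sketch of a detection run using the *Modex* plugin (the same *--detect* option is available in *InterModex*; *MemoryDumpFile* is a placeholder):

```bash
python3 vol.py -f MemoryDumpFile windows.modex --module kernel32.dll --detect
```
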
96 | # License
97 |
98 | Licensed under the [GNU GPLv3](LICENSE) license.
99 |
--------------------------------------------------------------------------------
/derelocator.py:
--------------------------------------------------------------------------------
1 | import os
2 | import logging
3 | import argparse
4 | import traceback
5 | import subprocess
6 | from typing import Dict, Any, List
7 |
8 | logger = logging.getLogger(__name__)
9 |
10 |
11 | def perform_derelocation(sum_path: str, module_path: str, output_directory: str) -> None:
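    """Run SUM on the given module and rename the derelocated .dmp file that SUM produces."""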
12 | logger.debug(f'Performing a derelocation process in the module {module_path}')
13 | elements_inside_output_directory_before: List[str] = os.listdir(output_directory)
14 | sum_command = ['python2', sum_path, module_path, '--dump-dir', output_directory]
15 | with subprocess.Popen(sum_command, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL) as sum_tool:
16 | sum_exit_code = sum_tool.wait()
17 | if sum_exit_code == 0:
18 | print(f'SUM executed successfully')
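            # SUM writes the derelocated module as a new file in the output directory:
            # find it by comparing the directory listings taken before and after running SUM,
            # and rename the first newly created .dmp file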
19 | is_module_renamed: bool = False
20 | elements_inside_output_directory_after: List[str] = os.listdir(output_directory)
21 | for element_inside_output_directory_after in elements_inside_output_directory_after:
22 | if not is_module_renamed and element_inside_output_directory_after not in elements_inside_output_directory_before and element_inside_output_directory_after.endswith(
23 | '.dmp'):
24 | os.rename(os.path.join(output_directory, element_inside_output_directory_after),
25 | os.path.join(output_directory, 'module_after_derelocation.dmp'))
26 | is_module_renamed = True
27 | else:
28 | print(f'The execution of SUM was not successful (exit code {sum_exit_code})')
29 |
30 |
31 | def validate_arguments() -> Dict[str, Any]:
32 | """Parse and validate command line arguments."""
33 | arg_parser = argparse.ArgumentParser(
34 | description='Performs a derelocation process on a given module.')
35 | arg_parser.version = '0.1.0'
36 | arg_parser.add_argument('module_path',
37 | help='module path')
38 | arg_parser.add_argument('output_directory',
39 | help='directory where the derelocated module will be placed')
40 | arg_parser.add_argument('sum_path',
41 | help='path where the sum.py file is')
42 | arg_parser.add_argument('-l',
43 | '--log-level',
44 | choices=['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'],
45 | default='INFO',
46 | help='logging level')
47 | arg_parser.add_argument('-v',
48 | '--version',
49 | action='version',
50 | help='show the program version and exit')
51 |
52 | args = arg_parser.parse_args()
53 |
54 | module_path = args.module_path
55 | output_directory = args.output_directory
56 | sum_path = args.sum_path
57 |
58 | if not os.path.isfile(module_path):
59 | raise FileNotFoundError(
60 | f'The module path supplied ({module_path}) does not correspond to a file')
61 |
62 | if not os.path.exists(output_directory):
63 | raise FileNotFoundError(
64 | f'The output directory supplied ({output_directory}) does not exist')
65 |
66 | if not os.path.isfile(sum_path):
67 | raise FileNotFoundError(
68 | f'The path for the sum.py file supplied ({sum_path}) does not correspond to a file')
69 |
70 | module_path = os.path.abspath(module_path)
71 | output_directory = os.path.abspath(output_directory)
72 | sum_path = os.path.abspath(sum_path)
73 |
74 | if args.log_level == 'DEBUG':
75 | log_level_supplied = logging.DEBUG
76 | elif args.log_level == 'INFO':
77 | log_level_supplied = logging.INFO
78 | elif args.log_level == 'WARNING':
79 | log_level_supplied = logging.WARNING
80 | elif args.log_level == 'ERROR':
81 | log_level_supplied = logging.ERROR
82 | elif args.log_level == 'CRITICAL':
83 | log_level_supplied = logging.CRITICAL
84 | else:
85 | raise ValueError(
86 | f'Log level not supported (you supplied {args.log_level}). These are the ones supported: DEBUG, INFO, WARNING, ERROR, CRITICAL')
87 |
88 | logging.basicConfig(level=log_level_supplied)
89 | arguments: Dict[str, Any] = {'module_path': module_path, 'output_directory': output_directory, 'sum_path': sum_path}
90 | return arguments
91 |
92 |
93 | def execute() -> None:
94 | try:
95 | validated_arguments: Dict[str, Any] = validate_arguments()
96 | perform_derelocation(validated_arguments['sum_path'], validated_arguments['module_path'],
97 | validated_arguments['output_directory'])
98 | except Exception as exception:
99 | print(f'An error occurred ({exception}). Here are more details about the error:\n')
100 | print(traceback.format_exc())
101 |
102 |
103 | def main():
104 | execute()
105 |
106 |
107 | if __name__ == '__main__':
108 | main()
109 |
--------------------------------------------------------------------------------
/inter_modex.py:
--------------------------------------------------------------------------------
1 | import os
2 | import logging
3 | import argparse
4 | import traceback
5 | import subprocess
6 | import shutil
7 | import json
8 | from typing import Dict, Any, List
9 |
10 | from modex import Module, Page, get_current_utc_timestamp, create_logger, check_if_all_elements_are_equal, \
11 | check_if_modules_can_be_mixed, mix_modules, get_detection_information_filename, log_detection_process_common_parts, \
12 | get_most_common_element
13 |
14 |
15 | class ModexDetectionAttempt:
16 | def __init__(self, memory_dump_location: str, mapped_modules: List[Module], result: bool,
17 | suspicious_processes: List[int]):
18 | self.memory_dump_location: str = memory_dump_location
19 | self.mapped_modules: List[Module] = mapped_modules
20 | self.result: bool = result
21 | self.suspicious_processes: List[int] = suspicious_processes
22 |
23 |
24 | class ModexExtraction:
25 | def __init__(self, modex_output: str, module_path: str, metadata_path: str):
26 | self.modex_output: str = modex_output
27 | self.module_path: str = module_path
28 | self.metadata_path: str = metadata_path
29 |
30 |
31 | def get_logger(output_directory: str, log_level):
32 | log_file_path = os.path.join(output_directory, 'inter_modex_log.txt')
33 | logger = create_logger(log_file_path, 'inter_modex_logger', log_level)
34 | logger.propagate = False
35 | return logger
36 |
37 |
38 | def obtain_modex_outputs_directory_name(output_directory: str) -> str:
39 | """Obtain the directory name where the outputs generated after calling the Modex plugin will be stored."""
40 | return os.path.join(output_directory, 'modex_outputs')
41 |
42 |
43 | def create_output_directory(output_directory: str, create_modex_outputs_directory: bool) -> None:
44 | """Create the directory that will contain the InterModex output."""
45 | if create_modex_outputs_directory:
46 | os.makedirs(obtain_modex_outputs_directory_name(output_directory))
47 | else:
48 | os.makedirs(output_directory)
49 |
50 |
51 | def get_file_from_modex_output(modex_output: str, extension: str) -> str:
52 | """Get a file stored inside a Modex output which is not hidden and has a particular extension."""
53 | elements_inside_modex_output: List[str] = os.listdir(modex_output)
54 | for element_inside_modex_output in elements_inside_modex_output:
55 | if os.path.isfile(
56 | os.path.join(modex_output, element_inside_modex_output)) and not element_inside_modex_output.startswith(
57 | '.') and element_inside_modex_output.endswith(extension):
58 | return os.path.join(modex_output, element_inside_modex_output)
59 |
60 |
61 | def get_not_hidden_files_inside_directory(directory: str) -> List[str]:
62 |     """Get the non-hidden files inside a directory (the paths returned are prefixed with the directory)."""
63 |     not_hidden_files_inside_directory: List[str] = []
64 |     for element_inside_directory in os.listdir(directory):
65 |         if os.path.isfile(os.path.join(directory, element_inside_directory)) \
66 |                 and not element_inside_directory.startswith('.'):
67 |             not_hidden_files_inside_directory.append(os.path.join(directory, element_inside_directory))
68 |     return not_hidden_files_inside_directory
73 |
74 |
75 | def convert_page_dictionary_to_page_object(page: Dict[str, Any], module_path: str, module_base_address: int) -> Page:
76 | return Page(module_base_address + page['offset'], page['size'], 'True' if page['is_shared'] else 'False',
77 | module_path, page['sha_256_digest'], page['is_anomalous'])
78 |
79 |
80 | def convert_modex_extraction_to_module(modex_extraction: ModexExtraction) -> Module:
81 | with open(modex_extraction.metadata_path) as metadata_file:
82 | metadata: Dict[str, Any] = json.load(metadata_file)
83 |     # Path inside the system whose memory contents were dumped (do not confuse with the module path inside the modex extraction)
84 | module_path: str = metadata['module_path']
85 | module_base_address: int = int(metadata['module_base_address'], 16)
86 | module_size: int = metadata['module_size']
87 | pages: List[Page] = []
88 | for page_as_dictionary in metadata['pages']:
89 | pages.append(convert_page_dictionary_to_page_object(page_as_dictionary, modex_extraction.module_path,
90 | module_base_address))
91 | # The name and the process_id are irrelevant for InterModex
92 | return Module('', module_path, module_base_address, module_size, 0, modex_extraction.module_path, pages)
93 |
94 |
95 | def check_if_modex_ran_successfully(modex_output_directory: str, was_detect_option_supplied: bool) -> bool:
96 | """Check if an output from the Modex plugin contains the files it should contain for a successful execution."""
97 |     # Ignoring directories, an output from the Modex plugin (without the --detect option) should contain 3 files if the execution was successful:
98 | # - A .dmp file
99 | # - A .json file
100 | # - A .txt file
101 | # Note: If the --detect option was supplied to Modex, then a .dmp file will not be created
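  |     # Illustrative layout of a successful Modex output without the --detect option (the module name is hypothetical):
  |     #   modex_output_<timestamp>/
  |     #     kernel32.dll.dmp
  |     #     kernel32.dll.description.json
  |     #     modex_log.txt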
102 |
103 | number_of_generated_files: int = 2 if was_detect_option_supplied else 3
104 | extensions: List[str] = ['.json', '.txt'] if was_detect_option_supplied else ['.dmp', '.json', '.txt']
105 | not_hidden_files_inside_modex_output: List[str] = get_not_hidden_files_inside_directory(modex_output_directory)
106 | if len(not_hidden_files_inside_modex_output) == number_of_generated_files:
107 | presence_of_extensions: List[bool] = [False, False] if was_detect_option_supplied else [False, False, False]
108 | for file_inside_modex_output in not_hidden_files_inside_modex_output:
109 | file_has_required_extension: bool = False
110 | i: int = 0
111 | while i < len(extensions) and not file_has_required_extension:
112 | if file_inside_modex_output.endswith(extensions[i]):
113 | presence_of_extensions[i] = True
114 | file_has_required_extension = True
115 | i += 1
116 |         if all(presence_of_extensions):
117 | return True
118 | else:
119 | return False
120 | else:
121 | return False
122 |
123 |
124 | def get_successful_modex_outputs(modex_outputs_directory: str, was_detect_option_supplied: bool, logger) -> List[str]:
125 | modex_outputs: List[str] = os.listdir(modex_outputs_directory)
126 | for i in range(0, len(modex_outputs)):
127 | modex_outputs[i] = os.path.join(modex_outputs_directory, modex_outputs[i])
128 |
129 | logger.info('\nModex outputs before checking if the Modex executions were successful:')
130 | for modex_output in modex_outputs:
131 | if os.path.isdir(modex_output):
132 | logger.info(f'\t{modex_output}')
133 |
134 | logger.info('\nChecking if the Modex executions were successful:')
135 | successful_modex_outputs: List[str] = []
136 | for modex_output in modex_outputs:
137 | if os.path.isdir(modex_output):
138 | if check_if_modex_ran_successfully(modex_output, was_detect_option_supplied):
139 | successful_modex_outputs.append(modex_output)
140 | logger.info(f'\t{modex_output} meets the requirements for a successful Modex execution')
141 | else:
142 | logger.info(
143 |                     f'\t{modex_output} does not meet the requirements for a successful Modex execution. As a result, it will not be considered.')
144 | return successful_modex_outputs
145 |
146 |
147 | def convert_mapped_modules_from_json_to_objects(mapped_modules: List[Dict[str, Any]]) -> List[Module]:
148 | mapped_modules_converted: List[Module] = []
149 | for mapped_module in mapped_modules:
150 | mapped_modules_converted.append(
151 | Module('', mapped_module['path'], mapped_module['base_address'], mapped_module['size'],
152 | mapped_module['process_id'], '', []))
153 | return mapped_modules_converted
154 |
155 |
156 | def detect_dll_hijacking_across_several_memory_dumps(modex_detection_attempts: List[ModexDetectionAttempt],
157 | output_directory: str, logger) -> None:
158 | detection_info: Dict[str, Any] = {}
159 | mapped_modules: List[Module] = []
160 | for modex_detection_attempt in modex_detection_attempts:
161 | mapped_modules += modex_detection_attempt.mapped_modules
162 |
163 | most_common_path: str = get_most_common_element([module.path.casefold() for module in mapped_modules])
164 | most_common_size: int = get_most_common_element([module.size for module in mapped_modules])
165 |
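  |     # As in detect_dll_hijacking_inside_one_memory_dump (modex.py), any module whose path or size
  |     # deviates from the most common ones across all the memory dumps is marked as suspicious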
166 | suspicious_processes: Dict[str, Any] = {} # The keys are memory dumps
167 | for modex_detection_attempt in modex_detection_attempts:
168 | for module in modex_detection_attempt.mapped_modules:
169 | if module.path.casefold() != most_common_path or module.size != most_common_size:
170 | if modex_detection_attempt.memory_dump_location not in suspicious_processes.keys():
171 | suspicious_processes[modex_detection_attempt.memory_dump_location] = [module.process_id]
172 | else:
173 | suspicious_processes[modex_detection_attempt.memory_dump_location].append(module.process_id)
174 |
175 |     detection_info['dll_hijacking_detection_result'] = bool(suspicious_processes)
176 | detection_info['suspicious_processes'] = suspicious_processes
177 |
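  |     # Illustrative shape of the resulting detection information (the values are hypothetical):
  |     #   {"dll_hijacking_detection_result": true,
  |     #    "suspicious_processes": {"<memory dump location>": [1234, 5678]}}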
178 | detection_info_path: str = os.path.join(output_directory, get_detection_information_filename())
179 | with open(detection_info_path, 'w') as detection_info_file:
180 | json.dump(detection_info, detection_info_file, ensure_ascii=False, indent=4)
181 |
182 | log_detection_process_common_parts(logger)
183 |
184 |
185 | def perform_detection(modex_outputs_directory: str, output_directory: str, logger) -> None:
186 |     successful_modex_outputs: List[str] = get_successful_modex_outputs(modex_outputs_directory, True, logger)
187 | modex_detection_attempts: List[ModexDetectionAttempt] = []
188 |
189 | # Get the .json files from each successful Modex output and perform the detection
190 | for successful_modex_output in successful_modex_outputs:
191 | json_file: str = get_file_from_modex_output(successful_modex_output, '.json')
192 | if json_file is not None:
193 | with open(json_file) as detection_info_file:
194 | detection_info: Dict[str, Any] = json.load(detection_info_file)
195 | mapped_modules: List[Module] = convert_mapped_modules_from_json_to_objects(detection_info['mapped_modules'])
196 | modex_detection_attempts.append(
197 | ModexDetectionAttempt(detection_info['memory_dump_location'], mapped_modules,
198 | detection_info['dll_hijacking_detection_result'],
199 | detection_info['suspicious_processes']))
200 | else:
201 | logger.info(
202 | f'\tThe Modex output {successful_modex_output} was considered successful, but the .json file does not exist, and it must exist for a Modex output to be successful. As a result, this output will not be considered in the detection process.')
203 |
204 | detect_dll_hijacking_across_several_memory_dumps(modex_detection_attempts, output_directory, logger)
205 |
206 |
207 | def perform_mixture(modex_outputs_directory: str, perform_derelocation: bool, sum_path: str, dump_anomalies: bool,
208 | output_directory: str, logger) -> None:
209 |     successful_modex_outputs: List[str] = get_successful_modex_outputs(modex_outputs_directory, False, logger)
210 |
211 | # Take the .dmp and .json files from each successful Modex output and perform the mixture
212 | modex_extractions: List[ModexExtraction] = []
213 | for successful_modex_output in successful_modex_outputs:
214 | dmp_file: str = get_file_from_modex_output(successful_modex_output, '.dmp')
215 | json_file: str = get_file_from_modex_output(successful_modex_output, '.json')
216 | if dmp_file is not None and json_file is not None:
217 | modex_extractions.append(ModexExtraction(successful_modex_output, dmp_file, json_file))
218 | else:
219 | logger.info(
220 |                 f'\tThe Modex output {successful_modex_output} was considered successful, but the .dmp file is {dmp_file} and the .json file is {json_file}, and both must exist for a Modex output to be usable. As a result, this output will not be considered for the mixture.')
221 |
222 | logger.info('\nModex outputs that will be mixed:')
223 | for modex_extraction in modex_extractions:
224 | logger.info(f'\t{modex_extraction.modex_output}')
225 |
226 | modules_to_mix: List[Module] = []
227 | for modex_extraction in modex_extractions:
228 | modules_to_mix.append(convert_modex_extraction_to_module(modex_extraction))
229 |
230 | can_modules_be_mixed: bool = check_if_modules_can_be_mixed(modules_to_mix, logger)
231 | mixed_module_filename: str = 'mixed_module.dmp'
232 | mixed_module_metadata_filename: str = 'mixed_module.description.json'
233 | if can_modules_be_mixed:
234 | mix_modules(modules_to_mix, output_directory, mixed_module_filename, mixed_module_metadata_filename,
235 | dump_anomalies, logger, False, None)
236 |
237 | if perform_derelocation and os.path.exists(os.path.join(output_directory, mixed_module_filename)):
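  |         # Run derelocator.py in a separate process; it receives the mixed module path,
  |         # the output directory, and the sum.py path as command line arguments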
238 |         # 'python3' and the script path must be separate argv elements, since Popen is used without a shell
239 |         derelocator_command = ['python3', os.path.join(os.path.dirname(os.path.abspath(__file__)), 'derelocator.py'),
240 |                                os.path.join(os.getcwd(), output_directory, mixed_module_filename),
241 |                                os.path.join(os.getcwd(), output_directory), sum_path]
241 | with subprocess.Popen(derelocator_command) as derelocator_tool:
242 | derelocator_exit_code = derelocator_tool.wait()
243 | if derelocator_exit_code == 0:
244 |                 logger.info('\nThe derelocation process was successful')
245 | else:
246 | logger.info(f'\nThe derelocation process was not successful (exit code {derelocator_exit_code})')
247 |
248 |
249 | def perform_operation_after_getting_modex_outputs(module: str, memory_dumps_directory: str, remove_modex_outputs: bool,
250 | perform_derelocation: bool, sum_path: str, volatility_path: str,
251 | dump_anomalies: bool, detect: bool, output_directory: str,
252 | logger) -> None:
253 | memory_dumps: List[str] = get_not_hidden_files_inside_directory(memory_dumps_directory)
254 | logger.info('Memory dumps provided:')
255 | for memory_dump in memory_dumps:
256 | logger.info(f'\t{memory_dump}')
257 | current_working_directory: str = os.getcwd()
258 | logger.debug(f'Working directory before changing it: {current_working_directory}')
259 | modex_outputs_directory_name: str = obtain_modex_outputs_directory_name(output_directory)
260 | os.chdir(modex_outputs_directory_name) # Change the working directory
261 | logger.debug(f'Working directory after changing it: {os.getcwd()}')
262 |
263 | # Invoke the Modex plugin for each memory dump inside the memory dumps directory
264 | logger.info('\nModex plugin execution:')
265 | for memory_dump in memory_dumps:
266 | if detect:
267 | volatility_command = ['python3', volatility_path, '-f', memory_dump, 'windows.modex', '--module', module,
268 | '--detect']
269 | else:
270 | volatility_command = ['python3', volatility_path, '-f', memory_dump, 'windows.modex', '--module', module,
271 | '--dump-anomalies']
272 | with subprocess.Popen(volatility_command, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL) as modex_plugin:
273 | print(f'Running the Modex plugin for the following memory dump: {memory_dump}')
274 | modex_plugin_exit_code = modex_plugin.wait()
275 | if modex_plugin_exit_code == 0:
276 | logger.info(f'\tThe Modex plugin executed successfully for the following memory dump: {memory_dump}')
277 | else:
278 | logger.info(
279 | f'\tThe execution of the Modex plugin was not successful (exit code {modex_plugin_exit_code}) for the following memory dump: {memory_dump}')
280 |
281 | os.chdir(current_working_directory) # Restore the working directory
282 | logger.debug(f'Working directory after restoring it: {os.getcwd()}')
283 |
284 | if detect:
285 | perform_detection(modex_outputs_directory_name, output_directory, logger)
286 | else:
287 | # Mix the modules previously extracted
288 | perform_mixture(modex_outputs_directory_name, perform_derelocation, sum_path, dump_anomalies, output_directory,
289 | logger)
290 |
291 | if remove_modex_outputs:
292 | shutil.rmtree(modex_outputs_directory_name)
293 |
294 |
295 | def validate_arguments() -> Dict[str, Any]:
296 | """Parse and validate command line arguments."""
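  |     # Illustrative invocations (the paths are hypothetical):
  |     #   python3 inter_modex.py -d memory_dumps/ -m kernel32.dll -t /opt/volatility3/vol.py -r
  |     #   python3 inter_modex.py -o modex_outputs/ -p -s /path/to/sum.py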
297 | arg_parser = argparse.ArgumentParser(
298 | description='Extracts a module as complete as possible from multiple memory dumps')
299 | arg_parser.version = '0.1.0'
300 | arg_parser.add_argument('-a',
301 | '--dump-anomalies',
302 | action='store_true',
303 | help='When there are different shared pages at the same offset, dump those pages')
304 | arg_parser.add_argument('-d',
305 | '--memory-dumps-directory',
306 | help='directory where the memory dumps are (the Modex plugin will be called)')
307 | arg_parser.add_argument('--detect',
308 | action='store_true',
309 | help='detect the presence of the DLL hijacking technique')
310 | arg_parser.add_argument('-l',
311 | '--log-level',
312 | choices=['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'],
313 | default='INFO',
314 | help='logging level')
315 | arg_parser.add_argument('-m',
316 | '--module',
317 | help='name of the module to extract')
318 | arg_parser.add_argument('-o',
319 | '--modex-outputs-directory',
320 | help='directory where the Modex outputs are (the Modex plugin will not be called)')
321 | arg_parser.add_argument('-p',
322 | '--perform-derelocation',
323 | action='store_true',
324 | help='perform a derelocation process after extracting the module')
325 | arg_parser.add_argument('-r',
326 | '--remove-modex-outputs',
327 | action='store_true',
328 | help='remove the outputs generated by the Modex plugin (only if the Modex plugin is called)')
329 | arg_parser.add_argument('-s',
330 | '--sum-path',
331 | help='path where the sum.py file is')
332 | arg_parser.add_argument('-t',
333 | '--volatility-path',
334 | help='path where the vol.py file is')
335 | arg_parser.add_argument('-v',
336 | '--version',
337 | action='version',
338 | help='show the program version and exit')
339 |
340 | args = arg_parser.parse_args()
341 |
342 | module = args.module
343 | memory_dumps_directory = args.memory_dumps_directory
344 | modex_outputs_directory = args.modex_outputs_directory
345 | remove_modex_outputs = args.remove_modex_outputs
346 | perform_derelocation = args.perform_derelocation
347 | volatility_path = args.volatility_path
348 | dump_anomalies = args.dump_anomalies
349 | sum_path = args.sum_path
350 | detect = args.detect
351 |
352 | if memory_dumps_directory is not None and modex_outputs_directory is not None:
353 | raise ValueError(
354 | 'You cannot supply the --memory-dumps-directory and the --modex-outputs-directory options at the same time (either the modules have already been extracted with the Modex plugin or not)')
355 |
356 | if memory_dumps_directory is None and modex_outputs_directory is None:
357 | raise ValueError(
358 | 'You have to indicate a directory (either where the memory dumps are or where the Modex outputs are)')
359 |
360 | if memory_dumps_directory is not None and module is None:
361 | raise ValueError(
362 | 'If you supply the --memory-dumps-directory option, then the --module option also has to be supplied')
363 |
364 | if memory_dumps_directory is not None and volatility_path is None:
365 | raise ValueError(
366 | 'If you supply the --memory-dumps-directory option, then the --volatility-path option also has to be supplied')
367 |
368 | if modex_outputs_directory is not None and module is not None:
369 | raise ValueError(
370 | 'If you supply the --modex-outputs-directory option, then the --module option cannot be supplied (all the Modex outputs inside the directory supplied are supposed to correspond to the same module)')
371 |
372 | if modex_outputs_directory is not None and remove_modex_outputs:
373 | raise ValueError(
374 | 'You cannot supply the --remove-modex-outputs alongside the --modex-outputs-directory option (the Modex outputs can only be deleted if the Modex plugin is called within InterModex)')
375 |
376 | if perform_derelocation and sum_path is None:
377 | raise ValueError(
378 |             'If you supply the --perform-derelocation option, then the --sum-path option also has to be supplied')
379 |
380 | if memory_dumps_directory is not None and not os.path.exists(memory_dumps_directory):
381 | raise FileNotFoundError(
382 | f'The directory supplied with the --memory-dumps-directory option ({memory_dumps_directory}) does not exist')
383 |
384 | if modex_outputs_directory is not None and not os.path.exists(modex_outputs_directory):
385 | raise FileNotFoundError(
386 | f'The directory supplied with the --modex-outputs-directory option ({modex_outputs_directory}) does not exist')
387 |
388 | if volatility_path is not None and not os.path.isfile(volatility_path):
389 | raise FileNotFoundError(
390 | f'The path supplied with the --volatility-path option ({volatility_path}) does not correspond to a file')
391 |
392 | if sum_path is not None and not os.path.isfile(sum_path):
393 | raise FileNotFoundError(
394 | f'The path supplied with the --sum-path option ({sum_path}) does not correspond to a file')
395 |
396 | if module is not None and len(module) > 255:
397 |         raise ValueError('The module name is too long (it cannot exceed 255 characters)')
398 |
399 | if (perform_derelocation or dump_anomalies) and detect:
400 | raise ValueError(
401 | 'You cannot supply the --detect option alongside the --perform-derelocation or --dump-anomalies options')
402 |
403 | if memory_dumps_directory is not None:
404 | memory_dumps_directory = os.path.abspath(memory_dumps_directory)
405 |
406 | if volatility_path is not None:
407 | volatility_path = os.path.abspath(volatility_path)
408 |
409 | if sum_path is not None:
410 | sum_path = os.path.abspath(sum_path)
411 |
412 |     # argparse restricts --log-level to the supported choices, so the level name can be mapped directly
413 |     log_level_supplied = getattr(logging, args.log_level)
425 |
426 | arguments: Dict[str, Any] = {'module': module, 'memory_dumps_directory': memory_dumps_directory,
427 | 'modex_outputs_directory': modex_outputs_directory,
428 | 'remove_modex_outputs': remove_modex_outputs,
429 | 'perform_derelocation': perform_derelocation, 'volatility_path': volatility_path,
430 | 'log_level_supplied': log_level_supplied, 'dump_anomalies': dump_anomalies,
431 | 'sum_path': sum_path, 'detect': detect}
432 | return arguments
433 |
434 |
435 | def execute() -> None:
436 | try:
437 | validated_arguments: Dict[str, Any] = validate_arguments()
438 |
439 | # Directory where the InterModex output will be placed
440 | output_directory: str = f'inter_modex_output_{get_current_utc_timestamp()}'
441 |
442 | modex_outputs_directory = validated_arguments['modex_outputs_directory']
443 | if modex_outputs_directory is not None:
444 | create_output_directory(output_directory, False)
445 | logger = get_logger(output_directory, validated_arguments['log_level_supplied'])
446 | if validated_arguments['detect']:
447 | perform_detection(modex_outputs_directory, output_directory, logger)
448 | else:
449 | perform_mixture(modex_outputs_directory, validated_arguments['perform_derelocation'],
450 | validated_arguments['sum_path'], validated_arguments['dump_anomalies'],
451 | output_directory,
452 | logger)
453 | else:
454 | create_output_directory(output_directory, True)
455 | logger = get_logger(output_directory, validated_arguments['log_level_supplied'])
456 | perform_operation_after_getting_modex_outputs(validated_arguments['module'],
457 | validated_arguments['memory_dumps_directory'],
458 | validated_arguments['remove_modex_outputs'],
459 | validated_arguments['perform_derelocation'],
460 | validated_arguments['sum_path'],
461 | validated_arguments['volatility_path'],
462 | validated_arguments['dump_anomalies'],
463 | validated_arguments['detect'], output_directory,
464 | logger)
465 |
466 | except Exception as exception:
467 | print(f'An error occurred ({exception}). Here are more details about the error:\n')
468 | print(traceback.format_exc())
469 |
470 |
471 | def main():
472 | execute()
473 |
474 |
475 | if __name__ == '__main__':
476 | main()
477 |
--------------------------------------------------------------------------------
/modex.py:
--------------------------------------------------------------------------------
1 | import os
2 | import re
3 | import logging
4 | import hashlib
5 | import json
6 | import tlsh
7 | import time
8 | from typing import List, Dict, Any, Optional
9 | from datetime import datetime
10 | from collections import Counter
11 | from tabulate import tabulate
12 |
13 | from volatility3.framework import renderers, interfaces, automagic, plugins, exceptions
14 | from volatility3.framework.configuration import requirements
15 | from volatility3.framework.symbols import intermed
16 | from volatility3.framework.symbols.windows.extensions import pe
17 | from volatility3.framework.renderers import format_hints
18 | from volatility3.plugins.windows import pslist, dlllist, simple_pteenum
19 |
20 |
21 | def get_current_utc_timestamp() -> str:
22 | utc_now = datetime.utcnow()
23 | return utc_now.strftime("%d-%m-%Y_%H-%M-%S_UTC")
24 |
25 |
26 | def create_logger(file_path: str, logger_name: str, log_level):
27 | logger = logging.getLogger(logger_name)
28 | logger.setLevel(log_level)
29 | file_handler = logging.FileHandler(file_path)
30 | file_handler.setLevel(log_level)
31 | logger.addHandler(file_handler)
32 | return logger
33 |
34 |
35 | def get_relevant_page_details(page: List[Any]) -> Dict[str, Optional[str]]:
36 | relevant_page_details = {}
37 | page_details = page[3]
38 |
39 | page_vaddr_match = re.search(r'\nvaddr: (\w+)\n', page_details)
40 | if page_vaddr_match:
41 | page_vaddr = page_vaddr_match.group(1)
42 | else:
43 | page_vaddr = None
44 |
45 | page_length_match = re.search(r'\nlength: (\w+)\n', page_details)
46 | if page_length_match:
47 | page_length = page_length_match.group(1)
48 | else:
49 | page_length = None
50 |
51 | page_pfn_db_entry_prototype_pte_flag_match = re.search(r'\nhas_proto_set: (\w+)\n', page_details)
52 | if page_pfn_db_entry_prototype_pte_flag_match:
53 | page_pfn_db_entry_prototype_pte_flag = page_pfn_db_entry_prototype_pte_flag_match.group(1)
54 | else:
55 | page_pfn_db_entry_prototype_pte_flag = None
56 |
57 | relevant_page_details['virtual_address'] = page_vaddr
58 | relevant_page_details['size'] = page_length
59 | relevant_page_details['pfn_db_entry_prototype_pte_flag'] = page_pfn_db_entry_prototype_pte_flag
60 | return relevant_page_details
61 |
62 |
63 | def check_if_all_elements_are_equal(elements: List[Any]) -> bool:
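  |     # The generator is lazy, so elements[0] is never evaluated for an empty list (the result is vacuously True)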
64 | return all(element == elements[0] for element in elements)
65 |
66 |
67 | class Page:
68 | def __init__(self, virtual_address: int, size: int, pfn_db_entry_prototype_pte_flag: str, module_filename: str,
69 | contents_digest: str = None, is_anomalous: bool = False):
70 | self.virtual_address: int = virtual_address
71 | self.size: int = size # In bytes
72 | # pfn_db_entry_prototype_pte_flag can be 'True', 'False', or 'Undetermined'
73 | self.pfn_db_entry_prototype_pte_flag: str = pfn_db_entry_prototype_pte_flag
74 | self.module_filename: str = module_filename # Filename of the dumped module where the page is
75 | # Regarding the digest of the page contents:
76 | # - For shared pages: SHA-256 digest
77 | # - For pages considered private: TLSH digests will be used to choose a representative page. However, SHA-256 digests will be used in the metadata file.
78 | self.contents_digest: str = contents_digest
79 | self.is_anomalous: bool = is_anomalous # This attribute is for InterModex compatibility
80 |
81 | def get_basic_information(self):
82 | return {'virtual_address': hex(self.virtual_address), 'size': hex(self.size),
83 | 'pfn_db_entry_prototype_pte_flag': self.pfn_db_entry_prototype_pte_flag}
84 |
85 | def is_shared(self):
86 |         return self.pfn_db_entry_prototype_pte_flag == 'True'
87 |
88 | def is_private(self):
89 |         return self.pfn_db_entry_prototype_pte_flag == 'False'
90 |
91 | def is_pfn_db_entry_prototype_pte_flag_undetermined(self):
92 |         return self.pfn_db_entry_prototype_pte_flag == 'Undetermined'
93 |
94 | def is_considered_private(self):
95 | # If it is not clear that a page is shared, it is considered private
96 |         return self.is_private() or self.is_pfn_db_entry_prototype_pte_flag_undetermined()
97 |
98 |
99 | class Module:
100 | def __init__(self, name: str, path: str, base_address: int, size: int, process_id: int, filename: str,
101 | pages: List[Page]):
102 | self.name: str = name
103 | self.path: str = path
104 | self.base_address: int = base_address # Virtual base address
105 | self.size: int = size # In bytes
106 | self.process_id: int = process_id # Identifier of the process where the module is mapped
107 | self.filename: str = filename # Filename of the dumped module
108 | self.pages: List[Page] = pages
109 |
110 | def get_basic_information(self):
111 | return {'name': self.name, 'path': self.path, 'base_address': hex(self.base_address), 'size': hex(self.size),
112 | 'process_id': self.process_id, 'filename': self.filename,
113 | 'number_of_retrieved_pages': len(self.pages)}
114 |
115 | def get_information_for_metadata_file(self):
116 | return {'path': self.path, 'base_address': self.base_address, 'size': self.size, 'process_id': self.process_id}
117 |
118 |
119 | def delete_dmp_files(modules: List[Module]) -> None:
120 | for module in modules:
121 | os.remove(module.filename)
122 |
123 |
124 | def delete_zero_bytes_dmp_files() -> None:
125 | elements_inside_current_working_directory: List[str] = os.listdir(os.getcwd())
126 | for element_inside_current_working_directory in elements_inside_current_working_directory:
127 | if os.path.isfile(
128 | element_inside_current_working_directory) and element_inside_current_working_directory.endswith(
129 | '.dmp') and element_inside_current_working_directory.startswith('pid.') and os.path.getsize(
130 | element_inside_current_working_directory) == 0:
131 | os.remove(element_inside_current_working_directory)
132 |
133 |
134 | def delete_modules_under_syswow64(modules: List[Module], logger) -> List[Module]:
135 | modules_not_under_syswow64: List[Module] = []
136 | modules_under_syswow64: List[Module] = []
137 | syswow64_directory = 'C:\\Windows\\SysWOW64\\'
138 | syswow64_directory_case_insensitive = syswow64_directory.casefold()
139 | for module in modules:
140 | if syswow64_directory_case_insensitive not in module.path.casefold():
141 | modules_not_under_syswow64.append(module)
142 | else:
143 | modules_under_syswow64.append(module)
144 | logger.info(f'\nModule under C:\\Windows\\SysWOW64 identified: {module.path}')
145 | delete_dmp_files(modules_under_syswow64)
146 | return modules_not_under_syswow64
147 |
148 |
149 | def check_if_modules_can_be_mixed(modules: List[Module], logger) -> bool:
150 | # Make sure that the modules to mix:
151 | # - Have the same path
152 | # - Have the same size (check the sizes reported by the Module objects and the sizes of the dumped files)
153 | # - Are mapped at the same virtual base address
154 | paths: List[str] = []
155 | sizes: List[int] = []
156 | base_addresses: List[int] = []
157 | for module in modules:
158 | paths.append(module.path.casefold())
159 | sizes.append(module.size)
160 | sizes.append(os.path.getsize(module.filename))
161 | base_addresses.append(module.base_address)
162 |
163 | are_all_paths_equal: bool = check_if_all_elements_are_equal(paths)
164 | are_all_sizes_equal: bool = check_if_all_elements_are_equal(sizes)
165 | are_all_base_addresses_equal: bool = check_if_all_elements_are_equal(base_addresses)
166 | if False in (are_all_paths_equal, are_all_sizes_equal, are_all_base_addresses_equal):
167 | logger.info(f'''\nThe modules cannot be mixed:
168 | Are all paths equal? {are_all_paths_equal}
169 | Are all sizes equal? {are_all_sizes_equal}
170 | Are all base addresses equal? {are_all_base_addresses_equal}''')
171 | return False
172 | else:
173 | logger.info('\nThe modules can be mixed')
174 | return True
175 |
176 |
177 | def get_shared_pages(pages: List[Page]) -> List[Page]:
178 | shared_pages: List[Page] = []
179 | for page in pages:
180 | if page.is_shared():
181 | shared_pages.append(page)
182 | return shared_pages
183 |
184 |
185 | def count_instances_of_each_element(elements: List[Any]) -> Dict[Any, int]:
186 | """Count how many times each element is present in a list."""
187 | return dict(Counter(elements))
188 |
189 |
190 | def get_page_from_dumped_module(module_filename: str, page_offset: int, page_size: int) -> bytes:
191 | with open(module_filename, mode='rb') as dumped_module:
192 | dumped_module.seek(page_offset)
193 | page_contents = dumped_module.read(page_size)
194 | return page_contents
195 |
196 |
197 | def create_entry_for_page_in_mixed_module_metadata(page_offset: int, page_size: int, is_page_shared: bool,
198 | page_contents_sha_256_digest: str,
199 | is_page_anomalous: bool) -> Dict[str, Any]:
200 | return {'offset': page_offset, 'size': page_size, 'is_shared': is_page_shared,
201 | 'sha_256_digest': page_contents_sha_256_digest, 'is_anomalous': is_page_anomalous}
202 |
203 |
204 | def get_most_common_element(elements: List[Any]) -> Any:
205 |     return Counter(elements).most_common(1)[0][0]
206 |
207 |
208 | def find_page_with_certain_digest(pages: List[Page], digest: str) -> Optional[Page]:
209 | for page in pages:
210 | if page.contents_digest == digest:
211 | return page
212 |
213 |
214 | def insert_page_into_mixed_module(page: Page, module_base_address: int, mixed_module: bytearray,
215 | mixed_module_pages_metadata: List[Dict[str, Any]], is_page_anomalous: bool) -> None:
216 | page_offset: int = page.virtual_address - module_base_address # Offset of the page inside the module
217 | page_contents: bytes = get_page_from_dumped_module(page.module_filename, page_offset, page.size)
218 | mixed_module[page_offset: page_offset + page.size] = page_contents
219 | mixed_module_pages_metadata.append(
220 | create_entry_for_page_in_mixed_module_metadata(page_offset, page.size, page.is_shared(), page.contents_digest,
221 | is_page_anomalous))
222 |
223 |
224 | def calculate_page_digests(pages: List[Page], module_base_address: int, use_similarity_digest_algorithm: bool) -> None:
225 | for page in pages:
226 | page_offset_inside_module: int = page.virtual_address - module_base_address
227 | page_contents: bytes = get_page_from_dumped_module(page.module_filename, page_offset_inside_module,
228 | page.size)
229 | if use_similarity_digest_algorithm:
230 | page.contents_digest = tlsh.hash(page_contents)
231 | else:
232 | page.contents_digest = hashlib.sha256(page_contents).hexdigest()
233 |
234 |
235 | def get_page_digests(pages: List[Page]) -> List[str]:
236 | page_digests: List[str] = []
237 | for page in pages:
238 | page_digests.append(page.contents_digest)
239 | return page_digests
240 |
241 |
242 | def check_if_all_tlsh_digests_are_valid(tlsh_digests: List[str]) -> bool:
243 | for tlsh_digest in tlsh_digests:
244 | if tlsh_digest == 'TNULL':
245 | return False
246 | return True
247 |
248 |
249 | def choose_representative_page_digest(page_similarity_digests: List[str], logger) -> str:
250 | """Compare all the page similarity digests received and choose one that is representative."""
251 | are_all_tlsh_digests_valid: bool = check_if_all_tlsh_digests_are_valid(page_similarity_digests)
252 | if len(page_similarity_digests) < 3 or not are_all_tlsh_digests_valid:
253 | return 'INVALID_DIGEST'
254 |
255 | # similarity_scores_table is a table with the similarity scores obtained after comparing all the similarity digests between each other
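  |     # The representative digest will be the one whose row of scores has the minimum sum, that is,
  |     # the digest that is, on aggregate, the most similar to all the others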
256 | similarity_scores_table: List[List[int]] = []
257 | for page_similarity_digest_i in page_similarity_digests:
258 | similarity_scores_row: List[int] = []
259 | for page_similarity_digest_j in page_similarity_digests:
260 | similarity_scores_row.append(tlsh.diff(page_similarity_digest_i, page_similarity_digest_j))
261 | similarity_scores_table.append(similarity_scores_row)
262 |
263 | sums_of_individual_similarity_scores_rows: List[int] = []
264 | for similarity_scores_row in similarity_scores_table:
265 | sums_of_individual_similarity_scores_rows.append(sum(similarity_scores_row))
266 | minimum_sum: int = min(sums_of_individual_similarity_scores_rows)
267 | index_of_minimum_sum: int = sums_of_individual_similarity_scores_rows.index(minimum_sum)
268 | representative_page_digest: str = page_similarity_digests[index_of_minimum_sum]
269 |
270 | # Log the process
271 | table_for_log: List[List[str]] = [[''] + page_similarity_digests]
272 | for page_similarity_digest in page_similarity_digests:
273 | table_for_log.append([page_similarity_digest])
274 | current_index_in_table_for_log: int = 1
275 | for similarity_scores_row in similarity_scores_table:
276 | for similarity_score in similarity_scores_row:
277 | table_for_log[current_index_in_table_for_log].append(str(similarity_score))
278 | current_index_in_table_for_log += 1
279 |
280 | logger.info('\t\tSimilarity scores table:')
281 | logger.info(tabulate(table_for_log, tablefmt='grid'))
282 | logger.info(
283 | f'\t\tThe minimum sum ({minimum_sum}) is in the row that corresponds to the digest {page_similarity_digests[index_of_minimum_sum]}')
284 |
285 | return representative_page_digest
286 |
287 |
288 | def dump_page(page: Page, page_offset: int, file_path: str) -> None:
289 | page_contents: bytes = get_page_from_dumped_module(page.module_filename, page_offset, page.size)
290 | with open(file_path, 'wb') as dumped_page:
291 | dumped_page.write(page_contents)
292 |
293 |
294 | def get_detection_information_filename() -> str:
295 | return 'detection_information.json'
296 |
297 |
298 | def log_detection_process_common_parts(logger) -> None:
299 | logger.info(
300 | f'\nThe --detect option was supplied to detect the presence of the DLL hijacking technique. For more details check the {get_detection_information_filename()} file.')
301 |
302 |
303 | def detect_dll_hijacking_inside_one_memory_dump(modules: List[Module], output_directory: str, memory_dump_location: str,
304 | logger) -> List[str]:
305 | mapped_modules_info: List[Dict[str, Any]] = []
306 | for module in modules:
307 | mapped_modules_info.append(module.get_information_for_metadata_file())
308 |
309 | detection_info: Dict[str, Any] = {'memory_dump_location': memory_dump_location,
310 | 'mapped_modules': mapped_modules_info}
311 | most_common_path: str = get_most_common_element([module.path.casefold() for module in modules])
312 | most_common_size: int = get_most_common_element([module.size for module in modules])
313 |
314 | # Look at the cases where the path or the size do not match with the most common ones and mark them as suspicious
315 | suspicious_modules: List[Module] = []
316 | for module in modules:
317 | if module.path.casefold() != most_common_path or module.size != most_common_size:
318 | suspicious_modules.append(module)
319 |
320 |     detection_info['dll_hijacking_detection_result'] = bool(suspicious_modules)
321 |
322 | suspicious_processes: List[int] = []
323 | for suspicious_module in suspicious_modules:
324 | suspicious_processes.append(suspicious_module.process_id)
325 |
326 | detection_info['suspicious_processes'] = suspicious_processes
327 |
328 | for mapped_module_info in mapped_modules_info:
329 | mapped_module_info['base_address'] = hex(mapped_module_info['base_address'])
330 |
331 | files_generated: List[str] = []
332 | detection_info_path: str = os.path.join(output_directory, get_detection_information_filename())
333 | with open(detection_info_path, 'w') as detection_info_file:
334 | json.dump(detection_info, detection_info_file, ensure_ascii=False, indent=4)
335 |
336 | files_generated.append(detection_info_path)
337 | log_detection_process_common_parts(logger)
338 | return files_generated
339 |
340 |
341 | def mix_modules(modules: List[Module], output_directory: str, mixed_module_filename: str,
342 | mixed_module_metadata_filename: str, dump_anomalies: bool, logger, is_modex_calling: bool,
343 | start_time, memory_dump_location: str = None) -> List[str]:
344 | if not modules:
345 | return []
346 | module_size: int = modules[0].size
347 | module_base_address: int = modules[0].base_address
348 | module_path: str = modules[0].path
349 | mixed_module: bytearray = bytearray(module_size) # The mixed module is initialized with zeros
350 | mixed_module_pages_metadata: List[Dict[str, Any]] = [] # Metadata about the retrieved pages
351 | files_generated: List[str] = []
352 |
353 | # In the mixture dictionary:
354 | # - The keys are virtual addresses (the virtual address acts here as an id for a page inside a module)
355 | # - The values are lists of pages that all start at the same virtual address
356 |
357 | # In the mixture_shared_state dictionary:
358 | # - The idea for the keys is the same as in the mixture dictionary
359 | # - Each value is a boolean indicating if the page with that virtual address will be marked as shared (True) or not (False) in the mixed module
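  |     # Illustrative example (the addresses are hypothetical): if two processes both map a page at the
  |     # virtual address 0x7ff800001000, then mixture[0x7ff800001000] holds both Page objects, and
  |     # mixture_shared_state[0x7ff800001000] records whether a shared copy was found at that address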
360 |
361 | mixture: Dict[int, List[Page]] = {}
362 | mixture_shared_state: Dict[int, bool] = {}
363 | for module in modules:
364 | for page in module.pages:
365 | if page.virtual_address not in mixture.keys():
366 | mixture[page.virtual_address] = [page]
367 | mixture_shared_state[page.virtual_address] = False
368 | else:
369 | mixture[page.virtual_address].append(page)
370 |
371 | for virtual_address in mixture.keys():
372 | shared_pages: List[Page] = get_shared_pages(mixture[virtual_address])
373 | if shared_pages:
374 | mixture[virtual_address] = shared_pages
375 | mixture_shared_state[virtual_address] = True
376 | calculate_page_digests(mixture[virtual_address], module_base_address, False)
377 | else:
378 | calculate_page_digests(mixture[virtual_address], module_base_address, True)
379 |
380 | logger.info('\nResults after choosing a page for each available virtual address:')
381 | for virtual_address in mixture.keys():
382 | if mixture_shared_state[virtual_address]:
383 | # The following code (until indicated) is only useful for InterModex
384 | non_anomalous_pages: List[Page] = []
385 | for page in mixture[virtual_address]:
386 | if not page.is_anomalous:
387 | non_anomalous_pages.append(page)
388 | if 0 < len(non_anomalous_pages) < len(mixture[virtual_address]):
389 | mixture[virtual_address] = non_anomalous_pages
390 |             # The code only useful for InterModex ends here
391 |
392 | # Check the contents that a list of shared pages with the same virtual address have, and write to the mixed module accordingly
393 | page_digests: List[str] = get_page_digests(mixture[virtual_address]) # SHA-256 digests
394 | are_all_shared_pages_equal: bool = check_if_all_elements_are_equal(page_digests)
395 | if are_all_shared_pages_equal:
396 |                 # All the shared pages have the same contents; it does not matter which one is picked
397 | shared_page: Page = mixture[virtual_address][0]
398 | insert_page_into_mixed_module(shared_page, module_base_address, mixed_module,
399 | mixed_module_pages_metadata, shared_page.is_anomalous)
400 | logger.info(
401 | f'\tAll the shared pages whose virtual address is {hex(virtual_address)} ({len(mixture[virtual_address])}) (offset {virtual_address - module_base_address}) are equal (SHA-256 digest: {shared_page.contents_digest})')
402 | else:
403 | most_common_page_digest: str = get_most_common_element(page_digests)
404 |                 # Find a page with the most common digest; it does not matter which page is picked as long as its digest matches the most common one
405 | most_common_shared_page: Page = find_page_with_certain_digest(mixture[virtual_address],
406 | most_common_page_digest)
407 | insert_page_into_mixed_module(most_common_shared_page, module_base_address, mixed_module,
408 | mixed_module_pages_metadata, True)
409 | instances_of_each_page_digest: Dict[str, int] = count_instances_of_each_element(page_digests)
410 | logger.info(
411 |                     f'\tAll the shared pages whose virtual address is {hex(virtual_address)} ({len(mixture[virtual_address])}) (offset {virtual_address - module_base_address}) are not equal (there are {len(instances_of_each_page_digest)} different instances); here is how many times each SHA-256 digest is present:')
412 | for page_digest in instances_of_each_page_digest.keys():
413 | logger.info(f'\t\t{page_digest}: {instances_of_each_page_digest[page_digest]}')
414 |
415 | if dump_anomalies:
416 | different_page_digests = instances_of_each_page_digest.keys()
417 | different_pages: List[Page] = []
418 | for page_digest in different_page_digests:
419 | different_pages.append(find_page_with_certain_digest(mixture[virtual_address], page_digest))
420 | anomalies_directory: str = os.path.join(output_directory, 'anomalies')
421 | if not os.path.exists(anomalies_directory):
422 | os.makedirs(anomalies_directory)
423 | for i in range(0, len(different_pages)):
424 | page: Page = different_pages[i]
425 | page_offset: int = page.virtual_address - module_base_address
426 | page_filename: str = f'shared_page_{i + 1}_at_offset_{page_offset}.dmp'
427 | page_file_path: str = os.path.join(anomalies_directory, page_filename)
428 | dump_page(page, page_offset, page_file_path)
429 | files_generated.append(page_file_path)
430 | else:
431 | # In this case, no shared pages were found that started with the current virtual address.
432 | # As a result, a representative page has to be chosen to be inserted in the mixed module.
433 | logger.info(
434 | f'\tNo shared pages were found that started with the virtual address {hex(virtual_address)} (offset {virtual_address - module_base_address}). As a result, a representative page of the total {len(mixture[virtual_address])} pages has to be chosen. Below is the process followed to choose the representative page:')
435 | page_similarity_digests: List[str] = get_page_digests(mixture[virtual_address]) # TLSH digests
436 |
437 |                 # At least 3 digests need to exist to compare them all and choose a representative one, so:
438 | # - If only one digest exists, that digest will be chosen
439 | # - If only two digests exist, the first one is chosen
440 | if len(page_similarity_digests) < 3:
441 | representative_page_digest: str = page_similarity_digests[0]
442 | logger.info(
443 |                         '\t\tNo comparison could be done because there were fewer than 3 page digests, so the first digest was chosen')
444 | else:
445 | representative_page_digest = choose_representative_page_digest(page_similarity_digests, logger)
446 | if representative_page_digest == 'INVALID_DIGEST':
447 | # If a representative digest cannot be chosen through a comparison, then the first one is chosen
448 | representative_page_digest = page_similarity_digests[0]
449 | logger.info(
450 | '\t\tNo comparison could be done because not all TLSH digests were valid, so the first digest was chosen')
451 |
452 | representative_page: Page = find_page_with_certain_digest(mixture[virtual_address],
453 | representative_page_digest)
454 | calculate_page_digests([representative_page], module_base_address,
455 | False) # Calculate the SHA-256 digest of the representative page
456 | insert_page_into_mixed_module(representative_page, module_base_address, mixed_module,
457 | mixed_module_pages_metadata, False)
458 |
459 | # Statistics about the information extracted
460 | bytes_retrieved: int = 0
461 | shared_bytes_retrieved: int = 0
462 | private_bytes_retrieved: int = 0
463 | for page_metadata_entry in mixed_module_pages_metadata:
464 | page_size: int = page_metadata_entry['size']
465 | bytes_retrieved += page_size
466 | if page_metadata_entry['is_shared']:
467 | shared_bytes_retrieved += page_size
468 | else:
469 | private_bytes_retrieved += page_size
470 |
471 | logger.info('\nInformation about the extracted module:')
472 | logger.info(f'\tModule size: {module_size} bytes')
473 | logger.info(
474 |         f'\tTotal bytes retrieved: {bytes_retrieved}. As a result, {bytes_retrieved / module_size:.2%} of the module was retrieved. The pages that were not retrieved are filled with zeros.')
475 | logger.info('\tOf the bytes retrieved:')
476 | logger.info(
477 | f'\t\t{shared_bytes_retrieved / bytes_retrieved:.2%} were shared ({shared_bytes_retrieved} shared bytes in total)')
478 | logger.info(
479 | f'\t\t{private_bytes_retrieved / bytes_retrieved:.2%} were private ({private_bytes_retrieved} private bytes in total)')
480 |
481 | # Join all the metadata about the mixed module
482 | mixed_module_metadata: Dict[str, Any] = {'module_path': module_path.casefold(),
483 | 'module_base_address': hex(module_base_address),
484 | 'module_size': module_size,
485 | 'general_statistics': {'bytes_retrieved': bytes_retrieved,
486 | 'shared_bytes_retrieved': shared_bytes_retrieved,
487 | 'private_bytes_retrieved': private_bytes_retrieved},
488 | 'pages': mixed_module_pages_metadata}
489 |
490 | # Statistics regarding a Modex extraction
491 | if is_modex_calling:
492 | mixed_modules_info: List[Dict[str, Any]] = []
493 | process_ids_where_module_is_mapped: List[int] = []
494 | number_of_pages_mapped_in_each_process: Dict[int, Any] = {} # The keys are process IDs
495 |
496 | for module in modules:
497 | mixed_modules_info.append(module.get_information_for_metadata_file())
498 | process_ids_where_module_is_mapped.append(module.process_id)
499 | number_of_shared_pages: int = 0
500 | number_of_private_pages: int = 0
501 | for page in module.pages:
502 | if page.is_shared():
503 | number_of_shared_pages += 1
504 | else:
505 | number_of_private_pages += 1
506 | number_of_pages_mapped_in_each_process[module.process_id] = {
507 | 'number_of_shared_pages': number_of_shared_pages,
508 | 'number_of_private_pages': number_of_private_pages}
509 |
510 | end_time = time.time()
511 | mixed_module_metadata['mixed_modules'] = mixed_modules_info
512 | mixed_module_metadata['memory_dump_location'] = memory_dump_location
513 | mixed_module_metadata['modex_statistics'] = {
514 | 'process_ids_where_module_is_mapped': process_ids_where_module_is_mapped,
515 | 'number_of_pages_mapped_in_each_process': number_of_pages_mapped_in_each_process,
516 | 'execution_time_in_seconds': end_time - start_time}
517 |
518 | mixed_module_path: str = os.path.join(output_directory, mixed_module_filename)
519 | mixed_module_metadata_path: str = os.path.join(output_directory, mixed_module_metadata_filename)
520 |
521 | with open(mixed_module_path, mode='wb') as dumped_mixed_module:
522 | dumped_mixed_module.write(mixed_module)
523 |
524 | with open(mixed_module_metadata_path, 'w') as mixed_module_metadata_file:
525 | json.dump(mixed_module_metadata, mixed_module_metadata_file, ensure_ascii=False, indent=4)
526 |
527 | files_generated.append(mixed_module_path)
528 | files_generated.append(mixed_module_metadata_path)
529 |
530 | return files_generated
531 |
532 |
533 | class Modex(interfaces.plugins.PluginInterface):
534 | """Extracts a module as complete as possible."""
535 | _required_framework_version = (2, 0, 0)
536 | _version = (0, 1, 0)
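  |     # Illustrative invocation through Volatility 3 (the paths are hypothetical):
  |     #   python3 vol.py -f memory.dmp windows.modex --module kernel32.dll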
537 |
538 | @classmethod
539 | def get_requirements(cls) -> List[interfaces.configuration.RequirementInterface]:
540 | return [
541 | requirements.TranslationLayerRequirement(name='primary',
542 | description='Memory layer for the kernel',
543 | architectures=["Intel32", "Intel64"]),
544 | requirements.SymbolTableRequirement(name="nt_symbols",
545 | description="Windows kernel symbols"),
546 | requirements.PluginRequirement(name='pslist',
547 | plugin=pslist.PsList,
548 | version=(2, 0, 0)),
549 | requirements.PluginRequirement(name='dlllist',
550 | plugin=dlllist.DllList,
551 | version=(2, 0, 0)),
552 | requirements.PluginRequirement(name='simple_pteenum',
553 | plugin=simple_pteenum.SimplePteEnumerator,
554 | version=(0, 9, 0)),
555 | requirements.StringRequirement(name="module",
556 | description="Module name",
557 | optional=False),
558 | requirements.BooleanRequirement(name='dump_anomalies',
559 | description="When there are different shared pages at the same offset, dump those pages",
560 | default=False,
561 | optional=True),
562 | requirements.BooleanRequirement(name='detect',
563 | description="Detect the presence of the DLL hijacking technique",
564 | default=False,
565 | optional=True)
566 | ]
567 |
568 | def run(self):
569 | start_time = time.time()
570 | memory_dump_location = self.context.config['automagic.LayerStacker.single_location']
571 | output_directory: str = f'modex_output_{get_current_utc_timestamp()}' # Directory where the Modex output will be placed
572 | os.makedirs(output_directory)
573 |
574 | log_file_path = os.path.join(output_directory, 'modex_log.txt')
575 | logger = create_logger(log_file_path, 'modex_logger', logging.INFO)
576 |
577 | module_supplied: str = self.config['module'].casefold()
578 | dump_anomalies: bool = self.config['dump_anomalies']
579 | is_detect_option_supplied: bool = self.config['detect']
580 | modules_to_mix: List[Module] = []
581 | files_finally_generated: List[str] = [log_file_path]
582 |
583 |         # For each process, find out if the supplied module is mapped in it. If so, dump the module.
584 | processes = pslist.PsList.list_processes(self.context,
585 | self.config['primary'],
586 | self.config['nt_symbols'])
587 |
588 | pe_table_name = intermed.IntermediateSymbolTable.create(self.context,
589 | self.config_path,
590 | 'windows',
591 | 'pe',
592 | class_types=pe.class_types)
593 |
594 | for process in processes:
595 | process_id = process.UniqueProcessId
596 | process_layer_name = process.add_process_layer()
597 | for entry in process.load_order_modules():
598 | try:
599 | module_name = entry.BaseDllName.get_string()
600 | module_path = entry.FullDllName.get_string()
601 | if module_name.casefold() == module_supplied:
602 | try:
603 | module_base_address = format_hints.Hex(entry.DllBase)
604 | except exceptions.InvalidAddressException:
605 | module_base_address = None
606 |
607 | try:
608 | module_size = format_hints.Hex(entry.SizeOfImage)
609 | except exceptions.InvalidAddressException:
610 | module_size = None
611 |
612 | if module_base_address is not None and module_size is not None:
613 | if is_detect_option_supplied: # In this case, there is no need to dump the modules
614 | modules_to_mix.append(
615 | Module(module_name, module_path, module_base_address, module_size, process_id, '',
616 | []))
617 | else:
618 | file_handle = dlllist.DllList.dump_pe(self.context,
619 | pe_table_name,
620 | entry,
621 | self.open,
622 | process_layer_name,
623 | prefix=f'pid.{process_id}.')
624 | if file_handle:
625 | file_handle.close()
626 | dumped_module_filename = file_handle.preferred_filename
627 | modules_to_mix.append(
628 | Module(module_name, module_path, module_base_address, module_size,
629 | process_id, dumped_module_filename, []))
630 | except exceptions.InvalidAddressException:
631 | pass
632 |
633 | if not modules_to_mix:
634 | logger.info('The module supplied is not mapped in any process')
635 | return renderers.TreeGrid([("Filename", str)], self._generator(files_finally_generated))
636 |
637 | logger.info(f'Mapped modules (before validation) ({len(modules_to_mix)}):')
638 | for module_to_mix in modules_to_mix:
639 | logger.info(f'\t{module_to_mix.get_basic_information()}')
640 |
641 | # The modules under C:\Windows\SysWOW64 are not taken into account
642 | modules_to_mix = delete_modules_under_syswow64(modules_to_mix, logger)
643 |
644 | if not modules_to_mix:
645 | logger.info(
646 | '\nAll the identified modules are under the C:\\Windows\\SysWOW64 directory, as a result, the execution cannot proceed')
647 | return renderers.TreeGrid([("Filename", str)], self._generator(files_finally_generated))
648 |
649 | if is_detect_option_supplied:
650 | files_finally_generated += detect_dll_hijacking_inside_one_memory_dump(modules_to_mix, output_directory,
651 | memory_dump_location, logger)
652 | return renderers.TreeGrid([("Filename", str)], self._generator(files_finally_generated))
653 |
654 | # Make sure that the modules can be mixed
655 | can_modules_be_mixed: bool = check_if_modules_can_be_mixed(modules_to_mix, logger)
656 | if not can_modules_be_mixed:
657 | delete_dmp_files(modules_to_mix)
658 | delete_zero_bytes_dmp_files()
659 | return renderers.TreeGrid([("Filename", str)], self._generator(files_finally_generated))
660 |
661 | logger.info(f'\nModules to mix (after validation) ({len(modules_to_mix)}):')
662 | for module_to_mix in modules_to_mix:
663 | logger.info(f'\t{module_to_mix.get_basic_information()}')
664 |
665 | # For each dumped module, retrieve information about its pages
666 | for module_to_mix in modules_to_mix:
667 | pages: List[List[Any]] = self.get_module_pages(module_to_mix)
668 | for page in pages:
669 | relevant_page_details: Dict[str, str] = get_relevant_page_details(page)
670 | page_virtual_address = relevant_page_details['virtual_address']
671 | page_size = relevant_page_details['size']
672 | page_pfn_db_entry_prototype_pte_flag = relevant_page_details['pfn_db_entry_prototype_pte_flag']
673 | if None not in (page_virtual_address, page_size, page_pfn_db_entry_prototype_pte_flag):
674 | module_to_mix.pages.append(
675 | Page(int(page_virtual_address, 16), int(page_size, 16), page_pfn_db_entry_prototype_pte_flag,
676 | module_to_mix.filename))
677 |
678 | # Check if the last page retrieved for each module is out of bounds
679 | for module_to_mix in modules_to_mix:
680 | first_out_of_bounds_address: int = module_to_mix.base_address + module_to_mix.size
681 |             if module_to_mix.pages and module_to_mix.pages[-1].virtual_address == first_out_of_bounds_address:
682 | del module_to_mix.pages[-1]
683 |
684 | logger.info('\nModules to mix (after validation and alongside the retrieved pages for each one):')
685 | for module_to_mix in modules_to_mix:
686 | logger.info(f'\t{module_to_mix.get_basic_information()}')
687 | for page in module_to_mix.pages:
688 | logger.info(f'\t\t{page.get_basic_information()}')
689 |
690 |         mixed_module_filename: str = f'{module_supplied}.dmp'  # module_supplied is already casefolded
691 |         mixed_module_metadata_filename: str = f'{module_supplied}.description.json'
692 |
693 | # Perform the mixture
694 | files_finally_generated += mix_modules(modules_to_mix, output_directory, mixed_module_filename,
695 | mixed_module_metadata_filename, dump_anomalies, logger, True, start_time,
696 | memory_dump_location)
697 |
698 | # Delete the .dmp files that were used to create the final .dmp file
699 | delete_dmp_files(modules_to_mix)
700 |
701 | delete_zero_bytes_dmp_files()
702 |
703 | return renderers.TreeGrid([("Filename", str)], self._generator(files_finally_generated))
704 |
705 | def _generator(self, filenames):
706 | for filename in filenames:
707 | yield 0, [filename]
708 |
709 | def get_module_pages(self, module: Module) -> List[List[Any]]:
710 | pages: List[List[Any]] = []
711 | self.context.config['plugins.Modex.SimplePteEnumerator.pid'] = [module.process_id]
712 | self.context.config['plugins.Modex.SimplePteEnumerator.start'] = module.base_address
713 | self.context.config['plugins.Modex.SimplePteEnumerator.end'] = module.base_address + module.size
714 | self.context.config['plugins.Modex.SimplePteEnumerator.include_image_files'] = True
715 | self.context.config['plugins.Modex.SimplePteEnumerator.check_valid'] = True
716 | self.context.config['plugins.Modex.SimplePteEnumerator.print_pages'] = True
717 |
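  |         # Build and run the SimplePteEnumerator plugin programmatically, then collect every row
  |         # of its output through the visitor function below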
718 | automagics = automagic.choose_automagic(automagic.available(self._context),
719 | simple_pteenum.SimplePteEnumerator)
720 | simple_pteenum_plugin = plugins.construct_plugin(self.context, automagics,
721 | simple_pteenum.SimplePteEnumerator,
722 | self.config_path, self._progress_callback, self.open)
723 | treegrid = simple_pteenum_plugin.run()
724 |
725 | def visitor(node, _accumulator):
726 | pages.append(node.values)
727 | return None
728 |
729 | treegrid.populate(visitor, None)
730 | return pages
731 |
--------------------------------------------------------------------------------
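A short aside on the out-of-bounds check in run above: get_module_pages queries SimplePteEnumerator with end set to base_address + size, so the one page that can fall outside a module is a trailing page reported at exactly that boundary address. Below is a minimal, self-contained sketch of that trimming step; PageSketch and ModuleSketch are hypothetical stand-ins for illustration only, not the Page and Module classes used by the plugin.

from dataclasses import dataclass, field
from typing import List


@dataclass
class PageSketch:
    virtual_address: int
    size: int


@dataclass
class ModuleSketch:
    base_address: int
    size: int
    pages: List[PageSketch] = field(default_factory=list)

    def trim_out_of_bounds_page(self) -> None:
        # The first address that no longer belongs to the module
        first_out_of_bounds_address: int = self.base_address + self.size
        # A trailing page reported at exactly that address lies outside the module, so discard it
        if self.pages and self.pages[-1].virtual_address == first_out_of_bounds_address:
            del self.pages[-1]


# Usage: a 0x2000-byte module for which one page was reported right past its end
module = ModuleSketch(base_address=0x7FF700000000, size=0x2000,
                      pages=[PageSketch(0x7FF700000000, 0x1000),
                             PageSketch(0x7FF700002000, 0x1000)])
module.trim_out_of_bounds_page()
assert len(module.pages) == 1

The guard on the page list being non-empty avoids an IndexError for a module whose page enumeration came back empty.

--------------------------------------------------------------------------------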
/tests.py:
--------------------------------------------------------------------------------
1 | import argparse
2 | import binascii
3 | import hashlib
4 | import json
5 | import logging
6 | import os
7 | from typing import Tuple, List, Dict, Any
8 | from datetime import datetime
9 |
10 | from modex import get_page_from_dumped_module
11 |
12 | utc_now = datetime.utcnow()
13 | log_filename: str = f'tests_log_{utc_now.strftime("%d-%m-%Y_%H-%M-%S_UTC")}.txt'
14 | logger = logging.getLogger('tests_logger')
15 |
16 |
17 | class Anomaly:
18 | def __init__(self, page_offset: int, anomaly_number_at_offset: int, page_contents: bytes):
19 | self.page_offset: int = page_offset
20 |         self.anomaly_number_at_offset: int = anomaly_number_at_offset  # At the same offset, there can be two or more anomalies
21 | self.page_contents: bytes = page_contents
22 |
23 |
24 | def validate_output(directory: str) -> None:
25 |     # A Modex output contains only one .dmp file
26 |     # An InterModex output contains two .dmp files if a derelocation process was performed
27 |     # In the InterModex case, the validation focuses on the module to which no derelocation process has been applied
28 | module_path = None
29 | metadata_path = None
30 |
31 | for filename in os.listdir(directory):
32 | if filename.endswith('.dmp') and 'after_derelocation' not in filename:
33 | module_path = os.path.join(directory, filename)
34 | elif filename.endswith('.json'):
35 | metadata_path = os.path.join(directory, filename)
36 |
37 | if not module_path:
38 | raise FileNotFoundError('The directory supplied does not contain a .dmp file')
39 |
40 | if not metadata_path:
41 | raise FileNotFoundError('The directory supplied does not contain a .json file')
42 |
43 | is_output_correct: bool = True
44 |
45 | with open(metadata_path) as metadata_file:
46 | metadata: Dict[str, Any] = json.load(metadata_file)
47 |
48 | # Validate that each page listed in the metadata is in the module
49 | logger.info('Check that the extracted module and the information in the metadata match:')
50 | dumped_module_size: int = os.path.getsize(module_path)
51 | module_size_in_metadata: int = metadata['module_size']
52 | if dumped_module_size == module_size_in_metadata:
53 | logger.info('\tThe module size has been correctly validated')
54 | else:
55 | is_output_correct = False
56 | logger.info(
57 | f'\tThe module size has not been correctly validated (it is {dumped_module_size} bytes and should be {module_size_in_metadata} bytes)')
58 | # Each element in the metadata['pages'] list contains information about one page
59 | for page in metadata['pages']:
60 | page_contents: bytes = get_page_from_dumped_module(module_path, page['offset'], page['size'])
61 | page_contents_digest: str = hashlib.sha256(page_contents).hexdigest()
62 | if page_contents_digest == page['sha_256_digest']:
63 | logger.info(f'\tThe page at offset {page["offset"]} has been correctly validated')
64 | else:
65 | is_output_correct = False
66 | logger.info(
67 | f'\tThe page at offset {page["offset"]} has not been correctly validated, its digest should be {page["sha_256_digest"]} but it is {page_contents_digest}')
68 |
69 | # Validate that no pages are overlapping
70 |     logger.info(
71 |         '\nCheck that there are no overlapping pages (module offset:number of times that offset was written to):')
72 |     # No byte/offset/address of the module should be written to more than once
73 | times_module_offsets_were_written: List[int] = [0] * os.path.getsize(module_path)
74 | for page in metadata['pages']:
75 | page_offset = page['offset']
76 | page_size = page['size']
77 | for i in range(page_offset, page_offset + page_size):
78 |             times_module_offsets_were_written[i] += 1
79 |
80 |     for i in range(len(times_module_offsets_were_written)):
81 |         if times_module_offsets_were_written[i] > 1:
82 |             is_output_correct = False
83 |             logger.info(
84 |                 f'\t{i}:{times_module_offsets_were_written[i]} (no offset of the module should be written to more than once)')
85 | else:
86 | logger.info(
87 | f'\t{i}:{times_module_offsets_were_written[i]}')
88 |
89 | if is_output_correct:
90 | logger.info('\nThe output has been correctly validated')
91 | print(
92 | f'The output has been correctly validated. More details can be found in the generated log file ({log_filename}).')
93 | else:
94 | logger.info('\nThe output has not passed the validations')
95 | print(
96 | f'The output has not passed the validations. You can see why in the generated log file ({log_filename}).')
97 |
98 |
99 | def create_representation_of_anomaly(anomaly_file_path: str) -> Anomaly:
100 | anomaly_filename: str = os.path.basename(anomaly_file_path)
101 |     page_offset: int = int(anomaly_filename.split('_')[-1].split('.')[0])  # The page offset is the last underscore-separated field (without the extension)
102 |     anomaly_number_at_offset: int = int(anomaly_filename.split('_')[2])  # The anomaly number is the third underscore-separated field
103 | with open(anomaly_file_path, mode='rb') as anomaly:
104 | page_contents = anomaly.read()
105 | return Anomaly(page_offset, anomaly_number_at_offset, page_contents)
106 |
107 |
108 | def investigate_anomalies(directory: str) -> None:
109 | # The anomalies investigated here are cases where shared pages with the same offset have different contents
110 | anomalies_directory_name = 'anomalies'
111 | anomalies_directory_not_found_message: str = f'The directory supplied does not contain a directory named "{anomalies_directory_name}"'
112 | filenames: List[str] = os.listdir(directory)
113 | if anomalies_directory_name not in filenames:
114 | raise FileNotFoundError(anomalies_directory_not_found_message)
115 |
116 | anomalies_directory = os.path.join(directory, anomalies_directory_name)
117 | if not os.path.isdir(anomalies_directory):
118 | raise FileNotFoundError(anomalies_directory_not_found_message)
119 |
120 | filenames_inside_anomalies: List[str] = os.listdir(anomalies_directory)
121 | all_anomalies: List[Anomaly] = []
122 | for filename_inside_anomalies in filenames_inside_anomalies:
123 | all_anomalies.append(
124 | create_representation_of_anomaly(os.path.join(anomalies_directory, filename_inside_anomalies)))
125 |
126 | organized_anomalies: Dict[int, List[Anomaly]] = {} # The keys in this dictionary are the page offsets
127 | for anomaly in all_anomalies:
128 | page_offset: int = anomaly.page_offset
129 |         if page_offset not in organized_anomalies:
130 | organized_anomalies[page_offset] = [anomaly]
131 | else:
132 | organized_anomalies[page_offset].append(anomaly)
133 |
134 | logger.info('Results after investigating the anomalies:')
135 |     for page_offset in organized_anomalies:
136 | all_anomalies_at_certain_offset: List[Anomaly] = organized_anomalies[page_offset]
137 | logger.info(f'\tThere are {len(all_anomalies_at_certain_offset)} anomalies at offset {page_offset}:')
138 | for anomaly in all_anomalies_at_certain_offset:
139 | logger.info(
140 | f'\t\tSHA-256 digest of the page contents that belong to anomaly {anomaly.anomaly_number_at_offset}: {hashlib.sha256(anomaly.page_contents).hexdigest()}')
141 | for anomaly_i in all_anomalies_at_certain_offset:
142 | for anomaly_j in all_anomalies_at_certain_offset:
143 | if anomaly_i.anomaly_number_at_offset < anomaly_j.anomaly_number_at_offset:
144 | if len(anomaly_i.page_contents) != len(anomaly_j.page_contents):
145 | logger.info(
146 |                                 f'\t\tAnomaly {anomaly_i.anomaly_number_at_offset} has a size of {len(anomaly_i.page_contents)} bytes and anomaly {anomaly_j.anomaly_number_at_offset} has a size of {len(anomaly_j.page_contents)} bytes. These sizes should be equal, but they are not.')
147 | else:
148 | anomaly_i_page_contents: bytes = anomaly_i.page_contents
149 | anomaly_j_page_contents: bytes = anomaly_j.page_contents
150 | different_bytes_count: int = 0
151 | current_difference_in_anomaly_i: bytearray = bytearray()
152 | current_difference_in_anomaly_j: bytearray = bytearray()
153 | is_index_in_difference: bool = False
154 | current_difference_start_index: int = 0
155 |
156 | logger.info(
157 | f'\t\tDifferences between anomaly {anomaly_i.anomaly_number_at_offset} and anomaly {anomaly_j.anomaly_number_at_offset} (they have the same size ({len(anomaly_i_page_contents)} bytes)):')
158 |
159 |                         for z in range(len(anomaly_i_page_contents)):
160 | are_bytes_different: bool = anomaly_i_page_contents[z] != anomaly_j_page_contents[z]
161 | if are_bytes_different:
162 | different_bytes_count += 1
163 | if is_index_in_difference:
164 | current_difference_in_anomaly_i.append(anomaly_i_page_contents[z])
165 | current_difference_in_anomaly_j.append(anomaly_j_page_contents[z])
166 | else:
167 | is_index_in_difference = True
168 | current_difference_start_index = z
169 | current_difference_in_anomaly_i.clear()
170 | current_difference_in_anomaly_j.clear()
171 | current_difference_in_anomaly_i.append(anomaly_i_page_contents[z])
172 | current_difference_in_anomaly_j.append(anomaly_j_page_contents[z])
173 |                             elif is_index_in_difference:  # The bytes match again, so the current run of differing bytes has just ended
174 | is_index_in_difference = False
175 | logger.info(
176 | f'\t\t\tAt offset {current_difference_start_index}: anomaly {anomaly_i.anomaly_number_at_offset} has 0x{binascii.hexlify(bytes(current_difference_in_anomaly_i)).decode("utf-8")} and anomaly {anomaly_j.anomaly_number_at_offset} has 0x{binascii.hexlify(bytes(current_difference_in_anomaly_j)).decode("utf-8")}')
177 |
178 |                         # If the pages still differ at their very last byte, the loop above has not logged that final difference
179 |                         if is_index_in_difference:
180 |                             logger.info(
181 |                                 f'\t\t\tAt offset {current_difference_start_index}: anomaly {anomaly_i.anomaly_number_at_offset} has 0x{binascii.hexlify(bytes(current_difference_in_anomaly_i)).decode("utf-8")} and anomaly {anomaly_j.anomaly_number_at_offset} has 0x{binascii.hexlify(bytes(current_difference_in_anomaly_j)).decode("utf-8")}')
182 |                         logger.info(f'\t\t\tNumber of different bytes: {different_bytes_count}')
179 | print(f'The results after investigating the anomalies are in the generated log file ({log_filename})')
180 |
181 |
182 | def validate_arguments() -> Tuple[str, bool]:
183 | """Parse and validate command line arguments."""
184 | arg_parser = argparse.ArgumentParser(
185 | description='Validate and investigate the output produced by the Modex Volatility 3 plugin or the InterModex tool (only if the --detect option was not supplied)')
186 | arg_parser.version = '0.1.0'
187 | arg_parser.add_argument('directory',
188 | help='Directory generated by Modex or InterModex')
189 | arg_parser.add_argument('-i',
190 | '--only-investigate-anomalies',
191 | action='store_true',
192 |                             help='Do not validate the output; instead, only investigate the anomalies already found')
193 | arg_parser.add_argument('-l',
194 | '--log-level',
195 | choices=['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'],
196 | default='INFO',
197 | help='logging level')
198 | arg_parser.add_argument('-v',
199 | '--version',
200 | action='version',
201 | help='show the program version and exit')
202 |
203 | args = arg_parser.parse_args()
204 |
205 |     # argparse already restricts --log-level to the five valid choices,
206 |     # so the supplied name can be mapped directly to its logging constant
207 |     log_level_supplied: int = getattr(logging, args.log_level)
218 |
219 | logger.setLevel(log_level_supplied)
220 | file_handler = logging.FileHandler(log_filename)
221 | file_handler.setLevel(log_level_supplied)
222 | logger.addHandler(file_handler)
223 |
224 | directory: str = args.directory
225 | if not os.path.exists(directory):
226 | raise FileNotFoundError(f'The directory supplied ({directory}) does not exist')
227 |
228 | arguments: Tuple[str, bool] = (directory, args.only_investigate_anomalies)
229 | return arguments
230 |
231 |
232 | def execute() -> None:
233 | try:
234 | validated_arguments: Tuple[str, bool] = validate_arguments()
235 | directory: str = validated_arguments[0]
236 | only_investigate_anomalies: bool = validated_arguments[1]
237 | if only_investigate_anomalies:
238 | investigate_anomalies(directory)
239 | else:
240 | validate_output(directory)
241 | except Exception as exception:
242 | logger.exception(exception)
243 | print(f'Error: {exception}')
244 |
245 |
246 | def main():
247 | execute()
248 |
249 |
250 | if __name__ == '__main__':
251 | main()
252 |
--------------------------------------------------------------------------------
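A closing illustration: the innermost comparison in investigate_anomalies is, in essence, a routine that finds the maximal runs where two equal-sized buffers differ. The self-contained restatement below is a sketch of that idea; the function name and return shape are mine and are not part of tests.py. Note how a run that reaches the very end of the data must be flushed after the loop, just as in the code above.

from typing import List, Tuple


def find_differing_runs(a: bytes, b: bytes) -> List[Tuple[int, bytes, bytes]]:
    """Return (start_offset, bytes_in_a, bytes_in_b) for every maximal run where a and b differ."""
    if len(a) != len(b):
        # Mirrors the equal-size check performed before the byte comparison in investigate_anomalies
        raise ValueError('Both byte sequences must have the same length')
    runs: List[Tuple[int, bytes, bytes]] = []
    run_start = None  # Offset where the current run of differing bytes began, if a run is open
    for i in range(len(a)):
        if a[i] != b[i]:
            if run_start is None:
                run_start = i
        elif run_start is not None:
            runs.append((run_start, a[run_start:i], b[run_start:i]))
            run_start = None
    if run_start is not None:  # Flush a run that extends to the very end of the data
        runs.append((run_start, a[run_start:], b[run_start:]))
    return runs


# Usage: two 8-byte "pages" that differ in two separate runs (one at offsets 1-2, one at offset 7)
print(find_differing_runs(b'\x00\x11\x22\x33\x44\x55\x66\x77',
                          b'\x00\xaa\xbb\x33\x44\x55\x66\xff'))

For reference, tests.py is driven from the command line: it takes the directory produced by Modex or InterModex as its only positional argument, and the -i/--only-investigate-anomalies flag switches from output validation to anomaly investigation.

--------------------------------------------------------------------------------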