├── .github
│   └── workflows
│       └── fastir.yml
├── .gitignore
├── LICENSE
├── Logo_OWN-Noir.ico
├── README.md
├── examples
│   ├── fastir_artifacts.ini
│   └── own.yaml
├── fastir
│   ├── __init__.py
│   ├── common
│   │   ├── __init__.py
│   │   ├── collector.py
│   │   ├── commands.py
│   │   ├── file_info.py
│   │   ├── filesystem.py
│   │   ├── helpers.py
│   │   ├── logging.py
│   │   ├── output.py
│   │   ├── path_components.py
│   │   └── variables.py
│   ├── unix
│   │   ├── __init__.py
│   │   └── variables.py
│   └── windows
│       ├── __init__.py
│       ├── registry.py
│       ├── variables.py
│       └── wmi.py
├── fastir_artifacts.py
├── fastir_artifacts.spec
├── integration_tests
│   ├── conftest.py
│   ├── pytest.ini
│   └── test_execution_results.py
├── requirements-test.txt
├── requirements.txt
└── tests
    ├── __init__.py
    ├── conftest.py
    ├── data
    │   ├── MSVCR71.dll
    │   ├── filesystem
    │   │   ├── l1
    │   │   │   ├── l1.txt
    │   │   │   └── l2
    │   │   │       ├── l2.txt
    │   │   │       └── l3
    │   │   │           ├── l3.txt
    │   │   │           └── l4
    │   │   │               └── l4.txt
    │   │   ├── root.txt
    │   │   ├── root2.txt
    │   │   └── test.txt
    │   └── image.raw
    ├── test_collector.py
    ├── test_commands.py
    ├── test_filesystem_manager.py
    ├── test_os_filesystem.py
    ├── test_outputs.py
    ├── test_tsk_filesystem.py
    └── test_variables.py
/.github/workflows/fastir.yml:
--------------------------------------------------------------------------------
1 | name: FastIR
2 | 
3 | on:
4 |   push:
5 |     branches: [ master ]
6 |   pull_request:
7 |     branches: [ master ]
8 |   workflow_dispatch:
9 | 
10 | jobs:
11 |   release:
12 |     runs-on: ubuntu-latest
13 |     if: github.ref == 'refs/heads/master' && github.event_name == 'push'
14 |     outputs:
15 |       tag: ${{ steps.release-creation.outputs.automatic_releases_tag }}
16 |     steps:
17 |       - name: Get current time
18 |         uses: 1466587594/get-current-time@v2.0.2
19 |         id: current-time
20 |         with:
21 |           format: YYYYMMDD-HHmm
22 |       - name: Create release
23 |         uses: "marvinpinto/action-automatic-releases@latest"
24 |         id: release-creation
25 |         with:
26 |           repo_token: "${{ secrets.GITHUB_TOKEN }}"
27 |           automatic_release_tag: "latest"
28 |           title: "Release ${{ steps.current-time.outputs.formattedTime }}-${{ github.sha }}"
29 |           prerelease: false
30 |           draft: false
31 |   build:
32 |     runs-on: ${{ matrix.os }}
33 |     needs: release
34 |     strategy:
35 |       matrix:
36 |         os: [windows-2019]
37 |         python-version: ['3.10']
38 |         arch: [x86, x64]
39 |         include:
40 |           - os: ubuntu-latest
41 |             arch: x64
42 |             python-version: '3.10'
43 |           - os: macos-latest
44 |             arch: x64
45 |             python-version: '3.10'
46 |     steps:
47 |       - name: Use FastIR repository
48 |         uses: actions/checkout@v3
49 |         with:
50 |           fetch-depth: 0
51 |       - name: Install python
52 |         uses: actions/setup-python@v4
53 |         with:
54 |           python-version: ${{ matrix.python-version }}
55 |           architecture: ${{ matrix.arch }}
56 |       - name: Install windows compiler
57 |         run: |
58 |           choco install mingw make oraclejdk
59 |         if: runner.os == 'windows'
60 |       - name: Install Dependencies
61 |         run: |
62 |           python -m pip install --use-pep517 -U -r requirements.txt
63 |           python -m pip install --use-pep517 -U -r requirements-test.txt
64 |         timeout-minutes: 60
65 |       - name: run tests & build
66 |         run: |
67 |           python -m pytest --cov-report xml --cov fastir tests
68 |           pyinstaller fastir_artifacts.spec
69 |       - name: run tests on build
70 |         run: |
71 |           python -m pytest integration_tests
72 |           python -m zipfile -c FastIR-Artifacts-${{ runner.os }}-${{ matrix.arch }}.zip dist/fastir_artifacts
73 |       - name: Upload asset
74 |         uses: AButler/upload-release-assets@v2.0
75 |         if: github.ref == 'refs/heads/master' && github.event_name == 'push'
76 |         with:
77 |           files: FastIR-Artifacts-${{ runner.os }}-${{ matrix.arch }}.zip
78 |           repo-token: ${{ secrets.GITHUB_TOKEN }}
79 |           release-tag: ${{ needs.release.outputs.tag }}
80 | 
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | .Python
11 | build/
12 | develop-eggs/
13 | dist/
14 | downloads/
15 | eggs/
16 | .eggs/
17 | lib/
18 | lib64/
19 | parts/
20 | sdist/
21 | var/
22 | wheels/
23 | pip-wheel-metadata/
24 | share/python-wheels/
25 | *.egg-info/
26 | .installed.cfg
27 | *.egg
28 | MANIFEST
29 | venv/
30 |
31 | # mypy
32 | .mypy_cache/
33 |
34 | # pytest
35 | .pytest_cache/
36 | .coverage
37 | coverage.xml
38 |
39 | # editors
40 | .vscode/
41 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | GNU GENERAL PUBLIC LICENSE
2 | Version 3, 29 June 2007
3 |
4 | Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
5 | Everyone is permitted to copy and distribute verbatim copies
6 | of this license document, but changing it is not allowed.
7 |
8 | Preamble
9 |
10 | The GNU General Public License is a free, copyleft license for
11 | software and other kinds of works.
12 |
13 | The licenses for most software and other practical works are designed
14 | to take away your freedom to share and change the works. By contrast,
15 | the GNU General Public License is intended to guarantee your freedom to
16 | share and change all versions of a program--to make sure it remains free
17 | software for all its users. We, the Free Software Foundation, use the
18 | GNU General Public License for most of our software; it applies also to
19 | any other work released this way by its authors. You can apply it to
20 | your programs, too.
21 |
22 | When we speak of free software, we are referring to freedom, not
23 | price. Our General Public Licenses are designed to make sure that you
24 | have the freedom to distribute copies of free software (and charge for
25 | them if you wish), that you receive source code or can get it if you
26 | want it, that you can change the software or use pieces of it in new
27 | free programs, and that you know you can do these things.
28 |
29 | To protect your rights, we need to prevent others from denying you
30 | these rights or asking you to surrender the rights. Therefore, you have
31 | certain responsibilities if you distribute copies of the software, or if
32 | you modify it: responsibilities to respect the freedom of others.
33 |
34 | For example, if you distribute copies of such a program, whether
35 | gratis or for a fee, you must pass on to the recipients the same
36 | freedoms that you received. You must make sure that they, too, receive
37 | or can get the source code. And you must show them these terms so they
38 | know their rights.
39 |
40 | Developers that use the GNU GPL protect your rights with two steps:
41 | (1) assert copyright on the software, and (2) offer you this License
42 | giving you legal permission to copy, distribute and/or modify it.
43 |
44 | For the developers' and authors' protection, the GPL clearly explains
45 | that there is no warranty for this free software. For both users' and
46 | authors' sake, the GPL requires that modified versions be marked as
47 | changed, so that their problems will not be attributed erroneously to
48 | authors of previous versions.
49 |
50 | Some devices are designed to deny users access to install or run
51 | modified versions of the software inside them, although the manufacturer
52 | can do so. This is fundamentally incompatible with the aim of
53 | protecting users' freedom to change the software. The systematic
54 | pattern of such abuse occurs in the area of products for individuals to
55 | use, which is precisely where it is most unacceptable. Therefore, we
56 | have designed this version of the GPL to prohibit the practice for those
57 | products. If such problems arise substantially in other domains, we
58 | stand ready to extend this provision to those domains in future versions
59 | of the GPL, as needed to protect the freedom of users.
60 |
61 | Finally, every program is threatened constantly by software patents.
62 | States should not allow patents to restrict development and use of
63 | software on general-purpose computers, but in those that do, we wish to
64 | avoid the special danger that patents applied to a free program could
65 | make it effectively proprietary. To prevent this, the GPL assures that
66 | patents cannot be used to render the program non-free.
67 |
68 | The precise terms and conditions for copying, distribution and
69 | modification follow.
70 |
71 | TERMS AND CONDITIONS
72 |
73 | 0. Definitions.
74 |
75 | "This License" refers to version 3 of the GNU General Public License.
76 |
77 | "Copyright" also means copyright-like laws that apply to other kinds of
78 | works, such as semiconductor masks.
79 |
80 | "The Program" refers to any copyrightable work licensed under this
81 | License. Each licensee is addressed as "you". "Licensees" and
82 | "recipients" may be individuals or organizations.
83 |
84 | To "modify" a work means to copy from or adapt all or part of the work
85 | in a fashion requiring copyright permission, other than the making of an
86 | exact copy. The resulting work is called a "modified version" of the
87 | earlier work or a work "based on" the earlier work.
88 |
89 | A "covered work" means either the unmodified Program or a work based
90 | on the Program.
91 |
92 | To "propagate" a work means to do anything with it that, without
93 | permission, would make you directly or secondarily liable for
94 | infringement under applicable copyright law, except executing it on a
95 | computer or modifying a private copy. Propagation includes copying,
96 | distribution (with or without modification), making available to the
97 | public, and in some countries other activities as well.
98 |
99 | To "convey" a work means any kind of propagation that enables other
100 | parties to make or receive copies. Mere interaction with a user through
101 | a computer network, with no transfer of a copy, is not conveying.
102 |
103 | An interactive user interface displays "Appropriate Legal Notices"
104 | to the extent that it includes a convenient and prominently visible
105 | feature that (1) displays an appropriate copyright notice, and (2)
106 | tells the user that there is no warranty for the work (except to the
107 | extent that warranties are provided), that licensees may convey the
108 | work under this License, and how to view a copy of this License. If
109 | the interface presents a list of user commands or options, such as a
110 | menu, a prominent item in the list meets this criterion.
111 |
112 | 1. Source Code.
113 |
114 | The "source code" for a work means the preferred form of the work
115 | for making modifications to it. "Object code" means any non-source
116 | form of a work.
117 |
118 | A "Standard Interface" means an interface that either is an official
119 | standard defined by a recognized standards body, or, in the case of
120 | interfaces specified for a particular programming language, one that
121 | is widely used among developers working in that language.
122 |
123 | The "System Libraries" of an executable work include anything, other
124 | than the work as a whole, that (a) is included in the normal form of
125 | packaging a Major Component, but which is not part of that Major
126 | Component, and (b) serves only to enable use of the work with that
127 | Major Component, or to implement a Standard Interface for which an
128 | implementation is available to the public in source code form. A
129 | "Major Component", in this context, means a major essential component
130 | (kernel, window system, and so on) of the specific operating system
131 | (if any) on which the executable work runs, or a compiler used to
132 | produce the work, or an object code interpreter used to run it.
133 |
134 | The "Corresponding Source" for a work in object code form means all
135 | the source code needed to generate, install, and (for an executable
136 | work) run the object code and to modify the work, including scripts to
137 | control those activities. However, it does not include the work's
138 | System Libraries, or general-purpose tools or generally available free
139 | programs which are used unmodified in performing those activities but
140 | which are not part of the work. For example, Corresponding Source
141 | includes interface definition files associated with source files for
142 | the work, and the source code for shared libraries and dynamically
143 | linked subprograms that the work is specifically designed to require,
144 | such as by intimate data communication or control flow between those
145 | subprograms and other parts of the work.
146 |
147 | The Corresponding Source need not include anything that users
148 | can regenerate automatically from other parts of the Corresponding
149 | Source.
150 |
151 | The Corresponding Source for a work in source code form is that
152 | same work.
153 |
154 | 2. Basic Permissions.
155 |
156 | All rights granted under this License are granted for the term of
157 | copyright on the Program, and are irrevocable provided the stated
158 | conditions are met. This License explicitly affirms your unlimited
159 | permission to run the unmodified Program. The output from running a
160 | covered work is covered by this License only if the output, given its
161 | content, constitutes a covered work. This License acknowledges your
162 | rights of fair use or other equivalent, as provided by copyright law.
163 |
164 | You may make, run and propagate covered works that you do not
165 | convey, without conditions so long as your license otherwise remains
166 | in force. You may convey covered works to others for the sole purpose
167 | of having them make modifications exclusively for you, or provide you
168 | with facilities for running those works, provided that you comply with
169 | the terms of this License in conveying all material for which you do
170 | not control copyright. Those thus making or running the covered works
171 | for you must do so exclusively on your behalf, under your direction
172 | and control, on terms that prohibit them from making any copies of
173 | your copyrighted material outside their relationship with you.
174 |
175 | Conveying under any other circumstances is permitted solely under
176 | the conditions stated below. Sublicensing is not allowed; section 10
177 | makes it unnecessary.
178 |
179 | 3. Protecting Users' Legal Rights From Anti-Circumvention Law.
180 |
181 | No covered work shall be deemed part of an effective technological
182 | measure under any applicable law fulfilling obligations under article
183 | 11 of the WIPO copyright treaty adopted on 20 December 1996, or
184 | similar laws prohibiting or restricting circumvention of such
185 | measures.
186 |
187 | When you convey a covered work, you waive any legal power to forbid
188 | circumvention of technological measures to the extent such circumvention
189 | is effected by exercising rights under this License with respect to
190 | the covered work, and you disclaim any intention to limit operation or
191 | modification of the work as a means of enforcing, against the work's
192 | users, your or third parties' legal rights to forbid circumvention of
193 | technological measures.
194 |
195 | 4. Conveying Verbatim Copies.
196 |
197 | You may convey verbatim copies of the Program's source code as you
198 | receive it, in any medium, provided that you conspicuously and
199 | appropriately publish on each copy an appropriate copyright notice;
200 | keep intact all notices stating that this License and any
201 | non-permissive terms added in accord with section 7 apply to the code;
202 | keep intact all notices of the absence of any warranty; and give all
203 | recipients a copy of this License along with the Program.
204 |
205 | You may charge any price or no price for each copy that you convey,
206 | and you may offer support or warranty protection for a fee.
207 |
208 | 5. Conveying Modified Source Versions.
209 |
210 | You may convey a work based on the Program, or the modifications to
211 | produce it from the Program, in the form of source code under the
212 | terms of section 4, provided that you also meet all of these conditions:
213 |
214 | a) The work must carry prominent notices stating that you modified
215 | it, and giving a relevant date.
216 |
217 | b) The work must carry prominent notices stating that it is
218 | released under this License and any conditions added under section
219 | 7. This requirement modifies the requirement in section 4 to
220 | "keep intact all notices".
221 |
222 | c) You must license the entire work, as a whole, under this
223 | License to anyone who comes into possession of a copy. This
224 | License will therefore apply, along with any applicable section 7
225 | additional terms, to the whole of the work, and all its parts,
226 | regardless of how they are packaged. This License gives no
227 | permission to license the work in any other way, but it does not
228 | invalidate such permission if you have separately received it.
229 |
230 | d) If the work has interactive user interfaces, each must display
231 | Appropriate Legal Notices; however, if the Program has interactive
232 | interfaces that do not display Appropriate Legal Notices, your
233 | work need not make them do so.
234 |
235 | A compilation of a covered work with other separate and independent
236 | works, which are not by their nature extensions of the covered work,
237 | and which are not combined with it such as to form a larger program,
238 | in or on a volume of a storage or distribution medium, is called an
239 | "aggregate" if the compilation and its resulting copyright are not
240 | used to limit the access or legal rights of the compilation's users
241 | beyond what the individual works permit. Inclusion of a covered work
242 | in an aggregate does not cause this License to apply to the other
243 | parts of the aggregate.
244 |
245 | 6. Conveying Non-Source Forms.
246 |
247 | You may convey a covered work in object code form under the terms
248 | of sections 4 and 5, provided that you also convey the
249 | machine-readable Corresponding Source under the terms of this License,
250 | in one of these ways:
251 |
252 | a) Convey the object code in, or embodied in, a physical product
253 | (including a physical distribution medium), accompanied by the
254 | Corresponding Source fixed on a durable physical medium
255 | customarily used for software interchange.
256 |
257 | b) Convey the object code in, or embodied in, a physical product
258 | (including a physical distribution medium), accompanied by a
259 | written offer, valid for at least three years and valid for as
260 | long as you offer spare parts or customer support for that product
261 | model, to give anyone who possesses the object code either (1) a
262 | copy of the Corresponding Source for all the software in the
263 | product that is covered by this License, on a durable physical
264 | medium customarily used for software interchange, for a price no
265 | more than your reasonable cost of physically performing this
266 | conveying of source, or (2) access to copy the
267 | Corresponding Source from a network server at no charge.
268 |
269 | c) Convey individual copies of the object code with a copy of the
270 | written offer to provide the Corresponding Source. This
271 | alternative is allowed only occasionally and noncommercially, and
272 | only if you received the object code with such an offer, in accord
273 | with subsection 6b.
274 |
275 | d) Convey the object code by offering access from a designated
276 | place (gratis or for a charge), and offer equivalent access to the
277 | Corresponding Source in the same way through the same place at no
278 | further charge. You need not require recipients to copy the
279 | Corresponding Source along with the object code. If the place to
280 | copy the object code is a network server, the Corresponding Source
281 | may be on a different server (operated by you or a third party)
282 | that supports equivalent copying facilities, provided you maintain
283 | clear directions next to the object code saying where to find the
284 | Corresponding Source. Regardless of what server hosts the
285 | Corresponding Source, you remain obligated to ensure that it is
286 | available for as long as needed to satisfy these requirements.
287 |
288 | e) Convey the object code using peer-to-peer transmission, provided
289 | you inform other peers where the object code and Corresponding
290 | Source of the work are being offered to the general public at no
291 | charge under subsection 6d.
292 |
293 | A separable portion of the object code, whose source code is excluded
294 | from the Corresponding Source as a System Library, need not be
295 | included in conveying the object code work.
296 |
297 | A "User Product" is either (1) a "consumer product", which means any
298 | tangible personal property which is normally used for personal, family,
299 | or household purposes, or (2) anything designed or sold for incorporation
300 | into a dwelling. In determining whether a product is a consumer product,
301 | doubtful cases shall be resolved in favor of coverage. For a particular
302 | product received by a particular user, "normally used" refers to a
303 | typical or common use of that class of product, regardless of the status
304 | of the particular user or of the way in which the particular user
305 | actually uses, or expects or is expected to use, the product. A product
306 | is a consumer product regardless of whether the product has substantial
307 | commercial, industrial or non-consumer uses, unless such uses represent
308 | the only significant mode of use of the product.
309 |
310 | "Installation Information" for a User Product means any methods,
311 | procedures, authorization keys, or other information required to install
312 | and execute modified versions of a covered work in that User Product from
313 | a modified version of its Corresponding Source. The information must
314 | suffice to ensure that the continued functioning of the modified object
315 | code is in no case prevented or interfered with solely because
316 | modification has been made.
317 |
318 | If you convey an object code work under this section in, or with, or
319 | specifically for use in, a User Product, and the conveying occurs as
320 | part of a transaction in which the right of possession and use of the
321 | User Product is transferred to the recipient in perpetuity or for a
322 | fixed term (regardless of how the transaction is characterized), the
323 | Corresponding Source conveyed under this section must be accompanied
324 | by the Installation Information. But this requirement does not apply
325 | if neither you nor any third party retains the ability to install
326 | modified object code on the User Product (for example, the work has
327 | been installed in ROM).
328 |
329 | The requirement to provide Installation Information does not include a
330 | requirement to continue to provide support service, warranty, or updates
331 | for a work that has been modified or installed by the recipient, or for
332 | the User Product in which it has been modified or installed. Access to a
333 | network may be denied when the modification itself materially and
334 | adversely affects the operation of the network or violates the rules and
335 | protocols for communication across the network.
336 |
337 | Corresponding Source conveyed, and Installation Information provided,
338 | in accord with this section must be in a format that is publicly
339 | documented (and with an implementation available to the public in
340 | source code form), and must require no special password or key for
341 | unpacking, reading or copying.
342 |
343 | 7. Additional Terms.
344 |
345 | "Additional permissions" are terms that supplement the terms of this
346 | License by making exceptions from one or more of its conditions.
347 | Additional permissions that are applicable to the entire Program shall
348 | be treated as though they were included in this License, to the extent
349 | that they are valid under applicable law. If additional permissions
350 | apply only to part of the Program, that part may be used separately
351 | under those permissions, but the entire Program remains governed by
352 | this License without regard to the additional permissions.
353 |
354 | When you convey a copy of a covered work, you may at your option
355 | remove any additional permissions from that copy, or from any part of
356 | it. (Additional permissions may be written to require their own
357 | removal in certain cases when you modify the work.) You may place
358 | additional permissions on material, added by you to a covered work,
359 | for which you have or can give appropriate copyright permission.
360 |
361 | Notwithstanding any other provision of this License, for material you
362 | add to a covered work, you may (if authorized by the copyright holders of
363 | that material) supplement the terms of this License with terms:
364 |
365 | a) Disclaiming warranty or limiting liability differently from the
366 | terms of sections 15 and 16 of this License; or
367 |
368 | b) Requiring preservation of specified reasonable legal notices or
369 | author attributions in that material or in the Appropriate Legal
370 | Notices displayed by works containing it; or
371 |
372 | c) Prohibiting misrepresentation of the origin of that material, or
373 | requiring that modified versions of such material be marked in
374 | reasonable ways as different from the original version; or
375 |
376 | d) Limiting the use for publicity purposes of names of licensors or
377 | authors of the material; or
378 |
379 | e) Declining to grant rights under trademark law for use of some
380 | trade names, trademarks, or service marks; or
381 |
382 | f) Requiring indemnification of licensors and authors of that
383 | material by anyone who conveys the material (or modified versions of
384 | it) with contractual assumptions of liability to the recipient, for
385 | any liability that these contractual assumptions directly impose on
386 | those licensors and authors.
387 |
388 | All other non-permissive additional terms are considered "further
389 | restrictions" within the meaning of section 10. If the Program as you
390 | received it, or any part of it, contains a notice stating that it is
391 | governed by this License along with a term that is a further
392 | restriction, you may remove that term. If a license document contains
393 | a further restriction but permits relicensing or conveying under this
394 | License, you may add to a covered work material governed by the terms
395 | of that license document, provided that the further restriction does
396 | not survive such relicensing or conveying.
397 |
398 | If you add terms to a covered work in accord with this section, you
399 | must place, in the relevant source files, a statement of the
400 | additional terms that apply to those files, or a notice indicating
401 | where to find the applicable terms.
402 |
403 | Additional terms, permissive or non-permissive, may be stated in the
404 | form of a separately written license, or stated as exceptions;
405 | the above requirements apply either way.
406 |
407 | 8. Termination.
408 |
409 | You may not propagate or modify a covered work except as expressly
410 | provided under this License. Any attempt otherwise to propagate or
411 | modify it is void, and will automatically terminate your rights under
412 | this License (including any patent licenses granted under the third
413 | paragraph of section 11).
414 |
415 | However, if you cease all violation of this License, then your
416 | license from a particular copyright holder is reinstated (a)
417 | provisionally, unless and until the copyright holder explicitly and
418 | finally terminates your license, and (b) permanently, if the copyright
419 | holder fails to notify you of the violation by some reasonable means
420 | prior to 60 days after the cessation.
421 |
422 | Moreover, your license from a particular copyright holder is
423 | reinstated permanently if the copyright holder notifies you of the
424 | violation by some reasonable means, this is the first time you have
425 | received notice of violation of this License (for any work) from that
426 | copyright holder, and you cure the violation prior to 30 days after
427 | your receipt of the notice.
428 |
429 | Termination of your rights under this section does not terminate the
430 | licenses of parties who have received copies or rights from you under
431 | this License. If your rights have been terminated and not permanently
432 | reinstated, you do not qualify to receive new licenses for the same
433 | material under section 10.
434 |
435 | 9. Acceptance Not Required for Having Copies.
436 |
437 | You are not required to accept this License in order to receive or
438 | run a copy of the Program. Ancillary propagation of a covered work
439 | occurring solely as a consequence of using peer-to-peer transmission
440 | to receive a copy likewise does not require acceptance. However,
441 | nothing other than this License grants you permission to propagate or
442 | modify any covered work. These actions infringe copyright if you do
443 | not accept this License. Therefore, by modifying or propagating a
444 | covered work, you indicate your acceptance of this License to do so.
445 |
446 | 10. Automatic Licensing of Downstream Recipients.
447 |
448 | Each time you convey a covered work, the recipient automatically
449 | receives a license from the original licensors, to run, modify and
450 | propagate that work, subject to this License. You are not responsible
451 | for enforcing compliance by third parties with this License.
452 |
453 | An "entity transaction" is a transaction transferring control of an
454 | organization, or substantially all assets of one, or subdividing an
455 | organization, or merging organizations. If propagation of a covered
456 | work results from an entity transaction, each party to that
457 | transaction who receives a copy of the work also receives whatever
458 | licenses to the work the party's predecessor in interest had or could
459 | give under the previous paragraph, plus a right to possession of the
460 | Corresponding Source of the work from the predecessor in interest, if
461 | the predecessor has it or can get it with reasonable efforts.
462 |
463 | You may not impose any further restrictions on the exercise of the
464 | rights granted or affirmed under this License. For example, you may
465 | not impose a license fee, royalty, or other charge for exercise of
466 | rights granted under this License, and you may not initiate litigation
467 | (including a cross-claim or counterclaim in a lawsuit) alleging that
468 | any patent claim is infringed by making, using, selling, offering for
469 | sale, or importing the Program or any portion of it.
470 |
471 | 11. Patents.
472 |
473 | A "contributor" is a copyright holder who authorizes use under this
474 | License of the Program or a work on which the Program is based. The
475 | work thus licensed is called the contributor's "contributor version".
476 |
477 | A contributor's "essential patent claims" are all patent claims
478 | owned or controlled by the contributor, whether already acquired or
479 | hereafter acquired, that would be infringed by some manner, permitted
480 | by this License, of making, using, or selling its contributor version,
481 | but do not include claims that would be infringed only as a
482 | consequence of further modification of the contributor version. For
483 | purposes of this definition, "control" includes the right to grant
484 | patent sublicenses in a manner consistent with the requirements of
485 | this License.
486 |
487 | Each contributor grants you a non-exclusive, worldwide, royalty-free
488 | patent license under the contributor's essential patent claims, to
489 | make, use, sell, offer for sale, import and otherwise run, modify and
490 | propagate the contents of its contributor version.
491 |
492 | In the following three paragraphs, a "patent license" is any express
493 | agreement or commitment, however denominated, not to enforce a patent
494 | (such as an express permission to practice a patent or covenant not to
495 | sue for patent infringement). To "grant" such a patent license to a
496 | party means to make such an agreement or commitment not to enforce a
497 | patent against the party.
498 |
499 | If you convey a covered work, knowingly relying on a patent license,
500 | and the Corresponding Source of the work is not available for anyone
501 | to copy, free of charge and under the terms of this License, through a
502 | publicly available network server or other readily accessible means,
503 | then you must either (1) cause the Corresponding Source to be so
504 | available, or (2) arrange to deprive yourself of the benefit of the
505 | patent license for this particular work, or (3) arrange, in a manner
506 | consistent with the requirements of this License, to extend the patent
507 | license to downstream recipients. "Knowingly relying" means you have
508 | actual knowledge that, but for the patent license, your conveying the
509 | covered work in a country, or your recipient's use of the covered work
510 | in a country, would infringe one or more identifiable patents in that
511 | country that you have reason to believe are valid.
512 |
513 | If, pursuant to or in connection with a single transaction or
514 | arrangement, you convey, or propagate by procuring conveyance of, a
515 | covered work, and grant a patent license to some of the parties
516 | receiving the covered work authorizing them to use, propagate, modify
517 | or convey a specific copy of the covered work, then the patent license
518 | you grant is automatically extended to all recipients of the covered
519 | work and works based on it.
520 |
521 | A patent license is "discriminatory" if it does not include within
522 | the scope of its coverage, prohibits the exercise of, or is
523 | conditioned on the non-exercise of one or more of the rights that are
524 | specifically granted under this License. You may not convey a covered
525 | work if you are a party to an arrangement with a third party that is
526 | in the business of distributing software, under which you make payment
527 | to the third party based on the extent of your activity of conveying
528 | the work, and under which the third party grants, to any of the
529 | parties who would receive the covered work from you, a discriminatory
530 | patent license (a) in connection with copies of the covered work
531 | conveyed by you (or copies made from those copies), or (b) primarily
532 | for and in connection with specific products or compilations that
533 | contain the covered work, unless you entered into that arrangement,
534 | or that patent license was granted, prior to 28 March 2007.
535 |
536 | Nothing in this License shall be construed as excluding or limiting
537 | any implied license or other defenses to infringement that may
538 | otherwise be available to you under applicable patent law.
539 |
540 | 12. No Surrender of Others' Freedom.
541 |
542 | If conditions are imposed on you (whether by court order, agreement or
543 | otherwise) that contradict the conditions of this License, they do not
544 | excuse you from the conditions of this License. If you cannot convey a
545 | covered work so as to satisfy simultaneously your obligations under this
546 | License and any other pertinent obligations, then as a consequence you may
547 | not convey it at all. For example, if you agree to terms that obligate you
548 | to collect a royalty for further conveying from those to whom you convey
549 | the Program, the only way you could satisfy both those terms and this
550 | License would be to refrain entirely from conveying the Program.
551 |
552 | 13. Use with the GNU Affero General Public License.
553 |
554 | Notwithstanding any other provision of this License, you have
555 | permission to link or combine any covered work with a work licensed
556 | under version 3 of the GNU Affero General Public License into a single
557 | combined work, and to convey the resulting work. The terms of this
558 | License will continue to apply to the part which is the covered work,
559 | but the special requirements of the GNU Affero General Public License,
560 | section 13, concerning interaction through a network will apply to the
561 | combination as such.
562 |
563 | 14. Revised Versions of this License.
564 |
565 | The Free Software Foundation may publish revised and/or new versions of
566 | the GNU General Public License from time to time. Such new versions will
567 | be similar in spirit to the present version, but may differ in detail to
568 | address new problems or concerns.
569 |
570 | Each version is given a distinguishing version number. If the
571 | Program specifies that a certain numbered version of the GNU General
572 | Public License "or any later version" applies to it, you have the
573 | option of following the terms and conditions either of that numbered
574 | version or of any later version published by the Free Software
575 | Foundation. If the Program does not specify a version number of the
576 | GNU General Public License, you may choose any version ever published
577 | by the Free Software Foundation.
578 |
579 | If the Program specifies that a proxy can decide which future
580 | versions of the GNU General Public License can be used, that proxy's
581 | public statement of acceptance of a version permanently authorizes you
582 | to choose that version for the Program.
583 |
584 | Later license versions may give you additional or different
585 | permissions. However, no additional obligations are imposed on any
586 | author or copyright holder as a result of your choosing to follow a
587 | later version.
588 |
589 | 15. Disclaimer of Warranty.
590 |
591 | THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
592 | APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
593 | HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
594 | OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
595 | THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
596 | PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
597 | IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
598 | ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
599 |
600 | 16. Limitation of Liability.
601 |
602 | IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
603 | WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
604 | THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
605 | GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
606 | USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
607 | DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
608 | PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
609 | EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
610 | SUCH DAMAGES.
611 |
612 | 17. Interpretation of Sections 15 and 16.
613 |
614 | If the disclaimer of warranty and limitation of liability provided
615 | above cannot be given local legal effect according to their terms,
616 | reviewing courts shall apply local law that most closely approximates
617 | an absolute waiver of all civil liability in connection with the
618 | Program, unless a warranty or assumption of liability accompanies a
619 | copy of the Program in return for a fee.
620 |
621 | END OF TERMS AND CONDITIONS
622 |
623 | How to Apply These Terms to Your New Programs
624 |
625 | If you develop a new program, and you want it to be of the greatest
626 | possible use to the public, the best way to achieve this is to make it
627 | free software which everyone can redistribute and change under these terms.
628 |
629 | To do so, attach the following notices to the program. It is safest
630 | to attach them to the start of each source file to most effectively
631 | state the exclusion of warranty; and each file should have at least
632 | the "copyright" line and a pointer to where the full notice is found.
633 |
634 | <one line to give the program's name and a brief idea of what it does.>
635 | Copyright (C) <year>  <name of author>
636 |
637 | This program is free software: you can redistribute it and/or modify
638 | it under the terms of the GNU General Public License as published by
639 | the Free Software Foundation, either version 3 of the License, or
640 | (at your option) any later version.
641 |
642 | This program is distributed in the hope that it will be useful,
643 | but WITHOUT ANY WARRANTY; without even the implied warranty of
644 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
645 | GNU General Public License for more details.
646 |
647 | You should have received a copy of the GNU General Public License
648 | along with this program. If not, see <https://www.gnu.org/licenses/>.
649 |
650 | Also add information on how to contact you by electronic and paper mail.
651 |
652 | If the program does terminal interaction, make it output a short
653 | notice like this when it starts in an interactive mode:
654 |
655 | <program>  Copyright (C) <year>  <name of author>
656 | This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
657 | This is free software, and you are welcome to redistribute it
658 | under certain conditions; type `show c' for details.
659 |
660 | The hypothetical commands `show w' and `show c' should show the appropriate
661 | parts of the General Public License. Of course, your program's commands
662 | might be different; for a GUI interface, you would use an "about box".
663 |
664 | You should also get your employer (if you work as a programmer) or school,
665 | if any, to sign a "copyright disclaimer" for the program, if necessary.
666 | For more information on this, and how to apply and follow the GNU GPL, see
667 | <https://www.gnu.org/licenses/>.
668 |
669 | The GNU General Public License does not permit incorporating your program
670 | into proprietary programs. If your program is a subroutine library, you
671 | may consider it more useful to permit linking proprietary applications with
672 | the library. If this is what you want to do, use the GNU Lesser General
673 | Public License instead of this License. But first, please read
674 | <https://www.gnu.org/licenses/why-not-lgpl.html>.
675 |
--------------------------------------------------------------------------------
/Logo_OWN-Noir.ico:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/OWNsecurity/fastir_artifacts/799db876984ebe7d4096c72c609d4cc28fb31e64/Logo_OWN-Noir.ico
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # FastIR Artifacts
2 |
3 | ## What is FastIR Artifacts
4 |
5 | FastIR Artifacts is a forensic artifacts collector that can be used on a live host.
6 |
7 | FastIR Artifacts is focused on artifact collection; it does not parse or analyze the collected artifacts.
8 |
9 | It is cross-platform: a single code base runs on GNU/Linux, Windows and Mac OS X.
10 |
11 | It leverages the [Digital Forensics Artifact Repository](https://github.com/ForensicArtifacts/artifacts) for artifact definitions (the Digital Forensics Artifact Repository is a free, community-sourced, machine-readable knowledge base of digital forensic artifacts).
12 |
13 | It also leverages the [Sleuth Kit library](https://github.com/py4n6/pytsk) when the underlying file system is supported.
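
Raw file-system access is what lets a live collector read files that the operating system keeps locked or in use (registry hives, `$MFT`, event logs). The snippet below is a minimal, hypothetical illustration of that kind of access with the `pytsk3` bindings; it is not FastIR Artifacts' internal code, and the device path is only an example.

```python
import pytsk3

# Open the raw volume (requires admin rights); "\\.\C:" is an example path.
img = pytsk3.Img_Info(r"\\.\C:")
fs = pytsk3.FS_Info(img)

# Open a file that is normally locked by the OS and read it directly from
# the file system structures instead of going through the OS API.
entry = fs.open("/$MFT")
size = entry.info.meta.size
data = entry.read_random(0, min(size, 1024 * 1024))  # first 1 MiB at most
print(f"$MFT size: {size} bytes, read {len(data)} bytes")
```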
14 |
15 | ## Download
16 |
17 | Binaries for Windows, GNU/Linux and Mac OS X can be downloaded from the [release page](../../releases) of the project.
18 |
19 | ## Running
20 |
21 | FastIR Artifacts must be run with admin rights (for instance using sudo on GNU/Linux or Mac OS X, or a UAC elevation on Windows).
22 |
23 | Run FastIR Artifacts with the `-h` argument to see the available options.
24 | ```
25 | C:\Users\sekoia\Desktop\fastir_artifacts>fastir_artifacts.exe -h
26 | usage: fastir_artifacts.exe [-h] [-i INCLUDE] [-e EXCLUDE]
27 |                             [-d DIRECTORY [DIRECTORY ...]] [-l] [-m MAXSIZE]
28 |                             [-o OUTPUT] [-s]
29 | 
30 | FastIR Artifacts - Collect ForensicArtifacts Args that start with '--' (eg.
31 | -i) can also be set in a config file
32 | (fastir_artifacts.ini). Config file
33 | syntax allows: key=value, flag=true, stuff=[a,b,c] (for details, see syntax at
34 | https://goo.gl/R74nmi). If an arg is specified in more than one place, then
35 | commandline values override config file values which override defaults.
36 | 
37 | optional arguments:
38 |   -h, --help            show this help message and exit
39 |   -i INCLUDE, --include INCLUDE
40 |                         Artifacts to collect (comma-separated)
41 |   -e EXCLUDE, --exclude EXCLUDE
42 |                         Artifacts to ignore (comma-separated)
43 |   -d DIRECTORY [DIRECTORY ...], --directory DIRECTORY [DIRECTORY ...]
44 |                         Directory containing Artifacts definitions
45 |   -l, --library         Keep loading Artifacts definitions from the
46 |                         ForensicArtifacts library (in addition to custom
47 |                         directories)
48 |   -m MAXSIZE, --maxsize MAXSIZE
49 |                         Do not collect file with size > n
50 |   -o OUTPUT, --output OUTPUT
51 |                         Directory where the results are created
52 |   -s, --sha256          Compute SHA-256 of collected files
53 | ```
54 |
55 | Options can be taken from command line switches or from a `fastir_artifacts.ini` configuration file.
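
The wording of the help output above appears to be produced by the `configargparse` library, which merges command-line switches, config-file values and defaults in that order of precedence. The following is an illustrative sketch of how such option handling is typically wired with it (only a few options are shown; this is not FastIR Artifacts' actual code):

```python
import configargparse

# Options may come from the command line or from fastir_artifacts.ini;
# command-line values override config-file values, which override defaults.
parser = configargparse.ArgParser(default_config_files=['fastir_artifacts.ini'])
parser.add_argument('-i', '--include', help='Artifacts to collect (comma-separated)')
parser.add_argument('-e', '--exclude', help='Artifacts to ignore (comma-separated)')
parser.add_argument('-s', '--sha256', action='store_true',
                    help='Compute SHA-256 of collected files')

args = parser.parse_args()
print(args.include, args.exclude, args.sha256)
```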
56 |
57 | Without any `include` or `exclude` argument, FastIR Artifacts collects a default set of artifacts
58 | defined in `examples/own.yaml`, designed for quick acquisition.
59 |
60 | ## Creating a custom FastIR Artifacts collector from a release
61 |
62 | To create a custom FastIR Artifacts collector (custom artifact definitions and custom options):
63 |
64 | - download a release for your operating system, unzip it
65 | - create a directory with your custom artifact definitions inside the `fastir_artifacts` folder, for instance `custom_artifacts`
66 | - create a `fastir_artifacts.ini` file
67 | - add a `directory = custom_artifacts` line to the `fastir_artifacts.ini` file
68 | - add more options to the `fastir_artifacts.ini` file, for instance `library = True` and `exclude = BrowserCache,WindowsSearchDatabase`
69 | - zip the `fastir_artifacts` folder and ship it
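
Before zipping the folder, it can be worth checking that the custom definitions parse correctly. Below is a minimal sketch using the `artifacts` library (the ForensicArtifacts reader FastIR Artifacts depends on); `custom_artifacts` is the example directory name from the steps above:

```python
from artifacts import reader

# Parse every YAML file in the custom directory; a malformed definition
# raises an error here instead of failing at collection time.
artifact_reader = reader.YamlArtifactsReader()
for definition in artifact_reader.ReadDirectory('custom_artifacts'):
    print(definition.name, '-', definition.doc)
```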
70 |
71 | ## Custom Artifact Types
72 |
73 | FastIR Artifacts supports the following artifact types in addition to the types defined by the [Digital Forensics Artifact Repository](https://github.com/ForensicArtifacts/artifacts).
74 |
75 | ### FileInfo
76 |
77 | The FileInfo artifact type can be used to collect metadata about files instead of collecting the files themselves:
78 |
79 | ```yaml
80 | name: System32 Metadata
81 | doc: Metadata about dll and exe files in System32.
82 | sources:
83 | - type: FILE_INFO
84 |   attributes:
85 |     paths:
86 |     - '%%environ_systemroot%%\System32\*.dll'
87 |     - '%%environ_systemroot%%\System32\*.exe'
88 |     - '%%environ_systemroot%%\System32\**\*.dll'
89 |     - '%%environ_systemroot%%\System32\**\*.exe'
90 |     separator: '\'
91 | supported_os: [Windows]
92 | ```
93 |
94 | It collects the following information (stored in a JSONL file using [Elastic Common Schema](https://www.elastic.co/guide/en/ecs/current/index.html)):
95 |
96 | - MD5 hash
97 | - SHA-1 hash
98 | - SHA-256 hash
99 | - Mime type
100 | - File size
101 | - Imphash (PE only)
102 | - Compilation Date (PE only)
103 | - Company Name (PE only)
104 | - File Description (PE only)
105 | - File Version (PE only)
106 | - Internal Name (PE only)
107 | - Product Name (PE only)
108 |
109 | ## Development
110 |
111 | ### Requirements
112 |
113 | Python 3 and pip must be installed. FastIR Artifacts was successfully tested with Python 3.6 and 3.7; the CI workflow currently builds it with Python 3.10.
114 |
115 | On Windows, Microsoft Visual C++ 14.0 is needed (see [Windows Compilers](https://wiki.python.org/moin/WindowsCompilers)).
116 |
117 | Dependencies can be installed with:
118 | ```
119 | pip install -U -r requirements.txt
120 | ```
121 |
122 | ### Generating binaries
123 |
124 | PyInstaller can freeze FastIR Artifacts into a one-folder bundle:
125 | ```
126 | pyinstaller fastir_artifacts.spec
127 | ```
128 |
--------------------------------------------------------------------------------
/examples/fastir_artifacts.ini:
--------------------------------------------------------------------------------
1 | include = Essentials
2 | sha256 = True
3 |
--------------------------------------------------------------------------------
/examples/own.yaml:
--------------------------------------------------------------------------------
1 | # Custom artifacts.
2 | 
3 | name: WindowsXMLEventLogs
4 | doc: Windows XML Event Logs.
5 | sources:
6 | - type: FILE
7 |   attributes:
8 |     paths: ['%%environ_systemroot%%\System32\winevt\Logs\*.evtx']
9 |     separator: '\'
10 | labels: [Logs]
11 | supported_os: [Windows]
12 | ---
13 | name: WindowsEssentials
14 | doc: Most important artifacts on Windows.
15 | sources:
16 | - type: ARTIFACT_GROUP
17 |   attributes:
18 |     names:
19 |     # Registry hives
20 |     - 'WindowsUserRegistryFiles'
21 |     - 'WindowsUserRegistryTransactionLogFiles'
22 |     - 'WindowsSystemRegistryFiles'
23 |     - 'WindowsSystemRegistryTransactionLogFiles'
24 |     - 'WindowsAMCacheHveFile'
25 |     # Execution artifacts
26 |     - 'WindowsPrefetchFiles'
27 |     - 'WindowsUserRecentFiles'
28 |     - 'WindowsUserAutomaticDestinationsJumpLists'
29 |     - 'WindowsUserCustomDestinationsJumpLists'
30 |     - 'WindowsActivitiesCache'
31 |     - 'WindowsRecentFileCacheBCF'
32 |     - 'WindowsSystemResourceUsageMonitorDatabaseFile'
33 |     # Persistence
34 |     - 'WindowsApplicationCompatibilityInstalledShimDatabases'
35 |     - 'WindowsCIMRepositoryFiles'
36 |     - 'WindowsGroupPolicyScripts'
37 |     - 'WindowsScheduledTasks'
38 |     - 'WindowsStartupFolders'
39 |     # USB
40 |     - 'WindowsSetupApiLogs'
41 |     # Event logs
42 |     - 'WindowsXMLEventLogs'
43 |     # NTFS
44 |     - 'NTFSMFTFiles'
45 |     - 'NTFSLogFile'
46 |     # MISC
47 |     - 'WindowsBITSQueueManagerDatabases'
48 |     - 'WindowsHostsFiles'
49 |     - 'WindowsSystemIniFiles'
50 |     # Browser history
51 |     - 'ChromeHistory'
52 |     - 'FirefoxHistory'
53 |     - 'InternetExplorerHistory'
54 |     - 'OperaHistory'
55 |     - 'SafariHistory'
56 |     # Commands
57 |     - 'WindowsFirewallEnabledRules'
58 |     - 'WindowsFirewallRules'
59 |     # WMI
60 |     - 'WMIAccountUsersDomain'
61 |     - 'WMIAntivirusProduct'
62 |     - 'WMIComputerSystemProduct'
63 |     - 'WMIDNSClientCache'
64 |     - 'WMIDrivers'
65 |     - 'WMIEnumerateASEC'
66 |     - 'WMIEnumerateCLEC'
67 |     - 'WMIHotFixes'
68 |     - 'WMIInstalledSoftware'
69 |     - 'WMILastBootupTime'
70 |     - 'WMILoggedOnSessions'
71 |     - 'WMILoggedOnUsers'
72 |     - 'WMILogicalDisks'
73 |     - 'WMINetNeighbors'
74 |     - 'WMINetTCPConnections'
75 |     - 'WMINetUDPEndpoints'
76 |     - 'WMIOperatingSystem'
77 |     - 'WMIPhysicalMemory'
78 |     - 'WMIProcessList'
79 |     - 'WMIProfileUsersHomeDir'
80 |     - 'WMIScheduledTasks'
81 |     - 'WMIServices'
82 |     - 'WMIStartupCommands'
83 | supported_os: [Windows]
84 | ---
85 | name: LinuxEssentials
86 | doc: Most important artifacts on Linux.
87 | sources:
88 | - type: ARTIFACT_GROUP
89 |   attributes:
90 |     names:
91 |     # Configuration files
92 |     - 'NfsExportsFile'
93 |     - 'SshdConfigFile'
94 |     - 'SshUserConfigFile'
95 |     # Docker
96 |     - 'DockerContainerConfig'
97 |     - 'GKEDockerContainerLogs'
98 |     # Linux
99 |     - 'AnacronFiles'
100 |     - 'APTSources'
101 |     - 'APTTrustKeys'
102 |     - 'CronAtAllowDenyFiles'
103 |     - 'DebianPackagesLogFiles'
104 |     - 'DebianPackagesStatus'
105 |     - 'DebianVersion'
106 |     - 'DNSResolvConfFile'
107 |     - 'HostAccessPolicyConfiguration'
108 |     - 'IPTablesRules'
109 |     - 'KernelModules'
110 |     - 'LinuxAtJobs'
111 |     - 'LinuxAuditLogs'
112 |     - 'LinuxAuthLogs'
113 |     - 'LinuxCronLogs'
114 |     - 'LinuxCronTabs'
115 |     - 'LinuxDaemonLogFiles'
116 |     - 'LinuxDistributionRelease'
117 |     - 'LinuxDSDTTable'
118 |     - 'LinuxFstab'
119 |     - 'LinuxGrubConfiguration'
120 |     - 'LinuxHostnameFile'
121 |     - 'LinuxInitrdFiles'
122 |     - 'LinuxIssueFile'
123 |     - 'LinuxKernelLogFiles'
124 |     - 'LinuxLastlogFile'
125 |     - 'LinuxLoaderSystemPreloadFile'
126 |     - 'LinuxLSBInit'
127 |     - 'LinuxLocalTime'
128 |     - 'LinuxLSBRelease'
129 |     - 'LinuxMessagesLogFiles'
130 |     - 'LinuxMountCmd'
131 |     - 'LinuxMountInfo'
132 |     - 'LinuxPamConfigs'
133 |     - 'LinuxPasswdFile'
134 |     - 'LinuxReleaseInfo'
135 |     - 'LinuxRsyslogConfigs'
136 |     - 'LinuxScheduleFiles'
137 |     - 'LinuxServices'
138 |     - 'LinuxSSDTTables'
139 |     - 'LinuxSysLogFiles'
140 |     - 'LinuxSyslogNgConfigs'
141 |     - 'LinuxSystemdOSRelease'
142 |     - 'LinuxSysVInit'
143 |     - 'LinuxTimezoneFile'
144 |     - 'LinuxUtmpFiles'
145 |     - 'LinuxWtmp'
146 |     - 'LinuxXinetd'
147 |     - 'ListProcessesPsCommand'
148 |     - 'LoadedKernelModules'
149 |     - 'LoginPolicyConfiguration'
150 |     - 'NetgroupConfiguration'
151 |     - 'NtpConfFile'
152 |     - 'PCIDevicesInfoFiles'
153 |     - 'SSHHostPubKeys'
154 |     - 'ThumbnailCacheFolder'
155 |     - 'YumSources'
156 |     - 'ZeitgeistDatabase'
157 |     # /proc
158 |     - 'LinuxASLREnabled'
159 |     - 'LinuxIgnoreICMPBroadcasts'
160 |     - 'LinuxKernelBootloader'
161 |     - 'LinuxKernelModuleRestrictions'
162 |     - 'LinuxKernelModuleTaintStatus'
163 |     - 'LinuxNetworkIpForwardingState'
164 |     - 'LinuxNetworkPathFilteringSettings'
165 |     - 'LinuxNetworkRedirectState'
166 |     - 'LinuxProcArp'
167 |     - 'LinuxProcMounts'
168 |     - 'LinuxProcSysHardeningSettings'
169 |     - 'LinuxRestrictedDmesgReadPrivileges'
170 |     - 'LinuxRestrictedKernelPointerReadPrivileges'
171 |     - 'LinuxSecureFsLinks'
172 |     - 'LinuxSecureSuidCoreDumps'
173 |     - 'LinuxSyncookieState'
174 |     - 'LinuxSysctlCmd'
175 |     # Unix
176 |     - 'AllShellConfigs'
177 |     - 'AllUsersShellHistory'
178 |     - 'GlobalShellConfigs'
179 |     - 'RootUserShellConfigs'
180 |     - 'RootUserShellHistory'
181 |     - 'UnixGroups'
182 |     - 'UnixHostsFile'
183 |     - 'UnixPasswd'
184 |     - 'UnixShadowFile'
185 |     - 'UnixSudoersConfiguration'
186 |     - 'UnixUsersGroups'
187 |     - 'UsersShellConfigs'
188 |     - 'UsersShellHistory'
189 |     # Browser history
190 |     - 'ChromeHistory'
191 |     - 'FirefoxHistory'
192 |     - 'OperaHistory'
193 |     # Web Servers
194 |     - 'NginxAccessLogs'
195 |     - 'ApacheAccessLogs'
196 |     - 'WordpressConfigFile'
197 | supported_os: [Linux]
198 | ---
199 | name: DarwinEssentials
200 | doc: Most important artifacts on Darwin.
201 | sources:
202 | - type: ARTIFACT_GROUP
203 |   attributes:
204 |     names:
205 |     # Configuration files
206 |     - 'NfsExportsFile'
207 |     - 'SshdConfigFile'
208 |     - 'SshUserConfigFile'
209 |     # Mac OS
210 |     - 'MacOSAppleSystemLogFiles'
211 |     - 'MacOSApplications'
212 |     - 'MacOSApplicationsRecentItems'
213 |     - 'MacOSApplicationSupport'
214 |     - 'MacOSAtJobs'
215 |     - 'MacOSAuditLogFiles'
216 |     - 'MacOSBashHistory'
217 |     - 'MacOSBashSessions'
218 |     - 'MacOSBluetoothPlistFile'
219 |     - 'MacOSCoreAnalyticsFiles'
220 |     - 'MacOSCronTabs'
221 |     - 'MacOSDock'
222 |     - 'MacOSGlobalPreferencesPlistFile'
223 |     - 'MacOSHostsFile'
224 |     - 'MacOSiCloudAccounts'
225 |     - 'MacOSiCloudPreferences'
226 |     - 'MacOSiDevices'
227 |     - 'MacOSInstallationHistory'
228 |     - 'MacOSInstallationLogFile'
229 |     - 'MacOSiOSBackupInfo'
230 |     - 'MacOSiOSBackupManifest'
231 |     - 'MacOSiOSBackupMbdb'
232 |     - 'MacOSiOSBackupsMainDirectory'
233 |     - 'MacOSiOSBackupStatus'
234 |     - 'MacOSKeychains'
235 |     - 'MacOSKeyboardLayoutPlistFile'
236 |     - 'MacOSKextFiles'
237 |     - 'MacOSDuetKnowledgeBase'
238 |     - 'MacOSLaunchAgentsPlistFiles'
239 |     - 'MacOSLaunchDaemonsPlistFiles'
240 |     - 'MacOSLastlogFile'
241 |     - 'MacOSLoadedKexts'
242 |     - 'MacOSLocalTime'
243 |     - 'MacOSLoginWindowPlistFile'
244 |     - 'MacOSMailAccounts'
245 |     - 'MacOSMailBackupTOC'
246 |     - 'MacOSMailboxes'
247 |     - 'MacOSMailDownloadAttachments'
248 |     - 'MacOSMailEnvelopIndex'
249 |     - 'MacOSMailIMAP'
250 |     - 'MacOSMailMainDirectory'
251 |     - 'MacOSMailOpenedAttachments'
252 |     - 'MacOSMailPOP'
253 |     - 'MacOSMailPreferences'
254 |     - 'MacOSMailRecentContacts'
255 |     - 'MacOSMailSignatures'
256 |     - 'MacOSMiscLogs'
257 |     - 'MacOSMountedDMGs'
258 |     - 'MacOSNotificationCenter'
259 |     - 'MacOSPeriodicSystemFunctions'
260 |     - 'MacOSQuarantineEvents'
261 |     - 'MacOSRecentItems'
262 |     - 'MacOSSidebarLists'
263 |     - 'MacOSSleepimageFile'
264 |     - 'MacOSStartupItemsPlistFiles'
265 |     - 'MacOSSwapFiles'
266 |     - 'MacOSSystemConfigurationPreferencesPlistFile'
267 |     - 'MacOSSystemInstallationTime'
268 |     - 'MacOSSystemLogFiles'
269 |     - 'MacOSSystemPreferencesPlistFiles'
270 |     - 'MacOSSystemVersionPlistFile'
271 |     - 'MacOSTimeMachinePlistFile'
272 |     - 'MacOSUnifiedLogging'
273 |     - 'MacOSUpdate'
274 |     - 'MacOSUserApplicationLogs'
275 |     - 'MacOSUserDesktopDirectory'
276 |     - 'MacOSUserDocumentsDirectory'
277 |     - 'MacOSUserDownloadsDirectory'
278 |     - 'MacOSUserGlobalPreferences'
279 |     - 'MacOSUserLibraryDirectory'
280 |     - 'MacOSUserLoginItems'
281 |     - 'MacOSUserMoviesDirectory'
282 |     - 'MacOSUserMusicDirectory'
283 |     - 'MacOSUserPasswordHashesPlistFiles'
284 |     - 'MacOSUserPicturesDirectory'
285 |     - 'MacOSUserPreferences'
286 |     - 'MacOSUserPublicDirectory'
287 |     - 'MacOSUsers'
288 |     - 'MacOSUserSocialAccounts'
289 |     - 'MacOSUserTrash'
290 |     - 'MacOSUtmpFile'
291 |     - 'MacOSUtmpxFile'
292 |     - 'MacOSWirelessNetworks'
293 |     # Unix
294 |     - 'AllShellConfigs'
295 |     - 'AllUsersShellHistory'
296 |     - 'GlobalShellConfigs'
297 |     - 'RootUserShellConfigs'
298 |     - 'RootUserShellHistory'
299 |     - 'UnixGroups'
300 |     - 'UnixHostsFile'
301 |     - 'UnixPasswd'
302 |     - 'UnixShadowFile'
303 |     - 'UnixSudoersConfiguration'
304 |     - 'UnixUsersGroups'
305 |     - 'UsersShellConfigs'
306 |     - 'UsersShellHistory'
307 |     # Browser history
308 |     - 'ChromeHistory'
309 |     - 'FirefoxHistory'
310 |     - 'OperaHistory'
311 |     - 'SafariHistory'
312 |     # Web Servers
313 |     - 'NginxAccessLogs'
314 |     - 'ApacheAccessLogs'
315 |     - 'WordpressConfigFile'
316 | supported_os: [Darwin]
317 | ---
318 | name: Essentials
319 | doc: Most important artifacts.
320 | sources:
321 | - type: ARTIFACT_GROUP
322 |   attributes:
323 |     names:
324 |     - 'LinuxEssentials'
325 |     - 'DarwinEssentials'
326 |     - 'WindowsEssentials'
327 | supported_os: [Darwin,Linux,Windows]
328 | 
329 | 
--------------------------------------------------------------------------------
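A minimal sketch (not part of the repository) of how custom definitions such as the groups above can be loaded and inspected, reusing the same ForensicArtifacts calls that fastir_artifacts.py makes; it assumes the working directory is the repository root and that the dependencies from requirements.txt are installed.

```python
import artifacts.reader
import artifacts.registry
import artifacts.definitions

import fastir.common.filesystem  # registers the custom FILE_INFO source type

reader = artifacts.reader.YamlArtifactsReader()
registry = artifacts.registry.ArtifactDefinitionsRegistry()
registry.ReadFromDirectory(reader, 'examples')  # directory holding own.yaml

essentials = registry.GetDefinitionByName('Essentials')
for source in essentials.sources:
    if source.type_indicator == artifacts.definitions.TYPE_INDICATOR_ARTIFACT_GROUP:
        print(source.names)  # ['LinuxEssentials', 'DarwinEssentials', 'WindowsEssentials']
```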
/fastir/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/OWNsecurity/fastir_artifacts/799db876984ebe7d4096c72c609d4cc28fb31e64/fastir/__init__.py
--------------------------------------------------------------------------------
/fastir/common/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/OWNsecurity/fastir_artifacts/799db876984ebe7d4096c72c609d4cc28fb31e64/fastir/common/__init__.py
--------------------------------------------------------------------------------
/fastir/common/collector.py:
--------------------------------------------------------------------------------
1 | import artifacts
2 |
3 | from fastir.common.logging import logger, PROGRESS
4 |
5 |
6 | class AbstractCollector:
7 | def collect(self, output):
8 | raise NotImplementedError
9 |
10 | def register_source(self, artifact_definition, artifact_source, variables):
11 | raise NotImplementedError
12 |
13 |
14 | class Collector:
15 | def __init__(self, platform):
16 | self._platform = platform
17 | self._variables = None
18 | self._sources = 0
19 |
20 | from fastir.common.commands import CommandExecutor
21 | from fastir.common.filesystem import FileSystemManager
22 | self._collectors = [FileSystemManager(), CommandExecutor()]
23 |
24 | if platform == 'Windows':
25 | from fastir.windows.variables import WindowsHostVariables
26 | self._variables = WindowsHostVariables()
27 |
28 | from fastir.windows.wmi import WMIExecutor
29 | from fastir.windows.registry import RegistryCollector
30 | self._collectors.append(WMIExecutor())
31 | self._collectors.append(RegistryCollector())
32 | else:
33 | from fastir.unix.variables import UnixHostVariables
34 | self._variables = UnixHostVariables()
35 |
36 | def register_source(self, artifact_definition, artifact_source):
37 | supported = False
38 |
39 | for collector in self._collectors:
40 | if collector.register_source(artifact_definition, artifact_source, self._variables):
41 | supported = True
42 |
43 | if supported:
44 | self._sources += 1
45 | elif artifact_source.type_indicator != artifacts.definitions.TYPE_INDICATOR_ARTIFACT_GROUP:
46 | logger.warning(f"Cannot process source for '{artifact_definition.name}' because type '{artifact_source.type_indicator}' is not supported")
47 |
48 | def collect(self, output):
49 | logger.log(PROGRESS, f"Collecting artifacts from {self._sources} sources ...")
50 |
51 | for collector in self._collectors:
52 | collector.collect(output)
53 |
54 | logger.log(PROGRESS, "Finished collecting artifacts")
55 | output.close()
56 |
--------------------------------------------------------------------------------
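A hedged usage sketch of how the rest of the tool drives Collector: create the Outputs first (it sets up logging), build a platform-specific Collector, register sources, then collect. The output directory is illustrative.

```python
from fastir.common.output import Outputs
from fastir.common.collector import Collector
from fastir.common.helpers import get_operating_system

output = Outputs('.', maxsize=None, sha256=False)   # '.' is illustrative
collector = Collector(get_operating_system())

# register_source() would normally be called here for every (definition, source)
# pair selected from the ForensicArtifacts registry, e.g.:
#   collector.register_source(artifact_definition, artifact_source)

collector.collect(output)   # runs every sub-collector, then closes the output
```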
/fastir/common/commands.py:
--------------------------------------------------------------------------------
1 | from subprocess import check_output, STDOUT, CalledProcessError
2 |
3 | import artifacts
4 |
5 | from fastir.common.logging import logger
6 | from fastir.common.collector import AbstractCollector
7 |
8 |
9 | class CommandExecutor(AbstractCollector):
10 | def __init__(self):
11 | self._commands = []
12 |
13 | def add_command(self, artifact, cmd, args):
14 | self._commands.append({
15 | 'artifact': artifact,
16 | 'cmd': cmd,
17 | 'args': args
18 | })
19 |
20 | def collect(self, output):
21 | for command in self._commands:
22 | full_command = [command['cmd']] + command['args']
23 | full_command_str = ' '.join(full_command)
24 |
25 | try:
26 | command_output = check_output(full_command, stderr=STDOUT)
27 | except CalledProcessError as e:
28 | logger.warning(f"Command '{full_command_str}' for artifact '{command['artifact']}' returned error code '{e.returncode}'")
29 | command_output = e.output
30 | except FileNotFoundError:
31 | logger.warning(f"Command '{command['cmd']}' for artifact '{command['artifact']}' could not be found")
32 | command_output = b''
33 |
34 | output.add_collected_command(command['artifact'], full_command_str, command_output)
35 |
36 | def register_source(self, artifact_definition, artifact_source, variables):
37 | if artifact_source.type_indicator == artifacts.definitions.TYPE_INDICATOR_COMMAND:
38 | self.add_command(artifact_definition.name, artifact_source.cmd, artifact_source.args)
39 | return True
40 |
41 | return False
42 |
--------------------------------------------------------------------------------
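A small sketch exercising CommandExecutor directly on a Unix host; the artifact name, the command and the stand-in for Outputs are all illustrative.

```python
from fastir.common.commands import CommandExecutor


class PrintOutput:
    """Stand-in for fastir.common.output.Outputs, printing instead of archiving."""
    def add_collected_command(self, artifact, command, output):
        print(f"{artifact}: '{command}' produced {len(output)} bytes")


executor = CommandExecutor()
executor.add_command('ExampleArtifact', 'uname', ['-a'])  # illustrative command
executor.collect(PrintOutput())
```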
/fastir/common/file_info.py:
--------------------------------------------------------------------------------
1 | import hashlib
2 | import filetype
3 | from datetime import datetime
4 |
5 | from pefile import PE
6 |
7 | from .logging import logger
8 |
9 |
10 | MAX_PE_SIZE = 50 * 1024 * 1024
11 |
12 |
13 | class FileInfo:
14 | def __init__(self, path_object):
15 | self._path_object = path_object
16 | self.size = path_object.get_size()
17 |
18 | self._info = {}
19 | self._content = b""
20 |
21 | def compute(self):
22 | self.md5 = hashlib.md5()
23 | self.sha1 = hashlib.sha1()
24 | self.sha256 = hashlib.sha256()
25 | self.mime_type = None
26 |
27 | for i, chunk in enumerate(self._path_object.read_chunks()):
28 | self.md5.update(chunk)
29 | self.sha1.update(chunk)
30 | self.sha256.update(chunk)
31 |
32 | if i == 0:
33 | file_type = filetype.guess(chunk)
34 | if file_type:
35 | self.mime_type = file_type.mime
36 |
37 | if self.mime_type == "application/x-msdownload" and self.size < MAX_PE_SIZE:
38 | self._content += chunk
39 |
40 | return self._get_results()
41 |
42 | def _get_results(self):
43 | self._info = {
44 | '@timestamp': datetime.utcnow().isoformat(),
45 | 'file': {
46 | 'size': self.size,
47 | 'path': self._path_object.path,
48 | 'hash': {
49 | 'md5': self.md5.hexdigest(),
50 | 'sha1': self.sha1.hexdigest(),
51 | 'sha256': self.sha256.hexdigest()
52 | }
53 | }
54 | }
55 |
56 | if self.mime_type:
57 | self._info['file']['mime_type'] = self.mime_type
58 |
59 | if len(self._content) > 0:
60 | try:
61 | self._add_pe_info()
62 | except Exception as e:
63 | logger.warning(f"Could not parse PE file '{self._path_object.path}': '{str(e)}'")
64 |
65 | return self._info
66 |
67 | def _add_file_property(self, category, field, value):
68 | self._info['file'].setdefault(category, {})
69 | self._info['file'][category][field] = value
70 |
71 | def _add_vs_info(self, parsed_pe):
72 | VS_INFO_FIELDS = {
73 | b'CompanyName': 'company',
74 | b'FileDescription': 'description',
75 | b'FileVersion': 'file_version',
76 | b'InternalName': 'original_file_name',
77 | b'ProductName': 'product'
78 | }
79 |
80 | if hasattr(parsed_pe, "VS_VERSIONINFO"):
81 | if hasattr(parsed_pe, "FileInfo"):
82 | for finfo in parsed_pe.FileInfo:
83 | for entry in finfo:
84 | if hasattr(entry, 'StringTable'):
85 | for st_entry in entry.StringTable:
86 | for str_entry in st_entry.entries.items():
87 | if str_entry[0] in VS_INFO_FIELDS and str_entry[1]:
88 | self._add_file_property(
89 | 'pe',
90 | VS_INFO_FIELDS[str_entry[0]],
91 | str_entry[1].decode('utf-8', 'replace'))
92 |
93 | def _add_pe_info(self):
94 | parsed_pe = PE(data=self._content)
95 |
96 | self._add_vs_info(parsed_pe)
97 | self._add_file_property('pe', 'imphash', parsed_pe.get_imphash())
98 | self._add_file_property(
99 | 'pe', 'compilation',
100 | datetime.utcfromtimestamp(parsed_pe.FILE_HEADER.TimeDateStamp).isoformat())
101 |
--------------------------------------------------------------------------------
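FileInfo only needs an object exposing `path`, `get_size()` and `read_chunks()`, so a tiny in-memory stub (hypothetical, shown below) is enough to see the shape of the records written to the `*-file_info.jsonl` output.

```python
from fastir.common.file_info import FileInfo


class FakePathObject:
    path = '/tmp/example.bin'   # illustrative path
    _data = b'hello world'

    def get_size(self):
        return len(self._data)

    def read_chunks(self):
        yield self._data


info = FileInfo(FakePathObject()).compute()
print(info['file']['hash']['sha256'])  # hashes are computed chunk by chunk
print(info['file']['size'])            # 11
```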
/fastir/common/filesystem.py:
--------------------------------------------------------------------------------
1 | import re
2 | import os
3 |
4 | import pytsk3
5 | import psutil
6 | import artifacts
7 | from artifacts.source_type import FileSourceType
8 |
9 | from fastir.common.logging import logger
10 | from fastir.common.collector import AbstractCollector
11 | from fastir.common.path_components import RecursionPathComponent, GlobPathComponent, RegularPathComponent, PathObject
12 |
13 | CHUNK_SIZE = 5 * 1024 * 1024
14 | PATH_RECURSION_REGEX = re.compile(r"\*\*(?P<max_depth>(-1|\d*))")
15 | PATH_GLOB_REGEX = re.compile(r"\*|\?|\[.+\]")
16 | FILE_INFO_TYPE = "FILE_INFO"
17 | TSK_FILESYSTEMS = ['NTFS', 'ext3', 'ext4']
18 |
19 |
20 | class FileSystem:
21 | def __init__(self):
22 | self._patterns = []
23 |
24 | def add_pattern(self, artifact, pattern, source_type='FILE'):
25 | self._patterns.append({
26 | 'artifact': artifact,
27 | 'pattern': pattern,
28 | 'source_type': source_type
29 | })
30 |
31 | def _relative_path(self, filepath):
32 | raise NotImplementedError
33 |
34 | def _parse(self, pattern):
35 | components = []
36 |
37 | items = pattern.split('/')
38 | for i, item in enumerate(items):
39 | # Search for '**' glob recursion
40 | recursion = PATH_RECURSION_REGEX.search(item)
41 | if recursion:
42 | max_depth = None
43 |
44 | if recursion.group('max_depth'):
45 | max_depth = int(recursion.group('max_depth'))
46 |
47 | components.append(RecursionPathComponent(i < (len(items) - 1), max_depth))
48 | else:
49 | glob = PATH_GLOB_REGEX.search(item)
50 | if glob:
51 | components.append(GlobPathComponent(i < (len(items) - 1), item))
52 | else:
53 | components.append(RegularPathComponent(i < (len(items) - 1), item))
54 |
55 | return components
56 |
57 | def _base_generator(self):
58 | raise NotImplementedError
59 |
60 | def collect(self, output):
61 | for pattern in self._patterns:
62 | logger.debug("Collecting pattern '{}' for artifact '{}'".format(pattern['pattern'], pattern['artifact']))
63 |
64 | # Normalize the pattern, relative to the mountpoint
65 | relative_pattern = self._relative_path(pattern['pattern'])
66 | path_components = self._parse(relative_pattern)
67 |
68 | generator = self._base_generator
69 | for component in path_components:
70 | generator = component.get_generator(generator)
71 |
72 | for path in generator():
73 | try:
74 | if pattern['source_type'] == FILE_INFO_TYPE:
75 | output.add_collected_file_info(pattern['artifact'], path)
76 | else:
77 | output.add_collected_file(pattern['artifact'], path)
78 | except Exception as e:
79 | logger.error(f"Error collecting file '{path.path}': {str(e)}")
80 |
81 |
82 | class TSKFileSystem(FileSystem):
83 | def __init__(self, manager, device, path):
84 | self._manager = manager
85 | self._path = path
86 | self._root = None
87 |
88 | # Unix Device
89 | if self._path.startswith('/'):
90 | self._device = device
91 | else:
92 |             # On Windows, we need a specific format: '\\.\<drive letter>:'
93 | self._device = r"\\.\{}:".format(device[0])
94 |
95 |         # Cache parsed entries for better performance
96 | self._entries_cache = {}
97 | self._entries_cache_last = []
98 |
99 | # Open drive
100 | img_info = pytsk3.Img_Info(self._device)
101 | self._fs_info = pytsk3.FS_Info(img_info)
102 | self._root = self._fs_info.open_dir('')
103 |
104 | super().__init__()
105 |
106 | def _relative_path(self, filepath):
107 | normalized_path = filepath.replace(os.path.sep, '/')
108 | return normalized_path[len(self._path):].lstrip('/')
109 |
110 | def _base_generator(self):
111 | yield PathObject(self, os.path.basename(self._path), self._path, self._root)
112 |
113 | def is_allocated(self, tsk_entry):
114 | return (int(tsk_entry.info.name.flags) & pytsk3.TSK_FS_NAME_FLAG_ALLOC != 0 and
115 | int(tsk_entry.info.meta.flags) & pytsk3.TSK_FS_META_FLAG_ALLOC != 0)
116 |
117 | def is_directory(self, path_object):
118 | return path_object.obj.info.meta.type in [pytsk3.TSK_FS_META_TYPE_DIR, pytsk3.TSK_FS_META_TYPE_VIRT_DIR]
119 |
120 | def is_file(self, path_object):
121 | return path_object.obj.info.meta.type == pytsk3.TSK_FS_META_TYPE_REG
122 |
123 | def is_symlink(self, path_object):
124 | return path_object.obj.info.meta.type == pytsk3.TSK_FS_META_TYPE_LNK
125 |
126 | def _follow_symlink(self, parent, path_object):
127 | # TODO: attempt to follow symlinks with TSK
128 | #
129 | # As a temporary fix, downgrade all links to OSFileSystem so that
130 | # they are still collected
131 | return OSFileSystem('/').get_fullpath(path_object.path)
132 |
133 | def list_directory(self, path_object):
134 | if path_object.path in self._entries_cache:
135 | return self._entries_cache[path_object.path]
136 | else:
137 | # Make sure we do not keep more than 10 000 entries in the cache
138 | if len(self._entries_cache_last) >= 10000:
139 | first = self._entries_cache_last.pop(0)
140 | del self._entries_cache[first]
141 |
142 | entries = []
143 | directory = path_object.obj
144 |
145 | if not isinstance(directory, pytsk3.Directory):
146 | if not self.is_directory(path_object):
147 | return
148 |
149 | directory = path_object.obj.as_directory()
150 |
151 | for entry in directory:
152 | if (
153 | not hasattr(entry, 'info') or
154 | not hasattr(entry.info, 'name') or
155 | not hasattr(entry.info.name, 'name') or
156 | entry.info.name.name in [b'.', b'..'] or
157 | not hasattr(entry.info, 'meta') or
158 | not hasattr(entry.info.meta, 'size') or
159 | not hasattr(entry.info.meta, 'type') or
160 | not self.is_allocated(entry)
161 | ):
162 | continue
163 |
164 | name = entry.info.name.name.decode('utf-8', errors='replace')
165 | filepath = os.path.join(path_object.path, name)
166 | entry_path_object = PathObject(self, name, filepath, entry)
167 |
168 | if entry.info.meta.type == pytsk3.TSK_FS_META_TYPE_LNK:
169 | symlink_object = self._follow_symlink(path_object, entry_path_object)
170 |
171 | if symlink_object:
172 | entries.append(symlink_object)
173 | else:
174 | entries.append(entry_path_object)
175 |
176 | self._entries_cache[path_object.path] = entries
177 |         self._entries_cache_last.append(path_object.path)
178 |
179 | return entries
180 |
181 | def get_path(self, parent, name):
182 | for path_object in self.list_directory(parent):
183 | if os.path.normcase(name) == os.path.normcase(path_object.name):
184 | return path_object
185 |
186 | def get_fullpath(self, filepath):
187 | relative_path = self._relative_path(filepath)
188 | path_object = next(self._base_generator())
189 |
190 | for part in relative_path.split('/'):
191 | path_object = self.get_path(path_object, part)
192 |
193 | return path_object
194 |
195 | def read_chunks(self, path_object):
196 | size = path_object.obj.info.meta.size
197 | offset = 0
198 |
199 | while offset < size:
200 | chunk_size = min(CHUNK_SIZE, size - offset)
201 | chunk = path_object.obj.read_random(offset, chunk_size)
202 |
203 | if chunk:
204 | offset += chunk_size
205 | yield chunk
206 | else:
207 | break
208 |
209 | def get_size(self, path_object):
210 | return path_object.obj.info.meta.size
211 |
212 |
213 | class OSFileSystem(FileSystem):
214 | def __init__(self, path):
215 | self._path = path
216 |
217 | super().__init__()
218 |
219 | def _relative_path(self, filepath):
220 | normalized_path = filepath.replace(os.path.sep, '/')
221 | return normalized_path[len(self._path):].lstrip('/')
222 |
223 | def _base_generator(self):
224 | yield PathObject(self, os.path.basename(self._path), self._path)
225 |
226 | def is_directory(self, path):
227 | return os.path.isdir(path.path)
228 |
229 | def is_file(self, path):
230 | return os.path.isfile(path.path)
231 |
232 | def is_symlink(self, path):
233 | # When using syscalls, symlinks are automatically followed
234 | return False
235 |
236 | def list_directory(self, path):
237 | try:
238 | for name in os.listdir(path.path):
239 | yield PathObject(self, name, os.path.join(path.path, name))
240 | except Exception as e:
241 | logger.error(f"Error analyzing directory '{path.path}': {str(e)}")
242 |
243 | def get_path(self, parent, name):
244 | return PathObject(self, name, os.path.join(parent.path, name))
245 |
246 | def get_fullpath(self, fullpath):
247 | return PathObject(self, os.path.basename(fullpath), fullpath)
248 |
249 |     def read_chunks(self, path_object):
250 |         with open(path_object.path, 'rb') as f:
251 |             chunk = f.read(CHUNK_SIZE)
252 |             while chunk:
253 |                 yield chunk
254 |                 chunk = f.read(CHUNK_SIZE)
255 |
256 | def get_size(self, path_object):
257 | stats = os.lstat(path_object.path)
258 |
259 | return stats.st_size
260 |
261 |
262 | class FileSystemManager(AbstractCollector):
263 | def __init__(self):
264 | self._filesystems = {}
265 | self._mount_points = psutil.disk_partitions(True)
266 |
267 | def _get_mountpoint(self, filepath):
268 | best_mountpoint = None
269 | best_mountpoint_length = 0
270 |
271 | for mountpoint in self._mount_points:
272 | if filepath.startswith(mountpoint.mountpoint):
273 | if len(mountpoint.mountpoint) > best_mountpoint_length:
274 | best_mountpoint = mountpoint
275 | best_mountpoint_length = len(mountpoint.mountpoint)
276 |
277 | if best_mountpoint is None:
278 | raise IndexError(f'Could not find a mountpoint for path {filepath}')
279 |
280 | return best_mountpoint
281 |
282 | def _get_filesystem(self, filepath):
283 | # Fetch the mountpoint for this particular path
284 | mountpoint = self._get_mountpoint(filepath)
285 |
286 | # Fetch or create the matching filesystem
287 | if mountpoint.mountpoint not in self._filesystems:
288 | if mountpoint.fstype in TSK_FILESYSTEMS:
289 | try:
290 | self._filesystems[mountpoint.mountpoint] = TSKFileSystem(
291 | self, mountpoint.device, mountpoint.mountpoint)
292 | except OSError:
293 | pass
294 |
295 | if mountpoint.mountpoint not in self._filesystems:
296 | self._filesystems[mountpoint.mountpoint] = OSFileSystem(mountpoint.mountpoint)
297 |
298 | return self._filesystems[mountpoint.mountpoint]
299 |
300 | def get_path_object(self, filepath):
301 | filesystem = self._get_filesystem(filepath)
302 | return filesystem.get_fullpath(filepath)
303 |
304 | def add_pattern(self, artifact, pattern, source_type='FILE'):
305 | pattern = os.path.normpath(pattern)
306 |
307 | # If the pattern starts with '\', it should be applied to all drives
308 | if pattern.startswith('\\'):
309 | for mountpoint in self._mount_points:
310 | if mountpoint.fstype in TSK_FILESYSTEMS:
311 | extended_pattern = os.path.join(mountpoint.mountpoint, pattern[1:])
312 | filesystem = self._get_filesystem(extended_pattern)
313 | filesystem.add_pattern(artifact, extended_pattern, source_type)
314 |
315 | else:
316 | filesystem = self._get_filesystem(pattern)
317 | filesystem.add_pattern(artifact, pattern, source_type)
318 |
319 | def collect(self, output):
320 | for path in list(self._filesystems):
321 | logger.debug(f"Start collection for '{path}'")
322 | self._filesystems[path].collect(output)
323 |
324 | def register_source(self, artifact_definition, artifact_source, variables):
325 | supported = False
326 |
327 | if artifact_source.type_indicator in [artifacts.definitions.TYPE_INDICATOR_FILE, artifacts.definitions.TYPE_INDICATOR_PATH, FILE_INFO_TYPE]:
328 | supported = True
329 |
330 | for p in artifact_source.paths:
331 | for sp in variables.substitute(p):
332 | if artifact_source.type_indicator == artifacts.definitions.TYPE_INDICATOR_PATH and (sp[-1] != '*'):
333 | sp = f"{sp}/**-1"
334 | self.add_pattern(artifact_definition.name, sp, artifact_source.type_indicator)
335 |
336 | return supported
337 |
338 |
339 | class FileInfoSourceType(FileSourceType):
340 | """Custom Source Type to collect file info instead of content"""
341 | TYPE_INDICATOR = FILE_INFO_TYPE
342 |
343 |
344 | # register custom source type
345 | artifacts.registry.ArtifactDefinitionsRegistry.RegisterSourceType(FileInfoSourceType)
346 | artifacts.source_type.SourceTypeFactory.RegisterSourceType(FileInfoSourceType)
347 |
--------------------------------------------------------------------------------
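A sketch of how a ForensicArtifacts-style pattern is decomposed: `**` pieces become RecursionPathComponent, glob pieces GlobPathComponent, everything else RegularPathComponent. It pokes at the private `_parse()` helper purely for illustration, and the pattern itself is made up.

```python
from fastir.common.filesystem import OSFileSystem

components = OSFileSystem('/')._parse('var/log/**2/*.log')
print([type(c).__name__ for c in components])
# ['RegularPathComponent', 'RegularPathComponent',
#  'RecursionPathComponent', 'GlobPathComponent']
```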
/fastir/common/helpers.py:
--------------------------------------------------------------------------------
1 | import sys
2 |
3 |
4 | def get_operating_system():
5 | if sys.platform == 'linux':
6 | return 'Linux'
7 | elif sys.platform == 'darwin':
8 | return 'Darwin'
9 | elif sys.platform.startswith('win'):
10 | return 'Windows'
11 |
12 | raise ValueError(f"Unsupported Operating System: '{sys.platform}'")
13 |
--------------------------------------------------------------------------------
/fastir/common/logging.py:
--------------------------------------------------------------------------------
1 | import logging
2 |
3 |
4 | logger = logging.getLogger('fastir')
5 | logger.setLevel(logging.DEBUG)
6 |
7 | PROGRESS = 25
8 | logging.addLevelName(PROGRESS, 'PROGRESS')
9 |
--------------------------------------------------------------------------------
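A minimal sketch of the custom PROGRESS level (25, between INFO and WARNING); the handler below mirrors the console threshold that Outputs._setup_logging() configures, so only progress messages and warnings/errors reach the console.

```python
import logging

from fastir.common.logging import logger, PROGRESS

console = logging.StreamHandler()
console.setLevel(PROGRESS)          # same threshold Outputs._setup_logging() uses
logger.addHandler(console)

logger.log(PROGRESS, "Collecting artifacts from 42 sources ...")  # reaches the console
logger.info("per-file details")     # below PROGRESS: kept out of the console output
```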
/fastir/common/output.py:
--------------------------------------------------------------------------------
1 | import os
2 | import hashlib
3 | import json
4 | import logging
5 | import zipfile
6 | import platform
7 | import jsonlines
8 | from datetime import datetime
9 | from collections import defaultdict
10 |
11 | from .file_info import FileInfo
12 | from .logging import logger, PROGRESS
13 |
14 |
15 | def parse_human_size(size):
16 | units = {
17 | 'B': 1,
18 | 'K': 1024,
19 | 'M': 1024 * 1024,
20 | 'G': 1024 * 1024 * 1024
21 | }
22 |
23 | if size:
24 | unit = size[-1]
25 |
26 | if unit in units:
27 | return int(size[:-1]) * units[unit]
28 | else:
29 | return int(size)
30 |
31 |
32 | def normalize_filepath(filepath):
33 |     # On Windows, make sure we remove the ':' after the drive letter
34 | if filepath.index(os.path.sep) > 0:
35 | filepath = filepath.replace(':', '', 1)
36 |
37 | return filepath.encode('utf-8', 'backslashreplace').decode('utf-8')
38 |
39 |
40 | class Outputs:
41 | def __init__(self, dirpath, maxsize, sha256):
42 | self._dirpath = dirpath
43 |
44 | self._zip = None
45 | self._maxsize = parse_human_size(maxsize)
46 | self._sha256 = sha256
47 |
48 | self._commands = defaultdict(dict)
49 | self._wmi = defaultdict(dict)
50 | self._registry = defaultdict(lambda: defaultdict(dict))
51 |
52 | self._file_info = None
53 |
54 | self._init_output_()
55 |
56 | def _init_output_(self):
57 | os.umask(0o077)
58 | now = datetime.now().strftime(r'%Y%m%d%H%M%S')
59 |
60 | self._hostname = platform.node()
61 | self._dirpath = os.path.join(self._dirpath, f"{now}-{self._hostname}")
62 |
63 | # Create the directory and set an environment variable that may be used in COMMAND artifacts
64 | os.makedirs(self._dirpath)
65 | os.environ['FAOUTPUTDIR'] = self._dirpath
66 |
67 | self._setup_logging()
68 |
69 | def _setup_logging(self):
70 | logfile = os.path.join(self._dirpath, f'{self._hostname}-logs.txt')
71 |
72 | formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s')
73 |
74 | file_output = logging.FileHandler(logfile, 'w', 'utf-8')
75 | file_output.setLevel(logging.INFO)
76 | file_output.setFormatter(formatter)
77 |
78 | console_output = logging.StreamHandler()
79 | console_output.setLevel(PROGRESS)
80 | console_output.setFormatter(formatter)
81 |
82 | logger.addHandler(file_output)
83 | logger.addHandler(console_output)
84 |
85 | def add_collected_file_info(self, artifact, path_object):
86 | info = FileInfo(path_object)
87 |
88 | if not self._maxsize or info.size <= self._maxsize:
89 | # Open the result file if this is the first time it is needed
90 | if self._file_info is None:
91 | self._file_info = jsonlines.open(
92 | os.path.join(self._dirpath, f'{self._hostname}-file_info.jsonl'), 'w')
93 |
94 | file_info = info.compute()
95 | file_info['labels'] = {'artifact': artifact}
96 |
97 | self._file_info.write(file_info)
98 |
99 | def add_collected_file(self, artifact, path_object):
100 | logger.info(f"Collecting file '{path_object.path}' for artifact '{artifact}'")
101 |
102 |         # Make sure to create the file if it does not exist yet
103 | if self._zip is None:
104 | self._zip = zipfile.ZipFile(
105 | os.path.join(self._dirpath, f'{self._hostname}-files.zip'), 'w', zipfile.ZIP_DEFLATED)
106 |
107 | if not self._maxsize or path_object.get_size() <= self._maxsize:
108 | # Write file content to zipfile
109 | filename = normalize_filepath(path_object.path)
110 |
111 | if filename not in self._zip.namelist():
112 | zinfo = zipfile.ZipInfo(filename=filename)
113 | zinfo.compress_type = zipfile.ZIP_DEFLATED
114 |
115 | # Read/write by chunks to reduce memory footprint
116 | if self._sha256:
117 | h = hashlib.sha256()
118 | with self._zip._lock:
119 | with self._zip.open(zinfo, mode='w', force_zip64=True) as dest:
120 | for chunk in path_object.read_chunks():
121 | dest.write(chunk)
122 | if self._sha256:
123 | h.update(chunk)
124 | if self._sha256:
125 | logger.info(f"File '{path_object.path}' has SHA-256 '{h.hexdigest()}'")
126 | else:
127 | logger.warning(f"Ignoring file '{path_object.path}' because of its size")
128 |
129 | def add_collected_command(self, artifact, command, output):
130 | logger.info(f"Collecting command '{command}' for artifact '{artifact}'")
131 | self._commands[artifact][command] = output.decode('utf-8', errors='replace')
132 |
133 | def add_collected_wmi(self, artifact, query, output):
134 | logger.info(f"Collecting WMI query '{query}' for artifact '{artifact}'")
135 | self._wmi[artifact][query] = output
136 |
137 | def add_collected_registry_value(self, artifact, key, name, value, type_):
138 | logger.info(f"Collecting Reg value '{name}' from '{key}' for artifact '{artifact}'")
139 | self._registry[artifact][key][name] = {
140 | 'value': value,
141 | 'type': type_
142 | }
143 |
144 | def close(self):
145 | if self._zip:
146 | self._zip.close()
147 |
148 | if self._commands:
149 | with open(os.path.join(self._dirpath, f'{self._hostname}-commands.json'), 'w') as out:
150 | json.dump(self._commands, out, indent=2)
151 |
152 | if self._wmi:
153 | with open(os.path.join(self._dirpath, f'{self._hostname}-wmi.json'), 'w') as out:
154 | json.dump(self._wmi, out, indent=2)
155 |
156 | if self._registry:
157 | with open(os.path.join(self._dirpath, f'{self._hostname}-registry.json'), 'w') as out:
158 | json.dump(self._registry, out, indent=2)
159 |
160 | if self._file_info:
161 | self._file_info.close()
162 |
163 | for handler in logger.handlers[:]:
164 | handler.close()
165 | logger.removeHandler(handler)
166 |
--------------------------------------------------------------------------------
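A quick sketch of the two module-level helpers above, run on a Unix host (values illustrative).

```python
from fastir.common.output import parse_human_size, normalize_filepath

print(parse_human_size('10M'))   # 10485760
print(parse_human_size('512'))   # 512 (no unit suffix means bytes)
print(parse_human_size(None))    # None -> no size limit
print(normalize_filepath('/etc/passwd'))  # '/etc/passwd' (unchanged on Unix)
```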
/fastir/common/path_components.py:
--------------------------------------------------------------------------------
1 | from fnmatch import fnmatch
2 |
3 |
4 | class PathObject:
5 | def __init__(self, filesystem, name, path, obj=None):
6 | self.filesystem = filesystem
7 | self.name = name
8 | self.obj = obj
9 | self.path = path
10 |
11 | def is_directory(self):
12 | return self.filesystem.is_directory(self)
13 |
14 | def is_file(self):
15 | return self.filesystem.is_file(self)
16 |
17 | def is_symlink(self):
18 | return self.filesystem.is_symlink(self)
19 |
20 | def list_directory(self):
21 | return self.filesystem.list_directory(self)
22 |
23 | def get_path(self, path):
24 | return self.filesystem.get_path(self, path)
25 |
26 | def read_chunks(self):
27 | return self.filesystem.read_chunks(self)
28 |
29 | def get_size(self):
30 | return self.filesystem.get_size(self)
31 |
32 |
33 | class PathComponent:
34 | def __init__(self, directory):
35 | self._directory = directory
36 | self._generator = None
37 |
38 | def get_generator(self, generator):
39 | self._generator = generator
40 | return self._generate
41 |
42 | def _generate(self):
43 | raise NotImplementedError
44 |
45 |
46 | class RecursionPathComponent(PathComponent):
47 | def __init__(self, directory, max_depth=None):
48 | super().__init__(directory)
49 |
50 | self.max_depth = max_depth or 3
51 |
52 | def _generate(self):
53 | for parent in self._generator():
54 | yield from self._recurse_from_dir(parent, depth=0)
55 |
56 | def _recurse_from_dir(self, parent, depth):
57 | if depth < self.max_depth or self.max_depth == -1:
58 | for path in parent.list_directory():
59 | if path.is_directory():
60 | yield from self._recurse_from_dir(path, depth + 1)
61 |
62 | # Special case when the file is considered to be both a dir and a file
63 | # This only happens with registry keys
64 | if self._directory or path.is_file():
65 | yield path
66 | elif not self._directory:
67 | yield path
68 |
69 |
70 | class GlobPathComponent(PathComponent):
71 | def __init__(self, directory, path):
72 | super().__init__(directory)
73 |
74 | self._path = path
75 |
76 | def _generate(self):
77 | for parent in self._generator():
78 | for path in parent.list_directory():
79 | if fnmatch(path.name, self._path):
80 | if self._directory and path.is_directory():
81 | yield path
82 | elif not self._directory and path.is_file():
83 | yield path
84 |
85 |
86 | class RegularPathComponent(PathComponent):
87 | def __init__(self, directory, path):
88 | super().__init__(directory)
89 |
90 | self._path = path
91 |
92 | def _generate(self):
93 | for parent in self._generator():
94 | path = parent.get_path(self._path)
95 |
96 | if path:
97 | if self._directory and path.is_directory():
98 | yield path
99 | elif not self._directory and path.is_file():
100 | yield path
101 |
--------------------------------------------------------------------------------
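PathObject is a thin wrapper that delegates every operation to the filesystem it was created by; a short sketch with OSFileSystem on a Unix host (the path is illustrative).

```python
from fastir.common.filesystem import OSFileSystem

fs = OSFileSystem('/')
hosts = fs.get_fullpath('/etc/hosts')        # returns a PathObject bound to 'fs'
print(hosts.name, hosts.is_file(), hosts.get_size())
print(sum(len(chunk) for chunk in hosts.read_chunks()))  # same total as get_size()
```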
/fastir/common/variables.py:
--------------------------------------------------------------------------------
1 | import re
2 |
3 | from .logging import logger
4 |
5 |
6 | class HostVariables:
7 | def __init__(self):
8 | self._variables = []
9 |
10 | self.init_variables()
11 | self.resolve_variables()
12 |
13 | def init_variables(self):
14 | raise NotImplementedError
15 |
16 | def resolve_variables(self):
17 | for variable in self._variables:
18 | values = variable['value']
19 |
20 | if not isinstance(variable['value'], set):
21 | values = set([values])
22 |
23 | resolved_values = set()
24 |
25 | for value in values:
26 | resolved_values.update(self.substitute(value))
27 |
28 | variable['value'] = resolved_values
29 |
30 | def add_variable(self, name, value):
31 | self._variables.append({
32 | 'name': name,
33 | 're': re.compile(re.escape(name), re.IGNORECASE),
34 | 'value': value
35 | })
36 |
37 | def _substitute_value(self, original_value, variable_re, variable_value):
38 | new_value, subs = variable_re.subn(variable_value.replace('\\', r'\\'), original_value)
39 |
40 | if subs:
41 | return self.substitute(new_value)
42 | else:
43 | return set()
44 |
45 | def substitute(self, value):
46 | values = set()
47 |
48 | if value.count('%') < 2:
49 | values.add(value)
50 | else:
51 | for variable in self._variables:
52 | if isinstance(variable['value'], set):
53 | for variable_value in variable['value']:
54 | values.update(self._substitute_value(value, variable['re'], variable_value))
55 | else:
56 | values.update(self._substitute_value(value, variable['re'], variable['value']))
57 |
58 | if not values:
59 | logger.warning(f"Value '{value}' contains unsupported variables")
60 | values.add(value)
61 |
62 | return values
63 |
--------------------------------------------------------------------------------
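A throwaway HostVariables subclass (hypothetical, for illustration) showing how substitution expands every value of a variable; the home directories are made up.

```python
from fastir.common.variables import HostVariables


class DemoHostVariables(HostVariables):
    def init_variables(self):
        # Hypothetical home directories, for illustration only
        self.add_variable('%%users.homedir%%', {'/root', '/home/alice'})


variables = DemoHostVariables()
print(variables.substitute('%%users.homedir%%/.ssh/known_hosts'))
# {'/root/.ssh/known_hosts', '/home/alice/.ssh/known_hosts'}
```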
/fastir/unix/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/OWNsecurity/fastir_artifacts/799db876984ebe7d4096c72c609d4cc28fb31e64/fastir/unix/__init__.py
--------------------------------------------------------------------------------
/fastir/unix/variables.py:
--------------------------------------------------------------------------------
1 | import pwd
2 |
3 |
4 | from fastir.common.variables import HostVariables
5 |
6 |
7 | class UnixHostVariables(HostVariables):
8 |
9 | def init_variables(self):
10 | userprofiles = set()
11 |
12 | for pwdent in pwd.getpwall():
13 | userprofiles.add(pwdent.pw_dir)
14 |
15 | self.add_variable('%%users.homedir%%', userprofiles)
16 |
--------------------------------------------------------------------------------
/fastir/windows/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/OWNsecurity/fastir_artifacts/799db876984ebe7d4096c72c609d4cc28fb31e64/fastir/windows/__init__.py
--------------------------------------------------------------------------------
/fastir/windows/registry.py:
--------------------------------------------------------------------------------
1 | import os
2 | import json
3 | import winreg
4 |
5 | import artifacts
6 |
7 | from fastir.common.filesystem import FileSystem
8 | from fastir.common.path_components import PathObject
9 | from fastir.common.collector import AbstractCollector
10 |
11 |
12 | class RegistryReader(FileSystem):
13 | def __init__(self, hive, pattern):
14 | self._hive = hive
15 | self._pattern = pattern
16 |
17 | self._keys = {}
18 |
19 | def _key(self, path_object, parent):
20 | if path_object.path not in self._keys:
21 | self._keys[path_object.path] = winreg.OpenKey(parent.obj, path_object.name, 0, winreg.KEY_READ|winreg.KEY_WOW64_64KEY)
22 |
23 | path_object.obj = self._keys[path_object.path]
24 |
25 | return path_object
26 |
27 | def _base_generator(self):
28 | yield PathObject(self, self._hive, self._hive, getattr(winreg, self._hive))
29 |
30 | def keys_to_collect(self):
31 | path_components = self._parse(self._pattern)
32 |
33 | generator = self._base_generator
34 | for component in path_components:
35 | generator = component.get_generator(generator)
36 |
37 | for path in generator():
38 | yield path
39 |
40 | def list_directory(self, path_object):
41 | try:
42 | index = 0
43 | while True:
44 | name = winreg.EnumKey(path_object.obj, index)
45 | index += 1
46 |
47 | try:
48 | keypath = os.path.join(path_object.path, name)
49 | yield self._key(
50 | PathObject(self, name.lower(), keypath),
51 | path_object)
52 | except OSError:
53 | pass
54 | except OSError:
55 | pass
56 |
57 | def get_path(self, parent, name):
58 | try:
59 | return self._key(
60 | PathObject(self, name, os.path.join(parent.path, name)),
61 | parent)
62 | except OSError:
63 | return None
64 |
65 | def is_directory(self, path_object):
66 | try:
67 | winreg.EnumKey(path_object.obj, 0)
68 | return True
69 | except OSError:
70 | return False
71 |
72 | def is_file(self, path_object):
73 | return True
74 |
75 | def close(self):
76 | for _, handle in self._keys.items():
77 | winreg.CloseKey(handle)
78 |
79 | def get_key_values(self, key_to_collect):
80 | try:
81 | index = 0
82 |
83 | while True:
84 | name, value, type_ = winreg.EnumValue(key_to_collect.obj, index)
85 | yield name, self.normalize_value(value), type_
86 | index += 1
87 | except OSError:
88 | pass
89 |
90 | def get_key_value(self, key, value):
91 | try:
92 | value, type_ = winreg.QueryValueEx(key.obj, value)
93 |
94 | return {
95 | "value": self.normalize_value(value),
96 | "type": type_
97 | }
98 | except FileNotFoundError:
99 | return None
100 |
101 | @staticmethod
102 | def normalize_value(value):
103 | try:
104 | json.dumps(value)
105 | return value
106 | except TypeError:
107 | return repr(value)
108 |
109 |
110 | class RegistryCollector(AbstractCollector):
111 | def __init__(self):
112 | self._keys = []
113 | self._values = []
114 |
115 | def add_key(self, artifact, key):
116 | key_parts = key.split('\\')
117 |
118 | self._keys.append({
119 | 'artifact': artifact,
120 | 'hive': key_parts[0],
121 | 'key': '/'.join(key_parts[1:])
122 | })
123 |
124 | def add_value(self, artifact, key, value):
125 | key_parts = key.split('\\')
126 |
127 | self._values.append({
128 | 'artifact': artifact,
129 | 'hive': key_parts[0],
130 | 'key': '/'.join(key_parts[1:]),
131 | 'value': value
132 | })
133 |
134 | def collect(self, output):
135 | for key in self._keys:
136 | reader = RegistryReader(key['hive'], key['key'].lower())
137 |
138 | for key_to_collect in reader.keys_to_collect():
139 | for name, value, type_ in reader.get_key_values(key_to_collect):
140 | output.add_collected_registry_value(
141 | key['artifact'], key_to_collect.path, name, value, type_)
142 |
143 | reader.close()
144 |
145 | for key_value in self._values:
146 | reader = RegistryReader(key_value['hive'], key_value['key'].lower())
147 |
148 | for key_to_collect in reader.keys_to_collect():
149 | value = reader.get_key_value(key_to_collect, key_value['value'])
150 |
151 | if value:
152 | output.add_collected_registry_value(
153 | key_value['artifact'], key_to_collect.path, key_value['value'], value['value'], value['type'])
154 |
155 | reader.close()
156 |
157 | def register_source(self, artifact_definition, artifact_source, variables):
158 | supported = False
159 |
160 | if artifact_source.type_indicator == artifacts.definitions.TYPE_INDICATOR_WINDOWS_REGISTRY_KEY:
161 | supported = True
162 |
163 | for pattern in artifact_source.keys:
164 | for key in variables.substitute(pattern):
165 | self.add_key(artifact_definition.name, key)
166 |
167 | elif artifact_source.type_indicator == artifacts.definitions.TYPE_INDICATOR_WINDOWS_REGISTRY_VALUE:
168 | supported = True
169 |
170 | for key_value in artifact_source.key_value_pairs:
171 | for key in variables.substitute(key_value['key']):
172 | self.add_value(artifact_definition.name, key, key_value['value'])
173 |
174 | return supported
175 |
--------------------------------------------------------------------------------
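A Windows-only sketch (the module imports winreg) of how a registry key pattern is split into a hive name and a '/'-separated key path before collection; the artifact name and key are illustrative, and the private `_keys` list is inspected purely for demonstration.

```python
from fastir.windows.registry import RegistryCollector

collector = RegistryCollector()
collector.add_key('ExampleArtifact',
                  'HKEY_LOCAL_MACHINE\\Software\\Microsoft\\Windows\\CurrentVersion\\Run')
print(collector._keys[0]['hive'])  # 'HKEY_LOCAL_MACHINE'
print(collector._keys[0]['key'])   # 'Software/Microsoft/Windows/CurrentVersion/Run'
```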
/fastir/windows/variables.py:
--------------------------------------------------------------------------------
1 | import os
2 | import winreg
3 |
4 | from .wmi import wmi_query
5 | from fastir.common.variables import HostVariables
6 |
7 |
8 | def reg(hive, key, value, alternative_value=None):
9 | k = winreg.OpenKey(hive, key, 0, winreg.KEY_READ|winreg.KEY_WOW64_64KEY)
10 |
11 | try:
12 | v = winreg.QueryValueEx(k, value)
13 | except FileNotFoundError:
14 | if not alternative_value:
15 | raise
16 |
17 | v = winreg.QueryValueEx(k, alternative_value)
18 |
19 | winreg.CloseKey(k)
20 |
21 | return v[0]
22 |
23 |
24 | class WindowsHostVariables(HostVariables):
25 |
26 | def _get_local_users(self):
27 | return wmi_query('SELECT Name, SID FROM Win32_Account WHERE SidType = 1 AND LocalAccount = True')
28 |
29 | def _get_extra_sids(self):
30 | sids = set()
31 |
32 | k1 = winreg.HKEY_USERS
33 |
34 | i = 0
35 | while 1:
36 | try:
37 | sid = winreg.EnumKey(k1, i)
38 |
39 | if '_Classes' not in sid and sid != '.DEFAULT':
40 | sids.add(sid)
41 |
42 | i += 1
43 | except WindowsError:
44 | break
45 |
46 | return sids
47 |
48 | def _get_user_profiles(self):
49 | profiles = set()
50 |
51 | k1 = winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE, r'SOFTWARE\Microsoft\Windows NT\CurrentVersion\ProfileList')
52 |
53 | i = 0
54 | while 1:
55 | try:
56 | sid = winreg.EnumKey(k1, i)
57 | k2 = winreg.OpenKey(k1, sid)
58 | v = winreg.QueryValueEx(k2, 'ProfileImagePath')
59 | winreg.CloseKey(k2)
60 |
61 | profiles.add(v[0])
62 |
63 | i += 1
64 | except WindowsError:
65 | break
66 |
67 | winreg.CloseKey(k1)
68 |
69 | return profiles
70 |
71 | def init_variables(self):
72 | systemroot = reg(
73 | winreg.HKEY_LOCAL_MACHINE,
74 | r'Software\Microsoft\Windows NT\CurrentVersion',
75 | 'SystemRoot')
76 |
77 | self.add_variable('%systemroot%', systemroot)
78 | self.add_variable('%%environ_systemroot%%', systemroot)
79 | self.add_variable('%systemdrive%', systemroot[:2])
80 | self.add_variable('%%environ_systemdrive%%', systemroot[:2])
81 |
82 | self.add_variable('%%environ_windir%%', reg(
83 | winreg.HKEY_LOCAL_MACHINE,
84 | r'System\CurrentControlSet\Control\Session Manager\Environment',
85 | 'windir'))
86 |
87 | self.add_variable('%%environ_allusersappdata%%', reg(
88 | winreg.HKEY_LOCAL_MACHINE,
89 | r'Software\Microsoft\Windows NT\CurrentVersion\ProfileList',
90 | 'ProgramData'))
91 |
92 | self.add_variable('%%environ_programfiles%%', reg(
93 | winreg.HKEY_LOCAL_MACHINE,
94 | r'Software\Microsoft\Windows\CurrentVersion',
95 | 'ProgramFilesDir'))
96 |
97 | self.add_variable('%%environ_programfiles%%', reg(
98 | winreg.HKEY_LOCAL_MACHINE,
99 | r'Software\Microsoft\Windows\CurrentVersion',
100 | 'ProgramFilesDir'))
101 |
102 | self.add_variable('%%environ_programfilesx86%%', reg(
103 | winreg.HKEY_LOCAL_MACHINE,
104 | r'Software\Microsoft\Windows\CurrentVersion',
105 | 'ProgramFilesDir (x86)', 'ProgramFilesDir'))
106 |
107 | self.add_variable('%%environ_allusersprofile%%', reg(
108 | winreg.HKEY_LOCAL_MACHINE,
109 | r'Software\Microsoft\Windows NT\CurrentVersion\ProfileList',
110 | 'AllUsersProfile', 'ProgramData'))
111 |
112 | self.add_variable('%%users.localappdata%%', reg(
113 | winreg.HKEY_USERS,
114 | r'.DEFAULT\Software\Microsoft\Windows\CurrentVersion\Explorer\User Shell Folders',
115 | 'Local AppData'))
116 |
117 | self.add_variable('%%users.appdata%%', reg(
118 | winreg.HKEY_USERS,
119 | r'.DEFAULT\Software\Microsoft\Windows\CurrentVersion\Explorer\User Shell Folders',
120 | 'AppData'))
121 |
122 | self.add_variable('%%users.temp%%', reg(
123 | winreg.HKEY_USERS,
124 | r'.DEFAULT\Environment',
125 | 'TEMP'))
126 |
127 | self.add_variable('%%users.localappdata_low%%', os.path.join('%USERPROFILE%', reg(
128 | winreg.HKEY_LOCAL_MACHINE,
129 | r'SOFTWARE\Microsoft\Windows\CurrentVersion\Explorer\FolderDescriptions\{A520A1A4-1780-4FF6-BD18-167343C5AF16}',
130 | 'RelativePath')))
131 |
132 | user_profiles = self._get_user_profiles()
133 | self.add_variable('%USERPROFILE%', user_profiles)
134 | self.add_variable('%%users.homedir%%', user_profiles)
135 | self.add_variable('%%users.userprofile%%', user_profiles)
136 |
137 | users = self._get_local_users()
138 | extra_sids = self._get_extra_sids()
139 | self.add_variable('%%users.username%%', set([user['Name'] for user in users]))
140 | self.add_variable('%%users.sid%%', set([user['SID'] for user in users] + [sid for sid in extra_sids]))
141 |
--------------------------------------------------------------------------------
/fastir/windows/wmi.py:
--------------------------------------------------------------------------------
1 | import artifacts
2 | import pywintypes
3 | import win32com.client
4 |
5 | from fastir.common.logging import logger
6 | from fastir.common.collector import AbstractCollector
7 |
8 |
9 | def wmi_query(query, base_object=None):
10 | if base_object is None:
11 | base_object = r'winmgmts:\root\cimv2'
12 |
13 | try:
14 | wmi = win32com.client.GetObject(base_object)
15 | results = wmi.ExecQuery(query)
16 |
17 | objs = []
18 | for result in results:
19 | obj = {}
20 |
21 | for p in result.Properties_:
22 | if isinstance(p.Value, win32com.client.CDispatch):
23 | continue
24 | if isinstance(p.Value, tuple) and len(p.Value) > 0 and isinstance(p.Value[0], win32com.client.CDispatch):
25 | continue
26 |
27 | obj[p.Name] = p.Value
28 |
29 | objs.append(obj)
30 |
31 | return objs
32 | except pywintypes.com_error:
33 | logger.error(f"Error while retrieving results for WMI Query '{query}'")
34 |
35 |
36 | class WMIExecutor(AbstractCollector):
37 | def __init__(self):
38 | self._queries = []
39 |
40 | def add_query(self, artifact, query, base_object):
41 | self._queries.append({
42 | 'artifact': artifact,
43 | 'query': query,
44 | 'base_object': base_object
45 | })
46 |
47 | def collect(self, output):
48 | for query in self._queries:
49 | result = wmi_query(query['query'], query['base_object'])
50 | output.add_collected_wmi(query['artifact'], query['query'], result)
51 |
52 | def register_source(self, artifact_definition, artifact_source, variables):
53 | if artifact_source.type_indicator == artifacts.definitions.TYPE_INDICATOR_WMI_QUERY:
54 | for query in variables.substitute(artifact_source.query):
55 | self.add_query(artifact_definition.name, query, artifact_source.base_object)
56 |
57 | return True
58 |
59 | return False
60 |
--------------------------------------------------------------------------------
/fastir_artifacts.py:
--------------------------------------------------------------------------------
1 | import os
2 | import sys
3 | import locale
4 |
5 | import artifacts.reader
6 | import artifacts.definitions
7 | import configargparse
8 |
9 | from fastir.common.output import Outputs
10 | from fastir.common.collector import Collector
11 | from fastir.common.logging import logger, PROGRESS
12 | from fastir.common.helpers import get_operating_system
13 |
14 |
15 | # Using a static blacklist to avoid automatic execution of steps
16 | # that could have a big impact on performance.
17 | BLACKLIST = [
18 | 'WMILoginUsers',
19 | 'WMIUsers',
20 | 'WMIVolumeShadowCopies'
21 | ]
22 |
23 | REGISTRY_TYPES = [
24 | artifacts.definitions.TYPE_INDICATOR_WINDOWS_REGISTRY_KEY,
25 | artifacts.definitions.TYPE_INDICATOR_WINDOWS_REGISTRY_VALUE
26 | ]
27 |
28 |
29 | def get_artifacts_registry(use_library, paths):
30 | reader = artifacts.reader.YamlArtifactsReader()
31 | registry = artifacts.registry.ArtifactDefinitionsRegistry()
32 |
33 | if not paths or use_library:
34 | path = os.path.join(sys.prefix, 'share', 'artifacts')
35 | registry.ReadFromDirectory(reader, path)
36 |
37 | if paths:
38 | for path in paths:
39 | registry.ReadFromDirectory(reader, path)
40 |
41 | return registry
42 |
43 |
44 | def resolve_artifact_groups(registry, artifact_names):
45 |     resolved_names = set()
46 |     if artifact_names:
47 |         artifact_names = artifact_names.split(',')
48 |
49 | for artifact in artifact_names:
50 | definition = registry.GetDefinitionByName(artifact)
51 |
52 | if definition:
53 | resolved_names.add(artifact)
54 | for source in definition.sources:
55 | if source.type_indicator == artifacts.definitions.TYPE_INDICATOR_ARTIFACT_GROUP:
56 | artifact_names += source.names
57 |
58 | return resolved_names
59 |
60 |
61 | def get_artifacts_to_collect(registry, include, exclude, platform, collect_registry):
62 | for artifact_definition in registry.GetDefinitions():
63 |         # Apply BLACKLIST, unless the artifact is explicitly requested
64 | if artifact_definition.name in BLACKLIST:
65 | if not include or artifact_definition.name not in include:
66 | continue
67 |
68 | # If a specific list of Artifacts was specified, ignore everything else
69 | if include and artifact_definition.name not in include:
70 | continue
71 |
72 | # Apply exclusion list
73 | if exclude and artifact_definition.name in exclude:
74 | continue
75 |
76 |         # We only care about artifacts available for the current platform
77 | if artifact_definition.supported_os and platform not in artifact_definition.supported_os:
78 | continue
79 |
80 | for artifact_source in artifact_definition.sources:
81 | if artifact_source.supported_os and platform not in artifact_source.supported_os:
82 | continue
83 |
84 | # Exclude registry artifacts when using the default setup
85 | # Full hives are already collected
86 | if not collect_registry and artifact_source.type_indicator in REGISTRY_TYPES:
87 | continue
88 |
89 | yield artifact_definition, artifact_source
90 |
91 |
92 | def main(arguments):
93 | try:
94 | locale.setlocale(locale.LC_ALL, 'en_US.UTF-8')
95 | except locale.Error:
96 | pass
97 | output = Outputs(arguments.output, arguments.maxsize, arguments.sha256)
98 |
99 | logger.log(PROGRESS, "Loading artifacts ...")
100 |
101 | platform = get_operating_system()
102 | collector = Collector(platform)
103 |
104 | artifacts_registry = get_artifacts_registry(arguments.library, arguments.directory)
105 |
106 | include_artifacts = resolve_artifact_groups(artifacts_registry, arguments.include)
107 | exclude_artifacts = resolve_artifact_groups(artifacts_registry, arguments.exclude)
108 |
109 | for artifact_definition, artifact_source in get_artifacts_to_collect(
110 | artifacts_registry, include_artifacts, exclude_artifacts, platform,
111 | arguments.include or (arguments.directory and not arguments.library)
112 | ):
113 | collector.register_source(artifact_definition, artifact_source)
114 |
115 | collector.collect(output)
116 |
117 |
118 | if __name__ == "__main__":
119 | parser = configargparse.ArgumentParser(
120 | default_config_files=[os.path.join((os.path.dirname(__file__), os.path.dirname(sys.executable))[hasattr(sys, 'frozen')], 'fastir_artifacts.ini')],
121 | description='FastIR Artifacts - Collect ForensicArtifacts')
122 |
123 | parser.add_argument('-i', '--include', help='Artifacts to collect (comma-separated)')
124 | parser.add_argument('-e', '--exclude', help='Artifacts to ignore (comma-separated)')
125 | parser.add_argument('-d', '--directory', help='Directory containing Artifacts definitions', nargs='+')
126 | parser.add_argument(
127 | '-l', '--library',
128 | help='Keep loading Artifacts definitions from the ForensicArtifacts library (in addition to custom directories)',
129 | action='store_true')
130 | parser.add_argument('-m', '--maxsize', help='Do not collect file with size > n')
131 | parser.add_argument('-o', '--output', help='Directory where the results are created', default='.')
132 | parser.add_argument('-s', '--sha256', help='Compute SHA-256 of collected files', action='store_true')
133 |
134 | main(parser.parse_args())
135 |
--------------------------------------------------------------------------------
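A hedged sketch of resolve_artifact_groups(): group members are appended to the work list, so nested ARTIFACT_GROUP sources are expanded transitively. The stub classes below are stand-ins for the ForensicArtifacts registry and are not part of the project.

```python
import artifacts.definitions

from fastir_artifacts import resolve_artifact_groups


class StubSource:
    type_indicator = artifacts.definitions.TYPE_INDICATOR_ARTIFACT_GROUP

    def __init__(self, names):
        self.names = names


class StubDefinition:
    def __init__(self, sources=()):
        self.sources = list(sources)


class StubRegistry:
    def __init__(self, definitions):
        self._definitions = definitions

    def GetDefinitionByName(self, name):
        return self._definitions.get(name)


registry = StubRegistry({
    'Essentials': StubDefinition([StubSource(['LinuxEssentials'])]),
    'LinuxEssentials': StubDefinition(),
})

print(resolve_artifact_groups(registry, 'Essentials'))
# {'Essentials', 'LinuxEssentials'}
```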
/fastir_artifacts.spec:
--------------------------------------------------------------------------------
1 | # -*- mode: python -*-
2 |
3 | import os.path
4 | import sys
5 |
6 |
7 | a = Analysis(['fastir_artifacts.py'],
8 | pathex=['.'],
9 | binaries=[],
10 | datas=[(os.path.join(sys.prefix, 'share', 'artifacts'), os.path.join('share', 'artifacts')),
11 | (os.path.join('examples', 'fastir_artifacts.ini'), '.'),
12 | (os.path.join('examples', 'own.yaml'), os.path.join('share', 'artifacts'))],
13 | hiddenimports=[],
14 | hookspath=[],
15 | runtime_hooks=[],
16 | excludes=[],
17 | win_no_prefer_redirects=False,
18 | win_private_assemblies=False,
19 | cipher=None,
20 | noarchive=False)
21 |
22 | pyz = PYZ(a.pure, a.zipped_data, cipher=None)
23 |
24 | exe = EXE(pyz,
25 | a.scripts,
26 | [],
27 | exclude_binaries=True,
28 | name='fastir_artifacts',
29 | debug=False,
30 | bootloader_ignore_signals=False,
31 | strip=False,
32 | upx=False,
33 | console=True,
34 | uac_admin=True,
35 | icon='Logo_OWN-Noir.ico')
36 |
37 | coll = COLLECT(exe,
38 | a.binaries,
39 | a.zipfiles,
40 | a.datas,
41 | strip=False,
42 | upx=False,
43 | name='fastir_artifacts')
44 |
45 | if sys.platform == 'win32':
46 | import glob
47 | import shutil
48 |
49 | # WOW64 redirection will pick up the right msvcp140.dll
50 | if (os.path.exists(os.path.join(os.environ['SYSTEMROOT'], 'System32', 'msvcp140.dll')) and
51 | not os.path.exists(os.path.join('dist', 'fastir_artifacts', 'msvcp140.dll'))):
52 | shutil.copy(os.path.join(os.environ['SYSTEMROOT'], 'System32', 'msvcp140.dll'), os.path.join('dist', 'fastir_artifacts'))
53 |
54 | # Copy Universal CRT
55 | if sys.maxsize > 2 ** 32:
56 | source = os.path.join(os.environ['PROGRAMFILES(X86)'], 'Windows Kits', '10', 'Redist', 'ucrt', 'DLLs', 'x64', '*.dll')
57 | else:
58 | source = os.path.join(os.environ['PROGRAMFILES(X86)'], 'Windows Kits', '10', 'Redist', 'ucrt', 'DLLs', 'x86', '*.dll')
59 |
60 | for f in glob.glob(source):
61 | if not os.path.exists(os.path.join('dist', 'fastir_artifacts', os.path.basename(f))):
62 | shutil.copy(f, os.path.join('dist', 'fastir_artifacts'))
63 |
--------------------------------------------------------------------------------
/integration_tests/conftest.py:
--------------------------------------------------------------------------------
1 | import sys
2 | import pytest
3 |
4 | ALL = set("darwin linux win32".split())
5 |
6 |
7 | def pytest_runtest_setup(item):
8 | supported_platforms = ALL.intersection(mark.name for mark in item.iter_markers())
9 | plat = sys.platform
10 | if supported_platforms and plat not in supported_platforms:
11 | pytest.skip("cannot run on platform %s" % (plat))
12 |
--------------------------------------------------------------------------------
/integration_tests/pytest.ini:
--------------------------------------------------------------------------------
1 | [pytest]
2 | markers =
3 | linux: test running only on Linux
4 | darwin: test running only on macOS
5 | win32: test running only on Windows
6 |
--------------------------------------------------------------------------------
/integration_tests/test_execution_results.py:
--------------------------------------------------------------------------------
1 | import os
2 | import sys
3 | import glob
4 | import json
5 | import zipfile
6 | import subprocess
7 | from shutil import rmtree
8 | from tempfile import mkdtemp
9 |
10 | import pytest
11 |
12 |
13 | FASTIR_ROOT = os.path.dirname(os.path.dirname(__file__))
14 | TEST_ARTIFACTS = [
15 | # UNIX Artifacts
16 | 'UnixPasswdFile',
17 |
18 | # Linux Artifacts
19 | 'IPTablesRules',
20 | 'LinuxProcMounts',
21 |
22 | # MacOS Artifacts
23 | 'MacOSUtmpxFile',
24 | 'MacOSLoadedKexts',
25 |
26 | # Windows Artifacts
27 | 'WindowsFirewallEnabledRules',
28 | 'NTFSMFTFiles',
29 | 'WindowsHostsFiles',
30 | 'WMIDrivers',
31 | ]
32 |
33 |
34 | @pytest.fixture(scope='session')
35 | def temp_dir():
36 | dirpath = mkdtemp()
37 |
38 | yield dirpath
39 |
40 | rmtree(dirpath)
41 |
42 |
43 | @pytest.fixture(scope='session')
44 | def fastir_command(temp_dir):
45 | if sys.platform == 'darwin' or sys.platform == 'linux':
46 | command = os.path.join(FASTIR_ROOT, 'dist', 'fastir_artifacts', 'fastir_artifacts')
47 | command = ['sudo', command]
48 | elif sys.platform == 'win32':
49 | command = [os.path.join('dist', 'fastir_artifacts', 'fastir_artifacts.exe')]
50 | else:
51 | raise ValueError(f'Unknown platform {sys.platform}')
52 |
53 | return command + ['-o', temp_dir, '-i', ','.join(TEST_ARTIFACTS)]
54 |
55 |
56 | @pytest.fixture(scope='session')
57 | def fastir_output(fastir_command):
58 | try:
59 | command_output = subprocess.check_output(fastir_command, stderr=subprocess.STDOUT)
60 | except subprocess.CalledProcessError as e:
61 | print(str(e.output, 'utf-8'))
62 | raise
63 |
64 | return command_output
65 |
66 | @pytest.fixture(scope='session')
67 | def fastir_results(fastir_output, temp_dir):
68 | dirname = os.listdir(temp_dir)[0]
69 | results_path = os.path.join(temp_dir, dirname)
70 |
71 | # Fix ownership
72 | if sys.platform == 'darwin' or sys.platform == 'linux':
73 | command = ['sudo', 'chown', '-R', f'{os.getuid()}:{os.getgid()}', results_path]
74 | subprocess.check_output(command)
75 |
76 | return results_path
77 |
78 |
79 | @pytest.fixture(scope='session')
80 | def command_results_file(fastir_results):
81 | return glob.glob(os.path.join(fastir_results, '*-commands.json'))[0]
82 |
83 |
84 | @pytest.fixture(scope='session')
85 | def command_results(command_results_file):
86 | with open(command_results_file, 'r') as f:
87 | yield json.load(f)
88 |
89 |
90 | @pytest.fixture(scope='session')
91 | def files_results_file(fastir_results):
92 | return glob.glob(os.path.join(fastir_results, '*-files.zip'))[0]
93 |
94 |
95 | @pytest.fixture(scope='session')
96 | def files_results(files_results_file):
97 | with zipfile.ZipFile(files_results_file) as zf:
98 | yield zf
99 |
100 |
101 | @pytest.fixture(scope='session')
102 | def files_results_names(files_results):
103 | return files_results.namelist()
104 |
105 |
106 | @pytest.fixture(scope='session')
107 | def logs_results_file(fastir_results):
108 | return glob.glob(os.path.join(fastir_results, '*-logs.txt'))[0]
109 |
110 |
111 | @pytest.fixture(scope='session')
112 | def logs_results(logs_results_file):
113 | with open(logs_results_file, 'r') as f:
114 | yield f.read()
115 |
116 |
117 | def test_collection_successful(fastir_output):
118 | assert b'Finished collecting artifacts' in fastir_output
119 |
120 |
121 | def test_output_directory_exists(fastir_results):
122 | assert os.path.isdir(fastir_results)
123 |
124 |
125 | def test_command_results_exists(command_results_file):
126 | assert os.path.isfile(command_results_file)
127 |
128 |
129 | def test_file_results_exists(files_results_file):
130 | assert os.path.isfile(files_results_file)
131 |
132 |
133 | def test_logs(logs_results_file, logs_results):
134 | assert os.path.isfile(logs_results_file)
135 | assert 'Loading artifacts' in logs_results
136 | assert 'Collecting artifacts from' in logs_results
137 | assert 'Collecting file' in logs_results
138 | assert 'Collecting command' in logs_results
139 | assert 'Finished collecting artifacts' in logs_results
140 |
141 |
142 | #####################
143 | ## Linux Tests
144 | #####################
145 | @pytest.mark.linux
146 | def test_command_iptables(command_results):
147 | assert 'IPTablesRules' in command_results
148 |
149 | for command, output in command_results['IPTablesRules'].items():
150 | assert 'iptables' in command
151 | assert 'Chain INPUT' in output
152 |
153 |
154 | @pytest.mark.linux
155 | @pytest.mark.darwin
156 | def test_file_passwd(files_results_names, files_results):
157 | assert '/etc/passwd' in files_results_names
158 | with files_results.open('/etc/passwd') as f:
159 | assert b'root' in f.read()
160 |
161 |
162 | @pytest.mark.linux
163 | def test_file_mounts(files_results_names, files_results):
164 | assert '/proc/mounts' in files_results_names
165 | with files_results.open('/proc/mounts') as f:
166 | assert b' / ' in f.read()
167 |
168 |
169 | #####################
170 | ## macOS Tests
171 | #####################
172 | @pytest.mark.darwin
173 | def test_file_utmp(files_results_names, files_results):
174 | assert '/var/run/utmpx' in files_results_names
175 |
176 |
177 | @pytest.mark.darwin
178 | def test_command_kextstat(command_results):
179 | assert 'MacOSLoadedKexts' in command_results
180 |
181 | for command, output in command_results['MacOSLoadedKexts'].items():
182 | assert 'kextstat' in command
183 | assert 'com.apple' in output
184 |
185 |
186 | #####################
187 | ## Windows Tests
188 | #####################
189 | @pytest.fixture(scope='session')
190 | def wmi_results_file(fastir_results):
191 | return glob.glob(os.path.join(fastir_results, '*-wmi.json'))[0]
192 |
193 |
194 | @pytest.fixture(scope='session')
195 | def wmi_results(wmi_results_file):
196 | with open(wmi_results_file, 'r') as f:
197 | yield json.load(f)
198 |
199 |
200 | @pytest.mark.win32
201 | def test_command_windows_firewall(command_results):
202 | assert 'WindowsFirewallEnabledRules' in command_results
203 |
204 | for command, output in command_results['WindowsFirewallEnabledRules'].items():
205 | assert 'netsh.exe' in command
206 | assert 'Windows Defender Firewall Rules:' in output
207 |
208 |
209 | @pytest.mark.win32
210 | def test_file_mft(files_results_names, files_results):
211 | assert 'C/$MFT' in files_results_names
212 | with files_results.open('C/$MFT') as f:
213 | assert b'FILE0' in f.read()
214 |
215 |
216 | @pytest.mark.win32
217 | def test_file_hosts(files_results_names, files_results):
218 | assert 'C/Windows/System32/drivers/etc/hosts' in files_results_names
219 | with files_results.open('C/Windows/System32/drivers/etc/hosts') as f:
220 | assert b'This is a sample HOSTS file used by Microsoft TCP/IP for Windows.' in f.read()
221 |
222 |
223 | @pytest.mark.win32
224 | def test_wmi_results_exists(wmi_results_file):
225 | assert os.path.isfile(wmi_results_file)
226 |
227 |
228 | @pytest.mark.win32
229 | def test_wmi_drivers(wmi_results):
230 | assert 'WMIDrivers' in wmi_results
231 | assert len(wmi_results['WMIDrivers']) > 0
232 |
233 | for query, output in wmi_results['WMIDrivers'].items():
234 | assert 'SELECT' in query
235 | assert len(output) > 0
236 | assert 'Description' in output[0]
237 | assert 'DisplayName' in output[0]
238 |
--------------------------------------------------------------------------------
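The integration tests above rely on the linux, darwin and win32 markers to scope each test to the platform the build runs on. As a hedged illustration only (the project's own integration_tests/conftest.py and pytest.ini may handle this differently), a minimal pytest_runtest_setup hook that skips tests whose platform markers do not match sys.platform could look like this:

# Hypothetical sketch -- not necessarily the project's actual hook.
import sys

import pytest

PLATFORM_MARKERS = {'linux', 'darwin', 'win32'}


def pytest_runtest_setup(item):
    # Collect the platform markers attached to this test, if any.
    wanted = PLATFORM_MARKERS.intersection(mark.name for mark in item.iter_markers())
    if wanted and sys.platform not in wanted:
        pytest.skip(f"requires one of {sorted(wanted)}, running on {sys.platform}")

The markers themselves would also need to be declared (for example under a markers entry in pytest.ini) to avoid PytestUnknownMarkWarning.

--------------------------------------------------------------------------------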
/requirements-test.txt:
--------------------------------------------------------------------------------
1 | pytest==7.1.0
2 | pytest-cov==3.0.0
3 |
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | git+https://github.com/ForensicArtifacts/artifacts.git@6b4753931aeb55f97a1838bdf582e4100ac2b3ee#egg=artifacts
2 | ConfigArgParse==1.5.3
3 | pypiwin32==223 ; sys_platform == 'win32'
4 | PyYAML==6.0
5 | pytsk3==20211111
6 | PyInstaller==4.10
7 | psutil==5.9.0
8 | jsonlines==3.0.0
9 | filetype==1.0.10
10 | pefile==2021.9.3
11 |
--------------------------------------------------------------------------------
/tests/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/OWNsecurity/fastir_artifacts/799db876984ebe7d4096c72c609d4cc28fb31e64/tests/__init__.py
--------------------------------------------------------------------------------
/tests/conftest.py:
--------------------------------------------------------------------------------
1 | # This file contains common fixtures
2 | import os
3 | from shutil import rmtree
4 | from tempfile import mkdtemp
5 | from unittest.mock import patch
6 | from collections import namedtuple
7 |
8 | import pytest
9 |
10 | from fastir.common.output import Outputs
11 | from fastir.common.variables import HostVariables
12 |
13 |
14 | FS_ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__), 'data', 'filesystem'))
15 |
16 |
17 | @pytest.fixture
18 | def temp_dir():
19 | dirpath = mkdtemp()
20 |
21 | yield dirpath
22 |
23 | rmtree(dirpath)
24 |
25 |
26 | @pytest.fixture
27 | def outputs(temp_dir):
28 | with patch.object(Outputs, 'add_collected_command'):
29 | with patch.object(Outputs, 'add_collected_file'):
30 | with patch.object(Outputs, 'add_collected_file_info'):
31 | outputs = Outputs(temp_dir, maxsize=None, sha256=False)
32 | yield outputs
33 | outputs.close()
34 |
35 |
36 | @pytest.fixture
37 | def test_variables():
38 | class HostVariablesForTests(HostVariables):
39 |
40 | def init_variables(self):
41 | pass
42 |
43 | return HostVariablesForTests()
44 |
45 |
46 | @pytest.fixture
47 | def fake_partitions():
48 | Partition = namedtuple('Partition', ['mountpoint', 'device', 'fstype'])
49 |
50 | test_data = os.path.join(os.path.dirname(__file__), 'data')
51 |
52 | partitions = [
53 | Partition('/', os.path.join(test_data, 'image.raw'), 'NTFS'),
54 | Partition(FS_ROOT, FS_ROOT, 'some_unsupported_fstype'),
55 | Partition(test_data, test_data, 'ext4')
56 | ]
57 |
58 | with patch('psutil.disk_partitions', return_value=partitions) as mock:
59 | yield mock
60 |
--------------------------------------------------------------------------------
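The fake_partitions fixture above patches psutil.disk_partitions so that filesystem tests see one TSK-backed partition (the raw test image), one partition with an unsupported filesystem type and one ext4 partition. A minimal, hypothetical usage sketch, assuming it sits next to this conftest.py so the fixture is available:

# Hypothetical test showing what the patched psutil call returns.
import psutil


def test_fake_partitions_are_visible(fake_partitions):
    mountpoints = [p.mountpoint for p in psutil.disk_partitions()]
    assert len(mountpoints) == 3
    assert '/' in mountpoints  # the partition backed by tests/data/image.raw

--------------------------------------------------------------------------------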
/tests/data/MSVCR71.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/OWNsecurity/fastir_artifacts/799db876984ebe7d4096c72c609d4cc28fb31e64/tests/data/MSVCR71.dll
--------------------------------------------------------------------------------
/tests/data/filesystem/l1/l1.txt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/OWNsecurity/fastir_artifacts/799db876984ebe7d4096c72c609d4cc28fb31e64/tests/data/filesystem/l1/l1.txt
--------------------------------------------------------------------------------
/tests/data/filesystem/l1/l2/l2.txt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/OWNsecurity/fastir_artifacts/799db876984ebe7d4096c72c609d4cc28fb31e64/tests/data/filesystem/l1/l2/l2.txt
--------------------------------------------------------------------------------
/tests/data/filesystem/l1/l2/l3/l3.txt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/OWNsecurity/fastir_artifacts/799db876984ebe7d4096c72c609d4cc28fb31e64/tests/data/filesystem/l1/l2/l3/l3.txt
--------------------------------------------------------------------------------
/tests/data/filesystem/l1/l2/l3/l4/l4.txt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/OWNsecurity/fastir_artifacts/799db876984ebe7d4096c72c609d4cc28fb31e64/tests/data/filesystem/l1/l2/l3/l4/l4.txt
--------------------------------------------------------------------------------
/tests/data/filesystem/root.txt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/OWNsecurity/fastir_artifacts/799db876984ebe7d4096c72c609d4cc28fb31e64/tests/data/filesystem/root.txt
--------------------------------------------------------------------------------
/tests/data/filesystem/root2.txt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/OWNsecurity/fastir_artifacts/799db876984ebe7d4096c72c609d4cc28fb31e64/tests/data/filesystem/root2.txt
--------------------------------------------------------------------------------
/tests/data/filesystem/test.txt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/OWNsecurity/fastir_artifacts/799db876984ebe7d4096c72c609d4cc28fb31e64/tests/data/filesystem/test.txt
--------------------------------------------------------------------------------
/tests/data/image.raw:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/OWNsecurity/fastir_artifacts/799db876984ebe7d4096c72c609d4cc28fb31e64/tests/data/image.raw
--------------------------------------------------------------------------------
/tests/test_collector.py:
--------------------------------------------------------------------------------
1 | import pytest
2 | from artifacts.artifact import ArtifactDefinition
3 | from artifacts.definitions import TYPE_INDICATOR_COMMAND, TYPE_INDICATOR_FILE, TYPE_INDICATOR_PATH
4 |
5 | from fastir.common.collector import Collector
6 | from fastir.common.filesystem import FILE_INFO_TYPE
7 | from fastir.common.helpers import get_operating_system
8 |
9 |
10 | @pytest.fixture
11 | def command_echo():
12 | artifact = ArtifactDefinition('EchoCommand')
13 | artifact.AppendSource(TYPE_INDICATOR_COMMAND, {'cmd': 'echo', 'args': ['test']})
14 |
15 | return artifact
16 |
17 |
18 | @pytest.fixture
19 | def passwords_file():
20 | artifact = ArtifactDefinition('PasswordsFile')
21 | artifact.AppendSource(TYPE_INDICATOR_FILE, {'paths': ['/passwords.txt']})
22 |
23 | return artifact
24 |
25 |
26 | @pytest.fixture
27 | def passwords_file_info():
28 | artifact = ArtifactDefinition('PasswordsFileInfo')
29 | artifact.AppendSource(FILE_INFO_TYPE, {'paths': ['/passwords.txt']})
30 |
31 | return artifact
32 |
33 |
34 | @pytest.fixture
35 | def path_artifact():
36 | artifact = ArtifactDefinition('PathArtifact')
37 | artifact.AppendSource(TYPE_INDICATOR_PATH, {'paths': ['/passwords.txt']})
38 |
39 | return artifact
40 |
41 |
42 | def test_collector(command_echo, passwords_file, passwords_file_info, outputs, fake_partitions):
43 | collector = Collector(get_operating_system())
44 |
45 | collector.register_source(command_echo, command_echo.sources[0])
46 | collector.register_source(passwords_file, passwords_file.sources[0])
47 | collector.register_source(passwords_file_info, passwords_file_info.sources[0])
48 | collector.collect(outputs)
49 |
50 | assert outputs.add_collected_file.call_count == 1
51 | assert outputs.add_collected_command.call_count == 1
52 | assert outputs.add_collected_file_info.call_count == 1
53 |
54 |
55 | def test_unsupported_source(path_artifact, caplog):
56 | collector = Collector(get_operating_system())
57 |
58 | collector.register_source(path_artifact, path_artifact.sources[0])
59 |
60 |     assert caplog.records, 'expected a warning for the unsupported source type'
61 |     log = caplog.records[0]
62 |     assert log.levelname == "WARNING"
63 |     assert log.message == "Cannot process source for 'PathArtifact' because type 'PATH' is not supported"
64 |
--------------------------------------------------------------------------------
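Putting the pieces exercised by test_collector together, a hedged end-to-end sketch of collecting a single command artifact outside the test harness (the artifact name, command and output directory are hypothetical, and the real fastir_artifacts.py entry point may wire things differently):

from artifacts.artifact import ArtifactDefinition
from artifacts.definitions import TYPE_INDICATOR_COMMAND

from fastir.common.output import Outputs
from fastir.common.collector import Collector
from fastir.common.helpers import get_operating_system

# Build a one-off command artifact, as the fixtures above do.
artifact = ArtifactDefinition('UptimeCommand')
artifact.AppendSource(TYPE_INDICATOR_COMMAND, {'cmd': 'uptime', 'args': []})

collector = Collector(get_operating_system())
collector.register_source(artifact, artifact.sources[0])

outputs = Outputs('/tmp/fastir_out', maxsize=None, sha256=False)  # hypothetical output directory
collector.collect(outputs)
outputs.close()

--------------------------------------------------------------------------------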
/tests/test_commands.py:
--------------------------------------------------------------------------------
1 | from artifacts.artifact import ArtifactDefinition
2 | from artifacts.definitions import TYPE_INDICATOR_COMMAND
3 |
4 | from fastir.common.commands import CommandExecutor
5 |
6 |
7 | def command_artifact(name, command, args):
8 | artifact = ArtifactDefinition(name)
9 | artifact.AppendSource(TYPE_INDICATOR_COMMAND, {'cmd': command, 'args': args})
10 |
11 | return artifact
12 |
13 |
14 | def test_command_execution(outputs, test_variables):
15 | collector = CommandExecutor()
16 | artifact = command_artifact('TestArtifact', 'echo', ['test'])
17 |
18 | assert collector.register_source(artifact, artifact.sources[0], test_variables) is True
19 |
20 | collector.collect(outputs)
21 | outputs.add_collected_command.assert_called_with('TestArtifact', 'echo test', b'test\n')
22 |
23 |
24 | def test_unknown_command(outputs, test_variables, caplog):
25 | collector = CommandExecutor()
26 | artifact = command_artifact('TestArtifact', 'idontexist', [])
27 |
28 | assert collector.register_source(artifact, artifact.sources[0], test_variables) is True
29 |
30 | collector.collect(outputs)
31 | outputs.add_collected_command.assert_called_with('TestArtifact', 'idontexist', b'')
32 |
33 | log = caplog.records[0]
34 | assert log.levelname == "WARNING"
35 | assert log.message == "Command 'idontexist' for artifact 'TestArtifact' could not be found"
36 |
--------------------------------------------------------------------------------
/tests/test_filesystem_manager.py:
--------------------------------------------------------------------------------
1 | import os
2 |
3 | import pytest
4 | from artifacts.artifact import ArtifactDefinition
5 | from artifacts.definitions import TYPE_INDICATOR_FILE
6 |
7 | from fastir.common.filesystem import FileSystemManager, OSFileSystem, TSKFileSystem
8 |
9 |
10 | FS_ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__), 'data', 'filesystem'))
11 |
12 |
13 | def fp(relative_path):
14 | """Create a full path from a relative path"""
15 | return os.path.join(FS_ROOT, relative_path)
16 |
17 |
18 | def resolved_paths(outputs):
19 | paths = []
20 |
21 | for call in outputs.add_collected_file.call_args_list:
22 | paths.append(call[0][1].path)
23 |
24 | return paths
25 |
26 |
27 | def file_artifact(name, pattern):
28 | artifact = ArtifactDefinition(name)
29 | artifact.AppendSource(TYPE_INDICATOR_FILE, {'paths': [pattern]})
30 |
31 | return artifact
32 |
33 |
34 | def test_get_path(fake_partitions):
35 | manager = FileSystemManager()
36 |
37 | assert isinstance(manager.get_path_object('/passwords.txt').filesystem, TSKFileSystem)
38 | assert isinstance(manager.get_path_object(fp('root.txt')).filesystem, OSFileSystem)
39 |
40 |
41 | def test_add_artifacts(fake_partitions, outputs, test_variables):
42 | manager = FileSystemManager()
43 |
44 | artifact = file_artifact('TestArtifact', '/passwords.txt')
45 | manager.register_source(artifact, artifact.sources[0], test_variables)
46 |
47 | artifact = file_artifact('TestArtifact2', fp('root.txt'))
48 | manager.register_source(artifact, artifact.sources[0], test_variables)
49 |
50 | manager.collect(outputs)
51 |
52 | assert set(resolved_paths(outputs)) == set(['/passwords.txt', fp('root.txt')])
53 |
54 |
55 | def test_artifact_all_mountpoints(fake_partitions, outputs, test_variables):
56 | manager = FileSystemManager()
57 |
58 | artifact = file_artifact('TestArtifact', '\\passwords.txt')
59 | manager.register_source(artifact, artifact.sources[0], test_variables)
60 |
61 | manager.collect(outputs)
62 |
63 | assert resolved_paths(outputs) == ['/passwords.txt']
64 |
65 |
66 | def test_no_mountpoint(fake_partitions):
67 | manager = FileSystemManager()
68 |
69 | with pytest.raises(IndexError):
70 | manager.get_path_object('im_not_a_mountpoint/file.txt')
71 |
--------------------------------------------------------------------------------
/tests/test_os_filesystem.py:
--------------------------------------------------------------------------------
1 | import os
2 |
3 | import pytest
4 |
5 | from fastir.common.filesystem import OSFileSystem
6 |
7 |
8 | FS_ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__), 'data', 'filesystem'))
9 |
10 |
11 | @pytest.fixture
12 | def fs_test():
13 | return OSFileSystem(FS_ROOT)
14 |
15 |
16 | def fp(relative_path):
17 | """Create a full path from a relative path"""
18 | return os.path.join(FS_ROOT, relative_path)
19 |
20 |
21 | def resolved_paths(outputs):
22 | paths = []
23 |
24 | for call in outputs.add_collected_file.call_args_list:
25 | assert call[0][0] == 'TestArtifact'
26 | paths.append(
27 | os.path.relpath(call[0][1].path, FS_ROOT).replace(os.path.sep, '/'))
28 |
29 | return paths
30 |
31 |
32 | def test_path_resolution_simple(fs_test, outputs):
33 | fs_test.add_pattern('TestArtifact', fp('root.txt'))
34 | fs_test.collect(outputs)
35 |
36 | assert resolved_paths(outputs) == ['root.txt']
37 |
38 |
39 | def test_path_resolution_simple2(fs_test, outputs):
40 | fs_test.add_pattern('TestArtifact', fp('l1/l2/l2.txt'))
41 | fs_test.collect(outputs)
42 |
43 | assert resolved_paths(outputs) == ['l1/l2/l2.txt']
44 |
45 |
46 | def test_path_resolution_globbing_star(fs_test, outputs):
47 | fs_test.add_pattern('TestArtifact', fp('*.txt'))
48 | fs_test.collect(outputs)
49 |
50 | assert set(resolved_paths(outputs)) == set(['root.txt', 'root2.txt', 'test.txt'])
51 |
52 |
53 | def test_path_resolution_globbing_star2(fs_test, outputs):
54 | fs_test.add_pattern('TestArtifact', fp('root*.txt'))
55 | fs_test.collect(outputs)
56 |
57 | assert set(resolved_paths(outputs)) == set(['root.txt', 'root2.txt'])
58 |
59 |
60 | def test_path_resolution_globbing_question(fs_test, outputs):
61 | fs_test.add_pattern('TestArtifact', fp('root?.txt'))
62 | fs_test.collect(outputs)
63 |
64 | assert resolved_paths(outputs) == ['root2.txt']
65 |
66 |
67 | def test_path_resolution_globbing_star_directory(fs_test, outputs):
68 | fs_test.add_pattern('TestArtifact', fp('l1/*/l2.txt'))
69 | fs_test.collect(outputs)
70 |
71 | assert resolved_paths(outputs) == ['l1/l2/l2.txt']
72 |
73 |
74 | def test_path_resolution_recursive_star(fs_test, outputs):
75 | fs_test.add_pattern('TestArtifact', fp('**/l2.txt'))
76 | fs_test.collect(outputs)
77 |
78 | assert resolved_paths(outputs) == ['l1/l2/l2.txt']
79 |
80 |
81 | def test_path_resolution_recursive_star_default_depth(fs_test, outputs):
82 | fs_test.add_pattern('TestArtifact', fp('**/*.txt'))
83 | fs_test.collect(outputs)
84 |
85 |     # Should only descend to l3 because the default recursion depth is 3
86 | assert set(resolved_paths(outputs)) == set([
87 | 'l1/l1.txt', 'l1/l2/l2.txt', 'l1/l2/l3/l3.txt'])
88 |
89 |
90 | def test_path_resolution_recursive_star_custom_depth(fs_test, outputs):
91 | fs_test.add_pattern('TestArtifact', fp('**4/*.txt'))
92 | fs_test.collect(outputs)
93 |
94 | # Should reach l4 because of the custom depth
95 | assert set(resolved_paths(outputs)) == set([
96 | 'l1/l1.txt', 'l1/l2/l2.txt', 'l1/l2/l3/l3.txt', 'l1/l2/l3/l4/l4.txt'])
97 |
98 |
99 | def test_path_resolution_recursive_star_root(fs_test, outputs):
100 | fs_test.add_pattern('TestArtifact', fp('**.txt'))
101 | fs_test.collect(outputs)
102 |
103 |     # Should only descend to l2 because the default recursion depth is 3
104 | assert set(resolved_paths(outputs)) == set([
105 | 'root.txt', 'root2.txt', 'test.txt', 'l1/l1.txt', 'l1/l2/l2.txt'])
106 |
107 |
108 | def test_is_symlink(fs_test):
109 | path_object = fs_test.get_fullpath(fp('root.txt'))
110 | assert path_object.is_symlink() is False
111 |
--------------------------------------------------------------------------------
/tests/test_outputs.py:
--------------------------------------------------------------------------------
1 | import io
2 | import os
3 | import glob
4 | import json
5 | import pytest
6 | import platform
7 | from zipfile import ZipFile
8 |
9 | from jsonlines import Reader
10 |
11 | from fastir.common.logging import logger
12 | from fastir.common.filesystem import OSFileSystem
13 | from fastir.common.output import parse_human_size, normalize_filepath, Outputs
14 |
15 |
16 | def output_file_content(dirpath, pattern):
17 |     """Read the content of the output file matching the specified pattern."""
18 | outdir = glob.glob(os.path.join(dirpath, f'*-{platform.node()}'))[0]
19 | filepath = glob.glob(os.path.join(outdir, pattern))[0]
20 |
21 | with open(filepath, 'rb') as f:
22 | return f.read()
23 |
24 |
25 | def test_parse_human_size():
26 | assert parse_human_size('1') == 1
27 | assert parse_human_size('2B') == 2
28 | assert parse_human_size('3K') == 3072
29 | assert parse_human_size('4M') == 4194304
30 | assert parse_human_size('5G') == 5368709120
31 |
32 | with pytest.raises(ValueError):
33 | parse_human_size('124XS')
34 |
35 |
36 | def test_normalize_filepath():
37 | assert normalize_filepath('C:/test'.replace('/', os.path.sep)) == os.path.join('C', 'test')
38 | assert normalize_filepath(os.path.join('', 'usr', 'share')) == os.path.join('', 'usr', 'share')
39 |
40 |
41 | def test_logging(temp_dir):
42 | # Create an Outputs instance and log a message
43 | output = Outputs(temp_dir, None, False)
44 | logger.info('test log message')
45 | output.close()
46 |
47 | # Make sure the log message appears in the output directory
48 | logs = output_file_content(temp_dir, '*-logs.txt')
49 | assert b'test log message' in logs
50 |
51 |
52 | @pytest.fixture
53 | def test_file(temp_dir):
54 | test_file = os.path.join(temp_dir, 'test_file.txt')
55 |
56 | with open(test_file, 'w') as f:
57 | f.write('MZtest content')
58 |
59 | yield test_file
60 |
61 |
62 | @pytest.fixture
63 | def test_pe_file():
64 | DATA_ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__), 'data'))
65 |
66 | return OSFileSystem('/').get_fullpath(os.path.join(DATA_ROOT, 'MSVCR71.dll'))
67 |
68 |
69 | def test_collect_file_info(temp_dir, test_file):
70 | output = Outputs(temp_dir, None, False)
71 | output.add_collected_file_info('TestArtifact', OSFileSystem('/').get_fullpath(test_file))
72 | output.close()
73 |
74 | with Reader(output_file_content(temp_dir, '*-file_info.jsonl').splitlines()) as jsonl:
75 | record = jsonl.read()
76 |
77 | assert '@timestamp' in record
78 | assert record['file']['path'].endswith('test_file.txt')
79 | assert record['file']['size'] == 14
80 | assert record['file']['mime_type'] == "application/x-msdownload"
81 | assert record['file']['hash']['md5'] == "10dbf3e392abcc57f8fae061c7c0aeec"
82 | assert record['file']['hash']['sha1'] == "7ef0fe6c3855fbac1884e95622d9e45ce1d4ae9b"
83 | assert record['file']['hash']['sha256'] == "cfb91ddbf08c52ff294fdf1657081a98c090d270dbb412a91ace815b3df947b6"
84 |
85 |
86 | def test_collect_pe_file_info(temp_dir, test_pe_file):
87 | output = Outputs(temp_dir, None, False)
88 | output.add_collected_file_info('TestArtifact', test_pe_file)
89 | output.close()
90 |
91 | with Reader(output_file_content(temp_dir, '*-file_info.jsonl').splitlines()) as jsonl:
92 | record = jsonl.read()
93 |
94 | assert '@timestamp' in record
95 | assert record['labels']['artifact'] == "TestArtifact"
96 | assert record['file']['path'].endswith('MSVCR71.dll')
97 | assert record['file']['size'] == 348160
98 | assert record['file']['mime_type'] == "application/x-msdownload"
99 | assert record['file']['hash']['md5'] == "86f1895ae8c5e8b17d99ece768a70732"
100 | assert record['file']['hash']['sha1'] == "d5502a1d00787d68f548ddeebbde1eca5e2b38ca"
101 | assert record['file']['hash']['sha256'] == "8094af5ee310714caebccaeee7769ffb08048503ba478b879edfef5f1a24fefe"
102 | assert record['file']['pe']['company'] == "Microsoft Corporation"
103 | assert record['file']['pe']['description'] == "Microsoft® C Runtime Library"
104 | assert record['file']['pe']['file_version'] == "7.10.3052.4"
105 | assert record['file']['pe']['original_file_name'] == "MSVCR71.DLL"
106 | assert record['file']['pe']['product'] == "Microsoft® Visual Studio .NET"
107 | assert record['file']['pe']['imphash'] == "7acc8c379c768a1ecd81ec502ff5f33e"
108 | assert record['file']['pe']['compilation'] == "2003-02-21T12:42:20"
109 |
110 |
111 | def test_collect_file(temp_dir, test_file):
112 | output = Outputs(temp_dir, None, False)
113 | output.add_collected_file('TestArtifact', OSFileSystem('/').get_fullpath(test_file))
114 | output.close()
115 |
116 | zip_content = io.BytesIO(output_file_content(temp_dir, '*-files.zip'))
117 | zipfile = ZipFile(zip_content)
118 | zipped_file = zipfile.namelist()[0]
119 |
120 | assert zipped_file.endswith('test_file.txt')
121 |
122 |
123 | def test_collect_file_size_filter(temp_dir, test_file):
124 | # Create a file that should be ignored due to its size
125 | test_big_file = os.path.join(temp_dir, 'test_big_file.txt')
126 |
127 | with open(test_big_file, 'w') as f:
128 | f.write('some bigger content')
129 |
130 | output = Outputs(temp_dir, '15', False) # Set maximum size to 15 bytes
131 | output.add_collected_file('TestArtifact', OSFileSystem('/').get_fullpath(test_file))
132 | output.add_collected_file('TestArtifact', OSFileSystem('/').get_fullpath(test_big_file))
133 | output.close()
134 |
135 | zip_content = io.BytesIO(output_file_content(temp_dir, '*-files.zip'))
136 | zipfile = ZipFile(zip_content)
137 | zipped_files = zipfile.namelist()
138 |
139 | assert len(zipped_files) == 1
140 | assert zipped_files[0].endswith('test_file.txt')
141 |
142 | logs = output_file_content(temp_dir, '*-logs.txt')
143 | assert b"test_big_file.txt' because of its size" in logs
144 |
145 |
146 | def test_collect_command(temp_dir):
147 | output = Outputs(temp_dir, None, False)
148 | output.add_collected_command('TestArtifact', 'command', b'output')
149 | output.close()
150 |
151 | commands = json.loads(output_file_content(temp_dir, '*-commands.json'))
152 | assert commands == {
153 | 'TestArtifact': {
154 | 'command': 'output'
155 | }
156 | }
157 |
158 |
159 | def test_collect_wmi(temp_dir):
160 | output = Outputs(temp_dir, None, False)
161 | output.add_collected_wmi('TestArtifact', 'query', 'output')
162 | output.close()
163 |
164 | wmi = json.loads(output_file_content(temp_dir, '*-wmi.json'))
165 | assert wmi == {
166 | 'TestArtifact': {
167 | 'query': 'output'
168 | }
169 | }
170 |
171 |
172 | def test_collect_registry(temp_dir):
173 | output = Outputs(temp_dir, None, False)
174 | output.add_collected_registry_value('TestArtifact', 'key', 'name', 'value', 'type')
175 | output.close()
176 |
177 | registry = json.loads(output_file_content(temp_dir, '*-registry.json'))
178 | assert registry == {
179 | 'TestArtifact': {
180 | 'key': {
181 | 'name': {
182 | 'value': 'value',
183 | 'type': 'type'
184 | }
185 | }
186 | }
187 | }
188 |
--------------------------------------------------------------------------------
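For reference, a minimal sketch of a size parser that would satisfy the expectations in test_parse_human_size above; the actual fastir.common.output.parse_human_size may be implemented differently:

# Hypothetical sketch, kept deliberately simple.
def parse_human_size_sketch(value):
    units = {'B': 1, 'K': 1024, 'M': 1024 ** 2, 'G': 1024 ** 3}
    if value and value[-1] in units:
        return int(value[:-1]) * units[value[-1]]
    if value.isdigit():
        return int(value)
    raise ValueError(f'Unsupported size format: {value}')

--------------------------------------------------------------------------------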
/tests/test_tsk_filesystem.py:
--------------------------------------------------------------------------------
1 | import os
2 |
3 | import pytest
4 |
5 | from fastir.common.filesystem import TSKFileSystem
6 |
7 |
8 | @pytest.fixture
9 | def fs_test():
10 | return TSKFileSystem(
11 | None, os.path.join(os.path.dirname(__file__), 'data', 'image.raw'), '/')
12 |
13 |
14 | def resolved_paths(outputs):
15 | paths = []
16 |
17 | for call in outputs.add_collected_file.call_args_list:
18 | paths.append(call[0][1].path.replace(os.path.sep, '/'))
19 |
20 | return paths
21 |
22 |
23 | def test_all_files(fs_test, outputs):
24 | fs_test.add_pattern('TestArtifact', '/**')
25 | fs_test.collect(outputs)
26 |
27 | # Deleted files and directories should not resolve
28 | assert set(resolved_paths(outputs)) == set([
29 | '/a_directory/another_file',
30 | '/a_directory/a_file',
31 | '/passwords.txt',
32 | ])
33 |
34 |
35 | def test_is_symlink(fs_test):
36 | path_object = fs_test.get_fullpath('/passwords.txt')
37 | assert path_object.is_symlink() is False
38 |
39 |
40 | def test_several_patterns(fs_test, outputs):
41 |     # This test is meant to verify that the cache is functional
42 | fs_test.add_pattern('TestArtifact', '/**')
43 | fs_test.add_pattern('TestArtifact2', '/a_directory/*')
44 | fs_test.collect(outputs)
45 |
46 | # Deleted files and directories should not resolve
47 | paths = resolved_paths(outputs)
48 | assert set(paths) == set([
49 | '/a_directory/another_file',
50 | '/a_directory/a_file',
51 | '/passwords.txt',
52 | ])
53 | assert paths.count('/a_directory/a_file') == 2
54 |
55 |
56 | def test_read_chunks(fs_test):
57 | path_object = fs_test.get_fullpath('/passwords.txt')
58 | content = next(path_object.read_chunks())
59 |
60 | assert content == b"""place,user,password
61 | bank,joesmith,superrich
62 | alarm system,-,1234
63 | treasure chest,-,1111
64 | uber secret laire,admin,admin
65 | """
66 |
67 |
68 | def test_get_size(fs_test):
69 | path_object = fs_test.get_fullpath('/passwords.txt')
70 | assert path_object.get_size() == 116
71 |
--------------------------------------------------------------------------------
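TSKFileSystem is backed by pytsk3 (pinned in requirements.txt). As a hedged illustration of what these tests exercise, reading the same file straight from the raw image with pytsk3 could look like this (paths are relative to the repository root; the project's own wrapper may differ):

import os

import pytsk3

image_path = os.path.join('tests', 'data', 'image.raw')
img = pytsk3.Img_Info(image_path)   # open the raw disk image
fs = pytsk3.FS_Info(img)            # parse the filesystem it contains

f = fs.open('/passwords.txt')
size = f.info.meta.size
print(f.read_random(0, size))  # expected to start with b'place,user,password'

--------------------------------------------------------------------------------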
/tests/test_variables.py:
--------------------------------------------------------------------------------
1 | from fastir.common.variables import HostVariables
2 |
3 |
4 | class HostVariablesForTests(HostVariables):
5 |
6 | def init_variables(self):
7 | self.add_variable('%%users.homedir%%', set([
8 | '%%USERDIR%%',
9 | '/tmp/root'
10 | ]))
11 | self.add_variable('%%USERDIR%%', '/home/user')
12 |
13 |
14 | def test_variables():
15 | variables = HostVariablesForTests()
16 |
17 | assert variables.substitute('%%users.homedir%%/test') == set([
18 | '/home/user/test', '/tmp/root/test'
19 | ])
20 | assert variables.substitute('test%%USERDIR%%test') == set([
21 | 'test/home/usertest'
22 | ])
23 | assert variables.substitute('i_dont_have_variables') == set([
24 | 'i_dont_have_variables'
25 | ])
26 | assert variables.substitute('i_contain_%%unsupported%%_variables') == set([
27 | 'i_contain_%%unsupported%%_variables'
28 | ])
29 |
--------------------------------------------------------------------------------
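The expectations in test_variables.py imply a simple recursive substitution: each registered variable maps to one or more replacement strings, unknown variables are left untouched, and nested variables (such as %%USERDIR%% inside a %%users.homedir%% value) are expanded as well. A minimal sketch consistent with those tests, assuming nothing about the real fastir.common.variables implementation:

# Hypothetical sketch only -- the real HostVariables may differ.
# Assumes no variable expands to a value containing itself.
class SketchHostVariables:
    def __init__(self):
        self._variables = {}
        self.init_variables()

    def init_variables(self):
        pass  # subclasses register their variables here

    def add_variable(self, name, value):
        # A value may be a single string or a set of alternatives.
        self._variables[name] = value if isinstance(value, set) else {value}

    def substitute(self, path):
        results = {path}
        changed = True
        while changed:
            changed = False
            for name, values in self._variables.items():
                expanded = set()
                for candidate in results:
                    if name in candidate:
                        expanded.update(candidate.replace(name, value) for value in values)
                        changed = True
                    else:
                        expanded.add(candidate)
                results = expanded
        return results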