├── .github
└── workflows
│ └── python-app.yml
├── .gitignore
├── .pylintrc
├── CONTRIBUTING.md
├── LICENSE
├── README.md
├── auto_forensicate
├── __init__.py
├── auto_acquire.py
├── errors.py
├── hostinfo.py
├── macdisk.py
├── recipes
│ ├── __init__.py
│ ├── base.py
│ ├── directory.py
│ ├── disk.py
│ ├── firmware.py
│ └── sysinfo.py
├── stamp
│ ├── __init__.py
│ └── manager.py
├── uploader.py
└── ux
│ ├── __init__.py
│ ├── cli.py
│ ├── gui.py
│ └── zenity.py
├── config
├── jenkins
│ ├── e2e.sh
│ └── e2e_tools.py
└── patches
│ ├── boto_pr3561_connection.py.patch
│ └── boto_pr3561_key.py.patch
├── doc
├── FAQ.md
└── gift_video.gif
├── requirements.txt
├── run_tests.py
├── setup.py
├── tests
├── auto_forensicate_tests.py
├── base_tests.py
├── directory_tests.py
├── disk_tests.py
├── firmware_tests.py
├── hostinfo_tests.py
├── stamp_tests.py
├── sysinfo_tests.py
└── uploader_tests.py
└── tools
├── commons.sh
├── remaster.sh
└── remaster_scripts
├── call_auto_forensicate.sh
├── e2e
├── post-install-root.sh
└── post-install-user.sh
├── post-install-root.sh
└── post-install-user.sh
/.github/workflows/python-app.yml:
--------------------------------------------------------------------------------
1 | # This workflow will install Python dependencies, run tests and lint with a single version of Python
2 | # For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions
3 |
4 | name: GiftStick Tests
5 |
6 | on:
7 | push:
8 | branches: [ main ]
9 | pull_request:
10 | branches: [ main ]
11 |
12 | jobs:
13 | build:
14 |
15 | runs-on: ubuntu-latest
16 |
17 | steps:
18 | - uses: actions/checkout@v2
19 | - name: Set up Python 3.8
20 | uses: actions/setup-python@v2
21 | with:
22 | python-version: 3.8
23 | - name: Install dependencies
24 | run: |
25 | python -m pip install --upgrade pip
26 | pip install flake8 pytest
27 | if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
28 | - name: Unit tests
29 | run: |
30 | export BOTO_CONFIG=/dev/null
31 | python run_tests.py
32 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | .Python
11 | build/
12 | develop-eggs/
13 | dist/
14 | downloads/
15 | eggs/
16 | .eggs/
17 | lib/
18 | lib64/
19 | parts/
20 | sdist/
21 | var/
22 | wheels/
23 | *.egg-info/
24 | .installed.cfg
25 | *.egg
26 | MANIFEST
27 |
28 | # PyInstaller
29 | # Usually these files are written by a python script from a template
30 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
31 | *.manifest
32 | *.spec
33 |
34 | # Installer logs
35 | pip-log.txt
36 | pip-delete-this-directory.txt
37 |
38 | # Unit test / coverage reports
39 | htmlcov/
40 | .tox/
41 | .nox/
42 | .coverage
43 | .coverage.*
44 | .cache
45 | nosetests.xml
46 | coverage.xml
47 | *.cover
48 | .hypothesis/
49 | .pytest_cache/
50 |
51 | # Translations
52 | *.mo
53 | *.pot
54 |
55 | # PyBuilder
56 | target/
57 |
58 | # IPython
59 | profile_default/
60 | ipython_config.py
61 |
62 | # pyenv
63 | .python-version
64 |
65 | # Environments
66 | .env
67 | .venv
68 | env/
69 | venv/
70 | ENV/
71 | env.bak/
72 | venv.bak/
73 |
74 | # Ubuntu ISO remastering artifacts
75 | *.iso
76 | *.iso.remastered
77 | *.iso.remastered.md5
78 | *.img
79 |
80 | # Editor/IDE artifacts
81 | .vscode/
82 |
--------------------------------------------------------------------------------
/.pylintrc:
--------------------------------------------------------------------------------
1 | # Original file copied from:
2 | # http://src.chromium.org/chrome/trunk/tools/depot_tools/pylintrc
3 | #
4 | # Updated with examples from project l2tdevtools.
5 |
6 | [MASTER]
7 |
8 | # Specify a configuration file.
9 | #rcfile=
10 | #
11 | # Add files or directories to the blacklist. They should be base names, not
12 | # paths.
13 | ignore=CVS
14 |
15 | # Add files or directories matching the regex patterns to the blacklist. The
16 | # regex matches against base names, not paths.
17 | ignore-patterns=
18 |
19 | # Python code to execute, usually for sys.path manipulation such as
20 | # pygtk.require().
21 | #init-hook=
22 |
23 | # Use multiple processes to speed up Pylint.
24 | jobs=1
25 |
26 | # List of plugins (as comma separated values of python modules names) to load,
27 | # usually to register additional checkers.
28 | load-plugins=pylint.extensions.docparams
29 |
30 | # Pickle collected data for later comparisons.
31 | persistent=yes
32 |
33 | # Specify a configuration file.
34 | #rcfile=
35 |
36 | # Allow loading of arbitrary C extensions. Extensions are imported into the
37 | # active Python interpreter and may run arbitrary code.
38 | unsafe-load-any-extension=no
39 |
40 |
41 | [MESSAGES CONTROL]
42 |
43 | # Only show warnings with the listed confidence levels. Leave empty to show
44 | # all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED
45 | confidence=
46 |
47 | # Disable the message, report, category or checker with the given id(s). You
48 | # can either give multiple identifier separated by comma (,) or put this option
49 | # multiple time (only on the command line, not in the configuration file where
50 | # it should appear only once).
51 | # CHANGED:
52 | #
53 | # C0103: Invalid name ""
54 | # C0302: Too many lines in module (N)
55 | #
56 | # I0010: Unable to consider inline option ''
57 | # I0011: Locally disabling WNNNN
58 | #
59 | # R0201: Method could be a function
60 | # R0801: Similar lines in N files
61 | # R0901: Too many ancestors (N/7)
62 | # R0902: Too many instance attributes (N/7)
63 | # R0903: Too few public methods (N/2)
64 | # R0904: Too many public methods (N/20)
65 | # R0911: Too many return statements (N/6)
66 | # R0912: Too many branches (N/12)
67 | # R0913: Too many arguments (N/5)
68 | # R0914: Too many local variables (N/15)
69 | # R0915: Too many statements (N/50)
70 | # R0921: Abstract class not referenced
71 | # R0922: Abstract class is only referenced 1 times
72 | #
73 | # W0141: Used builtin function ''
74 | # W0142: Used * or ** magic
75 | # W0402: Uses of a deprecated module 'string'
76 | # W0404: 41: Reimport 'XX' (imported line NN)
77 | # W0511: TODO
78 | # W1201: Specify string format arguments as logging function parameters
79 | #
80 | # Disabled:
81 | # consider-iterating-dictionary
82 | # locally-enabled
83 | # logging-format-interpolation
84 | # no-member
85 | # redefined-variable-type
86 | # simplifiable-if-statement
87 | # too-many-boolean-expressions (N/5)
88 | # too-many-nested-blocks (N/5)
89 | # ungrouped-imports
90 |
91 | disable=
92 | duplicate-code,
93 | parameter-unpacking,
94 | raw-checker-failed,
95 | bad-inline-option,
96 | locally-disabled,
97 | locally-enabled,
98 | file-ignored,
99 | suppressed-message,
100 | useless-suppression,
101 | deprecated-pragma,
102 | no-absolute-import,
103 | missing-param-doc,
104 | metaclass-assignment,
105 | eq-without-hash,
106 | fixme,
107 | logging-format-interpolation,
108 | no-self-use,
109 | too-few-public-methods,
110 | too-many-ancestors,
111 | too-many-boolean-expressions,
112 | too-many-branches,
113 | too-many-instance-attributes,
114 | too-many-lines,
115 | too-many-locals,
116 | too-many-nested-blocks,
117 | too-many-public-methods,
118 | too-many-return-statements,
119 | too-many-statements,
120 | unsubscriptable-object
121 |
122 | # Enable the message, report, category or checker with the given id(s). You can
123 | # either give multiple identifier separated by comma (,) or put this option
124 | # multiple time (only on the command line, not in the configuration file where
125 | # it should appear only once). See also the "--disable" option for examples.
126 | enable=
127 |
128 |
129 | [REPORTS]
130 |
131 | # Python expression which should return a note less than 10 (10 is the highest
132 | # note). You have access to the variables errors warning, statement which
133 | # respectively contain the number of errors / warnings messages and the total
134 | # number of statements analyzed. This is used by the global evaluation report
135 | # (RP0004).
136 | evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)
137 |
138 | # Template used to display messages. This is a python new-style format string
139 | # used to format the message information. See doc for all details
140 | #msg-template=
141 |
142 | # Set the output format. Available formats are text, parseable, colorized, json
143 | # and msvs (visual studio).You can also give a reporter class, eg
144 | # mypackage.mymodule.MyReporterClass.
145 | output-format=text
146 |
147 | # Tells whether to display a full report or only the messages
148 | reports=no
149 |
150 | # Activate the evaluation score.
151 | # score=yes
152 | score=no
153 |
154 |
155 | [REFACTORING]
156 |
157 | # Maximum number of nested blocks for function / method body
158 | max-nested-blocks=5
159 |
160 |
161 | [VARIABLES]
162 |
163 | # List of additional names supposed to be defined in builtins. Remember that
164 | # you should avoid to define new builtins when possible.
165 | additional-builtins=
166 |
167 | # Tells whether unused global variables should be treated as a violation.
168 | allow-global-unused-variables=yes
169 |
170 | # List of strings which can identify a callback function by name. A callback
171 | # name must start or end with one of those strings.
172 | callbacks=cb_,_cb
173 |
174 | # A regular expression matching the name of dummy variables (i.e. expectedly
175 | # not used).
176 | dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_
177 |
178 | # Argument names that match this expression will be ignored. Default to name
179 | # with leading underscore
180 | ignored-argument-names=_.*|^ignored_|^unused_
181 |
182 | # Tells whether we should check for unused import in __init__ files.
183 | init-import=no
184 |
185 | # List of qualified module names which can have objects that can redefine
186 | # builtins.
187 | redefining-builtins-modules=six.moves,future.builtins
188 |
189 |
190 | [TYPECHECK]
191 |
192 | # List of decorators that produce context managers, such as
193 | # contextlib.contextmanager. Add to this list to register other decorators that
194 | # produce valid context managers.
195 | contextmanager-decorators=contextlib.contextmanager
196 |
197 | # List of members which are set dynamically and missed by pylint inference
198 | # system, and so shouldn't trigger E1101 when accessed. Python regular
199 | # expressions are accepted.
200 | generated-members=
201 |
202 | # Tells whether missing members accessed in mixin class should be ignored. A
203 | # mixin class is detected if its name ends with "mixin" (case insensitive).
204 | ignore-mixin-members=yes
205 |
206 | # This flag controls whether pylint should warn about no-member and similar
207 | # checks whenever an opaque object is returned when inferring. The inference
208 | # can return multiple potential results while evaluating a Python object, but
209 | # some branches might not be evaluated, which results in partial inference. In
210 | # that case, it might be useful to still emit no-member and other checks for
211 | # the rest of the inferred objects.
212 | ignore-on-opaque-inference=yes
213 |
214 | # List of class names for which member attributes should not be checked (useful
215 | # for classes with dynamically set attributes). This supports the use of
216 | # qualified names.
217 | ignored-classes=optparse.Values,thread._local,_thread._local
218 |
219 | # List of module names for which member attributes should not be checked
220 | # (useful for modules/projects where namespaces are manipulated during runtime
221 | # and thus existing member attributes cannot be deduced by static analysis. It
222 | # supports qualified module names, as well as Unix pattern matching.
223 | ignored-modules=
224 |
225 | # Show a hint with possible names when a member name was not found. The aspect
226 | # of finding the hint is based on edit distance.
227 | missing-member-hint=yes
228 |
229 | # The minimum edit distance a name should have in order to be considered a
230 | # similar match for a missing member name.
231 | missing-member-hint-distance=1
232 |
233 | # The total number of similar names that should be taken in consideration when
234 | # showing a hint for a missing member.
235 | missing-member-max-choices=1
236 |
237 |
238 | [LOGGING]
239 |
240 | # Logging modules to check that the string format arguments are in logging
241 | # function parameter format
242 | logging-modules=logging
243 |
244 |
245 | [BASIC]
246 |
247 | # Naming hint for argument names
248 | # argument-name-hint=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$
249 | argument-name-hint=(([a-z][a-z0-9_]*)|(_[a-z0-9_]*))$
250 |
251 | # Regular expression matching correct argument names
252 | # argument-rgx=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$
253 | argument-rgx=(([a-z][a-z0-9_]*)|(_[a-z0-9_]*))$
254 |
255 | # Naming hint for attribute names
256 | # attr-name-hint=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$
257 | attr-name-hint=(([a-z][a-z0-9_]*)|(_[a-z0-9_]*))$
258 |
259 | # Regular expression matching correct attribute names
260 | # attr-rgx=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$
261 | attr-rgx=(([a-z][a-z0-9_]*)|(_[a-z0-9_]*))$
262 |
263 | # Bad variable names which should always be refused, separated by a comma
264 | bad-names=foo,bar,baz,toto,tutu,tata,lol
265 |
266 | # Naming hint for class attribute names
267 | # class-attribute-name-hint=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$
268 | class-attribute-name-hint=([A-Za-z_][A-Za-z0-9_]*|(__.*__))$
269 |
270 | # Regular expression matching correct class attribute names
271 | # class-attribute-rgx=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$
272 | class-attribute-rgx=([A-Za-z_][A-Za-z0-9_]*|(__.*__))$
273 |
274 | # Naming hint for class names
275 | class-name-hint=[A-Z_][a-zA-Z0-9]+$
276 |
277 | # Regular expression matching correct class names
278 | class-rgx=[A-Z_][a-zA-Z0-9]+$
279 |
280 | # Naming hint for constant names
281 | # const-name-hint=(([A-Z_][A-Z0-9_]*)|(__.*__))$
282 | const-name-hint=(([a-zA-Z_][a-zA-Z0-9_]*)|(__.*__))$
283 |
284 | # Regular expression matching correct constant names
285 | # const-rgx=(([A-Z_][A-Z0-9_]*)|(__.*__))$
286 | const-rgx=(([a-zA-Z_][a-zA-Z0-9_]*)|(__.*__))$
287 |
288 | # Minimum line length for functions/classes that require docstrings, shorter
289 | # ones are exempt.
290 | docstring-min-length=-1
291 |
292 | # Naming hint for function names
293 | # function-name-hint=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$
294 | function-name-hint=[A-Z_][a-zA-Z0-9_]*$
295 |
296 | # Regular expression matching correct function names
297 | # function-rgx=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$
298 | function-rgx=[A-Z_][a-zA-Z0-9_]*$
299 |
300 | # Good variable names which should always be accepted, separated by a comma
301 | good-names=i,j,k,ex,Run,_
302 |
303 | # Include a hint for the correct naming format with invalid-name
304 | include-naming-hint=no
305 |
306 | # Naming hint for inline iteration names
307 | inlinevar-name-hint=[A-Za-z_][A-Za-z0-9_]*$
308 |
309 | # Regular expression matching correct inline iteration names
310 | inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$
311 |
312 | # Naming hint for method names
313 | # method-name-hint=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$
314 | method-name-hint=(test|[A-Z_])[a-zA-Z0-9_]*$
315 |
316 | # Regular expression matching correct method names
317 | # method-rgx=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$
318 | method-rgx=(test|[A-Z_])[a-zA-Z0-9_]*$
319 |
320 | # Naming hint for module names
321 | module-name-hint=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$
322 |
323 | # Regular expression matching correct module names
324 | module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$
325 |
326 | # Colon-delimited sets of names that determine each other's naming style when
327 | # the name regexes allow several styles.
328 | name-group=
329 |
330 | # Regular expression which should only match function or class names that do
331 | # not require a docstring.
332 | no-docstring-rgx=^_
333 |
334 | # List of decorators that produce properties, such as abc.abstractproperty. Add
335 | # to this list to register other decorators that produce valid properties.
336 | property-classes=abc.abstractproperty
337 |
338 | # Naming hint for variable names
339 | # variable-name-hint=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$
340 | variable-name-hint=(([a-z][a-z0-9_]*)|(_[a-z0-9_]*))$
341 |
342 | # Regular expression matching correct variable names
343 | # variable-rgx=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$
344 | variable-rgx=(([a-z][a-z0-9_]*)|(_[a-z0-9_]*))$
345 |
346 |
347 | [MISCELLANEOUS]
348 |
349 | # List of note tags to take in consideration, separated by a comma.
350 | notes=FIXME,XXX,TODO
351 |
352 |
353 | [FORMAT]
354 |
355 | # Expected format of line ending, e.g. empty (any line ending), LF or CRLF.
356 | expected-line-ending-format=
357 |
358 | # Regexp for a line that is allowed to be longer than the limit.
359 | ignore-long-lines=^\s*(# )??$
360 |
361 | # Number of spaces of indent required inside a hanging or continued line.
362 | indent-after-paren=4
363 |
364 | # String used as indentation unit. This is usually " " (4 spaces) or "\t" (1
365 | # tab).
366 | # indent-string=' '
367 | indent-string=' '
368 |
369 | # Maximum number of characters on a single line.
370 | max-line-length=80
371 |
372 | # Maximum number of lines in a module
373 | max-module-lines=1000
374 |
375 | # List of optional constructs for which whitespace checking is disabled. `dict-
376 | # separator` is used to allow tabulation in dicts, etc.: {1 : 1,\n222: 2}.
377 | # `trailing-comma` allows a space between comma and closing bracket: (a, ).
378 | # `empty-line` allows space-only lines.
379 | no-space-check=trailing-comma,dict-separator
380 |
381 | # Allow the body of a class to be on the same line as the declaration if body
382 | # contains single statement.
383 | single-line-class-stmt=no
384 |
385 | # Allow the body of an if to be on the same line as the test if there is no
386 | # else.
387 | single-line-if-stmt=no
388 |
389 |
390 | [SPELLING]
391 |
392 | # Spelling dictionary name. Available dictionaries: en_US (myspell).
393 | spelling-dict=
394 |
395 | # List of comma separated words that should not be checked.
396 | spelling-ignore-words=
397 |
398 | # A path to a file that contains private dictionary; one word per line.
399 | spelling-private-dict-file=
400 |
401 | # Tells whether to store unknown words to indicated private dictionary in
402 | # --spelling-private-dict-file option instead of raising a message.
403 | spelling-store-unknown-words=no
404 |
405 |
406 | [SIMILARITIES]
407 |
408 | # Ignore comments when computing similarities.
409 | ignore-comments=yes
410 |
411 | # Ignore docstrings when computing similarities.
412 | ignore-docstrings=yes
413 |
414 | # Ignore imports when computing similarities.
415 | ignore-imports=no
416 |
417 | # Minimum lines number of a similarity.
418 | min-similarity-lines=4
419 |
420 |
421 | [DESIGN]
422 |
423 | # Maximum number of arguments for function / method
424 | max-args=10
425 |
426 | # Maximum number of attributes for a class (see R0902).
427 | max-attributes=7
428 |
429 | # Maximum number of boolean expressions in a if statement
430 | max-bool-expr=5
431 |
432 | # Maximum number of branch for function / method body
433 | max-branches=12
434 |
435 | # Maximum number of locals for function / method body
436 | max-locals=15
437 |
438 | # Maximum number of parents for a class (see R0901).
439 | max-parents=7
440 |
441 | # Maximum number of public methods for a class (see R0904).
442 | max-public-methods=20
443 |
444 | # Maximum number of return / yield for function / method body
445 | max-returns=6
446 |
447 | # Maximum number of statements in function / method body
448 | max-statements=50
449 |
450 | # Minimum number of public methods for a class (see R0903).
451 | min-public-methods=2
452 |
453 |
454 | [CLASSES]
455 |
456 | # List of method names used to declare (i.e. assign) instance attributes.
457 | defining-attr-methods=__init__,__new__,setUp
458 |
459 | # List of member names, which should be excluded from the protected access
460 | # warning.
461 | exclude-protected=_asdict,_fields,_replace,_source,_make
462 |
463 | # List of valid names for the first argument in a class method.
464 | valid-classmethod-first-arg=cls
465 |
466 | # List of valid names for the first argument in a metaclass class method.
467 | valid-metaclass-classmethod-first-arg=mcs
468 |
469 |
470 | [IMPORTS]
471 |
472 | # Allow wildcard imports from modules that define __all__.
473 | allow-wildcard-with-all=no
474 |
475 | # Analyse import fallback blocks. This can be used to support both Python 2 and
476 | # 3 compatible code, which means that the block might have code that exists
477 | # only in one or another interpreter, leading to false positives when analysed.
478 | analyse-fallback-blocks=no
479 |
480 | # Deprecated modules which should not be used, separated by a comma
481 | deprecated-modules=optparse,tkinter.tix
482 |
483 | # Create a graph of external dependencies in the given file (report RP0402 must
484 | # not be disabled)
485 | ext-import-graph=
486 |
487 | # Create a graph of every (i.e. internal and external) dependencies in the
488 | # given file (report RP0402 must not be disabled)
489 | import-graph=
490 |
491 | # Create a graph of internal dependencies in the given file (report RP0402 must
492 | # not be disabled)
493 | int-import-graph=
494 |
495 | # Force import order to recognize a module as part of the standard
496 | # compatibility libraries.
497 | known-standard-library=
498 |
499 | # Force import order to recognize a module as part of a third party library.
500 | known-third-party=enchant
501 |
502 |
503 | [EXCEPTIONS]
504 |
505 | # Exceptions that will emit a warning when being caught. Defaults to
506 | # "Exception"
507 | overgeneral-exceptions=Exception
508 |
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | # How to Contribute
2 |
3 | We'd love to accept your patches and contributions to this project. There are
4 | just a few small guidelines you need to follow.
5 |
6 | ## Contributor License Agreement
7 |
8 | Contributions to this project must be accompanied by a Contributor License
9 | Agreement. You (or your employer) retain the copyright to your contribution;
10 | this simply gives us permission to use and redistribute your contributions as
11 | part of the project. Head over to to see
12 | your current agreements on file or to sign a new one.
13 |
14 | You generally only need to submit a CLA once, so if you've already submitted one
15 | (even if it was for a different project), you probably don't need to do it
16 | again.
17 |
18 | ## Code reviews
19 |
20 | All submissions, including submissions by project members, require review. We
21 | use GitHub pull requests for this purpose. Consult
22 | [GitHub Help](https://help.github.com/articles/about-pull-requests/) for more
23 | information on using pull requests.
24 |
25 | Before submitting a pull request for review, please make sure that your code
26 | follows the Style Guide, have proper tests implemented, and that they pass
27 | successfully.
28 |
29 | ```
30 | $ pip install -r requirements.txt
31 | $ python run_tests.py
32 | ```
33 |
34 | ## Style guide
35 |
36 | We primarily follow the Google Python Style Guide. Some variations are:
37 |
38 | * Quote strings as ' or """ instead of "
39 | * Textual strings should be Unicode strings so please include
40 | `from __future__ import unicode_literals` in new python files.
41 | * Use the format() function instead of the %-way of formatting strings.
42 | * Use positional or parameter format specifiers with typing e.g. '{0:s}' or
43 | '{text:s}' instead of '{0}', '{}' or '{:s}'. If we ever want to have
44 | language specific output strings we don't need to change the entire
45 | codebase. It also makes is easier in determining what type every parameter
46 | is expected to be.
47 | * Use "cls" as the name of the class variable in preference of "klass"
48 | * When catching exceptions use "as exception:" not some alternative form like
49 | "as error:" or "as details:"
50 | * Use textual pylint overrides e.g. "# pylint: disable=no-self-argument"
51 | instead of "# pylint: disable=E0213". For a list of overrides see:
52 | http://docs.pylint.org/features.html
53 |
54 | ## Community Guidelines
55 |
56 | This project follows [Google's Open Source Community
57 | Guidelines](https://opensource.google.com/conduct/).
58 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 |
2 | Apache License
3 | Version 2.0, January 2004
4 | http://www.apache.org/licenses/
5 |
6 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
7 |
8 | 1. Definitions.
9 |
10 | "License" shall mean the terms and conditions for use, reproduction,
11 | and distribution as defined by Sections 1 through 9 of this document.
12 |
13 | "Licensor" shall mean the copyright owner or entity authorized by
14 | the copyright owner that is granting the License.
15 |
16 | "Legal Entity" shall mean the union of the acting entity and all
17 | other entities that control, are controlled by, or are under common
18 | control with that entity. For the purposes of this definition,
19 | "control" means (i) the power, direct or indirect, to cause the
20 | direction or management of such entity, whether by contract or
21 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
22 | outstanding shares, or (iii) beneficial ownership of such entity.
23 |
24 | "You" (or "Your") shall mean an individual or Legal Entity
25 | exercising permissions granted by this License.
26 |
27 | "Source" form shall mean the preferred form for making modifications,
28 | including but not limited to software source code, documentation
29 | source, and configuration files.
30 |
31 | "Object" form shall mean any form resulting from mechanical
32 | transformation or translation of a Source form, including but
33 | not limited to compiled object code, generated documentation,
34 | and conversions to other media types.
35 |
36 | "Work" shall mean the work of authorship, whether in Source or
37 | Object form, made available under the License, as indicated by a
38 | copyright notice that is included in or attached to the work
39 | (an example is provided in the Appendix below).
40 |
41 | "Derivative Works" shall mean any work, whether in Source or Object
42 | form, that is based on (or derived from) the Work and for which the
43 | editorial revisions, annotations, elaborations, or other modifications
44 | represent, as a whole, an original work of authorship. For the purposes
45 | of this License, Derivative Works shall not include works that remain
46 | separable from, or merely link (or bind by name) to the interfaces of,
47 | the Work and Derivative Works thereof.
48 |
49 | "Contribution" shall mean any work of authorship, including
50 | the original version of the Work and any modifications or additions
51 | to that Work or Derivative Works thereof, that is intentionally
52 | submitted to Licensor for inclusion in the Work by the copyright owner
53 | or by an individual or Legal Entity authorized to submit on behalf of
54 | the copyright owner. For the purposes of this definition, "submitted"
55 | means any form of electronic, verbal, or written communication sent
56 | to the Licensor or its representatives, including but not limited to
57 | communication on electronic mailing lists, source code control systems,
58 | and issue tracking systems that are managed by, or on behalf of, the
59 | Licensor for the purpose of discussing and improving the Work, but
60 | excluding communication that is conspicuously marked or otherwise
61 | designated in writing by the copyright owner as "Not a Contribution."
62 |
63 | "Contributor" shall mean Licensor and any individual or Legal Entity
64 | on behalf of whom a Contribution has been received by Licensor and
65 | subsequently incorporated within the Work.
66 |
67 | 2. Grant of Copyright License. Subject to the terms and conditions of
68 | this License, each Contributor hereby grants to You a perpetual,
69 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
70 | copyright license to reproduce, prepare Derivative Works of,
71 | publicly display, publicly perform, sublicense, and distribute the
72 | Work and such Derivative Works in Source or Object form.
73 |
74 | 3. Grant of Patent License. Subject to the terms and conditions of
75 | this License, each Contributor hereby grants to You a perpetual,
76 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
77 | (except as stated in this section) patent license to make, have made,
78 | use, offer to sell, sell, import, and otherwise transfer the Work,
79 | where such license applies only to those patent claims licensable
80 | by such Contributor that are necessarily infringed by their
81 | Contribution(s) alone or by combination of their Contribution(s)
82 | with the Work to which such Contribution(s) was submitted. If You
83 | institute patent litigation against any entity (including a
84 | cross-claim or counterclaim in a lawsuit) alleging that the Work
85 | or a Contribution incorporated within the Work constitutes direct
86 | or contributory patent infringement, then any patent licenses
87 | granted to You under this License for that Work shall terminate
88 | as of the date such litigation is filed.
89 |
90 | 4. Redistribution. You may reproduce and distribute copies of the
91 | Work or Derivative Works thereof in any medium, with or without
92 | modifications, and in Source or Object form, provided that You
93 | meet the following conditions:
94 |
95 | (a) You must give any other recipients of the Work or
96 | Derivative Works a copy of this License; and
97 |
98 | (b) You must cause any modified files to carry prominent notices
99 | stating that You changed the files; and
100 |
101 | (c) You must retain, in the Source form of any Derivative Works
102 | that You distribute, all copyright, patent, trademark, and
103 | attribution notices from the Source form of the Work,
104 | excluding those notices that do not pertain to any part of
105 | the Derivative Works; and
106 |
107 | (d) If the Work includes a "NOTICE" text file as part of its
108 | distribution, then any Derivative Works that You distribute must
109 | include a readable copy of the attribution notices contained
110 | within such NOTICE file, excluding those notices that do not
111 | pertain to any part of the Derivative Works, in at least one
112 | of the following places: within a NOTICE text file distributed
113 | as part of the Derivative Works; within the Source form or
114 | documentation, if provided along with the Derivative Works; or,
115 | within a display generated by the Derivative Works, if and
116 | wherever such third-party notices normally appear. The contents
117 | of the NOTICE file are for informational purposes only and
118 | do not modify the License. You may add Your own attribution
119 | notices within Derivative Works that You distribute, alongside
120 | or as an addendum to the NOTICE text from the Work, provided
121 | that such additional attribution notices cannot be construed
122 | as modifying the License.
123 |
124 | You may add Your own copyright statement to Your modifications and
125 | may provide additional or different license terms and conditions
126 | for use, reproduction, or distribution of Your modifications, or
127 | for any such Derivative Works as a whole, provided Your use,
128 | reproduction, and distribution of the Work otherwise complies with
129 | the conditions stated in this License.
130 |
131 | 5. Submission of Contributions. Unless You explicitly state otherwise,
132 | any Contribution intentionally submitted for inclusion in the Work
133 | by You to the Licensor shall be under the terms and conditions of
134 | this License, without any additional terms or conditions.
135 | Notwithstanding the above, nothing herein shall supersede or modify
136 | the terms of any separate license agreement you may have executed
137 | with Licensor regarding such Contributions.
138 |
139 | 6. Trademarks. This License does not grant permission to use the trade
140 | names, trademarks, service marks, or product names of the Licensor,
141 | except as required for reasonable and customary use in describing the
142 | origin of the Work and reproducing the content of the NOTICE file.
143 |
144 | 7. Disclaimer of Warranty. Unless required by applicable law or
145 | agreed to in writing, Licensor provides the Work (and each
146 | Contributor provides its Contributions) on an "AS IS" BASIS,
147 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
148 | implied, including, without limitation, any warranties or conditions
149 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
150 | PARTICULAR PURPOSE. You are solely responsible for determining the
151 | appropriateness of using or redistributing the Work and assume any
152 | risks associated with Your exercise of permissions under this License.
153 |
154 | 8. Limitation of Liability. In no event and under no legal theory,
155 | whether in tort (including negligence), contract, or otherwise,
156 | unless required by applicable law (such as deliberate and grossly
157 | negligent acts) or agreed to in writing, shall any Contributor be
158 | liable to You for damages, including any direct, indirect, special,
159 | incidental, or consequential damages of any character arising as a
160 | result of this License or out of the use or inability to use the
161 | Work (including but not limited to damages for loss of goodwill,
162 | work stoppage, computer failure or malfunction, or any and all
163 | other commercial damages or losses), even if such Contributor
164 | has been advised of the possibility of such damages.
165 |
166 | 9. Accepting Warranty or Additional Liability. While redistributing
167 | the Work or Derivative Works thereof, You may choose to offer,
168 | and charge a fee for, acceptance of support, warranty, indemnity,
169 | or other liability obligations and/or rights consistent with this
170 | License. However, in accepting such obligations, You may act only
171 | on Your own behalf and on Your sole responsibility, not on behalf
172 | of any other Contributor, and only if You agree to indemnify,
173 | defend, and hold each Contributor harmless for any liability
174 | incurred by, or claims asserted against, such Contributor by reason
175 | of your accepting any such warranty or additional liability.
176 |
177 | END OF TERMS AND CONDITIONS
178 |
179 | APPENDIX: How to apply the Apache License to your work.
180 |
181 | To apply the Apache License to your work, attach the following
182 | boilerplate notice, with the fields enclosed by brackets "[]"
183 | replaced with your own identifying information. (Don't include
184 | the brackets!) The text should be enclosed in the appropriate
185 | comment syntax for the file format. We also recommend that a
186 | file or class name and description of purpose be included on the
187 | same "printed page" as the copyright notice for easier
188 | identification within third-party archives.
189 |
190 | Copyright [yyyy] [name of copyright owner]
191 |
192 | Licensed under the Apache License, Version 2.0 (the "License");
193 | you may not use this file except in compliance with the License.
194 | You may obtain a copy of the License at
195 |
196 | http://www.apache.org/licenses/LICENSE-2.0
197 |
198 | Unless required by applicable law or agreed to in writing, software
199 | distributed under the License is distributed on an "AS IS" BASIS,
200 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
201 | See the License for the specific language governing permissions and
202 | limitations under the License.
203 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # GiftStick
2 |
3 | 
4 |
5 | ## Summary
6 |
7 | This project contains code which allows an inexperienced user to easily (one
8 | click) upload forensics evidence (such as some information about the system,
9 | a full disk image as well as the system's firmware, if supported) from a
10 | target device (that will boot on an external device containing the code)
11 | to Google Cloud Storage.
12 |
13 | It supports configuring what artifacts to collect and which Cloud credentials
14 | to use.
15 |
16 | This is not an officially supported Google product.
17 |
18 | ## Usage
19 |
20 | ### Make a bootable disk image with the provided script
21 |
22 | In the `tools` directory, the script `remaster.sh` help you with the process of:
23 | * Creating a bootable USB disk image with the required dependencies
24 | * Make sure the image will boot on EFI enabled systems, as well as install
25 | [third-party input drivers for latest
26 | MacBook](https://github.com/cb22/macbook12-spi-driver)
27 | * Create a GCS bucket to receive the evidence, as well as a Service Account
28 | with the proper roles & ACL.
29 | * Make an icon on the system's Desktop with a clickable icon to start the
30 | acquisition process.
31 |
32 | It needs as input :
33 | * a [Xubuntu 20.04 ISO](https://xubuntu.org/download/) (won't work with non-XUbuntu, untested with versions different than 20.04)
34 | * the name of your GCP project
35 | * the name of the GCS bucket (remember those need to be globally unique)
36 |
37 | You need to have installed the [Google Cloud SDK](https://cloud.google.com/sdk/install)
38 | and have [SetUp the environment and logged
39 | in](https://cloud.google.com/sdk/docs/initializing). Then run:
40 |
41 | ```
42 | bash tools/remaster.sh \
43 | --project some-forensics-project-XYZ \
44 | --bucket giftstick-uploads-XYZ
45 | --source_iso xubuntu-20.04-desktop-amd64.iso
46 | ```
47 |
48 |
49 | ### Manually set up the required Google Cloud environment & call the script
50 |
51 | First, the script needs credentials (for example, of a Service Account) that
52 | provide the following roles (see [IAM
53 | roles](https://cloud.google.com/storage/docs/access-control/iam-roles)):
54 | * `roles/storage.objectCreator`, to be able to create (but not overwrite) new
55 | storage objects,
56 | * (optional) `roles/logging.logWriter` for the StackDriver logging system.
57 |
58 | These credentials needs to be downloaded and saved as a JSON file. For
59 | example, using a Service Account named
60 | `uploader@giftstick-project.iam.gserviceaccount.com`, you can create a new key
61 | and save it as `credentials.json`:
62 |
63 | ```
64 | gcloud iam service-accounts --project giftstick-project keys create \
65 | --iam-account "uploader@giftstick-project.iam.gserviceaccount.com" \
66 | credentials.json
67 | ```
68 |
69 | Now pull the code and install dependencies
70 | ```
71 | git clone https://github.com/google/GiftStick
72 | cd GiftStick
73 | pip3 install -r requirements.txt
74 | ```
75 |
76 | Unfortunately, because of
77 | [boto/boto#3699](https://github.com/boto/boto/pull/3699), some patches are
78 | required to work in a Python3 environment:
79 |
80 | ```
81 | $ boto_dir=$(python -c "import boto; print(boto.__path__[0])")
82 | $ patch -p0 "${boto_dir}/connection.py" config/patches/boto_pr3561_connection.py.patch
83 | $ patch -p0 "${boto_dir}/s3/key.py" config/patches/boto_pr3561_key.py.patch
84 | ```
85 |
86 | Once you have booted the system to acquire evidence from that newly created
87 | USB stick, and upload it to a GCS url
88 | `gs://giftstick-bucket/forensics_evidence/` you can run the acquisition script
89 | this way:
90 |
91 | ```
92 | cd auto_forensicate
93 | sudo python auto_forensicate.py \
94 | --gs_keyfile=credentials.json \
95 | --logging stdout \
96 | --acquire all \
97 | gs://giftstick-bucket/forensics_evidence/
98 | ```
99 |
100 | You'll then get the following hierarchy in your GCS bucket:
101 |
102 | ```
103 | gs://giftstick-bucket/forensics_evidence/20181104-1543/SYSTEM_SERIAL/system_info.txt
104 | gs://giftstick-bucket/forensics_evidence/20181104-1543/SYSTEM_SERIAL/stamp.json
105 | gs://giftstick-bucket/forensics_evidence/20181104-1543/SYSTEM_SERIAL/Disks/
106 | gs://giftstick-bucket/forensics_evidence/20181104-1543/SYSTEM_SERIAL/Disks/sda.hash
107 | gs://giftstick-bucket/forensics_evidence/20181104-1543/SYSTEM_SERIAL/Disks/sda.image
108 | ```
109 |
110 | ## Dependencies
111 |
112 | The auto_acquisition scripts need Python3 and have been tested to work with
113 | 20.04 LTS version of Xubuntu. Previous versions should still work but are not
114 | actively supported.
115 |
116 | The following packages should be installed in the system you're booting into:
117 |
118 | * `sudo apt install dcfldd python-pip zenity`
119 | * For Chipsec (optional)
120 | `apt install python-dev libffi-dev build-essential gcc nasm`
121 |
122 |
123 | ## Acquired evidence
124 |
125 | Currently the script uploads the following data:
126 |
127 | * System information (output of `dmidecode`)
128 | * For each block device that is most likely an internal disk:
129 | * all the bytes
130 | * hashes
131 | * the device's information (output of udevadm)
132 | * The system's firmware, dumped with
133 | [Chipsec](https://github.com/chipsec/chipsec)
134 |
135 | It also can upload a folder (for example a mounted filesystem) with
136 | `--acquire directory`. In this case, the script will build a `.tar` file, and
137 | upload it alongside a corresponding `.timeline`, which is a
138 | [bodyfile](https://wiki.sleuthkit.org/index.php?title=Body_file) compatible file
139 | generated with the `find` command (and `stat`, if run on MacOS).
140 |
141 |
142 | ## FAQ
143 |
144 | Some answers to Frequenly Asked Questions can be [found here](doc/FAQ.md)
145 |
--------------------------------------------------------------------------------
/auto_forensicate/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright 2018 Google LLC
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # https://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
--------------------------------------------------------------------------------
/auto_forensicate/errors.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright 2018 Google LLC
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # https://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 | """Custom errors for the acquisition script."""
16 |
17 | from __future__ import unicode_literals
18 |
19 |
20 | class BadConfigOption(Exception):
21 | """Raised when a faulty configuration option is encountered."""
22 |
23 |
24 | class ForensicateError(Exception):
25 | """Raised when acquisition failed."""
26 |
27 |
28 | class RecipeException(Exception):
29 | """Raised when a Recipe failed acquiring Artifacts."""
30 |
31 |
32 | class RetryableError(ForensicateError):
33 | """Raised when acquisition failed but should be retried."""
34 |
--------------------------------------------------------------------------------
/auto_forensicate/hostinfo.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright 2018 Google LLC
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # https://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 | """Helpers class for accessing system information."""
16 |
17 | from __future__ import unicode_literals
18 |
19 | import os
20 | import subprocess
21 | import time
22 | import uuid
23 |
24 |
25 | def ReadDMI(name):
26 | """Reads a DMI value from the /sys filesystem.
27 |
28 | Args:
29 | name (str): the name of the DMI value to read.
30 |
31 | Returns:
32 | str: the DMI value, or None if not available.
33 | """
34 | dmi_value = None
35 | dmi_path = os.path.join('/sys/class/dmi/id/', name)
36 | try:
37 | with open(dmi_path, 'r') as d_f:
38 | dmi_value = d_f.read().strip()
39 | except IOError:
40 | pass
41 | return dmi_value
42 |
43 |
44 | def GetChassisSerial():
45 | """Gets the system's chassis serial number.
46 |
47 | Returns:
48 | str: the serial number.
49 | """
50 | return ReadDMI('chassis_serial')
51 |
52 |
53 | def GetMachineUUID():
54 | """Gets the system's product UUID.
55 |
56 | Returns:
57 | str: the product UUID.
58 | """
59 | return ReadDMI('product_uuid')
60 |
61 |
62 | def GetRandomUUID():
63 | """Generates a random UUID.
64 |
65 | Returns:
66 | str: the UUID.
67 | """
68 | return str(uuid.uuid4())
69 |
70 |
71 | def GetIdentifier():
72 | """Gets an identifier for the machine.
73 |
74 | It first tries to use the machine's serial number, then the machine's UUID,
75 | and defaults to a random UUID.
76 |
77 | Returns:
78 | str: the identifier.
79 | """
80 | identifier = (GetChassisSerial() or
81 | GetMachineUUID() or
82 | GetRandomUUID())
83 | return identifier
84 |
85 |
86 | def GetUdevadmInfo(device_name):
87 | """Uses udevadm to pull metadata for a device.
88 |
89 | Args:
90 | device_name(str): the name of the device. ie: 'sda'
91 |
92 | Returns:
93 | dict: a dictionary of udev properties.
94 | """
95 | device_metadata = {}
96 | udevadm_path = Which('udevadm')
97 | cmd = [udevadm_path, 'info', '--query', 'property', '--name', device_name]
98 | udevadm_output = subprocess.check_output(cmd).decode()
99 | device_metadata['udevadm_text_output'] = udevadm_output
100 | for line in udevadm_output.split('\n'):
101 | try:
102 | key, value = line.strip().split('=', 1)
103 | device_metadata[key] = value
104 | except ValueError:
105 | pass
106 | return device_metadata
107 |
108 |
109 | def GetTime():
110 | """Returns the current time as a iso string."""
111 | return time.strftime('%Y%m%d-%H%M%S', time.gmtime())
112 |
113 |
114 | def Which(cmd):
115 | """Searches for a binary in the current PATH environment variable.
116 |
117 | Args:
118 | cmd(str): the binary to search for.
119 | Returns:
120 | str: the first found path to a binary with the same name, or None.
121 | """
122 | path_list = os.environ.get('PATH', os.defpath).split(os.pathsep)
123 | for directory in path_list:
124 | name = os.path.join(directory, cmd)
125 | if os.path.isdir(name):
126 | continue
127 | if os.path.exists(name) and os.access(name, os.F_OK | os.X_OK):
128 | return name
129 | return None
130 |
--------------------------------------------------------------------------------
/auto_forensicate/macdisk.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright 2018 Google LLC
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # https://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 | """Helper functions to handle Mac OS information."""
16 |
17 | import plistlib
18 | import subprocess
19 | import sys
20 |
21 |
22 | class MacDiskError(Exception):
23 | """Module specific exception class."""
24 |
25 |
26 | def _DictFromSubprocess(command):
27 | """Returns a dict based upon a subprocess call with a -plist argument.
28 |
29 | Args:
30 | command(list(str)): the command to be executed as a list.
31 | Returns:
32 | dict: dictionary from command output.
33 | Raises:
34 | MacDiskError: if the command failed to run.
35 | """
36 |
37 | try:
38 | task = subprocess.Popen(
39 | command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
40 | except OSError as e:
41 | raise MacDiskError('Could not execute: {0:s}'.format(e.strerror))
42 | (stdout, stderr) = task.communicate()
43 |
44 | if task.returncode != 0:
45 | raise MacDiskError(
46 | 'Error running command: {0:s}, stderr: {1:s}' .format(
47 | ' '.join(command), stderr))
48 |
49 | try:
50 | return plistlib.loads(stdout)
51 | except Exception:
52 | raise MacDiskError(
53 | 'Error creating plist from output: {0:s}'.format(stdout))
54 |
55 |
56 | def _DictFromDiskutilInfo(deviceid):
57 | """Calls diskutil info for a specific device ID.
58 |
59 | Args:
60 | deviceid(string): a given device id for a disk like object.
61 | Returns:
62 | dict: resulting plist output.
63 | Raises:
64 | MacDiskError: if deviceid is invalid.
65 | """
66 | command = ['/usr/sbin/diskutil', 'info', '-plist', deviceid]
67 | return _DictFromSubprocess(command)
68 |
69 |
70 | def _DictFromDiskutilList():
71 | """Calls diskutil list -plist and returns as dict.
72 |
73 | Returns:
74 | dict: resulting plist output
75 | """
76 |
77 | command = ['/usr/sbin/diskutil', 'list', '-plist']
78 | return _DictFromSubprocess(command)
79 |
80 |
81 | def WholeDisks():
82 | """Returns a list of all disk objects that are whole disks."""
83 | wholedisks = []
84 | try:
85 | diskutil_dict = _DictFromDiskutilList()
86 | for deviceid in diskutil_dict['WholeDisks']:
87 | wholedisks.append(Disk(deviceid))
88 | except KeyError:
89 | raise MacDiskError('Unable to list all partitions.')
90 | return wholedisks
91 |
92 |
93 | class Disk(object):
94 | """Represents a Mac disk object.
95 |
96 | Note that this also is used for currently mounted disk images as they
97 | really are just 'disks'. Mostly. Can take device ids of the form 'disk1' or
98 | of the form '/dev/disk1'.
99 | """
100 |
101 | def __init__(self, deviceid):
102 | """Initializes a MacDisk object.
103 |
104 | Args:
105 | deviceid(str): Name (or path) to a disk
106 | """
107 | if deviceid.startswith('/dev/'):
108 | deviceid = deviceid.replace('/dev/', '', 1)
109 | self.deviceid = deviceid
110 | self.Refresh()
111 |
112 | def Refresh(self):
113 | """Builds a list of convenience attributes for direct querying."""
114 |
115 | self._attributes = _DictFromDiskutilInfo(self.deviceid)
116 | # These are the keys we are interested in
117 | keys = ['Internal', 'DeviceIdentifier', 'BusProtocol', 'VirtualOrPhysical',
118 | 'DeviceIdentifier', 'TotalSize']
119 |
120 | for key in keys:
121 | try:
122 | attribute = key.lower().replace(' ', '')
123 | setattr(self, attribute, self._attributes[key])
124 | except KeyError: # not all objects have all these attributes
125 | pass
126 |
--------------------------------------------------------------------------------
/auto_forensicate/recipes/__init__.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright 2018 Google LLC
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # https://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 |
--------------------------------------------------------------------------------
/auto_forensicate/recipes/base.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright 2018 Google LLC
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # https://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 | """Base classes for Artifacts and Recipes."""
16 |
17 | from __future__ import unicode_literals
18 |
19 | import io
20 | import logging
21 | import os
22 | import shutil
23 | import subprocess
24 | import sys
25 | import tempfile
26 | import six
27 |
28 | from auto_forensicate import errors
29 |
30 |
31 | class BaseArtifact(object):
32 | """BaseArtifact class.
33 |
34 | Attributes:
35 | logger (Logger): a Logger object.
36 | name (str): the name of the artifact.
37 | remote_path (str): the path to the artifact in the remote storage.
38 | size (int): the size of the artifact, in bytes.
39 | """
40 |
41 | def __init__(self, name):
42 | """Initializes a BaseArtifact object.
43 |
44 | Args:
45 | name (str): the name of the artifact.
46 |
47 | Raises:
48 | ValueError: if the name is empty or None.
49 | """
50 | self._size = 0
51 | self._stream = None
52 | if name:
53 | self.name = name
54 | else:
55 | raise ValueError('The name of the artifact must not be None or empty.')
56 | self.remote_path = 'Base/{0:s}'.format(self.name)
57 |
58 | self._logger = logging.getLogger(self.__class__.__name__)
59 |
60 | def _GetStream(self):
61 | """Get access to the file-like object.
62 |
63 | Raises:
64 | NotImplementedError: If this method is not implemented.
65 | """
66 | class_name = type(self).__name__
67 | raise NotImplementedError(
68 | '_GetStream() is not implemented in {0:s}'.format(class_name))
69 |
70 | def CloseStream(self):
71 | """Closes the file-like object.
72 |
73 | Raises:
74 | IOError: if this method is called before OpenStream.
75 | """
76 | if self._stream:
77 | self._logger.debug('Closing stream')
78 | self._stream.close()
79 | else:
80 | raise IOError('Illegal call to CloseStream() before OpenStream()')
81 |
82 | def OpenStream(self):
83 | """Get the file-like object to the data of the artifact.
84 |
85 | Returns:
86 | file: Read-only file-like object to the data.
87 | """
88 | self._logger.debug('Opening stream')
89 | if not self._stream:
90 | # pylint: disable=assignment-from-no-return
91 | self._stream = self._GetStream()
92 |
93 | return self._stream
94 |
95 | @property
96 | def size(self):
97 | """The size of the artifact.
98 |
99 | Returns:
100 | int: the size of the artifact in bytes.
101 | """
102 | return self._size
103 |
104 | @property
105 | def readable_size(self):
106 | """The size of the artifact, in human readable form.
107 |
108 | Returns:
109 | str: the size of the artifact in human readable form.
110 | """
111 | current_size = self._size
112 | if current_size == 0:
113 | return 'Unknown size'
114 | for unit in ['B', 'KiB', 'MiB', 'GiB', 'TiB']:
115 | if abs(current_size) < 1024:
116 | return '{0:3,.1f}{1:s}'.format(current_size, unit)
117 | current_size /= 1024.0
118 | return '{0:,.1f}{1:s}'.format(current_size, 'PiB')
119 |
120 |
121 | class StringArtifact(BaseArtifact):
122 | """Class for an artifact that uploads a string to a file."""
123 |
124 | def __init__(self, path, string_content):
125 | """Initializes a StringArtifact object.
126 |
127 | Args:
128 | path (str): the path to the artifact in the remote storage.
129 | string_content (str): the string to upload.
130 |
131 | Raises:
132 | ValueError: if the path doesn't point to a file.
133 | """
134 | super(StringArtifact, self).__init__(os.path.basename(path))
135 | if isinstance(string_content, six.text_type):
136 | self._data = string_content.encode('utf-8')
137 | else:
138 | self._data = string_content
139 | self._size = len(self._data)
140 | self.remote_path = path
141 | self._stream = None
142 |
143 | def _GetStream(self):
144 | """Get access to the file-like object."""
145 | if self._stream is None:
146 | self._stream = io.BytesIO(self._data)
147 | return self._stream
148 |
149 | def CloseStream(self):
150 | if self._stream is None:
151 | raise IOError('Should not call CloseStream() before GetStream()')
152 | self._stream.close()
153 |
154 |
155 | class FileArtifact(BaseArtifact):
156 | """Class for an artifact to upload a File."""
157 |
158 | def __init__(self, path):
159 | """Initializes a FileArtifact object.
160 |
161 | Args:
162 | path (str): the absolute, or relative to the recipe's temporary, path to
163 | the file.
164 |
165 | Raises:
166 | ValueError: if the path doesn't point to a file.
167 | """
168 | super(FileArtifact, self).__init__(os.path.basename(path))
169 | self._path = path
170 |
171 | if os.path.isfile(path):
172 | self._size = os.stat(path).st_size
173 | self.remote_path = 'Files/{0:s}'.format(self.name)
174 |
175 | def _GetStream(self):
176 | """Get the file-like object to the data of the artifact.
177 |
178 | Returns:
179 | file: Read-only file-like object to the data.
180 | """
181 | return open(os.path.realpath(self._path), 'rb')
182 |
183 |
184 | class ProcessOutputArtifact(BaseArtifact):
185 | """Class for an artifact to upload the output of a command."""
186 |
187 | def __init__(self, command, path, ignore_failure=False):
188 | """Initializes a ProcessOutputArtifact object.
189 |
190 | Args:
191 | command (list): the command to run as subprocess.
192 | path (str): the remote path to store the output of the command.
193 | ignore_failure (bool): set to True to not raise if the command failed to
194 | run.
195 |
196 | Raises:
197 | errors.RecipeException: if the command failed to run.
198 | """
199 | super(ProcessOutputArtifact, self).__init__(os.path.basename(path))
200 | self.remote_path = path
201 | self._buffered_content = None
202 | self._command = command
203 | self._ignore_failure = ignore_failure
204 |
205 | def _RunCommand(self):
206 | """Run a command.
207 |
208 | Returns:
209 | bytes: the command output, or an error if it failed to run.
210 | """
211 | command_output = ''
212 | process = subprocess.Popen(
213 | self._command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
214 | self._logger.info('Running command \'%s\'', self._command)
215 | output, error = process.communicate()
216 |
217 | if process.returncode == 0:
218 | command_output = output
219 | self._logger.info('Command %s terminated.', self._command)
220 | self._logger.debug('stderr : \'%s\'', error.strip())
221 | else:
222 | command_output = (
223 | 'Command \'{0!s}\' failed with \'{1!s}\' return code {2:d})'.format(
224 | self._command, error.strip(), process.returncode))
225 | self._logger.error(command_output)
226 | command_output = command_output.encode()
227 | if not self._ignore_failure:
228 | raise errors.RecipeException(
229 | 'Error running ProcessOutputArtifact command')
230 |
231 | return command_output
232 |
233 | def _GetStream(self):
234 | """Get the file-like object to the data of the artifact.
235 |
236 | Returns:
237 | file: Read-only file-like object to the data.
238 | """
239 | if not self._buffered_content:
240 | command_output = self._RunCommand()
241 | self._size = len(command_output)
242 | self._buffered_content = io.BytesIO(command_output)
243 | return self._buffered_content
244 |
245 |
246 | class BaseRecipe(object):
247 | """BaseRecipe class."""
248 |
249 | def __init__(self, name, options=None):
250 | """Initializes a BaseRecipe object.
251 |
252 | Args:
253 | name(str): the name of the Recipe.
254 | options(argparse.Namespace): options parsed from the command line.
255 |
256 | Raises:
257 | ValueError: if the name parameter is None.
258 | """
259 | self._platform = sys.platform
260 | self._workdir = None
261 | if name:
262 | self.name = name
263 | else:
264 | raise ValueError('A Recipe needs a name')
265 | self._options = options
266 | self._origin_dir = os.getcwd()
267 |
268 | self._logger = logging.getLogger(self.__class__.__name__)
269 |
270 | def __enter__(self):
271 | self._workdir = tempfile.mkdtemp()
272 | os.chdir(self._workdir)
273 | return self
274 |
275 | def __exit__(self, exc_type, exc_value, traceback):
276 | os.chdir(self._origin_dir)
277 | shutil.rmtree(self._workdir)
278 |
279 | def GetArtifacts(self):
280 | """Provides a list of Artifacts to upload.
281 |
282 | Returns:
283 | list(BaseArtifact): the artifacts to copy.
284 | """
285 | return list()
286 |
--------------------------------------------------------------------------------
/auto_forensicate/recipes/directory.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright 2018 Google LLC
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # https://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 | """Handles the acquisition of the directories."""
16 |
17 | import os
18 | import subprocess
19 |
20 | from auto_forensicate import errors
21 | from auto_forensicate.recipes import base
22 | from auto_forensicate.ux import cli
23 | from auto_forensicate.ux import gui
24 |
25 |
26 | def FullPathToName(path):
27 | """Converts a directory path to a name used to save the remote archive.
28 |
29 | Args:
30 | path(str): the path.
31 |
32 | Returns:
33 | str: the name.
34 | """
35 | # In order to limit collision, use the full path, but remove separators
36 | # as this will confuse GCS
37 | return path.replace(os.path.sep, '_')
38 |
39 |
40 | class DirectoryArtifact(base.BaseArtifact):
41 | """The DirectoryArtifact class.
42 |
43 | Attributes:
44 | name (str): the name of the artifact.
45 | remote_path (str): the path to the artifact in the remote storage.
46 | size (int): the size of the artifact, in bytes.
47 | """
48 |
49 | _SUPPORTED_METHODS = ['tar']
50 |
51 | _TAR_COMMAND = [
52 | 'tar', '-c', '-p', '--xattrs', '--acls', '--format=posix', '-f', '-']
53 |
54 | def __init__(self, path, method='tar', compress=False):
55 | """Initializes a DirectoryArtifact object.
56 |
57 | Args:
58 | path(str): the path to the directory.
59 | method(str): the method used for acquisition.
60 | compress(bool): whether to use compression (not supported by all methods).
61 |
62 | Raises:
63 | ValueError: if path is none, or doesn't exist
64 | """
65 | super(DirectoryArtifact, self).__init__(FullPathToName(path))
66 |
67 | if not os.path.exists(path):
68 | raise ValueError(
69 | 'Error with path {0:s} does not exist'.format(path))
70 |
71 | self.path = path
72 | self._size = self._GetSize()
73 | self._copy_command = None
74 | self._method = method
75 | self._compress = compress
76 | if self._method == 'tar':
77 | self.remote_path = 'Directories/{0:s}.tar'.format(self.name)
78 |
79 | if self._compress:
80 | self.remote_path = self.remote_path + '.gz'
81 |
82 | def _GetSize(self):
83 | """Gets the size of the directory to export.
84 |
85 | Returns:
86 | int: The size of the directory in bytes.
87 | """
88 | self._logger.info('Calculating size of "{0:s}"'.format(self.path))
89 | du_process = subprocess.run(
90 | ['du', '-s', '-k', self.path], stdout=subprocess.PIPE, check=False)
91 | du_output = int(du_process.stdout.split()[0]) * 1024
92 | return du_output
93 |
94 | def _GetStream(self):
95 | """Get the file-like object to the data of the artifact.
96 |
97 | Returns:
98 | file: Read-only file-like object to the data.
99 |
100 | Raises:
101 | IOError: If this method is called more than once before CloseStream().
102 | """
103 | if self._copy_command is None:
104 | self._copy_command = self._GenerateCopyCommand()
105 | self._logger.info(
106 | 'Copying directory with command \'{0!s}\''.format(self._copy_command))
107 | self._copyprocess = subprocess.Popen(
108 | self._copy_command, stdin=None,
109 | stdout=subprocess.PIPE, stderr=subprocess.PIPE)
110 | else:
111 | raise IOError('Directory is already being acquired')
112 |
113 | return self._copyprocess.stdout
114 |
115 | def CloseStream(self):
116 | """Closes the file-like object.
117 |
118 | Returns:
119 | str: a return message for the report.
120 |
121 | Raises:
122 | subprocess.CalledProcessError: if the copy process returns with an error.
123 | IOError: if CloseStream() is called before GetStream().
124 | """
125 | if not self._copyprocess:
126 | raise IOError('Illegal call to CloseStream() before GetStream()')
127 |
128 | # If there is anything still to read from the subprocess then CloseStream
129 | # has been called early, terminate the child process to avoid deadlock.
130 | character = self._copyprocess.stdout.read(1)
131 | if character:
132 | self._copyprocess.terminate()
133 | raise subprocess.CalledProcessError(
134 | 0, self._copy_command[0],
135 | 'CloseStream() called but stdout still had data')
136 |
137 | self._copyprocess.wait()
138 | code = self._copyprocess.returncode
139 | error = self._copyprocess.stderr.read()
140 | if code < 0:
141 | raise subprocess.CalledProcessError(code, self._copy_command[0], error)
142 | return error
143 |
144 | def _GenerateCopyCommand(self):
145 | """Builds the command to run on the directory.
146 |
147 | Returns:
148 | list: the argument list for the dd command
149 | """
150 | if self._method == 'tar':
151 | return self._GenerateTarCopyCommand()
152 | else:
153 | raise errors.RecipeException('Unsupported method '+self._method)
154 |
155 | def _GenerateTarCopyCommand(self):
156 | """Creates the full command to execute for the copy.
157 |
158 | Returns:
159 | list(str): the command to run.
160 | """
161 |
162 | command = self._TAR_COMMAND
163 | if self._compress:
164 | command.append('-z')
165 |
166 | command.append(self.path)
167 |
168 | return command
169 |
170 |
171 | class LinuxDirectoryArtifact(DirectoryArtifact):
172 | """The LinuxDirectoryArtifact class."""
173 |
174 | def __init__(self, path, method='tar', compress=False):
175 | """Initializes a LinuxDirectoryArtifact object."""
176 | if method not in self._SUPPORTED_METHODS:
177 | raise errors.RecipeException(
178 | 'Unsupported acquisition method on Linux: '+method)
179 | super().__init__(path, method=method, compress=compress)
180 |
181 |
182 | class MacDirectoryArtifact(DirectoryArtifact):
183 | """The MacDirectoryArtifact class."""
184 |
185 | def __init__(self, path, method='tar', compress=False):
186 | """Initializes a MacDirectoryArtifact object."""
187 | if method not in self._SUPPORTED_METHODS:
188 | raise errors.RecipeException(
189 | 'Unsupported acquisition method on Darwin: '+method)
190 | super().__init__(path, method=method, compress=compress)
191 |
192 |
193 | class DirectoryRecipe(base.BaseRecipe):
194 | """The DirectoryRecipe class.
195 |
196 | This Recipe acquires a mounted filesystem.
197 | """
198 |
199 | def GetArtifacts(self):
200 | """Returns a list of DirectoryArtifacts to acquire.
201 |
202 | Returns:
203 | list(DirectoryArtifacts): the artifacts to acquire.
204 | """
205 | more_to_copy = True
206 | path_list = []
207 | while more_to_copy:
208 | if getattr(self._options, 'no_zenity', False):
209 | path = cli.AskText(
210 | 'Specify the path to the directory you wish to copy')
211 | if not os.path.isdir(path):
212 | print(
213 | 'The following path does not exist or is not a directory:'
214 | '{0:s}'.format(path))
215 | continue
216 | path_list.append(path)
217 | more_to_copy = cli.Confirm('Do you wish to copy another folder?')
218 | else:
219 | path = gui.AskText(
220 | 'Specify the path to the directory you wish to copy')
221 | if not os.path.isdir(path):
222 | # TODO: display an GUI error message
223 | continue
224 | path_list.append(path)
225 | more_to_copy = gui.Confirm('Do you wish to copy another folder?')
226 |
227 | if not path_list:
228 | raise errors.RecipeException('No directory to collect')
229 |
230 | artifacts = []
231 | # Deduplicating paths, as they would cause the code to upload the same file
232 | # multiple times, which might not be allowed by the uploading process.
233 | for directory in list(set(path_list)):
234 |
235 | # 'tar' will not save some metadata such as access time. We generate
236 | # a 'timeline' with the find(1) command to keep this information
237 | timeline_artifact = None
238 | dir_artifact = None
239 | if self._platform == 'darwin':
240 | timeline_artifact = base.ProcessOutputArtifact(
241 | ['find', directory, '-exec', 'stat', '-f',
242 | '0|%N|%i|%p|%u|%u|%z|%a.0|%m.0|%c.0|%B.0', '-t', '%s', '{}',
243 | ';'], 'Directories/{0:s}.timeline'.format(FullPathToName(path)))
244 | dir_artifact = MacDirectoryArtifact(
245 | directory, method=self._options.method,
246 | compress=self._options.compress)
247 | elif self._platform == 'linux':
248 | timeline_artifact = base.ProcessOutputArtifact(
249 | ['find', directory, '-xdev', '-printf',
250 | '0|%p|%i|%M|%U|%G|%s|%A@|%T@|%C@|0\n'],
251 | 'Directores/{0:s}.timeline'.format(FullPathToName(path)))
252 |
253 | dir_artifact = LinuxDirectoryArtifact(
254 | directory, method=self._options.method,
255 | compress=self._options.compress)
256 | else:
257 | raise ValueError('Unsupported platform: {0:s}'.format(self._platform))
258 |
259 | artifacts.append(timeline_artifact)
260 | artifacts.append(dir_artifact)
261 |
262 | return artifacts
263 |
--------------------------------------------------------------------------------
/auto_forensicate/recipes/disk.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright 2018 Google LLC
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # https://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 | """Handles the acquisition of the raw disk image."""
16 |
17 | from __future__ import unicode_literals
18 |
19 | import json
20 | import os
21 | import subprocess
22 |
23 | from auto_forensicate import errors
24 | from auto_forensicate import hostinfo
25 | from auto_forensicate import macdisk
26 | from auto_forensicate.recipes import base
27 | from auto_forensicate.ux import cli
28 | from auto_forensicate.ux import gui
29 |
30 |
31 | class DiskArtifact(base.BaseArtifact):
32 | """The DiskArtifact class.
33 |
34 | Attributes:
35 | hashlog_filename (str): where dcfldd will store the hashes.
36 | name (str): the name of the artifact.
37 | remote_path (str): the path to the artifact in the remote storage.
38 | size (int): the size of the artifact, in bytes.
39 | """
40 |
41 | _DD_BINARY = 'dcfldd'
42 | _DD_OPTIONS = ['hash=md5,sha1', 'bs=2M', 'conv=noerror', 'hashwindow=128M']
43 |
44 | def __init__(self, path, size, mounted=False, use_dcfldd=True):
45 | """Initializes a DiskArtifact object.
46 |
47 | Only supported implementations are MacOS and Linux.
48 |
49 | Args:
50 | path(str): the path to the disk.
51 | size(str): the size of the disk.
52 | mounted(bool): whether the disk has a mounted partition.
53 | use_dcfldd(bool): whether to use dcfldd to read from the blockdevice.
54 |
55 | Raises:
56 | ValueError: if path is none, doesn't start with '/dev' or size is =< 0.
57 | """
58 | super(DiskArtifact, self).__init__(os.path.basename(path))
59 | if not path.startswith('/dev'):
60 | raise ValueError(
61 | 'Error with path {0:s}: should start with \'/dev\''.format(path))
62 | self._ddprocess = None
63 | self.mounted = mounted
64 | self.use_dcfldd = use_dcfldd
65 | self._path = path
66 | if size > 0:
67 | self._size = size
68 | else:
69 | raise ValueError('Disk size must be an integer > 0')
70 | self.hashlog_filename = '{0:s}.hash'.format(self.name)
71 | self.remote_path = 'Disks/{0:s}.image'.format(self.name)
72 |
73 | def _GenerateDDCommand(self):
74 | """Builds the DD command to run on the disk.
75 |
76 | Returns:
77 | list: the argument list for the dd command
78 | """
79 | dd_binary = hostinfo.Which(self._DD_BINARY)
80 | if not dd_binary:
81 | raise errors.RecipeException(
82 | 'Could not find \'{0:s}\''.format(self._DD_BINARY))
83 | command = [
84 | dd_binary, 'if={0:s}'.format(self._path),
85 | 'hashlog={0:s}'.format(self.hashlog_filename)]
86 | command.extend(self._DD_OPTIONS)
87 | return command
88 |
89 | def _GetStream(self):
90 | """Get the file-like object to the data of the artifact.
91 |
92 | Returns:
93 | file: Read-only file-like object to the data.
94 |
95 | Raises:
96 | IOError: If this method is called more than once before CloseStream().
97 | """
98 | if self.use_dcfldd:
99 | if self._ddprocess is None:
100 | command = self._GenerateDDCommand()
101 | self._logger.info('Opening disk with command \'{0!s}\''.format(command))
102 | self._ddprocess = subprocess.Popen(
103 | command, stdin=None, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
104 | else:
105 | raise IOError('Disk is already opened')
106 | return self._ddprocess.stdout
107 | else:
108 | self._stream = open(self._path, 'rb')
109 | return self._stream
110 |
111 | def CloseStream(self):
112 | """Closes the file-like object.
113 |
114 | Returns:
115 | str: a return message for the report.
116 |
117 | Raises:
118 | errors.RecipeException: if the dcfldd process returns with an error.
119 | IOError: if CloseStream() is called before GetStream().
120 | """
121 | if not self.use_dcfldd:
122 | self._stream.close()
123 | else:
124 | if not self._ddprocess:
125 | raise IOError('Illegal call to CloseStream() before GetStream()')
126 |
127 | # If there is anything still to read from the subprocess then CloseStream
128 | # has been called early, terminate the child process to avoid deadlock.
129 | c = self._ddprocess.stdout.read(1)
130 | if c:
131 | # TODO log this
132 | self._ddprocess.terminate()
133 | raise errors.RecipeException(
134 | 'CloseStream() called but stdout still had data')
135 |
136 | self._ddprocess.wait()
137 | code = self._ddprocess.returncode
138 | error = self._ddprocess.stderr.read()
139 | if code > 0:
140 | raise errors.RecipeException(
141 | 'Command dcfldd returned non-zero exit status {0:d}, '
142 | 'with error: "{1:s}"'.format(code, error.decode()))
143 | return error
144 |
145 | def GetDescription(self):
146 | """Get a human readable description about the device.
147 |
148 | Returns:
149 | str: the description
150 | """
151 | description = 'Name: {0:s} (Size: {1:d})'.format(self.name, self.size)
152 | if self.mounted:
153 | description = '(WARNING: disk has a mounted partition) ' + description
154 | return description
155 |
156 | def ProbablyADisk(self):
157 | """Returns whether this is probably one of the system's internal disks."""
158 | return True
159 |
160 |
161 | class MacDiskArtifact(DiskArtifact):
162 | """The MacDiskArtifact class.
163 |
164 | Attributes:
165 | hashlog_filename (str): where dcfldd will store the hashes.
166 | name (str): the name of the artifact.
167 | remote_path (str): the path to the artifact in the remote storage.
168 | size (int): the size of the artifact, in bytes.
169 | """
170 |
171 | def __init__(self, path, size, use_dcfldd=True):
172 | """Initializes a MacDiskArtifact object.
173 |
174 | Args:
175 | path(str): the path to the disk.
176 | size(str): the size of the disk.
177 | use_dcfldd(bool): whether to use dcfldd to read from the blockdevice.
178 |
179 | Raises:
180 | ValueError: if path is none, doesn't start with '/dev' or size is =< 0.
181 | """
182 | super(MacDiskArtifact, self).__init__(path, size, use_dcfldd=use_dcfldd)
183 | self._macdisk = macdisk.Disk(self.name)
184 |
185 | def _IsUsb(self):
186 | """Whether this device is connected on USB."""
187 | #pylint: disable=no-member
188 | return self._macdisk.busprotocol == 'USB'
189 |
190 | def ProbablyADisk(self):
191 | """Returns whether this is probably one of the system's internal disks."""
192 | if self._IsUsb():
193 | # We ignore USB to try to avoid copying the GiftStick itself.
194 | return False
195 | #pylint: disable=no-member
196 | if self._macdisk.internal and (
197 | # TODO: this needs more research on how to autodetect "interesting"
198 | # disks to copy on MacOS.
199 | self._macdisk.virtualorphysical != 'Virtual'):
200 | return True
201 | return False
202 |
203 |
204 | class LinuxDiskArtifact(DiskArtifact):
205 | """The DiskArtifact class.
206 |
207 | Attributes:
208 | hashlog_filename (str): where dcfldd will store the hashes.
209 | name (str): the name of the artifact.
210 | remote_path (str): the path to the artifact in the remote storage.
211 | size (int): the size of the artifact, in bytes.
212 | """
213 |
214 | def __init__(self, path, size, mounted=False, use_dcfldd=True):
215 | """Initializes a LinuxDiskArtifact object.
216 |
217 | Args:
218 | path(str): the path to the disk.
219 | size(str): the size of the disk.
220 | mounted(bool): whether the disk has a mounted partition.
221 | use_dcfldd(bool): whether to use dcfldd to read from the blockdevice.
222 |
223 | Raises:
224 | ValueError: if path is none, doesn't start with '/dev' or size is =< 0.
225 | """
226 |
227 | super(LinuxDiskArtifact, self).__init__(path, size, use_dcfldd=use_dcfldd)
228 |
229 | self._udevadm_metadata = None
230 |
231 | def GetDescription(self):
232 | """Get a human readable description of the device.
233 |
234 | Returns:
235 | str: the description
236 | """
237 | model = self._GetUdevadmProperty('ID_MODEL')
238 | if self._IsFloppy():
239 | model = 'Floppy Disk'
240 | if not model:
241 | model = self._GetUdevadmProperty('ID_SERIAL') or '(no serial)'
242 | connection = '(internal)'
243 | if self._IsUsb():
244 | model = '{0:s} {1:s}'.format(self._GetUdevadmProperty('ID_VENDOR'), model)
245 | connection = '(usb)'
246 | description = '{0:s}: {1:s} {2:s}'.format(self.name, model, connection)
247 | if self.mounted:
248 | description = '(WARNING: disk has a mounted partition) ' + description
249 | return description
250 |
251 | def _GetUdevadmProperty(self, prop):
252 | """Get a udevadm property.
253 |
254 | Args:
255 | prop(str): the property to query.
256 |
257 | Returns:
258 | str: the value of the property or None if the property is not set.
259 | """
260 | if not self._udevadm_metadata:
261 | self._udevadm_metadata = hostinfo.GetUdevadmInfo(self.name)
262 | return self._udevadm_metadata.get(prop, None)
263 |
264 | def ProbablyADisk(self):
265 | """Returns whether this is probably one of the system's internal disks."""
266 | if self._IsFloppy():
267 | return False
268 | if self._IsUsb():
269 | # We ignore USB to try to avoid copying the GiftStick itself.
270 | return False
271 | return True
272 |
273 | def _IsFloppy(self):
274 | """Whether this block device is a floppy disk."""
275 | # see https://www.kernel.org/doc/html/latest/admin-guide/devices.html
276 | return self._GetUdevadmProperty('MAJOR') == '2'
277 |
278 | def _IsUsb(self):
279 | """Whether this device is connected on USB."""
280 | return self._GetUdevadmProperty('ID_BUS') == 'usb'
281 |
282 |
283 | class DiskRecipe(base.BaseRecipe):
284 | """The DiskRecipe class.
285 |
286 | This Recipe acquires the raw image of all disks on the system.
287 | """
288 |
289 | def __init__(self, name, options=None):
290 | """Class for a disks acquisition Recipe"""
291 | self.use_dcfldd = True
292 | super().__init__(name, options=options)
293 |
294 | def _GetLsblkDict(self):
295 | """Calls lsblk.
296 |
297 | Returns:
298 | dict: the output of the lsblk command.
299 | """
300 | lsblk_path = hostinfo.Which('lsblk')
301 | lsblk_output = subprocess.check_output(
302 | [lsblk_path, '-J', '--bytes', '-o', '+UUID,FSTYPE,SERIAL'])
303 | return json.loads(lsblk_output)
304 |
305 | def _ListAllDisksMac(self):
306 | """Lists all disks connected to the machine.
307 |
308 | Returns:
309 | list(MacDiskArtifact): a list of disks.
310 | """
311 | disk_list = []
312 | for mac_disk in macdisk.WholeDisks():
313 | disk_name = mac_disk.deviceidentifier
314 | disk_size = mac_disk.totalsize
315 | disk = MacDiskArtifact(
316 | os.path.join('/dev', disk_name), disk_size,
317 | use_dcfldd=self.use_dcfldd)
318 | disk_list.append(disk)
319 | return disk_list
320 |
321 | def _IsDiskMounted(self, lsblk_device_dict):
322 | """Returns True if the disk has a mounted partition.
323 |
324 | Args:
325 | lsblk_device_dict: a dict containing the information about the device from
326 | lsblk.
327 | Returns:
328 | bool: True if the disk has at least one mounted partition.
329 | """
330 |
331 | if 'mountpoint' in lsblk_device_dict and lsblk_device_dict['mountpoint']:
332 | return True
333 |
334 | if 'children' in lsblk_device_dict:
335 | res = [
336 | self._IsDiskMounted(grandchild)
337 | for grandchild in lsblk_device_dict['children']]
338 | return any(res)
339 |
340 | return False
341 |
342 | def _ListAllDisksLinux(self):
343 | """Lists all disks connected to the machine.
344 |
345 | Returns:
346 | list(LinuxDiskArtifact): a list of disks.
347 | """
348 | lsblk_dict = self._GetLsblkDict()
349 | disk_list = []
350 | for blockdevice in lsblk_dict.get('blockdevices', None):
351 | if blockdevice.get('type') == 'disk':
352 | disk_name = blockdevice.get('name')
353 | disk_size_str = blockdevice.get('size')
354 | disk_size = int(disk_size_str)
355 | disk = LinuxDiskArtifact(
356 | os.path.join('/dev', disk_name), disk_size,
357 | mounted=self._IsDiskMounted(blockdevice),
358 | use_dcfldd=self.use_dcfldd)
359 | disk_list.append(disk)
360 | return disk_list
361 |
362 | def _ListDisks(self, all_devices=False, names=None):
363 | """Builds a list of DiskArtifact object to acquire.
364 |
365 | Args:
366 | all_devices(bool): whether to also list devices that aren't internal to
367 | the system's (ie: removable media).
368 | names(list(str)): list of disk names (eg: ['sda', 'sdc']) to acquire.
369 | Returns:
370 | list(DiskArtifact): a sorted (and curated) list of disks to acquire.
371 | """
372 | disk_list = []
373 | if self._platform == 'darwin':
374 | disk_list = self._ListAllDisksMac()
375 | else:
376 | disk_list = self._ListAllDisksLinux()
377 |
378 | # We order the list by size, descending.
379 | disk_list = sorted(disk_list, reverse=True, key=lambda disk: disk.size)
380 | if names:
381 | return [disk for disk in disk_list if disk.name in names]
382 | if not all_devices:
383 | # We resort to guessing
384 | return [disk for disk in disk_list if disk.ProbablyADisk()]
385 | return disk_list
386 |
387 | def _GetListDisksArtifact(self):
388 | """Generates a StringArtifact containing information about all disks.
389 |
390 | Returns:
391 | StringArtifact: the artifact.
392 | """
393 | if self._platform == 'darwin':
394 | #pylint: disable=protected-access
395 | diskutil_artifact = base.StringArtifact(
396 | 'Disks/diskutil.txt', json.dumps(
397 | [md._attributes for md in macdisk.WholeDisks()]))
398 | return diskutil_artifact
399 |
400 | lsblk_artifact = base.StringArtifact(
401 | 'Disks/lsblk.txt', json.dumps(self._GetLsblkDict()))
402 | return lsblk_artifact
403 |
404 | def _GetDiskInfoArtifact(self, disk):
405 | """Returns an StringArtifact containing info about a disk being copied.
406 |
407 | Args:
408 | disk(DiskArtifact): the disk object to get info from.
409 |
410 | Returns:
411 | StringArtifact: the disk info artifact.
412 | """
413 | if self._platform == 'darwin':
414 | # TODO
415 | return None
416 |
417 | #pylint: disable=protected-access
418 | udevadm_artifact = base.StringArtifact(
419 | 'Disks/{0:s}.udevadm.txt'.format(disk.name),
420 | disk._GetUdevadmProperty('udevadm_text_output'))
421 | return udevadm_artifact
422 |
423 | def GetArtifacts(self):
424 | """Selects the Artifacts to acquire.
425 |
426 | This tries to return as many Artifacts as possible even if some collection
427 | raised an exception.
428 |
429 | Returns:
430 | list(DiskArtifact): the artifacts corresponding to copy.
431 |
432 | Raises:
433 | errors.RecipeException: when no disk is to be collected.
434 | """
435 | artifacts = []
436 | disks_to_collect = []
437 | if getattr(self._options, 'disable_dcfldd', None):
438 | self._logger.info('Disabling dcfldd')
439 | self.use_dcfldd = False
440 |
441 | if getattr(self._options, 'select_disks', None):
442 | all_disks = self._ListDisks(all_devices=True)
443 | if getattr(self._options, 'no_zenity', False):
444 | disks_to_collect = cli.AskDiskList(all_disks)
445 | else:
446 | disks_to_collect = gui.AskDiskList(all_disks)
447 | elif getattr(self._options, 'disk', None):
448 | disks_to_collect = self._ListDisks(names=self._options.disk)
449 | else:
450 | disks_to_collect = self._ListDisks()
451 |
452 | disk_list_artifact = self._GetListDisksArtifact()
453 | artifacts.append(disk_list_artifact)
454 |
455 | if not disks_to_collect:
456 | self._logger.warn('No disk to collect')
457 |
458 | for disk in disks_to_collect:
459 |
460 | disk_info_artifact = self._GetDiskInfoArtifact(disk)
461 | if disk_info_artifact:
462 | artifacts.append(disk_info_artifact)
463 |
464 | artifacts.append(disk)
465 |
466 | if self.use_dcfldd:
467 | # It is necessary for the DiskArtifact to be appended before the
468 | # hashlog, as the hashlog is generated when dcfldd completes.
469 | hashlog_artifact = base.FileArtifact(disk.hashlog_filename)
470 | hashlog_artifact.remote_path = 'Disks/{0:s}'.format(
471 | hashlog_artifact.name)
472 | artifacts.append(hashlog_artifact)
473 |
474 | return artifacts
475 |
--------------------------------------------------------------------------------
/auto_forensicate/recipes/firmware.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright 2018 Google LLC
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # https://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 | """Handles the acquisition of the system's firmware."""
16 |
17 | from __future__ import unicode_literals
18 |
19 | from auto_forensicate.recipes import base
20 |
21 |
22 | class ChipsecRecipe(base.BaseRecipe):
23 | """The ChipsecRecipe class, which acquires the system's Firmware."""
24 |
25 | _CHIPSEC_CMD = [
26 | 'chipsec_util', '-l', '/dev/stderr', 'spi', 'dump', '/dev/stdout']
27 |
28 | def GetArtifacts(self):
29 | """Provides a list of Artifacts to upload.
30 |
31 | Returns:
32 | list(BaseArtifact): the artifacts for the system's firmware.
33 | """
34 | if self._platform == 'darwin':
35 | self._logger.info('Firmware acquisition only works on Linux, skipping.')
36 | return []
37 |
38 | # Firmware acquisition will fail on various platforms (ie: QEMU during e2e
39 | # tests, and shouldn't be a reason to mark the full upload as failed.
40 | # So we're setting ignore_failure to True.
41 | firmware_artifact = base.ProcessOutputArtifact(
42 | self._CHIPSEC_CMD, 'Firmware/rom.bin', ignore_failure=True)
43 | return [firmware_artifact]
44 |
--------------------------------------------------------------------------------
/auto_forensicate/recipes/sysinfo.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright 2018 Google LLC
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # https://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 | """Handles the acquisition of the system's information."""
16 |
17 | from __future__ import unicode_literals
18 |
19 | from auto_forensicate import hostinfo
20 | from auto_forensicate.recipes import base
21 |
22 |
23 | class SysinfoRecipe(base.BaseRecipe):
24 | """The SysinfoRecipe class."""
25 |
26 | _SYSTEM_PROFILER_CMD = [
27 | '/usr/sbin/system_profiler', 'SPHardwareDataType', 'SPSoftwareDataType']
28 | _NETWORKSETUP_CMD = [
29 | '/usr/sbin/networksetup', '-listallhardwareports']
30 |
31 | def GetArtifacts(self):
32 | """Provides a list of Artifacts to upload.
33 |
34 | Returns:
35 | list(BaseArtifact): the artifacts to copy.
36 | """
37 | artifacts_list = []
38 | if self._platform == 'darwin':
39 | # TODO: have hostinfo.Which work on darwin
40 | artifacts_list.append(
41 | base.ProcessOutputArtifact(
42 | self._SYSTEM_PROFILER_CMD, 'system_info.txt'))
43 | artifacts_list.append(
44 | base.ProcessOutputArtifact(
45 | self._NETWORKSETUP_CMD, 'interfaces.txt'))
46 | else:
47 | dmidecode_path = hostinfo.Which('dmidecode')
48 | dmidecode_cmd = [dmidecode_path, '--type=bios', '--type=system']
49 | artifacts_list.append(
50 | base.ProcessOutputArtifact(dmidecode_cmd, 'system_info.txt'))
51 | ip_path = hostinfo.Which('ip')
52 | ip_cmd = [ip_path, 'link', 'show']
53 | artifacts_list.append(
54 | base.ProcessOutputArtifact(ip_cmd, 'interfaces.txt'))
55 | return artifacts_list
56 |
--------------------------------------------------------------------------------
/auto_forensicate/stamp/__init__.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright 2018 Google LLC
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # https://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 |
--------------------------------------------------------------------------------
/auto_forensicate/stamp/manager.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright 2018 Google LLC
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # https://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 | """Base Stamp classes."""
16 |
17 | from collections import namedtuple
18 | from auto_forensicate import hostinfo
19 |
20 | BaseStamp = namedtuple('Stamp', ['identifier', 'start_time'])
21 |
22 |
23 | class BaseStampManager(object):
24 | """Base class to generate the stamp file."""
25 |
26 | def __init__(self, graphical=True):
27 | """Initializes a BaseStampManager object.
28 |
29 | Args:
30 | graphical (bool): whether we will request information from a graphical
31 | environment.
32 | """
33 | self._graphical = graphical
34 |
35 | def BasePathElements(self, stamp):
36 | """Generates upload paths based on information in stamp.
37 |
38 | Args:
39 | stamp (BaseStamp): device information
40 |
41 | Returns:
42 | list(str): list of elements from the stamp
43 | """
44 | remote_path_elems = [
45 | stamp.start_time,
46 | stamp.identifier
47 | ]
48 |
49 | return remote_path_elems
50 |
51 | def GetStamp(self, graphical=True):
52 | """Generates the "stamp" metadata to upload.
53 |
54 | This contains information such as when the script is run, and the host's ID.
55 |
56 | Args:
57 | graphical(bool): Set to False if requesting the Stamp in an non-graphical
58 | environment.
59 |
60 | Returns:
61 | BaseStamp: the content of the stamp.
62 | """
63 |
64 | stamp = BaseStamp(
65 | identifier=hostinfo.GetIdentifier(),
66 | start_time=hostinfo.GetTime(),
67 | )
68 |
69 | return stamp
70 |
--------------------------------------------------------------------------------
/auto_forensicate/uploader.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright 2018 Google LLC
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # https://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 | """Implements various cloud upload helpers."""
16 |
17 | from __future__ import unicode_literals
18 |
19 | import argparse
20 | import itertools
21 | import json
22 | import logging
23 | import mmap
24 | import os
25 | try:
26 | from BytesIO import BytesIO
27 | except ImportError:
28 | from io import BytesIO
29 | from six.moves.urllib.parse import urlparse
30 | import boto
31 | from auto_forensicate import errors
32 | from auto_forensicate.recipes import disk
33 |
34 | class BaseUploader(object):
35 | """Base class for an Uploader object."""
36 |
37 | def __init__(self, stamp_manager, stamp=None):
38 | """Initializes the BaseUploader class.
39 |
40 | Args:
41 | stamp_manager (StampManager): the StampManager object for this
42 | context.
43 | stamp (namedtuple): an optional ForensicsStamp containing
44 | the upload metadata.
45 | """
46 | self._stamp_manager = stamp_manager
47 | self._logger = logging.getLogger(self.__class__.__name__)
48 |
49 | self._stamp = stamp or self._stamp_manager.GetStamp()
50 | self._stamp_uploaded = False
51 |
52 | def _UploadStamp(self):
53 | """Upload the 'stamp' (a json file containing metadata)."""
54 |
55 | # TODO: if this fails, raise an Exception that will stop execution
56 | stream = BytesIO(json.dumps(self._stamp._asdict()).encode('utf-8'))
57 | remote_path = self._MakeRemotePath('stamp.json')
58 | self._UploadStream(stream, remote_path)
59 | self._stamp_uploaded = True
60 | self._logger.info('Uploaded %s', remote_path)
61 |
62 | def _MakeRemotePath(self, destination):
63 | """Builds the remote path for an object.
64 |
65 | Args:
66 | destination (str): the destination path for the artifact.
67 | Returns:
68 | str: the sanitized remote path.
69 | """
70 |
71 | remote_path_elems = self._stamp_manager.BasePathElements(self._stamp)
72 | remote_path = '/'.join(remote_path_elems + [destination])
73 |
74 | return remote_path
75 |
76 | def _UploadStream(self, stream, remote_path, update_callback=None):
77 | """Uploads a file object to Google Cloud Storage.
78 |
79 | Args:
80 | stream (file): the file-like object pointing to data to upload.
81 | remote_path (str): the remote path to store the data to.
82 | update_callback (func): an optional function called as upload progresses.
83 |
84 | Raises:
85 | NotImplementedError: if the method is not implemented.
86 | """
87 | raise NotImplementedError('Please implement _UploadStream')
88 |
89 | def UploadArtifact(self, artifact, update_callback=None):
90 | """Uploads a file object to Google Cloud Storage.
91 |
92 | Args:
93 | artifact (BaseArtifact): the Artifact object pointing to data to upload.
94 | update_callback (func): an optional function called as upload progresses.
95 |
96 | Returns:
97 | str: the remote destination where the file was uploaded.
98 | """
99 |
100 | # Upload the 'stamp' file. This allows us to make sure we have write
101 | # permission on the bucket, and fail early if we don't.
102 | if not self._stamp_uploaded:
103 | self._UploadStamp()
104 |
105 | remote_path = self._MakeRemotePath(artifact.remote_path)
106 | self._UploadStream(
107 | artifact.OpenStream(), remote_path, update_callback=update_callback)
108 |
109 | artifact.CloseStream()
110 | return remote_path
111 |
112 |
113 | class LocalCopier(BaseUploader):
114 | """Handles uploads of data to a local directory."""
115 |
116 | def __init__(self, destination_dir, stamp_manager, stamp=None):
117 | """Initializes the LocalCopier class.
118 |
119 | Args:
120 | destination_dir (str): the path to the destination directory.
121 | stamp_manager (StampManager): the StampManager object for this
122 | context.
123 | stamp (namedtuple): an optional ForensicsStamp containing
124 | the upload metadata.
125 | """
126 | super(LocalCopier, self).__init__(stamp_manager=stamp_manager, stamp=stamp)
127 | self.destination_dir = destination_dir
128 |
129 | def _UploadStream(self, stream, remote_path, update_callback=None):
130 | """Copies a file object to a remote directory.
131 |
132 | Args:
133 | stream (file): the file-like object pointing to data to upload.
134 | remote_path (str): the remote path to store the data to.
135 | update_callback (func): an optional function called as upload progresses.
136 | """
137 | destination_file = open(remote_path, 'wb')
138 | copied = 0
139 | buffer_length = 16*1024 # This is the defaults for shutil.copyfileobj()
140 | while True:
141 | buf = stream.read(buffer_length)
142 | if not buf:
143 | break
144 | destination_file.write(buf)
145 | copied += len(buf)
146 | if update_callback:
147 | update_callback(len(buf), copied)
148 |
149 | def _MakeRemotePath(self, destination):
150 | """Builds the remote path for an object.
151 |
152 | Args:
153 | destination (str): the destination path for the artifact.
154 | Returns:
155 | str: the sanitized remote path.
156 | """
157 |
158 | remote_path_elems = (
159 | [self.destination_dir] +
160 | self._stamp_manager.BasePathElements(self._stamp) + [destination])
161 | remote_path = '/'.join(remote_path_elems)
162 |
163 | base_dir = os.path.dirname(remote_path)
164 | if not os.path.exists(base_dir):
165 | os.makedirs(base_dir)
166 |
167 | return remote_path
168 |
169 |
170 | class LocalSplitterCopier(LocalCopier):
171 | """Class for a LocalSplitterCopier.
172 |
173 | This class is a specific implementation of LocalCopier, that will split
174 | DiskArtifacts (and only this class of Artifacts) into a specified number of
175 | slices (10 by default).
176 | """
177 |
178 | def __init__(self, destination_dir, stamp_manager, stamp=None, slices=10):
179 | """Initializes the LocalSplitterCopier class.
180 |
181 | Args:
182 | destination_dir (str): the path to the destination directory.
183 | stamp_manager (StampManager): the StampManager object for this
184 | context.
185 | stamp (namedtuple): an optional ForensicsStamp containing
186 | the upload metadata.
187 | slices (int): the number of slices to split DiskArtifacts into
188 | """
189 | super().__init__(destination_dir, stamp_manager=stamp_manager, stamp=stamp)
190 | self._slices = int(slices)
191 |
192 | def UploadArtifact(self, artifact, update_callback=None):
193 | """Uploads a file object to a local directory.
194 |
195 | Args:
196 | artifact (BaseArtifact): the Artifact object pointing to data to upload.
197 | update_callback (func): an optional function called as upload progresses.
198 |
199 | Returns:
200 | str: the remote destination where the file was uploaded.
201 | """
202 |
203 | # Upload the 'stamp' file. This allows us to make sure we have write
204 | # permission on the bucket, and fail early if we don't.
205 | if not self._stamp_uploaded:
206 | self._UploadStamp()
207 |
208 | if not isinstance(artifact, disk.DiskArtifact):
209 | remote_path = self._MakeRemotePath(artifact.remote_path)
210 | self._UploadStream(
211 | artifact.OpenStream(), remote_path, update_callback=update_callback)
212 | else:
213 | total_uploaded = 0
214 | if self._slices < 1:
215 | raise errors.BadConfigOption(
216 | 'The number of slices needs to be greater than 1')
217 |
218 | # mmap requires that the an offset is a multiple of mmap.PAGESIZE
219 | # so we can't just equally divide the total size in the specified number
220 | # of slices.
221 | number_of_pages = int(artifact.size / self._slices / mmap.PAGESIZE)
222 |
223 | slice_size = number_of_pages * mmap.PAGESIZE
224 | # slice_size might not be equal to (total_size / slices)
225 |
226 | base_remote_path = self._MakeRemotePath(artifact.remote_path)
227 |
228 | stream = artifact.OpenStream()
229 |
230 | for slice_num, seek_position in enumerate(
231 | range(0, artifact.size, slice_size)):
232 | remote_path = f'{base_remote_path}_{slice_num}'
233 |
234 | current_slice_size = slice_size
235 | if seek_position+slice_size > artifact.size:
236 | current_slice_size = artifact.size - seek_position
237 |
238 | mmap_slice = mmap.mmap(
239 | stream.fileno(), length=current_slice_size, offset=seek_position,
240 | access=mmap.ACCESS_READ)
241 |
242 | self._UploadStream(
243 | mmap_slice, remote_path, update_callback=update_callback)
244 |
245 | total_uploaded += current_slice_size
246 | update_callback(total_uploaded, artifact.size)
247 |
248 | artifact.CloseStream()
249 | return remote_path
250 |
251 |
252 | class GCSUploader(BaseUploader):
253 | """Handles resumable uploads of data to Google Cloud Storage."""
254 |
255 | def __init__(self, gs_url, gs_keyfile, client_id, stamp_manager, stamp=None):
256 | """Initializes the GCSUploader class.
257 |
258 | Args:
259 | gs_url (str): the GCS url to the bucket and remote path.
260 | gs_keyfile (str): path of the private key for the Service Account.
261 | client_id (str): the client ID set in the credentials file.
262 | stamp_manager (StampManager): the StampManager object for this
263 | context.
264 | stamp (namedtuple): an optional ForensicsStamp containing
265 | the upload metadata.
266 | """
267 | super(GCSUploader, self).__init__(stamp_manager=stamp_manager, stamp=stamp)
268 | self._boto_configured = False
269 | self._bucket_name = None
270 | self._client_id = client_id
271 | self._gs_keyfile = os.path.abspath(gs_keyfile)
272 | self._gs_url = gs_url
273 |
274 | def _InitBoto(self):
275 | """Initializes the boto library with credentials from self._gs_keyfile."""
276 |
277 | if not boto.config.has_section('Credentials'):
278 | boto.config.add_section('Credentials')
279 |
280 | boto.config.set(
281 | 'Credentials', 'gs_service_key_file', self._gs_keyfile)
282 | boto.config.set(
283 | 'Credentials', 'gs_service_client_id', self._client_id)
284 |
285 | self._boto_configured = True
286 |
287 | def _SplitGCSUrl(self):
288 | """Extracts the bucket name and remote base path from the gs_url argument.
289 |
290 | Returns:
291 | (str, str): a tuple containing GCS bucket name, and the remote base path.
292 |
293 | Raises:
294 | argparse.ArgumentError: if gs_url is invalid
295 | """
296 | parsed_url = urlparse(self._gs_url)
297 | if parsed_url.scheme != 'gs':
298 | raise argparse.ArgumentError(
299 | None, 'Invalid GCS URL \'{0:s}\''.format(self._gs_url))
300 |
301 | bucket_name = parsed_url.netloc
302 | gs_base_path = parsed_url.path
303 |
304 | # This takes care of "//" in a url
305 | remote_base_path = '/'.join(filter(None, gs_base_path.split('/')))
306 |
307 | return (bucket_name, remote_base_path)
308 |
309 | def _MakeRemotePath(self, destination):
310 | """Builds the remote path for an object.
311 |
312 | Args:
313 | destination (str): the destination path for the artifact.
314 | Returns:
315 | str: the sanitized remote path.
316 | """
317 |
318 | remote_path_elems = self._stamp_manager.BasePathElements(self._stamp)
319 | remote_path = '/'.join(remote_path_elems)
320 | base_path = None
321 |
322 | self._bucket_name, base_path = self._SplitGCSUrl()
323 |
324 | if base_path:
325 | remote_path = '/'.join([base_path, remote_path])
326 |
327 | if destination:
328 | remote_path = '/'.join([remote_path, destination])
329 |
330 | remote_path = '/'.join([self._bucket_name, remote_path])
331 |
332 | return remote_path
333 |
334 | def _UploadStream(self, stream, remote_path, update_callback=None):
335 | """Uploads a file object to Google Cloud Storage.
336 |
337 | Args:
338 | stream (file): the file-like object pointing to data to upload.
339 | remote_path (str): the remote path to store the data to.
340 | update_callback (func): an optional function called as upload progresses.
341 | Raises:
342 | errors.RetryableError: when the upload encounters an error that's worth
343 | retrying.
344 | """
345 | if not self._boto_configured:
346 | self._InitBoto()
347 |
348 | try:
349 | dst_uri = boto.storage_uri(remote_path, u'gs')
350 | dst_uri.new_key().set_contents_from_stream(stream, cb=update_callback)
351 | except boto.exception.GSDataError as e:
352 | # This is usually raised when the connection is broken, and deserves to
353 | # be retried.
354 | raise errors.RetryableError(str(e))
355 |
356 |
357 | class GCSSplitterUploader(GCSUploader):
358 | """Handles resumable uploads of data to Google Cloud Storage.
359 |
360 | This class is a specific implementation of GCSUploader, that will split
361 | DiskArtifacts (and only this class of Artifacts) into a specified number of
362 | slices (10 by default).
363 | """
364 |
365 | def __init__(
366 | self, gs_url, gs_keyfile, client_id, stamp_manager, stamp=None,
367 | slices=10):
368 | """Initializes a GCSSplitterUploader object.
369 |
370 | Args:
371 | gs_url (str): the GCS url to the bucket and remote path.
372 | gs_keyfile (str): path of the private key for the Service Account.
373 | client_id (str): the client ID set in the credentials file.
374 | stamp_manager (StampManager): the StampManager object for this
375 | context.
376 | stamp (namedtuple): an optional ForensicsStamp containing
377 | the upload metadata.
378 | slices (int): the number of slices to split DiskArtifacts into.
379 | """
380 | super().__init__(gs_url, gs_keyfile, client_id, stamp_manager, stamp=stamp)
381 | self._slices = int(slices)
382 |
383 | def UploadArtifact(self, artifact, update_callback=None):
384 | """Uploads a file object to Google Cloud Storage.
385 |
386 | Args:
387 | artifact (BaseArtifact): the Artifact object pointing to data to upload.
388 | update_callback (func): an optional function called as upload progresses.
389 |
390 | Returns:
391 | str: the remote destination where the file was uploaded.
392 | """
393 | if not self._boto_configured:
394 | self._InitBoto()
395 |
396 | # Upload the 'stamp' file. This allows us to make sure we have write
397 | # permission on the bucket, and fail early if we don't.
398 | if not self._stamp_uploaded:
399 | self._UploadStamp()
400 |
401 | if not isinstance(artifact, disk.DiskArtifact):
402 | # We do not try to split artifacts that don't represent a disk
403 | remote_path = self._MakeRemotePath(artifact.remote_path)
404 | self._UploadStream(
405 | artifact.OpenStream(), remote_path, update_callback=update_callback)
406 |
407 | else:
408 | total_uploaded = 0
409 | if self._slices < 1:
410 | raise errors.BadConfigOption(
411 | 'The number of slices needs to be greater than 1')
412 |
413 | # mmap requires that the an offset is a multiple of mmap.PAGESIZE
414 | # so we can't just equally divide the total size in the specified number
415 | # of slices.
416 | number_of_pages = int(artifact.size / self._slices / mmap.PAGESIZE)
417 |
418 | slice_size = number_of_pages * mmap.PAGESIZE
419 | # slice_size might not be equal to (total_size / slices)
420 |
421 | base_remote_path = self._MakeRemotePath(artifact.remote_path)
422 |
423 | stream = artifact.OpenStream()
424 |
425 | for slice_num, seek_position in enumerate(
426 | range(0, artifact.size, slice_size)):
427 | remote_path = f'{base_remote_path}_{slice_num}'
428 |
429 | current_slice_size = slice_size
430 | if seek_position+slice_size > artifact.size:
431 | current_slice_size = artifact.size - seek_position
432 |
433 | mmap_slice = mmap.mmap(
434 | stream.fileno(), length=current_slice_size, offset=seek_position,
435 | access=mmap.ACCESS_READ)
436 |
437 | self._UploadStream(
438 | mmap_slice, remote_path, update_callback=update_callback)
439 |
440 | total_uploaded += current_slice_size
441 | update_callback(total_uploaded, artifact.size)
442 |
443 | artifact.CloseStream()
444 | return remote_path
445 |
--------------------------------------------------------------------------------
/auto_forensicate/ux/__init__.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright 2018 Google LLC
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # https://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 |
--------------------------------------------------------------------------------
/auto_forensicate/ux/cli.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright 2018 Google LLC
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # https://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 | """Function to interact with the user in a console environment."""
16 |
17 |
18 | def AskText(message, mandatory=False):
19 | """Asks a question.
20 |
21 | Args:
22 | message(str): the question.
23 | mandatory(bool): whether the answer can be left empty.
24 |
25 | Returns:
26 | str: the user's answer to the question.
27 | """
28 | print(message)
29 | text = input()
30 | if mandatory and not text:
31 | while not text:
32 | text = input()
33 | # TODO: Sanitize input here, as this will be used to construct GCS paths.
34 | return text
35 |
36 |
37 | def AskDiskList(disk_list):
38 | """Asks the user to select which disks to copy.
39 |
40 | Args:
41 | disk_list(list(DiskArtifact)): list of disks.
42 |
43 | Returns:
44 | list(DiskArtifact): a list of devices.
45 | """
46 | valid_choice = False
47 | disk_indices_to_copy = [
48 | i for i, disk in enumerate(disk_list) if disk.ProbablyADisk()]
49 | while not valid_choice:
50 | print('\nPlease select which disks to copy:')
51 | for num, disk in enumerate(disk_list, start=0):
52 | print('{0:d}\t{1:s}'.format(num, disk.GetDescription()))
53 | user_choices = input(
54 | 'Disk numbers (Default is [{0:s}], comma separated): '.format(
55 | ','.join([str(i) for i in disk_indices_to_copy])))
56 | if user_choices == '':
57 | valid_choice = True
58 | else:
59 | choices = user_choices.replace(' ', ',').split(',')
60 | try:
61 | # Check that all provided indices are valid integers.
62 | choices = list(map(int, choices))
63 | except ValueError:
64 | continue
65 | # Check that all provided indices are between expected values
66 | if all([0 <= int(i) < len(disk_list) for i in choices]):
67 | valid_choice = True
68 | # Removing double values
69 | disk_indices_to_copy = list(set(choices))
70 |
71 | return [
72 | disk for index, disk in enumerate(disk_list, start=0)
73 | if index in disk_indices_to_copy
74 | ]
75 |
76 |
77 | def Confirm(text, default='N'):
78 | """Asks the user to confirm something.
79 |
80 | Args:
81 | text(str): the text of the question.
82 | default(str): set the accepted value if user just hits Enter.
83 | Possible values: 'Y' or 'N' (the default).
84 | Returns:
85 | bool: True if the user confirms, False otherwise.
86 | """
87 | print(text)
88 | user_choice = ''
89 | if default == 'Y':
90 | while user_choice not in ['y', 'n', '']:
91 | user_choice = input('[Y/n]? ').lower()
92 | # An empty user answer means 'y'
93 | return user_choice in ['y', '']
94 | elif default == 'N':
95 | while user_choice not in ['y', 'n', '']:
96 | user_choice = input('[y/N]? ').lower()
97 | return user_choice == 'y'
98 | else:
99 | # Don't allow an empty answer
100 | while user_choice not in ['y', 'n']:
101 | user_choice = input('[y/N]? ').lower()
102 | return user_choice == 'y'
103 |
--------------------------------------------------------------------------------
/auto_forensicate/ux/gui.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright 2018 Google LLC
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # https://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 | """Function to interact with the user in a graphic environment."""
16 |
17 | from auto_forensicate.ux import zenity
18 |
19 |
20 | def AskText(message, mandatory=False):
21 | """Pops up a UI window asking a question.
22 |
23 | Args:
24 | message(str): the question.
25 | mandatory(bool): whether the answer can be left empty.
26 |
27 | Returns:
28 | str: the user's answer to the question.
29 | """
30 | text = zenity.GetText(message)
31 | if mandatory and not text:
32 | while not text:
33 | text = zenity.GetText(message)
34 | # TODO: Sanitize input here, as this will be used to construct GCS paths.
35 | return text.decode()
36 |
37 |
38 | def AskDiskList(disk_list):
39 | """Asks the user to select which disks to copy.
40 |
41 | Args:
42 | disk_list(list(DiskArtifact)): list of disks.
43 |
44 | Returns:
45 | list(DiskArtifact): a list of devices.
46 | """
47 | disk_description_map = dict(
48 | zip(
49 | [disk.GetDescription() for disk in disk_list],
50 | [disk for disk in disk_list],
51 | )
52 | )
53 |
54 | data = []
55 | for disk in disk_list:
56 | # Default is to un-check block devices that are not internal disks.
57 | data.append(str(disk.ProbablyADisk()))
58 | data.append(disk.GetDescription())
59 |
60 | choices = []
61 | while not choices:
62 | choices = zenity.CheckList(
63 | ['', 'Disks'],
64 | title='Please select which disks to copy.',
65 | data=data
66 | )
67 |
68 | if choices == ['']:
69 | return []
70 | return [disk_description_map[choice] for choice in choices]
71 |
72 |
73 | def Confirm(text):
74 | """Asks the user to confirm something.
75 |
76 | Args:
77 | text(str): the text of the question.
78 | Returns:
79 | bool: True if the user confirms, False otherwise.
80 | """
81 | return zenity.GetYesNo(text)
82 |
--------------------------------------------------------------------------------
/auto_forensicate/ux/zenity.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright 2018 Google LLC
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # https://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 | """Interface to Zenity."""
16 |
17 | import subprocess
18 |
19 | from auto_forensicate.hostinfo import Which
20 |
21 | def GetYesNo(text):
22 | """Ask user a Yes/No question.
23 |
24 | Args:
25 | text(str): The message to display.
26 | Returns:
27 | bool: the user's answer.
28 | """
29 |
30 | zenity_binary = Which('zenity')
31 |
32 | process = subprocess.Popen(
33 | [zenity_binary, '--question', '--text="{0:s}"'.format(text)],
34 | stdin=subprocess.PIPE, stdout=subprocess.PIPE)
35 |
36 | return process.wait() == 0
37 |
38 |
39 | def GetText(text):
40 | """Ask user for a string.
41 |
42 | Args:
43 | text(str): The message to display.
44 | Returns:
45 | str: the user input.
46 | """
47 |
48 | zenity_binary = Which('zenity')
49 |
50 | process = subprocess.Popen(
51 | [zenity_binary, '--entry', '--text="{0:s}"'.format(text)],
52 | stdin=subprocess.PIPE, stdout=subprocess.PIPE)
53 |
54 | if process.wait() == 0:
55 | return process.stdout.read()[:-1]
56 |
57 | return ''
58 |
59 | def CheckList(column_names, data, title=None):
60 | """Present a list of items to select.
61 |
62 | Args:
63 | column_names(list[str]): A list containing the names of the columns.
64 | data(list[str]]): A list that contains, for each cell in the row,
65 | its selected status, and the value.
66 | For example: ['True', 'field1', 'False', 'Field2']
67 | title(str): The title of the dialog box.
68 | Returns:
69 | list[str]: the selected fields.
70 | """
71 |
72 | zenity_binary = Which('zenity')
73 | command = [zenity_binary, '--list', '--checklist', '--editable=False']
74 | for column in column_names:
75 | command.append('--column={0:s}'.format(column))
76 |
77 | if title:
78 | command.append('--title={0:s}'.format(title))
79 |
80 | command = command + data
81 |
82 | process = subprocess.Popen(
83 | command, stdin=subprocess.PIPE, stdout=subprocess.PIPE)
84 |
85 | if process.wait() == 0:
86 | process_out = process.stdout.read().decode()
87 | return process_out.strip().split('|')
88 |
89 | return []
90 |
--------------------------------------------------------------------------------
/config/jenkins/e2e.sh:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright 2018 Google LLC
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # https://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 |
16 | # This script builds a GiftStick image, runs it in qemu, starts the aquisition
17 | # script from there, and checks the output image in GCS.
18 |
19 | CLOUD_PROJECT=""
20 | GCS_BUCKET=""
21 | SA_CREDENTIALS_FILE=""
22 | ISO_TO_REMASTER_URL=""
23 | ISO_FILENAME=""
24 |
25 | readonly GIFT_USER="xubuntu"
26 | readonly IMAGE_NAME="giftstick.img"
27 |
28 | readonly DEFAULT_ISO_URL="http://mirror.us.leaseweb.net/ubuntu-cdimage/xubuntu/releases/20.04/release/xubuntu-20.04.1-desktop-amd64.iso"
29 |
30 | readonly REMASTER_SCRIPT="tools/remaster.sh"
31 | readonly EXTRA_GCS_PATH="jenkins-build-${BUILD_TAG}"
32 | readonly SSH_KEY_PATH="test_key"
33 | readonly QEMU_SSH_PORT=5555
34 |
35 | readonly EVIDENCE_DISK="disk_42.img"
36 | readonly EVIDENCE_DISK_MD5_HEX="1e639d0a0b2c718eae71a058582a555e"
37 |
38 |
39 | set -e
40 |
41 | # Adds a timestamp to a message to display
42 | # Args:
43 | # the message as a string
44 | function msg {
45 | local message=$1
46 | echo "[$(date +%Y%m%d-%H%M%S)] ${message}"
47 | }
48 |
49 | # Adds a timestamp to a message to display, and quit with returncode = 1
50 | # Args:
51 | # the message as a string
52 | function die {
53 | local message=$1
54 | echo "[$(date +%Y%m%d-%H%M%S)] ${message}; exiting"
55 | exit 1
56 | }
57 |
58 | # Installs packages required to run the E2E tests
59 | function setup {
60 | local evidence_disk_url
61 | export DEBIAN_FRONTEND=noninteractive
62 | sudo apt update -y
63 | sudo apt install -y \
64 | dosfstools \
65 | gdisk \
66 | genisoimage \
67 | initramfs-tools-core \
68 | kpartx \
69 | jq \
70 | ovmf \
71 | qemu-system-x86 \
72 | squashfs-tools \
73 | syslinux \
74 | syslinux-utils \
75 | wget
76 |
77 | if [ ! -f "${ISO_FILENAME}" ]; then
78 | wget -q -nc -O "${ISO_FILENAME}" "${ISO_TO_REMASTER_URL}"
79 | fi
80 |
81 | if [ ! -f "${EVIDENCE_DISK}" ]; then
82 | evidence_disk_url=$(normalize_gcs_url "${EVIDENCE_DISK_GSURL}")
83 | msg "Downloading evidence disk from ${evidence_disk_url}"
84 | gsutil -q cp "${evidence_disk_url}" "${EVIDENCE_DISK}"
85 | fi
86 |
87 | }
88 |
89 | # Builds a GiftStick image, using the remaster script
90 | function build_image {
91 | sudo bash "${REMASTER_SCRIPT}" \
92 | --project "${CLOUD_PROJECT}" \
93 | --bucket "${GCS_BUCKET}" \
94 | --skip_gcs \
95 | --source_iso "${ISO_FILENAME}" \
96 | --image "${IMAGE_NAME}" \
97 | --e2e_test \
98 | --sa_json_file "${SA_CREDENTIALS_FILE}" \
99 | --extra_gcs_path "${EXTRA_GCS_PATH}"
100 | }
101 |
102 | # Tries to run a command in the Qemu VM.
103 | #
104 | # Args:
105 | # The command to run in the Qemu VM, as a string.
106 | function ssh_and_run {
107 | local ssh_command=$1
108 | if [ ! -f "${SSH_KEY_PATH}" ]; then
109 | # The corresponding public key is pushed in the giftstick "e2etest" image.
110 | # The image is running in Qemu, in the VM that is running the Jenkins Job.
111 | cat >"${SSH_KEY_PATH}" < 0) and data_len > 0:
26 | cb(data_len, cb_size)
27 |
--------------------------------------------------------------------------------
/doc/FAQ.md:
--------------------------------------------------------------------------------
1 | # Frequently Asked Questions
2 |
3 | ## Is this an official Google product?
4 |
5 | No.
6 |
7 | ## Does the code support other Cloud hosting services?
8 |
9 | Not out of the box.
10 |
11 | The uploading code is based on [boto](https://github.com/boto/boto),
12 | and you could create a new class in
13 | [uploader.py](https://github.com/google/GiftStick/blob/master/auto_forensicate/uploader.py)
14 | and implement the private methods to the needs of other Cloud storage services.
15 | Some flags should probably be added to the
16 | [auto_acquire](https://github.com/google/GiftStick/blob/master/auto_forensicate/auto_acquire.py)
17 | main script, and (if you use the image building scripts from the `tools`
18 | directory) add those to `make_bootable_usb_image` function of
19 | [remaster.sh](https://github.com/google/GiftStick/blob/master/tools/remaster.sh)
20 | when the 'double-clickable' helper script is created (search for `EOFORENSICSH`)
21 |
22 | ## Does this support `newStorage method`? (e.g.: MacOS Fusion Drives)
23 |
24 | Probably not.
25 |
26 | The script lists block devices that have been detected by the kernel running on
27 | the system (ie: a vanilla Ubuntu).
28 |
29 | If the block device doesn't show up when running `lsblk` in the booted OS, it's
30 | not going to be detected.
31 |
32 | ## What target system does the script support?
33 |
34 | If your target system can boot over USB (with EFI) and its devices are
35 | recognized as block devices in a vanilla Xubuntu, then those will be acquired.
36 |
37 | Some hardware is still not recognized in the Linux kernel and makes acquisition a
38 | bit more complicated:
39 |
40 | * Wifi module used in MacBook post 2016 is still unsupported [see
41 | bug](https://bugzilla.kernel.org/show_bug.cgi?id=193121). You will need to use
42 | a USB->RJ45 adapter.
43 |
44 | ## Does the code work on `$OS`?
45 |
46 | This has only been tested on Xubuntu Xenial Xerus (16.04) and Bionic Beaver
47 | (18.04).
48 |
49 | ## What if the internet connection is not stable?
50 |
51 | Then the script will most likely fail. Depending on the failure detected, a
52 | message will be displayed to the user saying they probably should retry by
53 | running the script again.
54 |
55 | It performs 'resumable' upload which does handle some errors and will
56 | try to re-send chunks in the vent of some network errors. If internet
57 | connectivity is lost for a significant amount of time, the upload will stop
58 | and you won't be able to resume from the last known uploaded chunk.
59 |
60 | ## Is this project 'forensically sound'?
61 |
62 | Not really.
63 |
64 | None of the code used in this project has been certified, and does not follow
65 | and ISO standard.
66 |
67 | No write blocking mechanism is currently implemented.
68 |
69 | To try and keep some trust about the data being copied from disk, the code also
70 | uploads MD5 and SHA1 hashes for every 128MiB read from the device, as well as
71 | the whole content. This is uploaded alongside the `sdX.image` file, as
72 | `sdX.hash`.
73 |
74 | ## Why `dcfldd` and not `acquisition_method_with_compression`?
75 |
76 | `dd` clones generate raw images, which can be readily processed by most other
77 | forensics tools.
78 |
79 | `dcfldd` was chosen as it's readily available in Ubuntu archives and will
80 | calculate MD5/SHA hashes as it's reading from the block device, even though it
81 | may misbehave when reading faulty drives.
82 |
83 | Adding a new recipe, where one can use another tool to read blocks off the
84 | device is [explained here](doc/new_recipe.md)
85 |
86 | ## Can I also acquire removable disks connected to the target?
87 |
88 | Yes.
89 |
90 | Call the `auto_acquire.py` script with the `--select_disk` flag.
91 |
92 | ## Why are the ISO remastering script in `tools` so ugly?
93 |
94 | These scripts come as helpers to get you started quickly (by setting up GCS and
95 | remastering an vanilla Xubuntu ISO with the acquisition scripts).
96 |
97 | The acquisition scripts don't use them.
98 |
99 | ## Why should I send my data to a remote untrusted Cloud platform?
100 |
101 | If this is a risk you're not willing to take, make sure you acquire only
102 | encrypted devices, e.g.: laptops with Full Disk Encryption such as FileVault or
103 | BitLocker.
104 |
105 | Alternatively, you can also create your own uploader class to upload data to
106 | a destination of your chosing. (see **Does the code support other Cloud hosting services?**)
107 |
108 | You can disable acquiring the firmware of the target system by only enabling the
109 | Disk recipe (see the tool's help).
110 |
--------------------------------------------------------------------------------
/doc/gift_video.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/google/GiftStick/53630f6e14b603fe581670555f7a37abaac03c03/doc/gift_video.gif
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | cachetools==3.1.1
2 | boto==2.49.0
3 | gcs-oauth2-boto-plugin
4 | google-cloud-logging<=2.1.1
5 | google-cloud-storage
6 | mock
7 | progress
8 | six
9 |
--------------------------------------------------------------------------------
/run_tests.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright 2018 Google LLC
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # https://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 | """Run all tests inside the tests folder."""
16 | import unittest
17 | import os
18 | import sys
19 |
20 |
21 | loader = unittest.TestLoader()
22 | start_dir = os.path.join(os.path.dirname(__file__), 'tests')
23 | suite = loader.discover(start_dir, pattern='*_tests.py')
24 |
25 | runner = unittest.TextTestRunner()
26 | result = runner.run(suite)
27 | sys.exit(not result.wasSuccessful())
28 |
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright 2018 Google LLC
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # https://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 | """Installation and deployment script."""
16 |
17 | import pkg_resources
18 | from setuptools import find_packages
19 | from setuptools import setup
20 |
21 |
22 | def ParseRequirements(filename):
23 | """Parse python requirements.
24 |
25 | Args:
26 | filename (str): The requirement file to read.
27 | Returns:
28 | List[str]: a list of requirements.
29 | """
30 | install_requires = []
31 | with open(filename) as requirements:
32 | install_requires = [
33 | str(requirement) for requirement in
34 | pkg_resources.parse_requirements(requirements)]
35 |
36 | return install_requires
37 |
38 |
39 | description = 'Forensics acquisition tool'
40 |
41 | long_description = (
42 | 'auto_forensicate is a module to automate uploading forensics evidence to'
43 | 'Google Cloud Storage')
44 |
45 | setup(
46 | name='auto_forensicate',
47 | version='20210201',
48 | description=description,
49 | long_description=long_description,
50 | url='https://github.com/google/giftstick',
51 | author='giftstick development team',
52 | license='Apache License, Version 2.0',
53 | packages=find_packages(),
54 | install_requires=ParseRequirements('requirements.txt'),
55 | classifiers=[
56 | 'Development Status :: 4 - Beta',
57 | 'Operating System :: OS Independent',
58 | 'Programming Language :: Python',
59 | ],
60 | scripts=['auto_forensicate/auto_acquire.py']
61 | )
62 |
--------------------------------------------------------------------------------
/tests/auto_forensicate_tests.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright 2018 Google LLC
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # https://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 | """Tests for the auto_forensicate script."""
16 |
17 | from __future__ import unicode_literals
18 |
19 | import argparse
20 | import logging
21 | import os
22 | import sys
23 | import tempfile
24 | import unittest
25 | from six import StringIO
26 | import mock
27 |
28 | from auto_forensicate import auto_acquire
29 | from auto_forensicate import errors
30 | from auto_forensicate import uploader
31 | from auto_forensicate.recipes import base
32 |
33 | DEFAULT_ARTIFACT_CONTENT = os.urandom(1000)
34 |
35 | # pylint: disable=missing-docstring
36 | # pylint: disable=protected-access
37 |
38 |
39 | class BytesIORecipe(base.BaseRecipe):
40 | """A Recipe returning 1 artifact with a BytesIO."""
41 |
42 | def __init__(self, name, options=None):
43 | super(BytesIORecipe, self).__init__(name, options=options)
44 | self.ran_collection = False
45 |
46 | def GetArtifacts(self):
47 | return [base.StringArtifact('fake/path', DEFAULT_ARTIFACT_CONTENT)]
48 |
49 |
50 | class FailingRecipe(base.BaseRecipe):
51 | """A Recipe raising an IOError when running GetArtifact."""
52 |
53 | def GetArtifacts(self):
54 | raise errors.RecipeException('Everything is terrible.')
55 |
56 |
57 | class FileCopyUploader(object):
58 | """Test implementation of an Uploader object that copies content to a file."""
59 |
60 | def __init__(self, destination_file):
61 | self._origin_dir = os.getcwd()
62 | self.destination_file = destination_file
63 |
64 | def UploadArtifact(self, artifact, update_callback=None):
65 | data = artifact._GetStream().read()
66 | self.destination_file.write(data)
67 | if update_callback:
68 | update_callback(len(data), len(data))
69 |
70 |
71 | class FakeGCSUploader(object):
72 | """Test implementation of a GCS Uploader for testing progress reporting"""
73 |
74 | def UploadArtifact(self, artifact, update_callback=None):
75 | current_bytes = 0
76 | total_bytes = 0
77 | boto_callback_interval = 1024
78 |
79 | update_callback(current_bytes, total_bytes)
80 | while True:
81 | data = artifact._GetStream().read(boto_callback_interval)
82 | if data:
83 | current_bytes += len(data)
84 | update_callback(current_bytes, total_bytes)
85 | else:
86 | break
87 |
88 |
89 | class FakeGoogleLogger(object):
90 | """Fake google logger for testing progress reporting"""
91 | logs = []
92 |
93 | def log_text(self, log_entry, severity=None):
94 | self.logs.append((severity, log_entry))
95 |
96 |
97 | class HumanReadableBytesTest(unittest.TestCase):
98 | """Tests for the HumanReadableBytes Function"""
99 |
100 | def testDec(self):
101 | """Tests decimal prefix based conversions"""
102 |
103 | self.assertEqual(auto_acquire.HumanReadableBytes(0.0), '0.0 B')
104 | expected = [
105 | '1.2 B', '12.3 B', '123.0 B',
106 | '1.2 KB', '12.3 KB', '123.0 KB',
107 | '1.2 MB', '12.3 MB', '123.0 MB',
108 | '1.2 GB', '12.3 GB', '123.0 GB',
109 | '1.2 TB', '12.3 TB', '123.0 TB',
110 | '1.2 PB', '12.3 PB', '123.0 PB',
111 | '1230.0 PB', '12300.0 PB', '123000.0 PB',
112 | ]
113 | for index, value in enumerate(expected):
114 | self.assertEqual(
115 | auto_acquire.HumanReadableBytes(1.23 * (10 ** index)), value)
116 |
117 | def testBin(self):
118 | """Tests binary prefix based conversions"""
119 |
120 | self.assertEqual(auto_acquire.HumanReadableBytes(
121 | 1024**1 - 1024**0, 'bin'), '1023.0 B')
122 | self.assertEqual(auto_acquire.HumanReadableBytes(
123 | 1024**1, 'bin'), '1.0 KiB')
124 | self.assertEqual(auto_acquire.HumanReadableBytes(
125 | 1024**4 - 1024**3, 'bin'), '1023.0 GiB')
126 | self.assertEqual(auto_acquire.HumanReadableBytes(
127 | 1024**4, 'bin'), '1.0 TiB')
128 |
129 |
130 | class GCPProgressReporterTest(unittest.TestCase):
131 | """Tests for the GCPProgressReporter class."""
132 |
133 | def setUp(self):
134 | """Set up an instantiated GCPProgressReporter for each test"""
135 | self.progress_reporter = auto_acquire.GCPProgressReporter(
136 | BytesIORecipe('stringio').GetArtifacts()[0],
137 | FakeGoogleLogger())
138 |
139 | def testCheckReportable(self):
140 | """Tests _CheckReportable."""
141 | reporting_frequency = self.progress_reporter._reporting_frequency
142 | CheckReportable = self.progress_reporter._CheckReportable
143 |
144 | self.assertEqual(CheckReportable(0), False)
145 | self.assertEqual(CheckReportable(reporting_frequency), True)
146 | self.progress_reporter._reported_percentage = reporting_frequency
147 | self.assertEqual(CheckReportable(reporting_frequency), False)
148 | self.assertEqual(CheckReportable(reporting_frequency*2), True)
149 |
150 | def testLogProgress(self):
151 | """Tests _LogProgress."""
152 | # For reporting purposes set the artifact to 1MiB
153 | self.progress_reporter._artifact = base.StringArtifact(
154 | 'fake/path', 'A' * (1024**2))
155 |
156 | artifact = self.progress_reporter._artifact
157 | update_callback = self.progress_reporter.update_with_total
158 | logger = self.progress_reporter._progress_logger
159 | reporting_frequency = self.progress_reporter._reporting_frequency
160 | expected_log_entries = 100 // reporting_frequency
161 |
162 | gcs_uploader = FakeGCSUploader()
163 | gcs_uploader.UploadArtifact(artifact, update_callback)
164 |
165 | self.assertEqual(len(logger.logs), expected_log_entries)
166 |
167 |
168 | class AutoForensicateTest(unittest.TestCase):
169 | """Tests for the AutoForensicate class.
170 |
171 | TODO(romaing): Add tests for Main(), by setting sys.argv and testing
172 | the proper recipes ran.
173 | """
174 |
175 | def FakeBadParseGCSJSON(self, _):
176 | return None
177 |
178 | def FakeParseGCSJSON(self, _):
179 | return {'client_id': 'fake_client_id'}
180 |
181 | def FakeMakeProgressBar(self, max_size, name, message=None): # pylint: disable=unused-argument
182 | return mock.create_autospec(auto_acquire.BaBar, spec_set=True)
183 |
184 | def testParseDestination(self):
185 | recipes = {
186 | 'test1': None,
187 | 'test2': None
188 | }
189 | af = auto_acquire.AutoForensicate(recipes=recipes)
190 | test_args = ['--acquire', 'all', 'destination_url']
191 | options = af.ParseArguments(test_args)
192 | self.assertEqual(options.destination, 'destination_url')
193 |
194 | def testParseArgsRequiredJson(self):
195 | recipes = {
196 | 'test1': None,
197 | 'test2': None
198 | }
199 | af = auto_acquire.AutoForensicate(recipes=recipes)
200 | test_args = ['--acquire', 'test1', '--logging', 'stackdriver']
201 | with self.assertRaises(SystemExit):
202 | prev_stderr = sys.stderr
203 | sys.stderr = StringIO()
204 | af.ParseArguments(test_args)
205 | sys.stderr = prev_stderr
206 |
207 | def testParseArgsRequiredURL(self):
208 | recipes = {
209 | 'test1': None,
210 | 'test2': None
211 | }
212 | af = auto_acquire.AutoForensicate(recipes=recipes)
213 | test_args = ['--acquire', 'test1', '--gs_keyfile=null']
214 | prev_stderr = sys.stderr
215 | sys.stderr = StringIO()
216 | with self.assertRaises(SystemExit):
217 | af.ParseArguments(test_args)
218 | sys.stderr = prev_stderr
219 |
220 | def testParseAcquireOneRecipe(self):
221 | recipes = {
222 | 'test1': None,
223 | 'test2': None
224 | }
225 | test_args = ['--acquire', 'test1', 'nfs://destination']
226 | af = auto_acquire.AutoForensicate(recipes=recipes)
227 | parser = af._CreateParser()
228 | options = parser.parse_args(test_args)
229 | expected_recipes = ['test1']
230 | self.assertEqual(options.acquire, expected_recipes)
231 |
232 | def testParseAcquireBad(self):
233 | recipes = {
234 | 'test1': None,
235 | 'test2': None
236 | }
237 | af = auto_acquire.AutoForensicate(recipes=recipes)
238 | test_args = [
239 | '--acquire', 'test4', '--acquire', 'all',
240 | '--gs_keyfile=file', 'gs://bucket']
241 | prev_stderr = sys.stderr
242 | sys.stderr = StringIO()
243 | with self.assertRaises(SystemExit):
244 | af.ParseArguments(test_args)
245 | sys.stderr = prev_stderr
246 |
247 | def testParseAcquireAll(self):
248 | recipes = {
249 | 'test1': None,
250 | 'test2': None
251 | }
252 | af = auto_acquire.AutoForensicate(recipes=recipes)
253 | test_args = ['--acquire', 'test1', '--acquire', 'all', 'gs://bucket']
254 | options = af.ParseArguments(test_args)
255 | expected_recipes = ['disk', 'firmware', 'sysinfo']
256 | self.assertEqual(options.acquire, expected_recipes)
257 |
258 | def testSliceOption(self):
259 | af = auto_acquire.AutoForensicate(recipes={'test': None})
260 |
261 | test_args = ['--slice_disks', '8', '--acquire', 'all', 'gs://bucket']
262 | options = af.ParseArguments(test_args)
263 |
264 | self.assertTrue(options.slice_disks)
265 | self.assertTrue(options.disable_dcfldd)
266 | self.assertEqual(options.slice_disks, 8)
267 |
268 | def testSliceOptionBad(self):
269 | af = auto_acquire.AutoForensicate(recipes={'test': None})
270 |
271 | # Invalid number of slices
272 | test_args = ['--slice_disks', '1', '--acquire', 'all', 'gs://bucket']
273 | with self.assertRaises(errors.BadConfigOption):
274 | options = af.ParseArguments(test_args)
275 |
276 | def testMakeUploader(self):
277 | af = auto_acquire.AutoForensicate(recipes={'test': None})
278 |
279 | options = af.ParseArguments(['--acquire', 'all', 'destination'])
280 | uploader_object = af._MakeUploader(options)
281 | self.assertIsNone(uploader_object)
282 |
283 | options = af.ParseArguments(['--acquire', 'all', 'gs://destination'])
284 | with self.assertRaises(errors.BadConfigOption):
285 | # We need a --gs_keyfile option for gs:// URLs
286 | uploader_object = af._MakeUploader(options)
287 |
288 | af._ParseGCSJSON = self.FakeBadParseGCSJSON
289 | options = af.ParseArguments(
290 | ['--acquire', 'all', '--gs_keyfile', 'keyfile', 'gs://destination'])
291 | with self.assertRaises(errors.BadConfigOption):
292 | # Invalid gs_keyfile
293 | uploader_object = af._MakeUploader(options)
294 |
295 | af._ParseGCSJSON = self.FakeParseGCSJSON
296 | options = af.ParseArguments(
297 | ['--acquire', 'all', '--gs_keyfile', 'keyfile', 'gs://destination'])
298 | uploader_object = af._MakeUploader(options)
299 | self.assertIsInstance(uploader_object, uploader.GCSUploader)
300 |
301 | def testFailDo(self):
302 | af = auto_acquire.AutoForensicate(recipes={})
303 | recipe = FailingRecipe('fail')
304 | with tempfile.TemporaryFile() as destination:
305 | uploader_object = FileCopyUploader(destination)
306 | af._uploader = uploader_object
307 | with self.assertRaises(errors.RecipeException):
308 | af.Do(recipe)
309 |
310 | def testDo(self):
311 | af = auto_acquire.AutoForensicate(recipes={})
312 | parser = argparse.ArgumentParser()
313 | parser.add_argument('--fake', action='store_true')
314 | options = parser.parse_args(['--fake'])
315 | af._logger = logging.getLogger(self.__class__.__name__)
316 | af._MakeProgressBar = self.FakeMakeProgressBar
317 |
318 | recipe = BytesIORecipe('stringio', options=options)
319 | self.assertTrue(recipe._options.fake)
320 |
321 | with tempfile.TemporaryFile() as destination:
322 | uploader_object = FileCopyUploader(destination)
323 | af._uploader = uploader_object
324 | af.Do(recipe)
325 | destination.seek(0)
326 | copied_data = destination.read()
327 | self.assertEqual(copied_data, DEFAULT_ARTIFACT_CONTENT)
328 |
329 |
330 | if __name__ == '__main__':
331 | unittest.main()
332 |
--------------------------------------------------------------------------------
/tests/base_tests.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright 2018 Google LLC
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # https://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 | """Tests for the base.py module."""
16 |
17 | from __future__ import unicode_literals
18 |
19 | import os
20 | import tempfile
21 | import unittest
22 | import mock
23 |
24 | from auto_forensicate.recipes import base
25 |
26 |
27 | class BaseArtifactTests(unittest.TestCase):
28 | """Tests for the BaseArtifact class."""
29 |
30 | def testInstantiate(self):
31 | """Tests instanciating a BaseArtifact object."""
32 | artifact_name = 'artifact'
33 | artifact = base.BaseArtifact(artifact_name)
34 |
35 | self.assertEqual(artifact.size, 0)
36 | self.assertEqual(artifact.name, artifact_name)
37 | expected_remote_path = 'Base/artifact'
38 | self.assertEqual(artifact.remote_path, expected_remote_path)
39 |
40 | def testReadableSize(self):
41 | """Tests ReadableSize() method."""
42 | artifact_name = 'artifact'
43 | artifact = base.BaseArtifact(artifact_name)
44 | self.assertEqual(artifact.readable_size, 'Unknown size')
45 |
46 | #pylint: disable=protected-access
47 | artifact._size = 12345
48 | self.assertEqual(artifact.readable_size, '12.1KiB')
49 |
50 | artifact._size = 1234567
51 | self.assertEqual(artifact.readable_size, '1.2MiB')
52 |
53 | artifact._size = 123456789
54 | self.assertEqual(artifact.readable_size, '117.7MiB')
55 |
56 | artifact._size = 12345678901
57 | self.assertEqual(artifact.readable_size, '11.5GiB')
58 |
59 | artifact._size = 1023 * 1024 * 1024 * 1024
60 | self.assertEqual(artifact.readable_size, '1,023.0GiB')
61 |
62 | artifact._size = 1234567890123
63 | self.assertEqual(artifact.readable_size, '1.1TiB')
64 |
65 | artifact._size = 12345678901234567890
66 | self.assertEqual(artifact.readable_size, '10,965.2PiB')
67 |
68 | def testOpenStream(self):
69 | """Tests OpenStream."""
70 | artifact = base.BaseArtifact('artifact')
71 | with self.assertRaises(NotImplementedError) as err:
72 | artifact.OpenStream()
73 | expected_err_message = '_GetStream() is not implemented in BaseArtifact'
74 | self.assertEqual(str(err.exception), expected_err_message)
75 |
76 |
77 | class ProcessOutputArtifactTest(unittest.TestCase):
78 | """Tests for the ProcessOutputArtifact class."""
79 |
80 | _TEST_OUTPUT = b'this is some command output'
81 |
82 | def testRunCommand(self):
83 | """Tests RunCommand"""
84 | cmd = ['echo', '-n', self._TEST_OUTPUT]
85 | artifact = base.ProcessOutputArtifact(cmd, 'output.txt')
86 |
87 | #pylint: disable=protected-access
88 | self.assertEqual(artifact._command, cmd)
89 | self.assertEqual(artifact.name, 'output.txt')
90 | # Size is unknown until the command is run
91 | self.assertEqual(artifact.size, 0)
92 |
93 | artifact_content = artifact.OpenStream().read()
94 | self.assertEqual(artifact.size, 27)
95 |
96 | self.assertEqual(artifact_content, self._TEST_OUTPUT)
97 |
98 |
99 | class BaseRecipeTests(unittest.TestCase):
100 | """Tests for the BaseRecipe class."""
101 |
102 | def setUp(self):
103 | self.temp_directory = tempfile.mkdtemp()
104 |
105 | def testContextManager(self):
106 | """Tests creating temp directories in a 'with' statement."""
107 | with mock.patch('tempfile.mkdtemp', lambda: self.temp_directory):
108 | with base.BaseRecipe('fake_recipe') as recipe:
109 | self.assertTrue(os.path.isdir(self.temp_directory))
110 | #pylint: disable=protected-access
111 | self.assertEqual(recipe._workdir, self.temp_directory)
112 | self.assertFalse(os.path.isdir(self.temp_directory))
113 |
--------------------------------------------------------------------------------
/tests/directory_tests.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright 2018 Google LLC
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # https://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 | """Tests for the disk.py module."""
16 |
17 | from __future__ import unicode_literals
18 |
19 | import os
20 | import subprocess
21 | import tempfile
22 | import unittest
23 | from auto_forensicate.recipes import directory
24 |
25 | # pylint: disable=missing-docstring
26 | # pylint: disable=protected-access
27 |
28 |
29 | class DirectoryArtifactTests(unittest.TestCase):
30 | """Tests for the DirectoryArtifact class."""
31 |
32 | def _EmptyFolderSize(self):
33 | """Returns the size of an empty folder.
34 |
35 | This should match the current filesystem blocksize.
36 | """
37 | size = int(subprocess.check_output(['stat', '-fc', '%s', '.']).strip())
38 | return size
39 |
40 | def testInstantiate(self):
41 | with tempfile.TemporaryDirectory() as path:
42 | expected_name = path.replace(os.path.sep, '_')
43 | d = directory.DirectoryArtifact(path, method='tar', compress=False)
44 | self.assertEqual(d.path, path)
45 | self.assertEqual(d.name, expected_name)
46 | self.assertEqual(
47 | d.remote_path, 'Directories/{0:s}.tar'.format(expected_name))
48 | self.assertEqual(d.size, self._EmptyFolderSize())
49 |
50 | d = directory.DirectoryArtifact(path, method='tar', compress=True)
51 | self.assertEqual(
52 | d.remote_path, 'Directories/{0:s}.tar.gz'.format(expected_name))
53 |
54 | def testGenerateTarCopyCommand(self):
55 | with tempfile.TemporaryDirectory() as path:
56 | d = directory.DirectoryArtifact(path, method='tar', compress=False)
57 | command = d._TAR_COMMAND
58 | command.append(path)
59 | self.assertEqual(d._GenerateCopyCommand(), command)
60 |
61 | def testGenerateTarGzCopyCommand(self):
62 | with tempfile.TemporaryDirectory() as path:
63 | d = directory.DirectoryArtifact(path, method='tar', compress=True)
64 | command = d._TAR_COMMAND
65 | command.append('-z')
66 | command.append(path)
67 | self.assertEqual(d._GenerateCopyCommand(), command)
68 |
--------------------------------------------------------------------------------
/tests/disk_tests.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright 2018 Google LLC
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # https://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 | """Tests for the disk.py module."""
16 |
17 | from __future__ import unicode_literals
18 |
19 | import json
20 | import unittest
21 | import mock
22 | from auto_forensicate import errors
23 | #pylint: disable=unused-import
24 | from auto_forensicate import hostinfo
25 | from auto_forensicate import macdisk
26 | from auto_forensicate.recipes import base
27 | from auto_forensicate.recipes import disk
28 |
29 |
30 | # pylint: disable=missing-docstring
31 | # pylint: disable=protected-access
32 |
33 | class DiskArtifactTests(unittest.TestCase):
34 | """Tests for the DiskArtifact class."""
35 |
36 | def testInstantiate(self):
37 | name = 'sdx'
38 | path = '/dev/{0:s}'.format(name)
39 | d = disk.DiskArtifact(path, 100)
40 | self.assertEqual(d._path, path)
41 | self.assertEqual(d.name, name)
42 | self.assertEqual(d.remote_path, 'Disks/{0:s}.image'.format(name))
43 | self.assertEqual(d.hashlog_filename, '{0:s}.hash'.format(name))
44 |
45 | def testGenerateDDCommand(self):
46 | name = 'sdx'
47 | path = '/dev/{0:s}'.format(name)
48 | dd_command = [
49 | '/some/place/random/bin/dcfldd', 'if={0:s}'.format(path),
50 | 'hashlog={0:s}.hash'.format(name)]
51 | dd_static_options = [
52 | 'hash=md5,sha1', 'bs=2M', 'conv=noerror', 'hashwindow=128M']
53 | dd_command.extend(dd_static_options)
54 |
55 | with mock.patch('auto_forensicate.hostinfo.Which') as patched_which:
56 | patched_which.return_value = '/some/place/random/bin/dcfldd'
57 | d = disk.DiskArtifact(path, 100)
58 | self.assertEqual(d._GenerateDDCommand(), dd_command)
59 |
60 | class LinuxDiskArtifactTests(unittest.TestCase):
61 | """Tests for the LinuxDiskArtifact class."""
62 |
63 | def testIsFloppy(self):
64 | disk_object = disk.LinuxDiskArtifact('/dev/sdX', 12345)
65 | disk_object._udevadm_metadata = {'MAJOR': '2'}
66 | self.assertTrue(disk_object._IsFloppy())
67 | disk_object._udevadm_metadata = {'MAJOR': '12'}
68 | self.assertFalse(disk_object._IsFloppy())
69 |
70 | def testIsUsb(self):
71 | disk_object = disk.LinuxDiskArtifact('/dev/sdX', 12345)
72 | disk_object._udevadm_metadata = {'ID_BUS': 'usb'}
73 | self.assertTrue(disk_object._IsUsb())
74 | disk_object._udevadm_metadata = {'ID_BUS': 'ata'}
75 | self.assertFalse(disk_object._IsUsb())
76 |
77 | def testProbablyADisk(self):
78 | disk_object = disk.LinuxDiskArtifact('/dev/sdX', 123456789)
79 | disk_object._udevadm_metadata = {'ID_BUS': 'ata'}
80 | self.assertTrue(disk_object.ProbablyADisk())
81 |
82 | # We ignore USB to try to avoid copying the GiftStick itself.
83 | disk_object._udevadm_metadata = {'ID_BUS': 'usb'}
84 | self.assertFalse(disk_object.ProbablyADisk())
85 |
86 | # We ignore Floppy
87 | disk_object._udevadm_metadata = {'MAJOR': '2'}
88 | self.assertFalse(disk_object.ProbablyADisk())
89 |
90 | # Fancy NVME drive
91 | disk_object._udevadm_metadata = {
92 | 'DEVTYPE': 'disk',
93 | 'MAJOR': '259',
94 | 'MINOR': '0'
95 | }
96 | self.assertTrue(disk_object.ProbablyADisk())
97 |
98 | def testGetDescription(self):
99 | disk_object = disk.LinuxDiskArtifact('/dev/sdX', 123456789)
100 | disk_object._udevadm_metadata = {
101 | 'ID_BUS': 'ata',
102 | 'ID_MODEL': 'TestDisk'
103 | }
104 | self.assertEqual('sdX: TestDisk (internal)', disk_object.GetDescription())
105 |
106 | disk_object._udevadm_metadata = {
107 | 'ID_BUS': 'usb',
108 | 'ID_MODEL': 'TestDisk',
109 | 'ID_VENDOR': 'FakeVendor'
110 | }
111 | self.assertEqual(
112 | 'sdX: FakeVendor TestDisk (usb)', disk_object.GetDescription())
113 |
114 | disk_object._udevadm_metadata = {
115 | 'MAJOR': '2',
116 | }
117 | self.assertEqual(
118 | 'sdX: Floppy Disk (internal)', disk_object.GetDescription())
119 |
120 | disk_object.mounted = True
121 | self.assertEqual(
122 | '(WARNING: disk has a mounted partition) sdX: Floppy Disk (internal)',
123 | disk_object.GetDescription())
124 |
125 |
126 | class DiskRecipeTests(unittest.TestCase):
127 | """Tests for the DiskRecipe (on linux) class."""
128 |
129 | def setUp(self):
130 | self._lsblk_dict = {
131 | 'blockdevices': [
132 | {'name': 'loop0', 'maj:min': '7:0', 'rm': '0', 'size': '1073741824',
133 | 'ro': '1', 'type': 'loop', 'mountpoint': '/dev/loop0', 'uuid': None
134 | },
135 | {'name': 'sdx', 'maj:min': '8:0', 'rm': '0', 'size': '502110190592',
136 | 'ro': '0', 'type': 'disk', 'mountpoint': None,
137 | 'children': [
138 | {'name': 'sdx1', 'maj:min': '8:1', 'rm': '0',
139 | 'size': '48725121', 'ro': '0', 'type': 'part',
140 | 'mountpoint': '/boot', 'uuid': 'fake_uuid_1'},
141 | {'name': 'sdx2', 'maj:min': '8:2', 'rm': '0', 'size': '231201',
142 | 'ro': '0', 'type': 'part', 'mountpoint': None,
143 | 'uuid': 'fake_uuid_2'},
144 | ]
145 | },
146 | {'name': 'usb0', 'maj:min': '8:16', 'rm': '1', 'size': '3000041824',
147 | 'ro': '0', 'type': 'disk', 'mountpoint': None, 'uuid': None
148 | },
149 | {'name': 'sdy', 'maj:min': '8:0', 'rm': '0', 'size': '512110190592',
150 | 'ro': '0', 'type': 'disk', 'mountpoint': None, 'uuid': None,
151 | 'children': [
152 | {'name': 'sdy1', 'maj:min': '8:1', 'rm': '0',
153 | 'size': '48725121', 'ro': '0', 'type': 'part',
154 | 'mountpoint': '/boot', 'uuid': None},
155 | ]
156 | }
157 | ]
158 | }
159 |
160 | def _GetLsblkDictZeroDisks(self):
161 | return {'blockdevices': []}
162 |
163 | def _GetLsblkDictThreeDisks(self):
164 | return self._lsblk_dict
165 |
166 | def testListDisksZero(self):
167 | recipe = disk.DiskRecipe('Disk')
168 | recipe._platform = 'linux'
169 | disk.DiskRecipe._GetLsblkDict = self._GetLsblkDictZeroDisks
170 | self.assertEqual(0, len(recipe._ListDisks()))
171 |
172 | def testListAllDisks(self):
173 | recipe = disk.DiskRecipe('Disk')
174 | recipe._platform = 'linux'
175 | disk.DiskRecipe._GetLsblkDict = self._GetLsblkDictThreeDisks
176 | disk_list = recipe._ListDisks(all_devices=True)
177 | self.assertEqual(len(disk_list), 3)
178 | self.assertGreaterEqual(disk_list[0].size, disk_list[1].size)
179 | self.assertEqual(disk_list[0].size, 512110190592)
180 | self.assertEqual(disk_list[1].size, 502110190592)
181 | self.assertEqual(disk_list[2].size, 3000041824)
182 |
183 | def testListDisksWithNames(self):
184 | recipe = disk.DiskRecipe('Disk')
185 | recipe._platform = 'linux'
186 | disk.DiskRecipe._GetLsblkDict = self._GetLsblkDictThreeDisks
187 | disk_list = recipe._ListDisks(all_devices=True, names=['sdz_not_present'])
188 | self.assertEqual(len(disk_list), 0)
189 |
190 | disk_list = recipe._ListDisks(all_devices=True, names=['usb0', 'sdy'])
191 | self.assertEqual(len(disk_list), 2)
192 | self.assertEqual(disk_list[0].name, 'sdy')
193 | self.assertEqual(disk_list[1].name, 'usb0')
194 |
195 | def testGetArtifactsZeroDisk(self):
196 | with mock.patch(
197 | 'auto_forensicate.recipes.disk.DiskRecipe._ListDisks'
198 | ) as patched_listdisk:
199 | patched_listdisk.return_value = []
200 | recipe = disk.DiskRecipe('Disk')
201 | recipe._platform = 'linux'
202 | artifacts = recipe.GetArtifacts()
203 | self.assertEqual(len(artifacts), 1)
204 |
205 | artifact = artifacts[0]
206 | self.assertEqual(artifact.name, 'lsblk.txt')
207 |
208 | def testGetArtifacts(self):
209 | disk_name = 'sdx'
210 | disk_size = 20 * 1024 * 1024 * 1024 # 20GB
211 | disk_object = disk.LinuxDiskArtifact(
212 | '/dev/{0:s}'.format(disk_name), disk_size)
213 | disk_object._udevadm_metadata = {'udevadm_text_output': 'fake disk info'}
214 | with mock.patch(
215 | 'auto_forensicate.recipes.disk.DiskRecipe._ListDisks'
216 | ) as patched_listdisk:
217 | patched_listdisk.return_value = [disk_object]
218 | with mock.patch(
219 | 'auto_forensicate.recipes.disk.DiskRecipe._GetLsblkDict'
220 | ) as patched_lsblk:
221 | patched_lsblk.return_value = self._lsblk_dict
222 | recipe = disk.DiskRecipe('Disk')
223 | recipe._platform = 'linux'
224 | artifacts = recipe.GetArtifacts()
225 | self.assertEqual(len(artifacts), 4)
226 |
227 | udevadm_artifact = artifacts[1]
228 | self.assertIsInstance(udevadm_artifact, base.StringArtifact)
229 | self.assertEqual(
230 | udevadm_artifact._GetStream().read(), b'fake disk info')
231 | self.assertEqual(udevadm_artifact.remote_path, 'Disks/sdx.udevadm.txt')
232 |
233 | lsblk_artifact = artifacts[0]
234 | self.assertIsInstance(lsblk_artifact, base.StringArtifact)
235 | self.assertEqual(
236 | lsblk_artifact._GetStream().read(),
237 | json.dumps(self._lsblk_dict).encode('utf-8'))
238 | self.assertEqual(lsblk_artifact.remote_path, 'Disks/lsblk.txt')
239 |
240 | self.assertEqual(artifacts[2], disk_object)
241 |
242 | file_artifact = artifacts[3]
243 | self.assertIsInstance(file_artifact, base.FileArtifact)
244 | self.assertEqual(file_artifact.name, '{0:s}.hash'.format(disk_name))
245 | self.assertEqual(
246 | file_artifact.remote_path, 'Disks/{0:s}.hash'.format(disk_name))
247 |
248 | def testIsMounted(self):
249 | recipe = disk.DiskRecipe('Disk')
250 | recipe._platform = 'linux'
251 | mounted_devices = [
252 | # No partition
253 | {'name': 'loop0', 'size': '1073741824', 'mountpoint': '/dev/loop0'},
254 | # One partition
255 | {'name': 'sdx', 'size': '502190592', 'type': 'disk', 'mountpoint': None,
256 | 'children': [
257 | {'name': 'sdx1', 'size': '485121', 'mountpoint': '/boot'},
258 | {'name': 'sdx2', 'size': '231201', 'mountpoint': None},
259 | ]
260 | },
261 | # partition has one child
262 | {'name': 'sdy', 'size': '502190592', 'type': 'disk', 'mountpoint': None,
263 | 'children': [
264 | {'name': 'sdy2', 'size': '231201', 'mountpoint': None,
265 | 'children': [
266 | {'name': 'sdy1p0', 'type': 'part', 'mountpoint': '/boot'}
267 | ]},
268 | ]}
269 | ]
270 |
271 | self.assertTrue(
272 | all([recipe._IsDiskMounted(device) for device in mounted_devices]))
273 |
274 | not_mounted_devices = [
275 | # No partition
276 | {'name': 'loop0', 'size': '1073741824', 'mountpoint': None},
277 | # One partition
278 | {'name': 'sdx', 'size': '502190592', 'type': 'disk', 'mountpoint': None,
279 | 'children': [
280 | {'name': 'sdx1', 'size': '485121', 'mountpoint': None},
281 | {'name': 'sdx2', 'size': '231201', 'mountpoint': None},
282 | ]
283 | },
284 | # partition has one child
285 | {'name': 'sdy', 'size': '502190592', 'type': 'disk', 'mountpoint': None,
286 | 'children': [
287 | {'name': 'sdy2', 'size': '231201', 'mountpoint': None,
288 | 'children': [
289 | {'name': 'sdy1p0', 'type': 'part', 'mountpoint': None}
290 | ]},
291 | ]}
292 | ]
293 |
294 | self.assertFalse(
295 | any([recipe._IsDiskMounted(device) for device in not_mounted_devices]))
296 |
297 |
298 | class MacDiskArtifactTests(unittest.TestCase):
299 | """Tests for the MacDiskArtifact class."""
300 |
301 | def setUp(self):
302 | self._fake_disks_list_dict = {
303 | 'AllDisks': ['diskInternal', 'diskUSB'],
304 | }
305 | self._fake_disk_infos = {
306 | 'diskInternal': {
307 | 'BusProtocol': 'PCI-Express',
308 | 'Internal': True,
309 | 'VirtualOrPhysical': 'Unknown'
310 | },
311 | 'diskUSB': {
312 | 'BusProtocol': 'USB',
313 | 'Internal': False,
314 | 'VirtualOrPhysical': 'Physical'
315 | }
316 | }
317 |
318 | @mock.patch('auto_forensicate.macdisk._DictFromDiskutilInfo')
319 | @mock.patch('auto_forensicate.macdisk._DictFromDiskutilList')
320 | def testProbablyADisk(self, patched_list_dict, patched_info_dict):
321 | patched_list_dict.return_value = self._fake_disks_list_dict
322 | patched_info_dict.return_value = self._fake_disk_infos['diskInternal']
323 | disk_object = disk.MacDiskArtifact('/dev/diskInternal', 123456789)
324 | self.assertTrue(disk_object.ProbablyADisk())
325 |
326 | # We ignore USB to try to avoid copying the GiftStick itself.
327 | patched_info_dict.return_value = self._fake_disk_infos['diskUSB']
328 | disk_object = disk.MacDiskArtifact('/dev/diskUSB', 123456789)
329 | self.assertFalse(disk_object.ProbablyADisk())
330 |
331 | @mock.patch('auto_forensicate.macdisk._DictFromDiskutilInfo')
332 | @mock.patch('auto_forensicate.macdisk._DictFromDiskutilList')
333 | def testGetDescription(self, _patched_list_dict, _patched_info_dict):
334 | disk_object = disk.MacDiskArtifact('/dev/sdInternal', 123456789)
335 | self.assertEqual(
336 | 'Name: sdInternal (Size: 123456789)', disk_object.GetDescription())
337 |
--------------------------------------------------------------------------------
/tests/firmware_tests.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright 2018 Google LLC
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # https://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 | """Tests for the firmware.py module."""
16 |
17 | from __future__ import unicode_literals
18 |
19 | import unittest
20 | from auto_forensicate.recipes import base
21 | from auto_forensicate.recipes import firmware
22 |
23 |
24 | class ChipsecRecipeTests(unittest.TestCase):
25 | """Tests for the ChipsecRecipe class."""
26 |
27 | _CHIPSEC_OUTPUT_STRING = b'\xff' * 256
28 |
29 | def testGetArtifacts(self):
30 | """Tests for the GetArtifacts() method."""
31 | chipsec_recipe = firmware.ChipsecRecipe('chipsec')
32 | #pylint: disable=protected-access
33 | chipsec_recipe._platform = 'linux'
34 | chipsec_recipe._CHIPSEC_CMD = [
35 | 'echo', '-n', self._CHIPSEC_OUTPUT_STRING]
36 |
37 | artifacts = chipsec_recipe.GetArtifacts()
38 | self.assertEqual(len(artifacts), 1)
39 |
40 | artifact = artifacts[0]
41 | self.assertIsInstance(artifact, base.ProcessOutputArtifact)
42 | self.assertEqual(artifact.name, 'rom.bin')
43 | self.assertEqual(artifact.remote_path, 'Firmware/rom.bin')
44 | artifact_content = artifact.OpenStream().read()
45 | self.assertEqual(artifact_content, self._CHIPSEC_OUTPUT_STRING)
46 |
--------------------------------------------------------------------------------
/tests/hostinfo_tests.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright 2018 Google LLC
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # https://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 | """Tests for the utils module."""
16 |
17 | from __future__ import unicode_literals
18 |
19 | import unittest
20 | import uuid
21 | import mock
22 |
23 | from auto_forensicate import hostinfo
24 |
25 | #pylint: disable=missing-docstring
26 | class HostInfoTests(unittest.TestCase):
27 | """Tests for the HostInfo class."""
28 |
29 | def _ReadDMISerial(self, name):
30 | fake_dmi_values = {
31 | 'chassis_serial': 'test_serial',
32 | 'product_uuid': 'test_uuid',
33 | }
34 | return fake_dmi_values.get(name, None)
35 |
36 | def _ReadDMIMachineUUID(self, name):
37 | fake_dmi_values = {
38 | 'chassis_serial': None,
39 | 'product_uuid': 'test_uuid',
40 | }
41 | return fake_dmi_values.get(name, None)
42 |
43 | def _ReadDMIRandom(self, name):
44 | fake_dmi_values = {
45 | 'chassis_serial': None,
46 | 'product_uuid': None,
47 | }
48 | return fake_dmi_values.get(name, None)
49 |
50 | def _FakeTime(self):
51 | return '20171012-135619'
52 |
53 | def _FakeAskText(self, _, mandatory=False):
54 | if mandatory:
55 | return 'fake mandatory value'
56 | return 'fake value'
57 |
58 | def testGetIdendifierWithSerial(self):
59 | hostinfo.ReadDMI = self._ReadDMISerial
60 | self.assertEqual(hostinfo.GetIdentifier(), 'test_serial')
61 |
62 | def testGetIdendifierWithUUID(self):
63 | hostinfo.ReadDMI = self._ReadDMIMachineUUID
64 | self.assertEqual(hostinfo.GetIdentifier(), 'test_uuid')
65 |
66 | def testGetIdendifierWithRandomUUID(self):
67 | hostinfo.ReadDMI = self._ReadDMIRandom
68 | uuid_ = uuid.uuid4()
69 | with mock.patch('uuid.uuid4', lambda: uuid_):
70 | self.assertEqual(hostinfo.GetIdentifier(), str(uuid_))
71 |
--------------------------------------------------------------------------------
/tests/stamp_tests.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright 2018 Google LLC
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # https://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 | """Tests for the stamp module."""
16 |
17 | from __future__ import unicode_literals
18 |
19 | import unittest
20 | import mock
21 | from auto_forensicate.stamp import manager
22 |
23 |
24 | #pylint: disable=missing-docstring
25 | class StampManagerTests(unittest.TestCase):
26 | """Tests for the StampManager class."""
27 |
28 | def setUp(self):
29 | self.test_stamp = manager.BaseStamp(
30 | identifier='test_uuid',
31 | start_time='20171012-135619')
32 |
33 | def testBaseElements(self):
34 | path_elements = ['20171012-135619', 'test_uuid']
35 | stamp_manager = manager.BaseStampManager()
36 | self.assertEqual(
37 | stamp_manager.BasePathElements(self.test_stamp), path_elements)
38 |
39 | def testGetStamp(self):
40 | test_stamp = manager.BaseStamp(
41 | identifier='test_uuid',
42 | start_time='20171012-135619')
43 | with mock.patch('auto_forensicate.hostinfo.GetTime') as faked_time:
44 | faked_time.return_value = '20171012-135619'
45 | with mock.patch('auto_forensicate.hostinfo.GetIdentifier') as faked_id:
46 | faked_id.return_value = 'test_uuid'
47 | stamp_manager = manager.BaseStampManager()
48 | self.assertEqual(stamp_manager.GetStamp(), test_stamp)
49 |
--------------------------------------------------------------------------------
/tests/sysinfo_tests.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright 2018 Google LLC
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # https://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 | """Tests for the sysinfo.py recipe module."""
16 |
17 | from __future__ import unicode_literals
18 |
19 | import unittest
20 |
21 | from auto_forensicate.recipes import base
22 | from auto_forensicate.recipes import sysinfo
23 |
24 | import mock
25 |
26 |
27 | # pylint: disable=missing-docstring
28 | class LinuxSysinfoRecipeTest(unittest.TestCase):
29 | """Tests for the SysinfoRecipe class."""
30 |
31 | _DMIDECODE_OUTPUT_FAIL_STRING = (
32 | b'/dev/mem: Permission denied\n Error running dmidecode')
33 |
34 | _DMIDECODE_OUTPUT_STRING = b"""\
35 | # dmidecode 2.12
36 | SMBIOS 2.8 present.
37 |
38 | Handle 0x0001, DMI type 1, 27 bytes
39 | System Information
40 | Manufacturer: Cyber Computers Inc
41 | Product Name: The Great Workstation
42 | Version: Not Specified
43 | Serial Number: CAAAAAAA
44 | UUID: A419F8CA-1234-0000-9C43-BC0000D00000
45 | Wake-up Type
46 | SKU Number: 1231456#ABU
47 | Family: 103C_55555X G=D
48 | """
49 |
50 | def testGetArtifactsFail(self):
51 | sysinfo_recipe = sysinfo.SysinfoRecipe('failsysinfo')
52 | # pylint: disable=protected-access
53 | sysinfo_recipe._platform = 'linux'
54 | # pylint: disable=line-too-long
55 | with mock.patch('auto_forensicate.recipes.base.ProcessOutputArtifact._RunCommand') as patched_run:
56 | patched_run.return_value = self._DMIDECODE_OUTPUT_FAIL_STRING
57 | artifacts = sysinfo_recipe.GetArtifacts()
58 | self.assertEqual(len(artifacts), 2)
59 |
60 | artifact = artifacts[0]
61 | self.assertIsInstance(artifact, base.ProcessOutputArtifact)
62 | self.assertEqual(artifact.name, 'system_info.txt')
63 | self.assertEqual(artifact.remote_path, 'system_info.txt')
64 | artifact_content = artifact.OpenStream().read()
65 | self.assertEqual(artifact_content, self._DMIDECODE_OUTPUT_FAIL_STRING)
66 |
67 | def testGetArtifacts(self):
68 | sysinfo_recipe = sysinfo.SysinfoRecipe('sysinfo')
69 | # pylint: disable=protected-access
70 | sysinfo_recipe._platform = 'linux'
71 | # pylint: disable=line-too-long
72 | with mock.patch('auto_forensicate.recipes.base.ProcessOutputArtifact._RunCommand') as patched_run:
73 | patched_run.return_value = self._DMIDECODE_OUTPUT_STRING
74 | artifacts = sysinfo_recipe.GetArtifacts()
75 | self.assertEqual(len(artifacts), 2)
76 |
77 | artifact = artifacts[0]
78 | self.assertIsInstance(artifact, base.ProcessOutputArtifact)
79 | self.assertEqual(artifact.name, 'system_info.txt')
80 | self.assertEqual(artifact.remote_path, 'system_info.txt')
81 | artifact_content = artifact.OpenStream().read()
82 | self.assertEqual(artifact_content, self._DMIDECODE_OUTPUT_STRING)
83 |
84 |
85 | class MacSysinfoRecipeTest(unittest.TestCase):
86 | """Tests for the SysinfoRecipe class."""
87 |
88 | _SYSTEM_PROFILER_FAIL_STRING = (
89 | b'/dev/mem: Permission denied\n Error running dmidecode')
90 |
91 | _SYSTEM_PROFILER_OUTPUT_STRING = b"""\
92 | Hardware:
93 |
94 | Hardware Overview:
95 |
96 | Model Name: MacBook Pro
97 | Model Identifier: MacBookPro14,3
98 | Processor Name: Intel Core i7
99 | Processor Speed: 2.8 GHz
100 | Number of Processors: 1
101 | Total Number of Cores: 4
102 | L2 Cache (per Core): 256 KB
103 | L3 Cache: 6 MB
104 | Memory: 16 GB
105 | Boot ROM Version: 185.0.0.0.0
106 | SMC Version (system): 2.45f0
107 | Serial Number (system): CAAAAAAAAAAA
108 | Hardware UUID: 12345678-E004-5158-AAA-BBBBB52F3949
109 |
110 | Software:
111 |
112 | System Software Overview:
113 |
114 | System Version: macOS 10.14.3 (18D42)
115 | Kernel Version: Darwin 18.2.0
116 | Boot Volume: Macintosh HD
117 | Boot Mode: Normal
118 | Computer Name: macbookpro2
119 | User Name: Someone Else (someoneelse)
120 | Secure Virtual Memory: Enabled
121 | System Integrity Protection: Enabled
122 | Time since boot: 4 days 3:38
123 | """
124 |
125 | def testGetArtifactsFail(self):
126 | sysinfo_recipe = sysinfo.SysinfoRecipe('failsysinfo')
127 | # pylint: disable=protected-access
128 | sysinfo_recipe._platform = 'darwin'
129 | # pylint: disable=line-too-long
130 | with mock.patch('auto_forensicate.recipes.base.ProcessOutputArtifact._RunCommand') as patched_run:
131 | patched_run.return_value = self._SYSTEM_PROFILER_FAIL_STRING
132 | artifacts = sysinfo_recipe.GetArtifacts()
133 | self.assertEqual(len(artifacts), 2)
134 |
135 | artifact = artifacts[0]
136 | self.assertIsInstance(artifact, base.ProcessOutputArtifact)
137 | self.assertEqual(artifact.name, 'system_info.txt')
138 | self.assertEqual(artifact.remote_path, 'system_info.txt')
139 | artifact_content = artifact.OpenStream().read()
140 | self.assertEqual(artifact_content, self._SYSTEM_PROFILER_FAIL_STRING)
141 |
142 | def testGetArtifacts(self):
143 | sysinfo_recipe = sysinfo.SysinfoRecipe('sysinfo')
144 | # pylint: disable=protected-access
145 | sysinfo_recipe._platform = 'darwin'
146 | # pylint: disable=line-too-long
147 | with mock.patch('auto_forensicate.recipes.base.ProcessOutputArtifact._RunCommand') as patched_run:
148 | patched_run.return_value = self._SYSTEM_PROFILER_OUTPUT_STRING
149 | artifacts = sysinfo_recipe.GetArtifacts()
150 | self.assertEqual(len(artifacts), 2)
151 |
152 | artifact = artifacts[0]
153 | self.assertIsInstance(artifact, base.ProcessOutputArtifact)
154 | self.assertEqual(artifact.name, 'system_info.txt')
155 | self.assertEqual(artifact.remote_path, 'system_info.txt')
156 | artifact_content = artifact.OpenStream().read()
157 | self.assertEqual(artifact_content, self._SYSTEM_PROFILER_OUTPUT_STRING)
158 |
--------------------------------------------------------------------------------
/tests/uploader_tests.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright 2018 Google LLC
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # https://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 | """Tests for the uploader module."""
16 |
17 | from __future__ import unicode_literals
18 |
19 | import argparse
20 | from collections import namedtuple
21 | import json
22 | try:
23 | from BytesIO import BytesIO
24 | except ImportError:
25 | from io import BytesIO
26 | import tempfile
27 | import mmap
28 | import unittest
29 |
30 | import boto
31 | import mock
32 | import shutil
33 |
34 | from auto_forensicate import errors
35 | from auto_forensicate import uploader
36 | from auto_forensicate.recipes import base
37 | from auto_forensicate.recipes import disk
38 | from auto_forensicate.stamp import manager
39 |
40 | # pylint: disable=missing-docstring
41 | # pylint: disable=protected-access
42 |
43 |
44 | class FakeStamp(
45 | namedtuple('Stamp', [
46 | 'asset_tag',
47 | 'identifier',
48 | 'start_time'
49 | ])):
50 | pass
51 |
52 | FAKE_STAMP = FakeStamp(
53 | asset_tag='fake_asset_tag',
54 | identifier='fake_uuid',
55 | start_time='20171012-135619'
56 | )
57 |
58 | FAKE_STAMP_NO_ASSET = FakeStamp(
59 | asset_tag=None,
60 | identifier='fake_uuid',
61 | start_time='20171012-135619'
62 | )
63 |
64 |
65 | class FakeGCSUploader(uploader.GCSUploader):
66 | """Fake class for the GCSUploader."""
67 |
68 | def __init__(self, gcs_url):
69 | """Initializes the GCSUploader class.
70 |
71 | Args:
72 | gcs_url (str): the GCS url to the bucket and remote path.
73 | """
74 | super(FakeGCSUploader, self).__init__(
75 | gcs_url, 'fake_key.json', 'fake_clientid', FakeStampManager(),
76 | stamp=FAKE_STAMP)
77 | self._uploaded_streams = {}
78 |
79 | def _UploadStream(self, stream, remote_path, update_callback=None):
80 | """Fakes the uploading of a file object.
81 |
82 | This stores the content of the stream and remote_path in _uploaded_streams
83 | as a dict of {remote_path: stream_content}
84 |
85 | Args:
86 | stream (file): the file-like object pointing to data to upload.
87 | remote_path (str): the remote path to store the data to.
88 | update_callback (func): an optional function called as upload progresses.
89 | """
90 | self._uploaded_streams[remote_path] = stream.read().decode('utf-8')
91 |
92 |
93 | class FakeStampManager(manager.BaseStampManager):
94 |
95 | def GetStamp(self):
96 | return FakeStamp(
97 | asset_tag='fake_asset_tag',
98 | identifier='fake_uuid',
99 | start_time='20171012-135619')
100 |
101 |
102 | class LocalCopierTests(unittest.TestCase):
103 | """Tests for the LocalCopier class."""
104 |
105 | def setUp(self):
106 | self.temp_dir = tempfile.mkdtemp()
107 | super().setUp()
108 |
109 | def tearDown(self):
110 | shutil.rmtree(self.temp_dir)
111 | super().tearDown()
112 |
113 | @mock.patch.object(base.BaseArtifact, '_GetStream')
114 | def testUploadArtifact(self, patched_getstream):
115 | test_artifact = base.BaseArtifact('test_artifact')
116 | patched_getstream.return_value = BytesIO(b'fake_content')
117 |
118 | uploader_object = uploader.LocalCopier(
119 | self.temp_dir, FakeStampManager(), stamp=FAKE_STAMP)
120 |
121 | expected_artifact_path = (
122 | self.temp_dir+'/20171012-135619/fake_uuid/Base/test_artifact')
123 | expected_artifact_content = 'fake_content'
124 |
125 | expected_stamp_path = (
126 | self.temp_dir+'/20171012-135619/fake_uuid/stamp.json')
127 | expected_stamp_content = json.dumps(FAKE_STAMP._asdict())
128 |
129 | result_path = uploader_object.UploadArtifact(test_artifact)
130 |
131 | self.assertEqual(expected_artifact_path, result_path)
132 | with open(result_path, 'r') as artifact_file:
133 | self.assertEqual(expected_artifact_content, artifact_file.read())
134 |
135 | with open(expected_stamp_path, 'r') as stamp_file:
136 | self.assertEqual(expected_stamp_content, stamp_file.read())
137 |
138 |
139 | class LocalSplitterCopierTests(LocalCopierTests):
140 | """Tests for the LocalSplitterCopier class."""
141 |
142 | def setUp(self):
143 | super().setUp()
144 | self._copied_streams = {}
145 |
146 | def _AddUploadedData(self, stream, remote_path, update_callback=None):
147 | self._copied_streams[remote_path] = stream.read()
148 |
149 | @mock.patch.object(uploader.LocalSplitterCopier, '_MakeRemotePath')
150 | @mock.patch.object(uploader.LocalSplitterCopier, '_UploadStream')
151 | @mock.patch.object(disk.DiskArtifact, '_GetStream')
152 | def testUploadArtifact(
153 | self, patched_getstream, patched_uploadstream, patched_makeremotepath):
154 | """Tests that an artificact is correctly split in 5 different chunks."""
155 |
156 | patched_uploadstream.side_effect = self._AddUploadedData
157 | temp = tempfile.TemporaryFile()
158 | # Generating some data
159 | fake_data = bytes(range(0, 256))*1000
160 | temp.write(fake_data)
161 |
162 | test_artifact = disk.DiskArtifact(
163 | '/dev/sda', len(fake_data), use_dcfldd=False)
164 | patched_getstream.return_value = temp
165 |
166 | uploader_object = uploader.LocalSplitterCopier(
167 | '/fake_destination/', FakeStampManager(), slices=5)
168 | patched_makeremotepath.return_value = '/fake_destination/sda.image'
169 | uploader_object._stamp_uploaded = True
170 |
171 | uploader_object.UploadArtifact(test_artifact, update_callback=mock.Mock())
172 |
173 | results = self._copied_streams
174 | expected_slice_paths = [
175 | f'/fake_destination/sda.image_{x}'
176 | for x in range(0, len(results))]
177 | self.assertEqual(list(results), expected_slice_paths)
178 |
179 | concatenated_data = bytearray()
180 | for path in list(results):
181 | concatenated_data += results[path]
182 |
183 | self.assertEqual(concatenated_data, fake_data)
184 |
185 |
186 | class GCSUploaderTests(unittest.TestCase):
187 | """Tests for the GCSUploader class."""
188 |
189 | def setUp(self):
190 | self.gcs_bucket = 'bucket_name'
191 | self.gcs_path = 'some/where'
192 | self.gcs_url = 'gs://{0:s}/{1:s}'.format(self.gcs_bucket, self.gcs_path)
193 | super().setUp()
194 |
195 | def testMakeRemotePathNoAsset(self):
196 | uploader_object = uploader.GCSUploader(
197 | self.gcs_url, 'fake_key.json', 'fake_clientid', FakeStampManager(),
198 | stamp=FAKE_STAMP_NO_ASSET)
199 | remote_name = 'remote_file'
200 |
201 | expected_remote_path = (
202 | 'bucket_name/some/where/20171012-135619/fake_uuid/remote_file')
203 | remote_path = uploader_object._MakeRemotePath(remote_name)
204 | self.assertEqual(remote_path, expected_remote_path)
205 |
206 | def testMakeRemotePath(self):
207 | uploader_object = uploader.GCSUploader(
208 | self.gcs_url, 'fake_key.json', 'fake_clientid', FakeStampManager(),
209 | stamp=FAKE_STAMP)
210 | remote_name = 'remote_file'
211 |
212 | expected_remote_path = (
213 | 'bucket_name/some/where/20171012-135619/fake_uuid/'
214 | 'remote_file')
215 | remote_path = uploader_object._MakeRemotePath(remote_name)
216 | self.assertEqual(remote_path, expected_remote_path)
217 |
218 | def testSplitGCSUrl(self):
219 | self.gcs_url = 'gs://bucket_name/some/where'
220 | uploader_object = uploader.GCSUploader(
221 | self.gcs_url, 'fake_key.json', 'fake_clientid', FakeStampManager())
222 | expected_tuple = ('bucket_name', 'some/where')
223 | self.assertEqual(uploader_object._SplitGCSUrl(), expected_tuple)
224 |
225 | self.gcs_url = 'gs://bucket_name'
226 | uploader_object = uploader.GCSUploader(
227 | self.gcs_url, 'fake_key.json', 'fake_clientid', FakeStampManager())
228 | expected_tuple = ('bucket_name', '')
229 | self.assertEqual(uploader_object._SplitGCSUrl(), expected_tuple)
230 |
231 | self.gcs_url = 'gs://bucket_name/'
232 | uploader_object = uploader.GCSUploader(
233 | self.gcs_url, 'fake_key.json', 'fake_clientid', FakeStampManager())
234 | expected_tuple = ('bucket_name', '')
235 | self.assertEqual(uploader_object._SplitGCSUrl(), expected_tuple)
236 |
237 | self.gcs_url = 'invalid'
238 | uploader_object = uploader.GCSUploader(
239 | self.gcs_url, 'fake_key.json', 'fake_clientid', FakeStampManager())
240 | with self.assertRaisesRegex(
241 | argparse.ArgumentError, 'Invalid GCS URL \'{0:s}\''.format('invalid')):
242 | uploader_object._SplitGCSUrl()
243 |
244 | @mock.patch.object(base.BaseArtifact, '_GetStream')
245 | def testUploadArtifact(self, patched_getstream):
246 | test_artifact = base.BaseArtifact('test_artifact')
247 | patched_getstream.return_value = BytesIO(b'fake_content')
248 |
249 | uploader_object = FakeGCSUploader(self.gcs_url)
250 |
251 | expected_resultpath = (
252 | 'bucket_name/some/where/20171012-135619/fake_uuid/Base/'
253 | 'test_artifact')
254 | expected_uploaded_streams = {
255 | ('bucket_name/some/where/20171012-135619/fake_uuid/'
256 | 'Base/test_artifact'): 'fake_content',
257 | ('bucket_name/some/where/20171012-135619/fake_uuid/'
258 | 'stamp.json'): json.dumps(FAKE_STAMP._asdict())
259 | }
260 |
261 | result_path = uploader_object.UploadArtifact(test_artifact)
262 | self.assertEqual(result_path, expected_resultpath)
263 | self.assertEqual(
264 | uploader_object._uploaded_streams, expected_uploaded_streams)
265 |
266 | @mock.patch.object(base.BaseArtifact, '_GetStream')
267 | @mock.patch.object(boto, 'storage_uri')
268 | def testFailUploadRetryWorthy(self, patched_storage, patched_getstream):
269 | patched_getstream.return_value = BytesIO(b'fake_content')
270 | patched_storage.side_effect = boto.exception.GSDataError('boom')
271 |
272 | test_artifact = base.BaseArtifact('test_artifact')
273 |
274 | uploader_object = uploader.GCSUploader(
275 | 'gs://fake_bucket/', 'no_keyfile', 'client_id', FakeStampManager())
276 | uploader_object._boto_configured = True
277 |
278 | with self.assertRaises(errors.RetryableError):
279 | uploader_object._UploadStream(
280 | test_artifact.OpenStream(), 'gs://fake_bucket/remote/path')
281 |
282 | @mock.patch.object(base.BaseArtifact, '_GetStream')
283 | @mock.patch.object(boto, 'storage_uri')
284 | def testFailUploadNoRetry(self, patched_storage, patched_getstream):
285 | patched_getstream.return_value = BytesIO(b'fake_content')
286 | patched_storage.side_effect = errors.ForensicateError('random_error')
287 |
288 | test_artifact = base.BaseArtifact('test_artifact')
289 |
290 | uploader_object = uploader.GCSUploader(
291 | 'gs://fake_bucket/', 'no_keyfile', 'client_id', FakeStampManager())
292 | uploader_object._boto_configured = True
293 |
294 | with self.assertRaises(errors.ForensicateError):
295 | uploader_object._UploadStream(
296 | test_artifact.OpenStream(), 'gs://fake_bucket/remote/path')
297 |
298 |
299 | class GCSSplitterUploaderTests(GCSUploaderTests):
300 | """Tests for the GCSSplitterUploader class."""
301 |
302 | def setUp(self):
303 | super().setUp()
304 | self._uploaded_streams = {}
305 |
306 | def _AddUploadedData(self, stream, remote_path, update_callback=None):
307 | self._uploaded_streams[remote_path] = stream.read()
308 |
309 | @mock.patch.object(uploader.GCSSplitterUploader, '_UploadStream')
310 | @mock.patch.object(disk.DiskArtifact, '_GetStream')
311 | def testUploadArtifact(self, patched_getstream, patched_uploadstream):
312 | """Tests that an artificact is correctly split in 5 different chunks."""
313 |
314 | patched_uploadstream.side_effect = self._AddUploadedData
315 | temp = tempfile.TemporaryFile()
316 | # Generating some data
317 | fake_data = bytes(range(0, 256))*1000
318 | temp.write(fake_data)
319 |
320 | test_artifact = disk.DiskArtifact(
321 | '/dev/sda', len(fake_data), use_dcfldd=False)
322 | patched_getstream.return_value = temp
323 |
324 | uploader_object = uploader.GCSSplitterUploader(
325 | 'gs://fake_bucket/', 'no_keyfile', 'client_id', FakeStampManager(),
326 | slices=5)
327 | uploader_object._boto_configured = True
328 | uploader_object._stamp_uploaded = True
329 |
330 | uploader_object.UploadArtifact(test_artifact, update_callback=mock.Mock())
331 |
332 | results = self._uploaded_streams
333 | expected_slice_paths = [
334 | f'fake_bucket/20171012-135619/fake_uuid/Disks/sda.image_{x}'
335 | for x in range(0, len(results))]
336 | self.assertEqual(list(results), expected_slice_paths)
337 |
338 | concatenated_data = bytearray()
339 | for path in list(results):
340 | concatenated_data += results[path]
341 |
342 | self.assertEqual(concatenated_data, fake_data)
343 |
--------------------------------------------------------------------------------
/tools/commons.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | # Copyright 2018 Google LLC
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # https://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 | #
16 | # This files contains a list of functions and variables to be used by other
17 | # scripts in this directory.
18 |
19 | readonly CURRENT_DIR=$(pwd)
20 | readonly GIFT_USERNAME="xubuntu"
21 | readonly REMASTER_WORKDIR_NAME="remaster_workdir"
22 | readonly REMASTER_WORKDIR_PATH=$(readlink -m "${CURRENT_DIR}/${REMASTER_WORKDIR_NAME}")
23 | readonly REMASTER_SCRIPTS_DIR="${CODE_DIR}/remaster_scripts"
24 | readonly FORENSICATE_SCRIPT_NAME="call_auto_forensicate.sh"
25 | readonly FORENSICATE_SCRIPT_PATH="${REMASTER_SCRIPTS_DIR}/${FORENSICATE_SCRIPT_NAME}"
26 | readonly AUTO_FORENSIC_SCRIPT_NAME="auto_acquire.py"
27 |
28 | # Make sure the provided service account credentials file exists and is valid
29 | function assert_sa_json_path {
30 | readonly local sa_json_path="${1}"
31 | if [[ ! -f "${sa_json_path}" ]] ; then
32 | die "${sa_json_path} does not exist"
33 | fi
34 | if ! grep -q '"type": "service_account",' "${sa_json_path}" ; then
35 | die "${sa_json_path} does not look like a valid service account credentials JSON file"
36 | fi
37 | }
38 |
39 | # Prints an error and terminates execution.
40 | #
41 | # Arguments:
42 | # Message to display, as string.
43 | function die {
44 | printf 'ERROR: %s\n' "$1" >&2
45 | exit 1
46 | }
47 |
48 | # Verifies a package has been installed with DPKG. Exits if package name is
49 | # missing.
50 | #
51 | # Arguments:
52 | # Name of the package.
53 | function check_packages {
54 | local pkg="$1"
55 | if ! dpkg --get-selections | grep -qE "^${pkg}[[:space:]]*install$"; then
56 | die "Please install package ${pkg}"
57 | fi
58 | }
59 |
60 | # Verifies that an option is not empty
61 | #
62 | # Arguments:
63 | # Option name, as string.
64 | # Option value, as string.
65 | function assert_option_argument {
66 | if [[ -z $1 ]]; then
67 | die "$2 requires a non-empty option argument"
68 | fi
69 | }
70 |
--------------------------------------------------------------------------------
/tools/remaster_scripts/call_auto_forensicate.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | # Copyright 2018 Google LLC
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # https://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 |
16 | # Base forensication script.
17 | # This is customized by the master remaster script.
18 |
19 | # First, check that we have internet
20 | wget -q --spider http://www.google.com
21 | if [[ $? -ne 0 ]]; then
22 | echo "ERROR: No internet connectivity"
23 | echo "Please make sure the system is connected to the internet"
24 | exit 1
25 | fi
26 |
27 | source config.sh
28 |
29 |
30 | # For some reason letting this package be installed with setup.py with
31 | # call_autofirensicate.sh can sometimes exhaust all memory, doing it here.
32 | sudo pip3 install grpcio
33 |
34 | # Make sure have the latest version of the auto_forensicate module
35 | git clone https://github.com/google/GiftStick
36 | cd GiftStick
37 | sudo pip install .
38 |
39 | # Apply patch for boto py3 compatibility
40 | # See https://github.com/boto/boto/pull/3699
41 | boto_dir=$(python -c "import boto; print(boto.__path__[0])")
42 |
43 | if grep -qe "sendall.*encode" "${boto_dir}/connection.py" ; then
44 | echo "skipping patching of ${boto_dir}/connection.py"
45 | else
46 | echo "patching ${boto_dir}/connection.py"
47 | sudo patch -p0 "${boto_dir}/connection.py" config/patches/boto_pr3561_connection.py.patch
48 | fi
49 |
50 | if grep -qe "send.*encode" "${boto_dir}/s3/key.py" ; then
51 | echo "skipping patching of ${boto_dir}/s3/key.py"
52 | else
53 | echo "patching ${boto_dir}/s3/key.py"
54 | sudo patch -p0 "${boto_dir}/s3/key.py" config/patches/boto_pr3561_key.py.patch
55 | fi
56 |
57 |
58 | # Chipsec needs setuptools > 62 to install
59 | sudo pip install --ignore-installed setuptools>=62.4
60 | # We need to build a module for this system, this can't be installed before
61 | # booting.
62 | sudo pip install chipsec
63 |
64 | sudo "${AUTO_FORENSIC_SCRIPT_NAME}" \
65 | --gs_keyfile="${GCS_SA_KEY_FILE}" \
66 | --logging stdout \
67 | --logging stackdriver \
68 | --log_progress \
69 | --acquire all \
70 | ${EXTRA_OPTIONS} "${GCS_REMOTE_URL}/"
71 |
--------------------------------------------------------------------------------
/tools/remaster_scripts/e2e/post-install-root.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | # Copyright 2018 Google LLC
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # https://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 |
16 | # This script is being run chrooted in the ubuntu live CD ISO image
17 |
18 | export DEBIAN_FRONTEND=noninteractive
19 |
20 | function ubuntu_remove_packages {
21 | local PKG=( ubiquity udisks2 ) # udisks2 is responsible for automounting
22 | if [[ ${DISTRIB_CODENAME} == "trusty" ]]; then
23 | PKG+=( friends friends-dispatcher friends-facebook friends-twitter )
24 | fi
25 | apt -y remove "${BAD_PKG[@]}"
26 | }
27 |
28 | function install_forensication_tools {
29 | readonly local CHIPSEC_PKG=( python3-dev libffi-dev build-essential gcc nasm )
30 | readonly local FORENSIC_PKG=( dcfldd )
31 |
32 | # install common utils
33 | apt-get -y install "${FORENSIC_PKG[@]}" "${CHIPSEC_PKG[@]}"
34 | }
35 |
36 | function install_basic_pkg {
37 | readonly local COMMON_UTILS=( git jq python3-pip pv openssh-server zenity )
38 |
39 | apt-get -y update
40 | apt-get -y install "${COMMON_UTILS[@]}"
41 |
42 | echo "PasswordAuthentication no" >> /etc/ssh/sshd_config
43 |
44 | # Force python3
45 | update-alternatives --install /usr/bin/python python /usr/bin/python3 1
46 | update-alternatives --install /usr/bin/pip pip /usr/bin/pip3 1
47 |
48 | }
49 |
50 | function ubuntu_fix_systemd {
51 | # By default, with systemd, /etc/resolv.conf is a link to
52 | # /run/systemd/resolve/resolve.conf, which is only created when
53 | # systemd-resoved has successfully started.
54 | # Since we're going to chroot, the link will be broken and we'll get no DNS
55 | # resolution. So we make our own temporary static resolv.conf here.
56 | if [[ -L /etc/resolv.conf ]]; then
57 | rm /etc/resolv.conf
58 | echo "nameserver 8.8.8.8" > /etc/resolv.conf.static
59 | ln -s /etc/resolv.conf.static /etc/resolv.conf
60 | fi
61 |
62 | # Systemd fails if DNSSEC fails by default. So we disable that
63 | if [[ -f /etc/systemd/resolved.conf ]]; then
64 | sed -i 's/^#DNSSEC=.*/DNSSEC=no/' /etc/systemd/resolved.conf
65 | fi
66 |
67 | echo "force-confold" >> /etc/dpkg/dpkg.cfg
68 | echo "force-confdef" >> /etc/dpkg/dpkg.cfg
69 |
70 | apt-get -y update
71 | apt-get -y install libnss-resolve
72 | }
73 |
74 | function ignore_chipsec_logs {
75 | # Chipsec generates a ton of logs which can fill up the local storage
76 | echo -e ":msg, contains, \"IOCTL_RDMMIO\" stop\n\
77 | :msg, contains, \"IOCTL_WRMMIO\" stop\n\
78 | & stop" > /etc/rsyslog.d/00-chipsec.conf
79 | }
80 |
81 | # Comment out cdrom repo
82 | sed -e '/cdrom/ s/^#*/#/' -i /etc/apt/sources.list
83 |
84 | source /etc/lsb-release
85 |
86 | if ! [[ "xenial" == "${DISTRIB_CODENAME}" ]]; then
87 | ubuntu_fix_systemd
88 | fi
89 |
90 | ignore_chipsec_logs
91 | install_basic_pkg
92 | install_forensication_tools
93 | ubuntu_remove_packages
94 |
--------------------------------------------------------------------------------
/tools/remaster_scripts/e2e/post-install-user.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | # Copyright 2018 Google LLC
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # https://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 |
16 | # Does customization for the xubuntu user
17 | # The script is run as sudoer on your workstation, permissions are fixed
18 | # later.
19 |
20 | function add_test_ssh_key {
21 | sudo mkdir .ssh
22 | echo "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIHx26PEzEj5WIZDP/Actr4LruAiIFbVP4hS8ANBrcnnH e2etests" | sudo tee -a .ssh/authorized_keys
23 | }
24 |
25 | add_test_ssh_key
26 |
--------------------------------------------------------------------------------
/tools/remaster_scripts/post-install-root.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | # Copyright 2018 Google LLC
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # https://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 |
16 | # This script is being run chrooted in the ubuntu live CD ISO image
17 |
18 | export DEBIAN_FRONTEND=noninteractive
19 |
20 | function ubuntu_remove_packages {
21 | local PKG=( ubiquity udisks2 ) # udisks2 is responsible for automounting
22 | if [[ ${DISTRIB_CODENAME} == "trusty" ]]; then
23 | PKG+=( friends friends-dispatcher friends-facebook friends-twitter )
24 | fi
25 | apt-get -y remove "${BAD_PKG[@]}"
26 | }
27 |
28 | function install_forensication_tools {
29 | readonly local CHIPSEC_PKG=( python3-dev libffi-dev build-essential gcc nasm )
30 | readonly local FORENSIC_PKG=( dcfldd )
31 |
32 | # install common utils
33 | apt-get -y install "${FORENSIC_PKG[@]}" "${CHIPSEC_PKG[@]}"
34 | }
35 |
36 | function install_basic_pkg {
37 | readonly local COMMON_UTILS=( git jq python3-pip pv zenity vim )
38 | readonly local WIRELESS_PKG=( firmware-b43-installer bcmwl-kernel-source )
39 |
40 | apt-get -y update
41 | apt-get -y install "${COMMON_UTILS[@]}" "${WIRELESS_PKG[@]}"
42 |
43 | echo "PasswordAuthentication no" >> /etc/ssh/sshd_config
44 |
45 | # Force python3
46 | update-alternatives --install /usr/bin/python python /usr/bin/python3 1
47 | update-alternatives --install /usr/bin/pip pip /usr/bin/pip3 1
48 |
49 | }
50 |
51 | function ubuntu_fix_systemd {
52 | # By default, with systemd, /etc/resolv.conf is a link to
53 | # /run/systemd/resolve/resolve.conf, which is only created when
54 | # systemd-resoved has successfully started.
55 | # Since we're going to chroot, the link will be broken and we'll get no DNS
56 | # resolution. So we make our own temporary static resolv.conf here.
57 | if [[ -L /etc/resolv.conf ]]; then
58 | rm /etc/resolv.conf
59 | # From https://developers.google.com/speed/public-dns/docs/using#addresses
60 | echo "nameserver 8.8.8.8" > /etc/resolv.conf.static
61 | echo "nameserver 2001:4860:4860::8888" >> /etc/resolv.conf.static
62 | ln -s /etc/resolv.conf.static /etc/resolv.conf
63 | fi
64 |
65 | # Systemd fails if DNSSEC fails by default. So we disable that
66 | if [[ -f /etc/systemd/resolved.conf ]]; then
67 | sed -i 's/^#DNSSEC=.*/DNSSEC=no/' /etc/systemd/resolved.conf
68 | fi
69 |
70 | echo "force-confold" >> /etc/dpkg/dpkg.cfg
71 | echo "force-confdef" >> /etc/dpkg/dpkg.cfg
72 |
73 | apt-get -y update
74 | apt-get -y install libnss-resolve
75 | }
76 |
77 | function ubuntu_fix_mbp {
78 |
79 | # This is installing SPI drivers for the keyboard & mousepads on
80 | # MacBook 2016 (with touchbar)
81 | apt-get -y install dkms
82 | git clone https://github.com/roadrunner2/macbook12-spi-driver.git /usr/src/applespi-0.1
83 |
84 | # We need to install for the kernel of the OS we're chrooted in, not the one
85 | # that's currently running on our workstation.
86 | # Ubuntu Live CD should only have one kernel installed, so this should work.
87 | dkms install -m applespi -v 0.1 -k "$(basename /lib/modules/*)"
88 | echo -e "\napplespi\nintel_lpss_pci\nspi_pxa2xx_platform\nspi_pxa2xx_pci" >> /etc/initramfs-tools/modules
89 |
90 | # This is for the apple_ib_tb module
91 | echo "industrialio_triggered_buffer" >> /etc/initramfs-tools/modules
92 | update-initramfs -u
93 | }
94 |
95 | function ignore_chipsec_logs {
96 | # Chipsec generates a ton of logs which can fill up the local storage
97 | echo -e ":msg, contains, \"IOCTL_RDMMIO\" stop\n\
98 | :msg, contains, \"IOCTL_WRMMIO\" stop\n\
99 | & stop" > /etc/rsyslog.d/00-chipsec.conf
100 | }
101 |
102 | # Comment out cdrom repo
103 | sed -e '/cdrom/ s/^#*/#/' -i /etc/apt/sources.list
104 |
105 | source /etc/lsb-release
106 |
107 | if ! [[ "xenial" == "${DISTRIB_CODENAME}" ]]; then
108 | ubuntu_fix_systemd
109 | fi
110 |
111 | ignore_chipsec_logs
112 | install_basic_pkg
113 | install_forensication_tools
114 | ubuntu_remove_packages
115 | ubuntu_fix_mbp
116 |
--------------------------------------------------------------------------------
/tools/remaster_scripts/post-install-user.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | # Copyright 2018 Google LLC
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # https://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 |
16 | # Does customization for the xubuntu user
17 | # The script is run as sudoer on your workstation, permissions are fixed
18 | # later.
19 |
20 |
21 | function user_customise_desktop {
22 |
23 | sudo mkdir -p Desktop
24 |
25 | sudo mkdir -p .config/xfce4/xfconf/xfce-perchannel-xml
26 | cat << EOXFCONF | sudo tee ".config/xfce4/xfconf/xfce-perchannel-xml/xfce4-desktop.xml"
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
38 |
39 |
40 | EOXFCONF
41 |
42 |
43 | # Install click-able shortcut on Desktop
44 | cat << EOCHIPSHORT | sudo tee "Desktop/auto_forensicate.desktop" > /dev/null
45 | [Desktop Entry]
46 | Version=1.0
47 | Type=Application
48 | Name=Forensicate!
49 | Comment=Runs forensics acquisition and upload to GCS
50 | Exec=bash -c 'sudo bash call_auto_forensicate.sh; $SHELL'
51 | Icon=applications-utilities
52 | Terminal=true
53 | StartupNotify=false
54 | EOCHIPSHORT
55 |
56 | sudo chmod a+x "Desktop/auto_forensicate.desktop"
57 |
58 | }
59 |
60 | function user_add_setup_script {
61 | # Sets up a script to be run after GiftStick has booted and user is logged in.
62 | # This currently adds:
63 | # - A hotkey to start the acquisition script with no mouse interaction:
64 | # ctrl + alt + f and ctrl + shift +y
65 | readonly local SENTINEL=".gift_is_setup"
66 | readonly local SETUP_SCRIPT="gift_setup.sh"
67 | cat << EOSETUPSCRIPT | sudo tee ${SETUP_SCRIPT} > /dev/null
68 | #!/bin/bash
69 |
70 | if [[ ! -e \$HOME/${SENTINEL} ]]
71 | then
72 | xfconf-query --create --channel xfce4-keyboard-shortcuts \
73 | --property "/commands/custom/y" --type string \
74 | --set "xfce4-terminal -e \\"bash -c 'sudo bash \$HOME/call_auto_forensicate.sh ; /bin/bash'\\""
75 | xfconf-query --create --channel xfce4-keyboard-shortcuts \
76 | --property "/commands/custom/f" --type string \
77 | --set "xfce4-terminal -e \\"bash -c 'sudo bash \$HOME/call_auto_forensicate.sh ; /bin/bash'\\""
78 |
79 | touch \$HOME/${SENTINEL}
80 | fi
81 | EOSETUPSCRIPT
82 |
83 | sudo mkdir -p .config/autostart
84 |
85 | cat << EOSTARTUPSCRIPT | sudo tee .config/autostart/gift_setup.desktop > /dev/null
86 | # This ensures the GiftStick setup script is run when the user logs in.
87 | [Desktop Entry]
88 | Name=GiftStick Setup
89 | Type=Application
90 | Exec=/bin/sh -c "bash \$HOME/${SETUP_SCRIPT}"
91 | EOSTARTUPSCRIPT
92 | }
93 |
94 |
95 | user_add_setup_script
96 |
97 | user_customise_desktop
98 |
--------------------------------------------------------------------------------