├── ES_templates_dumpzilla
│   ├── addinfo.json
│   ├── addons.json
│   ├── bookmarks.json
│   ├── cert_override.json
│   ├── cookies.json
│   ├── downloads_dir.json
│   ├── downloads_history.json
│   ├── exceptions.json
│   ├── extensions.json
│   ├── forms.json
│   ├── history.json
│   ├── offlinecache.json
│   ├── passwords.json
│   ├── permissions.json
│   ├── session.json
│   └── thumbnails.json
├── README.md
├── dumpzilla
└── dumpzilla.py
/ES_templates_dumpzilla/addinfo.json:
--------------------------------------------------------------------------------
1 | {
2 | "template" : "dumpzilla_addinfo*",
3 | "settings" : {
4 | "index" : {
5 | "refresh_interval" : "5s"
6 | }
7 | },
8 | "mappings" : {
9 | "_default_" : {
10 | "dynamic_templates" : [ {
11 | "message_field" : {
12 | "mapping" : {
13 | "index" : "analyzed",
14 | "omit_norms" : true,
15 | "type" : "string"
16 | },
17 | "match_mapping_type" : "string",
18 | "match" : "message"
19 | }
20 | }, {
21 | "string_fields" : {
22 | "mapping" : {
23 | "index" : "not_analyzed",
24 | "type" : "string"
25 | },
26 | "match_mapping_type" : "string",
27 | "match" : "*"
28 | }
29 | } ],
30 | "properties" : {
31 | "0-URL/PATH" : {
32 | "index" : "not_analyzed",
33 | "type" : "string"
34 | },
35 | "@version" : {
36 | "index" : "not_analyzed",
37 | "type" : "string"
38 | }
39 | }
40 | }
41 | }
42 | }
43 |
44 |
--------------------------------------------------------------------------------
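Note: the remaining templates below follow this same pattern; only the template name and the mapped properties change. A minimal sketch for registering one of them, assuming an Elasticsearch 1.x/2.x node on localhost:9200 (the "_default_" mapping and "string" type used here were removed in later major versions):

    curl -XPUT 'http://localhost:9200/_template/dumpzilla_addinfo' \
         --data-binary @ES_templates_dumpzilla/addinfo.json

The same call, with the template name and file swapped, loads each of the other templates.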
/ES_templates_dumpzilla/addons.json:
--------------------------------------------------------------------------------
1 | {
2 | "template" : "dumpzilla_addons*",
3 | "settings" : {
4 | "index" : {
5 | "refresh_interval" : "5s"
6 | }
7 | },
8 | "mappings" : {
9 | "_default_" : {
10 | "dynamic_templates" : [ {
11 | "message_field" : {
12 | "mapping" : {
13 | "index" : "analyzed",
14 | "omit_norms" : true,
15 | "type" : "string"
16 | },
17 | "match_mapping_type" : "string",
18 | "match" : "message"
19 | }
20 | }, {
21 | "string_fields" : {
22 | "mapping" : {
23 | "index" : "not_analyzed",
24 | "type" : "string"
25 | },
26 | "match_mapping_type" : "string",
27 | "match" : "*"
28 | }
29 | } ],
30 | "properties" : {
31 | "3-Homepage URL" : {
32 | "index" : "not_analyzed",
33 | "type" : "string"
34 | },
35 | "2-Creator URL" : {
36 | "index" : "not_analyzed",
37 | "type" : "string"
38 | },
39 | "0-Name" : {
40 | "index" : "not_analyzed",
41 | "type" : "string"
42 | },
43 | "1-Version" : {
44 | "index" : "not_analyzed",
45 | "type" : "string"
46 | },
47 | "@version" : {
48 | "index" : "not_analyzed",
49 | "type" : "string"
50 | }
51 | }
52 | }
53 | }
54 | }
55 |
--------------------------------------------------------------------------------
/ES_templates_dumpzilla/bookmarks.json:
--------------------------------------------------------------------------------
1 | {
2 | "template" : "dumpzilla_bookmarks*",
3 | "settings" : {
4 | "index" : {
5 | "refresh_interval" : "5s"
6 | }
7 | },
8 | "mappings" : {
9 | "_default_" : {
10 | "dynamic_templates" : [ {
11 | "message_field" : {
12 | "mapping" : {
13 | "index" : "analyzed",
14 | "omit_norms" : true,
15 | "type" : "string"
16 | },
17 | "match_mapping_type" : "string",
18 | "match" : "message"
19 | }
20 | }, {
21 | "string_fields" : {
22 | "mapping" : {
23 | "index" : "not_analyzed",
24 | "type" : "string"
25 | },
26 | "match_mapping_type" : "string",
27 | "match" : "*"
28 | }
29 | } ],
30 | "properties" : {
31 | "1-URL" : {
32 | "index" : "not_analyzed",
33 | "type" : "string"
34 | },
35 | "0-Title" : {
36 | "index" : "not_analyzed",
37 | "type" : "string"
38 | },
39 | "@version" : {
40 | "index" : "not_analyzed",
41 | "type" : "string"
42 | },
43 | "2-Creation Time" : {
44 | "format" : "yyyy-MM-dd HH:mm:ss",
45 | "type" : "date"
46 | },
47 | "3-Last Modified" : {
48 | "format" : "yyyy-MM-dd HH:mm:ss",
49 | "type" : "date"
50 | }
51 | }
52 | }
53 | }
54 | }
55 |
--------------------------------------------------------------------------------
/ES_templates_dumpzilla/cert_override.json:
--------------------------------------------------------------------------------
1 | {
2 | "template" : "dumpzilla_cert_override*",
3 | "settings" : {
4 | "index" : {
5 | "refresh_interval" : "5s"
6 | }
7 | },
8 | "mappings" : {
9 | "_default_" : {
10 | "dynamic_templates" : [ {
11 | "message_field" : {
12 | "mapping" : {
13 | "index" : "analyzed",
14 | "omit_norms" : true,
15 | "type" : "string"
16 | },
17 | "match_mapping_type" : "string",
18 | "match" : "message"
19 | }
20 | }, {
21 | "string_fields" : {
22 | "mapping" : {
23 | "index" : "not_analyzed",
24 | "type" : "string"
25 | },
26 | "match_mapping_type" : "string",
27 | "match" : "*"
28 | }
29 | } ],
30 | "properties" : {
31 | "0-Site" : {
32 | "index" : "not_analyzed",
33 | "type" : "string"
34 | },
35 | "1-Hash Algorithm" : {
36 | "index" : "not_analyzed",
37 | "type" : "string"
38 | },
39 | "2-Data" : {
40 | "index" : "not_analyzed",
41 | "type" : "string"
42 | },
43 | "@version" : {
44 | "index" : "not_analyzed",
45 | "type" : "string"
46 | }
47 | }
48 | }
49 | }
50 | }
51 |
--------------------------------------------------------------------------------
/ES_templates_dumpzilla/cookies.json:
--------------------------------------------------------------------------------
1 | {
2 | "template" : "dumpzilla_cookies*",
3 | "settings" : {
4 | "index" : {
5 | "refresh_interval" : "5s"
6 | }
7 | },
8 | "mappings" : {
9 | "_default_" : {
10 | "dynamic_templates" : [ {
11 | "message_field" : {
12 | "mapping" : {
13 | "index" : "analyzed",
14 | "omit_norms" : true,
15 | "type" : "string"
16 | },
17 | "match_mapping_type" : "string",
18 | "match" : "message"
19 | }
20 | }, {
21 | "string_fields" : {
22 | "mapping" : {
23 | "index" : "not_analyzed",
24 | "type" : "string"
25 | },
26 | "match_mapping_type" : "string",
27 | "match" : "*"
28 | }
29 | } ],
30 | "properties" : {
31 | "1-Host" : {
32 | "index" : "not_analyzed",
33 | "type" : "string"
34 | },
35 | "2-Name" : {
36 | "index" : "not_analyzed",
37 | "type" : "string"
38 | },
39 | "4-Path" : {
40 | "index" : "not_analyzed",
41 | "type" : "string"
42 | },
43 | "3-Value" : {
44 | "index" : "not_analyzed",
45 | "type" : "string"
46 | },
47 | "8-Secure" : {
48 | "index" : "not_analyzed",
49 | "type" : "string"
50 | },
51 | "9-HttpOnly" : {
52 | "index" : "not_analyzed",
53 | "type" : "string"
54 | },
55 | "@version" : {
56 | "index" : "not_analyzed",
57 | "type" : "string"
58 | },
59 | "5-Expiry" : {
60 | "format" : "yyyy-MM-dd HH:mm:ss",
61 | "type" : "date"
62 | },
63 | "7-Creation Time" : {
64 | "format" : "yyyy-MM-dd HH:mm:ss",
65 | "type" : "date"
66 | },
67 | "6-Last Access" : {
68 | "format" : "yyyy-MM-dd HH:mm:ss",
69 | "type" : "date"
70 | }
71 | }
72 | }
73 | }
74 | }
75 |
--------------------------------------------------------------------------------
/ES_templates_dumpzilla/downloads_dir.json:
--------------------------------------------------------------------------------
1 | {
2 | "template" : "dumpzilla_downloads_dir*",
3 | "settings" : {
4 | "index" : {
5 | "refresh_interval" : "5s"
6 | }
7 | },
8 | "mappings" : {
9 | "_default_" : {
10 | "dynamic_templates" : [ {
11 | "message_field" : {
12 | "mapping" : {
13 | "index" : "analyzed",
14 | "omit_norms" : true,
15 | "type" : "string"
16 | },
17 | "match_mapping_type" : "string",
18 | "match" : "message"
19 | }
20 | }, {
21 | "string_fields" : {
22 | "mapping" : {
23 | "index" : "not_analyzed",
24 | "type" : "string"
25 | },
26 | "match_mapping_type" : "string",
27 | "match" : "*"
28 | }
29 | } ],
30 | "properties" : {
31 | "0-Name" : {
32 | "index" : "not_analyzed",
33 | "type" : "string"
34 | },
35 | "@version" : {
36 | "index" : "not_analyzed",
37 | "type" : "string"
38 | },
39 | "1-Last date" : {
40 | "format" : "yyyy-MM-dd HH:mm:ss",
41 | "type" : "date"
42 | }
43 | }
44 | }
45 | }
46 | }
47 |
--------------------------------------------------------------------------------
/ES_templates_dumpzilla/downloads_history.json:
--------------------------------------------------------------------------------
1 | {
2 | "template" : "dumpzilla_downloads_history*",
3 | "settings" : {
4 | "index" : {
5 | "refresh_interval" : "5s"
6 | }
7 | },
8 | "mappings" : {
9 | "_default_" : {
10 | "dynamic_templates" : [ {
11 | "message_field" : {
12 | "mapping" : {
13 | "index" : "analyzed",
14 | "omit_norms" : true,
15 | "type" : "string"
16 | },
17 | "match_mapping_type" : "string",
18 | "match" : "message"
19 | }
20 | }, {
21 | "string_fields" : {
22 | "mapping" : {
23 | "index" : "not_analyzed",
24 | "type" : "string"
25 | },
26 | "match_mapping_type" : "string",
27 | "match" : "*"
28 | }
29 | } ],
30 | "properties" : {
31 | "1-URL" : {
32 | "index" : "not_analyzed",
33 | "type" : "string"
34 | },
35 | "2-Name" : {
36 | "index" : "not_analyzed",
37 | "type" : "string"
38 | },
39 | "@version" : {
40 | "index" : "not_analyzed",
41 | "type" : "string"
42 | },
43 | "0-Date" : {
44 | "format" : "yyyy-MM-dd HH:mm:ss",
45 | "type" : "date"
46 | }
47 | }
48 | }
49 | }
50 | }
51 |
--------------------------------------------------------------------------------
/ES_templates_dumpzilla/exceptions.json:
--------------------------------------------------------------------------------
1 | {
2 | "template" : "dumpzilla_exceptions*",
3 | "settings" : {
4 | "index" : {
5 | "refresh_interval" : "5s"
6 | }
7 | },
8 | "mappings" : {
9 | "_default_" : {
10 | "dynamic_templates" : [ {
11 | "message_field" : {
12 | "mapping" : {
13 | "index" : "analyzed",
14 | "omit_norms" : true,
15 | "type" : "string"
16 | },
17 | "match_mapping_type" : "string",
18 | "match" : "message"
19 | }
20 | }, {
21 | "string_fields" : {
22 | "mapping" : {
23 | "index" : "not_analyzed",
24 | "type" : "string"
25 | },
26 | "match_mapping_type" : "string",
27 | "match" : "*"
28 | }
29 | } ],
30 | "properties" : {
31 | "0-Exception Web" : {
32 | "index" : "not_analyzed",
33 | "type" : "string"
34 | },
35 | "@version" : {
36 | "index" : "not_analyzed",
37 | "type" : "string"
38 | }
39 | }
40 | }
41 | }
42 | }
43 |
44 |
--------------------------------------------------------------------------------
/ES_templates_dumpzilla/extensions.json:
--------------------------------------------------------------------------------
1 | {
2 | "template" : "dumpzilla_extensions*",
3 | "settings" : {
4 | "index" : {
5 | "refresh_interval" : "5s"
6 | }
7 | },
8 | "mappings" : {
9 | "_default_" : {
10 | "dynamic_templates" : [ {
11 | "message_field" : {
12 | "mapping" : {
13 | "index" : "analyzed",
14 | "omit_norms" : true,
15 | "type" : "string"
16 | },
17 | "match_mapping_type" : "string",
18 | "match" : "message"
19 | }
20 | }, {
21 | "string_fields" : {
22 | "mapping" : {
23 | "index" : "not_analyzed",
24 | "type" : "string"
25 | },
26 | "match_mapping_type" : "string",
27 | "match" : "*"
28 | }
29 | } ],
30 | "properties" : {
31 | "4-Version" : {
32 | "index" : "not_analyzed",
33 | "type" : "string"
34 | },
35 | "3-Descriptor" : {
36 | "index" : "not_analyzed",
37 | "type" : "string"
38 | },
39 | "5-Release" : {
40 | "index" : "not_analyzed",
41 | "type" : "string"
42 | },
43 | "2-Id" : {
44 | "index" : "not_analyzed",
45 | "type" : "string"
46 | },
47 | "0-Name" : {
48 | "index" : "not_analyzed",
49 | "type" : "string"
50 | },
51 | "1-Type" : {
52 | "index" : "not_analyzed",
53 | "type" : "string"
54 | },
55 | "8-Active" : {
56 | "index" : "not_analyzed",
57 | "type" : "string"
58 | },
59 | "@version" : {
60 | "index" : "not_analyzed",
61 | "type" : "string"
62 | },
63 | "6-Install Date" : {
64 | "format" : "yyyy-MM-dd HH:mm:ss",
65 | "type" : "date"
66 | },
67 | "7-Update Date" : {
68 | "format" : "yyyy-MM-dd HH:mm:ss",
69 | "type" : "date"
70 | }
71 | }
72 | }
73 | }
74 | }
75 |
--------------------------------------------------------------------------------
/ES_templates_dumpzilla/forms.json:
--------------------------------------------------------------------------------
1 | {
2 | "template" : "dumpzilla_forms*",
3 | "settings" : {
4 | "index" : {
5 | "refresh_interval" : "5s"
6 | }
7 | },
8 | "mappings" : {
9 | "_default_" : {
10 | "dynamic_templates" : [ {
11 | "message_field" : {
12 | "mapping" : {
13 | "index" : "analyzed",
14 | "omit_norms" : true,
15 | "type" : "string"
16 | },
17 | "match_mapping_type" : "string",
18 | "match" : "message"
19 | }
20 | }, {
21 | "string_fields" : {
22 | "mapping" : {
23 | "index" : "not_analyzed",
24 | "type" : "string"
25 | },
26 | "match_mapping_type" : "string",
27 | "match" : "*"
28 | }
29 | } ],
30 | "properties" : {
31 | "0-Name" : {
32 | "index" : "not_analyzed",
33 | "type" : "string"
34 | },
35 | "1-Value" : {
36 | "index" : "not_analyzed",
37 | "type" : "string"
38 | },
39 | "2-Times Used" : {
40 | "index" : "not_analyzed",
41 | "type" : "integer"
42 | },
43 | "@version" : {
44 | "index" : "not_analyzed",
45 | "type" : "string"
46 | },
47 | "3-First Used" : {
48 | "format" : "yyyy-MM-dd HH:mm:ss",
49 | "type" : "date"
50 | },
51 | "4-Last Used" : {
52 | "format" : "yyyy-MM-dd HH:mm:ss",
53 | "type" : "date"
54 | }
55 | }
56 | }
57 | }
58 | }
59 |
--------------------------------------------------------------------------------
/ES_templates_dumpzilla/history.json:
--------------------------------------------------------------------------------
1 | {
2 | "template" : "dumpzilla_history*",
3 | "settings" : {
4 | "index" : {
5 | "refresh_interval" : "5s"
6 | }
7 | },
8 | "mappings" : {
9 | "_default_" : {
10 | "dynamic_templates" : [ {
11 | "message_field" : {
12 | "mapping" : {
13 | "index" : "analyzed",
14 | "omit_norms" : true,
15 | "type" : "string"
16 | },
17 | "match_mapping_type" : "string",
18 | "match" : "message"
19 | }
20 | }, {
21 | "string_fields" : {
22 | "mapping" : {
23 | "index" : "not_analyzed",
24 | "type" : "string"
25 | },
26 | "match_mapping_type" : "string",
27 | "match" : "*"
28 | }
29 | } ],
30 | "properties" : {
31 | "1-Title" : {
32 | "index" : "not_analyzed",
33 | "type" : "string"
34 | },
35 | "2-URL" : {
36 | "index" : "not_analyzed",
37 | "type" : "string"
38 | },
39 | "@version" : {
40 | "index" : "not_analyzed",
41 | "type" : "string"
42 | },
43 | "0-Last Access" : {
44 | "format" : "yyyy-MM-dd HH:mm:ss",
45 | "type" : "date"
46 | }
47 | }
48 | }
49 | }
50 | }
51 |
--------------------------------------------------------------------------------
/ES_templates_dumpzilla/offlinecache.json:
--------------------------------------------------------------------------------
1 | {
2 | "template" : "dumpzilla_offlinecache*",
3 | "settings" : {
4 | "index" : {
5 | "refresh_interval" : "5s"
6 | }
7 | },
8 | "mappings" : {
9 | "_default_" : {
10 | "dynamic_templates" : [ {
11 | "message_field" : {
12 | "mapping" : {
13 | "index" : "analyzed",
14 | "omit_norms" : true,
15 | "type" : "string"
16 | },
17 | "match_mapping_type" : "string",
18 | "match" : "message"
19 | }
20 | }, {
21 | "string_fields" : {
22 | "mapping" : {
23 | "index" : "not_analyzed",
24 | "type" : "string"
25 | },
26 | "match_mapping_type" : "string",
27 | "match" : "*"
28 | }
29 | } ],
30 | "properties" : {
31 | "1-Value" : {
32 | "index" : "not_analyzed",
33 | "type" : "string"
34 | },
35 | "0-Name" : {
36 | "index" : "not_analyzed",
37 | "type" : "string"
38 | },
39 | "@version" : {
40 | "index" : "not_analyzed",
41 | "type" : "string"
42 | },
43 | "2-Last Modified" : {
44 | "format" : "yyyy-MM-dd HH:mm:ss",
45 | "type" : "date"
46 | }
47 | }
48 | }
49 | }
50 | }
51 |
--------------------------------------------------------------------------------
/ES_templates_dumpzilla/passwords.json:
--------------------------------------------------------------------------------
1 | {
2 | "template" : "dumpzilla_passwords*",
3 | "settings" : {
4 | "index" : {
5 | "refresh_interval" : "5s"
6 | }
7 | },
8 | "mappings" : {
9 | "_default_" : {
10 | "dynamic_templates" : [ {
11 | "message_field" : {
12 | "mapping" : {
13 | "index" : "analyzed",
14 | "omit_norms" : true,
15 | "type" : "string"
16 | },
17 | "match_mapping_type" : "string",
18 | "match" : "message"
19 | }
20 | }, {
21 | "string_fields" : {
22 | "mapping" : {
23 | "index" : "not_analyzed",
24 | "type" : "string"
25 | },
26 | "match_mapping_type" : "string",
27 | "match" : "*"
28 | }
29 | } ],
30 | "properties" : {
31 | "1-User field" : {
32 | "index" : "not_analyzed",
33 | "type" : "string"
34 | },
35 | "4-Password login (crypted)" : {
36 | "index" : "not_analyzed",
37 | "type" : "string"
38 | },
39 | "0-Web" : {
40 | "index" : "not_analyzed",
41 | "type" : "string"
42 | },
43 | "3-User login (crypted)" : {
44 | "index" : "not_analyzed",
45 | "type" : "string"
46 | },
47 | "2-Password field" : {
48 | "index" : "not_analyzed",
49 | "type" : "string"
50 | },
51 | "8-Frequency" : {
52 | "index" : "not_analyzed",
53 | "type" : "integer"
54 | },
55 | "@version" : {
56 | "index" : "not_analyzed",
57 | "type" : "string"
58 | },
59 | "5-Created" : {
60 | "format" : "yyyy-MM-dd HH:mm:ss",
61 | "type" : "date"
62 | },
63 | "7-Change" : {
64 | "format" : "yyyy-MM-dd HH:mm:ss",
65 | "type" : "date"
66 | },
67 | "6-Last Used" : {
68 | "format" : "yyyy-MM-dd HH:mm:ss",
69 | "type" : "date"
70 | }
71 | }
72 | }
73 | }
74 | }
75 |
--------------------------------------------------------------------------------
/ES_templates_dumpzilla/permissions.json:
--------------------------------------------------------------------------------
1 | {
2 | "template" : "dumpzilla_permissions*",
3 | "settings" : {
4 | "index" : {
5 | "refresh_interval" : "5s"
6 | }
7 | },
8 | "mappings" : {
9 | "_default_" : {
10 | "dynamic_templates" : [ {
11 | "message_field" : {
12 | "mapping" : {
13 | "index" : "analyzed",
14 | "omit_norms" : true,
15 | "type" : "string"
16 | },
17 | "match_mapping_type" : "string",
18 | "match" : "message"
19 | }
20 | }, {
21 | "string_fields" : {
22 | "mapping" : {
23 | "index" : "not_analyzed",
24 | "type" : "string"
25 | },
26 | "match_mapping_type" : "string",
27 | "match" : "*"
28 | }
29 | } ],
30 | "properties" : {
31 | "1-Type" : {
32 | "index" : "not_analyzed",
33 | "type" : "string"
34 | },
35 | "2-Permission" : {
36 | "index" : "not_analyzed",
37 | "type" : "integer"
38 | },
39 | "@version" : {
40 | "index" : "not_analyzed",
41 | "type" : "string"
42 | },
43 | "3-Expire Time" : {
44 | "index" : "not_analyzed",
45 | "type" : "string"
46 | }
47 | }
48 | }
49 | }
50 | }
51 |
--------------------------------------------------------------------------------
/ES_templates_dumpzilla/session.json:
--------------------------------------------------------------------------------
1 | {
2 | "template" : "dumpzilla_session*",
3 | "settings" : {
4 | "index" : {
5 | "refresh_interval" : "5s"
6 | }
7 | },
8 | "mappings" : {
9 | "_default_" : {
10 | "dynamic_templates" : [ {
11 | "message_field" : {
12 | "mapping" : {
13 | "index" : "analyzed",
14 | "omit_norms" : true,
15 | "type" : "string"
16 | },
17 | "match_mapping_type" : "string",
18 | "match" : "message"
19 | }
20 | }, {
21 | "string_fields" : {
22 | "mapping" : {
23 | "index" : "not_analyzed",
24 | "type" : "string"
25 | },
26 | "match_mapping_type" : "string",
27 | "match" : "*"
28 | }
29 | } ],
30 | "properties" : {
31 | "02-Type" : {
32 | "index" : "not_analyzed",
33 | "type" : "string"
34 | },
35 | "03-Title" : {
36 | "index" : "not_analyzed",
37 | "type" : "string"
38 | },
39 | "00-Session type" : {
40 | "index" : "not_analyzed",
41 | "type" : "string"
42 | },
43 | "04-URL" : {
44 | "index" : "not_analyzed",
45 | "type" : "string"
46 | },
47 | "01-Last update" : {
48 | "format" : "E MMM d HH:mm:ss yyyy",
49 | "type" : "date"
50 | }
51 | }
52 | }
53 | }
54 | }
55 |
--------------------------------------------------------------------------------
/ES_templates_dumpzilla/thumbnails.json:
--------------------------------------------------------------------------------
1 | {
2 | "template" : "dumpzilla_thumbnails*",
3 | "settings" : {
4 | "index" : {
5 | "refresh_interval" : "5s"
6 | }
7 | },
8 | "mappings" : {
9 | "_default_" : {
10 | "dynamic_templates" : [ {
11 | "message_field" : {
12 | "mapping" : {
13 | "index" : "analyzed",
14 | "omit_norms" : true,
15 | "type" : "string"
16 | },
17 | "match_mapping_type" : "string",
18 | "match" : "message"
19 | }
20 | }, {
21 | "string_fields" : {
22 | "mapping" : {
23 | "index" : "not_analyzed",
24 | "type" : "string"
25 | },
26 | "match_mapping_type" : "string",
27 | "match" : "*"
28 | }
29 | } ],
30 | "properties" : {
31 | "0-File" : {
32 | "index" : "not_analyzed",
33 | "type" : "string"
34 | },
35 | "@version" : {
36 | "index" : "not_analyzed",
37 | "type" : "string"
38 | }
39 | }
40 | }
41 | }
42 | }
43 |
44 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # dumpzilla (Forensic Tool)
2 |
3 | **Dumpzilla official site**: [www.dumpzilla.org](http://www.dumpzilla.org "Mozilla browser forensic tool")
4 |
5 | **Manual**: [Spanish](http://dumpzilla.org/Manual_dumpzilla_es.txt "Dumpzilla manual in Spanish") / [English](http://dumpzilla.org/Manual_dumpzilla_en.txt "Dumpzilla English manual")
6 |
7 | **OS**: Unix / Windows
8 |
9 | **Screenshots**: [Dumpzilla](http://dumpzilla.org/Screenshots/screenshots.html "dumpzilla screenshots")
10 |
11 | Dumpzilla is developed in Python 3.x and its purpose is to extract all forensically interesting information from Firefox, Iceweasel and Seamonkey browsers for later analysis. Because it is written for Python 3.x, it might not work properly with older Python versions, mainly with certain characters. It works on Unix and Windows 32/64-bit systems. It runs as a command line interface, so information dumps can be redirected through pipes to tools such as grep, awk, cut, sed... Dumpzilla allows you to visualize the following sections, customize searches and extract certain content.
12 |
13 | - Cookies + DOM Storage (HTML 5).
14 | - User preferences (Domain permissions, Proxy settings...).
15 | - Downloads.
16 | - Web forms (searches, emails, comments...).
17 | - History.
18 | - Bookmarks.
19 | - HTML5 cache (offline cache) visualization / extraction.
20 | - Visualization / extraction of thumbnails of visited sites.
21 | - Add-ons / extensions and the paths or URLs they use.
22 | - Browser saved passwords.
23 | - SSL certificates added as exceptions.
24 | - Session data (Webs, reference URLs and text used in forms).
25 | - Live visualization of user browsing, the URL used in each tab / window and the use of forms.
26 |
27 | Dumpzilla shows the SHA256 hash of each file used to extract the information and, finally, a summary with totals.
28 | Sections where date filtering is not possible: DOM Storage, Permissions / Preferences, Addons, Extensions, Passwords / Exceptions, Thumbnails and Session.
29 |
--------------------------------------------------------------------------------
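A hypothetical invocation of the tool described above, piping a cookie dump through grep (the profile path and domain are made up; see the linked manuals for the full option list):

    python3 dumpzilla.py ~/.mozilla/firefox/xxxxxxxx.default --Cookies -domain example.com | grep -i host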
/dumpzilla:
--------------------------------------------------------------------------------
1 | # dumpzilla completion -*- shell-script -*-
2 | #
3 | # /etc/bash_completion.d/ (Debian / Ubuntu / CentOS)
4 | # /usr/share/bash-completion/completions (Arch)
5 |
6 | _dumpzilla()
7 | {
8 | local cur prev opts
9 | COMPREPLY=()
10 | cur="${COMP_WORDS[COMP_CWORD]}"
11 | prev="${COMP_WORDS[COMP_CWORD-1]}"
12 |
13 | # Files / Dirs completion
14 | _filedir
15 |
16 | # All
17 | if [[ ${cur} == --* ]] ; then
18 | opts="--Addons --Bookmarks --Certoverride --Cookies --Downloads --Export --Forms --Help --History --Keypinning --OfflineCache --Preferences --Passwords --Permissions --RegExp --Session --Summary --Thumbnails --Verbosity --Watch"
19 | COMPREPLY=( $(compgen -W "${opts}" -- ${cur}) )
20 | return 0
21 | fi
22 |
23 | # Bookmarks
24 | if [[ ${prev} == --Bookmarks ]] ; then
25 | opts="-bm_create_range -bm_last_range"
26 | COMPREPLY=( $(compgen -W "${opts}" -- ${cur}) )
27 | return 0
28 |
29 | elif [[ (${prev} == -bm_create_range) || (${prev} == -bm_last_range) ]] ; then
30 | opts="-bm_last_range -bm_create_range"
31 | COMPREPLY=( $(compgen -W "${opts}" -- ${cur}) )
32 | return 0
33 | fi
34 |
35 | # Cookies
36 | if [[ ${prev} == --Cookies ]] ; then
37 | opts="-showdom -domain -name -hostcookie -access -create -secure -httponly -last_range -create_range"
38 | COMPREPLY=( $(compgen -W "${opts}" -- ${cur}) )
39 | return 0
40 | fi
41 |
42 | if [[ (${prev} == -showdom) || (${prev} == -domain) || (${prev} == -name) || (${prev} == -hostcookie) || (${prev} == -access) || (${prev} == -create) || (${prev} == -secure) || (${prev} == -httponly) || (${prev} == -last_range) || (${prev} == -create_range) ]] ; then
43 | opts="-showdom -domain -name -hostcookie -access -create -secure -httponly -last_range -create_range"
44 | COMPREPLY=( $(compgen -W "${opts}" -- ${cur}) )
45 | return 0
46 | fi
47 |
48 | # Downloads
49 | if [[ ${prev} == --Downloads ]] ; then
50 | opts="-range"
51 | COMPREPLY=( $(compgen -W "${opts}" -- ${cur}) )
52 | return 0
53 | fi
54 |
55 | # Forms
56 | if [[ ${prev} == --Forms ]] ; then
57 | opts="-value -forms_range"
58 | COMPREPLY=( $(compgen -W "${opts}" -- ${cur}) )
59 | return 0
60 |
61 | elif [[ (${prev} == -value) || (${prev} == -forms_range) ]] ; then
62 | opts="-value -forms_range"
63 | COMPREPLY=( $(compgen -W "${opts}" -- ${cur}) )
64 | return 0
65 | fi
66 |
67 | # History
68 | if [[ ${prev} == --History ]] ; then
69 | opts="-url -title -date -history_range -frequency"
70 | COMPREPLY=( $(compgen -W "${opts}" -- ${cur}) )
71 | return 0
72 | fi
73 |
74 | if [[ (${prev} == -url) || (${prev} == -title) || (${prev} == -date) || (${prev} == -history_range) || (${prev} == -frequency) ]] ; then
75 | opts="-url -title -date -history_range -frequency"
76 | COMPREPLY=( $(compgen -W "${opts}" -- ${cur}) )
77 | return 0
78 | fi
79 |
80 | # Key-pinning
81 | if [[ ${prev} == --Keypinning ]] ; then
82 | opts="-entry_type"
83 | COMPREPLY=( $(compgen -W "${opts}" -- ${cur}) )
84 | return 0
85 | fi
86 |
87 | if [[ (${prev} == -entry_type) ]] ; then
88 | opts="HPKP HSTS"
89 | COMPREPLY=( $(compgen -W "${opts}" -- ${cur}) )
90 | return 0
91 | fi
92 |
93 | # OfflineCache
94 | if [[ ${prev} == --OfflineCache ]] ; then
95 | opts="-cache_range -extract"
96 | COMPREPLY=( $(compgen -W "${opts}" -- ${cur}) )
97 | return 0
98 |
99 | elif [[ (${prev} == -cache_range) || (${prev} == -extract) ]] ; then
100 | opts="-cache_range -extract"
101 | COMPREPLY=( $(compgen -W "${opts}" -- ${cur}) )
102 | return 0
103 | fi
104 |
105 | # Permissions
106 | if [[ ${prev} == --Permissions ]] ; then
107 | opts="-host -modif -modif_range"
108 | COMPREPLY=( $(compgen -W "${opts}" -- ${cur}) )
109 | return 0
110 |
111 | elif [[ (${prev} == -host) || (${prev} == -modif) || (${prev} == -modif_range) ]] ; then
112 | opts="-host -modif -modif_range"
113 | COMPREPLY=( $(compgen -W "${opts}" -- ${cur}) )
114 | return 0
115 | fi
116 |
117 | # Thumbnails
118 | if [[ ${prev} == --Thumbnails ]] ; then
119 | opts="-extract_thumb"
120 | COMPREPLY=( $(compgen -W "${opts}" -- ${cur}) )
121 | return 0
122 | fi
123 |
124 | # Verbosity
125 | if [[ ${prev} == --Verbosity ]] ; then
126 | opts="DEBUG INFO WARNING ERROR CRITICAL"
127 | COMPREPLY=( $(compgen -W "${opts}" -- ${cur}) )
128 | return 0
129 | fi
130 |
131 | # Watch
132 | if [[ ${prev} == --Watch ]] ; then
133 | opts="-text"
134 | COMPREPLY=( $(compgen -W "${opts}" -- ${cur}) )
135 | return 0
136 | fi
137 | }
138 | complete -F _dumpzilla dumpzilla
139 |
--------------------------------------------------------------------------------
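A minimal sketch of installing the completion script above, assuming a Debian-style system and the file in the current directory (the header of the script itself lists the target directories for other layouts):

    sudo cp dumpzilla /etc/bash_completion.d/dumpzilla
    . /etc/bash_completion.d/dumpzilla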
/dumpzilla.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 |
4 | import sqlite3, sys, glob, shutil, json, time, hashlib, re, os, logging, lz4.block
5 | from base64 import b64decode
6 | from os import path,walk,makedirs,remove
7 | from ctypes import (Structure, c_uint, c_void_p, c_ubyte,c_char_p, CDLL, cast,byref,string_at)
8 | from datetime import datetime, timedelta
9 | from subprocess import call
10 | from collections import OrderedDict
11 |
12 | import argparse
13 |
14 | # Magic Module: https://github.com/ahupp/python-magic
15 |
16 | class Dumpzilla():
17 | ########################################### GLOBAL VARIABLES ##################################################
18 | VERSION='v20180324'
19 |
20 | magicpath = r'C:\WINDOWS\system32\magic' # Only on Windows: path to the magic file (read the Manual at www.dumpzilla.org)
21 |
22 | query_str_f = ""
23 | query_str_a = ""
24 |
25 | output_mode = 0 # Output modes: 0 - Standard output (default)
26 | count = 0
27 | arg_count = 0
28 |
29 | #########################
30 | # TOTAL EXTRACTION DICT
31 | #########################
32 | total_extraction = {}
33 |
34 | #~~~~~~~~~~~~~~#
35 | # ^ Structure #
36 | #~~~~~~~~~~~~~~#
37 | #
38 | # {
39 | # parameter1 : {
40 | # absolute_file1_path : [
41 | # { column1_name : value, column2_name : value, (...), columnN_name : value },
42 | # { column1_name : value, column2_name : value, (...), columnN_name : value },
43 | # (...)
44 | # { column1_name : value, column2_name : value, (...), columnN_name : value }
45 | # ],
46 | # absolute_fileN_path : [
47 | # { column1_name : value, column2_name : value, (...), columnN_name : value },
48 | # { column1_name : value, column2_name : value, (...), columnN_name : value },
49 | # (...)
50 | # { column1_name : value, column2_name : value, (...), columnN_name : value }
51 | # ]
52 | # },
53 | #
54 | # (...)
55 | #
56 | # parameterN : {
57 | # absolute_file1_path : [
58 | # { column1_name : value, column2_name : value, (...), columnN_name : value },
59 | # { column1_name : value, column2_name : value, (...), columnN_name : value },
60 | # (...)
61 | # { column1_name : value, column2_name : value, (...), columnN_name : value }
62 | # ],
63 | # absolute_fileN_path : [
64 | # { column1_name : value, column2_name : value, (...), columnN_name : value },
65 | # { column1_name : value, column2_name : value, (...), columnN_name : value },
66 | # (...)
67 | # { column1_name : value, column2_name : value, (...), columnN_name : value }
68 | # ]
69 | # }
70 | # }
71 | #
72 |
73 | ###############
74 | ### DEFAULTS
75 | ###############
76 |
77 | # TODO: Make an object with all parameters' info
78 |
79 | # --Cookies
80 | cookie_filters = []
81 | domain_filters = []
82 | is_dom_ok = False
83 |
84 | # --Permissions
85 | permissions_filters = []
86 |
87 | # --Downloads
88 | downloads_filters = []
89 | downloads_history_filters = []
90 |
91 | # --Forms
92 | forms_filters = []
93 |
94 | # --History
95 | history_filters = []
96 |
97 | # --Bookmarks
98 | bookmarks_filters = []
99 |
100 | # --OfflineCache
101 | is_cacheoff_extract_ok = False
102 | cacheoff_filters = []
103 | cacheoff_directory = None
104 |
105 | # --Keypinning
106 | keypinning_filters = []
107 |
108 | # --Thumbnails
109 | thumb_filters = []
110 |
111 | # --Watch
112 | watch_text = 1
113 |
114 | args = None
115 |
116 | watchsecond = 4 # --Watch option: seconds between updates (not available on Windows)
117 | PYTHON3_DEF = '/usr/bin/python3'
118 | PYTHON3_PATH = ''
119 |
120 | ######################################## NSS LOADING (PASSWORD DECODE) ########################################
121 |
122 | if sys.platform.startswith('win') == True: # WINDOWS
123 | libnss_path = r"C:\Program Files (x86)\Mozilla Firefox\nss3.dll" # raw string: "\n" in a plain literal would become a newline
124 | elif sys.platform.endswith('win') == False: # LINUX
125 | libnss_path = "libnss3.so"
126 | elif sys.platform.endswith('win') == True: # MAC ('darwin' ends with 'win')
127 | libnss_path = 'libnss3.dylib'
128 | # Example: /usr/local/Cellar/nss/3.28.1/lib/libnss3.dylib
129 | # libnss_path = False
130 | # if path.isdir("/usr/local/Cellar/nss"):
131 | # for s in os.listdir("/usr/local/Cellar/nss"): # Iterate through versions
132 | # libnss_version = path.join("/usr/local/Cellar/nss",s)
133 | # if path.isdir(libnss_version): # Must be a folder (/usr/local/Cellar/nss/x.xx.x)
134 | # libnss_check = path.join(libnss_version,'lib/libnss3.dylib')
135 | # if path.isfile(libnss_check):
136 | # libnss_path = libnss_check
137 | # break
138 | else:
139 | libnss_path = False
140 |
141 | if libnss_path and path.isfile(libnss_path):
142 | libnss = CDLL(libnss_path)
143 | else:
144 | libnss = False
145 |
146 | ########################################### GLOBAL DECODE VARIABLES ###########################################
147 |
148 | class SECItem(Structure):
149 | _fields_ = [('type',c_uint),('data',c_void_p),('len',c_uint)]
150 |
151 | class secuPWData(Structure):
152 | _fields_ = [('source',c_ubyte),('data',c_char_p)]
153 |
154 | (SECWouldBlock,SECFailure,SECSuccess)=(-2,-1,0)
155 | (PW_NONE,PW_FROMFILE,PW_PLAINTEXT,PW_EXTERNAL)=(0,1,2,3)
156 |
157 | pwdata = secuPWData()
158 | pwdata.source = PW_NONE
159 | pwdata.data=0
160 |
161 | uname = SECItem()
162 | passwd = SECItem()
163 | dectext = SECItem()
164 |
165 | ####################################################
166 | # #
167 | # AUX METHODS #
168 | # #
169 | ####################################################
170 |
171 | def get_user_value(self, message):
172 | if sys.version.startswith('2.') == True:
173 | return raw_input(message)
174 | else:
175 | return input(message)
176 |
177 | def serial_date_to_string(self, srl_no):
178 | new_date = datetime(1970,1,1,0,0) + timedelta(srl_no - 1)
179 | return new_date.strftime("%Y-%m-%d %H:%M:%S")
180 |
181 | def log(self, type, message):
182 | # These are the escape sequences needed to get colored output
183 | RESET_SEQ = "\033[0m"
184 | COLOR_SEQ = "\033[1;%dm"
185 | BOLD_SEQ = "\033[1m"
186 |
187 | BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE = range(8)
188 |
189 | LEVELS = {
190 | 'DEBUG': {
191 | 'color': BLUE,
192 | 'funct': self.logger.debug
193 | },
194 | 'WARNING': {
195 | 'color': YELLOW,
196 | 'funct': self.logger.warning
197 | },
198 | 'INFO': {
199 | 'color': GREEN,
200 | 'funct': self.logger.info
201 | },
202 | 'ERROR': {
203 | 'color': RED,
204 | 'funct': self.logger.error
205 | },
206 | 'CRITICAL': {
207 | 'color': RED,
208 | 'funct': self.logger.critical
209 | }
210 | }
211 | # remove previous console handler from logger
212 | if hasattr(self, 'ch'):
213 | self.logger.removeHandler(self.ch)
214 |
215 | # create console handler and set level to debug
216 | self.ch = logging.StreamHandler()
217 |
218 | # create formatter
219 | formatter = logging.Formatter('['+ COLOR_SEQ % (30 + LEVELS[type]['color']) + '%(levelname)s' + RESET_SEQ + '] %(message)s')
220 | if (self.verbosity_level == "DEBUG"):
221 | formatter = logging.Formatter(COLOR_SEQ % (30 + LEVELS[type]['color']) + '%(levelname)s' + RESET_SEQ + ' - %(asctime)s - ' + sys.argv[0] + ' - %(message)s')
222 |
223 | # add formatter to ch
224 | self.ch.setFormatter(formatter)
225 |
226 | # add ch to logger
227 | self.logger.addHandler(self.ch)
228 | LEVELS[type]['funct'](message)
229 |
230 | def get_path_by_os(self, dir, file, cd_dir = None):
231 | delimiter = "/"
232 | if sys.platform.startswith('win') == True:
233 | delimiter = "\\"
234 | if cd_dir is not None:
235 | cd_dir = cd_dir + delimiter
236 | else:
237 | cd_dir = ""
238 | return dir+delimiter+cd_dir+file
239 |
240 | def decode_reg(self, reg):
241 | try:
242 | if type(reg) is int or type(reg) is str:
243 | return reg
244 | elif reg is None:
245 | return None
246 | else:
247 | return reg.decode()
248 | except UnicodeDecodeError:
249 | #self.log("ERROR","UnicodeDecodeError : "+str(sys.exc_info()[1]))
250 | #return None
251 | return reg.decode('utf-8')
252 |
253 | def show_info_header(self,profile):
254 | if sys.version.startswith('2.') == True:
255 | self.log("WARNING","Python 2.x is currently in use; Python 3.x and UTF-8 are recommended!")
256 | self.log("INFO", "Mozilla Profile: " + str(profile))
257 |
258 | def show_title(self,varText,source = False):
259 | varText = "\n== "+varText+"\n"
260 | print("")
261 | print(varText.center(243, "="))
262 | if (source):
263 | print("=> Source file: " + source)
264 | print("=> SHA256 hash: "+ self.show_sha256(source))
265 | print("")
266 |
267 | def regexp(self,expr, item):
268 | try:
269 | if item:
270 | reg = re.compile(expr, re.I)
271 | #Debug# print("expr: %s - %s - %s" % (expr, item, reg.match(item)) )
272 | return reg.search(item) is not None
273 | else:
274 | return None
275 | except: # catch *all* exceptions
276 | e = str(sys.exc_info()[0]) + " " + str(sys.exc_info()[1])
277 | self.log("ERROR", "Error using RegExp " + e)
278 | return None
279 |
280 | def validate_date(self,date_str):
281 | if not self.regexp('^[0-9_%:\- ]{1,19}$',date_str):
282 | self.log("WARNING","Erroneous date '"+date_str+"' : Check wildcards ('%' '_') and format (YYYY-MM-DD hh:mm:ss)")
283 | return date_str
284 |
285 | def execute_query(self,cursor,sqlite_query,filters,orderby = None):
286 | sqlite_param = []
287 | cnt = 0
288 | for filter in filters:
289 | if cnt == 0 and sqlite_query.find('where') == -1:
290 | sqlite_query = sqlite_query + " where ("
291 | else:
292 | sqlite_query = sqlite_query + " and ("
293 | if filter[0] == "string":
294 | # SQL Query: [RegExp] column REGEXP ?
295 | # [SQLike] column like ? escape '\'
296 | sqlite_query = sqlite_query + filter[1] + " " + self.query_str_f + " ? " + self.query_str_a
297 | sqlite_param.append(filter[2])
298 | elif filter[0] == "date":
299 | # SQL Query: column like ? escape '\'
300 | sqlite_query = sqlite_query + filter[1] + " like ? escape '\\'"
301 | sqlite_param.append(filter[2])
302 | elif filter[0] == "number":
303 | # SQL Query: column = ?
304 | sqlite_query = sqlite_query + filter[1] + " = ?"
305 | sqlite_param.append(filter[2])
306 | elif filter[0] == "range":
307 | # SQL Query: column between ? and ?
308 | sqlite_query = sqlite_query + filter[1] + " between ? and ?"
309 | sqlite_param.append(filter[2][0])
310 | sqlite_param.append(filter[2][1])
311 | elif filter[0] == "column":
312 | sqlite_query = sqlite_query + filter[1] + " = " + filter[2]
313 | sqlite_query = sqlite_query + ")"
314 | cnt = cnt + 1
315 |
316 | if orderby is not None:
317 | sqlite_query = sqlite_query + " " + orderby
318 |
319 | ### print("%s - %s" % (sqlite_query,sqlite_param))
320 | self.log('DEBUG', 'Execute query: ' + sqlite_query)
321 | cursor.execute(sqlite_query,sqlite_param)
322 |
323 | def decompressLZ4(self, file):
324 | lz4_headers = [ b"mozLz40\0", b"mozLz40p\0", b"mozLz40o\0"]
325 |
326 | for header in lz4_headers:
327 | value = file.read(len(header))
328 | if value == header:
329 | return lz4.block.decompress(file.read())
330 | file.seek(0)
331 |
332 |
333 |
334 | return None
335 |
336 | def getJSON(self, file):
337 | try:
338 | decompress = self.decompressLZ4(file)
339 | if decompress is None:
340 | return json.loads(file.read())
341 | else:
342 | return json.loads(decompress)
343 |
344 | except UnicodeDecodeError:
345 | self.log("ERROR", str(sys.exc_info()[0]) + " " + str(sys.exc_info()[1]) + ". Please check locale settings to verify UTF-8 is set!")
346 |
347 | ###############################################################################################################
348 | ### SHA256 HASHING #
349 | ###############################################################################################################
350 |
351 | def show_sha256(self,filepath):
352 | sha256 = hashlib.sha256()
353 | f = open(filepath, 'rb')
354 | try:
355 | sha256.update(f.read())
356 | finally:
357 | f.close()
358 | return sha256.hexdigest()
359 |
360 | def export_sha256(self,destination,header,sources):
361 | sha256_data = {}
362 |
363 | for source in sources:
364 | if path.isfile(source):
365 | sha256 = hashlib.sha256()
366 | f = open(source, 'rb')
367 | try:
368 | sha256.update(f.read())
369 | finally:
370 | f.close()
371 | sha256_data[source] = sha256.hexdigest()
372 |
373 | outputFilename = header + '.sha256.json';
374 | with open(destination + outputFilename, 'w') as fp:
375 | json.dump(sha256_data, fp)
376 |
377 | #############################################################################################################
378 | ### DECODE PASSWORDS
379 | #############################################################################################################
380 |
381 | def readsignonDB(self, dir):
382 | passwords_sources = ["signons.sqlite","logins.json"]
383 | decode_passwords_extraction_dict = {}
384 | if not self.libnss:
385 | if not self.libnss_path:
386 | self.log("ERROR","Error decoding passwords: libnss not found!")
387 | else:
388 | self.log("ERROR","Error decoding passwords: libnss not found (" + self.libnss_path + ")");
389 |
390 | # TODO: Move decoding into a method of self
391 | if self.libnss and self.libnss.NSS_Init(dir.encode("utf8"))!=0:
392 | self.log("ERROR","Error Initializing NSS_Init, probably no useful results.")
393 |
394 | for a in passwords_sources:
395 | # Setting filename by OS
396 | bbdd = self.get_path_by_os(dir, a)
397 |
398 | # Checking source file
399 | if path.isfile(bbdd) == True:
400 | if a.endswith(".json") == True:
401 | # JSON
402 | f = open(bbdd)
403 | jdata = self.getJSON(f)
404 | f.close()
405 | _extraction_list = []
406 | try:
407 | for l in jdata.get("logins"):
408 | _extraction_dict = {}
409 | if l.get("id") is not None:
410 | self.uname.data = cast(c_char_p(b64decode(l.get("encryptedUsername"))),c_void_p)
411 | self.uname.len = len(b64decode(l.get("encryptedUsername")))
412 | self.passwd.data = cast(c_char_p(b64decode(l.get("encryptedPassword"))),c_void_p)
413 | self.passwd.len=len(b64decode(l.get("encryptedPassword")))
414 |
415 | if self.libnss and self.libnss.PK11SDR_Decrypt(byref(self.uname), byref(self.dectext), byref(self.pwdata)) == -1:
416 | self.log("INFO", "Master password required")
417 | password = c_char_p(self.get_user_value(a + " password: ").encode("utf8"))
418 | keyslot = self.libnss.PK11_GetInternalKeySlot()
419 | if keyslot is None:
420 | # Something went wrong!
421 | self.log("ERROR","Failed to retrieve internal KeySlot")
422 | return
423 | check_rc = self.libnss.PK11_CheckUserPassword(keyslot, password)
424 | if check_rc != 0:
425 | # Something went wrong with given password
426 | self.log("ERROR","Password decoding failed! Check master password")
427 | return;
428 |
429 | _extraction_dict["0-Web"] = self.decode_reg(l.get("hostname"))
430 | _extraction_dict["1-Username"] = self.decode_reg(string_at(self.dectext.data,self.dectext.len))
431 |
432 | if self.libnss and self.libnss.PK11SDR_Decrypt(byref(self.passwd),byref(self.dectext),byref(self.pwdata))==-1:
433 | self.log("ERROR","Master password decryption failed!")
434 | return
435 |
436 | _extraction_dict["2-Password"] = self.decode_reg(string_at(self.dectext.data,self.dectext.len))
437 |
438 | _extraction_list.append(_extraction_dict)
439 |
440 | except:
441 | e = str(sys.exc_info()[0])
442 | self.log("ERROR","Passwords database: Can't process file " + a + ": " + e )
443 |
444 | decode_passwords_extraction_dict[bbdd] = _extraction_list
445 |
446 | elif a.endswith(".sqlite"):
447 | # SQLITE
448 | conn = sqlite3.connect(bbdd)
449 | conn.text_factory = bytes
450 | cursor = conn.cursor()
451 | try:
452 | self.execute_query(cursor,"select hostname, encryptedUsername, encryptedPassword from moz_logins",[])
453 | _extraction_list = []
454 | for row in cursor:
455 | _extraction_dict = {}
456 | self.uname.data = cast(c_char_p(b64decode(row[1])),c_void_p)
457 | self.uname.len = len(b64decode(row[1]))
458 | self.passwd.data = cast(c_char_p(b64decode(row[2])),c_void_p)
459 | self.passwd.len=len(b64decode(row[2]))
460 |
461 | if self.libnss and self.libnss.PK11SDR_Decrypt(byref(self.uname),byref(self.dectext),byref(self.pwdata))==-1:
462 | self.log("INFO", "Master password required")
463 | password = c_char_p(self.get_user_value(a + " password: ").encode("utf8"))
464 | keyslot = self.libnss.PK11_GetInternalKeySlot()
465 | if keyslot is None:
466 | # Something went wrong!
467 | self.log("ERROR","Failed to retrieve internal KeySlot")
468 | return
469 | check_rc = self.libnss.PK11_CheckUserPassword(keyslot, password)
470 | if check_rc != 0:
471 | # Something went wrong with given password
472 | self.log("ERROR","Password decoding failed! Check master password")
473 | return;
474 |
475 | _extraction_dict["0-Web"] = self.decode_reg(row[0])
476 | _extraction_dict["1-Username"] = self.decode_reg(string_at(self.dectext.data,self.dectext.len))
477 |
478 | if self.libnss and self.libnss.PK11SDR_Decrypt(byref(self.passwd),byref(self.dectext),byref(self.pwdata))==-1:
479 | self.log("ERROR","Master password decryption failed!")
480 | return
481 |
482 | _extraction_dict["2-Password"] = self.decode_reg(string_at(self.dectext.data,self.dectext.len))
483 |
484 | _extraction_list.append(_extraction_dict)
485 |
486 | decode_passwords_extraction_dict[bbdd] = _extraction_list
487 |
488 | conn.close()
489 | if self.libnss:
490 | self.libnss.NSS_Shutdown()
491 | except sqlite3.OperationalError:
492 | self.log("WARNING",bbdd + ": no data found!")
493 |
494 | if len(decode_passwords_extraction_dict) == 0:
495 | self.log("INFO","Passwords database not found! Please, check file " + '|'.join(passwords_sources))
496 |
497 | # Saving extraction to main extraction list
498 | self.total_extraction["decode"] = decode_passwords_extraction_dict
499 |
500 |
501 |
502 | ###############################################################################################################
503 | ### PASSWORDS
504 | ###############################################################################################################
505 |
506 | def show_passwords(self,dir):
507 | passwords_sources = ["signons.sqlite","logins.json"]
508 | passwords_extraction_dict = {}
509 | exception_extraction_dict = {}
510 |
511 | for a in passwords_sources:
512 | # Setting filename by OS
513 | bbdd = self.get_path_by_os(dir, a)
514 |
515 | # Checking source file
516 | if path.isfile(bbdd) == True:
517 | if a.endswith(".json") == True:
518 | # JSON
519 | f = open(bbdd)
520 | jdata = self.getJSON(f)
521 | f.close()
522 |
523 | _extraction_list = []
524 | try:
525 | for l in jdata.get("logins"):
526 | _extraction_dict = {}
527 | if l.get("id") is not None:
528 | _extraction_dict['0-Web'] = l.get("hostname")
529 | _extraction_dict['1-User field'] = l.get("usernameField")
530 | _extraction_dict['2-Password field'] = l.get("passwordField")
531 | _extraction_dict['3-User login (crypted)'] = l.get("encryptedUsername")
532 | _extraction_dict['4-Password login (crypted)'] = l.get("encryptedPassword")
533 | #_extraction_dict['99-Encryption type'] = l.get("encType")
534 |
535 | create_date = datetime.fromtimestamp(int(l.get("timeCreated"))/1000).strftime('%Y-%m-%d %H:%M:%S')
536 | _extraction_dict['5-Created'] = create_date
537 |
538 | lastuse_date = datetime.fromtimestamp(int(l.get("timeLastUsed"))/1000).strftime('%Y-%m-%d %H:%M:%S')
539 | _extraction_dict['6-Last used'] = lastuse_date
540 |
541 | change_date = datetime.fromtimestamp(int(l.get("timePasswordChanged"))/1000).strftime('%Y-%m-%d %H:%M:%S')
542 | _extraction_dict['7-Change'] = change_date
543 | _extraction_dict['8-Frequency'] = l.get("timesUsed")
544 |
545 | _extraction_list.append(_extraction_dict)
546 |
547 | except:
548 | e = str(sys.exc_info()[0])
549 | self.log("ERROR","Passwords database: Can't process file " + a + ": " + e )
550 |
551 | passwords_extraction_dict[bbdd] = _extraction_list
552 |
553 | elif a.endswith(".sqlite"):
554 | # SQLITE
555 |
556 | conn = sqlite3.connect(bbdd)
557 | conn.text_factory = bytes
558 | cursor = conn.cursor()
559 |
560 | try:
561 | ### Exceptions
562 | cursor.execute('select hostname from moz_disabledHosts')
563 | _extraction_list = []
564 | for row in cursor:
565 | _extraction_dict = {}
566 | _extraction_dict['0-Exception Web'] = self.decode_reg(row[0])
567 | _extraction_list.append(_extraction_dict)
568 |
569 | exception_extraction_dict[bbdd] = _extraction_list
570 |
571 | ### Passwords
572 | cursor.execute('select formSubmitURL,usernameField,passwordField,encryptedUsername,encryptedPassword,encType,\
573 | datetime(timeCreated/1000,"unixepoch","localtime"),datetime(timeLastUsed/1000,"unixepoch","localtime"),\
574 | datetime(timePasswordChanged/1000,"unixepoch","localtime"),timesUsed FROM moz_logins')
575 | _extraction_list = []
576 | for row in cursor:
577 | _extraction_dict = {}
578 | _extraction_dict['0-Web'] = self.decode_reg(row[0])
579 | _extraction_dict['1-User field'] = self.decode_reg(row[1])
580 | _extraction_dict['2-Password field'] = self.decode_reg(row[2])
581 | _extraction_dict['3-User login (crypted)'] = self.decode_reg(row[3])
582 | _extraction_dict['4-Password login (crypted)'] = self.decode_reg(row[4])
583 | #_extraction_dict['99-Encryption type'] = self.decode_reg(row[5])
584 | _extraction_dict['5-Created'] = self.decode_reg(row[6])
585 | _extraction_dict['6-Last used'] = self.decode_reg(row[7])
586 | _extraction_dict['7-Change'] = self.decode_reg(row[8])
587 | _extraction_dict['8-Frequency'] = self.decode_reg(row[9])
588 | _extraction_list.append(_extraction_dict)
589 |
590 | passwords_extraction_dict[bbdd] = _extraction_list
591 | except:
592 | e = str(sys.exc_info()[0])
593 | self.log("ERROR","Passwords database: can't process file " + a + ": " + e )
594 |
595 | cursor.close()
596 | conn.close()
597 |
598 | self.total_extraction["exceptions"] = exception_extraction_dict
599 |
600 | if len(passwords_extraction_dict) == 0:
601 | self.log("INFO","Passwords database not found! Please, check file " + '|'.join(passwords_sources))
602 | else:
603 | if sys.platform.startswith('win') == False: # and sys.version.startswith('2.') == True and count > 0:
604 | self.readsignonDB(dir)
605 | else:
606 | self.log("ERROR","Password decoding only works on GNU/Linux")
607 |
608 | # Saving extraction to main extraction list
609 | self.total_extraction["passwords"] = passwords_extraction_dict
610 |
611 | ###############################################################################################################
612 | ### SHOW ALL DATA #
613 | ###############################################################################################################
614 |
615 | def All_execute(self,dir):
616 | self.show_cookies(dir)
617 | self.show_permissions(dir)
618 | self.show_preferences(dir)
619 | self.show_addons(dir)
620 | self.show_extensions(dir)
621 | self.show_search_engines(dir)
622 | self.show_info_addons(dir)
623 | self.show_downloads(dir)
624 | self.show_downloads_history(dir)
625 | self.show_downloadsdir(dir)
626 | self.show_forms(dir)
627 | self.show_history(dir)
628 | self.show_bookmarks(dir)
629 | self.show_passwords(dir)
630 | self.show_key_pinning(dir)
631 | self.show_cache(dir)
632 | self.show_cert_override(dir)
633 | self.show_thumbnails(dir)
634 | self.show_session(dir)
635 |
636 | ###############################################################################################################
637 | ### COOKIES #
638 | ###############################################################################################################
639 |
640 | def show_cookies(self,dir):
641 | cookies_extraction_dict = {}
642 | dom_extraction_dict = {}
643 |
644 | bbdd = self.get_path_by_os(dir, 'cookies.sqlite')
645 |
646 | if path.isfile(bbdd) == False:
647 | self.log("INFO","Cookies database not found! Please, check file cookies.sqlite")
648 | return
649 |
650 | conn = sqlite3.connect(bbdd)
651 | conn.text_factory = bytes
652 |
653 | if self.args.is_regexp_ok == True:
654 | conn.create_function("REGEXP", 2, self.regexp)
655 |
656 | cursor = conn.cursor()
657 | sqlite_query = "select name, value, host, path, datetime(expiry, 'unixepoch', 'localtime'), datetime(lastAccessed/1000000,'unixepoch','localtime') as last ,datetime(creationTime/1000000,'unixepoch','localtime') as creat, isSecure, isHttpOnly FROM moz_cookies"
658 | self.execute_query(cursor,sqlite_query,self.cookie_filters)
659 |
660 | _extraction_list = []
661 | for row in cursor:
662 | _extraction_dict = {}
663 | _extraction_dict['1-Host'] = self.decode_reg(row[2])
664 | _extraction_dict['2-Name'] = self.decode_reg(row[0])
665 | _extraction_dict['3-Value'] = self.decode_reg(row[1])
666 | _extraction_dict['4-Path'] = self.decode_reg(row[3])
667 | _extraction_dict['5-Expiry'] = self.decode_reg(row[4])
668 | _extraction_dict['6-Last Access'] = self.decode_reg(row[5])
669 | _extraction_dict['7-Creation Time'] = self.decode_reg(row[6])
670 |
671 | if self.decode_reg(row[7]) == 0:
672 | _extraction_dict['8-Secure'] = 'No'
673 | else:
674 | _extraction_dict['8-Secure'] = 'Yes'
675 |
676 | if self.decode_reg(row[8]) == 0:
677 | _extraction_dict['9-HttpOnly'] = 'No'
678 | else:
679 | _extraction_dict['9-HttpOnly'] = 'Yes'
680 |
681 | _extraction_list.append(_extraction_dict)
682 |
683 | cookies_extraction_dict[bbdd] = _extraction_list
684 |
685 | self.total_extraction["cookies"] = cookies_extraction_dict
686 |
687 | cursor.close()
688 | conn.close()
689 |
690 | ####################################
691 | ### DOM STORAGE #
692 | ####################################
693 | if self.is_dom_ok == True:
694 |
695 | bbdd = self.get_path_by_os(dir, 'webappsstore.sqlite')
696 |
697 | if path.isfile(bbdd) == False:
698 | self.log("INFO","Webappsstore database not found! Please, check file webappsstore.sqlite")
699 | return
700 |
701 | # WARNING! Only RegExp filter allowed!
702 | if len(self.domain_filters) > 0 and self.args.is_regexp_ok == False :
703 | self.log("WARNING","Showing all DOM storage, to filter please use RegExp parameter")
704 |
705 | conn = sqlite3.connect(bbdd)
706 | conn.text_factory = bytes
707 | cursor = conn.cursor()
708 |
709 | sqlite_query = "select scope, value from webappsstore2"
710 | cursor.execute(sqlite_query)
711 |
712 | _extraction_list = []
713 | for row in cursor:
714 | _extraction_dict = {}
715 | fd = ""
716 | if self.decode_reg(row[0]).find("http") == -1:
717 | fd = path.split(self.decode_reg(row[0])[::-1])[1][1:]
718 | if self.decode_reg(row[0]).startswith("/") == False and self.decode_reg(row[0]).find("http") != -1:
719 | fd = path.split(self.decode_reg(row[0])[::-1])[1].rsplit(':.', 1)[1]
720 | # -domain filter
721 | show_this_domain = True
722 | if len(self.domain_filters) > 0 and self.args.is_regexp_ok == True:
723 | show_this_domain = self.regexp(self.domain_filters[0][2],fd)
724 |
725 | if show_this_domain == True:
726 | _extraction_dict['0-Domain'] = fd
727 | _extraction_dict['1-DOM data'] = row[1].decode('utf-8', 'ignore')
728 |
729 | _extraction_list.append(_extraction_dict)
730 |
731 | dom_extraction_dict[bbdd] = _extraction_list
732 |
733 | self.total_extraction["dom"] = dom_extraction_dict
734 |
735 | cursor.close()
736 | conn.close()
737 |
738 | ###############################################################################################################
739 | ### PERMISSIONS #
740 | ###############################################################################################################
741 |
742 | def show_permissions(self,dir):
743 | permissions_extraction_dict = {}
744 |
745 | bbdd = self.get_path_by_os(dir, 'permissions.sqlite')
746 |
747 | if path.isfile(bbdd) == False:
748 | self.log("INFO","Permissions database not found! Please, check file permissions.sqlite")
749 | return
750 |
751 | conn = sqlite3.connect(bbdd)
752 | conn.text_factory = bytes
753 |
754 | if self.args.is_regexp_ok == True:
755 | conn.create_function("REGEXP", 2, self.regexp)
756 |
757 | # Old table for permissions
758 | permissions_tables = ["moz_hosts"]
759 |
760 | # New table for permissions (checking if exists)
761 | cursor = conn.cursor()
762 | sqlite_query = "select count(*) from sqlite_master"
763 | master_filters = [["string","type","table"],["string","name","moz_perms"]]
764 | self.execute_query(cursor,sqlite_query,master_filters)
765 | for row in cursor:
766 | if row[0] > 0:
767 | permissions_tables.append("moz_perms")
768 | cursor.close()
769 |
770 | _extraction_list = []
771 |
772 | for table in permissions_tables:
773 | host_col = "host"
774 | if table == "moz_perms":
775 | host_col = "origin"
776 | for f in self.permissions_filters:
777 | if f[1] == "host":
778 | index = self.permissions_filters.index(f)
779 | self.permissions_filters[index][1] = "origin"
780 |
781 | # Checking if modificationTime column exists
782 | cursor = conn.cursor()
783 | sqlite_query = "pragma table_info("+table+")"
784 |
785 | modificationTime_found = False
786 | for row in cursor:
787 | if self.decode_reg(row[1]) == "modificationTime":
788 | modificationTime_found = True
789 | cursor.close()
790 |
791 | # Making sqlite query
792 | cursor = conn.cursor()
793 | sqlite_query = ""
794 | if modificationTime_found:
795 | sqlite_query = "select "+ host_col +",type,permission,expireType,datetime(expireTime/1000,'unixepoch','localtime') as expire, datetime(modificationTime/1000,'unixepoch','localtime') as modif from "+table
796 | else:
797 | sqlite_query = "select "+ host_col +",type,permission,expireType,datetime(expireTime/1000,'unixepoch','localtime') as expire from "+table
798 | for f in self.permissions_filters:
799 | if f[1] == "modif":
800 | self.permissions_filters.remove(f)
801 | self.log("WARNING","modificationTime : Column not found in permissions database")
802 |
803 | self.execute_query(cursor,sqlite_query,self.permissions_filters)
804 |
805 | for row in cursor:
806 | _extraction_dict = {}
807 | _extraction_dict['0-Host'] = self.decode_reg(row[0])
808 | _extraction_dict['1-Type'] = self.decode_reg(row[1])
809 | permissionType = str( self.decode_reg(row[2]) )
810 |
811 | # Permission
812 | if permissionType == '1':
813 | _extraction_dict['2-Permission'] = permissionType + " (allow)"
814 | elif permissionType == '2':
815 | _extraction_dict['2-Permission'] = permissionType + " (block)"
816 | elif permissionType == '8':
817 | _extraction_dict['2-Permission'] = permissionType + " (allow for session only)"
818 | else:
819 | _extraction_dict['2-Permission'] = permissionType
820 |
821 | # Expire time
822 | if self.decode_reg(row[3]) == 0:
823 | _extraction_dict['3-Expire Time'] = 'Does not expire'
824 | else:
825 | _extraction_dict['3-Expire Time'] = self.decode_reg(row[4])
826 |
827 | if modificationTime_found:
828 | _extraction_dict['4-Modification Time'] = self.decode_reg(row[5])
829 | _extraction_list.append(_extraction_dict)
830 | cursor.close()
831 |
832 | permissions_extraction_dict[bbdd] = _extraction_list
833 |
834 | self.total_extraction["permissions"] = permissions_extraction_dict
835 |
836 | cursor.close()
837 | conn.close()
838 |
839 | ###############################################################################################################
840 | ### PREFERENCES #
841 | ###############################################################################################################
842 |
843 | def show_preferences(self,dir):
844 | preferences_extraction_dict = {}
845 |
846 | dirprefs = self.get_path_by_os(dir, 'prefs.js')
847 |
848 | if path.isfile(dirprefs) == False:
849 | self.log("INFO","Preferences database not found! Please, check prefs.js")
850 | return
851 |
852 | firefox = 0
853 | seamonkey = 1
854 | count = 0
855 | _extraction_list = []
856 | for line in open(dirprefs):
857 | _extraction_dict = {}
858 |
859 | if "user_pref(" in line:
860 | count_alpha = str(count).zfill(6)
861 | code = line.split()[0][:-2].replace("\"", "").replace("user_pref(", "")
862 | value = line.split()[1][:-2].replace("\"", "")
863 |
864 | # Calculating Timestamp value
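 | # 10-digit values are tried as epoch milliseconds first; a 197x result
 | # means the value was really epoch seconds, handled by the fallback below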
865 | if ( self.regexp('[Tt]ime',code) or self.regexp("[Ll]ast",code) ) and self.regexp("^[0-9]{10}$",value):
866 | tmstmp = datetime.fromtimestamp(int(value)/1000).strftime('%Y-%m-%d %H:%M:%S')
867 | if self.regexp("^197",tmstmp):
868 | tmstmp = datetime.fromtimestamp(int(value)).strftime('%Y-%m-%d %H:%M:%S')
869 | value = tmstmp
870 |
871 | # Transforming description
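 | # e.g. "browser.download.lastDir" becomes "Browser Download Lastdir"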
872 | code_list = code.split('.')
873 | cnt = 0
874 | for c in code_list:
875 | code_list[cnt] = c.capitalize().replace("_"," ")
876 | cnt = cnt + 1
877 | code = " ".join(code_list)
878 | _extraction_dict[count_alpha + "-" + code] = value
879 | count = count + 1
880 |
881 | # if "extensions.lastAppVersion" in line:
882 | # seamonkey = line.split()[1][:-2].replace("\"", "")
883 | # _extraction_dict["00-Browser Version"] = line.split()[1][:-2].replace("\"", "")
884 | # if "extensions.lastPlatformVersion" in line and seamonkey != line.split()[1][:-2].replace("\"", ""): # Only Seamonkey
885 | # _extraction_dict["01-Firefox Version"] = line.split()[1][:-2].replace("\"", "")
886 | # if "browser.download.dir" in line:
887 | # _extraction_dict["02-Download directory"] = line.split()[1][:-2].replace("\"", "")
888 | # elif "browser.download.lastDir" in line:
889 | # _extraction_dict["03-Last Download directory"] = line.split()[1][:-2].replace("\"", "")
890 | # elif "browser.cache.disk.capacity" in line:
891 | # _extraction_dict["04-Browser cache disk capacity"] = line.split()[1][:-2].replace("\"", "")
892 | # elif "network.proxy.backup.ftp_port" in line:
893 | # _extraction_dict["05-FTP backup proxy port"] = line.split()[1][:-2].replace("\"", "")
894 | # elif "network.proxy.backup.ftp" in line:
895 | # _extraction_dict["06-FTP backup proxy"] = line.split()[1][:-2].replace("\"", "")
896 | # elif "network.proxy.backup.socks_port" in line:
897 | # _extraction_dict["07-Socks backup proxy port"] = line.split()[1][:-2].replace("\"", "")
898 | # elif "network.proxy.backup.socks" in line:
899 | # _extraction_dict["08-Socks backup proxy"] = line.split()[1][:-2].replace("\"", "")
900 | # elif "network.proxy.backup.ssl_port" in line:
901 | # _extraction_dict["09-SSL backup proxy port"] = line.split()[1][:-2].replace("\"", "")
902 | # elif "network.proxy.backup.ssl" in line:
903 | # _extraction_dict["10-SSL backup proxy"] = line.split()[1][:-2].replace("\"", "")
904 | # elif "network.proxy.ftp_port" in line:
905 | # _extraction_dict["11-FTP proxy port"] = line.split()[1][:-2].replace("\"", "")
906 | # elif "network.proxy.ftp" in line:
907 | # _extraction_dict["12-FTP proxy"] = line.split()[1][:-2].replace("\"", "")
908 | # elif "network.proxy.socks_port" in line:
909 | # _extraction_dict["13-Socks proxy port"] = line.split()[1][:-2].replace("\"", "")
910 | # elif "network.proxy.socks" in line:
911 | # _extraction_dict["14-Socks proxy"] = line.split()[1][:-2].replace("\"", "")
912 | # elif "network.proxy.ssl_port" in line:
913 | # _extraction_dict["15-SSL proxy port"] = line.split()[1][:-2].replace("\"", "")
914 | # elif "network.proxy.http_port" in line:
915 | # _extraction_dict["16-Http proxy port"] = line.split()[1][:-2].replace("\"", "")
916 | # elif "network.proxy.http" in line:
917 | # _extraction_dict["17-Http proxy"] = line.split()[1][:-2].replace("\"", "")
918 | # elif "network.proxy.share_proxy_settings" in line:
919 | # _extraction_dict["18-Share proxy settings"] = line.split()[1][:-2].replace("\"", "")
920 | # elif "network.proxy.autoconfig_url" in line:
921 | # _extraction_dict["19-URL proxy autoconfig"] = line.split()[1][:-2].replace("\"", "")
922 | # elif "network.proxy.type" in line:
923 | # _extraction_dict["20-Type Proxy"] = line.split()[1][:-2].replace("\"", "")+" (0: No proxy | 4: Auto detect settings | 1: Manual configuration | 2: URL autoconfig)"
924 |
925 | if len(_extraction_dict) > 0:
926 | _extraction_list.append(_extraction_dict)
927 |
928 | preferences_extraction_dict[dirprefs] = _extraction_list
929 |
930 | self.total_extraction["preferences"] = preferences_extraction_dict
931 |
932 | ###############################################################################################################
933 | ### ADDONS #
934 | ###############################################################################################################
935 |
936 | def show_addons(self,dir):
937 | addons_extraction_dict = {}
938 | addons_found = False
939 | addons_sources = ["addons.sqlite","addons.json"]
940 |
941 | for a in addons_sources:
942 | # Setting filename by OS
943 | bbdd = self.get_path_by_os(dir, a)
944 |
945 | # Checking source file
946 | if path.isfile(bbdd) == True:
947 | addons_found = True
948 |
949 | if a.endswith(".json") == True:
950 | # JSON
951 | f = open(bbdd)
952 | jdata = self.getJSON(f)
953 | f.close()
954 | _extraction_list = []
955 | try:
956 | for addon in jdata.get("addons"):
957 | _extraction_dict = {}
958 | if addon.get("id") is not None:
959 | _extraction_dict['0-Name'] = addon.get("name")
960 | _extraction_dict['1-Version'] = addon.get("version")
961 | _extraction_dict['2-Creator URL'] = addon.get("creator").get("url")
962 | _extraction_dict['3-Homepage URL'] = addon.get("homepageURL")
963 | _extraction_list.append(_extraction_dict)
964 | except:
965 | e = str(sys.exc_info()[0])
966 | self.log("ERROR","Addons database: Can't process file " + a + ": " + e )
967 |
968 | addons_extraction_dict[bbdd] = _extraction_list
969 |
970 | elif a.endswith(".sqlite"):
971 | # SQLITE
972 | conn = sqlite3.connect(bbdd)
973 | conn.text_factory = bytes
974 | cursor = conn.cursor()
975 | cursor.execute("select name,version,creatorURL,homepageURL from addon")
976 | _extraction_list = []
977 | for row in cursor:
978 | _extraction_dict = {}
979 | _extraction_dict['0-Name'] = self.decode_reg(row[0])
 | # bug fix: row indices now match the select column order (name,version,creatorURL,homepageURL)
980 | _extraction_dict['1-Version'] = self.decode_reg(row[1])
981 | _extraction_dict['2-Creator URL'] = self.decode_reg(row[2])
982 | _extraction_dict['3-Homepage URL'] = self.decode_reg(row[3])
983 | _extraction_list.append(_extraction_dict)
984 |
985 | addons_extraction_dict[bbdd] = _extraction_list
986 |
987 | cursor.close()
988 | conn.close()
989 |
990 | # Saving extraction to main extraction list
991 | self.total_extraction["addons"] = addons_extraction_dict
992 | if addons_found == False:
993 | self.log("INFO","Addons database not found! Please, check file %s" % '|'.join(addons_sources))
994 |
995 | ###############################################################################################################
996 | ### ADDONS INFO #
997 | ###############################################################################################################
998 |
999 | def show_info_addons(self,dir):
1000 | addinfo_extraction_dict = {}
1001 | addinfo_found = False
1002 | addinfo_sources = ["xulstore.json","localstore.rdf"]
1003 |
1004 | for a in addinfo_sources:
1005 | # Setting filename by OS
1006 | filepath = self.get_path_by_os(dir, a)
1007 |
1008 | # Checking source file
1009 | if path.isfile(filepath) == True:
1010 |
1011 | addinfo_found = True
1012 |
1013 | if a.endswith(".json") == True:
1014 | # JSON
1015 | f = open(filepath)
1016 | jdata = self.getJSON(f)
1017 | f.close()
1018 | # Fix compatibility python2-python3
1019 | _extraction_list = []
1020 | if sys.version.startswith('2.') == True:
1021 | for key, value in jdata.iteritems():
1022 | _extraction_list.append({"0-URL/PATH":"\"" + key + "\""})
1023 | else:
1024 | for key, value in jdata.items():
1025 | _extraction_list.append({"0-URL/PATH":"\"" + key + "\""})
1026 |
1027 | addinfo_extraction_dict[filepath] = _extraction_list
1028 |
1029 | if a.endswith(".rdf") == True:
1030 | # RDF
1031 | filead = open(filepath)
1032 | lines = filead.readlines()
1033 | i = 3
1034 | y = 0
1035 | _extraction_list = []
1036 | while i != len(lines):
1037 | if lines[i].find("tp://") != -1 or lines[i].find('label="/') != -1 or lines[i].find(':\\') != -1:
1038 | y = i - 1
1039 | while lines[y].find("RDF:Description RDF:about=") == -1:
1040 | y = y - 1
1041 | line_app = lines[y].replace('<RDF:Description RDF:about="', "").replace('">', "").replace(" ","").strip()
1042 | line_url = lines[i].replace('label="', " ").replace('"', "").replace(" ","").strip()
1043 |
1044 | _extraction_list.append({"0-APP": line_app, "1-URL/PATH": line_url})
1045 | i = i + 1
1046 |
1047 | addinfo_extraction_dict[filepath] = _extraction_list
1048 |
1049 | if y == 0:
1050 | self.log("INFO", "The Addons-Info database " + a + " does not contain URLs or paths!")
1051 |
1052 | # Saving extraction to main extraction list
1053 | self.total_extraction["addinfo"] = addinfo_extraction_dict
1054 | if addinfo_found == False:
1055 | self.log("INFO","Addons-Info database not found! Please, check file " + '|'.join(addinfo_sources))
1056 |
1057 | ###############################################################################################################
1058 | ### EXTENSIONS #
1059 | ###############################################################################################################
1060 |
1061 | def show_extensions(self,dir):
1062 | ext_extraction_dict = {}
1063 | ext_found = False
1064 | ext_sources = ["extensions.json","extensions.sqlite"]
1065 |
1066 | for a in ext_sources:
1067 | # Setting filename by OS
1068 | filepath = self.get_path_by_os(dir, a)
1069 |
1070 | # Checking source file
1071 | if path.isfile(filepath) == True:
1072 |
1073 | ext_found = True
1074 |
1075 | if a.endswith(".json") == True:
1076 | # JSON
1077 | if not sys.version.startswith('2.'):
1078 | jdata = json.load(open(filepath, encoding='utf8'))
1079 | else:
1080 | jdata = json.load(open(filepath))
1081 | try:
1082 | _extraction_list = []
1083 | for ext in jdata.get("addons"):
1084 | _extraction_dict = {}
1085 | if ext.get("id") is not None:
1086 | _extraction_dict['0-Name'] = ext.get("defaultLocale").get("name")
1087 | _extraction_dict['1-Type'] = ext.get("type")
1088 | _extraction_dict['2-Id'] = ext.get("id")
1089 | _extraction_dict['3-Descriptor'] = ext.get("descriptor")
1090 | _extraction_dict['4-Version'] = ext.get("version")
1091 | _extraction_dict['5-Release'] = ext.get("release")
1092 |
1093 | install_date = datetime.fromtimestamp(int(ext.get("installDate"))/1000).strftime('%Y-%m-%d %H:%M:%S')
1094 | _extraction_dict['6-Install Date'] = install_date
1095 |
1096 | update_date = datetime.fromtimestamp(int(ext.get("updateDate"))/1000).strftime('%Y-%m-%d %H:%M:%S')
1097 | _extraction_dict['7-Update Date'] = update_date
1098 |
1099 | _extraction_dict['8-Active'] = ext.get("active")
1100 | _extraction_list.append(_extraction_dict)
1101 |
1102 | ext_extraction_dict[filepath] = _extraction_list
1103 |
1104 | except:
1105 | e = str(sys.exc_info()[0])
1106 | self.log("ERROR","Extensions database: can't process file " + a + ": " + e )
1107 |
1108 |
1109 | if a.endswith(".sqlite") == True:
1110 | # SQLITE
1111 | conn = sqlite3.connect(filepath)
1112 | conn.text_factory = bytes
1113 | cursor = conn.cursor()
1114 | ext_query = "select type, descriptor,version,releaseNotesURI,datetime(installDate/1000,'unixepoch','localtime'),"
1115 | ext_query = ext_query + " datetime(UpdateDate/1000,'unixepoch','localtime'),active from addon"
1116 | cursor.execute(ext_query)
1117 | _extraction_list = []
1118 | for row in cursor:
1119 | _extraction_dict = {}
1120 | _extraction_dict['0-Type'] = self.decode_reg(row[0])
1121 | _extraction_dict['1-Descriptor'] = self.decode_reg(row[1])
1122 | _extraction_dict['2-Version'] = self.decode_reg(row[2])
1123 | _extraction_dict['3-Release'] = self.decode_reg(row[3])
1124 | _extraction_dict['4-Install Date'] = self.decode_reg(row[4])
1125 | _extraction_dict['5-Update Date'] = self.decode_reg(row[5])
1126 | _extraction_dict['6-Active'] = self.decode_reg(row[6])
1127 | _extraction_list.append(_extraction_dict)
1128 |
1129 | ext_extraction_dict[filepath] = _extraction_list
1130 |
1131 | cursor.close()
1132 | conn.close()
1133 |
1134 | # Saving extraction to main extraction list
1135 | self.total_extraction["extensions"] = ext_extraction_dict
1136 | if ext_found == False:
1137 | self.log("INFO","Extensions database not found! Please, check file " + '|'.join(ext_sources))
1138 |
1139 | ###############################################################################################################
1140 | ### SEARCH ENGINES #
1141 | ###############################################################################################################
1142 |
1143 | def show_search_engines(self,dir):
1144 | se_found = False
1145 | se_sources = ["search.json","search.sqlite","search.json.mozlz4"]
1146 | se_extraction_dict = {}
1147 |
1148 | for a in se_sources:
1149 | # Setting filename by OS
1150 | filepath = self.get_path_by_os(dir, a)
1151 |
1152 | # Checking source file
1153 | if path.isfile(filepath) == True:
1154 |
1155 | se_found = True
1156 |
1157 | if a.endswith(".json.mozlz4"):
1158 | # LZ4 COMPRESSED JSON
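 | # .mozlz4 files are LZ4 block-compressed JSON behind Mozilla's 8-byte magic
 | # header b"mozLz40\0", which decompressLZ4() is expected to strip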
1159 | fo = open(filepath, "rb")
1160 | jdata = json.loads(self.decompressLZ4(fo))
1161 | try:
1162 | _extraction_list = []
1163 | for engine in jdata.get("engines"):
1164 | _extraction_dict = {}
1165 | _extraction_dict['0-Name'] = engine.get("_name")
1166 | _extraction_dict['1-Description'] = engine.get("description")
1167 | _extraction_dict['2-Path'] = engine.get("_loadPath")
1168 | _extraction_list.append(_extraction_dict)
1169 |
1170 | se_extraction_dict[filepath] = _extraction_list
1171 |
1172 | except:
1173 | e = str(sys.exc_info()[0])
1174 | self.log("ERROR","Search Engines database: can't process file " + a + ": " + e )
1175 |
1176 | if a.endswith(".json"):
1177 | # JSON
1178 | f = open(filepath)
1179 | jdata = self.getJSON(f)
1180 | f.close()
1181 | try:
1182 | _extraction_list = []
1183 | for search_dir in jdata.get("directories"):
1184 | for engine in jdata.get("directories").get(search_dir).get("engines"):
1185 | _extraction_dict = {}
1186 | _extraction_dict['0-Name'] = engine.get("_name")
1187 | _extraction_dict['1-Value'] = engine.get("description")
1188 | _extraction_dict['2-Hidden'] = engine.get("_hidden")
1189 | _extraction_list.append(_extraction_dict)
1190 |
1191 | se_extraction_dict[filepath] = _extraction_list
1192 |
1193 | except:
1194 | e = str(sys.exc_info()[0])
1195 | self.log("ERROR","Search Engines database: can't process file " + a + ": " + e )
1196 |
1197 | if a.endswith(".sqlite") == True:
1198 | # SQLITE
1199 | conn = sqlite3.connect(filepath)
1200 | conn.text_factory = bytes
1201 | cursor = conn.cursor()
1202 | cursor.execute("select name, value from engine_data")
1203 | _extraction_list = []
1204 | for row in cursor:
1205 | _extraction_dict = {}
1206 | _extraction_dict['0-Name'] = self.decode_reg(row[0])
1207 | _extraction_dict['1-Value'] = str(self.decode_reg(row[1]))
1208 | _extraction_list.append(_extraction_dict)
1209 |
1210 | se_extraction_dict[filepath] = _extraction_list
1211 |
1212 | cursor.close()
1213 | conn.close()
1214 |
1215 | # Saving extraction to main extraction list
1216 | self.total_extraction["engines"] = se_extraction_dict
1217 | if se_found == False:
1218 | self.log("INFO","Search Engines database not found! Please, check file " + '|'.join(se_sources))
1219 |
1220 | ###############################################################################################################
1221 | ### DOWNLOADS #
1222 | ###############################################################################################################
1223 |
1224 | def show_downloads(self,dir):
1225 | downloads_extraction_dict = {}
1226 |
1227 | bbdd = self.get_path_by_os(dir, 'downloads.sqlite')
1228 |
1229 | if path.isfile(bbdd) == False:
1230 | self.log("INFO","Recent downloads database (FF<21) not found! Please, check file downloads.sqlite")
1231 | return
1232 |
1233 | conn = sqlite3.connect(bbdd)
1234 | conn.text_factory = bytes
1235 |
1236 | if self.args.is_regexp_ok == True:
1237 | conn.create_function("REGEXP", 2, self.regexp)
1238 |
1239 | cursor = conn.cursor()
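 | # Firefox stores PRTime values in microseconds, hence the /1000000 before
 | # 'unixepoch' here and in the other places.sqlite/formhistory queries below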
1240 | sqlite_query = "select name,mimeType,maxBytes/1024,source,target,referrer,tempPath, datetime(startTime/1000000,'unixepoch','localtime') as start,datetime(endTime/1000000,'unixepoch','localtime') as end,state,preferredApplication,preferredAction from moz_downloads"
1241 | self.execute_query(cursor, sqlite_query ,self.downloads_filters)
1242 |
1243 | _extraction_list = []
1244 | for row in cursor:
1245 | _extraction_dict = {}
1246 | _extraction_dict['00-Name'] = self.decode_reg(row[0])
1247 | _extraction_dict['01-Mime'] = self.decode_reg(row[1])
1248 | _extraction_dict['02-Size (KB)'] = self.decode_reg(row[2])
1249 | _extraction_dict['03-Source'] = self.decode_reg(row[3])
1250 | _extraction_dict['04-Directory'] = self.decode_reg(row[4])
1251 | _extraction_dict['05-Referrer'] = self.decode_reg(row[5])
1252 | _extraction_dict['06-Path temp'] = self.decode_reg(row[6])
1253 | _extraction_dict['07-Start Time'] = self.decode_reg(row[7])
1254 | _extraction_dict['08-End Time'] = self.decode_reg(row[8])
1255 | _extraction_dict['09-State (4 pause, 3 cancelled, 1 completed, 0 downloading)'] = self.decode_reg(row[9])
1256 | _extraction_dict['10-Preferred application'] = self.decode_reg(row[10])
1257 | _extraction_dict['11-Preferred action'] = self.decode_reg(row[11])
1258 | _extraction_list.append(_extraction_dict)
1259 |
1260 | downloads_extraction_dict[bbdd] = _extraction_list
1261 |
1262 | self.total_extraction["downloads"] = downloads_extraction_dict
1263 |
1264 | ###############################################################################################################
1265 | ### DOWNLOADS HISTORY #
1266 | ###############################################################################################################
1267 |
1268 | def show_downloads_history(self,dir):
1269 | download_hist_extraction_dict = {}
1270 |
1271 | bbdd = self.get_path_by_os(dir, 'places.sqlite')
1272 |
1273 | if path.isfile(bbdd) == False:
1274 | self.log("INFO","History Downloads database not found! Please, check file places.sqlite")
1275 | return
1276 |
1277 | conn = sqlite3.connect(bbdd)
1278 | conn.text_factory = bytes
1279 |
1280 | if self.args.is_regexp_ok == True:
1281 | conn.create_function("REGEXP", 2, self.regexp)
1282 |
1283 | cursor = conn.cursor()
1284 | sqlite_query = 'select datetime(ann.lastModified/1000000,"unixepoch","localtime") as modified, moz.url, ann.content from moz_annos ann, moz_places moz'
1285 |
1286 | # Default filters
1287 | #~ where moz.id=ann.place_id and ann.content not like "ISO-%" and ann.content like "file%"
1288 | self.downloads_history_filters.append(["column","moz.id","ann.place_id"])
1289 | if self.args.is_regexp_ok:
1290 | self.downloads_history_filters.append(["string","ann.content","^file.*"])
1291 | else:
1292 | self.downloads_history_filters.append(["string","ann.content","file%"])
1293 |
1294 | self.execute_query(cursor,sqlite_query,self.downloads_history_filters)
1295 |
1296 | _extraction_list = []
1297 | for row in cursor:
1298 | _extraction_dict = {}
1299 | _extraction_dict['0-Date'] = self.decode_reg(row[0])
1300 | _extraction_dict['1-URL'] = self.decode_reg(row[1])
1301 | _extraction_dict['2-Name'] = self.decode_reg(row[2])
1302 | _extraction_list.append(_extraction_dict)
1303 |
1304 | download_hist_extraction_dict[bbdd] = _extraction_list
1305 |
1306 | self.total_extraction["downloads_history"] = download_hist_extraction_dict
1307 |
1308 |
1309 | ###############################################################################################################
1310 | ### DOWNLOADS DIRECTORIES #
1311 | ###############################################################################################################
1312 |
1313 | def show_downloadsdir(self,dir):
1314 | download_dir_extraction_dict = {}
1315 |
1316 | bbdd = self.get_path_by_os(dir, 'content-prefs.sqlite')
1317 |
1318 | if path.isfile(bbdd) == False:
1319 | self.log("INFO","Download Directories database not found! Please, check file content-prefs.sqlite")
1320 | return
1321 |
1322 | conn = sqlite3.connect(bbdd)
1323 | conn.text_factory = bytes
1324 | cursor = conn.cursor()
1325 |
1326 | # Checking if timestamp column exists
1327 | cursor = conn.cursor()
1328 | sqlite_query = "pragma table_info(prefs)"
1329 | self.execute_query(cursor,sqlite_query,[])
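 | # pragma table_info yields one row per column: (cid, name, type, notnull, dflt_value, pk)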
1330 | timestamp_found = False
1331 | for row in cursor:
1332 | if self.decode_reg(row[1]) == "timestamp":
1333 | timestamp_found = True
1334 | cursor.close()
1335 |
1336 | # Making sqlite query
1337 | cursor = conn.cursor()
1338 | sqlite_query = ""
1339 | if timestamp_found:
1340 | sqlite_query = 'select value, max(datetime(timestamp/1000,"unixepoch","localtime")) as oldtime, max(datetime(timestamp,"unixepoch","localtime")) as newtime from prefs where value like "/%" group by value'
1341 | else:
1342 | sqlite_query = 'select value from prefs where value like "/%" group by value'
1343 |
1344 | cursor.execute(sqlite_query)
1345 |
1346 | _extraction_list = []
1347 | for row in cursor:
1348 | _extraction_dict = {}
1349 | _extraction_dict['0-Name'] = self.decode_reg(row[0])
1350 |
1351 | if timestamp_found:
1352 | timestamp = self.decode_reg(row[1])
1353 | if self.regexp('^197',timestamp):
 | # a 197x result means the stored value was epoch seconds, so use the seconds-based "newtime" column
1354 | _extraction_dict['1-Last date'] = self.decode_reg(row[2])
1355 | else:
1356 | _extraction_dict['1-Last date'] = timestamp
1357 |
1358 | _extraction_list.append(_extraction_dict)
1359 |
1360 | download_dir_extraction_dict[bbdd] = _extraction_list
1361 |
1362 | self.total_extraction["downloads_dir"] = download_dir_extraction_dict
1363 |
1364 | cursor.close()
1365 | conn.close()
1366 |
1367 | ###############################################################################################################
1368 | ### FORMS #
1369 | ###############################################################################################################
1370 |
1371 | def show_forms(self,dir):
1372 | forms_extraction_dict = {}
1373 |
1374 | bbdd = self.get_path_by_os(dir, 'formhistory.sqlite')
1375 |
1376 | if path.isfile(bbdd) == False:
1377 | self.log("INFO","Forms database not found! Please, check file formhistory.sqlite")
1378 | return
1379 |
1380 | conn = sqlite3.connect(bbdd)
1381 | conn.text_factory = bytes
1382 |
1383 | if self.args.is_regexp_ok == True:
1384 | conn.create_function("REGEXP", 2, self.regexp)
1385 |
1386 | cursor = conn.cursor()
1387 | sqlite_query = "select fieldname,value,timesUsed,datetime(firstUsed/1000000,'unixepoch','localtime') as first,datetime(lastUsed/1000000,'unixepoch','localtime') as last from moz_formhistory"
1388 | self.execute_query(cursor,sqlite_query,self.forms_filters)
1389 |
1390 | _extraction_list = []
1391 | for row in cursor:
1392 | _extraction_dict = {}
1393 | _extraction_dict['0-Name'] = self.decode_reg(row[0])
1394 | _extraction_dict['1-Value'] = self.decode_reg(row[1])
1395 | _extraction_dict['2-Times Used'] = self.decode_reg(row[2])
1396 | _extraction_dict['3-First Used'] = self.decode_reg(row[3])
1397 | _extraction_dict['4-Last Used'] = self.decode_reg(row[4])
1398 | _extraction_list.append(_extraction_dict)
1399 |
1400 | forms_extraction_dict[bbdd] = _extraction_list
1401 |
1402 | self.total_extraction["forms"] = forms_extraction_dict
1403 |
1404 | cursor.close()
1405 | conn.close()
1406 |
1407 | ###############################################################################################################
1408 | ### HISTORY #
1409 | ###############################################################################################################
1410 |
1411 | def show_history(self,dir):
1412 | history_extraction_dict = {}
1413 |
1414 | bbdd = self.get_path_by_os(dir, 'places.sqlite')
1415 |
1416 | if path.isfile(bbdd) == False:
1417 | self.log("INFO","History database not found! Please, check file places.sqlite")
1418 | return
1419 |
1420 | conn = sqlite3.connect(bbdd)
1421 | conn.text_factory = bytes
1422 |
1423 | if self.args.is_regexp_ok == True:
1424 | conn.create_function("REGEXP", 2, self.regexp)
1425 |
1426 | cursor = conn.cursor()
1427 | sqlite_query = "select datetime(last_visit_date/1000000,'unixepoch','localtime') as last, title, url, visit_count from moz_places"
1428 |
1429 | if self.args.is_frequency_ok == False:
1430 | self.execute_query(cursor,sqlite_query,self.history_filters,"ORDER BY last COLLATE NOCASE")
1431 | else:
1432 | self.execute_query(cursor,sqlite_query,self.history_filters,"ORDER BY visit_count COLLATE NOCASE DESC")
1433 |
1434 | _extraction_list = []
1435 | for row in cursor:
1436 | _extraction_dict = {}
1437 | _extraction_dict['0-Last Access'] = self.decode_reg(row[0])
1438 | _extraction_dict['1-Title'] = self.decode_reg(row[1])
1439 | _extraction_dict['2-URL'] = self.decode_reg(row[2])
1440 | _extraction_dict['3-Frequency'] = self.decode_reg(row[3])
1441 | _extraction_list.append(_extraction_dict)
1442 |
1443 | history_extraction_dict[bbdd] = _extraction_list
1444 |
1445 | self.total_extraction["history"] = history_extraction_dict
1446 |
1447 | cursor.close()
1448 | conn.close()
1449 |
1450 | ###############################################################################################################
1451 | ### BOOKMARKS #
1452 | ###############################################################################################################
1453 |
1454 | def show_bookmarks(self,dir):
1455 | bookmarks_extraction_dict = {}
1456 |
1457 | bbdd = self.get_path_by_os(dir, 'places.sqlite')
1458 |
1459 | if path.isfile(bbdd) == False:
1460 | self.log("INFO","Bookmarks database not found! Please, check file places.sqlite")
1461 | return
1462 |
1463 | conn = sqlite3.connect(bbdd)
1464 | conn.text_factory = bytes
1465 |
1466 | if self.args.is_regexp_ok == True:
1467 | conn.create_function("REGEXP", 2, self.regexp)
1468 |
1469 | cursor = conn.cursor()
1470 | sqlite_query = 'select bm.title,pl.url,datetime(bm.dateAdded/1000000,"unixepoch","localtime") as create_date,datetime(bm.lastModified/1000000,"unixepoch","localtime") as last from moz_places pl,moz_bookmarks bm where pl.id = bm.fk'
1471 | self.execute_query(cursor,sqlite_query,self.bookmarks_filters)
1472 |
1473 | _extraction_list = []
1474 | for row in cursor:
1475 | _extraction_dict = {}
1476 | _extraction_dict['0-Title'] = self.decode_reg(row[0])
1477 | _extraction_dict['1-URL'] = self.decode_reg(row[1])
1478 | _extraction_dict['2-Creation Time'] = self.decode_reg(row[2])
1479 | _extraction_dict['3-Last Modified'] = self.decode_reg(row[3])
1480 | _extraction_list.append(_extraction_dict)
1481 |
1482 | bookmarks_extraction_dict[bbdd] = _extraction_list
1483 |
1484 | self.total_extraction["bookmarks"] = bookmarks_extraction_dict
1485 |
1486 | cursor.close()
1487 | conn.close()
1488 |
1489 | ###############################################################################################################
1490 | ### KEY PINNING #
1491 | ###############################################################################################################
1492 |
1493 | def show_key_pinning(self,dir):
1494 | keypinning_extraction_dict = {}
1495 |
1496 | bbdd = self.get_path_by_os(dir, 'SiteSecurityServiceState.txt')
1497 |
1498 | if path.isfile(bbdd) == False:
1499 | self.log("INFO","Key pinning database not found! Please, check file SiteSecurityServiceState.txt")
1500 | return
1501 |
1502 | lines = open(bbdd).readlines()
1503 |
1504 | nl = 0
1505 | _extraction_list = []
1506 | for entry in lines:
1507 | if lines[nl].split() and lines[nl].split()[0].startswith("#") == False:
1508 | _extraction_dict = {}
1509 |
1510 | entry_type = lines[nl].split()[0].split(':')[1]
1511 | entry_last = lines[nl].split()[2]
1512 | entry_data = lines[nl].split()[3]
1513 | entry_expiry = entry_data.split(',')[0]
1514 | entry_state = entry_data.split(',')[1]
1515 | entry_subdomain = entry_data.split(',')[2]
1516 |
1517 | if entry_state == '0':
1518 | entry_state_desc = "- Disabled"
1519 | elif entry_state == '1':
1520 | entry_state_desc = "- Enabled"
1521 | elif entry_state == '2':
1522 | entry_state_desc = "- Overwritten"
1523 | else:
1524 | entry_state_desc = ""
1525 |
1526 | condition = True
1527 | if len(self.keypinning_filters) > 0:
1528 | for f in self.keypinning_filters:
1529 | if f[1] == 'type':
1530 | condition = (entry_type == f[2])
1531 | if condition:
1532 | _extraction_dict["0-Site"] = lines[nl].split()[0].split(':')[0]
1533 | _extraction_dict["1-Type"] = entry_type
1534 | _extraction_dict["2-Access-Score"] = lines[nl].split()[1]
1535 | _extraction_dict["3-Last-Access"] = self.serial_date_to_string( int(entry_last) )
1536 | _extraction_dict["4-Expiry-Date"] = datetime.fromtimestamp(int(entry_expiry)/1000).strftime('%Y-%m-%d %H:%M:%S')
1537 | _extraction_dict["5-Security-Property-State"] = entry_state + " "+ entry_state_desc
1538 |
1539 | if entry_subdomain == '1':
1540 | _extraction_dict["6-Include-Subdomains"] = "Yes"
1541 | else:
1542 | _extraction_dict["6-Include-Subdomains"] = "No"
1543 |
1544 | if entry_type == 'HPKP':
1545 | pins = entry_data.split(',')[3].split('=')
1546 | pin_cnt = 1
1547 | for pin in pins:
1548 | if pin != "":
1549 | _extraction_dict["7-Pin-" + str(pin_cnt)] = pin
1550 | pin_cnt += 1
1551 |
1552 | _extraction_list.append(_extraction_dict)
1553 | nl = nl + 1
1554 |
1555 | keypinning_extraction_dict[bbdd] = _extraction_list
1556 |
1557 | self.total_extraction["keypinning"] = keypinning_extraction_dict
1558 |
1559 |
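 | # Illustrative sketch (not called anywhere) of the whitespace-separated entry
 | # format parsed above, e.g. "example.com:HSTS 0 17500 1600000000000,1,1":
 | def _demo_parse_sss_entry(line):
 |     site_type, score, last_day, data = line.split()[:4]
 |     site, entry_type = site_type.split(':')[0], site_type.split(':')[1]
 |     expiry_ms, state, subdomains = data.split(',')[:3]
 |     return {"site": site, "type": entry_type, "expiry_ms": int(expiry_ms),
 |             "state": state, "include_subdomains": subdomains == '1'}
 |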
1560 | ###############################################################################################################
1561 | ### OFFLINE CACHE #
1562 | ###############################################################################################################
1563 |
1564 | def show_cache(self,dir):
1565 | # TODO: firefox-cache2-index-parser.py??
1566 | offlinecache_extraction_dict = {}
1567 | cache_found = False
1568 |
1569 | # [Default, Windows 7]
1570 | cache_abs_sources = [self.get_path_by_os(dir,"index.sqlite","OfflineCache")]
1571 |
1572 | # For Windows 7 profile
1573 | if dir.find("Roaming") > -1:
1574 | cache_abs_sources.append(self.get_path_by_os(dir.replace("Roaming", "Local"),"index.sqlite","OfflineCache"))
1575 |
1576 | # For Linux profile
1577 | if dir.find(".mozilla") > -1:
1578 | cache_abs_sources.append(self.get_path_by_os(dir.replace(".mozilla", ".cache/mozilla"),"index.sqlite","OfflineCache")) # Firefox
1579 | cache_abs_sources.append(self.get_path_by_os(dir.replace(".mozilla", ".cache/mozilla"),"index.sqlite","Cache")) # Seamonkey
1580 |
1581 | for d in cache_abs_sources:
1582 | # Checking source file
1583 | if path.isfile(d) == True:
1584 |
1585 | cache_found = True
1586 |
1587 | if d.endswith(".sqlite") == True:
1588 | # SQLITE
1589 | conn = sqlite3.connect(d)
1590 | conn.text_factory = bytes
1591 | if self.args.is_regexp_ok == True:
1592 | conn.create_function("REGEXP", 2, self.regexp)
1593 |
1594 | cursor = conn.cursor()
1595 | sqlite_query = "select ClientID,key,DataSize,FetchCount,datetime(LastFetched/1000000,'unixepoch','localtime'),datetime(LastModified/1000000,'unixepoch','localtime') as last,datetime(ExpirationTime/1000000,'unixepoch','localtime') from moz_cache"
1596 | self.execute_query(cursor,sqlite_query,self.cacheoff_filters)
1597 |
1598 | _extraction_list = []
1599 | for row in cursor:
1600 | _extraction_dict = {}
1601 | _extraction_dict['0-Name'] = self.decode_reg(row[0])
1602 | _extraction_dict['1-Value'] = str(self.decode_reg(row[1]))
1603 | _extraction_dict['2-Last Modified'] = str(self.decode_reg(row[5]))
1604 | _extraction_list.append(_extraction_dict)
1605 |
1606 | offlinecache_extraction_dict[d] = _extraction_list
1607 |
1608 | cursor.close()
1609 | conn.close()
1610 |
1611 | # Saving extraction to main extraction list
1612 | self.total_extraction["offlinecache"] = offlinecache_extraction_dict
1613 | if cache_found == False:
1614 | self.log("INFO","Offline Cache database not found! Please check file OfflineCache/index.sqlite")
1615 |
1616 | ###############################################################################################################
1617 | ### OFFLINE CACHE #
1618 | ###############################################################################################################
1619 |
1620 | def show_cache_extract(self,dir, directory):
1621 | # TODO: include firefox-cache2-file-parser.py
1622 | offlinecache_ext_extraction_dict = {}
1623 | cache_found = False
1624 |
1625 | try:
1626 | import magic
1627 | except:
1628 | self.log("ERROR","Failed to import magic module!")
1629 | return
1630 |
1631 | # [Default, Windows 7]
1632 | cache_abs_sources = [self.get_path_by_os(dir,"OfflineCache")]
1633 |
1634 | # For Windows 7 profile
1635 | if dir.find("Roaming") > -1:
1636 | cache_abs_sources.append(self.get_path_by_os(dir.replace("Roaming", "Local"),"OfflineCache"))
1637 |
1638 | # For Linux profile
1639 | if dir.find(".mozilla") > -1:
1640 | cache_abs_sources.append(self.get_path_by_os(dir.replace(".mozilla", ".cache/mozilla"),"OfflineCache")) # Firefox
1641 | cache_abs_sources.append(self.get_path_by_os(dir.replace(".mozilla", ".cache/mozilla"),"Cache")) # Seamonkey
1642 |
1643 | for d in cache_abs_sources:
1644 | _extraction_list = []
1645 | count = 0
1646 | # Checking source directory
1647 | if path.isdir(d) == True:
1648 |
1649 | cache_found = True
1650 |
1651 | if sys.platform.startswith('win') == True:
1652 | # Windows systems
1653 | for dirname, dirnames, filenames in walk(d):
1654 | for filename in filenames:
1655 | _extraction_dict = {}
1656 | file = path.join(dirname, filename)
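 | # python-magic on Windows needs an explicit magic database; "magicpath"
 | # is assumed to be defined earlier in this script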
1657 | mime = magic.Magic(magic_file=magicpath)
1658 |
1659 | if not path.exists(directory):
1660 | makedirs(directory)
1661 |
1662 | if mime.from_file(file).decode('unicode-escape').startswith("gzip"):
1663 | if not path.exists(directory+"\\gzip"):
1664 | makedirs(directory+"\\gzip")
1665 | shutil.copy2(file, directory+"\\gzip\\"+filename+".gz")
1666 |
1667 | elif mime.from_file(file).decode('unicode-escape').find("image") != -1 :
1668 | if not path.exists(directory+"\\images"):
1669 | makedirs(directory+"\\images")
1670 | if mime.from_file(file).decode('unicode-escape').find("JPEG") != -1 or mime.from_file(file).decode('unicode-escape').find("jpg") != -1:
1671 | shutil.copy2(file, directory+"\\images\\"+filename+".jpg")
1672 | elif mime.from_file(file).decode('unicode-escape').find("GIF") != -1:
1673 | shutil.copy2(file, directory+"\\images\\"+filename+".gif")
1674 | elif mime.from_file(file).decode('unicode-escape').find("BMP") != -1:
1675 | shutil.copy2(file, directory+"\\images\\"+filename+".bmp")
1676 | elif mime.from_file(file).decode('unicode-escape').find("PNG") != -1:
1677 | shutil.copy2(file, directory+"\\images\\"+filename+".png")
1678 | elif mime.from_file(file).decode('unicode-escape').find("X-ICON") != -1:
1679 | shutil.copy2(file, directory+"\\images\\"+filename+".ico")
1680 | else:
1681 | shutil.copy2(file, directory+"/images/"+filename)
1682 |
1683 | elif mime.from_file(file).decode('unicode-escape').find("text") != -1:
1684 | if not path.exists(directory+"\\text"):
1685 | makedirs(directory+"\\text")
1686 | shutil.copy2(file, directory+"\\text\\"+filename+".txt")
1687 |
1688 | else:
1689 | if not path.exists(directory+"\\others"):
1690 | makedirs(directory+"\\others")
1691 | shutil.copy2(file, directory+"\\others\\"+filename)
1692 |
1693 | if filename != "index.sqlite":
1694 | count_alpha = str(count).zfill(6)
1695 | _extraction_dict = {count_alpha + "-Copying "+filename : mime.from_file(file).decode('unicode-escape')}
1696 |
1697 | if len(_extraction_dict) > 0:
1698 | _extraction_list.append(_extraction_dict)
1699 |
1700 | count = count + 1
1701 |
1702 | try:
1703 | remove(directory+"\\index.sqlite")
1704 | except:
1705 | self.log("WARNING","Failed to remove index.sqlite from "+directory)
1706 |
1707 | else:
1708 | # Unix systems
1709 | for dirname, dirnames, filenames in walk(d):
1710 | for filename in filenames:
1711 | _extraction_dict = {}
1712 | file = path.join(dirname, filename)
1713 | mime = magic.Magic(mime=True)
1714 | if not path.exists(directory):
1715 | makedirs(directory)
1716 | if mime.from_file(file).decode('unicode-escape') == "application/x-gzip":
1717 | if not path.exists(directory+"/gzip/"):
1718 | makedirs(directory+"/gzip/")
1719 | shutil.copy2(file, directory+"/gzip/"+filename+".gz")
1720 |
1721 | elif mime.from_file(file).decode('unicode-escape').startswith("image"):
1722 | if not path.exists(directory+"/images/"):
1723 | makedirs(directory+"/images/")
1724 | if mime.from_file(file).decode('unicode-escape').find("jpeg") != -1 or mime.from_file(file).decode('unicode-escape').find("jpg") != -1:
1725 | shutil.copy2(file, directory+"/images/"+filename+".jpg")
1726 | elif mime.from_file(file).decode('unicode-escape').find("gif") != -1:
1727 | shutil.copy2(file, directory+"/images/"+filename+".gif")
1728 | elif mime.from_file(file).decode('unicode-escape').find("bmp") != -1:
1729 | shutil.copy2(file, directory+"/images/"+filename+".bmp")
1730 | elif mime.from_file(file).decode('unicode-escape').find("png") != -1:
1731 | shutil.copy2(file, directory+"/images/"+filename+".png")
1732 | elif mime.from_file(file).decode('unicode-escape').find("x-icon") != -1:
1733 | shutil.copy2(file, directory+"/images/"+filename+".ico")
1734 | else:
1735 | shutil.copy2(file, directory+"/images/"+filename)
1736 |
1737 | elif mime.from_file(file).decode('unicode-escape').startswith("text"):
1738 | if not path.exists(directory+"/text/"):
1739 | makedirs(directory+"/text/")
1740 | shutil.copy2(file, directory+"/text/"+filename+".txt")
1741 |
1742 | else:
1743 | if not path.exists(directory+"/others/"):
1744 | makedirs(directory+"/others/")
1745 | shutil.copy2(file, directory+"/others/"+filename)
1746 |
1747 | if filename != "index.sqlite":
1748 | count_alpha = str(count).zfill(6)
1749 | _extraction_dict = {count_alpha + "-Copying "+filename : mime.from_file(file).decode('unicode-escape')}
1750 |
1751 | if len(_extraction_dict) > 0:
1752 | _extraction_list.append(_extraction_dict)
1753 |
1754 | count = count + 1
1755 | try:
1756 | remove(directory+"/index.sqlite")
1757 | except:
1758 | self.log("WARNING","Failed to remove index.sqlite from "+directory)
1759 |
1760 | offlinecache_ext_extraction_dict[d] = _extraction_list
1761 |
1762 | self.total_extraction["offlinecache_extract"] = offlinecache_ext_extraction_dict
1763 |
1764 | ###############################################################################################################
1765 | ### THUMBNAILS #
1766 | ###############################################################################################################
1767 |
1768 | def show_thumbnails(self,dir, directory = None):
1769 | thumbnails_found = False
1770 | thumbnails_extraction_dict = {}
1771 |
1772 | # [Default, Windows 7]
1773 | thumbnails_sources = [self.get_path_by_os(dir,"thumbnails")]
1774 |
1775 | # For Windows 7 profile
1776 | if dir.find("Roaming") > -1:
1777 | thumbnails_sources.append(self.get_path_by_os(dir.replace("Roaming", "Local"),"thumbnails"))
1778 |
1779 | # For Linux profile
1780 | if dir.find(".mozilla") > -1:
1781 | thumbnails_sources.append(self.get_path_by_os(dir.replace(".mozilla", ".cache/mozilla"),"thumbnails"))
1782 |
1783 | for d in thumbnails_sources:
1784 | if path.exists(d):
1785 | thumbnails_found = True
1786 |
1787 | _extraction_list = []
1788 | for dirname, dirnames, filenames in walk(d):
1789 | for filename in filenames:
1790 | _extraction_dict = {}
1791 | if directory == None:
1792 | nfile = self.get_path_by_os(dirname, filename)
1793 | _extraction_dict['0-File'] = nfile
1794 | else:
1795 | nfile = self.get_path_by_os(dirname, filename)
1796 | if not path.exists(directory):
1797 | try:
1798 | makedirs(directory)
1799 | except:
1800 | self.log('ERROR', 'Can\'t create thumbnails folder: ' + directory)
1801 | return
1802 | try:
1803 | shutil.copy2(nfile, directory)
1804 | except:
1805 | self.log('ERROR', 'Can\'t copy thumbnail: ' + nfile)
1806 | _extraction_dict['0-File'] = "Copy "+nfile+" to "+directory
1807 | if len(_extraction_dict) > 0:
1808 | _extraction_list.append(_extraction_dict)
1809 |
1810 | thumbnails_extraction_dict[d] = _extraction_list
1811 |
1812 | # Saving extraction to main extraction list
1813 | self.total_extraction["thumbnails"] = thumbnails_extraction_dict
1814 | if thumbnails_found == False:
1815 | self.log("INFO","No thumbnails found!")
1816 |
1817 | ###############################################################################################################
1818 | ### CERT OVERRIDE #
1819 | ###############################################################################################################
1820 |
1821 | def show_cert_override(self,dir):
1822 | cert_override_extraction_dict = {}
1823 |
1824 | bbdd = self.get_path_by_os(dir,"cert_override.txt")
1825 |
1826 | if path.isfile(bbdd):
1827 | lines = open(bbdd).readlines()
1828 |
1829 | nl = 0
1830 | _extraction_list = []
1831 | for cert in lines:
1832 | if lines[nl].split()[0].startswith("#") == False:
1833 | _extraction_dict = {}
1834 | _extraction_dict["0-Site"] = lines[nl].split()[0]
1835 | _extraction_dict["1-Hash Algorithm"] = lines[nl].split()[1]
1836 | _extraction_dict["2-Data"] = lines[nl].split()[2]
1837 | _extraction_list.append(_extraction_dict)
1838 | nl = nl + 1
1839 |
1840 | cert_override_extraction_dict[bbdd] = _extraction_list
1841 | else:
1842 | self.log("INFO","Cert override file not found! Please, check file cert_override.txt")
1843 |
1844 | self.total_extraction["cert_override"] = cert_override_extraction_dict
1845 |
1846 | ###############################################################################################################
1847 | ### WATCH #
1848 | ###############################################################################################################
1849 |
1850 | def show_watch(self,dir,watch_text = 1):
1851 | sw_py_path = self.PYTHON3_PATH
1852 | print(sys.platform)
1853 | if sys.platform.startswith('win'):
1854 | self.log("CRITICAL","--Watch option not supported on Windows!")
1855 | exit(2)
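 | # note: sys.platform on macOS is "darwin", which ends with "win" -- this
 | # branch matches macOS only because the startswith("win") check ran first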
1856 | elif sys.platform.endswith('win'):
1857 | self.log("CRITICAL","--Watch option not supported on MacOS!")
1858 | exit(2)
1859 | elif sw_py_path == '':
1860 | sw_py_path = self.get_user_value('Python 3 path (Press Enter for default - ' + self.PYTHON3_DEF + '): ').strip() # Python 3.x path (NO Windows). Example: /usr/bin/python3.4
1861 | if sw_py_path == '':
1862 | sw_py_path = self.PYTHON3_DEF
1863 |
1864 | if not path.isfile(sw_py_path):
1865 | self.log("CRITICAL","Python path '" + sw_py_path + "' is not a valid file path.")
1866 | sys.exit(1)
1867 |
1868 | elif watch_text == 1:
1869 | cmd = ["watch", "-n", "4",sw_py_path, path.abspath(__file__), dir, "--Live"]
1870 | call(cmd)
1871 | else:
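 | # watch(1) concatenates its arguments and runs them through "sh -c",
 | # so the pipe to grep below is interpreted by the shell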
1872 | cmd = ["watch", "-n", "4",sw_py_path, path.abspath(__file__), dir, "--Live", "| grep --group-separator '' -A 2 -B 2 -i", "'"+watch_text+"'" ]
1873 | call(cmd)
1874 |
1875 | def get_param_argurment(self, arg, num):  # needs self: the body below calls self.log() and self.show_help(); misspelled name kept for compatibility
1876 | rparam = ""
1877 | try:
1878 | rparam = sys.argv[num]
1879 | return rparam
1880 | except:
1881 | self.log("CRITICAL","Missing argument for parameter " + arg)
1882 | self.show_help()
1883 |
1884 | ###############################################################################################################
1885 | ### SESSION #
1886 | ###############################################################################################################
1887 |
1888 | def show_session(self,dir):
1889 | session_extraction_dict = {}
1890 | session_found = False
1891 | session_sources = ["sessionstore.js","sessionstore.json","sessionstore.bak"]
1892 | # Checking for more backup session sources (I)
1893 | for s in os.listdir(dir):
1894 | if not s.startswith('.'):
1895 | # Adding new source
1896 | if path.isfile(path.join(dir,s)) and s.startswith("sessionstore") and s not in session_sources:
1897 | session_sources.append(s)
1898 |
1899 | # Checking for more backup session sources (II)
1900 | session_folder = path.join(dir,"sessionstore-backups")
1901 | if path.isdir(session_folder):
1902 | for s in os.listdir(session_folder):
1903 | if not s.startswith('.'):
1904 | # Adding new source
1905 | if path.isfile(path.join(session_folder,s)):
1906 | session_sources.append(path.join("sessionstore-backups",s))
1907 |
1908 | # Extraction
1909 | for a in session_sources:
1910 | bbdd = os.path.join(dir,a)
1911 | # Checking source file
1912 | if path.isfile(bbdd) == True:
1913 | session_found = True
1914 | f = open(bbdd)
1915 | jdata = self.getJSON(f)
1916 | f.close()
1917 |
1918 | _extraction_list = self.extract_data_session(jdata, a)
1919 |
1920 | session_extraction_dict[bbdd] = _extraction_list
1921 |
1922 | # Saving extraction to main extraction list
1923 | self.total_extraction["session"] = session_extraction_dict
1924 | if not session_found:
1925 | self.log("WARNING","No session info found!")
1926 |
1927 | ###############################################################################################################
1928 | ### DATA SESSION #
1929 | ###############################################################################################################
1930 |
1931 | def extract_data_session(self,jdata,source):
1932 | _extraction_list = []
1933 | try:
1934 | nodes = [ "windows", "_closedWindows" ]
1935 |
1936 | for node in nodes:
1937 | data = jdata.get(node)
1938 | if len(data) > 0:
1939 | for win in data:
1940 | for tab in win.get("tabs"):
1941 | _extraction_dict = {}
1942 | _extraction_dict["01-Last update"] = str(time.ctime(jdata["session"]["lastUpdate"]/1000.0))
1943 | _extraction_dict["02-Type"] = node
1944 |
1945 | if tab.get("index") is not None:
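 | # "index" is 1-based and points at the tab's current history entry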
1946 | i = tab.get("index") - 1
1947 |
1948 | _extraction_dict["03-Title"] = tab.get("entries")[i].get("title")
1949 | _extraction_dict["04-URL"] = tab.get("entries")[i].get("url")
1950 | if tab.get("entries")[i].get("referrer") is not None:
1951 | _extraction_dict["05-Referrer"] = tab.get("entries")[i].get("referrer")
1952 |
1953 | if tab.get("entries")[i].get("formdata") is not None and str(tab.get("entries")[i].get("formdata")) != "{}" :
1954 | if str(tab.get("entries")[i].get("formdata").get("xpath")) == "{}" and str(tab.get("entries")[i].get("formdata").get("id")) != "{}":
1955 | _extraction_dict["06-Form"] = tab.get("entries")[i].get("formdata").get("id")
1956 | elif str(tab.get("entries")[i].get("formdata").get("xpath")) != "{}" and str(tab.get("entries")[i].get("formdata").get("id")) == "{}":
1957 | _extraction_dict["06-Form"] = tab.get("entries")[i].get("formdata").get("xpath")
1958 | else:
1959 | _extraction_dict["06-Form"] = tab.get("entries")[i].get("formdata")
1960 |
1961 | _extraction_list.append(_extraction_dict)
1962 |
1963 | # Closed tabs
1964 | if win.get("_closedTabs") is not None and len(win.get("_closedTabs")) > 0:
1965 | for closed_tab in win.get("_closedTabs")[0].get("state").get("entries"):
1966 | _extraction_dict = {}
1967 | _extraction_dict["07-Last update"] = str(time.ctime(jdata["session"]["lastUpdate"]/1000.0))
1968 | _extraction_dict["08-Type"] = "_closedTabs"
1969 | _extraction_dict["09-Title"] = closed_tab.get("title")
1970 | _extraction_dict["10-URL"] = closed_tab.get("url")
1971 | _extraction_list.append(_extraction_dict)
1972 |
1973 | except:
1974 | e = str(sys.exc_info()[0])
1975 | self.log("ERROR","Sessions database: Can't process file " + source + ": " + e )
1976 |
1977 | return _extraction_list
1978 |
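 | # Minimal sessionstore shape assumed by extract_data_session() above:
 | # {"session": {"lastUpdate": 1600000000000},
 | #  "windows": [{"tabs": [{"index": 1, "entries": [{"title": "t", "url": "u"}]}],
 | #               "_closedTabs": []}],
 | #  "_closedWindows": []}
 |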
1979 | ###############################################################################################################
1980 | ### DATA SESSION WATCH #
1981 | ###############################################################################################################
1982 |
1983 | def extract_data_session_watch (self,dir):
1984 | session_watch_found = False
1985 | session_watch_sources = ["sessionstore.js","sessionstore.json"]
1986 | # Checking for more backup session sources (I)
1987 | for s in os.listdir(dir):
1988 | # Adding new source
1989 | if path.isfile(path.join(dir,s)) and s.startswith("sessionstore") and s not in session_watch_sources:
1990 | session_watch_sources.append(s)
1991 | # Checking for more backup session sources (II)
1992 | session_watch_folder = path.join(dir,"sessionstore-backups")
1993 | if path.isdir(session_watch_folder):
1994 | for s in os.listdir(session_watch_folder):
1995 | # Adding new source
1996 | if path.isfile(path.join(session_watch_folder,s)):
1997 | session_watch_sources.append(path.join("sessionstore-backups",s))
1998 |
1999 | higher_date = 0
2000 | higher_source = ""
2001 | for a in session_watch_sources:
2002 | bbdd = os.path.join(dir,a)
2003 | # Checking source file
2004 | if path.isfile(bbdd) == True:
2005 | session_watch_found = True
2006 | f = open(bbdd)
2007 | jdata = self.getJSON(f)
2008 | f.close()
2009 | if jdata["session"]["lastUpdate"] > higher_date:
2010 | higher_date=jdata["session"]["lastUpdate"]
2011 | higher_source=bbdd
2012 |
2013 | # Showing last updated session data
2014 | if session_watch_found == True:
2015 | f = open(higher_source)
2016 | jdata = self.getJSON(f)
2017 | f.close()
2018 | count = 0
2019 | countform = 0
2020 | try:
2021 | for win in jdata.get("windows"):
2022 | for tab in win.get("tabs"):
2023 | if tab.get("index") is not None:
2024 | i = tab.get("index") - 1
2025 | print ("\nTitle: %s" % tab.get("entries")[i].get("title"))
2026 | print ("URL: %s" % tab.get("entries")[i].get("url"))
2027 | #print(str(tab.get("entries")[i]))
2028 | if tab.get("entries")[i].get("formdata") is not None and str(tab.get("entries")[i].get("formdata")) != "{}" :
2029 | countform = countform + 1
2030 | if str(tab.get("entries")[i].get("formdata").get("xpath")) == "{}" and str(tab.get("entries")[i].get("formdata").get("id")) != "{}":
2031 | print ("Form: %s\n" % tab.get("entries")[i].get("formdata").get("id"))
2032 | elif str(tab.get("entries")[i].get("formdata").get("xpath")) != "{}" and str(tab.get("entries")[i].get("formdata").get("id")) == "{}":
2033 | print ("Form: %s\n" % tab.get("entries")[i].get("formdata").get("xpath"))
2034 | else:
2035 | print ("Form: %s\n" % tab.get("entries")[i].get("formdata"))
2036 | count = count + 1
2037 | except:
2038 | e = str(sys.exc_info()[0])
2039 | self.log("ERROR","Can't process file " + higher_source + ": " + e )
2040 |
2041 | print ("\n[INFO] Last update: %s " % time.ctime(jdata["session"]["lastUpdate"]/1000.0))
2042 | print ("[INFO] Number of windows / tabs in use: %s" % count)
2043 | print ("[INFO] Number of webs with forms in use: %s" % countform)
2044 | print ("[INFO] Exit: Ctrl + C")
2045 |
2046 | ###############################################################################################################
2047 | ### HELP #
2048 | ###############################################################################################################
2049 |
2050 | def show_full_help(self):
2051 | logo = """
2052 | ./oyhhyo/-` `..`
2053 | :smMNdhyyyhdNMNNmmmNh-
2054 | .omMNhsoshddyoohhhdmNMMMN`
2055 | :hMMMNysoydNNoosdhhNdhdMMNMy:
2056 | `+mMMMMMMhooooshoymNdsshsmmhmhhmNhs/.
2057 | .+//smMMMdooooosyhdddhohmhsyysoooosydmNds-
2058 | .yNNMMNddoooomoooooooodsNMmyoooooooooohMM-
2059 | `+s/omMMdosooosMyoooooooNdMMMMMmyoooooooNmM/
2060 | +yoyMMMsooooyMNdoooooohMMNNMMMmNmdhyssssMy
2061 | ``-dNhMMMyooosdysshyooooshdyyhmNNMNmNMmNNN.
2062 | `+--mNMMMhoooossoodNdhyyhddmNmhyyyhNMNmm:
2063 | -/omNMNoooooooooosmMMMNh+::/+syddy/`
2064 | `-hMMNmhsoooooosmmMN/
2065 | +mNdysoooooohdydM-
2066 | /hmhsoooooyyNMh Dumpzilla Forensic Tool
2067 | `/shddhhmNMM- www.dumpzilla.org
2068 | `-:////. %s
2069 |
2070 | Usage:
2071 |
2072 | """ % (self.VERSION)
2073 | print(logo + self.get_help_msg())
2074 |
2075 | def show_help(self):
2076 | print('Usage: ' + self.get_help_msg())
2077 |
2078 | def get_help_msg(self):
2079 | return format("""python dumpzilla.py PROFILE_DIR [OPTIONS]
2080 |
2081 | Options:
2082 |
2083 | --Addons
2084 | --Search
2085 | --Bookmarks [-bm_create_range <start> <end>][-bm_last_range <start> <end>]
2086 | --Certoverride
2087 | --Cookies [-showdom] [-domain <string>] [-name <string>] [-hostcookie <string>] [-access <date>] [-create <date>]
2088 | [-secure <0|1>] [-httponly <0|1>] [-last_range <start> <end>] [-create_range <start> <end>]
2089 | --Downloads [-range <date>]
2090 | --Export <directory> (export data as json)
2091 | --Forms [-value <string>] [-forms_range <start> <end>]
2092 | --Help (shows this help message and exit)
2093 | --History [-url <string>] [-title <string>] [-date <date>] [-history_range <start> <end>] [-frequency]
2094 | --Keypinning [-entry_type <HPKP|HSTS>]
2095 | --OfflineCache [-cache_range <start> <end> -extract <directory>]
2096 | --Preferences
2097 | --Passwords
2098 | --Permissions [-host <string>] [-modif <date>] [-modif_range <start> <end>]
2099 | --RegExp (use Regular Expressions for string type filters instead of Wildcards)
2100 | --Session
2101 | --Summary (no data extraction, only summary report)
2102 | --Thumbnails [-extract_thumb <directory>]
2103 | --Verbosity (DEBUG|INFO|WARNING|ERROR|CRITICAL)
2104 | --Watch [-text <string>] (shows in daemon mode the URLs and text form in real time; Unix only)
2105 |
2106 | Wildcards (without RegExp option):
2107 |
2108 | '%%' Any string of any length (including zero length)
2109 | '_' Single character
2110 | '\\' Escape character
2111 |
2112 | Regular Expressions: https://docs.python.org/3/library/re.html
2113 |
2114 | Date syntax:
2115 |
2116 | YYYY-MM-DD hh:mi:ss (wildcards allowed)
2117 |
2118 | Profile location:
2119 |
2120 | WinXP profile -> 'C:\\Documents and Settings\\%%USERNAME%%\\Application Data\\Mozilla\\Firefox\\Profiles\\xxxx.default'
2121 | Win7 profile -> 'C:\\Users\\%%USERNAME%%\\AppData\\Roaming\\Mozilla\\Firefox\\Profiles\\xxxx.default'
2122 | MacOS profile -> '/Users/$USER/Library/Application\ Support/Firefox/Profiles/xxxx.default'
2123 | Unix profile -> '/home/$USER/.mozilla/firefox/xxxx.default'
2124 | """)
2125 |
2126 | ###############################################################################################################
2127 | ## #
2128 | ### MAIN #
2129 | ## #
2130 | ###############################################################################################################
2131 | def __init__(self, argv):
2132 |
2133 | # Log Levels
2134 | self.logger = logging.getLogger()
2135 | self.logger.setLevel(logging.WARNING)
2136 | self.verbosity_level = "WARNING";
2137 | self.log('DEBUG', 'Initialization')
2138 |
2139 | # Argparse init
2140 | parser = argparse.ArgumentParser(usage=self.get_help_msg(), add_help=False)
2141 | parser.add_argument('PROFILE_DIR')
2142 | is_all_ok = False
2143 | if len(argv) == 2:
2144 | is_all_ok = True
2145 |
2146 | ###############
2147 | ### ARG PARSER
2148 | ###############
2149 | parser.add_argument("--RegExp", action="store_true", default=False, dest='is_regexp_ok',
2150 | help="(uses Regular Expresions for string type filters instead of Wildcards)")
2151 | parser.add_argument("--Summary", action="store_true", default=False, dest='is_summary_ok',
2152 | help="(only shows debug messages and summary report)")
2153 | #...........................................
2154 | #... Help message
2155 | #...........................................
2156 | parser.add_argument("--Help", action="store_true", default=False, dest='is_help_ok',
2157 | help="Shows this help message and exit")
2158 | #...........................................
2159 | #... Cookie parameters
2160 | #...........................................
2161 | parser.add_argument("--Cookies", action="store_true", default=is_all_ok, dest='is_cookie_ok',
2162 | help="--Cookies [-showdom -domain -name -hostcookie -access -create -secure <0/1> -httponly <0/1> -last_range -create_range ]")
2163 | parser.add_argument("-showdom", action="store_true",
2164 | help="[-showdom]")
2165 | parser.add_argument("-domain", nargs=1,
2166 | help="[-domain ]")
2167 | parser.add_argument("-name", nargs=1,
2168 | help="[-name ]")
2169 | parser.add_argument("-hostcookie", nargs=1,
2170 | help="[-hostcookie ]")
2171 | parser.add_argument("-access", nargs=1,
2172 | help="[-access ]")
2173 | parser.add_argument("-create", nargs=1,
2174 | help="[-create ]")
2175 | parser.add_argument("-secure", nargs=1, type=int,
2176 | help="[-secure <0/1>]")
2177 | parser.add_argument("-httponly", nargs=1, type=int,
2178 | help="[-httponly <0/1>]")
2179 | parser.add_argument("-last_range", nargs='+',
2180 | help="[-last_range ]")
2181 | parser.add_argument("-create_range", nargs='+',
2182 | help="[-create_range ]")
2183 | #...........................................
2184 | #... Permissions parameters
2185 | #...........................................
2186 | parser.add_argument("--Permissions", action="store_true", default=is_all_ok, dest='is_permissions_ok',
2187 | help="--Permissions [-host -type -modif -modif_range ]")
2188 | parser.add_argument("-host", nargs=1,
2189 | help="[-host ")
2190 | parser.add_argument("-type", nargs=1,
2191 | help="[-type ]")
2192 | parser.add_argument("-modif", nargs=1,
2193 | help="[-modif ")
2194 | parser.add_argument("-modif_range", nargs='+',
2195 | help="[-modif_range ]")
2196 | #...........................................
2197 | #... Preferences parameters
2198 | #...........................................
2199 | parser.add_argument("--Preferences", action="store_true", default=is_all_ok, dest='is_preferences_ok',
2200 | help="")
2201 | #...........................................
2202 | #... Addons parameters
2203 | #...........................................
2204 | parser.add_argument("--Addons", action="store_true", default=is_all_ok, dest='is_addon_ok',
2205 | help="")
2206 | #...........................................
2207 | #... Search engines parameters
2208 | #...........................................
2209 | parser.add_argument("--Search", action="store_true", default=is_all_ok, dest='is_search_ok',
2210 | help="")
2211 | #...........................................
2212 | #... Downloads parameters
2213 | #...........................................
2214 | parser.add_argument("--Downloads", action="store_true", default=is_all_ok, dest='is_downloads_ok',
2215 | help="--Downloads [-range ]")
2216 | parser.add_argument("-range", nargs=1,
2217 | help="[-range ]")
2218 | #...........................................
2219 | #... Forms parameters
2220 | #...........................................
2221 | parser.add_argument("--Forms", action="store_true", default=is_all_ok, dest='is_forms_ok',
2222 | help="--Forms [-value -forms_range ]")
2223 | parser.add_argument("-value", nargs=1,
2224 | help="[-value ]")
2225 | parser.add_argument("-forms_range", nargs='+',
2226 | help="[-forms_range ]")
2227 | #...........................................
2228 | #... History parameters
2229 | #...........................................
2230 | parser.add_argument("--History", action="store_true", default=is_all_ok, dest='is_history_ok',
2231 | help="--History [-url -title -date -history_range -frequency]")
2232 | parser.add_argument("-url", nargs=1,
2233 | help="[-url ]")
2234 | parser.add_argument("-frequency", action="store_true", default=is_all_ok, dest='is_frequency_ok',
2235 | help="[-frequency]")
2236 | parser.add_argument("-title", nargs=1,
2237 | help="[-title ]")
2238 | parser.add_argument("-date", nargs=1,
2239 | help="[-date ]")
2240 | parser.add_argument("-history_range", nargs='+',
2241 | help="[-history_range ]")
2242 | #...........................................
2243 | #... Bookmarks parameters
2244 | #...........................................
2245 | parser.add_argument("--Bookmarks", action="store_true", default=is_all_ok, dest='is_bookmarks_ok',
2246 | help="--Bookmarks [-bm_create_range ][-bm_last_range ]")
2247 | parser.add_argument("-bm_create_range", nargs='+',
2248 | help="[-bm_create_range ]")
2249 | parser.add_argument("-bm_last_range", nargs='+',
2250 | help="[-bm_last_range ]")
2251 | #...........................................
2252 | #... Passwords parameters
2253 | #...........................................
2254 | parser.add_argument("--Passwords", action="store_true", default=is_all_ok, dest='is_passwords_ok',
2255 | help="(decode only in Unix)")
2256 | #...........................................
2257 | #... Cache parameters
2258 | #...........................................
2259 | parser.add_argument("--OfflineCache", action="store_true", default=is_all_ok, dest='is_cacheoff_ok',
2260 | help="--OfflineCache [-cache_range -extract ]")
2261 | parser.add_argument("-cache_range", nargs='+',
2262 | help="[-cache_range ]")
2263 | parser.add_argument("-extract", nargs=1,
2264 | help="[-extract ]")
2265 | #...........................................
2266 | #... Key pinning parameters
2267 | #...........................................
2268 | parser.add_argument("--Keypinning", action="store_true", default=is_all_ok, dest='is_keypinning_ok',
2269 | help="--Keypinning [-entry_type ]")
2270 | parser.add_argument("-entry_type", nargs=1, type=str,
2271 | help="[-entry_type ]")
2272 | #...........................................
2273 | #... Certoverride parameters
2274 | #...........................................
2275 | parser.add_argument("--Certoverride", action="store_true", default=is_all_ok, dest='is_cert_ok',
2276 | help="")
2277 | #...........................................
2278 | #... Thumbnails parameters
2279 | #...........................................
2280 | parser.add_argument("--Thumbnails", action="store_true", default=False, dest='is_thump_ok',
2281 | help="--Thumbnails [-extract_thumb ]")
2282 | parser.add_argument("-extract_thumb", nargs=1,
2283 | help="[-extract_thumb ]")
2284 | #...........................................
2285 | #... Session parameters
2286 | #...........................................
2287 | parser.add_argument("--Session", action="store_true", default=is_all_ok, dest='is_session_ok', help="")
2288 | #...........................................
2289 | #... Export parameters
2290 | #...........................................
2291 | parser.add_argument("--Export" , nargs=1,
2292 | help="[--Export ]")
2293 | #...........................................
2294 | #... Verbosity parameters
2295 | #...........................................
2296 | parser.add_argument("--Verbosity" , nargs=1,
2297 | help="[--Verbosity LEVEL]")
2298 | #...........................................
2299 | #... Live session parameters (watch)
2300 | #...........................................
2301 | parser.add_argument("--Live", action="store_true", default=False, dest='is_live_ok', help="")
2302 | #...........................................
2303 | #... Watch parameters
2304 | #...........................................
2305 | parser.add_argument("--Watch", action="store_true", default=False, dest='is_watch_ok',
2306 | help="--Watch [-text ] (Shows in daemon mode the URLs and text form in real time)")
2307 | parser.add_argument("-text", nargs=1,
2308 | help="[-text ] (-text Option allow filter, supports all grep Wildcards. Exit: Ctrl + C. only Unix)")
2309 |
2310 | self.args = parser.parse_args()
2311 |
2312 | #...........................................
2313 | #...........................................
2314 | dir = format(self.args.PROFILE_DIR)
2315 | self.log('DEBUG', 'dir: '+ dir)
2316 |
2317 | if path.isdir(dir) and len(argv) >= 2:
2318 |
2319 | dir = path.abspath(dir)
2320 |
2321 | if self.args.is_help_ok:
2322 | self.show_full_help()
2323 | sys.exit(0)
2324 |
2325 | if self.args.is_cookie_ok:
2326 | if self.args.showdom:
2327 | self.is_dom_ok = True
2328 | if self.args.domain:
2329 | cookie_domain = format(self.args.domain[0])
2330 | self.domain_filters.append(["string","scope",cookie_domain])
2331 | if self.args.name:
2332 | cookie_name = format(self.args.name[0])
2333 | self.cookie_filters.append(["string","name",cookie_name])
2334 | if self.args.hostcookie:
2335 | cookie_host = format(self.args.hostcookie[0])
2336 | self.cookie_filters.append(["string","host",cookie_host])
2337 | if self.args.access:
2338 | cookie_access_date = self.validate_date(format(self.args.access[0]))
2339 | self.cookie_filters.append(["date","last",cookie_access_date])
2340 | if self.args.create:
2341 | cookie_create_date = self.validate_date(format(self.args.create[0]))
2342 | self.cookie_filters.append(["date","creat",cookie_create_date])
2343 | if self.args.secure:
2344 | cookie_secure = format(self.args.secure[0])
2345 | self.cookie_filters.append(["number","isSecure",cookie_secure])
2346 | if self.args.httponly:
2347 | cookie_httponly = format(self.args.httponly[0])
2348 | self.cookie_filters.append(["number","isHttpOnly",cookie_httponly])
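     | # Range filters accept one or two dates; when the end date is omitted, the
     | # IndexError fallback below substitutes '9999-12-31' so the range stays open-ended.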
2349 | if self.args.last_range:
2350 | cookie_access_range1 = self.validate_date(format(self.args.last_range[0]))
2351 | try:
2352 | cookie_access_range2 = self.validate_date(format(self.args.last_range[1]))
2353 | except IndexError:
2354 | cookie_access_range2 = self.validate_date(format('9999-12-31'))
2355 | self.cookie_filters.append(["range","last",[cookie_access_range1,cookie_access_range2]])
2356 | if self.args.create_range:
2357 | cookie_create_range1 = self.validate_date(format(self.args.create_range[0]))
2358 | try:
2359 | cookie_create_range2 = self.validate_date(format(self.args.create_range[1]))
2360 | except IndexError:
2361 | cookie_create_range2 = self.validate_date(format('9999-12-31'))
2362 | self.cookie_filters.append(["range","creat",[cookie_create_range1,cookie_create_range2]])
2363 |
2364 |
2365 | if self.args.is_permissions_ok:
2366 | if self.args.host:
2367 | permissions_host = format(self.args.host[0])
2368 | self.permissions_filters.append(["string","host",permissions_host])
2369 | if self.args.type:
2370 | permissions_type = format(self.args.type[0])
2371 | self.permissions_filters.append(["string","type",permissions_type])
2372 | if self.args.modif:
2373 | permissions_modif_date = self.validate_date(format(self.args.modif[0]))
2374 | self.permissions_filters.append(["date","modif",permissions_modif_date])
2375 | if self.args.modif_range:
2376 | permissions_modif_range1 = self.validate_date(format(self.args.modif_range[0]))
2377 | try:
2378 | permissions_modif_range2 = self.validate_date(format(self.args.modif_range[1]))
2379 | except IndexError:
2380 | permissions_modif_range2 = self.validate_date(format('9999-12-31'))
2381 | self.permissions_filters.append(["range","modif",[permissions_modif_range1,permissions_modif_range2]])
2382 |
2383 |
2384 | if self.args.is_downloads_ok:
2385 | if self.args.range:
2386 | downloads_range1 = self.validate_date(format(self.args.range[0]))
2387 | try:
2388 | downloads_range2 = self.validate_date(format(self.args.range[1]))
2389 | except IndexError:
2390 | downloads_range2 = self.validate_date(format('9999-12-31'))
2391 | self.downloads_filters.append(["range","start",[downloads_range1,downloads_range2]])
2392 | self.downloads_history_filters.append(["range","modified",[downloads_range1,downloads_range2]])
2393 |
2394 |
2395 | if self.args.is_forms_ok:
2396 | if self.args.value:
2397 | forms_value = format(self.args.value[0])
2398 | self.forms_filters.append(["string","value",forms_value])
2399 | if self.args.forms_range:
2400 | forms_range1 = self.validate_date(format(self.args.forms_range[0]))
2401 | try:
2402 | forms_range2 = self.validate_date(format(self.args.forms_range[1]))
2403 | except IndexError:
2404 | forms_range2 = self.validate_date(format('9999-12-31'))
2405 | self.forms_filters.append(["range","last",[forms_range1,forms_range2]])
2406 |
2407 |
2408 | if self.args.is_history_ok:
2409 | if self.args.url:
2410 | history_url = format(self.args.url[0])
2411 | self.history_filters.append(["string","url",history_url])
2412 | if self.args.title:
2413 | history_title = format(self.args.title[0])
2414 | self.history_filters.append(["string","title",history_title])
2415 | if self.args.date:
2416 | history_date = self.validate_date(format(self.args.date[0]))
2417 | self.history_filters.append(["date","last",history_date])
2418 | if self.args.history_range:
2419 | history_range1 = self.validate_date(format(self.args.history_range[0]))
2420 | try:
2421 | history_range2 = self.validate_date(format(self.args.history_range[1]))
2422 | except IndexError:
2423 | history_range2 = self.validate_date(format('9999-12-31'))
2424 | self.history_filters.append(["range","last",[history_range1,history_range2]])
2425 |
2426 |
2427 | if self.args.is_bookmarks_ok:
2428 | if self.args.bm_last_range:
2429 | bm_last_range1 = self.validate_date(format(self.args.bm_last_range[0]))
2430 | try:
2431 | bm_last_range2 = self.validate_date(format(self.args.bm_last_range[1]))
2432 | except IndexError:
2433 | bm_last_range2 = self.validate_date(format('9999-12-31'))
2434 | self.bookmarks_filters.append(["range","last",[bm_last_range1,bm_last_range2]])
2435 | if self.args.bm_create_range:
2436 | bm_create_range1 = self.validate_date(format(self.args.bm_create_range[0]))
2437 | try:
2438 | bm_create_range2 = self.validate_date(format(self.args.bm_create_range[1]))
2439 | except IndexError:
2440 | bm_create_range2 = self.validate_date(format('9999-12-31'))
2441 | self.bookmarks_filters.append(["range","create_date",[bm_create_range1,bm_create_range2]])
2442 |
2443 |
2444 | if self.args.is_cacheoff_ok:
2445 | if self.args.cache_range:
2446 | cacheoff_range1 = self.validate_date(format(self.args.cache_range[0]))
2447 | try:
2448 | cacheoff_range2 = self.validate_date(format(self.args.cache_range[1]))
2449 | except IndexError:
2450 | cacheoff_range2 = self.validate_date(format('9999-12-31'))
2451 | self.cacheoff_filters.append(["range","last",[cacheoff_range1,cacheoff_range2]])
2452 | if self.args.extract:
2453 | self.is_cacheoff_extract_ok = True
2454 | cacheoff_directory = format(self.args.extract[0])
2455 |
2456 | if self.args.is_keypinning_ok:
2457 | if self.args.entry_type:
2458 | keypinning_type = format(self.args.entry_type[0])
2459 | self.keypinning_filters.append(["string","type",keypinning_type])
2460 |
2461 | if self.args.is_thump_ok:
2462 | if self.args.extract_thumb:
2463 | thumb_directory = format(self.args.extract_thumb[0])
2464 | else:
2465 | thumb_directory = None
2466 |
2467 | if self.args.Verbosity:
2468 | level = self.args.Verbosity[0]
2469 | self.verbosity_level = level
2470 | if level == 'DEBUG':
2471 | self.logger.setLevel(logging.DEBUG)
2472 | elif level == 'INFO':
2473 | self.logger.setLevel(logging.INFO)
2474 | elif level == 'WARNING':
2475 | self.logger.setLevel(logging.WARNING)
2476 | elif level == 'ERROR':
2477 | self.logger.setLevel(logging.ERROR)
2478 | elif level == 'CRITICAL':
2479 | self.logger.setLevel(logging.CRITICAL)
2480 | else:
2481 | self.verbosity_level = 'WARNING'
2482 |
2483 | if self.args.is_watch_ok:
2484 | if self.args.text:
2485 | self.watch_text = format(self.args.text[0])
2486 |
2487 |
2488 | if len(vars(self.args)) == 0:
2489 | self.show_help()
2490 | sys.exit()
2491 |
2492 | ###############
2493 | ### ACTIONS
2494 | ###############
2495 | self.show_info_header(dir)
2496 |
2497 | if self.args.is_regexp_ok:
2498 | self.query_str_f = "REGEXP"
2499 | self.query_str_a = ""
2500 | self.log("INFO", "Using Regular Expression mode for string type filters")
2501 | else:
2502 | self.query_str_f = "like"
2503 | self.query_str_a = "escape '\\'"
2504 |
2505 | ### TODO: Find another way to make it work without anyexec var
2506 | anyexec = False
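     | # anyexec records whether any specific module ran; if none did, the fallback below
     | # calls All_execute() when only the profile dir (or --Summary/--Export) was given.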
2507 | if self.args.is_cookie_ok:
2508 | self.show_cookies(dir)
2509 | anyexec = True
2510 | if self.args.is_permissions_ok:
2511 | self.show_permissions(dir)
2512 | anyexec = True
2513 | if self.args.is_preferences_ok:
2514 | self.show_preferences(dir)
2515 | anyexec = True
2516 | if self.args.is_addon_ok:
2517 | self.show_addons(dir)
2518 | self.show_extensions(dir)
2519 | self.show_info_addons(dir)
2520 | if self.args.is_search_ok:
2521 | self.show_search_engines(dir)
2522 | anyexec = True
2523 | if self.args.is_downloads_ok:
2524 | self.show_downloads(dir)
2525 | self.show_downloads_history(dir)
2526 | self.show_downloadsdir(dir)
2527 | anyexec = True
2528 | if self.args.is_forms_ok:
2529 | self.show_forms(dir)
2530 | anyexec = True
2531 | if self.args.is_history_ok:
2532 | self.show_history(dir)
2533 | anyexec = True
2534 | if self.args.is_bookmarks_ok:
2535 | self.show_bookmarks(dir)
2536 | anyexec = True
2537 | if self.args.is_passwords_ok:
2538 | self.show_passwords(dir)
2539 | anyexec = True
2540 | if self.args.is_cacheoff_ok:
2541 | self.show_cache(dir)
2542 | anyexec = True
2543 | if self.args.is_keypinning_ok:
2544 | self.show_key_pinning(dir)
2545 | anyexec = True
2546 | if self.args.is_cacheoff_ok and self.is_cacheoff_extract_ok:
2547 | self.show_cache_extract(dir, cacheoff_directory)
2548 | anyexec = True
2549 | if self.args.is_cert_ok:
2550 | self.show_cert_override(dir)
2551 | anyexec = True
2552 | if self.args.is_thump_ok:
2553 | self.show_thumbnails(dir, thumb_directory)
2554 | anyexec = True
2555 | if self.args.is_session_ok:
2556 | self.show_session(dir)
2557 | anyexec = True
2558 | if self.args.is_live_ok:
2559 | self.extract_data_session_watch(dir)
2560 | anyexec = True
2561 | if self.args.is_watch_ok:
2562 | self.show_watch(dir,self.watch_text)
2563 | anyexec = True
2564 | if not anyexec:
2565 | if (len(argv) == 2) or (len(argv) > 2 and (self.args.is_summary_ok or self.args.Export)):
2566 | self.All_execute(dir)
2567 |
2568 | ###############
2569 | ### SUMMARY
2570 | ###############
2571 | if not self.args.is_live_ok:
2572 |
2573 | ### HEADERS
2574 | titles = {
2575 | "decode" : "Decode Passwords ",
2576 | "passwords" : "Passwords ",
2577 | "exceptions" : "Exceptions/Passwords ",
2578 | "cookies" : "Cookies ",
2579 | "dom" : "DOM Storage ",
2580 | "permissions" : "Permissions ",
2581 | "preferences" : "Preferences ",
2582 | "addons" : "Addons ",
2583 | "addinfo" : "Addons (URLS/PATHS) ",
2584 | "extensions" : "Extensions ",
2585 | "engines" : "Search Engines ",
2586 | "downloads" : "Downloads ",
2587 | "downloads_history" : "Downloads history ",
2588 | "downloads_dir" : "Directories ",
2589 | "forms" : "Forms ",
2590 | "history" : "History ",
2591 | "bookmarks" : "Bookmarks ",
2592 | "keypinning" : "Public Key Pinning ",
2593 | "offlinecache" : "OfflineCache Html5 ",
2594 | "offlinecache_extract": "OfflineCache Extract ",
2595 | "thumbnails" : "Thumbnails images ",
2596 | "cert_override" : "Cert override ",
2597 | "session" : "Sessions "
2598 | }
2599 | extraction_id = os.path.basename(dir) + '.' + time.strftime("%Y%m%d%H%M%S")
2600 | export_folder = None
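     | # --Export writes one <header>.json file per data category under
     | # <export_dir>/<profile_basename>.<YYYYmmddHHMMSS>/ and records SHA-256
     | # hashes of the source files via export_sha256().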
2601 |
2602 | if self.args.Export:
2603 | export_folder = self.args.Export[0] + '/' + extraction_id + '/'
2604 | self.log("INFO","Output folder: "+ self.args.Export[0])
2605 | if not os.path.exists(export_folder):
2606 | self.log("INFO","Creating folder: " + export_folder)
2607 | try:
2608 | makedirs(export_folder)
2609 | except OSError:
2610 | self.log('CRITICAL', 'Can\'t create folder: ' + export_folder)
2611 | sys.exit(2)
2612 |
2613 | self.log('DEBUG', 'total_extraction length: ' + str(len(self.total_extraction.keys())))
2614 | info_headers = sorted(self.total_extraction.keys())
2615 | summary = {}
2616 | for header in info_headers:
2617 | self.log('DEBUG', 'header: ' + header)
2618 | sources = self.total_extraction[header].keys()
2619 |
2620 | if self.args.Export:
2621 | outputFilename = header + '.json'
2622 | for source in sources:
2623 | self.log("INFO","Saving " + os.path.basename(source) + " data to "+ outputFilename)
2624 | with open(export_folder + outputFilename, 'w') as fp:
2625 | json.dump(self.total_extraction[header], fp)
2626 | self.export_sha256(export_folder, header, sources)
2627 |
2628 | for source in sources:
2629 |
2630 | # INFO HEADER BY SOURCE
2631 | if not self.args.is_summary_ok and not self.args.Export:
2632 | if path.isfile(source):
2633 | self.show_title(titles[header], source)
2634 | else:
2635 | self.show_title(titles[header])
2636 |
2637 | if header in summary.keys():
2638 | summary[header] = summary[header] + len(self.total_extraction[header][source])
2639 | else:
2640 | summary[header] = len(self.total_extraction[header][source])
2641 |
2642 | if not self.args.Export and not self.args.is_summary_ok:
2643 | if summary[header] > 0:
2644 | for i in self.total_extraction[header][source]:
2645 | tags = sorted(i.keys())
2646 | for tag in tags:
2647 | if i[tag]:
2648 | try:
2649 | print(tag.split('-',1)[1] + ": " + str(i[tag]))
2650 | except UnicodeEncodeError:
2651 | print(tag.split('-',1)[1] + ": " + str(i[tag].encode('utf8')))
2652 | else:
2653 | print(tag.split('-',1)[1] + ": ")
2654 | print("")
2655 | else:
2656 | print("No data found!")
2657 | summary[header] = 0
2658 | self.log("DEBUG", "summary length: " + str(len(summary.keys())))
2659 | info_headers = sorted(summary.keys())
2660 |
2661 | if len(info_headers) == 0 and len(argv) == 2:
2662 | self.show_title("Total Information")
2663 | print("No data found!")
2664 | elif len(info_headers) == 0 and len(argv) < 2:
2665 | self.log("CRITICAL","Missing argument!")
2666 | if self.args.Export and export_folder:
2667 | os.rmdir(export_folder)
2668 | self.show_help()
2669 | else:
2670 | self.show_title("Total Information")
2671 | if len(info_headers) == 0:
2672 | print("No data found!")
2673 | else:
2674 | for header in info_headers:
2675 | print("Total " + titles[header] + ": " + str(summary[header]))
2676 | print("")
2677 | else:
2678 | self.log("CRITICAL","Failed to read profile directory: " + dir)
2679 | self.show_help()
2680 | sys.exit()
2681 |
2682 | if __name__ == '__main__':
2683 | app = Dumpzilla(sys.argv)
2684 |
2685 | # Site: www.dumpzilla.org
2686 | # Authors: Busindre ( busilezas[@]gmail.com )
2687 | # OsamaNehme ( onehdev[@]gmail.com )
2688 |
--------------------------------------------------------------------------------