├── README.md
├── bin
├── evloginout.py
├── lol.py
├── scm_timeline.py
└── vtmon.py
└── docs
├── ext_magic_bytes.md
├── mounting_ntfs.md
├── randomideas.txt
└── vma.md
/README.md:
--------------------------------------------------------------------------------
1 | # forensic-tools
2 | CIRCL system forensic tools: a collection of small tools to support forensic investigations
3 |
--------------------------------------------------------------------------------
/bin/evloginout.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python
2 | #
3 | # evloginout.py Create a timeline of user logins and logouts based on event logs
4 | #
5 | # Copyright (C) 2015 Gerard Wagener
6 | #
7 | # This program is free software: you can redistribute it and/or modify
8 | # it under the terms of the GNU Affero General Public License as published by
9 | # the Free Software Foundation, either version 3 of the License, or
10 | # (at your option) any later version.
11 | #
12 | # This program is distributed in the hope that it will be useful,
13 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
14 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 | # GNU Affero General Public License for more details.
16 | #
17 | # You should have received a copy of the GNU Affero General Public License
18 | # along with this program. If not, see <https://www.gnu.org/licenses/>.
19 |
20 | #Ideas
21 | #Indent session ids to better see logins and logouts
22 | #Map color with a session id
23 | #Compute login and logout durations
24 |
25 | import xml.sax
26 | import sys
27 | import argparse
28 | import textwrap
29 | from unidecode import unidecode
30 |
31 | class EvtxHandler(xml.sax.ContentHandler):
32 | def __init__(self):
33 | self.currentdate = ""
34 | self.eventid= -1
35 | self.inEventID = False
36 | self.buf = []
37 | self.inData = False
38 | self.ubuf = []
39 | self.inTargetUserName = False
40 | self.targetUserName = ""
41 |
42 | self.inTargetLogonId = False
43 | self.lbuf = []
44 | self.targetLogonId = ""
45 |
46 | self.inLogonType = False
47 | self.tbuf = []
48 | self.logonType = -1
49 | #Eventlogs are stored in this array
50 | self.timeline = []
51 | self.counter = 0
52 |
53 | def startElement(self, name, attrs):
54 | if name == "Event":
55 | #A new event is there reset all old states
56 | self.buf = []
57 | self.eventid = -1
58 | self.currentdate = ""
59 | self.inData = False
60 | self.inTargetUserName = False
61 | self.ubuf = []
62 | self.targetUserName = ""
63 | self.inTargetLogonId = False
64 | self.lbuf = []
65 | self.logonType = -1
66 | self.inLogonType = False
67 | self.targetLogonId = ""
68 |
69 | if name == "EventID":
70 | self.inEventID = True
71 |
72 | if name == "TimeCreated":
73 | for (k,v) in attrs.items():
74 |                 #FIXME always take the last one
75 | self.currentdate = v
76 |
77 | if name == "Data":
78 | for (k,v) in attrs.items():
79 | if v=="TargetUserName":
80 | self.inTargetUserName = True
81 | if v == "TargetLogonId":
82 | self.inTargetLogonId = True
83 | if v == "LogonType":
84 | self.inLogonType = True
85 |
86 | def characters(self,content):
87 | if self.inEventID:
88 | self.buf.append(unidecode(content))
89 | if self.inTargetUserName:
90 | self.ubuf.append(unidecode(content))
91 | if self.inTargetLogonId:
92 | self.lbuf.append(unidecode(content))
93 | if self.inLogonType:
94 | self.tbuf.append(unidecode(content))
95 |
96 | def endElement(self,name):
97 | if name == "EventID":
98 | self.inEventID = False
99 | a = ''.join(self.buf)
100 | #Recover event id
101 | self.eventid = int(a)
102 | #Reset buffer
103 | self.buf = []
104 |
105 | if name == "Data":
106 | if self.inTargetUserName:
107 | self.targetUserName = ''.join(self.ubuf)
108 | self.ubuf = []
109 | self.inTargetUserName = False
110 | if self.inTargetLogonId:
111 | self.targetLogonId = ''.join(self.lbuf)
112 | self.lbuf = []
113 | self.inTargetLogonId = False
114 | if self.inLogonType:
115 | self.logonType = int(''.join(self.tbuf))
116 | self.tbuf = []
117 | self.inLogonType = False
118 |
119 | if name == "Event":
120 | logon = self.eventid
121 | if self.eventid == 4624:
122 | logon = "Success"
123 | if self.eventid == 4625:
124 | logon = "Failed"
125 | if self.eventid == 4634:
126 | logon = "Log off"
127 |
128 | if self.currentdate != "" and self.targetUserName != "" and self.targetLogonId != "" and self.eventid >=0 and self.logonType >=0:
129 | self.counter = self.counter + 1
130 |                 self.timeline.append({"date":self.currentdate, "targetUserName": self.targetUserName, "eventid":self.eventid, "targetLogonId":self.targetLogonId, "logonType":self.logonType})
131 | print self.counter, "&", self.currentdate, "&", self.targetUserName, "&",logon,"&",self.targetLogonId, "&",self.logonType, "\\\\"
132 |
133 | cli = argparse.ArgumentParser(description='Create a timeline of logins and logouts based on System event files',
134 | epilog=textwrap.dedent('''
135 | DESCRIPTION
136 |     Takes the output of evtxdump.pl and creates a LaTeX table of logins and logouts.
137 | The output is written on standard output.
138 | '''))
139 |
140 | cli.add_argument('--filename', type=str, nargs=1, required=True,
141 |     help="Filename created by evtxdump.pl. Use - to read from standard input")
142 |
143 | args = cli.parse_args()
144 | parser = xml.sax.make_parser()
145 | obj = EvtxHandler()
146 | parser.setContentHandler(obj)
147 | #By default stdin is used
148 | f=sys.stdin
149 | if args.filename[0] != '-':
150 | f = open(args.filename[0],"r")
151 | parser.parse(f)
152 |
--------------------------------------------------------------------------------
/bin/lol.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | #
3 | # lol.py - Leak Origin Locator
4 | # Analyse leaked credentials to identify the compromised website.
5 | #
6 | # Copyright (C) 2018 Michael Hamm
7 | #
8 | # This program is free software: you can redistribute it and/or modify
9 | # it under the terms of the GNU Affero General Public License as published by
10 | # the Free Software Foundation, either version 3 of the License, or
11 | # (at your option) any later version.
12 | #
13 | # This program is distributed in the hope that it will be useful,
14 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
15 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 | # GNU Affero General Public License for more details.
17 | #
18 | # You should have received a copy of the GNU Affero General Public License
19 | # along with this program. If not, see <https://www.gnu.org/licenses/>.
20 |
21 |
22 |
23 | import argparse
24 | import os, sys, re, pprint, copy
25 |
26 |
27 |
28 | MINWORD = 4 # Ignore words with less than 4 characters
29 | numOfHits = 4 # How many passwords to show
30 |
31 | PWD = { 'FRENCH' : ['AZERTY', 'ANNIVERSAIRE', 'BRUXELLES', 'MOTDEPASSE', 'SOLEIL', 'LOULOU', 'DOUDOU',
32 | 'COUCOU', 'JETAIME', 'CHOUCHOU', 'CHOCOLAT'],
33 | 'GERMAN' : ['PASSWORT', 'QWERTZ'],
34 | 'ENGLISH' : ['123456', '1234567', '12345678', 'PASSWORD', 'CANABIS', 'SECRET', 'QWERTY',
35 | 'ABC123', 'MONKEY', 'ILOVEYOU', 'LETMEIN', 'TRUSTNO'],
36 | 'NAMES' : ['PIERRE', 'MANUEL', 'CAROLINE', 'LOUIS', 'MICHAEL', 'THOMAS', 'SANDRINE', 'NICOLAS']}
37 |
38 |
39 |
40 | def loadData(fileName):
41 | if not os.path.exists(fileName):
42 | print('File %s not found!' % (fileName))
43 | sys.exit()
44 | with open(fileName) as f:
45 | data = f.read()
46 | f.close()
47 | return(data)
48 |
49 |
50 |
51 | def extractPwd(data,noMostUsed=True):
52 | # Extract passwords, email addresses out of data
53 | # 1. Delete duplicate lines
54 | # 2. Ignore lines without email addresses (@)
55 | # 3. Sanitize words which are no email address
56 | # 4. Do (not) remove most common passwords
57 | lines = []
58 | passwords = []
59 | emails = []
60 | wordAdding = True
61 |
62 | for line in data.splitlines():
63 | if line in lines:
64 | continue
65 | else:
66 | lines.append(line)
67 | if '@' not in line:
68 | continue
69 |         for words in re.split(r'[\s|:,;]', line):
70 | if '@' not in words:
71 |                 word = re.compile(r'[^A-Z\s]').sub('', words.upper())
72 | wordAdding = True
73 |
74 | for key in PWD:
75 | if word in PWD[key] and noMostUsed == True:
76 | wordAdding = False
77 | break
78 |
79 |                     # Also ignore words that contain a most used password as substring
80 | for pwd in PWD[key]:
81 | if pwd in word and noMostUsed == True:
82 | wordAdding = False
83 | break
84 |
85 | if len(word) >= MINWORD and wordAdding == True:
86 | passwords.append(word)
87 | else:
88 | emails.append(words)
89 | return({'passwords':passwords, 'emails':emails})
90 |
91 |
92 |
93 | def getWordCount(passwords):
94 | wordCount = {}
95 | for password in passwords:
96 | if len(password) >= MINWORD:
97 | if password not in wordCount:
98 | wordCount[password] = 1
99 | else:
100 | # Detected a hit
101 | # Longer words are more valuable
102 | wordCount[password] += len(password)
103 | return(wordCount)
104 |
105 |
106 |
107 | def getWordSorted(wordCount, numOfHits):
108 |     # Sort the passwords by number of occurrences
109 | wordGrouped = {}
110 |
111 | for word in wordCount:
112 | if wordCount[word] not in wordGrouped:
113 | wordGrouped[wordCount[word]] = [word]
114 | else:
115 | wordGrouped[wordCount[word]].append(word)
116 |
117 | wordSorted = list(wordGrouped.items())
118 | wordSorted.sort(reverse=True)
119 | return(wordSorted[:numOfHits])
120 |
121 |
122 |
123 | def getCorrelateEmailPwd(emails, wordSorted):
124 | domains = {}
125 | localParts = {}
126 |
127 | for words in wordSorted:
128 | for word in words[1]:
129 | for email in emails:
130 | domain = email.split('@')[1]
131 | localPart = email.split('@')[0]
132 | if word in domain.upper():
133 | if domain.lower() not in domains:
134 | domains[domain.lower()] = 1
135 | else:
136 | domains[domain.lower()] += 1
137 | if word in localPart.upper():
138 | if localPart.lower() not in localParts:
139 | localParts[localPart.lower()] = 1
140 | else:
141 | localParts[localPart.lower()] += 1
142 |
143 | domainsSorted = getWordSorted(domains, 4)
144 | localPartsSorted = getWordSorted(localParts, 2)
145 | return({'domains':domainsSorted, 'localParts':localPartsSorted})
146 |
147 |
148 |
149 | def main():
150 |     p = argparse.ArgumentParser(description='Leak Origin Locator: Identify the origin of a credentials breach with Password Frequency Analysis.')
151 | p.add_argument('-m', '--most', action='store_true',
152 | help='Do not remove [M]ost used passwords from the analysis')
153 | p.add_argument('filename', help='Specify file name to analyse.')
154 | args = p.parse_args()
155 | fileName = args.filename
156 |
157 | print("Loading %s ..." % (fileName))
158 | data = loadData(fileName)
159 | print('%s characters loaded.' % (len(data)))
160 |
161 | if args.most:
162 | extractData = extractPwd(data,False)
163 | else:
164 | extractData = extractPwd(data)
165 | passwords = extractData['passwords']
166 | emails = extractData['emails']
167 | print('%s passwords extracted.' % (len(passwords)))
168 |
169 | wordCount = getWordCount(passwords)
170 |     print('%s unique passwords found.' % (len(wordCount)))
171 |
172 | wordSorted = getWordSorted(wordCount, numOfHits)
173 | print('\nCalculate weighting for interesting passwords:')
174 | pprint.pprint(wordSorted)
175 |
176 | print('\nCorrelating email domain names')
177 | correlateEmailPwd = getCorrelateEmailPwd(emails, wordSorted)
178 | pprint.pprint(correlateEmailPwd['domains'])
179 | print('\nCorrelating email local parts')
180 | pprint.pprint(correlateEmailPwd['localParts'])
181 |
182 | print('\nAnalysis completed.....\n\n')
183 |
184 |
185 | if __name__ == "__main__":
186 | main()
187 |
188 |
--------------------------------------------------------------------------------
/bin/scm_timeline.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python
2 | # scm-timeline Create a timeline of Windows services being started and stopped
3 | #
4 | # Copyright (C) 2015 Gerard Wagener
5 | #
6 | # This program is free software: you can redistribute it and/or modify
7 | # it under the terms of the GNU Affero General Public License as published by
8 | # the Free Software Foundation, either version 3 of the License, or
9 | # (at your option) any later version.
10 | #
11 | # This program is distributed in the hope that it will be useful,
12 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 | # GNU Affero General Public License for more details.
15 | #
16 | # You should have received a copy of the GNU Affero General Public License
17 | # along with this program. If not, see <https://www.gnu.org/licenses/>.
18 |
19 |
20 | import xml.sax
21 | import sys
22 | from unidecode import unidecode
23 | import argparse
24 | import sys
25 |
26 | class Parser(xml.sax.ContentHandler):
27 | def __init__(self):
28 | self.currentdate = ""
29 | self.isSCM = False
30 | self.msg = []
31 | self.hasdata = False
32 |
33 | def startElement(self, name, attrs):
34 | if name == "TimeCreated":
35 | for (k,v) in attrs.items():
36 | self.currentdate = v
37 | if name == "Provider":
38 | for (k,v) in attrs.items():
39 | if k.startswith("EventSourceName"):
40 | if v.startswith("Service Control Manager"):
41 | self.isSCM = True
42 | if name == "Data":
43 | for (k,v) in attrs.items():
44 | self.hasdata = True
45 |
46 | def characters(self,content):
47 | if self.isSCM == True and self.hasdata == True:
48 | if len(content) > 1:
49 | self.msg.append(unidecode(content))
50 | self.hasdata = True
51 |
52 | def endElement(self,name):
53 | if name == "Event":
54 | if len(self.msg) > 1:
55 | print self.currentdate,"|"," ".join(self.msg)
56 | #Reset states
57 | self.currentdate = ""
58 | self.isSCM = False
59 | self.msg = []
60 | self.hasdata = False
61 | if name == "Data":
62 | self.hasdata = False
63 |
64 | cli = argparse.ArgumentParser(description='Take the XML output of evtxdump.pl applied to the System event file and create a timeline of when services were started / stopped.')
65 | cli.add_argument('--filename', type=str, nargs=1, help='Filename that should be processed')
66 |
67 | args = cli.parse_args()
68 |
69 | if args.filename is None:
70 |     sys.stderr.write("An XML export done by evtxdump.pl of System.evtx must be specified\n")
71 | sys.exit(1)
72 |
73 | parser = xml.sax.make_parser()
74 | parser.setContentHandler(Parser())
75 | parser.parse(open(args.filename[0],"r"))
76 |
--------------------------------------------------------------------------------
/bin/vtmon.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python
2 | #
3 | # vtmon - Monitor a set of hashes to measure AV detection time
4 | #
5 | # Copyright (C) 2015 Gerard Wagener
6 | #
7 | # This program is free software: you can redistribute it and/or modify
8 | # it under the terms of the GNU Affero General Public License as published by
9 | # the Free Software Foundation, either version 3 of the License, or
10 | # (at your option) any later version.
11 | #
12 | # This program is distributed in the hope that it will be useful,
13 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
14 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 | # GNU Affero General Public License for more details.
16 | #
17 | # You should have received a copy of the GNU Affero General Public License
18 | # along with this program. If not, see <https://www.gnu.org/licenses/>.
19 |
20 |
21 | import simplejson
22 | import urllib
23 | import urllib2
24 | import pprint
25 | import argparse
26 | import textwrap
27 | import os
28 | import time
29 | import syslog
30 |
31 | url = "https://www.virustotal.com/vtapi/v2/file/report"
32 | delay = 1
33 |
34 |
35 | cli = argparse.ArgumentParser(formatter_class=argparse.RawDescriptionHelpFormatter, description='Monitor a set of hashes at Virustotal',
36 | epilog=textwrap.dedent('''
37 |
38 | DESCRIPTION
39 |
40 |     Read all hashes from the file specified by --filename and look up
41 |     each hash at Virustotal. The raw JSON documents are stored in the
42 |     directory specified by --results. The API key for accessing the
43 |     Virustotal API must be specified with --key. Errors are sent to syslog.
44 | '''))
45 | cli.add_argument('--filename', type=str, nargs=1, required=True,
46 | help='Filename with hashes')
47 | cli.add_argument('--key', type=str, nargs=1, required=True,
48 | help='API key for accessing Virustotal')
49 | cli.add_argument('--results', type=str, nargs=1, required=True,
50 | help = 'Directory where the raw json documents should be stored')
51 |
52 | args = cli.parse_args()
53 |
54 | #Format to store results
55 | #result_dir/hash/year/month/day
56 | try:
57 | key = args.key[0]
58 | key = key.replace('\n', '')
59 | f = open(args.filename[0],'r')
60 | for line in f.readlines():
61 | line = line.replace("\n","")
62 | ts = time.strftime("/%Y/%m/%d")
63 | res = args.results[0] + "/" + line + ts
64 |
65 | if os.path.exists(res) == False:
66 | os.makedirs(res)
67 |
68 | parameters = {"resource": line,
69 | "apikey": key}
70 |
71 | fn = res + '/' + time.strftime("%Y%m%d%H%M%S")+".json"
72 | if os.path.exists(fn) == False:
73 | data = urllib.urlencode(parameters)
74 | req = urllib2.Request(url, data)
75 | response = urllib2.urlopen(req)
76 | json = response.read()
77 | g = open(fn,"w")
78 | g.write(json)
79 | g.close()
80 | else:
81 |             syslog.syslog("Result file already exists, not querying again.")
82 | time.sleep(delay)
83 | f.close()
84 | except OSError,e:
85 | syslog.syslog(str(e))
86 | except IOError,e:
87 | syslog.syslog(str(e))
88 |
89 |
--------------------------------------------------------------------------------
/docs/ext_magic_bytes.md:
--------------------------------------------------------------------------------
1 | Carving for ext3/ext4 directories
2 | =================================
3 |
4 |
5 |
6 | ## Motivation
7 |
8 | The most common file system on Linux, ext3/ext4, is very thorough in deleting files and directories.
9 |
10 | In forensics it can be useful to carve for directories to recover deleted file and directory names.
11 |
12 |
13 |
14 | ## Magic Bytes
15 |
16 | Based on the on-disk data structure of a directory, we can derive the following sequence of bytes.
17 |
18 |
19 | ????\x0c\x00\x01\x02.\x00\x00\x00????\x0c\x00\x02\x02..\x00\x00
20 |
21 |
22 |
23 | It is common that the block size is 8 sectors, resulting in 4096 bytes. This leads to the
24 | configuration line quoted below, successfully tested with Foremost and Scalpel. Please verify
25 | the block size with a tool like fsstat and adapt the configuration if necessary.
26 |
27 |
28 | raw y 4096 ????\x0c\x00\x01\x02.\x00\x00\x00????\x0c\x00\x02\x02..\x00\x00
29 |
30 |
31 |
32 |
33 | ## Details
34 |
35 | How did I come to this sequence? The ext data structures are very well described in this excellent book [1].
36 | The 1st entry of a directory is named '.' and the 2nd entry is named '..'. All numbers are represented in
37 | little endian.
38 |
39 |
40 | 4 Bytes: The inode of this file unknown: ????
41 | 2 Bytes: The beginning of the 2nd entry: x0c x00 --> 12
42 | 1 Byte: The size of the file name: x01
43 | 1 Byte: The type of the file: x02 --> This is a directory
44 | 1 Byte: The file name (Size defined): .
45 | 3 Bytes: Padding to a 4 byte boundary: x00 x00 x00
46 |
47 | 4 Bytes: The inode of this file unknown: ????
48 | 2 Bytes: The beginning of the 3rd entry: x0c x00 --> 12, We ignore empty directories
49 | 1 Byte: The size of the entry name: x02
50 | 1 Byte: The type of the entry: x02 --> This is a directory
51 | 2 Bytes: The file name (Size defined): ..
52 | 2 Bytes: Padding to a 4 byte boundary: x00 x00
53 |
54 |
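The signature can also be searched for directly with a few lines of Python. The sketch below is not part of the toolset; it scans a raw image for the 24-byte pattern described above and prints the offsets of candidate directory entries (the default image path is just an example):

    #!/usr/bin/env python3
    # Minimal sketch: scan a raw image for the ext3/ext4 directory signature
    # described above and print candidate offsets.
    import re
    import sys

    # inode (any 4 bytes), rec_len=12, name_len=1, type=dir, ".", padding,
    # followed by the ".." entry with rec_len=12 and name_len=2.
    SIG = re.compile(rb'.{4}\x0c\x00\x01\x02\.\x00\x00\x00'
                     rb'.{4}\x0c\x00\x02\x02\.\.\x00\x00', re.DOTALL)

    CHUNK = 16 * 1024 * 1024   # read 16 MiB at a time
    OVERLAP = 23               # signature is 24 bytes; keep the tail so a
                               # match split across two chunks is not missed

    def scan(path):
        pos = 0                # file offset of buf[0]
        buf = b''
        with open(path, 'rb') as img:
            while True:
                chunk = img.read(CHUNK)
                if not chunk:
                    break
                buf += chunk
                for m in SIG.finditer(buf):
                    print('candidate directory entry at offset', pos + m.start())
                pos += len(buf) - OVERLAP
                buf = buf[-OVERLAP:]

    if __name__ == '__main__':
        scan(sys.argv[1] if len(sys.argv) > 1 else 'imagefile.dd')

Carving the 4096-byte block around each hit then recovers the remaining directory entries, which is essentially what the Foremost/Scalpel rule above does.
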
55 |
56 |
57 | ## References:
58 |
59 | [1] File System Forensic Analysis; Brian Carrier; Addison Wesley; 2005; ISBN-13: 978-0321268174;
60 |
61 |
62 |
63 |
--------------------------------------------------------------------------------
/docs/mounting_ntfs.md:
--------------------------------------------------------------------------------
1 | How to mount an NTFS partition for forensic investigations under Linux.
2 | ======================================================================
3 |
4 |
5 |
6 | # Partition table
7 |
8 | Read the partition table to identify the offset where the partition you would like to mount starts:
9 |
10 |
11 | $ fdisk -l imagefile.dd
12 |
13 | Units = sectors of 1 * 512 = 512 bytes
14 | Sector size (logical/physical): 512 bytes / 512 bytes
15 |
16 | Device Boot Start End Blocks Id System
17 | imagefile.dd 2048 3907029167 1953513560 7 HPFS/NTFS/exFAT
18 |
19 |
20 | The output of the fdisk command shows us that the partition starts at sector 2048. Each sector contains 512 bytes.
21 |
22 |
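If you prefer to script this step, the byte offset is simply the start sector multiplied by the sector size. A minimal Python sketch follows; the values are taken from the example output above, so adjust them for your image:

    #!/usr/bin/env python3
    # Minimal sketch: compute the mount offset in bytes from the fdisk output.
    SECTOR_SIZE = 512      # "Sector size (logical/physical): 512 bytes"
    START_SECTOR = 2048    # "Start" column of the partition to mount

    offset = SECTOR_SIZE * START_SECTOR
    print('offset in bytes:', offset)            # 1048576
    print('mount option   : offset=%d' % offset)
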
23 |
24 | # Mounting the partition
25 |
26 | Now we know the offset. In the example above the offset is at sector 2048.
27 | A sector has 512 bytes. We can now mount the partition. The syntax is: `sudo mount -o <options> <image> <mountpoint>`. You need root rights to mount, so use sudo.
28 |
29 | When mounting the image for a forensic investigation there are some useful options, which are explained below. The mount command should look like:
30 |
31 |
32 | $ sudo mount -o ro,noload,loop,noexec,offset=$((512*2048)),show_sys_files,streams_interface=windows imagefile.dd /mnt/
33 |
34 |
35 |
36 |
37 | ## Explanation of the options
38 |
39 | **ro**
40 | Open the image in read only mode to prevent modification of the evidence.
41 |
42 | **noload**
43 | Do not load the journal on mounting.
44 |
45 | **loop**
46 | Mount image as loopback device.
47 |
48 | **noexec**
49 | Prevent execution of binaries from the image to prevent infection of the forensic workstation.
50 |
51 | **offset**
52 | Offset where the partition starts, in bytes. Easy to let the shell calculate; just use the values from the fdisk output.
53 |
54 | **show_sys_files**
55 | Provides access to file system metadata. Files which are not accessible without this option are, for example (see the quick check after the list):
56 |
57 | - $AttrDef
58 | - $Boot
59 | - $BadClus
60 | - $Extend
61 | - $LogFile
62 | - $UpCase
63 | - $MFTMirr
64 | - $Secure
65 | - $Bitmap
66 | - $Volume
67 |
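A quick way to verify that the metadata files are visible is to list the entries starting with `$` in the root of the mount point. A minimal Python sketch, assuming `/mnt` as in the example mount command above:

    #!/usr/bin/env python3
    # Minimal sketch: list NTFS metadata files exposed by show_sys_files.
    import os

    MOUNTPOINT = '/mnt'    # adjust to your mount point
    for name in sorted(os.listdir(MOUNTPOINT)):
        if name.startswith('$'):
            print(name)
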
68 |
69 | **streams_interface=windows**
70 | Provides access to NTFS named data streams (alternate data streams). This way we can fetch information which was not accessible before, like:
71 |
72 | $ cat /Users/username/Downloads/Dropbox.exe:Zone.Identifier
73 | [ZoneTransfer]
74 | ZoneId=3
75 |
76 |
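Because the named stream is addressed by appending `:streamname` to the path, it can be read like a regular file from a script as well. The following Python sketch (the directory path is a hypothetical example) prints the ZoneId of every file that carries a Zone.Identifier stream; a ZoneId of 3 means the file was downloaded from the Internet zone:

    #!/usr/bin/env python3
    # Minimal sketch: collect Zone.Identifier streams from a directory on an
    # NTFS image mounted with streams_interface=windows. Paths are examples.
    import os

    DIRECTORY = '/mnt/Users/username/Downloads'   # adjust to your case

    for name in sorted(os.listdir(DIRECTORY)):
        path = os.path.join(DIRECTORY, name)
        if not os.path.isfile(path):
            continue
        try:
            # the named stream is addressed as "<file>:Zone.Identifier"
            with open(path + ':Zone.Identifier') as stream:
                content = stream.read()
        except OSError:
            continue                              # no Zone.Identifier stream
        for line in content.splitlines():
            if line.startswith('ZoneId='):
                print('%s: %s' % (name, line))
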
77 |
78 |
--------------------------------------------------------------------------------
/docs/randomideas.txt:
--------------------------------------------------------------------------------
1 | - Use data from Eventlogs Microsoft-Windows-Application-Experience/Program-Telemetry
2 |   to create a timeline of which programs were active during a given date with
3 |   their PIDs, which programs were restarted, etc.
4 |
--------------------------------------------------------------------------------
/docs/vma.md:
--------------------------------------------------------------------------------
1 | # VMA (proxmox) forensic analysis and extraction
2 |
3 | [VMA](https://pve.proxmox.com/wiki/VMA) is the backup image format used by Proxmox. If you want to extract such a file for forensic analysis, you have different options.
4 |
5 | ## Python script - vma-extractor
6 |
7 | [vma-extractor](https://github.com/jancc/vma-extractor) implements an extraction tool for the VMA backup format used by Proxmox. The tool is implemented in Python 3, doesn't require any additional dependencies, and only extracts the [format described by Proxmox](https://git.proxmox.com/?p=pve-qemu.git;a=blob_plain;f=vma_spec.txt;hb=refs/heads/master). This is the easiest option.
8 |
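Before extraction it can be worth checking that the file really is an uncompressed VMA archive. The sketch below tests for the 4-byte header magic; the magic value is my reading of the spec linked above, so treat it as an assumption and verify it against the spec. Note that Proxmox backups are often compressed (`.vma.lzo`, `.vma.gz`, `.vma.zst`) and have to be decompressed first.

```Python
#!/usr/bin/env python3
# Minimal sketch: check whether a file starts with the VMA header magic.
# Assumption: the magic is the 4 bytes 'V','M','A',0x00 as described in the
# VMA spec referenced above. The default file name is just an example.
import sys

VMA_MAGIC = b'VMA\x00'

def is_vma(path):
    with open(path, 'rb') as f:
        return f.read(4) == VMA_MAGIC

if __name__ == '__main__':
    path = sys.argv[1] if len(sys.argv) > 1 else 'vzdump-qemu-100.vma'
    if is_vma(path):
        print(path, 'looks like a VMA archive')
    else:
        print(path, 'does not look like an (uncompressed) VMA archive')
```
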
9 | ## Dockerfile using the standard vma toolset
10 |
11 | https://github.com/AenonDynamics/vma-backup-extractor
12 |
13 | ## Manually installing vma toolset
14 |
15 | - https://github.com/AenonDynamics/vma-backup-extractor/blob/master/Dockerfile
16 |
17 | ```Bash
18 | sudo apt-get update && sudo apt-get install -y gnupg wget lzop \
19 | libaio1 librbd1 glusterfs-common libiscsi-bin libcurl4-gnutls-dev \
20 | libjemalloc2 libglib2.0-0
21 |
22 | echo "deb http://download.proxmox.com/debian buster pve-no-subscription" | sudo tee -a /etc/apt/sources.list \
23 | && sudo wget http://download.proxmox.com/debian/proxmox-ve-release-6.x.gpg -O /etc/apt/trusted.gpg.d/proxmox.gpg \
24 | && sudo apt-get update \
25 | && sudo apt-get install -y libproxmox-backup-qemu0 \
26 | && apt-get download pve-qemu-kvm \
27 | && sudo dpkg -X ./pve-qemu-kvm_* . \
28 | && cp usr/bin/vma .
29 |
30 | sudo find / -type f -name "*libnettle*"
31 | sudo cp /snap/core/14447/usr/lib/x86_64-linux-gnu/libnettle.so.6.2 /usr/lib
32 | sudo ldconfig
33 | ```
34 |
--------------------------------------------------------------------------------